context
stringlengths
2.52k
185k
gt
stringclasses
1 value
using System;
using System.Globalization;
using System.Xml;
using System.Xml.XPath;

namespace Hydra.Framework.XmlSerialization.Exslt
{
    //
    //**********************************************************************
    /// <summary>
    /// This class implements additional functions in the
    /// "http://gotdotnet.com/exslt/dates-and-times" namespace.
    /// None of the functions here are part of EXSLT proper.
    /// </summary>
    //**********************************************************************
    //
    public class GDNDatesAndTimes : ExsltDatesAndTimes
    {
        #region date2:avg()
        //
        //**********************************************************************
        /// <summary>
        /// Implements the following function
        ///    string date2:avg(node-set)
        /// Averages a node-set of xs:duration values.
        /// See http://www.xmland.net/exslt/doc/GDNDatesAndTimes-avg.xml
        /// </summary>
        /// <remarks>THIS FUNCTION IS NOT PART OF EXSLT!!!</remarks>
        /// <returns>The average duration rendered via duration(), or the
        /// empty string when the node-set is empty or holds a malformed
        /// duration.</returns>
        //**********************************************************************
        //
        public string avg(XPathNodeIterator iterator)
        {
            int count = iterator.Count;
            if (count == 0)
            {
                return "";
            }

            TimeSpan total = TimeSpan.Zero;
            try
            {
                while (iterator.MoveNext())
                {
                    total = XmlConvert.ToTimeSpan(iterator.Current.Value).Add(total);
                }
            }
            catch (FormatException)
            {
                // A node that is not a valid xs:duration poisons the whole result.
                return "";
            }

            return duration(total.TotalSeconds / count);
        }
        #endregion

        #region date2:min()
        //
        //**********************************************************************
        /// <summary>
        /// Implements the following function
        ///    string date2:min(node-set)
        /// See http://www.xmland.net/exslt/doc/GDNDatesAndTimes-min.xml
        /// </summary>
        /// <remarks>THIS FUNCTION IS NOT PART OF EXSLT!!!</remarks>
        /// <returns>The smallest xs:duration in the node-set, or the empty
        /// string when the set is empty or a value is malformed.</returns>
        //**********************************************************************
        //
        public string min(XPathNodeIterator iterator)
        {
            if (iterator.Count == 0)
            {
                return "";
            }

            TimeSpan smallest;
            try
            {
                // Seed with the first node, then scan the remainder.
                iterator.MoveNext();
                smallest = XmlConvert.ToTimeSpan(iterator.Current.Value);
                while (iterator.MoveNext())
                {
                    TimeSpan candidate = XmlConvert.ToTimeSpan(iterator.Current.Value);
                    if (candidate < smallest)
                    {
                        smallest = candidate;
                    }
                }
            }
            catch (FormatException)
            {
                return "";
            }

            return XmlConvert.ToString(smallest);
        }
        #endregion

        #region date2:max()
        //
        //**********************************************************************
        /// <summary>
        /// Implements the following function
        ///    string date2:max(node-set)
        /// See http://www.xmland.net/exslt/doc/GDNDatesAndTimes-max.xml
        /// </summary>
        /// <remarks>THIS FUNCTION IS NOT PART OF EXSLT!!!</remarks>
        /// <returns>The largest xs:duration in the node-set, or the empty
        /// string when the set is empty or a value is malformed.</returns>
        //**********************************************************************
        //
        public string max(XPathNodeIterator iterator)
        {
            if (iterator.Count == 0)
            {
                return "";
            }

            TimeSpan largest;
            try
            {
                // Seed with the first node, then scan the remainder.
                iterator.MoveNext();
                largest = XmlConvert.ToTimeSpan(iterator.Current.Value);
                while (iterator.MoveNext())
                {
                    TimeSpan candidate = XmlConvert.ToTimeSpan(iterator.Current.Value);
                    if (candidate > largest)
                    {
                        largest = candidate;
                    }
                }
            }
            catch (FormatException)
            {
                return "";
            }

            return XmlConvert.ToString(largest);
        }
        #endregion

        #region date2:day-abbreviation()
        //
        //**********************************************************************
        /// <summary>
        /// This wrapper method will be renamed during custom build
        /// to provide conformant EXSLT function name.
        /// </summary>
        //**********************************************************************
        //
        public string dayAbbreviation_RENAME_ME(string d, string c)
        {
            return dayAbbreviation(d, c);
        }

        //
        //**********************************************************************
        /// <summary>
        /// This wrapper method will be renamed during custom build
        /// to provide conformant EXSLT function name.
        /// </summary>
        //**********************************************************************
        //
        public new string dayAbbreviation_RENAME_ME(string c)
        {
            return dayAbbreviation(c);
        }

        //
        //**********************************************************************
        /// <summary>
        /// Implements the following function
        ///    string date2:day-abbreviation(string)
        /// See http://www.xmland.net/exslt/doc/GDNDatesAndTimes-day-abbreviation.xml
        /// </summary>
        /// <remarks>THIS FUNCTION IS NOT PART OF EXSLT!!!</remarks>
        /// <returns>The abbreviated current day name according to
        /// specified culture or the empty string if the culture isn't
        /// supported.</returns>
        //**********************************************************************
        //
        public new string dayAbbreviation(string culture)
        {
            try
            {
                DateTimeFormatInfo format = new CultureInfo(culture).DateTimeFormat;
                return format.GetAbbreviatedDayName(DateTime.Now.DayOfWeek);
            }
            catch (Exception)
            {
                return "";
            }
        }

        //
        //**********************************************************************
        /// <summary>
        /// Implements the following function
        ///    string date2:day-abbreviation(string, string)
        /// See http://www.xmland.net/exslt/doc/GDNDatesAndTimes-day-abbreviation.xml
        /// </summary>
        /// <remarks>THIS FUNCTION IS NOT PART OF EXSLT!!!</remarks>
        /// <returns>The abbreviated day name of the specified date according to
        /// specified culture or the empty string if the input date is invalid or
        /// the culture isn't supported.</returns>
        //**********************************************************************
        //
        public string dayAbbreviation(string d, string culture)
        {
            try
            {
                DateTZ parsed = new DateTZ(d);
                DateTimeFormatInfo format = new CultureInfo(culture).DateTimeFormat;
                return format.GetAbbreviatedDayName(parsed.d.DayOfWeek);
            }
            catch (Exception)
            {
                return "";
            }
        }
        #endregion

        #region date2:day-name()
        //
        //**********************************************************************
        /// <summary>
        /// This wrapper method will be renamed during custom build
        /// to provide conformant EXSLT function name.
        /// </summary>
        //**********************************************************************
        //
        public string dayName_RENAME_ME(string d, string c)
        {
            return dayName(d, c);
        }

        //
        //**********************************************************************
        /// <summary>
        /// This wrapper method will be renamed during custom build
        /// to provide conformant EXSLT function name.
        /// </summary>
        //**********************************************************************
        //
        public new string dayName_RENAME_ME(string c)
        {
            return dayName(c);
        }

        //
        //**********************************************************************
        /// <summary>
        /// Implements the following function
        ///    string date2:day-name(string, string?)
        /// See http://www.xmland.net/exslt/doc/GDNDatesAndTimes-day-name.xml
        /// </summary>
        /// <remarks>THIS FUNCTION IS NOT PART OF EXSLT!!!</remarks>
        /// <returns>The day name of the specified date according to
        /// specified culture or the empty string if the input date is invalid or
        /// the culture isn't supported.</returns>
        //**********************************************************************
        //
        public string dayName(string d, string culture)
        {
            try
            {
                DateTZ parsed = new DateTZ(d);
                DateTimeFormatInfo format = new CultureInfo(culture).DateTimeFormat;
                return format.GetDayName(parsed.d.DayOfWeek);
            }
            catch (Exception)
            {
                return "";
            }
        }

        //
        //**********************************************************************
        /// <summary>
        /// Implements the following function
        ///    string date2:day-name(string, string?)
        /// See http://www.xmland.net/exslt/doc/GDNDatesAndTimes-day-name.xml
        /// </summary>
        /// <remarks>THIS FUNCTION IS NOT PART OF EXSLT!!!</remarks>
        /// <returns>The day name of the current date according to
        /// specified culture or the empty string if
        /// the culture isn't supported.</returns>
        //**********************************************************************
        //
        public new string dayName(string culture)
        {
            try
            {
                DateTimeFormatInfo format = new CultureInfo(culture).DateTimeFormat;
                return format.GetDayName(DateTime.Now.DayOfWeek);
            }
            catch (Exception)
            {
                return "";
            }
        }
        #endregion

        #region date2:month-abbreviation()
        //
        //**********************************************************************
        /// <summary>
        /// This wrapper method will be renamed during custom build
        /// to provide conformant EXSLT function name.
        /// </summary>
        //**********************************************************************
        //
        public string monthAbbreviation_RENAME_ME(string d, string c)
        {
            return monthAbbreviation(d, c);
        }

        //
        //**********************************************************************
        /// <summary>
        /// This wrapper method will be renamed during custom build
        /// to provide conformant EXSLT function name.
        /// </summary>
        //**********************************************************************
        //
        public new string monthAbbreviation_RENAME_ME(string c)
        {
            return monthAbbreviation(c);
        }

        //
        //**********************************************************************
        /// <summary>
        /// Implements the following function
        ///    string date2:month-abbreviation(string)
        /// See http://www.xmland.net/exslt/doc/GDNDatesAndTimes-month-abbreviation.xml
        /// </summary>
        /// <remarks>THIS FUNCTION IS NOT PART OF EXSLT!!!</remarks>
        /// <returns>The abbreviated current month name according to
        /// specified culture or the empty string if the culture isn't
        /// supported.</returns>
        //**********************************************************************
        //
        public new string monthAbbreviation(string culture)
        {
            try
            {
                DateTimeFormatInfo format = new CultureInfo(culture).DateTimeFormat;
                return format.GetAbbreviatedMonthName(DateTime.Now.Month);
            }
            catch (Exception)
            {
                return "";
            }
        }

        //
        //**********************************************************************
        /// <summary>
        /// Implements the following function
        ///    string date2:month-abbreviation(string, string)
        /// See http://www.xmland.net/exslt/doc/GDNDatesAndTimes-month-abbreviation.xml
        /// </summary>
        /// <remarks>THIS FUNCTION IS NOT PART OF EXSLT!!!</remarks>
        /// <returns>The abbreviated month name of the specified date according to
        /// specified culture or the empty string if the input date is invalid or
        /// the culture isn't supported.</returns>
        //**********************************************************************
        //
        public string monthAbbreviation(string d, string culture)
        {
            try
            {
                DateTZ parsed = new DateTZ(d);
                DateTimeFormatInfo format = new CultureInfo(culture).DateTimeFormat;
                return format.GetAbbreviatedMonthName(parsed.d.Month);
            }
            catch (Exception)
            {
                return "";
            }
        }
        #endregion

        #region date2:month-name()
        //
        //**********************************************************************
        /// <summary>
        /// This wrapper method will be renamed during custom build
        /// to provide conformant EXSLT function name.
        /// </summary>
        //**********************************************************************
        //
        public string monthName_RENAME_ME(string d, string c)
        {
            return monthName(d, c);
        }

        //
        //**********************************************************************
        /// <summary>
        /// This wrapper method will be renamed during custom build
        /// to provide conformant EXSLT function name.
        /// </summary>
        //**********************************************************************
        //
        public new string monthName_RENAME_ME(string c)
        {
            return monthName(c);
        }

        //
        //**********************************************************************
        /// <summary>
        /// Implements the following function
        ///    string date2:month-name(string, string?)
        /// See http://www.xmland.net/exslt/doc/GDNDatesAndTimes-month-name.xml
        /// </summary>
        /// <remarks>THIS FUNCTION IS NOT PART OF EXSLT!!!</remarks>
        /// <returns>The month name of the specified date according to
        /// specified culture or the empty string if the input date is invalid or
        /// the culture isn't supported.</returns>
        //**********************************************************************
        //
        public string monthName(string d, string culture)
        {
            try
            {
                DateTZ parsed = new DateTZ(d);
                DateTimeFormatInfo format = new CultureInfo(culture).DateTimeFormat;
                return format.GetMonthName(parsed.d.Month);
            }
            catch (Exception)
            {
                return "";
            }
        }

        //
        //**********************************************************************
        /// <summary>
        /// Implements the following function
        ///    string date2:month-name(string, string?)
        /// See http://www.xmland.net/exslt/doc/GDNDatesAndTimes-month-name.xml
        /// </summary>
        /// <remarks>THIS FUNCTION IS NOT PART OF EXSLT!!!</remarks>
        /// <returns>The month name of the current date according to
        /// specified culture or the empty string if
        /// the culture isn't supported.</returns>
        //**********************************************************************
        //
        public new string monthName(string culture)
        {
            try
            {
                DateTimeFormatInfo format = new CultureInfo(culture).DateTimeFormat;
                return format.GetMonthName(DateTime.Now.Month);
            }
            catch (Exception)
            {
                return "";
            }
        }
        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/******************************************************************************
 * This file is auto-generated from a template file by the GenerateTests.csx  *
 * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make    *
 * changes, please update the corresponding template and run according to the *
 * directions listed in the file.                                             *
 ******************************************************************************/

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

namespace JIT.HardwareIntrinsics.X86
{
    public static partial class Program
    {
        private static void ConvertToInt64WithTruncationVector128Double()
        {
            var test = new SimdScalarUnaryOpConvertTest__ConvertToInt64WithTruncationVector128Double();

            if (test.IsSupported)
            {
                // Validates basic functionality works, using Unsafe.Read
                test.RunBasicScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates basic functionality works, using Load
                    test.RunBasicScenario_Load();

                    // Validates basic functionality works, using LoadAligned
                    test.RunBasicScenario_LoadAligned();
                }

                // Validates calling via reflection works, using Unsafe.Read
                test.RunReflectionScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates calling via reflection works, using Load
                    test.RunReflectionScenario_Load();

                    // Validates calling via reflection works, using LoadAligned
                    test.RunReflectionScenario_LoadAligned();
                }

                // Validates passing a static member works
                test.RunClsVarScenario();

                // Validates passing a local works, using Unsafe.Read
                test.RunLclVarScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates passing a local works, using Load
                    test.RunLclVarScenario_Load();

                    // Validates passing a local works, using LoadAligned
                    test.RunLclVarScenario_LoadAligned();
                }

                // Validates passing the field of a local class works
                test.RunClassLclFldScenario();

                // Validates passing an instance member of a class works
                test.RunClassFldScenario();

                // Validates passing the field of a local struct works
                test.RunStructLclFldScenario();

                // Validates passing an instance member of a struct works
                test.RunStructFldScenario();
            }
            else
            {
                // Validates we throw on unsupported hardware
                test.RunUnsupportedScenario();
            }

            if (!test.Succeeded)
            {
                throw new Exception("One or more scenarios did not complete as expected.");
            }
        }
    }

    public sealed unsafe class SimdScalarUnaryOpConvertTest__ConvertToInt64WithTruncationVector128Double
    {
        private struct TestStruct
        {
            public Vector128<Double> _fld;

            public static TestStruct Create()
            {
                var testStruct = new TestStruct();

                for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetDouble(); }
                Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref testStruct._fld), ref Unsafe.As<Double, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());

                return testStruct;
            }

            public void RunStructFldScenario(SimdScalarUnaryOpConvertTest__ConvertToInt64WithTruncationVector128Double testClass)
            {
                var result = Sse2.X64.ConvertToInt64WithTruncation(_fld);
                testClass.ValidateResult(_fld, result);
            }
        }

        private static readonly int LargestVectorSize = 16;

        private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Double>>() / sizeof(Double);

        private static Double[] _data = new Double[Op1ElementCount];

        private static Vector128<Double> _clsVar;

        private Vector128<Double> _fld;

        private SimdScalarUnaryOpTest__DataTable<Double> _dataTable;

        static SimdScalarUnaryOpConvertTest__ConvertToInt64WithTruncationVector128Double()
        {
            for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetDouble(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar), ref Unsafe.As<Double, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());
        }

        public SimdScalarUnaryOpConvertTest__ConvertToInt64WithTruncationVector128Double()
        {
            Succeeded = true;

            for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetDouble(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld), ref Unsafe.As<Double, byte>(ref _data[0]), (uint)Unsafe.SizeOf<Vector128<Double>>());

            for (var i = 0; i < Op1ElementCount; i++) { _data[i] = TestLibrary.Generator.GetDouble(); }
            _dataTable = new SimdScalarUnaryOpTest__DataTable<Double>(_data, LargestVectorSize);
        }

        public bool IsSupported => Sse2.X64.IsSupported;

        public bool Succeeded { get; set; }

        public void RunBasicScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));

            var result = Sse2.X64.ConvertToInt64WithTruncation(
                Unsafe.Read<Vector128<Double>>(_dataTable.inArrayPtr)
            );

            ValidateResult(_dataTable.inArrayPtr, result);
        }

        public void RunBasicScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));

            var result = Sse2.X64.ConvertToInt64WithTruncation(
                Sse2.LoadVector128((Double*)(_dataTable.inArrayPtr))
            );

            ValidateResult(_dataTable.inArrayPtr, result);
        }

        public void RunBasicScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));

            var result = Sse2.X64.ConvertToInt64WithTruncation(
                Sse2.LoadAlignedVector128((Double*)(_dataTable.inArrayPtr))
            );

            ValidateResult(_dataTable.inArrayPtr, result);
        }

        public void RunReflectionScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));

            var result = typeof(Sse2.X64).GetMethod(nameof(Sse2.X64.ConvertToInt64WithTruncation), new Type[] { typeof(Vector128<Double>) })
                                     .Invoke(null, new object[] {
                                        Unsafe.Read<Vector128<Double>>(_dataTable.inArrayPtr)
                                     });

            ValidateResult(_dataTable.inArrayPtr, (Int64)(result));
        }

        public void RunReflectionScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));

            var result = typeof(Sse2.X64).GetMethod(nameof(Sse2.X64.ConvertToInt64WithTruncation), new Type[] { typeof(Vector128<Double>) })
                                     .Invoke(null, new object[] {
                                        Sse2.LoadVector128((Double*)(_dataTable.inArrayPtr))
                                     });

            ValidateResult(_dataTable.inArrayPtr, (Int64)(result));
        }

        public void RunReflectionScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));

            var result = typeof(Sse2.X64).GetMethod(nameof(Sse2.X64.ConvertToInt64WithTruncation), new Type[] { typeof(Vector128<Double>) })
                                     .Invoke(null, new object[] {
                                        Sse2.LoadAlignedVector128((Double*)(_dataTable.inArrayPtr))
                                     });

            ValidateResult(_dataTable.inArrayPtr, (Int64)(result));
        }

        public void RunClsVarScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));

            var result = Sse2.X64.ConvertToInt64WithTruncation(
                _clsVar
            );

            ValidateResult(_clsVar, result);
        }

        public void RunLclVarScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));

            var firstOp = Unsafe.Read<Vector128<Double>>(_dataTable.inArrayPtr);
            var result = Sse2.X64.ConvertToInt64WithTruncation(firstOp);

            ValidateResult(firstOp, result);
        }

        public void RunLclVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));

            var firstOp = Sse2.LoadVector128((Double*)(_dataTable.inArrayPtr));
            var result = Sse2.X64.ConvertToInt64WithTruncation(firstOp);

            ValidateResult(firstOp, result);
        }

        public void RunLclVarScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));

            var firstOp = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArrayPtr));
            var result = Sse2.X64.ConvertToInt64WithTruncation(firstOp);

            ValidateResult(firstOp, result);
        }

        public void RunClassLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));

            var test = new SimdScalarUnaryOpConvertTest__ConvertToInt64WithTruncationVector128Double();
            var result = Sse2.X64.ConvertToInt64WithTruncation(test._fld);

            ValidateResult(test._fld, result);
        }

        public void RunClassFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

            var result = Sse2.X64.ConvertToInt64WithTruncation(_fld);
            ValidateResult(_fld, result);
        }

        public void RunStructLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));

            var test = TestStruct.Create();
            var result = Sse2.X64.ConvertToInt64WithTruncation(test._fld);

            ValidateResult(test._fld, result);
        }

        public void RunStructFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));

            var test = TestStruct.Create();
            test.RunStructFldScenario(this);
        }

        public void RunUnsupportedScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));

            bool succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                succeeded = true;
            }

            if (!succeeded)
            {
                Succeeded = false;
            }
        }

        private void ValidateResult(Vector128<Double> firstOp, Int64 result, [CallerMemberName] string method = "")
        {
            Double[] inArray = new Double[Op1ElementCount];
            Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray[0]), firstOp);
            ValidateResult(inArray, result, method);
        }

        private void ValidateResult(void* firstOp, Int64 result, [CallerMemberName] string method = "")
        {
            Double[] inArray = new Double[Op1ElementCount];
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray[0]), ref Unsafe.AsRef<byte>(firstOp), (uint)Unsafe.SizeOf<Vector128<Double>>());
            ValidateResult(inArray, result, method);
        }

        private void ValidateResult(Double[] firstOp, Int64 result, [CallerMemberName] string method = "")
        {
            bool succeeded = true;

            if ((long)firstOp[0] != result)
            {
                succeeded = false;
            }

            if (!succeeded)
            {
                TestLibrary.TestFramework.LogInformation($"{nameof(Sse2.X64)}.{nameof(Sse2.X64.ConvertToInt64WithTruncation)}<Int64>(Vector128<Double>): {method} failed:");
                TestLibrary.TestFramework.LogInformation($"  firstOp: ({string.Join(", ", firstOp)})");
                // Fix: the original logged the literal text "result" because the
                // interpolation braces were missing ($" result: result").
                // NOTE: this file is auto-generated — mirror this fix in the
                // GenerateTests.csx template.
                TestLibrary.TestFramework.LogInformation($"  result: {result}");
                TestLibrary.TestFramework.LogInformation(string.Empty);

                Succeeded = false;
            }
        }
    }
}
//******************************************************************************************************************************************************************************************// // Copyright (c) 2022 @redhook62 (adfsmfa@gmail.com) // // // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), // // to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, // // and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // // // // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // // // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//                                                                                                                                                                                          //
//                                                                                                                                                                                          //
// https://github.com/neos-sdi/adfsmfa                                                                                                                                                      //
//                                                                                                                                                                                          //
//******************************************************************************************************************************************************************************************//
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.Deployment.WindowsInstaller;
using System.ServiceProcess;
using System.Diagnostics;
using System.IO;
using System.Collections;
using System.Threading;
using System.Windows.Forms;
using Microsoft.Win32;
using System.Linq;
using System.Reflection;
using Microsoft.ManagementConsole;

namespace Neos.IdentityServer.Deployment
{
    public partial class CustomActions
    {
        /// <summary>
        /// StartService method implementation.
        /// Starts the "mfanotifhub" Windows service if the service executable exists.
        /// </summary>
        /// <returns>false when the executable is missing or an exception occurs.
        /// NOTE(review): internalStartService's bool result is ignored, so a start
        /// failure that does not throw is still reported as success — confirm intent.</returns>
        public static bool StartService(Session session, string exeFileName)
        {
            try
            {
                if (!File.Exists(exeFileName))
                    return false;
                session.Log("Service Starting [mfanotifhub]");
                Trace.TraceInformation("Service Starting [mfanotifhub]");
                internalStartService();
                session.Log("Service Started [mfanotifhub]");
                Trace.TraceInformation("Service Started [mfanotifhub]");
                return true;
            }
            catch (Exception e)
            {
                session.Log("Service error [mfanotifhub] : " + e.Message);
                Trace.TraceInformation("Service error [mfanotifhub] : " + e.Message);
                return false;
            }
        }

        /// <summary>
        /// StopService method implementation.
        /// Stops the "mfanotifhub" Windows service if the service executable exists.
        /// </summary>
        public static bool StopService(Session session, string exeFileName)
        {
            bool ret = false;
            try
            {
                if (!File.Exists(exeFileName))
                    return false;
                session.Log("Service Stopping [mfanotifhub]");
                ret = internalStopService();
                session.Log("Service Stopped [mfanotifhub]");
                return ret;
            }
            catch (Exception e)
            {
                session.Log("Service error [mfanotifhub] : " + e.Message);
                return false;
            }
        }

        /// <summary>
        /// GetInstallPath method implementation.
        /// Doing that, because with Wix Custom actions runs in 32 bits, and hangs in 64 Bits...
        /// Builds the MFA installation directory from the system drive root.
        /// </summary>
        private static string GetInstallPath(Session session)
        {
            string baseDirectory = Path.GetPathRoot(Environment.GetFolderPath(Environment.SpecialFolder.System));
            string programFiles = "Program Files";
            string programFilesX86 = "Program Files (x86)";
            if (Environment.Is64BitOperatingSystem)
                return Path.Combine(baseDirectory, programFiles) + @"\MFA\";
            else
                return Path.Combine(baseDirectory, programFilesX86) + @"\MFA\";
        }

        /// <summary>
        /// Resets the MSI progress bar: announces the action, then initializes a
        /// determinate progress range of totalStatements-1 steps.
        /// </summary>
        public static void ResetProgressBar(Session session, int totalStatements, string actionName, string actionDesc)
        {
            using (Record actionrecord = new Record(2))
            {
                actionrecord.SetString(1, actionName);
                actionrecord.SetString(2, actionDesc);
                // actionrecord.SetString(3, "[0]");
                session.Message(InstallMessage.ActionStart, actionrecord);
            }
            Application.DoEvents();
            using (Record record = new Record(4))
            {
                record.SetInteger(1, 0);
                record.SetInteger(2, totalStatements - 1);
                record.SetInteger(3, 0);
                record.SetInteger(4, 0);
                session.Message(InstallMessage.Progress, record);
            }
            Application.DoEvents();
            using (Record record2 = new Record(3))
            {
                record2.SetInteger(1, 1);
                record2.SetInteger(2, 1);
                record2.SetInteger(3, 1);
                session.Message(InstallMessage.Progress, record2);
            }
            Application.DoEvents();
        }

        /// <summary>
        /// Sends an ActionData message so the installer UI shows the given step text,
        /// then pauses briefly so the message is visible.
        /// </summary>
        public static void ProgressBarMessage(Session session, string message)
        {
            using (Record actiondata = new Record(0))
            {
                actiondata.SetString(0, message);
                session.Message(InstallMessage.ActionData, actiondata);
                Application.DoEvents();
                Thread.Sleep(500);
            }
        }

        /// <summary>
        /// InstallService custom action: registers event logs, installs the MFA
        /// notification-hub service and the MMC admin console, then starts the service.
        /// </summary>
        [CustomAction]
        public static ActionResult InstallService(Session session)
        {
            const int iNumberItems = 5;
            string path = string.Empty;
            try
            {
                ResetProgressBar(session, iNumberItems, "xInstallService", "Installing MFA Services...");
                try
                {
                    RegisterEventLogs();
                    path = GetInstallPath(session);
                    ProgressBarMessage(session, "installtion paths and Evenlog sources");
                }
                catch (Exception e)
                {
                    // Best-effort: event-log registration failure must not abort the install.
                    session.Log(e.Message);
                }
                if (!IsServiceInstalled(session, path + @"Neos.IdentityServer.MultiFactor.NotificationHub.exe"))
                {
                    ProgressBarMessage(session, "Installing MFA Service");
                    internalInstallService(session, path + @"Neos.IdentityServer.MultiFactor.NotificationHub.exe");
                }
                if (!IsSnapinInstalled(session, path + @"Neos.IdentityServer.Console.dll"))
                {
                    ProgressBarMessage(session, "Installing MFA Admin Console");
                    internalInstallSnapin(session, path + @"Neos.IdentityServer.Console.dll");
                }
                if (IsServiceInstalled(session, path + @"Neos.IdentityServer.MultiFactor.NotificationHub.exe"))
                {
                    ProgressBarMessage(session, "Starting MFA Service");
                    StartService(session, path + @"Neos.IdentityServer.MultiFactor.NotificationHub.exe");
                    ProgressBarMessage(session, "Removing Backup Files");
                }
                return ActionResult.Success;
            }
            catch (Exception e)
            {
                session.Log(e.Message);
                return ActionResult.Failure;
            }
        }

        /// <summary>
        /// UnInstallService custom action: stops and uninstalls the MFA service and
        /// the MMC admin console, then removes configuration backup files.
        /// </summary>
        [CustomAction]
        public static ActionResult UnInstallService(Session session)
        {
            const int iNumberItems = 5;
            string path = string.Empty;
            try
            {
                ResetProgressBar(session, iNumberItems, "xUnInstallService", "UnInstalling MFA Services...");
                try
                {
                    path = GetInstallPath(session);
                    ProgressBarMessage(session, "installtion paths");
                }
                catch (Exception e)
                {
                    session.Log(e.Message);
                }
                if (IsServiceInstalled(session, path + @"Neos.IdentityServer.MultiFactor.NotificationHub.exe"))
                {
                    ProgressBarMessage(session, "Stopping MFA Service");
                    if (StopService(session, path + @"Neos.IdentityServer.MultiFactor.NotificationHub.exe"))
                    {
                        ProgressBarMessage(session, "UnInstalling MFA Service");
                        internalUninstallService(session, path + @"Neos.IdentityServer.MultiFactor.NotificationHub.exe");
                    }
                }
                if (IsSnapinInstalled(session, path + @"Neos.IdentityServer.Console.dll"))
                {
                    ProgressBarMessage(session, "UnInstalling MFA Admin Console");
                    internalUninstallSnapin(session, path + @"Neos.IdentityServer.Console.dll");
                }
                return ActionResult.Success;
            }
            catch (Exception e)
            {
                session.Log(e.Message);
                return ActionResult.Failure;
            }
            finally
            {
                try
                {
                    ProgressBarMessage(session, "Removing Backup Files");
                    File.Delete(path + @"Config\Config.db");
                    File.Delete(path + @"Config\System.db");
                }
                catch (Exception e)
                {
                    // Best-effort cleanup; never let it mask the action result.
                    session.Log(e.Message);
                }
            }
        }

        /// <summary>
        /// internalInstallService method implementation.
        /// Runs the service executable's installers (AssemblyInstaller), then removes
        /// the transient .installState file.
        /// </summary>
        private static void internalInstallService(Session session, string exeFilename)
        {
            try
            {
                IDictionary installstate = new Hashtable();
                string dir = Path.GetDirectoryName(exeFilename);
                string file = dir + "\\" + Path.GetFileNameWithoutExtension(exeFilename) + ".installLog";
                File.Delete(file);
                System.Configuration.Install.AssemblyInstaller installer = new System.Configuration.Install.AssemblyInstaller();
                installer.UseNewContext = true;
                installer.Path = exeFilename;
                installer.CommandLine = new string[2] { string.Format("/logFile={0}", file), string.Format("/InstallStateDir={0}", dir) };
                installer.Install(installstate);
                installer.Commit(installstate);
                string state = Path.GetFileNameWithoutExtension(exeFilename) + ".installState";
                File.Delete(dir + "\\" + state);
            }
            catch (Exception e)
            {
                session.Log(e.Message);
                throw; // was "throw e;": rethrow without resetting the stack trace (CA2200)
            }
        }

        /// <summary>
        /// internalUninstallService method implementation.
        /// Runs the service executable's uninstallers, then removes the transient
        /// .installState file.
        /// </summary>
        private static void internalUninstallService(Session session, string exeFilename)
        {
            try
            {
                string dir = Path.GetDirectoryName(exeFilename);
                string file = dir + "\\" + Path.GetFileNameWithoutExtension(exeFilename) + ".installLog";
                File.Delete(file);
                System.Configuration.Install.AssemblyInstaller installer = new System.Configuration.Install.AssemblyInstaller();
                installer.UseNewContext = true;
                installer.Path = exeFilename;
                installer.CommandLine = new string[2] { string.Format("/logFile={0}", file), string.Format("/InstallStateDir={0}", dir) };
                installer.Uninstall(new Hashtable());
                string state = Path.GetFileNameWithoutExtension(exeFilename) + ".installState";
                File.Delete(dir + "\\" + state);
            }
            catch (Exception e)
            {
                session.Log(e.Message);
                throw; // was "throw e;": rethrow without resetting the stack trace (CA2200)
            }
        }

        /// <summary>
        /// internalInstallSnapin method implementation.
        /// Runs the console DLL's installers (MMC snap-in registration), then removes
        /// the transient .installState file.
        /// </summary>
        private static void internalInstallSnapin(Session session, string dllFilename)
        {
            try
            {
                IDictionary installstate = new Hashtable();
                string dir = Path.GetDirectoryName(dllFilename);
                string file = dir + "\\" + Path.GetFileNameWithoutExtension(dllFilename) + ".installLog";
                File.Delete(file);
                System.Configuration.Install.AssemblyInstaller installer = new System.Configuration.Install.AssemblyInstaller();
                installer.UseNewContext = true;
                installer.Path = dllFilename;
                installer.CommandLine = new string[2] { string.Format("/logFile={0}", file), string.Format("/InstallStateDir={0}", dir) };
                installer.Install(installstate);
                installer.Commit(installstate);
                string state = Path.GetFileNameWithoutExtension(dllFilename) + ".installState";
                File.Delete(dir + "\\" + state);
            }
            catch (Exception e)
            {
                session.Log(e.Message);
                throw; // was "throw e;": rethrow without resetting the stack trace (CA2200)
            }
        }

        /// <summary>
        /// internalUninstallSnapin method implementation.
        /// Runs the console DLL's uninstallers, then removes the transient
        /// .installState file.
        /// </summary>
        private static void internalUninstallSnapin(Session session, string dllFilename)
        {
            try
            {
                string dir = Path.GetDirectoryName(dllFilename);
                string file = dir + "\\" + Path.GetFileNameWithoutExtension(dllFilename) + ".installLog";
                File.Delete(file);
                System.Configuration.Install.AssemblyInstaller installer = new System.Configuration.Install.AssemblyInstaller();
                installer.UseNewContext = true;
                installer.Path = dllFilename;
                installer.CommandLine = new string[2] { string.Format("/logFile={0}", file), string.Format("/InstallStateDir={0}", dir) };
                installer.Uninstall(new Hashtable());
                string state = Path.GetFileNameWithoutExtension(dllFilename) + ".installState";
                File.Delete(dir + "\\" + state);
            }
            catch (Exception e)
            {
                session.Log(e.Message);
                throw; // was "throw e;": rethrow without resetting the stack trace (CA2200)
            }
        }

        /// <summary>
        /// internalStartService method implementation.
        /// Starts "mfanotifhub" (with the "Install" argument) unless it is already
        /// running/starting, and waits up to one minute for Running state.
        /// </summary>
        private static bool internalStartService()
        {
            ServiceController ADFSController = null;
            try
            {
                ADFSController = new ServiceController("mfanotifhub");
                if ((ADFSController.Status != ServiceControllerStatus.Running) && (ADFSController.Status != ServiceControllerStatus.StartPending))
                {
                    ADFSController.Start(new string[] { "Install" });
                    ADFSController.WaitForStatus(ServiceControllerStatus.Running, new TimeSpan(0, 1, 0));
                }
                return true;
            }
            catch (Exception)
            {
                return false;
            }
            finally
            {
                // Guard: the constructor may throw, leaving the controller null.
                if (ADFSController != null)
                    ADFSController.Close();
            }
        }

        /// <summary>
        /// internalStopService method implementation.
        /// Stops "mfanotifhub" unless it is already stopped/stopping, and waits up to
        /// one minute for Stopped state.
        /// </summary>
        private static bool internalStopService()
        {
            ServiceController ADFSController = null;
            try
            {
                ADFSController = new ServiceController("mfanotifhub");
                if ((ADFSController.Status != ServiceControllerStatus.Stopped) && (ADFSController.Status != ServiceControllerStatus.StopPending))
                {
                    ADFSController.Stop();
                    ADFSController.WaitForStatus(ServiceControllerStatus.Stopped, new TimeSpan(0, 1, 0));
                }
                return true;
            }
            catch (Exception)
            {
                return false;
            }
            finally
            {
                // Guard: the constructor may throw, leaving the controller null.
                if (ADFSController != null)
                    ADFSController.Close();
            }
        }

        /// <summary>
        /// IsServiceInstalled method implementation.
        /// True when the executable exists and a service named "mfanotifhub" is registered.
        /// </summary>
        private static bool IsServiceInstalled(Session session, string exeFilename)
        {
            if (!File.Exists(exeFilename))
                return false;
            else
                return ServiceController.GetServices().Any(serviceController => serviceController.ServiceName.Equals("mfanotifhub"));
        }

        /// <summary>
        /// IsSnapinInstalled method implementation.
        /// Checks the 64-bit registry for the MMC snap-in registration and inspects the
        /// console assembly's SnapInSettingsAttribute.
        /// (Definition continues beyond this view.)
        /// </summary>
        private static bool IsSnapinInstalled(Session session, string dllFilename)
        {
            string xx = @"Neos.IdentityServer.Console.ADFSSnapIn, Neos.IdentityServer.Console, Version=3.0.0.0, Culture=neutral, " + GetAssemblyPublicKey();
            if (!File.Exists(dllFilename))
                return false;
            else
            {
                RegistryKey rkey64 = RegistryKey.OpenBaseKey(RegistryHive.LocalMachine, RegistryView.Registry64);
                try
                {
                    RegistryKey key = rkey64.OpenSubKey(@"Software\Microsoft\MMC\Snapins\FX:{9627F1F3-A6D2-4cf8-90A2-10F85A7A4EE7}", RegistryKeyPermissionCheck.ReadSubTree);
                    try
                    {
                        object o = key.GetValue("Type");
                        if (o != null)
                        {
                            Assembly assembly = Assembly.LoadFile(dllFilename);
                            foreach (Type type in assembly.GetTypes())
                            {
                                if (type.IsDefined(typeof(SnapInSettingsAttribute)))
                                {
                                    SnapInSettingsAttribute attrib = (SnapInSettingsAttribute)type.GetCustomAttribute(typeof(SnapInSettingsAttribute), false);
                                    if (attrib
!= null) { return (type.AssemblyQualifiedName.ToLower().Equals(xx.ToLower())); } } } } } catch { return false; } finally { key.Close(); } } catch { return false; } finally { rkey64.Close(); } } return false; } /// <summary> /// GetAssemblyPublicKey method implmentation /// </summary> public static string GetAssemblyPublicKey() { string assemblyname = Assembly.GetExecutingAssembly().FullName; string[] str = assemblyname.Split(','); return str[str.Length - 1]; } private static string EventLogSource = "ADFS MFA DataServices"; private static string AdminEventLogSource = "ADFS MFA Administration"; private static string MFAEventLogSource = "ADFS MFA Service"; private static string MMCEventLogSource = "ADFS MFA MMC"; private static string NOTIFEventLogSource = "ADFS MFA Notification Hub"; private static string EventLogGroup = "Application"; /// <summary> /// RegisterEventLogs method implementation /// </summary> private static void RegisterEventLogs() { if (!EventLog.SourceExists(EventLogSource)) EventLog.CreateEventSource(EventLogSource, EventLogGroup); if (!EventLog.SourceExists(AdminEventLogSource)) EventLog.CreateEventSource(AdminEventLogSource, EventLogGroup); if (!EventLog.SourceExists(MFAEventLogSource)) EventLog.CreateEventSource(MFAEventLogSource, EventLogGroup); if (!EventLog.SourceExists(MMCEventLogSource)) EventLog.CreateEventSource(MMCEventLogSource, EventLogGroup); if (!EventLog.SourceExists(NOTIFEventLogSource)) EventLog.CreateEventSource(NOTIFEventLogSource, EventLogGroup); } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Xml;
using System.Collections;

namespace System.Data.Common
{
    // Typed column storage for Int32 DataColumns.
    //
    // Null handling scheme: _values[record] holds the raw int; a record whose value
    // equals defaultValue (0) may be either a real zero or DBNull, so the base class's
    // null bitmask (HasValue / SetNullBit / GetBits / CompareBits) is consulted whenever
    // a stored value equals the sentinel.
    internal sealed class Int32Storage : DataStorage
    {
        // Sentinel stored for null records.
        private const int defaultValue = 0; // Convert.ToInt32(null)

        // One slot per record; sized by SetCapacity.
        private int[] _values;

        internal Int32Storage(DataColumn column)
            : base(column, typeof(int), defaultValue, StorageType.Int32)
        {
        }

        // Computes the requested aggregate over the given record indices, skipping
        // null records (HasValue == false). Returns _nullValue when no non-null data
        // participated (except First/Count — see below). Sum/Mean use checked long
        // arithmetic and surface overflow as ExprException.Overflow.
        public override object Aggregate(int[] records, AggregateType kind)
        {
            bool hasData = false;
            try
            {
                switch (kind)
                {
                    case AggregateType.Sum:
                        long sum = 0;
                        foreach (int record in records)
                        {
                            if (HasValue(record))
                            {
                                checked
                                {
                                    sum += _values[record];
                                }
                                hasData = true;
                            }
                        }
                        if (hasData)
                        {
                            return sum;
                        }
                        return _nullValue;

                    case AggregateType.Mean:
                        long meanSum = 0;
                        int meanCount = 0;
                        foreach (int record in records)
                        {
                            if (HasValue(record))
                            {
                                checked
                                {
                                    meanSum += _values[record];
                                }
                                meanCount++;
                                hasData = true;
                            }
                        }
                        if (hasData)
                        {
                            int mean;
                            checked
                            {
                                // Integer mean: truncates toward zero.
                                mean = (int)(meanSum / meanCount);
                            }
                            return mean;
                        }
                        return _nullValue;

                    case AggregateType.Var:
                    case AggregateType.StDev:
                        int count = 0;
                        double var = 0.0f;
                        double prec = 0.0f;
                        double dsum = 0.0f;
                        double sqrsum = 0.0f;

                        foreach (int record in records)
                        {
                            if (HasValue(record))
                            {
                                dsum += _values[record];
                                sqrsum += _values[record] * (double)_values[record];
                                count++;
                            }
                        }

                        if (count > 1)
                        {
                            var = count * sqrsum - (dsum * dsum);
                            prec = var / (dsum * dsum);

                            // we are dealing with the risk of a cancellation error
                            // double is guaranteed only for 15 digits so a difference
                            // with a result less than 1e-15 should be considered as zero
                            if ((prec < 1e-15) || (var < 0))
                                var = 0;
                            else
                                var = var / (count * (count - 1)); // sample variance (n-1 denominator)

                            if (kind == AggregateType.StDev)
                            {
                                return Math.Sqrt(var);
                            }
                            return var;
                        }
                        return _nullValue;

                    case AggregateType.Min:
                        int min = int.MaxValue;
                        for (int i = 0; i < records.Length; i++)
                        {
                            int record = records[i];
                            if (HasValue(record))
                            {
                                min = Math.Min(_values[record], min);
                                hasData = true;
                            }
                        }
                        if (hasData)
                        {
                            return min;
                        }
                        return _nullValue;

                    case AggregateType.Max:
                        int max = int.MinValue;
                        for (int i = 0; i < records.Length; i++)
                        {
                            int record = records[i];
                            if (HasValue(record))
                            {
                                max = Math.Max(_values[record], max);
                                hasData = true;
                            }
                        }
                        if (hasData)
                        {
                            return max;
                        }
                        return _nullValue;

                    case AggregateType.First:
                        // NOTE: returns the raw stored value of the first record without a
                        // HasValue check, and returns null (not _nullValue) for empty input.
                        if (records.Length > 0)
                        {
                            return _values[records[0]];
                        }
                        return null;

                    case AggregateType.Count:
                        // Count of non-null records only.
                        count = 0;
                        for (int i = 0; i < records.Length; i++)
                        {
                            if (HasValue(records[i]))
                            {
                                count++;
                            }
                        }
                        return count;
                }
            }
            catch (OverflowException)
            {
                throw ExprException.Overflow(typeof(int));
            }
            throw ExceptionBuilder.AggregateException(kind, _dataType);
        }

        // Compares two records, letting the null bitmask decide when either stored
        // value equals the sentinel (a stored 0 may really be DBNull).
        public override int Compare(int recordNo1, int recordNo2)
        {
            int valueNo1 = _values[recordNo1];
            int valueNo2 = _values[recordNo2];

            if (valueNo1 == defaultValue || valueNo2 == defaultValue)
            {
                int bitCheck = CompareBits(recordNo1, recordNo2);
                if (0 != bitCheck)
                {
                    return bitCheck;
                }
            }
            //return valueNo1.CompareTo(valueNo2);
            return (valueNo1 < valueNo2 ? -1 : (valueNo1 > valueNo2 ? 1 : 0)); // similar to Int32.CompareTo(Int32)
        }

        // Compares a record against a boxed value; null sorts before any value
        // (record null vs. value null returns 0).
        public override int CompareValueTo(int recordNo, object value)
        {
            System.Diagnostics.Debug.Assert(0 <= recordNo, "Invalid record");
            System.Diagnostics.Debug.Assert(null != value, "null value");

            if (_nullValue == value)
            {
                return (HasValue(recordNo) ? 1 : 0);
            }

            int valueNo1 = _values[recordNo];
            if ((defaultValue == valueNo1) && !HasValue(recordNo))
            {
                return -1;
            }
            return valueNo1.CompareTo((int)value);
            //return(valueNo1 < valueNo2 ? -1 : (valueNo1 > valueNo2 ? 1 : 0)); // similar to Int32.CompareTo(Int32)
        }

        // Coerces an arbitrary boxed value to int via IConvertible, mapping null to
        // the storage's null representation.
        public override object ConvertValue(object value)
        {
            if (_nullValue != value)
            {
                if (null != value)
                {
                    value = ((IConvertible)value).ToInt32(FormatProvider);
                }
                else
                {
                    value = _nullValue;
                }
            }
            return value;
        }

        // Copies both the value and its null bit from one record to another.
        public override void Copy(int recordNo1, int recordNo2)
        {
            CopyBits(recordNo1, recordNo2);
            _values[recordNo2] = _values[recordNo1];
        }

        // Returns the boxed value for a record; when the stored value equals the
        // sentinel, defers to GetBits to distinguish real 0 from DBNull.
        public override object Get(int record)
        {
            int value = _values[record];
            if (value != Int32Storage.defaultValue)
            {
                return value;
            }
            return GetBits(record);
        }

        // Stores a value (or null sentinel) for a record and updates its null bit.
        public override void Set(int record, object value)
        {
            System.Diagnostics.Debug.Assert(null != value, "null value");
            if (_nullValue == value)
            {
                _values[record] = defaultValue;
                SetNullBit(record, true);
            }
            else
            {
                _values[record] = ((IConvertible)value).ToInt32(FormatProvider);
                SetNullBit(record, false);
            }
        }

        // Grows (or shrinks) the backing array, preserving existing values.
        public override void SetCapacity(int capacity)
        {
            int[] newValues = new int[capacity];
            if (null != _values)
            {
                Array.Copy(_values, 0, newValues, 0, Math.Min(capacity, _values.Length));
            }
            _values = newValues;
            base.SetCapacity(capacity);
        }

        // XML (de)serialization uses the XSD int lexical form.
        public override object ConvertXmlToObject(string s)
        {
            return XmlConvert.ToInt32(s);
        }

        public override string ConvertObjectToXml(object value)
        {
            return XmlConvert.ToString((int)value);
        }

        // Allocates an untyped store used during bulk copy operations.
        protected override object GetEmptyStorage(int recordCount)
        {
            return new int[recordCount];
        }

        // Copies one record into an external store, mirroring the null bit.
        protected override void CopyValue(int record, object store, BitArray nullbits, int storeIndex)
        {
            int[] typedStore = (int[])store;
            typedStore[storeIndex] = _values[record];
            nullbits.Set(storeIndex, !HasValue(record));
        }

        // Adopts an external store (and its null bits) as this column's backing data.
        protected override void SetStorage(object store, BitArray nullbits)
        {
            _values = (int[])store;
            SetNullStorage(nullbits);
        }
    }
}
using J2N.Runtime.CompilerServices;
using J2N.Threading.Atomic;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using JCG = J2N.Collections.Generic;

namespace Lucene.Net.Index
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using InfoStream = Lucene.Net.Util.InfoStream;
    using ThreadState = Lucene.Net.Index.DocumentsWriterPerThreadPool.ThreadState;

    /// <summary>
    /// This class controls <see cref="DocumentsWriterPerThread"/> flushing during
    /// indexing. It tracks the memory consumption per
    /// <see cref="DocumentsWriterPerThread"/> and uses a configured <see cref="flushPolicy"/> to
    /// decide if a <see cref="DocumentsWriterPerThread"/> must flush.
    /// <para/>
    /// In addition to the <see cref="flushPolicy"/> the flush control might set certain
    /// <see cref="DocumentsWriterPerThread"/> as flush pending iff a
    /// <see cref="DocumentsWriterPerThread"/> exceeds the
    /// <see cref="IndexWriterConfig.RAMPerThreadHardLimitMB"/> to prevent address
    /// space exhaustion.
    /// </summary>
    internal sealed class DocumentsWriterFlushControl
    {
        private readonly long hardMaxBytesPerDWPT;
        // Bytes held by DWPTs still accepting documents; guarded by lock(this).
        private long activeBytes = 0;
        // Bytes held by DWPTs that are pending/being flushed; guarded by lock(this).
        private long flushBytes = 0;
        private volatile int numPending = 0;
        private int numDocsSinceStalled = 0; // only with assert
        internal readonly AtomicBoolean flushDeletes = new AtomicBoolean(false);
        private bool fullFlush = false;
        // DWPTs checked out and ready to be flushed, in FIFO order.
        private readonly Queue<DocumentsWriterPerThread> flushQueue = new Queue<DocumentsWriterPerThread>();
        // only for safety reasons if a DWPT is close to the RAM limit
        private readonly LinkedList<BlockedFlush> blockedFlushes = new LinkedList<BlockedFlush>();
        // Maps each flushing DWPT to the byte count it carried when checked out,
        // keyed by reference identity (DWPTs have no meaningful value equality here).
        private readonly IDictionary<DocumentsWriterPerThread, long?> flushingWriters = new JCG.Dictionary<DocumentsWriterPerThread, long?>(IdentityEqualityComparer<DocumentsWriterPerThread>.Default);

        internal double maxConfiguredRamBuffer = 0;
        internal long peakActiveBytes = 0; // only with assert
        internal long peakFlushBytes = 0; // only with assert
        internal long peakNetBytes = 0; // only with assert
        internal long peakDelta = 0; // only with assert
        internal readonly DocumentsWriterStallControl stallControl;
        private readonly DocumentsWriterPerThreadPool perThreadPool;
        private readonly FlushPolicy flushPolicy;
        private bool closed = false;
        private readonly DocumentsWriter documentsWriter;
        private readonly LiveIndexWriterConfig config;
        private readonly BufferedUpdatesStream bufferedUpdatesStream;
        private readonly InfoStream infoStream;

        internal DocumentsWriterFlushControl(DocumentsWriter documentsWriter, LiveIndexWriterConfig config, BufferedUpdatesStream bufferedUpdatesStream)
        {
            this.infoStream = config.InfoStream;
            this.stallControl = new DocumentsWriterStallControl();
            this.perThreadPool = documentsWriter.perThreadPool;
            this.flushPolicy = documentsWriter.flushPolicy;
            this.config = config;
            this.hardMaxBytesPerDWPT = config.RAMPerThreadHardLimitMB * 1024 * 1024;
            this.documentsWriter = documentsWriter;
            this.bufferedUpdatesStream = bufferedUpdatesStream;
        }

        // Current bytes held by active (still indexing) DWPTs.
        public long ActiveBytes
        {
            get
            {
                lock (this)
                {
                    return activeBytes;
                }
            }
        }

        // Current bytes held by pending/flushing DWPTs.
        public long FlushBytes
        {
            get
            {
                lock (this)
                {
                    return flushBytes;
                }
            }
        }

        // Total bytes tracked by this flush control (active + flushing).
        public long NetBytes
        {
            get
            {
                lock (this)
                {
                    return flushBytes + activeBytes;
                }
            }
        }

        // Net-bytes threshold above which indexing threads are stalled:
        // twice the configured RAM buffer, or unbounded when auto-flush is disabled.
        private long StallLimitBytes
        {
            get
            {
                double maxRamMB = config.RAMBufferSizeMB;
                return maxRamMB != IndexWriterConfig.DISABLE_AUTO_FLUSH ? (long)(2 * (maxRamMB * 1024 * 1024)) : long.MaxValue;
            }
        }

        // Debug-only invariant check on the tracked memory; always returns true so it
        // can be used inside Debug.Assert.
        private bool AssertMemory()
        {
            double maxRamMB = config.RAMBufferSizeMB;
            if (maxRamMB != IndexWriterConfig.DISABLE_AUTO_FLUSH)
            {
                // for this assert we must be tolerant to ram buffer changes!
                maxConfiguredRamBuffer = Math.Max(maxRamMB, maxConfiguredRamBuffer);
                long ram = flushBytes + activeBytes;
                long ramBufferBytes = (long)(maxConfiguredRamBuffer * 1024 * 1024);
                // take peakDelta into account - worst case is that all flushing, pending and blocked DWPT had maxMem and the last doc had the peakDelta
                // 2 * ramBufferBytes -> before we stall we need to cross the 2xRAM Buffer border this is still a valid limit
                // (numPending + numFlushingDWPT() + numBlockedFlushes()) * peakDelta) -> those are the total number of DWPT that are not active but not yet fully fluhsed
                // all of them could theoretically be taken out of the loop once they crossed the RAM buffer and the last document was the peak delta
                // (numDocsSinceStalled * peakDelta) -> at any given time there could be n threads in flight that crossed the stall control before we reached the limit and each of them could hold a peak document
                long expected = (2 * (ramBufferBytes)) + ((numPending + NumFlushingDWPT + NumBlockedFlushes) * peakDelta) + (numDocsSinceStalled * peakDelta);
                // the expected ram consumption is an upper bound at this point and not really the expected consumption
                if (peakDelta < (ramBufferBytes >> 1))
                {
                    /*
                     * if we are indexing with very low maxRamBuffer like 0.1MB memory can
                     * easily overflow if we check out some DWPT based on docCount and have
                     * several DWPT in flight indexing large documents (compared to the ram
                     * buffer). this means that those DWPT and their threads will not hit
                     * the stall control before asserting the memory which would in turn
                     * fail. To prevent this we only assert if the the largest document seen
                     * is smaller than the 1/2 of the maxRamBufferMB
                     */
                    Debug.Assert(ram <= expected, "actual mem: " + ram + " byte, expected mem: " + expected + " byte, flush mem: " + flushBytes + ", active mem: " + activeBytes + ", pending DWPT: " + numPending + ", flushing DWPT: " + NumFlushingDWPT + ", blocked DWPT: " + NumBlockedFlushes + ", peakDelta mem: " + peakDelta + " byte");
                }
            }
            return true;
        }

        // Folds the DWPT's byte-usage delta since the last commit into either
        // flushBytes or activeBytes, depending on its pending state.
        private void CommitPerThreadBytes(ThreadState perThread)
        {
            long delta = perThread.dwpt.BytesUsed - perThread.bytesUsed;
            perThread.bytesUsed += delta;
            /*
             * We need to differentiate here if we are pending since setFlushPending
             * moves the perThread memory to the flushBytes and we could be set to
             * pending during a delete
             */
            if (perThread.flushPending)
            {
                flushBytes += delta;
            }
            else
            {
                activeBytes += delta;
            }
            Debug.Assert(UpdatePeaks(delta));
        }

        // only for asserts
        private bool UpdatePeaks(long delta)
        {
            peakActiveBytes = Math.Max(peakActiveBytes, activeBytes);
            peakFlushBytes = Math.Max(peakFlushBytes, flushBytes);
            peakNetBytes = Math.Max(peakNetBytes, NetBytes);
            peakDelta = Math.Max(peakDelta, delta);
            return true;
        }

        // Called after each document: commits bytes, consults the flush policy, enforces
        // the per-DWPT hard RAM limit, and returns a DWPT to flush (or null).
        internal DocumentsWriterPerThread DoAfterDocument(ThreadState perThread, bool isUpdate)
        {
            lock (this)
            {
                try
                {
                    CommitPerThreadBytes(perThread);
                    if (!perThread.flushPending)
                    {
                        if (isUpdate)
                        {
                            flushPolicy.OnUpdate(this, perThread);
                        }
                        else
                        {
                            flushPolicy.OnInsert(this, perThread);
                        }
                        if (!perThread.flushPending && perThread.bytesUsed > hardMaxBytesPerDWPT)
                        {
                            // Safety check to prevent a single DWPT exceeding its RAM limit. this
                            // is super important since we can not address more than 2048 MB per DWPT
                            SetFlushPending(perThread);
                        }
                    }
                    DocumentsWriterPerThread flushingDWPT;
                    if (fullFlush)
                    {
                        if (perThread.flushPending)
                        {
                            CheckoutAndBlock(perThread);
                            flushingDWPT = NextPendingFlush();
                        }
                        else
                        {
                            flushingDWPT = null;
                        }
                    }
                    else
                    {
                        flushingDWPT = TryCheckoutForFlush(perThread);
                    }
                    return flushingDWPT;
                }
                finally
                {
                    bool stalled = UpdateStallState();
                    Debug.Assert(AssertNumDocsSinceStalled(stalled) && AssertMemory());
                }
            }
        }

        private bool AssertNumDocsSinceStalled(bool stalled)
        {
            /*
             * updates the number of documents "finished" while we are in a stalled state.
             * this is important for asserting memory upper bounds since it corresponds
             * to the number of threads that are in-flight and crossed the stall control
             * check before we actually stalled.
             * see #assertMemory()
             */
            if (stalled)
            {
                numDocsSinceStalled++;
            }
            else
            {
                numDocsSinceStalled = 0;
            }
            return true;
        }

        // Called once a DWPT finished (or aborted) flushing: releases its tracked bytes,
        // recycles it into the pool, and wakes any WaitForFlush() waiters.
        internal void DoAfterFlush(DocumentsWriterPerThread dwpt)
        {
            lock (this)
            {
                Debug.Assert(flushingWriters.ContainsKey(dwpt));
                try
                {
                    long? bytes = flushingWriters[dwpt];
                    flushingWriters.Remove(dwpt);
                    flushBytes -= (long)bytes;
                    perThreadPool.Recycle(dwpt);
                    Debug.Assert(AssertMemory());
                }
                finally
                {
                    try
                    {
                        UpdateStallState();
                    }
                    finally
                    {
                        Monitor.PulseAll(this);
                    }
                }
            }
        }

        // Recomputes the stall state; returns true when indexing threads should stall.
        // Must be called while holding lock(this).
        private bool UpdateStallState()
        {
            //Debug.Assert(Thread.holdsLock(this));
            long limit = StallLimitBytes;
            /*
             * we block indexing threads if net byte grows due to slow flushes
             * yet, for small ram buffers and large documents we can easily
             * reach the limit without any ongoing flushes. we need to ensure
             * that we don't stall/block if an ongoing or pending flush can
             * not free up enough memory to release the stall lock.
             */
            bool stall = ((activeBytes + flushBytes) > limit) && (activeBytes < limit) && !closed;
            stallControl.UpdateStalled(stall);
            return stall;
        }

        // Blocks until no DWPT is flushing; pulsed from DoAfterFlush.
        public void WaitForFlush()
        {
            lock (this)
            {
                while (flushingWriters.Count != 0)
                {
                    //#if !NETSTANDARD1_6
                    //                try
                    //                {
                    //#endif
                    Monitor.Wait(this);
                    //#if !NETSTANDARD1_6 // LUCENENET NOTE: Senseless to catch and rethrow the same exception type
                    //                }
                    //                catch (ThreadInterruptedException e)
                    //                {
                    //                    throw new ThreadInterruptedException("Thread Interrupted Exception", e);
                    //                }
                    //#endif
                }
            }
        }

        /// <summary>
        /// Sets flush pending state on the given <see cref="ThreadState"/>. The
        /// <see cref="ThreadState"/> must have indexed at least one <see cref="Documents.Document"/> and must not be
        /// already pending.
        /// </summary>
        public void SetFlushPending(ThreadState perThread)
        {
            lock (this)
            {
                Debug.Assert(!perThread.flushPending);
                if (perThread.dwpt.NumDocsInRAM > 0)
                {
                    perThread.flushPending = true; // write access synced
                    long bytes = perThread.bytesUsed;
                    // Move this DWPT's bytes from the active pool to the flush pool.
                    flushBytes += bytes;
                    activeBytes -= bytes;
                    numPending++; // write access synced
                    Debug.Assert(AssertMemory());
                }
                // don't assert on numDocs since we could hit an abort excp. while selecting that dwpt for flushing
            }
        }

        // Removes an aborted ThreadState's bytes from the accounting and resets it.
        internal void DoOnAbort(ThreadState state)
        {
            lock (this)
            {
                try
                {
                    if (state.flushPending)
                    {
                        flushBytes -= state.bytesUsed;
                    }
                    else
                    {
                        activeBytes -= state.bytesUsed;
                    }
                    Debug.Assert(AssertMemory());
                    // Take it out of the loop this DWPT is stale
                    perThreadPool.Reset(state, closed);
                }
                finally
                {
                    UpdateStallState();
                }
            }
        }

        // Checks a pending ThreadState's DWPT out for flushing; null if not pending.
        internal DocumentsWriterPerThread TryCheckoutForFlush(ThreadState perThread)
        {
            lock (this)
            {
                return perThread.flushPending ? InternalTryCheckOutForFlush(perThread) : null;
            }
        }

        // During a full flush: detaches a pending DWPT and parks it on blockedFlushes
        // so it is flushed only when the full flush allows it.
        private void CheckoutAndBlock(ThreadState perThread)
        {
            perThread.@Lock();
            try
            {
                Debug.Assert(perThread.flushPending, "can not block non-pending threadstate");
                Debug.Assert(fullFlush, "can not block if fullFlush == false");
                DocumentsWriterPerThread dwpt;
                long bytes = perThread.bytesUsed;
                dwpt = perThreadPool.Reset(perThread, closed);
                numPending--;
                blockedFlushes.AddLast(new BlockedFlush(dwpt, bytes));
            }
            finally
            {
                perThread.Unlock();
            }
        }

        // Core checkout: swaps the DWPT out of its ThreadState and registers it in
        // flushingWriters with its byte count. Returns null if the state can't be locked
        // or isn't initialized. Caller must hold lock(this).
        private DocumentsWriterPerThread InternalTryCheckOutForFlush(ThreadState perThread)
        {
            //Debug.Assert(Thread.HoldsLock(this));
            Debug.Assert(perThread.flushPending);
            try
            {
                // We are pending so all memory is already moved to flushBytes
                if (perThread.TryLock())
                {
                    try
                    {
                        if (perThread.IsInitialized)
                        {
                            //Debug.Assert(perThread.HeldByCurrentThread);
                            DocumentsWriterPerThread dwpt;
                            long bytes = perThread.bytesUsed; // do that before
                            // replace!
                            dwpt = perThreadPool.Reset(perThread, closed);
                            Debug.Assert(!flushingWriters.ContainsKey(dwpt), "DWPT is already flushing");
                            // Record the flushing DWPT to reduce flushBytes in doAfterFlush
                            flushingWriters[dwpt] = bytes;
                            numPending--; // write access synced
                            return dwpt;
                        }
                    }
                    finally
                    {
                        perThread.Unlock();
                    }
                }
                return null;
            }
            finally
            {
                UpdateStallState();
            }
        }

        public override string ToString()
        {
            return "DocumentsWriterFlushControl [activeBytes=" + activeBytes + ", flushBytes=" + flushBytes + "]";
        }

        // Returns the next DWPT to flush: first the flushQueue, then (outside a full
        // flush) any pending ThreadState that can be checked out; null when nothing is due.
        internal DocumentsWriterPerThread NextPendingFlush()
        {
            int numPending;
            bool fullFlush;
            lock (this)
            {
                DocumentsWriterPerThread poll;
                if (flushQueue.Count > 0 && (poll = flushQueue.Dequeue()) != null)
                {
                    UpdateStallState();
                    return poll;
                }
                fullFlush = this.fullFlush;
                numPending = this.numPending;
            }
            if (numPending > 0 && !fullFlush) // don't check if we are doing a full flush
            {
                int limit = perThreadPool.NumThreadStatesActive;
                for (int i = 0; i < limit && numPending > 0; i++)
                {
                    ThreadState next = perThreadPool.GetThreadState(i);
                    if (next.flushPending)
                    {
                        DocumentsWriterPerThread dwpt = TryCheckoutForFlush(next);
                        if (dwpt != null)
                        {
                            return dwpt;
                        }
                    }
                }
            }
            return null;
        }

        internal void SetClosed()
        {
            lock (this)
            {
                // set by DW to signal that we should not release new DWPT after close
                if (!closed)
                {
                    this.closed = true;
                    perThreadPool.DeactivateUnreleasedStates();
                }
            }
        }

        /// <summary>
        /// Returns an iterator that provides access to all currently active <see cref="ThreadState"/>s
        /// </summary>
        public IEnumerator<ThreadState> AllActiveThreadStates()
        {
            return GetPerThreadsIterator(perThreadPool.NumThreadStatesActive);
        }

        private IEnumerator<ThreadState> GetPerThreadsIterator(int upto)
        {
            return new IteratorAnonymousInnerClassHelper(this, upto);
        }

        // Simple forward-only enumerator over the first `upto` ThreadStates of the pool.
        private class IteratorAnonymousInnerClassHelper : IEnumerator<ThreadState>
        {
            private readonly DocumentsWriterFlushControl outerInstance;
            private ThreadState current;
            private int upto;
            private int i;

            public IteratorAnonymousInnerClassHelper(DocumentsWriterFlushControl outerInstance, int upto)
            {
                this.outerInstance = outerInstance;
                this.upto = upto;
                i = 0;
            }

            public ThreadState Current => current;

            public void Dispose()
            {
            }

            object System.Collections.IEnumerator.Current => Current;

            public bool MoveNext()
            {
                if (i < upto)
                {
                    current = outerInstance.perThreadPool.GetThreadState(i++);
                    return true;
                }
                return false;
            }

            public void Reset()
            {
                throw new NotSupportedException();
            }
        }

        internal void DoOnDelete()
        {
            lock (this)
            {
                // pass null this is a global delete no update
                flushPolicy.OnDelete(this, null);
            }
        }

        /// <summary>
        /// Returns the number of delete terms in the global pool
        /// </summary>
        public int NumGlobalTermDeletes => documentsWriter.deleteQueue.NumGlobalTermDeletes + bufferedUpdatesStream.NumTerms;

        public long DeleteBytesUsed => documentsWriter.deleteQueue.BytesUsed + bufferedUpdatesStream.BytesUsed;

        internal int NumFlushingDWPT
        {
            get
            {
                lock (this)
                {
                    return flushingWriters.Count;
                }
            }
        }

        public bool GetAndResetApplyAllDeletes()
        {
            return flushDeletes.GetAndSet(false);
        }

        public void SetApplyAllDeletes()
        {
            flushDeletes.Value = true;
        }

        internal int NumActiveDWPT => this.perThreadPool.NumThreadStatesActive;

        // Obtains and locks a ThreadState for the current thread; if a full flush made
        // the state stale (old delete queue), enrolls it for flush first.
        internal ThreadState ObtainAndLock()
        {
            ThreadState perThread = perThreadPool.GetAndLock(Thread.CurrentThread, documentsWriter);
            bool success = false;
            try
            {
                if (perThread.IsInitialized && perThread.dwpt.deleteQueue != documentsWriter.deleteQueue)
                {
                    // There is a flush-all in process and this DWPT is
                    // now stale -- enroll it for flush and try for
                    // another DWPT:
                    AddFlushableState(perThread);
                }
                success = true;
                // simply return the ThreadState even in a flush all case sine we already hold the lock
                return perThread;
            }
            finally
            {
                if (!success) // make sure we unlock if this fails
                {
                    perThread.Unlock();
                }
            }
        }

        // Begins a full flush: swaps in a new delete queue, then enrolls every DWPT
        // that still references the old queue into the flush queue.
        internal void MarkForFullFlush()
        {
            DocumentsWriterDeleteQueue flushingQueue;
            lock (this)
            {
                Debug.Assert(!fullFlush, "called DWFC#markForFullFlush() while full flush is still running");
                Debug.Assert(fullFlushBuffer.Count == 0, "full flush buffer should be empty: " + fullFlushBuffer);
                fullFlush = true;
                flushingQueue = documentsWriter.deleteQueue;
                // Set a new delete queue - all subsequent DWPT will use this queue until
                // we do another full flush
                DocumentsWriterDeleteQueue newQueue = new DocumentsWriterDeleteQueue(flushingQueue.generation + 1);
                documentsWriter.deleteQueue = newQueue;
            }
            int limit = perThreadPool.NumThreadStatesActive;
            for (int i = 0; i < limit; i++)
            {
                ThreadState next = perThreadPool.GetThreadState(i);
                next.@Lock();
                try
                {
                    if (!next.IsInitialized)
                    {
                        if (closed && next.IsActive)
                        {
                            perThreadPool.DeactivateThreadState(next);
                        }
                        continue;
                    }
                    Debug.Assert(next.dwpt.deleteQueue == flushingQueue || next.dwpt.deleteQueue == documentsWriter.deleteQueue, " flushingQueue: " + flushingQueue + " currentqueue: " + documentsWriter.deleteQueue + " perThread queue: " + next.dwpt.deleteQueue + " numDocsInRam: " + next.dwpt.NumDocsInRAM);
                    if (next.dwpt.deleteQueue != flushingQueue)
                    {
                        // this one is already a new DWPT
                        continue;
                    }
                    AddFlushableState(next);
                }
                finally
                {
                    next.Unlock();
                }
            }
            lock (this)
            {
                /* make sure we move all DWPT that are where concurrently marked as
                 * pending and moved to blocked are moved over to the flushQueue. There is
                 * a chance that this happens since we marking DWPT for full flush without
                 * blocking indexing.*/
                PruneBlockedQueue(flushingQueue);
                Debug.Assert(AssertBlockedFlushes(documentsWriter.deleteQueue));
                //FlushQueue.AddAll(FullFlushBuffer);
                foreach (var dwpt in fullFlushBuffer)
                {
                    flushQueue.Enqueue(dwpt);
                }
                fullFlushBuffer.Clear();
                UpdateStallState();
            }
            Debug.Assert(AssertActiveDeleteQueue(documentsWriter.deleteQueue));
        }

        private bool AssertActiveDeleteQueue(DocumentsWriterDeleteQueue queue)
        {
            int limit = perThreadPool.NumThreadStatesActive;
            for (int i = 0; i < limit; i++)
            {
                ThreadState next = perThreadPool.GetThreadState(i);
                next.@Lock();
                try
                {
                    Debug.Assert(!next.IsInitialized || next.dwpt.deleteQueue == queue, "isInitialized: " + next.IsInitialized + " numDocs: " + (next.IsInitialized ? next.dwpt.NumDocsInRAM : 0));
                }
                finally
                {
                    next.Unlock();
                }
            }
            return true;
        }

        // DWPTs collected during MarkForFullFlush, moved to flushQueue when it completes.
        private readonly IList<DocumentsWriterPerThread> fullFlushBuffer = new List<DocumentsWriterPerThread>();

        // During a full flush: marks the ThreadState pending (if it holds docs), checks
        // its DWPT out, and buffers it; empty DWPTs are simply reset/deactivated.
        internal void AddFlushableState(ThreadState perThread)
        {
            if (infoStream.IsEnabled("DWFC"))
            {
                infoStream.Message("DWFC", "addFlushableState " + perThread.dwpt);
            }
            DocumentsWriterPerThread dwpt = perThread.dwpt;
            //Debug.Assert(perThread.HeldByCurrentThread);
            Debug.Assert(perThread.IsInitialized);
            Debug.Assert(fullFlush);
            Debug.Assert(dwpt.deleteQueue != documentsWriter.deleteQueue);
            if (dwpt.NumDocsInRAM > 0)
            {
                lock (this)
                {
                    if (!perThread.flushPending)
                    {
                        SetFlushPending(perThread);
                    }
                    DocumentsWriterPerThread flushingDWPT = InternalTryCheckOutForFlush(perThread);
                    Debug.Assert(flushingDWPT != null, "DWPT must never be null here since we hold the lock and it holds documents");
                    Debug.Assert(dwpt == flushingDWPT, "flushControl returned different DWPT");
                    fullFlushBuffer.Add(flushingDWPT);
                }
            }
            else
            {
                perThreadPool.Reset(perThread, closed); // make this state inactive
            }
        }

        /// <summary>
        /// Prunes the blockedQueue by removing all DWPT that are associated with the given flush queue.
        /// </summary>
        private void PruneBlockedQueue(DocumentsWriterDeleteQueue flushingQueue)
        {
            var node = blockedFlushes.First;
            while (node != null)
            {
                var nextNode = node.Next;
                BlockedFlush blockedFlush = node.Value;
                if (blockedFlush.Dwpt.deleteQueue == flushingQueue)
                {
                    blockedFlushes.Remove(node);
                    Debug.Assert(!flushingWriters.ContainsKey(blockedFlush.Dwpt), "DWPT is already flushing");
                    // Record the flushing DWPT to reduce flushBytes in doAfterFlush
                    flushingWriters[blockedFlush.Dwpt] = blockedFlush.Bytes;
                    // don't decr pending here - its already done when DWPT is blocked
                    flushQueue.Enqueue(blockedFlush.Dwpt);
                }
                node = nextNode;
            }
        }

        // Ends a full flush: releases any remaining blocked flushes of the current queue
        // and clears the fullFlush flag.
        internal void FinishFullFlush()
        {
            lock (this)
            {
                Debug.Assert(fullFlush);
                Debug.Assert(flushQueue.Count == 0);
                Debug.Assert(flushingWriters.Count == 0);
                try
                {
                    if (blockedFlushes.Count > 0)
                    {
                        Debug.Assert(AssertBlockedFlushes(documentsWriter.deleteQueue));
                        PruneBlockedQueue(documentsWriter.deleteQueue);
                        Debug.Assert(blockedFlushes.Count == 0);
                    }
                }
                finally
                {
                    fullFlush = false;
                    UpdateStallState();
                }
            }
        }

        internal bool AssertBlockedFlushes(DocumentsWriterDeleteQueue flushingQueue)
        {
            foreach (BlockedFlush blockedFlush in blockedFlushes)
            {
                Debug.Assert(blockedFlush.Dwpt.deleteQueue == flushingQueue);
            }
            return true;
        }

        internal void AbortFullFlushes(ISet<string> newFiles)
        {
            lock (this)
            {
                try
                {
                    AbortPendingFlushes(newFiles);
                }
                finally
                {
                    fullFlush = false;
                }
            }
        }

        // Aborts every queued and blocked flush, best-effort (per-DWPT exceptions are
        // swallowed so the remaining queues are still drained).
        internal void AbortPendingFlushes(ISet<string> newFiles)
        {
            lock (this)
            {
                try
                {
                    foreach (DocumentsWriterPerThread dwpt in flushQueue)
                    {
                        try
                        {
                            documentsWriter.SubtractFlushedNumDocs(dwpt.NumDocsInRAM);
                            dwpt.Abort(newFiles);
                        }
                        catch (Exception)
                        {
                            // ignore - keep on aborting the flush queue
                        }
                        finally
                        {
                            DoAfterFlush(dwpt);
                        }
                    }
                    foreach (BlockedFlush blockedFlush in blockedFlushes)
                    {
                        try
                        {
                            // Register the blocked DWPT so DoAfterFlush can account for it.
                            flushingWriters[blockedFlush.Dwpt] = blockedFlush.Bytes;
                            documentsWriter.SubtractFlushedNumDocs(blockedFlush.Dwpt.NumDocsInRAM);
                            blockedFlush.Dwpt.Abort(newFiles);
                        }
                        catch (Exception)
                        {
                            // ignore - keep on aborting the blocked queue
                        }
                        finally
                        {
                            DoAfterFlush(blockedFlush.Dwpt);
                        }
                    }
                }
                finally
                {
                    flushQueue.Clear();
                    blockedFlushes.Clear();
                    UpdateStallState();
                }
            }
        }

        /// <summary>
        /// Returns <c>true</c> if a full flush is currently running
        /// </summary>
        internal bool IsFullFlush
        {
            get
            {
                lock (this)
                {
                    return fullFlush;
                }
            }
        }

        /// <summary>
        /// Returns the number of flushes that are already checked out but not yet
        /// actively flushing
        /// </summary>
        internal int NumQueuedFlushes
        {
            get
            {
                lock (this)
                {
                    return flushQueue.Count;
                }
            }
        }

        /// <summary>
        /// Returns the number of flushes that are checked out but not yet available
        /// for flushing. This only applies during a full flush if a DWPT needs
        /// flushing but must not be flushed until the full flush has finished.
        /// </summary>
        internal int NumBlockedFlushes
        {
            get
            {
                lock (this)
                {
                    return blockedFlushes.Count;
                }
            }
        }

        // Pairs a checked-out DWPT with the byte count it carried when it was blocked.
        private class BlockedFlush
        {
            internal DocumentsWriterPerThread Dwpt { get; private set; }
            internal long Bytes { get; private set; }

            internal BlockedFlush(DocumentsWriterPerThread dwpt, long bytes)
                : base()
            {
                this.Dwpt = dwpt;
                this.Bytes = bytes;
            }
        }

        /// <summary>
        /// This method will block if too many DWPT are currently flushing and no
        /// checked out DWPT are available
        /// </summary>
        internal void WaitIfStalled()
        {
            if (infoStream.IsEnabled("DWFC"))
            {
                infoStream.Message("DWFC", "waitIfStalled: numFlushesPending: " + flushQueue.Count + " netBytes: " + NetBytes + " flushBytes: " + FlushBytes + " fullFlush: " + fullFlush);
            }
            stallControl.WaitIfStalled();
        }

        /// <summary>
        /// Returns <c>true</c> iff stalled
        /// </summary>
        internal bool AnyStalledThreads()
        {
            return stallControl.AnyStalledThreads();
        }

        /// <summary>
        /// Returns the <see cref="IndexWriter"/> <see cref="Util.InfoStream"/>
        /// </summary>
        public InfoStream InfoStream => infoStream;
    }
}
/*
 *
 * (c) Copyright Ascensio System Limited 2010-2021
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;

using ASC.Common;
using ASC.Common.Security.Authentication;
using ASC.Common.Web;
using ASC.Files.Core;
using ASC.MessagingSystem;
using ASC.Web.Core.Files;
using ASC.Web.Files.Classes;
using ASC.Web.Files.Core.Compress;
using ASC.Web.Files.Helpers;
using ASC.Web.Files.Resources;
using ASC.Web.Files.Utils;

using File = ASC.Files.Core.File;

namespace ASC.Web.Files.Services.WCFService.FileOperations
{
    /// <summary>
    /// Long-running file operation that collects the requested files and folders,
    /// compresses them into a single archive and saves the archive into the
    /// temporary storage domain so the user can download it in one request.
    /// </summary>
    class FileDownloadOperation : FileOperation
    {
        // Map: file id (as supplied by the caller) -> extension to convert the file
        // to before archiving; an empty value means "download as is".
        private readonly Dictionary<object, string> files;
        // HTTP headers of the originating request, forwarded to the messaging service
        // so download events are attributed correctly.
        private readonly Dictionary<string, string> headers;

        public override FileOperationType OperationType
        {
            get { return FileOperationType.Download; }
        }

        /// <summary>
        /// Initializes the operation.
        /// </summary>
        /// <param name="folders">Folder ids to include (values are unused here).</param>
        /// <param name="files">File ids mapped to an optional conversion extension.</param>
        /// <param name="headers">Request headers used for audit messaging.</param>
        public FileDownloadOperation(Dictionary<object, string> folders, Dictionary<object, string> files, Dictionary<string, string> headers)
            : base(folders.Select(f => f.Key).ToList(), files.Select(f => f.Key).ToList())
        {
            this.files = files;
            this.headers = headers;
        }

        /// <summary>
        /// Entry point of the operation: resolves entry paths, shortens over-long
        /// archive paths, compresses everything and stores the resulting archive.
        /// Throws if nothing readable was found.
        /// </summary>
        protected override void Do()
        {
            var entriesPathId = GetEntriesPathId();
            if (entriesPathId == null || entriesPathId.Count == 0)
            {
                if (0 < Files.Count)
                    throw new FileNotFoundException(FilesCommonResource.ErrorMassage_FileNotFound);
                throw new DirectoryNotFoundException(FilesCommonResource.ErrorMassage_FolderNotFound);
            }

            ReplaceLongPath(entriesPathId);

            using (var stream = CompressTo(entriesPathId))
            {
                if (stream != null)
                {
                    stream.Position = 0;
                    string fileName = FileConstant.DownloadTitle + CompressToArchive.Instance.ArchiveExtension;
                    var store = Global.GetStore();
                    // One archive per user: the path is keyed by the current account id.
                    var path = string.Format(@"{0}\{1}", ((IAccount)Thread.CurrentPrincipal.Identity).ID, fileName);

                    if (store.IsFile(FileConstant.StorageDomainTmp, path))
                    {
                        store.Delete(FileConstant.StorageDomainTmp, path);
                    }

                    store.Save(
                        FileConstant.StorageDomainTmp,
                        path,
                        stream,
                        MimeMapping.GetMimeMapping(path),
                        "attachment; filename=\"" + fileName + "\"");
                    // Status carries the bulk-download URL back to the client.
                    Status = string.Format("{0}?{1}=bulk&ext={2}", FilesLinkUtility.FileHandlerPath, FilesLinkUtility.Action, CompressToArchive.Instance.ArchiveExtension);
                }
            }
        }

        /// <summary>
        /// Builds the single (archive path -> file id) entry for one file, applying
        /// the requested conversion extension to the displayed title if any.
        /// </summary>
        private ItemNameValueCollection ExecPathFromFile(File file, string path)
        {
            FileMarker.RemoveMarkAsNew(file);

            var title = file.Title;

            if (files.ContainsKey(file.ID.ToString()))
            {
                var convertToExt = files[file.ID.ToString()];
                if (!string.IsNullOrEmpty(convertToExt))
                {
                    title = FileUtility.ReplaceFileExtension(title, convertToExt);
                }
            }

            var entriesPathId = new ItemNameValueCollection();
            entriesPathId.Add(path + title, file.ID.ToString());
            return entriesPathId;
        }

        /// <summary>
        /// Resolves all requested files and folders (recursively) into a
        /// (archive path -> entry id) collection, filtered by read permission.
        /// </summary>
        private ItemNameValueCollection GetEntriesPathId()
        {
            var entriesPathId = new ItemNameValueCollection();
            if (0 < Files.Count)
            {
                var files = FileDao.GetFiles(Files);
                files = FilesSecurity.FilterRead(files);
                files.ForEach(file => entriesPathId.Add(ExecPathFromFile(file, string.Empty)));
            }
            if (0 < Folders.Count)
            {
                // BUGFIX: was FolderDao.GetFolders(Files) — file ids were passed where
                // folder ids are expected, so the requested top-level folders were
                // never fetched and never unmarked as new.
                FilesSecurity.FilterRead(FolderDao.GetFolders(Folders))
                             .ForEach(folder => FileMarker.RemoveMarkAsNew(folder));

                var filesInFolder = GetFilesInFolders(Folders, string.Empty);
                entriesPathId.Add(filesInFolder);
            }
            return entriesPathId;
        }

        /// <summary>
        /// Recursively walks the given folders collecting readable files; an empty
        /// readable folder is represented by a path entry with an empty id so the
        /// archive preserves the folder structure.
        /// </summary>
        private ItemNameValueCollection GetFilesInFolders(IEnumerable<object> folderIds, string path)
        {
            CancellationToken.ThrowIfCancellationRequested();

            var entriesPathId = new ItemNameValueCollection();
            foreach (var folderId in folderIds)
            {
                CancellationToken.ThrowIfCancellationRequested();

                var folder = FolderDao.GetFolder(folderId);
                if (folder == null || !FilesSecurity.CanRead(folder)) continue;

                var folderPath = path + folder.Title + "/";

                var files = FileDao.GetFiles(folder.ID, null, FilterType.None, false, Guid.Empty, string.Empty, true);
                files = FilesSecurity.FilterRead(files);
                files.ForEach(file => entriesPathId.Add(ExecPathFromFile(file, folderPath)));

                FileMarker.RemoveMarkAsNew(folder);

                var nestedFolders = FolderDao.GetFolders(folder.ID);
                nestedFolders = FilesSecurity.FilterRead(nestedFolders);
                if (files.Count == 0 && nestedFolders.Count == 0)
                {
                    // Keep empty folders in the archive as directory-only entries.
                    entriesPathId.Add(folderPath, String.Empty);
                }

                var filesInFolder = GetFilesInFolders(nestedFolders.ConvertAll(f => f.ID), folderPath);
                entriesPathId.Add(filesInFolder);
            }
            return entriesPathId;
        }

        /// <summary>
        /// Writes every resolved entry into a new archive stream. Duplicate archive
        /// paths get a " (n)" suffix inserted before the extension. Conversion is
        /// performed on the fly where enabled; failures are recorded in Error and
        /// the remaining entries are still processed.
        /// </summary>
        private Stream CompressTo(ItemNameValueCollection entriesPathId)
        {
            var stream = TempStream.Create();

            using (ICompress compressTo = new CompressToArchive(stream))
            {
                foreach (var path in entriesPathId.AllKeys)
                {
                    var counter = 0;
                    foreach (var entryId in entriesPathId[path])
                    {
                        if (CancellationToken.IsCancellationRequested)
                        {
                            // Release the archive and backing stream before aborting.
                            compressTo.Dispose();
                            stream.Dispose();
                            CancellationToken.ThrowIfCancellationRequested();
                        }

                        var newtitle = path;

                        File file = null;
                        var convertToExt = string.Empty;

                        if (!string.IsNullOrEmpty(entryId))
                        {
                            FileDao.InvalidateCache(entryId);
                            file = FileDao.GetFile(entryId);

                            if (file == null)
                            {
                                Error = FilesCommonResource.ErrorMassage_FileNotFound;
                                continue;
                            }

                            if (files.ContainsKey(file.ID.ToString()))
                            {
                                convertToExt = files[file.ID.ToString()];
                                if (!string.IsNullOrEmpty(convertToExt))
                                {
                                    newtitle = FileUtility.ReplaceFileExtension(path, convertToExt);
                                }
                            }
                        }

                        if (0 < counter)
                        {
                            // Same archive path seen again: disambiguate file entries
                            // with a " (n)" suffix; duplicate folder entries are skipped.
                            var suffix = " (" + counter + ")";

                            if (!string.IsNullOrEmpty(entryId))
                            {
                                newtitle = 0 < newtitle.IndexOf('.') ? newtitle.Insert(newtitle.LastIndexOf('.'), suffix) : newtitle + suffix;
                            }
                            else
                            {
                                break;
                            }
                        }

                        compressTo.CreateEntry(newtitle);

                        if (!string.IsNullOrEmpty(entryId) && file != null)
                        {
                            try
                            {
                                if (FileConverter.EnableConvert(file, convertToExt))
                                {
                                    // Take the content from the converter.
                                    using (var readStream = FileConverter.Exec(file, convertToExt))
                                    {
                                        compressTo.PutStream(readStream);

                                        if (!string.IsNullOrEmpty(convertToExt))
                                        {
                                            FilesMessageService.Send(file, headers, MessageAction.FileDownloadedAs, file.Title, convertToExt);
                                        }
                                        else
                                        {
                                            FilesMessageService.Send(file, headers, MessageAction.FileDownloaded, file.Title);
                                        }
                                    }
                                }
                                else
                                {
                                    using (var readStream = FileDao.GetFileStream(file))
                                    {
                                        compressTo.PutStream(readStream);

                                        FilesMessageService.Send(file, headers, MessageAction.FileDownloaded, file.Title);
                                    }
                                }
                            }
                            catch (Exception ex)
                            {
                                // Best-effort: remember the last error and continue with
                                // the remaining entries instead of failing the archive.
                                Error = ex.Message;
                                Logger.Error(Error, ex);
                            }
                        }
                        else
                        {
                            // Folder entry (empty id): emit a directory-only record.
                            compressTo.PutNextEntry();
                        }

                        compressTo.CloseEntry();
                        counter++;
                    }

                    ProgressStep();
                }
            }

            return stream;
        }

        /// <summary>
        /// Replaces archive paths longer than 200 characters with a shortened
        /// "LONG_FOLDER_NAME/&lt;leaf&gt;" form to avoid path-length limits in
        /// archive extractors.
        /// </summary>
        private void ReplaceLongPath(ItemNameValueCollection entriesPathId)
        {
            foreach (var path in new List<string>(entriesPathId.AllKeys))
            {
                CancellationToken.ThrowIfCancellationRequested();

                // Only rewrite paths that are too long AND contain a non-leading '/'.
                if (200 >= path.Length || 0 >= path.IndexOf('/')) continue;

                var ids = entriesPathId[path];
                entriesPathId.Remove(path);

                var newtitle = "LONG_FOLDER_NAME" + path.Substring(path.LastIndexOf('/'));
                entriesPathId.Add(newtitle, ids);
            }
        }

        /// <summary>
        /// Multimap of archive path -> entry ids (a path may map to several ids
        /// when titles collide).
        /// </summary>
        class ItemNameValueCollection
        {
            private readonly Dictionary<string, List<string>> dic = new Dictionary<string, List<string>>();

            public IEnumerable<string> AllKeys
            {
                get { return dic.Keys; }
            }

            public IEnumerable<string> this[string name]
            {
                // Snapshot copy so callers can mutate the collection while iterating.
                get { return dic[name].ToArray(); }
            }

            public int Count
            {
                get { return dic.Count; }
            }

            public void Add(string name, string value)
            {
                if (!dic.ContainsKey(name))
                {
                    dic.Add(name, new List<string>());
                }
                dic[name].Add(value);
            }

            public void Add(ItemNameValueCollection collection)
            {
                foreach (var key in collection.AllKeys)
                {
                    foreach (var value in collection[key])
                    {
                        Add(key, value);
                    }
                }
            }

            public void Add(string name, IEnumerable<string> values)
            {
                if (!dic.ContainsKey(name))
                {
                    dic.Add(name, new List<string>());
                }
                dic[name].AddRange(values);
            }

            public void Remove(string name)
            {
                dic.Remove(name);
            }
        }
    }
}
// Borrowed from the Papercut project: papercut.codeplex.com.

using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Mime;

namespace SpecsFor.Mvc.Smtp.Mime
{
    /// <summary>
    /// This class is responsible for parsing a string array of lines
    /// containing a MIME message.
    /// </summary>
    public class MimeReader
    {
        // Characters trimmed from header values (RFC 2822 folding whitespace).
        private static readonly char[] HeaderWhitespaceChars = new char[] { ' ', '\t' };

        private Queue<string> _lines;

        /// <summary>
        /// Gets the lines.
        /// </summary>
        /// <value>The lines remaining to be parsed.</value>
        public Queue<string> Lines
        {
            get { return _lines; }
        }

        private MimeEntity _entity;

        /// <summary>
        /// Initializes a new instance of the <see cref="MimeReader"/> class.
        /// </summary>
        private MimeReader()
        {
            _entity = new MimeEntity();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="MimeReader"/> class
        /// for parsing a child part that shares the parent's line queue.
        /// </summary>
        /// <param name="entity">The parent entity.</param>
        /// <param name="lines">The shared queue of unparsed lines.</param>
        private MimeReader(MimeEntity entity, Queue<string> lines)
            : this()
        {
            if (entity == null)
            {
                throw new ArgumentNullException("entity");
            }
            if (lines == null)
            {
                throw new ArgumentNullException("lines");
            }

            _lines = lines;
            _entity = new MimeEntity(entity);
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="MimeReader"/> class.
        /// </summary>
        /// <param name="lines">The raw message lines.</param>
        public MimeReader(string[] lines)
            : this()
        {
            if (lines == null)
            {
                throw new ArgumentNullException("lines");
            }

            _lines = new Queue<string>(lines);
        }

        /// <summary>
        /// Parse headers into _entity.Headers NameValueCollection.
        /// Header names are lower-cased; folded (continuation) lines are appended
        /// to the previous header's value.
        /// </summary>
        /// <returns>The number of headers parsed.</returns>
        private int ParseHeaders()
        {
            string lastHeader = string.Empty;
            string line = string.Empty;

            // The first empty line is the end of the headers.
            while (_lines.Count > 0 && !string.IsNullOrEmpty(_lines.Peek()))
            {
                line = _lines.Dequeue();

                // If a header line starts with a space or tab then it is a
                // continuation of the previous line.
                if (line.StartsWith(" ") || line.StartsWith("\t"))
                {
                    _entity.Headers[lastHeader] = string.Concat(_entity.Headers[lastHeader], line);
                    continue;
                }

                int separatorIndex = line.IndexOf(':');

                if (separatorIndex < 0)
                {
                    // This is an invalid header field. Ignore this line.
                    System.Diagnostics.Debug.WriteLine("Invalid header: " + line);
                    continue;
                }

                string headerName = line.Substring(0, separatorIndex);
                string headerValue = line.Substring(separatorIndex + 1).Trim(HeaderWhitespaceChars);

                _entity.Headers.Add(headerName.ToLower(), headerValue);
                lastHeader = headerName;
            }

            if (_lines.Count > 0)
            {
                // Remove the closing header CRLF (the blank separator line).
                _lines.Dequeue();
            }

            return _entity.Headers.Count;
        }

        /// <summary>
        /// Processes mime specific headers.
        /// </summary>
        /// <returns>A mime entity with mime specific headers parsed.</returns>
        private void ProcessHeaders()
        {
            foreach (string key in _entity.Headers.AllKeys)
            {
                switch (key)
                {
                    case "content-description":
                        _entity.ContentDescription = _entity.Headers[key];
                        break;
                    case "content-disposition":
                        _entity.ContentDisposition = new ContentDisposition(_entity.Headers[key]);
                        break;
                    case "content-id":
                        _entity.ContentId = _entity.Headers[key];
                        break;
                    case "content-transfer-encoding":
                        _entity.TransferEncoding = _entity.Headers[key];
                        _entity.ContentTransferEncoding = MimeReader.GetTransferEncoding(_entity.Headers[key]);
                        break;
                    case "content-type":
                        _entity.SetContentType(MimeReader.GetContentType(_entity.Headers[key]));
                        break;
                    case "mime-version":
                        _entity.MimeVersion = _entity.Headers[key];
                        break;
                }
            }
        }

        /// <summary>
        /// Creates the MIME entity.
        /// </summary>
        /// <returns>A mime entity containing 0 or more children representing the mime message.</returns>
        public MimeEntity CreateMimeEntity()
        {
            ParseHeaders();

            ProcessHeaders();

            ParseBody();

            SetDecodedContentStream();

            return _entity;
        }

        /// <summary>
        /// Sets the decoded content stream by decoding the EncodedMessage
        /// and writing it to the entity content stream.
        /// </summary>
        private void SetDecodedContentStream()
        {
            switch (_entity.ContentTransferEncoding)
            {
                case System.Net.Mime.TransferEncoding.Base64:
                    _entity.Content = new MemoryStream(Convert.FromBase64String(_entity.EncodedMessage.ToString()), false);
                    break;
                case System.Net.Mime.TransferEncoding.QuotedPrintable:
                    _entity.Content = new MemoryStream(GetBytes(QuotedPrintableEncoding.Decode(_entity.EncodedMessage.ToString())), false);
                    break;
                case System.Net.Mime.TransferEncoding.SevenBit:
                default:
                    // 7bit (and unknown encodings) need no decoding.
                    _entity.Content = new MemoryStream(GetBytes(_entity.EncodedMessage.ToString()), false);
                    break;
            }
        }

        /// <summary>
        /// Gets a byte[] of content for the provided string.
        /// </summary>
        /// <param name="content">Content.</param>
        /// <returns>A byte[] containing content.</returns>
        private byte[] GetBytes(string content)
        {
            using (MemoryStream stream = new MemoryStream())
            {
                using (StreamWriter writer = new StreamWriter(stream))
                {
                    writer.Write(content);
                }
                return stream.ToArray();
            }
        }

        /// <summary>
        /// Parses the body. Multipart bodies are split at the entity boundary
        /// into child entities; single part bodies are appended verbatim to
        /// EncodedMessage with CRLF line endings restored.
        /// </summary>
        private void ParseBody()
        {
            if (_entity.HasBoundary)
            {
                // Parse a multipart message.
                while (_lines.Count > 0 && !string.Equals(_lines.Peek(), _entity.EndBoundary))
                {
                    /*Check to verify the current line is not the same as the parent starting boundary.
                      If it is the same as the parent starting boundary this indicates existence of a
                      new child entity. Return and process the next child.*/
                    if (_entity.Parent != null && string.Equals(_entity.Parent.StartBoundary, _lines.Peek()))
                    {
                        return;
                    }

                    if (string.Equals(_lines.Peek(), _entity.StartBoundary))
                    {
                        // Parse a new child mime part.
                        AddChildEntity(_entity, _lines);
                    }
                    else if (string.Equals(_entity.ContentType.MediaType, MediaTypes.MessageRfc822, StringComparison.InvariantCultureIgnoreCase)
                        && string.Equals(_entity.ContentDisposition.DispositionType, DispositionTypeNames.Attachment, StringComparison.InvariantCultureIgnoreCase))
                    {
                        /*If the content type is message/rfc822 the stop condition to parse headers has already been encountered.
                         But, a content type of message/rfc822 would have the message headers immediately following the mime
                         headers so we need to parse the headers for the attached message now. This is done by creating
                         a new child entity.*/
                        AddChildEntity(_entity, _lines);
                        break;
                    }
                    else
                    {
                        // Append the message content.
                        _entity.EncodedMessage.Append(string.Concat(_lines.Dequeue(), "\r\n"));
                    }
                }
            }
            else
            {
                // Parse a single part message.
                while (_lines.Count > 0)
                {
                    _entity.EncodedMessage.Append(string.Concat(_lines.Dequeue(), "\r\n"));
                }
            }
        }

        /// <summary>
        /// Adds the child entity.
        /// </summary>
        /// <param name="entity">The parent entity.</param>
        /// <param name="lines">The shared queue of unparsed lines.</param>
        private void AddChildEntity(MimeEntity entity, Queue<string> lines)
        {
            MimeReader reader = new MimeReader(entity, lines);
            entity.Children.Add(reader.CreateMimeEntity());
        }

        /// <summary>
        /// Gets the type of the content.
        /// </summary>
        /// <param name="contentType">Type of the content.</param>
        /// <returns>A <see cref="ContentType"/>; text/plain; charset=us-ascii when the header is absent.</returns>
        public static ContentType GetContentType(string contentType)
        {
            if (string.IsNullOrEmpty(contentType))
            {
                contentType = "text/plain; charset=us-ascii";
            }

            try
            {
                return new ContentType(contentType);
            }
            // BUGFIX: the exception variable 'f' was declared but never used (CS0168).
            catch (FormatException)
            {
                // Retry after stripping quotes and space-padded '=' that
                // ContentType's parser rejects.
                return new ContentType(
                    contentType
                        .Replace("\"", "")
                        .Replace(" = ", "="));
            }
        }

        /// <summary>
        /// Gets the type of the media.
        /// </summary>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns>The trimmed media type; text/plain when empty.</returns>
        public static string GetMediaType(string mediaType)
        {
            if (string.IsNullOrEmpty(mediaType))
            {
                return "text/plain";
            }
            return mediaType.Trim();
        }

        /// <summary>
        /// Gets the type of the media main.
        /// </summary>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns>The part before '/', or the whole string if no '/' is present.</returns>
        public static string GetMediaMainType(string mediaType)
        {
            int separatorIndex = mediaType.IndexOf('/');
            if (separatorIndex < 0)
            {
                return mediaType;
            }
            else
            {
                return mediaType.Substring(0, separatorIndex);
            }
        }

        /// <summary>
        /// Gets the type of the media sub.
        /// </summary>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns>The part after '/'; "plain" for a bare "text" main type; otherwise empty.</returns>
        public static string GetMediaSubType(string mediaType)
        {
            int separatorIndex = mediaType.IndexOf('/');
            if (separatorIndex < 0)
            {
                if (mediaType.Equals("text"))
                {
                    return "plain";
                }
                return string.Empty;
            }
            else
            {
                if (mediaType.Length > separatorIndex)
                {
                    return mediaType.Substring(separatorIndex + 1);
                }
                else
                {
                    string mainType = GetMediaMainType(mediaType);
                    if (mainType.Equals("text"))
                    {
                        return "plain";
                    }
                    return string.Empty;
                }
            }
        }

        /// <summary>
        /// Gets the transfer encoding.
        /// </summary>
        /// <param name="transferEncoding">The transfer encoding.</param>
        /// <returns>The mapped <see cref="TransferEncoding"/>.</returns>
        /// <remarks>
        /// The transfer encoding determination follows the same rules as
        /// Peter Huber's article w/ the exception of not throwing exceptions
        /// when binary is provided as a transferEncoding. Instead it is left
        /// to the calling code to check for binary.
        /// </remarks>
        public static TransferEncoding GetTransferEncoding(string transferEncoding)
        {
            switch (transferEncoding.Trim().ToLowerInvariant())
            {
                case "7bit":
                case "8bit":
                    return System.Net.Mime.TransferEncoding.SevenBit;
                case "quoted-printable":
                    return System.Net.Mime.TransferEncoding.QuotedPrintable;
                case "base64":
                    return System.Net.Mime.TransferEncoding.Base64;
                case "binary":
                default:
                    return System.Net.Mime.TransferEncoding.Unknown;
            }
        }
    }
}
/*
   Copyright 2019 Esri
   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at
       http://www.apache.org/licenses/LICENSE-2.0
   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Windows.Input;
using ArcGIS.Desktop.Framework;
using ArcGIS.Desktop.Framework.Contracts;
using System.Threading.Tasks;
using System.Windows.Threading;
using ArcGIS.Core.CIM;
using ArcGIS.Core.Geometry;
using ArcGIS.Desktop.Framework.Threading.Tasks;
using ArcGIS.Desktop.Mapping;
using BingStreetside.Utility;

namespace BingStreetside
{
    /// <summary>
    /// This sample demonstrates the usage of the WebBrowser control and how to interface between C# and HTML5/JavaScript and vice versa.
    /// The sample is using a Bing Map's Streetside API to demonstrate these functions. In order to use this sample you have to apply with Bing Maps for a Bing Maps API developer key. You can find the instructions on how to do this below.
    /// </summary>
    /// <remarks>
    /// Using Bing Maps API: To use the Bing Maps APIs, you must have a (Bing Maps Key)[https://msdn.microsoft.com/en-us/library/dd877180.aspx].
    /// Note: When you use the Bing Maps APIs with a Bing Maps Key, usage transactions are logged. See Understanding (Bing Maps Transactions)[https://msdn.microsoft.com/en-us/library/ff859477.aspx] for more information.
    /// Creating a Bing Maps Key
    /// 1. Go to the Bing Maps Dev Center at https://www.bingmapsportal.com/.
    /// ** If you have a Bing Maps account, sign in with the Microsoft account that you used to create the account or create a new one.For new accounts, follow the instructions in (Creating a Bing Maps Account)[https://msdn.microsoft.com/en-us/library/gg650598.aspx].
    /// 2. Select Keys under My Account.
    /// 3. Provide the following information to create a key:
    /// ** Application name: Required.The name of the application.
    /// ** Application URL: The URL of the application.
    /// ** Key type: Required. Select the key type that you want to create.You can find descriptions of key and application types (here)[https://www.microsoft.com/maps/create-a-bing-maps-key.aspx].
    /// ** Application type: Required. Select the application type that best represents the application that will use this key.You can find descriptions of key and application types (here)[https://www.microsoft.com/maps/create-a-bing-maps-key.aspx].
    /// 4. Type the characters of the security code, and then click Create. The new key displays in the list of available keys.Use this key to authenticate your Bing Maps application as described in the documentation for the Bing Maps API you are using.
    ///
    /// Note: the Bing map preview SDK overview used in this sample can be found here: https://www.bing.com/mapspreview/sdk/mapcontrol/isdk#overview
    ///
    /// Using the sample:
    /// 1. In Visual Studio click the Build menu. Then select Build Solution.
    /// 1. Click Start button to open ArcGIS Pro.
    /// 1. ArcGIS Pro will open.
    /// 1. Create a new project using the Map.aptx template.
    /// 1. With a map view active go to the "Bing Streetside" tab and click the "Show Bing Streetside Pane" button.
    /// 1. This will open the "Bing Streetside Viewer" dock pane.
    /// ![UI](Screenshots/screenshot1.png)
    /// 1. Paste the "Bing Maps Key" that you obtained from Microsoft (see instructions above) and click the "Define Bing Map Key" button.
    /// 1. For convenience you can also define your Bing Key under the following code comment: "TODO: define your bing map key here:"
    /// 1. The "Bing Streetside Viewer" dock pane now displays Bing Map's street view pane (starting at Esri).
    /// ![UI](Screenshots/screenshot2.png)
    /// 1. Click on the "N New York St" arrow pointing north on the "Bing Streetside Viewer" and see the location on the map pane being updated.
    /// ![UI](Screenshots/screenshot3.png)
    /// 1. The view heading on the "Bing Map Streetside" view can be changed by clicking on the "Change Heading" control above the "Bing Map Streetside" control and dragging the heading arrow into a new direction.
    /// ![UI](Screenshots/screenshot4.png)
    /// 1. Click the "Bing Streetside View Tool" button and click on a new street location on the map pane.
    /// 1. Notice that "Bing Map Streetside" will update it's view to the new clicked on location.
    /// ![UI](Screenshots/screenshot5.png)
    /// </remarks>
    internal class BingStreetsideModule : Module
    {
        private static BingStreetsideModule _this = null;

        /// <summary>
        /// Retrieve the singleton instance to this module here
        /// </summary>
        public static BingStreetsideModule Current
        {
            get
            {
                return _this ?? (_this = (BingStreetsideModule)FrameworkApplication.FindModule("BingStreetside_Module"));
            }
        }

        public BingStreetsideModule()
        {
            SetupOverlaySymbols();
        }

        #region Overrides

        /// <summary>
        /// Called by Framework when ArcGIS Pro is closing
        /// </summary>
        /// <returns>False to prevent Pro from closing, otherwise True</returns>
        protected override bool CanUnload()
        {
            //TODO - add your business logic
            //return false to ~cancel~ Application close
            return true;
        }

        #endregion Overrides

        #region Overlay symbols/add/remove graphics

        // Disposable overlay graphics currently shown for the Bing coordinate.
        private static readonly List<IDisposable> BingMapCoords = new List<IDisposable>();
        // Point symbol built asynchronously in SetupOverlaySymbols.
        // NOTE(review): may still be null if ShowCurrentBingMapCoord runs before
        // the QueuedTask in SetupOverlaySymbols completes — confirm ordering.
        private static CIMPointSymbol _pointCoordSymbol = null;
        // Guards BingMapCoords against concurrent add/clear.
        private readonly static object Lock = new object();

        /// <summary>
        /// Replaces the previous coordinate overlay with a new marker at
        /// <paramref name="mapPoint"/> on the active map view.
        /// </summary>
        public static void ShowCurrentBingMapCoord(MapPoint mapPoint)
        {
            var activeMapView = MapView.Active;
            if (activeMapView == null) return;
            lock (Lock)
            {
                // Dispose removes the old graphics from the overlay.
                foreach (var graphic in BingMapCoords)
                    graphic.Dispose();
                BingMapCoords.Clear();
                Debug.WriteLine($"SetCurrentBingMapCoord: {mapPoint.X} {mapPoint.Y}");
                BingMapCoords.Add(
                    activeMapView.AddOverlay(
                        mapPoint,
                        _pointCoordSymbol.MakeSymbolReference()));
            }
        }

        /// <summary>
        /// Builds the green circle marker used for the coordinate overlay
        /// (must run on the MCT, hence QueuedTask).
        /// </summary>
        private static void SetupOverlaySymbols()
        {
            QueuedTask.Run(() =>
            {
                var markerCoordPoint = SymbolFactory.Instance.ConstructMarker(ColorFactory.Instance.GreenRGB, 12, SimpleMarkerStyle.Circle);
                _pointCoordSymbol = SymbolFactory.Instance.ConstructPointSymbol(markerCoordPoint);
            });
        }

        #endregion Overlay symbols/add/remove graphics

        // True until the first Bing location has been applied; the first update
        // zooms to a fixed scale instead of panning.
        private static bool _bFirst = true;

        /// <summary>Get the Lat, Long from the Bing StreetSide View to set the location on the Pro Map</summary>
        /// <param name="longitude">WGS84 longitude from Bing, or null to only change the heading.</param>
        /// <param name="latitude">WGS84 latitude from Bing, or null to only change the heading.</param>
        /// <param name="heading">Camera heading in degrees.</param>
        /// <returns>The queued task, or a completed task when no map view is active.</returns>
        public static Task SetMapLocationFromBing(double? longitude, double? latitude, int heading)
        {
            #region Process Heading

            var activeMapView = MapView.Active;
            if (activeMapView == null)
                // BUGFIX: was 'return null' — awaiting a null Task throws
                // NullReferenceException; return a completed task instead.
                return Task.CompletedTask;

            #endregion

            return QueuedTask.Run(() =>
            {
                try
                {
                    var cam = activeMapView.Camera;
                    var bHeadingChange = Convert.ToInt32(cam.Heading) != heading;
                    cam.Heading = Convert.ToDouble(heading);
                    if (longitude.HasValue && latitude.HasValue)
                    {
                        var pt = MapPointBuilder.CreateMapPoint(longitude.Value, latitude.Value,
                            SpatialReferences.WGS84);
                        var center = GeometryEngine.Instance.Project(pt, activeMapView.Map.SpatialReference) as MapPoint;
                        if (center == null) return;
                        ShowCurrentBingMapCoord(center);

                        #region Update Map

                        // Check if the center is outside the map view extent.
                        var env = activeMapView.Extent.Expand(0.75, 0.75, true);
                        var bWithin = GeometryEngine.Instance.Within(center, env);
                        if (!bWithin)
                        {
                            cam.X = center.X;
                            cam.Y = center.Y;
                        }
                        if (_bFirst)
                        {
                            // First update: zoom straight to street level.
                            cam.Scale = 2000;
                            activeMapView.ZoomTo(cam, TimeSpan.FromMilliseconds(100));
                            _bFirst = false;
                        }
                        else
                            activeMapView.PanTo(cam, TimeSpan.FromMilliseconds(1000));
                        // The pan above already applied the new heading.
                        bHeadingChange = false;

                        #endregion
                    }
                    if (bHeadingChange)
                    {
                        activeMapView.PanTo(cam, TimeSpan.FromMilliseconds(300));
                    }
                }
                catch (Exception ex)
                {
                    Debug.WriteLine($@"Error in SetMapLocationFromBing: {ex.Message}");
                }
            });
        }

        /// <summary>
        /// Pushes a new center point into the Bing Streetside view via JavaScript.
        /// </summary>
        public static void SetMapLocationOnBingMaps(double lng, double lat)
        {
            WebBrowserUtility.InvokeScript("setViewCenterFromWPF", new Object[] { lng, lat });
        }
    }
}
//------------------------------------------------------------------------------
// <copyright file="SQLInt16Storage.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <owner current="true" primary="true">[....]</owner>
// <owner current="true" primary="false">[....]</owner>
// <owner current="false" primary="false">[....]</owner>
//------------------------------------------------------------------------------

namespace System.Data.Common
{
    using System;
    using System.Xml;
    using System.Data.SqlTypes;
    using System.Diagnostics;
    using System.Globalization;
    using System.IO;
    using System.Xml.Serialization;
    using System.Collections;

    // DataStorage implementation that holds a DataColumn's values as SqlInt16,
    // using SqlInt16.Null as both the default and the null sentinel.
    internal sealed class SqlInt16Storage : DataStorage
    {
        // Backing array: one slot per record index; resized by SetCapacity.
        private SqlInt16[] values;

        public SqlInt16Storage(DataColumn column)
            : base(column, typeof(SqlInt16), SqlInt16.Null, SqlInt16.Null, StorageType.SqlInt16)
        {
        }

        // Computes an aggregate over the given record indexes, skipping nulls.
        // Sum/Mean widen to SqlInt64 under 'checked' and surface overflow as
        // ExprException.Overflow; Var/StDev accumulate in SqlDouble.
        override public Object Aggregate(int[] records, AggregateType kind)
        {
            bool hasData = false;
            try
            {
                switch (kind)
                {
                    case AggregateType.Sum:
                        SqlInt64 sum = 0;
                        foreach (int record in records)
                        {
                            if (IsNull(record))
                                continue;
                            checked { sum += values[record]; }
                            hasData = true;
                        }
                        if (hasData)
                        {
                            return sum;
                        }
                        return NullValue;

                    case AggregateType.Mean:
                        SqlInt64 meanSum = 0;
                        int meanCount = 0;
                        foreach (int record in records)
                        {
                            if (IsNull(record))
                                continue;
                            checked { meanSum += (values[record]).ToSqlInt64(); }
                            meanCount++;
                            hasData = true;
                        }
                        if (hasData)
                        {
                            SqlInt16 mean = 0;
                            // Integer division, then narrowed back to SqlInt16.
                            checked { mean = (meanSum / (SqlInt64)meanCount).ToSqlInt16(); }
                            return mean;
                        }
                        return NullValue;

                    case AggregateType.Var:
                    case AggregateType.StDev:
                        int count = 0;
                        SqlDouble var = (SqlDouble)0;
                        SqlDouble prec = (SqlDouble)0;
                        SqlDouble dsum = (SqlDouble)0;
                        SqlDouble sqrsum = (SqlDouble)0;

                        foreach (int record in records)
                        {
                            if (IsNull(record))
                                continue;
                            dsum += (values[record]).ToSqlDouble();
                            sqrsum += (values[record]).ToSqlDouble() * (values[record]).ToSqlDouble();
                            count++;
                        }

                        // Variance needs at least two non-null values.
                        if (count > 1)
                        {
                            var = ((SqlDouble)count * sqrsum - (dsum * dsum));
                            prec = var / (dsum * dsum);

                            // we are dealing with the risk of a cancellation error
                            // double is guaranteed only for 15 digits so a difference
                            // with a result less than 1e-15 should be considered as zero
                            if ((prec < 1e-15) || (var < 0))
                                var = 0;
                            else
                                var = var / (count * (count - 1));

                            if (kind == AggregateType.StDev)
                            {
                                return Math.Sqrt(var.Value);
                            }
                            return var;
                        }
                        return NullValue;

                    case AggregateType.Min:
                        SqlInt16 min = SqlInt16.MaxValue;
                        for (int i = 0; i < records.Length; i++)
                        {
                            int record = records[i];
                            if (IsNull(record))
                                continue;
                            if ((SqlInt16.LessThan(values[record], min)).IsTrue)
                                min = values[record];
                            hasData = true;
                        }
                        if (hasData)
                        {
                            return min;
                        }
                        return NullValue;

                    case AggregateType.Max:
                        SqlInt16 max = SqlInt16.MinValue;
                        for (int i = 0; i < records.Length; i++)
                        {
                            int record = records[i];
                            if (IsNull(record))
                                continue;
                            if ((SqlInt16.GreaterThan(values[record], max)).IsTrue)
                                max = values[record];
                            hasData = true;
                        }
                        if (hasData)
                        {
                            return max;
                        }
                        return NullValue;

                    case AggregateType.First:
                        // NOTE: First deliberately returns CLR null (not NullValue)
                        // when there are no records, per the original comment below.
                        if (records.Length > 0)
                        {
                            return values[records[0]];
                        }
                        return null;// no data => null

                    case AggregateType.Count:
                        // Count of non-null slots only.
                        count = 0;
                        for (int i = 0; i < records.Length; i++)
                        {
                            if (!IsNull(records[i]))
                                count++;
                        }
                        return count;
                }
            }
            catch (OverflowException)
            {
                throw ExprException.Overflow(typeof(SqlInt16));
            }
            // Unsupported AggregateType for this storage.
            throw ExceptionBuilder.AggregateException(kind, DataType);
        }

        // Compares the values stored at two record indexes.
        override public int Compare(int recordNo1, int recordNo2)
        {
            return values[recordNo1].CompareTo(values[recordNo2]);
        }

        // Compares a stored value to an external value; 'value' must already be
        // a SqlInt16 (callers pass the result of ConvertValue).
        override public int CompareValueTo(int recordNo, Object value)
        {
            return values[recordNo].CompareTo((SqlInt16)value);
        }

        // Coerces an arbitrary value to SqlInt16; CLR null maps to NullValue.
        override public object ConvertValue(object value)
        {
            if (null != value)
            {
                return SqlConvert.ConvertToSqlInt16(value);
            }
            return NullValue;
        }

        // Copies the value from record 1's slot into record 2's slot.
        override public void Copy(int recordNo1, int recordNo2)
        {
            values[recordNo2] = values[recordNo1];
        }

        override public Object Get(int record)
        {
            return values[record];
        }

        override public bool IsNull(int record)
        {
            return (values[record].IsNull);
        }

        override public void Set(int record, Object value)
        {
            values[record] = SqlConvert.ConvertToSqlInt16(value);
        }

        // Grows (or shrinks) the backing array, preserving existing values.
        override public void SetCapacity(int capacity)
        {
            SqlInt16[] newValues = new SqlInt16[capacity];
            if (null != values)
            {
                Array.Copy(values, 0, newValues, 0, Math.Min(capacity, values.Length));
            }
            values = newValues;
        }

        // Deserializes a SqlInt16 from its XML text form via IXmlSerializable.
        override public object ConvertXmlToObject(string s)
        {
            SqlInt16 newValue = new SqlInt16();
            // Wrap in an element since the reader may be given a fragment, bug 98767.
            string tempStr = string.Concat("<col>", s, "</col>");
            StringReader strReader = new StringReader(tempStr);

            IXmlSerializable tmp = newValue;

            using (XmlTextReader xmlTextReader = new XmlTextReader(strReader))
            {
                tmp.ReadXml(xmlTextReader);
            }
            return ((SqlInt16)tmp);
        }

        // Serializes a non-null SqlInt16 to its XML text form via IXmlSerializable.
        override public string ConvertObjectToXml(object value)
        {
            Debug.Assert(!DataStorage.IsObjectNull(value), "we shouldn't have null here");
            Debug.Assert((value.GetType() == typeof(SqlInt16)), "wrong input type");

            StringWriter strwriter = new StringWriter(FormatProvider);

            using (XmlTextWriter xmlTextWriter = new XmlTextWriter(strwriter))
            {
                ((IXmlSerializable)value).WriteXml(xmlTextWriter);
            }
            return (strwriter.ToString());
        }

        override protected object GetEmptyStorage(int recordCount)
        {
            return new SqlInt16[recordCount];
        }

        // Copies one record into an external store, recording its null state.
        override protected void CopyValue(int record, object store, BitArray nullbits, int storeIndex)
        {
            SqlInt16[] typedStore = (SqlInt16[])store;
            typedStore[storeIndex] = values[record];
            nullbits.Set(storeIndex, IsNull(record));
        }

        // Adopts an external store as the backing array; null bits are implicit
        // in SqlInt16.Null, so SetNullStorage is intentionally not called.
        override protected void SetStorage(object store, BitArray nullbits)
        {
            values = (SqlInt16[])store;
            //SetNullStorage(nullbits);
        }
    }
}
using VulkanCore.Tests.Utilities;
using Xunit;
using Xunit.Abstractions;

namespace VulkanCore.Tests
{
    /// <summary>
    /// Smoke tests for <see cref="CommandBuffer"/> commands. Each test records the
    /// command(s) under test into a freshly allocated primary command buffer; the
    /// validation layer (enabled by the test base) flags incorrect API usage.
    /// </summary>
    public class CommandBufferTest : HandleTestBase
    {
        [Fact]
        public void BeginAndEnd()
        {
            CommandBuffer.Begin();
            CommandBuffer.End();
        }

        [Fact]
        public void CmdBeginAndEndQuery()
        {
            using (QueryPool queryPool = Device.CreateQueryPool(new QueryPoolCreateInfo(QueryType.Occlusion, 1)))
            {
                CommandBuffer.Begin();
                CommandBuffer.CmdBeginQuery(queryPool, 0);
                CommandBuffer.CmdEndQuery(queryPool, 0);
                CommandBuffer.End();
            }
        }

        [Fact]
        public void CmdWriteTimestamp()
        {
            using (QueryPool queryPool = Device.CreateQueryPool(new QueryPoolCreateInfo(QueryType.Timestamp, 1)))
            {
                CommandBuffer.Begin();
                CommandBuffer.CmdWriteTimestamp(PipelineStages.AllCommands, queryPool, 0);
                CommandBuffer.End();
            }
        }

        [Fact]
        public void CmdCopyQueryPoolResults()
        {
            const long bufferSize = 256L;
            using (QueryPool queryPool = Device.CreateQueryPool(new QueryPoolCreateInfo(QueryType.Timestamp, 1)))
            using (Buffer buffer = Device.CreateBuffer(new BufferCreateInfo(bufferSize, BufferUsages.TransferDst)))
            {
                // Allocate backing memory from the buffer's actual requirements (size
                // and compatible type bits) instead of a hard-coded size and memory
                // type index 0, as required by the validation layer.
                MemoryRequirements memReq = buffer.GetMemoryRequirements();
                int memTypeIndex = PhysicalDeviceMemoryProperties.MemoryTypes.IndexOf(
                    memReq.MemoryTypeBits, MemoryProperties.HostVisible);
                using (DeviceMemory memory = Device.AllocateMemory(new MemoryAllocateInfo(memReq.Size, memTypeIndex)))
                {
                    buffer.BindMemory(memory);

                    CommandBuffer.Begin();
                    CommandBuffer.CmdCopyQueryPoolResults(queryPool, 0, 1, buffer, 0, bufferSize);
                    CommandBuffer.End();
                }
            }
        }

        [Fact]
        public void CmdResetQueryPool()
        {
            using (QueryPool queryPool = Device.CreateQueryPool(new QueryPoolCreateInfo(QueryType.Timestamp, 1)))
            {
                CommandBuffer.Begin();
                CommandBuffer.CmdResetQueryPool(queryPool, 0, 1);
                CommandBuffer.End();
            }
        }

        [Fact]
        public void CmdBeginEndRenderPass()
        {
            using (RenderPass renderPass = Device.CreateRenderPass(new RenderPassCreateInfo(new[] { new SubpassDescription(null) })))
            using (Framebuffer framebuffer = renderPass.CreateFramebuffer(new FramebufferCreateInfo(null, 32, 32)))
            {
                CommandBuffer.Begin();
                CommandBuffer.CmdBeginRenderPass(new RenderPassBeginInfo(framebuffer, default(Rect2D)));
                CommandBuffer.CmdEndRenderPass();
                CommandBuffer.End();
            }
        }

        [Fact]
        public void CmdSetScissors()
        {
            CommandBuffer.Begin();
            CommandBuffer.CmdSetScissor(new Rect2D(Offset2D.Zero, new Extent2D(32, 32)));
            CommandBuffer.CmdSetScissors(0, 1, new[] { new Rect2D(Offset2D.Zero, new Extent2D(32, 32)) });
            CommandBuffer.End();
        }

        [Fact]
        public void CmdSetViewports()
        {
            CommandBuffer.Begin();
            CommandBuffer.CmdSetViewport(new Viewport(0, 0, 32, 32));
            CommandBuffer.CmdSetViewports(0, 1, new[] { new Viewport(0, 0, 32, 32) });
            CommandBuffer.End();
        }

        [Fact]
        public void CmdSetLineWidth()
        {
            CommandBuffer.Begin();
            CommandBuffer.CmdSetLineWidth(1.0f);
            CommandBuffer.End();
        }

        [Fact]
        public void CmdSetDepthParameters()
        {
            CommandBuffer.Begin();
            CommandBuffer.CmdSetDepthBias(1.0f, 1.0f, 1.0f);
            CommandBuffer.CmdSetDepthBounds(0.0f, 1.0f);
            CommandBuffer.End();
        }

        [Fact]
        public void CmdSetBlendConstants()
        {
            CommandBuffer.Begin();
            CommandBuffer.CmdSetBlendConstants(new ColorF4(1.0f, 1.0f, 1.0f, 1.0f));
            CommandBuffer.End();
        }

        [Fact]
        public void CmdSetStencilParameters()
        {
            CommandBuffer.Begin();
            CommandBuffer.CmdSetStencilCompareMask(StencilFaces.Front, ~0);
            CommandBuffer.CmdSetStencilReference(StencilFaces.Front, 1);
            CommandBuffer.CmdSetStencilWriteMask(StencilFaces.Front, ~0);
            CommandBuffer.End();
        }

        [Fact]
        public void CmdBindDescriptorSet()
        {
            const int bufferSize = 256;

            var layoutCreateInfo = new DescriptorSetLayoutCreateInfo(
                new DescriptorSetLayoutBinding(0, DescriptorType.StorageBuffer, 1));
            var descriptorPoolCreateInfo = new DescriptorPoolCreateInfo(
                1,
                new[] { new DescriptorPoolSize(DescriptorType.StorageBuffer, 1) });

            using (DescriptorSetLayout descriptorSetLayout = Device.CreateDescriptorSetLayout(layoutCreateInfo))
            using (DescriptorPool descriptorPool = Device.CreateDescriptorPool(descriptorPoolCreateInfo))
            using (PipelineLayout pipelineLayout = Device.CreatePipelineLayout(new PipelineLayoutCreateInfo(new[] { descriptorSetLayout })))
            using (Buffer buffer = Device.CreateBuffer(new BufferCreateInfo(bufferSize, BufferUsages.StorageBuffer)))
            {
                // Allocate backing memory based on the buffer's actual requirements
                // rather than a hard-coded size and memory type index 0.
                MemoryRequirements memReq = buffer.GetMemoryRequirements();
                int memTypeIndex = PhysicalDeviceMemoryProperties.MemoryTypes.IndexOf(
                    memReq.MemoryTypeBits, MemoryProperties.HostVisible);
                using (DeviceMemory memory = Device.AllocateMemory(new MemoryAllocateInfo(memReq.Size, memTypeIndex)))
                {
                    buffer.BindMemory(memory);

                    DescriptorSet descriptorSet =
                        descriptorPool.AllocateSets(new DescriptorSetAllocateInfo(1, descriptorSetLayout))[0];

                    // Point the descriptor at the storage buffer before binding it.
                    var descriptorWrite = new WriteDescriptorSet(descriptorSet, 0, 0, 1, DescriptorType.StorageBuffer,
                        bufferInfo: new[] { new DescriptorBufferInfo(buffer) });
                    descriptorPool.UpdateSets(new[] { descriptorWrite });

                    CommandBuffer.Begin();
                    CommandBuffer.CmdBindDescriptorSet(PipelineBindPoint.Graphics, pipelineLayout, descriptorSet);
                    CommandBuffer.CmdBindDescriptorSets(PipelineBindPoint.Graphics, pipelineLayout, 0, new[] { descriptorSet });
                    CommandBuffer.End();

                    descriptorPool.Reset();
                }
            }
        }

        [Fact]
        public void CmdBindVertexAndIndexBuffer()
        {
            const int bufferSize = 256;
            using (Buffer buffer = Device.CreateBuffer(new BufferCreateInfo(bufferSize, BufferUsages.VertexBuffer | BufferUsages.IndexBuffer)))
            {
                MemoryRequirements memReq = buffer.GetMemoryRequirements();
                int memTypeIndex = PhysicalDeviceMemoryProperties.MemoryTypes.IndexOf(
                    memReq.MemoryTypeBits, MemoryProperties.HostVisible);
                using (DeviceMemory memory = Device.AllocateMemory(new MemoryAllocateInfo(memReq.Size, memTypeIndex)))
                {
                    buffer.BindMemory(memory);

                    CommandBuffer.Begin();
                    CommandBuffer.CmdBindVertexBuffer(buffer);
                    CommandBuffer.CmdBindVertexBuffers(0, 1, new[] { buffer }, new long[] { 0 });
                    CommandBuffer.CmdBindIndexBuffer(buffer);
                    CommandBuffer.End();
                }
            }
        }

        [Fact]
        public void CmdBindPipeline()
        {
            var descriptorSetLayoutCreateInfo = new DescriptorSetLayoutCreateInfo(
                new DescriptorSetLayoutBinding(0, DescriptorType.StorageBuffer, 1, ShaderStages.Compute),
                new DescriptorSetLayoutBinding(1, DescriptorType.StorageBuffer, 1, ShaderStages.Compute));
            using (DescriptorSetLayout descriptorSetLayout = Device.CreateDescriptorSetLayout(descriptorSetLayoutCreateInfo))
            using (PipelineLayout pipelineLayout = Device.CreatePipelineLayout(new PipelineLayoutCreateInfo(new[] { descriptorSetLayout })))
            using (ShaderModule shader = Device.CreateShaderModule(new ShaderModuleCreateInfo(ReadAllBytes("Shader.comp.spv"))))
            {
                var pipelineCreateInfo = new ComputePipelineCreateInfo(
                    new PipelineShaderStageCreateInfo(ShaderStages.Compute, shader, "main"),
                    pipelineLayout);
                using (Pipeline pipeline = Device.CreateComputePipeline(pipelineCreateInfo))
                {
                    CommandBuffer.Begin();
                    CommandBuffer.CmdBindPipeline(PipelineBindPoint.Compute, pipeline);
                    CommandBuffer.End();
                }
            }
        }

        [Fact]
        public void CmdSetAndResetEvent()
        {
            using (Event evt = Device.CreateEvent())
            {
                CommandBuffer.Begin();
                CommandBuffer.CmdSetEvent(evt, PipelineStages.AllCommands);
                CommandBuffer.CmdResetEvent(evt, PipelineStages.AllCommands);
                CommandBuffer.End();
            }
        }

        [Fact]
        public void CmdWaitEvents()
        {
            using (Event evt = Device.CreateEvent())
            {
                CommandBuffer.Begin();
                CommandBuffer.CmdWaitEvent(evt, PipelineStages.AllCommands, PipelineStages.AllCommands);
                CommandBuffer.CmdWaitEvents(new[] { evt }, PipelineStages.AllCommands, PipelineStages.AllCommands);
                CommandBuffer.End();
            }
        }

        [Fact]
        public void CmdDraw()
        {
            var renderPassCreateInfo = new RenderPassCreateInfo(
                new[] { new SubpassDescription(new[] { new AttachmentReference(0, ImageLayout.ColorAttachmentOptimal) }) },
                new[]
                {
                    new AttachmentDescription
                    {
                        Format = Format.B8G8R8A8UNorm,
                        Samples = SampleCounts.Count1,
                        FinalLayout = ImageLayout.ColorAttachmentOptimal,
                        LoadOp = AttachmentLoadOp.DontCare
                    }
                });
            var imageCreateInfo = new ImageCreateInfo
            {
                Usage = ImageUsages.ColorAttachment,
                Format = Format.B8G8R8A8UNorm,
                Extent = new Extent3D(2, 2, 1),
                ImageType = ImageType.Image2D,
                MipLevels = 1,
                ArrayLayers = 1,
                Samples = SampleCounts.Count1
            };
            var imageViewCreateInfo = new ImageViewCreateInfo(
                Format.B8G8R8A8UNorm,
                new ImageSubresourceRange(ImageAspects.Color, 0, 1, 0, 1));

            using (ShaderModule vertexShader = Device.CreateShaderModule(new ShaderModuleCreateInfo(ReadAllBytes("Shader.vert.spv"))))
            using (ShaderModule fragmentShader = Device.CreateShaderModule(new ShaderModuleCreateInfo(ReadAllBytes("Shader.frag.spv"))))
            using (PipelineLayout pipelineLayout = Device.CreatePipelineLayout())
            using (RenderPass renderPass = Device.CreateRenderPass(renderPassCreateInfo))
            using (Image image = Device.CreateImage(imageCreateInfo))
            {
                MemoryRequirements imageMemReq = image.GetMemoryRequirements();
                int memTypeIndex = PhysicalDeviceMemoryProperties.MemoryTypes.IndexOf(
                    imageMemReq.MemoryTypeBits, MemoryProperties.DeviceLocal);
                using (DeviceMemory imageMemory = Device.AllocateMemory(new MemoryAllocateInfo(imageMemReq.Size, memTypeIndex)))
                {
                    image.BindMemory(imageMemory);
                    using (ImageView imageView = image.CreateView(imageViewCreateInfo))
                    using (Framebuffer framebuffer = renderPass.CreateFramebuffer(new FramebufferCreateInfo(new[] { imageView }, 2, 2)))
                    using (Pipeline pipeline = Device.CreateGraphicsPipeline(new GraphicsPipelineCreateInfo(
                        pipelineLayout, renderPass, 0,
                        new[]
                        {
                            new PipelineShaderStageCreateInfo(ShaderStages.Vertex, vertexShader, "main"),
                            new PipelineShaderStageCreateInfo(ShaderStages.Fragment, fragmentShader, "main")
                        },
                        new PipelineInputAssemblyStateCreateInfo(),
                        new PipelineVertexInputStateCreateInfo(),
                        // Rasterization is discarded: the test only verifies recording.
                        new PipelineRasterizationStateCreateInfo { RasterizerDiscardEnable = true, LineWidth = 1.0f })))
                    {
                        CommandBuffer.Begin();
                        CommandBuffer.CmdBeginRenderPass(new RenderPassBeginInfo(framebuffer, new Rect2D(0, 0, 2, 2)));
                        CommandBuffer.CmdBindPipeline(PipelineBindPoint.Graphics, pipeline);
                        CommandBuffer.CmdDraw(3);
                        CommandBuffer.CmdEndRenderPass();
                        CommandBuffer.End();
                    }
                }
            }
        }

        [Fact]
        public void Reset()
        {
            CommandPool.Reset();
            CommandBuffer.Reset();
        }

        [Fact]
        public void Free()
        {
            // Free via Dispose on the buffer itself...
            using (CommandPool.AllocateBuffers(new CommandBufferAllocateInfo(CommandBufferLevel.Primary, 1))[0]) { }
            // ...and via the pool's bulk-free API.
            CommandBuffer[] buffers = CommandPool.AllocateBuffers(new CommandBufferAllocateInfo(CommandBufferLevel.Primary, 1));
            CommandPool.FreeBuffers(buffers);
        }

        /// <summary>
        /// Creates a resettable command pool on the graphics queue family and
        /// allocates the primary command buffer shared by the tests.
        /// </summary>
        public CommandBufferTest(DefaultHandles defaults, ITestOutputHelper output) : base(defaults, output)
        {
            CommandPool = Device.CreateCommandPool(
                new CommandPoolCreateInfo(defaults.GraphicsQueue.FamilyIndex, CommandPoolCreateFlags.ResetCommandBuffer));
            CommandBuffer = CommandPool.AllocateBuffers(new CommandBufferAllocateInfo(CommandBufferLevel.Primary, 1))[0];
        }

        public CommandPool CommandPool { get; }
        public CommandBuffer CommandBuffer { get; }

        public override void Dispose()
        {
            CommandPool.Dispose();
            base.Dispose();
        }
    }
}
using System;
using System.Data;
using System.Text;
using System.Text.RegularExpressions;
using CslaGenerator.Metadata;

namespace CslaGenerator.Util
{
    /// <summary>
    /// Template helper that emits Visual Basic .NET source fragments for the
    /// CSLA code generator (initializers, data-reader statements, parameter
    /// expressions, comments and attributes).
    /// </summary>
    public class VbCslaTemplateHelper : CslaTemplateHelper
    {
        /// <summary>
        /// Returns the VB literal used to initialize a field of the given type.
        /// </summary>
        public override string GetInitValue(TypeCodeEx typeCode)
        {
            switch (typeCode)
            {
                case TypeCodeEx.Int16:
                case TypeCodeEx.Int32:
                case TypeCodeEx.Int64:
                case TypeCodeEx.Double:
                case TypeCodeEx.Decimal:
                case TypeCodeEx.Single:
                case TypeCodeEx.Byte:
                    return "0";
                case TypeCodeEx.String:
                    return "String.Empty";
                case TypeCodeEx.Boolean:
                    return "False";
                case TypeCodeEx.Object:
                    return "Nothing";
                case TypeCodeEx.Guid:
                    return "Guid.Empty";
                case TypeCodeEx.SmartDate:
                    return "New SmartDate(True)";
                case TypeCodeEx.DateTime:
                    return "DateTime.Now";
                case TypeCodeEx.Char:
                    return "Char.MinValue";
                case TypeCodeEx.ByteArray:
                    // VB casing: "New", not C#'s "new", to match every other
                    // literal this generator emits.
                    return "New Byte() {}";
                default:
                    return String.Empty;
            }
        }

        /// <summary>
        /// Returns the initializer for a property; nullable properties (other
        /// than SmartDate) initialize to Nothing.
        /// </summary>
        public override string GetInitValue(ValueProperty prop)
        {
            if (AllowNull(prop) && prop.PropertyType != TypeCodeEx.SmartDate)
                return "Nothing";
            return GetInitValue(prop.PropertyType);
        }

        public override string GetReaderAssignmentStatement(ValueProperty prop)
        {
            return GetReaderAssignmentStatement(prop, false);
        }

        /// <summary>
        /// Builds the statement that assigns a data-reader column to a field,
        /// managed property, or structure member.
        /// </summary>
        public override string GetReaderAssignmentStatement(ValueProperty prop, bool Structure)
        {
            string statement;
            if (Structure)
                statement = "nfo." + prop.Name;
            else
                statement = FormatFieldName(prop.Name);

            if (PropertyMode == CslaPropertyMode.Managed)
            {
                if (AllowNull(prop))
                {
                    // Nullable managed properties guard the read with IsDBNull.
                    string formatString;
                    if (TypeHelper.IsNullableType(prop.PropertyType))
                        formatString = "LoadProperty({0}, If(Not dr.IsDBNull(\"{2}\"), New {3}(dr.{1}(\"{2}\")), Nothing))";
                    else
                        formatString = "LoadProperty({0}, If(Not dr.IsDBNull(\"{2}\"), dr.{1}(\"{2}\"), Nothing))";

                    return String.Format(formatString,
                                         FormatManaged(prop.Name),
                                         GetReaderMethod(prop.PropertyType),
                                         prop.ParameterName,
                                         GetDataType(prop));
                }
                return String.Format("LoadProperty({0}, dr.{1}(\"{2}\"))",
                                     FormatManaged(prop.Name),
                                     GetReaderMethod(prop.PropertyType),
                                     prop.ParameterName);
            }
            return string.Format(GetDataReaderStatement(prop), statement);
        }

        /// <summary>
        /// Builds the raw data-reader expression (with "{0}" as the assignment
        /// target placeholder), handling nullability, Nullable(Of T) wrapping,
        /// SmartDate and byte arrays.
        /// </summary>
        public override string GetDataReaderStatement(ValueProperty prop)
        {
            // CSLA 1.0/2.0 target VB versions without the ternary If() operator.
            bool ternarySupport =
                (GeneratorController.Current.CurrentUnit.GenerationParams.TargetFramework != TargetFramework.CSLA20 &&
                 GeneratorController.Current.CurrentUnit.GenerationParams.TargetFramework != TargetFramework.CSLA10);
            bool nullable = AllowNull(prop);

            StringBuilder st = new StringBuilder();
            if (nullable && prop.PropertyType != TypeCodeEx.ByteArray)
            {
                if (ternarySupport)
                    st.AppendFormat("If(Not dr.IsDBNull(\"{0}\"), ", prop.ParameterName);
                else
                    st.AppendFormat("If Not dr.IsDBNull(\"{0}\") Then ", prop.ParameterName);
            }

            // Assignment target goes in front of the null guard for the ternary
            // form, after the "If ... Then" for the statement form.
            if (ternarySupport)
                st.Insert(0, "{0} = ");
            else
                st.Append("{0} = ");

            if (nullable && TypeHelper.IsNullableType(prop.PropertyType))
                st.AppendFormat("New {0}(", GetDataType(prop));

            st.Append("dr.");
            if (prop.DbBindColumn.ColumnOriginType == ColumnOriginType.None)
                st.Append(GetReaderMethod(prop.PropertyType));
            else
                st.Append(GetReaderMethod(GetDbType(prop.DbBindColumn), prop.PropertyType));
            st.Append("(\"" + prop.ParameterName + "\"");
            if (prop.PropertyType == TypeCodeEx.SmartDate)
                st.Append(", True"); // VB casing: "True", not C#'s "true".
            st.Append(")");

            if (nullable && TypeHelper.IsNullableType(prop.PropertyType))
                st.Append(")");
            if (nullable && ternarySupport && prop.PropertyType != TypeCodeEx.ByteArray)
                st.Append(", Nothing)");

            if (prop.PropertyType == TypeCodeEx.ByteArray)
            {
                // Strip the leading "{0} = " (6 chars) and wrap in TryCast.
                st.Remove(0, 6);
                return "{0} = TryCast(" + st.ToString() + ", Byte())";
            }
            return st.ToString();
        }

        //protected override string GetDataType(TypeCodeEx type) // original
        /// <summary>Maps a TypeCodeEx to its VB type name.</summary>
        public override string GetDataType(TypeCodeEx type)
        {
            if (type == TypeCodeEx.ByteArray)
                return "Byte()";
            return type.ToString();
        }

        /// <summary>
        /// Maps a property to its VB type name, wrapping nullable value types in
        /// Nullable(Of T) (or the "?" shorthand on newer frameworks).
        /// </summary>
        public override string GetDataType(Property prop)
        {
            string t = GetDataType(prop.PropertyType);
            if (AllowNull(prop))
            {
                if (TypeHelper.IsNullableType(prop.PropertyType))
                {
                    if (CurrentUnit.GenerationParams.TargetFramework == TargetFramework.CSLA10 ||
                        CurrentUnit.GenerationParams.TargetFramework == TargetFramework.CSLA20)
                        t = string.Format("Nullable(Of {0})", t);
                    else
                        t += "?";
                }
            }
            return t;
        }

        /// <summary>
        /// Builds the expression used when passing a property value to a data
        /// command parameter, converting empty/Nothing values to DBNull.
        /// </summary>
        public override string GetParameterSet(Property prop, bool Criteria)
        {
            bool nullable = AllowNull(prop);
            string propName;
            if (Criteria)
                propName = "crit." + FormatPascal(prop.Name);
            else if (PropertyMode == CslaPropertyMode.Managed)
                propName = String.Format("ReadProperty({0})", FormatManaged(prop.Name));
            else
                propName = FormatFieldName(prop.Name);

            switch (prop.PropertyType)
            {
                case Metadata.TypeCodeEx.SmartDate:
                    return propName + ".DBValue";
                case Metadata.TypeCodeEx.Guid:
                    if (nullable)
                        return string.Format("GetNullableParameter(Of {0})({1})", prop.PropertyType.ToString(), propName);
                    else
                        return "IIf (" + propName + ".Equals(Guid.Empty), DBNull.Value, " + propName + ")";
                default:
                    if (nullable)
                    {
                        if (TypeHelper.IsNullableType(prop.PropertyType))
                            return string.Format("GetNullableParameter(Of {0})({1})", prop.PropertyType.ToString(), propName);
                        else
                            return "IIf (" + propName + " Is Nothing, DBNull.Value, " + propName + ")";
                    }
                    else
                        return propName;
            }
        }

        //protected internal override string GetLanguageVariableType(DbType dataType) // original
        /// <summary>Maps an ADO.NET DbType to the corresponding VB type name.</summary>
        public override string GetLanguageVariableType(DbType dataType)
        {
            switch (dataType)
            {
                case DbType.AnsiString: return "String";
                case DbType.AnsiStringFixedLength: return "String";
                case DbType.Binary: return "Byte()";
                case DbType.Boolean: return "Boolean";
                case DbType.Byte: return "Byte";
                case DbType.Currency: return "Decimal";
                case DbType.Date:
                case DbType.DateTime: return "DateTime";
                case DbType.Decimal: return "Decimal";
                case DbType.Double: return "Double";
                case DbType.Guid: return "Guid";
                case DbType.Int16: return "Short";
                case DbType.Int32: return "Integer";
                case DbType.Int64: return "Long";
                case DbType.Object: return "Object";
                case DbType.SByte: return "SByte";
                case DbType.Single: return "Single";
                case DbType.String: return "String";
                case DbType.StringFixedLength: return "String";
                case DbType.Time: return "TimeSpan";
                case DbType.UInt16: return "Short";
                case DbType.UInt32: return "Integer";
                case DbType.UInt64: return "Long";
                case DbType.VarNumeric: return "Decimal";
                default:
                    {
                        return "__UNKNOWN__" + dataType.ToString();
                    }
            }
        }

        /// <summary>
        /// Emits the ds.Relations.Add(...) call linking the parent result set to
        /// a child result set on the join column, advancing the result-set
        /// counter as a side effect.
        /// </summary>
        public override string GetRelationString(CslaObjectInfo info, ChildProperty child)
        {
            string indent = new string(' ', IndentLevel * 4);
            StringBuilder sb = new StringBuilder();
            CslaObjectInfo childInfo = FindChildInfo(info, child.TypeName);
            string joinColumn = String.Empty;
            if (child.LoadParameters.Count > 0)
            {
                if (IsCollectionType(childInfo.ObjectType))
                {
                    // For collections the relation targets the item type.
                    joinColumn = child.LoadParameters[0].Property.Name;
                    childInfo = FindChildInfo(info, childInfo.ItemType);
                }
                if (joinColumn == String.Empty)
                {
                    joinColumn = child.LoadParameters[0].Property.Name;
                }
            }
            sb.Append(indent);
            sb.Append("ds.Relations.Add(\"");
            sb.Append(info.ObjectName);
            sb.Append(childInfo.ObjectName);
            sb.Append("\", ds.Tables(");
            sb.Append(_resultSetCount.ToString());
            sb.Append(").Columns(\"");
            sb.Append(joinColumn);
            sb.Append("\"), ds.Tables(");
            sb.Append((_resultSetCount + 1).ToString());
            sb.Append(").Columns(\"");
            sb.Append(joinColumn);
            sb.Append("\"), False)");
            _resultSetCount++;
            return sb.ToString();
        }

        /// <summary>
        /// Prefixes every line of an XML comment with the VB doc-comment marker
        /// ''' at the current indent level.
        /// </summary>
        public override string GetXmlCommentString(string xmlComment)
        {
            string indent = new string(' ', IndentLevel * 4);
            // add leading indent and comment sign
            xmlComment = indent + "''' " + xmlComment;
            return Regex.Replace(xmlComment, "\r\n", "\r\n" + indent + "''' ", RegexOptions.Multiline);
        }

        /// <summary>Emits one Imports line per required namespace.</summary>
        public override string GetUsingStatementsString(CslaObjectInfo info)
        {
            string[] usingNamespaces = GetNamespaces(info);
            string result = String.Empty;
            foreach (string namespaceName in usingNamespaces)
            {
                result += "Imports " + namespaceName + "\n";
            }
            return (result);
        }

        /// <summary>Formats attributes using VB's angle-bracket syntax.</summary>
        public override string GetAttributesString(string[] attributes)
        {
            if (attributes == null || attributes.Length == 0)
                return string.Empty;
            return "<" + string.Join(", ", attributes) + "> _";
        }

        /// <summary>
        /// Strips the trailing semicolon the base (C#-oriented) helper appends,
        /// since VB statements have no terminator.
        /// </summary>
        public override string LoadProperty(ValueProperty prop, string value)
        {
            string result = base.LoadProperty(prop, value);
            return result.Substring(0, result.Length - 1);
        }
    }
}
using UnityEngine; using UnityEditor; using System; using System.Linq; using System.IO; using System.Collections.Generic; using System.Text.RegularExpressions; using V1=AssetBundleGraph; using Model=UnityEngine.AssetBundles.GraphTool.DataModel.Version2; namespace UnityEngine.AssetBundles.GraphTool { [CustomNode("Modify Assets/Overwrite Import Setting", 60)] public class ImportSetting : Node, Model.NodeDataImporter { public enum ConfigStatus { NoSampleFound, TooManySamplesFound, GoodSampleFound } private static readonly string[] s_importerTypeList = new string[] { Model.Settings.GUI_TEXT_SETTINGTEMPLATE_MODEL, Model.Settings.GUI_TEXT_SETTINGTEMPLATE_TEXTURE, Model.Settings.GUI_TEXT_SETTINGTEMPLATE_AUDIO, Model.Settings.GUI_TEXT_SETTINGTEMPLATE_VIDEO }; [SerializeField] private SerializableMultiTargetString m_spritePackingTagNameTemplate; [SerializeField] private bool m_overwritePackingTag; private Editor m_importerEditor; public override string ActiveStyle { get { return "node 8 on"; } } public override string InactiveStyle { get { return "node 8"; } } public override string Category { get { return "Modify"; } } public override void Initialize(Model.NodeData data) { m_spritePackingTagNameTemplate = new SerializableMultiTargetString("*"); m_overwritePackingTag = false; data.AddDefaultInputPoint(); data.AddDefaultOutputPoint(); } public void Import(V1.NodeData v1, Model.NodeData v2) { // do nothing } public override Node Clone(Model.NodeData newData) { var newNode = new ImportSetting(); newData.AddDefaultInputPoint(); newData.AddDefaultOutputPoint(); return newNode; } public override bool OnAssetsReimported( Model.NodeData nodeData, AssetReferenceStreamManager streamManager, BuildTarget target, string[] importedAssets, string[] deletedAssets, string[] movedAssets, string[] movedFromAssetPaths) { var samplingDirectoryPath = FileUtility.PathCombine(Model.Settings.Path.ImporterSettingsPath, nodeData.Id); foreach(var imported in importedAssets) { 
if(imported.StartsWith(samplingDirectoryPath)) { return true; } } return false; } public override void OnInspectorGUI(NodeGUI node, AssetReferenceStreamManager streamManager, NodeGUIEditor editor, Action onValueChanged) { EditorGUILayout.HelpBox("Overwrite Import Setting: Overwrite import settings of incoming assets.", MessageType.Info); editor.UpdateNodeName(node); GUILayout.Space(10f); /* importer node has no platform key. platform key is contained by Unity's importer inspector itself. */ using (new EditorGUILayout.VerticalScope()) { Type incomingType = TypeUtility.FindFirstIncomingAssetType(streamManager, node.Data.InputPoints[0]); ImportSetting.ConfigStatus status = ImportSetting.GetConfigStatus(node.Data); if(incomingType == null) { // try to retrieve incoming type from configuration if(status == ImportSetting.ConfigStatus.GoodSampleFound) { incomingType = ImportSetting.GetReferenceAssetImporter(node.Data).GetType(); } else { using (new EditorGUILayout.VerticalScope (GUI.skin.box)) { EditorGUILayout.HelpBox ("Import setting type can be set by incoming asset, or you can specify by selecting.", MessageType.Info); using (new EditorGUILayout.HorizontalScope ()) { EditorGUILayout.LabelField ("Importer Type"); if (GUILayout.Button ("", "Popup", GUILayout.MinWidth (150f))) { var menu = new GenericMenu (); for (var i = 0; i < s_importerTypeList.Length; i++) { var index = i; menu.AddItem ( new GUIContent (s_importerTypeList [i]), false, () => { ResetConfig (node.Data); var configFilePath = FileUtility.GetImportSettingTemplateFilePath (s_importerTypeList [index]); SaveSampleFile (node.Data, configFilePath); } ); } menu.ShowAsContext (); } } } return; } } switch(status) { case ImportSetting.ConfigStatus.NoSampleFound: // ImportSetting.Setup() must run to grab another sample to configure. 
EditorGUILayout.HelpBox("Press Refresh to configure.", MessageType.Info); node.Data.NeedsRevisit = true; break; case ImportSetting.ConfigStatus.GoodSampleFound: if (m_importerEditor == null) { m_importerEditor = Editor.CreateEditor (ImportSetting.GetReferenceAssetImporter (node.Data)); } if (incomingType == typeof(UnityEditor.TextureImporter)) { using (new EditorGUILayout.VerticalScope (GUI.skin.box)) { m_overwritePackingTag = EditorGUILayout.ToggleLeft ("Configure Sprite Packing Tag", m_overwritePackingTag); using (new EditorGUI.DisabledScope (!m_overwritePackingTag)) { var val = m_spritePackingTagNameTemplate [editor.CurrentEditingGroup]; var newValue = EditorGUILayout.TextField ("Packing Tag", val); if (newValue != val) { using (new RecordUndoScope ("Undo Change Packing Tag", node, true)) { m_spritePackingTagNameTemplate [editor.CurrentEditingGroup] = newValue; onValueChanged (); } } EditorGUILayout.HelpBox ( "You can configure packing tag name with \"*\" to include group name in your sprite tag.", MessageType.Info); } } GUILayout.Space (10); } GUILayout.Label (string.Format("Import Setting ({0})", incomingType.Name)); m_importerEditor.OnInspectorGUI (); GUILayout.Space (40); using (new EditorGUILayout.HorizontalScope (GUI.skin.box)) { GUILayout.Space (4); EditorGUILayout.LabelField ("Clear Saved Import Setting"); if (GUILayout.Button ("Clear")) { if (EditorUtility.DisplayDialog ("Clear Saved Import Setting", string.Format ("Do you want to reset saved import setting for \"{0}\"? 
This operation is not undoable.", node.Name), "OK", "Cancel")) { ResetConfig (node.Data); } } } break; case ImportSetting.ConfigStatus.TooManySamplesFound: if (GUILayout.Button("Reset Import Setting")) { ResetConfig(node.Data); } break; } } return; } public override void Prepare (BuildTarget target, Model.NodeData node, IEnumerable<PerformGraph.AssetGroups> incoming, IEnumerable<Model.ConnectionData> connectionsToOutput, PerformGraph.Output Output) { Action<Type, Type, AssetReference> multipleAssetTypeFound = (Type expectedType, Type foundType, AssetReference foundAsset) => { throw new NodeException(string.Format("{3} :ImportSetting expect {0}, but different type of incoming asset is found({1} {2})", expectedType.FullName, foundType.FullName, foundAsset.fileNameAndExtension, node.Name), node.Id); }; Action<Type> unsupportedType = (Type unsupported) => { throw new NodeException(string.Format("{0} :Incoming asset type is not supported by ImportSetting (Incoming type:{1}). Perhaps you want to use Modifier instead?", node.Name, (unsupported != null)?unsupported.FullName:"null"), node.Id); }; Action<Type, Type> incomingTypeMismatch = (Type expectedType, Type incomingType) => { throw new NodeException(string.Format("{0} :Incoming asset type is does not match with this ImportSetting (Expected type:{1}, Incoming type:{2}).", node.Name, (expectedType != null)?expectedType.FullName:"null", (incomingType != null)?incomingType.FullName:"null"), node.Id); }; Action<ConfigStatus> errorInConfig = (ConfigStatus _) => { var firstAsset = TypeUtility.GetFirstIncomingAsset(incoming); if(firstAsset != null) { // give a try first in sampling file var configFilePath = FileUtility.GetImportSettingTemplateFilePath(firstAsset); SaveSampleFile(node, configFilePath); ValidateInputSetting(node, target, incoming, multipleAssetTypeFound, unsupportedType, incomingTypeMismatch, (ConfigStatus eType) => { if(eType == ConfigStatus.NoSampleFound) { throw new NodeException(node.Name + " :ImportSetting 
has no sampling file. Please configure it from Inspector.", node.Id); } if(eType == ConfigStatus.TooManySamplesFound) { throw new NodeException(node.Name + " :ImportSetting has too many sampling file. Please fix it from Inspector.", node.Id); } }); } }; ValidateInputSetting(node, target, incoming, multipleAssetTypeFound, unsupportedType, incomingTypeMismatch, errorInConfig); // ImportSettings does not add, filter or change structure of group, so just pass given group of assets if(Output != null) { var dst = (connectionsToOutput == null || !connectionsToOutput.Any())? null : connectionsToOutput.First(); if(incoming != null) { foreach(var ag in incoming) { Output(dst, ag.assetGroups); } } else { Output(dst, new Dictionary<string, List<AssetReference>>()); } } } public override void Build (BuildTarget target, Model.NodeData node, IEnumerable<PerformGraph.AssetGroups> incoming, IEnumerable<Model.ConnectionData> connectionsToOutput, PerformGraph.Output Output, Action<Model.NodeData, string, float> progressFunc) { if(incoming != null){ ApplyImportSetting(target, node, incoming); } } private void SaveSampleFile(Model.NodeData node, string configFilePath) { var samplingDirectoryPath = FileUtility.PathCombine(Model.Settings.Path.ImporterSettingsPath, node.Id); if (!Directory.Exists(samplingDirectoryPath)) { Directory.CreateDirectory(samplingDirectoryPath); } UnityEngine.Assertions.Assert.IsNotNull(configFilePath); var targetFilePath = FileUtility.PathCombine(samplingDirectoryPath, Path.GetFileName(configFilePath)); FileUtility.CopyFile(configFilePath, targetFilePath); AssetDatabase.Refresh(ImportAssetOptions.ImportRecursive); } public static ConfigStatus GetConfigStatus(Model.NodeData node) { var sampleFileDir = FileUtility.PathCombine(Model.Settings.Path.ImporterSettingsPath, node.Id); if(!Directory.Exists(sampleFileDir)) { return ConfigStatus.NoSampleFound; } var sampleFiles = FileUtility.GetFilePathsInFolder(sampleFileDir) .Where(path => 
!path.EndsWith(Model.Settings.UNITY_METAFILE_EXTENSION)) .ToList(); if(sampleFiles.Count == 0) { return ConfigStatus.NoSampleFound; } if(sampleFiles.Count == 1) { return ConfigStatus.GoodSampleFound; } return ConfigStatus.TooManySamplesFound; } public void ResetConfig(Model.NodeData node) { if (m_importerEditor != null) { UnityEngine.Object.DestroyImmediate (m_importerEditor); m_importerEditor = null; } var sampleFileDir = FileUtility.PathCombine(Model.Settings.Path.ImporterSettingsPath, node.Id); FileUtility.RemakeDirectory(sampleFileDir); } public static AssetImporter GetReferenceAssetImporter(Model.NodeData node) { var sampleFileDir = FileUtility.PathCombine(Model.Settings.Path.ImporterSettingsPath, node.Id); UnityEngine.Assertions.Assert.IsTrue(Directory.Exists(sampleFileDir)); var sampleFiles = FileUtility.GetFilePathsInFolder(sampleFileDir) .Where(path => !path.EndsWith(Model.Settings.UNITY_METAFILE_EXTENSION)) .ToList(); UnityEngine.Assertions.Assert.IsTrue(sampleFiles.Count == 1); return AssetImporter.GetAtPath(sampleFiles[0]); } private void ApplyImportSetting(BuildTarget target, Model.NodeData node, IEnumerable<PerformGraph.AssetGroups> incoming) { var referenceImporter = GetReferenceAssetImporter(node); var configurator = new ImportSettingsConfigurator(referenceImporter); foreach(var ag in incoming) { foreach(var groupKey in ag.assetGroups.Keys) { var assets = ag.assetGroups[groupKey]; foreach(var asset in assets) { var importer = AssetImporter.GetAtPath(asset.importFrom); bool importerModified = false; if(!configurator.IsEqual(importer, m_overwritePackingTag)) { configurator.OverwriteImportSettings(importer); importerModified = true; } if(m_overwritePackingTag) { if(asset.filterType == typeof(UnityEditor.TextureImporter) ) { var textureImporter = AssetImporter.GetAtPath(asset.importFrom) as TextureImporter; var newTagName = GetTagName(target, groupKey); if(textureImporter.spritePackingTag != newTagName) { textureImporter.spritePackingTag = newTagName; 
importerModified = true; } } } if(importerModified) { importer.SaveAndReimport(); asset.TouchImportAsset(); } } } } } private string GetTagName(BuildTarget target, string groupName) { return m_spritePackingTagNameTemplate[target].Replace("*", groupName); } private void ApplySpriteTag(BuildTarget target, IEnumerable<PerformGraph.AssetGroups> incoming) { foreach(var ag in incoming) { foreach(var groupKey in ag.assetGroups.Keys) { var assets = ag.assetGroups[groupKey]; foreach(var asset in assets) { if(asset.filterType == typeof(UnityEditor.TextureImporter) ) { var importer = AssetImporter.GetAtPath(asset.importFrom) as TextureImporter; importer.spritePackingTag = GetTagName(target, groupKey); importer.SaveAndReimport(); asset.TouchImportAsset(); } } } } } public static void ValidateInputSetting ( Model.NodeData node, BuildTarget target, IEnumerable<PerformGraph.AssetGroups> incoming, Action<Type, Type, AssetReference> multipleAssetTypeFound, Action<Type> unsupportedType, Action<Type, Type> incomingTypeMismatch, Action<ConfigStatus> errorInConfig ) { Type expectedType = TypeUtility.FindFirstIncomingAssetType(incoming); if(multipleAssetTypeFound != null) { if(expectedType != null && incoming != null) { foreach(var ag in incoming) { foreach(var assets in ag.assetGroups.Values) { foreach(var a in assets) { Type assetType = a.filterType; if(assetType != expectedType) { multipleAssetTypeFound(expectedType, assetType, a); } } } } } } if(unsupportedType != null) { if(expectedType != null) { if(expectedType == typeof(UnityEditor.TextureImporter) || expectedType == typeof(UnityEditor.ModelImporter) || expectedType == typeof(UnityEditor.AudioImporter) #if UNITY_5_6 || UNITY_5_6_OR_NEWER || expectedType == typeof(UnityEditor.VideoClipImporter) #endif ) { // good. 
do nothing } else { unsupportedType(expectedType); } } } var status = GetConfigStatus(node); if(errorInConfig != null) { if(status != ConfigStatus.GoodSampleFound) { errorInConfig(status); } } if(incomingTypeMismatch != null) { // if there is no incoming assets, there is no way to check if // right type of asset is coming in - so we'll just skip the test if(incoming != null && expectedType != null && status == ConfigStatus.GoodSampleFound) { Type targetType = GetReferenceAssetImporter(node).GetType(); if( targetType != expectedType ) { incomingTypeMismatch(targetType, expectedType); } } } } } }
/*
 * Exchange Web Services Managed API
 *
 * Copyright (c) Microsoft Corporation
 * All rights reserved.
 *
 * MIT License
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this
 * software and associated documentation files (the "Software"), to deal in the Software
 * without restriction, including without limitation the rights to use, copy, modify, merge,
 * publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
 * to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or
 * substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

namespace Microsoft.Exchange.WebServices.Data
{
    using System;
    using System.Collections.Generic;

    /// <summary>
    /// Represents a GetAttachment request. Attachments may be identified either by
    /// already-known <see cref="Attachment"/> objects or by raw attachment id strings;
    /// one response message is produced per requested attachment.
    /// </summary>
    internal sealed class GetAttachmentRequest : MultiResponseServiceRequest<GetAttachmentResponse>
    {
        // Attachments to fetch, specified as Attachment objects.
        private List<Attachment> attachments = new List<Attachment>();

        // Attachments to fetch, specified by their raw id strings.
        private List<string> attachmentIds = new List<string>();

        // Extra properties to load on each returned attachment.
        private List<PropertyDefinitionBase> additionalProperties = new List<PropertyDefinitionBase>();

        // Optional body-type override written into the AttachmentShape element.
        private BodyType? bodyType;

        /// <summary>
        /// Initializes a new instance of the <see cref="GetAttachmentRequest"/> class.
        /// </summary>
        /// <param name="service">The service.</param>
        /// <param name="errorHandlingMode">Indicates how errors should be handled.</param>
        internal GetAttachmentRequest(ExchangeService service, ServiceErrorHandling errorHandlingMode)
            : base(service, errorHandlingMode)
        {
        }

        /// <summary>
        /// Validate request. At least one attachment (by object or by id) must be present,
        /// and every supplied element must itself be valid.
        /// </summary>
        internal override void Validate()
        {
            base.Validate();
            if (this.Attachments.Count > 0)
            {
                EwsUtilities.ValidateParamCollection(this.Attachments, "Attachments");
            }
            if (this.AttachmentIds.Count > 0)
            {
                EwsUtilities.ValidateParamCollection(this.AttachmentIds, "AttachmentIds");
            }
            // Neither collection populated: there is nothing to request.
            if (this.AttachmentIds.Count == 0 && this.Attachments.Count == 0)
            {
                throw new ArgumentException(Strings.CollectionIsEmpty, @"Attachments/AttachmentIds");
            }
            for (int i = 0; i < this.AdditionalProperties.Count; i++)
            {
                EwsUtilities.ValidateParam(this.AdditionalProperties[i], string.Format("AdditionalProperties[{0}]", i));
            }
        }

        /// <summary>
        /// Creates the service response.
        /// </summary>
        /// <param name="service">The service.</param>
        /// <param name="responseIndex">Index of the response.</param>
        /// <returns>Service response.</returns>
        internal override GetAttachmentResponse CreateServiceResponse(ExchangeService service, int responseIndex)
        {
            // When requests were made by id string there is no Attachment object to
            // associate with the response, so null is passed instead.
            return new GetAttachmentResponse(this.Attachments.Count > 0 ? this.Attachments[responseIndex] : null);
        }

        /// <summary>
        /// Gets the expected response message count: one per requested attachment,
        /// whether specified by object or by id.
        /// </summary>
        /// <returns>Number of expected response messages.</returns>
        internal override int GetExpectedResponseMessageCount()
        {
            return this.Attachments.Count + this.AttachmentIds.Count;
        }

        /// <summary>
        /// Gets the name of the XML element.
        /// </summary>
        /// <returns>XML element name.</returns>
        internal override string GetXmlElementName()
        {
            return XmlElementNames.GetAttachment;
        }

        /// <summary>
        /// Gets the name of the response XML element.
        /// </summary>
        /// <returns>XML element name.</returns>
        internal override string GetResponseXmlElementName()
        {
            return XmlElementNames.GetAttachmentResponse;
        }

        /// <summary>
        /// Gets the name of the response message XML element.
        /// </summary>
        /// <returns>XML element name.</returns>
        internal override string GetResponseMessageXmlElementName()
        {
            return XmlElementNames.GetAttachmentResponseMessage;
        }

        /// <summary>
        /// Writes XML elements: an optional AttachmentShape (body type and/or additional
        /// properties) followed by the list of AttachmentId elements.
        /// </summary>
        /// <param name="writer">The writer.</param>
        internal override void WriteElementsToXml(EwsServiceXmlWriter writer)
        {
            // AttachmentShape is only emitted when it would carry content.
            if (this.BodyType.HasValue || this.AdditionalProperties.Count > 0)
            {
                writer.WriteStartElement(XmlNamespace.Messages, XmlElementNames.AttachmentShape);
                if (this.BodyType.HasValue)
                {
                    writer.WriteElementValue(
                        XmlNamespace.Types,
                        XmlElementNames.BodyType,
                        this.BodyType.Value);
                }
                if (this.AdditionalProperties.Count > 0)
                {
                    PropertySet.WriteAdditionalPropertiesToXml(writer, this.AdditionalProperties);
                }
                writer.WriteEndElement(); // AttachmentShape
            }
            writer.WriteStartElement(XmlNamespace.Messages, XmlElementNames.AttachmentIds);
            foreach (Attachment attachment in this.Attachments)
            {
                this.WriteAttachmentIdXml(writer, attachment.Id);
            }
            foreach (string attachmentId in this.AttachmentIds)
            {
                this.WriteAttachmentIdXml(writer, attachmentId);
            }
            writer.WriteEndElement();
        }

        /// <summary>
        /// Gets the request version.
        /// </summary>
        /// <returns>Earliest Exchange version in which this request is supported.</returns>
        internal override ExchangeVersion GetMinimumRequiredServerVersion()
        {
            return ExchangeVersion.Exchange2007_SP1;
        }

        /// <summary>
        /// Gets the attachments.
        /// </summary>
        /// <value>The attachments.</value>
        public List<Attachment> Attachments
        {
            get { return this.attachments; }
        }

        /// <summary>
        /// Gets the attachment ids.
        /// </summary>
        /// <value>The attachment ids.</value>
        public List<string> AttachmentIds
        {
            get { return this.attachmentIds; }
        }

        /// <summary>
        /// Gets the additional properties.
        /// </summary>
        /// <value>The additional properties.</value>
        public List<PropertyDefinitionBase> AdditionalProperties
        {
            get { return this.additionalProperties; }
        }

        /// <summary>
        /// Gets or sets the type of the body.
        /// </summary>
        /// <value>The type of the body.</value>
        public BodyType? BodyType
        {
            get { return this.bodyType; }
            set { this.bodyType = value; }
        }

        /// <summary>
        /// Gets a value indicating whether the TimeZoneContext SOAP header should be emitted.
        /// True exactly when MIME content is among the requested additional properties.
        /// </summary>
        /// <value>
        /// <c>true</c> if the time zone should be emitted; otherwise, <c>false</c>.
        /// </value>
        internal override bool EmitTimeZoneHeader
        {
            get
            {
                // we currently do not emit "AttachmentResponseShapeType.IncludeMimeContent"
                //
                return this.additionalProperties.Contains(ItemSchema.MimeContent);
            }
        }

        /// <summary>
        /// Writes attachment id elements.
        /// </summary>
        /// <param name="writer">The writer.</param>
        /// <param name="attachmentId">The attachment id.</param>
        private void WriteAttachmentIdXml(EwsServiceXmlWriter writer, string attachmentId)
        {
            writer.WriteStartElement(XmlNamespace.Types, XmlElementNames.AttachmentId);
            writer.WriteAttributeValue(XmlAttributeNames.Id, attachmentId);
            writer.WriteEndElement();
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using Microsoft.Build.Framework;
using Microsoft.Build.Tasks;
using Microsoft.Build.UnitTests;
using Microsoft.Build.Utilities;
using Microsoft.Build.Shared;
using System;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.Reflection;
using System.Reflection.Emit;
using System.IO;
using System.Text.RegularExpressions;
using System.Text;
using System.Xml.Xsl;
using System.Xml;
using Xunit;

namespace Microsoft.Build.UnitTests
{
#if !MONO
    /// <summary>
    /// Tests for the XslTransformation task. The temporary output folder for this test is
    /// Path.Combine(Path.GetTempPath(), DateTime.Now.Ticks.ToString()).
    /// Scenarios covered:
    /// 1. When combination of (xml, xmlfile) x (xsl, xslfile).
    /// 2. When Xsl parameters are missing.
    /// 3. When Xml parameters are missing.
    /// 4. Both missing.
    /// 5. Too many Xml parameters.
    /// 6. Too many Xsl parameters.
    /// 7. Setting Out parameter to file.
    /// 8. Setting Out parameter to screen.
    /// 9. Setting correct "Parameter" parameters for Xsl.
    /// 10. Setting the combination of "Parameter" parameters (Name, Namespace, Value) and testing the cases when they should run ok.
    /// 11. Setting "Parameter" parameter as empty string (should run OK).
    /// 12. Compiled Dll with type information.
    /// 13. Compiled Dll without type information.
    /// 14. Load Xslt with incorrect character as CNAME (load exception).
    /// 15. Missing XmlFile file.
    /// 16. Missing XslFile file.
    /// 17. Missing XsltCompiledDll file.
    /// 18. Bad XML on "Parameter" parameter.
    /// 19. Out parameter pointing to nonexistent location (K:\folder\file.xml)
    /// 20. XslDocument that throws runtime exception.
    /// 21. Passing a dll that has two types to XsltCompiledDll parameter without specifying a type.
    /// </summary>
    sealed public class XslTransformation_Tests
    {
        /// <summary>
        /// The "surround" regex, used to count how many times the stylesheet's
        /// &lt;surround&gt; wrapper element appears in transformed output.
        /// </summary>
        private readonly Regex _surroundMatch = new Regex("surround", RegexOptions.Multiline | RegexOptions.Compiled);

        /// <summary>
        /// The contents of xmldocument for tests.
        /// </summary>
        private readonly string _xmlDocument = "<root Name=\"param1\" Value=\"value111\"><abc><cde/></abc></root>";

        /// <summary>
        /// The contents of another xmldocument for tests.
        /// </summary>
        private readonly string _xmlDocument2 = "<root></root>";

        /// <summary>
        /// The contents of xsl document for tests. Wraps every copied node in a
        /// &lt;surround&gt; element so output size is predictable.
        /// </summary>
        private readonly string _xslDocument = "<xsl:stylesheet version=\"1.0\" xmlns:xsl=\"http://www.w3.org/1999/XSL/Transform\" xmlns:msxsl=\"urn:schemas-microsoft-com:xslt\" exclude-result-prefixes=\"msxsl\"><xsl:output method=\"xml\" indent=\"yes\"/><xsl:template match=\"@* | node()\"><surround><xsl:copy><xsl:apply-templates select=\"@* | node()\"/></xsl:copy></surround></xsl:template></xsl:stylesheet>";

        /// <summary>
        /// The contents of another xsl document for tests (uses document() to pull in
        /// an included file by path).
        /// </summary>
        private readonly string _xslDocument2 = "<?xml version = \"1.0\" ?><xsl:stylesheet version=\"1.0\" xmlns:xsl=\"http://www.w3.org/1999/XSL/Transform\"><xsl:template match = \"myInclude\"><xsl:apply-templates select = \"document(@path)\"/></xsl:template><xsl:template match = \"@*|node()\"><xsl:copy><xsl:apply-templates select = \"@*|node()\"/></xsl:copy></xsl:template></xsl:stylesheet>";

        /// <summary>
        /// The contents of xslparameters for tests: one plain parameter and one
        /// namespace-qualified parameter.
        /// </summary>
        private readonly string _xslParameters = "<Parameter Name=\"param1\" Value=\"1\" /><Parameter Name=\"param2\" Namespace=\"http://eksiduyuru.com\" Value=\"2\" />";

        /// <summary>
        /// The contents of xslt file for testing parameters.
        /// </summary>
        private readonly string _xslParameterDocument = "<xsl:stylesheet version=\"1.0\" xmlns:xsl=\"http://www.w3.org/1999/XSL/Transform\" xmlns:msxsl=\"urn:schemas-microsoft-com:xslt\" exclude-result-prefixes=\"msxsl\" xmlns:myns=\"http://eksiduyuru.com\"><xsl:output method=\"xml\" indent=\"yes\"/><xsl:param name=\"param1\" /><xsl:param name=\"myns:param2\" /><xsl:template match=\"/\"><values>param 1: <xsl:value-of select=\"$param1\" />param 2: <xsl:value-of select=\"$myns:param2\" /></values></xsl:template></xsl:stylesheet>";

        /// <summary>
        /// An erroneous XSL document: "$a" is not a valid element CNAME, so loading fails.
        /// </summary>
        private readonly string _errorXslDocument = "<xsl:stylesheet version=\"1.0\" xmlns:xsl=\"http://www.w3.org/1999/XSL/Transform\" xmlns:msxsl=\"urn:schemas-microsoft-com:xslt\"><xsl:template match=\"/\"><xsl:element name=\"$a\"></xsl:element></xsl:template></xsl:stylesheet>";

        /// <summary>
        /// An erroneous XSL document that loads fine but terminates with xsl:message at runtime.
        /// </summary>
        private readonly string _errorXslDocument2 = "<xsl:stylesheet version=\"1.0\" xmlns:xsl=\"http://www.w3.org/1999/XSL/Transform\" xmlns:msxsl=\"urn:schemas-microsoft-com:xslt\" exclude-result-prefixes=\"msxsl\"><xsl:template match=\"/\"><xsl:message terminate=\"yes\">error?</xsl:message></xsl:template></xsl:stylesheet>";

        /// <summary>
        /// When combination of (xml, xmlfile) x (xsl, xslfile).
/// </summary> [Fact] public void XmlXslParameters() { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // Test when Xml and Xsl parameters are correct for (int xmi = 0; xmi < xmlInputs.Count; xmi++) { for (int xsi = 0; xsi < xslInputs.Count; xsi++) { XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.OutputPaths = outputPaths; XslTransformation.XmlInput.XmlModes xmlKey = xmlInputs[xmi].Key; object xmlValue = xmlInputs[xmi].Value; XslTransformation.XsltInput.XslModes xslKey = xslInputs[xsi].Key; object xslValue = xslInputs[xsi].Value; switch (xmlKey) { case XslTransformation.XmlInput.XmlModes.Xml: t.XmlContent = (string)xmlValue; break; case XslTransformation.XmlInput.XmlModes.XmlFile: t.XmlInputPaths = (TaskItem[])xmlValue; break; default: Assert.True(false, "Test error"); break; } switch (xslKey) { case XslTransformation.XsltInput.XslModes.Xslt: t.XslContent = (string)xslValue; break; case XslTransformation.XsltInput.XslModes.XsltFile: t.XslInputPath = (TaskItem)xslValue; break; case XslTransformation.XsltInput.XslModes.XsltCompiledDll: t.XslCompiledDllPath = (TaskItem)xslValue; break; default: Assert.True(false, "Test error"); break; } Assert.True(t.Execute()); // "The test should have passed at the both params correct test" } } CleanUp(dir); } /// <summary> /// When Xsl parameters are missing. 
/// </summary>
[Fact]
public void MissingXslParameter()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // test Xsl missing: for every XML input mode, executing without any XSL input
    // must fail and log MSB3701.
    for (int xmi = 0; xmi < xmlInputs.Count; xmi++)
    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.OutputPaths = outputPaths;
        XslTransformation.XmlInput.XmlModes xmlKey = xmlInputs[xmi].Key;
        object xmlValue = xmlInputs[xmi].Value;

        switch (xmlKey)
        {
            case XslTransformation.XmlInput.XmlModes.Xml:
                t.XmlContent = (string)xmlValue;
                break;
            case XslTransformation.XmlInput.XmlModes.XmlFile:
                t.XmlInputPaths = (TaskItem[])xmlValue;
                break;
            default:
                Assert.True(false, "Test error");
                break;
        }

        Assert.False(t.Execute()); // "The test should fail when there is missing Xsl params"
        Console.WriteLine(engine.Log);
        Assert.Contains("MSB3701", engine.Log); // "The output should contain MSB3701 error message at missing Xsl params test"

        // Reset the shared log so the next iteration's MSB3701 check inspects only
        // its own execution, not residue from a previous pass (this reset was
        // already present in MissingXmlParameter below; added here for consistency).
        engine.Log = "";
    }

    CleanUp(dir);
}

/// <summary>
/// When Xml parameters are missing.
/// </summary>
[Fact]
public void MissingXmlParameter()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // Test Xml missing: for every XSL input mode, executing without any XML input
    // must fail and log MSB3701.
    for (int xsi = 0; xsi < xslInputs.Count; xsi++)
    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.OutputPaths = outputPaths;
        XslTransformation.XsltInput.XslModes xslKey = xslInputs[xsi].Key;
        object xslValue = xslInputs[xsi].Value;

        switch (xslKey)
        {
            case XslTransformation.XsltInput.XslModes.Xslt:
                t.XslContent = (string)xslValue;
                break;
            case XslTransformation.XsltInput.XslModes.XsltFile:
                t.XslInputPath = (TaskItem)xslValue;
                break;
            case XslTransformation.XsltInput.XslModes.XsltCompiledDll:
                t.XslCompiledDllPath = (TaskItem)xslValue;
                break;
            default:
                Assert.True(false, "Test error");
                break;
        }

        Assert.False(t.Execute()); // "The test should fail when there is missing Xml params"
        Console.WriteLine(engine.Log);
        Assert.Contains("MSB3701", engine.Log); // "The output should contain MSB3701 error message at missing Xml params test"
        engine.Log = "";
    }

    CleanUp(dir);
}

/// <summary>
/// Both missing.
/// </summary>
[Fact]
public void MissingXmlXslParameter()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // Test both missing: with neither XML nor XSL input the task must fail with MSB3701.
    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.OutputPaths = outputPaths;

        Assert.False(t.Execute()); // "The test should fail when there is no params"
        Console.WriteLine(engine.Log);
        Assert.Contains("MSB3701", engine.Log); // "The output should contain MSB3701 error message"
    }

    CleanUp(dir);
}

/// <summary>
/// Too many Xml parameters.
/// </summary> [Fact] public void ManyXmlParameters() { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // Test too many Xml. { XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.OutputPaths = outputPaths; t.XmlContent = _xmlDocument; t.XmlInputPaths = xmlPaths; t.XslContent = _xslDocument; Assert.Equal(_xmlDocument, t.XmlContent); Assert.Equal(xmlPaths, t.XmlInputPaths); Assert.False(t.Execute()); // "The test should fail when there are too many files" Console.WriteLine(engine.Log); Assert.Contains("MSB3701", engine.Log); } CleanUp(dir); } /// <summary> /// Too many Xsl parameters. /// </summary> [Fact] public void ManyXslParameters() { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // Test too many Xsl. { XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.OutputPaths = outputPaths; t.XmlContent = _xmlDocument; t.XslContent = _xslDocument; t.XslInputPath = xslPath; Assert.Equal(_xslDocument, t.XslContent); Assert.Equal(xslPath, t.XslInputPath); Assert.False(t.Execute()); // "The test should fail when there are too many files" Console.WriteLine(engine.Log); Assert.Contains("MSB3701", engine.Log); // "The output should contain MSB3701 error message at no params test" } CleanUp(dir); } /// <summary> /// Test out parameter. 
/// </summary> [Fact] public void OutputTest() { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // Test Out { XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.XmlContent = _xmlDocument; t.XslContent = _xslDocument; t.OutputPaths = outputPaths; Assert.True(t.Execute()); // "Test out should have given true when executed" Assert.Equal(String.Empty, engine.Log); // "The log should be empty" Console.WriteLine(engine.Log); using (StreamReader sr = new StreamReader(t.OutputPaths[0].ItemSpec)) { string fileContents = sr.ReadToEnd(); MatchCollection mc = _surroundMatch.Matches(fileContents); Assert.Equal(8, mc.Count); // "The file test doesn't match" } } CleanUp(dir); } /// <summary> /// Setting correct "Parameter" parameters for Xsl. 
/// </summary> [Fact] public void XsltParamatersCorrect() { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // Test Correct Xslt Parameters { XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.OutputPaths = outputPaths; t.XmlContent = _xmlDocument; t.XslContent = _xslParameterDocument; t.Parameters = _xslParameters; t.Execute(); Console.WriteLine(engine.Log); using (StreamReader sr = new StreamReader(t.OutputPaths[0].ItemSpec)) { string fileContents = sr.ReadToEnd(); Assert.Contains("param 1: 1param 2: 2", fileContents); } } CleanUp(dir); } /// <summary> /// Setting the combination of "Parameter" parameters (Name, Namespace, Value) and testing the cases when they should run ok. 
/// </summary> [Fact] public void XsltParametersIncorrect() { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // Test Xslt Parameters { string[] attrs = new string[] { "Name=\"param2\"", "Namespace=\"http://eksiduyuru.com\"", "Value=\"2\"" }; for (int i = 0; i < Math.Pow(2, attrs.Length); i++) { string res = ""; for (int k = 0; k < attrs.Length; k++) { if ((i & (int)Math.Pow(2, k)) != 0) { res += attrs[k] + " "; } } XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.OutputPaths = outputPaths; t.XmlContent = _xmlDocument; t.XslContent = _xslParameterDocument; t.Parameters = "<Parameter " + res + "/>"; Assert.Equal("<Parameter " + res + "/>", t.Parameters); bool result = t.Execute(); Console.WriteLine(engine.Log); if (i == 5 || i == 7) { Assert.True(result); // "Only 5th and 7th values should pass." } else { Assert.False(result); // "Only 5th and 7th values should pass." } } } CleanUp(dir); } /// <summary> /// Setting "Parameter" parameter as empty string (should run OK). 
/// </summary> [Fact] public void EmptyParameters() { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // load empty parameters { XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.OutputPaths = outputPaths; t.XmlInputPaths = xmlPaths; t.XslInputPath = xslPath; t.Parameters = " "; Assert.True(t.Execute()); // "This test should've passed (empty parameters)." Console.WriteLine(engine.Log); } CleanUp(dir); } /// <summary> /// Compiled Dll with type information. /// </summary> [Fact] public void CompiledDllWithType() { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // Test Compiled DLLs // with type specified. { XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.OutputPaths = outputPaths; t.XmlContent = _xmlDocument; xslCompiledPath.ItemSpec = xslCompiledPath.ItemSpec + ";xslt"; t.XslCompiledDllPath = xslCompiledPath; Assert.Equal(xslCompiledPath.ItemSpec, t.XslCompiledDllPath.ItemSpec); Assert.True(t.Execute()); // "XsltComiledDll1 execution should've passed" Console.WriteLine(engine.Log); Assert.DoesNotContain("MSB", engine.Log); // "The log should not contain any errors. (XsltComiledDll1)" } CleanUp(dir); } /// <summary> /// Compiled Dll without type information. 
/// </summary> [Fact] public void CompiledDllWithoutType() { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // without type specified. { XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.OutputPaths = outputPaths; t.XmlContent = _xmlDocument; t.XslCompiledDllPath = xslCompiledPath; Assert.True(t.Execute(), "XsltComiledDll2 execution should've passed" + engine.Log); Console.WriteLine(engine.Log); Assert.False(engine.MockLogger.ErrorCount > 0); // "The log should not contain any errors. (XsltComiledDll2)" } CleanUp(dir); } /// <summary> /// Load Xslt with incorrect character as CNAME (load exception). /// </summary> [Fact] public void BadXsltFile() { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // load bad xslt { XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.OutputPaths = outputPaths; t.XmlContent = _xmlDocument; t.XslContent = _errorXslDocument; try { t.Execute(); Console.WriteLine(engine.Log); } catch (Exception e) { Assert.Contains("The '$' character", e.Message); } } CleanUp(dir); } /// <summary> /// Load Xslt with incorrect character as CNAME (load exception). 
/// </summary> [Fact] public void MissingOutputFile() { Assert.Throws<System.ArgumentNullException>(() => { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // load missing xml { XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.XmlInputPaths = xmlPaths; t.XslInputPath = xslPath; Assert.False(t.Execute()); // "This test should've failed (no output)." Console.WriteLine(engine.Log); } CleanUp(dir); } ); } /// <summary> /// Missing XmlFile file. /// </summary> [Fact] public void MissingXmlFile() { string dir; TaskItem[] xmlPaths; TaskItem xslPath; TaskItem xslCompiledPath; TaskItem[] outputPaths; List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs; List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs; MockEngine engine; Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine); // load missing xml { XslTransformation t = new XslTransformation(); t.BuildEngine = engine; t.OutputPaths = outputPaths; xmlPaths[0].ItemSpec = xmlPaths[0].ItemSpec + "bad"; t.XmlInputPaths = xmlPaths; t.XslInputPath = xslPath; Console.WriteLine(engine.Log); Assert.False(t.Execute()); // "This test should've failed (bad xml)." Assert.Contains("MSB3703", engine.Log); } CleanUp(dir); } /// <summary> /// Missing XslFile file. 
/// </summary>
[Fact]
public void MissingXsltFile()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // load missing xsl
    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.OutputPaths = outputPaths;
        t.XmlInputPaths = xmlPaths;
        // Point XslInputPath at a file that does not exist on disk.
        xslPath.ItemSpec = xslPath.ItemSpec + "bad";
        t.XslInputPath = xslPath;
        Assert.False(t.Execute()); // "This test should've failed (bad xslt)."
        Console.WriteLine(engine.Log);
        // The task is expected to log error code MSB3704.
        Assert.Contains("MSB3704", engine.Log);
    }

    CleanUp(dir);
}

/// <summary>
/// Missing XsltCompiledDll file.
/// </summary>
[Fact]
public void MissingCompiledDllFile()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // missing xsltCompiledDll
    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.OutputPaths = outputPaths;
        t.XmlContent = _xmlDocument;
        // Mangle the compiled-dll path (also exercises the ";type" suffix syntax).
        xslCompiledPath.ItemSpec = xslCompiledPath.ItemSpec + "bad;xslt";
        t.XslCompiledDllPath = xslCompiledPath;
        Assert.False(t.Execute()); // "XsltComiledDllBad execution should've failed"
        Console.WriteLine(engine.Log);
        Assert.Contains("MSB3704", engine.Log);
    }

    CleanUp(dir);
}

/// <summary>
/// Bad XML on "Parameter" parameter.
/// </summary>
[Fact]
public void BadXmlAsParameter()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // load bad xml on parameters
    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.OutputPaths = outputPaths;
        t.XmlContent = _xmlDocument;
        t.XslContent = _xslParameterDocument;
        // Not well-formed XML: the task must either return false or throw.
        t.Parameters = "<<>>";
        try
        {
            Assert.False(t.Execute()); // "This test should've failed (bad params1)."
            Console.WriteLine(engine.Log);
        }
        catch (Exception e)
        {
            Assert.Contains("'<'", e.Message);
        }
    }

    CleanUp(dir);
}

/// <summary>
/// Out parameter pointing to nonexistent location (K:\folder\file.xml)
/// </summary>
[Fact]
public void OutputFileCannotBeWritten()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // load bad output
    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.OutputPaths = outputPaths;
        t.XmlContent = _xmlDocument;
        t.XslContent = _xslDocument;
        // Overwrite the valid output path with one on a (presumably) nonexistent drive.
        t.OutputPaths = new TaskItem[] { new TaskItem("k:\\folder\\file.xml") };
        try
        {
            Assert.False(t.Execute()); // "This test should've failed (bad output)."
            Console.WriteLine(engine.Log);
        }
        catch (Exception e)
        {
            Assert.Contains("MSB3701", e.Message);
        }
    }

    CleanUp(dir);
}

/// <summary>
/// XslDocument that throws runtime exception.
/// </summary>
[Fact]
public void XsltDocumentThrowsError()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // load error xslDocument
    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.OutputPaths = outputPaths;
        t.XmlContent = _xmlDocument;
        t.XslContent = _errorXslDocument2;
        try
        {
            Assert.False(t.Execute()); // "This test should've failed (xsl with error)."
            Console.WriteLine(engine.Log);
        }
        catch (Exception e)
        {
            Assert.Contains("error?", e.Message);
        }
    }

    CleanUp(dir);
}

/// <summary>
/// Passing a dll that has two types to XsltCompiledDll parameter without specifying a type.
/// </summary>
[Fact]
public void CompiledDllWithTwoTypes()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // doubletype
    string doubleTypePath = Path.Combine(dir, "double.dll");
    CompileDoubleType(doubleTypePath);

    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.OutputPaths = outputPaths;
        t.XmlContent = _xmlDocument;
        t.XslCompiledDllPath = new TaskItem(doubleTypePath);
        try
        {
            // NOTE(review): return value deliberately not asserted here; only the
            // exception message (if any) is checked.
            t.Execute();
            Console.WriteLine(engine.Log);
        }
        catch (Exception e)
        {
            Assert.Contains("error?", e.Message);
        }
        System.Diagnostics.Debug.WriteLine(engine.Log);
    }

    CleanUp(dir);
}

/// <summary>
/// Matching XmlInputPaths and OutputPaths
/// </summary>
[Fact]
public void
MultipleXmlInputs_Matching()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // A second, different XML document so outputs can be compared below.
    var otherXmlPath = new TaskItem(Path.Combine(dir, Guid.NewGuid().ToString()));
    using (StreamWriter sw = new StreamWriter(otherXmlPath.ItemSpec, false))
    {
        sw.Write(_xmlDocument2);
    }

    // xmlPaths have one XmlPath, lets duplicate it
    TaskItem[] xmlMultiPaths = new TaskItem[] { xmlPaths[0], otherXmlPath, xmlPaths[0], xmlPaths[0] };

    // outputPaths have one output path, lets duplicate it
    TaskItem[] outputMultiPaths = new TaskItem[]
    {
        new TaskItem(outputPaths[0].ItemSpec + ".1.xml"),
        new TaskItem(outputPaths[0].ItemSpec + ".2.xml"),
        new TaskItem(outputPaths[0].ItemSpec + ".3.xml"),
        new TaskItem(outputPaths[0].ItemSpec + ".4.xml")
    };

    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.XslInputPath = xslPath;
        t.XmlInputPaths = xmlMultiPaths;
        t.OutputPaths = outputMultiPaths;
        // NOTE(review): assertion message mentions CompiledDllWithTwoTypes — looks copy-pasted
        // from another test; the assertion itself is what matters.
        Assert.True(t.Execute(), "CompiledDllWithTwoTypes execution should've passed" + engine.Log);
        Console.WriteLine(engine.Log);
        foreach (TaskItem tsk in t.OutputPaths)
        {
            Assert.True(File.Exists(tsk.ItemSpec), tsk.ItemSpec + " should exist on output dir");
        }

        // The first and second input XML files are not equivalent, so their output files
        // should be different
        Assert.NotEqual(new FileInfo(xmlMultiPaths[0].ItemSpec).Length, new FileInfo(xmlMultiPaths[1].ItemSpec).Length);
        Assert.NotEqual(new FileInfo(outputMultiPaths[0].ItemSpec).Length, new FileInfo(outputMultiPaths[1].ItemSpec).Length);

        System.Diagnostics.Debug.WriteLine(engine.Log);
    }

    CleanUp(dir);
}

/// <summary>
/// Not Matching XmlInputPaths and OutputPaths
/// </summary>
[Fact]
public void MultipleXmlInputs_NotMatching()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // xmlPaths have one XmlPath, lets duplicate it **4 times **
    TaskItem[] xmlMultiPaths = new TaskItem[] { xmlPaths[0], xmlPaths[0], xmlPaths[0], xmlPaths[0] };

    // outputPaths have one output path, lets duplicate it **3 times **
    TaskItem[] outputMultiPathsShort = new TaskItem[]
    {
        new TaskItem(outputPaths[0].ItemSpec + ".1.xml"),
        new TaskItem(outputPaths[0].ItemSpec + ".2.xml"),
        new TaskItem(outputPaths[0].ItemSpec + ".3.xml")
    };

    // 5 outputs for 4 inputs — also a mismatch.
    TaskItem[] outputMultiPathsLong = new TaskItem[]
    {
        new TaskItem(outputPaths[0].ItemSpec + ".1.xml"),
        new TaskItem(outputPaths[0].ItemSpec + ".2.xml"),
        new TaskItem(outputPaths[0].ItemSpec + ".3.xml"),
        new TaskItem(outputPaths[0].ItemSpec + ".4.xml"),
        new TaskItem(outputPaths[0].ItemSpec + ".5.xml")
    };

    // Short version.
    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.XslInputPath = xslPath;
        t.XmlInputPaths = xmlMultiPaths;
        t.OutputPaths = outputMultiPathsShort;
        Assert.False(t.Execute(), "CompiledDllWithTwoTypes execution should've failed" + engine.Log);
        System.Diagnostics.Debug.WriteLine(engine.Log);
    }

    // Long version
    {
        XslTransformation t = new XslTransformation();
        t.BuildEngine = engine;
        t.XslInputPath = xslPath;
        t.XmlInputPaths = xmlMultiPaths;
        t.OutputPaths = outputMultiPathsLong;
        Assert.False(t.Execute(), "CompiledDllWithTwoTypes execution should've failed" + engine.Log);
        Console.WriteLine(engine.Log);
        System.Diagnostics.Debug.WriteLine(engine.Log);
    }

    CleanUp(dir);
}

/// <summary>
/// Validate that the XslTransformation task allows use of the document function
/// </summary>
[Fact]
public void XslDocumentFunctionWorks()
{
    string dir;
    TaskItem[] xmlPaths;
    TaskItem xslPath;
    TaskItem xslCompiledPath;
    TaskItem[] outputPaths;
    List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs;
    List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs;
    MockEngine engine;
    Prepare(out dir, out xmlPaths, out xslPath, out xslCompiledPath, out outputPaths, out xmlInputs, out xslInputs, out engine);

    // Stylesheet that uses the XSLT document() function (content defined by _xslDocument2).
    var otherXslPath = new TaskItem(Path.Combine(dir, Guid.NewGuid().ToString() + ".xslt"));
    using (StreamWriter sw = new StreamWriter(otherXslPath.ItemSpec, false))
    {
        sw.Write(_xslDocument2);
    }

    // Initialize first xml file for the XslTransformation task to consume
    var myXmlPath1 = new TaskItem(Path.Combine(dir, "a.xml"));
    using (StreamWriter sw = new StreamWriter(myXmlPath1.ItemSpec, false))
    {
        sw.Write("<document><myInclude path = \"b.xml\"/></document>");
    }

    // Initialize second xml file for the first one to consume
    var myXmlPath2 = new TaskItem(Path.Combine(dir, "b.xml"));
    using (StreamWriter sw = new StreamWriter(myXmlPath2.ItemSpec, false))
    {
        sw.Write("<stuff/>");
    }

    // Validate that execution passes when UseTrustedSettings is true
    XslTransformation t = new XslTransformation();
    t.BuildEngine = engine;
    t.OutputPaths = outputPaths;
    t.XmlInputPaths = new TaskItem[] { myXmlPath1 };
    t.XslInputPath = otherXslPath;
    t.UseTrustedSettings = true;

    Assert.True(t.Execute()); // "Test should have passed and allowed the use of the document() function within the xslt file"

    // Validate that execution fails when UseTrustedSettings is false
    t = new XslTransformation();
    t.BuildEngine = engine;
    t.OutputPaths = outputPaths;
    t.XmlInputPaths = new TaskItem[] { myXmlPath1 };
    t.XslInputPath = otherXslPath;
    t.UseTrustedSettings = false;

    Assert.False(t.Execute()); // "Test should have failed and not allowed the use of the document() function within the xslt file"

    CleanUp(dir);
}

/// <summary>
/// Prepares the test environment, creates necessary files.
/// </summary>
/// <param name="dir">The temp dir</param>
/// <param name="xmlPaths">The xml file's path</param>
/// <param name="xslPath">The xsl file's path</param>
/// <param name="xslCompiledPath">The xsl dll's path</param>
/// <param name="outputPaths">The output file's path</param>
/// <param name="xmlInputs">The xml input ways</param>
/// <param name="xslInputs">The xsl input ways</param>
/// <param name="engine">The Mock engine</param>
private void Prepare(out string dir, out TaskItem[] xmlPaths, out TaskItem xslPath, out TaskItem xslCompiledPath, out TaskItem[] outputPaths, out List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>> xmlInputs, out List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>> xslInputs, out MockEngine engine)
{
    // Unique scratch directory per test run (tick-based name).
    dir = Path.Combine(Path.GetTempPath(), DateTime.Now.Ticks.ToString());
    Directory.CreateDirectory(dir);

    // save XML and XSLT documents.
    xmlPaths = new TaskItem[] { new TaskItem(Path.Combine(dir, "doc.xml")) };
    xslPath = new TaskItem(Path.Combine(dir, "doc.xslt"));
    xslCompiledPath = new TaskItem(Path.Combine(dir, "doc.dll"));
    outputPaths = new TaskItem[] { new TaskItem(Path.Combine(dir, "testout.xml")) };

    using (StreamWriter sw = new StreamWriter(xmlPaths[0].ItemSpec, false))
    {
        sw.Write(_xmlDocument);
        sw.Close();
    }

    using (StreamWriter sw = new StreamWriter(xslPath.ItemSpec, false))
    {
        sw.Write(_xslDocument);
        sw.Close();
    }

    // Two ways of supplying each input: inline content and file path.
    xmlInputs = new List<KeyValuePair<XslTransformation.XmlInput.XmlModes, object>>();
    xslInputs = new List<KeyValuePair<XslTransformation.XsltInput.XslModes, object>>();

    xmlInputs.Add(new KeyValuePair<XslTransformation.XmlInput.XmlModes, object>(XslTransformation.XmlInput.XmlModes.Xml, _xmlDocument));
    xmlInputs.Add(new KeyValuePair<XslTransformation.XmlInput.XmlModes, object>(XslTransformation.XmlInput.XmlModes.XmlFile, xmlPaths));
    xslInputs.Add(new KeyValuePair<XslTransformation.XsltInput.XslModes, object>(XslTransformation.XsltInput.XslModes.Xslt, _xslDocument));
    xslInputs.Add(new KeyValuePair<XslTransformation.XsltInput.XslModes, object>(XslTransformation.XsltInput.XslModes.XsltFile, xslPath));

    // Pre-compile the stylesheet into doc.dll for the compiled-dll test modes.
    Compile(xslPath.ItemSpec, xslCompiledPath.ItemSpec);
    engine = new MockEngine();
    // NOTE(review): 'results' is unused here; kept as-is since this is a comments-only pass.
    List<bool> results = new List<bool>();
}

/// <summary>
/// Clean ups the test files
/// </summary>
/// <param name="dir">The directory for temp files.</param>
private void CleanUp(string dir)
{
    try
    {
        FileUtilities.DeleteWithoutTrailingBackslash(dir, true);
    }
    catch
    {
        // Best-effort cleanup: failure to delete temp files must not fail the test.
    }
}

#region Compiler
#pragma warning disable 0618 // XmlReaderSettings.ProhibitDtd is obsolete
/// <summary>
/// Compiles given stylesheets into an assembly.
/// </summary>
private void Compile(string inputFile, string outputFile)
{
    const string CompiledQueryName = "xslt";
    string outputDir = Path.GetDirectoryName(outputFile) + Path.DirectorySeparatorChar;
    XsltSettings xsltSettings = new XsltSettings(true, true);
    XmlUrlResolver xmlResolver = new XmlUrlResolver();
    XmlReaderSettings readerSettings = new XmlReaderSettings();
    AssemblyBuilder asmBldr;

    readerSettings.ProhibitDtd = false;
    readerSettings.XmlResolver = xmlResolver;

    string scriptAsmPathPrefix = outputDir + Path.GetFileNameWithoutExtension(outputFile) + ".script";

    // Create assembly and module builders
    AssemblyName asmName = new AssemblyName();
    asmName.Name = CompiledQueryName;
    asmBldr = AppDomain.CurrentDomain.DefineDynamicAssembly(asmName, AssemblyBuilderAccess.Save, outputDir);

    // Add custom attribute to assembly marking it as security transparent so that Assert will not be allowed
    // and link demands will be converted to full demands.
    asmBldr.SetCustomAttribute(new CustomAttributeBuilder(typeof(System.Security.SecurityTransparentAttribute).GetConstructor(Type.EmptyTypes), new object[] { }));

    // Mark the assembly with GeneratedCodeAttribute to improve profiling experience
    asmBldr.SetCustomAttribute(new CustomAttributeBuilder(typeof(GeneratedCodeAttribute).GetConstructor(new Type[] { typeof(string), typeof(string) }), new object[] { "XsltCompiler", "2.0.0.0" }));

    ModuleBuilder modBldr = asmBldr.DefineDynamicModule(Path.GetFileName(outputFile), Path.GetFileName(outputFile), true);

    string sourceUri = inputFile;
    string className = Path.GetFileNameWithoutExtension(inputFile);
    string scriptAsmId = "";

    // Always use the .dll extension; otherwise Fusion won't be able to locate this dependency
    string scriptAsmPath = scriptAsmPathPrefix + scriptAsmId + ".dll";

    // Create TypeBuilder and compile the stylesheet into it
    TypeBuilder typeBldr = modBldr.DefineType(CompiledQueryName, TypeAttributes.Public | TypeAttributes.Abstract | TypeAttributes.Sealed | TypeAttributes.BeforeFieldInit);

    CompilerErrorCollection errors = null;
    try
    {
        using (XmlReader reader = XmlReader.Create(sourceUri, readerSettings))
        {
            errors = XslCompiledTransform.CompileToType(
                reader,
                xsltSettings,
                xmlResolver,
                false,
                typeBldr,
                scriptAsmPath);
        }
    }
    catch (Exception e)
    {
        // Any compile failure fails the calling test outright.
        Assert.True(false, "Compiler didn't work" + e.ToString());
    }

    asmBldr.Save(Path.GetFileName(outputFile), PortableExecutableKinds.ILOnly, ImageFileMachine.I386);
}
#pragma warning restore 0618

/// <summary>
/// Creates a dll that has 2 types in it.
/// </summary>
/// <param name="outputFile">The dll name.</param>
private void CompileDoubleType(string outputFile)
{
    string outputDir = Path.GetDirectoryName(outputFile) + Path.DirectorySeparatorChar;
    const string CompiledQueryName = "xslt";
    AssemblyBuilder asmBldr;

    // Create assembly and module builders
    AssemblyName asmName = new AssemblyName();
    asmName.Name = "assmname";
    asmBldr = AppDomain.CurrentDomain.DefineDynamicAssembly(asmName, AssemblyBuilderAccess.Save, outputDir);

    ModuleBuilder modBldr = asmBldr.DefineDynamicModule(Path.GetFileName(outputFile), Path.GetFileName(outputFile), true);

    // Create TypeBuilder and compile the stylesheet into it
    TypeBuilder typeBldr = modBldr.DefineType(CompiledQueryName, TypeAttributes.Public | TypeAttributes.Abstract | TypeAttributes.Sealed | TypeAttributes.BeforeFieldInit);
    typeBldr.DefineField("x", typeof(int), FieldAttributes.Private);

    // Second type with a distinct name — makes type resolution ambiguous for the task.
    TypeBuilder typeBldr2 = modBldr.DefineType(CompiledQueryName + "2", TypeAttributes.Public | TypeAttributes.Abstract | TypeAttributes.Sealed | TypeAttributes.BeforeFieldInit);
    typeBldr2.DefineField("x", typeof(int), FieldAttributes.Private);

    typeBldr.CreateType();
    typeBldr2.CreateType();
    asmBldr.Save(Path.GetFileName(outputFile), PortableExecutableKinds.ILOnly, ImageFileMachine.I386);
}
#endregion
}
#endif
}
#region Imports
using System;
using System.ComponentModel;
using System.Diagnostics;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Collections.ObjectModel;
using System.Configuration;
using System.Reflection;
using System.Threading;
using System.Globalization;
using System.IO;
using System.Workflow.Runtime.Hosting;
using System.Workflow.Runtime.Configuration;
using System.Workflow.ComponentModel;
using System.Workflow.Runtime.Tracking;
using System.Workflow.ComponentModel.Compiler;
using System.Xml;
using System.Workflow.Runtime.DebugEngine;
using System.Workflow.ComponentModel.Serialization;
using System.ComponentModel.Design;
using System.ComponentModel.Design.Serialization;
#endregion

namespace System.Workflow.Runtime
{
    #region Class WorkflowRuntimeEventArgs

    /// <summary>
    /// Event data for the runtime's Started/Stopped events; carries whether the
    /// runtime is started at the time the event was raised.
    /// </summary>
    [Obsolete("The System.Workflow.* types are deprecated. Instead, please use the new types from System.Activities.*")]
    public sealed class WorkflowRuntimeEventArgs : EventArgs
    {
        private bool _isStarted;

        internal WorkflowRuntimeEventArgs(bool isStarted)
        {
            _isStarted = isStarted;
        }

        public bool IsStarted
        {
            get { return _isStarted; }
        }
    }
    #endregion

    /// <summary>
    /// A set of dictionaries "fanned out" over a fixed number of buckets keyed by the
    /// key's hash code. Callers lock an individual bucket rather than one global
    /// dictionary, reducing contention.
    /// </summary>
    internal class FanOutOnKeyDictionary<K, V> : IEnumerable<Dictionary<K, V>>
    {
        Dictionary<int, Dictionary<K, V>> dictionaryDictionary;

        public FanOutOnKeyDictionary(int fanDegree)
        {
            dictionaryDictionary = new Dictionary<int, Dictionary<K, V>>(fanDegree);
            for (int i = 0; i < fanDegree; ++i)
            {
                dictionaryDictionary.Add(i, new Dictionary<K, V>());
            }
        }

        // Returns the bucket responsible for this key (stable for a given key and fan degree).
        public Dictionary<K, V> this[K key]
        {
            get
            {
                return dictionaryDictionary[Math.Abs(key.GetHashCode() % dictionaryDictionary.Count)];
            }
        }

        // Thread-safe lookup: locks only the key's bucket for the duration of the read.
        public bool SafeTryGetValue(K key, out V value)
        {
            Dictionary<K, V> dict = this[key];
            lock (dict)
            {
                return dict.TryGetValue(key, out value);
            }
        }

        #region IEnumerable<Dictionary<K,V>> Members

        public IEnumerator<Dictionary<K, V>> GetEnumerator()
        {
            return dictionaryDictionary.Values.GetEnumerator();
        }

        #endregion

        #region IEnumerable Members

        IEnumerator IEnumerable.GetEnumerator()
        {
            return dictionaryDictionary.Values.GetEnumerator();
        }

        #endregion
    }

    [Obsolete("The System.Workflow.* types are deprecated. Instead, please use the new types from System.Activities.*")]
    public class WorkflowRuntime : IServiceProvider, IDisposable
    {
        #region Private members

        internal const string DefaultName = "WorkflowRuntime";

        // Instances aggregation
        private FanOutOnKeyDictionary<Guid, WorkflowExecutor> workflowExecutors;

        private WorkflowDefinitionDispenser _workflowDefinitionDispenser;
        private PerformanceCounterManager _performanceCounterManager;
        private bool _disposed = false;

        //This is Instance Specific Flag to mark the given instance of
        //Instance Service is started or not.
        private bool isInstanceStarted;

        private DebugController debugController;
        private object _servicesLock = new object(); // protects integrity or the services collection
        private object _startStopLock = new object(); // serializes calls to start and stop
        private Guid _uid = Guid.NewGuid();
        private BooleanSwitch disableWorkflowDebugging = new BooleanSwitch("DisableWorkflowDebugging", "Disables workflow debugging in host");
        private TrackingListenerFactory _trackingFactory = new TrackingListenerFactory();

        private static Dictionary<Guid, WeakReference> _runtimes = new Dictionary<Guid, WeakReference>();
        private static object _runtimesLock = new object(); // protects the collection of runtime objects

        #endregion

        #region Constructors and Configure methods

        static WorkflowRuntime()
        {
            // listen to activity definition resolve events
            Activity.ActivityResolve += OnActivityDefinitionResolve;
            Activity.WorkflowChangeActionsResolve += OnWorkflowChangeActionsResolve;
        }

        public WorkflowRuntime()
        {
            this.PrivateInitialize(null);
        }

        // Loads runtime settings from the named configuration section.
        public WorkflowRuntime(string configSectionName)
        {
            if (configSectionName == null)
                throw new ArgumentNullException("configSectionName");

            WorkflowRuntimeSection settings = ConfigurationManager.GetSection(configSectionName) as
WorkflowRuntimeSection;
            if (settings == null)
                throw new ArgumentException(String.Format(CultureInfo.CurrentCulture, ExecutionStringManager.ConfigurationSectionNotFound, configSectionName), "configSectionName");

            this.PrivateInitialize(settings);
        }

        /// <summary> Creates a WorkflowRuntime from settings. </summary>
        /// <param name="settings"> The settings for this container </param>
        public WorkflowRuntime(WorkflowRuntimeSection settings)
        {
            if (settings == null)
                throw new ArgumentNullException("settings");

            this.PrivateInitialize(settings);
        }

        // Guard used by public entry points; throws once Dispose() has run.
        private void VerifyInternalState()
        {
            if (_disposed)
                throw new ObjectDisposedException("WorkflowRuntime");
        }

        /// <summary>Initializes this container with the provided settings.</summary>
        /// <param name="settings">Optional configuration; null selects defaults.</param>
        private void PrivateInitialize(WorkflowRuntimeSection settings)
        {
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime: Created WorkflowRuntime {0}", _uid);

            _workflowDefinitionDispenser = new WorkflowDefinitionDispenser(this, (settings != null) ? settings.ValidateOnCreate : true, (settings != null) ? settings.WorkflowDefinitionCacheCapacity : 0);

            // Bucket count scales with processor count to spread executor lookups.
            workflowExecutors = new FanOutOnKeyDictionary<Guid, WorkflowExecutor>((Environment.ProcessorCount * 4) - 1);
            _name = DefaultName;

            if (settings == null || settings.EnablePerformanceCounters) // on by default
                this.PerformanceCounterManager = new PerformanceCounterManager();

            if (settings != null)
            {
                _name = settings.Name;
                _configurationParameters = settings.CommonParameters;
                foreach (WorkflowRuntimeServiceElement service in settings.Services)
                {
                    AddServiceFromSettings(service);
                }
            }

            // create controller
            if (!disableWorkflowDebugging.Enabled)
            {
                DebugController.InitializeProcessSecurity();
                this.debugController = new DebugController(this, _name);
            }

            // Track this runtime in the process-wide table (weakly referenced).
            lock (_runtimesLock)
            {
                if (!_runtimes.ContainsKey(_uid))
                    _runtimes.Add(_uid, new WeakReference(this));
            }
        }

        public void Dispose()
        {
            lock (_startStopLock)
            {
                if (!_disposed)
                {
                    if (this.debugController != null)
                    {
                        this.debugController.Close();
                    }
                    _workflowDefinitionDispenser.Dispose();
                    _startedServices = false;
                    _disposed = true;
                }
            }
            lock (_runtimesLock)
            {
                //
                // Clean up our weakref entries
                if (_runtimes.ContainsKey(_uid))
                    _runtimes.Remove(_uid);
            }
        }

        internal bool IsZombie
        {
            get { return this._disposed; }
        }

        #endregion

        #region Workflow accessor methods

        // Returns the WorkflowInstance for an id, loading the executor on demand.
        public WorkflowInstance GetWorkflow(Guid instanceId)
        {
            if (instanceId == Guid.Empty)
                throw new ArgumentException(String.Format(CultureInfo.CurrentCulture, ExecutionStringManager.CantBeEmptyGuid, "instanceId"));
            VerifyInternalState();

            if (!IsStarted)
                throw new InvalidOperationException(ExecutionStringManager.WorkflowRuntimeNotStarted);

            WorkflowExecutor executor = Load(instanceId, null, null);
            return executor.WorkflowInstance;
        }

        // Snapshot of all currently in-memory workflow instances.
        public ReadOnlyCollection<WorkflowInstance> GetLoadedWorkflows()
        {
            VerifyInternalState();

            List<WorkflowInstance> lSchedules = new List<WorkflowInstance>();
            foreach (WorkflowExecutor executor in GetWorkflowExecutors())
            {
                lSchedules.Add(executor.WorkflowInstance);
            }
            return lSchedules.AsReadOnly();
        }

        internal WorkflowDefinitionDispenser DefinitionDispenser
        {
            get { return _workflowDefinitionDispenser; }
        }

        #endregion

        #region Service accessors

        internal List<TrackingService> TrackingServices
        {
            get
            {
                List<TrackingService> retval = new List<TrackingService>();
                foreach (TrackingService trackingService in GetAllServices(typeof(TrackingService)))
                {
                    retval.Add(trackingService);
                }
                return retval;
            }
        }

        internal WorkflowSchedulerService SchedulerService
        {
            get { return GetService<WorkflowSchedulerService>(); }
        }

        internal WorkflowCommitWorkBatchService TransactionService
        {
            get { return (WorkflowCommitWorkBatchService)GetService(typeof(WorkflowCommitWorkBatchService)); }
        }

        internal WorkflowPersistenceService WorkflowPersistenceService
        {
            get { return (WorkflowPersistenceService)GetService(typeof(WorkflowPersistenceService)); }
        }

        internal System.Workflow.Runtime.PerformanceCounterManager PerformanceCounterManager
        {
            get { return _performanceCounterManager; }
            private set { _performanceCounterManager = value; }
        }

        internal TrackingListenerFactory TrackingListenerFactory
        {
            get { return _trackingFactory; }
        }

        #endregion

        #region Workflow creation methods

        internal Activity GetWorkflowDefinition(Type workflowType)
        {
            if (workflowType == null)
                throw new ArgumentNullException("workflowType");
            VerifyInternalState();

            return _workflowDefinitionDispenser.GetRootActivity(workflowType, false, true);
        }

        // Creates a new workflow instance from a CLR type with a fresh id.
        public WorkflowInstance CreateWorkflow(Type workflowType)
        {
            if (workflowType == null)
                throw new ArgumentNullException("workflowType");
            if (!typeof(Activity).IsAssignableFrom(workflowType))
                throw new ArgumentException(ExecutionStringManager.TypeMustImplementRootActivity, "workflowType");
            VerifyInternalState();

            return InternalCreateWorkflow(new CreationContext(workflowType, null, null, null), Guid.NewGuid());
        }

        public WorkflowInstance CreateWorkflow(Type workflowType, Dictionary<string, object> namedArgumentValues)
        {
            return CreateWorkflow(workflowType, namedArgumentValues, Guid.NewGuid());
        }

        // Creates a new workflow instance from a XAML/XOML definition reader.
        public WorkflowInstance CreateWorkflow(XmlReader workflowDefinitionReader)
        {
            if (workflowDefinitionReader == null)
                throw new ArgumentNullException("workflowDefinitionReader");
            VerifyInternalState();

            return CreateWorkflow(workflowDefinitionReader, null, null);
        }

        public WorkflowInstance CreateWorkflow(XmlReader workflowDefinitionReader, XmlReader rulesReader, Dictionary<string, object> namedArgumentValues)
        {
            return CreateWorkflow(workflowDefinitionReader, rulesReader, namedArgumentValues, Guid.NewGuid());
        }

        public WorkflowInstance CreateWorkflow(Type workflowType, Dictionary<string, object> namedArgumentValues, Guid instanceId)
        {
            if (workflowType == null)
                throw new ArgumentNullException("workflowType");
            if (!typeof(Activity).IsAssignableFrom(workflowType))
                throw new ArgumentException(ExecutionStringManager.TypeMustImplementRootActivity, "workflowType");
            VerifyInternalState();

            return InternalCreateWorkflow(new CreationContext(workflowType, null, null, namedArgumentValues), instanceId);
        }

        public WorkflowInstance CreateWorkflow(XmlReader workflowDefinitionReader, XmlReader rulesReader, Dictionary<string, object> namedArgumentValues, Guid instanceId)
        {
            if (workflowDefinitionReader == null)
                throw new ArgumentNullException("workflowDefinitionReader");
            VerifyInternalState();

            CreationContext context = new CreationContext(workflowDefinitionReader, rulesReader, namedArgumentValues);
            return InternalCreateWorkflow(context, instanceId);
        }

        // Common creation path: starts the runtime if needed and fails if the id already exists.
        internal WorkflowInstance InternalCreateWorkflow(CreationContext context, Guid instanceId)
        {
            using (new WorkflowTraceTransfer(instanceId))
            {
                VerifyInternalState();

                if (!IsStarted)
                    this.StartRuntime();

                WorkflowExecutor executor = GetWorkflowExecutor(instanceId, context);
                if (!context.Created)
                {
                    throw new InvalidOperationException(ExecutionStringManager.WorkflowWithIdAlreadyExists);
                }
                return executor.WorkflowInstance;
            }
        }

        internal sealed class WorkflowExecutorInitializingEventArgs : EventArgs
        {
            private bool _loading = false;

            internal
WorkflowExecutorInitializingEventArgs(bool loading)
            {
                _loading = loading;
            }

            internal bool Loading
            {
                get { return _loading; }
            }
        }

        // register for idle events here
        /// <summary>
        /// Raised whenever a WorkflowExecutor is constructed. This signals either a new instance
        /// or a loading (args) and gives listening components a chance to set up subscriptions.
        /// </summary>
        internal event EventHandler<WorkflowExecutorInitializingEventArgs> WorkflowExecutorInitializing;

        public event EventHandler<WorkflowEventArgs> WorkflowIdled;
        public event EventHandler<WorkflowEventArgs> WorkflowCreated;
        public event EventHandler<WorkflowEventArgs> WorkflowStarted;
        public event EventHandler<WorkflowEventArgs> WorkflowLoaded;
        public event EventHandler<WorkflowEventArgs> WorkflowUnloaded;
        public event EventHandler<WorkflowCompletedEventArgs> WorkflowCompleted;
        public event EventHandler<WorkflowTerminatedEventArgs> WorkflowTerminated;
        public event EventHandler<WorkflowEventArgs> WorkflowAborted;
        public event EventHandler<WorkflowSuspendedEventArgs> WorkflowSuspended;
        public event EventHandler<WorkflowEventArgs> WorkflowPersisted;
        public event EventHandler<WorkflowEventArgs> WorkflowResumed;
        internal event EventHandler<WorkflowEventArgs> WorkflowDynamicallyChanged;
        public event EventHandler<ServicesExceptionNotHandledEventArgs> ServicesExceptionNotHandled;
        public event EventHandler<WorkflowRuntimeEventArgs> Stopped;
        public event EventHandler<WorkflowRuntimeEventArgs> Started;

        internal WorkflowExecutor Load(WorkflowInstance instance)
        {
            return Load(instance.InstanceId, null, instance);
        }

        // Returns the executor for 'key', reusing a valid cached one or creating/loading a
        // replacement under the key's bucket lock. A non-null activation context means
        // "create new"; otherwise state is reloaded via the persistence service.
        internal WorkflowExecutor Load(Guid key, CreationContext context, WorkflowInstance workflowInstance)
        {
            WorkflowExecutor executor;

            Dictionary<Guid, WorkflowExecutor> executors = workflowExecutors[key];
            lock (executors)
            {
                if (!IsStarted)
                    throw new InvalidOperationException(ExecutionStringManager.WorkflowRuntimeNotStarted);

                if (executors.TryGetValue(key, out executor))
                {
                    if (executor.IsInstanceValid)
                    {
                        return executor;
                    }
                }

                // If we get here, 'executor' is either null or unusable.
                // Before grabbing the lock, allocate a resource as we
                // may need to insert a new resource.
                executor = new WorkflowExecutor(key);
                if (workflowInstance == null)
                    workflowInstance = new WorkflowInstance(key, this);
                InitializeExecutor(key, context, executor, workflowInstance);

                try
                {
                    // If we get here, 'executor' is either null or has not been replaced.
                    // If it has not been replaced, we know that it is unusable
                    WorkflowTrace.Host.TraceInformation("WorkflowRuntime:: replacing unusable executor for key {0} with new one (hc: {1})", key, executor.GetHashCode());
                    executors[key] = executor;
                    RegisterExecutor(context != null && context.IsActivation, executor);
                }
                catch
                {
                    // Roll back the table entry if registration failed, but only if it is
                    // still the executor we inserted.
                    WorkflowExecutor currentRes;
                    if (executors.TryGetValue(key, out currentRes))
                    {
                        if (Object.Equals(executor, currentRes))
                        {
                            executors.Remove(key);
                        }
                    }
                    throw;
                }
            }
            // Deliberately outside the bucket lock.
            executor.Registered(context != null && context.IsActivation);
            return executor;
        }

        // this should be called under scheduler lock
        // todo assert this condition
        internal void ReplaceWorkflowExecutor(Guid instanceId, WorkflowExecutor oldWorkflowExecutor, WorkflowExecutor newWorkflowExecutor)
        {
            Dictionary<Guid, WorkflowExecutor> executors = workflowExecutors[instanceId];
            lock (executors)
            {
                oldWorkflowExecutor.IsInstanceValid = false;
                WorkflowTrace.Host.TraceInformation("WorkflowRuntime:: replacing old executor for key {0} with new one", instanceId);
                executors[instanceId] = newWorkflowExecutor;
            }
        }

        // Builds the root activity for a new activation (from type or XOML text) or
        // reloads persisted state, then initializes/reloads the executor with it.
        private Activity InitializeExecutor(Guid instanceId, CreationContext context, WorkflowExecutor executor, WorkflowInstance workflowInstance)
        {
            Activity rootActivity = null;
            if (context != null && context.IsActivation)
            {
                Activity workflowDefinition = null;
                string xomlText = null;
                string rulesText = null;

                if (context.Type != null)
                {
                    workflowDefinition = _workflowDefinitionDispenser.GetRootActivity(context.Type, false, true);
                    //spawn a new instance
                    rootActivity = _workflowDefinitionDispenser.GetRootActivity(context.Type, true, false);
                }
                else if (context.XomlReader != null)
                {
                    try
                    {
                        // Skip to the first start element before capturing the outer XML.
                        context.XomlReader.MoveToContent();
                        while (!context.XomlReader.EOF && !context.XomlReader.IsStartElement())
                            context.XomlReader.Read();
                        xomlText = context.XomlReader.ReadOuterXml();
                        if (context.RulesReader != null)
                        {
                            context.RulesReader.MoveToContent();
                            while (!context.RulesReader.EOF && !context.RulesReader.IsStartElement())
                                context.RulesReader.Read();
                            rulesText = context.RulesReader.ReadOuterXml();
                        }
                    }
                    catch (Exception e)
                    {
                        throw new ArgumentException(ExecutionStringManager.InvalidXAML, e);
                    }
                    if (!string.IsNullOrEmpty(xomlText))
                    {
                        workflowDefinition = _workflowDefinitionDispenser.GetRootActivity(xomlText, rulesText, false, true);
                        //spawn a new instance
                        rootActivity = _workflowDefinitionDispenser.GetRootActivity(xomlText, rulesText, true, false);
                    }
                    else
                        throw new ArgumentException(ExecutionStringManager.InvalidXAML);
                }
                rootActivity.SetValue(Activity.WorkflowDefinitionProperty, workflowDefinition);
                WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "Creating instance " + instanceId.ToString());
                context.Created = true;
                executor.Initialize(rootActivity, context.InvokerExecutor, context.InvokeActivityID, instanceId, context.Args, workflowInstance);
            }
            else
            {
                if (this.WorkflowPersistenceService == null)
                {
                    string errMsg = String.Format(CultureInfo.CurrentCulture, ExecutionStringManager.MissingPersistenceService, instanceId);
                    WorkflowTrace.Runtime.TraceEvent(TraceEventType.Error, 0, errMsg);
                    throw new InvalidOperationException(errMsg);
                }

                // get the state from the persistenceService
                using (RuntimeEnvironment runtimeEnv = new RuntimeEnvironment(this))
                {
                    rootActivity = this.WorkflowPersistenceService.LoadWorkflowInstanceState(instanceId);
                }

                if (rootActivity == null)
                {
                    throw new InvalidOperationException(string.Format(Thread.CurrentThread.CurrentCulture, ExecutionStringManager.InstanceNotFound, instanceId));
                }

                executor.Reload(rootActivity,
workflowInstance);
            }
            return rootActivity;
        }

        // First-time registration for activations; re-registration for reloaded instances.
        private void RegisterExecutor(bool isActivation, WorkflowExecutor executor)
        {
            if (isActivation)
            {
                executor.RegisterWithRuntime(this);
            }
            else
            {
                executor.ReRegisterWithRuntime(this);
            }
        }

        /// <summary>
        /// On receipt of this call unload the instance
        /// This will be invoked by the runtime executor
        /// </summary>
        /// <param name="executor">The executor whose instance went idle</param>
        internal void OnIdle(WorkflowExecutor executor)
        {
            // raise the OnIdle event , typically handled
            // by the hosting environment
            try
            {
                WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "Received OnIdle Event for instance, {0}", executor.InstanceId);
                WorkflowInstance scheduleInstance = executor.WorkflowInstance;
                if (WorkflowIdled != null)
                {
                    WorkflowIdled(this, new WorkflowEventArgs(scheduleInstance));
                }
            }
            catch (Exception)
            {
                //
                WorkflowTrace.Host.TraceEvent(TraceEventType.Warning, 0, "OnIdle Event for instance, {0} threw an exception", executor.InstanceId);
                throw;
            }
        }

        // Removes the executor from the runtime's table and flushes all trace listeners.
        private void _unRegister(WorkflowExecutor executor)
        {
            TryRemoveWorkflowExecutor(executor.InstanceId, executor);
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime::_removeInstance, instance:{0}, hc:{1}", executor.InstanceId, executor.GetHashCode());

            // be sure to flush all traces
            WorkflowTrace.Runtime.Flush();
            WorkflowTrace.Tracking.Flush();
            WorkflowTrace.Host.Flush();
        }

        // Thin tracing wrapper around Load(); rethrows all failures after logging.
        private WorkflowExecutor GetWorkflowExecutor(Guid instanceId, CreationContext context)
        {
            try
            {
                WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime dispensing resource, instanceId: {0}", instanceId);
                WorkflowExecutor executor = this.Load(instanceId, context, null);
                WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime dispensing resource instanceId: {0}, hc: {1}", instanceId, executor.GetHashCode());
                return executor;
            }
            catch (OutOfMemoryException)
            {
                WorkflowTrace.Host.TraceEvent(TraceEventType.Error, 0, "WorkflowRuntime dispensing resource, can't create service due to OOM!(1), instance, {0}", instanceId);
                throw;
            }
            catch (Exception e)
            {
                WorkflowTrace.Host.TraceEvent(TraceEventType.Error, 0, "WorkflowRuntime dispensing resource, can't create service due to unexpected exception!(2), instance, {0}, exception, {1}", instanceId, e);
                throw;
            }
        }

        #endregion

        #region Workflow event handlers

        // Completion unregisters the executor even if a subscriber throws (finally).
        internal void OnScheduleCompleted(WorkflowExecutor schedule, WorkflowCompletedEventArgs args)
        {
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime:ScheduleCompleted event raised for instance Id {0}", schedule.InstanceId);
            Debug.Assert(schedule != null);
            try
            {
                //Notify Subscribers
                if (WorkflowCompleted != null)
                    WorkflowCompleted(this, args);
            }
            catch (Exception)
            {
                WorkflowTrace.Host.TraceEvent(TraceEventType.Error, 0, "WorkflowRuntime:OnScheduleCompleted Event threw an exception.");
                throw;
            }
            finally
            {
                _unRegister(schedule);
            }
        }

        internal void OnScheduleSuspended(WorkflowExecutor schedule, WorkflowSuspendedEventArgs args)
        {
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime:ScheduleSuspension event raised for instance Id {0}", schedule.InstanceId);
            try
            {
                if (WorkflowSuspended != null)
                    WorkflowSuspended(this, args);
            }
            catch (Exception)
            {
                WorkflowTrace.Host.TraceEvent(TraceEventType.Error, 0, "WorkflowRuntime:OnScheduleSuspended Event threw an exception.");
                throw;
            }
        }

        // Termination, like completion, always unregisters the executor.
        internal void OnScheduleTerminated(WorkflowExecutor schedule, WorkflowTerminatedEventArgs args)
        {
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime:ScheduleTermination event raised for instance Id {0}", schedule.InstanceId);
            try
            {
                if (WorkflowTerminated != null)
                    WorkflowTerminated(this, args);
            }
            catch (Exception)
            {
                WorkflowTrace.Host.TraceEvent(TraceEventType.Error, 0, "WorkflowRuntime:OnScheduleTerminated Event threw an exception.");
                throw;
            }
            finally
            {
                _unRegister(schedule);
            }
        }

        internal void OnScheduleLoaded(WorkflowExecutor schedule)
        {
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime:ScheduleLoaded event raised for instance Id {0}", schedule.InstanceId);
            _OnServiceEvent(schedule, false, WorkflowLoaded);
        }

        internal void OnScheduleAborted(WorkflowExecutor schedule)
        {
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime:ScheduleAborted event raised for instance Id {0}", schedule.InstanceId);
            _OnServiceEvent(schedule, true, WorkflowAborted);
        }

        internal void OnScheduleUnloaded(WorkflowExecutor schedule)
        {
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime:ScheduleUnloaded event raised for instance Id {0}", schedule.InstanceId);
            _OnServiceEvent(schedule, true, WorkflowUnloaded);
        }

        internal void OnScheduleResumed(WorkflowExecutor schedule)
        {
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime:ScheduleResumed event raised for instance Id {0}", schedule.InstanceId);
            _OnServiceEvent(schedule, false, WorkflowResumed);
        }

        internal void OnScheduleDynamicallyChanged(WorkflowExecutor schedule)
        {
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime:ScheduleDynamicallyChanged event raised for instance Id {0}", schedule.InstanceId);
            _OnServiceEvent(schedule, false, WorkflowDynamicallyChanged);
        }

        internal void OnSchedulePersisted(WorkflowExecutor schedule)
        {
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime:SchedulePersisted event raised for instance Id {0}", schedule.InstanceId);
            _OnServiceEvent(schedule, false, WorkflowPersisted);
        }

        // Shared dispatch helper for the OnSchedule* handlers above; optionally
        // unregisters the executor after notifying subscribers.
        private void _OnServiceEvent(WorkflowExecutor sched, bool unregister, EventHandler<WorkflowEventArgs> handler)
        {
            Debug.Assert(sched != null);
            try
            {
                WorkflowEventArgs args = new WorkflowEventArgs(sched.WorkflowInstance);
                //Notify Subscribers
                if (handler != null)
                    handler(this, args);
            }
            catch (Exception)
            {
                WorkflowTrace.Host.TraceEvent(TraceEventType.Error, 0, "WorkflowRuntime:OnService Event threw an exception.");
                throw;
            }
            finally
            {
                if (unregister)
                {
                    _unRegister(sched);
                }
            }
        }

        internal void
RaiseServicesExceptionNotHandledEvent(Exception exception, Guid instanceId)
{
    VerifyInternalState();
    WorkflowTrace.Host.TraceEvent(TraceEventType.Critical, 0, "WorkflowRuntime:ServicesExceptionNotHandled event raised for instance Id {0} {1}", instanceId, exception.ToString());
    EventHandler<ServicesExceptionNotHandledEventArgs> handler = ServicesExceptionNotHandled;
    if (handler != null)
        handler(this, new ServicesExceptionNotHandledEventArgs(exception, instanceId));
}

#endregion

#region More service accessors

// Registry of services keyed by every type/interface each service implements.
private Dictionary<Type, List<object>> _services = new Dictionary<Type, List<object>>();
private string _name;
private bool _startedServices;
private NameValueConfigurationCollection _configurationParameters;
private Dictionary<string, Type> _trackingServiceReplacement;

/// <summary> The name of this container. Cannot be changed once the runtime
/// has been started. </summary>
public string Name
{
    get
    {
        return _name;
    }
    set
    {
        lock (_startStopLock)
        {
            if (_startedServices)
                throw new InvalidOperationException(ExecutionStringManager.CantChangeNameAfterStart);
            VerifyInternalState();
            _name = value;
        }
    }
}

/// <summary>
/// Returns the configuration parameters that can be shared by all services
/// </summary>
internal NameValueConfigurationCollection CommonParameters
{
    get
    {
        return _configurationParameters;
    }
}

// A previous tracking service whose type has the string as its AssemblyQualifiedName
// will be replaced by the current tracking service of the Type. This dictionary is
// needed in order to replace the previous tracking service used by a persisted workflow
// because what is persisted is the one-way hashed string of that AssemblyQualifiedName.
internal Dictionary<string, Type> TrackingServiceReplacement
{
    get
    {
        return _trackingServiceReplacement;
    }
}

/// <summary> Adds a service to this container.
/// </summary>
/// <param name="service"> The service to add </param>
/// <exception cref="InvalidOperationException"/>
public void AddService(object service)
{
    if (service == null)
        throw new ArgumentNullException("service");
    VerifyInternalState();
    using (new WorkflowRuntime.EventContext())
    {
        lock (_startStopLock)
        {
            AddServiceImpl(service);
        }
    }
}

/// <summary>
/// Registers the service under its concrete type, every base type, and every
/// implemented interface; starts it immediately when the runtime is already
/// running. Core services cannot be added after start.
/// </summary>
private void AddServiceImpl(object service)
{
    //ASSERT: _startStopLock is held
    lock (_servicesLock)
    {
        if (GetAllServices(service.GetType()).Contains(service))
            throw new InvalidOperationException(ExecutionStringManager.CantAddServiceTwice);
        if (_startedServices && IsCoreService(service))
            throw new InvalidOperationException(ExecutionStringManager.CantChangeImmutableContainer);

        Type basetype = service.GetType();
        if (basetype.IsSubclassOf(typeof(TrackingService)))
        {
            AddTrackingServiceReplacementInfo(basetype);
        }

        // Index the service under every interface it implements.
        foreach (Type t in basetype.GetInterfaces())
        {
            List<object> al;
            if (_services.ContainsKey(t))
            {
                al = _services[t];
            }
            else
            {
                al = new List<object>();
                _services.Add(t, al);
            }
            al.Add(service);
        }

        // And under its whole inheritance chain, up to and including object.
        while (basetype != null)
        {
            List<object> al = null;
            if (_services.ContainsKey(basetype))
            {
                al = _services[basetype];
            }
            else
            {
                al = new List<object>();
                _services.Add(basetype, al);
            }
            al.Add(service);
            basetype = basetype.BaseType;
        }
    }

    // Runtime-aware services get wired to this runtime and started if we
    // are already running.
    WorkflowRuntimeService wrs = service as WorkflowRuntimeService;
    if (wrs != null)
    {
        wrs.SetRuntime(this);
        if (_startedServices)
            wrs.Start();
    }
}

/// <summary> Removes a service.
/// </summary>
/// <param name="service"> The service to remove </param>
public void RemoveService(object service)
{
    if (service == null)
        throw new ArgumentNullException("service");
    VerifyInternalState();
    using (new WorkflowRuntime.EventContext())
    {
        lock (_startStopLock)
        {
            lock (_servicesLock)
            {
                if (_startedServices && IsCoreService(service))
                    throw new InvalidOperationException(ExecutionStringManager.CantChangeImmutableContainer);
                if (!GetAllServices(service.GetType()).Contains(service))
                    throw new InvalidOperationException(ExecutionStringManager.CantRemoveServiceNotContained);

                Type type = service.GetType();
                if (type.IsSubclassOf(typeof(TrackingService)))
                {
                    RemoveTrackingServiceReplacementInfo(type);
                }

                // Remove the instance from every type bucket it was indexed under.
                foreach (List<object> al in _services.Values)
                {
                    if (al.Contains(service))
                    {
                        al.Remove(service);
                    }
                }
            }

            // Stop runtime-aware services before detaching them from this runtime.
            WorkflowRuntimeService wrs = service as WorkflowRuntimeService;
            if (wrs != null)
            {
                if (_startedServices)
                    wrs.Stop();
                wrs.SetRuntime(null);
            }
        }
    }
}

/// <summary>
/// Records, for each PreviousTrackingServiceAttribute on the type, that the
/// named (assembly-qualified) predecessor should be replaced by this type.
/// </summary>
private void AddTrackingServiceReplacementInfo(Type type)
{
    Debug.Assert(type.IsSubclassOf(typeof(TrackingService)), "Argument should be a subtype of TrackingService");
    object[] attributes = type.GetCustomAttributes(typeof(PreviousTrackingServiceAttribute), true);
    if (attributes != null && attributes.Length > 0)
    {
        foreach (object attribute in attributes)
        {
            if (_trackingServiceReplacement == null)
            {
                // Lazily created: most runtimes never replace a tracking service.
                _trackingServiceReplacement = new Dictionary<string, Type>();
            }
            _trackingServiceReplacement.Add(((PreviousTrackingServiceAttribute)attribute).AssemblyQualifiedName, type);
        }
    }
}

/// <summary>
/// Undoes AddTrackingServiceReplacementInfo for each
/// PreviousTrackingServiceAttribute on the type.
/// </summary>
private void RemoveTrackingServiceReplacementInfo(Type type)
{
    Debug.Assert(type.IsSubclassOf(typeof(TrackingService)), "Argument should be a subtype of TrackingService");
    object[] attributes = type.GetCustomAttributes(typeof(PreviousTrackingServiceAttribute), true);
    if (attributes != null && attributes.Length > 0)
    {
        foreach (object attribute in attributes)
        {
            string previousTrackingService = ((PreviousTrackingServiceAttribute)attribute).AssemblyQualifiedName;
            if (_trackingServiceReplacement.ContainsKey(previousTrackingService))
            {
                _trackingServiceReplacement.Remove(previousTrackingService);
            }
        }
    }
}

// Core services are immutable once the runtime has started (see Add/RemoveService).
private bool IsCoreService(object service)
{
    return service is WorkflowSchedulerService || service is WorkflowPersistenceService || service is TrackingService || service is WorkflowCommitWorkBatchService || service is WorkflowLoaderService;
}

/// <summary> Returns a collection of all services that implement the give type. </summary>
/// <param name="serviceType"> The type to look for </param>
/// <returns> A collection of zero or more services </returns>
public ReadOnlyCollection<object> GetAllServices(Type serviceType)
{
    if (serviceType == null)
        throw new ArgumentNullException("serviceType");
    VerifyInternalState();
    lock (_servicesLock)
    {
        // Copy under the lock so the returned collection is a stable snapshot.
        List<object> retval = new List<object>();
        if (_services.ContainsKey(serviceType))
            retval.AddRange(_services[serviceType]);
        return new ReadOnlyCollection<object>(retval);
    }
}

/// <summary> Generic convenience wrapper over GetService(Type). </summary>
public T GetService<T>()
{
    VerifyInternalState();
    return (T)GetService(typeof(T));
}

/// <summary> Generic convenience wrapper over GetAllServices(Type). </summary>
public ReadOnlyCollection<T> GetAllServices<T>()
{
    VerifyInternalState();
    List<T> l = new List<T>();
    foreach (T t in GetAllServices(typeof(T)))
        l.Add(t);
    return new ReadOnlyCollection<T>(l);
}

/// <summary> Looks for a service of the given type.
/// </summary>
/// <param name="serviceType"> The type of object to find </param>
/// <returns> An object of the requested type, or null</returns>
public object GetService(Type serviceType)
{
    if (serviceType == null)
        throw new ArgumentNullException("serviceType");
    VerifyInternalState();
    lock (_servicesLock)
    {
        object retval = null;
        if (_services.ContainsKey(serviceType))
        {
            List<object> al = _services[serviceType];
            // Ambiguity is an error: callers wanting multiples must use GetAllServices.
            if (al.Count > 1)
                throw new InvalidOperationException(String.Format(CultureInfo.CurrentCulture, ExecutionStringManager.MoreThanOneService, serviceType.ToString()));
            if (al.Count == 1)
                retval = al[0];
        }
        return retval;
    }
}

#endregion

#region Other methods

/// <summary> Raises the Starting event </summary>
/// <remarks>
/// Installs default core services where none were configured, validates the
/// final configuration, starts all runtime services, and raises Started.
/// </remarks>
public void StartRuntime()
{
    WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime: Starting WorkflowRuntime {0}", _uid);
    lock (_startStopLock)
    {
        VerifyInternalState();
        if (!_startedServices)
        {
            // Fill in defaults for any missing core service.
            if (GetAllServices(typeof(WorkflowCommitWorkBatchService)).Count == 0)
                AddServiceImpl(new DefaultWorkflowCommitWorkBatchService());
            if (GetAllServices(typeof(WorkflowSchedulerService)).Count == 0)
                AddServiceImpl(new DefaultWorkflowSchedulerService());
            if (GetAllServices(typeof(WorkflowLoaderService)).Count == 0)
                AddServiceImpl(new DefaultWorkflowLoaderService());

            // Exactly one of each core service is allowed (persistence is optional).
            if (GetAllServices(typeof(WorkflowCommitWorkBatchService)).Count != 1)
                throw new InvalidOperationException(String.Format( CultureInfo.CurrentCulture, ExecutionStringManager.InvalidWorkflowRuntimeConfiguration, typeof(WorkflowCommitWorkBatchService).Name));
            if (GetAllServices(typeof(WorkflowSchedulerService)).Count != 1)
                throw new InvalidOperationException(String.Format( CultureInfo.CurrentCulture, ExecutionStringManager.InvalidWorkflowRuntimeConfiguration, typeof(WorkflowSchedulerService).Name));
            if (GetAllServices(typeof(WorkflowLoaderService)).Count != 1)
                throw new InvalidOperationException(String.Format( CultureInfo.CurrentCulture, ExecutionStringManager.InvalidWorkflowRuntimeConfiguration, typeof(WorkflowLoaderService).Name));
            if (GetAllServices(typeof(WorkflowPersistenceService)).Count > 1)
                throw new InvalidOperationException(String.Format( CultureInfo.CurrentCulture, ExecutionStringManager.InvalidWorkflowRuntimeConfiguration, typeof(WorkflowPersistenceService).Name));
            if (GetAllServices(typeof(WorkflowTimerService)).Count == 0)
            {
                AddServiceImpl(new WorkflowTimerService());
            }

            //Mark this instance has started
            isInstanceStarted = true;

            //Set up static tracking structures
            _trackingFactory.Initialize(this);
            if (this.PerformanceCounterManager != null)
            {
                this.PerformanceCounterManager.Initialize(this);
                this.PerformanceCounterManager.SetInstanceName(this.Name);
            }

            foreach (WorkflowRuntimeService s in GetAllServices<WorkflowRuntimeService>())
            {
                s.Start();
            }
            _startedServices = true;

            using (new WorkflowRuntime.EventContext())
            {
                EventHandler<WorkflowRuntimeEventArgs> ss = Started;
                if (ss != null)
                    ss(this, new WorkflowRuntimeEventArgs(isInstanceStarted));
            }
        }
    }
    WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime: Started WorkflowRuntime {0}", _uid);
}

/// <summary>
/// Forwards a dynamic-update commit notification from an executor to the
/// DynamicallyChanged raiser. Sender must be a WorkflowExecutor.
/// </summary>
void DynamicUpdateCommit(object sender, WorkflowExecutor.DynamicUpdateEventArgs e)
{
    if (null == sender)
        throw new ArgumentNullException("sender");
    if (!typeof(WorkflowExecutor).IsInstanceOfType(sender))
        throw new ArgumentException(String.Format(CultureInfo.CurrentCulture, ExecutionStringManager.InvalidArgumentType, "sender", typeof(WorkflowExecutor).ToString()));
    WorkflowExecutor exec = (WorkflowExecutor)sender;
    OnScheduleDynamicallyChanged(exec);
}

/// <summary>
/// Called whenever a WorkflowExecutor is created or loaded; notifies
/// interested components and subscribes this runtime to the executor's
/// execution events.
/// </summary>
internal void WorkflowExecutorCreated(WorkflowExecutor workflowExecutor, bool loaded)
{
    //
    // Fire the event for all other components that need to register for notification of WorkflowExecutor events
    EventHandler<WorkflowExecutorInitializingEventArgs> localEvent = WorkflowExecutorInitializing;
    if (null != localEvent)
        localEvent(workflowExecutor, new WorkflowExecutorInitializingEventArgs(loaded));
    workflowExecutor.WorkflowExecutionEvent += new EventHandler<WorkflowExecutor.WorkflowExecutionEventArgs>(WorkflowExecutionEvent);
}

/// <summary>
/// Central dispatcher that maps executor-internal events to the runtime's
/// public workflow events.
/// </summary>
void WorkflowExecutionEvent(object sender, WorkflowExecutor.WorkflowExecutionEventArgs e)
{
    if (null == sender)
        throw new ArgumentNullException("sender");
    if (!typeof(WorkflowExecutor).IsInstanceOfType(sender))
        throw new ArgumentException("sender");
    WorkflowExecutor exec = (WorkflowExecutor)sender;
    switch (e.EventType)
    {
        case WorkflowEventInternal.Idle:
            OnIdle(exec);
            break;
        case WorkflowEventInternal.Created:
            if (WorkflowCreated != null)
                WorkflowCreated(this, new WorkflowEventArgs(exec.WorkflowInstance));
            break;
        case WorkflowEventInternal.Started:
            if (WorkflowStarted != null)
                WorkflowStarted(this, new WorkflowEventArgs(exec.WorkflowInstance));
            break;
        case WorkflowEventInternal.Loaded:
            OnScheduleLoaded(exec);
            break;
        case WorkflowEventInternal.Unloaded:
            OnScheduleUnloaded(exec);
            break;
        case WorkflowEventInternal.Completed:
            OnScheduleCompleted(exec, CreateCompletedEventArgs(exec));
            break;
        case WorkflowEventInternal.Terminated:
            // Prefer the exception over the error string when both are available.
            WorkflowExecutor.WorkflowExecutionTerminatedEventArgs args = (WorkflowExecutor.WorkflowExecutionTerminatedEventArgs)e;
            if (null != args.Exception)
                OnScheduleTerminated(exec, new WorkflowTerminatedEventArgs(exec.WorkflowInstance, args.Exception));
            else
                OnScheduleTerminated(exec, new WorkflowTerminatedEventArgs(exec.WorkflowInstance, args.Error));
            break;
        case WorkflowEventInternal.Aborted:
            OnScheduleAborted(exec);
            break;
        case WorkflowEventInternal.Suspended:
            WorkflowExecutor.WorkflowExecutionSuspendedEventArgs sargs = (WorkflowExecutor.WorkflowExecutionSuspendedEventArgs)e;
            OnScheduleSuspended(exec, new WorkflowSuspendedEventArgs(exec.WorkflowInstance, sargs.Error));
            break;
        case WorkflowEventInternal.Persisted:
            OnSchedulePersisted(exec);
            break;
        case WorkflowEventInternal.Resumed:
            OnScheduleResumed(exec);
            break;
        case WorkflowEventInternal.DynamicChangeCommit:
            DynamicUpdateCommit(exec, (WorkflowExecutor.DynamicUpdateEventArgs)e);
            break;
        default:
            break;
    }
}

/// <summary>
/// Builds the completion args, copying each declared output parameter's value
/// from the root activity into args.OutputParameters.
/// </summary>
private WorkflowCompletedEventArgs CreateCompletedEventArgs(WorkflowExecutor exec)
{
    WorkflowCompletedEventArgs args = new WorkflowCompletedEventArgs(exec.WorkflowInstance, exec.WorkflowDefinition);
    foreach (PropertyInfo property in _workflowDefinitionDispenser.GetOutputParameters(exec.RootActivity))
        args.OutputParameters.Add(property.Name, property.GetValue(exec.RootActivity, null));
    return args;
}

private void StopServices()
{
    // Stop remaining services
    foreach (WorkflowRuntimeService s in GetAllServices<WorkflowRuntimeService>())
    {
        s.Stop();
    }
}

/// <summary> Fires the Stopping event </summary>
public void StopRuntime()
{
    VerifyInternalState();
    using (new WorkflowRuntime.EventContext())
    {
        WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime: Stopping WorkflowRuntime {0}", _uid);
        lock (_startStopLock)
        {
            if (_startedServices)
            {
                try
                {
                    isInstanceStarted = false;
                    if (this.WorkflowPersistenceService != null)
                    {
                        //
                        // GetWorkflowExecutors() takes a lock on workflowExecutors
                        // and then returns a copy of the list. As long as GetWorkflowExecutors()
                        // returns a non empty/null list we'll attempt to unload what's in it.
                        IList<WorkflowExecutor> executors = GetWorkflowExecutors();
                        while ((null != executors) && (executors.Count > 0))
                        {
                            foreach (WorkflowExecutor executor in executors)
                            {
                                if (executor.IsInstanceValid)
                                {
                                    try
                                    {
                                        WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime: Calling Unload on instance {0} executor hc {1}", executor.InstanceIdString, executor.GetHashCode());
                                        executor.Unload();
                                    }
                                    catch (ExecutorLocksHeldException)
                                    {
                                        //
                                        // This exception means that an atomic scope is ongoing
                                        // (we cannot unload/suspend during an atomic scope)
                                        // This instance will still be in the GetWorkflowExecutors list
                                        // so we'll attempt to unload it on the next outer loop
                                        // Yes, we may loop indefinitely if an atomic tx is hung
                                        // See WorkflowInstance.Unload for an example of retrying
                                        // when this exception is thrown.
                                    }
                                    catch (InvalidOperationException)
                                    {
                                        // Swallowed when the instance has already gone invalid
                                        // (nothing left to unload); otherwise it is a real failure.
                                        if (executor.IsInstanceValid)
                                        {
                                            //
                                            // Failed to stop, reset the flag
                                            isInstanceStarted = true;
                                            throw;
                                        }
                                    }
                                    catch
                                    {
                                        //
                                        // Failed to stop, reset the flag
                                        isInstanceStarted = true;
                                        throw;
                                    }
                                }
                            }
                            //
                            // Check if anything was added to the main list
                            // while we were working on the copy.
                            // This happens if a executor reverts to a checkpoint.
                            // There is the potential to loop indefinitely if
                            // an instance continually reverts.
                            executors = GetWorkflowExecutors();
                        }
                    }
                    StopServices();
                    _startedServices = false;
                    WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime: Stopped WorkflowRuntime {0}", _uid);
                    //
                    // Clean up tracking
                    _trackingFactory.Uninitialize(this);
                    if (this.PerformanceCounterManager != null)
                    {
                        this.PerformanceCounterManager.Uninitialize(this);
                    }
                    EventHandler<WorkflowRuntimeEventArgs> handler = Stopped;
                    if (handler != null)
                        handler(this, new WorkflowRuntimeEventArgs(isInstanceStarted));
                }
                catch (Exception)
                {
                    WorkflowTrace.Host.TraceEvent(TraceEventType.Error, 0, "WorkflowRuntime::StartUnload Unexpected Exception");
                    throw;
                }
                finally
                {
                    isInstanceStarted = false;
                }
            }
        }
    }
}

/// <summary> True if services have been started and not stopped </summary>
public bool IsStarted
{
    get
    {
        return _startedServices;
    }
}

/// <summary>
/// Resolves a root activity definition on behalf of the activity
/// infrastructure, using the dispenser of the resolved runtime (the event's
/// service provider, falling back to the ambient runtime).
/// </summary>
private static Activity OnActivityDefinitionResolve(object sender, ActivityResolveEventArgs e)
{
    WorkflowRuntime runtime = e.ServiceProvider as WorkflowRuntime;
    if (runtime == null)
        runtime = RuntimeEnvironment.CurrentRuntime;
    Debug.Assert(runtime != null);
    if (runtime != null)
    {
        if (e.Type != null)
            return runtime._workflowDefinitionDispenser.GetRootActivity(e.Type, e.CreateNewDefinition, e.InitializeForRuntime);
        else
            return runtime._workflowDefinitionDispenser.GetRootActivity(e.WorkflowMarkup, e.RulesMarkup, e.CreateNewDefinition, e.InitializeForRuntime);
    }
    return null;
}

/// <summary>
/// Builds a TypeProvider seeded with the root activity's assembly and every
/// referenced assembly that can be loaded (unloadable references fall back
/// to a code-base reference when one exists).
/// </summary>
internal static TypeProvider CreateTypeProvider(Activity rootActivity)
{
    TypeProvider typeProvider = new TypeProvider(null);
    Type companionType = rootActivity.GetType();
    typeProvider.SetLocalAssembly(companionType.Assembly);
    typeProvider.AddAssembly(companionType.Assembly);
    foreach (AssemblyName assemblyName in companionType.Assembly.GetReferencedAssemblies())
    {
        Assembly referencedAssembly = null;
        try
        {
            referencedAssembly = Assembly.Load(assemblyName);
            if (referencedAssembly != null)
                typeProvider.AddAssembly(referencedAssembly);
        }
        catch
        {
            // Best effort: a reference that fails to load is handled below.
        }
        if (referencedAssembly == null && assemblyName.CodeBase != null)
            typeProvider.AddAssemblyReference(assemblyName.CodeBase);
    }
    return typeProvider;
}

/// <summary>
/// Deserializes a set of workflow change actions from markup, supplying a
/// type provider from the current runtime (or one built from the sending
/// activity) to the serialization manager.
/// </summary>
private static ArrayList OnWorkflowChangeActionsResolve(object sender, WorkflowChangeActionsResolveEventArgs e)
{
    ArrayList changes = null;
    WorkflowRuntime runtime = RuntimeEnvironment.CurrentRuntime;
    Debug.Assert(runtime != null);
    if (runtime != null)
    {
        WorkflowMarkupSerializer serializer = new WorkflowMarkupSerializer();
        ServiceContainer serviceContainer = new ServiceContainer();
        ITypeProvider typeProvider = runtime.GetService<ITypeProvider>();
        if (typeProvider != null)
            serviceContainer.AddService(typeof(ITypeProvider), typeProvider);
        else if (sender is Activity)
        {
            serviceContainer.AddService(typeof(ITypeProvider), CreateTypeProvider(sender as Activity));
        }
        DesignerSerializationManager manager = new DesignerSerializationManager(serviceContainer);
        using (manager.CreateSession())
        {
            using (StringReader reader = new StringReader(e.WorkflowChangesMarkup))
            {
                using (XmlReader xmlReader = XmlReader.Create(reader))
                {
                    WorkflowMarkupSerializationManager xomlSerializationManager = new WorkflowMarkupSerializationManager(manager);
                    changes = serializer.Deserialize(xomlSerializationManager, xmlReader) as ArrayList;
                }
            }
        }
    }
    return changes;
}

/// <summary> Creates and adds a service to this container. </summary>
/// <param name="serviceSettings"> Description of the service to add.
/// </param>
private void AddServiceFromSettings(WorkflowRuntimeServiceElement serviceSettings)
{
    object service = null;
    Type t = Type.GetType(serviceSettings.Type, true);

    // Pick the richest available constructor:
    // (IServiceProvider, NameValueCollection) > (IServiceProvider) >
    // (NameValueCollection) > parameterless.
    ConstructorInfo serviceProviderAndSettingsConstructor = null;
    ConstructorInfo serviceProviderConstructor = null;
    ConstructorInfo settingsConstructor = null;
    foreach (ConstructorInfo ci in t.GetConstructors())
    {
        ParameterInfo[] pi = ci.GetParameters();
        if (pi.Length == 1)
        {
            if (typeof(IServiceProvider).IsAssignableFrom(pi[0].ParameterType))
            {
                serviceProviderConstructor = ci;
            }
            else if (typeof(NameValueCollection).IsAssignableFrom(pi[0].ParameterType))
            {
                settingsConstructor = ci;
            }
        }
        else if (pi.Length == 2)
        {
            if (typeof(IServiceProvider).IsAssignableFrom(pi[0].ParameterType) && typeof(NameValueCollection).IsAssignableFrom(pi[1].ParameterType))
            {
                serviceProviderAndSettingsConstructor = ci;
                break;
            }
        }
    }
    if (serviceProviderAndSettingsConstructor != null)
    {
        service = serviceProviderAndSettingsConstructor.Invoke( new object[] { this, serviceSettings.Parameters });
    }
    else if (serviceProviderConstructor != null)
    {
        service = serviceProviderConstructor.Invoke(new object[] { this });
    }
    else if (settingsConstructor != null)
    {
        service = settingsConstructor.Invoke(new object[] { serviceSettings.Parameters });
    }
    else
    {
        service = Activator.CreateInstance(t);
    }
    AddServiceImpl(service);
}

/// <summary>
/// Clears the tracking-profile cache of every live runtime instance.
/// </summary>
internal static void ClearTrackingProfileCache()
{
    lock (_runtimesLock)
    {
        foreach (WeakReference wr in _runtimes.Values)
        {
            WorkflowRuntime runtime = wr.Target as WorkflowRuntime;
            if (null != runtime)
            {
                if ((null != runtime.TrackingListenerFactory) && (null != runtime.TrackingListenerFactory.TrackingProfileManager))
                    runtime.TrackingListenerFactory.TrackingProfileManager.ClearCacheImpl();
            }
        }
    }
}

/// <summary>Utility class that prevents reentrance during event processing.</summary>
/// <remarks>
/// When created an EventContext it creates a static variable local to
/// a managed thread (similar to the old TLS slot),
/// which can detect cases when events are invoked while handling other events.
/// The variable is removed on dispose.
/// </remarks>
internal sealed class EventContext : IDisposable
{
    /// <summary>
    /// Indicates that the value of a static field is unique for each thread
    /// CLR Perf suggests using this attribute over the slot approach.
    /// </summary>
    [ThreadStatic()]
    static object threadData;

    public EventContext(params Object[] ignored)
    {
        // Non-null marker means we are already inside an event on this thread.
        if (threadData != null)
            throw new InvalidOperationException(ExecutionStringManager.CannotCauseEventInEvent);
        threadData = this;
    }

    void IDisposable.Dispose()
    {
        Debug.Assert(threadData != null, "unexpected call to EventContext::Dispose method");
        threadData = null;
    }
}

#endregion

#region WorkflowExecutor utility methods

private IList<WorkflowExecutor> GetWorkflowExecutors()
{
    //
    // This is a safety check in to avoid returning invalid executors in the following cases:
    // 1. We race between the executor going invalid and getting removed from the list.
    // 2. We have a leak somewhere where invalid executors are not getting removed from the list.
    List<WorkflowExecutor> executorsList = new List<WorkflowExecutor>();
    foreach (Dictionary<Guid, WorkflowExecutor> executors in workflowExecutors)
    {
        lock (executors)
        {
            foreach (WorkflowExecutor executor in executors.Values)
            {
                if ((null != executor) && (executor.IsInstanceValid))
                    executorsList.Add(executor);
            }
        }
    }
    return executorsList;
}

/// <summary>
/// Removes the executor from its partition only if the entry for the id is
/// still this exact executor (avoids removing a newer replacement).
/// </summary>
private bool TryRemoveWorkflowExecutor(Guid instanceId, WorkflowExecutor executor)
{
    // NOTE(review): workflowExecutors[instanceId] appears to select the
    // per-instance partition dictionary — confirm against the indexer's declaration.
    Dictionary<Guid, WorkflowExecutor> executors = workflowExecutors[instanceId];
    lock (executors)
    {
        WorkflowExecutor currentRes;
        if (executors.TryGetValue(instanceId, out currentRes) && Object.Equals(executor, currentRes))
        {
            WorkflowTrace.Host.TraceEvent(TraceEventType.Information, 0, "WorkflowRuntime::TryRemoveWorkflowExecutor, instance:{0}, hc:{1}", executor.InstanceIdString, executor.GetHashCode());
            return executors.Remove(instanceId);
        }
        return false;
    }
}

#endregion
}
}
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Text;
using Common.Logging;
using ToolKit.Data;
using ToolKit.Validation;

namespace ToolKit.DirectoryServices
{
    /// <summary>
    /// Distinguished names (DNs) are used to uniquely identify entries in an LDAP or X.500
    /// directory. DNs are user-oriented strings, typically used whenever you must add, modify or
    /// delete an entry in a directory using the LDAP programming interface. This class represents a
    /// Distinguished Name (RFC 2253) and provides access to the various parts of the Distinguished
    /// Name. For Active Directory Distinguished Names, it also provides resolution of the NetBIOS
    /// domain name.
    /// </summary>
    public class DistinguishedName
    {
        private static readonly ILog _log = LogManager.GetLogger<DistinguishedName>();

        // Ordered RDN components, most-specific first (e.g. CN before OU before DC).
        private readonly List<NameValue> _components = new List<NameValue>();

        /// <summary>
        /// Initializes a new instance of the <see cref="DistinguishedName" /> class.
        /// </summary>
        /// <param name="distinguishedName">The distinguished name.</param>
        public DistinguishedName(string distinguishedName)
        {
            LdapServer = string.Empty;
            if (!string.IsNullOrWhiteSpace(distinguishedName))
            {
                // Process() (declared elsewhere in this class) parses the string
                // into _components.
                Process(distinguishedName);
            }
        }

        /// <summary>
        /// Gets the name of the object in canonical format.
        /// </summary>
        /// <value>The name of the object in canonical format.</value>
        /// <remarks>
        /// Active Directory Canonical Name . By default, the Windows user interfaces display object
        /// names that use the canonical name, which lists the relative distinguished names from the
        /// root downward and without the RFC 1779 naming attribute descriptors; it uses the DNS
        /// domain name (the form of the name where the domain labels are separated by periods). If
        /// the name of an organizational unit contains a forward slash character (/), the system
        /// requires an escape character in the form of a backslash (\) to distinguish between
        /// forward slashes that separate elements of the canonical name and the forward slash that
        /// is part of the organizational unit name. The canonical name that appears in Active
        /// Directory Users and Computers properties pages displays the escape character immediately
        /// preceding the forward slash in the name of the organizational unit. For example, if the
        /// name of an organizational unit is Promotions/Northeast and the name of the domain is
        /// example.com, the canonical name is displayed as example.com/Promotions\/Northeast.
        /// </remarks>
        public string CanonicalName
        {
            get
            {
                if (string.IsNullOrEmpty(DnsDomain))
                {
                    return null;
                }

                var builder = new StringBuilder();

                builder.Append(DnsDomain);

                // Walk RDNs from the root downward (reverse order), skipping DC
                // components, which are already represented by the DNS domain.
                for (var i = _components.Count - 1; i >= 0; i--)
                {
                    if (_components[i].Name.ToUpper(CultureInfo.InvariantCulture) != "DC")
                    {
                        builder.Append('/');
                        builder.Append(
                            _components[i].Value
                            .Replace("\\", string.Empty)
                            .Replace("/", "\\/"));
                    }
                }

                return builder.ToString();
            }
        }

        /// <summary>
        /// Gets the common name of the Distinguished Name, if present.
        /// </summary>
        /// <value>The common name of the Distinguished Name.</value>
        public string CommonName
        {
            get
            {
                // First CN component, with escape backslashes stripped;
                // null when the DN has no CN.
                return (from rdn in _components
                        where rdn.Name.ToUpper(CultureInfo.InvariantCulture) == "CN"
                        select rdn.Value.Replace("\\", string.Empty)).FirstOrDefault();
            }
        }

        /// <summary>
        /// Gets the DNS domain equivalent based on the DC components of the DN.
        /// </summary>
        /// <value>A string representing the DNS domain.</value>
        [SuppressMessage(
            "Globalization",
            "CA1308:Normalize strings to uppercase",
            Justification = "DNS is typically lowercase")]
        public string DnsDomain
        {
            get
            {
                // Join every DC component with '.' — e.g. DC=example,DC=com -> example.com.
                var sb = new StringBuilder();

                foreach (var rdn in _components.Where(rdn => rdn.Name.ToUpper(CultureInfo.InvariantCulture) == "DC"))
                {
                    sb.Append(rdn.Value).Append('.');
                }

                return sb.ToString().TrimEnd('.').ToLower(CultureInfo.InvariantCulture);
            }
        }

        /// <summary>
        /// Gets the domain root of the Distinguished Name.
        /// </summary>
        /// <value>The domain root of the Distinguished Name.</value>
        public string DomainRoot
        {
            get
            {
                // Re-assembles only the DC components, e.g. "DC=example,DC=com".
                var sb = new StringBuilder();

                foreach (var rdn in _components)
                {
                    if (rdn.Name.ToUpper(CultureInfo.InvariantCulture) == "DC")
                    {
                        sb.Append(rdn.Name).Append('=').Append(rdn.Value).Append(',');
                    }
                }

                return sb.ToString().TrimEnd(',');
            }
        }

        /// <summary>
        /// Gets the Global Catalog path in the
        /// form: LDAP://HostName[:PortNumber]/DistinguishedName.
        /// </summary>
        /// <value>The Global Catalog path of the DistinguishedName.</value>
        public string GcPath
        {
            get
            {
                if (_components.Count == 0)
                {
                    return "GC://";
                }
                else
                {
                    // Explicit LdapServer wins over the DC-derived DNS domain;
                    // port and separator are emitted only when applicable.
                    return string.Format(
                        CultureInfo.InvariantCulture,
                        "GC://{0}{1}{2}{3}{4}",
                        LdapServer.Length > 0 ? LdapServer : DnsDomain,
                        ServerPort > 0 ? ":" : string.Empty,
                        ServerPort > 0 ? Convert.ToString(ServerPort, CultureInfo.InvariantCulture) : string.Empty,
                        LdapServer.Length > 0 || DnsDomain.Length > 0 ? "/" : string.Empty,
                        ToString());
                }
            }
        }

        /// <summary>
        /// Gets the LDAP path in the form: LDAP://HostName[:PortNumber]/DistinguishedName.
        /// </summary>
        /// <value>The LDAP path of the DistinguishedName.</value>
        public string LdapPath
        {
            get
            {
                if (_components.Count == 0)
                {
                    return "LDAP://";
                }
                else
                {
                    // Same composition rules as GcPath, with the LDAP scheme.
                    return string.Format(
                        CultureInfo.InvariantCulture,
                        "LDAP://{0}{1}{2}{3}{4}",
                        LdapServer.Length > 0 ? LdapServer : DnsDomain,
                        ServerPort > 0 ? ":" : string.Empty,
                        ServerPort > 0 ? Convert.ToString(ServerPort, CultureInfo.InvariantCulture) : string.Empty,
                        LdapServer.Length > 0 || DnsDomain.Length > 0 ? "/" : string.Empty,
                        ToString());
                }
            }
        }

        /// <summary>
        /// Gets or sets the LDAP server. The "HostName" can be a computer name, an IP address, or a
        /// domain name. A server name can also be specified in the binding string. If an LDAP
        /// server is not specified, one is deduced by the presence of DC values in the
        /// distinguished name. Most LDAP providers follow a model that requires a server name to be specified.
        /// </summary>
        /// <value>The LDAP server.</value>
        public string LdapServer { get; set; }

        /// <summary>
        /// Gets the Parent Distinguished Name of this Distinguished Name.
        /// </summary>
        /// <value>The Parent Distinguished Name.</value>
        public DistinguishedName Parent
        {
            get
            {
                if (_components.Count > 0)
                {
                    var sb = new StringBuilder();

                    // Skip first component, then return the rest.
                    for (var i = 1; i < _components.Count; i++)
                    {
                        sb.Append(_components[i].Name).Append('=').Append(_components[i].Value).Append(',');
                    }

                    return new DistinguishedName(sb.ToString().TrimEnd(','));
                }
                else
                {
                    return null;
                }
            }
        }

        /// <summary>
        /// Gets or sets the server port number. The "PortNumber" specifies the port to be used for
        /// the connection. If no port number is specified, the LDAP provider uses the default port
        /// number. The default port number is 389 if not using an SSL connection or 636 if using an
        /// SSL connection. Unless a port number is specified, the port number is not used.
        /// </summary>
        /// <value>The server port number. Returns 0 is the port is not specified.</value>
        public int ServerPort { get; set; }

        /// <summary>
        /// Checks to see whether two DN objects are not equal.
/// </summary> /// <param name="dn1">The first DistinguishedName instance..</param> /// <param name="dn2">The second DistinguishedName instance.</param> /// <returns><c>true</c> if the two instance are equal; otherwise, <c>false</c>.</returns> /// <returns>true if the two objects are not equal; false otherwise.</returns> public static bool operator !=(DistinguishedName dn1, DistinguishedName dn2) { return !(dn1 == dn2); } /// <summary> /// Checks to see whether two DN objects are equal. /// </summary> /// <param name="dn1">The first DistinguishedName instance.</param> /// <param name="dn2">The second DistinguishedName instance.</param> /// <returns><c>true</c> if the two instance are equal; otherwise, <c>false</c>.</returns> /// <returns>true if the two objects are equal; false otherwise.</returns> [SuppressMessage( "Blocker Code Smell", "S3875:\"operator==\" should not be overloaded on reference types", Justification = "Whitespace in DN are ignored for equality, so override object reference equality.")] public static bool operator ==(DistinguishedName dn1, DistinguishedName dn2) { return dn1 is null ? dn2 is null : dn1.Equals(dn2); } /// <summary> /// Parses the specified distinguished name and returns a distinguished name instance. /// </summary> /// <param name="distinguishedName">The distinguished name.</param> /// <returns>A distinguished name instance.</returns> public static DistinguishedName Parse(string distinguishedName) { return new DistinguishedName(distinguishedName); } /// <summary> /// Returns a Child distinguished name based on the child's relative distinguished name. 
/// </summary> /// <param name="childDistinguishedName">The child's relative distinguished nameValue.</param> /// <returns>A Child distinguished name instance.</returns> public DistinguishedName Child(string childDistinguishedName) { return new DistinguishedName($"{childDistinguishedName},{ToString()}"); } /// <summary> /// Returns a Distinguished Name that represents the container of the object. If the object /// is a container, then the entire Distinguished Name is returned... /// </summary> /// <returns>A DistinguishedName object.</returns> public DistinguishedName Container() { return new DistinguishedName(ToString().Replace($"CN={CommonName},", string.Empty)); } /// <summary> /// Determines whether the child distinguished name is part of this distinguished name. /// </summary> /// <param name="childDistinguishedName">The child distinguished name instance.</param> /// <returns> /// <c>true</c> if the child distinguished name is part of this distinguished name; /// otherwise, <c>false</c>. /// </returns> public bool Contains(DistinguishedName childDistinguishedName) { Check.NotNull(childDistinguishedName, nameof(childDistinguishedName)); if (childDistinguishedName._components.Count >= _components.Count) { var startNode = childDistinguishedName._components.Count - _components.Count; for (var i = startNode; i < _components.Count; i++) { var childName = childDistinguishedName._components[i].Name.ToUpperInvariant(); var childValue = childDistinguishedName._components[i].Value.ToUpperInvariant(); var thisName = _components[i - startNode].Name.ToUpperInvariant(); var thisValue = _components[i - startNode].Value.ToUpperInvariant(); if (!((childName == thisName) && (childValue == thisValue))) { return false; } } return true; } return false; } /// <summary> /// Determines whether the child distinguished name is part of this distinguished name. 
/// </summary> /// <param name="childDistinguishedName">The child distinguished name string.</param> /// <returns> /// <c>true</c> if the child distinguished name is part of this distinguished name; /// otherwise, <c>false</c>. /// </returns> public bool Contains(string childDistinguishedName) { return Contains(new DistinguishedName(childDistinguishedName)); } /// <summary> /// Determines whether the specified <see cref="object" /> is equal to the current instance. /// </summary> /// <param name="obj">The <see cref="object" /> to compare.</param> /// <returns> /// <c>true</c> if the specified <see cref="object" /> is equal to the current instance; /// otherwise, <c>false</c>. /// </returns> /// <exception cref="NullReferenceException"> /// The <paramref name="obj" /> parameter is null. /// </exception> public override bool Equals(object obj) => obj is DistinguishedName distinguishedName && Contains(distinguishedName); /// <summary> /// Serves as a hash function for this instance. /// </summary> /// <returns>A hash code for the current instance.</returns> public override int GetHashCode() => ToString().ToUpperInvariant().GetHashCode(); /// <summary> /// Returns a <see cref="string" /> that represents the current Distinguished Name instance. 
/// <summary>
/// Returns a <see cref="string" /> that represents the current Distinguished Name instance:
/// components re-joined as "name=value," pairs, with multi-valued RDN markers ("+") restored.
/// </summary>
/// <returns>A <see cref="string" /> that represents the current Distinguished Name instance.</returns>
public override string ToString()
{
    var sb = new StringBuilder();
    foreach (var rdn in _components)
    {
        sb.Append(rdn.Name).Append('=').Append(rdn.Value).Append(',');
    }
    // Components of a multi-valued RDN are stored with a trailing '+', so the joined
    // string contains "+," where the '+' separator should be; collapse it back.
    return sb.ToString().TrimEnd(',').Replace("+,", "+");
}

// True for ASCII letters only (A-Z, a-z).
private static bool IsAlpha(char c)
{
    return (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'f') == false ? (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') : (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z');
}

// True for hexadecimal digits (0-9, A-F, a-f).
private static bool IsHex(char c)
{
    return IsNumber(c) || (c >= 'A' && c <= 'F') || (c >= 'a' && c <= 'f');
}

// True for the characters RFC 2253 treats as special inside a DN and which
// therefore must be escaped in attribute values.
private static bool IsLdapDnSpecial(char c)
{
    return c == ',' || c == '=' || c == '+' || c == '<' || c == '>' || c == '#' || c == ';' || c == '"' || c == '\\';
}

// True for ASCII decimal digits (0-9).
private static bool IsNumber(char c)
{
    return c >= '0' && c <= '9';
}

// Parses a DN string into _components, one NameValue per RDN.
//
// A DN is an ordered list of RDNs ("attribute=value" pairs), most specific first,
// separated by ',' or ';' (semicolons and surrounding whitespace are normalized away).
// The special characters , = + < > # ; \ " may be escaped with a backslash, either
// directly or as a backslash followed by two hex digits encoding one byte. A value
// surrounded by quotation marks (ASCII 34) takes all characters literally except the
// backslash. Spaces around '+' and '=' are ignored during parsing.
// Formal grammar: RFC 2253. Based on:
// http://publib.boulder.ibm.com/infocenter/iseries/v5r3/topic/rzahy/rzahyunderdn.htm
private void Process(string distinguishedName)
{
    _log.Debug("DN: " + distinguishedName);
    if (distinguishedName.Length == 0)
    {
        // Empty DNs are okay too.
        return;
    }
    var relativeDnList = SplitToRelativeParts(distinguishedName);
    _log.Debug($"Parsed Distinguished Name into {relativeDnList.Count} Relative Distinguished Names...");
    foreach (var rdn in relativeDnList)
    {
        ProcessRelativeDn(distinguishedName, rdn);
    }
}

// Parses a single RDN ("attr=value" or a '+'-joined multi-valued RDN) via a small
// string-keyed state machine: "" (initial) -> "AttributeOID"/"AttributeName" -> "GetValue".
// Each completed pair is appended to _components; throws InvalidDistinguishedNameException
// on malformed input. The full DN is passed along only for error reporting.
private void ProcessRelativeDn(string distinguishedName, string rdn)
{
    var parseState = string.Empty;
    var attributeName = new StringBuilder();
    var attributeValue = new StringBuilder();
    var position = 0;
    while (position < rdn.Length)
    {
        switch (parseState)
        {
            default:
                // Initial state: skip leading spaces, then decide whether the attribute
                // is a dotted-number OID or an alphanumeric name.
                try
                {
                    while (rdn[position] == ' ')
                    {
                        position++;
                    }
                }
                catch (IndexOutOfRangeException)
                {
                    throw new InvalidDistinguishedNameException("A Relative DN is just spaces!");
                }
                // Ok, at this point, we should be looking at the first non-space character.
                if (IsAlpha(rdn[position]))
                {
                    // NOTE(review): this slices the FULL DN at an index computed against the
                    // single RDN — looks like it should be rdn.Substring(position); confirm.
                    var workingValue = distinguishedName.Substring(position);
                    // Check to see if the attributeName is an OID prefixed with "OID."
                    if (workingValue.StartsWith("OID.", StringComparison.InvariantCultureIgnoreCase))
                    {
                        // However only the exact strings OID and oid are allowed (no mixed case).
                        if (workingValue.StartsWith("OID", StringComparison.InvariantCulture) || workingValue.StartsWith("oid", StringComparison.InvariantCulture))
                        {
                            position += 4;
                            parseState = "AttributeOID";
                        }
                        else
                        {
                            throw new InvalidDistinguishedNameException("OID mixed-case is not allowed!");
                        }
                    }
                    else
                    {
                        // No, we must be looking at an attribute name
                        parseState = "AttributeName";
                    }
                }
                else
                {
                    // The "OID." prefix is optional, so a leading digit also means an OID.
                    if (IsNumber(rdn[position]))
                    {
                        parseState = "AttributeOID";
                    }
                    else
                    {
                        // If it is not a letter or number, then it's invalid...
                        throw new InvalidDistinguishedNameException("Invalid character in attribute!");
                    }
                }
                break;
            case "AttributeOID":
                // Accumulate a dotted-decimal OID, validate its shape, and require '='.
                try
                {
                    // Double-check that the character is a number
                    if (!IsNumber(rdn[position]))
                    {
                        throw new InvalidDistinguishedNameException("OID must start with a number!");
                    }
                    // Let's continue processing.
                    while (IsNumber(rdn[position]) || rdn[position] == '.')
                    {
                        attributeName.Append(rdn[position]);
                        position++;
                    }
                    // The OID can be followed by any number of blank spaces
                    while (rdn[position] == ' ')
                    {
                        position++;
                    }
                    if (rdn[position] == '=')
                    {
                        // The AttributeName is complete, lets validate OID.
                        var name = attributeName.ToString();
                        // OID are not allowed to end with a period
                        if (name.EndsWith(".", StringComparison.InvariantCulture))
                        {
                            throw new InvalidDistinguishedNameException("OID cannot end with a period!");
                        }
                        // OID are not allowed to have two periods together
                        if (name.IndexOf("..", StringComparison.Ordinal) > -1)
                        {
                            throw new InvalidDistinguishedNameException("OID cannot two periods together.");
                        }
                        // OID numbers are not allowed to have leading zeros
                        var parts = name.Split('.');
                        if (parts.Any(part => (part.Length > 1) && (part[0] == '0')))
                        {
                            throw new InvalidDistinguishedNameException("OID cannot have a leading zero.");
                        }
                        // This is a valid OID, Let's get the value.
                        position++;
                        parseState = "GetValue";
                    }
                    else
                    {
                        throw new InvalidDistinguishedNameException("Attribute name is unterminated.");
                    }
                }
                catch (IndexOutOfRangeException)
                {
                    // Ran off the end of the RDN before finding '='.
                    throw new InvalidDistinguishedNameException("Attribute name is unterminated.");
                }
                break;
            case "AttributeName":
                // Accumulate an alphanumeric attribute name and require '='.
                try
                {
                    // Double-check that the character is letter
                    if (!IsAlpha(rdn[position]))
                    {
                        throw new InvalidDistinguishedNameException("Attribute name must start with a letter.");
                    }
                    // Let's continue processing.
                    while (IsAlpha(rdn[position]) || IsNumber(rdn[position]))
                    {
                        attributeName.Append(rdn[position]);
                        position++;
                    }
                    // The name can be followed by any number of blank spaces
                    while (rdn[position] == ' ')
                    {
                        position++;
                    }
                    if (rdn[position] == '=')
                    {
                        // The AttributeName is complete, Let's get the value.
                        position++;
                        parseState = "GetValue";
                    }
                    else
                    {
                        throw new InvalidDistinguishedNameException("Attribute name is unterminated.");
                    }
                }
                catch (IndexOutOfRangeException)
                {
                    throw new InvalidDistinguishedNameException("Attribute name is unterminated.");
                }
                break;
            case "GetValue":
                // Parse the value: quoted string, '#'-prefixed hex string, or plain string.
                try
                {
                    // Get rid of any leading spaces
                    while (rdn[position] == ' ')
                    {
                        position++;
                    }
                }
                catch (IndexOutOfRangeException)
                {
                    // It is okay to have an empty value, so catch the exception and store
                    // an empty value.
                    // NOTE(review): if the value really is empty/spaces-only, the switch
                    // below re-indexes rdn[position] and the IndexOutOfRangeException
                    // escapes uncaught — confirm whether that path is reachable.
                }
                // Find out what type of value this is
                switch (rdn[position])
                {
                    case '"':
                        // this is a quoted string
                        position++; // Ignore the start quote
                        try
                        {
                            while (rdn[position] != '"')
                            {
                                if (rdn[position] == '\\')
                                {
                                    try
                                    {
                                        if (IsHex(rdn[position + 1]) && IsHex(rdn[position + 2]))
                                        {
                                            // Let's convert the hexadecimal to it's
                                            // character and store
                                            // NOTE(review): Convert.ToString(byte) yields the
                                            // DECIMAL digits of the byte (e.g. "\41" -> "65"),
                                            // unlike the '#' branch which uses Convert.ToChar.
                                            // Confirm whether this is intentional.
                                            position++; // Discard Escape character
                                            var ch = Convert.ToByte(rdn.Substring(position, 2), 16);
                                            var value = Convert.ToString(ch, CultureInfo.InvariantCulture);
                                            attributeValue.Append(value);
                                        }
                                        else
                                        {
                                            if (rdn[position + 1] == ' ')
                                            {
                                                // Covert escaped spaces to regular spaces
                                                attributeValue.Append(' ');
                                            }
                                            else
                                            {
                                                // NOTE(review): the "|| == ' '" arm is dead —
                                                // spaces were handled by the branch above.
                                                if (IsLdapDnSpecial(rdn[position + 1]) || rdn[position + 1] == ' ')
                                                {
                                                    attributeValue.Append(rdn, position, 2);
                                                }
                                                else
                                                {
                                                    // NOTE(review): rdn[position] is the backslash
                                                    // itself here; the message likely meant to show
                                                    // the escaped character at position + 1.
                                                    throw new InvalidDistinguishedNameException("Escape sequence \\" + rdn[position] + " is invalid.");
                                                }
                                            }
                                        }
                                        position += 2;
                                    }
                                    catch (IndexOutOfRangeException)
                                    {
                                        throw new InvalidDistinguishedNameException("Invalid escape sequence.");
                                    }
                                }
                                else
                                {
                                    // Inside quotes, special characters are stored escaped.
                                    if (IsLdapDnSpecial(rdn[position]))
                                    {
                                        attributeValue.Append('\\');
                                    }
                                    attributeValue.Append(rdn[position]);
                                    position++;
                                }
                            }
                        }
                        catch (IndexOutOfRangeException)
                        {
                            throw new InvalidDistinguishedNameException("Quoted value was not terminated!");
                        }
                        position++; // Ignore the closing quote
                        // Remove any trailing spaces
                        while (position < rdn.Length && rdn[position] == ' ')
                        {
                            position++;
                        }
                        break;
                    case '#':
                        // this is a hexadecimal string
                        position++;
                        // hexadecimal values consist of two characters each.
                        while (position + 1 < rdn.Length && IsHex(rdn[position]) && IsHex(rdn[position + 1]))
                        {
                            // Let's convert the hexadecimal to it's character and store
                            var ch = Convert.ToChar(Convert.ToByte(rdn.Substring(position, 2), 16));
                            attributeValue.Append(ch);
                            position += 2;
                        }
                        break;
                    default:
                        // this is a regular (un-quoted) string; read up to '+' or end.
                        while (position < rdn.Length && rdn[position] != '+')
                        {
                            if (rdn[position] == '\\')
                            {
                                try
                                {
                                    // Check to see if this is a hexadecimal escape sequence
                                    // or a regular escape sequence.
                                    // NOTE(review): IsLdapDnSpecial(rdn[position]) tests the
                                    // backslash itself (always special), so this validation
                                    // can never throw — position + 1 was probably intended.
                                    if (!(IsHex(rdn[position + 1]) && IsHex(rdn[position + 2])) && !(IsLdapDnSpecial(rdn[position]) || rdn[position] == ' '))
                                    {
                                        throw new InvalidDistinguishedNameException("Escape sequence \\" + rdn[position] + " is invalid.");
                                    }
                                    attributeValue.Append(rdn, position, 2);
                                    position += 2;
                                }
                                catch (IndexOutOfRangeException)
                                {
                                    throw new InvalidDistinguishedNameException("Invalid escape sequence!");
                                }
                            }
                            else
                            {
                                if (IsLdapDnSpecial(rdn[position]))
                                {
                                    throw new InvalidDistinguishedNameException("Unquoted special character '" + rdn[position] + "'");
                                }
                                else
                                {
                                    attributeValue.Append(rdn[position]);
                                    position++;
                                }
                            }
                        }
                        break;
                }
                // Check for end-of-string or + sign (which indicates a multi-valued RDN)
                if (position >= rdn.Length)
                {
                    // We are at the end of the string
                    break;
                }
                else
                {
                    if (rdn[position] == '+')
                    {
                        // if we've found a plus sign, that means that there's another
                        // name/value pair after it. We'll store what we've found (marking
                        // the value with a trailing '+', which ToString() later restores
                        // as the separator), advance past the '+', and let the loop
                        // cycle again from the initial state...
                        var value = attributeValue.ToString().TrimEnd() + "+";
                        position++;
                        _components.Add(new NameValue { Name = attributeName.ToString().TrimEnd(), Value = value });
                        attributeName.Clear();
                        attributeValue.Clear();
                        parseState = string.Empty;
                    }
                    else
                    {
                        throw new ArgumentException("Invalid Distinguished Name! Invalid characters at end of value.", distinguishedName);
                    }
                }
                break;
        }
    }
    // We are finished with the RDN; a well-formed RDN must end in the value state.
    if (parseState != "GetValue")
    {
        throw new InvalidDistinguishedNameException();
    }
    _components.Add(new NameValue { Name = attributeName.ToString().TrimEnd(), Value = attributeValue.ToString().TrimEnd() });
}

// Splits a DN string into its RDN substrings. Strips an optional "LDAP://" or
// "GC://" prefix, extracts an optional "server[:port]/" part into LdapServer /
// ServerPort, then splits on ',' or ';' separators, honoring backslash escapes
// and quoted sections (no splitting inside "...").
private List<string> SplitToRelativeParts(string distinguishedName)
{
    var relativeDnList = new List<string>();
    var relativeDn = new StringBuilder();
    var lookForSeparator = true;
    var startPosition = 0;
    // Ignore any spaces at the beginning of the string
    try
    {
        while (distinguishedName[startPosition] == ' ')
        {
            startPosition++;
        }
    }
    catch (IndexOutOfRangeException)
    {
        throw new ArgumentException("Invalid Distinguished Name, It's just spaces!", distinguishedName);
    }
    // Ignore LDAP:// or GC:// in the front of string
    // NOTE(review): IndexOf > -1 matches the moniker ANYWHERE in the string, not
    // just at startPosition — confirm StartsWith was not intended.
    if (distinguishedName.IndexOf("LDAP://", startPosition, StringComparison.Ordinal) > -1)
    {
        startPosition += 7;
    }
    if (distinguishedName.IndexOf("GC://", startPosition, StringComparison.Ordinal) > -1)
    {
        startPosition += 5;
    }
    // Is the DN just the protocol part?
    if (startPosition == distinguishedName.Length)
    {
        throw new ArgumentException("Invalid Distinguished Name! Needs more than just the protocol part.", distinguishedName);
    }
    // Is the LDAP server already in the DN? A '/' appearing before the first '='
    // means a "server[:port]/" segment precedes the DN proper.
    var slash = distinguishedName.IndexOf("/", StringComparison.Ordinal);
    var equal = distinguishedName.IndexOf("=", StringComparison.Ordinal);
    if ((slash > -1) && (slash < equal))
    {
        var serverPart = distinguishedName.Substring(startPosition, distinguishedName.IndexOf("/", startPosition, StringComparison.Ordinal) - startPosition);
        var colon = serverPart.IndexOf(":", StringComparison.Ordinal);
        if (colon == 0)
        {
            throw new ArgumentException("Invalid Distinguished Name! Can't have Server Port without Server Name.", distinguishedName);
        }
        else
        {
            var portIndex = colon > 0 ? colon : serverPart.Length;
            LdapServer = serverPart.Substring(0, portIndex);
            if (portIndex != serverPart.Length)
            {
                ServerPort = Convert.ToInt32(serverPart.Substring(portIndex + 1), CultureInfo.InvariantCulture);
            }
        }
        startPosition += serverPart.Length + 1;
    }
    for (var i = startPosition; i < distinguishedName.Length; i++)
    {
        var current = distinguishedName[i];
        var previous = default(char);
        if (i > 0)
        {
            previous = distinguishedName[i - 1];
        }
        if (lookForSeparator)
        {
            if ((current == ',' || current == ';') && (previous != '\\'))
            {
                // We found a separator, store the RDN.
                relativeDnList.Add(relativeDn.ToString());
                relativeDn.Length = 0;
            }
            else
            {
                relativeDn.Append(current);
                // An opening quote suspends separator detection until it is closed.
                if (current == '"')
                {
                    lookForSeparator = false;
                }
            }
        }
        else
        {
            relativeDn.Append(current);
            // Check for the ending quote; however, escaped quotes don't change the state.
            if (current == '"' && previous != '\\')
            {
                lookForSeparator = true;
            }
        }
    }
    // Add last relative part
    relativeDnList.Add(relativeDn.ToString());
    return relativeDnList;
}
}
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using FluentSharp.CoreLib.API;

namespace FluentSharp.CoreLib
{
    /// <summary>
    /// String extension methods for folder/directory handling (existence checks,
    /// creation, enumeration and path composition). Most methods delegate to the
    /// <code>Files</code> helper class and are null/invalid-input tolerant,
    /// returning null, false or an empty list rather than throwing.
    /// </summary>
    public static class IO_ExtensionMethods_Folders
    {
        /// <summary>
        /// Given a valid folder path returns the top directory name by calling <code>folder.fileName()</code>
        /// </summary>
        /// <param name="folder">
        /// Input is checked with <code>folder.isFolder()</code>; null, empty or non-folder paths yield null
        /// </param>
        /// <returns>The folder's own name, or null when the path is not an existing folder</returns>
        public static string folder_Name(this string folder)
        {
            return folder.isFolder() ? folder.fileName() : null;
        }

        /// <summary>
        /// Alias for <code>folder_Name</code>
        /// </summary>
        /// <param name="folder"></param>
        /// <returns></returns>
        public static string folderName(this string folder)
        {
            return folder.folder_Name();
        }

        /// <summary>
        /// Given a valid path returns the parent folder
        /// </summary>
        /// <param name="path">Input is validated using the <code>path.valid()</code> function</param>
        /// <returns>The parent folder path, or null for an invalid path</returns>
        public static string parent_Folder(this string path)
        {
            if (path.valid())
                return path.directoryName();
            return null;
        }

        /// <summary>
        /// Given a valid path returns the parent folder
        /// </summary>
        /// <param name="path">Input is validated using the <code>path.valid()</code> function</param>
        /// <returns></returns>
        [Obsolete("parentFolder is deprecated. Please use parent_Folder instead")]
        public static string parentFolder(this string path)
        {
            return path.parent_Folder();
        }

        /// <summary>
        /// Given a file path or directory path opens the parent folder in windows explorer
        /// </summary>
        /// <param name="path">Validated using <code>path.parent_Folder().folder_Exists()</code></param>
        /// <returns>The launched explorer <code>Process</code>, or null when the parent folder doesn't exist</returns>
        public static Process parent_Folder_Open_in_Explorer(this string path)
        {
            var parent_Folder = path.parent_Folder();
            if (parent_Folder.folder_Exists())
                return parent_Folder.startProcess();
            return null;
        }

        /// <summary>
        /// Combines two paths and checks if a folder exists at the merged directory path.
        /// </summary>
        /// <param name="basePath"></param>
        /// <param name="folderName"></param>
        /// <returns>If <code>basePath.pathCombine(folderName)</code> is an existing directory then returns it, otherwise returns null</returns>
        public static string folder(this string basePath, string folderName)
        {
            var targetFolder = basePath.pathCombine(folderName);
            if (targetFolder.dirExists())
                return targetFolder;
            return null;
        }

        /// <summary>
        /// Searches (recursively or not) for the list of folders in the current path.
        /// </summary>
        /// <param name="path"></param>
        /// <param name="recursive"></param>
        /// <returns>The sub-folder paths, or an empty list when the path is not a folder</returns>
        public static List<string> folders(this string path, bool recursive = false)
        {
            return (path.isFolder())
                        ? Files.getListOfAllDirectoriesFromDirectory(path, recursive)
                        : new List<string>();
        }

        /// <summary>
        /// Searches (recursively or not) for the list of folders in the current path matching a search pattern.
        /// </summary>
        /// <param name="path"></param>
        /// <param name="searchPattern"></param>
        /// <param name="recursive"></param>
        /// <returns>The matching sub-folder paths, or an empty list when the path is not a folder</returns>
        public static List<string> folders(this string path, string searchPattern, bool recursive = false)
        {
            return (path.isFolder())
                        ? Files.getListOfAllDirectoriesFromDirectory(path, recursive, searchPattern)
                        : new List<string>();
        }

        /// <summary>
        /// Checks if the provided directory exists and if not, calls <code>Files.checkIfDirectoryExistsAndCreateIfNot(directory);</code>
        /// </summary>
        /// <param name="directory"></param>
        /// <returns></returns>
        public static string createDir(this string directory)
        {
            return Files.checkIfDirectoryExistsAndCreateIfNot(directory);
        }

        /// <summary>
        /// Checks if the provided directory exists and if not, calls <code>Files.checkIfDirectoryExistsAndCreateIfNot(directory);</code>
        /// </summary>
        /// <param name="folder"></param>
        /// <returns></returns>
        public static string create_Folder(this string folder)
        {
            return folder.createDir();
        }

        /// <summary>
        /// Checks if the provided directory exists and if not, calls <code>Files.checkIfDirectoryExistsAndCreateIfNot(directory);</code>
        /// </summary>
        /// <param name="folder"></param>
        /// <returns></returns>
        [Obsolete("createFolder is deprecated. Please use create_Folder instead")]
        public static string createFolder(this string folder)
        {
            return folder.create_Folder();
        }

        /// <summary>
        /// Checks if the provided directory exists and if not, calls <code>Files.checkIfDirectoryExistsAndCreateIfNot(directory);</code>
        /// </summary>
        /// <param name="folder"></param>
        /// <returns></returns>
        public static string folder_Create(this string folder)
        {
            return folder.create_Folder();
        }

        /// <summary>
        /// Given a <code>file.valid()</code> path returns the directory name
        /// </summary>
        /// <param name="file"></param>
        /// <returns>
        /// Returns the directory name if the path is valid.
        /// If the file path is a root or empty then Path.GetDirectoryName returns null.
        /// On an invalid path (or internal error, which is logged) returns an empty string.
        /// </returns>
        public static string directoryName(this string file)
        {
            if (file.valid())
            {
                try
                {
                    return Path.GetDirectoryName(file);
                }
                catch (Exception ex)
                {
                    // Best-effort: log and fall through to the empty-string result.
                    ex.log("in directoryName for: {0}".info(file));
                }
            }
            return "";
        }

        /// <summary>
        /// Checks if a path is an existing folder (as opposed to a file or a missing path)
        /// </summary>
        /// <param name="path"></param>
        /// <returns></returns>
        public static bool isFolder(this string path)
        {
            return path.is_Folder();
        }

        /// <summary>
        /// Checks if a path is an existing folder (as opposed to a file or a missing path)
        /// </summary>
        /// <param name="path"></param>
        /// <returns></returns>
        public static bool is_Folder(this string path)
        {
            return path.dirExists();
        }

        /// <summary>
        /// Checks if a path is not an existing folder
        /// </summary>
        /// <param name="path"></param>
        /// <returns></returns>
        public static bool is_Not_Folder(this string path)
        {
            return path.dir_Not_Exists();
        }

        /// <summary>
        /// Creates a file in the provided folder.
        ///
        /// Returns the path to the created file.
        ///
        /// Returns null if:
        ///  - the folder doesn't exist
        ///  - the file already exists
        ///  - the resolved file path is located outside the provided folder
        /// </summary>
        /// <param name="folder"></param>
        /// <param name="fileName"></param>
        /// <param name="fileContents"></param>
        /// <returns></returns>
        public static string folder_Create_File(this string folder, string fileName, string fileContents)
        {
            if (folder.folder_Exists())
            {
                // mapPath rejects file names that would escape the folder (e.g. "..\..").
                var path = folder.mapPath(fileName);
                if (path.valid() && path.file_Doesnt_Exist())
                {
                    fileContents.saveAs(path);
                    if (path.file_Exists())
                        return path;
                }
            }
            return null;
        }

        /// <summary>
        /// Deletes all files in the current directory.
        /// </summary>
        /// <param name="folder"></param>
        /// <returns>
        /// Returns true if the folder is empty afterwards, otherwise returns false
        /// (including when the folder doesn't exist or was already empty).
        /// </returns>
        public static bool folder_Delete_Files(this string folder)
        {
            if (folder.isFolder() && folder.files().notEmpty())
            {
                Files.deleteAllFilesFromDir(folder);
                return folder.files().empty();
            }
            return false;
        }

        /// <summary>
        /// Checks if a folder does not exist
        /// </summary>
        /// <param name="path"></param>
        /// <returns>
        /// True if the folder does not exist, false otherwise.
        /// </returns>
        public static bool folder_Not_Exists(this string path)
        {
            return path.folder_Exists().isFalse();
        }

        /// <summary>
        /// Waits for a folder to be deleted from disk.
        ///
        /// The maxWait defaults to 2000 and there will be 10 checks (at maxWait / 10 intervals).
        ///
        /// If the folder was still there after maxWait an error message will be logged
        /// </summary>
        /// <param name="folder"></param>
        /// <param name="maxWait"></param>
        /// <returns>The provided folder path (for chaining)</returns>
        public static string folder_Wait_For_Deleted(this string folder, int maxWait = 2000)
        {
            if (folder.isFolder() && folder.folder_Exists())
            {
                var loopCount = 10;
                var sleepValue = maxWait / loopCount;
                for (int i = 0; i < loopCount; i++)
                {
                    if (folder.folder_Not_Exists())
                        break;
                    sleepValue.sleep();
                }
                if (folder.folder_Exists())
                    "[string][folder_Wait_For_Deleted] after {0}ms the target folder still existed: {1}".error(maxWait, folder);
            }
            return folder;
        }

        /// <summary>
        /// Checks if a folder exists
        /// </summary>
        /// <param name="path"></param>
        /// <returns>
        /// Returns true if the folder exists, false otherwise.
        /// </returns>
        public static bool folder_Exists(this string path)
        {
            return path.dirExists();
        }

        /// <summary>
        /// Checks if a folder exists
        /// </summary>
        /// <param name="path"></param>
        /// <returns>
        /// Returns true if the folder exists, false otherwise.
        /// </returns>
        [Obsolete("folderExists is deprecated. Please use folder_Exists instead.")]
        public static bool folderExists(this string path)
        {
            return path.folder_Exists();
        }

        /// <summary>
        /// Checks if a folder exists
        /// </summary>
        /// <param name="path"></param>
        /// <returns>
        /// Returns true if the folder exists, false otherwise (also false for invalid paths).
        /// </returns>
        public static bool dirExists(this string path)
        {
            if (path.valid())
                return Directory.Exists(path);
            return false;
        }

        /// <summary>
        /// Checks if a folder does not exist
        /// </summary>
        /// <param name="path"></param>
        /// <returns>
        /// Returns true if the path is valid and the folder does not exist;
        /// note this returns false for an invalid path.
        /// </returns>
        public static bool dir_Not_Exists(this string path)
        {
            if (path.valid())
                return !Directory.Exists(path);
            return false;
        }

        /// <summary>
        /// Searches non-recursively for the list of folders in the current path.
        /// </summary>
        /// <param name="path"></param>
        /// <returns></returns>
        public static List<string> dirs(this string path)
        {
            return path.folders();
        }

        /// <summary>
        /// Returns a new instance of the <code>DirectoryInfo</code> class for a valid directory path.
        /// Returns null if the provided folder doesn't exist.
        /// </summary>
        /// <param name="directoryPath"></param>
        /// <returns></returns>
        public static DirectoryInfo directoryInfo(this string directoryPath)
        {
            return directoryPath.folder_Not_Exists()
                        ? null
                        : new DirectoryInfo(directoryPath);
        }

        /// <summary>
        /// Combines both paths and normalizes the file path.
        ///
        /// There is a check that the rootPath is still contained in the final path
        /// (if it is not, an error is logged and null is returned).
        ///
        /// This simulates the System.Web MapPath functionality
        /// </summary>
        /// <param name="rootPath"></param>
        /// <param name="virtualPath"></param>
        /// <returns></returns>
        public static string mapPath(this string rootPath, string virtualPath)
        {
            if (rootPath.empty() || virtualPath.empty())
                return null;
            var mappedPath = rootPath.pathCombine(virtualPath).fullPath();
            if (mappedPath.starts(rootPath).isFalse())
            {
                @"[string][mapPath] the mappedPath did not contains the root path. mappedPath : {0} rootPath : {1} virtualPath: {2}".error(mappedPath, rootPath, virtualPath);
                return null;
            }
            return mappedPath;
        }

        /// <summary>
        /// Creates a temporary folder
        /// </summary>
        /// <param name="name"></param>
        /// <param name="appendRandomStringToFolderName"></param>
        /// <returns>
        /// If the path is valid then it will return the newly created folder appended with a random string.
        /// Otherwise will return the <code>PublicDI.config.O2TempDir</code> path.
        /// </returns>
        public static string temp_Folder(this string name, bool appendRandomStringToFolderName = true)
        {
            return name.temp_Dir(appendRandomStringToFolderName);
        }

        /// <summary>
        /// Creates a temporary folder
        /// </summary>
        /// <param name="name"></param>
        /// <param name="appendRandomStringToFolderName"></param>
        /// <returns>If the path is valid then it will return the newly created folder appended with a random string.
        /// Otherwise will return the <code>PublicDI.config.O2TempDir</code> path.
        /// </returns>
        public static string temp_Dir(this string name, bool appendRandomStringToFolderName = true)
        {
            return name.tempDir(appendRandomStringToFolderName);
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using System.Data;
using System.Data.SqlServerCe;
using System.Collections;
using System.Windows.Forms;
using DowUtils;

namespace Factotum
{
    // Entity class for one row of the SpecialCalParams table (SQL Server CE backing store).
    // Follows the project's IEntity pattern: in-memory fields mirror the mapped columns,
    // per-field error-message strings back the UI validation, and a static Changed event
    // notifies listeners after inserts/updates/deletes.
    public class ESpecialCalParam : IEntity
    {
        // Raised after a successful Save or Delete so bound lists/grids can refresh.
        public static event EventHandler<EntityChangedEventArgs> Changed;

        protected virtual void OnChanged(Guid? ID)
        {
            // Copy to a temporary variable to be thread-safe.
            EventHandler<EntityChangedEventArgs> temp = Changed;
            if (temp != null)
                temp(this, new EntityChangedEventArgs(ID));
        }

        // Mapped database columns
        // Use Guid?s for Primary Keys and foreign keys (whether they're nullable or not).
        // Use int?, decimal?, etc for numbers (whether they're nullable or not).
        // Strings, images, etc, are reference types already
        private Guid? ScpDBid;
        private string ScpName;
        private string ScpUnits;
        private short? ScpReportOrder;
        private bool ScpUsedInOutage;
        private bool ScpIsLclChg;
        private bool ScpIsActive;

        // Textbox limits
        private const int ScpNameCharLimit = 25;
        private const int ScpUnitsCharLimit = 15;
        private const int ScpReportOrderCharLimit = 6;

        // Field-specific error message strings (normally just needed for textbox data)
        private string ScpNameErrMsg;
        private string ScpUnitsErrMsg;
        private string ScpReportOrderErrMsg;
        private string ScpUsedInOutageErrMsg;
        private string ScpIsLclChgErrMsg;
        private string ScpIsActiveErrMsg;

        // Form level validation message
        private string ScpErrMsg;

        //--------------------------------------------------------
        // Field Properties
        //--------------------------------------------------------
        // Primary key accessor (read-only; assigned by the database on insert).
        public Guid? ID
        {
            get { return ScpDBid; }
        }
        // Empty strings are normalized to null via Util.NullifyEmpty on assignment.
        public string SpecialCalParamName
        {
            get { return ScpName; }
            set { ScpName = Util.NullifyEmpty(value); }
        }
        public string SpecialCalParamUnits
        {
            get { return ScpUnits; }
            set { ScpUnits = Util.NullifyEmpty(value); }
        }
        public short? SpecialCalParamReportOrder
        {
            get { return ScpReportOrder; }
            set { ScpReportOrder = value; }
        }
        public bool SpecialCalParamUsedInOutage
        {
            get { return ScpUsedInOutage; }
            set { ScpUsedInOutage = value; }
        }
        public bool SpecialCalParamIsLclChg
        {
            get { return ScpIsLclChg; }
            set { ScpIsLclChg = value; }
        }
        public bool SpecialCalParamIsActive
        {
            get { return ScpIsActive; }
            set { ScpIsActive = value; }
        }

        //-----------------------------------------------------------------
        // Field Level Error Messages.
        // Include one for every text column
        // In cases where we need to ensure data consistency, we may need
        // them for other types.
        //-----------------------------------------------------------------
        public string SpecialCalParamNameErrMsg
        {
            get { return ScpNameErrMsg; }
        }
        public string SpecialCalParamUnitsErrMsg
        {
            get { return ScpUnitsErrMsg; }
        }
        public string SpecialCalParamReportOrderErrMsg
        {
            get { return ScpReportOrderErrMsg; }
        }
        public string SpecialCalParamUsedInOutageErrMsg
        {
            get { return ScpUsedInOutageErrMsg; }
        }
        public string SpecialCalParamIsLclChgErrMsg
        {
            get { return ScpIsLclChgErrMsg; }
        }
        public string SpecialCalParamIsActiveErrMsg
        {
            get { return ScpIsActiveErrMsg; }
        }

        //--------------------------------------
        // Form level Error Message
        //--------------------------------------
        public string SpecialCalParamErrMsg
        {
            get { return ScpErrMsg; }
            set { ScpErrMsg = Util.NullifyEmpty(value); }
        }

        //--------------------------------------
        // Textbox Name Length Validation
        //--------------------------------------
        // Length checks return true for null (missing values are caught later by
        // RequiredFieldsFilled, not here).
        public bool SpecialCalParamNameLengthOk(string s)
        {
            if (s == null) return true;
            if (s.Length > ScpNameCharLimit)
            {
                ScpNameErrMsg = string.Format("SpecialCalParamNames cannot exceed {0} characters", ScpNameCharLimit);
                return false;
            }
            else
            {
                ScpNameErrMsg = null;
                return true;
            }
        }
        public bool SpecialCalParamUnitsLengthOk(string s)
        {
            if (s == null) return true;
            if (s.Length > ScpUnitsCharLimit)
            {
                ScpUnitsErrMsg = string.Format("SpecialCalParamUnitss cannot exceed {0} characters", ScpUnitsCharLimit);
                return false;
            }
            else
            {
                ScpUnitsErrMsg = null;
                return true;
            }
        }
        public bool SpecialCalParamReportOrderLengthOk(string s)
        {
            if (s == null) return true;
            if (s.Length > ScpReportOrderCharLimit)
            {
                ScpReportOrderErrMsg = string.Format("SpecialCalParamReportOrders cannot exceed {0} characters", ScpReportOrderCharLimit);
                return false;
            }
            else
            {
                ScpReportOrderErrMsg = null;
                return true;
            }
        }

        //--------------------------------------
        // Field-Specific Validation
        // sets and clears error messages
        //--------------------------------------
        public bool SpecialCalParamNameValid(string name)
        {
            bool existingIsInactive;
            if (!SpecialCalParamNameLengthOk(name)) return false;
            // KEEP, MODIFY OR REMOVE THIS AS REQUIRED
            // YOU MAY NEED THE NAME TO BE UNIQUE FOR A SPECIFIC PARENT, ETC..
            if (NameExists(name, (Guid?)ScpDBid, out existingIsInactive))
            {
                ScpNameErrMsg = existingIsInactive ?
                    "That SpecialCalParamName exists but its status has been set to inactive." :
                    "That SpecialCalParamName is already in use.";
                return false;
            }
            ScpNameErrMsg = null;
            return true;
        }
        public bool SpecialCalParamUnitsValid(string value)
        {
            if (!SpecialCalParamUnitsLengthOk(value)) return false;
            ScpUnitsErrMsg = null;
            return true;
        }
        // Accepts only strings that parse to a positive short.
        public bool SpecialCalParamReportOrderValid(string value)
        {
            short result;
            if (short.TryParse(value, out result) && result > 0)
            {
                ScpReportOrderErrMsg = null;
                return true;
            }
            ScpReportOrderErrMsg = string.Format("Please enter a positive number");
            return false;
        }
        public bool SpecialCalParamUsedInOutageValid(bool value)
        {
            // Add some real validation here if needed.
            ScpUsedInOutageErrMsg = null;
            return true;
        }
        public bool SpecialCalParamIsLclChgValid(bool value)
        {
            // Add some real validation here if needed.
            ScpIsLclChgErrMsg = null;
            return true;
        }
        public bool SpecialCalParamIsActiveValid(bool value)
        {
            // Add some real validation here if needed.
            ScpIsActiveErrMsg = null;
            return true;
        }

        //--------------------------------------
        // Constructors
        //--------------------------------------
        // Default constructor. Field defaults must be set here.
        // Any defaults set by the database will be overridden.
        public ESpecialCalParam()
        {
            this.ScpReportOrder = 0;
            this.ScpUsedInOutage = false;
            this.ScpIsLclChg = false;
            this.ScpIsActive = true;
        }
        // Constructor which loads itself from the supplied id.
        // If the id is null, this gives the same result as using the default constructor.
        public ESpecialCalParam(Guid? id)
            : this()
        {
            Load(id);
        }

        //--------------------------------------
        // Public Methods
        //--------------------------------------

        //----------------------------------------------------
        // Load the object from the database given a Guid?
        //----------------------------------------------------
        public void Load(Guid? id)
        {
            if (id == null) return;
            SqlCeCommand cmd = Globals.cnn.CreateCommand();
            SqlCeDataReader dr;
            cmd.CommandType = CommandType.Text;
            cmd.CommandText = @"Select ScpDBid, ScpName, ScpUnits, ScpReportOrder, ScpUsedInOutage, ScpIsLclChg, ScpIsActive from SpecialCalParams where ScpDBid = @p0";
            cmd.Parameters.Add(new SqlCeParameter("@p0", id));
            if (Globals.cnn.State != ConnectionState.Open) Globals.cnn.Open();
            dr = cmd.ExecuteReader();
            // The query should return one record.
            // If it doesn't return anything (no match) the object is not affected
            if (dr.Read())
            {
                // For all nullable values, replace dbNull with null
                // NOTE(review): the direct casts below would throw InvalidCastException on a
                // DBNull value; this relies on the columns never actually being null - confirm
                // against the table schema.
                ScpDBid = (Guid?)dr[0];
                ScpName = (string)dr[1];
                ScpUnits = (string)dr[2];
                ScpReportOrder = (short?)dr[3];
                ScpUsedInOutage = (bool)dr[4];
                ScpIsLclChg = (bool)dr[5];
                ScpIsActive = (bool)dr[6];
            }
            dr.Close();
        }

        //--------------------------------------
        // Save the current record if it's valid
        //--------------------------------------
        // Inserts when ID is null (fetching a new Guid and the next report order first),
        // otherwise updates the existing row. Returns the row's ID, or null if validation failed.
        public Guid? Save()
        {
            if (!Valid())
            {
                // Note: We're returning null if we fail,
                // so don't just assume you're going to get your id back
                // and set your id to the result of this function call.
                return null;
            }
            SqlCeCommand cmd = Globals.cnn.CreateCommand();
            cmd.CommandType = CommandType.Text;
            if (ID == null)
            {
                // we are inserting a new record
                // If this is not a master db, set the local change flag to true.
                if (!Globals.IsMasterDB) ScpIsLclChg = true;
                // first ask the database for a new Guid
                cmd.CommandText = "Select Newid()";
                if (Globals.cnn.State != ConnectionState.Open) Globals.cnn.Open();
                ScpDBid = (Guid?)(cmd.ExecuteScalar());
                // Get the next parameter report order in the sequence.
                ScpReportOrder = getNewReportOrder();
                // Replace any nulls with dbnull
                // NOTE(review): the comment above is not matched by code - null field values
                // are passed straight through to the SqlCeParameter constructor; confirm the
                // provider maps them as intended.
                cmd.Parameters.AddRange(new SqlCeParameter[] {
                    new SqlCeParameter("@p0", ScpDBid),
                    new SqlCeParameter("@p1", ScpName),
                    new SqlCeParameter("@p2", ScpUnits),
                    new SqlCeParameter("@p3", ScpReportOrder),
                    new SqlCeParameter("@p4", ScpUsedInOutage),
                    new SqlCeParameter("@p5", ScpIsLclChg),
                    new SqlCeParameter("@p6", ScpIsActive) });
                cmd.CommandText = @"Insert Into SpecialCalParams ( ScpDBid, ScpName, ScpUnits, ScpReportOrder, ScpUsedInOutage, ScpIsLclChg, ScpIsActive ) values (@p0,@p1,@p2,@p3,@p4,@p5,@p6)";
                if (Globals.cnn.State != ConnectionState.Open) Globals.cnn.Open();
                if (cmd.ExecuteNonQuery() != 1)
                {
                    throw new Exception("Unable to insert SpecialCalParams row");
                }
            }
            else
            {
                // we are updating an existing record
                // Replace any nulls with dbnull
                cmd.Parameters.AddRange(new SqlCeParameter[] {
                    new SqlCeParameter("@p0", ScpDBid),
                    new SqlCeParameter("@p1", ScpName),
                    new SqlCeParameter("@p2", ScpUnits),
                    new SqlCeParameter("@p3", ScpReportOrder),
                    new SqlCeParameter("@p4", ScpUsedInOutage),
                    new SqlCeParameter("@p5", ScpIsLclChg),
                    new SqlCeParameter("@p6", ScpIsActive)});
                cmd.CommandText = @"Update SpecialCalParams set ScpName = @p1, ScpUnits = @p2, ScpReportOrder = @p3, ScpUsedInOutage = @p4, ScpIsLclChg = @p5, ScpIsActive = @p6 Where ScpDBid = @p0";
                if (Globals.cnn.State != ConnectionState.Open) Globals.cnn.Open();
                if (cmd.ExecuteNonQuery() != 1)
                {
                    throw new Exception("Unable to update specialcalparams row");
                }
            }
            OnChanged(ID);
            return ID;
        }

        //--------------------------------------
        // Validate the current record
        //--------------------------------------
        // Make this public so that the UI can check validation itself
        // if it chooses to do so. This is also called by the Save function.
        public bool Valid()
        {
            // First check each field to see if it's valid from the UI perspective
            // NOTE(review): SpecialCalParamReportOrderValid is never invoked here, so the
            // "positive number" rule is only enforced by the UI - confirm this is intentional.
            if (!SpecialCalParamNameValid(SpecialCalParamName)) return false;
            if (!SpecialCalParamUnitsValid(SpecialCalParamUnits)) return false;
            if (!SpecialCalParamUsedInOutageValid(SpecialCalParamUsedInOutage)) return false;
            if (!SpecialCalParamIsLclChgValid(SpecialCalParamIsLclChg)) return false;
            if (!SpecialCalParamIsActiveValid(SpecialCalParamIsActive)) return false;
            // Check form to make sure all required fields have been filled in
            if (!RequiredFieldsFilled()) return false;
            // Check for incorrect field interactions...
            return true;
        }

        //--------------------------------------
        // Delete the current record
        //--------------------------------------
        // Refuses to delete when the parameter has values, was used in an outage, or was not
        // locally added on a non-master DB; optionally prompts the user first.
        public bool Delete(bool promptUser)
        {
            // If the current object doesn't reference a database record, there's nothing to do.
            if (ScpDBid == null)
            {
                SpecialCalParamErrMsg = "Unable to delete. Record not found.";
                return false;
            }
            if (HasValues())
            {
                SpecialCalParamErrMsg = "Unable to delete because this Parameter has Values assigned.";
                return false;
            }
            if (SpecialCalParamUsedInOutage)
            {
                SpecialCalParamErrMsg = "Unable to delete because this Parameter has been used in other outages.";
                return false;
            }
            if (!ScpIsLclChg && !Globals.IsMasterDB)
            {
                SpecialCalParamErrMsg = "Unable to delete because this Parameter was not added during this outage.\r\nYou may wish to inactivate instead.";
                return false;
            }
            DialogResult rslt = DialogResult.None;
            if (promptUser)
            {
                rslt = MessageBox.Show("Are you sure?", "Factotum: Deleting...",
                    MessageBoxButtons.OKCancel, MessageBoxIcon.Question, MessageBoxDefaultButton.Button1);
            }
            // If an error occurs when attempting to delete...
            // Use transactions??
            // Raise an event right before the deletion?
            if (!promptUser || rslt == DialogResult.OK)
            {
                SqlCeCommand cmd;
                int rowsAffected;
                // First update the report order for all sections after this one.
                // NOTE(review): the where clause "ScpDBid = @p0 and ScpReportOrder > @p1" can never
                // match another row (it names this row's own key), so no report orders are actually
                // renumbered - this looks like it should exclude the current row instead; verify.
                cmd = Globals.cnn.CreateCommand();
                cmd.CommandType = CommandType.Text;
                cmd.CommandText = @"Update SpecialCalParams set ScpReportOrder = ScpReportOrder - 1 where ScpDBid = @p0 and ScpReportOrder > @p1";
                cmd.Parameters.Add("@p0", ScpDBid);
                cmd.Parameters.Add("@p1", ScpReportOrder);
                if (Globals.cnn.State != ConnectionState.Open) Globals.cnn.Open();
                rowsAffected = cmd.ExecuteNonQuery();
                // Now perform the Delete operation
                cmd = Globals.cnn.CreateCommand();
                cmd.CommandType = CommandType.Text;
                cmd.CommandText = @"Delete from SpecialCalParams where ScpDBid = @p0";
                cmd.Parameters.Add("@p0", ScpDBid);
                if (Globals.cnn.State != ConnectionState.Open) Globals.cnn.Open();
                rowsAffected = cmd.ExecuteNonQuery();
                // Todo: figure out how I really want to do this.
                // Is there a problem with letting the database try to do cascading deletes?
                // How should the user be notified of the problem??
                if (rowsAffected < 1)
                {
                    SpecialCalParamErrMsg = "Unable to delete. Please try again later.";
                    return false;
                }
                else
                {
                    OnChanged(ID);
                    return true;
                }
            }
            else
            {
                SpecialCalParamErrMsg = null;
                return false;
            }
        }

        // Returns true if any SpecialCalValues row references this parameter.
        private bool HasValues()
        {
            SqlCeCommand cmd = Globals.cnn.CreateCommand();
            cmd.CommandText = @"Select ScvDBid from SpecialCalValues where ScvScpID = @p0";
            cmd.Parameters.Add("@p0", ScpDBid);
            if (Globals.cnn.State != ConnectionState.Open) Globals.cnn.Open();
            object result = cmd.ExecuteScalar();
            return result != null;
        }

        //--------------------------------------------------------------------
        // Static listing methods which return collections of specialcalparams
        //--------------------------------------------------------------------
        // This helper function builds the collection for you based on the flags you send it
        // I originally had a flag that would let you indicate inactive items by appending '(inactive)'
        // to the name. This was a bad idea, because sometimes the objects in this collection
        // will get modified and saved back to the database -- with the extra text appended to the name.
        public static ESpecialCalParamCollection ListByName(
            bool showinactive, bool addNoSelection)
        {
            ESpecialCalParam specialcalparam;
            ESpecialCalParamCollection specialcalparams = new ESpecialCalParamCollection();
            SqlCeCommand cmd = Globals.cnn.CreateCommand();
            cmd.CommandType = CommandType.Text;
            string qry = @"Select ScpDBid, ScpName, ScpUnits, ScpReportOrder, ScpUsedInOutage, ScpIsLclChg, ScpIsActive from SpecialCalParams";
            if (!showinactive) qry += " where ScpIsActive = 1";
            qry += " order by ScpName";
            cmd.CommandText = qry;
            if (addNoSelection)
            {
                // Insert a default item with name "<No Selection>"
                specialcalparam = new ESpecialCalParam();
                specialcalparam.ScpName = "<No Selection>";
                specialcalparams.Add(specialcalparam);
            }
            SqlCeDataReader dr;
            if (Globals.cnn.State != ConnectionState.Open) Globals.cnn.Open();
            dr = cmd.ExecuteReader();
            // Build new objects and add them to the collection
            // NOTE(review): the ESpecialCalParam(Guid?) constructor re-queries the row that the
            // reader already holds, so each row costs an extra round trip before the fields are
            // overwritten below - presumably acceptable for small tables; verify.
            while (dr.Read())
            {
                specialcalparam = new ESpecialCalParam((Guid?)dr[0]);
                specialcalparam.ScpName = (string)(dr[1]);
                specialcalparam.ScpUnits = (string)(dr[2]);
                specialcalparam.ScpReportOrder = (short?)(dr[3]);
                specialcalparam.ScpUsedInOutage = (bool)(dr[4]);
                specialcalparam.ScpIsLclChg = (bool)(dr[5]);
                specialcalparam.ScpIsActive = (bool)(dr[6]);
                specialcalparams.Add(specialcalparam);
            }
            // Finish up
            dr.Close();
            return specialcalparams;
        }

        // Get a Default data view with all columns that a user would likely want to see.
        // You can bind this view to a DataGridView, hide the columns you don't need, filter, etc.
        // I decided not to indicate inactive in the names of inactive items. The 'user'
        // can always show the inactive column if they wish.
        public static DataView GetDefaultDataView()
        {
            DataSet ds = new DataSet();
            DataView dv;
            SqlCeDataAdapter da = new SqlCeDataAdapter();
            SqlCeCommand cmd = Globals.cnn.CreateCommand();
            cmd.CommandType = CommandType.Text;
            // Changing the booleans to 'Yes' and 'No' eliminates the silly checkboxes and
            // makes the column sortable.
            // You'll likely want to modify this query further, joining in other tables, etc.
            string qry = @"Select ScpDBid as ID, ScpName as SpecialCalParamName, ScpUnits as SpecialCalParamUnits, ScpReportOrder as SpecialCalParamReportOrder, CASE WHEN ScpUsedInOutage = 0 THEN 'No' ELSE 'Yes' END as SpecialCalParamUsedInOutage, CASE WHEN ScpIsLclChg = 0 THEN 'No' ELSE 'Yes' END as SpecialCalParamIsLclChg, CASE WHEN ScpIsActive = 0 THEN 'No' ELSE 'Yes' END as SpecialCalParamIsActive from SpecialCalParams order by ScpReportOrder";
            cmd.CommandText = qry;
            da.SelectCommand = cmd;
            if (Globals.cnn.State != ConnectionState.Open) Globals.cnn.Open();
            da.Fill(ds);
            dv = new DataView(ds.Tables[0]);
            return dv;
        }

        //--------------------------------------
        // Private utilities
        //--------------------------------------
        // Check if the name exists for any records besides the current one
        // This is used to show an error when the user tabs away from the field.
        // We don't want to show an error if the user has left the field blank.
        // If it's a required field, we'll catch it when the user hits save.
        private bool NameExists(string name, Guid? id, out bool existingIsInactive)
        {
            existingIsInactive = false;
            if (Util.IsNullOrEmpty(name)) return false;
            SqlCeCommand cmd = Globals.cnn.CreateCommand();
            cmd.CommandType = CommandType.Text;
            cmd.Parameters.Add(new SqlCeParameter("@p1", name));
            if (id == null)
            {
                cmd.CommandText = "Select ScpIsActive from SpecialCalParams where ScpName = @p1";
            }
            else
            {
                // Exclude the record we're editing so its own name doesn't count as a duplicate.
                cmd.CommandText = "Select ScpIsActive from SpecialCalParams where ScpName = @p1 and ScpDBid != @p0";
                cmd.Parameters.Add(new SqlCeParameter("@p0", id));
            }
            if (Globals.cnn.State != ConnectionState.Open) Globals.cnn.Open();
            object val = cmd.ExecuteScalar();
            bool exists = (val != null);
            // The scalar is the existing row's ScpIsActive flag; inactive == !active.
            if (exists) existingIsInactive = !(bool)val;
            return exists;
        }

        // Returns Max(ScpReportOrder) + 1, or 0 when the table is empty.
        private short getNewReportOrder()
        {
            SqlCeCommand cmd = Globals.cnn.CreateCommand();
            cmd.CommandType = CommandType.Text;
            cmd.CommandText = "Select Max(ScpReportOrder) from SpecialCalParams";
            if (Globals.cnn.State != ConnectionState.Open) Globals.cnn.Open();
            object val = Util.NullForDbNull(cmd.ExecuteScalar());
            short newReportOrder = (short)(val == null ? 0 : Convert.ToUInt16(val) + 1);
            return newReportOrder;
        }

        // Check for required fields, setting the individual error messages
        private bool RequiredFieldsFilled()
        {
            bool allFilled = true;
            if (SpecialCalParamName == null)
            {
                ScpNameErrMsg = "A unique SpecialCalParam Name is required";
                allFilled = false;
            }
            else
            {
                ScpNameErrMsg = null;
            }
            if (SpecialCalParamUnits == null)
            {
                ScpUnitsErrMsg = "A SpecialCalParam Units is required";
                allFilled = false;
            }
            else
            {
                ScpUnitsErrMsg = null;
            }
            if (SpecialCalParamReportOrder == null)
            {
                ScpReportOrderErrMsg = "A SpecialCalParam ReportOrder is required";
                allFilled = false;
            }
            else
            {
                ScpReportOrderErrMsg = null;
            }
            return allFilled;
        }
    }

    //--------------------------------------
    // SpecialCalParam Collection class
    //--------------------------------------
    // Thin CollectionBase wrapper that raises Changed whenever items are added,
    // inserted, or removed.
    public class ESpecialCalParamCollection : CollectionBase
    {
        //this event is fired when the collection's items have changed
        public event EventHandler Changed;

        //this is the constructor of the collection.
        public ESpecialCalParamCollection()
        {
        }

        //the indexer of the collection
        public ESpecialCalParam this[int index]
        {
            get { return (ESpecialCalParam)this.List[index]; }
        }

        //this method fires the Changed event.
        protected virtual void OnChanged(EventArgs e)
        {
            if (Changed != null)
            {
                Changed(this, e);
            }
        }

        // Linear scan for an item with the given primary key; null IDs never match.
        public bool ContainsID(Guid? ID)
        {
            if (ID == null) return false;
            foreach (ESpecialCalParam specialcalparam in InnerList)
            {
                if (specialcalparam.ID == ID) return true;
            }
            return false;
        }

        //returns the index of an item in the collection
        public int IndexOf(ESpecialCalParam item)
        {
            return InnerList.IndexOf(item);
        }

        //adds an item to the collection
        public void Add(ESpecialCalParam item)
        {
            this.List.Add(item);
            OnChanged(EventArgs.Empty);
        }

        //inserts an item in the collection at a specified index
        public void Insert(int index, ESpecialCalParam item)
        {
            this.List.Insert(index, item);
            OnChanged(EventArgs.Empty);
        }

        //removes an item from the collection.
        public void Remove(ESpecialCalParam item)
        {
            this.List.Remove(item);
            OnChanged(EventArgs.Empty);
        }
    }
}
using System;

namespace droid.Runtime.Sampling {
  /// <summary>
  /// Classic Perlin gradient noise in 1, 2 and 3 dimensions.
  /// Derived from Ken Perlin's original C reference implementation:
  /// http://astronomy.swin.edu.au/~pbourke/texture/perlin/perlin.c
  /// http://astronomy.swin.edu.au/~pbourke/texture/perlin/perlin.h
  /// Fixes vs. the previous revision (all checked against the reference code):
  ///  - Normalize2/Normalize3 normalized the y component into x (x = y / s);
  ///    they now divide each component by the vector length.
  ///  - 3D Noise read _g3[b00 + bz1, 2] for both the y and z gradient
  ///    components of one corner; the y component is index 1.
  /// </summary>
  public class Perlin {
    const int _b = 0x100;  // gradient/permutation table size (256)
    const int _bm = 0xff;  // mask for wrapping indices into the table
    const int _n = 0x1000; // bias so (int) truncation acts like floor for inputs > -4096

    int[] _p = new int[_b + _b + 2];          // permutation table, doubled for overflow-free indexing
    float[,] _g3 = new float[_b + _b + 2, 3]; // 3D unit gradients
    float[,] _g2 = new float[_b + _b + 2, 2]; // 2D unit gradients
    float[] _g1 = new float[_b + _b + 2];     // 1D gradients

    // Cubic ease curve 3t^2 - 2t^3, smoothing interpolation weights.
    float s_curve(float t) { return t * t * (3.0F - 2.0F * t); }

    // Linear interpolation between a and b by t.
    float Lerp(float t, float a, float b) { return a + t * (b - a); }

    /// <summary>
    /// Splits a coordinate into the two bracketing lattice indices (masked into
    /// the table) and the fractional offsets from each lattice point.
    /// </summary>
    void Setup(float value, out int b0, out int b1, out float r0, out float r1) {
      var t = value + _n;
      b0 = (int)t & _bm;
      b1 = (b0 + 1) & _bm;
      r0 = t - (int)t;
      r1 = r0 - 1.0F;
    }

    // Dot product of offset (rx, ry) with gradient (x, y).
    float At2(float rx, float ry, float x, float y) { return rx * x + ry * y; }

    // Dot product of offset (rx, ry, rz) with gradient (x, y, z).
    float At3(float rx, float ry, float rz, float x, float y, float z) {
      return rx * x + ry * y + rz * z;
    }

    /// <summary>1D noise for the given coordinate.</summary>
    public float Noise(float arg) {
      float sx, u, v;
      this.Setup(arg, out var bx0, out var bx1, out var rx0, out var rx1);
      sx = this.s_curve(rx0);
      u = rx0 * this._g1[this._p[bx0]];
      v = rx1 * this._g1[this._p[bx1]];
      return this.Lerp(sx, u, v);
    }

    /// <summary>2D noise for the given coordinates.</summary>
    public float Noise(float x, float y) {
      int b00, b10, b01, b11;
      float sx, sy, a, b, u, v;
      int i, j;
      this.Setup(x, out var bx0, out var bx1, out var rx0, out var rx1);
      this.Setup(y, out var by0, out var by1, out var ry0, out var ry1);
      // Hash the four cell corners through the permutation table.
      i = this._p[bx0];
      j = this._p[bx1];
      b00 = this._p[i + by0];
      b10 = this._p[j + by0];
      b01 = this._p[i + by1];
      b11 = this._p[j + by1];
      sx = this.s_curve(rx0);
      sy = this.s_curve(ry0);
      u = this.At2(rx0, ry0, this._g2[b00, 0], this._g2[b00, 1]);
      v = this.At2(rx1, ry0, this._g2[b10, 0], this._g2[b10, 1]);
      a = this.Lerp(sx, u, v);
      u = this.At2(rx0, ry1, this._g2[b01, 0], this._g2[b01, 1]);
      v = this.At2(rx1, ry1, this._g2[b11, 0], this._g2[b11, 1]);
      b = this.Lerp(sx, u, v);
      return this.Lerp(sy, a, b);
    }

    /// <summary>3D noise for the given coordinates.</summary>
    public float Noise(float x, float y, float z) {
      int b00, b10, b01, b11;
      float sy, sz, a, b, c, d, t, u, v;
      int i, j;
      this.Setup(x, out var bx0, out var bx1, out var rx0, out var rx1);
      this.Setup(y, out var by0, out var by1, out var ry0, out var ry1);
      this.Setup(z, out var bz0, out var bz1, out var rz0, out var rz1);
      i = this._p[bx0];
      j = this._p[bx1];
      b00 = this._p[i + by0];
      b10 = this._p[j + by0];
      b01 = this._p[i + by1];
      b11 = this._p[j + by1];
      t = this.s_curve(rx0);
      sy = this.s_curve(ry0);
      sz = this.s_curve(rz0);
      // Front face (z = bz0).
      u = this.At3(rx0, ry0, rz0, this._g3[b00 + bz0, 0], this._g3[b00 + bz0, 1], this._g3[b00 + bz0, 2]);
      v = this.At3(rx1, ry0, rz0, this._g3[b10 + bz0, 0], this._g3[b10 + bz0, 1], this._g3[b10 + bz0, 2]);
      a = this.Lerp(t, u, v);
      u = this.At3(rx0, ry1, rz0, this._g3[b01 + bz0, 0], this._g3[b01 + bz0, 1], this._g3[b01 + bz0, 2]);
      v = this.At3(rx1, ry1, rz0, this._g3[b11 + bz0, 0], this._g3[b11 + bz0, 1], this._g3[b11 + bz0, 2]);
      b = this.Lerp(t, u, v);
      c = this.Lerp(sy, a, b);
      // Back face (z = bz1).
      // FIX: the y gradient component is index 1 (was _g3[b00 + bz1, 2] twice).
      u = this.At3(rx0, ry0, rz1, this._g3[b00 + bz1, 0], this._g3[b00 + bz1, 1], this._g3[b00 + bz1, 2]);
      v = this.At3(rx1, ry0, rz1, this._g3[b10 + bz1, 0], this._g3[b10 + bz1, 1], this._g3[b10 + bz1, 2]);
      a = this.Lerp(t, u, v);
      u = this.At3(rx0, ry1, rz1, this._g3[b01 + bz1, 0], this._g3[b01 + bz1, 1], this._g3[b01 + bz1, 2]);
      v = this.At3(rx1, ry1, rz1, this._g3[b11 + bz1, 0], this._g3[b11 + bz1, 1], this._g3[b11 + bz1, 2]);
      b = this.Lerp(t, u, v);
      d = this.Lerp(sy, a, b);
      return this.Lerp(sz, c, d);
    }

    // Scales (x, y) to unit length.
    // FIX: previously assigned x = y / s, leaving gradients non-normalized.
    static void Normalize2(ref float x, ref float y) {
      float s;
      s = (float)Math.Sqrt(x * x + y * y);
      x = x / s;
      y = y / s;
    }

    // Scales (x, y, z) to unit length.
    // FIX: previously assigned x = y / s, leaving gradients non-normalized.
    void Normalize3(ref float x, ref float y, ref float z) {
      float s;
      s = (float)Math.Sqrt(x * x + y * y + z * z);
      x = x / s;
      y = y / s;
      z = z / s;
    }

    /// <summary>Creates a noise generator with the default seed (42).</summary>
    public Perlin() { this.SetSeed(42); }

    /// <summary>
    /// (Re)initializes the permutation table and random unit gradients from
    /// the given seed; identical seeds yield identical noise fields.
    /// </summary>
    public void SetSeed(int seed) {
      int i, j, k;
      var rnd = new Random(seed);
      // Fill identity permutation and random gradients in [-1, 1).
      for (i = 0; i < _b; i++) {
        this._p[i] = i;
        this._g1[i] = (float)(rnd.Next(_b + _b) - _b) / _b;
        for (j = 0; j < 2; j++) {
          this._g2[i, j] = (float)(rnd.Next(_b + _b) - _b) / _b;
        }

        Normalize2(ref this._g2[i, 0], ref this._g2[i, 1]);
        for (j = 0; j < 3; j++) {
          this._g3[i, j] = (float)(rnd.Next(_b + _b) - _b) / _b;
        }

        this.Normalize3(ref this._g3[i, 0], ref this._g3[i, 1], ref this._g3[i, 2]);
      }

      // Fisher-Yates style shuffle of the permutation table.
      while (--i != 0) {
        k = this._p[i];
        this._p[i] = this._p[j = rnd.Next(_b)];
        this._p[j] = k;
      }

      // Duplicate the tables so index arithmetic never needs a modulo.
      for (i = 0; i < _b + 2; i++) {
        this._p[_b + i] = this._p[i];
        this._g1[_b + i] = this._g1[i];
        for (j = 0; j < 2; j++) {
          this._g2[_b + i, j] = this._g2[i, j];
        }

        for (j = 0; j < 3; j++) {
          this._g3[_b + i, j] = this._g3[i, j];
        }
      }
    }
  }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Xml;

namespace System.Runtime.Serialization.Xml.Canonicalization.Tests
{
    // Tracks in-scope XML namespace declarations while writing Exclusive XML
    // Canonicalization (exc-c14n) output. _namespaceContext is a stack of
    // per-element frames: a null entry is the sentinel separating one element's
    // declarations from its ancestors'. Entries are pooled to avoid allocation.
    internal sealed class ExclusiveCanonicalNamespaceManager
    {
        private const int MaxPoolSize = 32;
        // Scratch list used by Render to sort this element's rendered declarations.
        private List<NamespaceEntry> _localNamespacesToRender = new List<NamespaceEntry>();
        // Stack of namespace declarations; null elements mark element-frame boundaries.
        private List<NamespaceEntry> _namespaceContext = new List<NamespaceEntry>();
        private Pool<NamespaceEntry> _pool = new Pool<NamespaceEntry>(MaxPoolSize);

        public ExclusiveCanonicalNamespaceManager()
        {
            Reset();
        }

        // Pushes a declaration for the current element unless it merely repeats
        // what is already in scope (exc-c14n suppresses redundant declarations).
        public void AddLocalNamespaceIfNotRedundant(string prefix, string namespaceUri)
        {
            if (IsNonRedundantNamespaceDeclaration(prefix, namespaceUri))
            {
                _namespaceContext.Add(CreateNamespaceEntry(prefix, namespaceUri));
            }
        }

        // Copies an ancestor-frame entry into a fresh (pooled) entry already
        // marked Rendered, so it can be re-declared in the current frame.
        private NamespaceEntry CloneNamespaceEntryToRender(NamespaceEntry ne)
        {
            NamespaceEntry entry = _pool.Take();
            if (entry == null)
            {
                entry = new NamespaceEntry();
            }
            entry.CopyAndSetToRender(ne);
            return entry;
        }

        private NamespaceEntry CreateNamespaceEntry(string prefix, string namespaceUri)
        {
            return CreateNamespaceEntry(prefix, namespaceUri, false);
        }

        // Takes an entry from the pool (or allocates) and initializes it.
        private NamespaceEntry CreateNamespaceEntry(string prefix, string namespaceUri, bool render)
        {
            NamespaceEntry entry = _pool.Take();
            if (entry == null)
            {
                entry = new NamespaceEntry();
            }
            entry.Init(prefix, namespaceUri, render);
            return entry;
        }

        // Starts a new element frame by pushing the null sentinel.
        public void EnterElementContext()
        {
            _namespaceContext.Add(null);
        }

        // Pops the current element frame: removes entries (returning them to the
        // pool) until the frame's null sentinel has been removed as well.
        public void ExitElementContext()
        {
            int count = _namespaceContext.Count;
            for (int i = count - 1; i >= 0; i--)
            {
                NamespaceEntry ne = _namespaceContext[i];
                _namespaceContext.RemoveAt(i);
                if (ne != null)
                {
                    ne.Clear();
                    _pool.Return(ne);
                }
                else
                {
                    break;
                }
            }
        }

        // A declaration is redundant if the nearest in-scope binding of the same
        // prefix already maps to the same URI, or if it is the implicit empty
        // default namespace / the built-in xml prefix.
        private bool IsNonRedundantNamespaceDeclaration(string prefix, string namespaceUri)
        {
            for (int i = _namespaceContext.Count - 1; i >= 0; i--)
            {
                NamespaceEntry ne = _namespaceContext[i];
                if (ne != null && ne.Prefix == prefix)
                {
                    return ne.NamespaceUri != namespaceUri;
                }
            }
            return !C14nUtil.IsEmptyDefaultNamespaceDeclaration(prefix, namespaceUri) &&
                !C14nUtil.IsXmlPrefixDeclaration(prefix, namespaceUri);
        }

        // Returns the nearest in-scope URI bound to the prefix, or null.
        public string LookupNamespace(string prefix)
        {
            for (int i = _namespaceContext.Count - 1; i >= 0; i--)
            {
                NamespaceEntry n = _namespaceContext[i];
                if (n != null && n.Prefix == prefix)
                {
                    return n.NamespaceUri;
                }
            }
            return null;
        }

        // Returns an in-scope prefix bound to the given URI ("ne"), or null.
        // For attributes the empty (default) prefix is unusable. A candidate is
        // rejected if a nearer declaration rebinds the same prefix to another URI.
        public string LookupPrefix(string ne, bool isForAttribute)
        {
            for (int i = _namespaceContext.Count - 1; i >= 0; i--)
            {
                NamespaceEntry n = _namespaceContext[i];
                if (n != null && n.NamespaceUri == ne &&
                    (!isForAttribute || n.Prefix.Length > 0))
                {
                    string prefix = n.Prefix;
                    for (int j = i + 1; j < _namespaceContext.Count; j++)
                    {
                        NamespaceEntry m = _namespaceContext[j];
                        if (m != null && m.Prefix == prefix)
                        {
                            // redefined later
                            return null;
                        }
                    }
                    return prefix;
                }
            }
            return null;
        }

        // Marks a prefix from the InclusiveNamespaces PrefixList for rendering;
        // failure to resolve it is tolerated (no exception).
        public void MarkToRenderForInclusivePrefix(string prefix, bool searchOuterContext,
            IAncestralNamespaceContextProvider context)
        {
            MarkToRender(prefix, searchOuterContext, context, true);
        }

        // Marks a visibly-utilized prefix for rendering; an unresolvable prefix
        // is a canonicalization error and throws.
        public void MarkToRenderForVisiblyUsedPrefix(string prefix, bool searchOuterContext,
            IAncestralNamespaceContextProvider context)
        {
            if (!MarkToRender(prefix, searchOuterContext, context, false))
            {
                string nodeName = context != null ? context.CurrentNodeName : null;
                throw new XmlException(string.Format("Unable to find prefix: {0}, {1}", prefix, nodeName));
            }
        }

        // Core marking logic. Searches the context stack (current frame first,
        // then ancestor frames unless isInclusivePrefix limits the search) for
        // the prefix's binding and flags it to be emitted with this element.
        // Returns false only when a visibly-used, non-empty prefix could not be
        // resolved anywhere.
        private bool MarkToRender(string prefix, bool searchOuterContext,
            IAncestralNamespaceContextProvider context, bool isInclusivePrefix)
        {
            if (prefix == "xml")
            {
                // The xml prefix is built in and never rendered.
                return true;
            }

            bool currentFrame = true;
            for (int i = _namespaceContext.Count - 1; i >= 0; i--)
            {
                NamespaceEntry ne = _namespaceContext[i];
                if (ne == null)
                {
                    // Frame boundary: inclusive prefixes only look at the current
                    // element's own declarations.
                    if (isInclusivePrefix)
                    {
                        break;
                    }
                    currentFrame = false;
                }
                else if (ne.Prefix == prefix)
                {
                    if (ne.Rendered)
                    {
                        return true;
                    }
                    bool shouldRender;
                    if (prefix.Length > 0 || ne.NamespaceUri.Length > 0)
                    {
                        shouldRender = true;
                    }
                    else
                    {
                        // xmlns="" only needs rendering if a non-empty default
                        // namespace was rendered higher up and must be undone.
                        NamespaceEntry match = null;
                        for (int j = i - 1; j >= 0; j--)
                        {
                            NamespaceEntry p = _namespaceContext[j];
                            if (p != null && p.Rendered && p.Prefix.Length == 0)
                            {
                                match = p;
                                break;
                            }
                        }
                        shouldRender = match != null && match.NamespaceUri.Length > 0;
                    }
                    if (shouldRender)
                    {
                        if (currentFrame)
                        {
                            ne.Rendered = true;
                        }
                        else
                        {
                            // Found in an ancestor frame: re-declare it locally.
                            _namespaceContext.Add(CloneNamespaceEntryToRender(ne));
                        }
                    }
                    return true;
                }
            }

            if (searchOuterContext)
            {
                // Not in our stack; ask the surrounding document context.
                string namespaceUri;
                if (context != null)
                {
                    namespaceUri = context.LookupNamespace(prefix);
                }
                else
                {
                    namespaceUri = null;
                }
                if (namespaceUri != null && namespaceUri.Length > 0)
                {
                    _namespaceContext.Add(CreateNamespaceEntry(prefix, namespaceUri, true));
                    return true;
                }
                else
                {
                    return prefix.Length == 0 || isInclusivePrefix;
                }
            }
            return true;
        }

        // Emits this element's rendered declarations, sorted by prefix (ordinal)
        // as exc-c14n requires, in the form: xmlns[:prefix]="uri".
        public void Render(CanonicalEncoder encoder)
        {
            for (int i = _namespaceContext.Count - 1; i >= 0; i--)
            {
                NamespaceEntry ne = _namespaceContext[i];
                if (ne == null)
                {
                    break;
                }
                else if (ne.Rendered)
                {
                    _localNamespacesToRender.Add(ne);
                }
            }
            if (_localNamespacesToRender.Count == 0)
            {
                return;
            }
            _localNamespacesToRender.Sort(NamespaceComparer.Instance);
            for (int i = 0; i < _localNamespacesToRender.Count; i++)
            {
                NamespaceEntry ne = _localNamespacesToRender[i];
                encoder.Encode(" xmlns");
                if (ne.Prefix != null && ne.Prefix.Length > 0)
                {
                    encoder.Encode(':');
                    encoder.Encode(ne.Prefix);
                }
                encoder.Encode("=\"");
                encoder.EncodeWithTranslation(ne.NamespaceUri, CanonicalEncoder.XmlStringType.AttributeValue);
                encoder.Encode('\"');
            }
            _localNamespacesToRender.Clear();
        }

        // Clears all state, returning every pooled entry; safe to call repeatedly.
        public void Reset()
        {
            _localNamespacesToRender.Clear();
            int count = _namespaceContext.Count;
            for (int i = count - 1; i >= 0; i--)
            {
                NamespaceEntry ne = _namespaceContext[i];
                _namespaceContext.RemoveAt(i);
                if (ne != null)
                {
                    ne.Clear();
                    _pool.Return(ne);
                }
            }
        }

        // this is a class instead of a struct due to the sorting requirement: objects of this type are pooled
        private class NamespaceEntry
        {
            private string _prefix;
            private string _namespaceUri;
            // True once this declaration has been (or will be) emitted for its frame.
            private bool _rendered;

            public NamespaceEntry()
            {
            }

            public string Prefix
            {
                get { return _prefix; }
            }

            public string NamespaceUri
            {
                get { return _namespaceUri; }
            }

            public bool Rendered
            {
                get { return _rendered; }
                set { _rendered = value; }
            }

            public void Clear()
            {
                _prefix = null;
                _namespaceUri = null;
                _rendered = false;
            }

            public void CopyAndSetToRender(NamespaceEntry src)
            {
                Init(src._prefix, src._namespaceUri, true);
            }

            public void Init(string prefix, string namespaceUri, bool rendered)
            {
                _prefix = prefix;
                _namespaceUri = namespaceUri;
                _rendered = rendered;
            }
        }

        // Orders entries by prefix with an ordinal comparison (c14n sort order).
        private class NamespaceComparer : IComparer<NamespaceEntry>
        {
            private static NamespaceComparer s_instance = new NamespaceComparer();

            private NamespaceComparer()
            {
            }

            public static NamespaceComparer Instance
            {
                get { return s_instance; }
            }

            public int Compare(NamespaceEntry x, NamespaceEntry y)
            {
                return string.Compare(x.Prefix, y.Prefix, StringComparison.Ordinal);
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using Xunit;

namespace System.Numerics.Tests
{
    /// <summary>
    /// xUnit tests for BigInteger.ModPow. Inputs are encoded as reverse-polish operation
    /// strings ending in "tModPow" and evaluated by StackCalc against both the framework
    /// implementation and the MyBigIntImp reference implementation. Judging by the
    /// zero-modulus and zero-base cases below, the operand order in the opstrings is
    /// modulus, exponent, base — the actual interpretation lives in StackCalc (verify there).
    /// </summary>
    public class modpowTest
    {
        private static int s_samples = 10;                // iterations per randomized scenario
        private static Random s_random = new Random(100); // fixed seed => reproducible runs

        [Fact]
        public static void ModPowValidSmallNumbers()
        {
            BigInteger result;
            // NOTE(review): 'b' and 'result' are never used afterwards; this TryParse looks
            // like leftover scaffolding.
            bool b = BigInteger.TryParse("22", out result);

            // ModPow Method - with small numbers - valid
            // NOTE(review): the loop bounds were apparently narrowed at some point (see the
            // "//-2" / "//2" remnants), so only base=1, exponent=0..1, modulus=1 is exercised.
            for (int i = 1; i <= 1; i++)//-2
            {
                for (int j = 0; j <= 1; j++)//2
                {
                    for (int k = 1; k <= 1; k++)
                    {
                        if (k != 0)
                        {
                            VerifyModPowString(k.ToString() + " " + j.ToString() + " " + i.ToString() + " tModPow");
                        }
                    }
                }
            }
        }

        [Fact]
        public static void ModPowNegative()
        {
            byte[] tempByteArray1;
            byte[] tempByteArray2;
            byte[] tempByteArray3;

            // ModPow Method - with small numbers - invalid - zero modulus
            for (int i = -2; i <= 2; i++)
            {
                for (int j = 0; j <= 2; j++)
                {
                    Assert.Throws<DivideByZeroException>(() =>
                    {
                        VerifyModPowString(BigInteger.Zero.ToString() + " " + j.ToString() + " " + i.ToString() + " tModPow");
                    });
                }
            }

            // ModPow Method - with small numbers - invalid - negative exponent
            for (int i = -2; i <= 2; i++)
            {
                for (int j = -2; j <= -1; j++)
                {
                    for (int k = -2; k <= 2; k++)
                    {
                        if (k != 0)
                        {
                            Assert.Throws<ArgumentOutOfRangeException>(() =>
                            {
                                VerifyModPowString(k.ToString() + " " + j.ToString() + " " + i.ToString() + " tModPow");
                            });
                        }
                    }
                }
            }

            // ModPow Method - Negative Exponent
            for (int i = 0; i < s_samples; i++)
            {
                tempByteArray1 = GetRandomByteArray(s_random);
                tempByteArray2 = GetRandomNegByteArray(s_random, 2);
                tempByteArray3 = GetRandomByteArray(s_random);
                Assert.Throws<ArgumentOutOfRangeException>(() =>
                {
                    VerifyModPowString(Print(tempByteArray3) + Print(tempByteArray2) + Print(tempByteArray1) + "tModPow");
                });
            }

            // ModPow Method - Zero Modulus
            for (int i = 0; i < s_samples; i++)
            {
                tempByteArray1 = GetRandomByteArray(s_random);
                tempByteArray2 = GetRandomPosByteArray(s_random, 1);
                Assert.Throws<DivideByZeroException>(() =>
                {
                    VerifyModPowString(BigInteger.Zero.ToString() + " " + Print(tempByteArray2) + Print(tempByteArray1) + "tModPow");
                });
            }
        }

        [Fact]
        public static void ModPow3SmallInt()
        {
            byte[] tempByteArray1;
            byte[] tempByteArray2;
            byte[] tempByteArray3;

            // ModPow Method - Three Small BigIntegers
            for (int i = 0; i < s_samples; i++)
            {
                tempByteArray1 = GetRandomByteArray(s_random, 2);
                tempByteArray2 = GetRandomPosByteArray(s_random, 2);
                tempByteArray3 = GetRandomByteArray(s_random, 2);
                VerifyModPowString(Print(tempByteArray3) + Print(tempByteArray2) + Print(tempByteArray1) + "tModPow");
            }
        }

        [Fact]
        public static void ModPow1Large2SmallInt()
        {
            byte[] tempByteArray1;
            byte[] tempByteArray2;
            byte[] tempByteArray3;

            // ModPow Method - One large and two small BigIntegers
            // (each iteration rotates which operand is the large one)
            for (int i = 0; i < s_samples; i++)
            {
                tempByteArray1 = GetRandomByteArray(s_random, 2);
                tempByteArray2 = GetRandomPosByteArray(s_random);
                tempByteArray3 = GetRandomByteArray(s_random, 2);
                VerifyModPowString(Print(tempByteArray3) + Print(tempByteArray2) + Print(tempByteArray1) + "tModPow");

                tempByteArray1 = GetRandomByteArray(s_random);
                tempByteArray2 = GetRandomPosByteArray(s_random, 2);
                tempByteArray3 = GetRandomByteArray(s_random, 2);
                VerifyModPowString(Print(tempByteArray3) + Print(tempByteArray2) + Print(tempByteArray1) + "tModPow");

                tempByteArray1 = GetRandomByteArray(s_random, 2);
                tempByteArray2 = GetRandomPosByteArray(s_random, 1);
                tempByteArray3 = GetRandomByteArray(s_random);
                VerifyModPowString(Print(tempByteArray3) + Print(tempByteArray2) + Print(tempByteArray1) + "tModPow");
            }
        }

        [Fact]
        public static void ModPow1Large2SmallInt_Threshold()
        {
            // Again, with lower threshold
            BigIntTools.Utils.RunWithFakeThreshold("ReducerThreshold", 8, ModPow1Large2SmallInt);
        }

        [Fact]
        public static void ModPow2Large1SmallInt()
        {
            byte[] tempByteArray1;
            byte[] tempByteArray2;
            byte[] tempByteArray3;

            // ModPow Method - Two large and one small BigIntegers
            for (int i = 0; i < s_samples; i++)
            {
                tempByteArray1 = GetRandomByteArray(s_random);
                tempByteArray2 = GetRandomPosByteArray(s_random);
                tempByteArray3 = GetRandomByteArray(s_random, 2);
                VerifyModPowString(Print(tempByteArray3) + Print(tempByteArray2) + Print(tempByteArray1) + "tModPow");
            }
        }

        [Fact]
        public static void ModPow2Large1SmallInt_Threshold()
        {
            // Again, with lower threshold
            BigIntTools.Utils.RunWithFakeThreshold("ReducerThreshold", 8, ModPow2Large1SmallInt);
        }

        [Fact]
        [OuterLoop]
        public static void ModPow3LargeInt()
        {
            byte[] tempByteArray1;
            byte[] tempByteArray2;
            byte[] tempByteArray3;

            // ModPow Method - Three large BigIntegers
            for (int i = 0; i < s_samples; i++)
            {
                tempByteArray1 = GetRandomByteArray(s_random);
                tempByteArray2 = GetRandomPosByteArray(s_random);
                tempByteArray3 = GetRandomByteArray(s_random);
                VerifyModPowString(Print(tempByteArray3) + Print(tempByteArray2) + Print(tempByteArray1) + "tModPow");
            }
        }

        [Fact]
        [OuterLoop]
        public static void ModPow3LargeInt_Threshold()
        {
            // Again, with lower threshold
            BigIntTools.Utils.RunWithFakeThreshold("ReducerThreshold", 8, ModPow3LargeInt);
        }

        [Fact]
        public static void ModPow0Power()
        {
            byte[] tempByteArray1;
            byte[] tempByteArray2;

            // ModPow Method - zero power (all small/large modulus-base size combinations)
            for (int i = 0; i < s_samples; i++)
            {
                tempByteArray1 = GetRandomByteArray(s_random, 2);
                tempByteArray2 = GetRandomByteArray(s_random, 2);
                VerifyModPowString(Print(tempByteArray2) + BigInteger.Zero.ToString() + " " + Print(tempByteArray1) + "tModPow");

                tempByteArray1 = GetRandomByteArray(s_random, 2);
                tempByteArray2 = GetRandomByteArray(s_random);
                VerifyModPowString(Print(tempByteArray2) + BigInteger.Zero.ToString() + " " + Print(tempByteArray1) + "tModPow");

                tempByteArray1 = GetRandomByteArray(s_random);
                tempByteArray2 = GetRandomByteArray(s_random, 2);
                VerifyModPowString(Print(tempByteArray2) + BigInteger.Zero.ToString() + " " + Print(tempByteArray1) + "tModPow");

                tempByteArray1 = GetRandomByteArray(s_random);
                tempByteArray2 = GetRandomByteArray(s_random);
                VerifyModPowString(Print(tempByteArray2) + BigInteger.Zero.ToString() + " " + Print(tempByteArray1) + "tModPow");
            }
        }

        [Fact]
        public static void ModPow0Base()
        {
            byte[] tempByteArray1;
            byte[] tempByteArray2;

            // ModPow Method - zero base (exponent must be non-negative, hence the Pos arrays)
            for (int i = 0; i < s_samples; i++)
            {
                tempByteArray1 = GetRandomPosByteArray(s_random, 2);
                tempByteArray2 = GetRandomByteArray(s_random, 2);
                VerifyModPowString(Print(tempByteArray2) + Print(tempByteArray1) + BigInteger.Zero.ToString() + " tModPow");

                tempByteArray1 = GetRandomPosByteArray(s_random, 2);
                tempByteArray2 = GetRandomByteArray(s_random);
                VerifyModPowString(Print(tempByteArray2) + Print(tempByteArray1) + BigInteger.Zero.ToString() + " tModPow");

                tempByteArray1 = GetRandomPosByteArray(s_random);
                tempByteArray2 = GetRandomByteArray(s_random, 2);
                VerifyModPowString(Print(tempByteArray2) + Print(tempByteArray1) + BigInteger.Zero.ToString() + " tModPow");

                tempByteArray1 = GetRandomPosByteArray(s_random);
                tempByteArray2 = GetRandomByteArray(s_random);
                VerifyModPowString(Print(tempByteArray2) + Print(tempByteArray1) + BigInteger.Zero.ToString() + " tModPow");
            }
        }

        [Fact]
        public static void ModPowAxiom()
        {
            byte[] tempByteArray1;
            byte[] tempByteArray2;
            byte[] tempByteArray3;

            // Axiom (x^y)%z = modpow(x,y,z)
            for (int i = 0; i < s_samples; i++)
            {
                tempByteArray1 = GetRandomByteArray(s_random, 2);
                tempByteArray2 = GetRandomPosByteArray(s_random, 1);
                tempByteArray3 = GetRandomByteArray(s_random);
                VerifyIdentityString(
                    Print(tempByteArray3) + Print(tempByteArray2) + Print(tempByteArray1) + "tModPow",
                    Print(tempByteArray3) + Print(tempByteArray2) + Print(tempByteArray1) + "bPow" + " bRemainder"
                );
            }
        }

        [Fact]
        public static void ModPowBoundary()
        {
            // Check interesting cases for boundary conditions
            // You'll either be shifting a 0 or 1 across the boundary

            // 32 bit boundary  n2=0
            VerifyModPowString(Math.Pow(2, 35) + " " + Math.Pow(2, 32) + " 2 tModPow");

            // 32 bit boundary  n1=0 n2=1
            VerifyModPowString(Math.Pow(2, 35) + " " + Math.Pow(2, 33) + " 2 tModPow");
        }

        // Evaluates the opstring step by step and compares the framework stack against the
        // reference-implementation stack after every operation.
        private static void VerifyModPowString(string opstring)
        {
            StackCalc sc = new StackCalc(opstring);
            while (sc.DoNextOperation())
            {
                Assert.Equal(sc.snCalc.Peek().ToString(), sc.myCalc.Peek().ToString());
            }
        }

        // Verifies that two different opstrings reduce to the same final value.
        // NOTE(review): DoNextOperation() is invoked both in the loop condition and in the
        // body, so steps are consumed two at a time; only the final results are compared.
        private static void VerifyIdentityString(string opstring1, string opstring2)
        {
            StackCalc sc1 = new StackCalc(opstring1);
            while (sc1.DoNextOperation())
            {
                //Run the full calculation
                sc1.DoNextOperation();
            }

            StackCalc sc2 = new StackCalc(opstring2);
            while (sc2.DoNextOperation())
            {
                //Run the full calculation
                sc2.DoNextOperation();
            }

            Assert.Equal(sc1.snCalc.Peek().ToString(), sc2.snCalc.Peek().ToString());
        }

        // Random non-zero byte array of random length in [1, 100).
        private static byte[] GetRandomByteArray(Random random)
        {
            return GetRandomByteArray(random, random.Next(1, 100));
        }

        // Random non-negative byte array of random length in [1, 100).
        private static byte[] GetRandomPosByteArray(Random random)
        {
            return GetRandomPosByteArray(random, random.Next(1, 100));
        }

        private static byte[] GetRandomByteArray(Random random, int size)
        {
            return MyBigIntImp.GetNonZeroRandomByteArray(random, size);
        }

        // Clears the sign bit of the most significant byte, so the little-endian value is >= 0.
        private static byte[] GetRandomPosByteArray(Random random, int size)
        {
            byte[] value = new byte[size];

            for (int i = 0; i < value.Length; ++i)
            {
                value[i] = (byte)random.Next(0, 256);
            }
            value[value.Length - 1] &= 0x7F;

            return value;
        }

        // Sets the sign bit of the most significant byte, so the little-endian value is < 0.
        private static byte[] GetRandomNegByteArray(Random random, int size)
        {
            byte[] value = new byte[size];

            for (int i = 0; i < value.Length; ++i)
            {
                value[i] = (byte)random.Next(0, 256);
            }
            value[value.Length - 1] |= 0x80;

            return value;
        }

        private static String Print(byte[] bytes)
        {
            return MyBigIntImp.Print(bytes);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Linq;
using Xunit;

namespace System.IO.Tests
{
    /// <summary>
    /// Tests for Directory.CreateDirectory. The Create() factory is virtual so derived
    /// suites (e.g. DirectoryInfo-based ones) can re-run the same cases through a
    /// different API surface.
    /// </summary>
    public class Directory_CreateDirectory : FileSystemTest
    {
        #region Utilities

        // Entry point under test; overridable by derived test classes.
        public virtual DirectoryInfo Create(string path)
        {
            return Directory.CreateDirectory(path);
        }

        #endregion

        #region UniversalTests

        [Fact]
        public void NullAsPath_ThrowsArgumentNullException()
        {
            Assert.Throws<ArgumentNullException>(() => Create(null));
        }

        [Fact]
        public void EmptyAsPath_ThrowsArgumentException()
        {
            Assert.Throws<ArgumentException>(() => Create(string.Empty));
        }

        [Fact]
        public void PathWithInvalidCharactersAsPath_ThrowsArgumentException()
        {
            var paths = IOInputs.GetPathsWithInvalidCharacters();
            Assert.All(paths, (path) =>
            {
                // The bare extended prefix and extended-prefixed paths get different
                // exception types than ordinary invalid characters.
                if (path.Equals(@"\\?\"))
                    Assert.Throws<IOException>(() => Create(path));
                else if (path.Contains(@"\\?\"))
                    Assert.Throws<DirectoryNotFoundException>(() => Create(path));
                else
                    Assert.Throws<ArgumentException>(() => Create(path));
            });
        }

        [Fact]
        public void PathAlreadyExistsAsFile()
        {
            string path = GetTestFilePath();
            File.Create(path).Dispose();

            // A file at the target path must fail regardless of trailing-slash form.
            Assert.Throws<IOException>(() => Create(path));
            Assert.Throws<IOException>(() => Create(IOServices.AddTrailingSlashIfNeeded(path)));
            Assert.Throws<IOException>(() => Create(IOServices.RemoveTrailingSlash(path)));
        }

        [Theory]
        [InlineData(FileAttributes.Hidden)]
        [InlineData(FileAttributes.ReadOnly)]
        [InlineData(FileAttributes.Normal)]
        public void PathAlreadyExistsAsDirectory(FileAttributes attributes)
        {
            // Creating an existing directory succeeds (no-op), whatever its attributes.
            DirectoryInfo testDir = Create(GetTestFilePath());
            FileAttributes original = testDir.Attributes;

            try
            {
                testDir.Attributes = attributes;
                Assert.Equal(testDir.FullName, Create(testDir.FullName).FullName);
            }
            finally
            {
                testDir.Attributes = original;
            }
        }

        [Fact]
        public void RootPath()
        {
            string dirName = Path.GetPathRoot(Directory.GetCurrentDirectory());
            DirectoryInfo dir = Create(dirName);
            Assert.Equal(dir.FullName, dirName);
        }

        [Fact]
        public void DotIsCurrentDirectory()
        {
            // A trailing "." component is elided; a trailing separator is preserved.
            string path = GetTestFilePath();
            DirectoryInfo result = Create(Path.Combine(path, "."));
            Assert.Equal(IOServices.RemoveTrailingSlash(path), result.FullName);

            result = Create(Path.Combine(path, ".") + Path.DirectorySeparatorChar);
            Assert.Equal(IOServices.AddTrailingSlashIfNeeded(path), result.FullName);
        }

        [Fact]
        public void CreateCurrentDirectory()
        {
            DirectoryInfo result = Create(Directory.GetCurrentDirectory());
            Assert.Equal(Directory.GetCurrentDirectory(), result.FullName);
        }

        [Fact]
        public void DotDotIsParentDirectory()
        {
            // A trailing ".." resolves to the parent (TestDirectory here).
            DirectoryInfo result = Create(Path.Combine(GetTestFilePath(), ".."));
            Assert.Equal(IOServices.RemoveTrailingSlash(TestDirectory), result.FullName);

            result = Create(Path.Combine(GetTestFilePath(), "..") + Path.DirectorySeparatorChar);
            Assert.Equal(IOServices.AddTrailingSlashIfNeeded(TestDirectory), result.FullName);
        }

        [Fact]
        public void ValidPathWithTrailingSlash()
        {
            DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());

            var components = IOInputs.GetValidPathComponentNames();
            Assert.All(components, (component) =>
            {
                string path = IOServices.AddTrailingSlashIfNeeded(Path.Combine(testDir.FullName, component));
                DirectoryInfo result = Create(path);

                Assert.Equal(path, result.FullName);
                Assert.True(result.Exists);
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void ValidExtendedPathWithTrailingSlash()
        {
            DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());

            var components = IOInputs.GetValidPathComponentNames();
            Assert.All(components, (component) =>
            {
                string path = IOInputs.ExtendedPrefix + IOServices.AddTrailingSlashIfNeeded(Path.Combine(testDir.FullName, component));
                DirectoryInfo result = Create(path);

                Assert.Equal(path, result.FullName);
                Assert.True(result.Exists);
            });
        }

        [Fact]
        public void ValidPathWithoutTrailingSlash()
        {
            DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());

            var components = IOInputs.GetValidPathComponentNames();
            Assert.All(components, (component) =>
            {
                string path = testDir.FullName + Path.DirectorySeparatorChar + component;
                DirectoryInfo result = Create(path);

                Assert.Equal(path, result.FullName);
                Assert.True(Directory.Exists(result.FullName));
            });
        }

        [Fact]
        public void ValidPathWithMultipleSubdirectories()
        {
            // CreateDirectory creates all intermediate directories in one call.
            string dirName = Path.Combine(GetTestFilePath(), "Test", "Test", "Test");
            DirectoryInfo dir = Create(dirName);

            Assert.Equal(dir.FullName, dirName);
        }

        [Fact]
        public void AllowedSymbols()
        {
            string dirName = Path.Combine(TestDirectory, Path.GetRandomFileName() + "!@#$%^&");
            DirectoryInfo dir = Create(dirName);

            Assert.Equal(dir.FullName, dirName);
        }

        [Fact]
        public void DirectoryEqualToMaxDirectory_CanBeCreated()
        {
            // Create the maximal path one sub-path at a time.
            DirectoryInfo testDir = Create(GetTestFilePath());
            PathInfo path = IOServices.GetPath(testDir.FullName, IOInputs.MaxDirectory, IOInputs.MaxComponent);
            Assert.All(path.SubPaths, (subpath) =>
            {
                DirectoryInfo result = Create(subpath);

                Assert.Equal(subpath, result.FullName);
                Assert.True(Directory.Exists(result.FullName));
            });
        }

        [Fact]
        public void DirectoryEqualToMaxDirectory_CanBeCreatedAllAtOnce()
        {
            DirectoryInfo testDir = Create(GetTestFilePath());
            PathInfo path = IOServices.GetPath(testDir.FullName, IOInputs.MaxDirectory, maxComponent: 10);
            DirectoryInfo result = Create(path.FullPath);

            Assert.Equal(path.FullPath, result.FullName);
            Assert.True(Directory.Exists(result.FullName));
        }

        [Fact]
        public void DirectoryWithComponentLongerThanMaxComponentAsPath_ThrowsPathTooLongException()
        {
            // While paths themselves can be up to 260 characters including trailing null, file systems
            // limit each components of the path to a total of 255 characters.
            var paths = IOInputs.GetPathsWithComponentLongerThanMaxComponent();

            Assert.All(paths, (path) =>
            {
                Assert.Throws<PathTooLongException>(() => Create(path));
            });
        }

        #endregion

        #region PlatformSpecific

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void PathWithInvalidColons_ThrowsNotSupportedException()
        {
            var paths = IOInputs.GetPathsWithInvalidColons();
            Assert.All(paths, (path) =>
            {
                Assert.Throws<NotSupportedException>(() => Create(path));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void DirectoryLongerThanMaxPath_Succeeds()
        {
            var paths = IOInputs.GetPathsLongerThanMaxPath(GetTestFilePath());
            Assert.All(paths, (path) =>
            {
                DirectoryInfo result = Create(path);
                Assert.True(Directory.Exists(result.FullName));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void DirectoryLongerThanMaxLongPath_ThrowsPathTooLongException()
        {
            var paths = IOInputs.GetPathsLongerThanMaxLongPath(GetTestFilePath());
            Assert.All(paths, (path) =>
            {
                Assert.Throws<PathTooLongException>(() => Create(path));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void DirectoryLongerThanMaxLongPathWithExtendedSyntax_ThrowsPathTooLongException()
        {
            var paths = IOInputs.GetPathsLongerThanMaxLongPath(GetTestFilePath(), useExtendedSyntax: true);
            Assert.All(paths, (path) =>
            {
                Assert.Throws<PathTooLongException>(() => Create(path));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void ExtendedDirectoryLongerThanLegacyMaxPath_Succeeds()
        {
            // The \\?\ prefix lifts the legacy MAX_PATH limit.
            var paths = IOInputs.GetPathsLongerThanMaxPath(GetTestFilePath(), useExtendedSyntax: true);
            Assert.All(paths, (path) =>
            {
                Assert.True(Create(path).Exists);
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void DirectoryLongerThanMaxDirectoryAsPath_Succeeds()
        {
            var paths = IOInputs.GetPathsLongerThanMaxDirectory(GetTestFilePath());
            Assert.All(paths, (path) =>
            {
                var result = Create(path);
                Assert.True(Directory.Exists(result.FullName));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.AnyUnix)]
        public void UnixPathLongerThan256_Allowed()
        {
            DirectoryInfo testDir = Create(GetTestFilePath());
            PathInfo path = IOServices.GetPath(testDir.FullName, 257, IOInputs.MaxComponent);
            DirectoryInfo result = Create(path.FullPath);
            Assert.Equal(path.FullPath, result.FullName);
            Assert.True(Directory.Exists(result.FullName));
        }

        [Fact]
        [PlatformSpecific(PlatformID.AnyUnix)]
        public void UnixPathWithDeeplyNestedDirectories()
        {
            DirectoryInfo parent = Create(GetTestFilePath());

            for (int i = 1; i <= 100; i++) // 100 == arbitrarily large number of directories
            {
                parent = Create(Path.Combine(parent.FullName, "dir" + i));
                Assert.True(Directory.Exists(parent.FullName));
            }
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void WindowsWhiteSpaceAsPath_ThrowsArgumentException()
        {
            var paths = IOInputs.GetWhiteSpace();
            Assert.All(paths, (path) =>
            {
                Assert.Throws<ArgumentException>(() => Create(path));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.AnyUnix)]
        public void UnixWhiteSpaceAsPath_Allowed()
        {
            var paths = IOInputs.GetWhiteSpace();
            Assert.All(paths, (path) =>
            {
                Create(Path.Combine(TestDirectory, path));
                Assert.True(Directory.Exists(Path.Combine(TestDirectory, path)));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void WindowsTrailingWhiteSpace()
        {
            // Windows will remove all non-significant whitespace in a path
            DirectoryInfo testDir = Create(GetTestFilePath());
            var components = IOInputs.GetWhiteSpace();

            Assert.All(components, (component) =>
            {
                string path = IOServices.RemoveTrailingSlash(testDir.FullName) + component;
                DirectoryInfo result = Create(path);

                Assert.True(Directory.Exists(result.FullName));
                Assert.Equal(testDir.FullName, IOServices.RemoveTrailingSlash(result.FullName));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void WindowsExtendedSyntaxWhiteSpace()
        {
            // With the extended prefix, trailing whitespace is NOT trimmed.
            var paths = IOInputs.GetSimpleWhiteSpace();
            using (TemporaryDirectory directory = new TemporaryDirectory())
            {
                foreach (var path in paths)
                {
                    string extendedPath = Path.Combine(IOInputs.ExtendedPrefix + directory.Path, path);
                    Directory.CreateDirectory(extendedPath);
                    Assert.True(Directory.Exists(extendedPath), extendedPath);
                }
            }
        }

        [Fact]
        [PlatformSpecific(PlatformID.AnyUnix)]
        public void UnixNonSignificantTrailingWhiteSpace()
        {
            // Unix treats trailing/prename whitespace as significant and a part of the name.
            DirectoryInfo testDir = Create(GetTestFilePath());
            var components = IOInputs.GetWhiteSpace();

            Assert.All(components, (component) =>
            {
                string path = IOServices.RemoveTrailingSlash(testDir.FullName) + component;
                DirectoryInfo result = Create(path);

                Assert.True(Directory.Exists(result.FullName));
                Assert.NotEqual(testDir.FullName, IOServices.RemoveTrailingSlash(result.FullName));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // alternate data streams
        public void PathWithAlternateDataStreams_ThrowsNotSupportedException()
        {
            var paths = IOInputs.GetPathsWithAlternativeDataStreams();
            Assert.All(paths, (path) =>
            {
                Assert.Throws<NotSupportedException>(() => Create(path));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // device name prefixes
        public void PathWithReservedDeviceNameAsPath_ThrowsDirectoryNotFoundException()
        {
            // Throws DirectoryNotFoundException, when the behavior really should be an invalid path
            var paths = IOInputs.GetPathsWithReservedDeviceNames();
            Assert.All(paths, (path) =>
            {
                Assert.Throws<DirectoryNotFoundException>(() => Create(path));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // device name prefixes
        public void PathWithReservedDeviceNameAsExtendedPath()
        {
            // Reserved names (CON, NUL, ...) are usable when extended syntax is applied.
            var paths = IOInputs.GetReservedDeviceNames();
            using (TemporaryDirectory directory = new TemporaryDirectory())
            {
                Assert.All(paths, (path) =>
                {
                    Assert.True(Create(IOInputs.ExtendedPrefix + Path.Combine(directory.Path, path)).Exists, path);
                });
            }
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // UNC shares
        public void UncPathWithoutShareNameAsPath_ThrowsArgumentException()
        {
            var paths = IOInputs.GetUncPathsWithoutShareName();
            foreach (var path in paths)
            {
                Assert.Throws<ArgumentException>(() => Create(path));
            }
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // UNC shares
        public void UNCPathWithOnlySlashes()
        {
            Assert.Throws<ArgumentException>(() => Create("//"));
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // drive labels
        public void CDriveCase()
        {
            // NOTE(review): asserts the returned FullName preserves the letter casing that
            // was passed in, hence NotEqual for "c:\" vs "C:\".
            DirectoryInfo dir = Create("c:\\");
            DirectoryInfo dir2 = Create("C:\\");
            Assert.NotEqual(dir.FullName, dir2.FullName);
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void DriveLetter_Windows()
        {
            // On Windows, DirectoryInfo will replace "<DriveLetter>:" with "."
            var driveLetter = Create(Directory.GetCurrentDirectory()[0] + ":");
            var current = Create(".");
            Assert.Equal(current.Name, driveLetter.Name);
            Assert.Equal(current.FullName, driveLetter.FullName);
        }

        [Fact]
        [PlatformSpecific(PlatformID.AnyUnix)]
        public void DriveLetter_Unix()
        {
            // On Unix, there's no special casing for drive letters, which are valid file names
            var driveLetter = Create("C:");
            var current = Create(".");
            Assert.Equal("C:", driveLetter.Name);
            Assert.Equal(Path.Combine(current.FullName, "C:"), driveLetter.FullName);
            try
            {
                // If this test is inherited then it's possible this call will fail due to the "C:" directory
                // being deleted in that other test before this call. What we care about testing (proper path
                // handling) is unaffected by this race condition.
                Directory.Delete("C:");
            }
            catch (DirectoryNotFoundException) { }
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // testing drive labels
        public void NonExistentDriveAsPath_ThrowsDirectoryNotFoundException()
        {
            Assert.Throws<DirectoryNotFoundException>(() =>
            {
                Create(IOServices.GetNonExistentDrive());
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // testing drive labels
        public void SubdirectoryOnNonExistentDriveAsPath_ThrowsDirectoryNotFoundException()
        {
            Assert.Throws<DirectoryNotFoundException>(() =>
            {
                Create(Path.Combine(IOServices.GetNonExistentDrive(), "Subdirectory"));
            });
        }

        [Fact]
        [ActiveIssue(1221)]
        [PlatformSpecific(PlatformID.Windows)] // testing drive labels
        public void NotReadyDriveAsPath_ThrowsDirectoryNotFoundException()
        {
            // Behavior is suspect, should really have thrown IOException similar to the SubDirectory case
            var drive = IOServices.GetNotReadyDrive();
            if (drive == null)
            {
                Console.WriteLine("Skipping test. Unable to find a not-ready drive, such as CD-Rom with no disc inserted.");
                return;
            }

            Assert.Throws<DirectoryNotFoundException>(() =>
            {
                Create(drive);
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // testing drive labels
        [ActiveIssue(1221)]
        public void SubdirectoryOnNotReadyDriveAsPath_ThrowsIOException()
        {
            var drive = IOServices.GetNotReadyDrive();
            if (drive == null)
            {
                Console.WriteLine("Skipping test. Unable to find a not-ready drive, such as CD-Rom with no disc inserted.");
                return;
            }

            // 'Device is not ready'
            Assert.Throws<IOException>(() =>
            {
                Create(Path.Combine(drive, "Subdirectory"));
            });
        }

#if !TEST_WINRT // Cannot set current directory to root from appcontainer with it's default ACL
        /*
        [Fact]
        [ActiveIssue(1220)] // SetCurrentDirectory
        public void DotDotAsPath_WhenCurrentDirectoryIsRoot_DoesNotThrow()
        {
            string root = Path.GetPathRoot(Directory.GetCurrentDirectory());

            using (CurrentDirectoryContext context = new CurrentDirectoryContext(root))
            {
                DirectoryInfo result = Create("..");

                Assert.True(Directory.Exists(result.FullName));
                Assert.Equal(root, result.FullName);
            }
        }
        */
#endif
        #endregion
    }
}
using System;
using System.Collections.Generic;

namespace Nest
{
	/// <summary>
	/// Walks a mapping tree (get-mapping response -> type mappings -> properties,
	/// recursing into multi-fields and object/nested sub-properties) and dispatches
	/// every property to an <see cref="IMappingVisitor"/> according to its field type.
	/// </summary>
	public class MappingWalker
	{
		private readonly IMappingVisitor _visitor;

		public MappingWalker(IMappingVisitor visitor)
		{
			visitor.ThrowIfNull(nameof(visitor));
			_visitor = visitor;
		}

		/// <summary>Visits every type mapping of every index in the response.</summary>
		public void Accept(IGetMappingResponse response)
		{
			if (response == null)
				return;

			foreach (var indexMapping in response.Mappings)
			{
				foreach (var typeMapping in indexMapping.Value)
				{
					Accept(typeMapping.Value);
				}
			}
		}

		/// <summary>Visits a single type mapping and then each of its properties.</summary>
		public void Accept(ITypeMapping mapping)
		{
			if (mapping == null)
				return;

			_visitor.Visit(mapping);
			Accept(mapping.Properties);
		}

		// Invokes the callback only when the property actually is of the requested subtype.
		private void Visit<TProperty>(IProperty prop, Action<TProperty> act) where TProperty : class, IProperty
		{
			var typed = prop as TProperty;
			if (typed != null)
			{
				act(typed);
			}
		}

		/// <summary>
		/// Dispatches each property to the visitor based on its field type, recursing into
		/// multi-fields and, for object/nested types, into sub-properties (tracking depth).
		/// </summary>
		public void Accept(IProperties properties)
		{
			if (properties == null)
				return;

			foreach (var pair in properties)
			{
				var property = pair.Value;
				var typeName = property.Type;
				var fieldType = typeName.Name.ToEnum<FieldType>();
				switch (fieldType)
				{
					case FieldType.Text:
						Visit<ITextProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.Keyword:
						Visit<IKeywordProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.String:
#pragma warning disable 618
						Visit<IStringProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
#pragma warning restore 618
						break;
					//TODO implement type specific visitors too!
					case FieldType.HalfFloat:
					case FieldType.ScaledFloat:
					case FieldType.Float:
					case FieldType.Double:
					case FieldType.Byte:
					case FieldType.Short:
					case FieldType.Integer:
					case FieldType.Long:
						// All numeric field types share the generic number property visitor.
						Visit<INumberProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.Date:
						Visit<IDateProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.Boolean:
						Visit<IBooleanProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.Binary:
						Visit<IBinaryProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.Object:
						Visit<IObjectProperty>(property, p =>
						{
							_visitor.Visit(p);
							_visitor.Depth += 1;
							Accept(p.Properties);
							_visitor.Depth -= 1;
						});
						break;
					case FieldType.Nested:
						Visit<INestedProperty>(property, p =>
						{
							_visitor.Visit(p);
							_visitor.Depth += 1;
							Accept(p.Properties);
							_visitor.Depth -= 1;
						});
						break;
					case FieldType.Ip:
						Visit<IIpProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.GeoPoint:
						Visit<IGeoPointProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.GeoShape:
						Visit<IGeoShapeProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.Attachment:
						Visit<IAttachmentProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.Completion:
						Visit<ICompletionProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.Murmur3Hash:
						Visit<IMurmur3HashProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.TokenCount:
						Visit<ITokenCountProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.None:
						continue;
					case FieldType.Percolator:
						// Percolator fields have no multi-fields to recurse into.
						Visit<IPercolatorProperty>(property, p =>
						{
							_visitor.Visit(p);
						});
						break;
					case FieldType.IntegerRange:
						Visit<IIntegerRangeProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.FloatRange:
						Visit<IFloatRangeProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.LongRange:
						Visit<ILongRangeProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.DoubleRange:
						Visit<IDoubleRangeProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
					case FieldType.DateRange:
						Visit<IDateRangeProperty>(property, p =>
						{
							_visitor.Visit(p);
							Accept(p.Fields);
						});
						break;
				}
			}
		}
	}
}
// ********************************************************************************************************
// Product Name: DotSpatial.Projection
// Description: The basic module for MapWindow version 6.0
// ********************************************************************************************************
//
// The Original Code is from MapWindow.dll version 6.0
//
// The Initial Developer of this Original Code is Ted Dunsford. Created 8/14/2009 3:22:06 PM
//
// Contributor(s): (Open source contributors should list themselves and their modifications here).
// Name               | Date       | Comment
// --------------------|------------|------------------------------------------------------------
// Ted Dunsford        | 5/3/2010   | Updated project to DotSpatial.Projection and license to LGPL
// ********************************************************************************************************

#pragma warning disable 1591

namespace DotSpatial.Projections.GeographicCategories
{
    /// <summary>
    /// Africa: geographic (longitude/latitude) coordinate systems for African and
    /// nearby Middle-Eastern datums. Each field is a ready-made <c>ProjectionInfo</c>
    /// built from a proj4 definition, then tagged with its ESRI GCS and datum names.
    /// </summary>
    public class Africa : CoordinateSystemCategory
    {
        #region Private Variables

        /// <summary>
        /// Abidjan 1987
        /// </summary>
        public readonly ProjectionInfo Abidjan1987;
        public readonly ProjectionInfo Accra;
        public readonly ProjectionInfo Adindan;
        public readonly ProjectionInfo Afgooye;
        public readonly ProjectionInfo Agadez;
        public readonly ProjectionInfo AinelAbd1970;
        public readonly ProjectionInfo Arc1950;
        public readonly ProjectionInfo Arc1960;
        public readonly ProjectionInfo AyabelleLighthouse;
        public readonly ProjectionInfo Beduaram;
        public readonly ProjectionInfo Bissau;
        public readonly ProjectionInfo Camacupa;
        public readonly ProjectionInfo Cape;
        public readonly ProjectionInfo Carthage;
        public readonly ProjectionInfo CarthageParis;
        public readonly ProjectionInfo Carthagedegrees;
        public readonly ProjectionInfo Conakry1905;
        public readonly ProjectionInfo CotedIvoire;
        public readonly ProjectionInfo Dabola;
        public readonly ProjectionInfo Douala;
        public readonly ProjectionInfo Douala1948;
        public readonly ProjectionInfo Egypt1907;
        public readonly ProjectionInfo Egypt1930;
        public readonly ProjectionInfo EuropeanDatum1950;
        public readonly ProjectionInfo EuropeanLibyanDatum1979;
        public readonly ProjectionInfo Garoua;
        public readonly ProjectionInfo Hartebeesthoek1994;
        public readonly ProjectionInfo Kousseri;
        public readonly ProjectionInfo KuwaitOilCompany;
        public readonly ProjectionInfo KuwaitUtility;
        public readonly ProjectionInfo Leigon;
        public readonly ProjectionInfo Liberia1964;
        public readonly ProjectionInfo Locodjo1965;
        public readonly ProjectionInfo Lome;
        public readonly ProjectionInfo Madzansua;
        public readonly ProjectionInfo Mahe1971;
        public readonly ProjectionInfo Malongo1987;
        public readonly ProjectionInfo Manoca;
        public readonly ProjectionInfo Manoca1962;
        public readonly ProjectionInfo Massawa;
        public readonly ProjectionInfo Merchich;
        public readonly ProjectionInfo Merchichdegrees;
        public readonly ProjectionInfo Mhast;
        public readonly ProjectionInfo Minna;
        public readonly ProjectionInfo Moznet;
        public readonly ProjectionInfo Mporaloko;
        public readonly ProjectionInfo Nahrwan1967;
        public readonly ProjectionInfo NationalGeodeticNetworkKuwait;
        public readonly ProjectionInfo NordSahara1959;
        public readonly ProjectionInfo NordSahara1959Paris;
        public readonly ProjectionInfo Observatario;
        public readonly ProjectionInfo Oman;
        public readonly ProjectionInfo PDO1993;
        public readonly ProjectionInfo Palestine1923;
        public readonly ProjectionInfo Point58;
        public readonly ProjectionInfo PointeNoire;
        public readonly ProjectionInfo Qatar;
        public readonly ProjectionInfo Qatar1948;
        public readonly ProjectionInfo Schwarzeck;
        public readonly ProjectionInfo SierraLeone1924;
        public readonly ProjectionInfo SierraLeone1960;
        public readonly ProjectionInfo SierraLeone1968;
        public readonly ProjectionInfo SouthYemen;
        public readonly ProjectionInfo Sudan;
        public readonly ProjectionInfo Tananarive1925;
        public readonly ProjectionInfo Tananarive1925Paris;
        public readonly ProjectionInfo Tete;
        public readonly ProjectionInfo TrucialCoast1948;
        public readonly ProjectionInfo Voirol1875;
        public readonly ProjectionInfo Voirol1875Paris;
        public readonly ProjectionInfo Voirol1875degrees;
        public readonly ProjectionInfo VoirolUnifie1960;
        public readonly ProjectionInfo VoirolUnifie1960Paris;
        public readonly ProjectionInfo VoirolUnifie1960degrees;
        public readonly ProjectionInfo YemenNGN1996;
        public readonly ProjectionInfo Yoff;

        #endregion

        #region Constructors

        /// <summary>
        /// Creates a new instance of Africa. Builds every projection from its proj4
        /// definition, then assigns the ESRI geographic coordinate system name and the
        /// datum name for each one.
        /// </summary>
        public Africa()
        {
            // --- proj4 definitions -------------------------------------------------
            // "+pm=2.337229166666667" marks the Paris prime-meridian variants; the
            // explicit +a/+b pairs are ellipsoids (e.g. Clarke 1880 IGN) that proj4
            // has no named alias for in this library.
            Abidjan1987 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Accra = ProjectionInfo.FromProj4String("+proj=longlat +a=6378300 +b=6356751.689189189 +no_defs ");
            Adindan = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Afgooye = ProjectionInfo.FromProj4String("+proj=longlat +ellps=krass +no_defs ");
            Agadez = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            AinelAbd1970 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=intl +no_defs ");
            Arc1950 = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.145 +b=6356514.966395495 +no_defs ");
            Arc1960 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            AyabelleLighthouse = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Beduaram = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Bissau = ProjectionInfo.FromProj4String("+proj=longlat +ellps=intl +no_defs ");
            Camacupa = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Cape = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.145 +b=6356514.966395495 +no_defs ");
            Carthage = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Carthagedegrees = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            CarthageParis = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +pm=2.337229166666667 +no_defs ");
            Conakry1905 = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            CotedIvoire = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Dabola = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Douala = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Douala1948 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=intl +no_defs ");
            Egypt1907 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=helmert +no_defs ");
            Egypt1930 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=intl +no_defs ");
            EuropeanDatum1950 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=intl +no_defs ");
            EuropeanLibyanDatum1979 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=intl +no_defs ");
            Garoua = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Hartebeesthoek1994 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=WGS84 +no_defs ");
            Kousseri = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            KuwaitOilCompany = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            KuwaitUtility = ProjectionInfo.FromProj4String("+proj=longlat +ellps=GRS80 +no_defs ");
            Leigon = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Liberia1964 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Locodjo1965 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Lome = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Madzansua = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk66 +no_defs ");
            Mahe1971 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Malongo1987 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=intl +no_defs ");
            Manoca = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Manoca1962 = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Massawa = ProjectionInfo.FromProj4String("+proj=longlat +ellps=bessel +no_defs ");
            Merchich = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Merchichdegrees = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Mhast = ProjectionInfo.FromProj4String("+proj=longlat +ellps=intl +no_defs ");
            Minna = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Moznet = ProjectionInfo.FromProj4String("+proj=longlat +ellps=WGS84 +no_defs ");
            Mporaloko = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Nahrwan1967 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            NationalGeodeticNetworkKuwait = ProjectionInfo.FromProj4String("+proj=longlat +ellps=WGS84 +no_defs ");
            NordSahara1959 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            NordSahara1959Paris = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +pm=2.337229166666667 +no_defs ");
            Observatario = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk66 +no_defs ");
            Oman = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Palestine1923 = ProjectionInfo.FromProj4String("+proj=longlat +a=6378300.79 +b=6356566.430000036 +no_defs ");
            PDO1993 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            Point58 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            PointeNoire = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Qatar = ProjectionInfo.FromProj4String("+proj=longlat +ellps=intl +no_defs ");
            Qatar1948 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=helmert +no_defs ");
            Schwarzeck = ProjectionInfo.FromProj4String("+proj=longlat +ellps=bess_nam +no_defs ");
            SierraLeone1924 = ProjectionInfo.FromProj4String("+proj=longlat +a=6378300 +b=6356751.689189189 +no_defs ");
            SierraLeone1960 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            SierraLeone1968 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            SouthYemen = ProjectionInfo.FromProj4String("+proj=longlat +ellps=krass +no_defs ");
            Sudan = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Tananarive1925 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=intl +no_defs ");
            Tananarive1925Paris = ProjectionInfo.FromProj4String("+proj=longlat +ellps=intl +pm=2.337229166666667 +no_defs ");
            Tete = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk66 +no_defs ");
            TrucialCoast1948 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=helmert +no_defs ");
            Voirol1875 = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Voirol1875degrees = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");
            Voirol1875Paris = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +pm=2.337229166666667 +no_defs ");
            VoirolUnifie1960 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            VoirolUnifie1960degrees = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +no_defs ");
            VoirolUnifie1960Paris = ProjectionInfo.FromProj4String("+proj=longlat +ellps=clrk80 +pm=2.337229166666667 +no_defs ");
            YemenNGN1996 = ProjectionInfo.FromProj4String("+proj=longlat +ellps=WGS84 +no_defs ");
            Yoff = ProjectionInfo.FromProj4String("+proj=longlat +a=6378249.2 +b=6356514.999904194 +no_defs ");

            // --- ESRI geographic coordinate system names ---------------------------
            Abidjan1987.GeographicInfo.Name = "GCS_Abidjan_1987";
            Accra.GeographicInfo.Name = "GCS_Accra";
            Adindan.GeographicInfo.Name = "GCS_Adindan";
            Afgooye.GeographicInfo.Name = "GCS_Afgooye";
            Agadez.GeographicInfo.Name = "GCS_Agadez";
            AinelAbd1970.GeographicInfo.Name = "GCS_Ain_el_Abd_1970";
            Arc1950.GeographicInfo.Name = "GCS_Arc_1950";
            Arc1960.GeographicInfo.Name = "GCS_Arc_1960";
            AyabelleLighthouse.GeographicInfo.Name = "GCS_Ayabelle";
            Beduaram.GeographicInfo.Name = "GCS_Beduaram";
            Bissau.GeographicInfo.Name = "GCS_Bissau";
            Camacupa.GeographicInfo.Name = "GCS_Camacupa";
            Cape.GeographicInfo.Name = "GCS_Cape";
            Carthage.GeographicInfo.Name = "GCS_Carthage";
            Carthagedegrees.GeographicInfo.Name = "GCS_Carthage_Degree";
            CarthageParis.GeographicInfo.Name = "GCS_Carthage_Paris";
            Conakry1905.GeographicInfo.Name = "GCS_Conakry_1905";
            CotedIvoire.GeographicInfo.Name = "GCS_Cote_d_Ivoire";
            Dabola.GeographicInfo.Name = "GCS_Dabola";
            Douala.GeographicInfo.Name = "GCS_Douala";
            Douala1948.GeographicInfo.Name = "GCS_Douala_1948";
            Egypt1907.GeographicInfo.Name = "GCS_Egypt_1907";
            Egypt1930.GeographicInfo.Name = "GCS_Egypt_1930";
            EuropeanDatum1950.GeographicInfo.Name = "GCS_European_1950";
            EuropeanLibyanDatum1979.GeographicInfo.Name = "GCS_European_Libyan_Datum_1979";
            Garoua.GeographicInfo.Name = "GCS_Garoua";
            Hartebeesthoek1994.GeographicInfo.Name = "GCS_Hartebeesthoek_1994";
            Kousseri.GeographicInfo.Name = "GCS_Kousseri";
            KuwaitOilCompany.GeographicInfo.Name = "GCS_Kuwait_Oil_Company";
            KuwaitUtility.GeographicInfo.Name = "GCS_KUDAMS";
            Leigon.GeographicInfo.Name = "GCS_Leigon";
            Liberia1964.GeographicInfo.Name = "GCS_Liberia_1964";
            Locodjo1965.GeographicInfo.Name = "GCS_Locodjo_1965";
            Lome.GeographicInfo.Name = "GCS_Lome";
            Madzansua.GeographicInfo.Name = "GCS_Madzansua";
            Mahe1971.GeographicInfo.Name = "GCS_Mahe_1971";
            Malongo1987.GeographicInfo.Name = "GCS_Malongo_1987";
            Manoca.GeographicInfo.Name = "GCS_Manoca";
            Manoca1962.GeographicInfo.Name = "GCS_Manoca_1962";
            Massawa.GeographicInfo.Name = "GCS_Massawa";
            Merchich.GeographicInfo.Name = "GCS_Merchich";
            Merchichdegrees.GeographicInfo.Name = "GCS_Merchich_Degree";
            Mhast.GeographicInfo.Name = "GCS_Mhast_Offshore";
            Minna.GeographicInfo.Name = "GCS_Minna";
            Moznet.GeographicInfo.Name = "GCS_Moznet";
            // FIX: Mporaloko was the only projection that never received its GCS or
            // datum names, so it carried whatever default ProjectionInfo provides.
            Mporaloko.GeographicInfo.Name = "GCS_Mporaloko";
            Nahrwan1967.GeographicInfo.Name = "GCS_Nahrwan_1967";
            NationalGeodeticNetworkKuwait.GeographicInfo.Name = "GCS_NGN";
            NordSahara1959.GeographicInfo.Name = "GCS_Nord_Sahara_1959";
            NordSahara1959Paris.GeographicInfo.Name = "GCS_Nord_Sahara_1959_Paris";
            Observatario.GeographicInfo.Name = "GCS_Observatario";
            Oman.GeographicInfo.Name = "GCS_Oman";
            Palestine1923.GeographicInfo.Name = "GCS_Palestine_1923";
            PDO1993.GeographicInfo.Name = "GCS_PDO_1993";
            Point58.GeographicInfo.Name = "GCS_Point_58";
            PointeNoire.GeographicInfo.Name = "GCS_Pointe_Noire";
            Qatar.GeographicInfo.Name = "GCS_Qatar";
            Qatar1948.GeographicInfo.Name = "GCS_Qatar_1948";
            Schwarzeck.GeographicInfo.Name = "GCS_Schwarzeck";
            SierraLeone1924.GeographicInfo.Name = "GCS_Sierra_Leone_1924";
            SierraLeone1960.GeographicInfo.Name = "GCS_Sierra_Leone_1960";
            SierraLeone1968.GeographicInfo.Name = "GCS_Sierra_Leone_1968";
            SouthYemen.GeographicInfo.Name = "GCS_South_Yemen";
            Sudan.GeographicInfo.Name = "GCS_Sudan";
            Tananarive1925.GeographicInfo.Name = "GCS_Tananarive_1925";
            Tananarive1925Paris.GeographicInfo.Name = "GCS_Tananarive_1925_Paris";
            Tete.GeographicInfo.Name = "GCS_Tete";
            TrucialCoast1948.GeographicInfo.Name = "GCS_Trucial_Coast_1948";
            Voirol1875.GeographicInfo.Name = "GCS_Voirol_1875";
            Voirol1875degrees.GeographicInfo.Name = "GCS_Voirol_1875_Degree";
            Voirol1875Paris.GeographicInfo.Name = "GCS_Voirol_1875_Paris";
            VoirolUnifie1960.GeographicInfo.Name = "GCS_Voirol_Unifie_1960";
            VoirolUnifie1960degrees.GeographicInfo.Name = "GCS_Voirol_Unifie_1960_Degree";
            VoirolUnifie1960Paris.GeographicInfo.Name = "GCS_Voirol_Unifie_1960_Paris";
            YemenNGN1996.GeographicInfo.Name = "GCS_Yemen_NGN_1996";
            Yoff.GeographicInfo.Name = "GCS_Yoff";

            // --- datum names -------------------------------------------------------
            // Degree/Paris variants deliberately share the base datum name
            // (e.g. the three Carthage entries all use "D_Carthage").
            Abidjan1987.GeographicInfo.Datum.Name = "D_Abidjan_1987";
            Accra.GeographicInfo.Datum.Name = "D_Accra";
            Adindan.GeographicInfo.Datum.Name = "D_Adindan";
            Afgooye.GeographicInfo.Datum.Name = "D_Afgooye";
            Agadez.GeographicInfo.Datum.Name = "D_Agadez";
            AinelAbd1970.GeographicInfo.Datum.Name = "D_Ain_el_Abd_1970";
            Arc1950.GeographicInfo.Datum.Name = "D_Arc_1950";
            Arc1960.GeographicInfo.Datum.Name = "D_Arc_1960";
            AyabelleLighthouse.GeographicInfo.Datum.Name = "D_Ayabelle";
            Beduaram.GeographicInfo.Datum.Name = "D_Beduaram";
            Bissau.GeographicInfo.Datum.Name = "D_Bissau";
            Camacupa.GeographicInfo.Datum.Name = "D_Camacupa";
            Cape.GeographicInfo.Datum.Name = "D_Cape";
            Carthage.GeographicInfo.Datum.Name = "D_Carthage";
            Carthagedegrees.GeographicInfo.Datum.Name = "D_Carthage";
            CarthageParis.GeographicInfo.Datum.Name = "D_Carthage";
            Conakry1905.GeographicInfo.Datum.Name = "D_Conakry_1905";
            CotedIvoire.GeographicInfo.Datum.Name = "D_Cote_d_Ivoire";
            Dabola.GeographicInfo.Datum.Name = "D_Dabola";
            Douala.GeographicInfo.Datum.Name = "D_Douala";
            Douala1948.GeographicInfo.Datum.Name = "D_Douala_1948";
            Egypt1907.GeographicInfo.Datum.Name = "D_Egypt_1907";
            Egypt1930.GeographicInfo.Datum.Name = "D_Egypt_1930";
            EuropeanDatum1950.GeographicInfo.Datum.Name = "D_European_1950";
            EuropeanLibyanDatum1979.GeographicInfo.Datum.Name = "D_European_Libyan_1979";
            Garoua.GeographicInfo.Datum.Name = "D_Garoua";
            Hartebeesthoek1994.GeographicInfo.Datum.Name = "D_Hartebeesthoek_1994";
            Kousseri.GeographicInfo.Datum.Name = "D_Kousseri";
            KuwaitOilCompany.GeographicInfo.Datum.Name = "D_Kuwait_Oil_Company";
            KuwaitUtility.GeographicInfo.Datum.Name = "D_Kuwait_Utility";
            Leigon.GeographicInfo.Datum.Name = "D_Leigon";
            Liberia1964.GeographicInfo.Datum.Name = "D_Liberia_1964";
            Locodjo1965.GeographicInfo.Datum.Name = "D_Locodjo_1965";
            Lome.GeographicInfo.Datum.Name = "D_Lome";
            Madzansua.GeographicInfo.Datum.Name = "D_Madzansua";
            Mahe1971.GeographicInfo.Datum.Name = "D_Mahe_1971";
            Malongo1987.GeographicInfo.Datum.Name = "D_Malongo_1987";
            Manoca.GeographicInfo.Datum.Name = "D_Manoca";
            Manoca1962.GeographicInfo.Datum.Name = "D_Manoca_1962";
            Massawa.GeographicInfo.Datum.Name = "D_Massawa";
            Merchich.GeographicInfo.Datum.Name = "D_Merchich";
            Merchichdegrees.GeographicInfo.Datum.Name = "D_Merchich";
            Mhast.GeographicInfo.Datum.Name = "D_Mhast_Offshore";
            Minna.GeographicInfo.Datum.Name = "D_Minna";
            Moznet.GeographicInfo.Datum.Name = "D_Moznet";
            // FIX: see note in the GCS-name section above.
            Mporaloko.GeographicInfo.Datum.Name = "D_Mporaloko";
            Nahrwan1967.GeographicInfo.Datum.Name = "D_Nahrwan_1967";
            NationalGeodeticNetworkKuwait.GeographicInfo.Datum.Name = "D_NGN";
            NordSahara1959.GeographicInfo.Datum.Name = "D_Nord_Sahara_1959";
            NordSahara1959Paris.GeographicInfo.Datum.Name = "D_Nord_Sahara_1959";
            Observatario.GeographicInfo.Datum.Name = "D_Observatario";
            Oman.GeographicInfo.Datum.Name = "D_Oman";
            Palestine1923.GeographicInfo.Datum.Name = "D_Palestine_1923";
            PDO1993.GeographicInfo.Datum.Name = "D_PDO_1993";
            Point58.GeographicInfo.Datum.Name = "D_Point_58";
            PointeNoire.GeographicInfo.Datum.Name = "D_Pointe_Noire";
            Qatar.GeographicInfo.Datum.Name = "D_Qatar";
            Qatar1948.GeographicInfo.Datum.Name = "D_Qatar_1948";
            Schwarzeck.GeographicInfo.Datum.Name = "D_Schwarzeck";
            SierraLeone1924.GeographicInfo.Datum.Name = "D_Sierra_Leone_1924";
            SierraLeone1960.GeographicInfo.Datum.Name = "D_Sierra_Leone_1960";
            SierraLeone1968.GeographicInfo.Datum.Name = "D_Sierra_Leone_1968";
            SouthYemen.GeographicInfo.Datum.Name = "D_South_Yemen";
            Sudan.GeographicInfo.Datum.Name = "D_Sudan";
            Tananarive1925.GeographicInfo.Datum.Name = "D_Tananarive_1925";
            Tananarive1925Paris.GeographicInfo.Datum.Name = "D_Tananarive_1925";
            Tete.GeographicInfo.Datum.Name = "D_Tete";
            TrucialCoast1948.GeographicInfo.Datum.Name = "D_Trucial_Coast_1948";
            Voirol1875.GeographicInfo.Datum.Name = "D_Voirol_1875";
            Voirol1875degrees.GeographicInfo.Datum.Name = "D_Voirol_1875";
            Voirol1875Paris.GeographicInfo.Datum.Name = "D_Voirol_1875";
            VoirolUnifie1960.GeographicInfo.Datum.Name = "D_Voirol_Unifie_1960";
            VoirolUnifie1960degrees.GeographicInfo.Datum.Name = "D_Voirol_Unifie_1960";
            VoirolUnifie1960Paris.GeographicInfo.Datum.Name = "D_Voirol_Unifie_1960";
            YemenNGN1996.GeographicInfo.Datum.Name = "D_Yemen_NGN_1996";
            Yoff.GeographicInfo.Datum.Name = "D_Yoff";
        }

        #endregion
    }

#pragma warning restore 1591
}
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Xml; using Microsoft.Build.BackEnd.SdkResolution; using Microsoft.Build.Definition; using Microsoft.Build.Evaluation; using Microsoft.Build.Evaluation.Context; using Microsoft.Build.Framework; using Microsoft.Build.Shared; using Microsoft.Build.Unittest; using Shouldly; using Xunit; using SdkResult = Microsoft.Build.BackEnd.SdkResolution.SdkResult; namespace Microsoft.Build.UnitTests.Definition { /// <summary> /// Tests some manipulations of Project and ProjectCollection that require dealing with internal data. /// </summary> public class ProjectEvaluationContext_Tests : IDisposable { public ProjectEvaluationContext_Tests() { _env = TestEnvironment.Create(); _resolver = new SdkUtilities.ConfigurableMockSdkResolver( new Dictionary<string, SdkResult> { {"foo", new SdkResult(new SdkReference("foo", "1.0.0", null), "path", "1.0.0", null)}, {"bar", new SdkResult(new SdkReference("bar", "1.0.0", null), "path", "1.0.0", null)} }); } public void Dispose() { _env.Dispose(); } private readonly SdkUtilities.ConfigurableMockSdkResolver _resolver; private readonly TestEnvironment _env; private static void SetResolverForContext(EvaluationContext context, SdkResolver resolver) { var sdkService = (SdkResolverService) context.SdkResolverService; sdkService.InitializeForTests(null, new List<SdkResolver> {resolver}); } [Theory] [InlineData(EvaluationContext.SharingPolicy.Shared)] [InlineData(EvaluationContext.SharingPolicy.Isolated)] public void SharedContextShouldGetReusedWhereasIsolatedContextShouldNot(EvaluationContext.SharingPolicy policy) { var previousContext = EvaluationContext.Create(policy); for (var i = 0; i < 10; i++) { var currentContext = previousContext.ContextForNewProject(); if (i == 0) { 
currentContext.ShouldBeSameAs(previousContext, "first usage context was not the same as the initial context"); } else { switch (policy) { case EvaluationContext.SharingPolicy.Shared: currentContext.ShouldBeSameAs(previousContext, $"Shared policy: usage {i} was not the same as usage {i - 1}"); break; case EvaluationContext.SharingPolicy.Isolated: currentContext.ShouldNotBeSameAs(previousContext, $"Isolated policy: usage {i} was the same as usage {i - 1}"); break; default: throw new ArgumentOutOfRangeException(nameof(policy), policy, null); } } previousContext = currentContext; } } [Theory] [InlineData(EvaluationContext.SharingPolicy.Shared)] [InlineData(EvaluationContext.SharingPolicy.Isolated)] public void ReevaluationShouldNotReuseInitialContext(EvaluationContext.SharingPolicy policy) { try { EvaluationContext.TestOnlyHookOnCreate = c => SetResolverForContext(c, _resolver); var collection = _env.CreateProjectCollection().Collection; var context = EvaluationContext.Create(policy); var project = Project.FromXmlReader( XmlReader.Create(new StringReader("<Project Sdk=\"foo\"></Project>")), new ProjectOptions { ProjectCollection = collection, EvaluationContext = context, LoadSettings = ProjectLoadSettings.IgnoreMissingImports }); _resolver.ResolvedCalls["foo"].ShouldBe(1); project.AddItem("a", "b"); project.ReevaluateIfNecessary(); _resolver.ResolvedCalls["foo"].ShouldBe(2); } finally { EvaluationContext.TestOnlyHookOnCreate = null; } } private static string[] _sdkResolutionProjects = { "<Project Sdk=\"foo\"></Project>", "<Project Sdk=\"bar\"></Project>", "<Project Sdk=\"foo\"></Project>", "<Project Sdk=\"bar\"></Project>" }; [Theory] [InlineData(EvaluationContext.SharingPolicy.Shared, 1, 1)] [InlineData(EvaluationContext.SharingPolicy.Isolated, 4, 4)] public void ContextPinsSdkResolverCache(EvaluationContext.SharingPolicy policy, int sdkLookupsForFoo, int sdkLookupsForBar) { try { EvaluationContext.TestOnlyHookOnCreate = c => SetResolverForContext(c, _resolver); var 
context = EvaluationContext.Create(policy); EvaluateProjects(_sdkResolutionProjects, context, null); _resolver.ResolvedCalls.Count.ShouldBe(2); _resolver.ResolvedCalls["foo"].ShouldBe(sdkLookupsForFoo); _resolver.ResolvedCalls["bar"].ShouldBe(sdkLookupsForBar); } finally { EvaluationContext.TestOnlyHookOnCreate = null; } } [Fact] public void DefaultContextIsIsolatedContext() { try { var seenContexts = new HashSet<EvaluationContext>(); EvaluationContext.TestOnlyHookOnCreate = c => seenContexts.Add(c); EvaluateProjects(_sdkResolutionProjects, null, null); seenContexts.Count.ShouldBe(8); // 4 evaluations and 4 reevaluations seenContexts.ShouldAllBe(c => c.Policy == EvaluationContext.SharingPolicy.Isolated); } finally { EvaluationContext.TestOnlyHookOnCreate = null; } } public static IEnumerable<object[]> ContextPinsGlobExpansionCacheData { get { yield return new object[] { EvaluationContext.SharingPolicy.Shared, new[] { new[] {"0.cs"}, new[] {"0.cs"}, new[] {"0.cs"}, new[] {"0.cs"} } }; yield return new object[] { EvaluationContext.SharingPolicy.Isolated, new[] { new[] {"0.cs"}, new[] {"0.cs", "1.cs"}, new[] {"0.cs", "1.cs", "2.cs"}, new[] {"0.cs", "1.cs", "2.cs", "3.cs"}, } }; } } private static string[] _projectsWithGlobs = { @"<Project> <ItemGroup> <i Include=`**/*.cs` /> </ItemGroup> </Project>", @"<Project> <ItemGroup> <i Include=`**/*.cs` /> </ItemGroup> </Project>", }; [Theory] [MemberData(nameof(ContextPinsGlobExpansionCacheData))] public void ContextCachesItemElementGlobExpansions(EvaluationContext.SharingPolicy policy, string[][] expectedGlobExpansions) { var projectDirectory = _env.DefaultTestDirectory.Path; var context = EvaluationContext.Create(policy); var evaluationCount = 0; File.WriteAllText(Path.Combine(projectDirectory, $"{evaluationCount}.cs"), ""); EvaluateProjects( _projectsWithGlobs, context, project => { var expectedGlobExpansion = expectedGlobExpansions[evaluationCount]; evaluationCount++; File.WriteAllText(Path.Combine(projectDirectory, 
$"{evaluationCount}.cs"), ""); ObjectModelHelpers.AssertItems(expectedGlobExpansion, project.GetItems("i")); } ); } public static IEnumerable<object[]> ContextDisambiguatesRelativeGlobsData { get { yield return new object[] { EvaluationContext.SharingPolicy.Shared, new[] { new[] {"0.cs"}, // first project new[] {"0.cs", "1.cs"}, // second project new[] {"0.cs"}, // first project reevaluation new[] {"0.cs", "1.cs"}, // second project reevaluation } }; yield return new object[] { EvaluationContext.SharingPolicy.Isolated, new[] { new[] {"0.cs"}, new[] {"0.cs", "1.cs"}, new[] {"0.cs", "1.cs", "2.cs"}, new[] {"0.cs", "1.cs", "2.cs", "3.cs"}, } }; } } [Theory] [MemberData(nameof(ContextDisambiguatesRelativeGlobsData))] public void ContextDisambiguatesSameRelativeGlobsPointingInsideDifferentProjectCones(EvaluationContext.SharingPolicy policy, string[][] expectedGlobExpansions) { var projectDirectory1 = _env.DefaultTestDirectory.CreateDirectory("1").Path; var projectDirectory2 = _env.DefaultTestDirectory.CreateDirectory("2").Path; var context = EvaluationContext.Create(policy); var evaluationCount = 0; File.WriteAllText(Path.Combine(projectDirectory1, $"1.{evaluationCount}.cs"), ""); File.WriteAllText(Path.Combine(projectDirectory2, $"2.{evaluationCount}.cs"), ""); EvaluateProjects( new [] { new ProjectSpecification( Path.Combine(projectDirectory1, "1"), $@"<Project> <ItemGroup> <i Include=`{Path.Combine("**", "*.cs")}` /> </ItemGroup> </Project>"), new ProjectSpecification( Path.Combine(projectDirectory2, "2"), $@"<Project> <ItemGroup> <i Include=`{Path.Combine("**", "*.cs")}` /> </ItemGroup> </Project>"), }, context, project => { var projectName = Path.GetFileNameWithoutExtension(project.FullPath); var expectedGlobExpansion = expectedGlobExpansions[evaluationCount] .Select(i => $"{projectName}.{i}") .ToArray(); ObjectModelHelpers.AssertItems(expectedGlobExpansion, project.GetItems("i")); evaluationCount++; File.WriteAllText(Path.Combine(projectDirectory1, 
$"1.{evaluationCount}.cs"), ""); File.WriteAllText(Path.Combine(projectDirectory2, $"2.{evaluationCount}.cs"), ""); } ); } [Theory] [MemberData(nameof(ContextDisambiguatesRelativeGlobsData))] public void ContextDisambiguatesSameRelativeGlobsPointingOutsideDifferentProjectCones(EvaluationContext.SharingPolicy policy, string[][] expectedGlobExpansions) { var project1Root = _env.DefaultTestDirectory.CreateDirectory("Project1"); var project1Directory = project1Root.CreateDirectory("1").Path; var project1GlobDirectory = project1Root.CreateDirectory("Glob").CreateDirectory("1").Path; var project2Root = _env.DefaultTestDirectory.CreateDirectory("Project2"); var project2Directory = project2Root.CreateDirectory("2").Path; var project2GlobDirectory = project2Root.CreateDirectory("Glob").CreateDirectory("2").Path; var context = EvaluationContext.Create(policy); var evaluationCount = 0; File.WriteAllText(Path.Combine(project1GlobDirectory, $"1.{evaluationCount}.cs"), ""); File.WriteAllText(Path.Combine(project2GlobDirectory, $"2.{evaluationCount}.cs"), ""); EvaluateProjects( new [] { new ProjectSpecification( Path.Combine(project1Directory, "1"), $@"<Project> <ItemGroup> <i Include=`{Path.Combine("..", "Glob", "**", "*.cs")}`/> </ItemGroup> </Project>"), new ProjectSpecification( Path.Combine(project2Directory, "2"), $@"<Project> <ItemGroup> <i Include=`{Path.Combine("..", "Glob", "**", "*.cs")}`/> </ItemGroup> </Project>") }, context, project => { var projectName = Path.GetFileNameWithoutExtension(project.FullPath); // globs have the fixed directory part prepended, so add it to the expected results var expectedGlobExpansion = expectedGlobExpansions[evaluationCount] .Select(i => Path.Combine("..", "Glob", projectName, $"{projectName}.{i}")) .ToArray(); var actualGlobExpansion = project.GetItems("i"); ObjectModelHelpers.AssertItems(expectedGlobExpansion, actualGlobExpansion); evaluationCount++; File.WriteAllText(Path.Combine(project1GlobDirectory, $"1.{evaluationCount}.cs"), 
""); File.WriteAllText(Path.Combine(project2GlobDirectory, $"2.{evaluationCount}.cs"), ""); } ); } [Theory] [MemberData(nameof(ContextDisambiguatesRelativeGlobsData))] public void ContextDisambiguatesAFullyQualifiedGlobPointingInAnotherRelativeGlobsCone(EvaluationContext.SharingPolicy policy, string[][] expectedGlobExpansions) { var project1Directory = _env.DefaultTestDirectory.CreateDirectory("Project1"); var project1GlobDirectory = project1Directory.CreateDirectory("Glob").CreateDirectory("1").Path; var project2Directory = _env.DefaultTestDirectory.CreateDirectory("Project2"); var context = EvaluationContext.Create(policy); var evaluationCount = 0; File.WriteAllText(Path.Combine(project1GlobDirectory, $"{evaluationCount}.cs"), ""); EvaluateProjects( new [] { // first project uses a relative path new ProjectSpecification( Path.Combine(project1Directory.Path, "1"), $@"<Project> <ItemGroup> <i Include=`{Path.Combine("Glob", "**", "*.cs")}` /> </ItemGroup> </Project>"), // second project reaches out into first project's cone via a fully qualified path new ProjectSpecification( Path.Combine(project2Directory.Path, "2"), $@"<Project> <ItemGroup> <i Include=`{Path.Combine(project1Directory.Path, "Glob", "**", "*.cs")}` /> </ItemGroup> </Project>") }, context, project => { var projectName = Path.GetFileNameWithoutExtension(project.FullPath); // globs have the fixed directory part prepended, so add it to the expected results var expectedGlobExpansion = expectedGlobExpansions[evaluationCount] .Select(i => Path.Combine("Glob", "1", i)) .ToArray(); // project 2 has fully qualified directory parts, so make the results for 2 fully qualified if (projectName.Equals("2")) { expectedGlobExpansion = expectedGlobExpansion .Select(i => Path.Combine(project1Directory.Path, i)) .ToArray(); } var actualGlobExpansion = project.GetItems("i"); ObjectModelHelpers.AssertItems(expectedGlobExpansion, actualGlobExpansion); evaluationCount++; File.WriteAllText(Path.Combine(project1GlobDirectory, 
$"{evaluationCount}.cs"), ""); } ); } [Theory] [MemberData(nameof(ContextDisambiguatesRelativeGlobsData))] public void ContextDisambiguatesDistinctRelativeGlobsPointingOutsideOfSameProjectCone(EvaluationContext.SharingPolicy policy, string[][] expectedGlobExpansions) { var globDirectory = _env.DefaultTestDirectory.CreateDirectory("glob"); var projectRoot = _env.DefaultTestDirectory.CreateDirectory("proj"); var project1Directory = projectRoot.CreateDirectory("Project1"); var project2SubDir = projectRoot.CreateDirectory("subdirectory"); var project2Directory = project2SubDir.CreateDirectory("Project2"); var context = EvaluationContext.Create(policy); var evaluationCount = 0; File.WriteAllText(Path.Combine(globDirectory.Path, $"{evaluationCount}.cs"), ""); EvaluateProjects( new [] { new ProjectSpecification( Path.Combine(project1Directory.Path, "1"), @"<Project> <ItemGroup> <i Include=`../../glob/*.cs` /> </ItemGroup> </Project>"), new ProjectSpecification( Path.Combine(project2Directory.Path, "2"), @"<Project> <ItemGroup> <i Include=`../../../glob/*.cs` /> </ItemGroup> </Project>") }, context, project => { var projectName = Path.GetFileNameWithoutExtension(project.FullPath); var globFixedDirectoryPart = projectName.EndsWith("1") ? 
Path.Combine("..", "..", "glob") : Path.Combine("..", "..", "..", "glob"); // globs have the fixed directory part prepended, so add it to the expected results var expectedGlobExpansion = expectedGlobExpansions[evaluationCount] .Select(i => Path.Combine(globFixedDirectoryPart, i)) .ToArray(); var actualGlobExpansion = project.GetItems("i"); ObjectModelHelpers.AssertItems(expectedGlobExpansion, actualGlobExpansion); evaluationCount++; File.WriteAllText(Path.Combine(globDirectory.Path, $"{evaluationCount}.cs"), ""); } ); } [Theory] [MemberData(nameof(ContextPinsGlobExpansionCacheData))] // projects should cache glob expansions when the __fully qualified__ glob is shared between projects and points outside of project cone public void ContextCachesCommonOutOfProjectConeFullyQualifiedGlob(EvaluationContext.SharingPolicy policy, string[][] expectedGlobExpansions) { ContextCachesCommonOutOfProjectCone(itemSpecPathIsRelative: false, policy: policy, expectedGlobExpansions: expectedGlobExpansions); } [Theory (Skip="https://github.com/Microsoft/msbuild/issues/3889")] [MemberData(nameof(ContextPinsGlobExpansionCacheData))] // projects should cache glob expansions when the __relative__ glob is shared between projects and points outside of project cone public void ContextCachesCommonOutOfProjectConeRelativeGlob(EvaluationContext.SharingPolicy policy, string[][] expectedGlobExpansions) { ContextCachesCommonOutOfProjectCone(itemSpecPathIsRelative: true, policy: policy, expectedGlobExpansions: expectedGlobExpansions); } private void ContextCachesCommonOutOfProjectCone(bool itemSpecPathIsRelative, EvaluationContext.SharingPolicy policy, string[][] expectedGlobExpansions) { var testDirectory = _env.DefaultTestDirectory; var globDirectory = testDirectory.CreateDirectory("GlobDirectory"); var itemSpecDirectoryPart = itemSpecPathIsRelative ? 
Path.Combine("..", "GlobDirectory") : globDirectory.Path; Directory.CreateDirectory(globDirectory.Path); // Globs with a directory part will produce items prepended with that directory part foreach (var globExpansion in expectedGlobExpansions) { for (var i = 0; i < globExpansion.Length; i++) { globExpansion[i] = Path.Combine(itemSpecDirectoryPart, globExpansion[i]); } } var projectSpecs = new[] { $@"<Project> <ItemGroup> <i Include=`{Path.Combine("{0}", "**", "*.cs")}`/> </ItemGroup> </Project>", $@"<Project> <ItemGroup> <i Include=`{Path.Combine("{0}", "**", "*.cs")}`/> </ItemGroup> </Project>" } .Select(p => string.Format(p, itemSpecDirectoryPart)) .Select((p, i) => new ProjectSpecification(Path.Combine(testDirectory.Path, $"ProjectDirectory{i}", $"Project{i}.proj"), p)); var context = EvaluationContext.Create(policy); var evaluationCount = 0; File.WriteAllText(Path.Combine(globDirectory.Path, $"{evaluationCount}.cs"), ""); EvaluateProjects( projectSpecs, context, project => { var expectedGlobExpansion = expectedGlobExpansions[evaluationCount]; evaluationCount++; File.WriteAllText(Path.Combine(globDirectory.Path, $"{evaluationCount}.cs"), ""); ObjectModelHelpers.AssertItems(expectedGlobExpansion, project.GetItems("i")); } ); } private static string[] _projectsWithGlobImports = { @"<Project> <Import Project=`*.props` /> </Project>", @"<Project> <Import Project=`*.props` /> </Project>", }; [Theory] [MemberData(nameof(ContextPinsGlobExpansionCacheData))] public void ContextCachesImportGlobExpansions(EvaluationContext.SharingPolicy policy, string[][] expectedGlobExpansions) { var projectDirectory = _env.DefaultTestDirectory.Path; var context = EvaluationContext.Create(policy); var evaluationCount = 0; File.WriteAllText(Path.Combine(projectDirectory, $"{evaluationCount}.props"), $"<Project><ItemGroup><i Include=`{evaluationCount}.cs`/></ItemGroup></Project>".Cleanup()); EvaluateProjects( _projectsWithGlobImports, context, project => { var expectedGlobExpansion = 
expectedGlobExpansions[evaluationCount]; evaluationCount++; File.WriteAllText(Path.Combine(projectDirectory, $"{evaluationCount}.props"), $"<Project><ItemGroup><i Include=`{evaluationCount}.cs`/></ItemGroup></Project>".Cleanup()); ObjectModelHelpers.AssertItems(expectedGlobExpansion, project.GetItems("i")); } ); } private static string[] _projectsWithConditions = { @"<Project> <PropertyGroup Condition=`Exists('0.cs')`> <p>val</p> </PropertyGroup> </Project>", @"<Project> <PropertyGroup Condition=`Exists('0.cs')`> <p>val</p> </PropertyGroup> </Project>", }; [Theory] [InlineData(EvaluationContext.SharingPolicy.Isolated)] [InlineData(EvaluationContext.SharingPolicy.Shared)] public void ContextCachesExistenceChecksInConditions(EvaluationContext.SharingPolicy policy) { var projectDirectory = _env.DefaultTestDirectory.Path; var context = EvaluationContext.Create(policy); var theFile = Path.Combine(projectDirectory, "0.cs"); File.WriteAllText(theFile, ""); var evaluationCount = 0; EvaluateProjects( _projectsWithConditions, context, project => { evaluationCount++; if (File.Exists(theFile)) { File.Delete(theFile); } if (evaluationCount == 1) { project.GetPropertyValue("p").ShouldBe("val"); } else switch (policy) { case EvaluationContext.SharingPolicy.Shared: project.GetPropertyValue("p").ShouldBe("val"); break; case EvaluationContext.SharingPolicy.Isolated: project.GetPropertyValue("p").ShouldBeEmpty(); break; default: throw new ArgumentOutOfRangeException(nameof(policy), policy, null); } } ); } [Theory] [InlineData(EvaluationContext.SharingPolicy.Isolated)] [InlineData(EvaluationContext.SharingPolicy.Shared)] public void ContextCachesExistenceChecksInGetDirectoryNameOfFileAbove(EvaluationContext.SharingPolicy policy) { var context = EvaluationContext.Create(policy); var subdirectory = _env.DefaultTestDirectory.CreateDirectory("subDirectory"); var subdirectoryFile = subdirectory.CreateFile("a"); _env.DefaultTestDirectory.CreateFile("a"); int evaluationCount = 0; 
EvaluateProjects( new [] { $@"<Project> <PropertyGroup> <SearchedPath>$([MSBuild]::GetDirectoryNameOfFileAbove('{subdirectory.Path}', 'a'))</SearchedPath> </PropertyGroup> </Project>" }, context, project => { evaluationCount++; var searchedPath = project.GetProperty("SearchedPath"); switch (policy) { case EvaluationContext.SharingPolicy.Shared: searchedPath.EvaluatedValue.ShouldBe(subdirectory.Path); break; case EvaluationContext.SharingPolicy.Isolated: searchedPath.EvaluatedValue.ShouldBe( evaluationCount == 1 ? subdirectory.Path : _env.DefaultTestDirectory.Path); break; default: throw new ArgumentOutOfRangeException(nameof(policy), policy, null); } if (evaluationCount == 1) { // this will cause the upper file to get picked up in the Isolated policy subdirectoryFile.Delete(); } }); evaluationCount.ShouldBe(2); } [Theory] [InlineData(EvaluationContext.SharingPolicy.Isolated)] [InlineData(EvaluationContext.SharingPolicy.Shared)] public void ContextCachesExistenceChecksInGetPathOfFileAbove(EvaluationContext.SharingPolicy policy) { var context = EvaluationContext.Create(policy); var subdirectory = _env.DefaultTestDirectory.CreateDirectory("subDirectory"); var subdirectoryFile = subdirectory.CreateFile("a"); var rootFile = _env.DefaultTestDirectory.CreateFile("a"); int evaluationCount = 0; EvaluateProjects( new [] { $@"<Project> <PropertyGroup> <SearchedPath>$([MSBuild]::GetPathOfFileAbove('a', '{subdirectory.Path}'))</SearchedPath> </PropertyGroup> </Project>" }, context, project => { evaluationCount++; var searchedPath = project.GetProperty("SearchedPath"); switch (policy) { case EvaluationContext.SharingPolicy.Shared: searchedPath.EvaluatedValue.ShouldBe(subdirectoryFile.Path); break; case EvaluationContext.SharingPolicy.Isolated: searchedPath.EvaluatedValue.ShouldBe( evaluationCount == 1 ? 
subdirectoryFile.Path : rootFile.Path); break; default: throw new ArgumentOutOfRangeException(nameof(policy), policy, null); } if (evaluationCount == 1) { // this will cause the upper file to get picked up in the Isolated policy subdirectoryFile.Delete(); } }); evaluationCount.ShouldBe(2); } private void EvaluateProjects(IEnumerable<string> projectContents, EvaluationContext context, Action<Project> afterEvaluationAction) { EvaluateProjects( projectContents.Select((p, i) => new ProjectSpecification(Path.Combine(_env.DefaultTestDirectory.Path, $"Project{i}.proj"), p)), context, afterEvaluationAction); } private struct ProjectSpecification { public string ProjectFilePath { get; } public string ProjectContents { get; } public ProjectSpecification(string projectFilePath, string projectContents) { ProjectFilePath = projectFilePath; ProjectContents = projectContents; } public void Deconstruct(out string projectPath, out string projectContents) { projectPath = this.ProjectFilePath; projectContents = this.ProjectContents; } } /// <summary> /// Should be at least two test projects to test cache visibility between projects /// </summary> private void EvaluateProjects(IEnumerable<ProjectSpecification> projectSpecs, EvaluationContext context, Action<Project> afterEvaluationAction) { var collection = _env.CreateProjectCollection().Collection; var projects = new List<Project>(); foreach (var (projectFilePath, projectContents) in projectSpecs) { Directory.CreateDirectory(Path.GetDirectoryName(projectFilePath)); File.WriteAllText(projectFilePath, projectContents.Cleanup()); var project = Project.FromFile( projectFilePath, new ProjectOptions { ProjectCollection = collection, EvaluationContext = context, LoadSettings = ProjectLoadSettings.IgnoreMissingImports }); afterEvaluationAction?.Invoke(project); projects.Add(project); } foreach (var project in projects) { project.AddItem("a", "b"); project.ReevaluateIfNecessary(context); afterEvaluationAction?.Invoke(project); } } } }
//---------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// Description: ResourceGenerator class
//              It generates the localized baml from translations
//
//---------------------------------------------------------------------------
using System;
using System.IO;
using System.Windows;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Collections;
using System.Reflection;
using System.Reflection.Emit;
using System.Diagnostics;
using System.Resources;
using System.Threading;
using System.Windows.Threading;
using System.Windows.Markup.Localizer;

namespace BamlLocalization
{
    /// <summary>
    /// ResourceGenerator class.
    /// Generates localized output (a .baml stream, a .resources file, or a
    /// satellite assembly) by applying translation dictionaries to the input.
    /// </summary>
    internal static class ResourceGenerator
    {
        /// <summary>
        /// Generates localized Baml from translations.
        /// Dispatches on the input file type declared in the options.
        /// </summary>
        /// <param name="options">LocBaml options</param>
        /// <param name="dictionaries">the translation dictionaries</param>
        internal static void Generate(LocBamlOptions options, TranslationDictionariesReader dictionaries)
        {
            // based on the input, we generate differently
            switch (options.InputType)
            {
                case FileType.BAML:
                {
                    // input file name
                    string bamlName = Path.GetFileName(options.Input);

                    // output file name is Output dir + input file name
                    string outputFileName = GetOutputFileName(options);

                    // construct the full path
                    string fullPathOutput = Path.Combine(options.Output, outputFileName);

                    options.Write(StringLoader.Get("GenerateBaml", fullPathOutput));

                    using (Stream input = File.OpenRead(options.Input))
                    {
                        // FileMode.Create truncates any pre-existing output file.
                        using (Stream output = new FileStream(fullPathOutput, FileMode.Create))
                        {
                            BamlLocalizationDictionary dictionary = dictionaries[bamlName];

                            // if it is null, just create an empty dictionary.
                            if (dictionary == null)
                                dictionary = new BamlLocalizationDictionary();

                            GenerateBamlStream(input, output, dictionary, options);
                        }
                    }

                    options.WriteLine(StringLoader.Get("Done"));
                    break;
                }
                case FileType.RESOURCES:
                {
                    string outputFileName = GetOutputFileName(options);
                    string fullPathOutput = Path.Combine(options.Output, outputFileName);

                    using (Stream input = File.OpenRead(options.Input))
                    {
                        // BUGFIX: previously File.OpenWrite was used here. OpenWrite does not
                        // truncate an existing file, so a previously generated, longer file
                        // would keep stale trailing bytes and corrupt the output.
                        using (Stream output = new FileStream(fullPathOutput, FileMode.Create))
                        {
                            // create a Resource reader on the input;
                            IResourceReader reader = new ResourceReader(input);

                            // create a writer on the output;
                            IResourceWriter writer = new ResourceWriter(output);

                            GenerateResourceStream(
                                options,        // options
                                options.Input,  // resources name
                                reader,         // resource reader
                                writer,         // resource writer
                                dictionaries);  // translations

                            reader.Close();

                            // now generate and close
                            writer.Generate();
                            writer.Close();
                        }
                    }

                    options.WriteLine(StringLoader.Get("DoneGeneratingResource", outputFileName));
                    break;
                }
                case FileType.EXE:
                case FileType.DLL:
                {
                    GenerateAssembly(options, dictionaries);
                    break;
                }
                default:
                {
                    Debug.Assert(false, "Can't generate to this type");
                    break;
                }
            }
        }

        /// <summary>
        /// Localizes a single Baml stream: extracts the localizable resources,
        /// keeps only the entries that actually changed, and writes the updated
        /// Baml to <paramref name="output"/>.
        /// </summary>
        private static void GenerateBamlStream(Stream input, Stream output, BamlLocalizationDictionary dictionary, LocBamlOptions options)
        {
            // optional localization-comment file sits next to the input with a .loc extension
            string commentFile = Path.ChangeExtension(options.Input, "loc");
            TextReader commentStream = null;

            try
            {
                if (File.Exists(commentFile))
                {
                    commentStream = new StreamReader(commentFile);
                }

                // create a localizability resolver based on reflection
                BamlLocalizabilityByReflection localizabilityReflector =
                    new BamlLocalizabilityByReflection(options.Assemblies);

                // create baml localizer
                BamlLocalizer mgr = new BamlLocalizer(
                    input,
                    localizabilityReflector,
                    commentStream
                    );

                // get the resources
                BamlLocalizationDictionary source = mgr.ExtractResources();
                BamlLocalizationDictionary translations = new BamlLocalizationDictionary();

                foreach (DictionaryEntry entry in dictionary)
                {
                    BamlLocalizableResourceKey key = (BamlLocalizableResourceKey)entry.Key;

                    // filter out unchanged items: only keep entries that are new,
                    // deleted (null value), or whose content differs from the source
                    if (!source.Contains(key)
                      || entry.Value == null
                      || source[key].Content != ((BamlLocalizableResource)entry.Value).Content)
                    {
                        translations.Add(key, (BamlLocalizableResource)entry.Value);
                    }
                }

                // update baml
                mgr.UpdateBaml(output, translations);
            }
            finally
            {
                if (commentStream != null)
                {
                    commentStream.Close();
                }
            }
        }

        /// <summary>
        /// Walks a .resources stream, localizing every embedded Baml stream and
        /// copying every other entry through to the output writer.
        /// </summary>
        private static void GenerateResourceStream(
                LocBamlOptions options,                     // options from the command line
                string resourceName,                        // the name of the .resources file
                IResourceReader reader,                     // the reader for the .resources
                IResourceWriter writer,                     // the writer for the output .resources
                TranslationDictionariesReader dictionaries  // the translations
            )
        {
            options.WriteLine(StringLoader.Get("GenerateResource", resourceName));

            // enumerate through each resource and generate it
            foreach (DictionaryEntry entry in reader)
            {
                string name = entry.Key as string;
                object resourceValue = null;

                // See if it looks like a Baml resource
                if (BamlStream.IsResourceEntryBamlStream(name, entry.Value))
                {
                    Stream targetStream = null;
                    options.Write("    ");
                    options.Write(StringLoader.Get("GenerateBaml", name));

                    // grab the localizations available for this Baml
                    string bamlName = BamlStream.CombineBamlStreamName(resourceName, name);
                    BamlLocalizationDictionary localizations = dictionaries[bamlName];
                    if (localizations != null)
                    {
                        targetStream = new MemoryStream();

                        // generate into a new Baml stream
                        GenerateBamlStream(
                            (Stream)entry.Value,
                            targetStream,
                            localizations,
                            options
                        );
                    }
                    options.WriteLine(StringLoader.Get("Done"));

                    // sets the generated object to be the generated baml stream
                    resourceValue = targetStream;
                }

                if (resourceValue == null)
                {
                    //
                    // The stream is not localized as Baml yet, so we will make a copy of this item into
                    // the localized resources
                    //

                    // We will add the value as is if it is serializable. Otherwise, make a copy
                    resourceValue = entry.Value;

                    object[] serializableAttributes = resourceValue.GetType().GetCustomAttributes(typeof(SerializableAttribute), true);
                    if (serializableAttributes.Length == 0)
                    {
                        // The item returned from resource reader is not serializable.
                        // If it is Stream, we can wrap all the values in a MemoryStream and
                        // add to the resource. Otherwise, we had to skip this resource.
                        Stream resourceStream = resourceValue as Stream;
                        if (resourceStream != null)
                        {
                            // BUGFIX: the original code issued a single Read call for the whole
                            // stream length; Stream.Read may return fewer bytes than requested,
                            // which would silently truncate the copied resource. Copy in a loop.
                            MemoryStream copy = new MemoryStream();
                            byte[] buffer = new byte[4096];
                            int bytesRead;
                            while ((bytesRead = resourceStream.Read(buffer, 0, buffer.Length)) > 0)
                            {
                                copy.Write(buffer, 0, bytesRead);
                            }
                            copy.Position = 0;
                            resourceValue = copy;
                        }
                    }
                }

                if (resourceValue != null)
                {
                    writer.AddResource(name, resourceValue);
                }
            }
        }

        /// <summary>
        /// Copies an untyped standalone resource stream to a file on disk.
        /// </summary>
        private static void GenerateStandaloneResource(string fullPathName, Stream resourceStream)
        {
            // simply do a copy for the stream
            using (FileStream file = new FileStream(fullPathName, FileMode.Create, FileAccess.Write))
            {
                const int BUFFER_SIZE = 4096;
                byte[] buffer = new byte[BUFFER_SIZE];
                int bytesRead = 1;
                while (bytesRead > 0)
                {
                    bytesRead = resourceStream.Read(buffer, 0, BUFFER_SIZE);
                    file.Write(buffer, 0, bytesRead);
                }
            }
        }

        //--------------------------------------------------
        // The function follows Managed code parser
        // implementation. in the future, maybe they should
        // share the same code
        //--------------------------------------------------
        private static void GenerateAssembly(LocBamlOptions options, TranslationDictionariesReader dictionaries)
        {
            // there are many names to be used when generating an assembly
            string sourceAssemblyFullName = options.Input;                  // source assembly full path
            string outputAssemblyDir = options.Output;                      // output assembly directory
            string outputAssemblyLocalName = GetOutputFileName(options);    // output assembly name
            string moduleLocalName = GetAssemblyModuleLocalName(options, outputAssemblyLocalName); // the module name within the assembly

            // get the source assembly
            Assembly srcAsm = Assembly.LoadFrom(sourceAssemblyFullName);

            // obtain the assembly name
            AssemblyName targetAssemblyNameObj = srcAsm.GetName();

            // store the culture info of the source assembly
            CultureInfo srcCultureInfo = targetAssemblyNameObj.CultureInfo;

            // update it to use it for target assembly
            targetAssemblyNameObj.Name = Path.GetFileNameWithoutExtension(outputAssemblyLocalName);
            targetAssemblyNameObj.CultureInfo = options.CultureInfo;

            // we get a assembly builder
            AssemblyBuilder targetAssemblyBuilder = Thread.GetDomain().DefineDynamicAssembly(
                targetAssemblyNameObj,              // name of the assembly
                AssemblyBuilderAccess.RunAndSave,   // access rights
                outputAssemblyDir                   // storage dir
                );

            // we create a module builder for embedded resource modules
            ModuleBuilder moduleBuilder = targetAssemblyBuilder.DefineDynamicModule(
                moduleLocalName,
                outputAssemblyLocalName
                );

            options.WriteLine(StringLoader.Get("GenerateAssembly"));

            // now for each resource in the assembly
            foreach (string resourceName in srcAsm.GetManifestResourceNames())
            {
                // get the resource location for the resource
                ResourceLocation resourceLocation = srcAsm.GetManifestResourceInfo(resourceName).ResourceLocation;

                // if this resource is in another assembly, we will skip it
                if ((resourceLocation & ResourceLocation.ContainedInAnotherAssembly) != 0)
                {
                    continue;   // in resource assembly, we don't have resource that is contained in another assembly
                }

                // gets the neutral resource name, giving it the source culture info
                string neutralResourceName = GetNeutralResModuleName(resourceName, srcCultureInfo);

                // gets the target resource name, by giving it the target culture info
                string targetResourceName = GetCultureSpecificResourceName(neutralResourceName, options.CultureInfo);

                // resource stream
                Stream resourceStream = srcAsm.GetManifestResourceStream(resourceName);

                // see if it is a .resources
                if (neutralResourceName.ToLower(CultureInfo.InvariantCulture).EndsWith(".resources"))
                {
                    // now we think we have resource stream
                    // get the resource writer
                    IResourceWriter writer;

                    // check if it is a embedded assembly
                    if ((resourceLocation & ResourceLocation.Embedded) != 0)
                    {
                        // gets the resource writer from the module builder
                        writer = moduleBuilder.DefineResource(
                            targetResourceName,         // resource name
                            targetResourceName,         // resource description
                            ResourceAttributes.Public   // visibility of this resource to other assembly
                        );
                    }
                    else
                    {
                        // it is a standalone resource, we get the resource writer from the assembly builder
                        writer = targetAssemblyBuilder.DefineResource(
                            targetResourceName,         // resource name
                            targetResourceName,         // description
                            targetResourceName,         // file name to save to
                            ResourceAttributes.Public   // visibility of this resource to other assembly
                        );
                    }

                    // get the resource reader
                    IResourceReader reader = new ResourceReader(resourceStream);

                    // generate the resources
                    GenerateResourceStream(options, resourceName, reader, writer, dictionaries);

                    // we don't call writer.Generate() or writer.Close() here
                    // because the AssemblyBuilder will call them when we call Save() on it.
                }
                else
                {
                    // else it is a stand alone untyped manifest resources.
                    string extension = Path.GetExtension(targetResourceName);

                    string fullFileName = Path.Combine(outputAssemblyDir, targetResourceName);

                    // check if it is a .baml, case-insensitive
                    if (string.Compare(extension, ".baml", true, CultureInfo.InvariantCulture) == 0)
                    {
                        // try to localize the baml
                        // find the resource dictionary
                        BamlLocalizationDictionary dictionary = dictionaries[resourceName];

                        // localize only when we have a dictionary for this baml
                        if (dictionary != null)
                        {
                            // it is a baml stream
                            // BUGFIX: use FileMode.Create instead of File.OpenWrite so an
                            // existing, longer output file is truncated rather than
                            // partially overwritten.
                            using (Stream output = new FileStream(fullFileName, FileMode.Create))
                            {
                                options.Write("    ");
                                options.WriteLine(StringLoader.Get("GenerateStandaloneBaml", fullFileName));
                                GenerateBamlStream(resourceStream, output, dictionary, options);
                                options.WriteLine(StringLoader.Get("Done"));
                            }
                        }
                        else
                        {
                            // can't find localization of it, just copy it
                            GenerateStandaloneResource(fullFileName, resourceStream);
                        }
                    }
                    else
                    {
                        // it is an untyped resource stream, just copy it
                        GenerateStandaloneResource(fullFileName, resourceStream);
                    }

                    // now add this resource file into the assembly
                    targetAssemblyBuilder.AddResourceFile(
                        targetResourceName,         // resource name
                        targetResourceName,         // file name
                        ResourceAttributes.Public   // visibility of the resource to other assembly
                    );
                }
            }

            // at the end, generate the assembly
            targetAssemblyBuilder.Save(outputAssemblyLocalName);

            options.WriteLine(StringLoader.Get("DoneGeneratingAssembly"));
        }

        //-----------------------------------------
        // private function dealing with naming
        //-----------------------------------------

        // return the local output file name, i.e. without directory
        private static string GetOutputFileName(LocBamlOptions options)
        {
            string outputFileName;
            string inputFileName = Path.GetFileName(options.Input);

            switch (options.InputType)
            {
                case FileType.BAML:
                {
                    return inputFileName;
                }
                case FileType.EXE:
                {
                    // foo.exe -> foo.resources.dll
                    inputFileName = inputFileName.Remove(inputFileName.LastIndexOf('.')) + ".resources.dll";
                    return inputFileName;
                }
                case FileType.DLL:
                {
                    return inputFileName;
                }
                case FileType.RESOURCES:
                {
                    // get the output file name
                    outputFileName = inputFileName;

                    // get to the last dot separating filename and extension
                    int lastDot = outputFileName.LastIndexOf('.');
                    int secondLastDot = outputFileName.LastIndexOf('.', lastDot - 1);
                    if (secondLastDot > 0)
                    {
                        // if the name is foo.<culture>.resources, swap the culture
                        // part for the target culture's name
                        string cultureName = outputFileName.Substring(secondLastDot + 1, lastDot - secondLastDot - 1);
                        if (LocBamlConst.IsValidCultureName(cultureName))
                        {
                            string extension = outputFileName.Substring(lastDot);
                            string frontPart = outputFileName.Substring(0, secondLastDot + 1);
                            outputFileName = frontPart + options.CultureInfo.Name + extension;
                        }
                    }
                    return outputFileName;
                }
                default:
                {
                    throw new NotSupportedException();
                }
            }
        }

        // return the module name to use inside the generated satellite assembly
        private static string GetAssemblyModuleLocalName(LocBamlOptions options, string targetAssemblyName)
        {
            string moduleName;
            if (targetAssemblyName.ToLower(CultureInfo.InvariantCulture).EndsWith(".resources.dll"))
            {
                // we create the satellite assembly name
                moduleName = string.Format(
                        CultureInfo.InvariantCulture,
                        "{0}.{1}.{2}",
                        targetAssemblyName.Substring(0, targetAssemblyName.Length - ".resources.dll".Length),
                        options.CultureInfo.Name,
                        "resources.dll"
                    );
            }
            else
            {
                moduleName = targetAssemblyName;
            }
            return moduleName;
        }

        // return the neutral resource name
        private static string GetNeutralResModuleName(string resourceName, CultureInfo cultureInfo)
        {
            if (cultureInfo.Equals(CultureInfo.InvariantCulture))
            {
                return resourceName;
            }
            else
            {
                // if it is a satellite assembly, we need to strip out the culture name
                string normalizedName = resourceName.ToLower(CultureInfo.InvariantCulture);
                int end = normalizedName.LastIndexOf(".resources");

                if (end < 0)
                {
                    return resourceName;
                }

                int start = normalizedName.LastIndexOf('.', end - 1);

                if (start > 0 && end - start > 0)
                {
                    string cultureStr = resourceName.Substring(start + 1, end - start - 1);

                    if (string.Compare(cultureStr, cultureInfo.Name, true) == 0)
                    {
                        // it has the correct culture name, so we can take it out
                        return resourceName.Remove(start, end - start);
                    }
                }
                return resourceName;
            }
        }

        // return the culture-specific resource name for the target culture
        private static string GetCultureSpecificResourceName(string neutralResourceName, CultureInfo culture)
        {
            // gets the extension
            string extension = Path.GetExtension(neutralResourceName);

            // swap in culture name
            string cultureName = Path.ChangeExtension(neutralResourceName, culture.Name);

            // return the new name with the same extension
            return cultureName + extension;
        }
    }
}
using System;
using System.Runtime.InteropServices;

namespace TrueCraft.API
{
    /// <summary>
    /// Represents the location of an object in 3D space.
    /// </summary>
    [StructLayout(LayoutKind.Explicit)]
    public struct Vector3 : IEquatable<Vector3>
    {
        [FieldOffset(0)]
        public double X;
        [FieldOffset(8)]
        public double Y;
        [FieldOffset(16)]
        public double Z;

        /// <summary>
        /// Creates a vector with all three components set to the same value.
        /// </summary>
        public Vector3(double value)
        {
            X = Y = Z = value;
        }

        /// <summary>
        /// Creates a vector from explicit X, Y, and Z components.
        /// </summary>
        public Vector3(double x, double y, double z)
        {
            X = x;
            Y = y;
            Z = z;
        }

        /// <summary>
        /// Copy constructor.
        /// </summary>
        public Vector3(Vector3 v)
        {
            X = v.X;
            Y = v.Y;
            Z = v.Z;
        }

        /// <summary>
        /// Converts this Vector3 to a string in the format &lt;x,y,z&gt;.
        /// </summary>
        /// <returns></returns>
        public override string ToString()
        {
            return string.Format("<{0},{1},{2}>", X, Y, Z);
        }

        #region Math

        /// <summary>
        /// Truncates the decimal component of each part of this Vector3.
        /// </summary>
        public Vector3 Floor()
        {
            return new Vector3(Math.Floor(X), Math.Floor(Y), Math.Floor(Z));
        }

        /// <summary>
        /// Clamps the vector to within the specified value.
        /// </summary>
        /// <param name="value">Value.</param>
        public void Clamp(double value)
        {
            // each component is clamped to +/- value, preserving its sign
            if (Math.Abs(X) > value)
                X = value * (X < 0 ? -1 : 1);
            if (Math.Abs(Y) > value)
                Y = value * (Y < 0 ? -1 : 1);
            if (Math.Abs(Z) > value)
                Z = value * (Z < 0 ? -1 : 1);
        }

        /// <summary>
        /// Calculates the distance between two Vector3 objects.
        /// </summary>
        public double DistanceTo(Vector3 other)
        {
            return Math.Sqrt(Square(other.X - X) +
                             Square(other.Y - Y) +
                             Square(other.Z - Z));
        }

        /// <summary>
        /// Calculates the square of a num.
        /// </summary>
        private double Square(double num)
        {
            return num * num;
        }

        /// <summary>
        /// Finds the distance of this vector from Vector3.Zero
        /// </summary>
        public double Distance
        {
            get { return DistanceTo(Zero); }
        }

        /// <summary>
        /// Returns the component-wise minimum of two vectors.
        /// </summary>
        public static Vector3 Min(Vector3 value1, Vector3 value2)
        {
            return new Vector3(
                Math.Min(value1.X, value2.X),
                Math.Min(value1.Y, value2.Y),
                Math.Min(value1.Z, value2.Z)
                );
        }

        /// <summary>
        /// Returns the component-wise maximum of two vectors.
        /// </summary>
        public static Vector3 Max(Vector3 value1, Vector3 value2)
        {
            return new Vector3(
                Math.Max(value1.X, value2.X),
                Math.Max(value1.Y, value2.Y),
                Math.Max(value1.Z, value2.Z)
                );
        }

        #endregion

        #region Operators

        public static bool operator !=(Vector3 a, Vector3 b)
        {
            return !a.Equals(b);
        }

        public static bool operator ==(Vector3 a, Vector3 b)
        {
            return a.Equals(b);
        }

        public static Vector3 operator +(Vector3 a, Vector3 b)
        {
            return new Vector3(a.X + b.X, a.Y + b.Y, a.Z + b.Z);
        }

        public static Vector3 operator -(Vector3 a, Vector3 b)
        {
            return new Vector3(a.X - b.X, a.Y - b.Y, a.Z - b.Z);
        }

        public static Vector3 operator +(Vector3 a, Size b)
        {
            return new Vector3(a.X + b.Width, a.Y + b.Height, a.Z + b.Depth);
        }

        public static Vector3 operator -(Vector3 a, Size b)
        {
            return new Vector3(a.X - b.Width, a.Y - b.Height, a.Z - b.Depth);
        }

        public static Vector3 operator -(Vector3 a)
        {
            return new Vector3(-a.X, -a.Y, -a.Z);
        }

        public static Vector3 operator *(Vector3 a, Vector3 b)
        {
            return new Vector3(a.X * b.X, a.Y * b.Y, a.Z * b.Z);
        }

        public static Vector3 operator /(Vector3 a, Vector3 b)
        {
            return new Vector3(a.X / b.X, a.Y / b.Y, a.Z / b.Z);
        }

        public static Vector3 operator %(Vector3 a, Vector3 b)
        {
            return new Vector3(a.X % b.X, a.Y % b.Y, a.Z % b.Z);
        }

        public static Vector3 operator +(Vector3 a, double b)
        {
            return new Vector3(a.X + b, a.Y + b, a.Z + b);
        }

        public static Vector3 operator -(Vector3 a, double b)
        {
            return new Vector3(a.X - b, a.Y - b, a.Z - b);
        }

        public static Vector3 operator *(Vector3 a, double b)
        {
            return new Vector3(a.X * b, a.Y * b, a.Z * b);
        }

        public static Vector3 operator /(Vector3 a, double b)
        {
            return new Vector3(a.X / b, a.Y / b, a.Z / b);
        }

        public static Vector3 operator %(Vector3 a, double b)
        {
            // BUGFIX: the Z component previously used a.Y (copy-paste error),
            // i.e. returned (X % b, Y % b, Y % b).
            return new Vector3(a.X % b, a.Y % b, a.Z % b);
        }

        public static Vector3 operator +(double a, Vector3 b)
        {
            return new Vector3(a + b.X, a + b.Y, a + b.Z);
        }

        public static Vector3 operator -(double a, Vector3 b)
        {
            return new Vector3(a - b.X, a - b.Y, a - b.Z);
        }

        public static Vector3 operator *(double a, Vector3 b)
        {
            return new Vector3(a * b.X, a * b.Y, a * b.Z);
        }

        public static Vector3 operator /(double a, Vector3 b)
        {
            return new Vector3(a / b.X, a / b.Y, a / b.Z);
        }

        public static Vector3 operator %(double a, Vector3 b)
        {
            // BUGFIX: the Z component previously used b.Y (copy-paste error),
            // i.e. returned (a % X, a % Y, a % Y).
            return new Vector3(a % b.X, a % b.Y, a % b.Z);
        }

        #endregion

        #region Conversion operators

        public static implicit operator Vector3(Coordinates3D a)
        {
            return new Vector3(a.X, a.Y, a.Z);
        }

        public static explicit operator Vector3(Coordinates2D c)
        {
            return new Vector3(c.X, 0, c.Z);
        }

        public static implicit operator Vector3(Size s)
        {
            return new Vector3(s.Width, s.Height, s.Depth);
        }

        #endregion

        #region Constants

        public static readonly Vector3 Zero = new Vector3(0);
        public static readonly Vector3 One = new Vector3(1);

        public static readonly Vector3 Up = new Vector3(0, 1, 0);
        public static readonly Vector3 Down = new Vector3(0, -1, 0);
        public static readonly Vector3 Left = new Vector3(-1, 0, 0);
        public static readonly Vector3 Right = new Vector3(1, 0, 0);
        public static readonly Vector3 Backwards = new Vector3(0, 0, -1);
        public static readonly Vector3 Forwards = new Vector3(0, 0, 1);

        public static readonly Vector3 East = new Vector3(1, 0, 0);
        public static readonly Vector3 West = new Vector3(-1, 0, 0);
        public static readonly Vector3 North = new Vector3(0, 0, -1);
        public static readonly Vector3 South = new Vector3(0, 0, 1);

        #endregion

        public bool Equals(Vector3 other)
        {
            return other.X.Equals(X) && other.Y.Equals(Y) && other.Z.Equals(Z);
        }

        public override bool Equals(object obj)
        {
            return obj is Vector3 && Equals((Vector3)obj);
        }

        public override int GetHashCode()
        {
            unchecked
            {
                int result = X.GetHashCode();
                result = (result * 397) ^ Y.GetHashCode();
                result = (result * 397) ^ Z.GetHashCode();
                return result;
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

namespace Apache.Ignite.Core.Tests.Client
{
    using System;
    using System.IO;
    using System.Linq;
    using System.Net;
    using System.Net.Sockets;
    using System.Threading;
    using System.Threading.Tasks;
    using Apache.Ignite.Core.Cache.Configuration;
    using Apache.Ignite.Core.Cache.Query;
    using Apache.Ignite.Core.Client;
    using Apache.Ignite.Core.Client.Cache;
    using Apache.Ignite.Core.Configuration;
    using Apache.Ignite.Core.Impl.Common;
    using Apache.Ignite.Core.Log;
    using NUnit.Framework;

    /// <summary>
    /// Tests client connection: port ranges, version checks, etc.
    /// </summary>
    public class ClientConnectionTest
    {
        /** Temp dir for WAL. Unique per test-fixture instance; deleted in TearDown. */
        private readonly string _tempDir = PathUtils.GetTempDirectoryName();

        /// <summary>
        /// Sets up the test.
        /// </summary>
        [SetUp]
        public void SetUp()
        {
            TestUtils.ClearWorkDir();
        }

        /// <summary>
        /// Test tear down. Stops all nodes and removes the persistent store directory.
        /// </summary>
        [TearDown]
        public void TearDown()
        {
            Ignition.StopAll(true);

            if (Directory.Exists(_tempDir))
            {
                Directory.Delete(_tempDir, true);
            }

            TestUtils.ClearWorkDir();
        }

        /// <summary>
        /// Tests that missing server yields connection refused error.
        /// </summary>
        [Test]
        public void TestNoServerConnectionRefused()
        {
            var ex = Assert.Throws<AggregateException>(() => StartClient());
            var socketEx = ex.InnerExceptions.OfType<SocketException>().First();
            Assert.AreEqual(SocketError.ConnectionRefused, socketEx.SocketErrorCode);
        }

        /// <summary>
        /// Tests that empty username or password are not allowed.
        /// </summary>
        [Test]
        public void TestAuthenticationEmptyCredentials()
        {
            using (Ignition.Start(SecureServerConfig()))
            {
                var cliCfg = GetSecureClientConfig();

                // Null password.
                cliCfg.Password = null;
                var ex = Assert.Throws<IgniteClientException>(() => { Ignition.StartClient(cliCfg); });
                Assert.IsTrue(ex.Message.StartsWith("IgniteClientConfiguration.Password cannot be null"));

                // Empty password.
                cliCfg.Password = "";
                ex = Assert.Throws<IgniteClientException>(() => { Ignition.StartClient(cliCfg); });
                Assert.IsTrue(ex.Message.StartsWith("IgniteClientConfiguration.Password cannot be empty"));

                // Null username.
                cliCfg.Password = "ignite";
                cliCfg.UserName = null;
                ex = Assert.Throws<IgniteClientException>(() => { Ignition.StartClient(cliCfg); });
                Assert.IsTrue(ex.Message.StartsWith("IgniteClientConfiguration.UserName cannot be null"));

                // Empty username.
                // NOTE(review): "Username" casing differs from "UserName" above — presumably matches
                // the actual client-side validation message; confirm against the validation code.
                cliCfg.UserName = "";
                ex = Assert.Throws<IgniteClientException>(() => { Ignition.StartClient(cliCfg); });
                Assert.IsTrue(ex.Message.StartsWith("IgniteClientConfiguration.Username cannot be empty"));
            }
        }

        /// <summary>
        /// Test invalid username or password.
        /// </summary>
        [Test]
        public void TestAuthenticationInvalidCredentials()
        {
            using (Ignition.Start(SecureServerConfig()))
            {
                var cliCfg = GetSecureClientConfig();

                cliCfg.UserName = "invalid";
                var ex = Assert.Throws<IgniteClientException>(() => { Ignition.StartClient(cliCfg); });
                Assert.True(ex.StatusCode == ClientStatusCode.AuthenticationFailed);

                cliCfg.UserName = "ignite";
                cliCfg.Password = "invalid";
                ex = Assert.Throws<IgniteClientException>(() => { Ignition.StartClient(cliCfg); });
                Assert.True(ex.StatusCode == ClientStatusCode.AuthenticationFailed);
            }
        }

        /// <summary>
        /// Test authentication.
        /// </summary>
        [Test]
        public void TestAuthentication()
        {
            CreateNewUserAndAuthenticate("my_User", "my_Password");
        }

        /// <summary>
        /// Test authentication with a very long credential token (handshake message spanning buffers).
        /// </summary>
        [Test]
        public void TestAuthenticationLongToken()
        {
            string user = new string('G', 59);
            string pass = new string('q', 16 * 1024);
            CreateNewUserAndAuthenticate(user, pass);
        }

        /// <summary>
        /// Tests that multiple clients can connect to one server.
        /// </summary>
        [Test]
        public void TestMultipleClients()
        {
            using (Ignition.Start(TestUtils.GetTestConfiguration()))
            {
                var client1 = StartClient();
                var client2 = StartClient();
                var client3 = StartClient();

                client1.Dispose();
                client2.Dispose();
                client3.Dispose();
            }
        }

        /// <summary>
        /// Tests custom connector and client configuration.
        /// </summary>
        [Test]
        [Category(TestUtils.CategoryIntensive)]
        public void TestCustomConfig()
        {
            var servCfg = new IgniteConfiguration(TestUtils.GetTestConfiguration())
            {
                ClientConnectorConfiguration = new ClientConnectorConfiguration
                {
                    Host = "localhost",
                    Port = 2000,
                    PortRange = 1,
                    SocketSendBufferSize = 100,
                    SocketReceiveBufferSize = 50
                }
            };

            var clientCfg = new IgniteClientConfiguration
            {
                Endpoints = new[] {"localhost:2000"},
                Logger = new ConsoleLogger()
            };

            using (Ignition.Start(servCfg))
            using (var client = Ignition.StartClient(clientCfg))
            {
                // GetConfiguration() is expected to return a defensive copy:
                // not the original object, and a new instance on every call.
                Assert.AreNotEqual(clientCfg, client.GetConfiguration());
                Assert.AreNotEqual(client.GetConfiguration(), client.GetConfiguration());
                // Value equality is checked via XML serialization instead.
                Assert.AreEqual(clientCfg.ToXml(), client.GetConfiguration().ToXml());
            }
        }

        /// <summary>
        /// Tests client config with EndPoints property (host-only, host:port, and port-range forms).
        /// </summary>
        [Test]
        public void TestEndPoints()
        {
            using (var ignite = Ignition.Start(TestUtils.GetTestConfiguration()))
            {
                ignite.CreateCache<int, int>("foo");

                const int port = IgniteClientConfiguration.DefaultPort;

                // DnsEndPoint.
                var cfg = new IgniteClientConfiguration { Endpoints = new[] { "localhost" } };
                using (var client = Ignition.StartClient(cfg))
                {
                    Assert.AreEqual("foo", client.GetCacheNames().Single());
                }

                // IPEndPoint.
                cfg = new IgniteClientConfiguration { Endpoints = new[] { "127.0.0.1:" + port } };
                using (var client = Ignition.StartClient(cfg))
                {
                    Assert.AreEqual("foo", client.GetCacheNames().Single());
                }

                // Port range (default port 10800 is inside 10798..10800).
                cfg = new IgniteClientConfiguration("127.0.0.1:10798..10800");
                using (var client = Ignition.StartClient(cfg))
                {
                    Assert.AreEqual("foo", client.GetCacheNames().Single());
                }
            }
        }

        /// <summary>
        /// Tests that empty port range causes an exception.
        /// </summary>
        [Test]
        public void TestEmptyPortRangeThrows()
        {
            // Range start greater than range end => empty range.
            var cfg = new IgniteClientConfiguration("127.0.0.1:10800..10700");

            var ex = Assert.Throws<IgniteClientException>(() => Ignition.StartClient(cfg));

            Assert.AreEqual(
                "Invalid format of IgniteClientConfiguration.Endpoint, port range is empty: 127.0.0.1:10800..10700",
                ex.Message);
        }

        /// <summary>
        /// Tests that default configuration throws.
        /// </summary>
        [Test]
        public void TestDefaultConfigThrows()
        {
            Assert.Throws<IgniteClientException>(() => Ignition.StartClient(new IgniteClientConfiguration()));
        }

        /// <summary>
        /// Tests that connector can be disabled.
        /// </summary>
        [Test]
        public void TestDisabledConnector()
        {
            // Disable the whole client connector.
            var servCfg = new IgniteConfiguration(TestUtils.GetTestConfiguration())
            {
                ClientConnectorConfigurationEnabled = false
            };

            var clientCfg = new IgniteClientConfiguration
            {
                Endpoints = new[] {"localhost"}
            };

            using (Ignition.Start(servCfg))
            {
                var ex = Assert.Throws<AggregateException>(() => Ignition.StartClient(clientCfg));
                Assert.AreEqual("Failed to establish Ignite thin client connection, " +
                                "examine inner exceptions for details.", ex.Message.Substring(0, 88));
            }

            // Disable only thin client.
            servCfg = new IgniteConfiguration(TestUtils.GetTestConfiguration())
            {
                ClientConnectorConfiguration = new ClientConnectorConfiguration
                {
                    ThinClientEnabled = false
                }
            };

            using (Ignition.Start(servCfg))
            {
                var ex = Assert.Throws<IgniteClientException>(() => Ignition.StartClient(clientCfg));
                Assert.AreEqual("Client handshake failed: 'Thin client connection is not allowed, " +
                                "see ClientConnectorConfiguration.thinClientEnabled.'.", ex.Message.Substring(0, 118));
            }
        }

        /// <summary>
        /// Tests that we get a proper exception when server disconnects (node shutdown, network issues, etc).
        /// </summary>
        [Test]
        public void TestServerConnectionAborted()
        {
            var evt = new ManualResetEventSlim();
            var ignite = Ignition.Start(TestUtils.GetTestConfiguration());

            // Hammer the cache from a background task; the server node is killed mid-flight.
            var putGetTask = TaskRunner.Run(() =>
            {
                using (var client = StartClient())
                {
                    var cache = client.GetOrCreateCache<int, int>("foo");
                    evt.Set();  // Signal that the client is connected and working.

                    for (var i = 0; i < 100000; i++)
                    {
                        cache[i] = i;
                        Assert.AreEqual(i, cache.GetAsync(i).Result);
                    }
                }
            });

            evt.Wait();
            ignite.Dispose();

            var ex = Assert.Throws<AggregateException>(() => putGetTask.Wait());
            var baseEx = ex.GetBaseException();
            var socketEx = baseEx as SocketException;

            if (socketEx != null)
            {
                Assert.AreEqual(SocketError.ConnectionAborted, socketEx.SocketErrorCode);
            }
            else
            {
                Assert.Fail("Unexpected exception: " + ex);
            }
        }

        /// <summary>
        /// Tests the operation timeout: a 500k-entry PutAll cannot finish within 500 ms.
        /// </summary>
        [Test]
        [Category(TestUtils.CategoryIntensive)]
        public void TestOperationTimeout()
        {
            var data = Enumerable.Range(1, 500000).ToDictionary(x => x, x => x.ToString());

            Ignition.Start(TestUtils.GetTestConfiguration());

            var cfg = GetClientConfiguration();
            cfg.SocketTimeout = TimeSpan.FromMilliseconds(500);

            var client = Ignition.StartClient(cfg);
            var cache = client.CreateCache<int, string>("s");
            Assert.AreEqual(cfg.SocketTimeout, client.GetConfiguration().SocketTimeout);

            // Async.
            var task = cache.PutAllAsync(data);
            Assert.IsFalse(task.IsCompleted);
            var ex = Assert.Catch(() => task.Wait());
            Assert.AreEqual(SocketError.TimedOut, GetSocketException(ex).SocketErrorCode);

            // Sync (reconnect for clean state).
            Ignition.StopAll(true);
            Ignition.Start(TestUtils.GetTestConfiguration());

            client = Ignition.StartClient(cfg);
            cache = client.CreateCache<int, string>("s");

            ex = Assert.Catch(() => cache.PutAll(data));
            Assert.AreEqual(SocketError.TimedOut, GetSocketException(ex).SocketErrorCode);
        }

        /// <summary>
        /// Tests the client dispose while operations are in progress.
        /// </summary>
        [Test]
        [Category(TestUtils.CategoryIntensive)]
        public void TestClientDisposeWhileOperationsAreInProgress()
        {
            Ignition.Start(TestUtils.GetTestConfiguration());

            const int count = 100000;
            var ops = new Task[count];

            using (var client = StartClient())
            {
                var cache = client.GetOrCreateCache<int, int>("foo");

                // Fire a large number of async PutAlls, then dispose the client while some are pending.
                Parallel.For(0, count,
                    new ParallelOptions {MaxDegreeOfParallelism = Environment.ProcessorCount},
                    i =>
                    {
                        ops[i] = cache.PutAllAsync(Enumerable.Range(i*100, 100).ToDictionary(x => x, x => x));
                    });
            }

            var completed = ops.Count(x => x.Status == TaskStatus.RanToCompletion);
            Assert.Greater(completed, 0, "Some tasks should have completed.");

            var failed = ops.Where(x => x.Status == TaskStatus.Faulted).ToArray();
            Assert.IsTrue(failed.Any(), "Some tasks should have failed.");

            // Failed ops must fail with a socket or disposal error — nothing else.
            foreach (var task in failed)
            {
                var ex = task.Exception;
                Assert.IsNotNull(ex);
                var baseEx = ex.GetBaseException();
                Assert.IsNotNull((object) (baseEx as SocketException) ?? baseEx as ObjectDisposedException,
                    ex.ToString());
            }
        }

        /// <summary>
        /// Tests the <see cref="ClientConnectorConfiguration.IdleTimeout"/> property.
        /// </summary>
        [Test]
        [Category(TestUtils.CategoryIntensive)]
        public void TestIdleTimeout()
        {
            var cfg = new IgniteConfiguration(TestUtils.GetTestConfiguration())
            {
                ClientConnectorConfiguration = new ClientConnectorConfiguration
                {
                    IdleTimeout = TimeSpan.FromMilliseconds(100)
                }
            };

            var ignite = Ignition.Start(cfg);
            Assert.AreEqual(100, ignite.GetConfiguration().ClientConnectorConfiguration.IdleTimeout.TotalMilliseconds);

            using (var client = StartClient())
            {
                var cache = client.GetOrCreateCache<int, int>("foo");
                cache[1] = 1;
                Assert.AreEqual(1, cache[1]);

                // Staying under the idle timeout keeps the connection alive.
                Thread.Sleep(90);
                Assert.AreEqual(1, cache[1]);

                // Idle check frequency is 2 seconds.
                Thread.Sleep(4000);
                var ex = Assert.Catch(() => cache.Get(1));
                Assert.AreEqual(SocketError.ConnectionAborted, GetSocketException(ex).SocketErrorCode);
            }
        }

        /// <summary>
        /// Tests the protocol mismatch behavior: attempt to connect to an HTTP endpoint.
        /// </summary>
        [Test]
        public void TestProtocolMismatch()
        {
            using (Ignition.Start(TestUtils.GetTestConfiguration()))
            {
                // Connect to Ignite REST endpoint.
                var cfg = new IgniteClientConfiguration("127.0.0.1:11211");
                var ex = GetSocketException(Assert.Catch(() => Ignition.StartClient(cfg)));

                Assert.AreEqual(SocketError.ConnectionAborted, ex.SocketErrorCode);
            }
        }

        /// <summary>
        /// Tests reconnect logic with single server.
        /// </summary>
        [Test]
        public void TestReconnect()
        {
            // Connect client and check.
            Ignition.Start(TestUtils.GetTestConfiguration());
            var client = Ignition.StartClient(new IgniteClientConfiguration("127.0.0.1"));
            Assert.AreEqual(0, client.GetCacheNames().Count);

            var ep = client.RemoteEndPoint as IPEndPoint;
            Assert.IsNotNull(ep);
            Assert.AreEqual(IgniteClientConfiguration.DefaultPort, ep.Port);
            Assert.AreEqual("127.0.0.1", ep.Address.ToString());

            ep = client.LocalEndPoint as IPEndPoint;
            Assert.IsNotNull(ep);
            Assert.AreNotEqual(IgniteClientConfiguration.DefaultPort, ep.Port);
            Assert.AreEqual("127.0.0.1", ep.Address.ToString());

            // Stop server.
            Ignition.StopAll(true);

            // First request fails, error is detected.
            var ex = Assert.Catch(() => client.GetCacheNames());
            Assert.IsNotNull(GetSocketException(ex));

            // Second request causes reconnect attempt which fails (server is stopped).
            Assert.Catch(() => client.GetCacheNames());

            // Start server, next operation succeeds.
            Ignition.Start(TestUtils.GetTestConfiguration());
            Assert.AreEqual(0, client.GetCacheNames().Count);
        }

        /// <summary>
        /// Tests disabled reconnect behavior.
        /// </summary>
        [Test]
        public void TestReconnectDisabled()
        {
            // Connect client and check.
            Ignition.Start(TestUtils.GetTestConfiguration());

            using (var client = Ignition.StartClient(new IgniteClientConfiguration("127.0.0.1")
            {
                ReconnectDisabled = true
            }))
            {
                Assert.AreEqual(0, client.GetCacheNames().Count);

                // Stop server.
                Ignition.StopAll(true);

                // Request fails, error is detected.
                var ex = Assert.Catch(() => client.GetCacheNames());
                Assert.IsNotNull(GetSocketException(ex));

                // Restart server, client does not reconnect.
                Ignition.Start(TestUtils.GetTestConfiguration());
                ex = Assert.Catch(() => client.GetCacheNames());
                Assert.IsNotNull(GetSocketException(ex));
            }
        }

        /// <summary>
        /// Tests reconnect logic with multiple servers.
        /// </summary>
        [Test]
        public void TestFailover()
        {
            // Start 3 nodes. Grid names "0".."2" map to ports DefaultPort..DefaultPort+2.
            Ignition.Start(TestUtils.GetTestConfiguration(name: "0"));
            Ignition.Start(TestUtils.GetTestConfiguration(name: "1"));
            Ignition.Start(TestUtils.GetTestConfiguration(name: "2"));

            // Connect client.
            var port = IgniteClientConfiguration.DefaultPort;

            var cfg = new IgniteClientConfiguration
            {
                Endpoints = new[]
                {
                    "localhost",
                    string.Format("127.0.0.1:{0}..{1}", port + 1, port + 2)
                }
            };

            using (var client = Ignition.StartClient(cfg))
            {
                Assert.AreEqual(0, client.GetCacheNames().Count);

                // Stop target node. Node id is recovered from the connected port.
                var nodeId = ((IPEndPoint) client.RemoteEndPoint).Port - port;
                Ignition.Stop(nodeId.ToString(), true);

                // Check failure.
                Assert.IsNotNull(GetSocketException(Assert.Catch(() => client.GetCacheNames())));

                // Check reconnect.
                Assert.AreEqual(0, client.GetCacheNames().Count);

                // Stop target node.
                nodeId = ((IPEndPoint) client.RemoteEndPoint).Port - port;
                Ignition.Stop(nodeId.ToString(), true);

                // Check failure.
                Assert.IsNotNull(GetSocketException(Assert.Catch(() => client.GetCacheNames())));

                // Check reconnect.
                Assert.AreEqual(0, client.GetCacheNames().Count);

                // Stop all nodes: every subsequent call must fail.
                Ignition.StopAll(true);
                Assert.IsNotNull(GetSocketException(Assert.Catch(() => client.GetCacheNames())));
                Assert.IsNotNull(GetSocketException(Assert.Catch(() => client.GetCacheNames())));
            }
        }

        /// <summary>
        /// Starts the client with the default (loopback) configuration.
        /// </summary>
        private static IIgniteClient StartClient()
        {
            return Ignition.StartClient(GetClientConfiguration());
        }

        /// <summary>
        /// Gets the client configuration pointing at 127.0.0.1 with the default port.
        /// </summary>
        private static IgniteClientConfiguration GetClientConfiguration()
        {
            return new IgniteClientConfiguration(IPAddress.Loopback.ToString());
        }

        /// <summary>
        /// Finds SocketException in the hierarchy; throws if none is found
        /// (so a missing socket error fails the calling test with context).
        /// </summary>
        private static SocketException GetSocketException(Exception ex)
        {
            Assert.IsNotNull(ex);
            var origEx = ex;

            while (ex != null)
            {
                var socketEx = ex as SocketException;

                if (socketEx != null)
                {
                    return socketEx;
                }

                ex = ex.InnerException;
            }

            throw new Exception("SocketException not found.", origEx);
        }

        /// <summary>
        /// Create server configuration with enabled authentication.
        /// Authentication requires persistence, hence the data storage setup under _tempDir.
        /// </summary>
        /// <returns>Server configuration.</returns>
        private IgniteConfiguration SecureServerConfig()
        {
            return new IgniteConfiguration(TestUtils.GetTestConfiguration())
            {
                AuthenticationEnabled = true,
                DataStorageConfiguration = new DataStorageConfiguration
                {
                    StoragePath = Path.Combine(_tempDir, "Store"),
                    WalPath = Path.Combine(_tempDir, "WalStore"),
                    WalArchivePath = Path.Combine(_tempDir, "WalArchive"),
                    DefaultDataRegionConfiguration = new DataRegionConfiguration
                    {
                        Name = "default",
                        PersistenceEnabled = true
                    }
                }
            };
        }

        /// <summary>
        /// Create client configuration with enabled authentication
        /// (default superuser credentials ignite/ignite).
        /// </summary>
        /// <returns>Client configuration.</returns>
        private static IgniteClientConfiguration GetSecureClientConfig()
        {
            return new IgniteClientConfiguration("localhost")
            {
                UserName = "ignite",
                Password = "ignite"
            };
        }

        /// <summary>
        /// Start new node, create new user with given credentials and try to authenticate.
        /// </summary>
        /// <param name="user">Username</param>
        /// <param name="pass">Password</param>
        private void CreateNewUserAndAuthenticate(string user, string pass)
        {
            using (var srv = Ignition.Start(SecureServerConfig()))
            {
                // Persistence-enabled cluster starts inactive; activate before use.
                srv.GetCluster().SetActive(true);

                // First session: connect as superuser and create the new SQL user.
                using (var cli = Ignition.StartClient(GetSecureClientConfig()))
                {
                    CacheClientConfiguration ccfg = new CacheClientConfiguration
                    {
                        Name = "TestCache",
                        QueryEntities = new[]
                        {
                            new QueryEntity
                            {
                                KeyType = typeof(string),
                                ValueType = typeof(string),
                            },
                        },
                    };

                    ICacheClient<string, string> cache = cli.GetOrCreateCache<string, string>(ccfg);
                    cache.Put("key1", "val1");

                    cache.Query(new SqlFieldsQuery("CREATE USER \"" + user + "\" WITH PASSWORD '" + pass + "'")).GetAll();
                }

                // Second session: authenticate as the newly created user and read data back.
                var cliCfg = GetSecureClientConfig();
                cliCfg.UserName = user;
                cliCfg.Password = pass;

                using (var cli = Ignition.StartClient(cliCfg))
                {
                    ICacheClient<string, string> cache = cli.GetCache<string, string>("TestCache");
                    string val = cache.Get("key1");
                    Assert.True(val == "val1");
                }
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using Content.Server.GameTicking;
using Content.Server.GameTicking.Presets;
using Content.Server.Maps;
using Content.Server.RoundEnd;
using Content.Shared.CCVar;
using Content.Shared.Voting;
using Robust.Server.Player;
using Robust.Shared.Configuration;
using Robust.Shared.GameObjects;
using Robust.Shared.IoC;
using Robust.Shared.Localization;
using Robust.Shared.Prototypes;
using Robust.Shared.Random;

namespace Content.Server.Voting.Managers
{
    // Partial class: fields such as _playerManager, _cfg, _chatManager, _random,
    // _gameMapManager, _standardVoteTimeout and helpers CreateVote/WirePresetVoteInitiator/
    // DirtyCanCallVoteAll are declared in the other partial.
    public sealed partial class VoteManager
    {
        // Maps each standard vote type to the CVar that enables it.
        // NOTE(review): not referenced in this partial — presumably consumed by
        // CanCallVote logic in the other partial; verify before removing.
        private static readonly Dictionary<StandardVoteType, CVarDef<bool>> _voteTypesToEnableCVars = new()
        {
            {StandardVoteType.Restart, CCVars.VoteRestartEnabled},
            {StandardVoteType.Preset, CCVars.VotePresetEnabled},
            {StandardVoteType.Map, CCVars.VoteMapEnabled},
        };

        /// <summary>
        /// Creates one of the built-in standard votes and starts its same-type cooldown.
        /// </summary>
        /// <param name="initiator">Session that called the vote, or null for a server-initiated vote.</param>
        /// <param name="voteType">Which standard vote to create.</param>
        public void CreateStandardVote(IPlayerSession? initiator, StandardVoteType voteType)
        {
            switch (voteType)
            {
                case StandardVoteType.Restart:
                    CreateRestartVote(initiator);
                    break;
                case StandardVoteType.Preset:
                    CreatePresetVote(initiator);
                    break;
                case StandardVoteType.Map:
                    CreateMapVote(initiator);
                    break;
                default:
                    throw new ArgumentOutOfRangeException(nameof(voteType), voteType, null);
            }
            TimeoutStandardVote(voteType);
        }

        /// <summary>
        /// Creates a yes/no round-restart vote. Passing requires the configured yes-ratio;
        /// on success the round is ended via <see cref="RoundEndSystem"/>.
        /// </summary>
        private void CreateRestartVote(IPlayerSession? initiator)
        {
            // "Alone" = the initiator is the only player; use a shorter vote timer.
            var alone = _playerManager.PlayerCount == 1 && initiator != null;
            var options = new VoteOptions
            {
                Title = Loc.GetString("ui-vote-restart-title"),
                Options =
                {
                    (Loc.GetString("ui-vote-restart-yes"), true),
                    (Loc.GetString("ui-vote-restart-no"), false)
                },
                Duration = alone
                    ? TimeSpan.FromSeconds(_cfg.GetCVar(CCVars.VoteTimerAlone))
                    : TimeSpan.FromSeconds(_cfg.GetCVar(CCVars.VoteTimerRestart)),
                InitiatorTimeout = TimeSpan.FromMinutes(3)
            };

            if (alone)
                options.InitiatorTimeout = TimeSpan.FromSeconds(10);

            WirePresetVoteInitiator(options, initiator);

            var vote = CreateVote(options);

            vote.OnFinished += (_, _) =>
            {
                var votesYes = vote.VotesPerOption[true];
                var votesNo = vote.VotesPerOption[false];
                var total = votesYes + votesNo;

                var ratioRequired = _cfg.GetCVar(CCVars.VoteRestartRequiredRatio);
                if (votesYes / (float) total >= ratioRequired)
                {
                    _chatManager.DispatchServerAnnouncement(Loc.GetString("ui-vote-restart-succeeded"));
                    EntitySystem.Get<RoundEndSystem>().EndRound();
                }
                else
                {
                    _chatManager.DispatchServerAnnouncement(
                        Loc.GetString("ui-vote-restart-failed", ("ratio", ratioRequired)));
                }
            };

            if (initiator != null)
            {
                // Cast yes vote if created the vote yourself.
                vote.CastVote(initiator, 0);
            }

            foreach (var player in _playerManager.ServerSessions)
            {
                if (player != initiator && !_afkManager.IsAfk(player))
                {
                    // Everybody else defaults to a no vote.
                    vote.CastVote(player, 1);
                }
            }
        }

        /// <summary>
        /// Creates a game-preset vote from all prototypes marked ShowInVote.
        /// Ties are broken by a random pick among the winners.
        /// </summary>
        private void CreatePresetVote(IPlayerSession? initiator)
        {
            // preset ID -> localization key of its display title.
            var presets = new Dictionary<string, string>();

            foreach (var preset in _prototypeManager.EnumeratePrototypes<GamePresetPrototype>())
            {
                if(!preset.ShowInVote)
                    continue;

                presets[preset.ID] = preset.ModeTitle;
            }

            var alone = _playerManager.PlayerCount == 1 && initiator != null;
            var options = new VoteOptions
            {
                Title = Loc.GetString("ui-vote-gamemode-title"),
                Duration = alone
                    ? TimeSpan.FromSeconds(_cfg.GetCVar(CCVars.VoteTimerAlone))
                    : TimeSpan.FromSeconds(_cfg.GetCVar(CCVars.VoteTimerPreset))
            };

            if (alone)
                options.InitiatorTimeout = TimeSpan.FromSeconds(10);

            foreach (var (k, v) in presets)
            {
                options.Options.Add((Loc.GetString(v), k));
            }

            WirePresetVoteInitiator(options, initiator);

            var vote = CreateVote(options);

            vote.OnFinished += (_, args) =>
            {
                string picked;
                if (args.Winner == null)
                {
                    // Tie: pick a random winner and announce it as such.
                    picked = (string) _random.Pick(args.Winners);
                    _chatManager.DispatchServerAnnouncement(
                        Loc.GetString("ui-vote-gamemode-tie", ("picked", Loc.GetString(presets[picked]))));
                }
                else
                {
                    picked = (string) args.Winner;
                    _chatManager.DispatchServerAnnouncement(
                        Loc.GetString("ui-vote-gamemode-win", ("winner", Loc.GetString(presets[picked]))));
                }

                EntitySystem.Get<GameTicker>().SetGamePreset(picked);
            };
        }

        /// <summary>
        /// Creates a map vote over the currently eligible maps.
        /// Ties are broken by a random pick among the winners.
        /// </summary>
        private void CreateMapVote(IPlayerSession? initiator)
        {
            // map prototype -> display name.
            var maps = _gameMapManager.CurrentlyEligibleMaps().ToDictionary(map => map, map => map.MapName);

            var alone = _playerManager.PlayerCount == 1 && initiator != null;
            var options = new VoteOptions
            {
                Title = Loc.GetString("ui-vote-map-title"),
                Duration = alone
                    ? TimeSpan.FromSeconds(_cfg.GetCVar(CCVars.VoteTimerAlone))
                    : TimeSpan.FromSeconds(_cfg.GetCVar(CCVars.VoteTimerMap))
            };

            if (alone)
                options.InitiatorTimeout = TimeSpan.FromSeconds(10);

            foreach (var (k, v) in maps)
            {
                options.Options.Add((v, k));
            }

            WirePresetVoteInitiator(options, initiator);

            var vote = CreateVote(options);

            vote.OnFinished += (_, args) =>
            {
                GameMapPrototype picked;
                if (args.Winner == null)
                {
                    picked = (GameMapPrototype) _random.Pick(args.Winners);
                    _chatManager.DispatchServerAnnouncement(
                        Loc.GetString("ui-vote-map-tie", ("picked", maps[picked])));
                }
                else
                {
                    picked = (GameMapPrototype) args.Winner;
                    _chatManager.DispatchServerAnnouncement(
                        Loc.GetString("ui-vote-map-win", ("winner", maps[picked])));
                }

                _gameMapManager.TrySelectMap(picked.ID);
            };
        }

        /// <summary>
        /// Records the time until which another vote of the same type may not be called,
        /// then notifies clients that vote availability changed.
        /// </summary>
        private void TimeoutStandardVote(StandardVoteType type)
        {
            var timeout = TimeSpan.FromSeconds(_cfg.GetCVar(CCVars.VoteSameTypeTimeout));
            _standardVoteTimeout[type] = _timing.RealTime + timeout;
            DirtyCanCallVoteAll();
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using Xunit;

namespace System.Linq.Tests
{
    /// <summary>
    /// Tests for the two-sequence Enumerable.Zip overload that produces value tuples.
    /// Partial class: shared helpers (ThrowsOnMatchEnumerable, RunOnce, AssertExtensions)
    /// are defined elsewhere in the test project.
    /// </summary>
    public partial class ZipTests
    {
        [Fact]
        public void Zip2_ImplicitTypeParameters()
        {
            IEnumerable<int> first = new int[] { 1, 2, 3 };
            IEnumerable<int> second = new int[] { 2, 5, 9 };
            IEnumerable<(int, int)> expected = new (int,int)[] { (1,2), (2,5), (3,9) };

            Assert.Equal(expected, first.Zip(second));
        }

        [Fact]
        public void Zip2_ExplicitTypeParameters()
        {
            IEnumerable<int> first = new int[] { 1, 2, 3 };
            IEnumerable<int> second = new int[] { 2, 5, 9 };
            IEnumerable<(int, int)> expected = new (int,int)[] { (1,2), (2,5), (3,9) };

            Assert.Equal(expected, first.Zip<int, int>(second));
        }

        [Fact]
        public void Zip2_FirstIsNull()
        {
            IEnumerable<int> first = null;
            IEnumerable<int> second = new int[] { 2, 5, 9 };

            AssertExtensions.Throws<ArgumentNullException>("first", () => first.Zip<int, int>(second));
        }

        [Fact]
        public void Zip2_SecondIsNull()
        {
            IEnumerable<int> first = new int[] { 1, 2, 3 };
            IEnumerable<int> second = null;

            AssertExtensions.Throws<ArgumentNullException>("second", () => first.Zip<int, int>(second));
        }

        [Fact]
        public void Zip2_ExceptionThrownFromFirstsEnumerator()
        {
            // Match value 2 never appears in {1, 3, 3}, so enumeration succeeds.
            ThrowsOnMatchEnumerable<int> first = new ThrowsOnMatchEnumerable<int>(new int[] { 1, 3, 3 }, 2);
            IEnumerable<int> second = new int[] { 2, 4, 6 };
            IEnumerable<(int, int)> expected = new (int,int)[] { (1,2), (3,4), (3,6) };

            Assert.Equal(expected, first.Zip(second));

            // Now 2 IS in the source: the exception must surface on enumeration (deferred execution).
            first = new ThrowsOnMatchEnumerable<int>(new int[] { 1, 2, 3 }, 2);

            IEnumerable<(int, int)> zip = first.Zip(second);

            Assert.Throws<Exception>(() => zip.ToList());
        }

        [Fact]
        public void Zip2_ExceptionThrownFromSecondsEnumerator()
        {
            // Mirror of the previous test with the throwing sequence on the second side.
            ThrowsOnMatchEnumerable<int> second = new ThrowsOnMatchEnumerable<int>(new int[] { 1, 3, 3 }, 2);
            IEnumerable<int> first = new int[] { 2, 4, 6 };
            IEnumerable<(int, int)> expected = new (int,int)[] { (2,1), (4,3), (6,3) };

            Assert.Equal(expected, first.Zip(second));

            second = new ThrowsOnMatchEnumerable<int>(new int[] { 1, 2, 3 }, 2);

            IEnumerable<(int, int)> zip = first.Zip(second);

            Assert.Throws<Exception>(() => zip.ToList());
        }

        [Fact]
        public void Zip2_FirstAndSecondEmpty()
        {
            IEnumerable<int> first = new int[] { };
            IEnumerable<int> second = new int[] { };
            IEnumerable<(int, int)> expected = new (int, int)[] { };

            Assert.Equal(expected, first.Zip(second));
        }

        [Fact]
        public void Zip2_FirstEmptySecondSingle()
        {
            IEnumerable<int> first = new int[] { };
            IEnumerable<int> second = new int[] { 2 };
            IEnumerable<(int, int)> expected = new (int, int)[] { };

            Assert.Equal(expected, first.Zip(second));
        }

        [Fact]
        public void Zip2_FirstEmptySecondMany()
        {
            IEnumerable<int> first = new int[] { };
            IEnumerable<int> second = new int[] { 2, 4, 8 };
            IEnumerable<(int, int)> expected = new (int, int)[] { };

            Assert.Equal(expected, first.Zip(second));
        }

        [Fact]
        public void Zip2_SecondEmptyFirstSingle()
        {
            IEnumerable<int> first = new int[] { 1 };
            IEnumerable<int> second = new int[] { };
            IEnumerable<(int, int)> expected = new (int, int)[] { };

            Assert.Equal(expected, first.Zip(second));
        }

        [Fact]
        public void Zip2_SecondEmptyFirstMany()
        {
            IEnumerable<int> first = new int[] { 1, 2, 3 };
            IEnumerable<int> second = new int[] { };
            IEnumerable<(int, int)> expected = new (int, int)[] { };

            Assert.Equal(expected, first.Zip(second));
        }

        [Fact]
        public void Zip2_FirstAndSecondSingle()
        {
            IEnumerable<int> first = new int[] { 1 };
            IEnumerable<int> second = new int[] { 2 };
            IEnumerable<(int, int)> expected = new (int, int)[] { (1, 2) };

            Assert.Equal(expected, first.Zip(second));
        }

        [Fact]
        public void Zip2_FirstAndSecondEqualSize()
        {
            IEnumerable<int> first = new int[] { 1, 2, 3 };
            IEnumerable<int> second = new int[] { 2, 3, 4 };
            IEnumerable<(int, int)> expected = new (int, int)[] { (1, 2), (2, 3), (3, 4) };

            Assert.Equal(expected, first.Zip(second));
        }

        // Zip truncates to the shorter of the two sequences — the next four tests pin that.
        [Fact]
        public void Zip2_SecondOneMoreThanFirst()
        {
            IEnumerable<int> first = new int[] { 1, 2 };
            IEnumerable<int> second = new int[] { 2, 4, 8 };
            IEnumerable<(int, int)> expected = new (int, int)[] { (1, 2), (2, 4) };

            Assert.Equal(expected, first.Zip(second));
        }

        [Fact]
        public void Zip2_SecondManyMoreThanFirst()
        {
            IEnumerable<int> first = new int[] { 1, 2 };
            IEnumerable<int> second = new int[] { 2, 4, 8, 16 };
            IEnumerable<(int, int)> expected = new (int, int)[] { (1, 2), (2, 4) };

            Assert.Equal(expected, first.Zip(second));
        }

        [Fact]
        public void Zip2_FirstOneMoreThanSecond()
        {
            IEnumerable<int> first = new int[] { 1, 2, 3 };
            IEnumerable<int> second = new int[] { 2, 4 };
            IEnumerable<(int, int)> expected = new (int, int)[] { (1, 2), (2, 4) };

            Assert.Equal(expected, first.Zip(second));
        }

        [Fact]
        public void Zip2_FirstManyMoreThanSecond()
        {
            IEnumerable<int> first = new int[] { 1, 2, 3, 4 };
            IEnumerable<int> second = new int[] { 2, 4 };
            IEnumerable<(int, int)> expected = new (int, int)[] { (1, 2), (2, 4) };

            Assert.Equal(expected, first.Zip(second));
        }

        [Fact]
        public void Zip2_RunOnce()
        {
            // RunOnce wraps a sequence so it may only be enumerated a single time;
            // Zip must not enumerate either source more than once.
            IEnumerable<int?> first = new[] { 1, (int?)null, 3 };
            IEnumerable<int> second = new[] { 2, 4, 6, 8 };
            IEnumerable<(int?, int)> expected = new (int?, int)[] { (1, 2), (null, 4), (3, 6) };

            Assert.Equal(expected, first.RunOnce().Zip(second.RunOnce()));
        }

        [Fact]
        public void Zip2_NestedTuple()
        {
            IEnumerable<int> first = new[] { 1, 3, 5 };
            IEnumerable<int> second = new[] { 2, 4, 6 };
            IEnumerable<(int, int)> third = new[] { (1, 2), (3, 4), (5, 6) };

            Assert.Equal(third, first.Zip(second));

            // Zipping a sequence of tuples nests them on the left of the result tuple.
            IEnumerable<string> fourth = new[] { "one", "two", "three" };
            IEnumerable<((int, int), string)> final = new[] { ((1, 2), "one"), ((3, 4), "two"), ((5, 6), "three") };

            Assert.Equal(final, third.Zip(fourth));
        }

        [Fact]
        public void Zip2_TupleNames()
        {
            // The overload's return type names the tuple elements First/Second.
            var t = new[] { 1, 2, 3 }.Zip(new[] { 2, 4, 6 }).First();
            Assert.Equal(t.Item1, t.First);
            Assert.Equal(t.Item2, t.Second);
        }
    }
}
/* insert license info here */
using System;
using System.Collections;

namespace Business.Data.Laboratorio
{
    /// <summary>
    /// Audit-trail row for a protocol: records which action touched which analysis,
    /// the old and new values, and who/when.
    /// Generated by MyGeneration using the NHibernate Object Mapping template.
    /// </summary>
    [Serializable]
    public sealed class AuditoriaProtocolo : Business.BaseDataAccess
    {
        // Backing fields. _dirty flips to true the first time any property
        // receives a value different from the one it currently holds.
        private bool _dirty;
        private int _idAuditoriaProtocolo;
        private int _idProtocolo;
        private DateTime _fecha;
        private string _hora;
        private string _accion;
        private string _analisis;
        private string _valor;
        private string _valorAnterior;
        private int _idUsuario;

        /// <summary>
        /// Default constructor: numeric keys to 0, date to MinValue, strings to empty.
        /// </summary>
        public AuditoriaProtocolo()
        {
            _idAuditoriaProtocolo = 0;
            _idProtocolo = 0;
            _fecha = DateTime.MinValue;
            _hora = String.Empty;
            _accion = String.Empty;
            _analisis = String.Empty;
            _valor = String.Empty;
            _valorAnterior = String.Empty;
            _idUsuario = 0;
        }

        /// <summary>
        /// Required (not null) fields only constructor.
        /// Assigns the backing fields directly, so _dirty stays false.
        /// </summary>
        public AuditoriaProtocolo(
            int idprotocolo,
            DateTime fecha,
            string hora,
            string accion,
            string analisis,
            string valor,
            string valoranterior,
            int idusuario)
            : this()
        {
            _idProtocolo = idprotocolo;
            _fecha = fecha;
            _hora = hora;
            _accion = accion;
            _analisis = analisis;
            _valor = valor;
            _valorAnterior = valoranterior;
            _idUsuario = idusuario;
        }

        /// <summary>
        /// Validates a string column value (non-null, within maxLength), updates the
        /// dirty flag, and returns the value for assignment to the backing field.
        /// NOTE(review): preserves the generated code's exception contract, which
        /// throws ArgumentOutOfRangeException (not ArgumentNullException) for nulls
        /// and passes the message text in the paramName position — callers may
        /// depend on those exact exceptions, so they are kept as-is.
        /// </summary>
        private string Validated(string current, string value, int maxLength, string fieldName)
        {
            if (value == null)
                throw new ArgumentOutOfRangeException("Null value not allowed for " + fieldName, value, "null");

            if (value.Length > maxLength)
                throw new ArgumentOutOfRangeException("Invalid value for " + fieldName, value, value.ToString());

            _dirty |= (current != value);
            return value;
        }

        /// <summary>
        /// Surrogate primary key of the audit row.
        /// </summary>
        public int IdAuditoriaProtocolo
        {
            get { return _idAuditoriaProtocolo; }
            set
            {
                _dirty |= (_idAuditoriaProtocolo != value);
                _idAuditoriaProtocolo = value;
            }
        }

        /// <summary>
        /// Foreign key of the audited protocol.
        /// </summary>
        public int IdProtocolo
        {
            get { return _idProtocolo; }
            set
            {
                _dirty |= (_idProtocolo != value);
                _idProtocolo = value;
            }
        }

        /// <summary>
        /// Date of the audited action.
        /// </summary>
        public DateTime Fecha
        {
            get { return _fecha; }
            set
            {
                _dirty |= (_fecha != value);
                _fecha = value;
            }
        }

        /// <summary>
        /// Time of day of the audited action, max length 50.
        /// </summary>
        public string Hora
        {
            get { return _hora; }
            set { _hora = Validated(_hora, value, 50, "Hora"); }
        }

        /// <summary>
        /// Action performed, max length 50.
        /// </summary>
        public string Accion
        {
            get { return _accion; }
            set { _accion = Validated(_accion, value, 50, "Accion"); }
        }

        /// <summary>
        /// Analysis affected, max length 400.
        /// </summary>
        public string Analisis
        {
            get { return _analisis; }
            set { _analisis = Validated(_analisis, value, 400, "Analisis"); }
        }

        /// <summary>
        /// New value, max length 4000.
        /// </summary>
        public string Valor
        {
            get { return _valor; }
            set { _valor = Validated(_valor, value, 4000, "Valor"); }
        }

        /// <summary>
        /// Previous value, max length 400.
        /// </summary>
        public string ValorAnterior
        {
            get { return _valorAnterior; }
            set { _valorAnterior = Validated(_valorAnterior, value, 400, "ValorAnterior"); }
        }

        /// <summary>
        /// Foreign key of the user who performed the action.
        /// </summary>
        public int IdUsuario
        {
            get { return _idUsuario; }
            set
            {
                _dirty |= (_idUsuario != value);
                _idUsuario = value;
            }
        }

        /// <summary>
        /// Returns whether or not the object has changed it's values.
        /// </summary>
        public bool IsChanged
        {
            get { return _dirty; }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading;

namespace System.Net.Sockets
{
    // This class implements a safe socket handle.
    // It uses an inner and outer SafeHandle to do so. The inner
    // SafeHandle holds the actual socket, but only ever has one
    // reference to it. The outer SafeHandle guards the inner
    // SafeHandle with real ref counting. When the outer SafeHandle
    // is cleaned up, it releases the inner SafeHandle - since
    // its ref is the only ref to the inner SafeHandle, it deterministically
    // gets closed at that point - no races with concurrent IO calls.
    // This allows Close() on the outer SafeHandle to deterministically
    // close the inner SafeHandle, in turn allowing the inner SafeHandle
    // to block the user thread in case a graceful close has been
    // requested. (It's not legal to block any other thread - such closes
    // are always abortive.)
    internal partial class SafeCloseSocket :
#if DEBUG
        DebugSafeHandleMinusOneIsInvalid
#else
        SafeHandleMinusOneIsInvalid
#endif
    {
        protected SafeCloseSocket() : base(true) { }

        // Inner handle that owns the OS socket; atomically exchanged to null
        // exactly once (by ReleaseHandle or CloseAsIs) so only one path frees it.
        private InnerSafeCloseSocket _innerSocket;

        // Set after ReleaseHandle has run; CloseAsIs spins on this flag before
        // performing the blocking release of the inner handle.
        private volatile bool _released;

#if DEBUG
        private InnerSafeCloseSocket _innerSocketCopy;
#endif

        public override bool IsInvalid
        {
            get
            {
                return IsClosed || base.IsInvalid;
            }
        }

#if DEBUG
        /// <summary>Debug-only: bump the inner socket's operation ref count.</summary>
        public void AddRef()
        {
            try
            {
                // The inner socket can be closed by CloseAsIs and when SafeHandle runs ReleaseHandle.
                InnerSafeCloseSocket innerSocket = Volatile.Read(ref _innerSocket);
                if (innerSocket != null)
                {
                    innerSocket.AddRef();
                }
            }
            catch (Exception e)
            {
                Debug.Assert(false, "SafeCloseSocket.AddRef after inner socket disposed." + e);
            }
        }

        /// <summary>Debug-only: drop the inner socket's operation ref count.</summary>
        public void Release()
        {
            try
            {
                // The inner socket can be closed by CloseAsIs and when SafeHandle runs ReleaseHandle.
                InnerSafeCloseSocket innerSocket = Volatile.Read(ref _innerSocket);
                if (innerSocket != null)
                {
                    innerSocket.Release();
                }
            }
            catch (Exception e)
            {
                Debug.Assert(false, "SafeCloseSocket.Release after inner socket disposed." + e);
            }
        }
#endif

        // Adopts the inner socket and mirrors its raw handle into this outer handle.
        private void SetInnerSocket(InnerSafeCloseSocket socket)
        {
            _innerSocket = socket;
            SetHandle(socket.DangerousGetHandle());
#if DEBUG
            _innerSocketCopy = socket;
#endif
        }

        private static SafeCloseSocket CreateSocket(InnerSafeCloseSocket socket)
        {
            SafeCloseSocket ret = new SafeCloseSocket();
            CreateSocket(socket, ret);

            if (GlobalLog.IsEnabled)
            {
                GlobalLog.Print("SafeCloseSocket#" + LoggingHash.HashString(ret) + "::CreateSocket()");
            }

            return ret;
        }

        // Wires an inner socket into an outer handle, taking a DangerousAddRef
        // that is only released when the outer handle is released. On any
        // failure the target is marked invalid instead of throwing further.
        protected static void CreateSocket(InnerSafeCloseSocket socket, SafeCloseSocket target)
        {
            if (socket != null && socket.IsInvalid)
            {
                target.SetHandleAsInvalid();
                return;
            }

            bool b = false;
            try
            {
                socket.DangerousAddRef(ref b);
            }
            catch
            {
                if (b)
                {
                    socket.DangerousRelease();
                    b = false;
                }
            }
            finally
            {
                if (b)
                {
                    // The outer handle now holds the only long-lived ref;
                    // Dispose drops the construction-time reference.
                    target.SetInnerSocket(socket);
                    socket.Dispose();
                }
                else
                {
                    target.SetHandleAsInvalid();
                }
            }
        }

        protected override bool ReleaseHandle()
        {
            if (GlobalLog.IsEnabled)
            {
                // BUGFIX: the conditional must be parenthesized. '+' binds tighter
                // than '?:', so the unparenthesized form compared the concatenated
                // string to null (always false) and called HashString on a possibly
                // null _innerSocket instead of printing "null".
                GlobalLog.Print(
                    "SafeCloseSocket#" + LoggingHash.HashString(this) + "::ReleaseHandle() m_InnerSocket=" +
                    (_innerSocket == null ? "null" : LoggingHash.HashString(_innerSocket)));
            }

            _released = true;

            // Claim the inner socket exactly once; CloseAsIs may race with us.
            InnerSafeCloseSocket innerSocket = _innerSocket == null ? null : Interlocked.Exchange<InnerSafeCloseSocket>(ref _innerSocket, null);
            if (innerSocket != null)
            {
#if DEBUG
                // On AppDomain unload we may still have pending Overlapped operations.
                // ThreadPoolBoundHandle should handle this scenario by canceling them.
                innerSocket.LogRemainingOperations();
#endif
                innerSocket.DangerousRelease();
            }

            InnerReleaseHandle();

            return true;
        }

        // Close honoring linger options: disposes the outer handle, waits for
        // ReleaseHandle to have run, then releases the inner handle in
        // blocking mode (may block the calling user thread for graceful close).
        internal void CloseAsIs()
        {
            if (GlobalLog.IsEnabled)
            {
                // BUGFIX: parenthesize the conditional (same precedence issue as in
                // ReleaseHandle above).
                GlobalLog.Print(
                    "SafeCloseSocket#" + LoggingHash.HashString(this) + "::CloseAsIs() m_InnerSocket=" +
                    (_innerSocket == null ? "null" : LoggingHash.HashString(_innerSocket)));
            }

#if DEBUG
            // If this throws it could be very bad.
            try
            {
#endif
                InnerSafeCloseSocket innerSocket = _innerSocket == null ? null : Interlocked.Exchange<InnerSafeCloseSocket>(ref _innerSocket, null);

                Dispose();
                if (innerSocket != null)
                {
                    // Wait until it's safe.
                    SpinWait sw = new SpinWait();
                    while (!_released)
                    {
                        sw.SpinOnce();
                    }

                    // Now free it with blocking.
                    innerSocket.BlockingRelease();
                }

                InnerReleaseHandle();
#if DEBUG
            }
            catch (Exception exception)
            {
                if (!ExceptionCheck.IsFatal(exception) && GlobalLog.IsEnabled)
                {
                    GlobalLog.Assert("SafeCloseSocket::CloseAsIs(handle:" + handle.ToString("x") + ")", exception.Message);
                }

                throw;
            }
#endif
        }

        // Holds the single reference to the actual OS socket; its ReleaseHandle
        // performs the real close (blocking when BlockingRelease was used).
        internal sealed partial class InnerSafeCloseSocket : SafeHandleMinusOneIsInvalid
        {
            private InnerSafeCloseSocket() : base(true) { }

            // True when the close should honor linger (set by BlockingRelease).
            private bool _blockable;

            public override bool IsInvalid
            {
                get
                {
                    return IsClosed || base.IsInvalid;
                }
            }

            // This method is implicitly reliable and called from a CER.
            protected override bool ReleaseHandle()
            {
                bool ret = false;
#if DEBUG
                try
                {
#endif
                    if (GlobalLog.IsEnabled)
                    {
                        GlobalLog.Print("SafeCloseSocket::ReleaseHandle(handle:" + handle.ToString("x") + ")");
                    }

                    SocketError errorCode = InnerReleaseHandle();
                    return ret = errorCode == SocketError.Success;
#if DEBUG
                }
                catch (Exception exception)
                {
                    if (!ExceptionCheck.IsFatal(exception) && GlobalLog.IsEnabled)
                    {
                        GlobalLog.Assert("SafeCloseSocket::ReleaseHandle(handle:" + handle.ToString("x") + ")", exception.Message);
                    }

                    ret = true;  // Avoid a second assert.
                    throw;
                }
                finally
                {
                    _closeSocketThread = Environment.CurrentManagedThreadId;
                    _closeSocketTick = Environment.TickCount;
                    if (!ret && GlobalLog.IsEnabled)
                    {
                        GlobalLog.AssertFormat("SafeCloseSocket::ReleaseHandle(handle:{0:x})|ReleaseHandle failed.", handle);
                    }
                }
#endif
            }

#if DEBUG
            private IntPtr _closeSocketHandle;
            private SocketError _closeSocketResult = unchecked((SocketError)0xdeadbeef);
            private SocketError _closeSocketLinger = unchecked((SocketError)0xdeadbeef);
            private int _closeSocketThread;
            private int _closeSocketTick;

            private int _refCount = 0;

            public void AddRef()
            {
                Interlocked.Increment(ref _refCount);
            }

            public void Release()
            {
                Interlocked.MemoryBarrier();
                Debug.Assert(_refCount > 0, "InnerSafeCloseSocket: Release() called more times than AddRef");
                Interlocked.Decrement(ref _refCount);
            }

            public void LogRemainingOperations()
            {
                Interlocked.MemoryBarrier();
                if (GlobalLog.IsEnabled)
                {
                    GlobalLog.Print("InnerSafeCloseSocket: Releasing with pending operations: " + _refCount);
                }
            }
#endif

            // Use this method to close the socket handle using the linger options specified on the socket.
            // Guaranteed to only be called once, under a CER, and not if regular DangerousRelease is called.
            internal void BlockingRelease()
            {
#if DEBUG
                // Expected to have outstanding operations such as Accept.
                LogRemainingOperations();
#endif

                _blockable = true;
                DangerousRelease();
            }
        }
    }
}
using System;
using System.ComponentModel;
using System.Configuration;
using System.Data;
using System.Data.SqlClient;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Text;
using NDbUnit.Core;
using NDbUnit.Core.SqlClient;

namespace Tests.Unit.Lender.Slos.Database.Bases
{
    /// <summary>
    /// Base class for database-backed test contexts. Locates schema/data/script
    /// files relative to the test project, initializes the database through
    /// NDbUnit, and offers a simple single-value query helper.
    /// </summary>
    [ExcludeFromCodeCoverage]
    public abstract class TestContextBase : IDisposable
    {
        public const string DefaultConnectionString =
            @"Data Source=(local)\SQLExpress;Initial Catalog=Lender.Slos;Integrated Security=True";

        public const string DefaultProjectPath = @"Tests.Unit.Lender.Slos.Database\Bases";

        public const string DefaultXmlSchemaFilename = @"Lender.Slos.DataSet.xsd";

        public const string DefaultDatabaseInitializationSql = "database.initialization.sql";

        // Constructor
        protected TestContextBase()
        {
            ProjectPath = DefaultProjectPath;
            Provider = DatabaseProvider.SqlClient;
        }

        protected TestContextBase(Type typeOfClassUnderTest)
            : this()
        {
            ClassUnderTest = typeOfClassUnderTest;
        }

        ~TestContextBase()
        {
            Dispose(false);
        }

        public enum DatabaseProvider
        {
            [Description("System.Data.SqlClient")]
            SqlClient = 1,

            [Description("System.Data.SqlServerCe")]
            SqlCe = 2,

            [Description("System.Data.SQLite")]
            SqLite = 3,
        }

        public DatabaseProvider Provider { get; set; }

        public string ConnectionString { get; set; }

        public string ProjectPath { get; set; }

        public string FolderName { get; set; }

        public Type ClassUnderTest { get; set; }

        // Override in derived class to provide another DataSet filename.
        public virtual string XmlSchemaFilename
        {
            get { return null; }
        }

        /// <summary>
        /// Returns the "Tests.Surface.Runner" connection string from
        /// configuration, or <see cref="DefaultConnectionString"/> when absent.
        /// </summary>
        public string GetRunnerConnectionString()
        {
            var connectionString = ConfigurationManager
                .ConnectionStrings["Tests.Surface.Runner"];

            return connectionString != null
                       ? connectionString.ConnectionString
                       : DefaultConnectionString;
        }

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// Resets the database (running the cleanup script, if found) and loads
        /// the given XML data file through NDbUnit's CleanInsertIdentity.
        /// </summary>
        /// <param name="xmlDataFilename">Data file to load (resolved via GetDataPath).</param>
        /// <param name="executeCleanupScript">NOTE(review): currently unused; the
        /// cleanup script always runs when it exists. Kept for interface compatibility.</param>
        /// <param name="cleanupScript">Optional script filename overriding the default.</param>
        public void SetupTestDatabase(
            string xmlDataFilename,
            bool executeCleanupScript = true,
            string cleanupScript = null)
        {
            var database = InitializeDatabase(cleanupScript);

            var xmlSchemaFile = GetFilePath(
                ProjectPath,
                @"Bases\Data",
                DefaultXmlSchemaFilename);

            if (!string.IsNullOrEmpty(XmlSchemaFilename))
            {
                xmlSchemaFile = GetSchemaPath(XmlSchemaFilename);
            }

            database.ReadXmlSchema(xmlSchemaFile);

            var xmlFile = GetDataPath(xmlDataFilename);
            database.ReadXml(xmlFile);
            database.PerformDbOperation(DbOperationFlag.CleanInsertIdentity);
        }

        internal void Initialize(
            string projectPath)
        {
            InitializeConnection();

            // NOTE(review): projectPath is forwarded as InitializeDatabase's
            // cleanupScript parameter, not assigned to ProjectPath — looks like
            // a latent bug, but behavior is preserved here; verify against callers.
            InitializeDatabase(projectPath);
        }

        internal void InitializeConnection()
        {
            switch (Provider)
            {
                case DatabaseProvider.SqlClient:
                    if (string.IsNullOrEmpty(ConnectionString))
                    {
                        ConnectionString = GetRunnerConnectionString();
                    }

                    break;

                default:
                    throw new InvalidOperationException(
                        string.Format("Provider '{0}' is not supported.", Provider));
            }
        }

        /// <summary>
        /// Retrieves a single column value from the last row matching the WHERE
        /// clause. Throws <see cref="InvalidOperationException"/> when no rows
        /// or no result table are returned.
        /// </summary>
        /// <remarks>
        /// SECURITY: the query is built by string concatenation; callers must
        /// only pass trusted, test-controlled identifiers and clauses.
        /// </remarks>
        internal TData Retrieve<TData>(
            string columnName,
            string tableName,
            string whereClause)
        {
            // BUGFIX: the first guard previously reported "whereClause" as the
            // offending parameter when columnName was invalid.
            if (string.IsNullOrWhiteSpace(columnName))
                throw new ArgumentException("IsNullOrWhiteSpace", nameof(columnName));
            if (string.IsNullOrWhiteSpace(tableName))
                throw new ArgumentException("IsNullOrWhiteSpace", nameof(tableName));
            if (string.IsNullOrWhiteSpace(whereClause))
                throw new ArgumentException("IsNullOrWhiteSpace", nameof(whereClause));

            using (var sqlConnection = new SqlConnection(GetRunnerConnectionString()))
            {
                var dataSet = new DataSet();
                var stringBuilder = new StringBuilder();
                stringBuilder.AppendFormat(
                    "SELECT [{0}] FROM [dbo].[{1}] WHERE {2}",
                    columnName,
                    tableName,
                    whereClause);

                // Dispose the command and adapter deterministically.
                using (var command = sqlConnection.CreateCommand())
                {
                    command.CommandType = CommandType.Text;
                    command.CommandText = stringBuilder.ToString();

                    sqlConnection.Open();

                    using (var adapter = new SqlDataAdapter(command))
                    {
                        adapter.Fill(dataSet);
                    }

                    command.Parameters.Clear();
                }

                var returnValue = default(TData);
                if (dataSet.Tables.Count > 0)
                {
                    var table = dataSet.Tables[0];
                    if (table.Rows.Count == 0)
                    {
                        throw new InvalidOperationException("Query returned zero records");
                    }

                    // Last matching row wins, preserving the original behavior.
                    foreach (DataRow row in table.Rows)
                    {
                        returnValue = row.Field<TData>(columnName);
                    }
                }
                else
                {
                    throw new InvalidOperationException("Query returned no results");
                }

                return returnValue;
            }
        }

        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                // Clean up all managed resources
            }

            // Clean up any unmanaged resources here.
        }

        // Probes several candidate roots (ReSharper runner, current directory,
        // command-line runner) and returns the first existing path, or the
        // bare relative path when nothing matches.
        private static string GetFilePath(
            string projectPath,
            string relativePath,
            string filename)
        {
            var path = Path.Combine(relativePath, filename);

            // Running in the ReSharper runner.
            var relativeToBinPath = Path.Combine(Environment.CurrentDirectory, @"..\..\");
            var filePath = Path.Combine(relativeToBinPath, path);
            if (File.Exists(filePath)) return filePath;

            filePath = Path.Combine(Environment.CurrentDirectory, path);
            if (File.Exists(filePath)) return filePath;

            // Running in the command line test runner.
            var commandLinePath = Path.Combine(
                Environment.CurrentDirectory,
                projectPath);
            filePath = Path.Combine(commandLinePath, path);

            return File.Exists(filePath) ? filePath : path;
        }

        private INDbUnitTest CreateDbInstance()
        {
            switch (Provider)
            {
                case DatabaseProvider.SqlClient:
                    return new SqlDbUnitTest(ConnectionString);

                default:
                    throw new InvalidOperationException(
                        string.Format("Provider '{0}' is not supported.", Provider));
            }
        }

        private INDbUnitTest InitializeDatabase(
            string cleanupScript = null)
        {
            var database = CreateDbInstance();

            var filename = GetFilePath(
                ProjectPath,
                @"Bases\Scripts",
                cleanupScript ?? DefaultDatabaseInitializationSql);

            if (File.Exists(filename))
            {
                database.Scripts.AddSingle(filename);
                database.ExecuteScripts();
            }

            database.Scripts.ClearAll();

            return database;
        }

        private string GetSchemaPath(
            string filename)
        {
            // CONSISTENCY FIX: coalesce a null FolderName to "." like GetDataPath
            // does; a null previously produced the rooted path "\Data".
            var xmlSchemaPath = string.Format(@"{0}\Data", FolderName ?? ".");
            return GetFilePath(ProjectPath, xmlSchemaPath, filename);
        }

        private string GetDataPath(
            string filename)
        {
            var xmlDataPath = string.Format(@"{0}\Data", FolderName ?? ".");
            return GetFilePath(ProjectPath, xmlDataPath, filename);
        }
    }
}
using ArcGIS.Core.CIM;
using ArcGIS.Core.Geometry;
using ArcGIS.Desktop.Core;
using ArcGIS.Desktop.Framework;
using ArcGIS.Desktop.Framework.Contracts;
using ArcGIS.Desktop.Framework.Threading.Tasks;
using ArcGIS.Desktop.Layouts;
using ArcGIS.Desktop.Mapping;
using ArcGIS.Desktop.Reports;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace ReportAPITesting
{
    /// <summary>
    /// ProSnippet sample methods for the ArcGIS Pro Reports API. The #region
    /// markers are snippet-extraction anchors and must be preserved verbatim.
    /// </summary>
    internal static class ReportHelper
    {
        #region ProSnippet Group: Report Project Items
        #endregion

        public static void GetAllReports()
        {
            #region Gets all the reports in the current project
            var projectReports = Project.Current.GetItems<ReportProjectItem>();
            foreach (var reportItem in projectReports)
            {
                //Do Something with the report
            }
            #endregion
        }

        // FIX: async void -> async Task so exceptions are observable and the
        // call is awaitable; existing call sites still compile.
        public static async Task GetReport(string reportName)
        {
            #region Get a specific report
            ReportProjectItem reportProjItem = Project.Current.GetItems<ReportProjectItem>().FirstOrDefault(item => item.Name.Equals(reportName));
            Report report = reportProjItem?.GetReport();
            #endregion

            #region Open a Report project item in a new view
            //Open a report project item in a new view.
            //A report project item may exist but it may not be open in a view.

            //Reference a report project item by name
            ReportProjectItem reportPrjItem = Project.Current.GetItems<ReportProjectItem>().FirstOrDefault(item => item.Name.Equals("MyReport"));

            //Get the report associated with the report project item
            Report reportToOpen = await QueuedTask.Run(() => reportPrjItem.GetReport());

            //Create the new pane
            IReportPane newReportPane = await ProApp.Panes.CreateReportPaneAsync(reportToOpen); //GUI thread
            #endregion
        }

        public static void ReportMethods()
        {
            // cref: Activate an already open report view;ArcGIS.Desktop.Reports.ReportView
            #region Activate an already open report view
            Report report = Project.Current.GetItems<ReportProjectItem>().FirstOrDefault().GetReport();
            var reportPane = FrameworkApplication.Panes.FindReportPanes(report).Last();
            if (reportPane == null)
                return;
            //Activate the pane
            (reportPane as ArcGIS.Desktop.Framework.Contracts.Pane).Activate();
            //Get the "ReportView" associated with the Report Pane.
            ReportView reportView = reportPane.ReportView;
            #endregion

            #region Reference the active report view
            //Confirm if the current, active view is a report view. If it is, do something.
            ReportView activeReportView = ReportView.Active;
            if (activeReportView != null)
            {
                // do something
            }
            #endregion

            // cref: Refresh the report view;ArcGIS.Desktop.Reports.ReportView.Refresh
            #region Refresh the report view
            if (reportView == null)
                return;
            QueuedTask.Run(() => reportView.Refresh());
            #endregion

            // cref: Zoom to whole page;ArcGIS.Desktop.Reports.ReportView.ZoomToWholePage
            #region Zoom to whole page
            QueuedTask.Run(() => reportView.ZoomToWholePage());
            #endregion

            // cref: Zoom to specific location on Report view;ArcGIS.Desktop.Reports.ReportView.ZoomTo(ArcGIS.Core.Geometry.Geometry)
            #region Zoom to specific location on Report view
            //On the QueuedTask
            var detailsSection = report.Elements.OfType<ReportSection>().FirstOrDefault().Elements.OfType<ReportDetails>().FirstOrDefault();
            var bounds = detailsSection.GetBounds();
            ReportView.Active.ZoomTo(bounds);
            #endregion

            // cref: Zoom to page width;ArcGIS.Desktop.Reports.ReportView.ZoomToPageWidth
            #region Zoom to page width
            //Process on worker thread
            QueuedTask.Run(() => reportView.ZoomToPageWidth());
            #endregion
        }

        #region ProSnippet Group: Create Report
        #endregion

        // FIX: async void -> async Task (see GetReport).
        public static async Task GenerateReport(FeatureLayer featureLayer)
        {
            #region Create report
            //Note: Call within QueuedTask.Run()
            //The fields in the datasource used for the report
            //This uses a US Cities dataset
            var listFields = new List<CIMReportField>
            {
                //Grouping should be the first field
                new CIMReportField{Name = "STATE_NAME", FieldOrder = 0, Group = true, SortInfo = FieldSortInfo.Desc}, //Group cities using STATES
                new CIMReportField{Name = "CITY_NAME", FieldOrder = 1},
                new CIMReportField{Name = "POP1990", FieldOrder = 2, },
            };
            //Definition query to use for the data source
            var defQuery = "STATE_NAME LIKE 'C%'";
            //Define the Datasource
            //pass true to use the selection set
            var reportDataSource = new ReportDataSource(featureLayer, defQuery, false, listFields);
            //The CIMPage defintion - page size, units, etc
            var cimReportPage = new CIMPage
            {
                Height = 11,
                StretchElements = false,
                Width = 6.5,
                ShowRulers = true,
                ShowGuides = true,
                Margin = new CIMMargin { Bottom = 1, Left = 1, Right = 1, Top = 1 },
                Units = LinearUnit.Inches
            };
            //Report template
            var reportTemplates = await ReportTemplateManager.GetTemplatesAsync();
            var reportTemplate = reportTemplates.Where(r => r.Name == "Attribute List with Grouping").First();
            //Report Styling
            var reportStyles = await ReportStylingManager.GetStylingsAsync();
            var reportStyle = reportStyles.Where(s => s == "Cool Tones").First();
            //Field Statistics
            var fieldStatisticsList = new List<ReportFieldStatistic>
            {
                new ReportFieldStatistic{ Field = "POP1990", Statistic = FieldStatisticsFlag.Sum}
                //Note: NoStatistics option for FieldStatisticsFlag is not supported.
            };
            var report = ReportFactory.Instance.CreateReport("USAReport", reportDataSource, cimReportPage, fieldStatisticsList, reportTemplate, reportStyle);
            #endregion
        }

        public static void ExportAReport(Report report, string path, bool useSelection = true)
        {
            if (report == null) return;
            #region Export report to pdf
            //Note: Call within QueuedTask.Run()
            //Define Export Options
            var exportOptions = new ReportExportOptions
            {
                ExportPageOption = ExportPageOptions.ExportAllPages,
                TotalPageNumberOverride = 0
            };
            //Create PDF format with appropriate settings
            PDFFormat pdfFormat = new PDFFormat();
            pdfFormat.Resolution = 300;
            pdfFormat.OutputFileName = path;
            report.ExportToPDF($"{report.Name}", pdfFormat, exportOptions, useSelection);
            #endregion
        }

        public static void ImportAReport(string reportFile)
        {
            #region Import a report file
            //Note: Call within QueuedTask.Run()
            Item reportToImport = ItemFactory.Instance.Create(reportFile);
            Project.Current.AddItem(reportToImport as IProjectItem);
            #endregion
        }

        public static Task<bool> DeleteReport(string reportName)
        {
            #region Delete a report
            //Note: Call within QueuedTask.Run()
            //Reference a reportitem in a project by name
            ReportProjectItem reportItem = Project.Current.GetItems<ReportProjectItem>().FirstOrDefault(item => item.Name.Equals(reportName));
            //Check for report item
            if (reportItem == null)
                return Task.FromResult<bool>(false);
            //Delete the report from the project
            return Task.FromResult<bool>(Project.Current.RemoveItem(reportItem));
            #endregion
        }

        #region ProSnippet Group: Modify Reports
        #endregion

        public static void ModifyReport(Report report, string reportName, FeatureLayer featureLayer)
        {
            #region Rename Report
            //Note: Call within QueuedTask.Run()
            ReportProjectItem reportProjItem = Project.Current.GetItems<ReportProjectItem>().FirstOrDefault(item => item.Name.Equals(reportName));
            reportProjItem.GetReport().SetName("RenamedReport");
            #endregion

            #region Modify the Report datasource
            //Note: Call within QueuedTask.Run()
            //Remove Groups
            // The fields in the datasource used for the report
            var listFields = new List<string>
            {
                "STATE_NAME"
            };
            report.RemoveGroups(listFields);
            //Add Group
            report.AddGroup("STATE_NAME", true, true, "");
            //Modify the Definition Query
            var defQuery = "STATE_NAME LIKE 'C%'";
            report.SetDefinitionQuery(defQuery);
            #endregion

            #region Modify the report Page
            //Note: Call within QueuedTask.Run()
            var cimReportPage = new CIMPage
            {
                Height = 12,
                StretchElements = false,
                Width = 6.5,
                ShowRulers = true,
                ShowGuides = true,
                Margin = new CIMMargin { Bottom = 1, Left = 1, Right = 1, Top = 1 },
                Units = LinearUnit.Inches
            };
            report.SetPage(cimReportPage);
            //Change only the report's page height
            report.SetPageHeight(12);
            #endregion
        }

        public static void AddSubReport()
        {
            QueuedTask.Run(() =>
            {
                #region Add SubReport
                //Note: Call within QueuedTask.Run()
                var mainReport = Project.Current.GetItems<ReportProjectItem>().FirstOrDefault(r => r.Name == "USAReports")?.GetReport();
                if (mainReport == null) return;
                //Add sub report
                var vermontReportItem = Project.Current.GetItems<ReportProjectItem>().FirstOrDefault(r => r.Name == "Vermont");
                if (vermontReportItem == null) return;
                Report vermontReport = vermontReportItem.GetReport();
                mainReport.AddSubReport(vermontReportItem, -1, true); // If -1, the subreport is added to the end of the report.
                #endregion
            });
        }

        #region ProSnippet Group: Report Design
        #endregion

        public static async Task<ReportTemplate> GetReportTemplates(string reportTemplateName)
        {
            #region Get a report template
            //Report Template Styles:
            //Attribute List
            //Attribute List with Grouping
            //Basic Summary
            //Basic Summary with Grouping
            //Page Per Feature
            var reportTemplates = await ReportTemplateManager.GetTemplatesAsync();
            var reportTemplate = reportTemplates.Where(r => r.Name == reportTemplateName).First();
            #endregion
            System.Diagnostics.Debug.WriteLine(reportTemplate.Name);
            return reportTemplate;
        }

        public static async Task<string> GetReportStyling(string reportStyleName)
        {
            #region Get a report styling
            //Report Styling:
            //Black and White
            //Cool Tones
            //Warm Tones
            var reportStyles = await ReportStylingManager.GetStylingsAsync();
            var reportStyle = reportStyles.Where(s => s == reportStyleName).First();
            #endregion
            System.Diagnostics.Debug.WriteLine(reportStyle.ToString());
            return reportStyle;
        }

        #region ProSnippet Group: Report Elements
        #endregion

        public static void ElementFactory(Report report, ReportView reportView)
        {
            #region Get various Report sections
            //Get the "ReportSection element"
            //ReportSectionElement contains the ReportHeader, ReportPageHeader, ReportDetails. ReportPageFooter, ReportFooter sections.
            var mainReportSection = report.Elements.OfType<ReportSection>().FirstOrDefault();

            //Get the ReportHeader
            var reportHeader = mainReportSection?.Elements.OfType<ReportHeader>().FirstOrDefault();

            //Get the ReportPageHeader (FIX: comment previously duplicated "ReportHeader")
            var reportPageHeader = mainReportSection?.Elements.OfType<ReportPageHeader>().FirstOrDefault();

            //Get the "ReportDetails" within the ReportSectionElement. ReportDetails is where "fields" are.
            var reportDetailsSection = mainReportSection?.Elements.OfType<ReportDetails>().FirstOrDefault();

            //Get the ReportPageFooter
            var reportPageFooter = mainReportSection?.Elements.OfType<ReportPageFooter>().FirstOrDefault();

            //Get the ReportFooter
            var reportFooter = mainReportSection?.Elements.OfType<ReportFooter>().FirstOrDefault();
            #endregion

            // cref: Select elements;ArcGIS.Desktop.Reports.ReportView.SelectElements(System.Collections.Generic.IReadOnlyList{ArcGIS.Desktop.Layouts.Element})
            #region Select elements
            //ReportDetailsSection contains the "Fields"
            var elements = reportDetailsSection.GetElementsAsFlattenedList();
            reportDetailsSection.SelectElements(elements);
            #endregion

            // cref: Select all elements;ArcGIS.Desktop.Reports.ReportView.SelectAllElements
            #region Select all elements
            //Select all elements in the Report Footer.
            ReportPageFooter pageFooterSection = report.Elements.OfType<ReportSection>().FirstOrDefault().Elements.OfType<ReportPageFooter>().FirstOrDefault();
            pageFooterSection.SelectAllElements();
            #endregion

            // cref: Get selected elements;ArcGIS.Desktop.Reports.ReportView.GetSelectedElements
            #region Get selected elements
            IReadOnlyList<Element> selectedElements = report.GetSelectedElements();
            //Can also use the active ReportView
            IReadOnlyList<Element> selectedElementsFromView = ReportView.Active.GetSelectedElements();
            #endregion

            // cref: Zoom to selected elements;ArcGIS.Desktop.Reports.ReportView.ZoomToSelectedElements
            #region Zoom to selected elements
            QueuedTask.Run(() => reportView.ZoomToSelectedElements());
            #endregion

            // cref: Clear element selection;ArcGIS.Desktop.Reports.ReportView.ClearElementSelection
            #region Clear element selection
            reportView.ClearElementSelection();
            #endregion

            #region Find specific elements in the report based on their Name.
            var reportElementsToFind = new List<string> { "ReportText1", "ReportText2" };
            var textReportElements = report.FindElements(reportElementsToFind);
            #endregion

            #region Delete Elements
            report.DeleteElements(textReportElements);
            #endregion
        }

        private static void CreateField(Report report)
        {
            #region Create a new field in the report
            //This is the gap between two fields.
            double fieldIncrement = 0.9388875113593206276389;
            //On the QueuedTask
            //New field to add.
            var newReportField = new CIMReportField
            {
                Name = "POP1990",
                FieldOrder = 2,
            };

            //Get the "ReportSection element"
            var mainReportSection = report.Elements.OfType<ReportSection>().FirstOrDefault();
            if (mainReportSection == null) return;

            //Get the "ReportDetails" within the ReportSectionElement. ReportDetails is where "fields" are.
            var reportDetailsSection = mainReportSection?.Elements.OfType<ReportDetails>().FirstOrDefault();
            if (reportDetailsSection == null) return;

            //Within ReportDetails find the envelope that encloses a field.
            //We get the first CIMParagraphTextGraphic in the collection so that we can add the new field next to it.
            var lastFieldGraphic = reportDetailsSection.Elements.FirstOrDefault((r) =>
            {
                var gr = r as GraphicElement;
                if (gr == null) return false;
                // IDIOM: simplified from "is X ? true : false".
                return gr.GetGraphic() is CIMParagraphTextGraphic;
            });
            //Get the Envelope of the last field
            var graphicBounds = lastFieldGraphic.GetBounds();

            //Min and Max values of the envelope
            var xMinOfFieldEnvelope = graphicBounds.XMin;
            var yMinOfFieldEnvelope = graphicBounds.YMin;
            var xMaxOfFieldEnvelope = graphicBounds.XMax;
            var YMaxOfFieldEnvelope = graphicBounds.YMax;
            //create the new Envelope to be offset from the existing field
            MapPoint newMinPoint = MapPointBuilder.CreateMapPoint(xMinOfFieldEnvelope + fieldIncrement, yMinOfFieldEnvelope);
            MapPoint newMaxPoint = MapPointBuilder.CreateMapPoint(xMaxOfFieldEnvelope + fieldIncrement, YMaxOfFieldEnvelope);
            Envelope newFieldEnvelope = EnvelopeBuilder.CreateEnvelope(newMinPoint, newMaxPoint);
            //Create field
            GraphicElement fieldGraphic = ReportElementFactory.Instance.CreateFieldValueTextElement(reportDetailsSection, newFieldEnvelope, newReportField);
            #endregion
        }
    }
}
using System; using UnityEngine.Events; using UnityEngine.EventSystems; namespace UnityEngine.UI { [AddComponentMenu("UI/Slider", 34)] [RequireComponent(typeof(RectTransform))] public class Slider : Selectable, IDragHandler, IInitializePotentialDragHandler, ICanvasElement { public enum Direction { LeftToRight, RightToLeft, BottomToTop, TopToBottom, } [Serializable] public class SliderEvent : UnityEvent<float> { } [SerializeField] private RectTransform m_FillRect; public RectTransform fillRect { get { return m_FillRect; } set { if (SetPropertyUtility.SetClass(ref m_FillRect, value)) {UpdateCachedReferences(); UpdateVisuals(); } } } [SerializeField] private RectTransform m_HandleRect; public RectTransform handleRect { get { return m_HandleRect; } set { if (SetPropertyUtility.SetClass(ref m_HandleRect, value)) { UpdateCachedReferences(); UpdateVisuals(); } } } [Space(6)] [SerializeField] private Direction m_Direction = Direction.LeftToRight; public Direction direction { get { return m_Direction; } set { if (SetPropertyUtility.SetStruct(ref m_Direction, value)) UpdateVisuals(); } } [SerializeField] private float m_MinValue = 0; public float minValue { get { return m_MinValue; } set { if (SetPropertyUtility.SetStruct(ref m_MinValue, value)) { Set(m_Value); UpdateVisuals(); } } } [SerializeField] private float m_MaxValue = 1; public float maxValue { get { return m_MaxValue; } set { if (SetPropertyUtility.SetStruct(ref m_MaxValue, value)) { Set(m_Value); UpdateVisuals(); } } } [SerializeField] private bool m_WholeNumbers = false; public bool wholeNumbers { get { return m_WholeNumbers; } set { if (SetPropertyUtility.SetStruct(ref m_WholeNumbers, value)) { Set(m_Value); UpdateVisuals(); } } } [SerializeField] private float m_Value = 1f; public float value { get { if (wholeNumbers) return Mathf.Round(m_Value); return m_Value; } set { Set(value); } } public float normalizedValue { get { if (Mathf.Approximately(minValue, maxValue)) return 0; return Mathf.InverseLerp(minValue, 
maxValue, value); } set { this.value = Mathf.Lerp(minValue, maxValue, value); } } [Space(6)] // Allow for delegate-based subscriptions for faster events than 'eventReceiver', and allowing for multiple receivers. [SerializeField] private SliderEvent m_OnValueChanged = new SliderEvent(); public SliderEvent onValueChanged { get { return m_OnValueChanged; } set { m_OnValueChanged = value; } } // Private fields private Image m_FillImage; private Transform m_FillTransform; private RectTransform m_FillContainerRect; private Transform m_HandleTransform; private RectTransform m_HandleContainerRect; // The offset from handle position to mouse down position private Vector2 m_Offset = Vector2.zero; private DrivenRectTransformTracker m_Tracker; // Size of each step. float stepSize { get { return wholeNumbers ? 1 : (maxValue - minValue) * 0.1f; } } protected Slider() { } #if UNITY_EDITOR protected override void OnValidate() { base.OnValidate(); if (wholeNumbers) { m_MinValue = Mathf.Round(m_MinValue); m_MaxValue = Mathf.Round(m_MaxValue); } UpdateCachedReferences(); Set(m_Value, false); // Update rects since other things might affect them even if value didn't change. UpdateVisuals(); var prefabType = UnityEditor.PrefabUtility.GetPrefabType(this); if (prefabType != UnityEditor.PrefabType.Prefab && !Application.isPlaying) CanvasUpdateRegistry.RegisterCanvasElementForLayoutRebuild(this); } #endif // if UNITY_EDITOR public virtual void Rebuild(CanvasUpdate executing) { #if UNITY_EDITOR if (executing == CanvasUpdate.Prelayout) onValueChanged.Invoke(value); #endif } protected override void OnEnable() { base.OnEnable(); UpdateCachedReferences(); Set(m_Value, false); // Update rects since they need to be initialized correctly. 
UpdateVisuals(); } protected override void OnDisable() { m_Tracker.Clear(); base.OnDisable(); } void UpdateCachedReferences() { if (m_FillRect) { m_FillTransform = m_FillRect.transform; m_FillImage = m_FillRect.GetComponent<Image>(); if (m_FillTransform.parent != null) m_FillContainerRect = m_FillTransform.parent.GetComponent<RectTransform>(); } else { m_FillContainerRect = null; m_FillImage = null; } if (m_HandleRect) { m_HandleTransform = m_HandleRect.transform; if (m_HandleTransform.parent != null) m_HandleContainerRect = m_HandleTransform.parent.GetComponent<RectTransform>(); } else { m_HandleContainerRect = null; } } // Set the valueUpdate the visible Image. void Set(float input) { Set(input, true); } void Set(float input, bool sendCallback) { // Clamp the input float newValue = Mathf.Clamp(input, minValue, maxValue); if (wholeNumbers) newValue = Mathf.Round(newValue); // If the stepped value doesn't match the last one, it's time to update if (m_Value == newValue) return; m_Value = newValue; UpdateVisuals(); if (sendCallback) m_OnValueChanged.Invoke(newValue); } protected override void OnRectTransformDimensionsChange() { base.OnRectTransformDimensionsChange(); UpdateVisuals(); } enum Axis { Horizontal = 0, Vertical = 1 } Axis axis { get { return (m_Direction == Direction.LeftToRight || m_Direction == Direction.RightToLeft) ? Axis.Horizontal : Axis.Vertical; } } bool reverseValue { get { return m_Direction == Direction.RightToLeft || m_Direction == Direction.TopToBottom; } } // Force-update the slider. Useful if you've changed the properties and want it to update visually. 
private void UpdateVisuals()
{
#if UNITY_EDITOR
    // In edit mode the cached rect references may be stale; refresh before drawing.
    if (!Application.isPlaying)
        UpdateCachedReferences();
#endif

    m_Tracker.Clear();

    if (m_FillContainerRect != null)
    {
        m_Tracker.Add(this, m_FillRect, DrivenTransformProperties.Anchors);

        Vector2 fillAnchorMin = Vector2.zero;
        Vector2 fillAnchorMax = Vector2.one;

        if (m_FillImage != null && m_FillImage.type == Image.Type.Filled)
        {
            // Filled images animate via fillAmount; anchors stay full-stretch.
            m_FillImage.fillAmount = normalizedValue;
        }
        else if (reverseValue)
        {
            fillAnchorMin[(int)axis] = 1 - normalizedValue;
        }
        else
        {
            fillAnchorMax[(int)axis] = normalizedValue;
        }

        m_FillRect.anchorMin = fillAnchorMin;
        m_FillRect.anchorMax = fillAnchorMax;
    }

    if (m_HandleContainerRect != null)
    {
        m_Tracker.Add(this, m_HandleRect, DrivenTransformProperties.Anchors);

        // The handle is anchored to a single point along the slider axis.
        Vector2 handleAnchorMin = Vector2.zero;
        Vector2 handleAnchorMax = Vector2.one;
        float handlePos = reverseValue ? (1 - normalizedValue) : normalizedValue;
        handleAnchorMin[(int)axis] = handlePos;
        handleAnchorMax[(int)axis] = handlePos;
        m_HandleRect.anchorMin = handleAnchorMin;
        m_HandleRect.anchorMax = handleAnchorMax;
    }
}

// Update the slider's value from a pointer position in screen space.
void UpdateDrag(PointerEventData eventData, Camera cam)
{
    // Prefer the handle's container; fall back to the fill's container.
    RectTransform containerRect = m_HandleContainerRect ?? m_FillContainerRect;
    if (containerRect == null || !(containerRect.rect.size[(int)axis] > 0))
        return;

    Vector2 localPoint;
    if (!RectTransformUtility.ScreenPointToLocalPointInRectangle(containerRect, eventData.position, cam, out localPoint))
        return;

    localPoint -= containerRect.rect.position;

    // Fraction of the container covered by the cursor, corrected for the grab offset.
    float fraction = Mathf.Clamp01((localPoint - m_Offset)[(int)axis] / containerRect.rect.size[(int)axis]);
    normalizedValue = reverseValue ? 1f - fraction : fraction;
}

// Dragging is only meaningful for an active, interactable slider and the left button.
private bool MayDrag(PointerEventData eventData)
{
    if (!IsActive() || !IsInteractable())
        return false;
    return eventData.button == PointerEventData.InputButton.Left;
}

public override void OnPointerDown(PointerEventData eventData)
{
    if (!MayDrag(eventData))
        return;

    base.OnPointerDown(eventData);

    m_Offset = Vector2.zero;

    bool pressedOnHandle = m_HandleContainerRect != null
        && RectTransformUtility.RectangleContainsScreenPoint(m_HandleRect, eventData.position, eventData.enterEventCamera);

    if (pressedOnHandle)
    {
        // Remember where inside the handle the press landed so dragging doesn't snap.
        Vector2 handleLocalPoint;
        if (RectTransformUtility.ScreenPointToLocalPointInRectangle(m_HandleRect, eventData.position, eventData.pressEventCamera, out handleLocalPoint))
            m_Offset = handleLocalPoint;
    }
    else
    {
        // Outside the slider handle - jump to this point instead
        UpdateDrag(eventData, eventData.pressEventCamera);
    }
}

public virtual void OnDrag(PointerEventData eventData)
{
    if (MayDrag(eventData))
        UpdateDrag(eventData, eventData.pressEventCamera);
}

// Keyboard/gamepad navigation: step the value when moving along the slider's own
// axis with no neighbouring selectable; otherwise defer to normal navigation.
public override void OnMove(AxisEventData eventData)
{
    if (!IsActive() || !IsInteractable())
    {
        base.OnMove(eventData);
        return;
    }

    MoveDirection dir = eventData.moveDir;
    if (dir == MoveDirection.Left)
    {
        if (axis == Axis.Horizontal && FindSelectableOnLeft() == null)
            Set(reverseValue ? value + stepSize : value - stepSize);
        else
            base.OnMove(eventData);
    }
    else if (dir == MoveDirection.Right)
    {
        if (axis == Axis.Horizontal && FindSelectableOnRight() == null)
            Set(reverseValue ? value - stepSize : value + stepSize);
        else
            base.OnMove(eventData);
    }
    else if (dir == MoveDirection.Up)
    {
        if (axis == Axis.Vertical && FindSelectableOnUp() == null)
            Set(reverseValue ? value - stepSize : value + stepSize);
        else
            base.OnMove(eventData);
    }
    else if (dir == MoveDirection.Down)
    {
        if (axis == Axis.Vertical && FindSelectableOnDown() == null)
            Set(reverseValue ? value + stepSize : value - stepSize);
        else
            base.OnMove(eventData);
    }
}

// Suppress automatic navigation along the slider's own axis so arrow keys
// adjust the value instead of moving selection (see OnMove).
public override Selectable FindSelectableOnLeft()
{
    bool handledBySlider = navigation.mode == Navigation.Mode.Automatic && axis == Axis.Horizontal;
    return handledBySlider ? null : base.FindSelectableOnLeft();
}

public override Selectable FindSelectableOnRight()
{
    bool handledBySlider = navigation.mode == Navigation.Mode.Automatic && axis == Axis.Horizontal;
    return handledBySlider ? null : base.FindSelectableOnRight();
}

public override Selectable FindSelectableOnUp()
{
    bool handledBySlider = navigation.mode == Navigation.Mode.Automatic && axis == Axis.Vertical;
    return handledBySlider ? null : base.FindSelectableOnUp();
}

public override Selectable FindSelectableOnDown()
{
    bool handledBySlider = navigation.mode == Navigation.Mode.Automatic && axis == Axis.Vertical;
    return handledBySlider ? null : base.FindSelectableOnDown();
}

public virtual void OnInitializePotentialDrag(PointerEventData eventData)
{
    // React to the very first pointer movement instead of waiting for the drag threshold.
    eventData.useDragThreshold = false;
}

// Change the direction, optionally flipping the rect layout to match.
public void SetDirection(Direction direction, bool includeRectLayouts)
{
    Axis previousAxis = axis;
    bool previousReverse = reverseValue;
    this.direction = direction;

    if (!includeRectLayouts)
        return;

    if (axis != previousAxis)
        RectTransformUtility.FlipLayoutAxes(transform as RectTransform, true, true);

    if (reverseValue != previousReverse)
        RectTransformUtility.FlipLayoutOnAxis(transform as RectTransform, (int)axis, true, true);
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections;
using System.Data.Common;
using System.Data.SqlTypes;
using Xunit;

namespace System.Data.SqlClient.ManualTesting.Tests
{
    /// <summary>
    /// Manual tests for <see cref="SqlParameter"/> / <see cref="SqlParameterCollection"/>.
    /// All facts require a reachable SQL Server (connection string supplied via
    /// <c>DataTestUtility.TcpConnStr</c>); they are gated by [CheckConnStrSetupFact].
    /// </summary>
    public static class ParametersTest
    {
        // Shared connection string; read once at type initialization, never mutated.
        private static readonly string s_connString = DataTestUtility.TcpConnStr;

        /// <summary>
        /// Exercises the public surface of SqlParameterCollection (add/remove/index/contains,
        /// error messages, auto-generated parameter names) for code coverage.
        /// NOTE: the assertions are order-dependent - the auto-generated names
        /// ("Parameter1".."ParameterN") depend on the exact insertion sequence below.
        /// </summary>
        [CheckConnStrSetupFact]
        public static void CodeCoverageSqlClient()
        {
            SqlParameterCollection opc = new SqlCommand().Parameters;

            // A fresh collection is empty, writable, growable and not synchronized.
            Assert.True(opc.Count == 0, string.Format("FAILED: Expected count: {0}. Actual count: {1}.", 0, opc.Count));
            Assert.False(((IList)opc).IsReadOnly, "FAILED: Expected collection to NOT be read only.");
            Assert.False(((IList)opc).IsFixedSize, "FAILED: Expected collection to NOT be fixed size.");
            Assert.False(((IList)opc).IsSynchronized, "FAILED: Expected collection to NOT be synchronized.");
            DataTestUtility.AssertEqualsWithDescription("Object", ((IList)opc).SyncRoot.GetType().Name, "FAILED: Incorrect SyncRoot Name");

            // Indexing an empty collection must throw, both by position and by name.
            {
                string failValue;
                DataTestUtility.AssertThrowsWrapper<IndexOutOfRangeException>(() => failValue = opc[0].ParameterName, "Invalid index 0 for this SqlParameterCollection with Count=0.");
                DataTestUtility.AssertThrowsWrapper<IndexOutOfRangeException>(() => failValue = opc["@p1"].ParameterName, "An SqlParameter with ParameterName '@p1' is not contained by this SqlParameterCollection.");
            }

            DataTestUtility.AssertThrowsWrapper<ArgumentNullException>(() => opc.Add(null), "The SqlParameterCollection only accepts non-null SqlParameter type objects.");

            // Unnamed parameters receive auto-generated names on insertion ("Parameter1", "Parameter2", ...).
            opc.Add((object)new SqlParameter());
            IEnumerator enm = opc.GetEnumerator();
            Assert.True(enm.MoveNext(), "FAILED: Expected MoveNext to be true");
            DataTestUtility.AssertEqualsWithDescription("Parameter1", ((SqlParameter)enm.Current).ParameterName, "FAILED: Incorrect ParameterName");

            opc.Add(new SqlParameter());
            DataTestUtility.AssertEqualsWithDescription("Parameter2", opc[1].ParameterName, "FAILED: Incorrect ParameterName");

            opc.Add(new SqlParameter(null, null));
            opc.Add(new SqlParameter(null, SqlDbType.Int));
            DataTestUtility.AssertEqualsWithDescription("Parameter4", opc["Parameter4"].ParameterName, "FAILED: Incorrect ParameterName");

            opc.Add(new SqlParameter("Parameter5", SqlDbType.NVarChar, 20));
            opc.Add(new SqlParameter(null, SqlDbType.NVarChar, 20, "a"));

            // Removal by name; IndexOf(null) is defined to return -1.
            opc.RemoveAt(opc[3].ParameterName);
            DataTestUtility.AssertEqualsWithDescription(-1, opc.IndexOf(null), "FAILED: Incorrect index for null value");

            // A parameter may belong to at most one collection at a time.
            SqlParameter p = opc[0];
            DataTestUtility.AssertThrowsWrapper<ArgumentException>(() => opc.Add((object)p), "The SqlParameter is already contained by another SqlParameterCollection.");
            DataTestUtility.AssertThrowsWrapper<ArgumentException>(() => new SqlCommand().Parameters.Add(p), "The SqlParameter is already contained by another SqlParameterCollection.");
            DataTestUtility.AssertThrowsWrapper<ArgumentNullException>(() => opc.Remove(null), "The SqlParameterCollection only accepts non-null SqlParameter type objects.");

            // Renaming a contained parameter (including case-only renames) must succeed.
            string pname = p.ParameterName;
            p.ParameterName = pname;
            p.ParameterName = pname.ToUpper();
            p.ParameterName = pname.ToLower();
            p.ParameterName = "@p1";
            p.ParameterName = pname;

            opc.Clear();
            opc.Add(p);

            opc.Clear();
            opc.AddWithValue("@p1", null);
            DataTestUtility.AssertEqualsWithDescription(-1, opc.IndexOf(p.ParameterName), "FAILED: Incorrect index for parameter name");

            // Replacing the element at index 0 re-registers p under its own name.
            opc[0] = p;
            DataTestUtility.AssertEqualsWithDescription(0, opc.IndexOf(p.ParameterName), "FAILED: Incorrect index for parameter name");

            Assert.True(opc.Contains(p.ParameterName), "FAILED: Expected collection to contain provided parameter.");
            Assert.True(opc.Contains(opc[0]), "FAILED: Expected collection to contain provided parameter.");

            opc[0] = p;
            opc[p.ParameterName] = new SqlParameter(p.ParameterName, null);
            opc[p.ParameterName] = new SqlParameter();
            opc.RemoveAt(0);

            // Operations on brand-new (empty) collections must be harmless.
            new SqlCommand().Parameters.Clear();
            new SqlCommand().Parameters.CopyTo(new object[0], 0);
            Assert.False(new SqlCommand().Parameters.GetEnumerator().MoveNext(), "FAILED: Expected MoveNext to be false");

            // Non-SqlParameter objects are rejected with a type-specific message.
            DataTestUtility.AssertThrowsWrapper<InvalidCastException>(() => new SqlCommand().Parameters.Add(0), "The SqlParameterCollection only accepts non-null SqlParameter type objects, not Int32 objects.");
            DataTestUtility.AssertThrowsWrapper<InvalidCastException>(() => new SqlCommand().Parameters.Insert(0, 0), "The SqlParameterCollection only accepts non-null SqlParameter type objects, not Int32 objects.");
            DataTestUtility.AssertThrowsWrapper<InvalidCastException>(() => new SqlCommand().Parameters.Remove(0), "The SqlParameterCollection only accepts non-null SqlParameter type objects, not Int32 objects.");
            DataTestUtility.AssertThrowsWrapper<ArgumentException>(() => new SqlCommand().Parameters.Remove(new SqlParameter()), "Attempted to remove an SqlParameter that is not contained by this SqlParameterCollection.");
        }

        /// <summary>An enum value round-trips through a parameter as its underlying integral type.</summary>
        [CheckConnStrSetupFact]
        public static void Test_WithEnumValue_ShouldInferToUnderlyingType()
        {
            using (var conn = new SqlConnection(s_connString))
            {
                conn.Open();
                var cmd = new SqlCommand("select @input", conn);
                cmd.Parameters.AddWithValue("@input", MyEnum.B);
                object value = cmd.ExecuteScalar();
                // Fix: xUnit's Assert.Equal takes the EXPECTED value first (xUnit2000);
                // the original call had the arguments swapped, garbling failure output.
                Assert.Equal(MyEnum.B, (MyEnum)value);
            }
        }

        /// <summary>An output parameter declared as Int32 can be read back as the enum.</summary>
        [CheckConnStrSetupFact]
        public static void Test_WithOutputEnumParameter_ShouldReturnEnum()
        {
            using (var conn = new SqlConnection(s_connString))
            {
                conn.Open();
                var cmd = new SqlCommand("set @output = @input", conn);
                cmd.Parameters.AddWithValue("@input", MyEnum.B);
                var outputParam = cmd.CreateParameter();
                outputParam.ParameterName = "@output";
                outputParam.DbType = DbType.Int32;
                outputParam.Direction = ParameterDirection.Output;
                cmd.Parameters.Add(outputParam);
                cmd.ExecuteNonQuery();
                // Fix: expected value first (see above).
                Assert.Equal(MyEnum.B, (MyEnum)outputParam.Value);
            }
        }

        /// <summary>A SqlDecimal parameter value comes back as a CLR decimal.</summary>
        [CheckConnStrSetupFact]
        public static void Test_WithDecimalValue_ShouldReturnDecimal()
        {
            using (var conn = new SqlConnection(s_connString))
            {
                conn.Open();
                var cmd = new SqlCommand("select @foo", conn);
                cmd.Parameters.AddWithValue("@foo", new SqlDecimal(0.5));
                var result = (decimal)cmd.ExecuteScalar();
                // Fix: expected value first (see above).
                Assert.Equal((decimal)0.5, result);
            }
        }

        /// <summary>A Guid parameter value round-trips unchanged.</summary>
        [CheckConnStrSetupFact]
        public static void Test_WithGuidValue_ShouldReturnGuid()
        {
            using (var conn = new SqlConnection(s_connString))
            {
                conn.Open();
                var expectedGuid = Guid.NewGuid();
                var cmd = new SqlCommand("select @input", conn);
                cmd.Parameters.AddWithValue("@input", expectedGuid);
                var result = cmd.ExecuteScalar();
                Assert.Equal(expectedGuid, (Guid)result);
            }
        }

        /// <summary>
        /// A DataTable passed as a table-valued parameter reaches a stored procedure
        /// and is inserted row-for-row. Creates its scratch objects in tempdb.
        /// </summary>
        [CheckConnStrSetupFact]
        public static void TestParametersWithDatatablesTVPInsert()
        {
            SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder(DataTestUtility.TcpConnStr);
            builder.InitialCatalog = "tempdb";
            int x = 4, y = 5;

            DataTable table = new DataTable { Columns = { { "x", typeof(int) }, { "y", typeof(int) } }, Rows = { { x, y } } };

            using (SqlConnection connection = new SqlConnection(builder.ConnectionString))
            {
                connection.Open();
                // Drop leftovers from a previous run (failures ignored), then recreate.
                ExecuteSqlIgnoreExceptions(connection, "drop proc dbo.UpdatePoint");
                ExecuteSqlIgnoreExceptions(connection, "drop table dbo.PointTable");
                ExecuteSqlIgnoreExceptions(connection, "drop type dbo.PointTableType");
                ExecuteSqlIgnoreExceptions(connection, "CREATE TYPE dbo.PointTableType AS TABLE (x INT, y INT)");
                ExecuteSqlIgnoreExceptions(connection, "CREATE TABLE dbo.PointTable (x INT, y INT)");
                ExecuteSqlIgnoreExceptions(connection, "CREATE PROCEDURE dbo.UpdatePoint @TVP dbo.PointTableType READONLY AS SET NOCOUNT ON INSERT INTO dbo.PointTable(x, y) SELECT * FROM @TVP");

                using (SqlCommand cmd = connection.CreateCommand())
                {
                    // Update Data Using TVPs
                    cmd.CommandText = "dbo.UpdatePoint";
                    cmd.CommandType = CommandType.StoredProcedure;
                    SqlParameter parameter = cmd.Parameters.AddWithValue("@TVP", table);
                    parameter.TypeName = "dbo.PointTableType";
                    cmd.ExecuteNonQuery();

                    // Verify if the data was updated
                    cmd.CommandText = "select * from dbo.PointTable";
                    cmd.CommandType = CommandType.Text;
                    using (SqlDataReader reader = cmd.ExecuteReader())
                    {
                        DataTable dbData = new DataTable();
                        dbData.Load(reader);
                        Assert.Equal(1, dbData.Rows.Count);
                        Assert.Equal(x, dbData.Rows[0][0]);
                        Assert.Equal(y, dbData.Rows[0][1]);
                    }
                }
            }
        }

        /// <summary>Runs a one-off SQL statement, deliberately swallowing any failure (best-effort cleanup/setup).</summary>
        private static void ExecuteSqlIgnoreExceptions(DbConnection connection, string query)
        {
            using (DbCommand cmd = connection.CreateCommand())
            {
                try
                {
                    cmd.CommandText = query;
                    cmd.ExecuteNonQuery();
                }
                catch { /* Ignore exception if the command execution fails */ }
            }
        }

        // Test enum with an int underlying type; B is the value used by the facts above.
        private enum MyEnum
        {
            A = 1,
            B = 2
        }
    }
}
/** * MetroFramework - Modern UI for WinForms * * The MIT License (MIT) * Copyright (c) 2011 Sven Walter, http://github.com/viperneo * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in the * Software without restriction, including without limitation the rights to use, copy, * modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, * and to permit persons to whom the Software is furnished to do so, subject to the * following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE * OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ using System; using System.Diagnostics; using System.Drawing; using System.Drawing.Drawing2D; using System.Drawing.Imaging; using System.ComponentModel; using System.Collections.Generic; using System.Reflection; using System.Security; using System.Windows.Forms; using MetroFramework.Components; using MetroFramework.Drawing; using MetroFramework.Interfaces; using MetroFramework.Native; namespace MetroFramework.Forms { #region Enums public enum MetroFormTextAlign { Left, Center, Right } public enum MetroFormShadowType { None, Flat, DropShadow, SystemShadow, AeroShadow } public enum MetroFormBorderStyle { None, FixedSingle } public enum BackLocation { TopLeft, TopRight, BottomLeft, BottomRight } #endregion public class MetroForm : Form, IMetroForm, IDisposable { #region Interface private MetroColorStyle metroStyle = MetroColorStyle.Blue; [Category(MetroDefaults.PropertyCategory.Appearance)] public MetroColorStyle Style { get { if (StyleManager != null) return StyleManager.Style; return metroStyle; } set { metroStyle = value; } } private MetroThemeStyle metroTheme = MetroThemeStyle.Light; [Category(MetroDefaults.PropertyCategory.Appearance)] public MetroThemeStyle Theme { get { if (StyleManager != null) return StyleManager.Theme; return metroTheme; } set { metroTheme = value; } } private MetroStyleManager metroStyleManager = null; [Browsable(false)] public MetroStyleManager StyleManager { get { return metroStyleManager; } set { metroStyleManager = value; } } #endregion #region Fields private MetroFormTextAlign textAlign = MetroFormTextAlign.Left; [Browsable(true)] [Category(MetroDefaults.PropertyCategory.Appearance)] public MetroFormTextAlign TextAlign { get { return textAlign; } set { textAlign = value; } } [Browsable(false)] public override Color BackColor { get { return MetroPaint.BackColor.Form(Theme); } } private MetroFormBorderStyle formBorderStyle = MetroFormBorderStyle.None; [DefaultValue(MetroFormBorderStyle.None)] [Browsable(true)] 
[Category(MetroDefaults.PropertyCategory.Appearance)] public MetroFormBorderStyle BorderStyle { get { return formBorderStyle; } set { formBorderStyle = value; } } private bool isMovable = true; [Category(MetroDefaults.PropertyCategory.Appearance)] public bool Movable { get { return isMovable; } set { isMovable = value; } } public new Padding Padding { get { return base.Padding; } set { value.Top = Math.Max(value.Top, DisplayHeader ? 60 : 30); base.Padding = value; } } protected override Padding DefaultPadding { get { return new Padding(20, DisplayHeader ? 60 : 20, 20, 20); } } private bool displayHeader = true; [Category(MetroDefaults.PropertyCategory.Appearance)] [DefaultValue(true)] public bool DisplayHeader { get { return displayHeader; } set { if (value != displayHeader) { Padding p = base.Padding; p.Top += value ? 30 : -30; base.Padding = p; } displayHeader = value; } } private bool isResizable = true; [Category(MetroDefaults.PropertyCategory.Appearance)] public bool Resizable { get { return isResizable; } set { isResizable = value; } } private MetroFormShadowType shadowType = MetroFormShadowType.Flat; [Category(MetroDefaults.PropertyCategory.Appearance)] [DefaultValue(MetroFormShadowType.Flat)] public MetroFormShadowType ShadowType { get { return IsMdiChild ? 
MetroFormShadowType.None : shadowType; } set { shadowType = value; } } [Browsable(false)] public new FormBorderStyle FormBorderStyle { get { return base.FormBorderStyle; } set { base.FormBorderStyle = value; } } public new Form MdiParent { get { return base.MdiParent; } set { if (value != null) { RemoveShadow(); shadowType = MetroFormShadowType.None; } base.MdiParent = value; } } private const int borderWidth = 5; private Bitmap _image = null; private Image backImage; [Category(MetroDefaults.PropertyCategory.Appearance)] [DefaultValue(null)] public Image BackImage { get { return backImage; } set { backImage = value; if (value != null) _image = ApplyInvert(new Bitmap(value)); Refresh(); } } private Padding backImagePadding; [Category(MetroDefaults.PropertyCategory.Appearance)] public Padding BackImagePadding { get { return backImagePadding; } set { backImagePadding = value; Refresh(); } } private int backMaxSize; [Category(MetroDefaults.PropertyCategory.Appearance)] public int BackMaxSize { get { return backMaxSize; } set { backMaxSize = value; Refresh(); } } private BackLocation backLocation; [Category(MetroDefaults.PropertyCategory.Appearance)] [DefaultValue(BackLocation.TopLeft)] public BackLocation BackLocation { get { return backLocation; } set { backLocation = value; Refresh(); } } private bool _imageinvert; [Category(MetroDefaults.PropertyCategory.Appearance)] [DefaultValue(true)] public bool ApplyImageInvert { get { return _imageinvert; } set { _imageinvert = value; Refresh(); } } #endregion #region Constructor public MetroForm() { SetStyle(ControlStyles.AllPaintingInWmPaint | ControlStyles.OptimizedDoubleBuffer | ControlStyles.ResizeRedraw | ControlStyles.UserPaint, true); FormBorderStyle = FormBorderStyle.None; Name = "MetroForm"; StartPosition = FormStartPosition.CenterScreen; TransparencyKey = Color.Lavender; } protected override void Dispose(bool disposing) { if (disposing) { RemoveShadow(); } base.Dispose(disposing); } #endregion #region Paint Methods 
/// <summary>
/// Inverts the RGB channels of every pixel, clamping fully-inverted (0) channels up to 17.
/// NOTE(review): mutates and returns the SAME bitmap instance, and per-pixel
/// GetPixel/SetPixel is slow for large images; alpha (A) is read but no longer applied.
/// </summary>
public Bitmap ApplyInvert(Bitmap bitmapImage)
{
    byte A, R, G, B;
    Color pixelColor;
    for (int y = 0; y < bitmapImage.Height; y++)
    {
        for (int x = 0; x < bitmapImage.Width; x++)
        {
            pixelColor = bitmapImage.GetPixel(x, y);
            A = pixelColor.A;
            R = (byte)(255 - pixelColor.R);
            G = (byte)(255 - pixelColor.G);
            B = (byte)(255 - pixelColor.B);
            // byte is unsigned, so "<= 0" can only match 0: pure black channels become 17.
            if (R <= 0) R = 17;
            if (G <= 0) G = 17;
            if (B <= 0) B = 17;
            //bitmapImage.SetPixel(x, y, Color.FromArgb((int)A, (int)R, (int)G, (int)B));
            bitmapImage.SetPixel(x, y, Color.FromArgb((int)R, (int)G, (int)B));
        }
    }
    return bitmapImage;
}

/// <summary>
/// Paints the whole form: background, top style stripe, optional border,
/// optional back image, header text and the resize grip.
/// </summary>
protected override void OnPaint(PaintEventArgs e)
{
    Color backColor = MetroPaint.BackColor.Form(Theme);
    Color foreColor = MetroPaint.ForeColor.Title(Theme);

    e.Graphics.Clear(backColor);

    // Top accent stripe in the current style color, borderWidth pixels tall.
    using (SolidBrush b = MetroPaint.GetStyleBrush(Style))
    {
        Rectangle topRect = new Rectangle(0, 0, Width, borderWidth);
        e.Graphics.FillRectangle(b, topRect);
    }

    if (BorderStyle != MetroFormBorderStyle.None)
    {
        // Left, bottom and right edges only; the top edge is the accent stripe.
        Color c = MetroPaint.BorderColor.Form(Theme);
        using (Pen pen = new Pen(c))
        {
            e.Graphics.DrawLines(pen, new[] { new Point(0, borderWidth), new Point(0, Height - 1), new Point(Width - 1, Height - 1), new Point(Width - 1, borderWidth) });
        }
    }

    if (backImage != null && backMaxSize != 0)
    {
        // NOTE(review): the image is resized once and then possibly resized a second
        // time when ApplyImageInvert is on - the first call is wasted in that case.
        Image img = MetroImage.ResizeImage(backImage, new Rectangle(0, 0, backMaxSize, backMaxSize));
        if (_imageinvert)
        {
            // Dark theme uses the pre-inverted copy built in the BackImage setter.
            img = MetroImage.ResizeImage((Theme == MetroThemeStyle.Dark) ? _image : backImage, new Rectangle(0, 0, backMaxSize, backMaxSize));
        }

        switch (backLocation)
        {
            case BackLocation.TopLeft:
                e.Graphics.DrawImage(img, 0 + backImagePadding.Left, 0 + backImagePadding.Top);
                break;
            case BackLocation.TopRight:
                e.Graphics.DrawImage(img, ClientRectangle.Right - (backImagePadding.Right + img.Width), 0 + backImagePadding.Top);
                break;
            case BackLocation.BottomLeft:
                e.Graphics.DrawImage(img, 0 + backImagePadding.Left, ClientRectangle.Bottom - (img.Height + backImagePadding.Bottom));
                break;
            case BackLocation.BottomRight:
                e.Graphics.DrawImage(img, ClientRectangle.Right - (backImagePadding.Right + img.Width), ClientRectangle.Bottom - (img.Height + backImagePadding.Bottom));
                break;
        }
    }

    if (displayHeader)
    {
        // Title drawn in the 60px header area, ellipsized when too long.
        Rectangle bounds = new Rectangle(20, 20, ClientRectangle.Width - 2 * 20, 40);
        TextFormatFlags flags = TextFormatFlags.EndEllipsis | GetTextFormatFlags();
        TextRenderer.DrawText(e.Graphics, Text, MetroFonts.Title, bounds, foreColor, flags);
    }

    if (Resizable && (SizeGripStyle == SizeGripStyle.Auto || SizeGripStyle == SizeGripStyle.Show))
    {
        // Six 2x2 dots forming a triangular resize grip in the bottom-right corner.
        using (SolidBrush b = new SolidBrush(MetroPaint.ForeColor.Button.Disabled(Theme)))
        {
            Size resizeHandleSize = new Size(2, 2);
            e.Graphics.FillRectangles(b, new Rectangle[] {
                new Rectangle(new Point(ClientRectangle.Width-6,ClientRectangle.Height-6), resizeHandleSize),
                new Rectangle(new Point(ClientRectangle.Width-10,ClientRectangle.Height-10), resizeHandleSize),
                new Rectangle(new Point(ClientRectangle.Width-10,ClientRectangle.Height-6), resizeHandleSize),
                new Rectangle(new Point(ClientRectangle.Width-6,ClientRectangle.Height-10), resizeHandleSize),
                new Rectangle(new Point(ClientRectangle.Width-14,ClientRectangle.Height-6), resizeHandleSize),
                new Rectangle(new Point(ClientRectangle.Width-6,ClientRectangle.Height-14), resizeHandleSize)
            });
        }
    }
}

// Maps the TextAlign property to the TextRenderer flag for the header text.
private TextFormatFlags GetTextFormatFlags()
{
    switch (TextAlign)
    {
        case MetroFormTextAlign.Left: return TextFormatFlags.Left;
        case MetroFormTextAlign.Center: return TextFormatFlags.HorizontalCenter;
        case MetroFormTextAlign.Right: return TextFormatFlags.Right;
    }
    // Unreachable while TextAlign is a valid enum member.
    throw new InvalidOperationException();
}

#endregion

#region Management Methods

protected override void OnClosing(CancelEventArgs e)
{
    // Closing any regular form also force-closes the shared task window.
    if (!(this is MetroTaskWindow))
        MetroTaskWindow.ForceClose();

    base.OnClosing(e);
}

protected override void OnClosed(EventArgs e)
{
    RemoveShadow();
    base.OnClosed(e);
}

[SecuritySafeCritical]
public bool FocusMe()
{
    return WinApi.SetForegroundWindow(Handle);
}

/// <summary>
/// Performs manual startup work that FormBorderStyle.None forfeits:
/// centering, custom window buttons and the shadow window.
/// </summary>
protected override void OnLoad(EventArgs e)
{
    base.OnLoad(e);

    if (DesignMode) return;

    switch (StartPosition)
    {
        case FormStartPosition.CenterParent:
            CenterToParent();
            break;
        case FormStartPosition.CenterScreen:
            if (IsMdiChild)
            {
                CenterToParent();
            }
            else
            {
                CenterToScreen();
            }
            break;
    }

    RemoveCloseButton();

    if (ControlBox)
    {
        AddWindowButton(WindowButtons.Close);

        if (MaximizeBox)
            AddWindowButton(WindowButtons.Maximize);

        if (MinimizeBox)
            AddWindowButton(WindowButtons.Minimize);

        UpdateWindowButtonPosition();
    }

    CreateShadow();
}

protected override void OnActivated(EventArgs e)
{
    base.OnActivated(e);

    if (shadowType == MetroFormShadowType.AeroShadow && IsAeroThemeEnabled() && IsDropShadowSupported())
    {
        // DWMWA attribute 2 (NCRENDERING_POLICY) with value 2 plus a 1px bottom
        // frame extension produces the Aero drop shadow on a borderless window.
        int val = 2;
        DwmApi.DwmSetWindowAttribute(Handle, 2, ref val, 4);
        var m = new DwmApi.MARGINS { cyBottomHeight = 1, cxLeftWidth = 0, cxRightWidth = 0, cyTopHeight = 0 };
        DwmApi.DwmExtendFrameIntoClientArea(Handle, ref m);
    }
}

protected override void OnEnabledChanged(EventArgs e)
{
    base.OnEnabledChanged(e);
    Invalidate();
}

protected override void OnResizeEnd(EventArgs e)
{
    base.OnResizeEnd(e);
    UpdateWindowButtonPosition();
}

/// <summary>
/// Window-procedure hook: blocks moves when not Movable, blocks double-click
/// maximize when not allowed, answers hit-tests for the borderless chrome, and
/// keeps the maximize glyph / max-size info in sync.
/// </summary>
protected override void WndProc(ref Message m)
{
    if (DesignMode)
    {
        base.WndProc(ref m);
        return;
    }

    switch (m.Msg)
    {
        case (int)WinApi.Messages.WM_SYSCOMMAND:
            int sc = m.WParam.ToInt32() & 0xFFF0;
            switch (sc)
            {
                case (int)WinApi.Messages.SC_MOVE:
                    // Swallow the move command entirely when the form is pinned.
                    if (!Movable) return;
                    break;
                case (int)WinApi.Messages.SC_MAXIMIZE:
                    break;
                case (int)WinApi.Messages.SC_RESTORE:
                    break;
            }
            break;

        case (int)WinApi.Messages.WM_NCLBUTTONDBLCLK:
        case (int)WinApi.Messages.WM_LBUTTONDBLCLK:
            // No double-click maximize when the maximize box is disabled.
            if (!MaximizeBox) return;
            break;

        case (int)WinApi.Messages.WM_NCHITTEST:
            WinApi.HitTest ht = HitTestNCA(m.HWnd, m.WParam, m.LParam);
            if (ht != WinApi.HitTest.HTCLIENT)
            {
                m.Result = (IntPtr)ht;
                return;
            }
            break;

        case (int)WinApi.Messages.WM_DWMCOMPOSITIONCHANGED:
            break;
    }

    base.WndProc(ref m);

    switch (m.Msg)
    {
        case (int)WinApi.Messages.WM_GETMINMAXINFO:
            OnGetMinMaxInfo(m.HWnd, m.LParam);
            break;
        case (int)WinApi.Messages.WM_SIZE:
            if (windowButtonList != null)
            {
                // NOTE(review): btn is used without a null check - if the Maximize
                // button was never added (MaximizeBox == false) this dereference
                // throws NullReferenceException; confirm and guard.
                MetroFormButton btn;
                windowButtonList.TryGetValue(WindowButtons.Maximize, out btn);
                if (WindowState == FormWindowState.Normal)
                {
                    if (shadowForm != null) shadowForm.Visible = true;
                    btn.Text = "1";
                }
                if (WindowState == FormWindowState.Maximized) btn.Text = "2";
            }
            break;
    }
}

/// <summary>
/// Fills MINMAXINFO so a maximized borderless window covers the working area
/// (not the full screen, which would hide the taskbar).
/// </summary>
[SecuritySafeCritical]
private unsafe void OnGetMinMaxInfo(IntPtr hwnd, IntPtr lParam)
{
    WinApi.MINMAXINFO* pmmi = (WinApi.MINMAXINFO*)lParam;

    Screen s = Screen.FromHandle(hwnd);
    pmmi->ptMaxSize.x = s.WorkingArea.Width;
    pmmi->ptMaxSize.y = s.WorkingArea.Height;
    pmmi->ptMaxPosition.x = Math.Abs(s.WorkingArea.Left - s.Bounds.Left);
    pmmi->ptMaxPosition.y = Math.Abs(s.WorkingArea.Top - s.Bounds.Top);

    //if (MinimumSize.Width > 0) pmmi->ptMinTrackSize.x = MinimumSize.Width;
    //if (MinimumSize.Height > 0) pmmi->ptMinTrackSize.y = MinimumSize.Height;
    //if (MaximumSize.Width > 0) pmmi->ptMaxTrackSize.x = MaximumSize.Width;
    //if (MaximumSize.Height > 0) pmmi->ptMaxTrackSize.y = MaximumSize.Height;
}

/// <summary>
/// Non-client hit test for the borderless window: bottom-right corner resizes,
/// the top strip acts as the caption (drag area), everything else is client.
/// lparam packs the screen coordinates as two signed 16-bit values.
/// </summary>
private WinApi.HitTest HitTestNCA(IntPtr hwnd, IntPtr wparam, IntPtr lparam)
{
    //Point vPoint = PointToClient(new Point((int)lparam & 0xFFFF, (int)lparam >> 16 & 0xFFFF));
    //Point vPoint = PointToClient(new Point((Int16)lparam, (Int16)((int)lparam >> 16)));
    Point vPoint = new Point((Int16)lparam, (Int16)((int)lparam >> 16));
    int vPadding = Math.Max(Padding.Right, Padding.Bottom);

    if (Resizable)
    {
        if (RectangleToScreen(new Rectangle(ClientRectangle.Width - vPadding, ClientRectangle.Height - vPadding, vPadding, vPadding)).Contains(vPoint))
            return WinApi.HitTest.HTBOTTOMRIGHT;
    }

    if (RectangleToScreen(new Rectangle(borderWidth, borderWidth, ClientRectangle.Width - 2 * borderWidth, 50)).Contains(vPoint))
        return WinApi.HitTest.HTCAPTION;

    return WinApi.HitTest.HTCLIENT;
}

protected override void OnMouseDown(MouseEventArgs e)
{
    base.OnMouseDown(e);
    if (e.Button == MouseButtons.Left && Movable)
    {
        if (WindowState == FormWindowState.Maximized) return;
        // Only start a drag from inside the border frame (not on the edges).
        if (Width - borderWidth > e.Location.X && e.Location.X > borderWidth && e.Location.Y > borderWidth)
        {
            MoveControl();
        }
    }
}

// Hands the drag off to the OS by faking a caption-area mouse down.
[SecuritySafeCritical]
private void MoveControl()
{
    WinApi.ReleaseCapture();
    WinApi.SendMessage(Handle, (int)WinApi.Messages.WM_NCLBUTTONDOWN, (int)WinApi.HitTest.HTCAPTION, 0);
}

// Aero (DWM composition) exists only on Vista+ (OS major version > 5).
[SecuritySafeCritical]
private static bool IsAeroThemeEnabled()
{
    if (Environment.OSVersion.Version.Major <= 5) return false;

    bool aeroEnabled;
    DwmApi.DwmIsCompositionEnabled(out aeroEnabled);
    return aeroEnabled;
}

private static bool IsDropShadowSupported()
{
    return Environment.OSVersion.Version.Major > 5 && SystemInformation.IsDropShadowEnabled;
}

#endregion

#region Window Buttons

private enum WindowButtons
{
    Minimize,
    Maximize,
    Close
}

// Lazily created in AddWindowButton; null until the first button is added.
private Dictionary<WindowButtons, MetroFormButton> windowButtonList;

/// <summary>
/// Creates (at most once per kind) a caption button. The Text values "r", "0",
/// "1", "2" are presumably glyphs in the symbol font used for window buttons
/// (close/minimize/maximize/restore) - confirm against MetroFonts.
/// </summary>
private void AddWindowButton(WindowButtons button)
{
    if (windowButtonList == null)
        windowButtonList = new Dictionary<WindowButtons, MetroFormButton>();

    if (windowButtonList.ContainsKey(button)) return;

    MetroFormButton newButton = new MetroFormButton();

    if (button == WindowButtons.Close)
    {
        newButton.Text = "r";
    }
    else if (button == WindowButtons.Minimize)
    {
        newButton.Text = "0";
    }
    else if (button == WindowButtons.Maximize)
    {
        if (WindowState == FormWindowState.Normal)
            newButton.Text = "1";
        else
            newButton.Text = "2";
    }

    newButton.Style = Style;
    newButton.Theme = Theme;
    newButton.Tag = button;
    newButton.Size = new Size(25, 20);
    newButton.Anchor = AnchorStyles.Top | AnchorStyles.Right;
    newButton.TabStop = false; //remove the form controls from the tab stop
    newButton.Click += WindowButton_Click;
    Controls.Add(newButton);

    windowButtonList.Add(button, newButton);
}

// Shared click handler; dispatches on the WindowButtons value stored in Tag.
private void WindowButton_Click(object sender, EventArgs e)
{
    var btn = sender as MetroFormButton;
    if (btn != null)
    {
        var btnFlag = (WindowButtons)btn.Tag;
        if (btnFlag == WindowButtons.Close)
        {
            Close();
        }
        else if (btnFlag == WindowButtons.Minimize)
        {
            WindowState = FormWindowState.Minimized;
        }
        else if (btnFlag == WindowButtons.Maximize)
        {
            // Toggle maximize/restore and swap the glyph accordingly.
            if (WindowState == FormWindowState.Normal)
            {
                WindowState = FormWindowState.Maximized;
                btn.Text = "2";
            }
            else
            {
                WindowState = FormWindowState.Normal;
                btn.Text = "1";
            }
        }
    }
}

/// <summary>
/// Lays the caption buttons out right-to-left in the fixed order
/// Close, Maximize, Minimize, 25px apart, then repaints.
/// </summary>
private void UpdateWindowButtonPosition()
{
    if (!ControlBox) return;

    Dictionary<int, WindowButtons> priorityOrder = new Dictionary<int, WindowButtons>(3) { { 0, WindowButtons.Close }, { 1, WindowButtons.Maximize }, { 2, WindowButtons.Minimize } };

    Point firstButtonLocation = new Point(ClientRectangle.Width - borderWidth - 25, borderWidth);
    int lastDrawedButtonPosition = firstButtonLocation.X - 25;

    MetroFormButton firstButton = null;

    if (windowButtonList.Count == 1)
    {
        foreach (KeyValuePair<WindowButtons, MetroFormButton> button in windowButtonList)
        {
            button.Value.Location = firstButtonLocation;
        }
    }
    else
    {
        foreach (KeyValuePair<int, WindowButtons> button in priorityOrder)
        {
            bool buttonExists = windowButtonList.ContainsKey(button.Value);

            // The highest-priority existing button anchors the row...
            if (firstButton == null && buttonExists)
            {
                firstButton = windowButtonList[button.Value];
                firstButton.Location = firstButtonLocation;
                continue;
            }

            if (firstButton == null || !buttonExists) continue;

            // ...and each further button is placed 25px to its left.
            windowButtonList[button.Value].Location = new Point(lastDrawedButtonPosition, borderWidth);
            lastDrawedButtonPosition = lastDrawedButtonPosition - 25;
        }
    }

    Refresh();
}

// Flat caption button (close/min/max) that paints itself in the Metro style.
private class MetroFormButton : Button, IMetroControl
{
    #region Interface

    [Category(MetroDefaults.PropertyCategory.Appearance)]
    public event EventHandler<MetroPaintEventArgs> CustomPaintBackground;
protected virtual void OnCustomPaintBackground(MetroPaintEventArgs e) { if (GetStyle(ControlStyles.UserPaint) && CustomPaintBackground != null) { CustomPaintBackground(this, e); } } [Category(MetroDefaults.PropertyCategory.Appearance)] public event EventHandler<MetroPaintEventArgs> CustomPaint; protected virtual void OnCustomPaint(MetroPaintEventArgs e) { if (GetStyle(ControlStyles.UserPaint) && CustomPaint != null) { CustomPaint(this, e); } } [Category(MetroDefaults.PropertyCategory.Appearance)] public event EventHandler<MetroPaintEventArgs> CustomPaintForeground; protected virtual void OnCustomPaintForeground(MetroPaintEventArgs e) { if (GetStyle(ControlStyles.UserPaint) && CustomPaintForeground != null) { CustomPaintForeground(this, e); } } private MetroColorStyle metroStyle = MetroColorStyle.Default; [Category(MetroDefaults.PropertyCategory.Appearance)] [DefaultValue(MetroColorStyle.Default)] public MetroColorStyle Style { get { if (DesignMode || metroStyle != MetroColorStyle.Default) { return metroStyle; } if (StyleManager != null && metroStyle == MetroColorStyle.Default) { return StyleManager.Style; } if (StyleManager == null && metroStyle == MetroColorStyle.Default) { return MetroDefaults.Style; } return metroStyle; } set { metroStyle = value; } } private MetroThemeStyle metroTheme = MetroThemeStyle.Default; [Category(MetroDefaults.PropertyCategory.Appearance)] [DefaultValue(MetroThemeStyle.Default)] public MetroThemeStyle Theme { get { if (DesignMode || metroTheme != MetroThemeStyle.Default) { return metroTheme; } if (StyleManager != null && metroTheme == MetroThemeStyle.Default) { return StyleManager.Theme; } if (StyleManager == null && metroTheme == MetroThemeStyle.Default) { return MetroDefaults.Theme; } return metroTheme; } set { metroTheme = value; } } private MetroStyleManager metroStyleManager = null; [Browsable(false)] [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)] public MetroStyleManager StyleManager { get { return 
metroStyleManager; } set { metroStyleManager = value; } } private bool useCustomBackColor = false; [DefaultValue(false)] [Category(MetroDefaults.PropertyCategory.Appearance)] public bool UseCustomBackColor { get { return useCustomBackColor; } set { useCustomBackColor = value; } } private bool useCustomForeColor = false; [DefaultValue(false)] [Category(MetroDefaults.PropertyCategory.Appearance)] public bool UseCustomForeColor { get { return useCustomForeColor; } set { useCustomForeColor = value; } } private bool useStyleColors = false; [DefaultValue(false)] [Category(MetroDefaults.PropertyCategory.Appearance)] public bool UseStyleColors { get { return useStyleColors; } set { useStyleColors = value; } } [Browsable(false)] [Category(MetroDefaults.PropertyCategory.Behaviour)] [DefaultValue(false)] public bool UseSelectable { get { return GetStyle(ControlStyles.Selectable); } set { SetStyle(ControlStyles.Selectable, value); } } #endregion #region Fields private bool isHovered = false; private bool isPressed = false; #endregion #region Constructor public MetroFormButton() { SetStyle(ControlStyles.AllPaintingInWmPaint | ControlStyles.OptimizedDoubleBuffer | ControlStyles.ResizeRedraw | ControlStyles.UserPaint, true); } #endregion #region Paint Methods protected override void OnPaint(PaintEventArgs e) { Color backColor, foreColor; MetroThemeStyle _Theme = Theme; if (Parent != null) { if (Parent is IMetroForm) { _Theme = ((IMetroForm)Parent).Theme; backColor = MetroPaint.BackColor.Form(_Theme); } else if (Parent is IMetroControl) { backColor = MetroPaint.GetStyleColor(Style); } else { backColor = Parent.BackColor; } } else { backColor = MetroPaint.BackColor.Form(_Theme); } if (isHovered && !isPressed && Enabled) { foreColor = MetroPaint.ForeColor.Button.Normal(_Theme); backColor = MetroPaint.BackColor.Button.Normal(_Theme); } else if (isHovered && isPressed && Enabled) { foreColor = MetroPaint.ForeColor.Button.Press(_Theme); backColor = MetroPaint.GetStyleColor(Style); } 
else if (!Enabled) { foreColor = MetroPaint.ForeColor.Button.Disabled(_Theme); backColor = MetroPaint.BackColor.Button.Disabled(_Theme); } else { foreColor = MetroPaint.ForeColor.Button.Normal(_Theme); } e.Graphics.Clear(backColor); Font buttonFont = new Font("Webdings", 9.25f); TextRenderer.DrawText(e.Graphics, Text, buttonFont, ClientRectangle, foreColor, backColor, TextFormatFlags.HorizontalCenter | TextFormatFlags.VerticalCenter | TextFormatFlags.EndEllipsis); } #endregion #region Mouse Methods protected override void OnMouseEnter(EventArgs e) { isHovered = true; Invalidate(); base.OnMouseEnter(e); } protected override void OnMouseDown(MouseEventArgs e) { if (e.Button == MouseButtons.Left) { isPressed = true; Invalidate(); } base.OnMouseDown(e); } protected override void OnMouseUp(MouseEventArgs e) { isPressed = false; Invalidate(); base.OnMouseUp(e); } protected override void OnMouseLeave(EventArgs e) { isHovered = false; Invalidate(); base.OnMouseLeave(e); } #endregion } #endregion #region Shadows private const int CS_DROPSHADOW = 0x20000; const int WS_MINIMIZEBOX = 0x20000; protected override CreateParams CreateParams { get { CreateParams cp = base.CreateParams; cp.Style |= WS_MINIMIZEBOX; if (ShadowType == MetroFormShadowType.SystemShadow) cp.ClassStyle |= CS_DROPSHADOW; return cp; } } private Form shadowForm; private void CreateShadow() { switch (ShadowType) { case MetroFormShadowType.Flat: shadowForm = new MetroFlatDropShadow(this); return; case MetroFormShadowType.DropShadow: shadowForm = new MetroRealisticDropShadow(this); return; default: shadowForm = new MetroFlatDropShadow(this); return; } } private void RemoveShadow() { if (shadowForm == null || shadowForm.IsDisposed) return; shadowForm.Visible = false; Owner = shadowForm.Owner; shadowForm.Owner = null; shadowForm.Dispose(); shadowForm = null; } #region MetroShadowBase protected abstract class MetroShadowBase : Form { protected Form TargetForm { get; private set; } private readonly int shadowSize; 
private readonly int wsExStyle; protected MetroShadowBase(Form targetForm, int shadowSize, int wsExStyle) { TargetForm = targetForm; this.shadowSize = shadowSize; this.wsExStyle = wsExStyle; TargetForm.Activated += OnTargetFormActivated; TargetForm.ResizeBegin += OnTargetFormResizeBegin; TargetForm.ResizeEnd += OnTargetFormResizeEnd; TargetForm.VisibleChanged += OnTargetFormVisibleChanged; TargetForm.SizeChanged += OnTargetFormSizeChanged; TargetForm.Move += OnTargetFormMove; TargetForm.Resize += OnTargetFormResize; if (TargetForm.Owner != null) Owner = TargetForm.Owner; TargetForm.Owner = this; MaximizeBox = false; MinimizeBox = false; ShowInTaskbar = false; ShowIcon = false; FormBorderStyle = FormBorderStyle.None; Bounds = GetShadowBounds(); } protected override CreateParams CreateParams { get { CreateParams cp = base.CreateParams; cp.ExStyle |= wsExStyle; return cp; } } private Rectangle GetShadowBounds() { Rectangle r = TargetForm.Bounds; r.Inflate(shadowSize, shadowSize); return r; } protected abstract void PaintShadow(); protected abstract void ClearShadow(); #region Event Handlers private bool isBringingToFront; protected override void OnDeactivate(EventArgs e) { base.OnDeactivate(e); isBringingToFront = true; } private void OnTargetFormActivated(object sender, EventArgs e) { if (Visible) Update(); if (isBringingToFront) { Visible = true; isBringingToFront = false; return; } BringToFront(); } private void OnTargetFormVisibleChanged(object sender, EventArgs e) { Visible = TargetForm.Visible && TargetForm.WindowState != FormWindowState.Minimized; Update(); } private long lastResizedOn; private bool IsResizing { get { return lastResizedOn > 0; } } private void OnTargetFormResizeBegin(object sender, EventArgs e) { lastResizedOn = DateTime.Now.Ticks; } private void OnTargetFormMove(object sender, EventArgs e) { if (!TargetForm.Visible || TargetForm.WindowState != FormWindowState.Normal) { Visible = false; } else { Bounds = GetShadowBounds(); } } private void 
OnTargetFormResize(object sender, EventArgs e) { ClearShadow(); } private void OnTargetFormSizeChanged(object sender, EventArgs e) { Bounds = GetShadowBounds(); if (IsResizing) { return; } PaintShadowIfVisible(); } private void OnTargetFormResizeEnd(object sender, EventArgs e) { lastResizedOn = 0; PaintShadowIfVisible(); } private void PaintShadowIfVisible() { if (TargetForm.Visible && TargetForm.WindowState != FormWindowState.Minimized) PaintShadow(); } #endregion #region Constants protected const int WS_EX_TRANSPARENT = 0x20; protected const int WS_EX_LAYERED = 0x80000; protected const int WS_EX_NOACTIVATE = 0x8000000; private const int TICKS_PER_MS = 10000; private const long RESIZE_REDRAW_INTERVAL = 1000 * TICKS_PER_MS; #endregion } #endregion #region Aero DropShadow protected class MetroAeroDropShadow : MetroShadowBase { public MetroAeroDropShadow(Form targetForm) : base(targetForm, 0, WS_EX_TRANSPARENT | WS_EX_NOACTIVATE) { FormBorderStyle = FormBorderStyle.SizableToolWindow; } protected override void SetBoundsCore(int x, int y, int width, int height, BoundsSpecified specified) { if (specified == BoundsSpecified.Size) return; base.SetBoundsCore(x, y, width, height, specified); } protected override void PaintShadow() { Visible = true; } protected override void ClearShadow() { } } #endregion #region Flat DropShadow protected class MetroFlatDropShadow : MetroShadowBase { private Point Offset = new Point(-6, -6); public MetroFlatDropShadow(Form targetForm) : base(targetForm, 6, WS_EX_LAYERED | WS_EX_TRANSPARENT | WS_EX_NOACTIVATE) { } protected override void OnLoad(EventArgs e) { base.OnLoad(e); PaintShadow(); } protected override void OnPaint(PaintEventArgs e) { Visible = true; PaintShadow(); } protected override void PaintShadow() { using (Bitmap getShadow = DrawBlurBorder()) SetBitmap(getShadow, 255); } protected override void ClearShadow() { Bitmap img = new Bitmap(Width, Height, PixelFormat.Format32bppArgb); Graphics g = Graphics.FromImage(img); 
g.Clear(Color.Transparent); g.Flush(); g.Dispose(); SetBitmap(img, 255); img.Dispose(); } #region Drawing methods [SecuritySafeCritical] private void SetBitmap(Bitmap bitmap, byte opacity) { if (bitmap.PixelFormat != PixelFormat.Format32bppArgb) throw new ApplicationException("The bitmap must be 32ppp with alpha-channel."); IntPtr screenDc = WinApi.GetDC(IntPtr.Zero); IntPtr memDc = WinApi.CreateCompatibleDC(screenDc); IntPtr hBitmap = IntPtr.Zero; IntPtr oldBitmap = IntPtr.Zero; try { hBitmap = bitmap.GetHbitmap(Color.FromArgb(0)); oldBitmap = WinApi.SelectObject(memDc, hBitmap); WinApi.SIZE size = new WinApi.SIZE(bitmap.Width, bitmap.Height); WinApi.POINT pointSource = new WinApi.POINT(0, 0); WinApi.POINT topPos = new WinApi.POINT(Left, Top); WinApi.BLENDFUNCTION blend = new WinApi.BLENDFUNCTION(); blend.BlendOp = WinApi.AC_SRC_OVER; blend.BlendFlags = 0; blend.SourceConstantAlpha = opacity; blend.AlphaFormat = WinApi.AC_SRC_ALPHA; WinApi.UpdateLayeredWindow(Handle, screenDc, ref topPos, ref size, memDc, ref pointSource, 0, ref blend, WinApi.ULW_ALPHA); } finally { WinApi.ReleaseDC(IntPtr.Zero, screenDc); if (hBitmap != IntPtr.Zero) { WinApi.SelectObject(memDc, oldBitmap); WinApi.DeleteObject(hBitmap); } WinApi.DeleteDC(memDc); } } private Bitmap DrawBlurBorder() { return (Bitmap)DrawOutsetShadow(Color.Black, new Rectangle(0, 0, ClientRectangle.Width, ClientRectangle.Height)); } private Image DrawOutsetShadow(Color color, Rectangle shadowCanvasArea) { Rectangle rOuter = shadowCanvasArea; Rectangle rInner = new Rectangle(shadowCanvasArea.X + (-Offset.X - 1), shadowCanvasArea.Y + (-Offset.Y - 1), shadowCanvasArea.Width - (-Offset.X * 2 - 1), shadowCanvasArea.Height - (-Offset.Y * 2 - 1)); Bitmap img = new Bitmap(rOuter.Width, rOuter.Height, PixelFormat.Format32bppArgb); Graphics g = Graphics.FromImage(img); g.SmoothingMode = SmoothingMode.AntiAlias; g.InterpolationMode = InterpolationMode.HighQualityBicubic; using (Brush bgBrush = new SolidBrush(Color.FromArgb(30, 
Color.Black))) { g.FillRectangle(bgBrush, rOuter); } using (Brush bgBrush = new SolidBrush(Color.FromArgb(60, Color.Black))) { g.FillRectangle(bgBrush, rInner); } g.Flush(); g.Dispose(); return img; } #endregion } #endregion #region Realistic DropShadow protected class MetroRealisticDropShadow : MetroShadowBase { public MetroRealisticDropShadow(Form targetForm) : base(targetForm, 15, WS_EX_LAYERED | WS_EX_TRANSPARENT | WS_EX_NOACTIVATE) { } protected override void OnLoad(EventArgs e) { base.OnLoad(e); PaintShadow(); } protected override void OnPaint(PaintEventArgs e) { Visible = true; PaintShadow(); } protected override void PaintShadow() { using (Bitmap getShadow = DrawBlurBorder()) SetBitmap(getShadow, 255); } protected override void ClearShadow() { Bitmap img = new Bitmap(Width, Height, PixelFormat.Format32bppArgb); Graphics g = Graphics.FromImage(img); g.Clear(Color.Transparent); g.Flush(); g.Dispose(); SetBitmap(img, 255); img.Dispose(); } #region Drawing methods [SecuritySafeCritical] private void SetBitmap(Bitmap bitmap, byte opacity) { if (bitmap.PixelFormat != PixelFormat.Format32bppArgb) throw new ApplicationException("The bitmap must be 32ppp with alpha-channel."); IntPtr screenDc = WinApi.GetDC(IntPtr.Zero); IntPtr memDc = WinApi.CreateCompatibleDC(screenDc); IntPtr hBitmap = IntPtr.Zero; IntPtr oldBitmap = IntPtr.Zero; try { hBitmap = bitmap.GetHbitmap(Color.FromArgb(0)); oldBitmap = WinApi.SelectObject(memDc, hBitmap); WinApi.SIZE size = new WinApi.SIZE(bitmap.Width, bitmap.Height); WinApi.POINT pointSource = new WinApi.POINT(0, 0); WinApi.POINT topPos = new WinApi.POINT(Left, Top); WinApi.BLENDFUNCTION blend = new WinApi.BLENDFUNCTION { BlendOp = WinApi.AC_SRC_OVER, BlendFlags = 0, SourceConstantAlpha = opacity, AlphaFormat = WinApi.AC_SRC_ALPHA }; WinApi.UpdateLayeredWindow(Handle, screenDc, ref topPos, ref size, memDc, ref pointSource, 0, ref blend, WinApi.ULW_ALPHA); } finally { WinApi.ReleaseDC(IntPtr.Zero, screenDc); if (hBitmap != IntPtr.Zero) 
{ WinApi.SelectObject(memDc, oldBitmap); WinApi.DeleteObject(hBitmap); } WinApi.DeleteDC(memDc); } } private Bitmap DrawBlurBorder() { return (Bitmap)DrawOutsetShadow(0, 0, 40, 1, Color.Black, new Rectangle(1, 1, ClientRectangle.Width, ClientRectangle.Height)); } private Image DrawOutsetShadow(int hShadow, int vShadow, int blur, int spread, Color color, Rectangle shadowCanvasArea) { Rectangle rOuter = shadowCanvasArea; Rectangle rInner = shadowCanvasArea; rInner.Offset(hShadow, vShadow); rInner.Inflate(-blur, -blur); rOuter.Inflate(spread, spread); rOuter.Offset(hShadow, vShadow); Rectangle originalOuter = rOuter; Bitmap img = new Bitmap(originalOuter.Width, originalOuter.Height, PixelFormat.Format32bppArgb); Graphics g = Graphics.FromImage(img); g.SmoothingMode = SmoothingMode.AntiAlias; g.InterpolationMode = InterpolationMode.HighQualityBicubic; var currentBlur = 0; do { var transparency = (rOuter.Height - rInner.Height) / (double)(blur * 2 + spread * 2); var shadowColor = Color.FromArgb(((int)(200 * (transparency * transparency))), color); var rOutput = rInner; rOutput.Offset(-originalOuter.Left, -originalOuter.Top); DrawRoundedRectangle(g, rOutput, currentBlur, Pens.Transparent, shadowColor); rInner.Inflate(1, 1); currentBlur = (int)((double)blur * (1 - (transparency * transparency))); } while (rOuter.Contains(rInner)); g.Flush(); g.Dispose(); return img; } private void DrawRoundedRectangle(Graphics g, Rectangle bounds, int cornerRadius, Pen drawPen, Color fillColor) { int strokeOffset = Convert.ToInt32(Math.Ceiling(drawPen.Width)); bounds = Rectangle.Inflate(bounds, -strokeOffset, -strokeOffset); var gfxPath = new GraphicsPath(); if (cornerRadius > 0) { gfxPath.AddArc(bounds.X, bounds.Y, cornerRadius, cornerRadius, 180, 90); gfxPath.AddArc(bounds.X + bounds.Width - cornerRadius, bounds.Y, cornerRadius, cornerRadius, 270, 90); gfxPath.AddArc(bounds.X + bounds.Width - cornerRadius, bounds.Y + bounds.Height - cornerRadius, cornerRadius, cornerRadius, 0, 90); 
gfxPath.AddArc(bounds.X, bounds.Y + bounds.Height - cornerRadius, cornerRadius, cornerRadius, 90, 90); } else { gfxPath.AddRectangle(bounds); } gfxPath.CloseAllFigures(); if (cornerRadius > 5) { using (SolidBrush b = new SolidBrush(fillColor)) { g.FillPath(b, gfxPath); } } if (drawPen != Pens.Transparent) { using (Pen p = new Pen(drawPen.Color)) { p.EndCap = p.StartCap = LineCap.Round; g.DrawPath(p, gfxPath); } } } #endregion } #endregion #endregion #region Helper Methods [SecuritySafeCritical] public void RemoveCloseButton() { IntPtr hMenu = WinApi.GetSystemMenu(Handle, false); if (hMenu == IntPtr.Zero) return; int n = WinApi.GetMenuItemCount(hMenu); if (n <= 0) return; WinApi.RemoveMenu(hMenu, (uint)(n - 1), WinApi.MfByposition | WinApi.MfRemove); WinApi.RemoveMenu(hMenu, (uint)(n - 2), WinApi.MfByposition | WinApi.MfRemove); WinApi.DrawMenuBar(Handle); } private Rectangle MeasureText(Graphics g, Rectangle clientRectangle, Font font, string text, TextFormatFlags flags) { var proposedSize = new Size(int.MaxValue, int.MinValue); var actualSize = TextRenderer.MeasureText(g, text, font, proposedSize, flags); return new Rectangle(clientRectangle.X, clientRectangle.Y, actualSize.Width, actualSize.Height); } #endregion } }
// Copyright (c) 2010-2014 SharpDX - Alexandre Mutel
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Runtime.InteropServices;

namespace SharpDX.DirectInput
{
    // Managed counterpart of a DirectInput file-stored effect; the other half of
    // this partial class (Size, Guid, Name, EffectParametersPointer properties)
    // is declared elsewhere.
    public partial class EffectFile
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="EffectFile"/> class.
        /// Presets <c>Size</c> to the native struct size, as DirectInput requires.
        /// </summary>
        public EffectFile()
        {
            unsafe
            {
                Size = sizeof(__Native);
            }
        }

        /// <summary>
        /// Gets or sets the parameters.
        /// </summary>
        /// <value>The parameters.</value>
        public EffectParameters Parameters { get; set; }

        // Creates a zeroed native struct with only Size filled in.
        internal static __Native __NewNative()
        {
            unsafe
            {
                __Native temp = default(__Native);
                temp.Size = sizeof(__Native);
                return temp;
            }
        }

        // Internal native struct used for marshalling.
        // The Name.._Name260 byte run is an inline 260-byte ANSI name buffer
        // (read as a whole via a pointer to the first field in __MarshalFrom);
        // presumably it mirrors the native DIFILEEFFECT szFriendlyName[MAX_PATH]
        // field — confirm against the generated mapping.
        [StructLayout(LayoutKind.Sequential, Pack = 0)]
        internal partial struct __Native
        {
            public int Size;
            public System.Guid Guid;
            public System.IntPtr EffectParametersPointer;
            public byte Name;
            public byte _Name2; public byte _Name3; public byte _Name4; public byte _Name5; public byte _Name6; public byte _Name7; public byte _Name8; public byte _Name9; public byte _Name10; public byte _Name11;
            public byte _Name12; public byte _Name13; public byte _Name14; public byte _Name15; public byte _Name16; public byte _Name17; public byte _Name18; public byte _Name19; public byte _Name20; public byte _Name21;
            public byte _Name22; public byte _Name23; public byte _Name24; public byte _Name25; public byte _Name26; public byte _Name27; public byte _Name28; public byte _Name29; public byte _Name30; public byte _Name31;
            public byte _Name32; public byte _Name33; public byte _Name34; public byte _Name35; public byte _Name36; public byte _Name37; public byte _Name38; public byte _Name39; public byte _Name40; public byte _Name41;
            public byte _Name42; public byte _Name43; public byte _Name44; public byte _Name45; public byte _Name46; public byte _Name47; public byte _Name48; public byte _Name49; public byte _Name50; public byte _Name51;
            public byte _Name52; public byte _Name53; public byte _Name54; public byte _Name55; public byte _Name56; public byte _Name57; public byte _Name58; public byte _Name59; public byte _Name60; public byte _Name61;
            public byte _Name62; public byte _Name63; public byte _Name64; public byte _Name65; public byte _Name66; public byte _Name67; public byte _Name68; public byte _Name69; public byte _Name70; public byte _Name71;
            public byte _Name72; public byte _Name73; public byte _Name74; public byte _Name75; public byte _Name76; public byte _Name77; public byte _Name78; public byte _Name79; public byte _Name80; public byte _Name81;
            public byte _Name82; public byte _Name83; public byte _Name84; public byte _Name85; public byte _Name86; public byte _Name87; public byte _Name88; public byte _Name89; public byte _Name90; public byte _Name91;
            public byte _Name92; public byte _Name93; public byte _Name94; public byte _Name95; public byte _Name96; public byte _Name97; public byte _Name98; public byte _Name99; public byte _Name100; public byte _Name101;
            public byte _Name102; public byte _Name103; public byte _Name104; public byte _Name105; public byte _Name106; public byte _Name107; public byte _Name108; public byte _Name109; public byte _Name110; public byte _Name111;
            public byte _Name112; public byte _Name113; public byte _Name114; public byte _Name115; public byte _Name116; public byte _Name117; public byte _Name118; public byte _Name119; public byte _Name120; public byte _Name121;
            public byte _Name122; public byte _Name123; public byte _Name124; public byte _Name125; public byte _Name126; public byte _Name127; public byte _Name128; public byte _Name129; public byte _Name130; public byte _Name131;
            public byte _Name132; public byte _Name133; public byte _Name134; public byte _Name135; public byte _Name136; public byte _Name137; public byte _Name138; public byte _Name139; public byte _Name140; public byte _Name141;
            public byte _Name142; public byte _Name143; public byte _Name144; public byte _Name145; public byte _Name146; public byte _Name147; public byte _Name148; public byte _Name149; public byte _Name150; public byte _Name151;
            public byte _Name152; public byte _Name153; public byte _Name154; public byte _Name155; public byte _Name156; public byte _Name157; public byte _Name158; public byte _Name159; public byte _Name160; public byte _Name161;
            public byte _Name162; public byte _Name163; public byte _Name164; public byte _Name165; public byte _Name166; public byte _Name167; public byte _Name168; public byte _Name169; public byte _Name170; public byte _Name171;
            public byte _Name172; public byte _Name173; public byte _Name174; public byte _Name175; public byte _Name176; public byte _Name177; public byte _Name178; public byte _Name179; public byte _Name180; public byte _Name181;
            public byte _Name182; public byte _Name183; public byte _Name184; public byte _Name185; public byte _Name186; public byte _Name187; public byte _Name188; public byte _Name189; public byte _Name190; public byte _Name191;
            public byte _Name192; public byte _Name193; public byte _Name194; public byte _Name195; public byte _Name196; public byte _Name197; public byte _Name198; public byte _Name199; public byte _Name200; public byte _Name201;
            public byte _Name202; public byte _Name203; public byte _Name204; public byte _Name205; public byte _Name206; public byte _Name207; public byte _Name208; public byte _Name209; public byte _Name210; public byte _Name211;
            public byte _Name212; public byte _Name213; public byte _Name214; public byte _Name215; public byte _Name216; public byte _Name217; public byte _Name218; public byte _Name219; public byte _Name220; public byte _Name221;
            public byte _Name222; public byte _Name223; public byte _Name224; public byte _Name225; public byte _Name226; public byte _Name227; public byte _Name228; public byte _Name229; public byte _Name230; public byte _Name231;
            public byte _Name232; public byte _Name233; public byte _Name234; public byte _Name235; public byte _Name236; public byte _Name237; public byte _Name238; public byte _Name239; public byte _Name240; public byte _Name241;
            public byte _Name242; public byte _Name243; public byte _Name244; public byte _Name245; public byte _Name246; public byte _Name247; public byte _Name248; public byte _Name249; public byte _Name250; public byte _Name251;
            public byte _Name252; public byte _Name253; public byte _Name254; public byte _Name255; public byte _Name256; public byte _Name257; public byte _Name258; public byte _Name259; public byte _Name260;

            // Frees the unmanaged parameter block allocated by __MarshalTo.
            internal unsafe void __MarshalFree()
            {
                if (EffectParametersPointer != IntPtr.Zero)
                    Marshal.FreeHGlobal(EffectParametersPointer);
            }
        }

        // Frees the nested native EffectParameters payload, then the struct's
        // own unmanaged allocation.
        internal unsafe void __MarshalFree(ref __Native @ref)
        {
            // Free Parameters
            if (Parameters != null && @ref.EffectParametersPointer != IntPtr.Zero)
                Parameters.__MarshalFree(ref *((EffectParameters.__Native*)@ref.EffectParametersPointer));
            @ref.__MarshalFree();
        }

        // Method to marshal from native to managed struct.
        internal unsafe void __MarshalFrom(ref __Native @ref)
        {
            this.Size = @ref.Size;
            this.Guid = @ref.Guid;
            this.EffectParametersPointer = @ref.EffectParametersPointer;
            // Read the inline 260-byte ANSI buffer starting at the Name field.
            fixed (void* __ptr = &@ref.Name)
                this.Name = Utilities.PtrToStringAnsi((IntPtr)__ptr, 260);
            if (this.EffectParametersPointer != IntPtr.Zero)
            {
                Parameters = new EffectParameters();
                Parameters.__MarshalFrom(ref *(EffectParameters.__Native*)EffectParametersPointer);
                // Ownership of the parameters has moved to the managed object.
                EffectParametersPointer = IntPtr.Zero;
            }
        }

        // Method to marshal from managed struct to native.
        // NOTE(review): the ANSI copy writes Name.Length bytes with no explicit
        // NUL terminator and no 260-byte bounds check; the struct is zero-filled
        // on creation, but verify Name is always < 260 chars at the call sites.
        internal unsafe void __MarshalTo(ref __Native @ref)
        {
            @ref.Size = this.Size;
            @ref.Guid = this.Guid;
            IntPtr effectParameters = IntPtr.Zero;
            if (Parameters != null)
            {
                // Deep-copy the managed parameters into a native HGlobal block.
                effectParameters = Marshal.AllocHGlobal(sizeof(EffectParameters.__Native));
                var nativeParameters = default(EffectParameters.__Native);
                Parameters.__MarshalTo(ref nativeParameters);
                *((EffectParameters.__Native*)effectParameters) = nativeParameters;
            }
            @ref.EffectParametersPointer = effectParameters;
            IntPtr Name_ = Marshal.StringToHGlobalAnsi(this.Name);
            fixed (void* __ptr = &@ref.Name)
                Utilities.CopyMemory((IntPtr)__ptr, Name_, this.Name.Length);
            Marshal.FreeHGlobal(Name_);
        }
    }
}
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using OmniSharp.Models;
using Xunit;

namespace OmniSharp.Tests
{
    // Tests for the auto-complete endpoint. Each source snippet marks the cursor
    // with a '$' character; CreateRequest strips it and converts its position into
    // the request's Line/Column.
    public class IntellisenseFacts
    {
        [Fact]
        public async Task DisplayText_is_correct_for_property()
        {
            var source =
                @"public class Class1 { public int Foo { get; set; } public Class1() { Foo$ } }";

            var request = CreateRequest(source);
            request.WantSnippet = true;

            var completions = await FindCompletionsAsync(source, request);
            ContainsCompletions(completions.Select(c => c.DisplayText).Take(1), "Foo");
        }

        [Fact]
        public async Task DisplayText_is_correct_for_variable()
        {
            var source =
                @"public class Class1 { public Class1() { var foo = 1; foo$ } }";

            var request = CreateRequest(source);
            request.WantSnippet = true;

            var completions = await FindCompletionsAsync(source, request);
            ContainsCompletions(completions.Select(c => c.DisplayText).Take(1), "foo");
        }

        [Fact]
        public async Task DisplayText_matches_snippet_for_snippet_response()
        {
            var source =
                @"public class Class1 { public Class1() { Foo$ } public void Foo(int bar = 1) { } }";

            var request = CreateRequest(source);
            request.WantSnippet = true;

            var completions = await FindCompletionsAsync(source, request);
            // Snippet mode surfaces both the zero-arg and defaulted-arg forms.
            ContainsCompletions(completions.Select(c => c.DisplayText).Take(2), "Foo()", "Foo(int bar = 1)");
        }

        [Fact]
        public async Task DisplayText_matches_snippet_for_non_snippet_response()
        {
            var source =
                @"public class Class1 { public Class1() { Foo$ } public void Foo(int bar = 1) { } }";

            var request = CreateRequest(source);
            request.WantSnippet = false;

            var completions = await FindCompletionsAsync(source, request);
            ContainsCompletions(completions.Select(c => c.DisplayText).Take(1), "Foo(int bar = 1)");
        }

        [Fact]
        public async Task Returns_camel_case_completions()
        {
            // Lower-case "wl" should camel-hump match WindowLeft and WriteLine.
            var source =
                @"public class Class1 { public Class1() { System.Console.wl$ } }";

            var completions = await FindCompletionsAsync(source);
            ContainsCompletions(completions.Select(c => c.CompletionText).Take(2), "WindowLeft", "WriteLine");
        }

        [Fact]
        public async Task Returns_sub_sequence_completions()
        {
            // "wln" is a subsequence of WriteLine.
            var source =
                @"public class Class1 { public Class1() { System.Console.wln$ } }";

            var completions = await FindCompletionsAsync(source);
            ContainsCompletions(completions.Select(c => c.CompletionText).Take(1), "WriteLine");
        }

        [Fact]
        public async Task Returns_method_header()
        {
            var source =
                @"public class Class1 { public Class1() { System.Console.wln$ } }";

            var completions = await FindCompletionsAsync(source);
            ContainsCompletions(completions.Select(c => c.MethodHeader).Take(1), "WriteLine()");
        }

        [Fact]
        public async Task Returns_variable_before_class()
        {
            // Case-sensitive prefix "my" should rank the local ahead of the type.
            var source =
                @"public class MyClass1 { public MyClass1() { var myvar = 1; my$ } }";

            var completions = await FindCompletionsAsync(source);
            ContainsCompletions(completions.Select(c => c.CompletionText), "myvar", "MyClass1");
        }

        [Fact]
        public async Task Returns_class_before_variable()
        {
            // With upper-case "My" the type outranks the local.
            var source =
                @"public class MyClass1 { public MyClass1() { var myvar = 1; My$ } }";

            var completions = await FindCompletionsAsync(source);
            ContainsCompletions(completions.Select(c => c.CompletionText), "MyClass1", "myvar");
        }

        // Asserts an exact ordered match and dumps expected-vs-found on mismatch.
        // NOTE(review): headers go to Console.Error but items go to Console.Out —
        // presumably unintentional; verify before relying on captured output.
        private void ContainsCompletions(IEnumerable<string> completions, params string[] expected)
        {
            var same = completions.SequenceEqual(expected);
            if (!same)
            {
                System.Console.Error.WriteLine("Expected");
                System.Console.Error.WriteLine("--------");

                foreach (var completion in expected)
                {
                    System.Console.WriteLine(completion);
                }

                System.Console.Error.WriteLine();
                System.Console.Error.WriteLine("Found");
                System.Console.Error.WriteLine("-----");

                foreach (var completion in completions)
                {
                    System.Console.WriteLine(completion);
                }
            }
            Assert.Equal(expected, completions);
        }

        // Runs the auto-complete controller against a one-file workspace built
        // from `source`; builds a default request when none is supplied.
        private async Task<IEnumerable<AutoCompleteResponse>> FindCompletionsAsync(string source, AutoCompleteRequest request = null)
        {
            var workspace = TestHelpers.CreateSimpleWorkspace(source);
            var controller = new OmnisharpController(workspace, null);
            if (request == null)
            {
                request = CreateRequest(source);
            }

            var response = await controller.AutoComplete(request);
            var completions = response as IEnumerable<AutoCompleteResponse>;
            return completions;
        }

        // Translates the '$' cursor marker into Line/Column and strips it from
        // the buffer; WordToComplete is the identifier fragment before the '$'.
        private AutoCompleteRequest CreateRequest(string source, string fileName = "dummy.cs")
        {
            var lineColumn = TestHelpers.GetLineAndColumnFromDollar(source);
            return new AutoCompleteRequest
            {
                Line = lineColumn.Line,
                Column = lineColumn.Column,
                FileName = fileName,
                Buffer = source.Replace("$", ""),
                WordToComplete = GetPartialWord(source),
                WantMethodHeader = true
            };
        }

        // Extracts the identifier characters immediately preceding the first '$'.
        private static string GetPartialWord(string editorText)
        {
            MatchCollection matches = Regex.Matches(editorText, @"([a-zA-Z0-9_]*)\$");
            return matches[0].Groups[1].ToString();
        }
    }
}
//
// StackTest.cs
//
// Author:
//  Chris Hynes <chrish@assistedsolutions.com>
//
// (C) 2001 Chris Hynes
//

using System;
using System.Collections;

using NUnit.Framework;

namespace MonoTests.System.Collections
{
    // Test fixture for System.Collections.Stack.
    // NOTE(review): methods carry no [Test] attribute; presumably discovered by the
    // legacy NUnit "Test*" naming convention inherited via the Assertion base class —
    // confirm against the NUnit version this suite targets.
    [TestFixture]
    public class StackTest: Assertion
    {
        // Fresh empty stacks, re-created by SetUp before every test.
        private Stack stack1;
        private Stack stack2;
        // Pre-loaded with the ints 0..4 (4 on top) by SetUp.
        private Stack stackInt;

        // Default constructor yields a non-null instance.
        public void TestConstructor()
        {
            AssertEquals(false, stack1 == null);
        }

        // Stack(ICollection) preserves enumeration order: last element in ends up on top.
        public void TestICollectionConstructor1()
        {
            Stack stackTest = new Stack(new int[] {0, 1, 2, 3, 4});

            for (int i = 4; i >= 0; i--)
                AssertEquals(i, stackTest.Pop());

            AssertEquals(0, stackTest.Count);
        }

        // Stack(null) must throw ArgumentNullException with ParamName "col".
        public void TestICollectionConstructor2()
        {
            bool exceptionThrown = false;
            try {
                Stack stackTest = new Stack(null);
            } catch (ArgumentNullException e) {
                exceptionThrown = true;
                AssertEquals("ParamName must be \"col\"","col",e.ParamName);
            }
            Assert("null argument must throw ArgumentNullException", exceptionThrown);
        }

        // Stack(int) with a valid capacity constructs successfully.
        public void TestIntConstructor1()
        {
            Stack stackTest = new Stack(50);
            Assert(stackTest != null);
        }

        // Stack(-1) must throw ArgumentOutOfRangeException with ParamName "initialCapacity".
        public void TestIntConstructor2()
        {
            bool exceptionThrown = false;
            try {
                Stack stackTest = new Stack(-1);
            } catch (ArgumentOutOfRangeException e) {
                exceptionThrown = true;
                AssertEquals("ParamName must be \"initialCapacity\"","initialCapacity",e.ParamName);
            }
            Assert("negative argument must throw ArgumentOutOfRangeException", exceptionThrown);
        }

        // Count reflects the number of pushed items (duplicates included).
        public void TestCount()
        {
            Stack stackTest = new Stack();

            stackTest.Push(50);
            stackTest.Push(5);
            stackTest.Push(0);
            stackTest.Push(50);

            AssertEquals(4, stackTest.Count);
        }

        // A plain Stack is not synchronized; the Stack.Synchronized wrapper is.
        public void TestIsSyncronized()
        {
            AssertEquals(false, stack1.IsSynchronized);
            AssertEquals(true, Stack.Synchronized(stack1).IsSynchronized);
        }

        // SyncRoot is always available for external locking.
        public void TestSyncRoot()
        {
            AssertEquals(false, stack1.SyncRoot == null);
        }

        // Enumeration runs top-to-bottom; an emptied stack yields no elements.
        public void TestGetEnumerator1()
        {
            stackInt.Pop();

            int j = 3;

            foreach (int i in stackInt)
            {
                AssertEquals(j--, i);
            }

            stackInt.Clear();

            IEnumerator e = stackInt.GetEnumerator();

            AssertEquals(false, e.MoveNext());
        }

        // Current before the first MoveNext must throw.
        public void TestGetEnumerator2()
        {
            IEnumerator e = stackInt.GetEnumerator();
            try {
                // Tests InvalidOperationException if enumerator is uninitialized
                Object o = e.Current;
                Fail("InvalidOperationException should be thrown");
            } catch (InvalidOperationException) {}
        }

        // Current after enumeration of an empty stack has ended must throw.
        public void TestGetEnumerator3()
        {
            IEnumerator e = stack1.GetEnumerator();
            e.MoveNext();
            try {
                // Tests InvalidOperationException if enumeration has ended
                Object o = e.Current;
                Fail("InvalidOperationException should be thrown");
            } catch (InvalidOperationException) {}
        }

        // Reset rewinds the enumerator back to the pre-first position.
        public void TestEnumeratorReset1()
        {
            IEnumerator e = stackInt.GetEnumerator();
            e.MoveNext();
            AssertEquals("current value", 4, e.Current);
            e.MoveNext();

            e.Reset();

            e.MoveNext();
            AssertEquals("current value after reset", 4, e.Current);
        }

        // Reset after the underlying stack was modified must throw.
        public void TestEnumeratorReset2()
        {
            IEnumerator e = stackInt.GetEnumerator();
            e.MoveNext();
            AssertEquals("current value", 4, e.Current);

            // modifies the underlying stack. Reset must throw InvalidOperationException
            stackInt.Push(5);

            try {
                e.Reset();
                Fail("InvalidOperationException should be thrown");
            } catch (InvalidOperationException) {}
        }

        // MoveNext after the underlying stack was modified must throw.
        public void TestEnumeratorMoveNextException()
        {
            IEnumerator e = stackInt.GetEnumerator();

            // modifies the underlying stack. MoveNext must throw InvalidOperationException
            stackInt.Push(5);

            try {
                e.MoveNext();
                Fail("InvalidOperationException should be thrown");
            } catch (InvalidOperationException) {}
        }

        // Clear empties the stack.
        public void TestClear()
        {
            stackInt.Clear();
            AssertEquals(0, stackInt.Count);
        }

        // Clone produces an independent stack that pops in the same order.
        public void TestClone()
        {
            Stack clone = (Stack)stackInt.Clone();

            while (stackInt.Count > 0)
            {
                AssertEquals(stackInt.Pop(), clone.Pop());
            }
        }

        // Contains finds items anywhere in the stack, including null.
        public void TestContains()
        {
            string toLocate = "test";

            stackInt.Push(toLocate);
            stackInt.Push("chaff");
            stackInt.Push(null);

            Assert(stackInt.Contains(toLocate));
            Assert("must contain null", stackInt.Contains(null));

            stackInt.Pop();
            stackInt.Pop();

            Assert(stackInt.Contains(toLocate));

            stackInt.Pop();

            Assert(!stackInt.Contains(toLocate));

            stackInt.Push(null);
            Assert(stackInt.Contains(null));
            stackInt.Pop();
            Assert(!stackInt.Contains(null));
        }

        // CopyTo argument validation (null array, negative index, multi-dim array,
        // insufficient space) plus a successful copy preserving pop order.
        public void TestCopyTo()
        {
            int[] arr = new int[stackInt.Count - 1];
            int[,] arrMulti;

            try {
                stackInt.CopyTo(null, 0);
                Fail("Should throw an ArgumentNullException");
            } catch (ArgumentNullException e) {
                AssertEquals("ParamName must be \"array\"","array",e.ParamName);
            }

            try {
                stackInt.CopyTo(arr, -1);
                Fail("Should throw an ArgumentOutOfRangeException");
            } catch (ArgumentOutOfRangeException e) {
                AssertEquals("ParamName must be \"index\"","index",e.ParamName);
            }

            try {
                stackInt.CopyTo(arrMulti = new int[1, 1], 1);
                Fail("Should throw an ArgumentException");
            } catch (ArgumentException) {}

            try {
                stackInt.CopyTo(arr = new int[2], 3);
                Fail("Should throw an ArgumentException");
            } catch (ArgumentException) {}

            try {
                stackInt.CopyTo(arr = new int[3], 2);
                Fail("Should throw an ArgumentException");
            } catch (ArgumentException) {}

            try {
                stackInt.CopyTo(arr = new int[2], 3);
                Fail("Should throw an ArgumentException");
            } catch (ArgumentException) {}

            arr = new int[stackInt.Count];

            stackInt.CopyTo(arr, 0);

            int j = 4;

            for (int i = 0; i < 4; i++)
            {
                AssertEquals(j--, arr[i]);
            }
        }

        // The synchronized wrapper shares state with the wrapped stack.
        public void TestSyncronized()
        {
            Stack syncStack = Stack.Synchronized(stackInt);

            syncStack.Push(5);

            for (int i = 5; i >= 0; i--)
                AssertEquals(i, syncStack.Pop());
        }

        // Peek returns the top without removing it; Pop removes it; null round-trips.
        public void TestPushPeekPop()
        {
            stackInt.Pop();

            int topVal = (int)stackInt.Peek();

            AssertEquals(3, topVal);

            AssertEquals(4, stackInt.Count);

            AssertEquals(topVal, stackInt.Pop());

            AssertEquals(2, stackInt.Pop());

            Stack test = new Stack();
            test.Push(null);
            AssertEquals(null, test.Pop());
        }

        // Popping an empty stack must throw InvalidOperationException.
        public void TestPop()
        {
            for (int i = 4; i >= 0; i--)
            {
                AssertEquals(i, stackInt.Pop());
            }
            try {
                stackInt.Pop();
                Fail("must throw InvalidOperationException");
            } catch (InvalidOperationException){
            }
        }

        // ToArray snapshots the stack in pop order without consuming it.
        public void TestToArray()
        {
            object[] arr = stackInt.ToArray();

            AssertEquals(stackInt.Count, arr.Length);

            for (int i = 0; i < 5; i++)
                AssertEquals(arr[i], stackInt.Pop());
        }

        // Pushing past the initial capacity grows the stack while keeping
        // Count and LIFO order correct.
        public void TestResize()
        {
            Stack myStack = new Stack(20);

            for (int i = 0; i < 500; i++)
            {
                myStack.Push(i);
                AssertEquals("push count test",i+1, myStack.Count);
            }

            for (int i = 499; i >= 0; i--)
            {
                AssertEquals(i, myStack.Pop());
                AssertEquals("pop count test",i, myStack.Count);
            }
        }

        // CopyTo from an empty stack into a zero-length array must not throw.
        public void TestEmptyCopyTo ()
        {
            Stack stack = new Stack ();
            string [] arr = new string [0];
            stack.CopyTo (arr, 0);
        }

        // Re-initializes the shared fixtures; stackInt holds 0..4 with 4 on top.
        [SetUp]
        protected void SetUp()
        {
            stack1 = new Stack();
            stack2 = new Stack();
            stackInt = new Stack();
            for (int i = 0; i < 5; i++)
                stackInt.Push(i);
        }
    }
}
#region Copyright notice and license
// Copyright 2015 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion

using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Text;
using Grpc.Core;
using Grpc.Core.Utils;
using Grpc.Core.Profiling;

namespace Grpc.Core.Internal
{
    /// <summary>
    /// grpc_call from <c>grpc/grpc.h</c>.
    /// Managed wrapper around a native call handle; each StartXxx method
    /// registers a pre-allocated batch-completion handler with the completion
    /// registry BEFORE invoking the corresponding native grpcsharp_* entry point.
    /// </summary>
    internal class CallSafeHandle : SafeHandleZeroIsInvalid, INativeCall
    {
        public static readonly CallSafeHandle NullInstance = new CallSafeHandle();

        static readonly NativeMethods Native = NativeMethods.Get();

        // Completion handlers are pre-allocated to avoid unnecessary delegate allocations.
        // The "state" field is used to store the actual callback to invoke.
        static readonly BatchCompletionDelegate CompletionHandler_IUnaryResponseClientCallback =
            (success, context, state) => ((IUnaryResponseClientCallback)state).OnUnaryResponseClient(success, context.GetReceivedStatusOnClient(), context.GetReceivedMessage(), context.GetReceivedInitialMetadata());

        static readonly BatchCompletionDelegate CompletionHandler_IReceivedStatusOnClientCallback =
            (success, context, state) => ((IReceivedStatusOnClientCallback)state).OnReceivedStatusOnClient(success, context.GetReceivedStatusOnClient());

        static readonly BatchCompletionDelegate CompletionHandler_IReceivedMessageCallback =
            (success, context, state) => ((IReceivedMessageCallback)state).OnReceivedMessage(success, context.GetReceivedMessage());

        static readonly BatchCompletionDelegate CompletionHandler_IReceivedResponseHeadersCallback =
            (success, context, state) => ((IReceivedResponseHeadersCallback)state).OnReceivedResponseHeaders(success, context.GetReceivedInitialMetadata());

        static readonly BatchCompletionDelegate CompletionHandler_ISendCompletionCallback =
            (success, context, state) => ((ISendCompletionCallback)state).OnSendCompletion(success);

        static readonly BatchCompletionDelegate CompletionHandler_ISendStatusFromServerCompletionCallback =
            (success, context, state) => ((ISendStatusFromServerCompletionCallback)state).OnSendStatusFromServerCompletion(success);

        static readonly BatchCompletionDelegate CompletionHandler_IReceivedCloseOnServerCallback =
            (success, context, state) => ((IReceivedCloseOnServerCallback)state).OnReceivedCloseOnServer(success, context.GetReceivedCloseOnServerCancelled());

        // Native write flag: suggest that the write be buffered rather than sent immediately.
        const uint GRPC_WRITE_BUFFER_HINT = 1;
        CompletionQueueSafeHandle completionQueue;

        private CallSafeHandle()
        {
        }

        // Associates this call with its completion queue; must happen before any StartXxx call
        // (all of them dereference this.completionQueue).
        public void Initialize(CompletionQueueSafeHandle completionQueue)
        {
            this.completionQueue = completionQueue;
        }

        // Attaches per-call credentials to the native call.
        public void SetCredentials(CallCredentialsSafeHandle credentials)
        {
            Native.grpcsharp_call_set_credentials(this, credentials).CheckOk();
        }

        // Starts a unary (single request / single response) client call.
        public void StartUnary(IUnaryResponseClientCallback callback, byte[] payload, WriteFlags writeFlags, MetadataArraySafeHandle metadataArray, CallFlags callFlags)
        {
            using (completionQueue.NewScope())
            {
                var ctx = BatchContextSafeHandle.Create();
                // Registration must precede the native call so the completion can never be missed.
                completionQueue.CompletionRegistry.RegisterBatchCompletion(ctx, CompletionHandler_IUnaryResponseClientCallback, callback);
                Native.grpcsharp_call_start_unary(this, ctx, payload, new UIntPtr((ulong)payload.Length), writeFlags, metadataArray, callFlags)
                    .CheckOk();
            }
        }

        // Overload used when the caller manages the batch context and its registration itself.
        public void StartUnary(BatchContextSafeHandle ctx, byte[] payload, WriteFlags writeFlags, MetadataArraySafeHandle metadataArray, CallFlags callFlags)
        {
            Native.grpcsharp_call_start_unary(this, ctx, payload, new UIntPtr((ulong)payload.Length), writeFlags, metadataArray, callFlags)
                .CheckOk();
        }

        // Starts a client-streaming call (stream of requests, single response).
        public void StartClientStreaming(IUnaryResponseClientCallback callback, MetadataArraySafeHandle metadataArray, CallFlags callFlags)
        {
            using (completionQueue.NewScope())
            {
                var ctx = BatchContextSafeHandle.Create();
                completionQueue.CompletionRegistry.RegisterBatchCompletion(ctx, CompletionHandler_IUnaryResponseClientCallback, callback);
                Native.grpcsharp_call_start_client_streaming(this, ctx, metadataArray, callFlags).CheckOk();
            }
        }

        // Starts a server-streaming call (single request, stream of responses).
        public void StartServerStreaming(IReceivedStatusOnClientCallback callback, byte[] payload, WriteFlags writeFlags, MetadataArraySafeHandle metadataArray, CallFlags callFlags)
        {
            using (completionQueue.NewScope())
            {
                var ctx = BatchContextSafeHandle.Create();
                completionQueue.CompletionRegistry.RegisterBatchCompletion(ctx, CompletionHandler_IReceivedStatusOnClientCallback, callback);
                Native.grpcsharp_call_start_server_streaming(this, ctx, payload, new UIntPtr((ulong)payload.Length), writeFlags, metadataArray, callFlags).CheckOk();
            }
        }

        // Starts a bidirectional streaming call.
        public void StartDuplexStreaming(IReceivedStatusOnClientCallback callback, MetadataArraySafeHandle metadataArray, CallFlags callFlags)
        {
            using (completionQueue.NewScope())
            {
                var ctx = BatchContextSafeHandle.Create();
                completionQueue.CompletionRegistry.RegisterBatchCompletion(ctx, CompletionHandler_IReceivedStatusOnClientCallback, callback);
                Native.grpcsharp_call_start_duplex_streaming(this, ctx, metadataArray, callFlags).CheckOk();
            }
        }

        // Sends one message on the call; optionally piggybacks empty initial metadata.
        public void StartSendMessage(ISendCompletionCallback callback, byte[] payload, WriteFlags writeFlags, bool sendEmptyInitialMetadata)
        {
            using (completionQueue.NewScope())
            {
                var ctx = BatchContextSafeHandle.Create();
                completionQueue.CompletionRegistry.RegisterBatchCompletion(ctx, CompletionHandler_ISendCompletionCallback, callback);
                Native.grpcsharp_call_send_message(this, ctx, payload, new UIntPtr((ulong)payload.Length), writeFlags, sendEmptyInitialMetadata ? 1 : 0).CheckOk();
            }
        }

        // Half-closes the call from the client side (no more messages will be sent).
        public void StartSendCloseFromClient(ISendCompletionCallback callback)
        {
            using (completionQueue.NewScope())
            {
                var ctx = BatchContextSafeHandle.Create();
                completionQueue.CompletionRegistry.RegisterBatchCompletion(ctx, CompletionHandler_ISendCompletionCallback, callback);
                Native.grpcsharp_call_send_close_from_client(this, ctx).CheckOk();
            }
        }

        // Sends the final status (and optionally trailing metadata / a last payload) from the server.
        // Status detail is marshalled as UTF-8 bytes for the native layer.
        public void StartSendStatusFromServer(ISendStatusFromServerCompletionCallback callback, Status status, MetadataArraySafeHandle metadataArray, bool sendEmptyInitialMetadata,
            byte[] optionalPayload, WriteFlags writeFlags)
        {
            using (completionQueue.NewScope())
            {
                var ctx = BatchContextSafeHandle.Create();
                var optionalPayloadLength = optionalPayload != null ? new UIntPtr((ulong)optionalPayload.Length) : UIntPtr.Zero;
                completionQueue.CompletionRegistry.RegisterBatchCompletion(ctx, CompletionHandler_ISendStatusFromServerCompletionCallback, callback);
                var statusDetailBytes = MarshalUtils.GetBytesUTF8(status.Detail);
                Native.grpcsharp_call_send_status_from_server(this, ctx, status.StatusCode, statusDetailBytes, new UIntPtr((ulong)statusDetailBytes.Length), metadataArray, sendEmptyInitialMetadata ? 1 : 0,
                    optionalPayload, optionalPayloadLength, writeFlags).CheckOk();
            }
        }

        // Requests the next incoming message on the call.
        public void StartReceiveMessage(IReceivedMessageCallback callback)
        {
            using (completionQueue.NewScope())
            {
                var ctx = BatchContextSafeHandle.Create();
                completionQueue.CompletionRegistry.RegisterBatchCompletion(ctx, CompletionHandler_IReceivedMessageCallback, callback);
                Native.grpcsharp_call_recv_message(this, ctx).CheckOk();
            }
        }

        // Requests the initial metadata (response headers) from the peer.
        public void StartReceiveInitialMetadata(IReceivedResponseHeadersCallback callback)
        {
            using (completionQueue.NewScope())
            {
                var ctx = BatchContextSafeHandle.Create();
                completionQueue.CompletionRegistry.RegisterBatchCompletion(ctx, CompletionHandler_IReceivedResponseHeadersCallback, callback);
                Native.grpcsharp_call_recv_initial_metadata(this, ctx).CheckOk();
            }
        }

        // Starts server-side handling; completion fires when the client closes or cancels.
        public void StartServerSide(IReceivedCloseOnServerCallback callback)
        {
            using (completionQueue.NewScope())
            {
                var ctx = BatchContextSafeHandle.Create();
                completionQueue.CompletionRegistry.RegisterBatchCompletion(ctx, CompletionHandler_IReceivedCloseOnServerCallback, callback);
                Native.grpcsharp_call_start_serverside(this, ctx).CheckOk();
            }
        }

        // Sends the initial metadata (headers) on the call.
        public void StartSendInitialMetadata(ISendCompletionCallback callback, MetadataArraySafeHandle metadataArray)
        {
            using (completionQueue.NewScope())
            {
                var ctx = BatchContextSafeHandle.Create();
                completionQueue.CompletionRegistry.RegisterBatchCompletion(ctx, CompletionHandler_ISendCompletionCallback, callback);
                Native.grpcsharp_call_send_initial_metadata(this, ctx, metadataArray).CheckOk();
            }
        }

        // Cancels the call without a specific status.
        public void Cancel()
        {
            Native.grpcsharp_call_cancel(this).CheckOk();
        }

        // Cancels the call, reporting the given status to the peer.
        public void CancelWithStatus(Status status)
        {
            Native.grpcsharp_call_cancel_with_status(this, status.StatusCode, status.Detail).CheckOk();
        }

        // Returns the address of the remote peer; the native C-string is disposed after copying.
        public string GetPeer()
        {
            using (var cstring = Native.grpcsharp_call_get_peer(this))
            {
                return cstring.GetValue();
            }
        }

        // Returns the auth context associated with this call.
        public AuthContextSafeHandle GetAuthContext()
        {
            return Native.grpcsharp_call_auth_context(this);
        }

        // SafeHandle cleanup: destroys the underlying native grpc_call.
        protected override bool ReleaseHandle()
        {
            Native.grpcsharp_call_destroy(handle);
            return true;
        }

        // Maps the managed "buffered" flag onto the native write-flag bit.
        private static uint GetFlags(bool buffered)
        {
            return buffered ? 0 : GRPC_WRITE_BUFFER_HINT;
        }

        /// <summary>
        /// Only for testing.
        /// </summary>
        public static CallSafeHandle CreateFake(IntPtr ptr, CompletionQueueSafeHandle cq)
        {
            var call = new CallSafeHandle();
            call.SetHandle(ptr);
            call.Initialize(cq);
            return call;
        }
    }
}
//---------------------------------------------------------------------------
//
// <copyright file="ByteAnimation.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation.  All rights reserved.
// </copyright>
//
// This file was generated, please do not edit it directly.
//
// Please see http://wiki/default.aspx/Microsoft.Projects.Avalon/MilCodeGen.html for more information.
//
//---------------------------------------------------------------------------

using MS.Internal;
using MS.Internal.KnownBoxes;
using MS.Utility;
using System;
using System.Collections;
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Windows.Media;
using System.Windows.Media.Media3D;
using System.Windows.Media.Animation;

using MS.Internal.PresentationCore;

namespace System.Windows.Media.Animation
{
    /// <summary>
    /// Animates the value of a Byte property using linear interpolation
    /// between two values.  The values are determined by the combination of
    /// From, To, or By values that are set on the animation.
    /// </summary>
    public partial class ByteAnimation : ByteAnimationBase
    {
        #region Data

        /// <summary>
        /// This is used if the user has specified From, To, and/or By values.
        /// </summary>
        // Cached by ValidateAnimationFunction; one or two entries depending on _animationType.
        private Byte[] _keyValues;

        // Which From/To/By combination is in effect; recomputed when any of those DPs change.
        private AnimationType _animationType;
        // False whenever From/To/By changed since the last ValidateAnimationFunction call.
        private bool _isAnimationFunctionValid;

        #endregion

        #region Constructors

        /// <summary>
        /// Static ctor for ByteAnimation establishes
        /// dependency properties, using as much shared data as possible.
        /// </summary>
        static ByteAnimation()
        {
            Type typeofProp = typeof(Byte?);
            Type typeofThis = typeof(ByteAnimation);
            PropertyChangedCallback propCallback = new PropertyChangedCallback(AnimationFunction_Changed);
            ValidateValueCallback validateCallback = new ValidateValueCallback(ValidateFromToOrByValue);

            FromProperty = DependencyProperty.Register(
                "From",
                typeofProp,
                typeofThis,
                new PropertyMetadata((Byte?)null, propCallback),
                validateCallback);

            ToProperty = DependencyProperty.Register(
                "To",
                typeofProp,
                typeofThis,
                new PropertyMetadata((Byte?)null, propCallback),
                validateCallback);

            ByProperty = DependencyProperty.Register(
                "By",
                typeofProp,
                typeofThis,
                new PropertyMetadata((Byte?)null, propCallback),
                validateCallback);

            EasingFunctionProperty = DependencyProperty.Register(
                "EasingFunction",
                typeof(IEasingFunction),
                typeofThis);
        }

        /// <summary>
        /// Creates a new ByteAnimation with all properties set to
        /// their default values.
        /// </summary>
        public ByteAnimation()
            : base()
        {
        }

        /// <summary>
        /// Creates a new ByteAnimation that will animate a
        /// Byte property from its base value to the value specified
        /// by the "toValue" parameter of this constructor.
        /// </summary>
        public ByteAnimation(Byte toValue, Duration duration)
            : this()
        {
            To = toValue;
            Duration = duration;
        }

        /// <summary>
        /// Creates a new ByteAnimation that will animate a
        /// Byte property from its base value to the value specified
        /// by the "toValue" parameter of this constructor.
        /// </summary>
        public ByteAnimation(Byte toValue, Duration duration, FillBehavior fillBehavior)
            : this()
        {
            To = toValue;
            Duration = duration;
            FillBehavior = fillBehavior;
        }

        /// <summary>
        /// Creates a new ByteAnimation that will animate a
        /// Byte property from the "fromValue" parameter of this constructor
        /// to the "toValue" parameter.
        /// </summary>
        public ByteAnimation(Byte fromValue, Byte toValue, Duration duration)
            : this()
        {
            From = fromValue;
            To = toValue;
            Duration = duration;
        }

        /// <summary>
        /// Creates a new ByteAnimation that will animate a
        /// Byte property from the "fromValue" parameter of this constructor
        /// to the "toValue" parameter.
        /// </summary>
        public ByteAnimation(Byte fromValue, Byte toValue, Duration duration, FillBehavior fillBehavior)
            : this()
        {
            From = fromValue;
            To = toValue;
            Duration = duration;
            FillBehavior = fillBehavior;
        }

        #endregion

        #region Freezable

        /// <summary>
        /// Creates a copy of this ByteAnimation
        /// </summary>
        /// <returns>The copy</returns>
        public new ByteAnimation Clone()
        {
            return (ByteAnimation)base.Clone();
        }

        //
        // Note that we don't override the Clone virtuals (CloneCore, CloneCurrentValueCore,
        // GetAsFrozenCore, and GetCurrentValueAsFrozenCore) even though this class has state
        // not stored in a DP.
        //
        // We don't need to clone _animationType and _keyValues because they are the the cached
        // results of animation function validation, which can be recomputed.  The other remaining
        // field, isAnimationFunctionValid, defaults to false, which causes this recomputation to happen.
        //

        /// <summary>
        /// Implementation of <see cref="System.Windows.Freezable.CreateInstanceCore">Freezable.CreateInstanceCore</see>.
        /// </summary>
        /// <returns>The new Freezable.</returns>
        protected override Freezable CreateInstanceCore()
        {
            return new ByteAnimation();
        }

        #endregion

        #region Methods

        /// <summary>
        /// Calculates the value this animation believes should be the current value for the property.
        /// </summary>
        /// <param name="defaultOriginValue">
        /// This value is the suggested origin value provided to the animation
        /// to be used if the animation does not have its own concept of a
        /// start value. If this animation is the first in a composition chain
        /// this value will be the snapshot value if one is available or the
        /// base property value if it is not; otherwise this value will be the
        /// value returned by the previous animation in the chain with an
        /// animationClock that is not Stopped.
        /// </param>
        /// <param name="defaultDestinationValue">
        /// This value is the suggested destination value provided to the animation
        /// to be used if the animation does not have its own concept of an
        /// end value. This value will be the base value if the animation is
        /// in the first composition layer of animations on a property;
        /// otherwise this value will be the output value from the previous
        /// composition layer of animations for the property.
        /// </param>
        /// <param name="animationClock">
        /// This is the animationClock which can generate the CurrentTime or
        /// CurrentProgress value to be used by the animation to generate its
        /// output value.
        /// </param>
        /// <returns>
        /// The value this animation believes should be the current value for the property.
        /// </returns>
        protected override Byte GetCurrentValueCore(Byte defaultOriginValue, Byte defaultDestinationValue, AnimationClock animationClock)
        {
            Debug.Assert(animationClock.CurrentState != ClockState.Stopped);

            if (!_isAnimationFunctionValid)
            {
                ValidateAnimationFunction();
            }

            double progress = animationClock.CurrentProgress.Value;

            IEasingFunction easingFunction = EasingFunction;
            if (easingFunction != null)
            {
                progress = easingFunction.Ease(progress);
            }

            Byte from      = new Byte();
            Byte to        = new Byte();
            Byte accumulated = new Byte();
            Byte foundation = new Byte();

            // need to validate the default origin and destination values if
            // the animation uses them as the from, to, or foundation values
            bool validateOrigin = false;
            bool validateDestination = false;

            // Resolve from/to/foundation based on the From/To/By combination
            // that ValidateAnimationFunction determined.
            switch(_animationType)
            {
                case AnimationType.Automatic:

                    from    = defaultOriginValue;
                    to      = defaultDestinationValue;

                    validateOrigin = true;
                    validateDestination = true;

                    break;

                case AnimationType.From:

                    from    = _keyValues[0];
                    to      = defaultDestinationValue;

                    validateDestination = true;

                    break;

                case AnimationType.To:

                    from = defaultOriginValue;
                    to = _keyValues[0];

                    validateOrigin = true;

                    break;

                case AnimationType.By:

                    // According to the SMIL specification, a By animation is
                    // always additive.  But we don't force this so that a
                    // user can re-use a By animation and have it replace the
                    // animations that precede it in the list without having
                    // to manually set the From value to the base value.

                    to = _keyValues[0];
                    foundation = defaultOriginValue;

                    validateOrigin = true;

                    break;

                case AnimationType.FromTo:

                    from = _keyValues[0];
                    to = _keyValues[1];

                    if (IsAdditive)
                    {
                        foundation = defaultOriginValue;
                        validateOrigin = true;
                    }

                    break;

                case AnimationType.FromBy:

                    from = _keyValues[0];
                    to = AnimatedTypeHelpers.AddByte(_keyValues[0], _keyValues[1]);

                    if (IsAdditive)
                    {
                        foundation = defaultOriginValue;
                        validateOrigin = true;
                    }

                    break;

                default:

                    Debug.Fail("Unknown animation type.");

                    break;
            }

            if (validateOrigin
                && !AnimatedTypeHelpers.IsValidAnimationValueByte(defaultOriginValue))
            {
                throw new InvalidOperationException(
                    SR.Get(
                        SRID.Animation_Invalid_DefaultValue,
                        this.GetType(),
                        "origin",
                        defaultOriginValue.ToString(CultureInfo.InvariantCulture)));
            }

            if (validateDestination
                && !AnimatedTypeHelpers.IsValidAnimationValueByte(defaultDestinationValue))
            {
                throw new InvalidOperationException(
                    SR.Get(
                        SRID.Animation_Invalid_DefaultValue,
                        this.GetType(),
                        "destination",
                        defaultDestinationValue.ToString(CultureInfo.InvariantCulture)));
            }

            if (IsCumulative)
            {
                // Accumulate one full (to - from) delta per completed iteration.
                double currentRepeat = (double)(animationClock.CurrentIteration - 1);

                if (currentRepeat > 0.0)
                {
                    Byte accumulator = AnimatedTypeHelpers.SubtractByte(to, from);

                    accumulated = AnimatedTypeHelpers.ScaleByte(accumulator, currentRepeat);
                }
            }

            // return foundation + accumulated + from + ((to - from) * progress)
            return AnimatedTypeHelpers.AddByte(
                foundation,
                AnimatedTypeHelpers.AddByte(
                    accumulated,
                    AnimatedTypeHelpers.InterpolateByte(from, to, progress)));
        }

        // Recomputes _animationType and _keyValues from the From/To/By DPs.
        // Precedence: From (combined with To, then By) > To > By > Automatic.
        private void ValidateAnimationFunction()
        {
            _animationType = AnimationType.Automatic;
            _keyValues = null;

            if (From.HasValue)
            {
                if (To.HasValue)
                {
                    _animationType = AnimationType.FromTo;
                    _keyValues = new Byte[2];
                    _keyValues[0] = From.Value;
                    _keyValues[1] = To.Value;
                }
                else if (By.HasValue)
                {
                    _animationType = AnimationType.FromBy;
                    _keyValues = new Byte[2];
                    _keyValues[0] = From.Value;
                    _keyValues[1] = By.Value;
                }
                else
                {
                    _animationType = AnimationType.From;
                    _keyValues = new Byte[1];
                    _keyValues[0] = From.Value;
                }
            }
            else if (To.HasValue)
            {
                _animationType = AnimationType.To;
                _keyValues = new Byte[1];
                _keyValues[0] = To.Value;
            }
            else if (By.HasValue)
            {
                _animationType = AnimationType.By;
                _keyValues = new Byte[1];
                _keyValues[0] = By.Value;
            }

            _isAnimationFunctionValid = true;
        }

        #endregion

        #region Properties

        // Invalidates the cached animation function and notifies the base class
        // whenever From, To, or By changes.
        private static void AnimationFunction_Changed(DependencyObject d, DependencyPropertyChangedEventArgs e)
        {
            ByteAnimation a = (ByteAnimation)d;

            a._isAnimationFunctionValid = false;
            a.PropertyChanged(e.Property);
        }

        // DP validation callback shared by From/To/By; null is allowed (property unset).
        private static bool ValidateFromToOrByValue(object value)
        {
            Byte? typedValue = (Byte?)value;

            if (typedValue.HasValue)
            {
                return AnimatedTypeHelpers.IsValidAnimationValueByte(typedValue.Value);
            }
            else
            {
                return true;
            }
        }

        /// <summary>
        /// FromProperty
        /// </summary>
        public static readonly DependencyProperty FromProperty;

        /// <summary>
        /// From
        /// </summary>
        public Byte? From
        {
            get
            {
                return (Byte?)GetValue(FromProperty);
            }
            set
            {
                SetValueInternal(FromProperty, value);
            }
        }

        /// <summary>
        /// ToProperty
        /// </summary>
        public static readonly DependencyProperty ToProperty;

        /// <summary>
        /// To
        /// </summary>
        public Byte? To
        {
            get
            {
                return (Byte?)GetValue(ToProperty);
            }
            set
            {
                SetValueInternal(ToProperty, value);
            }
        }

        /// <summary>
        /// ByProperty
        /// </summary>
        public static readonly DependencyProperty ByProperty;

        /// <summary>
        /// By
        /// </summary>
        public Byte? By
        {
            get
            {
                return (Byte?)GetValue(ByProperty);
            }
            set
            {
                SetValueInternal(ByProperty, value);
            }
        }

        /// <summary>
        /// EasingFunctionProperty
        /// </summary>
        public static readonly DependencyProperty EasingFunctionProperty;

        /// <summary>
        /// EasingFunction
        /// </summary>
        public IEasingFunction EasingFunction
        {
            get
            {
                return (IEasingFunction)GetValue(EasingFunctionProperty);
            }
            set
            {
                SetValueInternal(EasingFunctionProperty, value);
            }
        }

        /// <summary>
        /// If this property is set to true the animation will add its value to
        /// the base value instead of replacing it entirely.
        /// </summary>
        public bool IsAdditive
        {
            get
            {
                return (bool)GetValue(IsAdditiveProperty);
            }
            set
            {
                SetValueInternal(IsAdditiveProperty, BooleanBoxes.Box(value));
            }
        }

        /// <summary>
        /// If this property is set to true, the animation will accumulate its
        /// value over repeats.  For instance if you have a From value of 0.0 and
        /// a To value of 1.0, the animation return values from 1.0 to 2.0 over
        /// the second repeat cycle, and 2.0 to 3.0 over the third, etc.
        /// </summary>
        public bool IsCumulative
        {
            get
            {
                return (bool)GetValue(IsCumulativeProperty);
            }
            set
            {
                SetValueInternal(IsCumulativeProperty, BooleanBoxes.Box(value));
            }
        }

        #endregion
    }
}
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Reflection;
using System.Text;
using System.Threading;
using log4net;
using OpenMetaverse;
using OpenMetaverse.Packets;
using OpenSim.Framework;
using OpenSim.Framework.Client;
using OpenSim.Framework.Monitoring;
using OpenSim.Region.Framework.Scenes;

namespace OpenSim.Region.OptionalModules.Agent.InternetRelayClientView.Server
{
    public delegate void OnIRCClientReadyDelegate(IRCClientView cv);

    /// <summary>
    /// Presents a connected IRC client to the region as an ordinary scene
    /// agent: one TCP connection per instance, the region exposed as a
    /// single IRC channel, and region chat bridged to/from PRIVMSG.
    /// </summary>
    public class IRCClientView : IClientAPI, IClientCore
    {
        /// <summary>Raised once the client has completed NICK + USER registration.</summary>
        public event OnIRCClientReadyDelegate OnIRCReady;

        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        private readonly TcpClient m_client;
        private readonly Scene m_scene;

        // This shim has no real circuit; a random UUID doubles as agent,
        // session and secure-session id.
        private UUID m_agentID = UUID.Random();

        public ISceneAgent SceneAgent { get; set; }

        // Populated during IRC registration (USER / NICK).
        private string m_username;
        private string m_nick;

        private bool m_hasNick = false;
        private bool m_hasUser = false;

        private bool m_connected = true;

        public IRCClientView(TcpClient client, Scene scene)
        {
            m_client = client;
            m_scene = scene;

            // The read loop owns the connection for this client's lifetime.
            Watchdog.StartThread(InternalLoop, "IRCClientView", ThreadPriority.Normal, false, true);
        }

        /// <summary>Sends a numeric/named reply prefixed with the server name.</summary>
        private void SendServerCommand(string command)
        {
            SendCommand(":opensimircd " + command);
        }

        /// <summary>Writes one raw IRC line (CRLF-terminated) to the client, asynchronously.</summary>
        private void SendCommand(string command)
        {
            m_log.Info("[IRCd] Sending >>> " + command);

            byte[] buf = Util.UTF8.GetBytes(command + "\r\n");
            m_client.GetStream().BeginWrite(buf, 0, buf.Length, SendComplete, null);
        }

        private void SendComplete(IAsyncResult result)
        {
            m_log.Info("[IRCd] Send Complete.");
        }

        private string IrcRegionName
        {
            // I know &Channel is more technically correct, but people are used to seeing #Channel
            // Dont shoot me!
            get { return "#" + m_scene.RegionInfo.RegionName.Replace(" ", "-"); }
        }

        /// <summary>
        /// Blocking read loop: accumulates bytes into a string buffer,
        /// peels off complete CRLF-terminated messages and dispatches them.
        /// Runs until the socket closes or an I/O error occurs.
        /// </summary>
        private void InternalLoop()
        {
            try
            {
                string strbuf = String.Empty;

                while (m_connected && m_client.Connected)
                {
                    byte[] buf = new byte[512]; // RFC1459 defines max message size as 512.

                    int count = m_client.GetStream().Read(buf, 0, buf.Length);

                    // Stream.Read returns 0 only at end-of-stream, i.e. the
                    // remote end closed. Without this guard, a half-closed
                    // socket with a partial message left in strbuf would
                    // spin in this loop forever.
                    if (count == 0)
                    {
                        m_connected = false;
                        m_log.Info("[IRCd] Buffer zero, closing...");
                        if (OnDisconnectUser != null)
                            OnDisconnectUser();
                        break;
                    }

                    // NOTE(review): decoding each chunk independently can split a
                    // multi-byte UTF-8 sequence across reads — confirm acceptable.
                    string line = Util.UTF8.GetString(buf, 0, count);
                    strbuf += line;

                    string message = ExtractMessage(strbuf);
                    if (message != null)
                    {
                        // Remove from buffer
                        strbuf = strbuf.Remove(0, message.Length);

                        m_log.Info("[IRCd] Recieving <<< " + message);
                        message = message.Trim();

                        // Extract command sequence
                        string command = ExtractCommand(message);
                        ProcessInMessage(message, command);
                    }
                    else
                    {
                        //m_log.Info("[IRCd] Recieved data, but not enough to make a message. BufLen is " + strbuf.Length +
                        //           "[" + strbuf + "]");
                        if (strbuf.Length == 0)
                        {
                            m_connected = false;
                            m_log.Info("[IRCd] Buffer zero, closing...");
                            if (OnDisconnectUser != null)
                                OnDisconnectUser();
                        }
                    }

                    Thread.Sleep(0);
                    Watchdog.UpdateThread();
                }
            }
            catch (IOException)
            {
                if (OnDisconnectUser != null)
                    OnDisconnectUser();

                m_log.Warn("[IRCd] Disconnected client.");
            }
            catch (SocketException)
            {
                if (OnDisconnectUser != null)
                    OnDisconnectUser();

                m_log.Warn("[IRCd] Disconnected client.");
            }

            Watchdog.RemoveThread();
        }

        /// <summary>
        /// Dispatches a single parsed IRC command. Unsupported commands get
        /// numeric 421 replies; a null command (unparsable line) is ignored.
        /// </summary>
        private void ProcessInMessage(string message, string command)
        {
            m_log.Info("[IRCd] Processing [MSG:" + message + "] [COM:" + command + "]");
            if (command != null)
            {
                switch (command)
                {
                    case "ADMIN":
                    case "AWAY":
                    case "CONNECT":
                    case "DIE":
                    case "ERROR":
                    case "INFO":
                    case "INVITE":
                    case "ISON":
                    case "KICK":
                    case "KILL":
                    case "LINKS":
                    case "LUSERS":
                    case "OPER":
                    case "PART":
                    case "REHASH":
                    case "SERVICE":
                    case "SERVLIST":
                    case "SERVER":
                    case "SQUERY":
                    case "SQUIT":
                    case "STATS":
                    case "SUMMON":
                    case "TIME":
                    case "TRACE":
                    case "VERSION":
                    case "WALLOPS":
                    case "WHOIS":
                    case "WHOWAS":
                        SendServerCommand("421 " + command + " :Command unimplemented");
                        break;

                    // Connection Commands
                    case "PASS":
                        break; // Ignore for now. I want to implement authentication later however.

                    case "JOIN":
                        IRC_SendReplyJoin();
                        break;

                    case "MODE":
                        IRC_SendReplyModeChannel();
                        break;

                    case "USER":
                        IRC_ProcessUser(message);
                        IRC_Ready();
                        break;

                    case "USERHOST":
                        string[] userhostArgs = ExtractParameters(message);
                        if (userhostArgs[0] == ":" + m_nick)
                        {
                            SendServerCommand("302 :" + m_nick + "=+" + m_nick + "@" +
                                              ((IPEndPoint)m_client.Client.RemoteEndPoint).Address);
                        }
                        break;

                    case "NICK":
                        IRC_ProcessNick(message);
                        IRC_Ready();
                        break;

                    case "TOPIC":
                        IRC_SendReplyTopic();
                        break;

                    case "USERS":
                        IRC_SendReplyUsers();
                        break;

                    case "LIST":
                        break; // TODO

                    case "MOTD":
                        IRC_SendMOTD();
                        break;

                    case "NOTICE": // TODO
                        break;

                    case "WHO": // TODO
                        IRC_SendNamesReply();
                        IRC_SendWhoReply();
                        break;

                    case "PING":
                        IRC_ProcessPing(message);
                        break;

                    // Special case, ignore this completely.
                    case "PONG":
                        break;

                    case "QUIT":
                        if (OnDisconnectUser != null)
                            OnDisconnectUser();
                        break;

                    case "NAMES":
                        IRC_SendNamesReply();
                        break;

                    case "PRIVMSG":
                        IRC_ProcessPrivmsg(message);
                        break;

                    default:
                        SendServerCommand("421 " + command + " :Unknown command");
                        break;
                }
            }
        }

        /// <summary>
        /// Once both NICK and USER have arrived, sends the standard IRC
        /// welcome burst (001-255), renames the client to its 'real name'
        /// and auto-joins it to the region channel.
        /// </summary>
        private void IRC_Ready()
        {
            if (m_hasUser && m_hasNick)
            {
                SendServerCommand("001 " + m_nick + " :Welcome to OpenSimulator IRCd");
                SendServerCommand("002 " + m_nick + " :Running OpenSimVersion");
                SendServerCommand("003 " + m_nick + " :This server was created over 9000 years ago");
                SendServerCommand("004 " + m_nick + " :opensimirc r1 aoOirw abeiIklmnoOpqrstv");
                SendServerCommand("251 " + m_nick + " :There are 0 users and 0 services on 1 servers");
                SendServerCommand("252 " + m_nick + " 0 :operators online");
                SendServerCommand("253 " + m_nick + " 0 :unknown connections");
                SendServerCommand("254 " + m_nick + " 1 :channels formed");
                SendServerCommand("255 " + m_nick + " :I have 1 users, 0 services and 1 servers");
                SendCommand(":" + m_nick + " MODE " + m_nick + " :+i");
                SendCommand(":" + m_nick + " JOIN :" + IrcRegionName);

                // Rename to 'Real Name'
                SendCommand(":" + m_nick + " NICK :" + m_username.Replace(" ", ""));
                m_nick = m_username.Replace(" ", "");

                IRC_SendReplyJoin();

                IRC_SendChannelPrivmsg("System", "Welcome to OpenSimulator.");
                IRC_SendChannelPrivmsg("System", "You are in a maze of twisty little passages, all alike.");
                IRC_SendChannelPrivmsg("System", "It is pitch black. You are likely to be eaten by a grue.");

                if (OnIRCReady != null)
                    OnIRCReady(this);
            }
        }

        private void IRC_SendReplyJoin()
        {
            IRC_SendReplyTopic();
            IRC_SendNamesReply();
        }

        private void IRC_SendReplyModeChannel()
        {
            SendServerCommand("324 " + m_nick + " " + IrcRegionName + " +n");
            //SendCommand(":" + IrcRegionName + " MODE +n");
        }

        /// <summary>Handles USER: the 'real name' trailing parameter becomes the avatar name.</summary>
        private void IRC_ProcessUser(string message)
        {
            string[] userArgs = ExtractParameters(message);
            // TODO: unused: string username = userArgs[0];
            // TODO: unused: string hostname = userArgs[1];
            // TODO: unused: string servername = userArgs[2];
            string realname = userArgs[3].Replace(":", "");

            m_username = realname;
            m_hasUser = true;
        }

        /// <summary>Handles NICK by recording the requested nickname.</summary>
        private void IRC_ProcessNick(string message)
        {
            string[] nickArgs = ExtractParameters(message);
            string nickname = nickArgs[0].Replace(":", "");
            m_nick = nickname;
            m_hasNick = true;
        }

        private void IRC_ProcessPing(string message)
        {
            string[] pingArgs = ExtractParameters(message);
            string pingHost = pingArgs[0];
            SendCommand("PONG " + pingHost);
        }

        /// <summary>
        /// Bridges PRIVMSG to the region channel into local scene chat.
        /// PRIVMSG to anything else is currently dropped (IM support TODO).
        /// </summary>
        private void IRC_ProcessPrivmsg(string message)
        {
            string[] privmsgArgs = ExtractParameters(message);
            if (privmsgArgs[0] == IrcRegionName)
            {
                if (OnChatFromClient != null)
                {
                    OSChatMessage msg = new OSChatMessage();
                    msg.Sender = this;
                    msg.Channel = 0;
                    msg.From = this.Name;
                    msg.Message = privmsgArgs[1].Replace(":", "");
                    msg.Position = Vector3.Zero;
                    msg.Scene = m_scene;
                    msg.SenderObject = null;
                    msg.SenderUUID = this.AgentId;
                    msg.Type = ChatTypeEnum.Say;
                    OnChatFromClient(this, msg);
                }
            }
            else
            {
                // Handle as an IM, later.
            }
        }

        /// <summary>Sends RPL_NAMREPLY (353/366) listing every avatar in the scene.</summary>
        private void IRC_SendNamesReply()
        {
            EntityBase[] users = m_scene.Entities.GetAllByType<ScenePresence>();

            foreach (EntityBase user in users)
            {
                SendServerCommand("353 " + m_nick + " = " + IrcRegionName + " :" + user.Name.Replace(" ", ""));
            }
            SendServerCommand("366 " + IrcRegionName + " :End of /NAMES list");
        }

        /// <summary>Sends RPL_WHOREPLY (352/315) for every avatar in the scene.</summary>
        private void IRC_SendWhoReply()
        {
            EntityBase[] users = m_scene.Entities.GetAllByType<ScenePresence>();

            foreach (EntityBase user in users)
            {
                /*SendServerCommand(String.Format("352 {0} {1} {2} {3} {4} {5} :0 {6}", IrcRegionName,
                                                user.Name.Replace(" ", ""), "nohost.com", "opensimircd",
                                                user.Name.Replace(" ", ""), 'H', user.Name));*/

                SendServerCommand("352 " + m_nick + " " + IrcRegionName + " n=" + user.Name.Replace(" ", "") +
                                  " fakehost.com " + user.Name.Replace(" ", "") + " H " + ":0 " + user.Name);

                //SendServerCommand("352 " + IrcRegionName + " " + user.Name.Replace(" ", "") + " nohost.com irc.opensimulator " + user.Name.Replace(" ", "") + " H " + ":0 " + user.Name);
            }
            SendServerCommand("315 " + m_nick + " " + IrcRegionName + " :End of /WHO list");
        }

        private void IRC_SendMOTD()
        {
            SendServerCommand("375 :- OpenSimulator Message of the day -");
            SendServerCommand("372 :- Hiya!");
            SendServerCommand("376 :End of /MOTD command");
        }

        private void IRC_SendReplyTopic()
        {
            SendServerCommand("332 " + IrcRegionName + " :OpenSimulator IRC Server");
        }

        /// <summary>
        /// Sends RPL_USERS (392/393/394) — one fixed-width row per avatar,
        /// or 395 when the region is empty.
        /// </summary>
        private void IRC_SendReplyUsers()
        {
            EntityBase[] users = m_scene.Entities.GetAllByType<ScenePresence>();

            SendServerCommand("392 :UserID Terminal Host");

            if (users.Length == 0)
            {
                SendServerCommand("395 :Nobody logged in");
                return;
            }

            foreach (EntityBase user in users)
            {
                char[] nom = new char[8];
                char[] term = "terminal_".ToCharArray();
                char[] host = "hostname".ToCharArray();

                string userName = user.Name.Replace(" ", "");
                for (int i = 0; i < nom.Length; i++)
                {
                    // Copy the name into the 8-char column, space-padded.
                    // BUGFIX: the original tested 'userName.Length < i', which
                    // never copied any name characters and indexed past the end
                    // of names shorter than the column (IndexOutOfRangeException).
                    if (i < userName.Length)
                        nom[i] = userName[i];
                    else
                        nom[i] = ' ';
                }

                // BUGFIX: concatenating a char[] with a string calls
                // Array.ToString() and emits "System.Char[]"; build real
                // strings from the character buffers instead.
                SendServerCommand("393 :" + new string(nom) + " " + new string(term) + " " + new string(host));
            }
            SendServerCommand("394 :End of users");
        }

        /// <summary>
        /// Returns the first complete CRLF-terminated message from the buffer
        /// (terminator included), or null when no full message is present.
        /// </summary>
        private static string ExtractMessage(string buffer)
        {
            int pos = buffer.IndexOf("\r\n");
            if (pos == -1)
                return null;
            string command = buffer.Substring(0, pos + 2);
            return command;
        }

        /// <summary>
        /// Returns the command word of a raw IRC line: the second token when
        /// the line carries a ':' source prefix, otherwise the first. Null
        /// (with a warning) for lines of fewer than two tokens.
        /// </summary>
        private static string ExtractCommand(string msg)
        {
            string[] msgs = msg.Split(' ');
            if (msgs.Length < 2)
            {
                m_log.Warn("[IRCd] Dropped msg: " + msg);
                return null;
            }

            if (msgs[0].StartsWith(":"))
                return msgs[1];

            return msgs[0];
        }

        /// <summary>
        /// Returns the parameters following the command word; a trailing
        /// ':'-prefixed parameter (which may contain spaces) is collapsed
        /// into a single final element.
        /// </summary>
        private static string[] ExtractParameters(string msg)
        {
            string[] msgs = msg.Split(' ');
            List<string> parms = new List<string>(msgs.Length);

            bool foundCommand = false;
            string command = ExtractCommand(msg);

            for (int i = 0; i < msgs.Length; i++)
            {
                if (msgs[i] == command)
                {
                    foundCommand = true;
                    continue;
                }

                if (foundCommand != true)
                    continue;

                if (i != 0 && msgs[i].StartsWith(":"))
                {
                    // Trailing parameter: everything from here to end of line.
                    List<string> tmp = new List<string>();
                    for (int j = i; j < msgs.Length; j++)
                    {
                        tmp.Add(msgs[j]);
                    }
                    parms.Add(string.Join(" ", tmp.ToArray()));
                    break;
                }

                parms.Add(msgs[i]);
            }

            return parms.ToArray();
        }

        #region Implementation of IClientAPI

        // IRC clients have no real in-world position; park the avatar at the
        // centre of the region.
        public Vector3 StartPos
        {
            get { return new Vector3(((int)Constants.RegionSize * 0.5f), ((int)Constants.RegionSize * 0.5f), 50); }
            set { }
        }

        public bool TryGet<T>(out T iface)
        {
            iface = default(T);
            return false;
        }

        public T Get<T>()
        {
            return default(T);
        }

        public UUID AgentId
        {
            get { return m_agentID; }
        }

        public void Disconnect(string reason)
        {
            IRC_SendChannelPrivmsg("System", "You have been eaten by a grue. (" + reason + ")");

            m_connected = false;
            m_client.Close();
        }

        public void Disconnect()
        {
            IRC_SendChannelPrivmsg("System", "You have been eaten by a grue.");

            m_connected = false;
            m_client.Close();
            SceneAgent = null;
        }

        public UUID SessionId
        {
            get { return m_agentID; }
        }

        public UUID SecureSessionId
        {
            get { return m_agentID; }
        }

        public UUID ActiveGroupId
        {
            get { return UUID.Zero; }
        }

        public string ActiveGroupName
        {
            get { return "IRCd User"; }
        }

        public ulong ActiveGroupPowers
        {
            get { return 0; }
        }

        public ulong GetGroupPowers(UUID groupID)
        {
            return 0;
        }

        public bool IsGroupMember(UUID GroupID)
        {
            return false;
        }

        // First/Last name are split from the IRC 'real name' on the first space.
        public string FirstName
        {
            get
            {
                string[] names = m_username.Split(' ');
                return names[0];
            }
        }

        public string LastName
        {
            get
            {
                string[] names = m_username.Split(' ');
                if (names.Length > 1)
                    return names[1];
                return names[0];
            }
        }

        public IScene Scene
        {
            get { return m_scene; }
        }

        public int NextAnimationSequenceNumber
        {
            get { return 0; }
        }

        public string Name
        {
            get { return m_username; }
        }

        public bool IsActive
        {
            get { return true; }
            set { if (!value) Disconnect("IsActive Disconnected?"); }
        }

        public bool IsLoggingOut
        {
            get { return false; }
            set { }
        }

        public bool SendLogoutPacketWhenClosing
        {
            set { }
        }

        public uint CircuitCode
        {
            get { return (uint)Util.RandomClass.Next(0, int.MaxValue); }
        }

        public IPEndPoint RemoteEndPoint
        {
            get { return (IPEndPoint)m_client.Client.RemoteEndPoint; }
        }

        // Events required by IClientAPI. Only a handful are ever raised by
        // this IRC shim; the rest exist to satisfy the interface.
#pragma warning disable 67
        public event GenericMessage OnGenericMessage;
        public event ImprovedInstantMessage OnInstantMessage;
        public event ChatMessage OnChatFromClient;
        public event TextureRequest OnRequestTexture;
        public event RezObject OnRezObject;
        public event ModifyTerrain OnModifyTerrain;
        public event BakeTerrain OnBakeTerrain;
        public event EstateChangeInfo OnEstateChangeInfo;
        public event EstateManageTelehub OnEstateManageTelehub;
        public event CachedTextureRequest OnCachedTextureRequest;
        public event SetAppearance OnSetAppearance;
        public event AvatarNowWearing OnAvatarNowWearing;
        public event RezSingleAttachmentFromInv OnRezSingleAttachmentFromInv;
        public event RezMultipleAttachmentsFromInv OnRezMultipleAttachmentsFromInv;
        public event UUIDNameRequest OnDetachAttachmentIntoInv;
        public event ObjectAttach OnObjectAttach;
        public event ObjectDeselect OnObjectDetach;
        public event ObjectDrop OnObjectDrop;
        public event StartAnim OnStartAnim;
        public event StopAnim OnStopAnim;
        public event LinkObjects OnLinkObjects;
        public event DelinkObjects OnDelinkObjects;
        public event RequestMapBlocks OnRequestMapBlocks;
        public event RequestMapName OnMapNameRequest;
        public event TeleportLocationRequest OnTeleportLocationRequest;
        public event DisconnectUser OnDisconnectUser;
        public event RequestAvatarProperties OnRequestAvatarProperties;
        public event SetAlwaysRun OnSetAlwaysRun;
        public event TeleportLandmarkRequest OnTeleportLandmarkRequest;
        public event TeleportCancel OnTeleportCancel;
        public event DeRezObject OnDeRezObject;
        public event Action<IClientAPI> OnRegionHandShakeReply;
        public event GenericCall1 OnRequestWearables;
        public event Action<IClientAPI, bool> OnCompleteMovementToRegion;
        public event UpdateAgent OnPreAgentUpdate;
        public event UpdateAgent OnAgentUpdate;
        public event UpdateAgent OnAgentCameraUpdate;
        public event AgentRequestSit OnAgentRequestSit;
        public event AgentSit OnAgentSit;
        public event AvatarPickerRequest OnAvatarPickerRequest;
        public event Action<IClientAPI> OnRequestAvatarsData;
        public event AddNewPrim OnAddPrim;
        public event FetchInventory OnAgentDataUpdateRequest;
        public event TeleportLocationRequest OnSetStartLocationRequest;
        public event RequestGodlikePowers OnRequestGodlikePowers;
        public event GodKickUser OnGodKickUser;
        public event ObjectDuplicate OnObjectDuplicate;
        public event ObjectDuplicateOnRay OnObjectDuplicateOnRay;
        public event GrabObject OnGrabObject;
        public event DeGrabObject OnDeGrabObject;
        public event MoveObject OnGrabUpdate;
        public event SpinStart OnSpinStart;
        public event SpinObject OnSpinUpdate;
        public event SpinStop OnSpinStop;
        public event UpdateShape OnUpdatePrimShape;
        public event ObjectExtraParams OnUpdateExtraParams;
        public event ObjectRequest OnObjectRequest;
        public event ObjectSelect OnObjectSelect;
        public event ObjectDeselect OnObjectDeselect;
        public event GenericCall7 OnObjectDescription;
        public event GenericCall7 OnObjectName;
        public event GenericCall7 OnObjectClickAction;
        public event GenericCall7 OnObjectMaterial;
        public event RequestObjectPropertiesFamily OnRequestObjectPropertiesFamily;
        public event UpdatePrimFlags OnUpdatePrimFlags;
        public event UpdatePrimTexture OnUpdatePrimTexture;
        public event UpdateVector OnUpdatePrimGroupPosition;
        public event UpdateVector OnUpdatePrimSinglePosition;
        public event UpdatePrimRotation OnUpdatePrimGroupRotation;
        public event UpdatePrimSingleRotation OnUpdatePrimSingleRotation;
        public event UpdatePrimSingleRotationPosition OnUpdatePrimSingleRotationPosition;
        public event UpdatePrimGroupRotation OnUpdatePrimGroupMouseRotation;
        public event UpdateVector OnUpdatePrimScale;
        public event UpdateVector OnUpdatePrimGroupScale;
        public event StatusChange OnChildAgentStatus;
        public event GenericCall2 OnStopMovement;
        public event Action<UUID> OnRemoveAvatar;
        public event ObjectPermissions OnObjectPermissions;
        public event CreateNewInventoryItem OnCreateNewInventoryItem;
        public event LinkInventoryItem OnLinkInventoryItem;
        public event CreateInventoryFolder OnCreateNewInventoryFolder;
        public event UpdateInventoryFolder OnUpdateInventoryFolder;
        public event MoveInventoryFolder OnMoveInventoryFolder;
        public event FetchInventoryDescendents OnFetchInventoryDescendents;
        public event PurgeInventoryDescendents OnPurgeInventoryDescendents;
        public event FetchInventory OnFetchInventory;
        public event RequestTaskInventory OnRequestTaskInventory;
        public event UpdateInventoryItem OnUpdateInventoryItem;
        public event CopyInventoryItem OnCopyInventoryItem;
        public event MoveInventoryItem OnMoveInventoryItem;
        public event RemoveInventoryFolder OnRemoveInventoryFolder;
        public event RemoveInventoryItem OnRemoveInventoryItem;
        public event UDPAssetUploadRequest OnAssetUploadRequest;
        public event XferReceive OnXferReceive;
        public event RequestXfer OnRequestXfer;
        public event ConfirmXfer OnConfirmXfer;
        public event AbortXfer OnAbortXfer;
        public event RezScript OnRezScript;
        public event UpdateTaskInventory OnUpdateTaskInventory;
        public event MoveTaskInventory OnMoveTaskItem;
        public event RemoveTaskInventory OnRemoveTaskItem;
        public event RequestAsset OnRequestAsset;
        public event UUIDNameRequest OnNameFromUUIDRequest;
        public event ParcelAccessListRequest OnParcelAccessListRequest;
        public event ParcelAccessListUpdateRequest OnParcelAccessListUpdateRequest;
        public event ParcelPropertiesRequest OnParcelPropertiesRequest;
        public event ParcelDivideRequest OnParcelDivideRequest;
        public event ParcelJoinRequest OnParcelJoinRequest;
        public event ParcelPropertiesUpdateRequest OnParcelPropertiesUpdateRequest;
        public event ParcelSelectObjects OnParcelSelectObjects;
        public event ParcelObjectOwnerRequest OnParcelObjectOwnerRequest;
        public event ParcelAbandonRequest OnParcelAbandonRequest;
        public event ParcelGodForceOwner OnParcelGodForceOwner;
        public event ParcelReclaim OnParcelReclaim;
        public event ParcelReturnObjectsRequest OnParcelReturnObjectsRequest;
        public event ParcelDeedToGroup OnParcelDeedToGroup;
        public event RegionInfoRequest OnRegionInfoRequest;
        public event EstateCovenantRequest OnEstateCovenantRequest;
        public event FriendActionDelegate OnApproveFriendRequest;
        public event FriendActionDelegate OnDenyFriendRequest;
        public event FriendshipTermination OnTerminateFriendship;
        public event GrantUserFriendRights OnGrantUserRights;
        public event MoneyTransferRequest OnMoneyTransferRequest;
        public event EconomyDataRequest OnEconomyDataRequest;
        public event MoneyBalanceRequest OnMoneyBalanceRequest;
        public event UpdateAvatarProperties OnUpdateAvatarProperties;
        public event ParcelBuy OnParcelBuy;
        public event RequestPayPrice OnRequestPayPrice;
        public event ObjectSaleInfo OnObjectSaleInfo;
        public event ObjectBuy OnObjectBuy;
        public event BuyObjectInventory OnBuyObjectInventory;
        public event RequestTerrain OnRequestTerrain;
        public event RequestTerrain OnUploadTerrain;
        public event ObjectIncludeInSearch OnObjectIncludeInSearch;
        public event UUIDNameRequest OnTeleportHomeRequest;
        public event ScriptAnswer OnScriptAnswer;
        public event AgentSit OnUndo;
        public event AgentSit OnRedo;
        public event LandUndo OnLandUndo;
        public event ForceReleaseControls OnForceReleaseControls;
        public event GodLandStatRequest OnLandStatRequest;
        public event DetailedEstateDataRequest OnDetailedEstateDataRequest;
        public event SetEstateFlagsRequest OnSetEstateFlagsRequest;
        public event SetEstateTerrainBaseTexture OnSetEstateTerrainBaseTexture;
        public event SetEstateTerrainDetailTexture OnSetEstateTerrainDetailTexture;
        public event SetEstateTerrainTextureHeights OnSetEstateTerrainTextureHeights;
        public event CommitEstateTerrainTextureRequest OnCommitEstateTerrainTextureRequest;
        public event SetRegionTerrainSettings OnSetRegionTerrainSettings;
        public event EstateRestartSimRequest OnEstateRestartSimRequest;
        public event EstateChangeCovenantRequest OnEstateChangeCovenantRequest;
        public event UpdateEstateAccessDeltaRequest OnUpdateEstateAccessDeltaRequest;
        public event SimulatorBlueBoxMessageRequest OnSimulatorBlueBoxMessageRequest;
        public event EstateBlueBoxMessageRequest OnEstateBlueBoxMessageRequest;
        public event EstateDebugRegionRequest OnEstateDebugRegionRequest;
        public event EstateTeleportOneUserHomeRequest OnEstateTeleportOneUserHomeRequest;
        public event EstateTeleportAllUsersHomeRequest OnEstateTeleportAllUsersHomeRequest;
        public event UUIDNameRequest OnUUIDGroupNameRequest;
        public event RegionHandleRequest OnRegionHandleRequest;
        public event ParcelInfoRequest OnParcelInfoRequest;
        public event RequestObjectPropertiesFamily OnObjectGroupRequest;
        public event ScriptReset OnScriptReset;
        public event GetScriptRunning OnGetScriptRunning;
        public event SetScriptRunning OnSetScriptRunning;
        public event Action<Vector3, bool, bool> OnAutoPilotGo;
        public event TerrainUnacked OnUnackedTerrain;
        public event ActivateGesture OnActivateGesture;
        public event DeactivateGesture OnDeactivateGesture;
        public event ObjectOwner OnObjectOwner;
        public event DirPlacesQuery OnDirPlacesQuery;
        public event DirFindQuery OnDirFindQuery;
        public event DirLandQuery OnDirLandQuery;
        public event DirPopularQuery OnDirPopularQuery;
        public event DirClassifiedQuery OnDirClassifiedQuery;
        public event EventInfoRequest OnEventInfoRequest;
        public event ParcelSetOtherCleanTime OnParcelSetOtherCleanTime;
        public event MapItemRequest OnMapItemRequest;
        public event OfferCallingCard OnOfferCallingCard;
        public event AcceptCallingCard OnAcceptCallingCard;
        public event DeclineCallingCard OnDeclineCallingCard;
        public event SoundTrigger OnSoundTrigger;
        public event StartLure OnStartLure;
        public event TeleportLureRequest OnTeleportLureRequest;
        public event NetworkStats OnNetworkStatsUpdate;
        public event ClassifiedInfoRequest OnClassifiedInfoRequest;
        public event ClassifiedInfoUpdate OnClassifiedInfoUpdate;
        public event ClassifiedDelete OnClassifiedDelete;
        public event ClassifiedDelete OnClassifiedGodDelete;
        public event EventNotificationAddRequest OnEventNotificationAddRequest;
        public event EventNotificationRemoveRequest OnEventNotificationRemoveRequest;
        public event EventGodDelete OnEventGodDelete;
        public event ParcelDwellRequest OnParcelDwellRequest;
        public event UserInfoRequest OnUserInfoRequest;
        public event UpdateUserInfo OnUpdateUserInfo;
        public event RetrieveInstantMessages OnRetrieveInstantMessages;
        public event PickDelete OnPickDelete;
        public event PickGodDelete OnPickGodDelete;
        public event PickInfoUpdate OnPickInfoUpdate;
        public event AvatarNotesUpdate OnAvatarNotesUpdate;
        public event MuteListRequest OnMuteListRequest;
        public event AvatarInterestUpdate OnAvatarInterestUpdate;
        public event PlacesQuery OnPlacesQuery;
        public event FindAgentUpdate OnFindAgent;
        public event TrackAgentUpdate OnTrackAgent;
        public event NewUserReport OnUserReport;
        public event SaveStateHandler OnSaveState;
        public event GroupAccountSummaryRequest OnGroupAccountSummaryRequest;
        public event GroupAccountDetailsRequest OnGroupAccountDetailsRequest;
        public event GroupAccountTransactionsRequest OnGroupAccountTransactionsRequest;
        public event FreezeUserUpdate OnParcelFreezeUser;
        public event EjectUserUpdate OnParcelEjectUser;
        public event ParcelBuyPass OnParcelBuyPass;
        public event ParcelGodMark OnParcelGodMark;
        public event GroupActiveProposalsRequest OnGroupActiveProposalsRequest;
        public event GroupVoteHistoryRequest OnGroupVoteHistoryRequest;
        public event SimWideDeletesDelegate OnSimWideDeletes;
        public event SendPostcard OnSendPostcard;
        public event MuteListEntryUpdate OnUpdateMuteListEntry;
        public event MuteListEntryRemove OnRemoveMuteListEntry;
        public event GodlikeMessage onGodlikeMessage;
        public event GodUpdateRegionInfoUpdate OnGodUpdateRegionInfoUpdate;
#pragma warning restore 67

        public int DebugPacketLevel { get; set; }

        public void InPacket(object NewPack)
        {
        }

        public void ProcessInPacket(Packet NewPack)
        {
        }

        public void Close()
        {
            Close(false);
        }

        public void Close(bool force)
        {
            Disconnect();
        }

        public void Kick(string message)
        {
            Disconnect(message);
        }

        /// <summary>Adds this pseudo-client to the scene as a user agent.</summary>
        public void Start()
        {
            m_scene.AddNewAgent(this, PresenceType.User);

            // Mimicking LLClientView which gets always set appearance from client.
            AvatarAppearance appearance;
            m_scene.GetAvatarAppearance(this, out appearance);
            OnSetAppearance(this, appearance.Texture, (byte[])appearance.VisualParams.Clone(), new List<CachedTextureRequestArg>());
        }

        public void SendRegionHandshake(RegionInfo regionInfo, RegionHandshakeArgs args)
        {
            m_log.Info("[IRCd ClientStack] Completing Handshake to Region");

            if (OnRegionHandShakeReply != null)
            {
                OnRegionHandShakeReply(this);
            }

            if (OnCompleteMovementToRegion != null)
            {
                OnCompleteMovementToRegion(this, true);
            }
        }

        public void Stop()
        {
            Disconnect();
        }

        // The remaining IClientAPI members are viewer-protocol operations that
        // have no IRC equivalent; they are deliberate no-op stubs.
        public void SendWearables(AvatarWearable[] wearables, int serial)
        {
        }

        public void SendAppearance(UUID agentID, byte[] visualParams, byte[] textureEntry)
        {
        }

        public void SendCachedTextureResponse(ISceneEntity avatar, int serial, List<CachedTextureResponseArg> cachedTextures)
        {
        }

        public void SendStartPingCheck(byte seq)
        {
        }

        public void SendKillObject(List<uint> localID)
        {
        }

        public void SendAnimations(UUID[] animID, int[] seqs, UUID sourceAgentId, UUID[] objectIDs)
        {
        }

        /// <summary>Forwards audible, non-empty region chat to the IRC channel.</summary>
        public void SendChatMessage(
            string message, byte type, Vector3 fromPos, string fromName,
            UUID fromAgentID, UUID ownerID, byte source, byte audible)
        {
            if (audible > 0 && message.Length > 0)
                IRC_SendChannelPrivmsg(fromName, message);
        }

        private void IRC_SendChannelPrivmsg(string fromName, string message)
        {
            SendCommand(":" + fromName.Replace(" ", "") + " PRIVMSG " + IrcRegionName + " :" + message);
        }

        public void SendInstantMessage(GridInstantMessage im)
        {
            // TODO
        }

        public void SendGenericMessage(string method, UUID invoice, List<string> message)
        {
        }

        public void SendGenericMessage(string method, UUID invoice, List<byte[]> message)
        {
        }

        public void SendLayerData(float[] map)
        {
        }

        public void SendLayerData(int px, int py, float[] map)
        {
        }

        public void SendWindData(Vector2[] windSpeeds)
        {
        }

        public void SendCloudData(float[] cloudCover)
        {
        }

        public void MoveAgentIntoRegion(RegionInfo regInfo, Vector3 pos, Vector3 look)
        {
        }

        public void InformClientOfNeighbour(ulong neighbourHandle, IPEndPoint neighbourExternalEndPoint)
        {
        }

        public AgentCircuitData RequestClientInfo()
        {
            return new AgentCircuitData();
        }

        public void CrossRegion(ulong newRegionHandle, Vector3 pos, Vector3 lookAt, IPEndPoint newRegionExternalEndPoint, string capsURL)
        {
        }

        public void SendMapBlock(List<MapBlockData> mapBlocks, uint flag)
        {
        }

        public void SendLocalTeleport(Vector3 position, Vector3 lookAt, uint flags)
        {
        }

        public void SendRegionTeleport(ulong regionHandle, byte simAccess, IPEndPoint regionExternalEndPoint, uint locationID, uint flags, string capsURL)
        {
        }

        public void SendTeleportFailed(string reason)
        {
        }

        public void SendTeleportStart(uint flags)
        {
        }

        public void SendTeleportProgress(uint flags, string message)
        {
        }

        public void SendMoneyBalance(UUID transaction, bool success, byte[] description, int balance, int transactionType, UUID sourceID, bool sourceIsGroup, UUID destID, bool destIsGroup, int amount, string item)
        {
        }

        public void SendPayPrice(UUID objectID, int[] payPrice)
        {
        }

        public void SendCoarseLocationUpdate(List<UUID> users, List<Vector3> CoarseLocations)
        {
        }

        public void SendAvatarDataImmediate(ISceneEntity avatar)
        {
        }

        public void SendEntityUpdate(ISceneEntity entity, PrimUpdateFlags updateFlags)
        {
        }

        public void ReprioritizeUpdates()
        {
        }

        public void FlushPrimUpdates()
        {
        }

        public void SendInventoryFolderDetails(UUID ownerID, UUID folderID, List<InventoryItemBase> items, List<InventoryFolderBase> folders, int version, bool fetchFolders, bool fetchItems)
        {
        }

        public void SendInventoryItemDetails(UUID ownerID, InventoryItemBase item)
        {
        }

        public void SendInventoryItemCreateUpdate(InventoryItemBase Item, uint callbackId)
        {
        }

        public void SendRemoveInventoryItem(UUID itemID)
        {
        }

        public void SendTakeControls(int controls, bool passToAgent, bool TakeControls)
        {
        }

        public void SendTaskInventory(UUID taskID, short serial, byte[] fileName)
        {
        }

        public void SendBulkUpdateInventory(InventoryNodeBase node)
        {
        }

        public void SendXferPacket(ulong xferID, uint packet, byte[] data)
        {
        }
public void SendAbortXferPacket(ulong xferID) { } public void SendEconomyData(float EnergyEfficiency, int ObjectCapacity, int ObjectCount, int PriceEnergyUnit, int PriceGroupCreate, int PriceObjectClaim, float PriceObjectRent, float PriceObjectScaleFactor, int PriceParcelClaim, float PriceParcelClaimFactor, int PriceParcelRent, int PricePublicObjectDecay, int PricePublicObjectDelete, int PriceRentLight, int PriceUpload, int TeleportMinPrice, float TeleportPriceExponent) { } public void SendAvatarPickerReply(AvatarPickerReplyAgentDataArgs AgentData, List<AvatarPickerReplyDataArgs> Data) { } public void SendAgentDataUpdate(UUID agentid, UUID activegroupid, string firstname, string lastname, ulong grouppowers, string groupname, string grouptitle) { } public void SendPreLoadSound(UUID objectID, UUID ownerID, UUID soundID) { } public void SendPlayAttachedSound(UUID soundID, UUID objectID, UUID ownerID, float gain, byte flags) { } public void SendTriggeredSound(UUID soundID, UUID ownerID, UUID objectID, UUID parentID, ulong handle, Vector3 position, float gain) { } public void SendAttachedSoundGainChange(UUID objectID, float gain) { } public void SendNameReply(UUID profileId, string firstname, string lastname) { } public void SendAlertMessage(string message) { IRC_SendChannelPrivmsg("Alert",message); } public void SendAgentAlertMessage(string message, bool modal) { } public void SendLoadURL(string objectname, UUID objectID, UUID ownerID, bool groupOwned, string message, string url) { IRC_SendChannelPrivmsg(objectname,url); } public void SendDialog(string objectname, UUID objectID, UUID ownerID, string ownerFirstName, string ownerLastName, string msg, UUID textureID, int ch, string[] buttonlabels) { } public void SendSunPos(Vector3 sunPos, Vector3 sunVel, ulong CurrentTime, uint SecondsPerSunCycle, uint SecondsPerYear, float OrbitalPosition) { } public void SendViewerEffect(ViewerEffectPacket.EffectBlock[] effectBlocks) { } public void SendViewerTime(int phase) { } public 
void SendAvatarProperties(UUID avatarID, string aboutText, string bornOn, byte[] charterMember, string flAbout, uint flags, UUID flImageID, UUID imageID, string profileURL, UUID partnerID) { } public void SendScriptQuestion(UUID taskID, string taskName, string ownerName, UUID itemID, int question) { } public void SendHealth(float health) { } public void SendEstateList(UUID invoice, int code, UUID[] Data, uint estateID) { } public void SendBannedUserList(UUID invoice, EstateBan[] banlist, uint estateID) { } public void SendRegionInfoToEstateMenu(RegionInfoForEstateMenuArgs args) { } public void SendEstateCovenantInformation(UUID covenant) { } public void SendDetailedEstateData(UUID invoice, string estateName, uint estateID, uint parentEstate, uint estateFlags, uint sunPosition, UUID covenant, uint covenantChanged, string abuseEmail, UUID estateOwner) { } public void SendLandProperties(int sequence_id, bool snap_selection, int request_result, ILandObject lo, float simObjectBonusFactor, int parcelObjectCapacity, int simObjectCapacity, uint regionFlags) { } public void SendLandAccessListData(List<LandAccessEntry> accessList, uint accessFlag, int localLandID) { } public void SendForceClientSelectObjects(List<uint> objectIDs) { } public void SendCameraConstraint(Vector4 ConstraintPlane) { } public void SendLandObjectOwners(LandData land, List<UUID> groups, Dictionary<UUID, int> ownersAndCount) { } public void SendLandParcelOverlay(byte[] data, int sequence_id) { } public void SendParcelMediaCommand(uint flags, ParcelMediaCommandEnum command, float time) { } public void SendParcelMediaUpdate(string mediaUrl, UUID mediaTextureID, byte autoScale, string mediaType, string mediaDesc, int mediaWidth, int mediaHeight, byte mediaLoop) { } public void SendAssetUploadCompleteMessage(sbyte AssetType, bool Success, UUID AssetFullID) { } public void SendConfirmXfer(ulong xferID, uint PacketID) { } public void SendXferRequest(ulong XferID, short AssetType, UUID vFileID, byte FilePath, 
byte[] FileName) { } public void SendInitiateDownload(string simFileName, string clientFileName) { } public void SendImageFirstPart(ushort numParts, UUID ImageUUID, uint ImageSize, byte[] ImageData, byte imageCodec) { } public void SendImageNextPart(ushort partNumber, UUID imageUuid, byte[] imageData) { } public void SendImageNotFound(UUID imageid) { } public void SendShutdownConnectionNotice() { // TODO } public void SendSimStats(SimStats stats) { } public void SendObjectPropertiesFamilyData(ISceneEntity Entity, uint RequestFlags) { } public void SendObjectPropertiesReply(ISceneEntity entity) { } public void SendAgentOffline(UUID[] agentIDs) { } public void SendAgentOnline(UUID[] agentIDs) { } public void SendSitResponse(UUID TargetID, Vector3 OffsetPos, Quaternion SitOrientation, bool autopilot, Vector3 CameraAtOffset, Vector3 CameraEyeOffset, bool ForceMouseLook) { } public void SendAdminResponse(UUID Token, uint AdminLevel) { } public void SendGroupMembership(GroupMembershipData[] GroupMembership) { } public void SendGroupNameReply(UUID groupLLUID, string GroupName) { } public void SendJoinGroupReply(UUID groupID, bool success) { } public void SendEjectGroupMemberReply(UUID agentID, UUID groupID, bool success) { } public void SendLeaveGroupReply(UUID groupID, bool success) { } public void SendCreateGroupReply(UUID groupID, bool success, string message) { } public void SendLandStatReply(uint reportType, uint requestFlags, uint resultCount, LandStatReportItem[] lsrpia) { } public void SendScriptRunningReply(UUID objectID, UUID itemID, bool running) { } public void SendAsset(AssetRequestToClient req) { } public void SendTexture(AssetBase TextureAsset) { } public virtual void SetChildAgentThrottle(byte[] throttle) { } public byte[] GetThrottlesPacked(float multiplier) { return new byte[0]; } public event ViewerEffectEventHandler OnViewerEffect; public event Action<IClientAPI> OnLogout; public event Action<IClientAPI> OnConnectionClosed; public void 
SendBlueBoxMessage(UUID FromAvatarID, string FromAvatarName, string Message) { IRC_SendChannelPrivmsg(FromAvatarName, Message); } public void SendLogoutPacket() { Disconnect(); } public ClientInfo GetClientInfo() { return new ClientInfo(); } public void SetClientInfo(ClientInfo info) { } public void SetClientOption(string option, string value) { } public string GetClientOption(string option) { return String.Empty; } public void Terminate() { Disconnect(); } public void SendSetFollowCamProperties(UUID objectID, SortedDictionary<int, float> parameters) { } public void SendClearFollowCamProperties(UUID objectID) { } public void SendRegionHandle(UUID regoinID, ulong handle) { } public void SendParcelInfo(RegionInfo info, LandData land, UUID parcelID, uint x, uint y) { } public void SendScriptTeleportRequest(string objName, string simName, Vector3 pos, Vector3 lookAt) { } public void SendDirPlacesReply(UUID queryID, DirPlacesReplyData[] data) { } public void SendDirPeopleReply(UUID queryID, DirPeopleReplyData[] data) { } public void SendDirEventsReply(UUID queryID, DirEventsReplyData[] data) { } public void SendDirGroupsReply(UUID queryID, DirGroupsReplyData[] data) { } public void SendDirClassifiedReply(UUID queryID, DirClassifiedReplyData[] data) { } public void SendDirLandReply(UUID queryID, DirLandReplyData[] data) { } public void SendDirPopularReply(UUID queryID, DirPopularReplyData[] data) { } public void SendEventInfoReply(EventData info) { } public void SendTelehubInfo(UUID ObjectID, string ObjectName, Vector3 ObjectPos, Quaternion ObjectRot, List<Vector3> SpawnPoint) { } public void SendMapItemReply(mapItemReply[] replies, uint mapitemtype, uint flags) { } public void SendAvatarGroupsReply(UUID avatarID, GroupMembershipData[] data) { } public void SendOfferCallingCard(UUID srcID, UUID transactionID) { } public void SendAcceptCallingCard(UUID transactionID) { } public void SendDeclineCallingCard(UUID transactionID) { } public void SendTerminateFriend(UUID 
exFriendID) { } public void SendAvatarClassifiedReply(UUID targetID, UUID[] classifiedID, string[] name) { } public void SendClassifiedInfoReply(UUID classifiedID, UUID creatorID, uint creationDate, uint expirationDate, uint category, string name, string description, UUID parcelID, uint parentEstate, UUID snapshotID, string simName, Vector3 globalPos, string parcelName, byte classifiedFlags, int price) { } public void SendAgentDropGroup(UUID groupID) { } public void RefreshGroupMembership() { } public void SendAvatarNotesReply(UUID targetID, string text) { } public void SendAvatarPicksReply(UUID targetID, Dictionary<UUID, string> picks) { } public void SendPickInfoReply(UUID pickID, UUID creatorID, bool topPick, UUID parcelID, string name, string desc, UUID snapshotID, string user, string originalName, string simName, Vector3 posGlobal, int sortOrder, bool enabled) { } public void SendAvatarClassifiedReply(UUID targetID, Dictionary<UUID, string> classifieds) { } public void SendAvatarInterestUpdate(IClientAPI client, uint wantmask, string wanttext, uint skillsmask, string skillstext, string languages) { } public void SendParcelDwellReply(int localID, UUID parcelID, float dwell) { } public void SendUserInfoReply(bool imViaEmail, bool visible, string email) { } public void SendUseCachedMuteList() { } public void SendMuteListUpdate(string filename) { } public bool AddGenericPacketHandler(string MethodName, GenericMessage handler) { return true; } #endregion public void SendRebakeAvatarTextures(UUID textureID) { } public void SendAvatarInterestsReply(UUID avatarID, uint wantMask, string wantText, uint skillsMask, string skillsText, string languages) { } public void SendGroupAccountingDetails(IClientAPI sender,UUID groupID, UUID transactionID, UUID sessionID, int amt) { } public void SendGroupAccountingSummary(IClientAPI sender,UUID groupID, uint moneyAmt, int totalTier, int usedTier) { } public void SendGroupTransactionsSummaryDetails(IClientAPI sender,UUID groupID, 
UUID transactionID, UUID sessionID,int amt) { } public void SendGroupVoteHistory(UUID groupID, UUID transactionID, GroupVoteHistory[] Votes) { } public void SendGroupActiveProposals(UUID groupID, UUID transactionID, GroupActiveProposals[] Proposals) { } public void SendChangeUserRights(UUID agentID, UUID friendID, int rights) { } public void SendTextBoxRequest(string message, int chatChannel, string objectname, UUID ownerID, string ownerFirstName, string ownerLastName, UUID objectId) { } public void SendAgentTerseUpdate(ISceneEntity presence) { } public void SendPlacesReply(UUID queryID, UUID transactionID, PlacesReplyData[] data) { } public void SendPartPhysicsProprieties(ISceneEntity entity) { } } }
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;

namespace Lewis.SST.DTSPackageClass
{
    #region DTSColumn class used in Column Collection
    /// <summary>
    /// Plain-data implementation of the <c>DTS.Column</c> interface, used to
    /// describe a single column of a DTS package source or destination.
    /// Only holds values; <see cref="Properties"/> and <see cref="Parent"/> are
    /// interface placeholders that always return <c>null</c>.
    /// </summary>
    public class DTSColumn : DTS.Column
    {
        private int iNumericScale;
        private int iType;
        private int iDataType;
        private int iPrecision;
        private int iOrdinal;
        private int iFlags;
        private int iSize;
        private bool bNullable;
        private string sName;
        private object oColumnID;

        #region Column Members

        /// <summary>
        /// Gets or sets the Column type.
        /// </summary>
        /// <value>The type.</value>
        public int Type
        {
            get { return iType; }
            set { iType = value; }
        }

        /// <summary>
        /// Gets or sets the Column numeric scale.
        /// </summary>
        /// <value>The numeric scale.</value>
        public int NumericScale
        {
            get { return iNumericScale; }
            set { iNumericScale = value; }
        }

        /// <summary>
        /// Gets or sets the type of the Column data.
        /// </summary>
        /// <value>The type of the data.</value>
        public int DataType
        {
            get { return iDataType; }
            set { iDataType = value; }
        }

        /// <summary>
        /// Gets or sets the Column precision.
        /// </summary>
        /// <value>The precision.</value>
        public int Precision
        {
            get { return iPrecision; }
            set { iPrecision = value; }
        }

        /// <summary>
        /// Gets or sets the Column ordinal.
        /// </summary>
        /// <value>The ordinal.</value>
        public int Ordinal
        {
            get { return iOrdinal; }
            set { iOrdinal = value; }
        }

        /// <summary>
        /// Gets or sets the Column flags.
        /// </summary>
        /// <value>The flags.</value>
        public int Flags
        {
            get { return iFlags; }
            set { iFlags = value; }
        }

        /// <summary>
        /// Gets or sets the column ID.
        /// </summary>
        /// <value>The column ID.</value>
        public object ColumnID
        {
            get { return oColumnID; }
            set { oColumnID = value; }
        }

        /// <summary>
        /// Gets or sets the Column size.
        /// </summary>
        /// <value>The size.</value>
        public int Size
        {
            get { return iSize; }
            set { iSize = value; }
        }

        /// <summary>
        /// Gets or sets the Column name.
        /// </summary>
        /// <value>The name.</value>
        public string Name
        {
            get { return sName; }
            set { sName = value; }
        }

        // This is just a placeholder for the implementation;
        // we don't need it for the class object as we are using it.
        /// <summary>
        /// Gets the DTS Column properties. Always <c>null</c> in this implementation.
        /// </summary>
        /// <value>The properties.</value>
        public DTS.Properties Properties
        {
            get { return null; }
        }

        /// <summary>
        /// Gets or sets a value indicating whether this <see cref="DTSColumn"/> is nullable.
        /// </summary>
        /// <value><c>true</c> if nullable; otherwise, <c>false</c>.</value>
        public bool Nullable
        {
            get { return bNullable; }
            set { bNullable = value; }
        }

        // This is just a placeholder for the implementation;
        // we don't need it for the class object as we are using it.
        /// <summary>
        /// Gets the Column parent object. Always <c>null</c> in this implementation.
        /// </summary>
        /// <value>The parent.</value>
        public DTS.IDTSStdObject Parent
        {
            get { return null; }
        }

        #endregion
    }
    #endregion

    #region DTSColumns Collection class
    /// <summary>
    /// Strongly-typed collection of <see cref="DTSColumn"/> objects built on
    /// <see cref="CollectionBase"/>. The On* hooks reject any element that is
    /// not a <see cref="DTSColumn"/>.
    /// </summary>
    public class DTSColumns : CollectionBase
    {
        /// <summary>
        /// Gets or sets the <see cref="DTSColumn"/> at the specified index.
        /// </summary>
        /// <value>The DTSColumn value.</value>
        public DTSColumn this[int index]
        {
            get { return ((DTSColumn)List[index]); }
            set { List[index] = value; }
        }

        /// <summary>
        /// Adds the specified value.
        /// </summary>
        /// <param name="value">The value.</param>
        /// <returns>The index at which the value was added.</returns>
        public int Add(DTSColumn value)
        {
            return (List.Add(value));
        }

        /// <summary>
        /// The Index of the DTS Column object.
        /// </summary>
        /// <param name="value">The value.</param>
        /// <returns>Returns the index of the DTS Column.</returns>
        public int IndexOf(DTSColumn value)
        {
            return (List.IndexOf(value));
        }

        /// <summary>
        /// Inserts a DTS Column at the specified index.
        /// </summary>
        /// <param name="index">The index.</param>
        /// <param name="value">The value.</param>
        public void Insert(int index, DTSColumn value)
        {
            List.Insert(index, value);
        }

        /// <summary>
        /// Removes the specified DTS Column value.
        /// </summary>
        /// <param name="value">The value.</param>
        public void Remove(DTSColumn value)
        {
            List.Remove(value);
        }

        /// <summary>
        /// Determines whether the collection [contains] [the specified DTS Column value].
        /// </summary>
        /// <param name="value">The value.</param>
        /// <returns>
        /// <c>true</c> if [contains] [the specified value]; otherwise, <c>false</c>.
        /// </returns>
        public bool Contains(DTSColumn value)
        {
            return (List.Contains(value));
        }

        /// <summary>
        /// Performs additional custom processes before inserting a new element into the
        /// <see cref="T:System.Collections.CollectionBase"/> instance.
        /// </summary>
        /// <param name="index">The zero-based index at which to insert <paramref name="value"/>.</param>
        /// <param name="value">The new value of the element at <paramref name="index"/>.</param>
        protected override void OnInsert(int index, Object value)
        {
            // BUG FIX: the original compared value.GetType() against
            // Type.GetType("DTSColumn"). Type.GetType needs a namespace-qualified
            // name, so it returned null and the guard rejected EVERY insert.
            // An 'is' check expresses the intended type test correctly.
            if (!(value is DTSColumn))
                throw new ArgumentException("value must be of type DTSColumn.", "value");
        }

        /// <summary>
        /// Performs additional custom processes when removing an element from the
        /// <see cref="T:System.Collections.CollectionBase"/> instance.
        /// </summary>
        /// <param name="index">The zero-based index at which <paramref name="value"/> can be found.</param>
        /// <param name="value">The value of the element to remove from <paramref name="index"/>.</param>
        protected override void OnRemove(int index, Object value)
        {
            // See OnInsert: 'is' check replaces the broken Type.GetType comparison.
            if (!(value is DTSColumn))
                throw new ArgumentException("value must be of type DTSColumn.", "value");
        }

        /// <summary>
        /// Performs additional custom processes before setting a value in the
        /// <see cref="T:System.Collections.CollectionBase"/> instance.
        /// </summary>
        /// <param name="index">The zero-based index at which <paramref name="oldValue"/> can be found.</param>
        /// <param name="oldValue">The value to replace with <paramref name="newValue"/>.</param>
        /// <param name="newValue">The new value of the element at <paramref name="index"/>.</param>
        protected override void OnSet(int index, Object oldValue, Object newValue)
        {
            // See OnInsert: 'is' check replaces the broken Type.GetType comparison.
            if (!(newValue is DTSColumn))
                throw new ArgumentException("newValue must be of type DTSColumn.", "newValue");
        }

        /// <summary>
        /// Performs additional custom processes when validating a value.
        /// </summary>
        /// <param name="value">The object to validate.</param>
        protected override void OnValidate(Object value)
        {
            // See OnInsert: 'is' check replaces the broken Type.GetType comparison.
            if (!(value is DTSColumn))
                throw new ArgumentException("value must be of type DTSColumn.");
        }
    }
    #endregion
}
// Copyright 2011 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.

using System;
using System.Collections.Generic;
using NodaTime.Calendars;
using NodaTime.Globalization;
using NodaTime.Properties;
using NodaTime.Text.Patterns;

namespace NodaTime.Text
{
    /// <summary>
    /// Parser for patterns of <see cref="LocalDate"/> values.
    /// </summary>
    internal sealed class LocalDatePatternParser : IPatternParser<LocalDate>
    {
        // Template value supplying defaults for fields the pattern doesn't specify.
        private readonly LocalDate templateValue;

        /// <summary>
        /// Maximum two-digit-year in the template to treat as the current century.
        /// (One day we may want to make this configurable, but it feels very low
        /// priority.)
        /// </summary>
        private const int TwoDigitYearMax = 30;

        // Maps each custom-pattern character to the handler that parses/formats it.
        private static readonly Dictionary<char, CharacterHandler<LocalDate, LocalDateParseBucket>> PatternCharacterHandlers =
            new Dictionary<char, CharacterHandler<LocalDate, LocalDateParseBucket>>
        {
            { '%', SteppedPatternBuilder<LocalDate, LocalDateParseBucket>.HandlePercent },
            { '\'', SteppedPatternBuilder<LocalDate, LocalDateParseBucket>.HandleQuote },
            { '\"', SteppedPatternBuilder<LocalDate, LocalDateParseBucket>.HandleQuote },
            { '\\', SteppedPatternBuilder<LocalDate, LocalDateParseBucket>.HandleBackslash },
            { '/', (pattern, builder) => builder.AddLiteral(builder.FormatInfo.DateSeparator, ParseResult<LocalDate>.DateSeparatorMismatch) },
            { 'y', DatePatternHelper.CreateYearOfEraHandler<LocalDate, LocalDateParseBucket>(value => value.YearOfEra, (bucket, value) => bucket.YearOfEra = value) },
            { 'u', SteppedPatternBuilder<LocalDate, LocalDateParseBucket>.HandlePaddedField
                       (4, PatternFields.Year, -9999, 9999, value => value.Year, (bucket, value) => bucket.Year = value) },
            { 'M', DatePatternHelper.CreateMonthOfYearHandler<LocalDate, LocalDateParseBucket>
                       (value => value.Month, (bucket, value) => bucket.MonthOfYearText = value, (bucket, value) => bucket.MonthOfYearNumeric = value) },
            { 'd', DatePatternHelper.CreateDayHandler<LocalDate, LocalDateParseBucket>
                       (value => value.Day, value => value.DayOfWeek, (bucket, value) => bucket.DayOfMonth = value, (bucket, value) => bucket.DayOfWeek = value) },
            { 'c', DatePatternHelper.CreateCalendarHandler<LocalDate, LocalDateParseBucket>(value => value.Calendar, (bucket, value) => bucket.Calendar = value) },
            { 'g', DatePatternHelper.CreateEraHandler<LocalDate, LocalDateParseBucket>(date => date.Era, bucket => bucket) },
        };

        internal LocalDatePatternParser(LocalDate templateValue)
        {
            this.templateValue = templateValue;
        }

        // Note: public to implement the interface. It does no harm, and it's simpler than using explicit
        // interface implementation.
        public IPattern<LocalDate> ParsePattern(string patternText, NodaFormatInfo formatInfo)
        {
            // Nullity check is performed in LocalDatePattern.
            if (patternText.Length == 0)
            {
                throw new InvalidPatternException(Messages.Parse_FormatStringEmpty);
            }

            // A single character is a standard pattern; expand it to the custom
            // pattern it stands for before building.
            if (patternText.Length == 1)
            {
                char patternCharacter = patternText[0];
                patternText = ExpandStandardFormatPattern(patternCharacter, formatInfo);
                if (patternText == null)
                {
                    throw new InvalidPatternException(Messages.Parse_UnknownStandardFormat, patternCharacter, typeof(LocalDate));
                }
            }

            var patternBuilder = new SteppedPatternBuilder<LocalDate, LocalDateParseBucket>(formatInfo,
                () => new LocalDateParseBucket(templateValue));
            patternBuilder.ParseCustomPattern(patternText, PatternCharacterHandlers);
            patternBuilder.ValidateUsedFields();
            return patternBuilder.Build(templateValue);
        }

        // Maps a standard single-character pattern to the culture's custom pattern,
        // or null for an unknown character (turned into an exception by the caller).
        private string ExpandStandardFormatPattern(char patternCharacter, NodaFormatInfo formatInfo)
        {
            switch (patternCharacter)
            {
                case 'd':
                    return formatInfo.DateTimeFormat.ShortDatePattern;
                case 'D':
                    return formatInfo.DateTimeFormat.LongDatePattern;
                default:
                    // Will be turned into an exception.
                    return null;
            }
        }

        /// <summary>
        /// Bucket to put parsed values in, ready for later result calculation. This type is also used
        /// by LocalDateTimePattern to store and calculate values.
        /// </summary>
        internal sealed class LocalDateParseBucket : ParseBucket<LocalDate>
        {
            internal readonly LocalDate TemplateValue;

            internal CalendarSystem Calendar;
            internal int Year;
            private Era Era;
            internal int YearOfEra;
            internal int MonthOfYearNumeric;
            internal int MonthOfYearText;
            internal int DayOfMonth;
            internal int DayOfWeek;

            internal LocalDateParseBucket(LocalDate templateValue)
            {
                this.TemplateValue = templateValue;
                // Only fetch this once.
                this.Calendar = templateValue.Calendar;
            }

            // Attempts to match an era name at the cursor; stores the matched era.
            // Note: returns null on SUCCESS; a non-null value is the failure result.
            internal ParseResult<TResult> ParseEra<TResult>(NodaFormatInfo formatInfo, ValueCursor cursor)
            {
                var compareInfo = formatInfo.CompareInfo;
                foreach (var era in Calendar.Eras)
                {
                    foreach (string eraName in formatInfo.GetEraNames(era))
                    {
                        if (cursor.MatchCaseInsensitive(eraName, compareInfo, true))
                        {
                            Era = era;
                            return null;
                        }
                    }
                }
                return ParseResult<TResult>.MismatchedText(cursor, 'g');
            }

            internal override ParseResult<LocalDate> CalculateValue(PatternFields usedFields, string text)
            {
                if (usedFields.HasAny(PatternFields.EmbeddedDate))
                {
                    return ParseResult<LocalDate>.ForValue(new LocalDate(Year, MonthOfYearNumeric, DayOfMonth, Calendar));
                }
                // This will set Year if necessary
                ParseResult<LocalDate> failure = DetermineYear(usedFields, text);
                if (failure != null)
                {
                    return failure;
                }
                // This will set MonthOfYearNumeric if necessary
                failure = DetermineMonth(usedFields, text);
                if (failure != null)
                {
                    return failure;
                }

                // Day falls back to the template value when the pattern didn't supply it.
                int day = usedFields.HasAny(PatternFields.DayOfMonth) ? DayOfMonth : TemplateValue.Day;
                if (day > Calendar.GetDaysInMonth(Year, MonthOfYearNumeric))
                {
                    return ParseResult<LocalDate>.DayOfMonthOutOfRange(text, day, MonthOfYearNumeric, Year);
                }

                LocalDate value = new LocalDate(Year, MonthOfYearNumeric, day, Calendar);

                // A parsed day-of-week is only a cross-check against the computed date.
                if (usedFields.HasAny(PatternFields.DayOfWeek) && DayOfWeek != value.DayOfWeek)
                {
                    return ParseResult<LocalDate>.InconsistentDayOfWeekTextValue(text);
                }

                // FIXME: If we got an era, check that the resulting date really lies within that era.
                return ParseResult<LocalDate>.ForValue(value);
            }

            /// <summary>
            /// Work out the year, based on fields of:
            /// - Year
            /// - YearOfEra
            /// - YearTwoDigits (implies YearOfEra)
            /// - Era
            ///
            /// If the year is specified, that trumps everything else - any other fields
            /// are just used for checking.
            ///
            /// If nothing is specified, the year of the template value is used.
            ///
            /// If just the era is specified, the year of the template value is used,
            /// and the specified era is checked against it. (Hopefully no-one will
            /// expect to get useful information from a format string with era but no year...)
            ///
            /// Otherwise, we have the year of era (possibly only two digits) and possibly the
            /// era. If the era isn't specified, take it from the template value.
            /// Finally, if we only have two digits, then use either the century of the template
            /// value or the previous century if the year-of-era is greater than TwoDigitYearMax...
            /// and if the template value isn't in the first century already.
            ///
            /// Phew.
            /// </summary>
            private ParseResult<LocalDate> DetermineYear(PatternFields usedFields, string text)
            {
                if (usedFields.HasAny(PatternFields.Year))
                {
                    if (Year > Calendar.MaxYear || Year < Calendar.MinYear)
                    {
                        return ParseResult<LocalDate>.FieldValueOutOfRangePostParse(text, Year, 'u');
                    }

                    if (usedFields.HasAny(PatternFields.Era) && Era != Calendar.GetEra(Year))
                    {
                        return ParseResult<LocalDate>.InconsistentValues(text, 'g', 'u');
                    }

                    if (usedFields.HasAny(PatternFields.YearOfEra))
                    {
                        int yearOfEraFromYear = Calendar.GetYearOfEra(Year);
                        if (usedFields.HasAny(PatternFields.YearTwoDigits))
                        {
                            // We're only checking the last two digits
                            yearOfEraFromYear = yearOfEraFromYear % 100;
                        }
                        if (yearOfEraFromYear != YearOfEra)
                        {
                            return ParseResult<LocalDate>.InconsistentValues(text, 'y', 'u');
                        }
                    }
                    return null;
                }

                // Use the year from the template value, possibly checking the era.
                if (!usedFields.HasAny(PatternFields.YearOfEra))
                {
                    Year = TemplateValue.Year;
                    return usedFields.HasAny(PatternFields.Era) && Era != Calendar.GetEra(Year)
                        ? ParseResult<LocalDate>.InconsistentValues(text, 'g', 'u') : null;
                }

                if (!usedFields.HasAny(PatternFields.Era))
                {
                    Era = TemplateValue.Era;
                }

                if (usedFields.HasAny(PatternFields.YearTwoDigits))
                {
                    int century = TemplateValue.YearOfEra / 100;
                    // Two-digit years above the cut-off fall into the previous century,
                    // unless the template is already in the first century.
                    if (YearOfEra > TwoDigitYearMax && century > 1)
                    {
                        century--;
                    }
                    YearOfEra += century * 100;
                }

                if (YearOfEra < Calendar.GetMinYearOfEra(Era) ||
                    YearOfEra > Calendar.GetMaxYearOfEra(Era))
                {
                    return ParseResult<LocalDate>.YearOfEraOutOfRange(text, YearOfEra, Era, Calendar);
                }
                Year = Calendar.GetAbsoluteYear(YearOfEra, Era);
                return null;
            }

            // Resolves MonthOfYearNumeric from whichever month fields the pattern used,
            // cross-checking numeric vs. text forms when both were present.
            private ParseResult<LocalDate> DetermineMonth(PatternFields usedFields, string text)
            {
                switch (usedFields & (PatternFields.MonthOfYearNumeric | PatternFields.MonthOfYearText))
                {
                    case PatternFields.MonthOfYearNumeric:
                        // No-op
                        break;
                    case PatternFields.MonthOfYearText:
                        MonthOfYearNumeric = MonthOfYearText;
                        break;
                    case PatternFields.MonthOfYearNumeric | PatternFields.MonthOfYearText:
                        if (MonthOfYearNumeric != MonthOfYearText)
                        {
                            return ParseResult<LocalDate>.InconsistentMonthValues(text);
                        }
                        // No need to change MonthOfYearNumeric - this was just a check
                        break;
                    case 0:
                        MonthOfYearNumeric = TemplateValue.Month;
                        break;
                }
                if (MonthOfYearNumeric > Calendar.GetMonthsInYear(Year))
                {
                    return ParseResult<LocalDate>.MonthOutOfRange(text, MonthOfYearNumeric, Year);
                }
                return null;
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*============================================================
**
**
**
** Purpose: This class will encapsulate a short and provide an
**          Object representation of it.
**
**
===========================================================*/

using System.Globalization;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
using System.Diagnostics.Contracts;

namespace System
{
    [System.Runtime.InteropServices.StructLayout(LayoutKind.Sequential)]
    public struct Int16 : IComparable, IFormattable, IComparable<Int16>, IEquatable<Int16>, IConvertible
    {
        internal short m_value;

        public const short MaxValue = (short)0x7FFF;
        public const short MinValue = unchecked((short)0x8000);

        // Compares this object to another object, returning an integer that
        // indicates the relationship: negative if this is less than value,
        // zero if equal, positive if greater.
        // null is considered to be less than any instance.
        // If value is not of type Int16, this method throws an ArgumentException.
        int IComparable.CompareTo(Object value)
        {
            if (value == null)
            {
                return 1;
            }
            if (value is Int16)
            {
                // Subtraction cannot overflow: both operands widen to int first.
                return m_value - ((Int16)value).m_value;
            }
            throw new ArgumentException(SR.Arg_MustBeInt16);
        }

        public int CompareTo(Int16 value)
        {
            // Safe ordering trick: short - short is computed in int, so no overflow.
            return m_value - value;
        }

        public override bool Equals(Object obj)
        {
            if (!(obj is Int16))
            {
                return false;
            }
            return m_value == ((Int16)obj).m_value;
        }

        [NonVersionable]
        public bool Equals(Int16 obj)
        {
            return m_value == obj;
        }

        // Returns a HashCode for the Int16
        public override int GetHashCode()
        {
            // Unsigned low 16 bits OR'd with the value shifted into the high 16 bits.
            return ((int)((ushort)m_value) | (((int)m_value) << 16));
        }

        public override String ToString()
        {
            Contract.Ensures(Contract.Result<String>() != null);
            return FormatProvider.FormatInt32(m_value, null, null);
        }

        public String ToString(IFormatProvider provider)
        {
            Contract.Ensures(Contract.Result<String>() != null);
            return FormatProvider.FormatInt32(m_value, null, provider);
        }

        public String ToString(String format)
        {
            Contract.Ensures(Contract.Result<String>() != null);
            return ToString(format, null);
        }

        public String ToString(String format, IFormatProvider provider)
        {
            Contract.Ensures(Contract.Result<String>() != null);
            // Hex-format a negative value via its unsigned low 16 bits so that
            // "X" prints 4 hex digits instead of a sign-extended 32-bit value.
            if (m_value < 0 && format != null && format.Length > 0 && (format[0] == 'X' || format[0] == 'x'))
            {
                uint temp = (uint)(m_value & 0x0000FFFF);
                return FormatProvider.FormatUInt32(temp, format, provider);
            }
            return FormatProvider.FormatInt32(m_value, format, provider);
        }

        public static short Parse(String s)
        {
            return Parse(s, NumberStyles.Integer, null);
        }

        public static short Parse(String s, NumberStyles style)
        {
            UInt32.ValidateParseStyleInteger(style);
            return Parse(s, style, null);
        }

        public static short Parse(String s, IFormatProvider provider)
        {
            return Parse(s, NumberStyles.Integer, provider);
        }

        public static short Parse(String s, NumberStyles style, IFormatProvider provider)
        {
            UInt32.ValidateParseStyleInteger(style);
            int i = 0;
            try
            {
                // Parse as a 32-bit value first, then range-check down to Int16.
                i = FormatProvider.ParseInt32(s, style, provider);
            }
            catch (OverflowException e)
            {
                // Re-wrap so the caller sees an Int16-specific overflow message.
                throw new OverflowException(SR.Overflow_Int16, e);
            }

            // We need this check here since we don't allow signs to be specified in hex numbers,
            // so we fix up the result for negative numbers.
            if ((style & NumberStyles.AllowHexSpecifier) != 0)
            { // We are parsing a hexadecimal number
                if ((i < 0) || (i > UInt16.MaxValue))
                {
                    throw new OverflowException(SR.Overflow_Int16);
                }
                // Reinterpret the 16-bit pattern as a signed short (e.g. "FFFF" -> -1).
                return (short)i;
            }

            if (i < MinValue || i > MaxValue) throw new OverflowException(SR.Overflow_Int16);
            return (short)i;
        }

        public static bool TryParse(String s, out Int16 result)
        {
            return TryParse(s, NumberStyles.Integer, null, out result);
        }

        public static bool TryParse(String s, NumberStyles style, IFormatProvider provider, out Int16 result)
        {
            UInt32.ValidateParseStyleInteger(style);

            result = 0;
            int i;
            if (!FormatProvider.TryParseInt32(s, style, provider, out i))
            {
                return false;
            }

            // We need this check here since we don't allow signs to be specified in hex numbers,
            // so we fix up the result for negative numbers.
            if ((style & NumberStyles.AllowHexSpecifier) != 0)
            { // We are parsing a hexadecimal number
                if ((i < 0) || i > UInt16.MaxValue)
                {
                    return false;
                }
                // Reinterpret the 16-bit pattern as a signed short (e.g. "FFFF" -> -1).
                result = (Int16)i;
                return true;
            }

            if (i < MinValue || i > MaxValue)
            {
                return false;
            }
            result = (Int16)i;
            return true;
        }

        //
        // IConvertible implementation
        //

        TypeCode IConvertible.GetTypeCode()
        {
            return TypeCode.Int16;
        }

        /// <internalonly/>
        bool IConvertible.ToBoolean(IFormatProvider provider)
        {
            return Convert.ToBoolean(m_value);
        }

        /// <internalonly/>
        char IConvertible.ToChar(IFormatProvider provider)
        {
            return Convert.ToChar(m_value);
        }

        /// <internalonly/>
        sbyte IConvertible.ToSByte(IFormatProvider provider)
        {
            return Convert.ToSByte(m_value);
        }

        /// <internalonly/>
        byte IConvertible.ToByte(IFormatProvider provider)
        {
            return Convert.ToByte(m_value);
        }

        /// <internalonly/>
        short IConvertible.ToInt16(IFormatProvider provider)
        {
            return m_value;
        }

        /// <internalonly/>
        ushort IConvertible.ToUInt16(IFormatProvider provider)
        {
            return Convert.ToUInt16(m_value);
        }

        /// <internalonly/>
        int IConvertible.ToInt32(IFormatProvider provider)
        {
            return Convert.ToInt32(m_value);
        }

        /// <internalonly/>
        uint IConvertible.ToUInt32(IFormatProvider provider)
        {
            return Convert.ToUInt32(m_value);
        }

        /// <internalonly/>
        long IConvertible.ToInt64(IFormatProvider provider)
        {
            return Convert.ToInt64(m_value);
        }

        /// <internalonly/>
        ulong IConvertible.ToUInt64(IFormatProvider provider)
        {
            return Convert.ToUInt64(m_value);
        }

        /// <internalonly/>
        float IConvertible.ToSingle(IFormatProvider provider)
        {
            return Convert.ToSingle(m_value);
        }

        /// <internalonly/>
        double IConvertible.ToDouble(IFormatProvider provider)
        {
            return Convert.ToDouble(m_value);
        }

        /// <internalonly/>
        Decimal IConvertible.ToDecimal(IFormatProvider provider)
        {
            return Convert.ToDecimal(m_value);
        }

        /// <internalonly/>
        DateTime IConvertible.ToDateTime(IFormatProvider provider)
        {
            // Int16 -> DateTime has no meaningful conversion.
            throw new InvalidCastException(String.Format(SR.InvalidCast_FromTo, "Int16", "DateTime"));
        }

        /// <internalonly/>
        Object IConvertible.ToType(Type type, IFormatProvider provider)
        {
            return Convert.DefaultToType((IConvertible)this, type, provider);
        }
    }
}
// Copyright (c) MOSA Project. Licensed under the New BSD License.

using Mosa.Compiler.Linker;
using Mosa.Compiler.MosaTypeSystem;
using Mosa.Compiler.Trace;
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;

namespace Mosa.Compiler.Framework
{
    /// <summary>
    /// Base class for just-in-time and ahead-of-time compilers, which use
    /// the Mosa.Compiler.Framework framework.
    /// </summary>
    /// <remarks>
    /// Lifecycle (driven by the owning <see cref="MosaCompiler"/>):
    /// <see cref="Initialize"/> → <see cref="PreCompile"/> →
    /// <see cref="ExecuteCompile"/> or <see cref="ExecuteThreadedCompile"/> →
    /// <see cref="PostCompile"/>. Derived classes supply the per-method
    /// compiler via <see cref="CreateMethodCompiler"/>.
    /// </remarks>
    public abstract class BaseCompiler
    {
        #region Properties

        /// <summary>
        /// Gets the compiler instance that owns this object (assigned in <see cref="Initialize"/>).
        /// </summary>
        public MosaCompiler Compiler { get; private set; }

        /// <summary>
        /// Returns the architecture used by the compiler.
        /// </summary>
        public BaseArchitecture Architecture { get; private set; }

        /// <summary>
        /// Gets the pre compile pipeline.
        /// </summary>
        public CompilerPipeline CompilePipeline { get; private set; }

        /// <summary>
        /// Gets the type system.
        /// </summary>
        /// <value>The type system.</value>
        public TypeSystem TypeSystem { get; private set; }

        /// <summary>
        /// Gets the type layout.
        /// </summary>
        /// <value>The type layout.</value>
        public MosaTypeLayout TypeLayout { get; private set; }

        /// <summary>
        /// Gets the compiler trace.
        /// </summary>
        /// <value>
        /// The compiler trace.
        /// </value>
        public CompilerTrace CompilerTrace { get; private set; }

        /// <summary>
        /// Gets the compiler options.
        /// </summary>
        /// <value>The compiler options.</value>
        public CompilerOptions CompilerOptions { get; private set; }

        /// <summary>
        /// Gets the counters. Aggregated from per-method counters in <see cref="PostCompile"/>.
        /// </summary>
        public Counters GlobalCounters { get; private set; }

        /// <summary>
        /// Gets the scheduler that hands out methods to compile.
        /// </summary>
        public CompilationScheduler CompilationScheduler { get; private set; }

        /// <summary>
        /// Gets the linker.
        /// </summary>
        public BaseLinker Linker { get; private set; }

        /// <summary>
        /// Gets the plug system.
        /// </summary>
        public PlugSystem PlugSystem { get; private set; }

        /// <summary>
        /// Gets the list of Intrinsic Types for internal call replacements.
        /// Keyed by the <c>ReplacementTargetAttribute.Target</c> string; populated
        /// by reflection over the executing assembly in <see cref="Initialize"/>.
        /// </summary>
        public Dictionary<string, Type> IntrinsicTypes { get; private set; }

        /// <summary>
        /// Gets the type of the platform internal runtime.
        /// </summary>
        /// <value>
        /// The type of the platform internal runtime.
        /// </value>
        public MosaType PlatformInternalRuntimeType { get; private set; }

        /// <summary>
        /// Gets the platform-independent internal runtime type ("Mosa.Runtime.Internal").
        /// </summary>
        public MosaType InternalRuntimeType { get; private set; }

        /// <summary>
        /// Gets the compiler data.
        /// </summary>
        public CompilerData CompilerData { get; private set; }

        #endregion Properties

        #region Methods

        /// <summary>
        /// Wires this instance to the owning compiler, builds the intrinsic-type
        /// replacement map, resolves the runtime types, and lets the derived class
        /// and the architecture extend the compiler pipeline.
        /// </summary>
        /// <param name="compiler">The owning compiler; must not be null.</param>
        public void Initialize(MosaCompiler compiler)
        {
            // NOTE(review): consider nameof(compiler) instead of the literal @"compiler".
            if (compiler == null)
                throw new ArgumentNullException(@"compiler");

            Compiler = compiler;
            Architecture = Compiler.CompilerOptions.Architecture;
            TypeSystem = Compiler.TypeSystem;
            TypeLayout = Compiler.TypeLayout;
            CompilerTrace = Compiler.CompilerTrace;
            CompilerOptions = Compiler.CompilerOptions;
            CompilationScheduler = Compiler.CompilationScheduler;
            Linker = compiler.Linker;

            CompilePipeline = new CompilerPipeline();
            GlobalCounters = new Counters();
            PlugSystem = new PlugSystem();
            CompilerData = new CompilerData();

            // Create new dictionary
            IntrinsicTypes = new Dictionary<string, Type>();

            // Scan the executing assembly for classes implementing IIntrinsicInternalMethod
            // and index them by their ReplacementTarget attribute string.
            foreach (var type in Assembly.GetExecutingAssembly().GetTypes())
            {
                if (type.IsClass && typeof(IIntrinsicInternalMethod).IsAssignableFrom(type))
                {
                    // Now get all the ReplacementTarget attributes
                    var attributes = (ReplacementTargetAttribute[])type.GetCustomAttributes(typeof(ReplacementTargetAttribute), true);
                    for (int i = 0; i < attributes.Length; i++)
                    {
                        // Finally add the dictionary entry mapping the target string and the type
                        IntrinsicTypes.Add(attributes[i].Target, type);
                    }
                }
            }

            PlatformInternalRuntimeType = GetPlatformInternalRuntimeType();
            InternalRuntimeType = GeInternalRuntimeType();

            // Extended Setup
            ExtendCompilerSetup();

            // Build the default pre-compiler pipeline
            Architecture.ExtendCompilerPipeline(CompilePipeline);
        }

        /// <summary>
        /// Extends the compiler setup. Override to add stages or state before the
        /// architecture extends the pipeline; the base implementation does nothing.
        /// </summary>
        public virtual void ExtendCompilerSetup()
        {
        }

        /// <summary>
        /// Compiles the method.
        /// </summary>
        /// <param name="method">The method.</param>
        /// <param name="basicBlocks">The basic blocks. Callers in this class pass null;
        /// presumably the method compiler builds them when absent — TODO confirm.</param>
        /// <param name="threadID">The thread identifier (used for trace attribution).</param>
        public void CompileMethod(MosaMethod method, BasicBlocks basicBlocks, int threadID = 0)
        {
            NewCompilerTraceEvent(CompilerEvent.CompilingMethod, method.FullName, threadID);

            var methodCompiler = CreateMethodCompiler(method, basicBlocks, threadID);
            Architecture.ExtendMethodCompilerPipeline(methodCompiler.Pipeline);

            methodCompiler.Compile();
        }

        /// <summary>
        /// Creates a method compiler
        /// </summary>
        /// <param name="method">The method to compile.</param>
        /// <param name="basicBlocks">The basic blocks.</param>
        /// <param name="threadID">The thread identifier.</param>
        /// <returns></returns>
        protected abstract BaseMethodCompiler CreateMethodCompiler(MosaMethod method, BasicBlocks basicBlocks, int threadID = 0);

        /// <summary>
        /// Compiles the linker method.
        /// </summary>
        /// <param name="methodName">Name of the method.</param>
        /// <returns>A linker-synthesized method with a void return and no parameters.</returns>
        public MosaMethod CreateLinkerMethod(string methodName)
        {
            return TypeSystem.CreateLinkerMethod(methodName, TypeSystem.BuiltIn.Void, null);
        }

        /// <summary>
        /// Executes the compiler pre compiler stages.
        /// </summary>
        /// <remarks>
        /// The method iterates the compilation stage chain and runs each
        /// stage on the input. All stages are initialized first, then each
        /// stage's pre-compile step runs in pipeline order.
        /// </remarks>
        internal void PreCompile()
        {
            foreach (ICompilerStage stage in CompilePipeline)
            {
                stage.Initialize(this);
            }

            foreach (ICompilerStage stage in CompilePipeline)
            {
                NewCompilerTraceEvent(CompilerEvent.CompilerStageStart, stage.Name);

                // Execute stage
                stage.ExecutePreCompile();

                NewCompilerTraceEvent(CompilerEvent.CompilerStageEnd, stage.Name);
            }
        }

        /// <summary>
        /// Runs single-threaded compile passes until the scheduler reports no further pass.
        /// </summary>
        public void ExecuteCompile()
        {
            ExecuteCompilePass();

            while (CompilationScheduler.StartNextPass())
            {
                ExecuteCompilePass();
            }
        }

        /// <summary>
        /// Compiles scheduled methods one at a time until the scheduler runs dry,
        /// reporting progress after each method.
        /// </summary>
        private void ExecuteCompilePass()
        {
            while (true)
            {
                var method = CompilationScheduler.GetMethodToCompile();

                if (method == null)
                    return;

                CompileMethod(method, null, 0);

                CompilerTrace.UpdatedCompilerProgress(
                    CompilationScheduler.TotalMethods,
                    CompilationScheduler.TotalMethods - CompilationScheduler.TotalQueuedMethods
                );
            }
        }

        /// <summary>
        /// Runs multi-threaded compile passes until the scheduler reports no further pass.
        /// </summary>
        /// <param name="threads">Number of worker threads to queue per pass.</param>
        public void ExecuteThreadedCompile(int threads)
        {
            ExecuteThreadedCompilePass(threads);

            while (CompilationScheduler.StartNextPass())
            {
                ExecuteThreadedCompilePass(threads);
            }
        }

        /// <summary>
        /// Queues <paramref name="threads"/> workers on the thread pool and blocks
        /// until all of them drain the scheduler. The CountdownEvent starts at 1 and
        /// is incremented per worker; the extra Signal() before Wait() releases the
        /// initial count so Wait() completes once every worker has signaled.
        /// </summary>
        private void ExecuteThreadedCompilePass(int threads)
        {
            using (var finished = new CountdownEvent(1))
            {
                for (int threadID = 0; threadID < threads; threadID++)
                {
                    finished.AddCount();

                    // Capture a stable per-worker id; threadID itself mutates across iterations.
                    int tid = threadID + 1;

                    ThreadPool.QueueUserWorkItem(
                        new WaitCallback(delegate
                        {
                            //try
                            //{
                            CompileWorker(tid);

                            //}
                            //catch (Exception e)
                            //{
                            //	this.CompilerTrace.NewCompilerTraceEvent(CompilerEvent.Exception, e.ToString(), threadID);
                            //}
                            //finally
                            //{
                            finished.Signal();

                            //}
                        }
                    ));
                }

                finished.Signal();
                finished.Wait();
            }
        }

        /// <summary>
        /// Worker loop: pulls methods from the scheduler until none remain,
        /// compiling each and reporting progress.
        /// </summary>
        /// <param name="threadID">The worker's id, passed through to tracing.</param>
        private void CompileWorker(int threadID)
        {
            while (true)
            {
                var method = CompilationScheduler.GetMethodToCompile();

                if (method == null)
                {
                    return;
                }

                // only one method can be compiled at a time
                // NOTE(review): locking on the method object itself — works only if no
                // other code locks these instances for a different purpose.
                lock (method)
                {
                    CompileMethod(method, null, threadID);
                }

                CompilerTrace.UpdatedCompilerProgress(
                    CompilationScheduler.TotalMethods,
                    CompilationScheduler.TotalMethods - CompilationScheduler.TotalQueuedMethods);
            }
        }

        /// <summary>
        /// Executes the compiler post compiler stages.
        /// </summary>
        /// <remarks>
        /// The method iterates the compilation stage chain and runs each
        /// stage on the input. Afterwards, per-method counters are merged
        /// into <see cref="GlobalCounters"/> and exported to the trace.
        /// </remarks>
        internal void PostCompile()
        {
            foreach (ICompilerStage stage in CompilePipeline)
            {
                NewCompilerTraceEvent(CompilerEvent.CompilerStageStart, stage.Name);

                // Execute stage
                stage.ExecutePostCompile();

                NewCompilerTraceEvent(CompilerEvent.CompilerStageEnd, stage.Name);
            }

            // Sum up the counters
            foreach (var methodData in CompilerData.MethodData)
            {
                GlobalCounters.Merge(methodData.Counters);
            }

            ExportCounters();
        }

        #endregion Methods

        /// <summary>
        /// Emits each aggregated counter as a Counter trace event.
        /// </summary>
        protected void ExportCounters()
        {
            foreach (var counter in GlobalCounters.Export())
            {
                NewCompilerTraceEvent(CompilerEvent.Counter, counter);
            }
        }

        #region Helper Methods

        /// <summary>
        /// Traces the specified compiler event.
        /// </summary>
        /// <param name="compilerEvent">The compiler event.</param>
        /// <param name="message">The message.</param>
        protected void NewCompilerTraceEvent(CompilerEvent compilerEvent, string message)
        {
            CompilerTrace.NewCompilerTraceEvent(compilerEvent, message, 0);
        }

        /// <summary>
        /// Traces the specified compiler event.
        /// </summary>
        /// <param name="compilerEvent">The compiler event.</param>
        /// <param name="message">The message.</param>
        /// <param name="threadID">The thread identifier attributed to the event.</param>
        protected void NewCompilerTraceEvent(CompilerEvent compilerEvent, string message, int threadID)
        {
            CompilerTrace.NewCompilerTraceEvent(compilerEvent, message, threadID);
        }

        /// <summary>
        /// Updates the counter.
        /// </summary>
        /// <param name="name">The name.</param>
        /// <param name="count">The count.</param>
        protected void UpdateCounter(string name, int count)
        {
            GlobalCounters.Update(name, count);
        }

        /// <summary>
        /// Resolves the "Internal" type in the architecture-specific runtime namespace.
        /// </summary>
        protected MosaType GetPlatformInternalRuntimeType()
        {
            return TypeSystem.GetTypeByName("Mosa.Runtime." + Architecture.PlatformName, "Internal");
        }

        /// <summary>
        /// Resolves the platform-independent "Mosa.Runtime.Internal" type.
        /// NOTE(review): method name is missing a 't' ("GeInternal…"); renaming
        /// would break derived classes, so it is left as-is.
        /// </summary>
        protected MosaType GeInternalRuntimeType()
        {
            return TypeSystem.GetTypeByName("Mosa.Runtime", "Internal");
        }

        #endregion Helper Methods
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using Xunit;
using System.Net.Http.HPack;

namespace System.Net.Http.Unit.Tests.HPack
{
    /// <summary>
    /// Tests for the HPACK Huffman decoder: valid encodings round-trip through a
    /// local reference encoder, and invalid encodings (truncations, over-padding,
    /// embedded EOS) must throw.
    /// </summary>
    public class HuffmanDecodingTests
    {
        // Reference encoding table, indexed by byte value 0..255. Each entry is the
        // Huffman code left-aligned in the uint plus its bit length. Presumably this
        // mirrors the HPACK static Huffman table (RFC 7541 Appendix B) without the
        // EOS entry — EOS is injected separately by Encode when requested.
        private static readonly (uint code, int bitLength)[] s_encodingTable = new (uint code, int bitLength)[]
        {
            (0b11111111_11000000_00000000_00000000, 13), (0b11111111_11111111_10110000_00000000, 23),
            (0b11111111_11111111_11111110_00100000, 28), (0b11111111_11111111_11111110_00110000, 28),
            (0b11111111_11111111_11111110_01000000, 28), (0b11111111_11111111_11111110_01010000, 28),
            (0b11111111_11111111_11111110_01100000, 28), (0b11111111_11111111_11111110_01110000, 28),
            (0b11111111_11111111_11111110_10000000, 28), (0b11111111_11111111_11101010_00000000, 24),
            (0b11111111_11111111_11111111_11110000, 30), (0b11111111_11111111_11111110_10010000, 28),
            (0b11111111_11111111_11111110_10100000, 28), (0b11111111_11111111_11111111_11110100, 30),
            (0b11111111_11111111_11111110_10110000, 28), (0b11111111_11111111_11111110_11000000, 28),
            (0b11111111_11111111_11111110_11010000, 28), (0b11111111_11111111_11111110_11100000, 28),
            (0b11111111_11111111_11111110_11110000, 28), (0b11111111_11111111_11111111_00000000, 28),
            (0b11111111_11111111_11111111_00010000, 28), (0b11111111_11111111_11111111_00100000, 28),
            (0b11111111_11111111_11111111_11111000, 30), (0b11111111_11111111_11111111_00110000, 28),
            (0b11111111_11111111_11111111_01000000, 28), (0b11111111_11111111_11111111_01010000, 28),
            (0b11111111_11111111_11111111_01100000, 28), (0b11111111_11111111_11111111_01110000, 28),
            (0b11111111_11111111_11111111_10000000, 28), (0b11111111_11111111_11111111_10010000, 28),
            (0b11111111_11111111_11111111_10100000, 28), (0b11111111_11111111_11111111_10110000, 28),
            // Entries for byte values 32 and up.
            (0b01010000_00000000_00000000_00000000, 6), (0b11111110_00000000_00000000_00000000, 10),
            (0b11111110_01000000_00000000_00000000, 10), (0b11111111_10100000_00000000_00000000, 12),
            (0b11111111_11001000_00000000_00000000, 13), (0b01010100_00000000_00000000_00000000, 6),
            (0b11111000_00000000_00000000_00000000, 8), (0b11111111_01000000_00000000_00000000, 11),
            (0b11111110_10000000_00000000_00000000, 10), (0b11111110_11000000_00000000_00000000, 10),
            (0b11111001_00000000_00000000_00000000, 8), (0b11111111_01100000_00000000_00000000, 11),
            (0b11111010_00000000_00000000_00000000, 8), (0b01011000_00000000_00000000_00000000, 6),
            (0b01011100_00000000_00000000_00000000, 6), (0b01100000_00000000_00000000_00000000, 6),
            (0b00000000_00000000_00000000_00000000, 5), (0b00001000_00000000_00000000_00000000, 5),
            (0b00010000_00000000_00000000_00000000, 5), (0b01100100_00000000_00000000_00000000, 6),
            (0b01101000_00000000_00000000_00000000, 6), (0b01101100_00000000_00000000_00000000, 6),
            (0b01110000_00000000_00000000_00000000, 6), (0b01110100_00000000_00000000_00000000, 6),
            (0b01111000_00000000_00000000_00000000, 6), (0b01111100_00000000_00000000_00000000, 6),
            (0b10111000_00000000_00000000_00000000, 7), (0b11111011_00000000_00000000_00000000, 8),
            (0b11111111_11111000_00000000_00000000, 15), (0b10000000_00000000_00000000_00000000, 6),
            (0b11111111_10110000_00000000_00000000, 12), (0b11111111_00000000_00000000_00000000, 10),
            (0b11111111_11010000_00000000_00000000, 13), (0b10000100_00000000_00000000_00000000, 6),
            (0b10111010_00000000_00000000_00000000, 7), (0b10111100_00000000_00000000_00000000, 7),
            (0b10111110_00000000_00000000_00000000, 7), (0b11000000_00000000_00000000_00000000, 7),
            (0b11000010_00000000_00000000_00000000, 7), (0b11000100_00000000_00000000_00000000, 7),
            (0b11000110_00000000_00000000_00000000, 7), (0b11001000_00000000_00000000_00000000, 7),
            (0b11001010_00000000_00000000_00000000, 7), (0b11001100_00000000_00000000_00000000, 7),
            (0b11001110_00000000_00000000_00000000, 7),
            (0b11010000_00000000_00000000_00000000, 7), (0b11010010_00000000_00000000_00000000, 7),
            (0b11010100_00000000_00000000_00000000, 7), (0b11010110_00000000_00000000_00000000, 7),
            (0b11011000_00000000_00000000_00000000, 7), (0b11011010_00000000_00000000_00000000, 7),
            (0b11011100_00000000_00000000_00000000, 7), (0b11011110_00000000_00000000_00000000, 7),
            (0b11100000_00000000_00000000_00000000, 7), (0b11100010_00000000_00000000_00000000, 7),
            (0b11100100_00000000_00000000_00000000, 7), (0b11111100_00000000_00000000_00000000, 8),
            (0b11100110_00000000_00000000_00000000, 7), (0b11111101_00000000_00000000_00000000, 8),
            (0b11111111_11011000_00000000_00000000, 13), (0b11111111_11111110_00000000_00000000, 19),
            (0b11111111_11100000_00000000_00000000, 13), (0b11111111_11110000_00000000_00000000, 14),
            (0b10001000_00000000_00000000_00000000, 6), (0b11111111_11111010_00000000_00000000, 15),
            (0b00011000_00000000_00000000_00000000, 5), (0b10001100_00000000_00000000_00000000, 6),
            (0b00100000_00000000_00000000_00000000, 5), (0b10010000_00000000_00000000_00000000, 6),
            (0b00101000_00000000_00000000_00000000, 5), (0b10010100_00000000_00000000_00000000, 6),
            (0b10011000_00000000_00000000_00000000, 6), (0b10011100_00000000_00000000_00000000, 6),
            (0b00110000_00000000_00000000_00000000, 5), (0b11101000_00000000_00000000_00000000, 7),
            (0b11101010_00000000_00000000_00000000, 7), (0b10100000_00000000_00000000_00000000, 6),
            (0b10100100_00000000_00000000_00000000, 6), (0b10101000_00000000_00000000_00000000, 6),
            (0b00111000_00000000_00000000_00000000, 5), (0b10101100_00000000_00000000_00000000, 6),
            (0b11101100_00000000_00000000_00000000, 7), (0b10110000_00000000_00000000_00000000, 6),
            (0b01000000_00000000_00000000_00000000, 5), (0b01001000_00000000_00000000_00000000, 5),
            (0b10110100_00000000_00000000_00000000, 6), (0b11101110_00000000_00000000_00000000, 7),
            (0b11110000_00000000_00000000_00000000, 7), (0b11110010_00000000_00000000_00000000, 7),
            (0b11110100_00000000_00000000_00000000, 7),
            (0b11110110_00000000_00000000_00000000, 7), (0b11111111_11111100_00000000_00000000, 15),
            (0b11111111_10000000_00000000_00000000, 11), (0b11111111_11110100_00000000_00000000, 14),
            (0b11111111_11101000_00000000_00000000, 13), (0b11111111_11111111_11111111_11000000, 28),
            (0b11111111_11111110_01100000_00000000, 20), (0b11111111_11111111_01001000_00000000, 22),
            (0b11111111_11111110_01110000_00000000, 20), (0b11111111_11111110_10000000_00000000, 20),
            (0b11111111_11111111_01001100_00000000, 22), (0b11111111_11111111_01010000_00000000, 22),
            (0b11111111_11111111_01010100_00000000, 22), (0b11111111_11111111_10110010_00000000, 23),
            (0b11111111_11111111_01011000_00000000, 22), (0b11111111_11111111_10110100_00000000, 23),
            (0b11111111_11111111_10110110_00000000, 23), (0b11111111_11111111_10111000_00000000, 23),
            (0b11111111_11111111_10111010_00000000, 23), (0b11111111_11111111_10111100_00000000, 23),
            (0b11111111_11111111_11101011_00000000, 24), (0b11111111_11111111_10111110_00000000, 23),
            (0b11111111_11111111_11101100_00000000, 24), (0b11111111_11111111_11101101_00000000, 24),
            (0b11111111_11111111_01011100_00000000, 22), (0b11111111_11111111_11000000_00000000, 23),
            (0b11111111_11111111_11101110_00000000, 24), (0b11111111_11111111_11000010_00000000, 23),
            (0b11111111_11111111_11000100_00000000, 23), (0b11111111_11111111_11000110_00000000, 23),
            (0b11111111_11111111_11001000_00000000, 23), (0b11111111_11111110_11100000_00000000, 21),
            (0b11111111_11111111_01100000_00000000, 22), (0b11111111_11111111_11001010_00000000, 23),
            (0b11111111_11111111_01100100_00000000, 22), (0b11111111_11111111_11001100_00000000, 23),
            (0b11111111_11111111_11001110_00000000, 23), (0b11111111_11111111_11101111_00000000, 24),
            (0b11111111_11111111_01101000_00000000, 22), (0b11111111_11111110_11101000_00000000, 21),
            (0b11111111_11111110_10010000_00000000, 20), (0b11111111_11111111_01101100_00000000, 22),
            (0b11111111_11111111_01110000_00000000, 22), (0b11111111_11111111_11010000_00000000, 23),
            (0b11111111_11111111_11010010_00000000, 23), (0b11111111_11111110_11110000_00000000, 21),
            (0b11111111_11111111_11010100_00000000, 23), (0b11111111_11111111_01110100_00000000, 22),
            (0b11111111_11111111_01111000_00000000, 22), (0b11111111_11111111_11110000_00000000, 24),
            (0b11111111_11111110_11111000_00000000, 21), (0b11111111_11111111_01111100_00000000, 22),
            (0b11111111_11111111_11010110_00000000, 23), (0b11111111_11111111_11011000_00000000, 23),
            (0b11111111_11111111_00000000_00000000, 21), (0b11111111_11111111_00001000_00000000, 21),
            (0b11111111_11111111_10000000_00000000, 22), (0b11111111_11111111_00010000_00000000, 21),
            (0b11111111_11111111_11011010_00000000, 23), (0b11111111_11111111_10000100_00000000, 22),
            (0b11111111_11111111_11011100_00000000, 23), (0b11111111_11111111_11011110_00000000, 23),
            (0b11111111_11111110_10100000_00000000, 20), (0b11111111_11111111_10001000_00000000, 22),
            (0b11111111_11111111_10001100_00000000, 22), (0b11111111_11111111_10010000_00000000, 22),
            (0b11111111_11111111_11100000_00000000, 23), (0b11111111_11111111_10010100_00000000, 22),
            (0b11111111_11111111_10011000_00000000, 22), (0b11111111_11111111_11100010_00000000, 23),
            (0b11111111_11111111_11111000_00000000, 26), (0b11111111_11111111_11111000_01000000, 26),
            (0b11111111_11111110_10110000_00000000, 20), (0b11111111_11111110_00100000_00000000, 19),
            (0b11111111_11111111_10011100_00000000, 22), (0b11111111_11111111_11100100_00000000, 23),
            (0b11111111_11111111_10100000_00000000, 22), (0b11111111_11111111_11110110_00000000, 25),
            (0b11111111_11111111_11111000_10000000, 26), (0b11111111_11111111_11111000_11000000, 26),
            (0b11111111_11111111_11111001_00000000, 26), (0b11111111_11111111_11111011_11000000, 27),
            (0b11111111_11111111_11111011_11100000, 27), (0b11111111_11111111_11111001_01000000, 26),
            (0b11111111_11111111_11110001_00000000, 24), (0b11111111_11111111_11110110_10000000, 25),
            (0b11111111_11111110_01000000_00000000, 19), (0b11111111_11111111_00011000_00000000, 21),
            (0b11111111_11111111_11111001_10000000, 26), (0b11111111_11111111_11111100_00000000, 27),
            (0b11111111_11111111_11111100_00100000, 27), (0b11111111_11111111_11111001_11000000, 26),
            (0b11111111_11111111_11111100_01000000, 27), (0b11111111_11111111_11110010_00000000, 24),
            (0b11111111_11111111_00100000_00000000, 21), (0b11111111_11111111_00101000_00000000, 21),
            (0b11111111_11111111_11111010_00000000, 26), (0b11111111_11111111_11111010_01000000, 26),
            (0b11111111_11111111_11111111_11010000, 28), (0b11111111_11111111_11111100_01100000, 27),
            (0b11111111_11111111_11111100_10000000, 27), (0b11111111_11111111_11111100_10100000, 27),
            (0b11111111_11111110_11000000_00000000, 20), (0b11111111_11111111_11110011_00000000, 24),
            (0b11111111_11111110_11010000_00000000, 20), (0b11111111_11111111_00110000_00000000, 21),
            (0b11111111_11111111_10100100_00000000, 22), (0b11111111_11111111_00111000_00000000, 21),
            (0b11111111_11111111_01000000_00000000, 21), (0b11111111_11111111_11100110_00000000, 23),
            (0b11111111_11111111_10101000_00000000, 22), (0b11111111_11111111_10101100_00000000, 22),
            (0b11111111_11111111_11110111_00000000, 25), (0b11111111_11111111_11110111_10000000, 25),
            (0b11111111_11111111_11110100_00000000, 24), (0b11111111_11111111_11110101_00000000, 24),
            (0b11111111_11111111_11111010_10000000, 26), (0b11111111_11111111_11101000_00000000, 23),
            (0b11111111_11111111_11111010_11000000, 26), (0b11111111_11111111_11111100_11000000, 27),
            (0b11111111_11111111_11111011_00000000, 26), (0b11111111_11111111_11111011_01000000, 26),
            (0b11111111_11111111_11111100_11100000, 27), (0b11111111_11111111_11111101_00000000, 27),
            (0b11111111_11111111_11111101_00100000, 27), (0b11111111_11111111_11111101_01000000, 27),
            (0b11111111_11111111_11111101_01100000, 27), (0b11111111_11111111_11111111_11100000, 28),
            (0b11111111_11111111_11111101_10000000, 27), (0b11111111_11111111_11111101_10100000, 27),
            (0b11111111_11111111_11111101_11000000, 27), (0b11111111_11111111_11111101_11100000, 27),
            (0b11111111_11111111_11111110_00000000, 27), (0b11111111_11111111_11111011_10000000, 26)
        };

        // Encoded values are 30 bits at most, so are stored in the table in a uint.
        // Convert to ulong here and put the encoded value in the most significant bits.
        // This makes the encoding logic below simpler.
        private static (ulong code, int bitLength) GetEncodedValue(byte b)
        {
            (uint code, int bitLength) = s_encodingTable[b];
            return (((ulong)code) << 32, bitLength);
        }

        /// <summary>
        /// Reference Huffman encoder used to produce decoder inputs.
        /// Writes the encoding of <paramref name="source"/> into
        /// <paramref name="destination"/> and returns the encoded byte count.
        /// When <paramref name="injectEOS"/> is true, a 30-bit EOS code is
        /// prepended to the first symbol's code (then cleared, so it happens once).
        /// </summary>
        private static int Encode(byte[] source, byte[] destination, bool injectEOS)
        {
            ulong currentBits = 0; // We can have 7 bits of rollover plus 30 bits for the next encoded value, so use a ulong
            int currentBitCount = 0;
            int dstOffset = 0;

            for (int i = 0; i < source.Length; i++)
            {
                (ulong code, int bitLength) = GetEncodedValue(source[i]);

                // inject EOS if instructed to
                if (injectEOS)
                {
                    code |= (ulong)0b11111111_11111111_11111111_11111100 << (32 - bitLength);
                    bitLength += 30;
                    injectEOS = false;
                }

                currentBits |= code >> currentBitCount;
                currentBitCount += bitLength;

                // Flush whole bytes from the top of the accumulator.
                while (currentBitCount >= 8)
                {
                    destination[dstOffset++] = (byte)(currentBits >> 56);
                    currentBits = currentBits << 8;
                    currentBitCount -= 8;
                }
            }

            // Fill any trailing bits with ones, per RFC
            if (currentBitCount > 0)
            {
                currentBits |= 0xFFFFFFFFFFFFFFFF >> currentBitCount;
                destination[dstOffset++] = (byte)(currentBits >> 56);
            }

            return dstOffset;
        }

        /// <summary>
        /// Round-trip: encode with the local reference encoder, decode with the
        /// product decoder, and verify the original bytes come back.
        /// </summary>
        [Theory]
        [MemberData(nameof(TestData))]
        public void HuffmanDecoding_ValidEncoding_Succeeds(byte[] input)
        {
            // Worst case encoding is 30 bits per input byte, so make the encoded buffer 4 times as big
            byte[] encoded = new byte[input.Length * 4];
            int encodedByteCount = Encode(input, encoded, false);

            // Worst case decoding is an output byte per 5 input bits, so make the decoded buffer 2 times as big
            byte[] decoded = new byte[encoded.Length * 2];

            int decodedByteCount = Huffman.Decode(new ReadOnlySpan<byte>(encoded, 0, encodedByteCount), ref decoded);

            Assert.Equal(input.Length, decodedByteCount);
            Assert.Equal(input, decoded.Take(decodedByteCount));
        }

        // HuffmanDecodingException is internal to the product assembly, so the test
        // resolves it by name via reflection to assert on its throw sites.
        private static readonly Type s_huffmanDecodingExceptionType = typeof(HttpClient).Assembly.GetType("System.Net.Http.HPack.HuffmanDecodingException");

        /// <summary>
        /// Invalid encodings (truncated codes, bad padding, embedded EOS) must throw
        /// HuffmanDecodingException.
        /// </summary>
        [Theory]
        [MemberData(nameof(InvalidEncodingData))]
        public void HuffmanDecoding_InvalidEncoding_Throws(byte[] encoded)
        {
            // Worst case decoding is an output byte per 5 input bits, so make the decoded buffer 2 times as big
            byte[] decoded = new byte[encoded.Length * 2];
            Assert.Throws(s_huffmanDecodingExceptionType, () => Huffman.Decode(encoded, ref decoded));
        }

        // This input sequence will encode to 17 bits, thus offsetting the next character to encode
        // by exactly one bit. We use this below to generate a prefix that encodes all of the possible starting
        // bit offsets for a character, from 0 to 7.
        private static readonly byte[] s_offsetByOneBit = new byte[] { (byte)'c', (byte)'l', (byte)'r' };

        /// <summary>
        /// Valid-encoding inputs: every single byte, every byte followed by 'a'
        /// (decoder state check), every byte at every starting bit offset, and one
        /// large deterministic random blob (fixed seed, so runs are reproducible).
        /// </summary>
        public static IEnumerable<object[]> TestData()
        {
            // Single byte data
            for (int i = 0; i < 256; i++)
            {
                yield return new object[] { new byte[] { (byte)i } };
            }

            // Ensure that decoding every possible value leaves the decoder in a correct state so that
            // a subsequent value can be decoded (here, 'a')
            for (int i = 0; i < 256; i++)
            {
                yield return new object[] { new byte[] { (byte)i, (byte)'a' } };
            }

            // Ensure that every possible bit starting position for every value is encoded properly
            // s_offsetByOneBit encodes to exactly 17 bits, leaving 1 bit for the next byte
            // So by repeating this sequence, we can generate any starting bit position we want.
            byte[] currentPrefix = new byte[0];
            for (int prefixBits = 1; prefixBits <= 8; prefixBits++)
            {
                currentPrefix = currentPrefix.Concat(s_offsetByOneBit).ToArray();

                // Make sure we're actually getting the correct number of prefix bits
                int encodedBits = currentPrefix.Select(b => s_encodingTable[b].bitLength).Sum();
                Assert.Equal(prefixBits % 8, encodedBits % 8);

                for (int i = 0; i < 256; i++)
                {
                    yield return new object[] { currentPrefix.Concat(new byte[] { (byte)i }.Concat(currentPrefix)).ToArray() };
                }
            }

            // Finally, one really big chunk of randomly generated data.
            byte[] data = new byte[1024 * 1024];
            new Random(42).NextBytes(data);
            yield return new object[] { data };
        }

        /// <summary>
        /// Invalid-encoding inputs: truncated multi-byte codes, encodings padded
        /// with extra all-ones bytes (more than 7 padding bits is disallowed),
        /// a lone EOS code, and EOS injected between two symbols.
        /// </summary>
        public static IEnumerable<object[]> InvalidEncodingData()
        {
            // For encodings greater than 8 bits, truncate one or more bytes to generate an invalid encoding
            byte[] source = new byte[1];
            byte[] destination = new byte[10];
            for (int i = 0; i < 256; i++)
            {
                source[0] = (byte)i;
                int encodedByteCount = Encode(source, destination, false);
                if (encodedByteCount > 1)
                {
                    yield return new object[] { destination.Take(encodedByteCount - 1).ToArray() };
                    if (encodedByteCount > 2)
                    {
                        yield return new object[] { destination.Take(encodedByteCount - 2).ToArray() };
                        if (encodedByteCount > 3)
                        {
                            yield return new object[] { destination.Take(encodedByteCount - 3).ToArray() };
                        }
                    }
                }
            }

            // Pad encodings with invalid trailing one bits. This is disallowed.
            byte[] pad1 = new byte[] { 0xFF };
            byte[] pad2 = new byte[] { 0xFF, 0xFF, };
            byte[] pad3 = new byte[] { 0xFF, 0xFF, 0xFF };
            byte[] pad4 = new byte[] { 0xFF, 0xFF, 0xFF, 0xFF };

            for (int i = 0; i < 256; i++)
            {
                source[0] = (byte)i;
                int encodedByteCount = Encode(source, destination, false);
                yield return new object[] { destination.Take(encodedByteCount).Concat(pad1).ToArray() };
                yield return new object[] { destination.Take(encodedByteCount).Concat(pad2).ToArray() };
                yield return new object[] { destination.Take(encodedByteCount).Concat(pad3).ToArray() };
                yield return new object[] { destination.Take(encodedByteCount).Concat(pad4).ToArray() };
            }

            // send single EOS
            yield return new object[] { new byte[] { 0b11111111, 0b11111111, 0b11111111, 0b11111100 } };

            // send combinations with EOS in the middle
            source = new byte[2];
            destination = new byte[24];
            for (int i = 0; i < 256; i++)
            {
                source[0] = source[1] = (byte)i;
                int encodedByteCount = Encode(source, destination, true);
                yield return new object[] { destination.Take(encodedByteCount).ToArray() };
            }
        }
    }
}
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.

using System;
using System.ComponentModel;
using System.Drawing;
using System.Windows.Forms;
using OpenLiveWriter.Controls;
using OpenLiveWriter.CoreServices;

namespace OpenLiveWriter.ApplicationFramework
{
    /// <summary>
    /// Provides a control which is useful for providing a border for, or replacing the border of,
    /// another control. The hosted control is re-parented into an inner
    /// FocusWatchingUserControl, and this control paints either a 1px themed
    /// border or a 3D sunken border around it.
    /// </summary>
    public class BorderControl : UserControl
    {
        #region Private Member Variables

        /// <summary>
        /// The border size, in pixels, used when computing the auto height.
        /// </summary>
        private const int BORDER_SIZE = 1;

        /// <summary>
        /// Required designer variable.
        /// </summary>
        private Container components = null;

        /// <summary>
        /// The theme border color.
        /// </summary>
        private Color themeBorderColor;

        /// <summary>
        /// The FocusWatchingUserControl that contains the control. This allows us to hide borders
        /// or add borders to any control.
        /// </summary>
        private FocusWatchingUserControl focusWatchingUserControl;

        /// <summary>
        /// The control that this BorderControl is providing a border for.
        /// </summary>
        private Control control;

        /// <summary>
        /// True to draw a flat 1px themed border; false to draw a 3D sunken border.
        /// </summary>
        private bool themeBorder = false;

        /// <summary>
        /// A value indicating whether the height of the BorderControl is automatically determined
        /// based on the height of the control.
        /// </summary>
        private bool autoHeight = false;

        /// <summary>
        /// The top inset.
        /// </summary>
        private int topInset;

        /// <summary>
        /// The left inset.
        /// </summary>
        private int leftInset;

        /// <summary>
        /// The bottom inset.
        /// </summary>
        private int bottomInset;

        /// <summary>
        /// True if bottom border should not be used.
        /// </summary>
        private bool suppressBottomBorder;

        /// <summary>
        /// The right inset.
        /// </summary>
        private int rightInset;

        #endregion Private Member Variables

        #region Class Initialization & Termination

        /// <summary>
        /// Initializes a new instance of the BorderControl class.
        /// </summary>
        public BorderControl()
        {
            // This call is required by the Windows.Forms Form Designer.
            InitializeComponent();

            // Set the theme border color.
            // FIX: derive the initial color through ColorHelper.GetThemeBorderColor, the same
            // way OnSystemColorsChanged does. Previously the raw SystemColors.ControlDark was
            // assigned here, so the first paint could differ from the themed color computed
            // after a system-colors-changed event.
            themeBorderColor = ColorHelper.GetThemeBorderColor(SystemColors.ControlDark);
        }

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">True when called from Dispose(); false when called from the finalizer.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (components != null)
                {
                    components.Dispose();
                }
            }
            base.Dispose(disposing);
        }

        #endregion Class Initialization & Termination

        #region Component Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.focusWatchingUserControl = new OpenLiveWriter.Controls.FocusWatchingUserControl();
            this.SuspendLayout();
            //
            // focusWatchingUserControl
            //
            this.focusWatchingUserControl.Anchor = System.Windows.Forms.AnchorStyles.None;
            this.focusWatchingUserControl.BackColor = System.Drawing.SystemColors.Window;
            this.focusWatchingUserControl.Location = new System.Drawing.Point(1, 1);
            this.focusWatchingUserControl.Name = "focusWatchingUserControl";
            this.focusWatchingUserControl.Size = new System.Drawing.Size(148, 148);
            this.focusWatchingUserControl.TabStop = false;
            //this.focusWatchingUserControl.TabIndex = 0;
            //
            // BorderControl
            //
            this.Controls.Add(this.focusWatchingUserControl);
            this.Name = "BorderControl";
            this.ResumeLayout(false);
        }

        #endregion

        #region Public Properties

        /// <summary>
        /// Gets or sets the control that this BorderControl provides a border for.
        /// Setting re-parents the control into the inner container and tracks its
        /// Enabled state so the background matches.
        /// </summary>
        public Control Control
        {
            get
            {
                return control;
            }
            set
            {
                if (control != value)
                {
                    // Detach the previous control, if any.
                    if (control != null)
                    {
                        control.EnabledChanged -= new EventHandler(control_EnabledChanged);
                        control.Parent = null;
                    }

                    control = value;

                    if (control != null)
                    {
                        control.EnabledChanged += new EventHandler(control_EnabledChanged);
                        control.Parent = focusWatchingUserControl;
                        focusWatchingUserControl.BackColor = control.Enabled ? control.BackColor : SystemColors.Control;
                    }
                }
            }
        }

        /// <summary>
        /// Gets or sets a value indicating whether a flat themed border is drawn
        /// instead of the default 3D sunken border.
        /// </summary>
        public bool ThemeBorder
        {
            get
            {
                return themeBorder;
            }
            set
            {
                if (themeBorder != value)
                {
                    themeBorder = value;
                    PerformLayout();
                }
            }
        }

        /// <summary>
        /// Gets or sets a value indicating whether the height of the BorderControl is
        /// automatically determined based on the height of the control.
        /// </summary>
        public bool AutoHeight
        {
            get
            {
                return autoHeight;
            }
            set
            {
                autoHeight = value;
            }
        }

        /// <summary>
        /// Gets or sets the top inset.
        /// </summary>
        public int TopInset
        {
            get
            {
                return topInset;
            }
            set
            {
                if (topInset != value)
                {
                    topInset = value;
                    PerformLayout();
                }
            }
        }

        /// <summary>
        /// Gets or sets the left inset.
        /// </summary>
        public int LeftInset
        {
            get
            {
                return leftInset;
            }
            set
            {
                if (leftInset != value)
                {
                    leftInset = value;
                    PerformLayout();
                }
            }
        }

        /// <summary>
        /// Gets or sets the bottom inset.
        /// </summary>
        public int BottomInset
        {
            get
            {
                return bottomInset;
            }
            set
            {
                if (bottomInset != value)
                {
                    bottomInset = value;
                    PerformLayout();
                }
            }
        }

        /// <summary>
        /// Gets or sets a value indicating whether the bottom border is suppressed.
        /// </summary>
        public bool SuppressBottomBorder
        {
            get
            {
                return suppressBottomBorder;
            }
            set
            {
                if (suppressBottomBorder != value)
                {
                    suppressBottomBorder = value;
                    PerformLayout();
                }
            }
        }

        /// <summary>
        /// Gets or sets the right inset.
        /// </summary>
        public int RightInset
        {
            get
            {
                return rightInset;
            }
            set
            {
                if (rightInset != value)
                {
                    rightInset = value;
                    PerformLayout();
                }
            }
        }

        /// <summary>
        /// Gets or sets the background color for the control. The inner container's
        /// background is kept in sync.
        /// </summary>
        public override Color BackColor
        {
            get
            {
                return base.BackColor;
            }
            set
            {
                base.BackColor = value;
                focusWatchingUserControl.BackColor = BackColor;
            }
        }

        #endregion Public Properties

        #region Protected Methods

        /// <summary>
        /// Performs the work of setting the specified bounds of this control.
        /// </summary>
        /// <param name="x">The new Left property value of the control.</param>
        /// <param name="y">The new Right property value of the control.</param>
        /// <param name="width">The new Width property value of the control.</param>
        /// <param name="height">The new Height property value of the control.</param>
        /// <param name="specified">A bitwise combination of the BoundsSpecified values.</param>
        protected override void SetBoundsCore(int x, int y, int width, int height, BoundsSpecified specified)
        {
            // If this is an auto-height BorderControl, and we have a control, constrain the height
            // of the BorderControl based on the height of the control.
            if (autoHeight && control != null)
                height = control.Size.Height + (topInset + bottomInset) + (BORDER_SIZE * 2);

            // Call the base class's method.
            base.SetBoundsCore(x, y, width, height, specified);
        }

        #endregion Protected Methods

        #region Protected Event Overrides

        /// <summary>
        /// Raises the SystemColorsChanged event.
        /// </summary>
        /// <param name="e">An EventArgs that contains the event data.</param>
        protected override void OnSystemColorsChanged(EventArgs e)
        {
            // Call the base class's method so that registered delegates receive the event.
            base.OnSystemColorsChanged(e);

            // Obtain the theme border color again.
            themeBorderColor = ColorHelper.GetThemeBorderColor(SystemColors.ControlDark);

            // Invalidate.
            Invalidate();
        }

        /// <summary>
        /// Raises the Layout event.
        /// </summary>
        /// <param name="e">A LayoutEventArgs that contains the event data.</param>
        protected override void OnLayout(LayoutEventArgs e)
        {
            // Call the base class's method so that registered delegates receive the event.
            base.OnLayout(e);

            // Layout the focusWatchingUserControl: 1px margin for the themed border,
            // 2px for the 3D border, with the bottom pixel(s) kept when the bottom
            // border is suppressed.
            if (themeBorder)
                focusWatchingUserControl.Bounds = new Rectangle(1, 1, Width - 2, suppressBottomBorder ? Height - 1 : Height - 2);
            else
                focusWatchingUserControl.Bounds = new Rectangle(2, 2, Width - 4, suppressBottomBorder ? Height - 2 : Height - 4);

            // Layout the control.
            if (control != null)
                control.Bounds = new Rectangle(leftInset,
                                                topInset,
                                                focusWatchingUserControl.Width - rightInset,
                                                autoHeight ? control.Height : focusWatchingUserControl.Height - (topInset + bottomInset));

            // Make sure the control gets repainted.
            Invalidate();
        }

        /// <summary>
        /// Raises the PaintBackground event.
        /// </summary>
        /// <param name="e">A PaintEventArgs that contains the event data.</param>
        protected override void OnPaintBackground(PaintEventArgs e)
        {
            // Call the base class's method so that registered delegates receive the event.
            base.OnPaintBackground(e);

            if (themeBorder)
            {
                // Draw the border.
                using (Pen pen = new Pen(themeBorderColor))
                    e.Graphics.DrawRectangle(pen, 0, 0, Width - 1, Height - 1);
            }
            else
                ControlPaint.DrawBorder3D(e.Graphics, ClientRectangle, Border3DStyle.Sunken);
        }

        /// <summary>
        /// control_EnabledChanged event handler. Keeps the inner container's
        /// background in sync with the hosted control's enabled state.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void control_EnabledChanged(object sender, EventArgs e)
        {
            focusWatchingUserControl.BackColor = control.Enabled ? control.BackColor : SystemColors.Control;
            focusWatchingUserControl.Invalidate();
        }

        #endregion Protected Event Overrides
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.Network
{
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Models;

    /// <summary>
    /// Extension methods for VirtualNetworksOperations.
    /// </summary>
    /// <remarks>
    /// The synchronous methods in this class are thin blocking wrappers over the
    /// corresponding *Async methods (they block via GetAwaiter().GetResult());
    /// prefer the asynchronous overloads in async call paths.
    /// </remarks>
    public static partial class VirtualNetworksOperationsExtensions
    {
            /// <summary>
            /// The Delete VirtualNetwork operation deletes the specified virtual network
            /// </summary>
            /// <remarks>
            /// Blocks the calling thread until the asynchronous delete completes.
            /// </remarks>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            /// <param name='virtualNetworkName'>
            /// The name of the virtual network.
            /// </param>
            public static void Delete(this IVirtualNetworksOperations operations, string resourceGroupName, string virtualNetworkName)
            {
                Task.Factory.StartNew(s => ((IVirtualNetworksOperations)s).DeleteAsync(resourceGroupName, virtualNetworkName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// The Delete VirtualNetwork operation deletes the specified virtual network
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            /// <param name='virtualNetworkName'>
            /// The name of the virtual network.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task DeleteAsync(this IVirtualNetworksOperations operations, string resourceGroupName, string virtualNetworkName, CancellationToken cancellationToken = default(CancellationToken))
            {
                await operations.DeleteWithHttpMessagesAsync(resourceGroupName, virtualNetworkName, null, cancellationToken).ConfigureAwait(false);
            }

            /// <summary>
            /// The Delete VirtualNetwork operation deletes the specified virtual network
            /// </summary>
            /// <remarks>
            /// Blocks the calling thread until the asynchronous begin-delete request completes.
            /// </remarks>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            /// <param name='virtualNetworkName'>
            /// The name of the virtual network.
            /// </param>
            public static void BeginDelete(this IVirtualNetworksOperations operations, string resourceGroupName, string virtualNetworkName)
            {
                Task.Factory.StartNew(s => ((IVirtualNetworksOperations)s).BeginDeleteAsync(resourceGroupName, virtualNetworkName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// The Delete VirtualNetwork operation deletes the specified virtual network
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            /// <param name='virtualNetworkName'>
            /// The name of the virtual network.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task BeginDeleteAsync(this IVirtualNetworksOperations operations, string resourceGroupName, string virtualNetworkName, CancellationToken cancellationToken = default(CancellationToken))
            {
                await operations.BeginDeleteWithHttpMessagesAsync(resourceGroupName, virtualNetworkName, null, cancellationToken).ConfigureAwait(false);
            }

            /// <summary>
            /// The Get VirtualNetwork operation retrieves information about the specified
            /// virtual network.
            /// </summary>
            /// <remarks>
            /// Blocks the calling thread until the asynchronous get completes.
            /// </remarks>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            /// <param name='virtualNetworkName'>
            /// The name of the virtual network.
            /// </param>
            /// <param name='expand'>
            /// expand references resources.
            /// </param>
            public static VirtualNetwork Get(this IVirtualNetworksOperations operations, string resourceGroupName, string virtualNetworkName, string expand = default(string))
            {
                return Task.Factory.StartNew(s => ((IVirtualNetworksOperations)s).GetAsync(resourceGroupName, virtualNetworkName, expand), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// The Get VirtualNetwork operation retrieves information about the specified
            /// virtual network.
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            /// <param name='virtualNetworkName'>
            /// The name of the virtual network.
            /// </param>
            /// <param name='expand'>
            /// expand references resources.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<VirtualNetwork> GetAsync(this IVirtualNetworksOperations operations, string resourceGroupName, string virtualNetworkName, string expand = default(string), CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, virtualNetworkName, expand, null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// The Put VirtualNetwork operation creates/updates a virtual network in the
            /// specified resource group.
            /// </summary>
            /// <remarks>
            /// Blocks the calling thread until the asynchronous create/update completes.
            /// </remarks>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            /// <param name='virtualNetworkName'>
            /// The name of the virtual network.
            /// </param>
            /// <param name='parameters'>
            /// Parameters supplied to the create/update Virtual Network operation
            /// </param>
            public static VirtualNetwork CreateOrUpdate(this IVirtualNetworksOperations operations, string resourceGroupName, string virtualNetworkName, VirtualNetwork parameters)
            {
                return Task.Factory.StartNew(s => ((IVirtualNetworksOperations)s).CreateOrUpdateAsync(resourceGroupName, virtualNetworkName, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// The Put VirtualNetwork operation creates/updates a virtual network in the
            /// specified resource group.
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            /// <param name='virtualNetworkName'>
            /// The name of the virtual network.
            /// </param>
            /// <param name='parameters'>
            /// Parameters supplied to the create/update Virtual Network operation
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<VirtualNetwork> CreateOrUpdateAsync(this IVirtualNetworksOperations operations, string resourceGroupName, string virtualNetworkName, VirtualNetwork parameters, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, virtualNetworkName, parameters, null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// The Put VirtualNetwork operation creates/updates a virtual network in the
            /// specified resource group.
            /// </summary>
            /// <remarks>
            /// Blocks the calling thread until the asynchronous begin-create/update request completes.
            /// </remarks>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            /// <param name='virtualNetworkName'>
            /// The name of the virtual network.
            /// </param>
            /// <param name='parameters'>
            /// Parameters supplied to the create/update Virtual Network operation
            /// </param>
            public static VirtualNetwork BeginCreateOrUpdate(this IVirtualNetworksOperations operations, string resourceGroupName, string virtualNetworkName, VirtualNetwork parameters)
            {
                return Task.Factory.StartNew(s => ((IVirtualNetworksOperations)s).BeginCreateOrUpdateAsync(resourceGroupName, virtualNetworkName, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// The Put VirtualNetwork operation creates/updates a virtual network in the
            /// specified resource group.
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            /// <param name='virtualNetworkName'>
            /// The name of the virtual network.
            /// </param>
            /// <param name='parameters'>
            /// Parameters supplied to the create/update Virtual Network operation
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<VirtualNetwork> BeginCreateOrUpdateAsync(this IVirtualNetworksOperations operations, string resourceGroupName, string virtualNetworkName, VirtualNetwork parameters, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.BeginCreateOrUpdateWithHttpMessagesAsync(resourceGroupName, virtualNetworkName, parameters, null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// The list VirtualNetwork returns all Virtual Networks in a subscription
            /// </summary>
            /// <remarks>
            /// Blocks the calling thread until the asynchronous list completes.
            /// </remarks>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            public static IPage<VirtualNetwork> ListAll(this IVirtualNetworksOperations operations)
            {
                return Task.Factory.StartNew(s => ((IVirtualNetworksOperations)s).ListAllAsync(), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// The list VirtualNetwork returns all Virtual Networks in a subscription
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<IPage<VirtualNetwork>> ListAllAsync(this IVirtualNetworksOperations operations, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.ListAllWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// The list VirtualNetwork returns all Virtual Networks in a resource group
            /// </summary>
            /// <remarks>
            /// Blocks the calling thread until the asynchronous list completes.
            /// </remarks>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            public static IPage<VirtualNetwork> List(this IVirtualNetworksOperations operations, string resourceGroupName)
            {
                return Task.Factory.StartNew(s => ((IVirtualNetworksOperations)s).ListAsync(resourceGroupName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// The list VirtualNetwork returns all Virtual Networks in a resource group
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='resourceGroupName'>
            /// The name of the resource group.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<IPage<VirtualNetwork>> ListAsync(this IVirtualNetworksOperations operations, string resourceGroupName, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.ListWithHttpMessagesAsync(resourceGroupName, null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// The list VirtualNetwork returns all Virtual Networks in a subscription
            /// </summary>
            /// <remarks>
            /// Blocks the calling thread until the asynchronous next-page fetch completes.
            /// </remarks>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='nextPageLink'>
            /// The NextLink from the previous successful call to List operation.
            /// </param>
            public static IPage<VirtualNetwork> ListAllNext(this IVirtualNetworksOperations operations, string nextPageLink)
            {
                return Task.Factory.StartNew(s => ((IVirtualNetworksOperations)s).ListAllNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// The list VirtualNetwork returns all Virtual Networks in a subscription
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='nextPageLink'>
            /// The NextLink from the previous successful call to List operation.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<IPage<VirtualNetwork>> ListAllNextAsync(this IVirtualNetworksOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.ListAllNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// The list VirtualNetwork returns all Virtual Networks in a resource group
            /// </summary>
            /// <remarks>
            /// Blocks the calling thread until the asynchronous next-page fetch completes.
            /// </remarks>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='nextPageLink'>
            /// The NextLink from the previous successful call to List operation.
            /// </param>
            public static IPage<VirtualNetwork> ListNext(this IVirtualNetworksOperations operations, string nextPageLink)
            {
                return Task.Factory.StartNew(s => ((IVirtualNetworksOperations)s).ListNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// The list VirtualNetwork returns all Virtual Networks in a resource group
            /// </summary>
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='nextPageLink'>
            /// The NextLink from the previous successful call to List operation.
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<IPage<VirtualNetwork>> ListNextAsync(this IVirtualNetworksOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.ListNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

    }
}
/* * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Numerics; using Newtonsoft.Json; using ProtoBuf; using QuantConnect.Configuration; using QuantConnect.Data.UniverseSelection; using QuantConnect.Interfaces; using QuantConnect.Logging; using QuantConnect.Securities.Future; using QuantConnect.Util; using static QuantConnect.StringExtensions; namespace QuantConnect { /// <summary> /// Defines a unique identifier for securities /// </summary> /// <remarks> /// The SecurityIdentifier contains information about a specific security. /// This includes the symbol and other data specific to the SecurityType. 
/// The symbol is limited to 12 characters /// </remarks> [JsonConverter(typeof(SecurityIdentifierJsonConverter))] [ProtoContract(SkipConstructor = true)] public class SecurityIdentifier : IEquatable<SecurityIdentifier>, IComparable<SecurityIdentifier>, IComparable { #region Empty, DefaultDate Fields private static readonly ConcurrentDictionary<string, SecurityIdentifier> SecurityIdentifierCache = new ConcurrentDictionary<string, SecurityIdentifier>(); private static readonly string MapFileProviderTypeName = Config.Get("map-file-provider", "LocalDiskMapFileProvider"); private static readonly char[] InvalidCharacters = {'|', ' '}; private static readonly Lazy<IMapFileProvider> MapFileProvider = new Lazy<IMapFileProvider>( () => Composer.Instance.GetExportedValueByTypeName<IMapFileProvider>(MapFileProviderTypeName) ); /// <summary> /// Gets an instance of <see cref="SecurityIdentifier"/> that is empty, that is, one with no symbol specified /// </summary> public static readonly SecurityIdentifier Empty = new SecurityIdentifier(string.Empty, 0); /// <summary> /// Gets an instance of <see cref="SecurityIdentifier"/> that is explicitly no symbol /// </summary> public static readonly SecurityIdentifier None = new SecurityIdentifier("NONE", 0); /// <summary> /// Gets the date to be used when it does not apply. 
/// </summary> public static readonly DateTime DefaultDate = DateTime.FromOADate(0); /// <summary> /// Gets the set of invalids symbol characters /// </summary> public static readonly HashSet<char> InvalidSymbolCharacters = new HashSet<char>(InvalidCharacters); #endregion #region Scales, Widths and Market Maps // these values define the structure of the 'otherData' // the constant width fields are used via modulus, so the width is the number of zeros specified, // {put/call:1}{oa-date:5}{style:1}{strike:6}{strike-scale:2}{market:3}{security-type:2} private const ulong SecurityTypeWidth = 100; private const ulong SecurityTypeOffset = 1; private const ulong MarketWidth = 1000; private const ulong MarketOffset = SecurityTypeOffset * SecurityTypeWidth; private const int StrikeDefaultScale = 4; private static readonly ulong StrikeDefaultScaleExpanded = Pow(10, StrikeDefaultScale); private const ulong StrikeScaleWidth = 100; private const ulong StrikeScaleOffset = MarketOffset * MarketWidth; private const ulong StrikeWidth = 1000000; private const ulong StrikeOffset = StrikeScaleOffset * StrikeScaleWidth; private const ulong OptionStyleWidth = 10; private const ulong OptionStyleOffset = StrikeOffset * StrikeWidth; private const ulong DaysWidth = 100000; private const ulong DaysOffset = OptionStyleOffset * OptionStyleWidth; private const ulong PutCallOffset = DaysOffset * DaysWidth; private const ulong PutCallWidth = 10; #endregion #region Member variables [ProtoMember(1)] private string _symbol; [ProtoMember(2)] private ulong _properties; [ProtoMember(3)] private SecurityIdentifier _underlying; private bool _hashCodeSet; private int _hashCode; private decimal? _strikePrice; private OptionStyle? _optionStyle; private OptionRight? _optionRight; private DateTime? 
_date; private string _stringRep; private string _market; #endregion #region Properties /// <summary> /// Gets whether or not this <see cref="SecurityIdentifier"/> is a derivative, /// that is, it has a valid <see cref="Underlying"/> property /// </summary> public bool HasUnderlying { get { return _underlying != null; } } /// <summary> /// Gets the underlying security identifier for this security identifier. When there is /// no underlying, this property will return a value of <see cref="Empty"/>. /// </summary> public SecurityIdentifier Underlying { get { if (_underlying == null) { throw new InvalidOperationException("No underlying specified for this identifier. Check that HasUnderlying is true before accessing the Underlying property."); } return _underlying; } } /// <summary> /// Gets the date component of this identifier. For equities this /// is the first date the security traded. Technically speaking, /// in LEAN, this is the first date mentioned in the map_files. /// For futures and options this is the expiry date of the contract. /// For other asset classes, this property will throw an /// exception as the field is not specified. /// </summary> public DateTime Date { get { try { return _date.Value; } catch (InvalidOperationException) { switch (SecurityType) { case SecurityType.Base: case SecurityType.Equity: case SecurityType.Option: case SecurityType.Future: case SecurityType.Index: case SecurityType.FutureOption: case SecurityType.IndexOption: var oadate = ExtractFromProperties(DaysOffset, DaysWidth); _date = DateTime.FromOADate(oadate); return _date.Value; default: throw new InvalidOperationException("Date is only defined for SecurityType.Equity, SecurityType.Option, SecurityType.Future, SecurityType.FutureOption, SecurityType.IndexOption, and SecurityType.Base"); } } } } /// <summary> /// Gets the original symbol used to generate this security identifier. 
/// For equities, by convention this is the first ticker symbol for which /// the security traded /// </summary> public string Symbol { get { return _symbol; } } /// <summary> /// Gets the market component of this security identifier. If located in the /// internal mappings, the full string is returned. If the value is unknown, /// the integer value is returned as a string. /// </summary> public string Market { get { if (_market == null) { var marketCode = ExtractFromProperties(MarketOffset, MarketWidth); var market = QuantConnect.Market.Decode((int)marketCode); // if we couldn't find it, send back the numeric representation _market = market ?? marketCode.ToStringInvariant(); } return _market; } } /// <summary> /// Gets the security type component of this security identifier. /// </summary> [ProtoMember(4)] public SecurityType SecurityType { get; } /// <summary> /// Gets the option strike price. This only applies to SecurityType.Option /// and will thrown anexception if accessed otherwse. /// </summary> public decimal StrikePrice { get { try { // will throw 'InvalidOperationException' if not set return _strikePrice.Value; } catch (InvalidOperationException) { if (!SecurityType.IsOption()) { throw new InvalidOperationException("StrikePrice is only defined for SecurityType.Option, SecurityType.FutureOption, and SecurityType.IndexOption"); } // performance: lets calculate strike price once var scale = ExtractFromProperties(StrikeScaleOffset, StrikeScaleWidth); var unscaled = ExtractFromProperties(StrikeOffset, StrikeWidth); var pow = Math.Pow(10, (int)scale - StrikeDefaultScale); // If the 20th bit is set to 1, we have a negative strike price. // Let's normalize the strike and explicitly make it negative if (((unscaled >> 19) & 1) == 1) { _strikePrice = -((unscaled ^ 1 << 19) * (decimal)pow); } else { _strikePrice = unscaled * (decimal)pow; } return _strikePrice.Value; } } } /// <summary> /// Gets the option type component of this security identifier. 
This /// only applies to SecurityType.Open and will throw an exception if /// accessed otherwise. /// </summary> public OptionRight OptionRight { get { try { // will throw 'InvalidOperationException' if not set return _optionRight.Value; } catch (InvalidOperationException) { if (!SecurityType.IsOption()) { throw new InvalidOperationException("OptionRight is only defined for SecurityType.Option, SecurityType.FutureOption, and SecurityType.IndexOption"); } _optionRight = (OptionRight)ExtractFromProperties(PutCallOffset, PutCallWidth); return _optionRight.Value; } } } /// <summary> /// Gets the option style component of this security identifier. This /// only applies to SecurityType.Open and will throw an exception if /// accessed otherwise. /// </summary> public OptionStyle OptionStyle { get { try { // will throw 'InvalidOperationException' if not set return _optionStyle.Value; } catch (InvalidOperationException) { if (!SecurityType.IsOption()) { throw new InvalidOperationException("OptionStyle is only defined for SecurityType.Option, SecurityType.FutureOption, and SecurityType.IndexOption"); } _optionStyle = (OptionStyle)(ExtractFromProperties(OptionStyleOffset, OptionStyleWidth)); return _optionStyle.Value; } } } #endregion #region Constructors /// <summary> /// Initializes a new instance of the <see cref="SecurityIdentifier"/> class /// </summary> /// <param name="symbol">The base36 string encoded as a long using alpha [0-9A-Z]</param> /// <param name="properties">Other data defining properties of the symbol including market, /// security type, listing or expiry date, strike/call/put/style for options, ect...</param> public SecurityIdentifier(string symbol, ulong properties) { if (symbol == null) { throw new ArgumentNullException(nameof(symbol), "SecurityIdentifier requires a non-null string 'symbol'"); } if (symbol.IndexOfAny(InvalidCharacters) != -1) { throw new ArgumentException("symbol must not contain the characters '|' or ' '.", nameof(symbol)); } _symbol = 
symbol; _properties = properties; _underlying = null; _strikePrice = null; _optionStyle = null; _optionRight = null; _date = null; SecurityType = (SecurityType)ExtractFromProperties(SecurityTypeOffset, SecurityTypeWidth, properties); if (!SecurityType.IsValid()) { throw new ArgumentException($"The provided properties do not match with a valid {nameof(SecurityType)}", "properties"); } _hashCode = unchecked (symbol.GetHashCode() * 397) ^ properties.GetHashCode(); _hashCodeSet = true; } /// <summary> /// Initializes a new instance of the <see cref="SecurityIdentifier"/> class /// </summary> /// <param name="symbol">The base36 string encoded as a long using alpha [0-9A-Z]</param> /// <param name="properties">Other data defining properties of the symbol including market, /// security type, listing or expiry date, strike/call/put/style for options, ect...</param> /// <param name="underlying">Specifies a <see cref="SecurityIdentifier"/> that represents the underlying security</param> public SecurityIdentifier(string symbol, ulong properties, SecurityIdentifier underlying) : this(symbol, properties) { if (symbol == null) { throw new ArgumentNullException(nameof(symbol), "SecurityIdentifier requires a non-null string 'symbol'"); } _symbol = symbol; _properties = properties; // performance: directly call Equals(SecurityIdentifier other), shortcuts Equals(object other) if (!underlying.Equals(Empty)) { _underlying = underlying; } } #endregion #region AddMarket, GetMarketCode, and Generate /// <summary> /// Generates a new <see cref="SecurityIdentifier"/> for an option /// </summary> /// <param name="expiry">The date the option expires</param> /// <param name="underlying">The underlying security's symbol</param> /// <param name="market">The market</param> /// <param name="strike">The strike price</param> /// <param name="optionRight">The option type, call or put</param> /// <param name="optionStyle">The option style, American or European</param> /// <returns>A new <see 
/// cref="SecurityIdentifier"/> representing the specified option security</returns>
public static SecurityIdentifier GenerateOption(DateTime expiry,
    SecurityIdentifier underlying,
    string market,
    decimal strike,
    OptionRight optionRight,
    OptionStyle optionStyle)
{
    // The option's security type is derived from the underlying's type
    // (e.g. equity underlying -> Option, future underlying -> FutureOption)
    return Generate(expiry, underlying.Symbol, QuantConnect.Symbol.GetOptionTypeFromUnderlying(underlying.SecurityType), market, strike, optionRight, optionStyle, underlying);
}

/// <summary>
/// Generates a new <see cref="SecurityIdentifier"/> for a future
/// </summary>
/// <param name="expiry">The date the future expires</param>
/// <param name="symbol">The security's symbol</param>
/// <param name="market">The market</param>
/// <returns>A new <see cref="SecurityIdentifier"/> representing the specified futures security</returns>
public static SecurityIdentifier GenerateFuture(DateTime expiry,
    string symbol,
    string market)
{
    return Generate(expiry, symbol, SecurityType.Future, market);
}

/// <summary>
/// Helper overload that will search the mapfiles to resolve the first date. This implementation
/// uses the configured <see cref="IMapFileProvider"/> via the <see cref="Composer.Instance"/>
/// </summary>
/// <param name="symbol">The symbol as it is known today</param>
/// <param name="market">The market</param>
/// <param name="mapSymbol">Specifies if symbol should be mapped using map file provider</param>
/// <param name="mapFileProvider">Specifies the IMapFileProvider to use for resolving symbols, specify null to load from Composer</param>
/// <param name="mappingResolveDate">The date to use to resolve the map file. Default value is <see cref="DateTime.Today"/></param>
/// <returns>A new <see cref="SecurityIdentifier"/> representing the specified symbol today</returns>
public static SecurityIdentifier GenerateEquity(string symbol, string market, bool mapSymbol = true, IMapFileProvider mapFileProvider = null, DateTime? mappingResolveDate = null)
{
    var firstDate = DefaultDate;
    if (mapSymbol)
    {
        // Resolve the ticker the security traded under on its first trading day,
        // falling back to the lazily-loaded composer instance when no provider is given
        var firstTickerDate = GetFirstTickerAndDate(mapFileProvider ?? MapFileProvider.Value, symbol, market, mappingResolveDate: mappingResolveDate);
        firstDate = firstTickerDate.Item2;
        symbol = firstTickerDate.Item1;
    }

    return GenerateEquity(firstDate, symbol, market);
}

/// <summary>
/// Generates a new <see cref="SecurityIdentifier"/> for an equity
/// </summary>
/// <param name="date">The first date this security traded (in LEAN this is the first date in the map_file</param>
/// <param name="symbol">The ticker symbol this security traded under on the <paramref name="date"/></param>
/// <param name="market">The security's market</param>
/// <returns>A new <see cref="SecurityIdentifier"/> representing the specified equity security</returns>
public static SecurityIdentifier GenerateEquity(DateTime date, string symbol, string market)
{
    return Generate(date, symbol, SecurityType.Equity, market);
}

/// <summary>
/// Generates a new <see cref="SecurityIdentifier"/> for a <see cref="ConstituentsUniverseData"/>.
/// Note that the symbol ticker is case sensitive here.
/// </summary>
/// <param name="symbol">The ticker to use for this constituent identifier</param>
/// <param name="securityType">The security type of this constituent universe</param>
/// <param name="market">The security's market</param>
/// <remarks>This method is special in the sense that it does not force the Symbol to be upper
/// which is required to determine the source file of the constituent
/// <see cref="ConstituentsUniverseData.GetSource(Data.SubscriptionDataConfig,DateTime,bool)"/></remarks>
/// <returns>A new <see cref="SecurityIdentifier"/> representing the specified constituent universe</returns>
public static SecurityIdentifier GenerateConstituentIdentifier(string symbol, SecurityType securityType, string market)
{
    return Generate(DefaultDate, symbol, securityType, market, forceSymbolToUpper: false);
}

/// <summary>
/// Generates the <see cref="Symbol"/> property for <see cref="QuantConnect.SecurityType.Base"/> security identifiers
/// </summary>
/// <param name="dataType">The base data custom data type if namespacing is required, null otherwise</param>
/// <param name="symbol">The ticker symbol</param>
/// <returns>The value used for the security identifier's <see cref="Symbol"/>, in the form
/// "SYMBOL.TypeName" when a data type is provided, otherwise the symbol unchanged</returns>
public static string GenerateBaseSymbol(Type dataType, string symbol)
{
    if (dataType == null)
    {
        return symbol;
    }

    return $"{symbol.ToUpperInvariant()}.{dataType.Name}";
}

/// <summary>
/// Generates a new <see cref="SecurityIdentifier"/> for a custom security with the option of providing the first date
/// </summary>
/// <param name="dataType">The custom data type</param>
/// <param name="symbol">The ticker symbol of this security</param>
/// <param name="market">The security's market</param>
/// <param name="mapSymbol">Whether or not we should map this symbol</param>
/// <param name="date">First date that the security traded on</param>
/// <returns>A new <see cref="SecurityIdentifier"/> representing the specified base security</returns>
public static SecurityIdentifier GenerateBase(Type dataType, string symbol, string market, bool mapSymbol = false, DateTime? date = null)
{
    var firstDate = date ?? DefaultDate;

    if (mapSymbol)
    {
        var firstTickerDate = GetFirstTickerAndDate(MapFileProvider.Value, symbol, market);
        firstDate = firstTickerDate.Item2;
        symbol = firstTickerDate.Item1;
    }

    return Generate(
        firstDate,
        GenerateBaseSymbol(dataType, symbol),
        SecurityType.Base,
        market,
        // symbol casing is preserved; GenerateBaseSymbol already upper-cases when a type is given
        forceSymbolToUpper: false
    );
}

/// <summary>
/// Generates a new <see cref="SecurityIdentifier"/> for a forex pair
/// </summary>
/// <param name="symbol">The currency pair in the format similar to: 'EURUSD'</param>
/// <param name="market">The security's market</param>
/// <returns>A new <see cref="SecurityIdentifier"/> representing the specified forex pair</returns>
public static SecurityIdentifier GenerateForex(string symbol, string market)
{
    return Generate(DefaultDate, symbol, SecurityType.Forex, market);
}

/// <summary>
/// Generates a new <see cref="SecurityIdentifier"/> for a Crypto pair
/// </summary>
/// <param name="symbol">The currency pair in the format similar to: 'EURUSD'</param>
/// <param name="market">The security's market</param>
/// <returns>A new <see cref="SecurityIdentifier"/> representing the specified Crypto pair</returns>
public static SecurityIdentifier GenerateCrypto(string symbol, string market)
{
    return Generate(DefaultDate, symbol, SecurityType.Crypto, market);
}

/// <summary>
/// Generates a new <see cref="SecurityIdentifier"/> for a CFD security
/// </summary>
/// <param name="symbol">The CFD contract symbol</param>
/// <param name="market">The security's market</param>
/// <returns>A new <see cref="SecurityIdentifier"/> representing the specified CFD security</returns>
public static SecurityIdentifier GenerateCfd(string symbol, string market)
{
    return Generate(DefaultDate, symbol, SecurityType.Cfd, market);
}

/// <summary>
/// Generates a new <see cref="SecurityIdentifier"/> for a INDEX security
/// </summary>
/// <param name="symbol">The Index contract symbol</param>
/// <param name="market">The security's market</param>
/// <returns>A new <see cref="SecurityIdentifier"/> representing the specified INDEX security</returns>
public static SecurityIdentifier GenerateIndex(string symbol, string market)
{
    return Generate(DefaultDate, symbol, SecurityType.Index, market);
}

/// <summary>
/// Generic generate method. This method should be used carefully as some parameters are not required and
/// some parameters mean different things for different security types.
/// Packs date, market, strike, option right/style and security type into a single ulong
/// (each field multiplied by its *Offset constant) stored alongside the symbol.
/// </summary>
private static SecurityIdentifier Generate(DateTime date,
    string symbol,
    SecurityType securityType,
    string market,
    decimal strike = 0,
    OptionRight optionRight = 0,
    OptionStyle optionStyle = 0,
    SecurityIdentifier underlying = null,
    bool forceSymbolToUpper = true)
{
    if ((ulong)securityType >= SecurityTypeWidth || securityType < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(securityType), "securityType must be between 0 and 99");
    }
    if ((int)optionRight > 1 || optionRight < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(optionRight), "optionType must be either 0 or 1");
    }

    // normalize input strings
    market = market.ToLowerInvariant();
    symbol = forceSymbolToUpper ? symbol.LazyToUpper() : symbol;

    if (securityType == SecurityType.FutureOption)
    {
        // Futures options tickers might not match, so we need
        // to map the provided future Symbol to the actual future option Symbol.
        symbol = FuturesOptionsSymbolMappings.Map(symbol);
    }

    var marketIdentifier = QuantConnect.Market.Encode(market);
    if (!marketIdentifier.HasValue)
    {
        throw new ArgumentOutOfRangeException(nameof(market), "The specified market wasn't found in the markets lookup. " +
            $"Requested: {market}. You can add markets by calling QuantConnect.Market.AddMarket(string,ushort)"
        );
    }

    // pack each field into its reserved decimal-digit range of the properties value;
    // the date is stored as its OLE Automation day number
    var days = (ulong)date.ToOADate() * DaysOffset;
    var marketCode = (ulong)marketIdentifier * MarketOffset;

    ulong strikeScale;
    var strk = NormalizeStrike(strike, out strikeScale) * StrikeOffset;
    strikeScale *= StrikeScaleOffset;
    var style = (ulong)optionStyle * OptionStyleOffset;
    var putcall = (ulong)optionRight * PutCallOffset;

    var otherData = putcall + days + style + strk + strikeScale + marketCode + (ulong)securityType;

    var result = new SecurityIdentifier(symbol, otherData, underlying ?? Empty);

    // we already have these so lets set them to avoid decoding them again lazily
    switch (securityType)
    {
        case SecurityType.Base:
        case SecurityType.Equity:
        case SecurityType.Future:
            result._date = date;
            break;
        case SecurityType.Option:
        case SecurityType.FutureOption:
            result._date = date;
            result._strikePrice = strike;
            result._optionRight = optionRight;
            result._optionStyle = optionStyle;
            break;
    }
    return result;
}

/// <summary>
/// Resolves the first ticker/date of the security represented by <paramref name="tickerToday"/>
/// </summary>
/// <param name="mapFileProvider">The IMapFileProvider instance used for resolving map files</param>
/// <param name="tickerToday">The security's ticker as it trades today</param>
/// <param name="market">The market the security exists in</param>
/// <param name="mappingResolveDate">The date to use to resolve the map file. Default value is <see cref="DateTime.Today"/></param>
/// <returns>The security's first ticker/date if mapping data available, otherwise, the provided ticker and DefaultDate are returned</returns>
private static Tuple<string, DateTime> GetFirstTickerAndDate(IMapFileProvider mapFileProvider, string tickerToday, string market, DateTime? mappingResolveDate = null)
{
    var resolver = mapFileProvider.Get(market);
    var mapFile = resolver.ResolveMapFile(tickerToday, mappingResolveDate ?? DateTime.Today);

    // if we have mapping data, use the first ticker/date from there, otherwise use provided ticker and DefaultDate
    return mapFile.Any() ? Tuple.Create(mapFile.FirstTicker, mapFile.FirstDate) : Tuple.Create(tickerToday, DefaultDate);
}

/// <summary>
/// The strike is normalized into deci-cents and then a scale factor
/// is also saved to bring it back to un-normalized
/// </summary>
private static ulong NormalizeStrike(decimal strike, out ulong scale)
{
    // keep the original value for the exception message below
    var str = strike;

    if (strike == 0)
    {
        scale = 0;
        return 0;
    }

    // convert strike to default scaling, this keeps the scale always positive
    strike *= StrikeDefaultScaleExpanded;

    // strip trailing decimal zeros into the scale factor so the mantissa is as small as possible
    scale = 0;
    while (strike % 10 == 0)
    {
        strike /= 10;
        scale++;
    }

    // Since our max precision was previously capped at 999999 and it had 20 bits set,
    // we sacrifice a single bit from the strike price to allow for negative strike prices.
    // 475711 is the maximum value that can be represented when setting the negative bit because
    // any number greater than that will cause an overflow in the strike price width and increase
    // its width to 7 digits.
    // The idea behind this formula is to determine what number the overflow would happen at.
    // We get the max number representable in 19 bits, subtract the width to normalize the value,
    // and then get the difference between the 20 bit mask and the 19 bit normalized value to get
    // the max strike price + 1. Subtract 1 to normalize the value, and we have established an exclusive
    // upper bound.
    const ulong negativeMask = 1 << 19;
    const ulong maxStrikePrice = negativeMask - ((negativeMask ^ (negativeMask - 1)) - StrikeWidth) - 1;

    if (strike >= maxStrikePrice || strike <= -(long)maxStrikePrice)
    {
        throw new ArgumentException(Invariant($"The specified strike price\'s precision is too high: {str}"));
    }

    var encodedStrike = (long)strike;
    if (strike < 0)
    {
        // Flip the sign
        encodedStrike = -encodedStrike;

        // Sets the 20th bit equal to 1
        encodedStrike |= 1 << 19;
    }

    return (ulong)encodedStrike;
}

/// <summary>
/// Accurately performs the integer exponentiation
/// </summary>
private static ulong Pow(uint x, int pow)
{
    // don't use Math.Pow(double, double) due to precision issues
    return (ulong)BigInteger.Pow(x, pow);
}

#endregion

#region Parsing routines

/// <summary>
/// Parses the specified string into a <see cref="SecurityIdentifier"/>
/// The string must be a 40 digit number. The first 20 digits must be parseable
/// to a 64 bit unsigned integer and contain ancillary data about the security.
/// The second 20 digits must also be parseable as a 64 bit unsigned integer and
/// contain the symbol encoded from base36, this provides for 12 alpha numeric case
/// insensitive characters.
/// </summary>
/// <param name="value">The string value to be parsed</param>
/// <returns>A new <see cref="SecurityIdentifier"/> instance if the <paramref name="value"/> is able to be parsed.</returns>
/// <exception cref="FormatException">This exception is thrown if the string's length is not exactly 40 characters, or
/// if the components are unable to be parsed as 64 bit unsigned integers</exception>
public static SecurityIdentifier Parse(string value)
{
    Exception exception;
    SecurityIdentifier identifier;
    if (!TryParse(value, out identifier, out exception))
    {
        throw exception;
    }

    return identifier;
}

/// <summary>
/// Attempts to parse the specified <see paramref="value"/> as a <see cref="SecurityIdentifier"/>.
/// </summary>
/// <param name="value">The string value to be parsed</param>
/// <param name="identifier">The result of parsing, when this function returns true, <paramref name="identifier"/>
/// was properly created and reflects the input string, when this function returns false <paramref name="identifier"/>
/// will equal default(SecurityIdentifier)</param>
/// <returns>True on success, otherwise false</returns>
public static bool TryParse(string value, out SecurityIdentifier identifier)
{
    Exception exception;
    return TryParse(value, out identifier, out exception);
}

/// <summary>
/// Helper method impl to be used by parse and tryparse
/// </summary>
private static bool TryParse(string value, out SecurityIdentifier identifier, out Exception exception)
{
    if (!TryParseProperties(value, out exception, out identifier))
    {
        return false;
    }

    return true;
}

// cached single-element separator array to avoid allocating one per Split call
private static readonly char[] SplitSpace = {' '};

/// <summary>
/// Parses the string into its component ulong pieces
/// </summary>
private static bool TryParseProperties(string value, out Exception exception, out SecurityIdentifier identifier)
{
    exception = null;

    // " 0" is the string form of an identifier with an empty symbol and zero properties
    // (see ToString below), so treat it like the empty identifier fast-path
    if (string.IsNullOrWhiteSpace(value) || value == " 0")
    {
        identifier = Empty;
        return true;
    }

    // for performance, we first verify if we already have parsed this SecurityIdentifier
    if (SecurityIdentifierCache.TryGetValue(value, out identifier))
    {
        return true;
    }
    // after calling TryGetValue because if it failed it will set identifier to default
    identifier = Empty;

    try
    {
        // '|' separates a chain of identifiers (e.g. option|underlying); iterate in reverse
        // so each parsed identifier becomes the underlying of the next one out
        var sids = value.Split('|');
        for (var i = sids.Length - 1; i > -1; i--)
        {
            var current = sids[i];
            var parts = current.Split(SplitSpace, StringSplitOptions.RemoveEmptyEntries);
            if (parts.Length != 2)
            {
                exception = new FormatException("The string must be splittable on space into two parts.");
                return false;
            }

            var symbol = parts[0];
            var otherData = parts[1];
            var props = otherData.DecodeBase36();

            // toss the previous in as the underlying, if Empty, ignored by ctor
            identifier = new SecurityIdentifier(symbol, props, identifier);
        }
    }
    catch (Exception error)
    {
        exception = error;
        Log.Error($"SecurityIdentifier.TryParseProperties(): Error parsing SecurityIdentifier: '{value}', Exception: {exception}");
        return false;
    }

    SecurityIdentifierCache.TryAdd(value, identifier);
    return true;
}

/// <summary>
/// Extracts the embedded value from <see cref="_properties"/>
/// </summary>
private ulong ExtractFromProperties(ulong offset, ulong width)
{
    return ExtractFromProperties(offset, width, _properties);
}

/// <summary>
/// Extracts the embedded value from the packed properties value: shifts right by dividing
/// by the field's offset, then masks with modulo by the field's width
/// </summary>
/// <remarks>Static so it can be used in <see cref="SecurityIdentifier"/> initialization</remarks>
private static ulong ExtractFromProperties(ulong offset, ulong width, ulong properties)
{
    return (properties / offset) % width;
}

#endregion

#region Equality members and ToString

/// <summary>Compares the current instance with another object of the same type and returns an integer that
/// indicates whether the current instance precedes, follows, or occurs in the same position in the sort order
/// as the other object. Ordering is the ordinal ordering of the string representations.</summary>
/// <param name="other">An object to compare with this instance.</param>
/// <returns>Less than zero: this precedes <paramref name="other"/>; zero: same position;
/// greater than zero: this follows <paramref name="other"/> (null sorts first).</returns>
public int CompareTo(SecurityIdentifier other)
{
    if (ReferenceEquals(this, other))
    {
        return 0;
    }
    if (ReferenceEquals(null, other))
    {
        return 1;
    }

    return string.Compare(ToString(), other.ToString(), StringComparison.Ordinal);
}

/// <summary>Compares the current instance with another object of the same type and returns an integer that
/// indicates whether the current instance precedes, follows, or occurs in the same position in the sort order
/// as the other object.</summary>
/// <param name="obj">An object to compare with this instance.</param>
/// <returns>Less than zero: this precedes <paramref name="obj"/>; zero: same position;
/// greater than zero: this follows <paramref name="obj"/> (null sorts first).</returns>
/// <exception cref="T:System.ArgumentException"><paramref name="obj"/> is not the same type as this instance.</exception>
public int CompareTo(object obj)
{
    if (ReferenceEquals(null, obj))
    {
        return 1;
    }
    if (ReferenceEquals(this, obj))
    {
        return 0;
    }
    if (!(obj is SecurityIdentifier))
    {
        throw new ArgumentException($"Object must be of type {nameof(SecurityIdentifier)}");
    }

    return CompareTo((SecurityIdentifier) obj);
}

/// <summary>
/// Indicates whether the current object is equal to another object of the same type.
/// Equality requires matching packed properties, symbol and underlying.
/// </summary>
/// <returns>
/// true if the current object is equal to the <paramref name="other"/> parameter; otherwise, false.
/// </returns>
/// <param name="other">An object to compare with this object.</param>
public bool Equals(SecurityIdentifier other)
{
    return ReferenceEquals(this, other) || _properties == other._properties
        && _symbol == other._symbol
        && _underlying == other._underlying;
}

/// <summary>
/// Determines whether the specified <see cref="T:System.Object"/> is equal to the current <see cref="T:System.Object"/>.
/// </summary>
/// <returns>
/// true if the specified object is equal to the current object; otherwise, false.
/// </returns>
/// <param name="obj">The object to compare with the current object.</param>
public override bool Equals(object obj)
{
    if (ReferenceEquals(null, obj)) return false;
    if (obj.GetType() != GetType()) return false;
    return Equals((SecurityIdentifier)obj);
}

/// <summary>
/// Serves as a hash function for a particular type.
/// </summary>
/// <returns>
/// A hash code for the current <see cref="T:System.Object"/>.
/// </returns>
public override int GetHashCode()
{
    // hash is cached after first computation; derived only from symbol and packed properties
    if (!_hashCodeSet)
    {
        _hashCode = unchecked(_symbol.GetHashCode() * 397) ^ _properties.GetHashCode();
        _hashCodeSet = true;
    }
    return _hashCode;
}

/// <summary>
/// Override equals operator
/// </summary>
public static bool operator ==(SecurityIdentifier left, SecurityIdentifier right)
{
    return Equals(left, right);
}

/// <summary>
/// Override not equals operator
/// </summary>
public static bool operator !=(SecurityIdentifier left, SecurityIdentifier right)
{
    return !Equals(left, right);
}

/// <summary>
/// Returns a string that represents the current object in the form
/// "SYMBOL base36props" with "|underlying" appended when an underlying exists.
/// The representation is cached after the first call.
/// </summary>
/// <returns>
/// A string that represents the current object.
/// </returns>
public override string ToString()
{
    if (_stringRep == null)
    {
        var props = _properties.EncodeBase36();
        props = props.Length == 0 ? "0" : props;
        _stringRep = HasUnderlying ? $"{_symbol} {props}|{_underlying}" : $"{_symbol} {props}";
    }

    return _stringRep;
}

#endregion
}
}
//================================================================================= // // Created by: MrYukonC // Created on: 27 OCT 2017 // //================================================================================= // // MIT License // // Copyright (c) 2017 MrYukonC // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. 
//
//=================================================================================

using System;
using System.Collections.Generic;
using System.Threading;

namespace MYC
{
    /// <summary>
    /// Convenience extensions over the <see cref="Bittrex"/> API client: simple
    /// validity checks plus looping auto-sell/auto-buy/withdraw-all helpers.
    /// All helpers write progress/errors to the console and stop on API failure.
    /// </summary>
    public class BittrexExt : Bittrex
    {
        //==========================================================
        // Pass-through constructor; credentials are handled by the base client.
        public BittrexExt( String APIKey, String APISecret )
            : base( APIKey, APISecret )
        {}

        //==========================================================
        // NOTE(review): only exactly-3-character currency codes pass; longer tickers
        // (e.g. 4-letter codes) would be rejected — confirm this restriction is intended.
        public static Boolean GetIsCurrencyValid( String Currency )
        {
            return !String.IsNullOrEmpty( Currency ) && Currency.Length == 3;
        }

        //==========================================================
        // Validates a market string by probing the order book via a live API call.
        // NOTE(review): the Length != 7 check only admits "XXX-YYY" style names built
        // from two 3-character currencies — confirm markets with longer codes are out of scope.
        public static Boolean GetIsMarketValid( Bittrex B, String Market )
        {
            if( String.IsNullOrEmpty( Market ) || Market.Length != 7 )
                return false;

            return B.GetOrderBook( Market, BittrexOrderBook.Type.Buy, 1 ).Success;
        }

        //==========================================================
        // Repeatedly sells the available SellCurrency balance at the current best bid,
        // one order-book-top-sized limit order per iteration, until the balance drops
        // below MinSellThresh or an API call fails. Any unfilled order from the previous
        // iteration is cancelled before placing a new one.
        public void AutoSell( String Market, String SellCurrency, String BuyCurrency, Double MinSellThresh = 0.001 )
        {
            const Int32 SleepMS = 1000;

            if( !GetIsCurrencyValid( SellCurrency ) )
                return;
            if( !GetIsCurrencyValid( BuyCurrency ) )
                return;
            if( !GetIsMarketValid( this, Market ) )
                return;

            //String Market = BuyCurrency.ToUpper() + "-" + SellCurrency.ToUpper();

            String SellOrderUuid = String.Empty;

            while( true )
            {
                BittrexResult<BittrexBalance> SellBalance = base.GetBalance( SellCurrency );
                if( !SellBalance.Success )
                {
                    Console.WriteLine( SellBalance.Message );
                    break;
                }

                if( SellBalance.Result.Available < MinSellThresh )
                {
                    Console.WriteLine( String.Format( "{0:0.00000000} {1} does not meet the specified minimum sell amount threshold of {2} {3}", SellBalance.Result.Available, SellCurrency, MinSellThresh, SellCurrency ) );
                    break;
                }

                Console.WriteLine( String.Format( "{0} balance: {1:0.00000000}", SellCurrency, SellBalance.Result.Available ) );

                // cancel last iteration's (possibly partially filled) order before re-quoting
                if( !String.IsNullOrEmpty( SellOrderUuid ) )
                    base.Cancel( SellOrderUuid );
                SellOrderUuid = String.Empty;

                BittrexResult<BittrexOrderBook> OrderBook = base.GetOrderBook( Market, BittrexOrderBook.Type.Buy, 1 );
                if( !OrderBook.Success )
                {
                    Console.WriteLine( OrderBook.Message );
                    //Thread.Sleep( SleepMS );
                    //continue;
                    break;
                }

                // sell at the best bid, sized to whichever is smaller: our balance or the bid's quantity
                BittrexOrderBook.Entry BestOffer = OrderBook.Result.Buy[ 0 ];
                Double SellAmount = Math.Min( SellBalance.Result.Available, BestOffer.Quantity );

                Console.WriteLine( String.Format( "Attempt to sell {0:0.00000000} {1} @ {2:0.00000000} {3}/{4}", SellAmount, SellCurrency, BestOffer.Rate, SellCurrency, BuyCurrency ) );

                BittrexResult<BittrexUuid> OrderUuid = base.SellLimit( Market, SellAmount, BestOffer.Rate );
                if( !OrderUuid.Success )
                {
                    Console.WriteLine( OrderUuid.Message );
                    //continue;
                    break;
                }

                SellOrderUuid = OrderUuid.Result.Uuid;

                Thread.Sleep( SleepMS );
            }
        }

        //==========================================================
        // Mirror of AutoSell: repeatedly spends the available SellCurrency balance at the
        // current best ask until the balance drops below MinSellThresh or an API call fails.
        public void AutoBuy( String Market, String SellCurrency, String BuyCurrency, Double MinSellThresh = 0.001 )
        {
            const Int32 SleepMS = 1000;

            if( !GetIsCurrencyValid( SellCurrency ) )
                return;
            if( !GetIsCurrencyValid( BuyCurrency ) )
                return;
            if( !GetIsMarketValid( this, Market ) )
                return;

            String BuyOrderUuid = String.Empty;

            while( true )
            {
                BittrexResult<BittrexBalance> SellBalance = base.GetBalance( SellCurrency );
                if( !SellBalance.Success )
                {
                    Console.WriteLine( SellBalance.Message );
                    break;
                }

                if( SellBalance.Result.Available < MinSellThresh )
                {
                    Console.WriteLine( String.Format( "{0:0.00000000} {1} does not meet the specified minimum sell amount threshold of {2} {3}", SellBalance.Result.Available, SellCurrency, MinSellThresh, SellCurrency ) );
                    break;
                }

                Console.WriteLine( String.Format( "{0} balance: {1:0.00000000}", SellCurrency, SellBalance.Result.Available ) );

                // cancel last iteration's (possibly partially filled) order before re-quoting
                if( !String.IsNullOrEmpty( BuyOrderUuid ) )
                    base.Cancel( BuyOrderUuid );
                BuyOrderUuid = String.Empty;

                BittrexResult<BittrexOrderBook> OrderBook = base.GetOrderBook( Market, BittrexOrderBook.Type.Sell, 1 );
                if( !OrderBook.Success )
                {
                    Console.WriteLine( OrderBook.Message );
                    //Thread.Sleep( SleepMS );
                    //continue;
                    break;
                }

                // buy at the best ask, sized to whichever is smaller: the ask's quantity or
                // what our balance can afford at that rate
                BittrexOrderBook.Entry BestOffer = OrderBook.Result.Sell[ 0 ];
                Double BuyAmount = Math.Min( BestOffer.Quantity, SellBalance.Result.Available / BestOffer.Rate );
                // truncate to one decimal place — NOTE(review): presumably to respect the
                // exchange's quantity step size; confirm this matches the market's lot rules
                BuyAmount = Math.Truncate( BuyAmount * 10 ) / 10;

                Console.WriteLine( String.Format( "Attempt to buy {0:0.00000000} {1} @ {2:0.00000000} {3}/{4} for a total of {5:0.000000000000} {6}", BuyAmount, BuyCurrency, BestOffer.Rate, SellCurrency, BuyCurrency, BuyAmount * BestOffer.Rate, SellCurrency ) );

                BittrexResult<BittrexUuid> OrderUuid = base.BuyLimit( Market, BuyAmount, BestOffer.Rate );
                if( !OrderUuid.Success )
                {
                    Console.WriteLine( OrderUuid.Message );
                    //continue;
                    break;
                }

                BuyOrderUuid = OrderUuid.Result.Uuid;

                Thread.Sleep( SleepMS );
            }
        }

        //==========================================================
        // Withdraws the full available balance of Currency to DestAddress,
        // but only when the balance meets MinAmountThresh.
        public void WithdrawAll( String Currency, String DestAddress, Double MinAmountThresh = 0.1 )
        {
            if( !GetIsCurrencyValid( Currency ) )
                return;

            BittrexResult<BittrexBalance> Balance = base.GetBalance( Currency );
            if( !Balance.Success )
            {
                Console.WriteLine( Balance.Message );
                return;
            }

            Console.WriteLine( "{0} balance: {1:0.00000000}", Currency, Balance.Result.Available );

            if( Balance.Result.Available >= MinAmountThresh )
                base.Withdraw( Currency, Balance.Result.Available, DestAddress );
        }
    }
}
// // Copyright (c) 2004-2021 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // #if !MONO && !NETSTANDARD namespace NLog.Internal.FileAppenders { using System; using System.IO; using System.Security; using System.Threading; using NLog.Common; /// <summary> /// Provides a multi process-safe atomic file append while /// keeping the files open. 
/// </summary>
[SecuritySafeCritical]
internal class WindowsMultiProcessFileAppender : BaseMutexFileAppender
{
    public static readonly IFileAppenderFactory TheFactory = new Factory();

    private FileStream _fileStream;

    /// <summary>
    /// Initializes a new instance of the <see cref="WindowsMultiProcessFileAppender" /> class.
    /// The file is opened immediately; on any failure the stream is closed and the exception rethrown.
    /// </summary>
    /// <param name="fileName">Name of the file.</param>
    /// <param name="parameters">The parameters.</param>
    public WindowsMultiProcessFileAppender(string fileName, ICreateFileParameters parameters) : base(fileName, parameters)
    {
        try
        {
            CreateAppendOnlyFile(fileName);
        }
        catch
        {
            CloseFileSafe(ref _fileStream, fileName);
            throw;
        }
    }

    /// <summary>
    /// Creates or opens a file in a special mode, so that writes are automatically
    /// as atomic writes at the file end.
    /// See also "UnixMultiProcessFileAppender" which does a similar job on *nix platforms.
    /// </summary>
    /// <param name="fileName">File to create or open</param>
    private void CreateAppendOnlyFile(string fileName)
    {
        string dir = Path.GetDirectoryName(fileName);
        if (!Directory.Exists(dir))
        {
            if (!CreateFileParameters.CreateDirs)
            {
                throw new DirectoryNotFoundException(dir);
            }
            Directory.CreateDirectory(dir);
        }

        // other processes must be able to read/write concurrently (multi-process append)
        var fileShare = FileShare.ReadWrite;
        if (CreateFileParameters.EnableFileDelete)
            fileShare |= FileShare.Delete;

        try
        {
            bool fileExists = File.Exists(fileName);

            // https://blogs.msdn.microsoft.com/oldnewthing/20151127-00/?p=92211/
            // https://msdn.microsoft.com/en-us/library/ff548289.aspx
            // If only the FILE_APPEND_DATA and SYNCHRONIZE flags are set, the caller can write only to the end of the file,
            // and any offset information about writes to the file is ignored.
            // However, the file will automatically be extended as necessary for this type of write operation.
            _fileStream = new FileStream(
                fileName,
                FileMode.Append,
                System.Security.AccessControl.FileSystemRights.AppendData | System.Security.AccessControl.FileSystemRights.Synchronize, // <- Atomic append
                fileShare,
                1,  // No internal buffer, write directly from user-buffer
                FileOptions.None);

            long filePosition = _fileStream.Position;
            if (fileExists || filePosition > 0)
            {
                CreationTimeUtc = File.GetCreationTimeUtc(FileName);
                if (CreationTimeUtc < DateTime.UtcNow - TimeSpan.FromSeconds(2) && filePosition == 0)
                {
                    // File wasn't created "almost now".
                    // This could mean creation time has tunneled through from another file (see comment below).
                    Thread.Sleep(50);
                    // Having waited for a short amount of time usually means the file creation process has continued
                    // code execution just enough to the above point where it has fixed up the creation time.
                    CreationTimeUtc = File.GetCreationTimeUtc(FileName);
                }
            }
            else
            {
                // We actually created the file and eventually concurrent processes
                // may have opened the same file in between.
                // Only the one process creating the file should adjust the file creation time
                // to avoid being thwarted by Windows' Tunneling capabilities (https://support.microsoft.com/en-us/kb/172190).
                // Unfortunately we can't use the native SetFileTime() to prevent opening the file 2nd time.
                // This would require another desiredAccess flag which would disable the atomic append feature.
                // See also UpdateCreationTime()
                CreationTimeUtc = DateTime.UtcNow;
                File.SetCreationTimeUtc(FileName, CreationTimeUtc);
            }
        }
        catch
        {
            CloseFileSafe(ref _fileStream, fileName);
            throw;
        }
    }

    /// <summary>
    /// Writes the specified bytes. No-op when the stream has already been closed.
    /// </summary>
    /// <param name="bytes">The bytes array.</param>
    /// <param name="offset">The bytes array offset.</param>
    /// <param name="count">The number of bytes.</param>
    public override void Write(byte[] bytes, int offset, int count)
    {
        _fileStream?.Write(bytes, offset, count);
    }

    /// <summary>
    /// Closes this instance. Safe to call multiple times; failures are logged, not thrown.
    /// </summary>
    public override void Close()
    {
        if (_fileStream is null)
        {
            return;
        }

        InternalLogger.Trace("{0}: Closing '{1}'", CreateFileParameters, FileName);
        try
        {
            _fileStream?.Dispose();
        }
        catch (Exception ex)
        {
            InternalLogger.Warn(ex, "{0}: Failed to close file '{1}'", CreateFileParameters, FileName);
            Thread.Sleep(1);    // Artificial delay to avoid hammering a bad file location
        }
        finally
        {
            _fileStream = null;
        }
    }

    /// <summary>
    /// Flushes this instance.
    /// </summary>
    public override void Flush()
    {
        // do nothing, the file is written directly
    }

    public override DateTime? GetFileCreationTimeUtc()
    {
        return CreationTimeUtc;     // File is kept open, so creation time is static
    }

    /// <summary>
    /// Gets the length in bytes of the file associated with the appender.
    /// </summary>
    /// <returns>A long value representing the length of the file in bytes.</returns>
    public override long? GetFileLength()
    {
        return _fileStream?.Length;
    }

    /// <summary>
    /// Factory class.
    /// </summary>
    private class Factory : IFileAppenderFactory
    {
        /// <summary>
        /// Opens the appender for given file name and parameters.
        /// </summary>
        /// <param name="fileName">Name of the file.</param>
        /// <param name="parameters">Creation parameters.</param>
        /// <returns>
        /// Instance of <see cref="BaseFileAppender"/> which can be used to write to the file.
        /// </returns>
        BaseFileAppender IFileAppenderFactory.Open(string fileName, ICreateFileParameters parameters)
        {
            return new WindowsMultiProcessFileAppender(fileName, parameters);
        }
    }
}
}

#endif
using System;
using System.Collections.Generic;
using System.Text;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Drawing.Design;
using System.Windows.Forms;
using System.ComponentModel;
using System.Runtime.InteropServices;
using System.Windows.Forms.VisualStyles;

// Taken from http://www.codeproject.com/Articles/23746/TreeView-with-Columns with minor tweaks
// and fixes for my purposes.

namespace TreelistView
{
    [Designer(typeof(TreeListViewDesigner))]
    public class TreeListView : Control, ISupportInitialize
    {
        /// <summary>
        /// Raised after the focused node changes.
        /// NOTE(review): the args are built with a null TreeNode because TreelistView.Node
        /// does not derive from System.Windows.Forms.TreeNode; handlers must read
        /// SelectedNode/FocusedNode instead of e.Node.
        /// </summary>
        public event TreeViewEventHandler AfterSelect;
        protected virtual void OnAfterSelect(Node node)
        {
            raiseAfterSelect(node);
        }
        protected virtual void raiseAfterSelect(Node node)
        {
            if (AfterSelect != null && node != null)
                AfterSelect(this, new TreeViewEventArgs(null));
        }

        public delegate void NotifyBeforeExpandHandler(Node node, bool isExpanding);
        /// <summary>Raised before a node is expanded or collapsed.</summary>
        public event NotifyBeforeExpandHandler NotifyBeforeExpand;
        public virtual void OnNotifyBeforeExpand(Node node, bool isExpanding)
        {
            raiseNotifyBeforeExpand(node, isExpanding);
        }
        protected virtual void raiseNotifyBeforeExpand(Node node, bool isExpanding)
        {
            if (NotifyBeforeExpand != null)
                NotifyBeforeExpand(node, isExpanding);
        }

        public delegate void NotifyAfterHandler(Node node, bool isExpanding);
        /// <summary>Raised after a node has been expanded or collapsed.</summary>
        public event NotifyAfterHandler NotifyAfterExpand;
        public virtual void OnNotifyAfterExpand(Node node, bool isExpanded)
        {
            raiseNotifyAfterExpand(node, isExpanded);
        }
        protected virtual void raiseNotifyAfterExpand(Node node, bool isExpanded)
        {
            if (NotifyAfterExpand != null)
                NotifyAfterExpand(node, isExpanded);
        }

        public delegate void NodeDoubleClickedHandler(Node node);
        /// <summary>Raised when a node row is double-clicked.</summary>
        public event NodeDoubleClickedHandler NodeDoubleClicked;
        public virtual void OnNodeDoubleClicked(Node node)
        {
            raiseNodeDoubleClicked(node);
        }
        protected virtual void raiseNodeDoubleClicked(Node node)
        {
            if (NodeDoubleClicked != null)
                NodeDoubleClicked(node);
        }

        public delegate void NodeClickedHandler(Node node);
        /// <summary>Raised when the image area of the tree column is clicked.</summary>
        public event NodeClickedHandler NodeClicked;
        public virtual void OnNodeClicked(Node node)
        {
            raiseNodeClicked(node);
        }
        protected virtual void raiseNodeClicked(Node node)
        {
            if (NodeClicked != null)
                NodeClicked(node);
        }

        TreeListViewNodes m_nodes;
        TreeListColumnCollection m_columns;
        TreeList.RowSetting m_rowSetting;
        TreeList.ViewSetting m_viewSetting;
        Color m_GridLineColour = SystemColors.Control;
        Image m_SelectedImage = null;

        /// <summary>The column collection displayed by this control.</summary>
        [Category("Columns")]
        [Browsable(true)]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Content)]
        public TreeListColumnCollection Columns
        {
            get { return m_columns; }
        }

        [Category("Options")]
        [Browsable(true)]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Content)]
        public TreeList.CollumnSetting ColumnsOptions
        {
            get { return m_columns.Options; }
        }

        [Category("Options")]
        [Browsable(true)]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Content)]
        public TreeList.RowSetting RowOptions
        {
            get { return m_rowSetting; }
        }

        [Category("Options")]
        [Browsable(true)]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Content)]
        public TreeList.ViewSetting ViewOptions
        {
            get { return m_viewSetting; }
        }

        [Category("Behavior")]
        [DefaultValue(typeof(bool), "True")]
        public bool MultiSelect
        {
            get { return m_multiSelect; }
            set { m_multiSelect = value; }
        }

        /// <summary>
        /// Index of the column that renders the tree (indent, plus/minus glyph, lines).
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">The index is beyond the current column count.</exception>
        [Category("Behavior")]
        [DefaultValue(typeof(int), "0")]
        public int TreeColumn
        {
            get { return m_treeColumn; }
            set
            {
                // Fix: validate BEFORE assigning. The original wrote m_treeColumn first,
                // so a rejected index remained in effect even though the setter threw.
                // Also pass the parameter name and message separately (the original used
                // the message as the paramName argument).
                if (value >= m_columns.Count)
                    throw new ArgumentOutOfRangeException("value", "Tree column index invalid");
                m_treeColumn = value;
            }
        }

        // Per-node override of the tree column; falls back to the control-wide setting
        // when the node does not specify one (node.TreeColumn < 0).
        private int GetTreeColumn(Node n)
        {
            if (n != null && n.TreeColumn >= 0)
                return n.TreeColumn;
            return m_treeColumn;
        }

        [Category("Behavior")]
        [DefaultValue(typeof(bool), "False")]
        public bool AlwaysDisplayVScroll
        {
            get { return m_vScrollAlways; }
            set { m_vScrollAlways = value; }
        }

        [Category("Behavior")]
        [DefaultValue(typeof(bool), "False")]
        public bool AlwaysDisplayHScroll
        {
            get { return m_hScrollAlways; }
            set { m_hScrollAlways = value; }
        }
[Category("Appearance")]
[DefaultValue(typeof(Image), null)]
public Image SelectedImage
{
    get { return m_SelectedImage; }
    set { m_SelectedImage = value; }
}

[DefaultValue(typeof(Color), "Window")]
public new Color BackColor
{
    get { return base.BackColor; }
    set { base.BackColor = value; }
}

[Category("Appearance")]
[DefaultValue(typeof(Color), "Control")]
public Color GridLineColour
{
    get { return m_GridLineColour; }
    set { m_GridLineColour = value; }
}

//[Browsable(false)]
public TreeListViewNodes Nodes
{
    get { return m_nodes; }
}

public TreeListView()
{
    this.DoubleBuffered = true;
    this.BackColor = SystemColors.Window;
    this.TabStop = true;
    m_rowPainter = new RowPainter();
    m_cellPainter = new CellPainter(this);
    m_nodes = new TreeListViewNodes(this);
    m_rowSetting = new TreeList.RowSetting(this);
    m_viewSetting = new TreeList.ViewSetting(this);
    m_columns = new TreeListColumnCollection(this);
    AddScrollBars();
}

// Recomputes scrollbar visibility, column rectangles and the first visible node.
// Scrollbars and column rects depend on each other, hence the double pass.
public void RecalcLayout()
{
    if (m_firstVisibleNode == null)
        m_firstVisibleNode = Nodes.FirstNode;
    if (Nodes.Count == 0)
        m_firstVisibleNode = null;
    UpdateScrollBars();
    m_columns.RecalcVisibleColumsRect();
    UpdateScrollBars();
    m_columns.RecalcVisibleColumsRect();
    int vscroll = VScrollValue();
    if (vscroll == 0)
        m_firstVisibleNode = Nodes.FirstNode;
    else
        m_firstVisibleNode = NodeCollection.GetNextNode(Nodes.FirstNode, vscroll);
    Invalidate();
}

void AddScrollBars()
{
    // I was not able to get the wanted behavior by using ScrollableControl with AutoScroll enabled.
    // horizontal scrolling is ok to do it by pixels, but for vertical I want to maintain the headers
    // and only scroll the rows.
    // I was not able to manually overwrite the vscroll bar handling to get this behavior, instead I opted for
    // custom implementation of scrollbars
    // to get the 'filler' between hscroll and vscroll I dock scroll + filler in a panel
    m_hScroll = new HScrollBar();
    m_hScroll.Scroll += new ScrollEventHandler(OnHScroll);
    m_hScroll.Dock = DockStyle.Fill;

    m_vScroll = new VScrollBar();
    m_vScroll.Scroll += new ScrollEventHandler(OnVScroll);
    m_vScroll.Dock = DockStyle.Right;

    m_hScrollFiller = new Panel();
    m_hScrollFiller.BackColor = Color.Transparent;
    m_hScrollFiller.Size = new Size(m_vScroll.Width-1, m_hScroll.Height);
    m_hScrollFiller.Dock = DockStyle.Right;

    Controls.Add(m_vScroll);

    m_hScrollPanel = new Panel();
    m_hScrollPanel.Height = m_hScroll.Height;
    m_hScrollPanel.Dock = DockStyle.Bottom;
    m_hScrollPanel.Controls.Add(m_hScroll);
    m_hScrollPanel.Controls.Add(m_hScrollFiller);

    Controls.Add(m_hScrollPanel);

    // try and force handle creation here, as it can fail randomly
    // at runtime with weird side-effects (See github #202).
    bool handlesCreated = false;
    handlesCreated |= m_hScroll.Handle.ToInt64() > 0;
    handlesCreated |= m_vScroll.Handle.ToInt64() > 0;
    handlesCreated |= m_hScrollFiller.Handle.ToInt64() > 0;
    handlesCreated |= m_hScrollPanel.Handle.ToInt64() > 0;

    if (!handlesCreated)
        renderdoc.StaticExports.LogText("Couldn't create any handles!");
}

VScrollBar m_vScroll;
HScrollBar m_hScroll;
Panel m_hScrollFiller;
Panel m_hScrollPanel;
bool m_multiSelect = true;
int m_treeColumn = 0;
bool m_vScrollAlways = false;
bool m_hScrollAlways = false;
// Node currently rendered at the top of the row area (vertical scroll anchor).
Node m_firstVisibleNode = null;
RowPainter m_rowPainter;
CellPainter m_cellPainter;

[Browsable(false)]
public CellPainter CellPainter
{
    get { return m_cellPainter; }
    set { m_cellPainter = value; }
}

TreeListColumn m_resizingColumn;
int m_resizingColumnScrollOffset;
int m_resizingColumnLeft;
TreeListColumn m_movingColumn;
NodesSelection m_nodesSelection = new NodesSelection();
Node m_focusedNode = null;

[Browsable(false)]
public NodesSelection NodesSelection
{
    get { return m_nodesSelection; }
}

// First selected node; falls back to the focused node when the selection is empty.
[Browsable(false)]
public Node SelectedNode
{
    get { return m_nodesSelection.Count == 0 ? FocusedNode : m_nodesSelection[0]; }
}

[Browsable(false)]
[DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
public Node FocusedNode
{
    get { return m_focusedNode; }
    set
    {
        Node curNode = FocusedNode;
        if (object.ReferenceEquals(curNode, value))
            return;
        if (MultiSelect == false)
            NodesSelection.Clear();
        // Invalidate both the previously focused row and the new one, then scroll into view.
        int oldrow = NodeCollection.GetVisibleNodeIndex(curNode);
        int newrow = NodeCollection.GetVisibleNodeIndex(value);
        m_focusedNode = value;
        OnAfterSelect(value);
        InvalidateRow(oldrow);
        InvalidateRow(newrow);
        EnsureVisible(m_focusedNode);
    }
}

// Scrolls just far enough that 'node' falls inside the visible row window.
public void EnsureVisible(Node node)
{
    int screenvisible = MaxVisibleRows() - 1;
    int visibleIndex = NodeCollection.GetVisibleNodeIndex(node);
    if (visibleIndex < VScrollValue())
        SetVScrollValue(visibleIndex);
    if (visibleIndex > VScrollValue() + screenvisible)
        SetVScrollValue(visibleIndex - screenvisible);
}

// Maps a client-area point to the node whose row it falls on, or null.
public Node CalcHitNode(Point mousepoint)
{
    if (!ClientRectangle.Contains(mousepoint))
        return null;
    int hitrow = CalcHitRow(mousepoint);
    if (hitrow < 0)
        return null;
    return NodeCollection.GetNextNode(m_firstVisibleNode, hitrow);
}

public Node GetHitNode()
{
    return CalcHitNode(PointToClient(Control.MousePosition));
}

public TreelistView.HitInfo CalcColumnHit(Point mousepoint)
{
    return Columns.CalcHitInfo(mousepoint, HScrollValue());
}

public bool HitTestScrollbar(Point mousepoint)
{
    if (m_hScroll.Visible && mousepoint.Y >= ClientRectangle.Height - m_hScroll.Height)
        return true;
    return false;
}

protected override void OnSizeChanged(EventArgs e)
{
    base.OnSizeChanged(e);
    if (ClientRectangle.Width > 0 && ClientRectangle.Height > 0)
    {
        Columns.RecalcVisibleColumsRect();
        UpdateScrollBars();
        Columns.RecalcVisibleColumsRect();
    }
}

protected override void OnVisibleChanged(EventArgs e)
{
    base.OnVisibleChanged(e);
    RecalcLayout();
}

// Hook for subclasses, invoked on right-click before any context menu is shown.
protected virtual void BeforeShowContextMenu()
{
}

protected void InvalidateRow(int absoluteRowIndex)
{
    int visibleRowIndex = absoluteRowIndex - VScrollValue();
    Rectangle r = CalcRowRectangle(visibleRowIndex);
    if (r != Rectangle.Empty)
    {
        r.Inflate(1,1);
        Invalidate(r);
    }
}

// Moves the first-visible-node anchor by the scroll delta.
void OnVScroll(object sender, ScrollEventArgs e)
{
    int diff = e.NewValue - e.OldValue;
    //assumedScrollPos += diff;
    if (e.NewValue == 0)
    {
        m_firstVisibleNode = Nodes.FirstNode;
        diff = 0;
    }
    m_firstVisibleNode = NodeCollection.GetNextNode(m_firstVisibleNode, diff);
    Invalidate();
}

void OnHScroll(object sender, ScrollEventArgs e)
{
    Invalidate();
}

// Clamps and applies a vertical scroll position, faking the Scroll event.
public void SetVScrollValue(int value)
{
    if (value < 0)
        value = 0;
    int max = m_vScroll.Maximum - m_vScroll.LargeChange + 1;
    if (value > max)
        value = max;
    if ((value >= 0 && value <= max) && (value != m_vScroll.Value))
    {
        ScrollEventArgs e = new ScrollEventArgs(ScrollEventType.ThumbPosition, m_vScroll.Value, value, ScrollOrientation.VerticalScroll);
        // setting the scroll value does not cause a Scroll event
        m_vScroll.Value = value;
        // so we have to fake it
        OnVScroll(m_vScroll, e);
    }
}

public int VScrollValue()
{
    if (m_vScroll.Visible == false)
        return 0;
    return m_vScroll.Value;
}

int HScrollValue()
{
    if (m_hScroll.Visible == false)
        return 0;
    return m_hScroll.Value;
}

// Recomputes range/visibility of both scrollbars from the current node and column totals.
void UpdateScrollBars()
{
    if (ClientRectangle.Width < 0)
        return;
    int maxvisiblerows = MaxVisibleRows();
    int totalrows = Nodes.VisibleNodeCount;
    m_vScroll.SmallChange = 1;
    m_vScroll.LargeChange = Math.Max(1, maxvisiblerows);
    m_vScroll.Enabled = true;
    m_vScroll.Minimum = 0;
    m_vScroll.Maximum = Math.Max(1,totalrows - 1);
    if (maxvisiblerows >= totalrows)
    {
        // Everything fits: hide (or show disabled, when AlwaysDisplayVScroll).
        m_vScroll.Visible = false;
        SetVScrollValue(0);
        if (m_vScrollAlways)
        {
            m_vScroll.Visible = true;
            m_vScroll.Enabled = false;
        }
    }
    else
    {
        m_vScroll.Visible = true;
        int maxscrollvalue = m_vScroll.Maximum - m_vScroll.LargeChange;
        if (maxscrollvalue < m_vScroll.Value)
            SetVScrollValue(maxscrollvalue);
    }
    m_hScroll.Enabled = true;
    if (ClientRectangle.Width > MinWidth())
    {
        m_hScrollPanel.Visible = false;
        m_hScroll.Value = 0;
        if (m_hScrollAlways)
        {
            m_hScroll.Enabled = false;
            m_hScrollPanel.Visible = true;
            m_hScroll.Minimum = 0;
            m_hScroll.Maximum = 0;
            m_hScroll.SmallChange = 1;
            m_hScroll.LargeChange = 1;
            m_hScrollFiller.Visible = m_vScroll.Visible;
        }
    }
    else
    {
        m_hScroll.Minimum = 0;
        m_hScroll.Maximum = Math.Max(1, MinWidth());
        m_hScroll.SmallChange = 5;
        m_hScroll.LargeChange = Math.Max(1, ClientRectangle.Width);
        m_hScrollFiller.Visible = m_vScroll.Visible;
        m_hScrollPanel.Visible = true;
    }
}

// Absolute index of the row the mouse is hovering over (-1 = none); used for hot tracking.
int m_hotrow = -1;

int CalcHitRow(Point mousepoint)
{
    if (mousepoint.Y <= Columns.Options.HeaderHeight)
        return -1;
    return (mousepoint.Y - Columns.Options.HeaderHeight) / RowOptions.ItemHeight;
}

int VisibleRowToYPoint(int visibleRowIndex)
{
    return Columns.Options.HeaderHeight + (visibleRowIndex * RowOptions.ItemHeight);
}

Rectangle CalcRowRectangle(int visibleRowIndex)
{
    Rectangle r = ClientRectangle;
    r.Y = VisibleRowToYPoint(visibleRowIndex);
    if (r.Top < Columns.Options.HeaderHeight || r.Top > ClientRectangle.Height)
        return Rectangle.Empty;
    r.Height = RowOptions.ItemHeight;
    return r;
}

// Applies explorer-style selection semantics for a click on 'clickedNode'.
// NOTE(review): this reads Control.ModifierKeys directly and ignores the
// 'modifierKeys' parameter it is given - verify which source is intended.
void MultiSelectAdd(Node clickedNode, Keys modifierKeys)
{
    if (Control.ModifierKeys == Keys.None)
    {
        // Plain click: replace the whole selection with the clicked node.
        foreach (Node node in NodesSelection)
        {
            int newrow = NodeCollection.GetVisibleNodeIndex(node);
            InvalidateRow(newrow);
        }
        NodesSelection.Clear();
        NodesSelection.Add(clickedNode);
    }
    if (Control.ModifierKeys == Keys.Shift)
    {
        // Shift: select the contiguous range from the selection anchor to the clicked node.
        if (NodesSelection.Count == 0)
            NodesSelection.Add(clickedNode);
        else
        {
            int startrow = NodeCollection.GetVisibleNodeIndex(NodesSelection[0]);
            int currow = NodeCollection.GetVisibleNodeIndex(clickedNode);
            if (currow > startrow)
            {
                Node startingNode = NodesSelection[0];
                NodesSelection.Clear();
                foreach (Node node in NodeCollection.ForwardNodeIterator(startingNode, clickedNode, true))
                    NodesSelection.Add(node);
                Invalidate();
            }
            if (currow < startrow)
            {
                Node startingNode = NodesSelection[0];
                NodesSelection.Clear();
                foreach (Node node in NodeCollection.ReverseNodeIterator(startingNode, clickedNode, true))
                    NodesSelection.Add(node);
                Invalidate();
            }
        }
    }
    if (Control.ModifierKeys == Keys.Control)
    {
        // Ctrl: toggle membership of the clicked node.
        if (NodesSelection.Contains(clickedNode))
            NodesSelection.Remove(clickedNode);
        else
            NodesSelection.Add(clickedNode);
    }
    InvalidateRow(NodeCollection.GetVisibleNodeIndex(clickedNode));
    FocusedNode = clickedNode;
}

internal event MouseEventHandler AfterResizingColumn;

protected override void OnMouseClick(MouseEventArgs e)
{
    if (e.Button == MouseButtons.Left)
    {
        Point mousePoint = new Point(e.X, e.Y);
        Node clickedNode = CalcHitNode(mousePoint);
        if (clickedNode != null && Columns.Count > 0)
        {
            int clickedRow = CalcHitRow(mousePoint);
            Rectangle glyphRect = Rectangle.Empty;
            int treeColumn = GetTreeColumn(clickedNode);
            if (treeColumn >= 0)
                glyphRect = GetPlusMinusRectangle(clickedNode, Columns[treeColumn], clickedRow);
            // Toggle expansion when the plus/minus glyph itself was hit.
            if (clickedNode.HasChildren && glyphRect != Rectangle.Empty && glyphRect.Contains(mousePoint))
                clickedNode.Expanded = !clickedNode.Expanded;

            var columnHit = CalcColumnHit(mousePoint);

            // Clicks on the tree column of a node that has an image raise NodeClicked.
            if (glyphRect == Rectangle.Empty && columnHit.Column != null && columnHit.Column.Index == treeColumn && GetNodeBitmap(clickedNode) != null)
            {
                OnNodeClicked(clickedNode);
            }

            if (MultiSelect)
            {
                MultiSelectAdd(clickedNode, Control.ModifierKeys);
            }
            else
                FocusedNode = clickedNode;
        }
        /* else
        {
            FocusedNode = null;
            NodesSelection.Clear();
        }*/
    }
    base.OnMouseClick(e);
}

protected override void OnMouseMove(MouseEventArgs e)
{
    base.OnMouseMove(e);

    // Dragging a column header to a new position.
    if (m_movingColumn != null)
    {
        m_movingColumn.Moving = true;
        Cursor = Cursors.SizeAll;

        var idx = m_movingColumn.VisibleIndex;

        // Swap with the neighbour once the mouse passes 3/4 into it.
        if (idx + 1 < Columns.VisibleColumns.Length)
        {
            var nextcol = Columns.VisibleColumns[idx + 1];
            if (nextcol.CalculatedRect.X + (nextcol.CalculatedRect.Width * 3) / 4 < e.X)
            {
                Columns.SetVisibleIndex(m_movingColumn, idx + 1);
            }
        }

        if (idx - 1 >= 0)
        {
            var prevcol = Columns.VisibleColumns[idx - 1];
            if (prevcol.CalculatedRect.Right - (prevcol.CalculatedRect.Width * 3) / 4 > e.X)
            {
                Columns.SetVisibleIndex(m_movingColumn, idx - 1);
            }
        }

        Columns.RecalcVisibleColumsRect(true);
        Invalidate();
        return;
    }

    // Dragging a column divider to resize.
    if (m_resizingColumn != null)
    {
        // if we've clicked on an autosize column, actually resize the next one along.
        if (m_resizingColumn.AutoSize)
        {
            if (Columns.VisibleColumns.Length > m_resizingColumn.VisibleIndex + 1)
            {
                TreeListColumn realResizeColumn = Columns.VisibleColumns[m_resizingColumn.VisibleIndex + 1];

                int right = realResizeColumn.CalculatedRect.Right - m_resizingColumnScrollOffset;
                int width = right - e.X;
                if (width < 10)
                    width = 10;

                bool resize = true;

                // Refuse to grow when a neighbouring column is already squeezed to minimum.
                if (Columns.VisibleColumns.Length > realResizeColumn.VisibleIndex + 1)
                    if (Columns.VisibleColumns[realResizeColumn.VisibleIndex + 1].CalculatedRect.Width <= 10 && m_resizingColumn.Width < width)
                        resize = false;
                if (realResizeColumn.VisibleIndex > 1)
                    if (Columns.VisibleColumns[realResizeColumn.VisibleIndex - 1].CalculatedRect.Width <= 10 && m_resizingColumn.Width < width)
                        resize = false;

                if (resize)
                {
                    realResizeColumn.Width = width;
                }
            }
        }
        else
        {
            int left = m_resizingColumnLeft;
            int width = e.X - left;
            if (width < 10)
                width = 10;

            bool resize = true;

            if (Columns.VisibleColumns.Length > m_resizingColumn.VisibleIndex + 1)
                if (Columns.VisibleColumns[m_resizingColumn.VisibleIndex + 1].CalculatedRect.Width <= 10 && m_resizingColumn.Width < width)
                    resize = false;
            // NOTE(review): this guard checks internalIndex where the parallel branch
            // above checks VisibleIndex - looks inconsistent, verify which is intended.
            if (m_resizingColumn.internalIndex > 1)
                if (Columns.VisibleColumns[m_resizingColumn.VisibleIndex - 1].CalculatedRect.Width <= 10 && m_resizingColumn.Width < width)
                    resize = false;

            if (resize)
                m_resizingColumn.Width = width;
        }
        Columns.RecalcVisibleColumsRect(true);
        Invalidate();
        return;
    }

    // Plain hover: update cursor, hot column and hot row tracking.
    TreeListColumn hotcol = null;
    TreelistView.HitInfo info = Columns.CalcHitInfo(new Point(e.X, e.Y), HScrollValue());
    if ((int)(info.HitType & HitInfo.eHitType.kColumnHeader) > 0)
        hotcol = info.Column;

    Node clickedNode = CalcHitNode(new Point(e.X, e.Y));

    if ((int)(info.HitType & HitInfo.eHitType.kColumnHeaderResize) > 0)
        Cursor = Cursors.VSplit;
    else if (info.Column != null && info.Column.Index == GetTreeColumn(clickedNode) && GetNodeBitmap(clickedNode) != null && m_viewSetting.HoverHandTreeColumn)
        Cursor = Cursors.Hand;
    else
        Cursor = Cursors.Arrow;

    // Repaint when the hovered node swaps between its normal and hover image.
    if (GetHoverNodeBitmap(clickedNode) != null && GetNodeBitmap(clickedNode) != GetHoverNodeBitmap(clickedNode))
        Invalidate();

    SetHotColumn(hotcol, true);

    int vScrollOffset = VScrollValue();

    int newhotrow = -1;
    if (hotcol == null)
    {
        int row = (e.Y - Columns.Options.HeaderHeight) / RowOptions.ItemHeight;
        newhotrow = row + vScrollOffset;
    }
    if (newhotrow != m_hotrow)
    {
        InvalidateRow(m_hotrow);
        m_hotrow = newhotrow;
        InvalidateRow(m_hotrow);
    }
}

protected override void OnMouseLeave(EventArgs e)
{
    base.OnMouseLeave(e);
    SetHotColumn(null, false);
    Cursor = Cursors.Arrow;
    Invalidate();
}

protected override void OnMouseWheel(MouseEventArgs e)
{
    // e.Delta is in multiples of 120 per wheel notch (WHEEL_DELTA).
    int value = m_vScroll.Value - (e.Delta * SystemInformation.MouseWheelScrollLines / 120);
    if (m_vScroll.Visible)
        SetVScrollValue(value);
    base.OnMouseWheel(e);
}

protected override void OnMouseDown(MouseEventArgs e)
{
    this.Focus();
    if (e.Button == MouseButtons.Right)
    {
        Point mousePoint = new Point(e.X, e.Y);
        Node clickedNode = CalcHitNode(mousePoint);
        if (clickedNode != null)
        {
            // if multi select the selection is cleared if clicked node is not in selection
            if (MultiSelect)
            {
                if (NodesSelection.Contains(clickedNode) == false)
                    MultiSelectAdd(clickedNode, Control.ModifierKeys);
            }
            FocusedNode = clickedNode;
            Invalidate();
        }
        BeforeShowContextMenu();
    }
    if (e.Button == MouseButtons.Left)
    {
        // A press on a divider or header starts a resize/move drag that OnMouseMove continues.
        TreelistView.HitInfo info = Columns.CalcHitInfo(new Point(e.X, e.Y), HScrollValue());
        if ((int)(info.HitType & HitInfo.eHitType.kColumnHeaderResize) > 0)
        {
            m_resizingColumn = info.Column;
            m_resizingColumnScrollOffset = HScrollValue();
            m_resizingColumnLeft = m_resizingColumn.CalculatedRect.Left - m_resizingColumnScrollOffset;
            return;
        }
        if ((int)(info.HitType & HitInfo.eHitType.kColumnHeader) > 0 && m_viewSetting.UserRearrangeableColumns)
        {
            m_movingColumn = info.Column;
            return;
        }
    }
    base.OnMouseDown(e);
}

protected override void OnMouseUp(MouseEventArgs e)
{
    // End any in-progress column resize/move drag.
    if (m_resizingColumn != null)
    {
        m_resizingColumn = null;
        Columns.RecalcVisibleColumsRect();
        UpdateScrollBars();
        Invalidate();
        if (AfterResizingColumn != null)
            AfterResizingColumn(this, e);
    }
    if (m_movingColumn != null)
    {
        m_movingColumn.Moving = false;
        m_movingColumn = null;
        Cursor = Cursors.Arrow;
        Columns.RecalcVisibleColumsRect();
        UpdateScrollBars();
        Invalidate();
    }
    base.OnMouseUp(e);
}

protected override void OnMouseDoubleClick(MouseEventArgs e)
{
    base.OnMouseDoubleClick(e);
    Point mousePoint = new Point(e.X, e.Y);
    Node clickedNode = CalcHitNode(mousePoint);
    if (clickedNode != null && clickedNode.HasChildren)
        clickedNode.Expanded = !clickedNode.Expanded;
    if (clickedNode != null)
        OnNodeDoubleClicked(clickedNode);
}

// Somewhere I read that it could be risky to do any handling in GetFocus / LostFocus.
// The reason is that it will throw exception incase you make a call which recreates the windows handle (e.g.
// change the border style. Instead one should always use OnEnter and OnLeave instead. That is why I'm using
// OnEnter and OnLeave instead, even though I'm only doing Invalidate.
protected override void OnEnter(EventArgs e)
{
    base.OnEnter(e);
    Invalidate();
}
protected override void OnLeave(EventArgs e)
{
    base.OnLeave(e);
    Invalidate();
}
protected override void OnLostFocus(EventArgs e)
{
    base.OnLostFocus(e);
    Invalidate();
}

// Tracks which header is hot (hovered) and invalidates old/new header rects.
void SetHotColumn(TreeListColumn col, bool ishot)
{
    int scrolloffset = HScrollValue();
    if (col != m_hotColumn)
    {
        if (m_hotColumn != null)
        {
            m_hotColumn.ishot = false;
            Rectangle r = m_hotColumn.CalculatedRect;
            r.X -= scrolloffset;
            Invalidate(r);
        }
        m_hotColumn = col;
        if (m_hotColumn != null)
        {
            m_hotColumn.ishot = ishot;
            Rectangle r = m_hotColumn.CalculatedRect;
            r.X -= scrolloffset;
            Invalidate(r);
        }
    }
}

internal int RowHeaderWidth()
{
    if (RowOptions.ShowHeader)
        return RowOptions.HeaderWidth;
    return 0;
}

int MinWidth()
{
    return RowHeaderWidth() + Columns.ColumnsWidth;
}

// Number of whole rows that fit in the client area; 'remainder' is the leftover
// pixel height of a partially visible row.
int MaxVisibleRows(out int remainder)
{
    remainder = 0;
    if (ClientRectangle.Height < 0)
        return 0;
    int height = ClientRectangle.Height - Columns.Options.HeaderHeight; // NOTE(review): unused local
    //return (int) Math.Ceiling((double)(ClientRectangle.Height - Columns.HeaderHeight) / (double)Nodes.ItemHeight);
    remainder = (ClientRectangle.Height - Columns.Options.HeaderHeight) % RowOptions.ItemHeight ;
    return Math.Max(0, (ClientRectangle.Height - Columns.Options.HeaderHeight) / RowOptions.ItemHeight);
}

int MaxVisibleRows()
{
    int unused;
    return MaxVisibleRows(out unused);
}

public void BeginUpdate()
{
    m_nodes.BeginUpdate();
}

public void EndUpdate()
{
    m_nodes.EndUpdate();
    RecalcLayout();
    Invalidate();
}

// Applies ViewOptions.BorderStyle via raw window style bits.
protected override CreateParams CreateParams
{
    get
    {
        const int WS_BORDER = 0x00800000;
        const int WS_EX_CLIENTEDGE = 0x00000200;

        CreateParams p = base.CreateParams;
        p.Style &= ~(int)WS_BORDER;
        p.ExStyle &= ~(int)WS_EX_CLIENTEDGE;
        switch (ViewOptions.BorderStyle)
        {
            case BorderStyle.Fixed3D:
                p.ExStyle |= (int)WS_EX_CLIENTEDGE;
                break;
            case BorderStyle.FixedSingle:
                p.Style |= (int)WS_BORDER;
                break;
            default:
                break;
        }
        return p;
    }
}

TreeListColumn m_hotColumn = null;

// Design-time placeholder cell value: "<temp>" followed by the node's index path.
object GetDataDesignMode(Node node, TreeListColumn column)
{
    string id = string.Empty;
    while (node != null)
    {
        id = node.Owner.GetNodeIndex(node).ToString() + ":" + id;
        node = node.Parent;
    }
    return "<temp>" + id;
}

protected virtual object GetData(Node node, TreeListColumn column)
{
    if (node[column.Index] != null)
        return node[column.Index];
    return null;
}

// Client rectangle excluding the custom scrollbars when they are visible.
public new Rectangle ClientRectangle
{
    get
    {
        Rectangle r = base.ClientRectangle;
        if (m_vScroll.Visible)
            r.Width -= m_vScroll.Width+1;
        if (m_hScroll.Visible)
            r.Height -= m_hScroll.Height+1;
        return r;
    }
}

protected virtual TreelistView.TreeList.TextFormatting GetFormatting(TreelistView.Node node, TreelistView.TreeListColumn column)
{
    return column.CellFormat;
}

protected virtual void PaintCellPlusMinus(Graphics dc, Rectangle glyphRect, Node node, TreeListColumn column)
{
    CellPainter.PaintCellPlusMinus(dc, glyphRect, node, column, GetFormatting(node, column));
}

protected virtual void PaintCellBackground(Graphics dc, Rectangle cellRect, Node node, TreeListColumn column)
{
    if (this.DesignMode)
        CellPainter.PaintCellBackground(dc, cellRect, node, column, GetFormatting(node, column), GetDataDesignMode(node, column));
    else
        CellPainter.PaintCellBackground(dc, cellRect, node, column, GetFormatting(node, column), GetData(node, column));
}

protected virtual void PaintCellText(Graphics dc, Rectangle cellRect, Node node, TreeListColumn column)
{
    if (this.DesignMode)
        CellPainter.PaintCellText(dc, cellRect, node, column, GetFormatting(node, column), GetDataDesignMode(node, column));
    else
        CellPainter.PaintCellText(dc, cellRect, node, column, GetFormatting(node, column), GetData(node, column));
}

protected virtual void PaintImage(Graphics dc, Rectangle imageRect, Node node, Image image)
{
    if (image != null)
        dc.DrawImage(image, imageRect.X, imageRect.Y, imageRect.Width, imageRect.Height);
}

// Paints one row: selection background, then per-column cells. The tree column
// additionally gets indent, tree lines, plus/minus glyph and node images.
protected virtual void PaintNode(Graphics dc, Rectangle rowRect, Node node, TreeListColumn[] visibleColumns, int visibleRowIndex)
{
    CellPainter.DrawSelectionBackground(dc, rowRect, node);
    foreach (TreeListColumn col in visibleColumns)
    {
        if (col.CalculatedRect.Right - HScrollValue() < RowHeaderWidth())
            continue;

        Rectangle cellRect = rowRect;
        cellRect.X = col.CalculatedRect.X - HScrollValue();
        cellRect.Width = col.CalculatedRect.Width;
        dc.SetClip(cellRect);

        if (col.Index == GetTreeColumn(node))
        {
            int lineindet = 10;
            // add left margin
            cellRect.X += Columns.Options.LeftMargin;
            cellRect.Width -= Columns.Options.LeftMargin;

            // add indent size
            int indentSize = GetIndentSize(node) + 5;
            cellRect.X += indentSize;
            cellRect.Width -= indentSize;

            // save rectangle for line drawing below
            Rectangle lineCellRect = cellRect;

            cellRect.X += lineindet;
            cellRect.Width -= lineindet;

            Rectangle glyphRect = GetPlusMinusRectangle(node, col, visibleRowIndex);
            Rectangle plusminusRect = glyphRect;

            // Reclaim the glyph gutter when neither lines nor glyph padding are wanted.
            if (!ViewOptions.ShowLine && (!ViewOptions.ShowPlusMinus || (!ViewOptions.PadForPlusMinus && plusminusRect == Rectangle.Empty)))
            {
                cellRect.X -= (lineindet + 5);
                cellRect.Width += (lineindet + 5);
            }

            Point mousePoint = PointToClient(Cursor.Position);
            Node hoverNode = CalcHitNode(mousePoint);

            // Hovered node shows its hover image instead of the normal one.
            Image icon = hoverNode != null && hoverNode == node ? GetHoverNodeBitmap(node) : GetNodeBitmap(node);

            PaintCellBackground(dc, cellRect, node, col);

            if (ViewOptions.ShowLine)
                PaintLines(dc, lineCellRect, node);

            if (SelectedImage != null && (NodesSelection.Contains(node) || FocusedNode == node))
            {
                // center the image vertically
                glyphRect.Y = cellRect.Y + (cellRect.Height / 2) - (SelectedImage.Height / 2);
                glyphRect.X = cellRect.X;
                glyphRect.Width = SelectedImage.Width;
                glyphRect.Height = SelectedImage.Height;

                PaintImage(dc, glyphRect, node, SelectedImage);
                cellRect.X += (glyphRect.Width + 2);
                cellRect.Width -= (glyphRect.Width + 2);
            }

            if (icon != null)
            {
                // center the image vertically
                glyphRect.Y = cellRect.Y + (cellRect.Height / 2) - (icon.Height / 2);
                glyphRect.X = cellRect.X;
                glyphRect.Width = icon.Width;
                glyphRect.Height = icon.Height;

                PaintImage(dc, glyphRect, node, icon);
                cellRect.X += (glyphRect.Width + 2);
                cellRect.Width -= (glyphRect.Width + 2);
            }

            PaintCellText(dc, cellRect, node, col);

            if (plusminusRect != Rectangle.Empty && ViewOptions.ShowPlusMinus)
                PaintCellPlusMinus(dc, plusminusRect, node, col);
        }
        else
        {
            PaintCellBackground(dc, cellRect, node, col);
            PaintCellText(dc, cellRect, node, col);
        }
        dc.ResetClip();
    }
}

// Draws the dotted tree lines for a node and the vertical lines of its ancestors.
protected virtual void PaintLines(Graphics dc, Rectangle cellRect, Node node)
{
    Pen pen = new Pen(Color.Gray);
    pen.DashStyle = System.Drawing.Drawing2D.DashStyle.Dot;

    int halfPoint = cellRect.Top + (cellRect.Height / 2);
    // line should start from center at first root node
    if (node.Parent == null && node.PrevSibling == null)
    {
        cellRect.Y += (cellRect.Height / 2);
        cellRect.Height -= (cellRect.Height / 2);
    }
    if (node.NextSibling != null || node.HasChildren) // draw full height line
        dc.DrawLine(pen, cellRect.X, cellRect.Top, cellRect.X, cellRect.Bottom);
    else
        dc.DrawLine(pen, cellRect.X, cellRect.Top, cellRect.X, halfPoint);
    dc.DrawLine(pen, cellRect.X, halfPoint, cellRect.X + 10, halfPoint);

    // now draw the lines for the parents sibling
    Node parent = node.Parent;
    while (parent != null)
    {
        Pen linePen = null;
        if (parent.TreeLineColor != Color.Transparent || parent.TreeLineWidth > 0.0f)
            linePen = new Pen(parent.TreeLineColor, parent.TreeLineWidth);

        cellRect.X -= ViewOptions.Indent;
        dc.DrawLine(linePen != null ? linePen : pen, cellRect.X, cellRect.Top, cellRect.X, cellRect.Bottom);
        parent = parent.Parent;

        if (linePen != null)
            linePen.Dispose();
    }
    pen.Dispose();
}

protected virtual int GetIndentSize(Node node)
{
    int indent = 0;
    Node parent = node.Parent;
    while (parent != null)
    {
        indent += ViewOptions.Indent;
        parent = parent.Parent;
    }
    return indent;
}

// Rectangle of the expand/collapse glyph, or Empty for leaf/off-screen nodes.
protected virtual Rectangle GetPlusMinusRectangle(Node node, TreeListColumn firstColumn, int visibleRowIndex)
{
    if (node.HasChildren == false)
        return Rectangle.Empty;
    int hScrollOffset = HScrollValue();
    if (firstColumn.CalculatedRect.Right - hScrollOffset < RowHeaderWidth())
        return Rectangle.Empty;
    //System.Diagnostics.Debug.Assert(firstColumn.VisibleIndex == 0);

    Rectangle glyphRect = firstColumn.CalculatedRect;
    glyphRect.X -= hScrollOffset;
    glyphRect.X += GetIndentSize(node);
    glyphRect.X += Columns.Options.LeftMargin;
    glyphRect.Width = 10;
    glyphRect.Y = VisibleRowToYPoint(visibleRowIndex);
    glyphRect.Height = RowOptions.ItemHeight;
    return glyphRect;
}

protected virtual Image GetNodeBitmap(Node node)
{
    if (node != null)
        return node.Image;
    return null;
}

protected virtual Image GetHoverNodeBitmap(Node node)
{
    if (node != null)
        return node.HoverImage;
    return null;
}

protected override void OnPaint(PaintEventArgs e)
{
    base.OnPaint(e);

    int hScrollOffset = HScrollValue();
    int remainder = 0;
    int visiblerows = MaxVisibleRows(out remainder);
    if (remainder > 0)
        visiblerows++; // include the partially visible bottom row

    bool drawColumnHeaders = true;
    // draw columns
    if (drawColumnHeaders)
    {
        Rectangle headerRect = e.ClipRectangle;
        Columns.Draw(e.Graphics, headerRect, hScrollOffset);
    }

    int visibleRowIndex = 0;
    TreeListColumn[] visibleColumns = this.Columns.VisibleColumns;
    int columnsWidth = Columns.ColumnsWidth;
    foreach (Node node in NodeCollection.ForwardNodeIterator(m_firstVisibleNode, true))
    {
        Rectangle rowRect = CalcRowRectangle(visibleRowIndex);
        if (rowRect == Rectangle.Empty || rowRect.Bottom <= e.ClipRectangle.Top || rowRect.Top >= e.ClipRectangle.Bottom)
        {
            if (visibleRowIndex > visiblerows)
                break;
            visibleRowIndex++;
            continue;
        }
        rowRect.X = RowHeaderWidth() - hScrollOffset;
        rowRect.Width = columnsWidth;

        // draw the current node
        PaintNode(e.Graphics, rowRect, node, visibleColumns, visibleRowIndex);

        // draw row header for current node
        Rectangle headerRect = rowRect;
        headerRect.X = 0;
        headerRect.Width = RowHeaderWidth();
        int absoluteRowIndex = visibleRowIndex + VScrollValue();
        headerRect.Width = RowHeaderWidth();
        m_rowPainter.DrawHeader(e.Graphics, headerRect, absoluteRowIndex == m_hotrow);

        visibleRowIndex++;
    }

    visibleRowIndex = 0;
    foreach (Node node in NodeCollection.ForwardNodeIterator(m_firstVisibleNode, true))
    {
        Rectangle rowRect = CalcRowRectangle(visibleRowIndex);
        // draw horizontal grid line for current node
        if (ViewOptions.ShowGridLines)
        {
            Rectangle r = rowRect;
            r.X = RowHeaderWidth();
            r.Width = columnsWidth - hScrollOffset;
            m_rowPainter.DrawHorizontalGridLine(e.Graphics, r, GridLineColour);
        }
        visibleRowIndex++;
    }

    // draw vertical grid lines
    if (ViewOptions.ShowGridLines)
    {
        // visible row count
        int remainRows = Nodes.VisibleNodeCount - m_vScroll.Value;
        if (visiblerows > remainRows)
            visiblerows = remainRows;
        Rectangle fullRect = ClientRectangle;
        if (drawColumnHeaders)
            fullRect.Y += Columns.Options.HeaderHeight;
        fullRect.Height = visiblerows * RowOptions.ItemHeight;
        Columns.Painter.DrawVerticalGridLines(Columns, e.Graphics, fullRect, hScrollOffset);
    }
}

// Claim navigation keys so the control receives them instead of dialog navigation.
protected override bool IsInputKey(Keys keyData)
{
    if ((int)(keyData & Keys.Shift) > 0)
        return true;
    switch (keyData)
    {
        case Keys.Left:
        case Keys.Right:
        case Keys.Down:
        case Keys.Up:
        case Keys.PageUp:
        case Keys.PageDown:
        case Keys.Home:
        case Keys.End:
            return true;
    }
    return false;
}

// Keyboard navigation: Up/Down/PageUp/PageDown/Home/End move focus;
// Left collapses or climbs to the parent, Right expands or descends.
protected override void OnKeyDown(KeyEventArgs e)
{
    Node newnode = null;
    if (e.KeyCode == Keys.PageUp)
    {
        int remainder = 0;
        int diff = MaxVisibleRows(out remainder)-1;
        newnode = NodeCollection.GetNextNode(FocusedNode, -diff);
        if (newnode == null)
            newnode = Nodes.FirstVisibleNode();
    }
    if (e.KeyCode == Keys.PageDown)
    {
        int remainder = 0;
        int diff = MaxVisibleRows(out remainder)-1;
        newnode = NodeCollection.GetNextNode(FocusedNode, diff);
        if (newnode == null)
            newnode = Nodes.LastVisibleNode(true);
    }
    if (e.KeyCode == Keys.Down)
    {
        newnode = NodeCollection.GetNextNode(FocusedNode, 1);
    }
    if (e.KeyCode == Keys.Up)
    {
        newnode = NodeCollection.GetNextNode(FocusedNode, -1);
    }
    if (e.KeyCode == Keys.Home)
    {
        newnode = Nodes.FirstNode;
    }
    if (e.KeyCode == Keys.End)
    {
        newnode = Nodes.LastVisibleNode(true);
    }
    if (e.KeyCode == Keys.Left)
    {
        if (FocusedNode != null)
        {
            if (FocusedNode.Expanded)
            {
                FocusedNode.Collapse();
                EnsureVisible(FocusedNode);
                return;
            }
            if (FocusedNode.Parent != null)
            {
                FocusedNode = FocusedNode.Parent;
                EnsureVisible(FocusedNode);
            }
        }
    }
    if (e.KeyCode == Keys.Right)
    {
        if (FocusedNode != null)
        {
            if (FocusedNode.Expanded == false && FocusedNode.HasChildren)
            {
                FocusedNode.Expand();
                EnsureVisible(FocusedNode);
                return;
            }
            if (FocusedNode.Expanded == true && FocusedNode.HasChildren)
            {
                FocusedNode = FocusedNode.Nodes.FirstNode;
                EnsureVisible(FocusedNode);
            }
        }
    }
    if (newnode != null)
    {
        if (MultiSelect)
        {
            // tree behavior is
            // keys none, the selected node is added as the focused and selected node
            // keys control, only focused node is moved, the selected nodes collection is not modified
            // keys shift, selection from first selected node to current node is done
            if (Control.ModifierKeys == Keys.Control)
                FocusedNode = newnode;
            else
                MultiSelectAdd(newnode, Control.ModifierKeys);
        }
        else
            FocusedNode = newnode;
        EnsureVisible(FocusedNode);
    }
    base.OnKeyDown(e);
}

internal void internalUpdateStyles()
{
    base.UpdateStyles();
}

#region ISupportInitialize Members

public void BeginInit()
{
    Columns.BeginInit();
}

public void EndInit()
{
    Columns.EndInit();
}

#endregion

internal new bool DesignMode
{
    get { return base.DesignMode; }
}
}

// Node collection bound to a TreeListView; forwards structural changes back to
// the owning control so layout stays in sync.
public class TreeListViewNodes : NodeCollection
{
    TreeListView m_tree;
    bool m_isUpdating = false; // suppresses RecalcLayout between Begin/EndUpdate
    public void BeginUpdate()
    {
        m_isUpdating = true;
    }
    public void EndUpdate()
    {
        m_isUpdating = false;
    }
    public TreeListViewNodes(TreeListView owner) : base(null)
    {
        m_tree = owner;
        OwnerView = owner;
    }
    protected override void UpdateNodeCount(int oldvalue, int newvalue)
    {
        base.UpdateNodeCount(oldvalue, newvalue);
        if (!m_isUpdating)
            m_tree.RecalcLayout();
    }
    public override void Clear()
    {
        base.Clear();
        m_tree.RecalcLayout();
    }
    // NOTE(review): "Nodetify" is a typo inherited from the NodeCollection base
    // class override names; kept because renaming would break the override.
    public override void NodetifyBeforeExpand(Node nodeToExpand, bool expanding)
    {
        if (!m_tree.DesignMode)
            m_tree.OnNotifyBeforeExpand(nodeToExpand, expanding);
    }
    public override void NodetifyAfterExpand(Node nodeToExpand, bool expanded)
    {
        m_tree.OnNotifyAfterExpand(nodeToExpand, expanded);
    }
    protected override int GetFieldIndex(string fieldname)
    {
        TreeListColumn col = m_tree.Columns[fieldname];
        if (col != null)
            return col.Index;
        return -1;
    }
}
}
// Copyright (c) 2014 Thong Nguyen (tumtumtum@gmail.com)

using System;
using System.Globalization;
using System.Text;
using System.Text.RegularExpressions;

namespace Platform
{
    /// <summary>
    /// Provides extension methods and static utility methods for strings.
    /// </summary>
    public static class StringUtils
    {
        /// <summary>
        /// Creates a string accumulator that appends <paramref name="appendChar"/> after every
        /// added value. The returned delegate adds a string and yields a function that
        /// materialises the accumulated text (including the trailing separator).
        /// </summary>
        public static Func<string, Func<string>> NewAdder(char appendChar)
        {
            return NewAdder(appendChar, true);
        }

        /// <summary>
        /// Creates a string accumulator that appends <paramref name="appendChar"/> after every
        /// added value.
        /// </summary>
        /// <param name="appendChar">Separator character appended after each value.</param>
        /// <param name="appendAtEnd">
        /// If false, the materialising function strips the trailing separator before returning.
        /// </param>
        public static Func<string, Func<string>> NewAdder(char appendChar, bool appendAtEnd)
        {
            Func<string> retval;
            var builder = new StringBuilder();

            if (appendAtEnd)
            {
                retval = builder.ToString;
            }
            else
            {
                retval = delegate
                {
                    // Drop the trailing separator (if anything was added) before materialising.
                    if (builder.Length > 0)
                    {
                        builder.Length--;
                    }

                    return builder.ToString();
                };
            }

            return delegate(string value)
            {
                builder.Append(value).Append(appendChar);

                return retval;
            };
        }

        /// <summary>
        /// Returns a new string that is made up of part of the current string.
        /// This method will return an empty or smaller than expected string if
        /// the index or lengths overflow.
        /// </summary>
        /// <param name="s">The current string</param>
        /// <param name="index">The index of the first character to return</param>
        /// <param name="length">The length of the string to return</param>
        /// <returns>A new string</returns>
        public static string MidString(this string s, int index, int length)
        {
            if (length < 0)
            {
                length = 0;
            }

            if (index < 0)
            {
                index = 0;
            }
            else if (index >= s.Length)
            {
                return string.Empty;
            }

            // Clamp the length so index + length never runs past the end of the string.
            var y = index + length;

            if (y >= s.Length)
            {
                length = Math.Max(length - (y - s.Length), 0);
            }

            return s.Substring(index, length);
        }

        /// <summary>
        /// Splits around the first occurrence of <paramref name="searchString"/> (the match
        /// itself is dropped). If there is no match the pair is (s, "").
        /// </summary>
        public static Pair<string, string> SplitOnFirst(this string s, string searchString)
        {
            var x = s.IndexOf(searchString, StringComparison.Ordinal);

            if (x < 0)
            {
                return new Pair<string, string>(s, string.Empty);
            }

            return new Pair<string, string>(s.Substring(0, x), s.Substring(x + searchString.Length));
        }

        /// <summary>
        /// Splits around the last occurrence of <paramref name="searchString"/> (the match
        /// itself is dropped). If there is no match the pair is ("", s).
        /// </summary>
        public static Pair<string, string> SplitOnLast(this string s, string searchString)
        {
            var x = s.LastIndexOf(searchString, StringComparison.Ordinal);

            if (x < 0)
            {
                return new Pair<string, string>(string.Empty, s);
            }

            return new Pair<string, string>(s.Substring(0, x), s.Substring(x + searchString.Length));
        }

        /// <summary>Splits around the first occurrence of <paramref name="c"/> scanning from the left.</summary>
        public static Pair<string, string> SplitAroundFirstCharFromLeft(this string s, char c)
        {
            return s.SplitAroundCharFromLeft(PredicateUtils.ObjectEquals(c));
        }

        /// <summary>Splits around the first occurrence of <paramref name="c"/> scanning from the right.</summary>
        public static Pair<string, string> SplitAroundFirstCharFromRight(this string s, char c)
        {
            return s.SplitAroundCharFromRight(PredicateUtils.ObjectEquals(c));
        }

        /// <summary>Alias for <see cref="SplitAroundFirstCharFromLeft(string, char)"/>.</summary>
        public static Pair<string, string> SplitAroundCharFromLeft(this string s, char c)
        {
            return s.SplitAroundCharFromLeft(PredicateUtils.ObjectEquals(c));
        }

        /// <summary>Splits around the first character (from the left) satisfying <paramref name="isSplitChar"/>.</summary>
        public static Pair<string, string> SplitAroundCharFromLeft(this string s, Predicate<char> isSplitChar)
        {
            return s.SplitAroundCharFromLeft(0, isSplitChar);
        }

        /// <summary>
        /// Splits around the first character at or after <paramref name="startIndex"/> that
        /// satisfies <paramref name="isSplitChar"/>; the split character itself is dropped.
        /// Returns (s, "") when no character matches.
        /// </summary>
        public static Pair<string, string> SplitAroundCharFromLeft(this string s, int startIndex, Predicate<char> isSplitChar)
        {
            for (var i = startIndex; i < s.Length; i++)
            {
                if (isSplitChar(s[i]))
                {
                    return new Pair<string, string>(s.Left(i), Right(s, s.Length - i - 1));
                }
            }

            return new Pair<string, string>(s, string.Empty);
        }

        /// <summary>Splits around the first occurrence of <paramref name="c"/> scanning from the right.</summary>
        public static Pair<string, string> SplitAroundCharFromRight(this string s, char c)
        {
            return s.SplitAroundCharFromRight(PredicateUtils.ObjectEquals(c));
        }

        /// <summary>
        /// Splits around the first character (scanning from the right) that satisfies
        /// <paramref name="isSplitChar"/>; the split character itself is dropped.
        /// Returns ("", s) when no character matches.
        /// </summary>
        public static Pair<string, string> SplitAroundCharFromRight(this string s, Predicate<char> isSplitChar)
        {
            for (var i = s.Length - 1; i >= 0; i--)
            {
                if (isSplitChar(s[i]))
                {
                    return new Pair<string, string>(s.Left(i), Right(s, s.Length - i - 1));
                }
            }

            return new Pair<string, string>(string.Empty, s);
        }

        /// <summary>Extension-method form of <see cref="string.IsNullOrEmpty(string)"/>.</summary>
        public static bool IsNullOrEmpty(this string s)
        {
            return string.IsNullOrEmpty(s);
        }

        /// <summary>Regex-based split with a maximum number of splits, starting at <paramref name="startIndex"/>.</summary>
        public static string[] Split(this string s, string regex, int count, int startIndex)
        {
            var re = new Regex(regex);

            return re.Split(s, count, startIndex);
        }

        /// <summary>Regex-based split with a maximum number of splits.</summary>
        public static string[] Split(this string s, string regex, int count)
        {
            var re = new Regex(regex);

            return re.Split(s, count);
        }

        /// <summary>Regex-based split.</summary>
        public static string[] Split(this string s, string regex)
        {
            return Regex.Split(s, regex);
        }

        /// <summary>Regex-based replace of at most <paramref name="count"/> matches, starting at <paramref name="startIndex"/>.</summary>
        public static string Replace(this string s, string regex, string replacement, int count, int startIndex)
        {
            var re = new Regex(regex);

            return re.Replace(s, replacement, count, startIndex);
        }

        /// <summary>Regex-based replace of at most <paramref name="count"/> matches.</summary>
        public static string Replace(this string s, string regex, string replacement, int count)
        {
            var re = new Regex(regex);

            return re.Replace(s, replacement, count);
        }

        /// <summary>Regex-based replace of all matches.</summary>
        public static string Replace(this string s, string regex, string replacement)
        {
            return Regex.Replace(s, regex, replacement);
        }

        /// <summary>Replaces only the first regex match.</summary>
        public static string ReplaceFirst(this string s, string regex, string replacement)
        {
            var re = new Regex(regex);

            return re.Replace(s, replacement, 1);
        }

        /// <summary>Replaces only the last regex match (via right-to-left matching).</summary>
        public static string ReplaceLast(this string s, string regex, string replacement)
        {
            var re = new Regex(regex, RegexOptions.RightToLeft);

            return re.Replace(s, replacement, 1);
        }

        /// <summary>Returns the string with its first character upper-cased (no-op when already upper or empty).</summary>
        public static string Capitalize(this string s)
        {
            if (s.Length == 0)
            {
                return s;
            }

            if (char.IsUpper(s[0]))
            {
                return s;
            }

            var builder = new StringBuilder(s.Length);

            builder.Append(char.ToUpper(s[0]));
            builder.Append(s, 1, s.Length - 1);

            return builder.ToString();
        }

        /// <summary>Returns the string with its first character lower-cased (no-op when already lower or empty).</summary>
        public static string Uncapitalize(this string s)
        {
            if (s.Length == 0)
            {
                return s;
            }

            if (char.IsLower(s[0]))
            {
                return s;
            }

            var builder = new StringBuilder(s.Length);

            builder.Append(char.ToLower(s[0]));
            builder.Append(s, 1, s.Length - 1);

            return builder.ToString();
        }

        /// <summary>Case-insensitive equality using the current culture.</summary>
        public static bool EqualsIgnoreCase(this string s1, string s2)
        {
            return s1.Equals(s2, StringComparison.CurrentCultureIgnoreCase);
        }

        /// <summary>Case-insensitive equality using the invariant culture.</summary>
        public static bool EqualsIgnoreCaseInvariant(this string s1, string s2)
        {
            return s1.Equals(s2, StringComparison.InvariantCultureIgnoreCase);
        }

        /// <summary>
        /// Gets the string made of all the characters on the left of the string while the
        /// predicate is satisfied.
        /// </summary>
        /// <param name="s">The string to operate on</param>
        /// <param name="acceptChar">A predicate that takes a char and returns false when left should return</param>
        /// <returns>A new string</returns>
        public static string Left(this string s, Predicate<char> acceptChar)
        {
            int i;

            for (i = 0; i < s.Length; i++)
            {
                if (!acceptChar(s[i]))
                {
                    break;
                }
            }

            if (i >= s.Length)
            {
                // Every character matched.
                return s;
            }

            return s.Substring(0, i);
        }

        /// <summary>
        /// Gets the string made up of all the characters on the right of all the characters
        /// on the left that match the predicate.
        /// Note: the first non-matching character itself is also skipped (substring starts
        /// at i + 1), which is the historical behavior of this method.
        /// </summary>
        public static string RightFromLeft(this string s, Predicate<char> predicate)
        {
            int i;

            for (i = 0; i < s.Length; i++)
            {
                if (!predicate(s[i]))
                {
                    break;
                }
            }

            if (i >= s.Length)
            {
                return string.Empty;
            }

            return s.Substring(i + 1);
        }

        /// <summary>
        /// Gets the string that is made up of the right most characters of <c>s</c>
        /// that satisfy <c>predicate</c>.
        /// </summary>
        /// <remarks>
        /// The method terminates and returns as soon as a character that doesn't satisfy
        /// <c>predicate</c> is found (scanning right-to-left).
        /// If every character satisfies the predicate then <c>s</c> is returned.
        /// </remarks>
        public static string Right(this string s, Predicate<char> acceptChar)
        {
            int i;

            for (i = s.Length - 1; i >= 0; i--)
            {
                if (!acceptChar(s[i]))
                {
                    break;
                }
            }

            return i < 0 ? s : s.Substring(i + 1);
        }

        /// <summary>
        /// Gets everything up to and including the first character (scanning right-to-left)
        /// that does not satisfy <paramref name="accept"/>; empty when all characters match.
        /// </summary>
        public static string LeftFromRight(this string s, Predicate<char> accept)
        {
            int i;

            for (i = s.Length - 1; i >= 0; i--)
            {
                if (!accept(s[i]))
                {
                    break;
                }
            }

            return i < 0 ? string.Empty : s.Substring(0, i + 1);
        }

        /// <summary>Returns the leftmost <paramref name="count"/> characters (the whole string when shorter).</summary>
        public static string Left(this string s, int count)
        {
            return count >= s.Length ? s : s.Substring(0, count);
        }

        /// <summary>Returns the rightmost <paramref name="count"/> characters (the whole string when shorter, empty for negative counts).</summary>
        public static string Right(this string s, int count)
        {
            if (count >= s.Length)
            {
                return s;
            }

            if (count < 0)
            {
                return string.Empty;
            }

            return s.Substring(s.Length - count, count);
        }

        /// <summary>Counts the characters in the whole string that satisfy <paramref name="predicate"/>.</summary>
        public static int CountChars(this string s, Predicate<char> predicate)
        {
            return CountChars(s, predicate, 0, s.Length);
        }

        /// <summary>
        /// Counts the characters satisfying <paramref name="acceptChar"/> within the range
        /// [<paramref name="startIndex"/>, <paramref name="startIndex"/> + <paramref name="count"/>).
        /// </summary>
        public static int CountChars(this string s, Predicate<char> acceptChar, int startIndex, int count)
        {
            var retval = 0;
            // Fixed: the loop previously used 'count' as an exclusive end index rather than
            // a length, so any call with startIndex > 0 examined the wrong range.
            var end = startIndex + count;

            for (var i = startIndex; i < end; i++)
            {
                if (acceptChar(s[i]))
                {
                    retval++;
                }
            }

            return retval;
        }

        /// <summary>Trims spaces from the left.</summary>
        public static string TrimLeft(this string s)
        {
            return s.TrimLeft(' ');
        }

        /// <summary>Trims spaces from the right.</summary>
        public static string TrimRight(this string s)
        {
            return s.TrimRight(' ');
        }

        /// <summary>Trims spaces from both ends.</summary>
        public static string Trim(this string s)
        {
            return Trim(s, ' ');
        }

        /// <summary>Trims all leading occurrences of <paramref name="c"/>.</summary>
        public static string TrimLeft(this string s, char c)
        {
            int i;

            for (i = 0; i < s.Length; i++)
            {
                if (s[i] != c)
                {
                    break;
                }
            }

            return s.Substring(i);
        }

        /// <summary>Trims all trailing occurrences of <paramref name="c"/>.</summary>
        public static string TrimRight(this string s, char c)
        {
            int i;

            for (i = s.Length - 1; i >= 0; i--)
            {
                if (s[i] != c)
                {
                    break;
                }
            }

            return s.Substring(0, i + 1);
        }

        /// <summary>Trims all leading and trailing occurrences of <paramref name="c"/>.</summary>
        public static string Trim(this string s, char c)
        {
            int x, y;

            for (x = 0; x < s.Length; x++)
            {
                if (s[x] != c)
                {
                    break;
                }
            }

            for (y = s.Length - 1; y > x; y--)
            {
                if (s[y] != c)
                {
                    break;
                }
            }

            return s.Substring(x, y + 1 - x);
        }

        /// <summary>Trims leading characters while <paramref name="trimChar"/> is satisfied.</summary>
        public static string TrimLeft(this string s, Predicate<char> trimChar)
        {
            int i;

            for (i = 0; i < s.Length; i++)
            {
                if (!trimChar(s[i]))
                {
                    break;
                }
            }

            return s.Substring(i);
        }

        /// <summary>Trims trailing characters while <paramref name="trimChar"/> is satisfied.</summary>
        public static string TrimRight(this string s, Predicate<char> trimChar)
        {
            int i;

            for (i = s.Length - 1; i >= 0; i--)
            {
                if (!trimChar(s[i]))
                {
                    break;
                }
            }

            return s.Substring(0, i + 1);
        }

        /// <summary>Trims characters from both ends while <paramref name="trimChar"/> is satisfied.</summary>
        public static string Trim(this string s, Predicate<char> trimChar)
        {
            int x, y;

            for (x = 0; x < s.Length; x++)
            {
                if (!trimChar(s[x]))
                {
                    break;
                }
            }

            for (y = s.Length - 1; y > x; y--)
            {
                if (!trimChar(s[y]))
                {
                    break;
                }
            }

            return s.Substring(x, y + 1 - x);
        }

        /// <summary>Removes a single leading occurrence of <paramref name="match"/>, when present.</summary>
        public static string TrimLeft(this string s, string match)
        {
            return s.StartsWith(match) ? s.Substring(match.Length) : s;
        }

        /// <summary>Removes a single trailing occurrence of <paramref name="match"/>, when present.</summary>
        public static string TrimRight(this string s, string match)
        {
            return s.EndsWith(match) ? s.Substring(0, s.Length - match.Length) : s;
        }

        /// <summary>
        /// Returns the index of the first character that satisfies the predicate.
        /// </summary>
        /// <param name="s">The string to search</param>
        /// <param name="acceptChar">The predicate</param>
        /// <returns>The index of the first character if found otherwise -1</returns>
        public static int IndexOf(this string s, Predicate<char> acceptChar)
        {
            return s.IndexOf(0, acceptChar);
        }

        /// <summary>
        /// Returns the index of the first character at or after <paramref name="startIndex"/>
        /// that satisfies the given predicate.
        /// </summary>
        /// <returns>The matching index or -1 if no characters satisfy the predicate.</returns>
        public static int IndexOf(this string s, int startIndex, Predicate<char> acceptChar)
        {
            for (var i = startIndex; i < s.Length; i++)
            {
                if (acceptChar(s[i]))
                {
                    return i;
                }
            }

            return -1;
        }

        /// <summary>
        /// Returns the index of the last character that satisfies the given predicate,
        /// or -1 if no characters satisfy it.
        /// </summary>
        public static int LastIndexOf(this string s, Predicate<char> acceptChar)
        {
            for (var i = s.Length - 1; i >= 0; i--)
            {
                if (acceptChar(s[i]))
                {
                    return i;
                }
            }

            return -1;
        }

        /// <summary>Everything except the last character (strings of length &lt;= 1 are returned unchanged).</summary>
        public static string LongHead(this string s)
        {
            if (s.Length <= 1)
            {
                return s;
            }

            return s.Substring(0, s.Length - 1);
        }

        /// <summary>The last character as a string (empty strings are returned unchanged).</summary>
        public static string ShortTail(this string s)
        {
            return s.Length == 0 ? s : s[s.Length - 1].ToString(CultureInfo.InvariantCulture);
        }

        /// <summary>The first character as a string (empty strings are returned unchanged).</summary>
        public static string Head(this string s)
        {
            return s.Length == 0 ? s : s[0].ToString(CultureInfo.InvariantCulture);
        }

        /// <summary>Everything except the first character (strings of length &lt;= 1 are returned unchanged).</summary>
        public static string Tail(this string s)
        {
            return s.Length <= 1 ? s : s.Substring(1);
        }

        /// <summary>
        /// Returns true when the string is an optionally '-'-prefixed run of numeric characters.
        /// Fixed: previously threw IndexOutOfRangeException for the empty string and
        /// returned true for a lone "-"; both now return false.
        /// </summary>
        public static bool IsNumeric(this string s)
        {
            if (string.IsNullOrEmpty(s))
            {
                return false;
            }

            var i = 0;

            if (s[i] == '-')
            {
                i++;

                // A lone minus sign is not a number.
                if (i >= s.Length)
                {
                    return false;
                }
            }

            for (; i < s.Length; i++)
            {
                if (!char.IsNumber(s[i]))
                {
                    return false;
                }
            }

            return true;
        }

        /// <summary>Ordinal test for a final character (false for the empty string).</summary>
        public static bool EndsWith(this string s, char value)
        {
            if (s.Length == 0)
            {
                return false;
            }

            return s[s.Length - 1] == value;
        }

        /// <summary>Ordinal test for an initial character (false for the empty string).</summary>
        public static bool StartsWith(this string s, char value)
        {
            if (s.Length == 0)
            {
                return false;
            }

            return s[0] == value;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Runtime.InteropServices;

namespace System.IO
{
    /// <summary>
    /// FileSystem implementation that routes each operation to either the Win32 or
    /// the WinRT file system. Win32 is preferred because it is faster; WinRT is the
    /// fallback for brokered locations (e.g. Pictures/Documents) and placeholder
    /// files that Win32 cannot access.
    /// </summary>
    internal class MultiplexingWin32WinRTFileSystem : FileSystem
    {
        private readonly FileSystem _win32FileSystem = new Win32FileSystem();
        private readonly FileSystem _winRTFileSystem = new WinRTFileSystem();

        /// <summary>
        /// Picks the backing file-system object for a FileSystemInfo: a WinRT-backed
        /// object when the path requires WinRT, otherwise the caller itself.
        /// </summary>
        internal static IFileSystemObject GetFileSystemObject(FileSystemInfo caller, string fullPath)
        {
            if (ShouldUseWinRT(fullPath, isCreate: false))
            {
                return new WinRTFileSystem.WinRTFileSystemObject(fullPath, asDirectory: caller is DirectoryInfo);
            }

            return (IFileSystemObject)caller;
        }

        public override int MaxPath => Interop.Kernel32.MAX_PATH;

        public override int MaxDirectoryPath => Interop.Kernel32.MAX_DIRECTORY_PATH;

        // Single-path operations route on that path. Operations taking two or three
        // paths fall back to WinRT when any of the involved paths requires it.

        public override void CopyFile(string sourceFullPath, string destFullPath, bool overwrite) =>
            Select(sourceFullPath, destFullPath).CopyFile(sourceFullPath, destFullPath, overwrite);

        public override void ReplaceFile(string sourceFullPath, string destFullPath, string destBackupFullPath, bool ignoreMetadataErrors) =>
            Select(sourceFullPath, destFullPath, destBackupFullPath).ReplaceFile(sourceFullPath, destFullPath, destBackupFullPath, ignoreMetadataErrors);

        public override void CreateDirectory(string fullPath) =>
            Select(fullPath, isCreate: true).CreateDirectory(fullPath);

        public override void DeleteFile(string fullPath) =>
            Select(fullPath).DeleteFile(fullPath);

        public override bool DirectoryExists(string fullPath) =>
            Select(fullPath).DirectoryExists(fullPath);

        public override Collections.Generic.IEnumerable<string> EnumeratePaths(string fullPath, string searchPattern, SearchOption searchOption, SearchTarget searchTarget) =>
            Select(fullPath).EnumeratePaths(fullPath, searchPattern, searchOption, searchTarget);

        public override Collections.Generic.IEnumerable<FileSystemInfo> EnumerateFileSystemInfos(string fullPath, string searchPattern, SearchOption searchOption, SearchTarget searchTarget) =>
            Select(fullPath).EnumerateFileSystemInfos(fullPath, searchPattern, searchOption, searchTarget);

        public override bool FileExists(string fullPath) =>
            Select(fullPath).FileExists(fullPath);

        public override FileAttributes GetAttributes(string fullPath) =>
            Select(fullPath).GetAttributes(fullPath);

        public override DateTimeOffset GetCreationTime(string fullPath) =>
            Select(fullPath).GetCreationTime(fullPath);

        // WinRT honors the Win32 current directory but does not expose it, so the
        // current-directory APIs always go through Win32.
        public override string GetCurrentDirectory() =>
            _win32FileSystem.GetCurrentDirectory();

        public override IFileSystemObject GetFileSystemInfo(string fullPath, bool asDirectory) =>
            Select(fullPath).GetFileSystemInfo(fullPath, asDirectory);

        public override DateTimeOffset GetLastAccessTime(string fullPath) =>
            Select(fullPath).GetLastAccessTime(fullPath);

        public override DateTimeOffset GetLastWriteTime(string fullPath) =>
            Select(fullPath).GetLastWriteTime(fullPath);

        public override void MoveDirectory(string sourceFullPath, string destFullPath) =>
            Select(sourceFullPath, destFullPath).MoveDirectory(sourceFullPath, destFullPath);

        public override void MoveFile(string sourceFullPath, string destFullPath) =>
            Select(sourceFullPath, destFullPath).MoveFile(sourceFullPath, destFullPath);

        public override void RemoveDirectory(string fullPath, bool recursive) =>
            Select(fullPath).RemoveDirectory(fullPath, recursive);

        public override void SetAttributes(string fullPath, FileAttributes attributes) =>
            Select(fullPath).SetAttributes(fullPath, attributes);

        public override void SetCreationTime(string fullPath, DateTimeOffset time, bool asDirectory) =>
            Select(fullPath).SetCreationTime(fullPath, time, asDirectory);

        // Always Win32 (see GetCurrentDirectory). This will throw
        // UnauthorizedAccess on brokered paths.
        public override void SetCurrentDirectory(string fullPath) =>
            _win32FileSystem.SetCurrentDirectory(fullPath);

        public override void SetLastAccessTime(string fullPath, DateTimeOffset time, bool asDirectory) =>
            Select(fullPath).SetLastAccessTime(fullPath, time, asDirectory);

        public override void SetLastWriteTime(string fullPath, DateTimeOffset time, bool asDirectory) =>
            Select(fullPath).SetLastWriteTime(fullPath, time, asDirectory);

        // This API is always blocked on WinRT; don't use Win32.
        public override string[] GetLogicalDrives() =>
            _winRTFileSystem.GetLogicalDrives();

        private FileSystem Select(string fullPath, bool isCreate = false) =>
            ShouldUseWinRT(fullPath, isCreate) ? _winRTFileSystem : _win32FileSystem;

        private FileSystem Select(string sourceFullPath, string destFullPath) =>
            ShouldUseWinRT(sourceFullPath, isCreate: false) || ShouldUseWinRT(destFullPath, isCreate: true)
                ? _winRTFileSystem
                : _win32FileSystem;

        private FileSystem Select(string sourceFullPath, string destFullPath, string destFullBackupPath) =>
            ShouldUseWinRT(sourceFullPath, isCreate: false) || ShouldUseWinRT(destFullPath, isCreate: true) || ShouldUseWinRT(destFullBackupPath, isCreate: true)
                ? _winRTFileSystem
                : _win32FileSystem;

        /// <summary>
        /// Determines whether a path can only be reached through WinRT. Win32 is
        /// preferred (WinRT eventually calls into Win32 anyway), but it cannot reach
        /// brokered paths and does not handle placeholder files, so we fall back to
        /// WinRT whenever Win32 cannot access a path or the path is a placeholder.
        /// </summary>
        private static bool ShouldUseWinRT(string fullPath, bool isCreate)
        {
            bool fallBackToWinRT = false;

            do
            {
                // GetFileAttributesEx is cheaper than FindFirstFile and needs only
                // minimal permissions, so probe with it first.
                var attributeData = new Interop.Kernel32.WIN32_FILE_ATTRIBUTE_DATA();

                if (Interop.Kernel32.GetFileAttributesEx(fullPath, Interop.Kernel32.GET_FILEEX_INFO_LEVELS.GetFileExInfoStandard, ref attributeData))
                {
                    bool isDirectory = (attributeData.fileAttributes & Interop.Kernel32.FileAttributes.FILE_ATTRIBUTE_DIRECTORY) != 0;
                    bool isReparsePoint = (attributeData.fileAttributes & Interop.Kernel32.FileAttributes.FILE_ATTRIBUTE_REPARSE_POINT) != 0;

                    if (isDirectory || !isReparsePoint)
                    {
                        // Directories and ordinary (non-reparse) files are always
                        // reachable via Win32.
                        break;
                    }

                    // A non-directory reparse point: we need the find data to tell
                    // whether it is a placeholder file (which requires WinRT).
                    var findData = new Interop.Kernel32.WIN32_FIND_DATA();
                    using (SafeFindHandle handle = Interop.Kernel32.FindFirstFile(fullPath, ref findData))
                    {
                        if (!handle.IsInvalid)
                        {
                            Debug.Assert((findData.dwFileAttributes & Interop.Kernel32.FileAttributes.FILE_ATTRIBUTE_DIRECTORY) == 0);
                            Debug.Assert((findData.dwFileAttributes & Interop.Kernel32.FileAttributes.FILE_ATTRIBUTE_REPARSE_POINT) != 0);

                            fallBackToWinRT = findData.dwReserved0 == Interop.Kernel32.IOReparseOptions.IO_REPARSE_TAG_FILE_PLACEHOLDER;
                            break;
                        }
                    }

                    // FindFirstFile failed; fall through to the error handling below.
                }

                int lastError = Marshal.GetLastWin32Error();
                Debug.Assert(lastError != Interop.Errors.ERROR_SUCCESS);

                if (lastError == Interop.Errors.ERROR_ACCESS_DENIED)
                {
                    // Win32 cannot reach the path (e.g. a brokered location) -> WinRT.
                    fallBackToWinRT = true;
                    break;
                }

                if (lastError != Interop.Errors.ERROR_PATH_NOT_FOUND && lastError != Interop.Errors.ERROR_FILE_NOT_FOUND)
                {
                    // Some other failure: stay on Win32 to get the most accurate
                    // error behavior.
                    break;
                }

                // NOT_FOUND while creating: Win32 may still lack access to an
                // ancestor directory, so retry one level up the path.
                fullPath = PathHelpers.GetDirectoryNameInternal(fullPath);
            }
            while (isCreate && !string.IsNullOrEmpty(fullPath));

            return fallBackToWinRT;
        }
    }
}
using System;
using ExcelDna.Integration;
using Qwack.Core.Basic;
using Qwack.Excel.Services;
using Qwack.Excel.Utils;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.DependencyInjection;
using Qwack.Dates;
using Qwack.Models.Models;
using Qwack.Core.Cubes;
using Qwack.Excel.Instruments;
using Qwack.Models.Risk;
using Qwack.Core.Instruments.Funding;
using System.Linq;
using System.Collections.Generic;

namespace Qwack.Excel.Curves
{
    // Excel-exposed risk functions. Each function looks up a model/portfolio pair
    // from the object caches, computes one risk measure, and pushes the resulting
    // cube back into the cache under the supplied result name.
    // NOTE(review): the class continues past the end of this chunk (the final
    // method below is cut off mid-signature by the chunk boundary).
    public class RiskFunctions
    {
        // Controls the IsThreadSafe flag on every ExcelFunction attribute below.
        private const bool Parallel = false;
        // NOTE(review): logger category is ModelFunctions, not RiskFunctions —
        // looks like a copy/paste from a sibling class; confirm intended category.
        private static readonly ILogger _logger = ContainerStores.GlobalContainer.GetService<ILoggerFactory>()?.CreateLogger<ModelFunctions>();

        // Present value of the portfolio in the requested reporting currency.
        [ExcelFunction(Description = "Returns PV of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioPV), IsThreadSafe = Parallel)]
        public static object PortfolioPV(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX or MC model name")] string ModelName,
            [ExcelArgument(Description = "Reporting currency")] string ReportingCcy)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var ccy = ContainerStores.CurrencyProvider[ReportingCcy];
                var result = model.PV(ccy);
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // Asset vega; optionally parallelized across bump scenarios.
        [ExcelFunction(Description = "Returns asset vega of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioVega), IsThreadSafe = Parallel)]
        public static object PortfolioVega(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX or MC model name")] string ModelName,
            [ExcelArgument(Description = "Reporting currency")] string ReportingCcy,
            [ExcelArgument(Description = "Parallel execution, default true")] object Parallelize)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var ccy = ContainerStores.CurrencyProvider[ReportingCcy];
                var result = model.AssetVega(ccy, Parallelize.OptionalExcel(true));
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // Asset sega/rega (vol-smile skew/curvature sensitivities).
        [ExcelFunction(Description = "Returns asset sega/rega of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioSegaRega), IsThreadSafe = Parallel)]
        public static object PortfolioSegaRega(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX or MC model name")] string ModelName,
            [ExcelArgument(Description = "Reporting currency")] string ReportingCcy)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var ccy = ContainerStores.CurrencyProvider[ReportingCcy];
                var result = model.AssetSegaRega(ccy);
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // FX vega in the requested reporting currency.
        [ExcelFunction(Description = "Returns fx vega of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioFxVega), IsThreadSafe = Parallel)]
        public static object PortfolioFxVega(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX or MC model name")] string ModelName,
            [ExcelArgument(Description = "Reporting currency")] string ReportingCcy)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var ccy = ContainerStores.CurrencyProvider[ReportingCcy];
                var result = model.FxVega(ccy);
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // Asset delta (no reporting-currency argument; delta is per underlying).
        [ExcelFunction(Description = "Returns asset delta of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioDelta), IsThreadSafe = Parallel)]
        public static object PortfolioDelta(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX or MC model name")] string ModelName,
            [ExcelArgument(Description = "Compute gamma, default false")] object ComputeGamma,
            [ExcelArgument(Description = "Parallel execution, default true")] object Parallelize)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var result = model.AssetDelta(ComputeGamma.OptionalExcel(false), Parallelize.OptionalExcel(true));
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // Parallel (all-curves-together) asset delta.
        [ExcelFunction(Description = "Returns asset delta of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioParallelDelta), IsThreadSafe = Parallel)]
        public static object PortfolioParallelDelta(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX or MC model name")] string ModelName)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var result = model.AssetParallelDelta(ContainerStores.CurrencyProvider);
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // FX delta relative to a home currency, optionally with gamma and with the
        // delta reported on the opposite side of each pair.
        [ExcelFunction(Description = "Returns fx delta of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioFxDelta), IsThreadSafe = Parallel)]
        public static object PortfolioFxDelta(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX or MC model name")] string ModelName,
            [ExcelArgument(Description = "Home currency, e.g. ZAR")] string HomeCcy,
            [ExcelArgument(Description = "Compute gamma, default false")] object ComputeGamma,
            [ExcelArgument(Description = "Report delta on opposite side of pair, default true")] object InverseDelta)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var gamma = ComputeGamma.OptionalExcel(false);
                var inverseD = InverseDelta.OptionalExcel(true);
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var ccy = ContainerStores.CurrencyProvider[HomeCcy];
                var result = model.FxDelta(ccy, ContainerStores.CurrencyProvider, gamma, inverseD);
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // FX delta restricted to an explicit list of currency pairs.
        [ExcelFunction(Description = "Returns fx delta of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioFxDeltaSpecific), IsThreadSafe = Parallel)]
        public static object PortfolioFxDeltaSpecific(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX or MC model name")] string ModelName,
            [ExcelArgument(Description = "Pairs to bump")] object[] PairsToBump,
            [ExcelArgument(Description = "Home currency, e.g. ZAR")] string HomeCcy,
            [ExcelArgument(Description = "Compute gamma, default false")] object ComputeGamma)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var gamma = ComputeGamma.OptionalExcel(false);
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var ccy = ContainerStores.CurrencyProvider[HomeCcy];
                var pairsToBumpStr = PairsToBump.ObjectRangeToVector<string>();
                var pairsToBump = pairsToBumpStr.Select(p => p.FxPairFromString(ContainerStores.CurrencyProvider, ContainerStores.CalendarProvider)).ToList();
                var result = model.FxDeltaSpecific(ccy, pairsToBump, ContainerStores.CurrencyProvider, gamma);
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // Theta and charm via the shared ThetaCharm helper (computeCharm: true).
        [ExcelFunction(Description = "Returns theta and charm of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioThetaCharm), IsThreadSafe = Parallel)]
        public static object PortfolioThetaCharm(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX model name")] string ModelName,
            [ExcelArgument(Description = "Fwd value date, usually T+1")] DateTime FwdValDate,
            [ExcelArgument(Description = "Reporting currency")] string ReportingCcy,
            [ExcelArgument(Description = "List of FxPairs for fx metrics")] object[] PairsToRisk)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                // A missing/empty Excel range means "no fx pairs requested".
                var pairs = (PairsToRisk == null || !PairsToRisk.Any() || PairsToRisk.First() is ExcelMissing) ?
                    null :
                    PairsToRisk.ObjectRangeToVector<string>()
                        .Select(x => x.FxPairFromString(ContainerStores.CurrencyProvider, ContainerStores.CalendarProvider))
                        .ToList();
                return ThetaCharm(ResultObjectName, PortfolioName, ModelName, FwdValDate, ReportingCcy, true, pairs);
            });
        }

        // Theta only (computeCharm: false, no fx pairs).
        [ExcelFunction(Description = "Returns theta of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioTheta), IsThreadSafe = Parallel)]
        public static object PortfolioTheta(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX model name")] string ModelName,
            [ExcelArgument(Description = "Fwd value date, usually T+1")] DateTime FwdValDate,
            [ExcelArgument(Description = "Reporting currency")] string ReportingCcy)
        {
            return ExcelHelper.Execute(_logger, () => ThetaCharm(ResultObjectName, PortfolioName, ModelName, FwdValDate, ReportingCcy, false, null));
        }

        // Shared implementation for PortfolioTheta / PortfolioThetaCharm.
        private static string ThetaCharm(string ResultObjectName, string PortfolioName, string ModelName, DateTime FwdValDate, string ReportingCcy, bool computeCharm, List<FxPair> fxPairs = null)
        {
            var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
            var ccy = ContainerStores.CurrencyProvider[ReportingCcy];
            var result = model.AssetThetaCharm(FwdValDate, ccy, ContainerStores.CurrencyProvider, computeCharm, fxPairs);
            return PushCubeToCache(result, ResultObjectName);
        }

        // Interest-rate delta; reporting currency is optional (null when unknown).
        [ExcelFunction(Description = "Returns interest rate delta cube of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioIrDelta), IsThreadSafe = Parallel)]
        public static object PortfolioIrDelta(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX model name")] string ModelName,
            [ExcelArgument(Description = "Optional reporting ccy")] string ReportingCurrency,
            [ExcelArgument(Description = "Bump size, default 0.0001")] object BumpSize)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var bump = BumpSize.OptionalExcel(0.0001);
                var repCCy = ContainerStores.CurrencyProvider.TryGetCurrency(ReportingCurrency, out var ccy) ? ccy : null;
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var result = model.AssetIrDelta(repCCy, bump);
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // Benchmark (par-instrument) IR delta against a funding instrument collection.
        [ExcelFunction(Description = "Returns interest rate benchmark delta cube of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioIrBenchmarkDelta), IsThreadSafe = Parallel)]
        public static object PortfolioIrBenchmarkDelta(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX model name")] string ModelName,
            [ExcelArgument(Description = "Funding instrument collection name")] string FICName,
            [ExcelArgument(Description = "Reporting currency")] string ReportingCcy)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var fic = ContainerStores.GetObjectCache<FundingInstrumentCollection>().GetObjectOrThrow(FICName, $"FIC {FICName} not found in cache");
                var ccy = ContainerStores.CurrencyProvider.GetCurrency(ReportingCcy);
                var result = model.BenchmarkRisk(fic.Value, ContainerStores.CurrencyProvider, ccy);
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // Correlation delta under a proportional bump rho' = rho + epsilon * (1 - rho).
        [ExcelFunction(Description = "Returns correlation delta of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioCorrelationDelta), IsThreadSafe = Parallel)]
        public static object PortfolioCorrelationDelta(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX model name")] string ModelName,
            [ExcelArgument(Description = "Reporting currency")] string ReportingCcy,
            [ExcelArgument(Description = "Epsilon bump size, rho' = rho + epsilon * (1-rho)")] double Epsilon)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var ccy = ContainerStores.CurrencyProvider.GetCurrency(ReportingCcy);
                var result = model.CorrelationDelta(ccy, Epsilon);
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // Combined greeks cube. NOTE(review): AssetGreeks(...).Result blocks on an
        // async call — potential deadlock/starvation hazard; confirm intended.
        [ExcelFunction(Description = "Returns greeks cube of a portfolio given an AssetFx model", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioGreeks), IsThreadSafe = Parallel)]
        public static object PortfolioGreeks(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object name")] string PortfolioName,
            [ExcelArgument(Description = "Asset-FX model name")] string ModelName,
            [ExcelArgument(Description = "Fwd value date, usually T+1")] DateTime FwdValDate,
            [ExcelArgument(Description = "Reporting currency")] string ReportingCcy)
        {
            return ExcelHelper.Execute(_logger, () =>
            {
                var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName);
                var ccy = ContainerStores.CurrencyProvider.GetCurrency(ReportingCcy);
                var result = model.AssetGreeks(FwdValDate, ccy, ContainerStores.CurrencyProvider).Result;
                return PushCubeToCache(result, ResultObjectName);
            });
        }

        // NOTE(review): this method's signature continues past the end of the
        // visible chunk; the text below is intentionally left exactly as-is.
        [ExcelFunction(Description = "Returns risk ladder for a portfolio given an AssetFx model and some bump parameters", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioRiskLadder), IsThreadSafe = Parallel)]
        public static object PortfolioRiskLadder(
            [ExcelArgument(Description = "Result object name")] string ResultObjectName,
            [ExcelArgument(Description = "Portolio object 
name")] string PortfolioName, [ExcelArgument(Description = "Asset-FX model name")] string ModelName, [ExcelArgument(Description = "Asset Id to bump")] string AssetId, [ExcelArgument(Description = "Bump type, defualt FlatShift")] object BumpType, [ExcelArgument(Description = "Number of bumps (returns 2*N+1 values)")] int NScenarios, [ExcelArgument(Description = "Bump step size")] double BumpStep, [ExcelArgument(Description = "Risk metric to produce for each scenario")] object RiskMetric, [ExcelArgument(Description = "Return differential to base case, default True")] object ReturnDiff) { return ExcelHelper.Execute(_logger, () => { var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName); if (!Enum.TryParse(BumpType.OptionalExcel("FlatShift"), out MutationType bType)) throw new Exception($"Unknown bump/mutation type {BumpType}"); if (!Enum.TryParse(RiskMetric.OptionalExcel("AssetCurveDelta"), out RiskMetric metric)) throw new Exception($"Unknown risk metric {RiskMetric}"); var retDiff = ReturnDiff.OptionalExcel(true); var isFx = AssetId.Length == 7 && AssetId[3] == '/'; ICube result; if (isFx) { var ccy = ContainerStores.CurrencyProvider.GetCurrency(AssetId.Substring(0, 3)); var riskLadder = new RiskLadder(ccy, bType, metric, BumpStep, NScenarios, retDiff); result = riskLadder.Generate(model, model.Portfolio); } else { var riskLadder = new RiskLadder(AssetId, bType, metric, BumpStep, NScenarios, retDiff); result = riskLadder.Generate(model, model.Portfolio); } return PushCubeToCache(result, ResultObjectName); }); } [ExcelFunction(Description = "Returns time ladder for a portfolio given an AssetFx model and some bump parameters", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioTimeLadder), IsThreadSafe = Parallel)] public static object PortfolioTimeLadder( [ExcelArgument(Description = "Result object name")] string ResultObjectName, [ExcelArgument(Description = "Portolio object name")] string PortfolioName, 
[ExcelArgument(Description = "Asset-FX model name")] string ModelName, [ExcelArgument(Description = "Number of bumps (returns 2*N+1 values)")] int NScenarios, [ExcelArgument(Description = "Calendar, default ZAR")] object Calendar, [ExcelArgument(Description = "Risk metric to produce for each scenario")] object RiskMetric, [ExcelArgument(Description = "Return differential to base case, default True")] object ReturnDiff) { return ExcelHelper.Execute(_logger, () => { var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName); var fCal = Calendar.OptionalExcel("ZAR"); if (!ContainerStores.SessionContainer.GetService<ICalendarProvider>().Collection.TryGetCalendar(fCal, out var cal)) { _logger?.LogInformation("Calendar {calendar} not found in cache", fCal); return $"Calendar {fCal} not found in cache"; } if (!Enum.TryParse(RiskMetric.OptionalExcel("AssetCurveDelta"), out RiskMetric metric)) throw new Exception($"Unknown risk metric {RiskMetric}"); var retDiff = ReturnDiff.OptionalExcel(true); var riskLadder = new TimeLadder(metric, NScenarios, cal, ContainerStores.CurrencyProvider, retDiff); var result = riskLadder.Generate(model, model.Portfolio); return PushCubeToCache(result, ResultObjectName); }); } [ExcelFunction(Description = "Returns an asset/currency risk matrix for a portfolio given an AssetFx model and some bump parameters", Category = CategoryNames.Risk, Name = CategoryNames.Risk + "_" + nameof(PortfolioRiskMatrix), IsThreadSafe = Parallel)] public static object PortfolioRiskMatrix( [ExcelArgument(Description = "Result object name")] string ResultObjectName, [ExcelArgument(Description = "Portolio object name")] string PortfolioName, [ExcelArgument(Description = "Asset-FX model name")] string ModelName, [ExcelArgument(Description = "Asset Id to bump")] string AssetId, [ExcelArgument(Description = "Currency to bump")] string Currency, [ExcelArgument(Description = "Bump type, defualt FlatShift")] object BumpType, [ExcelArgument(Description = 
"Number of bumps (returns 2*N+1 values)")] int NScenarios, [ExcelArgument(Description = "Bump step size asset")] double BumpStepAsset, [ExcelArgument(Description = "Bump step size fx")] double BumpStepFx, [ExcelArgument(Description = "Risk metric to produce for each scenario")] object RiskMetric, [ExcelArgument(Description = "Return differential to base case, default True")] object ReturnDiff, [ExcelArgument(Description = "List of FxPairs for fx metrics")] object[] PairsToRisk) { return ExcelHelper.Execute(_logger, () => { var model = InstrumentFunctions.GetModelFromCache(ModelName, PortfolioName); if (!Enum.TryParse(BumpType.OptionalExcel("FlatShift"), out MutationType bType)) throw new Exception($"Unknown bump/mutation type {BumpType}"); if (!Enum.TryParse(RiskMetric.OptionalExcel("AssetCurveDelta"), out RiskMetric metric)) throw new Exception($"Unknown risk metric {RiskMetric}"); var retDiff = ReturnDiff.OptionalExcel(true); var isFxFx = AssetId.Length == 7 && AssetId[3] == '/' && Currency.Length == 7 && Currency[3] == '/'; ICube result = null; if (isFxFx) { var pair1 = Currency.FxPairFromString(ContainerStores.CurrencyProvider, ContainerStores.CalendarProvider); var pair2 = AssetId.FxPairFromString(ContainerStores.CurrencyProvider, ContainerStores.CalendarProvider); var riskMatrix = new RiskMatrix(pair2, pair1, bType, metric, BumpStepAsset, BumpStepFx, NScenarios, ContainerStores.CurrencyProvider, retDiff); if(PairsToRisk!=null && PairsToRisk.Any()) { riskMatrix.FxPairsForDelta = PairsToRisk.ObjectRangeToVector<string>() .Select(x => x.FxPairFromString(ContainerStores.CurrencyProvider, ContainerStores.CalendarProvider)) .ToList(); } result = riskMatrix.Generate(model, model.Portfolio); } else { var ccy = ContainerStores.CurrencyProvider.GetCurrency(Currency); var riskMatrix = new RiskMatrix(AssetId, ccy, bType, metric, BumpStepAsset, BumpStepFx, NScenarios, ContainerStores.CurrencyProvider, retDiff); if (PairsToRisk != null && PairsToRisk.Any()) { 
riskMatrix.FxPairsForDelta = PairsToRisk.ObjectRangeToVector<string>() .Select(x => x.FxPairFromString(ContainerStores.CurrencyProvider, ContainerStores.CalendarProvider)) .ToList(); } result = riskMatrix.Generate(model, model.Portfolio); } return PushCubeToCache(result, ResultObjectName); }); } public static string PushCubeToCache(ICube cube, string ResultObjectName) => ExcelHelper.PushToCache(cube, ResultObjectName); } }
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using System.Reflection; using OpenSim.Framework.Servers; using OpenSim.Framework.Servers.HttpServer; namespace OpenSim.ApplicationPlugins.Rest.Inventory { /// <remarks> /// The class signature reveals the roles that RestHandler plays. /// /// [1] It is a sub-class of RestPlugin. 
/// It inherits and extends
    /// the functionality of this class, constraining it to the
    /// specific needs of this REST implementation. This relates
    /// to the plug-in mechanism supported by OpenSim, the specifics
    /// of which are mostly hidden by RestPlugin.
    /// [2] IRestHandler describes the interface that this class
    /// exports to service implementations. This is the services
    /// management interface.
    /// [3] IHttpAgentHandler describes the interface that is exported
    /// to the BaseHttpServer in support of this particular HTTP
    /// processing model. This is the request interface of the
    /// handler.
    /// </remarks>
    public class RestHandler : RestPlugin, IRestHandler, IHttpAgentHandler
    {
        // Handler tables: both stream and REST are supported. The path handlers and their
        // respective allocators are stored in separate tables.

        internal Dictionary<string,RestMethodHandler> pathHandlers = new Dictionary<string,RestMethodHandler>();
        internal Dictionary<string,RestMethodAllocator> pathAllocators = new Dictionary<string,RestMethodAllocator>();
        internal Dictionary<string,RestStreamHandler> streamHandlers = new Dictionary<string,RestStreamHandler>();

        #region local static state

        private static bool handlersLoaded = false;
        private static List<Type> classes = new List<Type>();
        private static List<IRest> handlers = new List<IRest>();
        private static Type[] parms = new Type[0];
        private static Object[] args = new Object[0];

        /// <summary>
        /// This static initializer scans the ASSEMBLY for classes that
        /// export the IRest interface and builds a list of them. These
        /// are later activated by the handler. To add a new handler it
        /// is only necessary to create a new services class that implements
        /// the IRest interface, and recompile the handler. This gives
        /// all of the build-time flexibility of a modular approach
        /// while not introducing yet-another module loader. Note that
        /// multiple assemblies can still be built, each with its own set
        /// of handlers. Examples of services classes are RestInventoryServices
        /// and RestSkeleton.
        /// </summary>
        static RestHandler()
        {
            Module[] mods = Assembly.GetExecutingAssembly().GetModules();

            foreach (Module m in mods)
            {
                Type[] types = m.GetTypes();
                foreach (Type t in types)
                {
                    try
                    {
                        if (t.GetInterface("IRest") != null)
                        {
                            classes.Add(t);
                        }
                    }
                    catch (Exception e)
                    {
                        // BUGFIX: the original format strings were "#0 ... {1}"
                        // with a single argument, so the {1} placeholder had no
                        // matching argument and the messages could never render.
                        // The exception was also discarded; include its message.
                        Rest.Log.WarnFormat("[STATIC-HANDLER]: Error scanning {0} : {1}", t, e.Message);
                        Rest.Log.InfoFormat("[STATIC-HANDLER]: {0} is not included", t);
                    }
                }
            }
        }

        #endregion local static state

        #region local instance state

        /// <summary>
        /// This routine loads all of the handlers discovered during
        /// instance initialization.
        /// A table of all loaded and successfully constructed handlers
        /// is built, and this table is then used by the constructor to
        /// initialize each of the handlers in turn.
        /// NOTE: The loading process does not automatically imply that
        /// the handler has registered any kind of an interface, that
        /// may be (optionally) done by the handler either during
        /// construction, or during initialization.
        /// </summary>
        private void LoadHandlers()
        {
            // Guarded by a lock so that concurrent plugin instances load the
            // shared static handler table exactly once.
            lock (handlers)
            {
                if (!handlersLoaded)
                {
                    ConstructorInfo ci;
                    Object ht;

                    foreach (Type t in classes)
                    {
                        try
                        {
                            // Handlers must expose a public parameterless constructor.
                            ci = t.GetConstructor(parms);
                            ht = ci.Invoke(args);
                            handlers.Add((IRest)ht);
                        }
                        catch (Exception e)
                        {
                            Rest.Log.WarnFormat("{0} Unable to load {1} : {2}", MsgId, t, e.Message);
                        }
                    }
                    handlersLoaded = true;
                }
            }
        }

        #endregion local instance state

        #region overriding properties

        // These properties override definitions
        // in the base class.

        // Name is used to differentiate the message header.
        public override string Name
        {
            get { return "HANDLER"; }
        }

        // Used to partition the .ini configuration space.
        public override string ConfigName
        {
            get { return "RestHandler"; }
        }

        // We have to rename these because we want
        // to be able to share the values with other
        // classes in our assembly and the base
        // names are protected.
        public string MsgId
        {
            get { return base.MsgID; }
        }

        public string RequestId
        {
            get { return base.RequestID; }
        }

        #endregion overriding properties

        #region overriding methods

        /// <summary>
        /// This method is called by OpenSimMain immediately after loading the
        /// plugin and after basic server setup, but before running any server commands.
        /// </summary>
        /// <remarks>
        /// Note that entries MUST be added to the active configuration files before
        /// the plugin can be enabled.
        /// </remarks>
        public override void Initialise(OpenSimBase openSim)
        {
            try
            {
                // This plugin will only be enabled if the broader
                // REST plugin mechanism is enabled.
                //Rest.Log.InfoFormat("{0} Plugin is initializing", MsgId);

                base.Initialise(openSim);

                // IsEnabled is implemented by the base class and
                // reflects an overall RestPlugin status.
                if (!IsEnabled)
                {
                    //Rest.Log.WarnFormat("{0} Plugins are disabled", MsgId);
                    return;
                }

                Rest.Log.InfoFormat("{0} Rest <{1}> plugin will be enabled", MsgId, Name);
                Rest.Log.InfoFormat("{0} Configuration parameters read from <{1}>", MsgId, ConfigName);

                // These are stored in static variables to make
                // them easy to reach from anywhere in the assembly.
                Rest.main = openSim;
                if (Rest.main == null)
                    throw new Exception("OpenSim base pointer is null");

                Rest.Plugin = this;
                Rest.Config = Config;
                Rest.Prefix = Prefix;
                Rest.GodKey = GodKey;
                Rest.Authenticate = Rest.Config.GetBoolean("authenticate", Rest.Authenticate);
                Rest.Scheme = Rest.Config.GetString("auth-scheme", Rest.Scheme);
                Rest.Secure = Rest.Config.GetBoolean("secured", Rest.Secure);
                Rest.ExtendedEscape = Rest.Config.GetBoolean("extended-escape", Rest.ExtendedEscape);
                Rest.Realm = Rest.Config.GetString("realm", Rest.Realm);
                Rest.DumpAsset = Rest.Config.GetBoolean("dump-asset", Rest.DumpAsset);
                Rest.Fill = Rest.Config.GetBoolean("path-fill", Rest.Fill);
                Rest.DumpLineSize = Rest.Config.GetInt("dump-line-size", Rest.DumpLineSize);
                Rest.FlushEnabled = Rest.Config.GetBoolean("flush-on-error", Rest.FlushEnabled);

                // Note: Odd spacing is required in the following strings
                Rest.Log.InfoFormat("{0} Authentication is {1}required", MsgId, (Rest.Authenticate ? "" : "not "));
                Rest.Log.InfoFormat("{0} Security is {1}enabled", MsgId, (Rest.Secure ? "" : "not "));
                Rest.Log.InfoFormat("{0} Extended URI escape processing is {1}enabled", MsgId, (Rest.ExtendedEscape ? "" : "not "));
                Rest.Log.InfoFormat("{0} Dumping of asset data is {1}enabled", MsgId, (Rest.DumpAsset ? "" : "not "));

                // The supplied prefix MUST be absolute.
                if (Rest.Prefix.Substring(0,1) != Rest.UrlPathSeparator)
                {
                    Rest.Log.WarnFormat("{0} Prefix <{1}> is not absolute and must be", MsgId, Rest.Prefix);
                    Rest.Log.InfoFormat("{0} Prefix changed to </{1}>", MsgId, Rest.Prefix);
                    Rest.Prefix = String.Format("{0}{1}", Rest.UrlPathSeparator, Rest.Prefix);
                }

                // If data dumping is requested, report on the chosen line length.
                if (Rest.DumpAsset)
                {
                    Rest.Log.InfoFormat("{0} Dump {1} bytes per line", MsgId, Rest.DumpLineSize);
                }

                // Load all of the handlers present in the assembly.
                // In principle, as we're an application plug-in,
                // most of what needs to be done could be done using
                // static resources, however the OpenSim plug-in
                // model makes this an instance, so that's what we
                // need to be.
                LoadHandlers();

                // The intention of a post construction initializer
                // is to allow for setup that is dependent upon other
                // activities outside of the agency.
                foreach (IRest handler in handlers)
                {
                    try
                    {
                        handler.Initialize();
                    }
                    catch (Exception e)
                    {
                        Rest.Log.ErrorFormat("{0} initialization error: {1}", MsgId, e.Message);
                    }
                }

                // Now that everything is setup we can proceed to
                // add THIS agent to the HTTP server's handler list.
                // FIXME: If this code is ever to be re-enabled (most of it is disabled already) then this will
                // have to be handled through the AddHttpHandler interface.
                // if (!AddAgentHandler(Rest.Name,this))
                // {
                //     Rest.Log.ErrorFormat("{0} Unable to activate handler interface", MsgId);
                //     foreach (IRest handler in handlers)
                //     {
                //         handler.Close();
                //     }
                // }
            }
            catch (Exception e)
            {
                Rest.Log.ErrorFormat("{0} Plugin initialization has failed: {1}", MsgId, e.Message);
            }
        }

        /// <summary>
        /// In the interests of efficiency, and because we cannot determine whether
        /// or not this instance will actually be harvested, we clobber the only
        /// anchoring reference to the working state for this plug-in. What the
        /// call to close does is irrelevant to this class beyond knowing that it
        /// can nullify the reference when it returns.
        /// To make sure everything is copacetic we make sure the primary interface
        /// is disabled by deleting the handler from the HTTP server tables.
        /// </summary>
        public override void Close()
        {
            Rest.Log.InfoFormat("{0} Plugin is terminating", MsgId);

            // FIXME: If this code is ever to be re-enabled (most of it is disabled already) then this will
            // have to be handled through the AddHttpHandler interface.
            // try
            // {
            //     RemoveAgentHandler(Rest.Name, this);
            // }
            // catch (KeyNotFoundException){}

            foreach (IRest handler in handlers)
            {
                handler.Close();
            }
        }

        #endregion overriding methods

        #region interface methods

        /// <summary>
        /// This method is called by the HTTP server to match an incoming
        /// request. It scans all of the strings registered by the
        /// underlying handlers and looks for the best match. It returns
        /// true if a match is found.
        /// The matching process could be made arbitrarily complex.
        /// Note: The match is case-insensitive.
        /// </summary>
        public bool Match(OSHttpRequest request, OSHttpResponse response)
        {
            string path = request.RawUrl.ToLower();

            try
            {
                foreach (string key in pathHandlers.Keys)
                {
                    // Note that Match will not necessarily find the handler that will
                    // actually be used - it does no test for the "closest" fit. It
                    // simply reflects that at least one possible handler exists.
                    if (path.StartsWith(key))
                    {
                        // This apparently odd evaluation is needed to prevent a match
                        // on anything other than a URI token boundary. Otherwise we
                        // may match on URL's that were not intended for this handler.
                        return (path.Length == key.Length ||
                                path.Substring(key.Length, 1) == Rest.UrlPathSeparator);
                    }
                }

                // Stream handler keys are method-qualified: "METHOD:path".
                path = String.Format("{0}{1}{2}", request.HttpMethod, Rest.UrlMethodSeparator, path);

                foreach (string key in streamHandlers.Keys)
                {
                    if (path.StartsWith(key))
                    {
                        // Same token-boundary guard as above.
                        return (path.Length == key.Length ||
                                path.Substring(key.Length, 1) == Rest.UrlPathSeparator);
                    }
                }
            }
            catch (Exception e)
            {
                Rest.Log.ErrorFormat("{0} matching exception for path <{1}> : {2}", MsgId, path, e.Message);
            }

            return false;
        }

        /// <summary>
        /// This is called by the HTTP server once the handler has indicated
        /// that it is able to handle the request.
        /// Preconditions:
        ///  [1] request != null and is a valid request object
        ///  [2] response != null and is a valid response object
        /// Behavior is undefined if preconditions are not satisfied.
        /// </summary>
        public bool Handle(OSHttpRequest request, OSHttpResponse response)
        {
            bool handled;

            base.MsgID = base.RequestID;

            // Debug only
            if (Rest.DEBUG)
            {
                Rest.Log.DebugFormat("{0} ENTRY", MsgId);
                Rest.Log.DebugFormat("{0} Agent: {1}", MsgId, request.UserAgent);
                Rest.Log.DebugFormat("{0} Method: {1}", MsgId, request.HttpMethod);

                for (int i = 0; i < request.Headers.Count; i++)
                {
                    Rest.Log.DebugFormat("{0} Header [{1}] : <{2}> = <{3}>",
                                         MsgId, i, request.Headers.GetKey(i), request.Headers.Get(i));
                }
                Rest.Log.DebugFormat("{0} URI: {1}", MsgId, request.RawUrl);
            }

            // If a path handler worked we're done, otherwise try any
            // available stream handlers too.
            try
            {
                handled = (FindPathHandler(request, response) ||
                           FindStreamHandler(request, response));
            }
            catch (Exception e)
            {
                // A raw exception indicates that something we weren't expecting has
                // happened. This should always reflect a shortcoming in the plugin,
                // or a failure to satisfy the preconditions. It should not reflect
                // an error in the request itself. Under such circumstances the state
                // of the request cannot be determined and we are obliged to mark it
                // as 'handled'.
                Rest.Log.ErrorFormat("{0} Plugin error: {1}", MsgId, e.Message);
                handled = true;
            }

            Rest.Log.DebugFormat("{0} EXIT", MsgId);

            return handled;
        }

        #endregion interface methods

        /// <summary>
        /// If there is a stream handler registered that can handle the
        /// request, then fine. If the request is not matched, do nothing.
        /// Note: The selection is case-insensitive.
        /// </summary>
        private bool FindStreamHandler(OSHttpRequest request, OSHttpResponse response)
        {
            RequestData rdata = new RequestData(request, response, String.Empty);

            string bestMatch = String.Empty;
            string path = String.Format("{0}:{1}", rdata.method, rdata.path).ToLower();

            Rest.Log.DebugFormat("{0} Checking for stream handler for <{1}>", MsgId, path);

            if (!IsEnabled)
            {
                return false;
            }

            // Longest-prefix wins.
            foreach (string pattern in streamHandlers.Keys)
            {
                if (path.StartsWith(pattern))
                {
                    if (pattern.Length > bestMatch.Length)
                    {
                        bestMatch = pattern;
                    }
                }
            }

            // Handle using the best match available.
            if (bestMatch.Length > 0)
            {
                Rest.Log.DebugFormat("{0} Stream-based handler matched with <{1}>", MsgId, bestMatch);
                RestStreamHandler handler = streamHandlers[bestMatch];
                rdata.buffer = handler.Handle(rdata.path, rdata.request.InputStream, rdata.request, rdata.response);
                rdata.AddHeader(rdata.response.ContentType,handler.ContentType);
                rdata.Respond("FindStreamHandler Completion");
            }

            return rdata.handled;
        }

        /// <summary>
        /// Add a stream handler for the designated HTTP method and path prefix.
        /// If the handler is not enabled, the request is ignored. If the path
        /// does not start with the REST prefix, it is added. If the method-qualified
        /// path has not already been registered, the method is added to the active
        /// handler table.
        /// </summary>
        public void AddStreamHandler(string httpMethod, string path, RestMethod method)
        {
            if (!IsEnabled)
            {
                return;
            }

            if (!path.StartsWith(Rest.Prefix))
            {
                path = String.Format("{0}{1}", Rest.Prefix, path);
            }

            path = String.Format("{0}{1}{2}", httpMethod, Rest.UrlMethodSeparator, path);

            // Conditionally add to the list.
            if (!streamHandlers.ContainsKey(path))
            {
                streamHandlers.Add(path, new RestStreamHandler(httpMethod, path, method));
                Rest.Log.DebugFormat("{0} Added handler for {1}", MsgId, path);
            }
            else
            {
                Rest.Log.WarnFormat("{0} Ignoring duplicate handler for {1}", MsgId, path);
            }
        }

        /// <summary>
        /// Given the supplied request/response, if the handler is enabled, the inbound
        /// information is used to match an entry in the active path handler tables, using
        /// the method-qualified path information. If a match is found, then the handler is
        /// invoked. The result is the boolean result of the handler, or false if no
        /// handler was located. The boolean indicates whether or not the request has been
        /// handled, not whether or not the request was successful - that information is in
        /// the response.
        /// Note: The selection process is case-insensitive.
        /// </summary>
        internal bool FindPathHandler(OSHttpRequest request, OSHttpResponse response)
        {
            RequestData rdata = null;
            string bestMatch = null;

            if (!IsEnabled)
            {
                return false;
            }

            Rest.Log.DebugFormat("{0} Checking for path handler for <{1}>", MsgId, request.RawUrl);

            // Longest-prefix wins.
            foreach (string pattern in pathHandlers.Keys)
            {
                if (request.RawUrl.ToLower().StartsWith(pattern))
                {
                    if (String.IsNullOrEmpty(bestMatch) || pattern.Length > bestMatch.Length)
                    {
                        bestMatch = pattern;
                    }
                }
            }

            if (!String.IsNullOrEmpty(bestMatch))
            {
                // Each path handler has an associated allocator that builds the
                // request-state object the handler expects.
                rdata = pathAllocators[bestMatch](request, response, bestMatch);

                Rest.Log.DebugFormat("{0} Path based REST handler matched with <{1}>", MsgId, bestMatch);

                try
                {
                    pathHandlers[bestMatch](rdata);
                }
                // A plugin generated error indicates a request-related error
                // that has been handled by the plugin.
                catch (RestException r)
                {
                    Rest.Log.WarnFormat("{0} Request failed: {1}", MsgId, r.Message);
                }
            }

            return (rdata == null) ? false : rdata.handled;
        }

        /// <summary>
        /// A method handler and a request allocator are stored using the designated
        /// path as a key. If an entry already exists, it is replaced by the new one.
        /// </summary>
        public void AddPathHandler(RestMethodHandler mh, string path, RestMethodAllocator ra)
        {
            if (!IsEnabled)
            {
                return;
            }

            if (pathHandlers.ContainsKey(path))
            {
                Rest.Log.DebugFormat("{0} Replacing handler for <${1}>", MsgId, path);
                pathHandlers.Remove(path);
            }

            if (pathAllocators.ContainsKey(path))
            {
                Rest.Log.DebugFormat("{0} Replacing allocator for <${1}>", MsgId, path);
                pathAllocators.Remove(path);
            }

            Rest.Log.DebugFormat("{0} Adding path handler for {1}", MsgId, path);

            pathHandlers.Add(path, mh);
            pathAllocators.Add(path, ra);
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using System.Security;
using System.Threading;
using System.Threading.Tasks;

namespace System.IO.Pipes
{
    /// <summary>Unix-specific portions of <see cref="PipeStream"/>; pipes are backed by FIFOs in the file system.</summary>
    public abstract partial class PipeStream : Stream
    {
        // The Windows implementation of PipeStream sets the stream's handle during
        // creation, and as such should always have a handle, but the Unix implementation
        // sometimes sets the handle not during creation but later during connection.
        // As such, validation during member access needs to verify a valid handle on
        // Windows, but can't assume a valid handle on Unix.
        internal const bool CheckOperationsRequiresSetHandle = false;

        /// <summary>Maps a server/pipe name pair onto the local file-system path backing the pipe.</summary>
        /// <remarks>Only local pipes are supported; any non-local server name throws.</remarks>
        internal static string GetPipePath(string serverName, string pipeName)
        {
            if (serverName != "." && serverName != Interop.libc.gethostname())
            {
                // Cross-machine pipes are not supported.
                throw new PlatformNotSupportedException();
            }

            if (pipeName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
            {
                // Since pipes are stored as files in the file system, we don't support
                // pipe names that are actually paths or that otherwise have invalid
                // filename characters in them.
                throw new PlatformNotSupportedException();
            }

            // Return the pipe path
            return Path.Combine(EnsurePipeDirectoryPath(), pipeName);
        }

        /// <summary>Throws an exception if the supplied handle does not represent a valid pipe.</summary>
        /// <param name="safePipeHandle">The handle to validate.</param>
        internal void ValidateHandleIsPipe(SafePipeHandle safePipeHandle)
        {
            // fstat the descriptor and verify its file type is FIFO.
            SysCall(safePipeHandle, (fd, _, __) =>
            {
                Interop.Sys.FileStatus status;
                int result = Interop.Sys.FStat(fd, out status);
                if (result == 0)
                {
                    if ((status.Mode & Interop.Sys.FileTypes.S_IFMT) != Interop.Sys.FileTypes.S_IFIFO)
                    {
                        throw new IOException(SR.IO_InvalidPipeHandle);
                    }
                }
                return result;
            });
        }

        /// <summary>Initializes the handle to be used asynchronously.</summary>
        /// <param name="handle">The handle.</param>
        [SecurityCritical]
        private void InitializeAsyncHandle(SafePipeHandle handle)
        {
            // nop — no per-handle async state is needed on Unix.
        }

        private void UninitializeAsyncHandle()
        {
            // nop — see InitializeAsyncHandle.
        }

        /// <summary>Reads up to <paramref name="count"/> bytes from the pipe via read(2); may return fewer bytes.</summary>
        [SecurityCritical]
        private unsafe int ReadCore(byte[] buffer, int offset, int count)
        {
            Debug.Assert(_handle != null, "_handle is null");
            Debug.Assert(!_handle.IsClosed, "_handle is closed");
            Debug.Assert(CanRead, "can't read");
            Debug.Assert(buffer != null, "buffer is null");
            Debug.Assert(offset >= 0, "offset is negative");
            Debug.Assert(count >= 0, "count is negative");

            fixed (byte* bufPtr = buffer)
            {
                return (int)SysCall(_handle, (fd, ptr, len) =>
                {
                    long result = (long)Interop.libc.read(fd, (byte*)ptr, (IntPtr)len);
                    Debug.Assert(result <= len);
                    return result;
                }, (IntPtr)(bufPtr + offset), count);
            }
        }

        [SecuritySafeCritical]
        private Task<int> ReadAsyncCore(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
        {
            // Delegate to the base Stream's ReadAsync, which will just invoke Read asynchronously.
            return base.ReadAsync(buffer, offset, count, cancellationToken);
        }

        /// <summary>Writes all <paramref name="count"/> bytes to the pipe, looping over partial write(2) results.</summary>
        [SecurityCritical]
        private unsafe void WriteCore(byte[] buffer, int offset, int count)
        {
            Debug.Assert(_handle != null, "_handle is null");
            Debug.Assert(!_handle.IsClosed, "_handle is closed");
            Debug.Assert(CanWrite, "can't write");
            Debug.Assert(buffer != null, "buffer is null");
            Debug.Assert(offset >= 0, "offset is negative");
            Debug.Assert(count >= 0, "count is negative");

            fixed (byte* bufPtr = buffer)
            {
                // write(2) may write fewer bytes than requested; keep going until done.
                while (count > 0)
                {
                    int bytesWritten = (int)SysCall(_handle, (fd, ptr, len) =>
                    {
                        long result = (long)Interop.libc.write(fd, (byte*)ptr, (IntPtr)len);
                        Debug.Assert(result <= len);
                        return result;
                    }, (IntPtr)(bufPtr + offset), count);
                    count -= bytesWritten;
                    offset += bytesWritten;
                }
            }
        }

        [SecuritySafeCritical]
        private Task WriteAsyncCore(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
        {
            // Delegate to the base Stream's WriteAsync, which will just invoke Write asynchronously.
            return base.WriteAsync(buffer, offset, count, cancellationToken);
        }

        // Blocks until the other end of the pipe has read in all written buffer.
        [SecurityCritical]
        public void WaitForPipeDrain()
        {
            CheckWriteOperations();
            if (!CanWrite)
            {
                throw __Error.GetWriteNotSupported();
            }

            throw new PlatformNotSupportedException(); // no mechanism for this on Unix
        }

        // Gets the transmission mode for the pipe.  This is virtual so that subclassing types can
        // override this in cases where only one mode is legal (such as anonymous pipes)
        public virtual PipeTransmissionMode TransmissionMode
        {
            [SecurityCritical]
            [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
            get
            {
                CheckPipePropertyOperations();
                return PipeTransmissionMode.Byte; // Unix pipes are only byte-based, not message-based
            }
        }

        // Gets the buffer size in the inbound direction for the pipe. This checks if pipe has read
        // access. If that passes, call to GetNamedPipeInfo will succeed.
        public virtual int InBufferSize
        {
            [SecurityCritical]
            [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands")]
            get
            {
                CheckPipePropertyOperations();
                if (!CanRead)
                {
                    throw new NotSupportedException(SR.NotSupported_UnreadableStream);
                }
                return InBufferSizeCore;
            }
        }

        // Gets the buffer size in the outbound direction for the pipe. This uses cached version
        // if it's an outbound only pipe because GetNamedPipeInfo requires read access to the pipe.
        // However, returning cached is good fallback, especially if user specified a value in
        // the ctor.
        public virtual int OutBufferSize
        {
            [SecurityCritical]
            [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
            get
            {
                CheckPipePropertyOperations();
                if (!CanWrite)
                {
                    throw new NotSupportedException(SR.NotSupported_UnwritableStream);
                }
                return OutBufferSizeCore;
            }
        }

        public virtual PipeTransmissionMode ReadMode
        {
            [SecurityCritical]
            get
            {
                CheckPipePropertyOperations();
                return PipeTransmissionMode.Byte; // Unix pipes are only byte-based, not message-based
            }
            [SecurityCritical]
            [SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
            set
            {
                CheckPipePropertyOperations();
                if (value < PipeTransmissionMode.Byte || value > PipeTransmissionMode.Message)
                {
                    throw new ArgumentOutOfRangeException("value", SR.ArgumentOutOfRange_TransmissionModeByteOrMsg);
                }

                if (value != PipeTransmissionMode.Byte) // Unix pipes are only byte-based, not message-based
                {
                    throw new PlatformNotSupportedException();
                }

                // nop, since it's already the only valid value
            }
        }

        // -----------------------------
        // ---- PAL layer ends here ----
        // -----------------------------

        // Lazily computed path of the directory that holds the FIFO-backed pipe files.
        private static string s_pipeDirectoryPath;

        /// <summary>Computes (and memoizes) the pipes directory path, creating the directory structure if needed.</summary>
        private static string EnsurePipeDirectoryPath()
        {
            const string PipesFeatureName = "pipes";

            // Ideally this would simply use PersistedFiles.GetTempFeatureDirectory(PipesFeatureName) and then
            // Directory.CreateDirectory to ensure it exists. But this assembly doesn't reference System.IO.FileSystem.
            // As such, we'd be calling GetTempFeatureDirectory, only to then need to parse it in order
            // to create each of the individual directories as part of the path. We instead access the named portions
            // of the path directly and do the building of the path and directory structure manually.

            // First ensure we know what the full path should be, e.g. /tmp/.dotnet/corefx/pipes/
            string fullPath = s_pipeDirectoryPath;
            string tempPath = null;
            if (fullPath == null)
            {
                tempPath = Path.GetTempPath();
                fullPath = Path.Combine(tempPath, PersistedFiles.TopLevelHiddenDirectory, PersistedFiles.SecondLevelDirectory, PipesFeatureName);
                s_pipeDirectoryPath = fullPath;
            }

            // Then create the directory if it doesn't already exist. If we get any error back from stat,
            // just proceed to build up the directory, failing in the CreateDirectory calls if there's some
            // problem. Similarly, it's possible stat succeeds but the path is a file rather than directory; we'll
            // call that success for now and let this fail later when the code tries to create a file in this "directory"
            // (we don't want to overwrite/delete whatever that unknown file may be, and this is similar to other cases
            // we can't control where the file system is manipulated concurrently with and separately from this code).
            Interop.Sys.FileStatus ignored;
            bool pathExists = Interop.Sys.Stat(fullPath, out ignored) == 0;
            if (!pathExists)
            {
                // We need to build up the directory manually. Ensure we have the temp directory in which
                // we'll create the structure, e.g. /tmp/
                if (tempPath == null)
                {
                    tempPath = Path.GetTempPath();
                }
                Debug.Assert(Interop.Sys.Stat(tempPath, out ignored) == 0, "Path.GetTempPath() directory could not be accessed");

                // Create /tmp/.dotnet/ if it doesn't exist.
                string partialPath = Path.Combine(tempPath, PersistedFiles.TopLevelHiddenDirectory);
                CreateDirectory(partialPath);

                // Create /tmp/.dotnet/corefx/ if it doesn't exist
                partialPath = Path.Combine(partialPath, PersistedFiles.SecondLevelDirectory);
                CreateDirectory(partialPath);

                // Create /tmp/.dotnet/corefx/pipes/ if it doesn't exist
                CreateDirectory(fullPath);
            }

            return fullPath;
        }

        /// <summary>mkdir(2) wrapper: succeeds if the directory already exists, retries on EINTR.</summary>
        private static void CreateDirectory(string directoryPath)
        {
            while (true)
            {
                int result = Interop.libc.mkdir(directoryPath, (int)Interop.libc.Permissions.S_IRWXU);

                // If successful created, we're done.
                if (result >= 0)
                    return;

                // If the directory already exists, consider it a success.
                Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();
                if (errorInfo.Error == Interop.Error.EEXIST)
                    return;

                // If the I/O was interrupted, try again.
                if (errorInfo.Error == Interop.Error.EINTR)
                    continue;

                // Otherwise, fail.
                throw Interop.GetExceptionForIoErrno(errorInfo, directoryPath, isDirectory: true);
            }
        }

        /// <summary>Maps managed pipe direction/options/inheritability onto open(2) flags.</summary>
        internal static Interop.Sys.OpenFlags TranslateFlags(PipeDirection direction, PipeOptions options, HandleInheritability inheritability)
        {
            // Translate direction
            Interop.Sys.OpenFlags flags =
                direction == PipeDirection.InOut ? Interop.Sys.OpenFlags.O_RDWR :
                direction == PipeDirection.Out ? Interop.Sys.OpenFlags.O_WRONLY :
                Interop.Sys.OpenFlags.O_RDONLY;

            // Translate options
            if ((options & PipeOptions.WriteThrough) != 0)
            {
                flags |= Interop.Sys.OpenFlags.O_SYNC;
            }

            // Translate inheritability.
            if ((inheritability & HandleInheritability.Inheritable) == 0)
            {
                flags |= Interop.Sys.OpenFlags.O_CLOEXEC;
            }

            // PipeOptions.Asynchronous is ignored, at least for now. Asynchronous processing
            // is handled just by queueing a work item to do the work synchronously on a pool thread.
            return flags;
        }

        /// <summary>
        /// Helper for making system calls that involve the stream's file descriptor.
        /// System calls are expected to return greater than or equal to zero on success,
        /// and less than zero on failure. In the case of failure, errno is expected to
        /// be set to the relevant error code.
        /// </summary>
        /// <param name="handle">The pipe handle whose descriptor the call operates on; ref-counted for the duration of the call.</param>
        /// <param name="sysCall">A delegate that invokes the system call.</param>
        /// <param name="arg1">The first argument to be passed to the system call, after the file descriptor.</param>
        /// <param name="arg2">The second argument to be passed to the system call.</param>
        /// <returns>The return value of the system call.</returns>
        /// <remarks>
        /// Arguments are expected to be passed via <paramref name="arg1"/> and <paramref name="arg2"/>
        /// so as to avoid delegate and closure allocations at the call sites.
        /// Retries on EINTR; an EPIPE failure marks the stream Broken before throwing.
        /// </remarks>
        private long SysCall(
            SafePipeHandle handle,
            Func<int, IntPtr, int, long> sysCall,
            IntPtr arg1 = default(IntPtr), int arg2 = default(int))
        {
            bool gotRefOnHandle = false;
            try
            {
                // Get the file descriptor from the handle. We increment the ref count to help
                // ensure it's not closed out from under us.
                handle.DangerousAddRef(ref gotRefOnHandle);
                Debug.Assert(gotRefOnHandle);
                int fd = (int)handle.DangerousGetHandle();
                Debug.Assert(fd >= 0);

                while (true)
                {
                    long result = sysCall(fd, arg1, arg2);
                    if (result < 0)
                    {
                        Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();

                        if (errorInfo.Error == Interop.Error.EINTR)
                            continue;

                        if (errorInfo.Error == Interop.Error.EPIPE)
                            State = PipeState.Broken;

                        throw Interop.GetExceptionForIoErrno(errorInfo);
                    }
                    return result;
                }
            }
            finally
            {
                if (gotRefOnHandle)
                {
                    handle.DangerousRelease();
                }
            }
        }
    }
}
using System.Collections.Generic;
using System.Threading.Tasks;
using Newtonsoft.Json;

namespace Citrina
{
    /// <summary>Bindings for the VK "stories.*" API methods; each call builds a parameter map and delegates to RequestManager.</summary>
    public class Stories : IStories
    {
        /// <summary>
        /// Allows to hide stories from chosen sources from current user's feed.
        /// </summary>
        public Task<ApiRequest<bool?>> BanOwnerApi(IEnumerable<int> ownersIds = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owners_ids"] = RequestHelpers.ParseEnumerable(ownersIds),
            };

            return RequestManager.CreateRequestAsync<bool?>("stories.banOwner", null, request);
        }

        /// <summary>
        /// Allows to delete story.
        /// </summary>
        public Task<ApiRequest<bool?>> DeleteApi(int? ownerId = null, int? storyId = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owner_id"] = ownerId?.ToString(),
                ["story_id"] = storyId?.ToString(),
            };

            return RequestManager.CreateRequestAsync<bool?>("stories.delete", null, request);
        }

        /// <summary>
        /// Returns stories available for current user.
        /// </summary>
        public Task<ApiRequest<StoriesGetResponse>> GetApi(int? ownerId = null, bool? extended = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owner_id"] = ownerId?.ToString(),
                ["extended"] = RequestHelpers.ParseBoolean(extended),
            };

            return RequestManager.CreateRequestAsync<StoriesGetResponse>("stories.get", null, request);
        }

        /// <summary>
        /// Returns stories available for current user (extended response).
        /// </summary>
        // NOTE(review): identical parameter list to the GetApi above, differing only by
        // return type — that is not a legal C# overload (CS0111). Presumably the generator
        // intended a distinct name (e.g. GetExtendedApi); confirm against the IStories
        // interface and the code generator before shipping.
        public Task<ApiRequest<StoriesGetExtendedResponse>> GetApi(int? ownerId = null, bool? extended = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owner_id"] = ownerId?.ToString(),
                ["extended"] = RequestHelpers.ParseBoolean(extended),
            };

            return RequestManager.CreateRequestAsync<StoriesGetExtendedResponse>("stories.get", null, request);
        }

        /// <summary>
        /// Returns list of sources hidden from current user's feed.
        /// </summary>
        public Task<ApiRequest<StoriesGetBannedResponse>> GetBannedApi(bool? extended = null, IEnumerable<BaseUserGroupFields> fields = null)
        {
            var request = new Dictionary<string, string>
            {
                ["extended"] = RequestHelpers.ParseBoolean(extended),
                ["fields"] = RequestHelpers.ParseEnumerable(fields),
            };

            return RequestManager.CreateRequestAsync<StoriesGetBannedResponse>("stories.getBanned", null, request);
        }

        /// <summary>
        /// Returns list of sources hidden from current user's feed (extended response).
        /// </summary>
        // NOTE(review): duplicate signature of the GetBannedApi above (return type only) — CS0111; see GetApi note.
        public Task<ApiRequest<StoriesGetBannedExtendedResponse>> GetBannedApi(bool? extended = null, IEnumerable<BaseUserGroupFields> fields = null)
        {
            var request = new Dictionary<string, string>
            {
                ["extended"] = RequestHelpers.ParseBoolean(extended),
                ["fields"] = RequestHelpers.ParseEnumerable(fields),
            };

            return RequestManager.CreateRequestAsync<StoriesGetBannedExtendedResponse>("stories.getBanned", null, request);
        }

        /// <summary>
        /// Returns story by its ID.
        /// </summary>
        public Task<ApiRequest<StoriesGetByIdResponse>> GetByIdApi(IEnumerable<string> stories = null, bool? extended = null, IEnumerable<BaseUserGroupFields> fields = null)
        {
            var request = new Dictionary<string, string>
            {
                ["stories"] = RequestHelpers.ParseEnumerable(stories),
                ["extended"] = RequestHelpers.ParseBoolean(extended),
                ["fields"] = RequestHelpers.ParseEnumerable(fields),
            };

            return RequestManager.CreateRequestAsync<StoriesGetByIdResponse>("stories.getById", null, request);
        }

        /// <summary>
        /// Returns story by its ID (extended response).
        /// </summary>
        // NOTE(review): duplicate signature of the GetByIdApi above (return type only) — CS0111; see GetApi note.
        public Task<ApiRequest<StoriesGetByIdExtendedResponse>> GetByIdApi(IEnumerable<string> stories = null, bool? extended = null, IEnumerable<BaseUserGroupFields> fields = null)
        {
            var request = new Dictionary<string, string>
            {
                ["stories"] = RequestHelpers.ParseEnumerable(stories),
                ["extended"] = RequestHelpers.ParseBoolean(extended),
                ["fields"] = RequestHelpers.ParseEnumerable(fields),
            };

            return RequestManager.CreateRequestAsync<StoriesGetByIdExtendedResponse>("stories.getById", null, request);
        }

        /// <summary>
        /// Returns URL for uploading a story with photo.
        /// </summary>
        public Task<ApiRequest<StoriesGetPhotoUploadServerResponse>> GetPhotoUploadServerApi(bool? addToNews = null, IEnumerable<int> userIds = null, string replyToStory = null, string linkText = null, string linkUrl = null, int? groupId = null)
        {
            var request = new Dictionary<string, string>
            {
                ["add_to_news"] = RequestHelpers.ParseBoolean(addToNews),
                ["user_ids"] = RequestHelpers.ParseEnumerable(userIds),
                ["reply_to_story"] = replyToStory,
                ["link_text"] = linkText,
                ["link_url"] = linkUrl,
                ["group_id"] = groupId?.ToString(),
            };

            return RequestManager.CreateRequestAsync<StoriesGetPhotoUploadServerResponse>("stories.getPhotoUploadServer", null, request);
        }

        /// <summary>
        /// Returns replies to the story.
        /// </summary>
        public Task<ApiRequest<StoriesGetRepliesResponse>> GetRepliesApi(int? ownerId = null, int? storyId = null, string accessKey = null, bool? extended = null, IEnumerable<BaseUserGroupFields> fields = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owner_id"] = ownerId?.ToString(),
                ["story_id"] = storyId?.ToString(),
                ["access_key"] = accessKey,
                ["extended"] = RequestHelpers.ParseBoolean(extended),
                ["fields"] = RequestHelpers.ParseEnumerable(fields),
            };

            return RequestManager.CreateRequestAsync<StoriesGetRepliesResponse>("stories.getReplies", null, request);
        }

        /// <summary>
        /// Returns replies to the story (extended response).
        /// </summary>
        // NOTE(review): duplicate signature of the GetRepliesApi above (return type only) — CS0111; see GetApi note.
        public Task<ApiRequest<StoriesGetRepliesExtendedResponse>> GetRepliesApi(int? ownerId = null, int? storyId = null, string accessKey = null, bool? extended = null, IEnumerable<BaseUserGroupFields> fields = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owner_id"] = ownerId?.ToString(),
                ["story_id"] = storyId?.ToString(),
                ["access_key"] = accessKey,
                ["extended"] = RequestHelpers.ParseBoolean(extended),
                ["fields"] = RequestHelpers.ParseEnumerable(fields),
            };

            return RequestManager.CreateRequestAsync<StoriesGetRepliesExtendedResponse>("stories.getReplies", null, request);
        }

        /// <summary>
        /// Returns statistics for the story ("stories.getStats").
        /// </summary>
        public Task<ApiRequest<StoriesStoryStats>> GetStatsApi(int? ownerId = null, int? storyId = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owner_id"] = ownerId?.ToString(),
                ["story_id"] = storyId?.ToString(),
            };

            return RequestManager.CreateRequestAsync<StoriesStoryStats>("stories.getStats", null, request);
        }

        /// <summary>
        /// Allows to receive URL for uploading story with video.
        /// </summary>
        public Task<ApiRequest<StoriesGetVideoUploadServerResponse>> GetVideoUploadServerApi(bool? addToNews = null, IEnumerable<int> userIds = null, string replyToStory = null, string linkText = null, string linkUrl = null, int? groupId = null)
        {
            var request = new Dictionary<string, string>
            {
                ["add_to_news"] = RequestHelpers.ParseBoolean(addToNews),
                ["user_ids"] = RequestHelpers.ParseEnumerable(userIds),
                ["reply_to_story"] = replyToStory,
                ["link_text"] = linkText,
                ["link_url"] = linkUrl,
                ["group_id"] = groupId?.ToString(),
            };

            return RequestManager.CreateRequestAsync<StoriesGetVideoUploadServerResponse>("stories.getVideoUploadServer", null, request);
        }

        /// <summary>
        /// Returns a list of story viewers.
        /// </summary>
        public Task<ApiRequest<StoriesGetViewersResponse>> GetViewersApi(int? ownerId = null, int? storyId = null, int? count = null, int? offset = null, bool? extended = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owner_id"] = ownerId?.ToString(),
                ["story_id"] = storyId?.ToString(),
                ["count"] = count?.ToString(),
                ["offset"] = offset?.ToString(),
                ["extended"] = RequestHelpers.ParseBoolean(extended),
            };

            return RequestManager.CreateRequestAsync<StoriesGetViewersResponse>("stories.getViewers", null, request);
        }

        /// <summary>
        /// Returns a list of story viewers (extended response).
        /// </summary>
        // NOTE(review): duplicate signature of the GetViewersApi above (return type only) — CS0111; see GetApi note.
        public Task<ApiRequest<StoriesGetViewersExtendedResponse>> GetViewersApi(int? ownerId = null, int? storyId = null, int? count = null, int? offset = null, bool? extended = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owner_id"] = ownerId?.ToString(),
                ["story_id"] = storyId?.ToString(),
                ["count"] = count?.ToString(),
                ["offset"] = offset?.ToString(),
                ["extended"] = RequestHelpers.ParseBoolean(extended),
            };

            return RequestManager.CreateRequestAsync<StoriesGetViewersExtendedResponse>("stories.getViewers", null, request);
        }

        /// <summary>
        /// Hides all replies in the last 24 hours from the user to current user's stories.
        /// </summary>
        public Task<ApiRequest<bool?>> HideAllRepliesApi(int? ownerId = null, int? groupId = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owner_id"] = ownerId?.ToString(),
                ["group_id"] = groupId?.ToString(),
            };

            return RequestManager.CreateRequestAsync<bool?>("stories.hideAllReplies", null, request);
        }

        /// <summary>
        /// Hides the reply to the current user's story.
        /// </summary>
        public Task<ApiRequest<bool?>> HideReplyApi(int? ownerId = null, int? storyId = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owner_id"] = ownerId?.ToString(),
                ["story_id"] = storyId?.ToString(),
            };

            return RequestManager.CreateRequestAsync<bool?>("stories.hideReply", null, request);
        }

        /// <summary>
        /// Allows to show stories from hidden sources in current user's feed.
        /// </summary>
        public Task<ApiRequest<bool?>> UnbanOwnerApi(IEnumerable<int> ownersIds = null)
        {
            var request = new Dictionary<string, string>
            {
                ["owners_ids"] = RequestHelpers.ParseEnumerable(ownersIds),
            };

            return RequestManager.CreateRequestAsync<bool?>("stories.unbanOwner", null, request);
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SR = System.Reflection;
using System.Runtime.CompilerServices;

using Mono.Cecil.Cil;

using NUnit.Framework;

namespace Mono.Cecil.Tests {

	/// <summary>
	/// Tests that Cecil can import references (types, fields, methods, generics)
	/// into a freshly emitted module; each test compiles a tiny assembly in-memory,
	/// loads it, and executes the generated method to verify the imported reference.
	/// </summary>
	[TestFixture]
	public class ImportCecilTests : BaseTestFixture {

		[Test]
		public void ImportStringByRef ()
		{
			// Emits a private helper taking (string, ref string) and calls it from Run,
			// verifying that ByReferenceType imports round-trip correctly.
			var get_string = Compile<Func<string, string>> ((module, body) => {
				var type = module.Types [1];

				var method_by_ref = new MethodDefinition {
					Name = "ModifyString",
					IsPrivate = true,
					IsStatic = true,
				};

				type.Methods.Add (method_by_ref);

				method_by_ref.MethodReturnType.ReturnType = module.ImportReference (typeof (void).ToDefinition ());

				method_by_ref.Parameters.Add (new ParameterDefinition (module.ImportReference (typeof (string).ToDefinition ())));
				method_by_ref.Parameters.Add (new ParameterDefinition (module.ImportReference (new ByReferenceType (typeof (string).ToDefinition ()))));

				var m_il = method_by_ref.Body.GetILProcessor ();
				m_il.Emit (OpCodes.Ldarg_1);
				m_il.Emit (OpCodes.Ldarg_0);
				m_il.Emit (OpCodes.Stind_Ref);
				m_il.Emit (OpCodes.Ret);

				var v_0 = new VariableDefinition (module.ImportReference (typeof (string).ToDefinition ()));
				body.Variables.Add (v_0);

				var il = body.GetILProcessor ();
				il.Emit (OpCodes.Ldnull);
				il.Emit (OpCodes.Stloc, v_0);
				il.Emit (OpCodes.Ldarg_0);
				il.Emit (OpCodes.Ldloca, v_0);
				il.Emit (OpCodes.Call, method_by_ref);
				il.Emit (OpCodes.Ldloc_0);
				il.Emit (OpCodes.Ret);
			});

			Assert.AreEqual ("foo", get_string ("foo"));
		}

		[Test]
		public void ImportStringArray ()
		{
			// Identity function over a rank-2 string array: exercises array type import.
			var identity = Compile<Func<string [,], string [,]>> ((module, body) => {
				var il = body.GetILProcessor ();
				il.Emit (OpCodes.Ldarg_0);
				il.Emit (OpCodes.Ret);
			});

			var array = new string [2, 2];

			Assert.AreEqual (array, identity (array));
		}

		[Test]
		public void ImportFieldStringEmpty ()
		{
			// Loads the imported string.Empty static field.
			var get_empty = Compile<Func<string>> ((module, body) => {
				var il = body.GetILProcessor ();
				il.Emit (OpCodes.Ldsfld, module.ImportReference (typeof (string).GetField ("Empty").ToDefinition ()));
				il.Emit (OpCodes.Ret);
			});

			Assert.AreEqual ("", get_empty ());
		}

		[Test]
		public void ImportStringConcat ()
		{
			// Calls the imported string.Concat(string, string) method.
			var concat = Compile<Func<string, string, string>> ((module, body) => {
				var il = body.GetILProcessor ();
				il.Emit (OpCodes.Ldarg_0);
				il.Emit (OpCodes.Ldarg_1);
				il.Emit (OpCodes.Call, module.ImportReference (typeof (string).GetMethod ("Concat", new [] { typeof (string), typeof (string) }).ToDefinition ()));
				il.Emit (OpCodes.Ret);
			});

			Assert.AreEqual ("FooBar", concat ("Foo", "Bar"));
		}

		// Sample open generic type used as the import target by the generic tests below.
		public class Generic<T> {
			public T Field;

			public T Method (T t)
			{
				return t;
			}

			public TS GenericMethod<TS> (T t, TS s)
			{
				return s;
			}

			public Generic<TS> ComplexGenericMethod<TS> (T t, TS s)
			{
				return new Generic<TS> { Field = s };
			}
		}

		[Test]
		public void ImportGenericField ()
		{
			// Imports Generic<string>.Field via MakeGeneric on the open field definition.
			var get_field = Compile<Func<Generic<string>, string>> ((module, body) => {
				var generic_def = module.ImportReference (typeof (Generic<>)).Resolve ();
				var field_def = generic_def.Fields.Where (f => f.Name == "Field").First ();
				var field_string = field_def.MakeGeneric (module.ImportReference (typeof (string)));
				var field_ref = module.ImportReference (field_string);

				var il = body.GetILProcessor ();
				il.Emit (OpCodes.Ldarg_0);
				il.Emit (OpCodes.Ldfld, field_ref);
				il.Emit (OpCodes.Ret);
			});

			var generic = new Generic<string> {
				Field = "foo",
			};

			Assert.AreEqual ("foo", get_field (generic));
		}

		[Test]
		public void ImportGenericMethod ()
		{
			// Imports Generic<int>.Method(int) closed over int.
			var generic_identity = Compile<Func<Generic<int>, int, int>> ((module, body) => {
				var generic_def = module.ImportReference (typeof (Generic<>)).Resolve ();
				var method_def = generic_def.Methods.Where (m => m.Name == "Method").First ();
				var method_int = method_def.MakeGeneric (module.ImportReference (typeof (int)));
				var method_ref = module.ImportReference (method_int);

				var il = body.GetILProcessor ();
				il.Emit (OpCodes.Ldarg_0);
				il.Emit (OpCodes.Ldarg_1);
				il.Emit (OpCodes.Callvirt, method_ref);
				il.Emit (OpCodes.Ret);
			});

			Assert.AreEqual (42, generic_identity (new Generic<int> (), 42));
		}

		[Test]
		public void ImportGenericMethodSpec ()
		{
			// Imports a generic method instance: Generic<string>.GenericMethod<int>.
			var gen_spec_id = Compile<Func<Generic<string>, int, int>> ((module, body) => {
				var generic_def = module.ImportReference (typeof (Generic<>)).Resolve ();
				var method_def = generic_def.Methods.Where (m => m.Name == "GenericMethod").First ();
				var method_string = method_def.MakeGeneric (module.ImportReference (typeof (string)));
				var method_instance = method_string.MakeGenericMethod (module.ImportReference (typeof (int)));
				var method_ref = module.ImportReference (method_instance);

				var il = body.GetILProcessor ();
				il.Emit (OpCodes.Ldarg_0);
				il.Emit (OpCodes.Ldnull);
				il.Emit (OpCodes.Ldarg_1);
				il.Emit (OpCodes.Callvirt, method_ref);
				il.Emit (OpCodes.Ret);
			});

			Assert.AreEqual (42, gen_spec_id (new Generic<string> (), 42));
		}

		[Test]
		public void ImportComplexGenericMethodSpec ()
		{
			// Imports ComplexGenericMethod<int> (returning Generic<int>) and then reads
			// its Field, combining a generic method instance with a generic field import.
			var gen_spec_id = Compile<Func<Generic<string>, int, int>> ((module, body) => {
				var generic_def = module.ImportReference (typeof (Generic<>)).Resolve ();
				var method_def = generic_def.Methods.Where (m => m.Name == "ComplexGenericMethod").First ();
				var method_string = method_def.MakeGeneric (module.ImportReference (typeof (string)));
				var method_instance = method_string.MakeGenericMethod (module.ImportReference (typeof (int)));
				var method_ref = module.ImportReference (method_instance);

				var field_def = generic_def.Fields.Where (f => f.Name == "Field").First ();
				var field_int = field_def.MakeGeneric (module.ImportReference (typeof (int)));
				var field_ref = module.ImportReference (field_int);

				var il = body.GetILProcessor ();
				il.Emit (OpCodes.Ldarg_0);
				il.Emit (OpCodes.Ldnull);
				il.Emit (OpCodes.Ldarg_1);
				il.Emit (OpCodes.Callvirt, method_ref);
				il.Emit (OpCodes.Ldfld, field_ref);
				il.Emit (OpCodes.Ret);
			});

			Assert.AreEqual (42, gen_spec_id (new Generic<string> (), 42));
		}

		[Test]
		public void ImportMethodOnOpenGeneric ()
		{
			var generic = typeof (Generic<>).ToDefinition ();

			using (var module = ModuleDefinition.CreateModule ("foo", ModuleKind.Dll)) {
				var method = module.ImportReference (generic.GetMethod ("Method"));

				Assert.AreEqual ("T Mono.Cecil.Tests.ImportCecilTests/Generic`1::Method(T)", method.FullName);
			}
		}

		// Shapes with differing generic-parameter counts, used to verify the right
		// generic context is selected when importing (see ContextGenericTest).
		public class ContextGeneric1Method2<G1> {
			public G1 GenericMethod<R1, S1> (R1 r, S1 s)
			{
				return default (G1);
			}
		}

		public class ContextGeneric2Method1<G2, H2> {
			public R2 GenericMethod<R2> (G2 g, H2 h)
			{
				return default (R2);
			}
		}

		public class NestedGenericsA<A> {
			public class NestedGenericsB<B> {
				public class NestedGenericsC<C> {
					public A GenericMethod (B b, C c)
					{
						return default (A);
					}
				}
			}
		}

		[Test]
		public void ContextGenericTest ()
		{
			if (Platform.OnCoreClr)
				return;

			var module = ModuleDefinition.ReadModule (typeof (ContextGeneric1Method2<>).Module.FullyQualifiedName);
			// by mixing open generics with 2 & 1 parameters, we make sure the right context is used (because otherwise, an exception will be thrown)
			var type = typeof (ContextGeneric1Method2<>).MakeGenericType (typeof (ContextGeneric2Method1<,>));
			var meth = type.GetMethod ("GenericMethod");
			var imported_type = module.ImportReference (type);
			var method = module.ImportReference (meth, imported_type);
			Assert.AreEqual ("G1 Mono.Cecil.Tests.ImportCecilTests/ContextGeneric1Method2`1<Mono.Cecil.Tests.ImportCecilTests/ContextGeneric2Method1`2<G2,H2>>::GenericMethod<R1,S1>(R1,S1)", method.FullName);

			// and the other way around
			type = typeof (ContextGeneric2Method1<,>).MakeGenericType (typeof (ContextGeneric1Method2<>), typeof (IList<>));
			meth = type.GetMethod ("GenericMethod");
			imported_type = module.ImportReference (type);
			method = module.ImportReference (meth, imported_type);
			Assert.AreEqual ("R2 Mono.Cecil.Tests.ImportCecilTests/ContextGeneric2Method1`2<Mono.Cecil.Tests.ImportCecilTests/ContextGeneric1Method2`1<G1>,System.Collections.Generic.IList`1<T>>::GenericMethod<R2>(G2,H2)", method.FullName);

			// not sure about this one
			type = typeof (NestedGenericsA<string>.NestedGenericsB<int>.NestedGenericsC<float>);
			meth = type.GetMethod ("GenericMethod");
			imported_type = module.ImportReference (type);
			method = module.ImportReference (meth, imported_type);
			Assert.AreEqual ("A Mono.Cecil.Tests.ImportCecilTests/NestedGenericsA`1/NestedGenericsB`1/NestedGenericsC`1<System.String,System.Int32,System.Single>::GenericMethod(B,C)", method.FullName);

			// We need both the method & type !
			type = typeof (Generic<>).MakeGenericType (typeof (string));
			meth = type.GetMethod ("ComplexGenericMethod");
			imported_type = module.ImportReference (type);
			method = module.ImportReference (meth, imported_type);
			Assert.AreEqual ("Mono.Cecil.Tests.ImportCecilTests/Generic`1<TS> Mono.Cecil.Tests.ImportCecilTests/Generic`1<System.String>::ComplexGenericMethod<TS>(T,TS)", method.FullName);
		}

		// Callback that emits the body of the generated Run method.
		delegate void Emitter (ModuleDefinition module, MethodBody body);

		// Builds a module containing a static Run method shaped like TDelegate,
		// lets the emitter fill its body, loads the result, and returns a delegate over Run.
		static TDelegate Compile<TDelegate> (Emitter emitter, [CallerMemberName] string testMethodName = null)
			where TDelegate : class
		{
			var name = "ImportCecil_" + testMethodName;

			var module = CreateTestModule<TDelegate> (name, emitter);
			var assembly = LoadTestModule (module);

			return CreateRunDelegate<TDelegate> (GetTestCase (name, assembly));
		}

		static TDelegate CreateRunDelegate<TDelegate> (Type type)
			where TDelegate : class
		{
			return (TDelegate) (object) Delegate.CreateDelegate (typeof (TDelegate), type.GetMethod ("Run"));
		}

		static Type GetTestCase (string name, SR.Assembly assembly)
		{
			return assembly.GetType (name);
		}

		static SR.Assembly LoadTestModule (ModuleDefinition module)
		{
			using (var stream = new MemoryStream ()) {
				module.Write (stream);
				// NOTE(review): this also dumps the emitted assembly to <temp>/cecil/ for
				// inspection; File.WriteAllBytes throws if that directory does not exist —
				// confirm the test setup creates it (e.g. via Directory.CreateDirectory).
				File.WriteAllBytes (Path.Combine (Path.Combine (Path.GetTempPath (), "cecil"), module.Name + ".dll"), stream.ToArray ());
				return SR.Assembly.Load (stream.ToArray ());
			}
		}

		static ModuleDefinition CreateTestModule<TDelegate> (string name, Emitter emitter)
		{
			var module = CreateModule (name);

			// The generated type is a sealed abstract (i.e. static) class holding Run.
			var type = new TypeDefinition (
				"",
				name,
				TypeAttributes.Public | TypeAttributes.Sealed | TypeAttributes.Abstract,
				module.ImportReference (typeof (object)));

			module.Types.Add (type);

			var method = CreateMethod (type, typeof (TDelegate).GetMethod ("Invoke"));

			emitter (module, method.Body);

			return module;
		}

		// Creates a public static "Run" method whose signature mirrors the
		// delegate's Invoke method (return type and parameter types).
		static MethodDefinition CreateMethod (TypeDefinition type, SR.MethodInfo pattern)
		{
			var module = type.Module;

			var method = new MethodDefinition {
				Name = "Run",
				IsPublic = true,
				IsStatic = true,
			};

			type.Methods.Add (method);

			method.MethodReturnType.ReturnType = module.ImportReference (pattern.ReturnType);

			foreach (var parameter_pattern in pattern.GetParameters ())
				method.Parameters.Add (new ParameterDefinition (module.ImportReference (parameter_pattern.ParameterType)));

			return method;
		}

		static ModuleDefinition CreateModule (string name)
		{
			// Resolve assemblies from the test assembly's own directory.
			var resolver = new DefaultAssemblyResolver ();
			resolver.AddSearchDirectory (Path.GetDirectoryName (typeof (ImportCecilTests).Assembly.Location));

			return ModuleDefinition.CreateModule (name, new ModuleParameters { Kind = ModuleKind.Dll, AssemblyResolver = resolver });
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Net.Security;
using System.Net.Test.Common;
using System.Runtime.InteropServices;
using System.Security.Authentication.ExtendedProtection;
using System.Security.Cryptography.X509Certificates;
using System.Threading.Tasks;

using Xunit;

namespace System.Net.Http.Functional.Tests
{
    using Configuration = System.Net.Test.Common.Configuration;

    // Outer-loop tests exercising server-certificate validation behavior of
    // HttpClientHandler against live remote test endpoints.
    [SkipOnTargetFramework(TargetFrameworkMonikers.Uap | TargetFrameworkMonikers.UapAot | TargetFrameworkMonikers.NetFramework, "uap: dotnet/corefx #20010, netfx: dotnet/corefx #16805")]
    public partial class HttpClientHandler_ServerCertificates_Test
    {
        // A successful request with the default (null) callback must leave the
        // handler locked: setting cert options after first use must throw.
        [OuterLoop] // TODO: Issue #11345
        [Fact]
        public async Task NoCallback_ValidCertificate_CallbackNotCalled()
        {
            var handler = new HttpClientHandler();
            using (var client = new HttpClient(handler))
            {
                Assert.Null(handler.ServerCertificateCustomValidationCallback);
                Assert.False(handler.CheckCertificateRevocationList);

                using (HttpResponseMessage response = await client.GetAsync(Configuration.Http.SecureRemoteEchoServer))
                {
                    Assert.Equal(HttpStatusCode.OK, response.StatusCode);
                }

                Assert.Throws<InvalidOperationException>(() => handler.ServerCertificateCustomValidationCallback = null);
                Assert.Throws<InvalidOperationException>(() => handler.CheckCertificateRevocationList = false);
            }
        }

        // An authenticated proxy that receives no credentials must yield 407.
        [OuterLoop] // TODO: Issue #11345
        [ConditionalFact(nameof(BackendSupportsCustomCertificateHandling))]
        public void UseCallback_HaveNoCredsAndUseAuthenticatedCustomProxyAndPostToSecureServer_ProxyAuthenticationRequiredStatusCode()
        {
            int port;
            Task<LoopbackGetRequestHttpProxy.ProxyResult> proxyTask = LoopbackGetRequestHttpProxy.StartAsync(
                out port,
                requireAuth: true,
                expectCreds: false);
            Uri proxyUrl = new Uri($"http://localhost:{port}");

            var handler = new HttpClientHandler();
            handler.Proxy = new UseSpecifiedUriWebProxy(proxyUrl, null);
            handler.ServerCertificateCustomValidationCallback = delegate { return true; };
            using (var client = new HttpClient(handler))
            {
                Task<HttpResponseMessage> responseTask = client.PostAsync(
                    Configuration.Http.SecureRemoteEchoServer,
                    new StringContent("This is a test"));
                // NOTE(review): synchronous block on async tasks; acceptable in a
                // [Fact] (no sync context) but would deadlock in UI code.
                Task.WaitAll(proxyTask, responseTask);
                using (responseTask.Result)
                {
                    Assert.Equal(HttpStatusCode.ProxyAuthenticationRequired, responseTask.Result.StatusCode);
                }
            }
        }

        // The validation callback must not fire for plain-HTTP requests.
        [OuterLoop] // TODO: Issue #11345
        [ConditionalFact(nameof(BackendSupportsCustomCertificateHandling))]
        public async Task UseCallback_NotSecureConnection_CallbackNotCalled()
        {
            var handler = new HttpClientHandler();
            using (var client = new HttpClient(handler))
            {
                bool callbackCalled = false;
                handler.ServerCertificateCustomValidationCallback = delegate { callbackCalled = true; return true; };

                using (HttpResponseMessage response = await client.GetAsync(Configuration.Http.RemoteEchoServer))
                {
                    Assert.Equal(HttpStatusCode.OK, response.StatusCode);
                }

                Assert.False(callbackCalled);
            }
        }

        // Theory data: direct and one-hop-redirected secure URLs, with
        // revocation checking both on and off.
        public static IEnumerable<object[]> UseCallback_ValidCertificate_ExpectedValuesDuringCallback_Urls()
        {
            foreach (bool checkRevocation in new[] { true, false })
            {
                yield return new object[] { Configuration.Http.SecureRemoteEchoServer, checkRevocation };
                yield return new object[] { Configuration.Http.RedirectUriForDestinationUri(
                    secure:true,
                    statusCode:302,
                    destinationUri:Configuration.Http.SecureRemoteEchoServer,
                    hops:1), checkRevocation };
            }
        }

        // For a valid certificate the callback must observe no policy errors,
        // a populated chain, and the revocation mode matching the handler flag.
        [OuterLoop] // TODO: Issue #11345
        [ConditionalTheory(nameof(BackendSupportsCustomCertificateHandling))]
        [MemberData(nameof(UseCallback_ValidCertificate_ExpectedValuesDuringCallback_Urls))]
        public async Task UseCallback_ValidCertificate_ExpectedValuesDuringCallback(Uri url, bool checkRevocation)
        {
            var handler = new HttpClientHandler();
            using (var client = new HttpClient(handler))
            {
                bool callbackCalled = false;
                handler.CheckCertificateRevocationList = checkRevocation;
                handler.ServerCertificateCustomValidationCallback = (request, cert, chain, errors) => {
                    callbackCalled = true;
                    Assert.NotNull(request);
                    Assert.Equal(SslPolicyErrors.None, errors);
                    Assert.True(chain.ChainElements.Count > 0);
                    Assert.NotEmpty(cert.Subject);
                    Assert.Equal(checkRevocation ? X509RevocationMode.Online : X509RevocationMode.NoCheck, chain.ChainPolicy.RevocationMode);
                    return true;
                };

                using (HttpResponseMessage response = await client.GetAsync(url))
                {
                    Assert.Equal(HttpStatusCode.OK, response.StatusCode);
                }

                Assert.True(callbackCalled);
            }
        }

        // A callback returning false must surface as HttpRequestException.
        [OuterLoop] // TODO: Issue #11345
        [ConditionalFact(nameof(BackendSupportsCustomCertificateHandling))]
        public async Task UseCallback_CallbackReturnsFailure_ThrowsException()
        {
            var handler = new HttpClientHandler();
            using (var client = new HttpClient(handler))
            {
                handler.ServerCertificateCustomValidationCallback = delegate { return false; };
                await Assert.ThrowsAsync<HttpRequestException>(() => client.GetAsync(Configuration.Http.SecureRemoteEchoServer));
            }
        }

        // An exception thrown from the callback must propagate unwrapped
        // (same instance) to the caller.
        [OuterLoop] // TODO: Issue #11345
        [ConditionalFact(nameof(BackendSupportsCustomCertificateHandling))]
        public async Task UseCallback_CallbackThrowsException_ExceptionPropagates()
        {
            var handler = new HttpClientHandler();
            using (var client = new HttpClient(handler))
            {
                var e = new DivideByZeroException();
                handler.ServerCertificateCustomValidationCallback = delegate { throw e; };
                Assert.Same(e, await Assert.ThrowsAsync<DivideByZeroException>(() => client.GetAsync(Configuration.Http.SecureRemoteEchoServer)));
            }
        }

        // Endpoints presenting certificates that are invalid for distinct reasons.
        public static readonly object[][] CertificateValidationServers =
        {
            new object[] { Configuration.Http.ExpiredCertRemoteServer },
            new object[] { Configuration.Http.SelfSignedCertRemoteServer },
            new object[] { Configuration.Http.WrongHostNameCertRemoteServer },
        };

        // With no callback installed, any bad certificate must fail the request.
        [OuterLoop] // TODO: Issue #11345
        [Theory]
        [MemberData(nameof(CertificateValidationServers))]
        public async Task NoCallback_BadCertificate_ThrowsException(string url)
        {
            using (var client = new HttpClient())
            {
                await
Assert.ThrowsAsync<HttpRequestException>(() => client.GetAsync(url));
            }
        }

        [OuterLoop] // TODO: Issue #11345
        [Fact]
        public async Task NoCallback_RevokedCertificate_NoRevocationChecking_Succeeds()
        {
            // On macOS (libcurl+darwinssl) we cannot turn revocation off.
            // But we also can't realistically say that the default value for
            // CheckCertificateRevocationList throws in the general case.
            try
            {
                using (var client = new HttpClient())
                using (HttpResponseMessage response = await client.GetAsync(Configuration.Http.RevokedCertRemoteServer))
                {
                    Assert.Equal(HttpStatusCode.OK, response.StatusCode);
                }
            }
            catch (HttpRequestException)
            {
                if (!ShouldSuppressRevocationException)
                    throw;
            }
        }

        // With revocation checking enabled, a revoked certificate must fail.
        [OuterLoop] // TODO: Issue #11345
        [ConditionalFact(nameof(BackendSupportsCustomCertificateHandling))]
        public async Task NoCallback_RevokedCertificate_RevocationChecking_Fails()
        {
            var handler = new HttpClientHandler() { CheckCertificateRevocationList = true };
            using (var client = new HttpClient(handler))
            {
                await Assert.ThrowsAsync<HttpRequestException>(() => client.GetAsync(Configuration.Http.RevokedCertRemoteServer));
            }
        }

        // Bad-certificate endpoints paired with the SslPolicyErrors value the
        // validation callback is expected to observe for each.
        public static readonly object[][] CertificateValidationServersAndExpectedPolicies =
        {
            new object[] { Configuration.Http.ExpiredCertRemoteServer, SslPolicyErrors.RemoteCertificateChainErrors },
            new object[] { Configuration.Http.SelfSignedCertRemoteServer, SslPolicyErrors.RemoteCertificateChainErrors },
            new object[] { Configuration.Http.WrongHostNameCertRemoteServer , SslPolicyErrors.RemoteCertificateNameMismatch},
        };

        // A callback that accepts a bad certificate must see the specific
        // expected policy error and still allow the request to succeed.
        [ActiveIssue(7812, TestPlatforms.Windows)]
        [OuterLoop] // TODO: Issue #11345
        [ConditionalTheory(nameof(BackendSupportsCustomCertificateHandling))]
        [MemberData(nameof(CertificateValidationServersAndExpectedPolicies))]
        public async Task UseCallback_BadCertificate_ExpectedPolicyErrors(string url, SslPolicyErrors expectedErrors)
        {
            var handler = new HttpClientHandler();
            using (var client = new HttpClient(handler))
            {
                bool callbackCalled = false;

                handler.ServerCertificateCustomValidationCallback = (request, cert, chain, errors) =>
                {
                    callbackCalled = true;
                    Assert.NotNull(request);
                    Assert.NotNull(cert);
                    Assert.NotNull(chain);
                    Assert.Equal(expectedErrors, errors);
                    return true;
                };

                using (HttpResponseMessage response = await client.GetAsync(url))
                {
                    Assert.Equal(HttpStatusCode.OK, response.StatusCode);
                }

                Assert.True(callbackCalled);
            }
        }

        // Backends without custom certificate handling must reject a callback.
        [OuterLoop] // TODO: Issue #11345
        [ConditionalFact(nameof(BackendDoesNotSupportCustomCertificateHandling))]
        public async Task SSLBackendNotSupported_Callback_ThrowsPlatformNotSupportedException()
        {
            using (var client = new HttpClient(new HttpClientHandler() { ServerCertificateCustomValidationCallback = delegate { return true; } }))
            {
                await Assert.ThrowsAsync<PlatformNotSupportedException>(() => client.GetAsync(Configuration.Http.SecureRemoteEchoServer));
            }
        }

        [OuterLoop] // TODO: Issue #11345
        [ConditionalFact(nameof(BackendDoesNotSupportCustomCertificateHandling))]
        // For macOS the "custom handling" means that revocation can't be *disabled*. So this test does not apply.
        [PlatformSpecific(~TestPlatforms.OSX)]
        public async Task SSLBackendNotSupported_Revocation_ThrowsPlatformNotSupportedException()
        {
            using (var client = new HttpClient(new HttpClientHandler() { CheckCertificateRevocationList = true }))
            {
                await Assert.ThrowsAsync<PlatformNotSupportedException>(() => client.GetAsync(Configuration.Http.SecureRemoteEchoServer));
            }
        }

        // Verifies the channel binding token exposed to outgoing content:
        // valid handle plus "XX XX XX..." hex-triplet description when the
        // backend supports it, explicitly invalid when it does not.
        [OuterLoop] // TODO: Issue #11345
        [PlatformSpecific(TestPlatforms.Windows)] // CopyToAsync(Stream, TransportContext) isn't used on unix
        [Fact]
        public async Task PostAsync_Post_ChannelBinding_ConfiguredCorrectly()
        {
            var content = new ChannelBindingAwareContent("Test contest");
            using (var client = new HttpClient())
            using (HttpResponseMessage response = await client.PostAsync(Configuration.Http.SecureRemoteEchoServer, content))
            {
                // Validate status.
                Assert.Equal(HttpStatusCode.OK, response.StatusCode);

                // Validate the ChannelBinding object exists.
                ChannelBinding channelBinding = content.ChannelBinding;
                Assert.NotNull(channelBinding);

                // Validate the ChannelBinding's validity.
                if (BackendSupportsCustomCertificateHandling)
                {
                    Assert.False(channelBinding.IsInvalid, "Expected valid binding");
                    Assert.NotEqual(IntPtr.Zero, channelBinding.DangerousGetHandle());

                    // Validate the ChannelBinding's description.
                    string channelBindingDescription = channelBinding.ToString();
                    Assert.NotNull(channelBindingDescription);
                    Assert.NotEmpty(channelBindingDescription);
                    // Description is space-separated hex byte pairs: every third
                    // char is a space, the rest are uppercase hex digits.
                    Assert.True((channelBindingDescription.Length + 1) % 3 == 0, $"Unexpected length {channelBindingDescription.Length}");
                    for (int i = 0; i < channelBindingDescription.Length; i++)
                    {
                        char c = channelBindingDescription[i];
                        if (i % 3 == 2)
                        {
                            Assert.Equal(' ', c);
                        }
                        else
                        {
                            Assert.True((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F'), $"Expected hex, got {c}");
                        }
                    }
                }
                else
                {
                    // Backend doesn't support getting the details to create the CBT.
                    Assert.True(channelBinding.IsInvalid, "Expected invalid binding");
                    Assert.Equal(IntPtr.Zero, channelBinding.DangerousGetHandle());
                    Assert.Null(channelBinding.ToString());
                }
            }
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Text;
using System.Windows.Forms;
using CslaGenerator.Metadata;
using CslaGenerator.Util;
using DBSchemaInfo.Base;
using DBSchemaInfo.MsSql;

namespace CslaGenerator.Controls
{
    /// <summary>
    /// UserControl hosting the database-schema tree (tables/views/sprocs) and
    /// the column list used to generate CSLA object definitions.
    /// Designer half of the class (dbTreeView1, dbColumns1, menu items, etc.)
    /// lives in the partial designer file.
    /// </summary>
    public partial class DbSchemaPanel : UserControl
    {
        private CslaGeneratorUnit _currentUnit = null;      // the generation unit being edited
        private CslaObjectInfo _currentCslaObject = null;   // object currently targeted by column actions
        private ObjectFactory _currentFactory = null;       // builds properties/criteria for _currentCslaObject
        private string cn = "";                             // raw connection string

        public DbSchemaPanel(CslaGeneratorUnit cslagenunit, CslaObjectInfo cslaobject, string connection)
        {
            _currentUnit = cslagenunit;
            cn = connection;
            _currentCslaObject = cslaobject;

            // This call is required by the Windows.Forms Form Designer.
            InitializeComponent();
        }

        private void DbSchemaPanel_Load(object sender, EventArgs e)
        {
            // hookup event handler for treeview select
            dbTreeView1.TreeViewAfterSelect += dbTreeView1_TreeViewAfterSelect;
            // hookup event handler for treeview mouseup
            dbTreeView1.TreeViewMouseUp += dbTreeView1_TreeViewMouseUp;
            // set default width
            dbTreeView1.Width = (int)((double)0.5 * (double)Width);
            copySoftDeleteToolStripMenuItem.Checked = false;
        }

        private void DbSchemaPanel_Resize(object sender, EventArgs e)
        {
            // keep treeview and listbox equal widths of 50% of panel body
            // when panel resized
            dbTreeView1.Width = (int)((double)0.5 * (double)Width);
        }

        #region Properties

        internal CslaGeneratorUnit CurrentUnit
        {
            get { return _currentUnit; }
            set { _currentUnit = value; }
        }

        // Setting the target object also rebuilds the factory bound to it.
        internal CslaObjectInfo CslaObjectInfo
        {
            get { return _currentCslaObject; }
            set
            {
                _currentCslaObject = value;
                _currentFactory = new ObjectFactory(_currentUnit, _currentCslaObject);
            }
        }

        internal string ConnectionString
        {
            get { return cn; }
            set { cn = value; }
        }

        internal TreeView TreeViewSchema
        {
            get { return dbTreeView1.TreeViewSchema; }
        }

        internal ListBox DbColumns
        {
            get { return dbColumns1.ListColumns; }
        }

        internal PropertyGrid PropertyGridColumn
        {
            get { return dbColumns1.PropertyGridColumn; }
        }

        internal PropertyGrid PropertyGridDbObjects
        {
            get { return dbTreeView1.PropertyGridDbObjects; }
        }

        internal Dictionary<string, IColumnInfo> SelectedColumns
        {
            get { return dbColumns1.SelectedIndices; }
        }

        internal int SelectedColumnsCount
        {
            get { return dbColumns1.SelectedIndicesCount; }
        }

        // Soft-delete column handling is active only when configured in the
        // unit parameters AND not overridden by the "copy soft delete" toggle.
        internal bool UseBoolSoftDelete
        {
            get
            {
                return !string.IsNullOrEmpty(_currentUnit.Params.SpBoolSoftDeleteColumn) &&
                       !copySoftDeleteToolStripMenuItem.Checked;
            }
        }

        #endregion

        #region Methods

        internal void SetDbColumnsPctHeight(double pct)
        {
            dbColumns1.SetDbColumnsPctHeight(pct);
        }

        internal void SetDbTreeViewPctHeight(double pct)
        {
            dbTreeView1.SetDbTreeViewPctHeight(pct);
        }

        #endregion

        // called to populate treeview from provided database connection
        ICatalog catalog = null;

        // Loads the SQL catalog (tables, views, procedures), pre-loads the
        // procedures the current unit references, and rebuilds the tree.
        public void BuildSchemaTree()
        {
            TreeViewSchema.Nodes.Clear();
            TreeViewSchema.ImageList = schemaImages;
            string catalogName = null;
            // Naive connection-string parse: last "initial catalog"/"database"
            // entry wins. NOTE(review): a quoted value containing ';' would
            // break this — consider SqlConnectionStringBuilder.
            string[] cnparts = cn.ToLower().Split(';');
            foreach (string cnpart in cnparts)
            {
                if (cnpart.Contains("initial catalog=") || cnpart.Contains("database="))
                {
                    catalogName = cnpart.Substring(cnpart.IndexOf("=") + 1).Trim();
                }
            }
            OutputWindow.Current.ClearOutput();
            catalog = new SqlCatalog(cn, catalogName);
            DateTime start;
            DateTime end;
            //OutputWindow.Current.AddOutputInfo("Load Tables & Views Start:" + DateTime.Now.ToLongTimeString());
            start = DateTime.Now;
            catalog.LoadStaticObjects();
            end = DateTime.Now;
            //OutputWindow.Current.AddOutputInfo("Load Tables & Views End:" + end.ToLongTimeString());
            OutputWindow.Current.AddOutputInfo(string.Format("Loaded {0} tables and {1} views in {2:0.00} seconds...", catalog.Tables.Count.ToString(), catalog.Views.Count.ToString(), end.Subtract(start).TotalSeconds));
            //OutputWindow.Current.AddOutputInfo("Load Procedures Start:" + DateTime.Now.ToLongTimeString());
            start = DateTime.Now;
            catalog.LoadProcedures();
            end = DateTime.Now;
            //OutputWindow.Current.AddOutputInfo("Load Procedures End:" + end.ToLongTimeString());
            OutputWindow.Current.AddOutputInfo(string.Format("Found {0} sprocs in {1:0.00} seconds...", catalog.Procedures.Count.ToString(), end.Subtract(start).TotalSeconds), 2);
            // Eagerly reload only the sprocs actually referenced by the unit,
            // reporting per-sproc timing or failure.
            SprocName[] requiredSprocs = GetRequiredProcedureList();
            if (requiredSprocs.Length > 0)
                OutputWindow.Current.AddOutputInfo("Loading required procedures:");
            foreach (SprocName sp in requiredSprocs)
            {
                StringBuilder sb = new StringBuilder();
                if (!string.IsNullOrEmpty(sp.Schema))
                    sb.Append(sp.Schema).Append(".");
                sb.Append(sp.Name);
                sb.Append(": ");
                try
                {
                    IStoredProcedureInfo sproc = catalog.Procedures[null, sp.Schema == "" ? null : sp.Schema, sp.Name];
                    if (sproc != null)
                    {
                        start = DateTime.Now;
                        sproc.Reload(true);
                        end = DateTime.Now;
                        sb.AppendFormat("Loaded in {0:0.00} seconds...", end.Subtract(start).TotalSeconds);
                    }
                    else
                        sb.Append("Not Found!");
                }
                catch (Exception ex)
                {
                    sb.AppendLine(ex.Message);
                    sb.AppendLine("Stack Trace:");
                    sb.AppendLine();
                    sb.AppendLine(ex.StackTrace);
                    sb.AppendLine();
                }
                OutputWindow.Current.AddOutputInfo(sb.ToString());
            }
            GeneratorController.Catalog = catalog;
            if (!String.IsNullOrEmpty(catalog.CatalogName))
                paneDbName.Caption = catalog.CatalogName;
            else
                paneDbName.Caption = "Database Schema";
            if (_currentUnit != null)
            {
                _currentUnit.ConnectionString = cn;
            }
            dbTreeView1.BuildSchemaTree(catalog);
            // Refresh column bindings of every object against the new catalog.
            foreach (CslaObjectInfo info in _currentUnit.CslaObjects)
            {
                if (catalog != null)
                {
                    info.LoadColumnInfo(catalog);
                }
            }
        }

        // Schema-qualified sproc name with case-insensitive value equality.
        private class SprocName : IEquatable<SprocName>
        {
            private string _Schema;

            public string Schema
            {
                get { return _Schema; }
            }

            private string _Name;

            public string Name
            {
                get { return _Name; }
            }

            /// <summary>
            /// Initializes a new instance of the SprocName class.
            /// </summary>
            /// <param name="schema">Schema part; null is stored as empty.</param>
            /// <param name="name">Procedure name; null is stored as empty.</param>
            public SprocName(string schema, string name)
            {
                _Schema = schema == null ? string.Empty : schema;
                _Name = name == null ?
string.Empty : name;
            }

            #region IEquatable<SprocName> Members

            public bool Equals(SprocName other)
            {
                // Case-insensitive match on both schema and name.
                return (_Name.Equals(other._Name, StringComparison.CurrentCultureIgnoreCase) &&
                        _Schema.Equals(other._Schema, StringComparison.CurrentCultureIgnoreCase));
            }

            #endregion
        }

        // Collects the distinct stored procedures referenced by any value
        // property or criteria property of the unit's objects.
        private SprocName[] GetRequiredProcedureList()
        {
            List<SprocName> list = new List<SprocName>();
            foreach (CslaObjectInfo obj in _currentUnit.CslaObjects)
            {
                foreach (ValueProperty prop in obj.GetAllValueProperties())
                {
                    if (prop.DbBindColumn.ColumnOriginType == ColumnOriginType.StoredProcedure)
                    {
                        SprocName sp = new SprocName(prop.DbBindColumn.SchemaName, prop.DbBindColumn.ObjectName);
                        if (!list.Contains(sp))
                            list.Add(sp);
                    }
                }
                foreach (Criteria crit in obj.CriteriaObjects)
                {
                    foreach (CriteriaProperty prop in crit.Properties)
                    {
                        if (prop.DbBindColumn.ColumnOriginType == ColumnOriginType.StoredProcedure)
                        {
                            SprocName sp = new SprocName(prop.DbBindColumn.SchemaName, prop.DbBindColumn.ObjectName);
                            if (!list.Contains(sp))
                                list.Add(sp);
                        }
                    }
                }
            }
            return list.ToArray();
        }

        private void dbTreeView1_TreeViewMouseUp(object sender, MouseEventArgs e)
        {
            TreeNode node = TreeViewSchema.GetNodeAt(e.X, e.Y);
            // NOTE(review): GetNodeAt is called twice; the second call could
            // reuse `node`.
            if (TreeViewSchema.GetNodeAt(e.X, e.Y) == null)
            {
                isDBItemSelected = false;
                return;
            }
            if (e.Button == MouseButtons.Right)
            {
                // right-click selects the node so the context menu targets it
                TreeViewSchema.SelectedNode = node;
            }
            TreeNodeSelected(node);
        }

        private void dbTreeView1_TreeViewAfterSelect(object sender, TreeViewEventArgs e)
        {
            TreeNodeSelected(e.Node);
        }

        bool isDBItemSelected;      // true while the selected node maps to an IResultSet
        TreeNode currentTreeNode = null;    // last node handed to TreeNodeSelected

        // Refreshes the column list / property grids for the given tree node;
        // only nodes tagged with an IResultSet expose columns.
        private void TreeNodeSelected(TreeNode node)
        {
            currentTreeNode = node;
            dbColumns1.Clear();
            isDBItemSelected = false;
            PropertyGridColumn.SelectedObject = null;
            SetDbColumnsPctHeight(73);
            if (node != null)
            {
                if (node.Tag != null)
                {
                    isDBItemSelected = true;
                    if (node.Tag is IResultSet)
                    {
                        PropertyGridDbObjects.SelectedObject = node.Tag;
                        foreach (IColumnInfo col in ((IResultSet)node.Tag).Columns)
                        {
                            DbColumns.Items.Add(col);
                        }
                    }
                    else
                    {
                        isDBItemSelected = false;
                    }
                }
            }
        }

        private int GetCurrentResultSetIndex()
        {
            // this is a hack because the CommandResultColumnSchema does not store a reference to its CommandResult
            //return frmGenerator.TreeViewSchema.SelectedNode.Index;
            return TreeViewSchema.SelectedNode.Index;
        }

        private void SetDbBindColumn(IColumnInfo p, DbBindColumn dbc)
        {
            SetDbBindColumn(TreeViewSchema.SelectedNode, p, dbc);
        }

        // Fills a DbBindColumn from the tree node's result set: origin type,
        // catalog/schema/object/column names, and (for sprocs) the result-set
        // index; finally resolves the column against the loaded catalog.
        public static void SetDbBindColumn(TreeNode node, IColumnInfo p, DbBindColumn dbc)
        {
            //TreeNode node = TreeViewSchema.SelectedNode;
            IResultSet rs = (IResultSet)node.Tag;
            IStoredProcedureInfo sp = null;
            if (node.Parent.Tag != null)
                sp = (IStoredProcedureInfo)node.Parent.Tag;
            IDataBaseObject obj = null;
            if (sp != null)
            {
                obj = sp;
                dbc.SpResultIndex = sp.ResultSets.IndexOf(rs);
            }
            else
                obj = (IDataBaseObject)rs;
            switch (rs.Type)
            {
                case ResultType.Table:
                    dbc.ColumnOriginType = ColumnOriginType.Table;
                    break;
                case ResultType.View:
                    dbc.ColumnOriginType = ColumnOriginType.View;
                    break;
                case ResultType.StoredProcedure:
                    dbc.ColumnOriginType = ColumnOriginType.StoredProcedure;
                    break;
            }
            //dbc.ColumnOriginType=
            dbc.CatalogName = obj.ObjectCatalog;
            dbc.SchemaName = obj.ObjectSchema;
            dbc.ObjectName = obj.ObjectName;
            dbc.ColumnName = p.ColumnName;
            dbc.LoadColumn(GeneratorController.Catalog);
        }

        #region Context menu handlers

        private void addToCslaObjectToolStripMenuItem_Click(object sender, EventArgs e)
        {
            AddPropertiesForSelectedColumns();
        }

        private void selectAllToolStripMenuItem_Click(object sender, EventArgs e)
        {
            dbColumns1.SelectAll();
        }

        private void unselectAllToolStripMenuItem_Click(object sender, EventArgs e)
        {
            dbColumns1.UnSelectAll();
        }

        // Creates a read-only collection + item object pair, then adds the
        // selected columns as properties of the new item.
        private void readOnlyCollectionToolStripMenuItem_Click(object sender, EventArgs e)
        {
            NewObjectDefaults frm = NewObjectDefaults.NewReadOnlyListProperties();
            if (frm.ShowDialog() == DialogResult.OK)
            {
                string collectionName = frm.GetPropertyValue("CollectionName");
                string itemName = frm.GetPropertyValue("ItemName");
                NewCollection(CslaObjectType.ReadOnlyCollection, collectionName, itemName);
                NewObject(CslaObjectType.ReadOnlyObject, itemName, collectionName);
                AddPropertiesForSelectedColumns();
            }
        }

        // Creates an editable root collection + editable child pair.
        private void editableRootCollectionToolStripMenuItem_Click(object sender, EventArgs e)
        {
            NewObjectDefaults frm = NewObjectDefaults.NewReadOnlyListProperties();
            if (frm.ShowDialog() == DialogResult.OK)
            {
                string collectionName = frm.GetPropertyValue("CollectionName");
                string itemName = frm.GetPropertyValue("ItemName");
                NewCollection(CslaObjectType.EditableRootCollection, collectionName, itemName);
                NewObject(CslaObjectType.EditableChild, itemName, collectionName);
                AddPropertiesForSelectedColumns();
            }
        }

        // Creates a dynamic editable root collection + dynamic root pair.
        private void dynamicEditableRootCollectionToolStripMenuItem_Click(object sender, EventArgs e)
        {
            NewObjectDefaults frm = NewObjectDefaults.NewReadOnlyListProperties();
            if (frm.ShowDialog() == DialogResult.OK)
            {
                string collectionName = frm.GetPropertyValue("CollectionName");
                string itemName = frm.GetPropertyValue("ItemName");
                NewCollection(CslaObjectType.DynamicEditableRootCollection, collectionName, itemName);
                NewObject(CslaObjectType.DynamicEditableRoot, itemName, collectionName);
                AddPropertiesForSelectedColumns();
            }
        }

        // Creates an editable child collection under an existing parent type,
        // wiring parent key properties and a child-collection property with
        // load parameters drawn from the parent's fetch criteria.
        private void editableChildCollectionToolStripMenuItem_Click(object sender, EventArgs e)
        {
            NewObjectDefaults frm = NewObjectDefaults.NewChildListProperties();
            if (frm.ShowDialog() == DialogResult.OK)
            {
                string collectionName = frm.GetPropertyValue("CollectionName");
                string itemName = frm.GetPropertyValue("ItemName");
                string parentName = frm.GetPropertyValue("ParentType");
                string propertyName = frm.GetPropertyValue("PropertyNameInParentType");
                CslaObjectInfo parent = _currentUnit.CslaObjects.Find(parentName);
                if (parent == null)
                {
                    MessageBox.Show(@"Parent type not found", @"CslaGenerator", MessageBoxButtons.OK, MessageBoxIcon.Error);
                    return;
                }
                NewCollection(CslaObjectType.EditableChildCollection, collectionName, itemName, parentName);
                NewObject(CslaObjectType.EditableChild, itemName, collectionName);
                AddPropertiesForSelectedColumns();
                ArrayList lst =
new ArrayList();
                // copy the parent's primary-key properties onto the new child
                foreach (ValueProperty p in parent.ValueProperties)
                    if (p.PrimaryKey != ValueProperty.UserDefinedKeyBehaviour.Default)
                        lst.Add(p);
                foreach (Property p in lst)
                    _currentCslaObject.ParentProperties.Add(p);
                ChildProperty col = new ChildProperty();
                col.TypeName = collectionName;
                if (!string.IsNullOrEmpty(propertyName))
                    col.Name = propertyName;
                else
                    col.Name = collectionName;
                col.ReadOnly = true;
                // child collection loads with the parameters of every fetching
                // criteria (factory / add-remove / data-portal) of the parent
                foreach (var crit in parent.CriteriaObjects)
                {
                    if (crit.GetOptions.Factory || crit.GetOptions.AddRemove || crit.GetOptions.DataPortal)
                    {
                        foreach (var prop in crit.Properties)
                        {
                            col.LoadParameters.Add(new Parameter(crit, prop));
                        }
                    }
                }
                parent.ChildCollectionProperties.Add(col);
            }
        }

        // Enables/disables menu entries for the current selection and rebuilds
        // the "add inherited value property" submenu (one entry per inherited
        // property, flagged ASSIGN when unbound, UPDATE when already bound).
        private void contextMenuStrip1_Opening(object sender, CancelEventArgs e)
        {
            bool objSelected = (_currentCslaObject != null);
            addToCslaObjectToolStripMenuItem.Enabled = objSelected;
            newCriteriaToolStripMenuItem.Enabled = objSelected;
            editableChildCollectionToolStripMenuItem.Enabled = objSelected;
            // tear down previous submenu, unhooking handlers to avoid leaks
            while (addInheritedValuePropertyToolStripMenuItem.DropDownItems.Count > 0)
            {
                ToolStripItem mnu = addInheritedValuePropertyToolStripMenuItem.DropDownItems[0];
                mnu.Click -= addInheritedValuePropertyToolStripMenuItem_DropDownItemClicked;
                addInheritedValuePropertyToolStripMenuItem.DropDownItems.RemoveAt(0);
            }
            addInheritedValuePropertyToolStripMenuItem.Enabled = false;
            // inherited-property binding only makes sense for exactly one column
            if (dbColumns1.SelectedIndicesCount != 1)
                return;
            foreach (ValueProperty prop in _currentCslaObject.InheritedValueProperties)
            {
                ToolStripMenuItem mnu = new ToolStripMenuItem();
                mnu.Text = prop.Name;
                if (prop.DbBindColumn.ColumnOriginType == ColumnOriginType.None)
                    mnu.Text += " (ASSIGN)";
                else
                    mnu.Text += " (UPDATE)";
                mnu.Click += addInheritedValuePropertyToolStripMenuItem_DropDownItemClicked;
                mnu.Checked = (prop.DbBindColumn.ColumnOriginType != ColumnOriginType.None);
                mnu.Tag = prop.Name;
                addInheritedValuePropertyToolStripMenuItem.DropDownItems.Add(mnu);
            }
            addInheritedValuePropertyToolStripMenuItem.Enabled = (addInheritedValuePropertyToolStripMenuItem.DropDownItems.Count > 0);
            bool enableCreates = (dbColumns1.ListColumns.SelectedIndices.Count > 0);
            createToolStripMenuItem.Enabled = enableCreates;
            newCriteriaToolStripMenuItem.Enabled = enableCreates;
        }

        // Binds the selected column(s) to the inherited value property whose
        // name was stashed in the clicked menu item's Tag.
        private void addInheritedValuePropertyToolStripMenuItem_DropDownItemClicked(object sender, EventArgs e)
        {
            string name = (string)((ToolStripMenuItem)sender).Tag;
            foreach (IColumnInfo col in SelectedColumns.Values)
            {
                // use name of column to see if a property of the same name exists
                foreach (ValueProperty valProp in _currentCslaObject.InheritedValueProperties)
                {
                    if (valProp.Name.Equals(name))
                    {
                        _currentFactory.SetValuePropertyInfo(GetCurrentDBObject(), GetCurrentResultSet(), col, valProp);
                    }
                }
            }
        }

        private void createEditableRootToolStripMenuItem_Click(object sender, EventArgs e)
        {
            if (isDBItemSelected)
            {
                dbColumns1.SelectAll(UseBoolSoftDelete ? _currentUnit.Params.SpBoolSoftDeleteColumn : "");
                NewObject(CslaObjectType.EditableRoot, dbTreeView1.TreeViewSchema.SelectedNode.Text, "");
                AddPropertiesForSelectedColumns();
            }
        }

        private void createDynamicEditableRootToolStripMenuItem_Click(object sender, EventArgs e)
        {
            if (isDBItemSelected)
            {
                dbColumns1.SelectAll(UseBoolSoftDelete ? _currentUnit.Params.SpBoolSoftDeleteColumn : "");
                NewObject(CslaObjectType.DynamicEditableRoot, dbTreeView1.TreeViewSchema.SelectedNode.Text, "");
                AddPropertiesForSelectedColumns();
            }
        }

        private void createReadonlyCollectionToolStripMenuItem_Click(object sender, EventArgs e)
        {
            if (!isDBItemSelected)
                return;
            // NOTE(review): uses a different SelectAll overload than the other
            // create* handlers (which pass the soft-delete column) — confirm
            // this asymmetry is intentional.
            dbColumns1.SelectAll(this, _currentUnit);
            readOnlyCollectionToolStripMenuItem_Click(sender, e);
        }

        private void createEditableRootCollectionToolStripMenuItem_Click(object sender, EventArgs e)
        {
            if (!isDBItemSelected)
                return;
            dbColumns1.SelectAll(UseBoolSoftDelete ? _currentUnit.Params.SpBoolSoftDeleteColumn : "");
            editableRootCollectionToolStripMenuItem_Click(sender, e);
        }

        private void createDynamicEditableRootCollectionToolStripMenuItem_Click(object sender, EventArgs e)
        {
            if (!isDBItemSelected)
                return;
            dbColumns1.SelectAll(UseBoolSoftDelete ? _currentUnit.Params.SpBoolSoftDeleteColumn : "");
            dynamicEditableRootCollectionToolStripMenuItem_Click(sender, e);
        }

        // Builds a Criteria from the selected columns, generating a unique
        // "Criteria<ColNames>[n]" name, and adds it after user confirmation.
        private void newCriteriaToolStripMenuItem_Click(object sender, EventArgs e)
        {
            if (_currentCslaObject == null)
                return;
            string colNames = string.Empty;
            List<CriteriaProperty> cols = new List<CriteriaProperty>();
            for (int i = 0; i < SelectedColumns.Count; i++)
            {
                IColumnInfo info = (IColumnInfo)this.dbColumns1.ListColumns.SelectedItems[i];
                CriteriaProperty p = new CriteriaProperty(info.ColumnName, TypeHelper.GetTypeCodeEx(info.ManagedType), info.ColumnName);
                SetDbBindColumn(info, p.DbBindColumn);
                cols.Add(p);
                colNames += p.Name;
            }
            if (cols.Count == 0)
                return;
            string name = "Criteria" + colNames;
            int num = 0;
            // bump a numeric suffix until the name is unique on the object
            while (true)
            {
                if (_currentCslaObject.CriteriaObjects.Contains(name))
                {
                    num++;
                    name = "Criteria" + colNames + num;
                }
                else
                    break;
            }
            Criteria c = new Criteria(_currentCslaObject);
            c.Name = name;
            c.Properties.AddRange(cols);
            c.SetSprocNames();
            Design.ObjectEditorForm frm = new Design.ObjectEditorForm();
            frm.ObjectToEdit = c;
            frm.StartPosition = FormStartPosition.CenterScreen;
            if (frm.ShowDialog() == DialogResult.OK)
                _currentCslaObject.CriteriaObjects.Add(c);
        }

        // Builds a NameValueList from exactly two selected columns: the PK
        // column becomes the value, the other column the name.
        private void nameValueListToolStripMenuItem_Click(object sender, EventArgs e)
        {
            IColumnInfo pkColumn = null;
            IColumnInfo valueColumn = null;
            foreach (IColumnInfo info in dbColumns1.ListColumns.SelectedItems)
            {
                if (info.IsPrimaryKey)
                    pkColumn = info;
                else
                    valueColumn = info;
            }
            if (pkColumn != null && valueColumn != null && dbColumns1.ListColumns.SelectedItems.Count == 2)
            {
                NewObjectDefaults frm = NewObjectDefaults.NewNVLProperties();
                if (frm.ShowDialog() == DialogResult.OK)
                {
                    string collectionName =
frm.GetPropertyValue("CollectionName");
                    NewNVL(collectionName);
                    AddPropertiesForSelectedColumns();
                    _currentCslaObject.NameColumn = valueColumn.ColumnName;
                    _currentCslaObject.ValueColumn = pkColumn.ColumnName;
                }
            }
            else
                MessageBox.Show(@"You must select a PK column and a non PK column in order to automatically create a name value list. If you need to create a NVL and can't meet this requirement, create a new object manually through the toolbar.", "New NVL", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }

        #endregion

        #region New Object creation

        private void NewCollection(CslaObjectType type, string name, string item)
        {
            NewCollection(type, name, item, String.Empty);
        }

        // Adds a new collection object to the unit with default criteria.
        // Note: unlike NewObject/NewNVL this does NOT retarget _currentCslaObject.
        private void NewCollection(CslaObjectType type, string name, string item, string parent)
        {
            CslaObjectInfo obj = new CslaObjectInfo(_currentUnit);
            obj.ObjectType = type;
            obj.ObjectName = ParseObjectName(name);
            obj.ParentType = parent;
            obj.ItemType = item;
            _currentUnit.CslaObjects.Add(obj);
            _currentFactory.AddDefaultCriteriaAndParameters(obj);
        }

        // Adds a NameValueList object and makes it the current target.
        private void NewNVL(string name)
        {
            CslaObjectInfo obj = new CslaObjectInfo(_currentUnit);
            obj.ObjectType = CslaObjectType.NameValueList;
            obj.ObjectName = ParseObjectName(name);
            _currentUnit.CslaObjects.Add(obj);
            _currentCslaObject = obj;
            _currentFactory.AddDefaultCriteriaAndParameters();
        }

        // Strips any dotted qualifier, keeping the text after the last '.'.
        private string ParseObjectName(string name)
        {
            if (name != null)
            {
                int idx = name.LastIndexOf(".");
                idx++;
                if (idx > 0)
                    return name.Substring(idx);
                return name;
            }
            return string.Empty;
        }

        // Adds a new (non-collection) object and makes it the current target.
        private void NewObject(CslaObjectType type, string name, string parent)
        {
            CslaObjectInfo obj = new CslaObjectInfo(_currentUnit);
            obj.ObjectType = type;
            obj.ObjectName = ParseObjectName(name);
            obj.ParentType = parent;
            obj.ParentInsertOnly = true;
            _currentUnit.CslaObjects.Add(obj);
            _currentCslaObject = obj;
        }

        #endregion

        // Pushes every selected column into the current object as value
        // properties via the factory.
        private void AddPropertiesForSelectedColumns()
        {
            if (_currentCslaObject == null)
                return;
            if (SelectedColumnsCount == 0)
            {
                MessageBox.Show(this, @"You must first select a column to add.", @"Warning");
                return;
            }
            List<IColumnInfo> columns = new List<IColumnInfo>();
            for (int i = 0; i < SelectedColumns.Count; i++)
            {
                columns.Add((IColumnInfo)dbColumns1.ListColumns.SelectedItems[i]);
            }
            IDataBaseObject dbObject = GetCurrentDBObject();
            IResultSet resultSet = GetCurrentResultSet();
            _currentFactory.AddProperties(_currentCslaObject, dbObject, resultSet, columns, true, false);
        }

        private IResultSet GetCurrentResultSet()
        {
            if (currentTreeNode == null)
                return null;
            return currentTreeNode.Tag as IResultSet;
        }

        // Sproc result sets hang off a parent node tagged with the sproc;
        // tables/views are their own database object.
        private IDataBaseObject GetCurrentDBObject()
        {
            if (currentTreeNode.Parent.Tag != null)
                return currentTreeNode.Parent.Tag as IDataBaseObject;
            return GetCurrentResultSet() as IDataBaseObject;
        }

        // Reloads the schema object behind the current node and refreshes the
        // tree/columns; failures are reported to the output window.
        private void reloadToolStripMenuItem_Click(object sender, EventArgs e)
        {
            IDataBaseObject obj = currentTreeNode.Tag as IDataBaseObject;
            if (obj != null)
            {
                try
                {
                    obj.Reload(true);
                    dbTreeView1.LoadNode(currentTreeNode, obj);
                    TreeNodeSelected(currentTreeNode);
                }
                catch (Exception ex)
                {
                    OutputWindow.Current.ClearOutput();
                    OutputWindow.Current.AddOutputInfo(ex.Message, 2);
                    //OutputWindow.Current.AddOutputInfo(ex.StackTrace, 2);
                }
            }
        }
    }
}
//------------------------------------------------------------------------------ // <copyright file="BlogsEntryService.cs"> // Copyright (c) 2014-present Andrea Di Giorgi // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. 
// </copyright>
// <author>Andrea Di Giorgi</author>
// <website>https://github.com/Ithildir/liferay-sdk-builder-windows</website>
//------------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;

namespace Liferay.SDK.Service.V62.BlogsEntry
{
	/// <summary>
	/// Client for the Liferay Portal 6.2 "blogsentry" remote JSON web service.
	/// Every method packs its arguments into a <c>JsonObject</c>, wraps it in a
	/// single-entry command object keyed by the service path, and submits the
	/// command through the underlying session.
	/// </summary>
	public class BlogsEntryService : ServiceBase
	{
		public BlogsEntryService(ISession session)
			: base(session)
		{
		}

		/// <summary>Deletes the blog entry with the given id.</summary>
		public async Task DeleteEntryAsync(long entryId)
		{
			var parameters = new JsonObject
			{
				{ "entryId", entryId }
			};

			await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/delete-entry", parameters } });
		}

		/// <summary>Fetches up to <paramref name="max"/> company-scoped entries.</summary>
		public async Task<IEnumerable<dynamic>> GetCompanyEntriesAsync(long companyId, long displayDate, int status, int max)
		{
			var parameters = new JsonObject
			{
				{ "companyId", companyId },
				{ "displayDate", displayDate },
				{ "status", status },
				{ "max", max }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-company-entries", parameters } });
			return (IEnumerable<dynamic>)response;
		}

		/// <summary>Builds an RSS feed of company-scoped entries and returns it as a string.</summary>
		public async Task<string> GetCompanyEntriesRssAsync(long companyId, long displayDate, int status, int max, string type, double version, string displayStyle, string feedURL, string entryURL, JsonObjectWrapper themeDisplay)
		{
			var parameters = new JsonObject
			{
				{ "companyId", companyId },
				{ "displayDate", displayDate },
				{ "status", status },
				{ "max", max },
				{ "type", type },
				{ "version", version },
				{ "displayStyle", displayStyle },
				{ "feedURL", feedURL },
				{ "entryURL", entryURL }
			};

			// The ThemeDisplay argument is a wrapped server-side object and must be
			// mangled into the parameter map with its Java class name.
			this.MangleWrapper(parameters, "themeDisplay", "com.liferay.portal.theme.ThemeDisplay", themeDisplay);

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-company-entries-rss", parameters } });
			return (string)response;
		}

		/// <summary>Fetches a single entry by id.</summary>
		public async Task<dynamic> GetEntryAsync(long entryId)
		{
			var parameters = new JsonObject
			{
				{ "entryId", entryId }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-entry", parameters } });
			return (dynamic)response;
		}

		/// <summary>Fetches a single entry by group and URL title.</summary>
		public async Task<dynamic> GetEntryAsync(long groupId, string urlTitle)
		{
			var parameters = new JsonObject
			{
				{ "groupId", groupId },
				{ "urlTitle", urlTitle }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-entry", parameters } });
			return (dynamic)response;
		}

		/// <summary>Fetches up to <paramref name="max"/> group-scoped entries.</summary>
		public async Task<IEnumerable<dynamic>> GetGroupEntriesAsync(long groupId, int status, int max)
		{
			var parameters = new JsonObject
			{
				{ "groupId", groupId },
				{ "status", status },
				{ "max", max }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-group-entries", parameters } });
			return (IEnumerable<dynamic>)response;
		}

		/// <summary>Fetches up to <paramref name="max"/> group-scoped entries filtered by display date.</summary>
		public async Task<IEnumerable<dynamic>> GetGroupEntriesAsync(long groupId, long displayDate, int status, int max)
		{
			var parameters = new JsonObject
			{
				{ "groupId", groupId },
				{ "displayDate", displayDate },
				{ "status", status },
				{ "max", max }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-group-entries", parameters } });
			return (IEnumerable<dynamic>)response;
		}

		/// <summary>Fetches a range [start, end) of group-scoped entries.</summary>
		public async Task<IEnumerable<dynamic>> GetGroupEntriesAsync(long groupId, int status, int start, int end)
		{
			var parameters = new JsonObject
			{
				{ "groupId", groupId },
				{ "status", status },
				{ "start", start },
				{ "end", end }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-group-entries", parameters } });
			return (IEnumerable<dynamic>)response;
		}

		/// <summary>Fetches a range [start, end) of group-scoped entries filtered by display date.</summary>
		public async Task<IEnumerable<dynamic>> GetGroupEntriesAsync(long groupId, long displayDate, int status, int start, int end)
		{
			var parameters = new JsonObject
			{
				{ "groupId", groupId },
				{ "displayDate", displayDate },
				{ "status", status },
				{ "start", start },
				{ "end", end }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-group-entries", parameters } });
			return (IEnumerable<dynamic>)response;
		}

		/// <summary>Counts group-scoped entries with the given workflow status.</summary>
		public async Task<long> GetGroupEntriesCountAsync(long groupId, int status)
		{
			var parameters = new JsonObject
			{
				{ "groupId", groupId },
				{ "status", status }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-group-entries-count", parameters } });
			return (long)response;
		}

		/// <summary>Counts group-scoped entries filtered by display date and status.</summary>
		public async Task<long> GetGroupEntriesCountAsync(long groupId, long displayDate, int status)
		{
			var parameters = new JsonObject
			{
				{ "groupId", groupId },
				{ "displayDate", displayDate },
				{ "status", status }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-group-entries-count", parameters } });
			return (long)response;
		}

		/// <summary>Builds an RSS feed of group-scoped entries and returns it as a string.</summary>
		public async Task<string> GetGroupEntriesRssAsync(long groupId, long displayDate, int status, int max, string type, double version, string displayStyle, string feedURL, string entryURL, JsonObjectWrapper themeDisplay)
		{
			var parameters = new JsonObject
			{
				{ "groupId", groupId },
				{ "displayDate", displayDate },
				{ "status", status },
				{ "max", max },
				{ "type", type },
				{ "version", version },
				{ "displayStyle", displayStyle },
				{ "feedURL", feedURL },
				{ "entryURL", entryURL }
			};

			// The ThemeDisplay argument is a wrapped server-side object and must be
			// mangled into the parameter map with its Java class name.
			this.MangleWrapper(parameters, "themeDisplay", "com.liferay.portal.theme.ThemeDisplay", themeDisplay);

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-group-entries-rss", parameters } });
			return (string)response;
		}

		/// <summary>Fetches entries across a company's groups, optionally excluding one group.</summary>
		public async Task<IEnumerable<dynamic>> GetGroupsEntriesAsync(long companyId, long groupId, long displayDate, int status, int max)
		{
			var parameters = new JsonObject
			{
				{ "companyId", companyId },
				{ "groupId", groupId },
				{ "displayDate", displayDate },
				{ "status", status },
				{ "max", max }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-groups-entries", parameters } });
			return (IEnumerable<dynamic>)response;
		}

		/// <summary>Fetches up to <paramref name="max"/> organization-scoped entries.</summary>
		public async Task<IEnumerable<dynamic>> GetOrganizationEntriesAsync(long organizationId, long displayDate, int status, int max)
		{
			var parameters = new JsonObject
			{
				{ "organizationId", organizationId },
				{ "displayDate", displayDate },
				{ "status", status },
				{ "max", max }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-organization-entries", parameters } });
			return (IEnumerable<dynamic>)response;
		}

		/// <summary>Builds an RSS feed of organization-scoped entries and returns it as a string.</summary>
		public async Task<string> GetOrganizationEntriesRssAsync(long organizationId, long displayDate, int status, int max, string type, double version, string displayStyle, string feedURL, string entryURL, JsonObjectWrapper themeDisplay)
		{
			var parameters = new JsonObject
			{
				{ "organizationId", organizationId },
				{ "displayDate", displayDate },
				{ "status", status },
				{ "max", max },
				{ "type", type },
				{ "version", version },
				{ "displayStyle", displayStyle },
				{ "feedURL", feedURL },
				{ "entryURL", entryURL }
			};

			// The ThemeDisplay argument is a wrapped server-side object and must be
			// mangled into the parameter map with its Java class name.
			this.MangleWrapper(parameters, "themeDisplay", "com.liferay.portal.theme.ThemeDisplay", themeDisplay);

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/get-organization-entries-rss", parameters } });
			return (string)response;
		}

		/// <summary>Moves the entry to the recycle bin and returns the updated entry.</summary>
		public async Task<dynamic> MoveEntryToTrashAsync(long entryId)
		{
			var parameters = new JsonObject
			{
				{ "entryId", entryId }
			};

			var response = await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/move-entry-to-trash", parameters } });
			return (dynamic)response;
		}

		/// <summary>Restores the entry from the recycle bin.</summary>
		public async Task RestoreEntryFromTrashAsync(long entryId)
		{
			var parameters = new JsonObject
			{
				{ "entryId", entryId }
			};

			await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/restore-entry-from-trash", parameters } });
		}

		/// <summary>Subscribes the current user to the group's blog.</summary>
		public async Task SubscribeAsync(long groupId)
		{
			var parameters = new JsonObject
			{
				{ "groupId", groupId }
			};

			await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/subscribe", parameters } });
		}

		/// <summary>Unsubscribes the current user from the group's blog.</summary>
		public async Task UnsubscribeAsync(long groupId)
		{
			var parameters = new JsonObject
			{
				{ "groupId", groupId }
			};

			await this.Session.InvokeAsync(new JsonObject { { "/blogsentry/unsubscribe", parameters } });
		}
	}
}
using UnityEngine;
using UnityEditor;
using System.Collections;
using System.Collections.Generic;

namespace tk2dEditor.SpriteAnimationEditor
{
	// Bit flags reported by an AnimOperator back to the host editor, describing
	// what the operation changed so the editor knows what to refresh/serialize.
	public enum AnimEditOperations
	{
		None = 0,
		AllClipsChanged = 1, // the allClips list has changed
		ClipContentChanged = 2, // the content of the clip has changed, only for the clipinspector & frame group inspector
		ClipNameChanged = 4, // clips name has changed
		NewClipCreated = 8, // the returned selectedClip is a newly created clip
	};

	// Inherit directly from this class to plug an operation into the sprite
	// animation editor. Subclasses report what they did via "operations".
	public class AnimOperator
	{
		protected AnimEditOperations operations = AnimEditOperations.None;
		// Flags describing the last edit performed by this operator.
		public AnimEditOperations AnimEditOperations { get { return operations; } }

		public int sortId = 0; // Sort id allows the operations to be sorted for draw = decide how it appears in the inspector
		// Negative numbers are reserved for the system
		public int SortId { get { return sortId; } }

		// Insert menu item into "Create" menu. Default: contributes nothing.
		public virtual string[] AnimToolsMenu { get { return new string[0]; } }

		// Called by system when one of the anim tools menu above is selected.
		// Return new selection if selection changed.
		public virtual tk2dSpriteAnimationClip OnAnimMenu(string menuEntry, List<tk2dSpriteAnimationClip> allClips, tk2dSpriteAnimationClip selectedClip) { return selectedClip; }

		// Drawn in the clip inspector GUI for the selected clip.
		// Return true when data has changed.
		public virtual bool OnClipInspectorGUI(tk2dSpriteAnimationClip selectedClip, List<ClipEditor.FrameGroup> frameGroups, TimelineEditor.State state) { return false; }

		// Drawn in the frame group inspector GUI for the selected clip.
		// Return true when data has changed.
		public virtual bool OnFrameGroupInspectorGUI(tk2dSpriteAnimationClip selectedClip, List<ClipEditor.FrameGroup> frameGroups, TimelineEditor.State state ) { return false; }
	}

	// Shared helpers for AnimOperator implementations.
	public static class AnimOperatorUtil
	{
		// Creates a new single-frame FrameGroup that duplicates the sprite
		// (collection + id) of the frame group at selectedFrameGroup.
		public static ClipEditor.FrameGroup NewFrameGroup(List<ClipEditor.FrameGroup> frameGroups, int selectedFrameGroup)
		{
			ClipEditor.FrameGroup src = frameGroups[selectedFrameGroup];
			ClipEditor.FrameGroup fg = new ClipEditor.FrameGroup();
			fg.spriteCollection = src.spriteCollection;
			fg.spriteId = src.spriteId;
			tk2dSpriteAnimationFrame f = new tk2dSpriteAnimationFrame();
			f.spriteCollection = fg.spriteCollection;
			f.spriteId = fg.spriteId;
			fg.frames.Add(f);
			return fg;
		}

		// Returns baseName unchanged if no clip uses it; otherwise appends
		// "<space><n>" with the smallest n that makes the name unique.
		public static string UniqueClipName(List<tk2dSpriteAnimationClip> allClips, string baseName)
		{
			bool found = false;
			for (int i = 0; i < allClips.Count; ++i)
			{
				if (allClips[i].name == baseName)
				{
					found = true;
					break;
				}
			}
			if (!found) return baseName;

			string uniqueName = baseName + " ";
			int uniqueId = 1;
			for (int i = 0; i < allClips.Count; ++i)
			{
				string uname = uniqueName + uniqueId.ToString();
				if (allClips[i].name == uname)
				{
					// Collision: bump the suffix and restart the scan from the
					// beginning (i = -1, the for-increment brings it back to 0).
					uniqueId++;
					i = -1;
					continue;
				}
			}
			uniqueName = uniqueName + uniqueId.ToString();
			return uniqueName;
		}
	}

	// Add a "Copy" option to the Animation menu
	public class CopyAnimation : AnimOperator
	{
		public override string[] AnimToolsMenu { get { return new string[] { "Copy" }; } }

		// Duplicates the selected clip under a unique "Copy of ..." name and
		// selects the new clip.
		public override tk2dSpriteAnimationClip OnAnimMenu(string menuEntry, List<tk2dSpriteAnimationClip> allClips, tk2dSpriteAnimationClip selectedClip)
		{
			tk2dSpriteAnimationClip newClip = new tk2dSpriteAnimationClip();
			newClip.CopyFrom(selectedClip);
			newClip.name = AnimOperatorUtil.UniqueClipName( allClips, "Copy of " + selectedClip.name );
			allClips.Add(newClip);
			operations = AnimEditOperations.NewClipCreated | AnimEditOperations.AllClipsChanged;
			return newClip;
		}
	}

	// "Reverse frames" plus trigger insertion and text-based sprite import,
	// drawn in the clip inspector.
	public class ClipTools : AnimOperator
	{
		public ClipTools()
		{
			sortId = -1000; // negative: system-reserved range, drawn before user operators
		}

		// State of the "Text" import UI.
		bool textToggle = false;
		string textNames = "";

		// Draws the Reverse / Trigger / Text controls. Returns true when the
		// clip data was modified.
		public override bool OnClipInspectorGUI(tk2dSpriteAnimationClip selectedClip, List<ClipEditor.FrameGroup> frameGroups, TimelineEditor.State state )
		{
			GUILayout.BeginHorizontal();
			bool changed = false;
			if (GUILayout.Button("Reverse", EditorStyles.miniButton))
			{
				frameGroups.Reverse();
				operations = AnimEditOperations.ClipContentChanged;
				// Keep the same frame selected after reversal (mirror the index);
				// -1 means "no selection" and is left alone.
				state.selectedFrame = (state.selectedFrame == -1) ? state.selectedFrame : (frameGroups.Count - 1 - state.selectedFrame);
				changed = true;
			}
			GUIContent addTriggerContent = new GUIContent("Trigger", "You can also add a trigger by double clicking on the trigger area");
			if (GUILayout.Button(addTriggerContent, EditorStyles.miniButton))
			{
				// Enable the trigger on the first frame that doesn't have one yet.
				for (int i = 0; i < selectedClip.frames.Length; ++i)
				{
					if (!selectedClip.frames[i].triggerEvent)
					{
						selectedClip.frames[i].triggerEvent = true;
						state.selectedTrigger = i;
						break;
					}
				}
				changed = true;
			}
			if (selectedClip.wrapMode != tk2dSpriteAnimationClip.WrapMode.Single)
			{
				bool newTextToggle = GUILayout.Toggle(textToggle, "Text", EditorStyles.miniButton);
				if (newTextToggle != textToggle)
				{
					if (newTextToggle == true)
					{
						// Opening the text view: seed it from the current frames;
						// abort the toggle if the list couldn't be built.
						textNames = BuildTextSpriteList(frameGroups);
						if (textNames.Length == 0) newTextToggle = false;
					}
					textToggle = newTextToggle;
				}
			}
			GUILayout.EndHorizontal();
			if (textToggle)
			{
				textNames = EditorGUILayout.TextArea(textNames, GUILayout.ExpandWidth(true));
				GUILayout.BeginHorizontal();
				GUILayout.FlexibleSpace();
				if (GUILayout.Button("Process"))
				{
					if (ProcessSpriteImport(frameGroups, textNames))
					{
						textNames = "";
						textToggle = false;
						state.selectedFrame = -1;
						changed = true;
						GUIUtility.keyboardControl = 0; // drop focus from the text area
					}
				}
				GUILayout.EndHorizontal();
			}
			return changed;
		}

		// Serializes the frame groups to one "name" or "name;frameCount" line
		// per group. Returns "" (after showing a dialog) when the groups span
		// multiple collections or a sprite name contains the ';' separator.
		string BuildTextSpriteList(List<ClipEditor.FrameGroup> frameGroups)
		{
			bool fromSameCollection = true;
			bool areNamesValid = true;
			tk2dSpriteCollectionData coll = null;
			List<string> s = new List<string>();
			foreach (ClipEditor.FrameGroup frameGroup in frameGroups)
			{
				tk2dSpriteDefinition def = frameGroup.spriteCollection.spriteDefinitions[frameGroup.spriteId];
				if (coll == null) coll = frameGroup.spriteCollection;
				if (coll != frameGroup.spriteCollection) fromSameCollection = false;
				string spriteName = def.name;
				if (spriteName.IndexOf(";") != -1) areNamesValid = false;
				int frameCount = frameGroup.frames.Count;
				s.Add( (frameCount == 1) ? (spriteName) : (spriteName + ";" + frameCount.ToString()) );
			}

			if (!fromSameCollection)
			{
				EditorUtility.DisplayDialog("Text importer failed", "Current animation clip contains sprites from multiple collections", "Ok");
				return "";
			}
			if (!areNamesValid)
			{
				EditorUtility.DisplayDialog("Text importer failed", "Sprite names contain the ; character", "Ok");
				return "";
			}

			string spriteList = "";
			for (int i = 0; i < s.Count; ++i) spriteList += s[i] + "\n";
			return spriteList;
		}

		// Parses the "name[;frameCount]" lines and rebuilds frameGroups to
		// match, reusing existing groups when the sprite id lines up.
		// Returns false (with an error logged) on any parse/lookup failure.
		bool ProcessSpriteImport(List<ClipEditor.FrameGroup> frameGroups, string spriteNames)
		{
			tk2dSpriteCollectionData coll = frameGroups[0].spriteCollection;

			// make new list
			List<int> spriteIds = new List<int>();
			List<int> frameCounts = new List<int>();
			int lineNumber = 1;
			string[] lines = spriteNames.Replace("\r\n", "\n").Split('\n');
			foreach (string line in lines)
			{
				if (line.Trim().Length != 0)
				{
					string spriteName = line;
					int frameCount = 1;
					// Split on the LAST ';' so the frame count suffix is isolated.
					int splitIndex = line.LastIndexOf(';');
					if (splitIndex != -1)
					{
						spriteName = line.Substring(0, splitIndex);
						string frameCountStr = line.Substring(splitIndex + 1, line.Length - 1 - splitIndex);
						if (!System.Int32.TryParse(frameCountStr, out frameCount))
						{
							Debug.LogError("Parse error in line " + lineNumber.ToString());
							return false;
						}
						frameCount = Mathf.Max(frameCount, 1);
					}
					int spriteId = coll.GetSpriteIdByName(spriteName, -1);
					if (spriteId == -1)
					{
						Debug.LogError(string.Format("Unable to find sprite '{0}' in sprite collection", spriteName));
						return false;
					}
					spriteIds.Add(spriteId);
					frameCounts.Add(frameCount);
				}
				lineNumber++;
			}

			List<ClipEditor.FrameGroup> newFrameGroups = new List<ClipEditor.FrameGroup>();
			for (int i = 0; i < spriteIds.Count; ++i)
			{
				if (i < frameGroups.Count && frameGroups[i].spriteId == spriteIds[i])
				{
					// Same sprite at this position: keep the existing group,
					// just resize its frame list if needed.
					if (frameGroups[i].frames.Count != frameCounts[i]) frameGroups[i].SetFrameCount(frameCounts[i]);
					newFrameGroups.Add(frameGroups[i]);
				}
				else
				{
					ClipEditor.FrameGroup fg = new ClipEditor.FrameGroup();
					fg.spriteCollection = coll;
					fg.spriteId = spriteIds[i];
					fg.SetFrameCount(frameCounts[i]);
					newFrameGroups.Add(fg);
				}
			}
			// Replace contents in place so callers holding the list see the result.
			frameGroups.Clear();
			foreach (ClipEditor.FrameGroup fg in newFrameGroups) frameGroups.Add(fg);
			operations = AnimEditOperations.ClipContentChanged;
			return true;
		}
	}

	// "Delete frames"
	public class DeleteFrames : AnimOperator
	{
		public DeleteFrames()
		{
			sortId = -50;
		}

		// Draws Delete / "Delete <" / "Delete >" buttons; only available when
		// more than one frame group exists (a clip keeps at least one).
		public override bool OnFrameGroupInspectorGUI(tk2dSpriteAnimationClip selectedClip, List<ClipEditor.FrameGroup> frameGroups, TimelineEditor.State state )
		{
			bool changed = false;
			if (frameGroups.Count > 1)
			{
				GUILayout.Space(16);
				if (GUILayout.Button("Delete", EditorStyles.miniButton))
				{
					frameGroups.RemoveAt(state.selectedFrame);
					state.selectedFrame = -1;
					changed = true;
				}
				GUILayout.BeginHorizontal();
				if (GUILayout.Button("Delete <", EditorStyles.miniButton))
				{
					// Remove everything before the selected frame.
					frameGroups.RemoveRange(0, state.selectedFrame);
					changed = true;
					state.selectedFrame = 0;
				}
				if (GUILayout.Button("Delete >", EditorStyles.miniButton))
				{
					// Remove everything after the selected frame.
					frameGroups.RemoveRange(state.selectedFrame + 1, frameGroups.Count - 1 - state.selectedFrame);
					changed = true;
					state.selectedFrame = frameGroups.Count - 1;
				}
				GUILayout.EndHorizontal();
			}
			operations = changed ? AnimEditOperations.ClipContentChanged : AnimEditOperations.None;
			return changed;
		}
	}

	// "Insert frames"
	public class InsertFrames : AnimOperator
	{
		public InsertFrames()
		{
			sortId = -100;
		}

		// Draws "Insert <" / "Insert >" buttons that duplicate the selected
		// frame group before/after the selection. Not shown for Single clips.
		public override bool OnFrameGroupInspectorGUI(tk2dSpriteAnimationClip selectedClip, List<ClipEditor.FrameGroup> frameGroups, TimelineEditor.State state )
		{
			if (selectedClip.wrapMode == tk2dSpriteAnimationClip.WrapMode.Single) return false;

			bool changed = false;
			GUILayout.BeginHorizontal();
			if (GUILayout.Button("Insert <", EditorStyles.miniButton))
			{
				frameGroups.Insert(state.selectedFrame, AnimOperatorUtil.NewFrameGroup(frameGroups, state.selectedFrame));
				state.selectedFrame++; // keep the originally-selected group selected
				changed = true;
			}
			if (GUILayout.Button("Insert >", EditorStyles.miniButton))
			{
				frameGroups.Insert(state.selectedFrame + 1, AnimOperatorUtil.NewFrameGroup(frameGroups, state.selectedFrame));
				changed = true;
			}
			GUILayout.EndHorizontal();
			operations = changed ? AnimEditOperations.ClipContentChanged : AnimEditOperations.None;
			return changed;
		}
	}

	// "AutoFill frames"
	public class AutoFillFrames : AnimOperator
	{
		public AutoFillFrames()
		{
			sortId = -110;
		}

		// Finds a sprite with the name and id
		// matches "baseName" [ 0..9 ]* as id
		// todo rewrite with regex
		// Returns the numeric suffix of def.name when it starts with baseName
		// (case-insensitive), or -1 when it doesn't match.
		int GetFrameIndex(tk2dSpriteDefinition def, string baseName)
		{
			if (System.String.Compare(baseName, 0, def.name, 0, baseName.Length, true) == 0)
			{
				int thisFrameId = 0;
				if (System.Int32.TryParse( def.name.Substring(baseName.Length), out thisFrameId ))
				{
					return thisFrameId;
				}
			}
			return -1;
		}

		// Linear scan for the sprite definition whose name is baseName+frameId;
		// returns its index in spriteDefs or -1.
		int FindFrameIndex(tk2dSpriteDefinition[] spriteDefs, string baseName, int frameId)
		{
			for (int j = 0; j < spriteDefs.Length; ++j)
			{
				if (GetFrameIndex(spriteDefs[j], baseName) == frameId) return j;
			}
			return -1;
		}

		// Starting from the selected frame's sprite "name<N>", appends frame
		// groups for name<N+1>, name<N+2>, ... (or descending when reverse),
		// tolerating up to 10 consecutive missing numbers. Stops early when the
		// next existing frame group already continues the sequence.
		// Returns true when at least one group was inserted.
		bool AutoFill(List<ClipEditor.FrameGroup> frameGroups, int selectedFrame, bool reverse)
		{
			ClipEditor.FrameGroup selectedFrameGroup = frameGroups[selectedFrame];
			if (selectedFrameGroup.spriteCollection != null && selectedFrameGroup.spriteId >= 0 && selectedFrameGroup.spriteId < selectedFrameGroup.spriteCollection.inst.Count)
			{
				string na = selectedFrameGroup.spriteCollection.inst.spriteDefinitions[selectedFrameGroup.spriteId].name;

				// Only proceed when the name ends in digits; split it into
				// baseName + numeric suffix.
				int numStartA = na.Length - 1;
				if (na[numStartA] >= '0' && na[numStartA] <= '9')
				{
					while (numStartA > 0 && na[numStartA - 1] >= '0' && na[numStartA - 1] <= '9') numStartA--;
					string baseName = na.Substring(0, numStartA).ToLower();
					int baseNo = System.Convert.ToInt32(na.Substring(numStartA));

					int maxAllowedMissing = 10;
					int allowedMissing = maxAllowedMissing;
					List<int> pendingFrames = new List<int>();
					int startOffset = reverse ? -1 : 1;
					int frameInc = reverse ? -1 : 1;
					for (int frameNo = baseNo + startOffset; frameNo >= 0 ; frameNo += frameInc)
					{
						int frameIdx = FindFrameIndex(selectedFrameGroup.spriteCollection.inst.spriteDefinitions, baseName, frameNo);
						if (frameIdx == -1)
						{
							// Gap in the numbering; give up after too many in a row.
							if (--allowedMissing <= 0) break;
						}
						else
						{
							pendingFrames.Add(frameIdx);
							allowedMissing = maxAllowedMissing; // reset
						}
					}

					int numInserted = 0;
					int insertIndex = selectedFrame + 1;
					ClipEditor.FrameGroup nextFrameGroup = (insertIndex >= frameGroups.Count) ? null : frameGroups[insertIndex];
					while (pendingFrames.Count > 0)
					{
						int frameToInsert = pendingFrames[0];
						pendingFrames.RemoveAt(0);
						// Stop once we'd duplicate what already follows the selection.
						if (nextFrameGroup != null && nextFrameGroup.spriteCollection == selectedFrameGroup.spriteCollection && nextFrameGroup.spriteId == frameToInsert) break;
						ClipEditor.FrameGroup fg = AnimOperatorUtil.NewFrameGroup(frameGroups, selectedFrame);
						fg.spriteId = frameToInsert;
						fg.Update();
						frameGroups.Insert(insertIndex++, fg);
						numInserted++;
					}
					return numInserted > 0;
				}
			}
			return false;
		}

		// Draws the "Autofill 9..1" / "Autofill 1..9" buttons. Not shown for
		// Single clips.
		public override bool OnFrameGroupInspectorGUI(tk2dSpriteAnimationClip selectedClip, List<ClipEditor.FrameGroup> frameGroups, TimelineEditor.State state)
		{
			if (selectedClip.wrapMode == tk2dSpriteAnimationClip.WrapMode.Single) return false;

			bool changed = false;
			GUILayout.BeginHorizontal();
			if (GUILayout.Button("Autofill 9..1", EditorStyles.miniButton) && AutoFill(frameGroups, state.selectedFrame, true))
			{
				changed = true;
			}
			if (GUILayout.Button("Autofill 1..9", EditorStyles.miniButton) && AutoFill(frameGroups, state.selectedFrame, false))
			{
				changed = true;
			}
			GUILayout.EndHorizontal();
			operations = changed ? AnimEditOperations.ClipContentChanged : AnimEditOperations.None;
			return changed;
		}
	}
}
using System;
using System.Collections.Generic;
using System.IO;
using Journalist.Extensions;
using Journalist.Options;
using Journalist.WindowsAzure.Storage.Tables;

namespace Journalist.EventStore.Events
{
    /// <summary>
    /// An event record as stored in the journal: an id, the assembly-qualified
    /// event type name, a read-only payload stream, string headers, and — when
    /// materialized from storage — an optional commit time and stream offset.
    /// Equality and hash code are based solely on <see cref="EventId"/>.
    /// </summary>
    public sealed class JournaledEvent : IEquatable<JournaledEvent>
    {
        private readonly MemoryStream m_eventPayload;
        private readonly Dictionary<string, string> m_eventHeaders;
        private readonly string m_eventTypeName;
        private readonly Guid m_eventId;
        private readonly Option<DateTimeOffset> m_commitTime;
        private readonly Option<StreamVersion> m_offset;

        private JournaledEvent(
            Guid eventId,
            string eventTypeName,
            Option<DateTimeOffset> commitTime,
            Option<StreamVersion> offset,
            Dictionary<string, string> eventHeaders,
            MemoryStream eventPayload)
        {
            m_eventId = eventId;
            m_eventTypeName = eventTypeName;
            m_eventPayload = eventPayload;
            m_eventHeaders = eventHeaders;
            m_commitTime = commitTime;
            m_offset = offset;
        }

        /// <summary>
        /// Creates a new event with the given id by serializing
        /// <paramref name="eventObject"/> through the supplied delegate.
        /// The resulting payload stream is read-only.
        /// </summary>
        /// <param name="eventId">Non-empty id for the new event.</param>
        /// <param name="eventObject">The event instance to serialize.</param>
        /// <param name="serialize">Writes the object (given its runtime type) to the writer.</param>
        public static JournaledEvent Create(
            Guid eventId,
            object eventObject,
            Action<object, Type, StreamWriter> serialize)
        {
            // FIX: the parameter name reported for an empty id was "eventObject",
            // which would point callers at the wrong argument.
            Require.NotEmpty(eventId, "eventId");
            Require.NotNull(eventObject, "eventObject");
            Require.NotNull(serialize, "serialize");

            var eventType = eventObject.GetType();

            MemoryStream payloadBytes;
            using (var stream = new MemoryStream())
            {
                var writer = new StreamWriter(stream);
                serialize(eventObject, eventType, writer);
                writer.Flush();

                // Wrap the serializer's buffer directly (no copy) in a read-only view
                // limited to the bytes actually written.
                payloadBytes = new MemoryStream(
                    buffer: stream.GetBuffer(),
                    index: 0,
                    count: (int)stream.Length,
                    writable: false,
                    publiclyVisible: true);
            }

            return new JournaledEvent(
                eventId,
                eventType.AssemblyQualifiedName,
                Option.None(),
                Option.None(),
                new Dictionary<string, string>(),
                payloadBytes);
        }

        /// <summary>
        /// Creates a new event with a freshly generated id.
        /// </summary>
        public static JournaledEvent Create(object eventObject, Action<object, Type, StreamWriter> serialize)
        {
            return Create(Guid.NewGuid(), eventObject, serialize);
        }

        /// <summary>
        /// Materializes an event from a storage property map. Expects at least
        /// the event id, type and payload properties; headers, timestamp
        /// (commit time) and row key (stream offset) are optional.
        /// </summary>
        public static JournaledEvent Create(IDictionary<string, object> properties)
        {
            Require.NotNull(properties, "properties");

            // Copy the payload out of the storage stream so this instance owns it.
            var payload = new MemoryStream();
            ((Stream)properties[JournaledEventPropertyNames.EventPayload]).CopyTo(payload);

            var headers = new Dictionary<string, string>();
            if (properties.ContainsKey(JournaledEventPropertyNames.EventHeaders))
            {
                var propertyValue = properties[JournaledEventPropertyNames.EventHeaders];

                // for backward compatibility reading from string
                if (propertyValue is string)
                {
                    var stringValue = (string)propertyValue;
                    if (stringValue.IsNotNullOrEmpty())
                    {
                        headers = JournaledEventHeadersSerializer.Deserialize(stringValue);
                    }
                }
                else
                {
                    headers = JournaledEventHeadersSerializer.Deserialize((Stream)propertyValue);
                }
            }

            Option<DateTimeOffset> commitTime = Option.None();
            object commitTimeValue;
            if (properties.TryGetValue(KnownProperties.Timestamp, out commitTimeValue))
            {
                commitTime = Option.Some((DateTimeOffset)commitTimeValue);
            }

            Option<StreamVersion> offset = Option.None();
            object offsetValue;
            if (properties.TryGetValue(KnownProperties.RowKey, out offsetValue))
            {
                offset = Option.Some(StreamVersion.Parse((string)offsetValue));
            }

            return new JournaledEvent(
                (Guid)properties[JournaledEventPropertyNames.EventId],
                (string)properties[JournaledEventPropertyNames.EventType],
                commitTime,
                offset,
                headers,
                payload);
        }

        /// <summary>
        /// Sets a header value. A null or empty value removes the header.
        /// </summary>
        public void SetHeader(string headerName, string headerValue)
        {
            Require.NotEmpty(headerName, "headerName");

            // FIX: a null/empty value now always clears the header. The previous
            // check also required the key to exist, so an empty value for an
            // absent key was silently stored. Remove is a no-op on a missing key.
            if (headerValue.IsNullOrEmpty())
            {
                m_eventHeaders.Remove(headerName);
                return;
            }

            m_eventHeaders[headerName] = headerValue;
        }

        /// <summary>
        /// Returns a fresh read-only view over the payload bytes. The view
        /// shares the underlying buffer, so each caller gets an independent
        /// stream position without copying the data.
        /// </summary>
        public MemoryStream GetEventPayload()
        {
            return new MemoryStream(
                buffer: m_eventPayload.GetBuffer(),
                index: 0,
                count: (int)m_eventPayload.Length,
                writable: false,
                publiclyVisible: false);
        }

        /// <summary>
        /// Converts the event to the property map shape used for persistence.
        /// </summary>
        public Dictionary<string, object> ToDictionary()
        {
            var result = new Dictionary<string, object>(JournaledEventPropertyNames.All.Length)
            {
                [JournaledEventPropertyNames.EventId] = m_eventId,
                [JournaledEventPropertyNames.EventType] = m_eventTypeName,
                [JournaledEventPropertyNames.EventPayload] = GetEventPayload(),
                [JournaledEventPropertyNames.EventHeaders] = JournaledEventHeadersSerializer.Serialize(m_eventHeaders)
            };

            return result;
        }

        /// <summary>
        /// Two events are equal when their ids are equal.
        /// </summary>
        public bool Equals(JournaledEvent other)
        {
            if (ReferenceEquals(null, other))
            {
                return false;
            }

            if (ReferenceEquals(this, other))
            {
                return true;
            }

            return m_eventId.Equals(other.m_eventId);
        }

        public override bool Equals(object obj)
        {
            if (ReferenceEquals(null, obj))
            {
                return false;
            }

            if (ReferenceEquals(this, obj))
            {
                return true;
            }

            return obj is JournaledEvent && Equals((JournaledEvent)obj);
        }

        public override int GetHashCode() => m_eventId.GetHashCode();

        public IReadOnlyDictionary<string, string> Headers => m_eventHeaders;

        // Resolves the runtime type from the stored assembly-qualified name;
        // throws if the type cannot be loaded (throwOnError: true).
        public Type EventType => Type.GetType(EventTypeName, true);

        public Guid EventId => m_eventId;

        public string EventTypeName => m_eventTypeName;

        public Option<DateTimeOffset> CommitTime => m_commitTime;

        public Option<StreamVersion> Offset => m_offset;
    }
}
using fyiReporting.RDL;
using System;
using System.Collections;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.IO;

namespace fyiReporting.RdlPrint
{
    /// <summary>
    /// Helper class used by RdlPrint: renders one page of a report onto a GDI+
    /// <see cref="Graphics"/> surface. Used when printing or when creating an image
    /// of a page.
    /// </summary>
    public class PageDrawing
    {
        private Pages _pgs;             // the pages of the report to view

        // Work variables set by Draw() and consumed by the per-item helpers.
        float _left;
        float _top;
        float _vScroll;
        float _hScroll;
        float DpiX;
        float DpiY;

        public PageDrawing() : this(null)
        {
        }

        public PageDrawing(Pages pgs)
        {
            _pgs = pgs;
        }

        /// <summary>
        /// The pages of the report to draw.
        /// </summary>
        internal Pages Pgs
        {
            get { return _pgs; }
            set { _pgs = value; }
        }

        /// <summary>
        /// Simple draw of an entire page. Useful when printing or creating an image.
        /// </summary>
        /// <param name="g">Target graphics context; its DPI is captured for pt-to-px conversion.</param>
        /// <param name="page">Zero-based page index into <see cref="Pgs"/>.</param>
        /// <param name="clipRectangle">Clip rectangle in device units.</param>
        /// <param name="drawBackground">When true the page area is first filled white.</param>
        /// <param name="pageOffset">Translation used to correct for non-printable paper margins.</param>
        public void Draw(Graphics g, int page, System.Drawing.Rectangle clipRectangle, bool drawBackground, PointF pageOffset)
        {
            DpiX = g.DpiX;              // this can change (e.g. printing graphics context)
            DpiY = g.DpiY;

            // g.InterpolationMode = InterpolationMode.HighQualityBilinear;  // try to unfuzz charts
            g.PageUnit = GraphicsUnit.Pixel;
            g.ScaleTransform(1, 1);

            if (!pageOffset.IsEmpty)    // used when correcting for non-printable area on paper
            {
                g.TranslateTransform(pageOffset.X, pageOffset.Y);
            }

            _left = 0;
            _top = 0;
            _hScroll = 0;
            _vScroll = 0;

            RectangleF r = new RectangleF(clipRectangle.X, clipRectangle.Y,
                clipRectangle.Width, clipRectangle.Height);

            if (drawBackground)
                g.FillRectangle(Brushes.White, PixelsX(_left), PixelsY(_top),
                    PixelsX(_pgs.PageWidth), PixelsY(_pgs.PageHeight));

            ProcessPage(g, _pgs[page], r, false);
        }

        // Convert report coordinates (points, 1/72") to device pixels.
        internal float PixelsX(float x)
        {
            return (float)(x * DpiX / 72.0f);
        }

        internal float PixelsY(float y)
        {
            return (float)(y * DpiY / 72.0f);
        }

        // Render all the objects in a page (or any composite object).
        private void ProcessPage(Graphics g, IEnumerable p, RectangleF clipRect, bool bHitList)
        {
            // TODO: (Peter) Support can grow and can shrink
            // NOTE: hit-list / selection support (PageTextHtml, SelectTool) was removed
            // from this print-only copy; see the viewer variant of this class.
            foreach (PageItem pi in p)
            {
                if (pi is PageLine)
                {
                    PageLine pl = pi as PageLine;
                    DrawLine(pl.SI.BColorLeft, pl.SI.BStyleLeft, pl.SI.BWidthLeft, g,
                        PixelsX(pl.X + _left - _hScroll), PixelsY(pl.Y + _top - _vScroll),
                        PixelsX(pl.X2 + _left - _hScroll), PixelsY(pl.Y2 + _top - _vScroll));
                    continue;
                }

                RectangleF rect = new RectangleF(
                    PixelsX(pi.X + _left - _hScroll), PixelsY(pi.Y + _top - _vScroll),
                    PixelsX(pi.W), PixelsY(pi.H));

                if ((pi is PagePolygon) || (pi is PageCurve))
                {
                    // intentionally empty; polygon's rectangles aren't calculated,
                    // so they cannot be clip-tested here
                }
                else if (!rect.IntersectsWith(clipRect))
                    continue;

                if (pi.SI.BackgroundImage != null)
                {
                    // put out any background image
                    PageImage i = pi.SI.BackgroundImage;
                    DrawImageBackground(i, pi.SI, g, rect);
                }

                if (pi is PageText)
                {
                    // TODO: enable can shrink, can grow
                    // 2005 spec file, page 9, in the text box has CanGrow and CanShrink
                    PageText pt = pi as PageText;
                    DrawString(pt, g, rect);
                }
                else if (pi is PageImage)
                {
                    PageImage i = pi as PageImage;
                    DrawImage(i, g, rect);
                }
                else if (pi is PageRectangle)
                {
                    this.DrawBackground(g, rect, pi.SI);
                }
                else if (pi is PageEllipse)
                {
                    PageEllipse pe = pi as PageEllipse;
                    DrawEllipse(pe, g, rect);
                }
                else if (pi is PagePie)
                {
                    PagePie pp = pi as PagePie;
                    DrawPie(pp, g, rect);
                }
                else if (pi is PagePolygon)
                {
                    PagePolygon ppo = pi as PagePolygon;
                    FillPolygon(ppo, g, rect);
                }
                else if (pi is PageCurve)
                {
                    PageCurve pc = pi as PageCurve;
                    DrawCurve(pc.SI.BColorLeft, pc.SI.BStyleLeft, pc.SI.BWidthLeft,
                        g, pc.Points, pc.Offset, pc.Tension);
                }

                DrawBorder(pi, g, rect);
            }
        }

        /// <summary>
        /// Fills the item background: a gradient brush wins over a hatch pattern,
        /// which wins over a plain solid background color.
        /// </summary>
        private void DrawBackground(Graphics g, System.Drawing.RectangleF rect, StyleInfo si)
        {
            LinearGradientBrush linGrBrush = null;
            SolidBrush sb = null;
            HatchBrush hb = null;
            try
            {
                if (si.BackgroundGradientType != BackgroundGradientTypeEnum.None &&
                    !si.BackgroundGradientEndColor.IsEmpty &&
                    !si.BackgroundColor.IsEmpty)
                {
                    Color c = si.BackgroundColor;
                    Color ec = si.BackgroundGradientEndColor;
                    switch (si.BackgroundGradientType)
                    {
                        // Center/HorizontalCenter/VerticalCenter are approximated with
                        // plain horizontal/vertical gradients.
                        case BackgroundGradientTypeEnum.LeftRight:
                            linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.Horizontal);
                            break;
                        case BackgroundGradientTypeEnum.TopBottom:
                            linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.Vertical);
                            break;
                        case BackgroundGradientTypeEnum.Center:
                            linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.Horizontal);
                            break;
                        case BackgroundGradientTypeEnum.DiagonalLeft:
                            linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.ForwardDiagonal);
                            break;
                        case BackgroundGradientTypeEnum.DiagonalRight:
                            linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.BackwardDiagonal);
                            break;
                        case BackgroundGradientTypeEnum.HorizontalCenter:
                            linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.Horizontal);
                            break;
                        case BackgroundGradientTypeEnum.VerticalCenter:
                            linGrBrush = new LinearGradientBrush(rect, c, ec, LinearGradientMode.Vertical);
                            break;
                        default:
                            break;
                    }
                }

                if (si.PatternType != patternTypeEnum.None)
                {
                    switch (si.PatternType)
                    {
                        case patternTypeEnum.BackwardDiagonal:
                            hb = new HatchBrush(HatchStyle.BackwardDiagonal, si.Color, si.BackgroundColor);
                            break;
                        case patternTypeEnum.CheckerBoard:
                            hb = new HatchBrush(HatchStyle.LargeCheckerBoard, si.Color, si.BackgroundColor);
                            break;
                        case patternTypeEnum.Cross:
                            hb = new HatchBrush(HatchStyle.Cross, si.Color, si.BackgroundColor);
                            break;
                        case patternTypeEnum.DarkDownwardDiagonal:
                            hb = new HatchBrush(HatchStyle.DarkDownwardDiagonal, si.Color, si.BackgroundColor);
                            break;
                        case patternTypeEnum.DarkHorizontal:
                            hb = new HatchBrush(HatchStyle.DarkHorizontal, si.Color, si.BackgroundColor);
                            break;
                        case patternTypeEnum.DiagonalBrick:
                            hb = new HatchBrush(HatchStyle.DiagonalBrick, si.Color, si.BackgroundColor);
                            break;
                        case patternTypeEnum.HorizontalBrick:
                            hb = new HatchBrush(HatchStyle.HorizontalBrick, si.Color, si.BackgroundColor);
                            break;
                        case patternTypeEnum.LargeConfetti:
                            hb = new HatchBrush(HatchStyle.LargeConfetti, si.Color, si.BackgroundColor);
                            break;
                        case patternTypeEnum.OutlinedDiamond:
                            hb = new HatchBrush(HatchStyle.OutlinedDiamond, si.Color, si.BackgroundColor);
                            break;
                        case patternTypeEnum.SmallConfetti:
                            hb = new HatchBrush(HatchStyle.SmallConfetti, si.Color, si.BackgroundColor);
                            break;
                        case patternTypeEnum.SolidDiamond:
                            hb = new HatchBrush(HatchStyle.SolidDiamond, si.Color, si.BackgroundColor);
                            break;
                        case patternTypeEnum.Vertical:
                            hb = new HatchBrush(HatchStyle.Vertical, si.Color, si.BackgroundColor);
                            break;
                    }
                }

                if (linGrBrush != null)
                {
                    g.FillRectangle(linGrBrush, rect);
                }
                else if (hb != null)
                {
                    g.FillRectangle(hb, rect);
                }
                else if (!si.BackgroundColor.IsEmpty)
                {
                    sb = new SolidBrush(si.BackgroundColor);
                    g.FillRectangle(sb, rect);
                }
            }
            finally
            {
                // FIX: the original disposed brushes both inline and here, and never
                // disposed the HatchBrush at all. Dispose each brush exactly once.
                if (linGrBrush != null)
                    linGrBrush.Dispose();
                if (hb != null)
                    hb.Dispose();
                if (sb != null)
                    sb.Dispose();
            }
            return;
        }

        // Draw the four border edges of an item; pies draw their own outline.
        private void DrawBorder(PageItem pi, Graphics g, RectangleF r)
        {
            if (pi.GetType().Name.Equals("PagePie"))
                return;

            if (r.Height <= 0 || r.Width <= 0)      // no bounding box to use
                return;

            StyleInfo si = pi.SI;

            DrawLine(si.BColorTop, si.BStyleTop, si.BWidthTop, g, r.X, r.Y, r.Right, r.Y);
            DrawLine(si.BColorRight, si.BStyleRight, si.BWidthRight, g, r.Right, r.Y, r.Right, r.Bottom);
            DrawLine(si.BColorLeft, si.BStyleLeft, si.BWidthLeft, g, r.X, r.Y, r.X, r.Bottom);
            DrawLine(si.BColorBottom, si.BStyleBottom, si.BWidthBottom, g, r.X, r.Bottom, r.Right, r.Bottom);
            return;
        }

        // Decode an image item's bytes and draw it according to its sizing mode.
        private void DrawImage(PageImage pi, Graphics g, RectangleF r)
        {
            Stream strm = null;
            System.Drawing.Image im = null;
            try
            {
                strm = new MemoryStream(pi.ImageData);
                im = System.Drawing.Image.FromStream(strm);
                DrawImageSized(pi, im, g, r);
            }
            finally
            {
                if (strm != null)
                    strm.Close();
                if (im != null)
                    im.Dispose();
            }
        }

        // Tile (or place once) an item's background image inside its padded rectangle.
        private void DrawImageBackground(PageImage pi, StyleInfo si, Graphics g, RectangleF r)
        {
            Stream strm = null;
            System.Drawing.Image im = null;
            try
            {
                strm = new MemoryStream(pi.ImageData);
                im = System.Drawing.Image.FromStream(strm);

                // http://www.fyireporting.com/forum/viewtopic.php?t=892
                // A.S.> convert pt to px if needed (when printing we need px, when draw preview - pt)
                RectangleF r2;
                if (g.PageUnit == GraphicsUnit.Pixel)
                {
                    r2 = new RectangleF(r.Left + (si.PaddingLeft * g.DpiX) / 72,
                        r.Top + (si.PaddingTop * g.DpiX) / 72,
                        r.Width - ((si.PaddingLeft + si.PaddingRight) * g.DpiX) / 72,
                        r.Height - ((si.PaddingTop + si.PaddingBottom) * g.DpiX) / 72);
                }
                else
                {
                    // adjust drawing rectangle based on padding
                    r2 = new RectangleF(r.Left + si.PaddingLeft,
                        r.Top + si.PaddingTop,
                        r.Width - si.PaddingLeft - si.PaddingRight,
                        r.Height - si.PaddingTop - si.PaddingBottom);
                }

                int repeatX = 0;
                int repeatY = 0;
                switch (pi.Repeat)
                {
                    case ImageRepeat.Repeat:
                        repeatX = (int)Math.Floor(r2.Width / pi.SamplesW);
                        repeatY = (int)Math.Floor(r2.Height / pi.SamplesH);
                        break;
                    case ImageRepeat.RepeatX:
                        repeatX = (int)Math.Floor(r2.Width / pi.SamplesW);
                        repeatY = 1;
                        break;
                    case ImageRepeat.RepeatY:
                        repeatY = (int)Math.Floor(r2.Height / pi.SamplesH);
                        repeatX = 1;
                        break;
                    case ImageRepeat.NoRepeat:
                    default:
                        repeatX = repeatY = 1;
                        break;
                }

                // make sure the image is drawn at least 1 time
                repeatX = Math.Max(repeatX, 1);
                repeatY = Math.Max(repeatY, 1);

                float startX = r2.Left;
                float startY = r2.Top;

                // Clip tiles to the padded rectangle, then restore the original clip.
                Region saveRegion = g.Clip;
                Region clipRegion = new Region(g.Clip.GetRegionData());
                clipRegion.Intersect(r2);
                g.Clip = clipRegion;

                for (int i = 0; i < repeatX; i++)
                {
                    for (int j = 0; j < repeatY; j++)
                    {
                        float currX = startX + i * pi.SamplesW;
                        float currY = startY + j * pi.SamplesH;
                        g.DrawImage(im, new RectangleF(currX, currY, pi.SamplesW, pi.SamplesH));
                    }
                }

                g.Clip = saveRegion;
            }
            finally
            {
                if (strm != null)
                    strm.Close();
                if (im != null)
                    im.Dispose();
            }
        }

        // Draw an already-decoded image honoring the item's ImageSizingEnum mode.
        private void DrawImageSized(PageImage pi, System.Drawing.Image im, Graphics g, RectangleF r)
        {
            float height, width;        // some work variables
            StyleInfo si = pi.SI;

            // adjust drawing rectangle based on padding
            // http://www.fyireporting.com/forum/viewtopic.php?t=892
            // A.S.> convert pt to px if needed (when printing we need px, when draw preview - pt)
            RectangleF r2;
            if (g.PageUnit == GraphicsUnit.Pixel)
            {
                r2 = new RectangleF(r.Left + (si.PaddingLeft * g.DpiX) / 72,
                    r.Top + (si.PaddingTop * g.DpiX) / 72,
                    r.Width - ((si.PaddingLeft + si.PaddingRight) * g.DpiX) / 72,
                    r.Height - ((si.PaddingTop + si.PaddingBottom) * g.DpiX) / 72);
            }
            else
            {
                r2 = new RectangleF(r.Left + si.PaddingLeft,
                    r.Top + si.PaddingTop,
                    r.Width - si.PaddingLeft - si.PaddingRight,
                    r.Height - si.PaddingTop - si.PaddingBottom);
            }

            System.Drawing.Rectangle ir;    // int work rectangle
            ir = new System.Drawing.Rectangle(Convert.ToInt32(r2.Left), Convert.ToInt32(r2.Top),
                Convert.ToInt32(r2.Width), Convert.ToInt32(r2.Height));

            switch (pi.Sizing)
            {
                case ImageSizingEnum.AutoSize:
                    // Note: GDI+ will stretch an image when you only provide
                    // the left/top coordinates. This seems pretty stupid since
                    // it results in the image being out of focus even though
                    // you don't want the image resized.
                    if (g.DpiX == im.HorizontalResolution &&
                        g.DpiY == im.VerticalResolution)
                    {
                        ir = new System.Drawing.Rectangle(Convert.ToInt32(r2.Left), Convert.ToInt32(r2.Top),
                            im.Width, im.Height);
                    }
                    g.DrawImage(im, ir);
                    break;
                case ImageSizingEnum.Clip:
                    // Draw at natural size but clipped to the padded rectangle.
                    Region saveRegion = g.Clip;
                    Region clipRegion = new Region(g.Clip.GetRegionData());
                    clipRegion.Intersect(r2);
                    g.Clip = clipRegion;
                    if (g.DpiX == im.HorizontalResolution &&
                        g.DpiY == im.VerticalResolution)
                    {
                        ir = new System.Drawing.Rectangle(Convert.ToInt32(r2.Left), Convert.ToInt32(r2.Top),
                            im.Width, im.Height);
                    }
                    g.DrawImage(im, ir);
                    g.Clip = saveRegion;
                    break;
                case ImageSizingEnum.FitProportional:
                    // Shrink one dimension of the target rectangle so the image's
                    // aspect ratio is preserved.
                    float ratioIm = (float)im.Height / (float)im.Width;
                    float ratioR = r2.Height / r2.Width;
                    height = r2.Height;
                    width = r2.Width;
                    if (ratioIm > ratioR)
                    {
                        // this means the rectangle width must be corrected
                        width = height * (1 / ratioIm);
                    }
                    else if (ratioIm < ratioR)
                    {
                        // this means the rectangle height must be corrected
                        height = width * ratioIm;
                    }
                    r2 = new RectangleF(r2.X, r2.Y, width, height);
                    g.DrawImage(im, r2);
                    break;
                case ImageSizingEnum.Fit:
                default:
                    g.DrawImage(im, r2);
                    break;
            }
            return;
        }

        /// <summary>
        /// Draw a single (border) line with the requested color, style and width.
        /// The width is given in points and converted to pixels when the graphics
        /// context works in pixels (e.g. when printing).
        /// </summary>
        private void DrawLine(Color c, BorderStyleEnum bs, float w, Graphics g,
            float x, float y, float x2, float y2)
        {
            if (bs == BorderStyleEnum.None || c.IsEmpty || w <= 0)  // nothing to draw
                return;

            float tmpW = w;
            if (g.PageUnit == GraphicsUnit.Pixel)
                tmpW = (tmpW * g.DpiX) / 72;

            // FIX: the original created a pen with the DPI-adjusted width, then
            // immediately replaced it with "new Pen(c, w)" — leaking the first pen
            // and silently discarding the pt-to-px conversion computed above.
            Pen p = null;
            try
            {
                p = new Pen(c, tmpW);
                switch (bs)
                {
                    case BorderStyleEnum.Dashed:
                        p.DashStyle = DashStyle.Dash;
                        break;
                    case BorderStyleEnum.Dotted:
                        p.DashStyle = DashStyle.Dot;
                        break;
                    case BorderStyleEnum.Double:
                    case BorderStyleEnum.Groove:
                    case BorderStyleEnum.Inset:
                    case BorderStyleEnum.Solid:
                    case BorderStyleEnum.Outset:
                    case BorderStyleEnum.Ridge:
                    case BorderStyleEnum.WindowInset:
                    default:
                        p.DashStyle = DashStyle.Solid;
                        break;
                }
                g.DrawLine(p, x, y, x2, y2);
            }
            finally
            {
                if (p != null)
                    p.Dispose();
            }
        }

        // Draw a cardinal spline through the item's points (translated/scaled to device px).
        private void DrawCurve(Color c, BorderStyleEnum bs, float w, Graphics g,
            PointF[] points, int Offset, float Tension)
        {
            if (bs == BorderStyleEnum.None || c.IsEmpty || w <= 0)  // nothing to draw
                return;

            Pen p = null;
            try
            {
                p = new Pen(c, w);
                switch (bs)
                {
                    case BorderStyleEnum.Dashed:
                        p.DashStyle = DashStyle.Dash;
                        break;
                    case BorderStyleEnum.Dotted:
                        p.DashStyle = DashStyle.Dot;
                        break;
                    case BorderStyleEnum.Double:
                    case BorderStyleEnum.Groove:
                    case BorderStyleEnum.Inset:
                    case BorderStyleEnum.Solid:
                    case BorderStyleEnum.Outset:
                    case BorderStyleEnum.Ridge:
                    case BorderStyleEnum.WindowInset:
                    default:
                        p.DashStyle = DashStyle.Solid;
                        break;
                }

                PointF[] tmp = new PointF[points.Length];
                for (int i = 0; i < points.Length; i++)
                {
                    tmp[i].X = PixelsX(points[i].X + _left - _hScroll);
                    tmp[i].Y = PixelsY(points[i].Y + _top - _vScroll);
                }
                // NOTE(review): with Offset > 0, "tmp.Length - 1" segments may exceed
                // the points available — verify callers always pass Offset == 0.
                g.DrawCurve(p, tmp, Offset, tmp.Length - 1, Tension);
            }
            finally
            {
                if (p != null)
                    p.Dispose();
            }
        }

        // Fill and outline an ellipse; only the top border style/color is used
        // for the outline (matches the original behavior).
        private void DrawEllipse(PageEllipse pe, Graphics g, RectangleF r)
        {
            StyleInfo si = pe.SI;
            if (!si.BackgroundColor.IsEmpty)
            {
                // FIX: brush was never disposed in the original.
                using (SolidBrush sb = new SolidBrush(si.BackgroundColor))
                {
                    g.FillEllipse(sb, r);
                }
            }
            if (si.BStyleTop != BorderStyleEnum.None)
            {
                // FIX: pen was never disposed in the original.
                using (Pen p = new Pen(si.BColorTop, si.BWidthTop))
                {
                    switch (si.BStyleTop)
                    {
                        case BorderStyleEnum.Dashed:
                            p.DashStyle = DashStyle.Dash;
                            break;
                        case BorderStyleEnum.Dotted:
                            p.DashStyle = DashStyle.Dot;
                            break;
                        case BorderStyleEnum.Double:
                        case BorderStyleEnum.Groove:
                        case BorderStyleEnum.Inset:
                        case BorderStyleEnum.Solid:
                        case BorderStyleEnum.Outset:
                        case BorderStyleEnum.Ridge:
                        case BorderStyleEnum.WindowInset:
                        default:
                            p.DashStyle = DashStyle.Solid;
                            break;
                    }
                    g.DrawEllipse(p, r);
                }
            }
        }

        // Fill a polygon from the item's points; no outline is drawn here
        // (the border, if any, is handled elsewhere).
        private void FillPolygon(PagePolygon pp, Graphics g, RectangleF r)
        {
            StyleInfo si = pp.SI;
            PointF[] tmp = new PointF[pp.Points.Length];
            if (!si.BackgroundColor.IsEmpty)
            {
                for (int i = 0; i < pp.Points.Length; i++)
                {
                    tmp[i].X = PixelsX(pp.Points[i].X + _left - _hScroll);
                    tmp[i].Y = PixelsY(pp.Points[i].Y + _top - _vScroll);
                }
                // FIX: brush was never disposed in the original.
                using (SolidBrush sb = new SolidBrush(si.BackgroundColor))
                {
                    g.FillPolygon(sb, tmp);
                }
            }
        }

        // Fill and outline a pie slice; only the top border style/color is used
        // for the outline (matches the original behavior).
        private void DrawPie(PagePie pp, Graphics g, RectangleF r)
        {
            StyleInfo si = pp.SI;
            if (!si.BackgroundColor.IsEmpty)
            {
                // FIX: brush was never disposed in the original.
                using (SolidBrush sb = new SolidBrush(si.BackgroundColor))
                {
                    g.FillPie(sb, (int)r.X, (int)r.Y, (int)r.Width, (int)r.Height,
                        (float)pp.StartAngle, (float)pp.SweepAngle);
                }
            }
            if (si.BStyleTop != BorderStyleEnum.None)
            {
                // FIX: pen was never disposed in the original.
                using (Pen p = new Pen(si.BColorTop, si.BWidthTop))
                {
                    switch (si.BStyleTop)
                    {
                        case BorderStyleEnum.Dashed:
                            p.DashStyle = DashStyle.Dash;
                            break;
                        case BorderStyleEnum.Dotted:
                            p.DashStyle = DashStyle.Dot;
                            break;
                        case BorderStyleEnum.Double:
                        case BorderStyleEnum.Groove:
                        case BorderStyleEnum.Inset:
                        case BorderStyleEnum.Solid:
                        case BorderStyleEnum.Outset:
                        case BorderStyleEnum.Ridge:
                        case BorderStyleEnum.WindowInset:
                        default:
                            p.DashStyle = DashStyle.Solid;
                            break;
                    }
                    g.DrawPie(p, r, pp.StartAngle, pp.SweepAngle);
                }
            }
        }

        /// <summary>
        /// Draw a text item: builds the font (falling back to Arial when the
        /// requested family is unavailable), alignment/writing-mode format, fills
        /// the background, then draws the string inside the padded rectangle.
        /// </summary>
        private void DrawString(PageText pt, Graphics g, RectangleF r)
        {
            StyleInfo si = pt.SI;
            string s = pt.Text;
            Font drawFont = null;
            StringFormat drawFormat = null;
            Brush drawBrush = null;
            try
            {
                // STYLE
                System.Drawing.FontStyle fs = 0;
                if (si.FontStyle == FontStyleEnum.Italic)
                    fs |= System.Drawing.FontStyle.Italic;

                switch (si.TextDecoration)
                {
                    case TextDecorationEnum.Underline:
                        fs |= System.Drawing.FontStyle.Underline;
                        break;
                    case TextDecorationEnum.LineThrough:
                        fs |= System.Drawing.FontStyle.Strikeout;
                        break;
                    case TextDecorationEnum.Overline:   // not supported by GDI+ FontStyle
                    case TextDecorationEnum.None:
                        break;
                }

                // WEIGHT
                switch (si.FontWeight)
                {
                    case FontWeightEnum.Bold:
                    case FontWeightEnum.Bolder:
                    case FontWeightEnum.W500:
                    case FontWeightEnum.W600:
                    case FontWeightEnum.W700:
                    case FontWeightEnum.W800:
                    case FontWeightEnum.W900:
                        fs |= System.Drawing.FontStyle.Bold;
                        break;
                    default:
                        break;
                }

                try
                {
                    drawFont = new Font(si.GetFontFamily(), si.FontSize, fs);  // si.FontSize already in points
                }
                catch (ArgumentException)
                {
                    drawFont = new Font("Arial", si.FontSize, fs);  // if this fails we'll let the error pass thru
                }

                // ALIGNMENT
                drawFormat = new StringFormat();
                switch (si.TextAlign)
                {
                    case TextAlignEnum.Right:
                        drawFormat.Alignment = StringAlignment.Far;
                        break;
                    case TextAlignEnum.Center:
                        drawFormat.Alignment = StringAlignment.Center;
                        break;
                    case TextAlignEnum.Left:
                    default:
                        drawFormat.Alignment = StringAlignment.Near;
                        break;
                }

                if (pt.SI.WritingMode == WritingModeEnum.tb_rl)
                {
                    drawFormat.FormatFlags |= StringFormatFlags.DirectionRightToLeft;
                    drawFormat.FormatFlags |= StringFormatFlags.DirectionVertical;
                }

                switch (si.VerticalAlign)
                {
                    case VerticalAlignEnum.Bottom:
                        drawFormat.LineAlignment = StringAlignment.Far;
                        break;
                    case VerticalAlignEnum.Middle:
                        drawFormat.LineAlignment = StringAlignment.Center;
                        break;
                    case VerticalAlignEnum.Top:
                    default:
                        drawFormat.LineAlignment = StringAlignment.Near;
                        break;
                }

                // draw the background
                DrawBackground(g, r, si);

                // adjust drawing rectangle based on padding
                // http://www.fyireporting.com/forum/viewtopic.php?t=892
                // A.S.> convert pt to px if needed (when printing we need px, when draw preview - pt)
                RectangleF r2;
                if (g.PageUnit == GraphicsUnit.Pixel)
                {
                    r2 = new RectangleF(r.Left + (si.PaddingLeft * g.DpiX) / 72,
                        r.Top + (si.PaddingTop * g.DpiX) / 72,
                        r.Width - ((si.PaddingLeft + si.PaddingRight) * g.DpiX) / 72,
                        r.Height - ((si.PaddingTop + si.PaddingBottom) * g.DpiX) / 72);
                }
                else
                {
                    r2 = new RectangleF(r.Left + si.PaddingLeft,
                        r.Top + si.PaddingTop,
                        r.Width - si.PaddingLeft - si.PaddingRight,
                        r.Height - si.PaddingTop - si.PaddingBottom);
                }

                drawBrush = new SolidBrush(si.Color);
                if (si.TextAlign == TextAlignEnum.Justified)
                {
                    GraphicsExtended.DrawStringJustified(g, pt.Text, drawFont, drawBrush, r2);
                }
                else if (pt.NoClip)     // request not to clip text
                {
                    g.DrawString(pt.Text, drawFont, drawBrush, new PointF(r.Left, r.Top), drawFormat);
                }
                else
                {
                    g.DrawString(pt.Text, drawFont, drawBrush, r2, drawFormat);
                }
            }
            finally
            {
                if (drawFont != null)
                    drawFont.Dispose();
                // FIX: the original disposed drawFont a second time here instead of
                // disposing drawFormat, leaking the StringFormat.
                if (drawFormat != null)
                    drawFormat.Dispose();
                if (drawBrush != null)
                    drawBrush.Dispose();
            }
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ using System; using System.Collections; using System.Collections.Generic; using System.IO; using System.Reflection; using log4net; using OpenMetaverse; using OpenSim.Framework.Servers; using OpenSim.Framework.Servers.HttpServer; using OpenSim.Services.Interfaces; // using OpenSim.Region.Framework.Interfaces; namespace OpenSim.Framework.Capabilities { public delegate void UpLoadedAsset( string assetName, string description, UUID assetID, UUID inventoryItem, UUID parentFolder, byte[] data, string inventoryType, string assetType); public delegate UUID UpdateItem(UUID itemID, byte[] data); public delegate void UpdateTaskScript(UUID itemID, UUID primID, bool isScriptRunning, byte[] data); public delegate void NewInventoryItem(UUID userID, InventoryItemBase item); public delegate void NewAsset(AssetBase asset); public delegate UUID ItemUpdatedCallback(UUID userID, UUID itemID, byte[] data); public delegate void TaskScriptUpdatedCallback(UUID userID, UUID itemID, UUID primID, bool isScriptRunning, byte[] data); public delegate InventoryCollection FetchInventoryDescendentsCAPS(UUID agentID, UUID folderID, UUID ownerID, bool fetchFolders, bool fetchItems, int sortOrder, out int version); /// <summary> /// XXX Probably not a particularly nice way of allow us to get the scene presence from the scene (chiefly so that /// we can popup a message on the user's client if the inventory service has permanently failed). But I didn't want /// to just pass the whole Scene into CAPS. /// </summary> public delegate IClientAPI GetClientDelegate(UUID agentID); public class Caps { private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private string m_httpListenerHostName; private uint m_httpListenPort; /// <summary> /// This is the uuid portion of every CAPS path. It is used to make capability urls private to the requester. 
/// </summary> private string m_capsObjectPath; public string CapsObjectPath { get { return m_capsObjectPath; } } private CapsHandlers m_capsHandlers; private static readonly string m_requestPath = "0000/"; // private static readonly string m_mapLayerPath = "0001/"; private static readonly string m_newInventory = "0002/"; //private static readonly string m_requestTexture = "0003/"; private static readonly string m_notecardUpdatePath = "0004/"; private static readonly string m_notecardTaskUpdatePath = "0005/"; private static readonly string m_fetchInventoryPath = "0006/"; // The following entries are in a module, however, they are also here so that we don't re-assign // the path to another cap by mistake. // private static readonly string m_parcelVoiceInfoRequestPath = "0007/"; // This is in a module. // private static readonly string m_provisionVoiceAccountRequestPath = "0008/";// This is in a module. // private static readonly string m_remoteParcelRequestPath = "0009/";// This is in the LandManagementModule. 
//private string eventQueue = "0100/"; private IHttpServer m_httpListener; private UUID m_agentID; private IAssetService m_assetCache; private int m_eventQueueCount = 1; private Queue<string> m_capsEventQueue = new Queue<string>(); private bool m_dumpAssetsToFile; private string m_regionName; private object m_fetchLock = new Object(); public bool SSLCaps { get { return m_httpListener.UseSSL; } } public string SSLCommonName { get { return m_httpListener.SSLCommonName; } } public CapsHandlers CapsHandlers { get { return m_capsHandlers; } } // These are callbacks which will be setup by the scene so that we can update scene data when we // receive capability calls public NewInventoryItem AddNewInventoryItem = null; public NewAsset AddNewAsset = null; public ItemUpdatedCallback ItemUpdatedCall = null; public TaskScriptUpdatedCallback TaskScriptUpdatedCall = null; public FetchInventoryDescendentsCAPS CAPSFetchInventoryDescendents = null; public GetClientDelegate GetClient = null; public Caps(IAssetService assetCache, IHttpServer httpServer, string httpListen, uint httpPort, string capsPath, UUID agent, bool dumpAssetsToFile, string regionName) { m_assetCache = assetCache; m_capsObjectPath = capsPath; m_httpListener = httpServer; m_httpListenerHostName = httpListen; m_httpListenPort = httpPort; if (httpServer.UseSSL) { m_httpListenPort = httpServer.SSLPort; httpListen = httpServer.SSLCommonName; httpPort = httpServer.SSLPort; } m_agentID = agent; m_dumpAssetsToFile = dumpAssetsToFile; m_capsHandlers = new CapsHandlers(httpServer, httpListen, httpPort, httpServer.UseSSL); m_regionName = regionName; } /// <summary> /// Register all CAPS http service handlers /// </summary> public void RegisterHandlers() { DeregisterHandlers(); string capsBase = "/CAPS/" + m_capsObjectPath; RegisterRegionServiceHandlers(capsBase); RegisterInventoryServiceHandlers(capsBase); } public void RegisterRegionServiceHandlers(string capsBase) { try { // the root of all evil m_capsHandlers["SEED"] = 
new RestStreamHandler("POST", capsBase + m_requestPath, CapsRequest); m_log.DebugFormat( "[CAPS]: Registered seed capability {0} for {1}", capsBase + m_requestPath, m_agentID); //m_capsHandlers["MapLayer"] = // new LLSDStreamhandler<OSDMapRequest, OSDMapLayerResponse>("POST", // capsBase + m_mapLayerPath, // GetMapLayer); m_capsHandlers["UpdateScriptTaskInventory"] = new RestStreamHandler("POST", capsBase + m_notecardTaskUpdatePath, ScriptTaskInventory); m_capsHandlers["UpdateScriptTask"] = m_capsHandlers["UpdateScriptTaskInventory"]; } catch (Exception e) { m_log.Error("[CAPS]: " + e.ToString()); } } public void RegisterInventoryServiceHandlers(string capsBase) { try { // I don't think this one works... m_capsHandlers["NewFileAgentInventory"] = new LLSDStreamhandler<LLSDAssetUploadRequest, LLSDAssetUploadResponse>("POST", capsBase + m_newInventory, NewAgentInventoryRequest); m_capsHandlers["UpdateNotecardAgentInventory"] = new RestStreamHandler("POST", capsBase + m_notecardUpdatePath, NoteCardAgentInventory); m_capsHandlers["UpdateScriptAgentInventory"] = m_capsHandlers["UpdateNotecardAgentInventory"]; m_capsHandlers["UpdateScriptAgent"] = m_capsHandlers["UpdateScriptAgentInventory"]; // As of RC 1.22.9 of the Linden client this is // supported m_capsHandlers["WebFetchInventoryDescendents"] =new RestStreamHandler("POST", capsBase + m_fetchInventoryPath, FetchInventoryDescendentsRequest); // justincc: I've disabled the CAPS service for now to fix problems with selecting textures, and // subsequent inventory breakage, in the edit object pane (such as mantis 1085). This requires // enhancements (probably filling out the folder part of the LLSD reply) to our CAPS service, // but when I went on the Linden grid, the // simulators I visited (version 1.21) were, surprisingly, no longer supplying this capability. 
Instead, // the 1.19.1.4 client appeared to be happily flowing inventory data over UDP // // This is very probably just a temporary measure - once the CAPS service appears again on the Linden grid // we will be // able to get the data we need to implement the necessary part of the protocol to fix the issue above. // m_capsHandlers["FetchInventoryDescendents"] = // new RestStreamHandler("POST", capsBase + m_fetchInventoryPath, FetchInventoryRequest); // m_capsHandlers["FetchInventoryDescendents"] = // new LLSDStreamhandler<LLSDFetchInventoryDescendents, LLSDInventoryDescendents>("POST", // capsBase + m_fetchInventory, // FetchInventory)); // m_capsHandlers["RequestTextureDownload"] = new RestStreamHandler("POST", // capsBase + m_requestTexture, // RequestTexture); } catch (Exception e) { m_log.Error("[CAPS]: " + e.ToString()); } } /// <summary> /// Register a handler. This allows modules to register handlers. /// </summary> /// <param name="capName"></param> /// <param name="handler"></param> public void RegisterHandler(string capName, IRequestHandler handler) { m_capsHandlers[capName] = handler; //m_log.DebugFormat("[CAPS]: Registering handler for \"{0}\": path {1}", capName, handler.Path); } /// <summary> /// Remove all CAPS service handlers. /// /// </summary> /// <param name="httpListener"></param> /// <param name="path"></param> /// <param name="restMethod"></param> public void DeregisterHandlers() { if (m_capsHandlers != null) { foreach (string capsName in m_capsHandlers.Caps) { m_capsHandlers.Remove(capsName); } } } /// <summary> /// Construct a client response detailing all the capabilities this server can provide. 
/// </summary> /// <param name="request"></param> /// <param name="path"></param> /// <param name="param"></param> /// <param name="httpRequest">HTTP request header object</param> /// <param name="httpResponse">HTTP response header object</param> /// <returns></returns> public string CapsRequest(string request, string path, string param, OSHttpRequest httpRequest, OSHttpResponse httpResponse) { //m_log.Debug("[CAPS]: Seed Caps Request in region: " + m_regionName); string result = LLSDHelpers.SerialiseLLSDReply(m_capsHandlers.CapsDetails); //m_log.DebugFormat("[CAPS] CapsRequest {0}", result); return result; } // FIXME: these all should probably go into the respective region // modules /// <summary> /// Processes a fetch inventory request and sends the reply /// </summary> /// <param name="request"></param> /// <param name="path"></param> /// <param name="param"></param> /// <returns></returns> // Request is like: //<llsd> // <map><key>folders</key> // <array> // <map> // <key>fetch-folders</key><boolean>1</boolean><key>fetch-items</key><boolean>1</boolean><key>folder-id</key><uuid>8e1e3a30-b9bf-11dc-95ff-0800200c9a66</uuid><key>owner-id</key><uuid>11111111-1111-0000-0000-000100bba000</uuid><key>sort-order</key><integer>1</integer> // </map> // </array> // </map> //</llsd> // // multiple fetch-folder maps are allowed within the larger folders map. 
public string FetchInventoryRequest(string request, string path, string param)
{
    // string unmodifiedRequest = request.ToString();
    //m_log.DebugFormat("[AGENT INVENTORY]: Received CAPS fetch inventory request {0}", unmodifiedRequest);
    m_log.Debug("[CAPS]: Inventory Request in region: " + m_regionName);

    Hashtable hash = new Hashtable();
    try
    {
        hash = (Hashtable)LLSD.LLSDDeserialize(Utils.StringToBytes(request));
    }
    catch (LLSD.LLSDParseException pe)
    {
        m_log.Error("[AGENT INVENTORY]: Fetch error: " + pe.Message);
        m_log.Error("Request: " + request);
    }

    // FIX: when deserialisation failed (or the client sent no "folders"
    // key) hash["folders"] is null; the old unconditional cast followed by
    // .Count threw a NullReferenceException. Return an empty-but-valid
    // folders reply instead so the client does not stall.
    ArrayList foldersrequested = hash["folders"] as ArrayList;
    if (foldersrequested == null)
        return WrapFoldersResponse("");

    string response = "";
    for (int i = 0; i < foldersrequested.Count; i++)
    {
        string inventoryitemstr = "";
        Hashtable inventoryhash = (Hashtable)foldersrequested[i];

        LLSDFetchInventoryDescendents llsdRequest = new LLSDFetchInventoryDescendents();
        LLSDHelpers.DeserialiseOSDMap(inventoryhash, llsdRequest);
        LLSDInventoryDescendents reply = FetchInventoryReply(llsdRequest);

        // Strip the per-reply wrapper; all replies are concatenated into a
        // single folders array below.
        inventoryitemstr = LLSDHelpers.SerialiseLLSDReply(reply);
        inventoryitemstr = inventoryitemstr.Replace("<llsd><map><key>folders</key><array>", "");
        inventoryitemstr = inventoryitemstr.Replace("</array></map></llsd>", "");

        response += inventoryitemstr;
    }

    response = WrapFoldersResponse(response);

    //m_log.DebugFormat("[AGENT INVENTORY]: Replying to CAPS fetch inventory request with following xml");
    //m_log.Debug(Util.GetFormattedXml(response));

    return response;
}

/// <summary>
/// Wrap concatenated per-folder reply fragments in a single valid LLSD
/// folders envelope. An empty body yields a self-closing array.
/// </summary>
/// <param name="contents">Concatenated serialised folder replies (may be empty).</param>
/// <returns>A complete LLSD folders document.</returns>
private static string WrapFoldersResponse(string contents)
{
    if (contents.Length == 0)
    {
        // Ter-guess: If requests fail a lot, the client seems to stop requesting descendants.
        // Therefore, I'm concluding that the client only has so many threads available to do requests
        // and when a thread stalls.. is stays stalled.
        // Therefore we need to return something valid
        return "<llsd><map><key>folders</key><array /></map></llsd>";
    }
    return "<llsd><map><key>folders</key><array>" + contents + "</array></map></llsd>";
}

/// <summary>
/// Handles a CAPS FetchInventoryDescendents request, working around two
/// client-side serialisation quirks before deserialising.
/// </summary>
/// <param name="request">Raw LLSD request body</param>
/// <param name="path"></param>
/// <param name="param"></param>
/// <param name="httpRequest">HTTP request header object</param>
/// <param name="httpResponse">HTTP response header object</param>
/// <returns>Serialised LLSD folders reply</returns>
public string FetchInventoryDescendentsRequest(string request, string path, string param,
                                               OSHttpRequest httpRequest, OSHttpResponse httpResponse)
{
    // nasty temporary hack here, the linden client falsely
    // identifies the uuid 00000000-0000-0000-0000-000000000000
    // as a string which breaks us
    //
    // correctly mark it as a uuid
    //
    request = request.Replace("<string>00000000-0000-0000-0000-000000000000</string>", "<uuid>00000000-0000-0000-0000-000000000000</uuid>");

    // another hack <integer>1</integer> results in a
    // System.ArgumentException: Object type System.Int32 cannot
    // be converted to target type: System.Boolean
    //
    request = request.Replace("<key>fetch_folders</key><integer>0</integer>", "<key>fetch_folders</key><boolean>0</boolean>");
    request = request.Replace("<key>fetch_folders</key><integer>1</integer>", "<key>fetch_folders</key><boolean>1</boolean>");

    Hashtable hash = new Hashtable();
    try
    {
        hash = (Hashtable)LLSD.LLSDDeserialize(Utils.StringToBytes(request));
    }
    catch (LLSD.LLSDParseException pe)
    {
        m_log.Error("[AGENT INVENTORY]: Fetch error: " + pe.Message);
        m_log.Error("Request: " + request);
    }

    // FIX: same null guard as FetchInventoryRequest — a failed parse left
    // hash empty and the old cast/Count dereference threw.
    ArrayList foldersrequested = hash["folders"] as ArrayList;
    if (foldersrequested == null)
        return WrapFoldersResponse("");

    string response = "";
    lock (m_fetchLock)
    {
        for (int i = 0; i < foldersrequested.Count; i++)
        {
            string inventoryitemstr = "";
            Hashtable inventoryhash = (Hashtable)foldersrequested[i];

            LLSDFetchInventoryDescendents llsdRequest = new LLSDFetchInventoryDescendents();

            try
            {
                LLSDHelpers.DeserialiseOSDMap(inventoryhash, llsdRequest);
            }
            catch (Exception e)
            {
                m_log.Debug("[CAPS]: caught exception doing OSD deserialize" + e);
            }

            LLSDInventoryDescendents reply = FetchInventoryReply(llsdRequest);

            inventoryitemstr = LLSDHelpers.SerialiseLLSDReply(reply);
            inventoryitemstr = inventoryitemstr.Replace("<llsd><map><key>folders</key><array>", "");
            inventoryitemstr = inventoryitemstr.Replace("</array></map></llsd>", "");

            response += inventoryitemstr;
        }

        response = WrapFoldersResponse(response);

        //m_log.DebugFormat("[CAPS]: Replying to CAPS fetch inventory request with following xml");
        //m_log.Debug("[CAPS] "+response);
    }
    return response;
}

/// <summary>
/// Construct an LLSD reply packet to a CAPS inventory request
/// </summary>
/// <param name="invFetch"></param>
/// <returns></returns>
private LLSDInventoryDescendents FetchInventoryReply(LLSDFetchInventoryDescendents invFetch)
{
    LLSDInventoryDescendents reply = new LLSDInventoryDescendents();
    LLSDInventoryFolderContents contents = new LLSDInventoryFolderContents();
    contents.agent_id = m_agentID;
    contents.owner_id = invFetch.owner_id;
    contents.folder_id = invFetch.folder_id;
    reply.folders.Array.Add(contents);

    InventoryCollection inv = new InventoryCollection();
    inv.Folders = new List<InventoryFolderBase>();
    inv.Items = new List<InventoryItemBase>();
    int version = 0;

    // Delegate the actual fetch to whoever registered for it (region module).
    if (CAPSFetchInventoryDescendents != null)
    {
        inv = CAPSFetchInventoryDescendents(m_agentID, invFetch.folder_id, invFetch.owner_id,
                                            invFetch.fetch_folders, invFetch.fetch_items,
                                            invFetch.sort_order, out version);
    }

    if (inv.Folders != null)
    {
        foreach (InventoryFolderBase invFolder in inv.Folders)
        {
            contents.categories.Array.Add(ConvertInventoryFolder(invFolder));
        }
    }

    if (inv.Items != null)
    {
        foreach (InventoryItemBase invItem in inv.Items)
        {
            contents.items.Array.Add(ConvertInventoryItem(invItem));
        }
    }

    contents.descendents = contents.items.Array.Count + contents.categories.Array.Count;
    contents.version = version;

    return reply;
}

/// <summary>
/// Convert an internal inventory folder object into an LLSD object.
/// </summary>
/// <param name="invFolder"></param>
/// <returns></returns>
private LLSDInventoryFolder ConvertInventoryFolder(InventoryFolderBase invFolder)
{
    LLSDInventoryFolder llsdFolder = new LLSDInventoryFolder();
    llsdFolder.folder_id = invFolder.ID;
    llsdFolder.parent_id = invFolder.ParentID;
    llsdFolder.name = invFolder.Name;

    // Guard against folder types outside the known asset-type table.
    if (invFolder.Type < 0 || invFolder.Type >= TaskInventoryItem.Types.Length)
        llsdFolder.type = "-1";
    else
        llsdFolder.type = TaskInventoryItem.Types[invFolder.Type];
    llsdFolder.preferred_type = "-1";

    return llsdFolder;
}

/// <summary>
/// Convert an internal inventory item object into an LLSD object.
/// </summary>
/// <param name="invItem"></param>
/// <returns></returns>
private LLSDInventoryItem ConvertInventoryItem(InventoryItemBase invItem)
{
    LLSDInventoryItem llsdItem = new LLSDInventoryItem();
    llsdItem.asset_id = invItem.AssetID;
    llsdItem.created_at = invItem.CreationDate;
    llsdItem.desc = invItem.Description;
    llsdItem.flags = (int)invItem.Flags;
    llsdItem.item_id = invItem.ID;
    llsdItem.name = invItem.Name;
    llsdItem.parent_id = invItem.Folder;

    try
    {
        // TODO reevaluate after upgrade to libomv >= r2566. Probably should use UtilsConversions.
        llsdItem.type = TaskInventoryItem.Types[invItem.AssetType];
        llsdItem.inv_type = TaskInventoryItem.InvTypes[invItem.InvType];
    }
    catch (Exception e)
    {
        m_log.Error("[CAPS]: Problem setting asset/inventory type while converting inventory item " + invItem.Name + " to LLSD:", e);
    }

    llsdItem.permissions = new LLSDPermissions();
    llsdItem.permissions.creator_id = invItem.CreatorIdAsUuid;
    llsdItem.permissions.base_mask = (int)invItem.CurrentPermissions;
    llsdItem.permissions.everyone_mask = (int)invItem.EveryOnePermissions;
    llsdItem.permissions.group_id = invItem.GroupID;
    llsdItem.permissions.group_mask = (int)invItem.GroupPermissions;
    llsdItem.permissions.is_owner_group = invItem.GroupOwned;
    llsdItem.permissions.next_owner_mask = (int)invItem.NextPermissions;
    llsdItem.permissions.owner_id = m_agentID;
    llsdItem.permissions.owner_mask = (int)invItem.CurrentPermissions;

    llsdItem.sale_info = new LLSDSaleInfo();
    llsdItem.sale_info.sale_price = invItem.SalePrice;
    switch (invItem.SaleType)
    {
        default:
            llsdItem.sale_info.sale_type = "not";
            break;
        case 1:
            llsdItem.sale_info.sale_type = "original";
            break;
        case 2:
            llsdItem.sale_info.sale_type = "copy";
            break;
        case 3:
            llsdItem.sale_info.sale_type = "contents";
            break;
    }

    return llsdItem;
}

/// <summary>
/// Handle a CAPS MapLayer request.
/// </summary>
/// <param name="mapReq"></param>
/// <returns></returns>
public LLSDMapLayerResponse GetMapLayer(LLSDMapRequest mapReq)
{
    m_log.Debug("[CAPS]: MapLayer Request in region: " + m_regionName);
    LLSDMapLayerResponse mapResponse = new LLSDMapLayerResponse();
    mapResponse.LayerData.Array.Add(GetOSDMapLayerResponse());
    return mapResponse;
}

/// <summary>
/// Build the single fixed map layer descriptor returned to clients.
/// </summary>
/// <returns></returns>
protected static OSDMapLayer GetOSDMapLayerResponse()
{
    OSDMapLayer mapLayer = new OSDMapLayer();
    mapLayer.Right = 5000;
    mapLayer.Top = 5000;
    mapLayer.ImageID = new UUID("00000000-0000-1111-9999-000000000006");

    return mapLayer;
}

/// <summary>
///
/// </summary>
/// <param name="request"></param>
/// <param name="path"></param>
///
<param name="param"></param>
/// <returns></returns>
public string RequestTexture(string request, string path, string param)
{
    m_log.Debug("texture request " + request);
    // Needs implementing (added to remove compiler warning)
    return String.Empty;
}

#region EventQueue (Currently not enabled)

/// <summary>
/// Dequeue and return the next pending event for the client, or an
/// empty event response when no events are queued.
/// </summary>
/// <param name="request"></param>
/// <param name="path"></param>
/// <param name="param"></param>
/// <returns></returns>
public string ProcessEventQueue(string request, string path, string param)
{
    // FIX: check and dequeue under the same lock. The old code tested
    // Count outside the lock, so a competing consumer could empty the
    // queue in between and Dequeue would throw on an empty queue.
    lock (m_capsEventQueue)
    {
        if (m_capsEventQueue.Count > 0)
        {
            return m_capsEventQueue.Dequeue();
        }
    }
    return CreateEmptyEventResponse();
}

/// <summary>
/// Build an EstablishAgentComms event, queue it for delivery and return it.
/// </summary>
/// <param name="caps"></param>
/// <param name="ipAddressPort"></param>
/// <returns></returns>
public string CreateEstablishAgentComms(string caps, string ipAddressPort)
{
    LLSDCapEvent eventItem = new LLSDCapEvent();
    eventItem.id = m_eventQueueCount;
    //should be creating a EstablishAgentComms item, but there isn't a class for it yet
    eventItem.events.Array.Add(new LLSDEmpty());
    string res = LLSDHelpers.SerialiseLLSDReply(eventItem);
    m_eventQueueCount++;

    // FIX: enqueue under the same lock used by ProcessEventQueue; the
    // old code enqueued without any synchronisation.
    lock (m_capsEventQueue)
    {
        m_capsEventQueue.Enqueue(res);
    }
    return res;
}

/// <summary>
/// Build an event response containing a single empty event.
/// </summary>
/// <returns></returns>
public string CreateEmptyEventResponse()
{
    LLSDCapEvent eventItem = new LLSDCapEvent();
    eventItem.id = m_eventQueueCount;
    eventItem.events.Array.Add(new LLSDEmpty());
    string res = LLSDHelpers.SerialiseLLSDReply(eventItem);
    // NOTE(review): m_eventQueueCount is incremented without synchronisation
    // here and in CreateEstablishAgentComms — benign if callers are single
    // threaded, but worth confirming.
    m_eventQueueCount++;
    return res;
}

#endregion

/// <summary>
/// Called by the script task update handler. Provides a URL to which the client can upload a new asset.
/// </summary>
/// <param name="request"></param>
/// <param name="path"></param>
/// <param name="param"></param>
/// <param name="httpRequest">HTTP request header object</param>
/// <param name="httpResponse">HTTP response header object</param>
/// <returns>Serialised LLSD upload response, or null if the request could not be handled.</returns>
public string ScriptTaskInventory(string request, string path, string param,
                                  OSHttpRequest httpRequest, OSHttpResponse httpResponse)
{
    try
    {
        m_log.Debug("[CAPS]: ScriptTaskInventory Request in region: " + m_regionName);
        //m_log.DebugFormat("[CAPS]: request: {0}, path: {1}, param: {2}", request, path, param);

        // Decode the LLSD body into a task script update request.
        Hashtable body = (Hashtable) LLSD.LLSDDeserialize(Utils.StringToBytes(request));
        LLSDTaskScriptUpdate updateRequest = new LLSDTaskScriptUpdate();
        LLSDHelpers.DeserialiseOSDMap(body, updateRequest);

        // Register a one-shot upload handler on a freshly generated path.
        string capsBase = "/CAPS/" + m_capsObjectPath;
        string uploaderPath = Util.RandomClass.Next(5000, 8000).ToString("0000");
        string handlerPath = capsBase + uploaderPath;

        TaskInventoryScriptUpdater uploader =
            new TaskInventoryScriptUpdater(
                updateRequest.item_id,
                updateRequest.task_id,
                updateRequest.is_script_running,
                handlerPath,
                m_httpListener,
                m_dumpAssetsToFile);
        uploader.OnUpLoad += TaskScriptUpdated;

        m_httpListener.AddStreamHandler(
            new BinaryStreamHandler("POST", handlerPath, uploader.uploaderCaps));

        // Tell the client where to POST the updated script asset.
        string protocol = m_httpListener.UseSSL ? "https://" : "http://";
        string uploaderURL = protocol + m_httpListenerHostName + ":" +
                             m_httpListenPort.ToString() + handlerPath;

        LLSDAssetUploadResponse uploadResponse = new LLSDAssetUploadResponse();
        uploadResponse.uploader = uploaderURL;
        uploadResponse.state = "upload";

        // m_log.InfoFormat("[CAPS]: " +
        //     "ScriptTaskInventory response: {0}",
        //     LLSDHelpers.SerialiseLLSDReply(uploadResponse)));

        return LLSDHelpers.SerialiseLLSDReply(uploadResponse);
    }
    catch (Exception e)
    {
        m_log.Error("[CAPS]: " + e.ToString());
    }

    return null;
}

/// <summary>
/// Called by the notecard update handler.
Provides a URL to which the client can upload a new asset.
/// </summary>
/// <param name="request"></param>
/// <param name="path"></param>
/// <param name="param"></param>
/// <param name="httpRequest">HTTP request header object</param>
/// <param name="httpResponse">HTTP response header object</param>
/// <returns>Serialised LLSD upload response containing the uploader URL.</returns>
public string NoteCardAgentInventory(string request, string path, string param,
                                     OSHttpRequest httpRequest, OSHttpResponse httpResponse)
{
    //m_log.Debug("[CAPS]: NoteCardAgentInventory Request in region: " + m_regionName + "\n" + request);
    //m_log.Debug("[CAPS]: NoteCardAgentInventory Request is: " + request);

    //OpenMetaverse.StructuredData.OSDMap hash = (OpenMetaverse.StructuredData.OSDMap)OpenMetaverse.StructuredData.LLSDParser.DeserializeBinary(Utils.StringToBytes(request));
    Hashtable hash = (Hashtable) LLSD.LLSDDeserialize(Utils.StringToBytes(request));
    LLSDItemUpdate llsdRequest = new LLSDItemUpdate();
    LLSDHelpers.DeserialiseOSDMap(hash, llsdRequest);

    // Register a one-shot uploader the client will POST the new asset to.
    string capsBase = "/CAPS/" + m_capsObjectPath;
    string uploaderPath = Util.RandomClass.Next(5000, 8000).ToString("0000");

    ItemUpdater uploader =
        new ItemUpdater(llsdRequest.item_id, capsBase + uploaderPath, m_httpListener, m_dumpAssetsToFile);
    uploader.OnUpLoad += ItemUpdated;

    m_httpListener.AddStreamHandler(
        new BinaryStreamHandler("POST", capsBase + uploaderPath, uploader.uploaderCaps));

    string protocol = "http://";

    if (m_httpListener.UseSSL)
        protocol = "https://";

    string uploaderURL = protocol + m_httpListenerHostName + ":" + m_httpListenPort.ToString() +
                         capsBase + uploaderPath;

    LLSDAssetUploadResponse uploadResponse = new LLSDAssetUploadResponse();
    uploadResponse.uploader = uploaderURL;
    uploadResponse.state = "upload";

    // m_log.InfoFormat("[CAPS]: " +
    //     "NoteCardAgentInventory response: {0}",
    //     LLSDHelpers.SerialiseLLSDReply(uploadResponse)));

    return LLSDHelpers.SerialiseLLSDReply(uploadResponse);
}

/// <summary>
/// Handle a NewFileAgentInventory request: verify the upload charge can be
/// covered (for chargeable asset types), then hand back an uploader URL.
/// </summary>
/// <param name="llsdRequest"></param>
/// <returns>Upload response with either an uploader URL or an error state.</returns>
public LLSDAssetUploadResponse NewAgentInventoryRequest(LLSDAssetUploadRequest llsdRequest)
{
    //m_log.Debug("[CAPS]: NewAgentInventoryRequest Request is: " + llsdRequest.ToString());
    //m_log.Debug("asset upload request via CAPS" + llsdRequest.inventory_type + " , " + llsdRequest.asset_type);

    if (llsdRequest.asset_type == "texture" ||
        llsdRequest.asset_type == "animation" ||
        llsdRequest.asset_type == "sound")
    {
        IClientAPI client = null;
        IScene scene = null;
        if (GetClient != null)
        {
            client = GetClient(m_agentID);

            // FIX: the old code dereferenced client.Scene before its
            // client != null check, throwing NullReferenceException when
            // the agent could not be resolved. Only run the money check
            // when we actually have a client.
            if (client != null)
            {
                scene = client.Scene;

                IMoneyModule mm = scene.RequestModuleInterface<IMoneyModule>();

                if (mm != null)
                {
                    if (!mm.UploadCovered(client))
                    {
                        client.SendAgentAlertMessage("Unable to upload asset. Insufficient funds.", false);

                        LLSDAssetUploadResponse errorResponse = new LLSDAssetUploadResponse();
                        errorResponse.uploader = "";
                        errorResponse.state = "error";
                        return errorResponse;
                    }
                }
            }
        }
    }

    string assetName = llsdRequest.name;
    string assetDes = llsdRequest.description;
    string capsBase = "/CAPS/" + m_capsObjectPath;
    UUID newAsset = UUID.Random();
    UUID newInvItem = UUID.Random();
    UUID parentFolder = llsdRequest.folder_id;
    string uploaderPath = Util.RandomClass.Next(5000, 8000).ToString("0000");

    AssetUploader uploader =
        new AssetUploader(assetName, assetDes, newAsset, newInvItem, parentFolder,
                          llsdRequest.inventory_type, llsdRequest.asset_type,
                          capsBase + uploaderPath, m_httpListener, m_dumpAssetsToFile);
    m_httpListener.AddStreamHandler(
        new BinaryStreamHandler("POST", capsBase + uploaderPath, uploader.uploaderCaps));

    string protocol = "http://";

    if (m_httpListener.UseSSL)
        protocol = "https://";

    string uploaderURL = protocol + m_httpListenerHostName + ":" + m_httpListenPort.ToString() +
                         capsBase + uploaderPath;

    LLSDAssetUploadResponse uploadResponse = new LLSDAssetUploadResponse();
    uploadResponse.uploader = uploaderURL;
    uploadResponse.state = "upload";
    uploader.OnUpLoad += UploadCompleteHandler;
    return uploadResponse;
}

/// <summary>
/// Store a completed upload as an asset and create the matching inventory item.
/// </summary>
/// <param name="assetName">Display name for the new asset/item</param>
/// <param name="assetDescription">Description for the new item</param>
/// <param name="assetID">ID the new asset was allocated</param>
/// <param name="inventoryItem">ID the new inventory item was allocated</param>
/// <param name="parentFolder">Folder to file the new item under</param>
/// <param name="data">Raw asset data</param>
/// <param name="inventoryType">Client-supplied inventory type name</param>
/// <param name="assetType">Client-supplied asset type name</param>
public void UploadCompleteHandler(string assetName, string assetDescription, UUID assetID,
                                  UUID inventoryItem, UUID parentFolder, byte[] data,
                                  string inventoryType, string assetType)
{
    // Map the client's type strings onto the numeric inventory/asset type
    // codes; anything unrecognised falls through as 0.
    sbyte assType = 0;
    sbyte inType = 0;

    if (inventoryType == "sound")
    {
        inType = 1;
        assType = 1;
    }
    else if (inventoryType == "animation")
    {
        inType = 19;
        assType = 20;
    }
    else if (inventoryType == "wearable")
    {
        inType = 18;
        switch (assetType)
        {
            case "bodypart":
                assType = 13;
                break;
            case "clothing":
                assType = 5;
                break;
        }
    }

    AssetBase asset;
    asset = new AssetBase();
    asset.FullID = assetID;
    asset.Type = assType;
    asset.Name = assetName;
    asset.Data = data;

    if (AddNewAsset != null)
        AddNewAsset(asset);
    else if (m_assetCache != null)
        m_assetCache.Store(asset);

    InventoryItemBase item = new InventoryItemBase();
    item.Owner = m_agentID;
    item.CreatorId = m_agentID.ToString();
    item.ID = inventoryItem;
    item.AssetID = asset.FullID;
    item.Description = assetDescription;
    item.Name = assetName;
    item.AssetType = assType;
    item.InvType = inType;
    item.Folder = parentFolder;
    // 2147483647 == 0x7FFFFFFF: all permission bits set for the owner.
    item.CurrentPermissions = 2147483647;
    item.BasePermissions = 2147483647;
    item.EveryOnePermissions = 0;
    item.NextPermissions = 2147483647;
    item.CreationDate = Util.UnixTimeSinceEpoch();

    if (AddNewInventoryItem != null)
    {
        AddNewInventoryItem(m_agentID, item);
    }
}

/// <summary>
/// Called when new asset data for an agent inventory item update has been uploaded.
/// </summary>
/// <param name="itemID">Item to update</param>
/// <param name="data">New asset data</param>
/// <returns>ID of the asset the item now points at, or UUID.Zero if no handler is wired up.</returns>
public UUID ItemUpdated(UUID itemID, byte[] data)
{
    if (ItemUpdatedCall != null)
    {
        return ItemUpdatedCall(m_agentID, itemID, data);
    }

    return UUID.Zero;
}

/// <summary>
/// Called when new asset data for an agent inventory item update has been uploaded.
/// </summary>
/// <param name="itemID">Item to update</param>
/// <param name="primID">Prim containing item to update</param>
/// <param name="isScriptRunning">Signals whether the script to update is currently running</param>
/// <param name="data">New asset data</param>
public void TaskScriptUpdated(UUID itemID, UUID primID, bool isScriptRunning, byte[] data)
{
    // Forward to the registered handler, if any.
    if (TaskScriptUpdatedCall != null)
    {
        TaskScriptUpdatedCall(m_agentID, itemID, primID, isScriptRunning, data);
    }
}

/// <summary>
/// Callback object invoked when a client POSTs new asset data to an
/// upload URL handed out by NewAgentInventoryRequest.
/// </summary>
public class AssetUploader
{
    public event UpLoadedAsset OnUpLoad;
    private UpLoadedAsset handlerUpLoad = null;

    private string uploaderPath = String.Empty;
    private UUID newAssetID;
    private UUID inventoryItemID;
    private UUID parentFolder;
    private IHttpServer httpListener;
    private bool m_dumpAssetsToFile;
    private string m_assetName = String.Empty;
    private string m_assetDes = String.Empty;

    private string m_invType = String.Empty;
    private string m_assetType = String.Empty;

    public AssetUploader(string assetName, string description, UUID assetID, UUID inventoryItem,
                         UUID parentFolderID, string invType, string assetType, string path,
                         IHttpServer httpServer, bool dumpAssetsToFile)
    {
        m_assetName = assetName;
        m_assetDes = description;
        newAssetID = assetID;
        inventoryItemID = inventoryItem;
        uploaderPath = path;
        httpListener = httpServer;
        parentFolder = parentFolderID;
        m_assetType = assetType;
        m_invType = invType;
        m_dumpAssetsToFile = dumpAssetsToFile;
    }

    /// <summary>
    /// Handle the asset POST: acknowledge the upload, deregister this
    /// one-shot handler and hand the data to the OnUpLoad subscriber.
    /// </summary>
    /// <param name="data">Uploaded asset data</param>
    /// <param name="path"></param>
    /// <param name="param"></param>
    /// <returns>Serialised LLSD upload-complete reply</returns>
    public string uploaderCaps(byte[] data, string path, string param)
    {
        UUID inv = inventoryItemID;
        string res = String.Empty;
        LLSDAssetUploadComplete uploadComplete = new LLSDAssetUploadComplete();
        uploadComplete.new_asset = newAssetID.ToString();
        uploadComplete.new_inventory_item = inv;
        uploadComplete.state = "complete";
        res = LLSDHelpers.SerialiseLLSDReply(uploadComplete);

        httpListener.RemoveStreamHandler("POST", uploaderPath);

        // TODO: probably make this a better set of extensions here
        string extension = ".jp2";

        if (m_invType != "image")
        {
            extension = ".dat";
        }

        if (m_dumpAssetsToFile)
        {
            SaveAssetToFile(m_assetName + extension, data);
        }
        handlerUpLoad = OnUpLoad;
        if (handlerUpLoad != null)
        {
            handlerUpLoad(m_assetName, m_assetDes, newAssetID, inv, parentFolder, data, m_invType, m_assetType);
        }

        return res;
    }

    private static void SaveAssetToFile(string filename, byte[] data)
    {
        string assetPath = "UserAssets";
        if (!Directory.Exists(assetPath))
        {
            Directory.CreateDirectory(assetPath);
        }
        // FIX: dispose the stream/writer even when Write throws; the old
        // explicit Close() calls leaked the file handle on failure.
        using (FileStream fs = File.Create(Path.Combine(assetPath, Util.safeFileName(filename))))
        using (BinaryWriter bw = new BinaryWriter(fs))
        {
            bw.Write(data);
        }
    }
}

/// <summary>
/// This class is a callback invoked when a client sends asset data to
/// an agent inventory notecard update url
/// </summary>
public class ItemUpdater
{
    public event UpdateItem OnUpLoad;

    private UpdateItem handlerUpdateItem = null;

    private string uploaderPath = String.Empty;
    private UUID inventoryItemID;
    private IHttpServer httpListener;
    private bool m_dumpAssetToFile;

    public ItemUpdater(UUID inventoryItem, string path, IHttpServer httpServer, bool dumpAssetToFile)
    {
        m_dumpAssetToFile = dumpAssetToFile;

        inventoryItemID = inventoryItem;
        uploaderPath = path;
        httpListener = httpServer;
    }

    /// <summary>
    /// Handle the notecard asset POST: pass the data to the OnUpLoad
    /// subscriber, acknowledge with the new asset ID and deregister.
    /// </summary>
    /// <param name="data">Uploaded asset data</param>
    /// <param name="path"></param>
    /// <param name="param"></param>
    /// <returns>Serialised LLSD upload-complete reply</returns>
    public string uploaderCaps(byte[] data, string path, string param)
    {
        UUID inv = inventoryItemID;
        string res = String.Empty;
        LLSDAssetUploadComplete uploadComplete = new LLSDAssetUploadComplete();
        UUID assetID = UUID.Zero;
        handlerUpdateItem = OnUpLoad;
        if (handlerUpdateItem != null)
        {
            assetID = handlerUpdateItem(inv, data);
        }

        uploadComplete.new_asset = assetID.ToString();
        uploadComplete.new_inventory_item = inv;
        uploadComplete.state = "complete";
        res = LLSDHelpers.SerialiseLLSDReply(uploadComplete);

        httpListener.RemoveStreamHandler("POST", uploaderPath);

        if (m_dumpAssetToFile)
        {
            SaveAssetToFile("updateditem" + Util.RandomClass.Next(1, 1000) + ".dat", data);
        }

        return res;
    }

    private static void SaveAssetToFile(string filename, byte[] data)
    {
        string assetPath = "UserAssets";
        if (!Directory.Exists(assetPath))
        {
            Directory.CreateDirectory(assetPath);
        }
        // FIX: using blocks guarantee disposal on exception (old code
        // leaked the handle if Write threw).
        using (FileStream fs = File.Create(Path.Combine(assetPath, filename)))
        using (BinaryWriter bw = new BinaryWriter(fs))
        {
            bw.Write(data);
        }
    }
}

/// <summary>
/// This class is a callback invoked when a client sends asset data to
/// a task inventory script update url
/// </summary>
public class TaskInventoryScriptUpdater
{
    public event UpdateTaskScript OnUpLoad;

    private UpdateTaskScript handlerUpdateTaskScript = null;

    private string uploaderPath = String.Empty;
    private UUID inventoryItemID;
    private UUID primID;
    private bool isScriptRunning;
    private IHttpServer httpListener;
    private bool m_dumpAssetToFile;

    public TaskInventoryScriptUpdater(UUID inventoryItemID, UUID primID, int isScriptRunning,
                                      string path, IHttpServer httpServer, bool dumpAssetToFile)
    {
        m_dumpAssetToFile = dumpAssetToFile;

        this.inventoryItemID = inventoryItemID;
        this.primID = primID;

        // This comes in over the packet as an integer, but actually appears to be treated as a bool
        this.isScriptRunning = (0 == isScriptRunning ? false : true);

        uploaderPath = path;
        httpListener = httpServer;
    }

    /// <summary>
    /// Handle the script asset POST: hand the data to the OnUpLoad
    /// subscriber, acknowledge completion and deregister this handler.
    /// </summary>
    /// <param name="data">Uploaded script asset data</param>
    /// <param name="path"></param>
    /// <param name="param"></param>
    /// <returns>Serialised LLSD upload-complete reply, or null on error</returns>
    public string uploaderCaps(byte[] data, string path, string param)
    {
        try
        {
            // m_log.InfoFormat("[CAPS]: " +
            //     "TaskInventoryScriptUpdater received data: {0}, path: {1}, param: {2}",
            //     data, path, param));

            string res = String.Empty;
            LLSDTaskInventoryUploadComplete uploadComplete = new LLSDTaskInventoryUploadComplete();

            handlerUpdateTaskScript = OnUpLoad;
            if (handlerUpdateTaskScript != null)
            {
                handlerUpdateTaskScript(inventoryItemID, primID, isScriptRunning, data);
            }

            uploadComplete.item_id = inventoryItemID;
            uploadComplete.task_id = primID;
            uploadComplete.state = "complete";
            res = LLSDHelpers.SerialiseLLSDReply(uploadComplete);

            httpListener.RemoveStreamHandler("POST", uploaderPath);

            if (m_dumpAssetToFile)
            {
                SaveAssetToFile("updatedtaskscript" + Util.RandomClass.Next(1, 1000) + ".dat", data);
            }

            // m_log.InfoFormat("[CAPS]: TaskInventoryScriptUpdater.uploaderCaps res: {0}", res);

            return res;
        }
        catch (Exception e)
        {
            m_log.Error("[CAPS]: " + e.ToString());
        }

        // XXX Maybe this should be some meaningful error packet
        return null;
    }

    private static void SaveAssetToFile(string filename, byte[] data)
    {
        string assetPath = "UserAssets";
        if (!Directory.Exists(assetPath))
        {
            Directory.CreateDirectory(assetPath);
        }
        // FIX: using blocks guarantee disposal on exception (old code
        // leaked the handle if Write threw).
        using (FileStream fs = File.Create(Path.Combine(assetPath, filename)))
        using (BinaryWriter bw = new BinaryWriter(fs))
        {
            bw.Write(data);
        }
    }
}
}
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Fixtures.AcceptanceTestsBodyDuration
{
    using Microsoft.Rest;
    using Models;
    using Newtonsoft.Json;
    using System.Collections;
    using System.Collections.Generic;
    using System.IO;
    using System.Net;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// Duration operations.
    /// </summary>
    public partial class Duration : IServiceOperations<AutoRestDurationTestService>, IDuration
    {
        /// <summary>
        /// Initializes a new instance of the Duration class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public Duration(AutoRestDurationTestService client)
        {
            // The operations class cannot function without its parent client
            // (it supplies BaseUri, HttpClient and serializer settings).
            if (client == null)
            {
                throw new System.ArgumentNullException("client");
            }
            Client = client;
        }

        /// <summary>
        /// Gets a reference to the AutoRestDurationTestService
        /// </summary>
        public AutoRestDurationTestService Client { get; private set; }

        /// <summary>
        /// Get null duration value
        /// </summary>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="ErrorException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<HttpOperationResponse<System.TimeSpan?>> GetNullWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            // NOTE: generated code — hand edits will be lost on regeneration.
            // Tracing
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "GetNull", tracingParameters);
            }
            // Construct URL
            var _baseUrl = Client.BaseUri.AbsoluteUri;
            var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "duration/null").ToString();
            // Create HTTP transport objects
            var _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers
            if (customHeaders != null)
            {
                // Caller-supplied headers replace any defaults with the same key.
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }

            // Serialize Request
            string _requestContent = null;
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200)
            {
                var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    // Best effort: attach the service's error body to the exception.
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                // Dispose transport objects here because they are not being
                // returned to the caller on the failure path.
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new HttpOperationResponse<System.TimeSpan?>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<System.TimeSpan?>(_responseContent, Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }

        /// <summary>
        /// Put a positive duration value
        /// </summary>
        /// <param name='durationBody'>
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="ErrorException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<HttpOperationResponse> PutPositiveDurationWithHttpMessagesAsync(System.TimeSpan durationBody, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            // Tracing: record entry with the operation name and parameters when enabled.
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("durationBody", durationBody);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "PutPositiveDuration", tracingParameters);
            }
            // Construct URL relative to the client's base URI.
            var _baseUrl = Client.BaseUri.AbsoluteUri;
            var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "duration/positiveduration").ToString();
            // Create HTTP transport objects
            var _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("PUT");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers: caller-supplied headers replace any existing header with the same key.
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request: JSON-encode the TimeSpan body as UTF-8.
            string _requestContent = null;
            _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(durationBody, Client.SerializationSettings);
            _httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
            _httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
            // Send Request, honoring cancellation before and after the round trip.
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            // Any status other than 200 is surfaced as an ErrorException carrying the wire payload.
            if ((int)_statusCode != 200)
            {
                var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    // Best-effort parse of the error body; a malformed body is tolerated below.
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                // Dispose transport objects before throwing; the caller never receives them.
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result: no response body is deserialized for this PUT.
            var _result = new HttpOperationResponse();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }
        /// <summary>
        /// Get a positive duration value
        /// </summary>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="ErrorException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<HttpOperationResponse<System.TimeSpan?>> GetPositiveDurationWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            // Tracing: record entry with the operation name when enabled.
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "GetPositiveDuration", tracingParameters);
            }
            // Construct URL relative to the client's base URI.
            var _baseUrl = Client.BaseUri.AbsoluteUri;
            var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "duration/positiveduration").ToString();
            // Create HTTP transport objects
            var _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers: caller-supplied headers replace any existing header with the same key.
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request: GET carries no body; kept null for the error wrapper below.
            string _requestContent = null;
            // Send Request, honoring cancellation before and after the round trip.
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            // Any status other than 200 is surfaced as an ErrorException carrying the wire payload.
            if ((int)_statusCode != 200)
            {
                var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    // Best-effort parse of the error body; a malformed body is tolerated below.
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                // Dispose transport objects before throwing; the caller never receives them.
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result: request/response are handed to the caller undisposed.
            var _result = new HttpOperationResponse<System.TimeSpan?>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            // Deserialize Response into a nullable TimeSpan on success.
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<System.TimeSpan?>(_responseContent, Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }
        /// <summary>
        /// Get an invalid duration value
        /// </summary>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="ErrorException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<HttpOperationResponse<System.TimeSpan?>> GetInvalidWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            // Tracing: record entry with the operation name when enabled.
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "GetInvalid", tracingParameters);
            }
            // Construct URL relative to the client's base URI.
            var _baseUrl = Client.BaseUri.AbsoluteUri;
            var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "duration/invalid").ToString();
            // Create HTTP transport objects
            var _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers: caller-supplied headers replace any existing header with the same key.
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request: GET carries no body; kept null for the error wrapper below.
            string _requestContent = null;
            // Send Request, honoring cancellation before and after the round trip.
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            // Any status other than 200 is surfaced as an ErrorException carrying the wire payload.
            if ((int)_statusCode != 200)
            {
                var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    // Best-effort parse of the error body; a malformed body is tolerated below.
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                // Dispose transport objects before throwing; the caller never receives them.
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result: request/response are handed to the caller undisposed.
            var _result = new HttpOperationResponse<System.TimeSpan?>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            // Deserialize Response: the server payload is intentionally invalid for this test
            // endpoint, so this is expected to raise SerializationException.
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<System.TimeSpan?>(_responseContent, Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Description;
using SampleWebApp.WebAPI.Areas.HelpPage.Models;

namespace SampleWebApp.WebAPI.Areas.HelpPage
{
    /// <summary>
    /// Extension methods on <see cref="HttpConfiguration"/> for configuring and querying the help page.
    /// </summary>
    public static class HelpPageConfigurationExtensions
    {
        // Prefix used to key cached HelpPageApiModel instances in config.Properties.
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            HelpPageSampleGenerator generator = config.GetHelpPageSampleGenerator();
            generator.SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" matches the action regardless of its parameter list.
            HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            // Lazily creates the generator on first use and caches it in config.Properties.
            object generator = config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
            return (HelpPageSampleGenerator)generator;
        }

        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
        /// <returns>
        /// An <see cref="HelpPageApiModel"/>
        /// </returns>
        public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
        {
            string modelId = ApiModelPrefix + apiDescriptionId;
            object model;
            if (!config.Properties.TryGetValue(modelId, out model))
            {
                // Cache miss: locate the matching ApiDescription and build the model once.
                Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
                ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
                if (apiDescription != null)
                {
                    model = GenerateApiModel(apiDescription, config.GetHelpPageSampleGenerator());
                    config.Properties.TryAdd(modelId, model);
                }
            }
            // Remains null when no matching ApiDescription exists.
            return (HelpPageApiModel)model;
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
        private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HelpPageSampleGenerator sampleGenerator)
        {
            // Builds the per-API model, collecting sample generation failures as error messages.
            HelpPageApiModel model = new HelpPageApiModel();
            model.ApiDescription = apiDescription;
            try
            {
                foreach (var request in sampleGenerator.GetSampleRequests(apiDescription))
                {
                    model.SampleRequests.Add(request.Key, request.Value);
                    LogInvalidSampleAsError(model, request.Value);
                }
                foreach (var response in sampleGenerator.GetSampleResponses(apiDescription))
                {
                    model.SampleResponses.Add(response.Key, response.Value);
                    LogInvalidSampleAsError(model, response.Value);
                }
            }
            catch (Exception e)
            {
                model.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. Exception Message: {0}", e.Message));
            }
            return model;
        }

        // Records the error message of an InvalidSample; other sample kinds are ignored.
        private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
        {
            InvalidSample invalidSample = sample as InvalidSample;
            if (invalidSample == null)
            {
                return;
            }
            apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
        }
    }
}
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System;
using System.Text;
using System.Runtime.InteropServices;
using System.Linq;

// Unity component wrapping CRUD operations for organization records stored via
// VitaDynamoDB. All operations are asynchronous and report completion through
// string-error callbacks (null/empty error means success). The component locates
// its database access object at call time via GameObject.Find("AWS").
public class DBOrganization : MonoBehaviour
{
    void Start()
    {
    }

    void Update()
    {
    }

    public delegate void CreateOrganizationDelegate(string error);

    // Creates a new organization record plus its built-in local ORGADMIN user.
    // NOTE(review): localAdminPassword appears to be stored on the entity as-is —
    // confirm whether hashing happens inside EntityDBVitaOrganization/DBUser.
    public void CreateOrganization(string orgname, string firstname, string lastname, string email, string phone, string localAdminUser, string localAdminPassword, CreateOrganizationDelegate callback)
    {
        EntityDBVitaOrganization profile = new EntityDBVitaOrganization(orgname, firstname, lastname, email, phone, localAdminUser, localAdminPassword);
        VitaDynamoDB vitaDynamoDB = GameObject.Find("AWS").GetComponent<VitaDynamoDB>();
        vitaDynamoDB.AddEntity<EntityDBVitaOrganization>(profile, (result, error) =>
        {
            if (!string.IsNullOrEmpty(error))
            {
                if (callback != null) callback(error);
                return;
            }

            // each org has a built-in local admin user that's attached to the org. Create that here
            DBUser dbUser = GameObject.FindObjectOfType<DBUser>();
            dbUser.CreateUser(profile.username, profile.password, profile.name, string.Format("{0} {1}", firstname, lastname), DBUser.AccountType.ORGADMIN, error2 =>
            {
                if (callback != null) callback(error2);
            });
        });
    }

    // Bulk-inserts organization entities, used for backup/restore only.
    // Busy-waits (one frame at a time) until every AddEntity callback has fired.
    public IEnumerator AddOrganizations(List<EntityDBVitaOrganization> orgs)
    {
        // TODO: Add bulk Add() function
        // this function is used for backup/restore purposes. In normal operation, use Create() instead
        int waitCount = orgs.Count;
        foreach (var org in orgs)
        {
            // NOTE(review): GameObject.Find inside the loop is loop-invariant — could be hoisted.
            VitaDynamoDB vitaDynamoDB = GameObject.Find("AWS").GetComponent<VitaDynamoDB>();
            vitaDynamoDB.AddEntity<EntityDBVitaOrganization>(org, (result, error) =>
            {
                waitCount--;
                if (!string.IsNullOrEmpty(error))
                {
                    // NOTE(review): inner check duplicates the outer one and is redundant;
                    // errors are logged but not reported to the caller.
                    if (!string.IsNullOrEmpty(error))
                    {
                        Debug.LogErrorFormat("AddOrganizations() failed - {0}", org.name);
                        return;
                    }
                }
            });
        }
        while (waitCount > 0)
            yield return new WaitForEndOfFrame();
    }

    public delegate void GetOrganizationDelegate(EntityDBVitaOrganization org, string error);

    // Fetches a single organization by name; FixNullLists() normalizes list fields on success.
    public void GetOrganization(string name, GetOrganizationDelegate callback)
    {
        VitaDynamoDB vitaDynamoDB = GameObject.Find("AWS").GetComponent<VitaDynamoDB>();
        vitaDynamoDB.GetEntity<EntityDBVitaOrganization>(name, null, null, (result, error) =>
        {
            EntityDBVitaOrganization org = (EntityDBVitaOrganization)result;
            if (string.IsNullOrEmpty(error))
            {
                org.FixNullLists();
            }
            if (callback != null) callback(org, error);
        });
    }

    public delegate void GetAllOrganizationsDelegate(List<EntityDBVitaOrganization> organizations, string error);

    // Fetches all organizations, sorted by name, with list fields normalized.
    public void GetAllOrganizations(GetAllOrganizationsDelegate callback)
    {
        VitaDynamoDB vitaDynamoDB = GameObject.Find("AWS").GetComponent<VitaDynamoDB>();
        vitaDynamoDB.GetAllEntities<EntityDBVitaOrganization>(EntityDBVitaOrganization.tableName, null, (result, error)=>
        {
            List<EntityDBVitaOrganization> orgs = null;
            if (string.IsNullOrEmpty(error))
            {
                orgs = new List<EntityDBVitaOrganization>((List<EntityDBVitaOrganization>)result);
                orgs.Sort((a, b) => a.name.CompareTo(b.name));
                orgs.ForEach((a) => a.FixNullLists());
            }
            if (callback != null) callback(orgs, error);
        });
    }

    public delegate void DeleteOrganizationDelegate(string error);

    // Deletes only the organization record itself (not its associated data).
    public void DeleteOrganization(string orgName, DeleteOrganizationDelegate callback)
    {
        VitaDynamoDB vitaDynamoDB = GameObject.Find("AWS").GetComponent<VitaDynamoDB>();
        vitaDynamoDB.DeleteEntity<EntityDBVitaOrganization>(orgName, null, (result, error) =>
        {
            if (callback != null) callback(error);
        });
    }

    // Deletes the organization and all dependent data (classes, assignments,
    // profiles, student sessions) via the coroutine below.
    public void DeleteOrganizationAndData(string orgName, DeleteOrganizationDelegate callback)
    {
        StartCoroutine(DeleteOrganizationAndDataInternal(orgName, callback));
    }

    // Sequential cascade delete. Each stage queries an org-scoped secondary index,
    // batch-deletes matches, then busy-waits on waitForCallback before the next stage.
    // The `error` flag aborts the coroutine after the failing stage's callback ran.
    IEnumerator DeleteOrganizationAndDataInternal(string orgName, DeleteOrganizationDelegate callback)
    {
        bool error = false;
        bool waitForCallback = true;
        VitaDynamoDB vitaDynamoDB = GameObject.Find("AWS").GetComponent<VitaDynamoDB>();
        // Stage 1: classes belonging to the organization.
        vitaDynamoDB.GetEntities<EntityDBVitaClass>(EntityDBVitaClass.tableName, "organization-index", "organization", orgName, null, (list1, error1) =>
        {
            waitForCallback = false;
            if (!string.IsNullOrEmpty(error1)) { error = true; if (callback != null) callback(error1); return; }
            List<EntityDBVitaClass> classItems = (List<EntityDBVitaClass>)list1;
            if (classItems.Count > 0)
            {
                // Re-arm the wait flag so the outer loop also waits for the batch delete.
                waitForCallback = true;
                vitaDynamoDB.BatchDelete<EntityDBVitaClass>(classItems, (obj2, error2) =>
                {
                    waitForCallback = false;
                    if (!string.IsNullOrEmpty(error2)) { error = true; if (callback != null) callback(error2); return; }
                });
            }
        });
        while (waitForCallback) yield return new WaitForEndOfFrame();
        if (error) yield break;
        error = false;
        waitForCallback = true;
        // Stage 2: homework assignments.
        vitaDynamoDB.GetEntities<EntityDBVitaClassHomeworkAssigment>(EntityDBVitaClassHomeworkAssigment.tableName, "organization-index", "organization", orgName, null, (list3, error3) =>
        {
            waitForCallback = false;
            if (!string.IsNullOrEmpty(error3)) { error = true; if (callback != null) callback(error3); return; }
            List<EntityDBVitaClassHomeworkAssigment> classAssigmentItems = (List<EntityDBVitaClassHomeworkAssigment>)list3;
            if (classAssigmentItems.Count > 0)
            {
                waitForCallback = true;
                vitaDynamoDB.BatchDelete<EntityDBVitaClassHomeworkAssigment>(classAssigmentItems, (obj4, error4) =>
                {
                    waitForCallback = false;
                    if (!string.IsNullOrEmpty(error4)) { error = true; if (callback != null) callback(error4); return; }
                });
            }
        });
        while (waitForCallback) yield return new WaitForEndOfFrame();
        if (error) yield break;
        error = false;
        waitForCallback = true;
        // Stage 3: user profiles.
        vitaDynamoDB.GetEntities<EntityDBVitaProfile>(VitaDynamoDB.TableNameEntityDBVitaProfile, "organization-index", "organization", orgName, null, (list5, error5) =>
        {
            waitForCallback = false;
            if (!string.IsNullOrEmpty(error5)) { error = true; if (callback != null) callback(error5); return; }
            List<EntityDBVitaProfile> profileItems = (List<EntityDBVitaProfile>)list5;
            if (profileItems.Count > 0)
            {
                waitForCallback = true;
                vitaDynamoDB.BatchDelete<EntityDBVitaProfile>(profileItems, (obj6, error6) =>
                {
                    waitForCallback = false;
                    if (!string.IsNullOrEmpty(error6)) { error = true; if (callback != null) callback(error6); return; }
                });
            }
        });
        while (waitForCallback) yield return new WaitForEndOfFrame();
        if (error) yield break;
        error = false;
        waitForCallback = true;
        // Stage 4: student sessions (table name is a string literal here, unlike the stages above).
        vitaDynamoDB.GetEntities<EntityDBVitaStudentSession>("VitaStudentSession", "organization-index", "organization", orgName, null, (listB, errorB) =>
        {
            waitForCallback = false;
            if (!string.IsNullOrEmpty(errorB)) { error = true; if (callback != null) callback(errorB); return; }
            List<EntityDBVitaStudentSession> studentSessionItems = (List<EntityDBVitaStudentSession>)listB;
            if (studentSessionItems.Count > 0)
            {
                waitForCallback = true;
                vitaDynamoDB.BatchDelete<EntityDBVitaStudentSession>(studentSessionItems, (objC, errorC) =>
                {
                    waitForCallback = false;
                    if (!string.IsNullOrEmpty(errorC)) { error = true; if (callback != null) callback(errorC); return; }
                });
            }
        });
        while (waitForCallback) yield return new WaitForEndOfFrame();
        if (error) yield break;
        error = false;
        waitForCallback = true;
        // Final stage: delete the organization record itself; its callback is the caller's.
        DeleteOrganization(orgName, (errorD) =>
        {
            if (callback != null) callback(errorD);
        });
    }

    public delegate void ArchiveOrganizationDelegate(string error);

    // Marks the organization as archived (archived = 1) via read-modify-write.
    public void ArchiveOrganization(string orgName, ArchiveOrganizationDelegate callback)
    {
        GetOrganization(orgName, (org, error) =>
        {
            if (!string.IsNullOrEmpty(error))
            {
                if (callback != null) callback(error);
                return;
            }
            org.archived = 1;
            VitaDynamoDB vitaDynamoDB = GameObject.Find("AWS").GetComponent<VitaDynamoDB>();
            vitaDynamoDB.AddEntity<EntityDBVitaOrganization>(org, (result, error2) =>
            {
                if (callback != null) callback(error2);
            });
        });
    }

    public delegate void ReinstateOrganizationDelegate(string error);

    // Clears the archived flag (archived = 0) via read-modify-write.
    public void ReinstateOrganization(string orgName, ReinstateOrganizationDelegate callback)
    {
        GetOrganization(orgName, (org, error) =>
        {
            if (!string.IsNullOrEmpty(error))
            {
                if (callback != null) callback(error);
                return;
            }
            org.archived = 0;
            VitaDynamoDB vitaDynamoDB = GameObject.Find("AWS").GetComponent<VitaDynamoDB>();
            vitaDynamoDB.AddEntity<EntityDBVitaOrganization>(org, (result, error2) =>
            {
                if (callback != null) callback(error2);
            });
        });
    }

    public delegate void UpdateOrganizationNameDelegate(string error);

    // Renames an organization by writing a copy under the new key, then deleting the old one.
    public void UpdateOrganizationName(string oldName, string newName, UpdateOrganizationNameDelegate callback)
    {
        GetOrganization(oldName, (org, error) =>
        {
            if (!string.IsNullOrEmpty(error))
            {
                if (callback != null) callback(error);
                return;
            }
            org.name = newName;
            VitaDynamoDB vitaDynamoDB = GameObject.Find("AWS").GetComponent<VitaDynamoDB>();
            vitaDynamoDB.AddEntity<EntityDBVitaOrganization>(org, (result2, error2) =>
            {
                if (!string.IsNullOrEmpty(error2))
                {
                    if (callback != null) callback(error2);
                    return;
                }
                // NOTE(review): this deletes an EntityDBVitaProfile keyed by oldName rather than
                // the old EntityDBVitaOrganization record — looks like a copy/paste bug; confirm
                // against VitaDynamoDB.DeleteEntity usage elsewhere.
                vitaDynamoDB.DeleteEntity<EntityDBVitaProfile>(oldName, null, (result3, error3) =>
                {
                    if (callback != null) callback(error3);
                });
            });
        });
    }

    public delegate void UpdateOrganizationDelegate(string error);

    // Legacy overload: forwards with an empty account-expiration value.
    [Obsolete("This overload does not save account expiration date")]
    public void UpdateOrganization(string name, string firstname, string lastname, string email, string phone, string username, string password, UpdateOrganizationDelegate callback)
    {
        UpdateOrganization(name, firstname, lastname, email, phone, username, password, "", callback);
    }

    // Read-modify-write update of the organization's contact/admin fields,
    // including the account expiration date (accexpire).
    public void UpdateOrganization(string name, string firstname, string lastname, string email, string phone, string username, string password, string accexpire, UpdateOrganizationDelegate callback)
    {
        GetOrganization(name, (org, error) =>
        {
            if (!string.IsNullOrEmpty(error))
            {
                if (callback != null) callback(error);
                return;
            }
            org.firstname = firstname;
            org.lastname = lastname;
            org.email = email;
            org.phone = phone;
            org.username = username;
            org.password = password;
            org.accexpire = accexpire;
            VitaDynamoDB vitaDynamoDB = GameObject.Find("AWS").GetComponent<VitaDynamoDB>();
            vitaDynamoDB.AddEntity<EntityDBVitaOrganization>(org, (result, error2) =>
            {
                if (callback != null) callback(error2);
            });
        });
    }
}
/* * [The "BSD licence"] * Copyright (c) 2011 Terence Parr * All rights reserved. * * Conversion to C#: * Copyright (c) 2011 Sam Harwell, Pixel Mine, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

namespace TypeSql.Antlr.Runtime.Tree
{
    using System.Collections.Generic;
    using ArgumentNullException = System.ArgumentNullException;
    using Exception = System.Exception;
    using IDictionary = System.Collections.IDictionary;
    using NotSupportedException = System.NotSupportedException;

    /** <summary>
     *  A TreeAdaptor that works with any Tree implementation. Concrete
     *  subclasses supply token/node construction (the abstract CreateToken /
     *  Create members); everything else is implemented generically in terms
     *  of the ITree interface.
     *  </summary> */
    public abstract class BaseTreeAdaptor : ITreeAdaptor
    {
        /** <summary>
         *  System.identityHashCode() is not always unique; we have to
         *  track ourselves.  That's ok, it's only for debugging, though it's
         *  expensive: we have to create a hashtable with all tree nodes in it.
         *  </summary>
         */
        protected IDictionary<object, int> treeToUniqueIDMap;
        // Next ID handed out by GetUniqueID; IDs start at 1, 0 is never used.
        protected int uniqueNodeID = 1;

        // A "nil" node is a flat-list root; it is just a node created from a
        // null payload token.
        public virtual object Nil()
        {
            return Create( null );
        }

        /** <summary>
         *  Create tree node that holds the start and stop tokens associated
         *  with an error.
         *  </summary>
         *
         *  <remarks>
         *  If you specify your own kind of tree nodes, you will likely have to
         *  override this method. CommonTree returns Token.INVALID_TOKEN_TYPE
         *  if no token payload but you might have to set token type for diff
         *  node type.
         *
         *  You don't have to subclass CommonErrorNode; you will likely need to
         *  subclass your own tree node class to avoid class cast exception.
         *  </remarks>
         */
        public virtual object ErrorNode( ITokenStream input, IToken start, IToken stop, RecognitionException e )
        {
            CommonErrorNode t = new CommonErrorNode( input, start, stop, e );
            //System.out.println("returning error node '"+t+"' @index="+input.index());
            return t;
        }

        // NOTE: casts unconditionally; a non-ITree argument throws InvalidCastException.
        public virtual bool IsNil( object tree )
        {
            return ( (ITree)tree ).IsNil;
        }

        // Duplicate a node and override its token type.
        public virtual object DupNode(int type, object treeNode)
        {
            object t = DupNode(treeNode);
            SetType(t, type);
            return t;
        }

        // Duplicate a node and override its text.
        public virtual object DupNode(object treeNode, string text)
        {
            object t = DupNode(treeNode);
            SetText(t, text);
            return t;
        }

        // Duplicate a node and override both its token type and text.
        public virtual object DupNode(int type, object treeNode, string text)
        {
            object t = DupNode(treeNode);
            SetType(t, type);
            SetText(t, text);
            return t;
        }

        // Deep-copy a whole subtree; the duplicate gets a null parent.
        public virtual object DupTree( object tree )
        {
            return DupTree( tree, null );
        }

        /** <summary>
         *  This is generic in the sense that it will work with any kind of
         *  tree (not just ITree interface).  It invokes the adaptor routines
         *  not the tree node routines to do the construction.
         *  </summary>
         */
        public virtual object DupTree( object t, object parent )
        {
            if ( t == null )
            {
                return null;
            }
            object newTree = DupNode( t );
            // ensure new subtree root has parent/child index set
            SetChildIndex( newTree, GetChildIndex( t ) ); // same index in new tree
            SetParent( newTree, parent );
            int n = GetChildCount( t );
            for ( int i = 0; i < n; i++ )
            {
                object child = GetChild( t, i );
                object newSubTree = DupTree( child, t );
                AddChild( newTree, newSubTree );
            }
            return newTree;
        }

        /** <summary>
         *  Add a child to the tree t.  If child is a flat tree (a list), make all
         *  in list children of t.  Warning: if t has no children, but child does
         *  and child isNil then you can decide it is ok to move children to t via
         *  t.children = child.children; i.e., without copying the array.  Just
         *  make sure that this is consistent with have the user will build
         *  ASTs.
         *  </summary>
         */
        public virtual void AddChild( object t, object child )
        {
            // Silently ignores a null tree or null child (e.g. optional subrules).
            if ( t != null && child != null )
            {
                ( (ITree)t ).AddChild( (ITree)child );
            }
        }

        /** <summary>
         *  If oldRoot is a nil root, just copy or move the children to newRoot.
         *  If not a nil root, make oldRoot a child of newRoot.
         *  </summary>
         *
         *  <remarks>
         *    old=^(nil a b c), new=r yields ^(r a b c)
         *    old=^(a b c), new=r yields ^(r ^(a b c))
         *
         *  If newRoot is a nil-rooted single child tree, use the single
         *  child as the new root node.
         *
         *    old=^(nil a b c), new=^(nil r) yields ^(r a b c)
         *    old=^(a b c), new=^(nil r) yields ^(r ^(a b c))
         *
         *  If oldRoot was null, it's ok, just return newRoot (even if isNil).
         *
         *    old=null, new=r yields r
         *    old=null, new=^(nil r) yields ^(nil r)
         *
         *  Return newRoot.  Throw an exception if newRoot is not a
         *  simple node or nil root with a single child node--it must be a root
         *  node.  If newRoot is ^(nil x) return x as newRoot.
         *
         *  Be advised that it's ok for newRoot to point at oldRoot's
         *  children; i.e., you don't have to copy the list.  We are
         *  constructing these nodes so we should have this control for
         *  efficiency.
         *  </remarks>
         */
        public virtual object BecomeRoot( object newRoot, object oldRoot )
        {
            //System.out.println("becomeroot new "+newRoot.toString()+" old "+oldRoot);
            ITree newRootTree = (ITree)newRoot;
            ITree oldRootTree = (ITree)oldRoot;
            if ( oldRoot == null )
            {
                return newRoot;
            }
            // handle ^(nil real-node)
            if ( newRootTree.IsNil )
            {
                int nc = newRootTree.ChildCount;
                if ( nc == 1 )
                    newRootTree = (ITree)newRootTree.GetChild( 0 );
                else if ( nc > 1 )
                {
                    // TODO: make tree run time exceptions hierarchy
                    throw new Exception( "more than one node as root (TODO: make exception hierarchy)" );
                }
            }
            // add oldRoot to newRoot; addChild takes care of case where oldRoot
            // is a flat list (i.e., nil-rooted tree).  All children of oldRoot
            // are added to newRoot.
            newRootTree.AddChild( oldRootTree );
            return newRootTree;
        }

        /** <summary>Transform ^(nil x) to x and nil to null</summary> */
        public virtual object RulePostProcessing( object root )
        {
            //System.out.println("rulePostProcessing: "+((Tree)root).toStringTree());
            ITree r = (ITree)root;
            if ( r != null && r.IsNil )
            {
                if ( r.ChildCount == 0 )
                {
                    // empty nil list -> no tree at all
                    r = null;
                }
                else if ( r.ChildCount == 1 )
                {
                    // single-child nil list -> promote the child to root
                    r = (ITree)r.GetChild( 0 );
                    // whoever invokes rule will set parent and child index
                    r.Parent = null;
                    r.ChildIndex = -1;
                }
            }
            return r;
        }

        // Convenience overload: wrap the token in a node first.
        public virtual object BecomeRoot( IToken newRoot, object oldRoot )
        {
            return BecomeRoot( Create( newRoot ), oldRoot );
        }

        // Create a node from a copy of fromToken with the type overridden.
        public virtual object Create( int tokenType, IToken fromToken )
        {
            fromToken = CreateToken( fromToken );
            fromToken.Type = tokenType;
            object t = Create( fromToken );
            return t;
        }

        // Create a node from a copy of fromToken with type and text overridden;
        // a null fromToken falls back to the (type, text) overload.
        public virtual object Create( int tokenType, IToken fromToken, string text )
        {
            if ( fromToken == null )
                return Create( tokenType, text );
            fromToken = CreateToken( fromToken );
            fromToken.Type = tokenType;
            fromToken.Text = text;
            object result = Create(fromToken);
            return result;
        }

        // Create a node from a copy of fromToken with only the text overridden.
        public virtual object Create(IToken fromToken, string text)
        {
            if (fromToken == null)
                throw new ArgumentNullException("fromToken");
            fromToken = CreateToken(fromToken);
            fromToken.Text = text;
            object result = Create(fromToken);
            return result;
        }

        // Create a node for an imaginary token (no corresponding input symbol).
        public virtual object Create( int tokenType, string text )
        {
            IToken fromToken = CreateToken( tokenType, text );
            object t = Create( fromToken );
            return t;
        }

        // Token type of a node; TokenTypes.Invalid for null/non-tree payloads.
        public virtual int GetType( object t )
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return TokenTypes.Invalid;
            return tree.Type;
        }

        // Subclasses that know their concrete node type must override.
        public virtual void SetType( object t, int type )
        {
            throw new NotSupportedException( "don't know enough about Tree node" );
        }

        public virtual string GetText( object t )
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return null;
            return tree.Text;
        }

        // Subclasses that know their concrete node type must override.
        public virtual void SetText( object t, string text )
        {
            throw new NotSupportedException( "don't know enough about Tree node" );
        }

        public virtual object GetChild( object t, int i )
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return null;
            return tree.GetChild(i);
        }

        public virtual void SetChild( object t, int i, object child )
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return;
            ITree childTree = GetTree(child);
            tree.SetChild(i, childTree);
        }

        // NOTE: unlike the other accessors this casts directly (no null guard),
        // matching the historical runtime behavior.
        public virtual object DeleteChild( object t, int i )
        {
            return ( (ITree)t ).DeleteChild( i );
        }

        public virtual int GetChildCount( object t )
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return 0;
            return tree.ChildCount;
        }

        // Debug-only stable node IDs; lazily builds the identity map.
        public virtual int GetUniqueID( object node )
        {
            if ( treeToUniqueIDMap == null )
            {
                treeToUniqueIDMap = new Dictionary<object, int>();
            }
            int id;
            if ( treeToUniqueIDMap.TryGetValue( node, out id ) )
                return id;
            id = uniqueNodeID;
            treeToUniqueIDMap[node] = id;
            uniqueNodeID++;
            return id;
            // GC makes these nonunique:
            // return System.identityHashCode(node);
        }

        /** <summary>
         *  Tell me how to create a token for use with imaginary token nodes.
         *  For example, there is probably no input symbol associated with imaginary
         *  token DECL, but you need to create it as a payload or whatever for
         *  the DECL node as in ^(DECL type ID).
         *  </summary>
         *
         *  <remarks>
         *  If you care what the token payload objects' type is, you should
         *  override this method and any other createToken variant.
         *  </remarks>
         */
        public abstract IToken CreateToken( int tokenType, string text );

        /** <summary>
         *  Tell me how to create a token for use with imaginary token nodes.
         *  For example, there is probably no input symbol associated with imaginary
         *  token DECL, but you need to create it as a payload or whatever for
         *  the DECL node as in ^(DECL type ID).
         *  </summary>
         *
         *  <remarks>
         *  This is a variant of createToken where the new token is derived from
         *  an actual real input token.  Typically this is for converting '{'
         *  tokens to BLOCK etc...  You'll see
         *
         *    r : lc='{' ID+ '}' -> ^(BLOCK[$lc] ID+) ;
         *
         *  If you care what the token payload objects' type is, you should
         *  override this method and any other createToken variant.
         *  </remarks>
         */
        public abstract IToken CreateToken( IToken fromToken );

        // Factory hook: build a tree node wrapping the given token payload.
        public abstract object Create( IToken payload );

        /** <summary>
         *  Duplicate a node.  This is part of the factory;
         *  override if you want another kind of node to be built.
         *  </summary>
         *
         *  <remarks>
         *  I could use reflection to prevent having to override this
         *  but reflection is slow.
         *  </remarks>
         */
        public virtual object DupNode(object treeNode)
        {
            ITree tree = GetTree(treeNode);
            if (tree == null)
                return null;
            return tree.DupNode();
        }

        public abstract IToken GetToken( object t );

        /** <summary>
         *  Track start/stop token for subtree root created for a rule.
         *  Only works with Tree nodes.  For rules that match nothing,
         *  seems like this will yield start=i and stop=i-1 in a nil node.
         *  Might be useful info so I'll not force to be i..i.
         *  </summary>
         */
        public virtual void SetTokenBoundaries(object t, IToken startToken, IToken stopToken)
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return;
            int start = 0;
            int stop = 0;
            if (startToken != null)
                start = startToken.TokenIndex;
            if (stopToken != null)
                stop = stopToken.TokenIndex;
            tree.TokenStartIndex = start;
            tree.TokenStopIndex = stop;
        }

        public virtual int GetTokenStartIndex(object t)
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return -1;
            return tree.TokenStartIndex;
        }

        public virtual int GetTokenStopIndex(object t)
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return -1;
            return tree.TokenStopIndex;
        }

        public virtual object GetParent(object t)
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return null;
            return tree.Parent;
        }

        public virtual void SetParent(object t, object parent)
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return;
            ITree parentTree = GetTree(parent);
            tree.Parent = parentTree;
        }

        public virtual int GetChildIndex(object t)
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return 0;
            return tree.ChildIndex;
        }

        public virtual void SetChildIndex(object t, int index)
        {
            ITree tree = GetTree(t);
            if (tree == null)
                return;
            tree.ChildIndex = index;
        }

        public virtual void ReplaceChildren(object parent, int startChildIndex, int stopChildIndex, object t)
        {
            ITree tree = GetTree(parent);
            if (tree == null)
                return;
            tree.ReplaceChildren(startChildIndex, stopChildIndex, t);
        }

        // Central narrowing helper: null passes through as null; anything that
        // is not an ITree throws NotSupportedException.
        protected virtual ITree GetTree(object t)
        {
            if (t == null)
                return null;
            ITree tree = t as ITree;
            if (tree == null)
                throw new NotSupportedException();
            return tree;
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.IO;
using System.Text;
using System.Reflection;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Globalization;
using Xunit;

// Fixture types used to exercise nested / generic type reflection.
internal class Outside
{
    public class Inside
    {
    }
}

internal class Outside<T>
{
    public class Inside<U>
    {
    }
}

/// <summary>
/// Unit tests for <see cref="System.Type"/> reflection members.
/// Each test exercises a representative set of shapes: a simple type,
/// an array type, a constructed generic, and an open generic definition.
/// </summary>
public class TypeTests
{
    // DeclaringType: non-null only for nested types; arrays are never "nested".
    [Fact]
    public static void TestDeclaringType()
    {
        Type t;
        Type d;

        t = typeof(int);
        d = t.DeclaringType;
        Assert.Null(d);

        t = typeof(Outside.Inside);
        d = t.DeclaringType;
        Assert.Equal(typeof(Outside), d);

        t = typeof(int[]);
        d = t.DeclaringType;
        Assert.Null(d);

        t = typeof(Outside.Inside[]);
        d = t.DeclaringType;
        Assert.Null(d);

        t = typeof(Outside<int>);
        d = t.DeclaringType;
        Assert.Null(d);

        // DeclaringType of a constructed nested generic is the open declaring type.
        t = typeof(Outside<int>.Inside<double>);
        d = t.DeclaringType;
        Assert.Equal(typeof(Outside<>), d);
    }

    // GenericParameterPosition is only valid on a generic *parameter* (e.g. the T
    // in IList<T>); every concrete or open generic type itself must throw.
    [Fact]
    public static void TestGenericParameterPosition()
    {
        Type t;
        int pos;

        t = typeof(int);
        Assert.Throws<InvalidOperationException>(() => pos = t.GenericParameterPosition);

        t = typeof(int[]);
        Assert.Throws<InvalidOperationException>(() => pos = t.GenericParameterPosition);

        t = typeof(IList<int>);
        Assert.Throws<InvalidOperationException>(() => pos = t.GenericParameterPosition);

        t = typeof(IList<>);
        Assert.Throws<InvalidOperationException>(() => pos = t.GenericParameterPosition);
    }

    // GenericTypeArguments is non-empty only for constructed generic types;
    // open generics and arrays-of-generics report zero arguments.
    [Fact]
    public static void TestGenericTypeArguments()
    {
        Type t;
        Type[] a;

        t = typeof(int);
        a = t.GenericTypeArguments;
        Assert.Equal(0, a.Length);

        t = typeof(IDictionary<int, String>);
        a = t.GenericTypeArguments;
        Assert.Equal(2, a.Length);
        Assert.Equal(typeof(int), a[0]);
        Assert.Equal(typeof(String), a[1]);

        t = typeof(IList<int>[]);
        a = t.GenericTypeArguments;
        Assert.Equal(0, a.Length);

        t = typeof(IList<>);
        a = t.GenericTypeArguments;
        Assert.Equal(0, a.Length);
    }

    // HasElementType: true for arrays, byrefs and pointers only.
    [Fact]
    public static void TestHasElementType()
    {
        Type t;
        bool b;

        t = typeof(int);
        b = t.HasElementType;
        Assert.False(b);

        t = typeof(int[]);
        b = t.HasElementType;
        Assert.True(b);

        t = typeof(IList<int>);
        b = t.HasElementType;
        Assert.False(b);

        t = typeof(IList<>);
        b = t.HasElementType;
        Assert.False(b);
    }

    [Fact]
    public static void TestIsArray()
    {
        Type t;
        bool b;

        t = typeof(int);
        b = t.IsArray;
        Assert.False(b);

        t = typeof(int[]);
        b = t.IsArray;
        Assert.True(b);

        t = typeof(IList<int>);
        b = t.IsArray;
        Assert.False(b);

        t = typeof(IList<>);
        b = t.IsArray;
        Assert.False(b);
    }

    [Fact]
    public static void TestIsByRef()
    {
        Type t;
        bool b;

        t = typeof(int);
        b = t.IsByRef;
        Assert.False(b);

        t = typeof(int[]);
        b = t.IsByRef;
        Assert.False(b);

        t = typeof(IList<int>);
        b = t.IsByRef;
        Assert.False(b);

        t = typeof(IList<>);
        b = t.IsByRef;
        Assert.False(b);
    }

    [Fact]
    public static void TestIsPointer()
    {
        Type t;
        bool b;

        t = typeof(int);
        b = t.IsPointer;
        Assert.False(b);

        t = typeof(int[]);
        b = t.IsPointer;
        Assert.False(b);

        t = typeof(IList<int>);
        b = t.IsPointer;
        Assert.False(b);

        // BUGFIX: previously this case repeated typeof(IList<int>); every other
        // test in this class covers the open generic definition last.
        t = typeof(IList<>);
        b = t.IsPointer;
        Assert.False(b);
    }

    // IsConstructedGenericType: true only when all type arguments are supplied.
    [Fact]
    public static void TestIsConstructedGenericType()
    {
        Type t;
        bool b;

        t = typeof(int);
        b = t.IsConstructedGenericType;
        Assert.False(b);

        t = typeof(int[]);
        b = t.IsConstructedGenericType;
        Assert.False(b);

        t = typeof(IList<int>);
        b = t.IsConstructedGenericType;
        Assert.True(b);

        t = typeof(IList<>);
        b = t.IsConstructedGenericType;
        Assert.False(b);
    }

    [Fact]
    public static void TestIsGenericParameter()
    {
        Type t;
        bool b;

        t = typeof(int);
        b = t.IsGenericParameter;
        Assert.False(b);

        t = typeof(int[]);
        b = t.IsGenericParameter;
        Assert.False(b);

        t = typeof(IList<int>);
        b = t.IsGenericParameter;
        Assert.False(b);

        t = typeof(IList<>);
        b = t.IsGenericParameter;
        Assert.False(b);
    }

    // IsNested follows DeclaringType semantics: arrays of nested types are not nested.
    [Fact]
    public static void TestIsNested()
    {
        Type t;
        bool b;

        t = typeof(int);
        b = t.IsNested;
        Assert.False(b);

        t = typeof(Outside.Inside);
        b = t.IsNested;
        Assert.True(b);

        t = typeof(int[]);
        b = t.IsNested;
        Assert.False(b);

        t = typeof(Outside.Inside[]);
        b = t.IsNested;
        Assert.False(b);

        t = typeof(Outside<int>);
        b = t.IsNested;
        Assert.False(b);

        t = typeof(Outside<int>.Inside<double>);
        b = t.IsNested;
        Assert.True(b);
    }

    // TypeHandle round-trips through Type.GetTypeFromHandle, and equal types
    // produce equal handles; the default handle maps back to null.
    [Fact]
    public static void TestTypeHandle()
    {
        Type t, t1, t2;
        RuntimeTypeHandle r, r1;

        t = typeof(int);
        r = t.TypeHandle;
        t1 = typeof(Outside<int>).GenericTypeArguments[0];
        r1 = t1.TypeHandle;
        Assert.Equal(r, r1);
        t2 = Type.GetTypeFromHandle(r);
        Assert.Equal(t, t2);
        t2 = Type.GetTypeFromHandle(r1);
        Assert.Equal(t, t2);

        r = default(RuntimeTypeHandle);
        t = Type.GetTypeFromHandle(r);
        Assert.Null(t);

        t = typeof(int[]);
        r = t.TypeHandle;
        t1 = typeof(int[]);
        r1 = t1.TypeHandle;
        Assert.Equal(r, r1);
        t2 = Type.GetTypeFromHandle(r);
        Assert.Equal(t, t2);
        t2 = Type.GetTypeFromHandle(r1);
        Assert.Equal(t, t2);

        t = typeof(Outside<int>);
        r = t.TypeHandle;
        t1 = typeof(Outside<int>);
        r1 = t1.TypeHandle;
        Assert.Equal(r, r1);
        t2 = Type.GetTypeFromHandle(r);
        Assert.Equal(t, t2);
        t2 = Type.GetTypeFromHandle(r1);
        Assert.Equal(t, t2);
    }

    // GetArrayRank throws ArgumentException for anything that is not an array.
    [Fact]
    public static void TestGetArrayRank()
    {
        Type t;
        int i;

        t = typeof(int);
        Assert.Throws<ArgumentException>(() => i = t.GetArrayRank());

        t = typeof(int[]);
        i = t.GetArrayRank();
        Assert.Equal(1, i);

        t = typeof(IList<int>);
        Assert.Throws<ArgumentException>(() => i = t.GetArrayRank());

        t = typeof(IList<>);
        Assert.Throws<ArgumentException>(() => i = t.GetArrayRank());
    }

    // GetElementType: non-null only for arrays/byrefs/pointers; generics return null.
    [Fact]
    public static void TestGetElementType()
    {
        Type t;
        Type d;

        t = typeof(int);
        d = t.GetElementType();
        Assert.Null(d);

        t = typeof(Outside.Inside);
        d = t.GetElementType();
        Assert.Null(d);

        t = typeof(int[]);
        d = t.GetElementType();
        Assert.Equal(typeof(int), d);

        t = typeof(Outside<int>.Inside<double>[]);
        d = t.GetElementType();
        Assert.Equal(typeof(Outside<int>.Inside<double>), d);

        t = typeof(Outside<int>);
        d = t.GetElementType();
        Assert.Null(d);

        t = typeof(Outside<int>.Inside<double>);
        d = t.GetElementType();
        Assert.Null(d);
    }

    // MakeArrayType produces the same Type object as typeof(T[]) and a "T[]" name.
    [Fact]
    public static void TestMakeArrayType()
    {
        Type t1, t2, t3, t5;
        bool b;

        t1 = typeof(int);
        t2 = typeof(int[]);
        t3 = t1.MakeArrayType();
        b = t3.IsArray;
        Assert.True(b);
        b = t3.HasElementType;
        Assert.True(b);
        t5 = t3.GetElementType();
        b = t5.Equals(t1);
        Assert.True(b);
        b = t2.Equals(t3);
        Assert.True(b);
        t5 = t1.MakeArrayType();
        b = t5.Equals(t3);
        Assert.True(b);
        String s1 = t1.ToString();
        String s2 = t3.ToString();
        Assert.Equal<String>(s1 + "[]", s2);
    }

    // MakeByRefType is idempotent per element type and yields a "T&" name.
    [Fact]
    public static void TestMakeByRefType()
    {
        Type t1, t2, t3, t5;
        bool b;

        t1 = typeof(int);
        t2 = t1.MakeByRefType();
        t3 = t1.MakeByRefType();
        b = t3.IsByRef;
        Assert.True(b);
        b = t3.HasElementType;
        Assert.True(b);
        t5 = t3.GetElementType();
        b = t5.Equals(t1);
        Assert.True(b);
        b = t2.Equals(t3);
        Assert.True(b);
        String s1 = t1.ToString();
        String s2 = t3.ToString();
        Assert.Equal<String>(s1 + "&", s2);
    }
}
using System;
using System.Data;
using System.Data.SqlClient;
using Csla;
using Csla.Data;

namespace ParentLoadSoftDelete.Business.ERCLevel
{
    /// <summary>
    /// F05Level111Child (editable child object).<br/>
    /// This is a generated base class of <see cref="F05Level111Child"/> business object.
    /// </summary>
    /// <remarks>
    /// This class is an item of <see cref="F04Level11"/> collection.
    /// </remarks>
    [Serializable]
    public partial class F05Level111Child : BusinessBase<F05Level111Child>
    {
        #region State Fields

        // Parent key loaded by Fetch from column "CMarentID1".
        // NOTE(review): the name "cMarentID1" looks like a generator typo of
        // "cParentID1" — confirm against the code generator/template before renaming.
        // Not undoable and not serialized: it is transient load-time state only.
        [NotUndoable]
        [NonSerialized]
        internal int cMarentID1 = 0;

        #endregion

        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="Level_1_1_1_Child_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> Level_1_1_1_Child_NameProperty = RegisterProperty<string>(p => p.Level_1_1_1_Child_Name, "Level_1_1_1 Child Name");
        /// <summary>
        /// Gets or sets the Level_1_1_1 Child Name.
        /// </summary>
        /// <value>The Level_1_1_1 Child Name.</value>
        public string Level_1_1_1_Child_Name
        {
            get { return GetProperty(Level_1_1_1_Child_NameProperty); }
            set { SetProperty(Level_1_1_1_Child_NameProperty, value); }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="F05Level111Child"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="F05Level111Child"/> object.</returns>
        internal static F05Level111Child NewF05Level111Child()
        {
            return DataPortal.CreateChild<F05Level111Child>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="F05Level111Child"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        /// <returns>A reference to the fetched <see cref="F05Level111Child"/> object.</returns>
        internal static F05Level111Child GetF05Level111Child(SafeDataReader dr)
        {
            F05Level111Child obj = new F05Level111Child();
            // show the framework that this is a child object
            obj.MarkAsChild();
            obj.Fetch(dr);
            // mark as unchanged so the freshly loaded object is not re-saved as new
            obj.MarkOld();
            return obj;
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="F05Level111Child"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        private F05Level111Child()
        {
            // Prevent direct creation
            // show the framework that this is a child object
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="F05Level111Child"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            // fire the customization hook before the base class finishes creation
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="F05Level111Child"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        private void Fetch(SafeDataReader dr)
        {
            // Value properties
            LoadProperty(Level_1_1_1_Child_NameProperty, dr.GetString("Level_1_1_1_Child_Name"));
            // transient parent key (see cMarentID1 note above)
            cMarentID1 = dr.GetInt32("CMarentID1");
            var args = new DataPortalHookArgs(dr);
            OnFetchRead(args);
        }

        /// <summary>
        /// Inserts a new <see cref="F05Level111Child"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(F04Level11 parent)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("AddF05Level111Child", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Level_1_1_ID", parent.Level_1_1_ID).DbType = DbType.Int32;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_Child_Name", ReadProperty(Level_1_1_1_Child_NameProperty)).DbType = DbType.String;
                    var args = new DataPortalHookArgs(cmd);
                    OnInsertPre(args);
                    cmd.ExecuteNonQuery();
                    OnInsertPost(args);
                }
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="F05Level111Child"/> object.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update(F04Level11 parent)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("UpdateF05Level111Child", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Level_1_1_ID", parent.Level_1_1_ID).DbType = DbType.Int32;
                    cmd.Parameters.AddWithValue("@Level_1_1_1_Child_Name", ReadProperty(Level_1_1_1_Child_NameProperty)).DbType = DbType.String;
                    var args = new DataPortalHookArgs(cmd);
                    OnUpdatePre(args);
                    cmd.ExecuteNonQuery();
                    OnUpdatePost(args);
                }
            }
        }

        /// <summary>
        /// Self deletes the <see cref="F05Level111Child"/> object from database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf(F04Level11 parent)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("DeleteF05Level111Child", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    // the parent key alone identifies the row to delete
                    cmd.Parameters.AddWithValue("@Level_1_1_ID", parent.Level_1_1_ID).DbType = DbType.Int32;
                    var args = new DataPortalHookArgs(cmd);
                    OnDeletePre(args);
                    cmd.ExecuteNonQuery();
                    OnDeletePost(args);
                }
            }
        }

        #endregion

        #region Pseudo Events

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.Data;
using System.Reflection;
using log4net;
using System.Data.SqlClient;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using System.Text;

namespace OpenSim.Data.MSSQL
{
    /// <summary>
    /// Generic CRUD handler for an MSSQL table backing type T.
    /// Public instance fields of T map to columns by name; a field named
    /// "Data" (if present) receives a dictionary of all columns that do NOT
    /// map to a field.  Row identity comes from the table's primary-key
    /// columns (see GetConstraints).
    /// </summary>
    public class MSSQLGenericTableHandler<T> where T : class, new()
    {
//        private static readonly ILog m_log =
//                LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        protected string m_ConnectionString;
        protected MSSQLManager m_database; //used for parameter type translation
        // Column-name -> FieldInfo for every public instance field of T except "Data".
        protected Dictionary<string, FieldInfo> m_Fields =
                new Dictionary<string, FieldInfo>();

        // Columns present in the table but not mapped to a field of T;
        // lazily populated from the first result set (see CheckColumnNames).
        protected List<string> m_ColumnNames = null;
        // Table name.  NOTE(review): interpolated directly into SQL text below —
        // assumed to come from trusted configuration, never user input; confirm.
        protected string m_Realm;
        // The catch-all "Data" field of T, if it declares one.
        protected FieldInfo m_DataField = null;

        /// <summary>
        /// Optionally runs schema migrations for storeName, then caches the
        /// field map for T via reflection.
        /// </summary>
        public MSSQLGenericTableHandler(string connectionString, string realm,
                string storeName)
        {
            m_Realm = realm;
            m_ConnectionString = connectionString;

            if (storeName != String.Empty)
            {
                using (SqlConnection conn = new SqlConnection(m_ConnectionString))
                {
                    conn.Open();
                    Migration m = new Migration(conn, GetType().Assembly, storeName);
                    m.Update();
                }
            }

            m_database = new MSSQLManager(m_ConnectionString);

            Type t = typeof(T);
            FieldInfo[] fields = t.GetFields(BindingFlags.Public |
                                             BindingFlags.Instance |
                                             BindingFlags.DeclaredOnly);
            if (fields.Length == 0)
                return;

            foreach (FieldInfo f in fields)
            {
                if (f.Name != "Data")
                    m_Fields[f.Name] = f;
                else
                    m_DataField = f;
            }
        }

        // One-time capture of the result-set columns that are not mapped to
        // fields of T; these feed the "Data" dictionary in DoQuery.
        private void CheckColumnNames(SqlDataReader reader)
        {
            if (m_ColumnNames != null)
                return;

            m_ColumnNames = new List<string>();

            DataTable schemaTable = reader.GetSchemaTable();
            foreach (DataRow row in schemaTable.Rows)
            {
                if (row["ColumnName"] != null &&
                    (!m_Fields.ContainsKey(row["ColumnName"].ToString())))
                    m_ColumnNames.Add(row["ColumnName"].ToString());
            }
        }

        // Returns the names of the table's primary-key columns, used as the
        // WHERE clause of the UPDATE issued by Store().
        private List<string> GetConstraints()
        {
            List<string> constraints = new List<string>();
            string query = string.Format(@"SELECT
COL_NAME(ic.object_id,ic.column_id) AS column_name
FROM sys.indexes AS i
INNER JOIN sys.index_columns AS ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
WHERE i.is_primary_key = 1
AND i.object_id = OBJECT_ID('{0}');", m_Realm);

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand(query, conn))
            {
                conn.Open();
                using (SqlDataReader rdr = cmd.ExecuteReader())
                {
                    while (rdr.Read())
                    {
                        // query produces 0 to many rows of single column, so always add the first item in each row
                        constraints.Add((string)rdr[0]);
                    }
                }
                return constraints;
            }
        }

        /// <summary>Fetch rows where a single field equals a single key.</summary>
        public virtual T[] Get(string field, string key)
        {
            return Get(new string[] { field }, new string[] { key });
        }

        /// <summary>
        /// Fetch rows matching all field/key pairs (ANDed, parameterized).
        /// Returns an empty array when the arrays differ in length.
        /// </summary>
        public virtual T[] Get(string[] fields, string[] keys)
        {
            if (fields.Length != keys.Length)
                return new T[0];

            List<string> terms = new List<string>();

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand())
            {
                for (int i = 0; i < fields.Length; i++)
                {
                    cmd.Parameters.Add(m_database.CreateParameter(fields[i], keys[i]));
                    terms.Add("[" + fields[i] + "] = @" + fields[i]);
                }

                string where = String.Join(" AND ", terms.ToArray());

                string query = String.Format("SELECT * FROM {0} WHERE {1}",
                        m_Realm, where);

                cmd.Connection = conn;
                cmd.CommandText = query;
                conn.Open();
                return DoQuery(cmd);
            }
        }

        /// <summary>
        /// Executes cmd and materializes each row into a T: bool fields are
        /// read as int (0/nonzero), UUID fields parsed from string, int fields
        /// converted, everything else assigned raw; unmapped columns go into
        /// the "Data" dictionary field when T declares one.
        /// </summary>
        protected T[] DoQuery(SqlCommand cmd)
        {
            using (SqlDataReader reader = cmd.ExecuteReader())
            {
                if (reader == null)
                    return new T[0];

                CheckColumnNames(reader);

                List<T> result = new List<T>();

                while (reader.Read())
                {
                    T row = new T();

                    foreach (string name in m_Fields.Keys)
                    {
                        // Dispatch on the field's *default* value type; assumes
                        // new T() leaves non-null defaults for these fields.
                        if (m_Fields[name].GetValue(row) is bool)
                        {
                            int v = Convert.ToInt32(reader[name]);
                            m_Fields[name].SetValue(row, v != 0 ? true : false);
                        }
                        else if (m_Fields[name].GetValue(row) is UUID)
                        {
                            UUID uuid = UUID.Zero;

                            // TryParse failure deliberately leaves UUID.Zero.
                            UUID.TryParse(reader[name].ToString(), out uuid);
                            m_Fields[name].SetValue(row, uuid);
                        }
                        else if (m_Fields[name].GetValue(row) is int)
                        {
                            int v = Convert.ToInt32(reader[name]);
                            m_Fields[name].SetValue(row, v);
                        }
                        else
                        {
                            m_Fields[name].SetValue(row, reader[name]);
                        }
                    }

                    if (m_DataField != null)
                    {
                        Dictionary<string, string> data =
                                new Dictionary<string, string>();

                        foreach (string col in m_ColumnNames)
                        {
                            data[col] = reader[col].ToString();
                            // NOTE(review): ToString() never returns null, so this
                            // guard is dead code (DBNull.ToString() yields "");
                            // kept for behavioral fidelity.
                            if (data[col] == null)
                                data[col] = String.Empty;
                        }

                        m_DataField.SetValue(row, data);
                    }

                    result.Add(row);
                }

                return result.ToArray();
            }
        }

        /// <summary>
        /// Fetch rows with a caller-supplied WHERE clause.
        /// NOTE(review): 'where' is concatenated into the SQL text — callers
        /// must not pass untrusted input here.
        /// </summary>
        public virtual T[] Get(string where)
        {
            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand())
            {
                string query = String.Format("SELECT * FROM {0} WHERE {1}",
                        m_Realm, where);

                cmd.Connection = conn;
                cmd.CommandText = query;
                //m_log.WarnFormat("[MSSQLGenericTable]: SELECT {0} WHERE {1}", m_Realm, where);
                conn.Open();
                return DoQuery(cmd);
            }
        }

        /// <summary>
        /// Upsert: tries a parameterized UPDATE keyed on the primary-key
        /// columns first; if no rows were affected, falls back to INSERT.
        /// Returns true when either statement affected at least one row.
        /// </summary>
        public virtual bool Store(T row)
        {
            List<string> constraintFields = GetConstraints();
            List<KeyValuePair<string, string>> constraints = new List<KeyValuePair<string, string>>();

            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand())
            {
                StringBuilder query = new StringBuilder();
                List<String> names = new List<String>();
                List<String> values = new List<String>();

                foreach (FieldInfo fi in m_Fields.Values)
                {
                    names.Add(fi.Name);
                    values.Add("@" + fi.Name);
                    // cache the primary-key values for the WHERE clause below
                    if (constraintFields.Count > 0 && constraintFields.Contains(fi.Name))
                    {
                        constraints.Add(new KeyValuePair<string, string>(fi.Name, fi.GetValue(row).ToString()));
                    }
                    cmd.Parameters.Add(m_database.CreateParameter(fi.Name, fi.GetValue(row).ToString()));
                }

                if (m_DataField != null)
                {
                    Dictionary<string, string> data =
                            (Dictionary<string, string>)m_DataField.GetValue(row);

                    foreach (KeyValuePair<string, string> kvp in data)
                    {
                        if (constraintFields.Count > 0 && constraintFields.Contains(kvp.Key))
                        {
                            constraints.Add(new KeyValuePair<string, string>(kvp.Key, kvp.Key));
                        }
                        names.Add(kvp.Key);
                        values.Add("@" + kvp.Key);
                        cmd.Parameters.Add(m_database.CreateParameter("@" + kvp.Key, kvp.Value));
                    }
                }

                query.AppendFormat("UPDATE {0} SET ", m_Realm);
                int i = 0;
                for (i = 0; i < names.Count - 1; i++)
                {
                    query.AppendFormat("[{0}] = {1}, ", names[i], values[i]);
                }
                query.AppendFormat("[{0}] = {1} ", names[i], values[i]);
                if (constraints.Count > 0)
                {
                    List<string> terms = new List<string>();
                    for (int j = 0; j < constraints.Count; j++)
                    {
                        terms.Add(" [" + constraints[j].Key + "] = @" + constraints[j].Key);
                    }
                    string where = String.Join(" AND ", terms.ToArray());
                    query.AppendFormat(" WHERE {0} ", where);
                }
                cmd.Connection = conn;
                cmd.CommandText = query.ToString();
                conn.Open();
                if (cmd.ExecuteNonQuery() > 0)
                {
                    //m_log.WarnFormat("[MSSQLGenericTable]: Updating {0}", m_Realm);
                    return true;
                }
                else
                {
                    // assume record has not yet been inserted
                    query = new StringBuilder();
                    query.AppendFormat("INSERT INTO {0} ([", m_Realm);
                    query.Append(String.Join("],[", names.ToArray()));
                    query.Append("]) values (" + String.Join(",", values.ToArray()) + ")");
                    cmd.Connection = conn;
                    cmd.CommandText = query.ToString();
                    //m_log.WarnFormat("[MSSQLGenericTable]: Inserting into {0}", m_Realm);
                    if (conn.State != ConnectionState.Open)
                        conn.Open();
                    if (cmd.ExecuteNonQuery() > 0)
                        return true;
                }
                return false;
            }
        }

        /// <summary>
        /// Delete rows where [field] = val (value is parameterized).
        /// Returns true when at least one row was deleted.
        /// </summary>
        public virtual bool Delete(string field, string val)
        {
            using (SqlConnection conn = new SqlConnection(m_ConnectionString))
            using (SqlCommand cmd = new SqlCommand())
            {
                string deleteCommand = String.Format("DELETE FROM {0} WHERE [{1}] = @{1}", m_Realm, field);
                cmd.CommandText = deleteCommand;

                cmd.Parameters.Add(m_database.CreateParameter(field, val));
                cmd.Connection = conn;
                conn.Open();

                if (cmd.ExecuteNonQuery() > 0)
                {
                    //m_log.Warn("[MSSQLGenericTable]: " + deleteCommand);
                    return true;
                }
                return false;
            }
        }
    }
}
/************************************************************************************ Copyright : Copyright 2014 Oculus VR, LLC. All Rights reserved. Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License"); you may not use the Oculus VR Rift SDK except in compliance with the License, which is provided at the time of installation or download, or which otherwise accompanies this software in either electronic or hard copy form. You may obtain a copy of the License at http://www.oculusvr.com/licenses/LICENSE-3.2 Unless required by applicable law or agreed to in writing, the Oculus VR SDK distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ************************************************************************************/ using UnityEngine; using System.Collections; using System.Collections.Generic; /// <summary> /// Cross-platform wrapper for Unity Input. /// See OVRGamepadController for a list of the base axis and button names. /// Depending on joystick number and platform /// the base names will be pre-pended with "Platform:Joy #:" to look them up in the /// Unity Input table. For instance: using an axis name of "Left_X_Axis" with GetJoystickAxis() /// will result in looking up the axis named "Win: Joy 1: Left_X_Axis" when running on /// Windows and "Android: Joy 1: Left_X_Axis" when running on Android. /// /// In addition to wrapping joystick input, this class allows the assignment of up, held /// and down events for any key, mouse button, or joystick button via the AddInputHandler() /// method. /// /// Currently this class relies on enumerations defined in OVRGamepadController /// so that it remains compatible with existing Unity OVR projects. 
/// When this class is included it overloads the default GPC_GetAxis() and GPC_GetButton()
/// calls to ReadAxis() and ReadButton() in this class.
/// Ideally this class would completely replace the OVRGamepadController class. This
/// would involve changing the GPC_GetAxis() and GPC_GetButton() calls in a project
/// and removing references to OVRGamepadController in this file (and moving some of
/// the tables to InputControl).
/// </summary>
public static class OVRInputControl
{
    [SerializeField] // FIXME: this was originally settable on the behavior before this was a static class... maybe remove it.
    /// <summary>
    /// Set 'true' to allow keyboard input (will be set 'false' on some platforms).
    /// </summary>
    private static bool allowKeyControls = true;

    // Handler delegates invoked from UpdateInputMapping() on key transitions.
    public delegate void OnKeyUp(MonoBehaviour comp);
    public delegate void OnKeyDown(MonoBehaviour comp);
    public delegate void OnKeyHeld(MonoBehaviour comp);

    /// <summary>
    /// Types of devices we can handle input for.
    /// </summary>
    public enum DeviceType
    {
        None = -1,
        Keyboard = 0, // a key
        Mouse,        // a mouse button
        Gamepad,      // a gamepad button
        Axis,         // a joystick axis (or trigger)
    };

    /// <summary>
    /// Mouse button definitions (values match Unity's Input.GetMouseButton indices).
    /// </summary>
    public enum MouseButton
    {
        None = -1,
        Left = 0,
        Right = 1,
        Middle = 2,
        Fourth = 4,
        Fifth = 5,
    };

    /// <summary>
    /// Holds information about a single key event: which device/control it
    /// watches, its previous state, and the up/down/held handlers to fire.
    /// </summary>
    class KeyInfo
    {
        public DeviceType deviceType = DeviceType.None;
        public string keyName = "";
        public MouseButton mouseButton = MouseButton.None;
        public OVRGamepadController.Button joystickButton = OVRGamepadController.Button.None;
        public OVRGamepadController.Axis joystickAxis = OVRGamepadController.Axis.None;
        public float threshold = 1000.0f; // threshold for triggers
        public bool wasDown = false;      // state last frame, for edge detection
        public OnKeyDown downHandler;
        public OnKeyHeld heldHandler;
        public OnKeyUp upHandler;

        /// <summary>
        /// Keyboard key constructor.
        /// </summary>
        public KeyInfo(
                DeviceType inDeviceType,
                string inKeyName,
                OnKeyDown inDownHandler,
                OnKeyHeld inHeldHandler,
                OnKeyUp inUpHandler)
        {
            deviceType = inDeviceType;
            keyName = inKeyName;
            mouseButton = MouseButton.None;
            joystickButton = OVRGamepadController.Button.None;
            joystickAxis = OVRGamepadController.Axis.None;
            threshold = 1000.0f;
            wasDown = false;
            downHandler = inDownHandler;
            heldHandler = inHeldHandler;
            upHandler = inUpHandler;
        }

        /// <summary>
        /// Mouse button constructor.
        /// </summary>
        public KeyInfo(
                DeviceType inDeviceType,
                MouseButton inMouseButton,
                OnKeyDown inDownHandler,
                OnKeyHeld inHeldHandler,
                OnKeyUp inUpHandler)
        {
            deviceType = inDeviceType;
            keyName = "Mouse Button " + (int)inMouseButton;
            mouseButton = inMouseButton;
            joystickButton = OVRGamepadController.Button.None;
            joystickAxis = OVRGamepadController.Axis.None;
            threshold = 1000.0f;
            wasDown = false;
            downHandler = inDownHandler;
            heldHandler = inHeldHandler;
            upHandler = inUpHandler;
        }

        /// <summary>
        /// Joystick button constructor.
        /// </summary>
        public KeyInfo(
                DeviceType inDeviceType,
                OVRGamepadController.Button inJoystickButton,
                OnKeyDown inDownHandler,
                OnKeyHeld inHeldHandler,
                OnKeyUp inUpHandler)
        {
            deviceType = inDeviceType;
            keyName = OVRGamepadController.ButtonNames[(int)inJoystickButton];
            mouseButton = MouseButton.None;
            joystickButton = inJoystickButton;
            joystickAxis = OVRGamepadController.Axis.None;
            threshold = 1000.0f;
            wasDown = false;
            downHandler = inDownHandler;
            heldHandler = inHeldHandler;
            upHandler = inUpHandler;
        }

        /// <summary>
        /// Joystick axis constructor.  Note the lower 0.5 threshold: an axis
        /// counts as "down" once it crosses half travel.
        /// </summary>
        public KeyInfo(
                DeviceType inDeviceType,
                OVRGamepadController.Axis inJoystickAxis,
                OnKeyDown inDownHandler,
                OnKeyHeld inHeldHandler,
                OnKeyUp inUpHandler)
        {
            deviceType = inDeviceType;
            keyName = OVRGamepadController.AxisNames[(int)inJoystickAxis];
            mouseButton = MouseButton.None;
            joystickButton = OVRGamepadController.Button.None;
            joystickAxis = inJoystickAxis;
            threshold = 0.5f;
            wasDown = false;
            downHandler = inDownHandler;
            heldHandler = inHeldHandler;
            upHandler = inUpHandler;
        }
    };

    // All registered key/button/axis handlers, polled every Update().
    private static List<KeyInfo> keyInfos = new List<KeyInfo>();

    // Platform prefix ("Win:", "Android:", ...) prepended to Unity input names.
    private static string platformPrefix = "";

    /// <summary>
    /// Maps joystick input to a component.
    /// </summary>
    public class InputMapping
    {
        public InputMapping(MonoBehaviour comp, int inJoystickNumber)
        {
            component = comp;
            joystickNumber = inJoystickNumber;
        }

        public MonoBehaviour component; // the component input goes to
        public int joystickNumber;      // the joystick that controls the object
    };

    /// <summary>
    /// List of mappings from joystick to component.
    /// </summary>
    private static List<InputMapping> inputMap = new List<InputMapping>();

    /// <summary>
    /// Initializes the input system for Windows (standalone player).
    /// </summary>
    private static void Init_Windows()
    {
        OVRDebugUtils.Print("Initializing input for Windows.");
        allowKeyControls = false;
        platformPrefix = "Win:";
    }

    /// <summary>
    /// Initializes the input system for Windows when running from the Unity editor.
    /// </summary>
    private static void Init_Windows_Editor()
    {
        OVRDebugUtils.Print("Initializing input for Windows Editor.");
        allowKeyControls = true;
        platformPrefix = "Win:";
    }

    /// <summary>
    /// Initializes the input system for Android.
    /// </summary>
    private static void Init_Android()
    {
        OVRDebugUtils.Print("Initializing input for Android.");
        allowKeyControls = true;
        platformPrefix = "Android:";
    }

    /// <summary>
    /// Initializes the input system for OSX (standalone player).
    /// </summary>
    private static void Init_OSX()
    {
        OVRDebugUtils.Print("Initializing input for OSX.");
        allowKeyControls = false;
        platformPrefix = "OSX:";
    }

    /// <summary>
    /// Initializes the input system for OSX when running from the Unity editor.
    /// </summary>
    private static void Init_OSX_Editor()
    {
        OVRDebugUtils.Print("Initializing input for OSX Editor.");
        allowKeyControls = true;
        platformPrefix = "OSX:";
    }

    /// <summary>
    /// Initializes the input system for iPhone.
    /// </summary>
    private static void Init_iPhone()
    {
        OVRDebugUtils.Print("Initializing input for iPhone.");
        allowKeyControls = false;
        platformPrefix = "iPhone:";
    }

    /// <summary>
    /// Static constructor for the OVRInputControl class.  Selects the
    /// platform-specific setup and logs every joystick Unity reports.
    /// </summary>
    static OVRInputControl()
    {
#if UNITY_ANDROID && !UNITY_EDITOR
        OVRGamepadController.SetReadAxisDelegate(ReadJoystickAxis);
        OVRGamepadController.SetReadButtonDelegate(ReadJoystickButton);
#endif
        switch (Application.platform)
        {
            case RuntimePlatform.WindowsPlayer: Init_Windows(); break;
            case RuntimePlatform.WindowsEditor: Init_Windows_Editor(); break;
            case RuntimePlatform.Android: Init_Android(); break;
            case RuntimePlatform.OSXPlayer: Init_OSX(); break;
            case RuntimePlatform.OSXEditor: Init_OSX_Editor(); break;
            case RuntimePlatform.IPhonePlayer: Init_iPhone(); break;
        }

        string[] joystickNames = Input.GetJoystickNames();
        for (int i = 0; i < joystickNames.Length; ++i)
        {
            OVRDebugUtils.Print("Found joystick '" + joystickNames[i] + "'...");
        }
    }

    /// <summary>
    /// Adds a handler for key input.
    /// </summary>
    public static void AddInputHandler(
            DeviceType dt,
            string keyName,
            OnKeyDown onDown,
            OnKeyHeld onHeld,
            OnKeyUp onUp)
    {
        keyInfos.Add(new KeyInfo(dt, keyName, onDown, onHeld, onUp));
    }

    /// <summary>
    /// Adds a handler for mouse button input.
    /// </summary>
    public static void AddInputHandler(
            DeviceType dt,
            MouseButton mouseButton,
            OnKeyDown onDown,
            OnKeyHeld onHeld,
            OnKeyUp onUp)
    {
        keyInfos.Add(new KeyInfo(dt, mouseButton, onDown, onHeld, onUp));
    }

    /// <summary>
    /// Adds a handler for joystick button input.
    /// </summary>
    public static void AddInputHandler(
            DeviceType dt,
            OVRGamepadController.Button joystickButton,
            OnKeyDown onDown,
            OnKeyHeld onHeld,
            OnKeyUp onUp)
    {
        keyInfos.Add(new KeyInfo(dt, joystickButton, onDown, onHeld, onUp));
    }

    /// <summary>
    /// Adds a handler for joystick axis input.
    /// </summary>
    public static void AddInputHandler(
            DeviceType dt,
            OVRGamepadController.Axis axis,
            OnKeyDown onDown,
            OnKeyHeld onHeld,
            OnKeyUp onUp)
    {
        keyInfos.Add(new KeyInfo(dt, axis, onDown, onHeld, onUp));
    }

    /// <summary>
    /// Returns the current value of the joystick axis specified by the name parameter.
    /// The name should partially match the name of an axis specified in the Unity
    /// Edit -> Project Settings -> Input pane, minus the Platform: Joy #: qualifiers.
    /// For instance, specify "Left_X_Axis" to select the appropriate axis for the
    /// current platform. This will be permuted into something like "Win:Joy 1:Left_X_Axis"
    /// before it is queried.
    /// </summary>
    public static float GetJoystickAxis(int joystickNumber, string name)
    {
        // TODO: except for the joystick prefix this could be a table lookup
        // with a table-per-joystick this could be a lookup.
#if UNITY_ANDROID && !UNITY_EDITOR
        // on the Samsung gamepad, the left and right triggers are actually buttons
        // so we map left and right triggers to the left and right shoulder buttons.
        if (name == "LeftTrigger")
        {
            return GetJoystickButton(joystickNumber, OVRGamepadController.Button.LeftShoulder) ? 1.0f : 0.0f;
        }
        else if (name == "RightTrigger")
        {
            return GetJoystickButton(joystickNumber, OVRGamepadController.Button.RightShoulder) ? 1.0f : 0.0f;
        }
#endif
        string platformName = platformPrefix + "Joy " + joystickNumber + ":" + name;
        return Input.GetAxis(platformName);
    }

    /// <summary>
    /// Delegate for OVRGamepadController.
    /// Returns the current value of the specified joystick axis.
    /// </summary>
    public static float GetJoystickAxis(int joystickNumber, OVRGamepadController.Axis axis)
    {
        string platformName = platformPrefix + "Joy " + joystickNumber + ":" + OVRGamepadController.AxisNames[(int)axis];
        return Input.GetAxis(platformName);
    }

    /// <summary>
    /// Delegate for OVRGamepadController.
    /// This only exists for legacy compatibility with OVRGamepadController.
    /// Always reads joystick 1.
    /// </summary>
    public static float ReadJoystickAxis(OVRGamepadController.Axis axis)
    {
        //OVRDebugUtils.Print("OVRInputControl.ReadJoystickAxis");
        return GetJoystickAxis(1, axis);
    }

    /// <summary>
    /// Returns true if a joystick button is depressed.
    /// The name should partially match the name of an axis specified in the Unity
    /// Edit -> Project Settings -> Input pane, minus the Platform: Joy #: qualifiers.
    /// For instance, specify "Button A" to select the appropriate axis for the
    /// current platform. This will be permuted into something like "Win:Joy 1:Button A"
    /// before it is queried.
    /// </summary>
    public static bool GetJoystickButton(int joystickNumber, string name)
    {
        // TODO: except for the joystick prefix this could be a table lookup
        // with a table-per-joystick this could be a lookup.
        string fullName = platformPrefix + "Joy " + joystickNumber + ":" + name;
        return Input.GetButton(fullName);
    }

    /// <summary>
    /// Delegate for OVRGamepadController.
    /// Returns true if the specified joystick button is pressed.
    /// </summary>
    public static bool GetJoystickButton(int joystickNumber, OVRGamepadController.Button button)
    {
        string fullName = platformPrefix + "Joy " + joystickNumber + ":" + OVRGamepadController.ButtonNames[(int)button];
        //OVRDebugUtils.Print("Checking button " + fullName);
        return Input.GetButton(fullName);
    }

    /// <summary>
    /// Delegate for OVRGamepadController.
    /// This only exists for legacy compatibility with OVRGamepadController.
    /// Always reads joystick 1.
    /// </summary>
    public static bool ReadJoystickButton(OVRGamepadController.Button button)
    {
        //OVRDebugUtils.Print("OVRInputControl.ReadJoystickButton");
        return GetJoystickButton(1, button);
    }

    //======================
    // GetMouseButton
    // Returns true if the specified mouse button is pressed.
    //======================
    public static bool GetMouseButton(MouseButton button)
    {
        return Input.GetMouseButton((int)button);
    }

    /// <summary>
    /// Outputs debug spam for any non-zero axis.
    /// This is only used for finding which axes are which with new controllers.
    /// </summary>
    private static void ShowAxisValues()
    {
        for (int i = 1; i <= 20; ++i)
        {
            string axisName = "Test Axis " + i;
            float v = Input.GetAxis(axisName);
            if (Mathf.Abs(v) > 0.2f)
            {
                OVRDebugUtils.Print("Test Axis " + i + ": v = " + v);
            }
        }
    }

    /// <summary>
    /// Outputs debug spam for any depressed button.
    /// This is only used for finding which buttons are which with new controllers.
    /// </summary>
    private static void ShowButtonValues()
    {
        for (int i = 0; i < 6; ++i)
        {
            string buttonName = "Test Button " + i;
            if (Input.GetButton(buttonName))
            {
                OVRDebugUtils.Print("Test Button " + i + " is down.");
            }
        }
    }

    /// <summary>
    /// Adds a mapping from a joystick to a behavior.  Duplicate mappings are
    /// rejected with an assert.
    /// </summary>
    public static void AddInputMapping(int joystickNumber, MonoBehaviour comp)
    {
        for (int i = 0; i < inputMap.Count; ++i)
        {
            InputMapping im = inputMap[i];
            if (im.component == comp && im.joystickNumber == joystickNumber)
            {
                OVRDebugUtils.Assert(false, "Input mapping already exists!");
                return;
            }
        }
        inputMap.Add(new InputMapping(comp, joystickNumber));
    }

    /// <summary>
    /// Removes a mapping from a joystick to a behavior.  No-op when the
    /// mapping does not exist.
    /// </summary>
    public static void RemoveInputMapping(int joystickNumber, MonoBehaviour comp)
    {
        for (int i = 0; i < inputMap.Count; ++i)
        {
            InputMapping im = inputMap[i];
            if (im.component == comp && im.joystickNumber == joystickNumber)
            {
                inputMap.RemoveAt(i);
                return;
            }
        }
    }

    /// <summary>
    /// Removes all control mappings.
    /// </summary>
    public static void ClearControlMappings()
    {
        inputMap.Clear();
    }

    /// <summary>
    /// Updates the state of all input mappings. This must be called from
    /// a single MonoBehaviour's Update() method for input to be read.
    /// </summary>
    public static void Update()
    {
        // Enable these two lines if you have a new controller that you need to
        // set up for which you do not know the axes.
        //ShowAxisValues();
        //ShowButtonValues();

        for (int i = 0; i < inputMap.Count; ++i)
        {
            UpdateInputMapping(inputMap[i].joystickNumber, inputMap[i].component);
        }
    }

    /// <summary>
    /// Updates a single input mapping: polls every registered KeyInfo on this
    /// joystick and fires the appropriate down/held/up handler based on the
    /// transition from last frame's state.
    /// </summary>
    private static void UpdateInputMapping(int joystickNumber, MonoBehaviour comp)
    {
        for (int i = 0; i < keyInfos.Count; ++i)
        {
            bool keyDown = false;

            // query the correct device
            KeyInfo keyInfo = keyInfos[i];
            if (keyInfo.deviceType == DeviceType.Gamepad)
            {
                //OVRDebugUtils.Print("Checking gamepad button " + keyInfo.KeyName);
                keyDown = GetJoystickButton(joystickNumber, keyInfo.joystickButton);
            }
            else if (keyInfo.deviceType == DeviceType.Axis)
            {
                float axisValue = GetJoystickAxis(joystickNumber, keyInfo.joystickAxis);
                keyDown = (axisValue >= keyInfo.threshold);
            }
            else if (allowKeyControls)
            {
                // Keyboard and mouse are only consulted when the platform
                // allows key controls (see the Init_* methods).
                if (keyInfo.deviceType == DeviceType.Mouse)
                {
                    keyDown = GetMouseButton(keyInfo.mouseButton);
                }
                else if (keyInfo.deviceType == DeviceType.Keyboard)
                {
                    keyDown = Input.GetKey(keyInfo.keyName);
                }
            }

            // handle the event
            if (!keyDown)
            {
                if (keyInfo.wasDown)
                {
                    // key was just released
                    keyInfo.upHandler(comp);
                }
            }
            else
            {
                if (!keyInfo.wasDown)
                {
                    // key was just pressed
                    //OVRDebugUtils.Print( "Key or Button down: " + keyInfo.KeyName );
                    keyInfo.downHandler(comp);
                }
                else
                {
                    // key is held
                    keyInfo.heldHandler(comp);
                }
            }

            // update the key info
            keyInfo.wasDown = keyDown;
        }
    }
};
using System;
using System.Collections.ObjectModel;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.ExceptionServices;
using System.Threading;
using OpenQA.Selenium;
using OpenQA.Selenium.Interactions;
using OpenQA.Selenium.Internal;
using OpenQA.Selenium.Remote;
using OpenQA.Selenium.Support.UI;
using Stubbornium.Configuration;

namespace Stubbornium
{
    /// <summary>
    /// A lazy, retrying wrapper around a Selenium element locator.  The
    /// element is re-resolved from its selector on every access, and actions
    /// are retried until their expected post-condition holds (see Do).
    /// </summary>
    public class StubbornWebElement : IClickable, IButtonClickable, ISearchContext, IWrapsDriver
    {
        private readonly RemoteWebDriver _browser;
        private readonly ISearchContext _parent;          // context FindElement(s) runs against
        private readonly By _selector;
        private readonly Func<IWebElement, bool> _collectionPredicate; // optional filter over matches
        private readonly int _elementIndex;               // which match to pick (see Element)
        private readonly StubbornConfiguration _configuration;

        // NOTE(review): this overload forwards parent._browser as the search
        // context rather than the parent element itself — confirm that
        // searching from the driver root (not the parent) is intended.
        public StubbornWebElement(By selector, StubbornWebElement parent, int elementIndex = 1, StubbornConfiguration configuration = null)
            : this(selector, parent._browser, parent._browser, null, elementIndex, configuration)
        {
        }

        public StubbornWebElement(By selector, RemoteWebDriver browser, ISearchContext parent = null, int elementIndex = 1, StubbornConfiguration configuration = null)
            : this(selector, browser, parent ?? browser, null, elementIndex, configuration)
        {
        }

        public StubbornWebElement(By selector, RemoteWebDriver browser, ISearchContext parent, Func<IWebElement, bool> collectionPredicate, int elementIndex, StubbornConfiguration configuration = null)
        {
            _browser = browser;
            _parent = parent;
            _selector = selector;
            _collectionPredicate = collectionPredicate;
            _elementIndex = elementIndex;
            _configuration = configuration ?? StubbornConfiguration.Default;
        }

        public By Selector => _selector;

        public StubbornFinder Find => new StubbornFinder(_browser, this);

        /// <summary>
        /// Resolves the live IWebElement from the selector on every read.
        /// NOTE(review): _elementIndex == 1 means "first match", but other
        /// values are passed to 0-based ElementAtOrDefault — so 1-based and
        /// 0-based indexing are mixed; confirm against callers.
        /// </summary>
        private IWebElement Element
        {
            get
            {
                if (_collectionPredicate == null)
                {
                    if (_elementIndex == 1)
                        return _parent.FindElement(_selector);
                    else
                        return _parent.FindElements(_selector).ElementAtOrDefault(_elementIndex);
                }
                else if (_elementIndex == 1)
                    return _parent.FindElements(_selector).FirstOrDefault(_collectionPredicate);
                else
                    return _parent.FindElements(_selector).Where(_collectionPredicate).ElementAtOrDefault(_elementIndex);
            }
        }

        /// <summary>
        /// Clears the element and types <paramref name="content"/>, retrying
        /// until the element's value equals the requested text.
        /// </summary>
        public void SetText(string content)
        {
            Do(
                element =>
                {
                    element().Clear();
                    element().SendKeys(content);
                },
                _ => Element.Value() == content,
                ExpectedConditions.ElementIsVisible(_selector),
                logMessage: "\"" + content + "\"");
        }

        /// <summary>Clicks, retrying until the given post-condition holds.</summary>
        public void Click<TResult>(Func<Func<IWebElement>, TResult> expectedConditionAfterAction, WaitTime waitTime = WaitTime.Short)
        {
            Do(
                element => element().Click(),
                expectedConditionAfterAction,
                ExpectedConditions.ElementIsVisible(_selector),
                waitTime: waitTime);
        }

        /// <summary>Button-style click, retrying until the post-condition holds.</summary>
        public void ClickButton<TResult>(Func<Func<IWebElement>, TResult> expectedConditionAfterAction)
        {
            Do(
                element => element().ClickButton(),
                expectedConditionAfterAction,
                ExpectedConditions.ElementIsVisible(_selector));
        }

        /// <summary>Context-click (right click), retrying until the post-condition holds.</summary>
        public void RightClick<TResult>(Func<Func<IWebElement>, TResult> expectedConditionAfterAction)
        {
            Do(
                element => new Actions(element().Driver()).ContextClick(element()).Build().Perform(),
                expectedConditionAfterAction,
                ExpectedConditions.ElementIsVisible(_selector));
        }

        public void AssertExists(string message = "")
        {
            Assert(e =>
            {
                Assertions.AreNotEqual(null, e(), message);
                return true;
            }, message ?? "Exists");
        }

        public void AssertIsMissing()
        {
            Assert(element => _browser.IsElementMissing(_selector), "Is missing");
        }

        public void AssertIsVisible()
        {
            Do(
                element => Assertions.AreEqual(true, element().Displayed),
                element => element().Displayed,
                ExpectedConditions.ElementIsVisible(_selector),
                logMessage: "Is visible");
        }

        public void AssertHasText(string expectedText)
        {
            Assert(e => e().Text == expectedText, $"Has text \"{expectedText}\"");
        }

        /// <summary>
        /// Runs an assertion through the retry machinery: the action is a
        /// no-op and the assertion acts as the post-condition.
        /// </summary>
        public void Assert(Func<Func<IWebElement>, bool> assertion, string logMessage)
        {
            Do(
                _ => { },
                _ => assertion(() => Element),
                logMessage: logMessage);
        }

        public void Do<TResult>(Action<Func<IWebElement>> seleniumAction,
            Func<Func<IWebElement>, TResult> expectedConditionAfterAction,
            int maxRetries = 10,
            WaitTime waitTime = WaitTime.Short,
            [CallerMemberName] string caller = "",
            string logMessage = null)
        {
            Do(seleniumAction, expectedConditionAfterAction, (Func<IWebDriver, bool>)null, maxRetries, waitTime, caller, logMessage);
        }

        public void Do<TResult1, TResult2>(Action<Func<IWebElement>> seleniumAction,
            Func<Func<IWebElement>, TResult1> expectedConditionAfterAction,
            Func<IWebDriver, TResult2> errorWaitCondition = null,
            int maxRetries = 10,
            WaitTime waitTime = WaitTime.Short,
            [CallerMemberName] string caller = "",
            string logMessage = null)
        {
            var fullLogMessage = $"{caller} - {_selector}";
            if (logMessage != null)
                fullLogMessage += " - " + logMessage;

            Do(_browser, () => Element, seleniumAction, expectedConditionAfterAction, errorWaitCondition, maxRetries, waitTime, caller, _configuration, fullLogMessage);
        }

        /// <summary>
        /// Core retry loop: performs the action, then waits for the expected
        /// condition.  On failure it waits (on errorWaitCondition or a short
        /// sleep) and repeats up to maxRetries times; the last relevant
        /// exception is rethrown with its original stack trace.  A special
        /// case: after the first attempt, an action exception is tolerated as
        /// long as the expected condition is met (e.g. a click that landed but
        /// threw a stale-element error).
        /// </summary>
        public static void Do<TResult1, TResult2>(RemoteWebDriver browser,
            Func<IWebElement> webElementSource,
            Action<Func<IWebElement>> seleniumAction,
            Func<Func<IWebElement>, TResult1> expectedConditionAfterAction,
            Func<IWebDriver, TResult2> errorWaitCondition = null,
            int maxRetries = 10,
            WaitTime waitTime = WaitTime.Short,
            [CallerMemberName] string caller = "",
            StubbornConfiguration configuration = null,
            string logMessage = null)
        {
            configuration = configuration ?? StubbornConfiguration.Default;

            configuration.Log.Info(logMessage ?? caller);

            var wait = new WebDriverWait(browser, waitTime.ToTimeSpan());

            int attemptNo = 0;
            while (true)
            {
                configuration.BeforeDoActions.ForEach(action => action(browser));

                var actionException = Try(() => seleniumAction(webElementSource));

                configuration.BetweenDoActions.ForEach(action => action(browser));

                var expectedConditionException = Try(() => wait.Until(expectedConditionAfterAction, webElementSource));

                if (actionException == null && expectedConditionException == null)
                    return;

                if (attemptNo > 0 && actionException != null && expectedConditionException == null)
                {
                    configuration.Log.Warning($"Action threw exception (\"{actionException.Message}\") but excepted condition is met");
                    return;
                }

                var relevantException = actionException ?? expectedConditionException;

                if (attemptNo >= maxRetries)
                    ExceptionDispatchInfo.Capture(relevantException).Throw();

                attemptNo++;

                try
                {
                    if (errorWaitCondition != null)
                        wait.Until(errorWaitCondition);
                    else
                        Thread.Sleep(WaitTime.Short.ToTimeSpan());
                }
                catch (Exception)
                {
                    // Ignore wait errors - just try to perform the core action again
                }

                configuration.Log.Warning($"Repeating {caller}");
            }
        }

        IWebElement ISearchContext.FindElement(By @by)
        {
            return Element.FindElement(@by);
        }

        ReadOnlyCollection<IWebElement> ISearchContext.FindElements(By @by)
        {
            return Element.FindElements(@by);
        }

        IWebDriver IWrapsDriver.WrappedDriver => _browser;

        /// <summary>Runs the action and returns the thrown exception, or null on success.</summary>
        private static Exception Try(Action action)
        {
            try
            {
                action();
            }
            catch (Exception e)
            {
                return e;
            }
            return null;
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using System;

namespace OpenSim.Region.CoreModules.World.Terrain.PaintBrushes
{
    /// <summary>
    /// Hydraulic Erosion Brush
    /// </summary>
    public class ErodeSphere : ITerrainPaintableEffect
    {
        private const double rainHeight = 0.2;                      // water added per unit of brush strength
        private const int rounds = 10;                              // simulation iterations per paint stroke
        private const NeighbourSystem type = NeighbourSystem.Moore; // 8-connected neighbourhood
        private const double waterSaturation = 0.30;                // max sediment fraction water can carry

        #region Supporting Functions

        /// <summary>
        /// Returns the (dx, dy) offset of the index-th neighbour (0-based)
        /// under the given neighbourhood system.  Index 4 in the Moore system
        /// is the centre cell itself (see NEIGHBOUR_ME below).
        /// </summary>
        private static int[] Neighbours(NeighbourSystem neighbourType, int index)
        {
            int[] coord = new int[2];

            // Internal tables are 1-based.
            index++;

            switch (neighbourType)
            {
                case NeighbourSystem.Moore:
                    switch (index)
                    {
                        case 1: coord[0] = -1; coord[1] = -1; break;
                        case 2: coord[0] = -0; coord[1] = -1; break;
                        case 3: coord[0] = +1; coord[1] = -1; break;
                        case 4: coord[0] = -1; coord[1] = -0; break;
                        case 5: coord[0] = -0; coord[1] = -0; break;
                        case 6: coord[0] = +1; coord[1] = -0; break;
                        case 7: coord[0] = -1; coord[1] = +1; break;
                        case 8: coord[0] = -0; coord[1] = +1; break;
                        case 9: coord[0] = +1; coord[1] = +1; break;
                        default: break;
                    }
                    break;

                case NeighbourSystem.VonNeumann:
                    switch (index)
                    {
                        case 1: coord[0] = 0; coord[1] = -1; break;
                        case 2: coord[0] = -1; coord[1] = 0; break;
                        case 3: coord[0] = +1; coord[1] = 0; break;
                        case 4: coord[0] = 0; coord[1] = +1; break;
                        case 5: coord[0] = -0; coord[1] = -0; break;
                        default: break;
                    }
                    break;
            }

            return coord;
        }

        private enum NeighbourSystem
        {
            Moore,
            VonNeumann
        } ;

        #endregion

        #region ITerrainPaintableEffect Members

        /// <summary>
        /// Applies hydraulic erosion around (rx, ry): rains water onto the
        /// brush area, dissolves terrain into sediment, lets water flow
        /// downhill carrying sediment, then evaporates water and redeposits
        /// the excess sediment.  Only cells where mask[x,y] is true have
        /// their height modified.
        /// </summary>
        public void PaintEffect(ITerrainChannel map, bool[,] mask, double rx, double ry, double rz, double strength, double duration)
        {
            strength = TerrainUtil.MetersToSphericalStrength(strength);

            int x, y;

            // Using one 'rain' round for this, so skipping a useless loop
            // Will need to adapt back in for the Flood brush

            ITerrainChannel water = new TerrainChannel(map.Width, map.Height);
            ITerrainChannel sediment = new TerrainChannel(map.Width, map.Height);

            // Fill with rain
            for (x = 0; x < water.Width; x++)
                for (y = 0; y < water.Height; y++)
                    water[x, y] = Math.Max(0.0, TerrainUtil.SphericalFactor(x, y, rx, ry, strength) * rainHeight * duration);

            for (int i = 0; i < rounds; i++)
            {
                // Erode underlying terrain: dissolve height into sediment in
                // proportion to the standing water.
                for (x = 0; x < water.Width; x++)
                {
                    for (y = 0; y < water.Height; y++)
                    {
                        if (mask[x,y])
                        {
                            const double solConst = (1.0 / rounds);
                            double sedDelta = water[x, y] * solConst;
                            map[x, y] -= sedDelta;
                            sediment[x, y] += sedDelta;
                        }
                    }
                }

                // Move water
                for (x = 0; x < water.Width; x++)
                {
                    for (y = 0; y < water.Height; y++)
                    {
                        if (water[x, y] <= 0)
                            continue;

                        // Step 1. Calculate average of neighbours

                        int neighbours = 0;
                        double altitudeTotal = 0.0;
                        double altitudeMe = map[x, y] + water[x, y];

                        const int NEIGHBOUR_ME = 4;  // index of the centre cell in the Moore table
                        const int NEIGHBOUR_MAX = 9;

                        for (int j = 0; j < NEIGHBOUR_MAX; j++)
                        {
                            if (j != NEIGHBOUR_ME)
                            {
                                int[] coords = Neighbours(type, j);

                                coords[0] += x;
                                coords[1] += y;

                                if (coords[0] > map.Width - 1)
                                    continue;
                                if (coords[1] > map.Height - 1)
                                    continue;
                                if (coords[0] < 0)
                                    continue;
                                if (coords[1] < 0)
                                    continue;

                                // Calculate total height of this neighbour
                                double altitudeNeighbour = water[coords[0], coords[1]] + map[coords[0], coords[1]];

                                // If it's greater than me...
                                // NOTE(review): the comment says "greater" but the
                                // test selects LOWER neighbours (delta < 0), i.e.
                                // cells water could flow into — presumably the
                                // comment is stale, not the code; confirm.
                                if (altitudeNeighbour - altitudeMe < 0)
                                {
                                    // Add it to our calculations
                                    neighbours++;
                                    altitudeTotal += altitudeNeighbour;
                                }
                            }
                        }

                        if (neighbours == 0)
                            continue;

                        double altitudeAvg = altitudeTotal / neighbours;

                        // Step 2. Allocate water to neighbours.
                        for (int j = 0; j < NEIGHBOUR_MAX; j++)
                        {
                            if (j != NEIGHBOUR_ME)
                            {
                                int[] coords = Neighbours(type, j);

                                coords[0] += x;
                                coords[1] += y;

                                if (coords[0] > map.Width - 1)
                                    continue;
                                if (coords[1] > map.Height - 1)
                                    continue;
                                if (coords[0] < 0)
                                    continue;
                                if (coords[1] < 0)
                                    continue;

                                // Skip if we dont have water to begin with.
                                // NOTE(review): checks < 0 — a cell with exactly
                                // zero water still proceeds (and would divide by
                                // zero below); presumably <= 0 was intended.
                                if (water[x, y] < 0)
                                    continue;

                                // Calculate our delta average
                                double altitudeDelta = altitudeMe - altitudeAvg;

                                if (altitudeDelta < 0)
                                    continue;

                                // Calculate how much water we can move
                                double waterMin = Math.Min(water[x, y], altitudeDelta);
                                double waterDelta = waterMin * ((water[coords[0], coords[1]] + map[coords[0], coords[1]])
                                                                / altitudeTotal);

                                // NOTE(review): waterDelta is computed but the water
                                // channel itself is never updated here — only
                                // sediment moves.  Confirm whether water transfer
                                // was intentionally dropped or is a latent bug.
                                double sedimentDelta = sediment[x, y] * (waterDelta / water[x, y]);

                                if (sedimentDelta > 0)
                                {
                                    sediment[x, y] -= sedimentDelta;
                                    sediment[coords[0], coords[1]] += sedimentDelta;
                                }
                            }
                        }
                    }
                }

                // Evaporate
                for (x = 0; x < water.Width; x++)
                {
                    for (y = 0; y < water.Height; y++)
                    {
                        water[x, y] *= 1.0 - (rainHeight / rounds);

                        // Sediment beyond what the remaining water can carry
                        // is deposited back onto the terrain (masked cells only).
                        double waterCapacity = waterSaturation * water[x, y];

                        double sedimentDeposit = sediment[x, y] - waterCapacity;
                        if (sedimentDeposit > 0)
                        {
                            if (mask[x,y])
                            {
                                sediment[x, y] -= sedimentDeposit;
                                map[x, y] += sedimentDeposit;
                            }
                        }
                    }
                }
            }

            // Deposit any remainder (should be minimal)
            for (x = 0; x < water.Width; x++)
                for (y = 0; y < water.Height; y++)
                    if (mask[x,y] && sediment[x, y] > 0)
                        map[x, y] += sediment[x, y];
        }

        #endregion
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.ComponentModel.Composition;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.CodeFixes.Suppression;
using Microsoft.CodeAnalysis.CodeRefactorings;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Editor.Host;
using Microsoft.CodeAnalysis.Editor.Shared;
using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.CodeAnalysis.Editor.Shared.Options;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Text;
using Microsoft.CodeAnalysis.Text.Shared.Extensions;
using Microsoft.VisualStudio.Language.Intellisense;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.Editor.Implementation.Suggestions
{
    // Key used to group code fixes: the diagnostic being addressed plus the priority of the fix.
    using CodeFixGroupKey = Tuple<DiagnosticData, CodeActionPriority>;

    /// <summary>
    /// MEF-exported factory for the light-bulb (suggested actions) source on Roslyn
    /// content-type buffers. Each created <c>Source</c> computes code fixes and
    /// refactorings for a text view / buffer pair.
    /// </summary>
    [Export(typeof(ISuggestedActionsSourceProvider))]
    [VisualStudio.Utilities.ContentType(ContentTypeNames.RoslynContentType)]
    [VisualStudio.Utilities.Name("Roslyn Code Fix")]
    [VisualStudio.Utilities.Order]
    internal class SuggestedActionsSourceProvider : ISuggestedActionsSourceProvider
    {
        // Per-language telemetry ids reported by Source.TryGetTelemetryId.
        private static readonly Guid s_CSharpSourceGuid = new Guid("b967fea8-e2c3-4984-87d4-71a38f49e16a");
        private static readonly Guid s_visualBasicSourceGuid = new Guid("4de30e93-3e0c-40c2-a4ba-1124da4539f6");

        // Sentinel meaning "no solution version has been reported yet".
        private const int InvalidSolutionVersion = -1;

        private readonly ICodeRefactoringService _codeRefactoringService;
        private readonly IDiagnosticAnalyzerService _diagnosticService;
        private readonly ICodeFixService _codeFixService;
        private readonly ICodeActionEditHandlerService _editHandler;
        private readonly IAsynchronousOperationListener _listener;
        private readonly IWaitIndicator _waitIndicator;

        [ImportingConstructor]
        public SuggestedActionsSourceProvider(
            ICodeRefactoringService codeRefactoringService,
            IDiagnosticAnalyzerService diagnosticService,
            ICodeFixService codeFixService,
            ICodeActionEditHandlerService editHandler,
            IWaitIndicator waitIndicator,
            [ImportMany] IEnumerable<Lazy<IAsynchronousOperationListener, FeatureMetadata>> asyncListeners)
        {
            _codeRefactoringService = codeRefactoringService;
            _diagnosticService = diagnosticService;
            _codeFixService = codeFixService;
            _editHandler = editHandler;
            _waitIndicator = waitIndicator;
            // Aggregate all exported listeners under the LightBulb feature name.
            _listener = new AggregateAsynchronousOperationListener(asyncListeners, FeatureAttribute.LightBulb);
        }

        /// <summary>
        /// Creates a suggested-actions source bound to the given view and buffer.
        /// </summary>
        public ISuggestedActionsSource CreateSuggestedActionsSource(ITextView textView, ITextBuffer textBuffer)
        {
            Contract.ThrowIfNull(textView);
            Contract.ThrowIfNull(textBuffer);

            return new Source(this, textView, textBuffer);
        }

        /// <summary>
        /// Computes suggested actions (fixes and refactorings) for one view/buffer pair.
        /// Some members assert foreground-thread affinity; see AssertIsForeground calls.
        /// </summary>
        private class Source : ForegroundThreadAffinitizedObject, ISuggestedActionsSource
        {
            // state that will be only reset when source is disposed.
            private SuggestedActionsSourceProvider _owner;
            private ITextView _textView;
            private ITextBuffer _subjectBuffer;
            private WorkspaceRegistration _registration;

            // mutable state
            private Workspace _workspace;
            private int _lastSolutionVersionReported;

            public Source(SuggestedActionsSourceProvider owner, ITextView textView, ITextBuffer textBuffer)
            {
                _owner = owner;
                _textView = textView;
                _textView.Closed += OnTextViewClosed;
                _subjectBuffer = textBuffer;
                _registration = Workspace.GetWorkspaceRegistration(textBuffer.AsTextContainer());
                _lastSolutionVersionReported = InvalidSolutionVersion;

                // Listen for diagnostic updates so we can raise SuggestedActionsChanged.
                var updateSource = (IDiagnosticUpdateSource)_owner._diagnosticService;
                updateSource.DiagnosticsUpdated += OnDiagnosticsUpdated;

                if (_registration.Workspace != null)
                {
                    _workspace = _registration.Workspace;
                    _workspace.DocumentActiveContextChanged += OnActiveContextChanged;
                }

                _registration.WorkspaceChanged += OnWorkspaceChanged;
            }

            public event EventHandler<EventArgs> SuggestedActionsChanged;

            /// <summary>
            /// Maps the current document's project language to its telemetry guid.
            /// Returns false when the buffer has no current document or the language
            /// is neither C# nor VB.
            /// </summary>
            public bool TryGetTelemetryId(out Guid telemetryId)
            {
                telemetryId = default(Guid);

                var workspace = _workspace;
                if (workspace == null || _subjectBuffer == null)
                {
                    return false;
                }

                var documentId = workspace.GetDocumentIdInCurrentContext(_subjectBuffer.AsTextContainer());
                if (documentId == null)
                {
                    return false;
                }

                var project = workspace.CurrentSolution.GetProject(documentId.ProjectId);
                if (project == null)
                {
                    return false;
                }

                switch (project.Language)
                {
                    case LanguageNames.CSharp:
                        telemetryId = s_CSharpSourceGuid;
                        return true;
                    case LanguageNames.VisualBasic:
                        telemetryId = s_visualBasicSourceGuid;
                        return true;
                    default:
                        return false;
                }
            }

            /// <summary>
            /// Computes code fixes and refactorings for <paramref name="range"/> and
            /// concatenates them (fixes first). Must run on the foreground thread.
            /// Returns null when the snapshot no longer matches a current document.
            /// </summary>
            public IEnumerable<SuggestedActionSet> GetSuggestedActions(ISuggestedActionCategorySet requestedActionCategories, SnapshotSpan range, CancellationToken cancellationToken)
            {
                AssertIsForeground();

                using (Logger.LogBlock(FunctionId.SuggestedActions_GetSuggestedActions, cancellationToken))
                {
                    var documentAndSnapshot = GetMatchingDocumentAndSnapshotAsync(range.Snapshot, cancellationToken).WaitAndGetResult(cancellationToken);
                    if (!documentAndSnapshot.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return null;
                    }

                    var document = documentAndSnapshot.Value.Item1;
                    var workspace = document.Project.Solution.Workspace;
                    var supportsFeatureService = workspace.Services.GetService<IDocumentSupportsFeatureService>();

                    var fixes = GetCodeFixes(supportsFeatureService, requestedActionCategories, workspace, document, range, cancellationToken);
                    var refactorings = GetRefactorings(supportsFeatureService, requestedActionCategories, workspace, document, range, cancellationToken);

                    // Either list may be null; only concat when both are present.
                    var result = fixes == null ? refactorings :
                                 refactorings == null ? fixes : fixes.Concat(refactorings);
                    if (result == null)
                    {
                        return null;
                    }

                    var allActionSets = result.ToList();
                    allActionSets = InlineActionSetsIfDesirable(allActionSets);
                    return allActionSets;
                }
            }

            private List<SuggestedActionSet> InlineActionSetsIfDesirable(List<SuggestedActionSet> allActionSets)
            {
                // If we only have a single set of items, and that set only has three max suggestion
                // offered. Then we can consider inlining any nested actions into the top level list.
                // (but we only do this if the parent of the nested actions isn't invokable itself).
                if (allActionSets.Sum(a => a.Actions.Count()) > 3)
                {
                    return allActionSets;
                }

                return allActionSets.Select(InlineActions).ToList();
            }

            /// <summary>
            /// An action can be inlined when it is a non-invokable container whose
            /// only purpose is to hold nested code actions.
            /// </summary>
            private bool IsInlineable(ISuggestedAction action)
            {
                var suggestedAction = action as SuggestedAction;
                return suggestedAction != null &&
                    !suggestedAction.CodeAction.IsInvokable &&
                    suggestedAction.CodeAction.HasCodeActions;
            }

            private SuggestedActionSet InlineActions(SuggestedActionSet actionSet)
            {
                if (!actionSet.Actions.Any(IsInlineable))
                {
                    return actionSet;
                }

                var newActions = new List<ISuggestedAction>();
                foreach (var action in actionSet.Actions)
                {
                    if (IsInlineable(action))
                    {
                        // Looks like something we can inline.
                        var childActionSets = ((SuggestedAction)action).GetActionSets();
                        if (childActionSets.Length != 1)
                        {
                            // Only inline when the container holds exactly one nested set.
                            return actionSet;
                        }

                        newActions.AddRange(childActionSets[0].Actions);
                        continue;
                    }

                    newActions.Add(action);
                }

                return new SuggestedActionSet(newActions, actionSet.Title, actionSet.Priority, actionSet.ApplicableToSpan);
            }

            /// <summary>
            /// Computes, filters and organizes code fixes for the given range.
            /// Returns null when fixes are not supported/requested for this document.
            /// </summary>
            private IEnumerable<SuggestedActionSet> GetCodeFixes(
                IDocumentSupportsFeatureService supportsFeatureService,
                ISuggestedActionCategorySet requestedActionCategories,
                Workspace workspace,
                Document document,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                this.AssertIsForeground();

                if (_owner._codeFixService != null &&
                    supportsFeatureService.SupportsCodeFixes(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.CodeFix))
                {
                    // We only include suppressions if lightbulb is asking for everything.
                    // If the light bulb is only asking for code fixes, then we don't include suppressions.
                    var includeSuppressionFixes = requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Any);

                    // Compute on the thread pool, then block this (foreground) thread for the result.
                    var fixes = Task.Run(
                        async () =>
                        {
                            var stream = await _owner._codeFixService.GetFixesAsync(
                                document, range.Span.ToTextSpan(), includeSuppressionFixes, cancellationToken).ConfigureAwait(false);
                            return stream.ToList();
                        },
                        cancellationToken).WaitAndGetResult(cancellationToken);

                    var filteredFixes = FilterOnUIThread(fixes, workspace);
                    return OrganizeFixes(workspace, filteredFixes, hasSuppressionFixes: includeSuppressionFixes);
                }

                return null;
            }

            private List<CodeFixCollection> FilterOnUIThread(List<CodeFixCollection> collections, Workspace workspace)
            {
                this.AssertIsForeground();

                return collections.Select(c => FilterOnUIThread(c, workspace)).WhereNotNull().ToList();
            }

            /// <summary>
            /// Drops fixes whose actions report themselves inapplicable. Returns null
            /// when nothing survives, the original collection when everything does.
            /// </summary>
            private CodeFixCollection FilterOnUIThread(CodeFixCollection collection, Workspace workspace)
            {
                this.AssertIsForeground();

                var applicableFixes = collection.Fixes.Where(f => IsApplicable(f.Action, workspace)).ToList();
                return applicableFixes.Count == 0
                    ? null
                    : applicableFixes.Count == collection.Fixes.Length
                        ? collection
                        : new CodeFixCollection(collection.Provider, collection.TextSpan, applicableFixes, collection.FixAllContext);
            }

            private bool IsApplicable(CodeAction action, Workspace workspace)
            {
                if (!action.PerformFinalApplicabilityCheck)
                {
                    // If we don't even need to perform the final applicability check,
                    // then the code action is applicable.
                    return true;
                }

                // Otherwise, defer to the action to make the decision.
                this.AssertIsForeground();
                return action.IsApplicable(workspace);
            }

            private List<CodeRefactoring> FilterOnUIThread(List<CodeRefactoring> refactorings, Workspace workspace)
            {
                return refactorings.Select(r => FilterOnUIThread(r, workspace)).WhereNotNull().ToList();
            }

            private CodeRefactoring FilterOnUIThread(CodeRefactoring refactoring, Workspace workspace)
            {
                var actions = refactoring.Actions.Where(a => IsApplicable(a, workspace)).ToList();
                return actions.Count == 0
                    ? null
                    : actions.Count == refactoring.Actions.Count
                        ? refactoring
                        : new CodeRefactoring(refactoring.Provider, actions);
            }

            /// <summary>
            /// Arrange fixes into groups based on the issue (diagnostic being fixed) and prioritize these groups.
            /// </summary>
            private IEnumerable<SuggestedActionSet> OrganizeFixes(Workspace workspace, IEnumerable<CodeFixCollection> fixCollections, bool hasSuppressionFixes)
            {
                var map = ImmutableDictionary.CreateBuilder<CodeFixGroupKey, IList<SuggestedAction>>();
                var order = ImmutableArray.CreateBuilder<CodeFixGroupKey>();

                // First group fixes by diagnostic and priority.
                GroupFixes(workspace, fixCollections, map, order, hasSuppressionFixes);

                // Then prioritize between the groups.
                return PrioritizeFixGroups(map.ToImmutable(), order.ToImmutable());
            }

            /// <summary>
            /// Groups fixes by the diagnostic being addressed by each fix.
            /// </summary>
            private void GroupFixes(Workspace workspace, IEnumerable<CodeFixCollection> fixCollections, IDictionary<CodeFixGroupKey, IList<SuggestedAction>> map, IList<CodeFixGroupKey> order, bool hasSuppressionFixes)
            {
                foreach (var fixCollection in fixCollections)
                {
                    var fixes = fixCollection.Fixes;
                    var fixCount = fixes.Length;

                    // Shared factory for the "fix all" sub-set attached to each action.
                    Func<CodeAction, SuggestedActionSet> getFixAllSuggestedActionSet =
                        codeAction => CodeFixSuggestedAction.GetFixAllSuggestedActionSet(
                            codeAction, fixCount, fixCollection.FixAllContext, workspace, _subjectBuffer,
                            _owner._editHandler, _owner._waitIndicator, _owner._listener);

                    foreach (var fix in fixes)
                    {
                        // Suppression fixes are handled below.
                        if (!(fix.Action is SuppressionCodeAction))
                        {
                            SuggestedAction suggestedAction;
                            if (fix.Action.HasCodeActions)
                            {
                                // Container action: wrap each nested action, then wrap the
                                // container itself around the resulting set.
                                var nestedActions = new List<SuggestedAction>();
                                foreach (var nestedAction in fix.Action.GetCodeActions())
                                {
                                    nestedActions.Add(new CodeFixSuggestedAction(workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator,
                                        fix, nestedAction, fixCollection.Provider, getFixAllSuggestedActionSet(nestedAction), _owner._listener));
                                }

                                var diag = fix.PrimaryDiagnostic;
                                var set = new SuggestedActionSet(nestedActions, SuggestedActionSetPriority.Medium, diag.Location.SourceSpan.ToSpan());

                                suggestedAction = new SuggestedAction(workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator,
                                    fix.Action, fixCollection.Provider, _owner._listener, new[] { set });
                            }
                            else
                            {
                                suggestedAction = new CodeFixSuggestedAction(
                                    workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator, fix, fix.Action,
                                    fixCollection.Provider, getFixAllSuggestedActionSet(fix.Action), _owner._listener);
                            }

                            AddFix(fix, suggestedAction, map, order);
                        }
                    }

                    if (hasSuppressionFixes)
                    {
                        // Add suppression fixes to the end of a given SuggestedActionSet so that they always show up last in a group.
                        foreach (var fix in fixes)
                        {
                            if (fix.Action is SuppressionCodeAction)
                            {
                                SuggestedAction suggestedAction;
                                if (fix.Action.HasCodeActions)
                                {
                                    // Note: the fix-all factory delegate itself is passed here,
                                    // not its result, so nested actions are resolved lazily.
                                    suggestedAction = new SuppressionSuggestedAction(
                                        workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator, fix,
                                        fixCollection.Provider, getFixAllSuggestedActionSet, _owner._listener);
                                }
                                else
                                {
                                    suggestedAction = new CodeFixSuggestedAction(
                                        workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator, fix, fix.Action,
                                        fixCollection.Provider, getFixAllSuggestedActionSet(fix.Action), _owner._listener);
                                }

                                AddFix(fix, suggestedAction, map, order);
                            }
                        }
                    }
                }
            }

            /// <summary>
            /// Adds an action under its (diagnostic, priority) group, recording the
            /// first-seen order of each group so results stay deterministic.
            /// </summary>
            private static void AddFix(CodeFix fix, SuggestedAction suggestedAction, IDictionary<CodeFixGroupKey, IList<SuggestedAction>> map, IList<CodeFixGroupKey> order)
            {
                var diag = fix.GetPrimaryDiagnosticData();
                var groupKey = new CodeFixGroupKey(diag, fix.Action.Priority);
                if (!map.ContainsKey(groupKey))
                {
                    order.Add(groupKey);
                    map[groupKey] = ImmutableArray.CreateBuilder<SuggestedAction>();
                }

                map[groupKey].Add(suggestedAction);
            }

            /// <summary>
            /// Return prioritized set of fix groups such that fix group for suppression always show up at the bottom of the list.
            /// </summary>
            /// <remarks>
            /// Fix groups are returned in priority order determined based on <see cref="ExtensionOrderAttribute"/>.
            /// Priority for all <see cref="SuggestedActionSet"/>s containing fixes is set to <see cref="SuggestedActionSetPriority.Medium"/> by default.
            /// The only exception is the case where a <see cref="SuggestedActionSet"/> only contains suppression fixes -
            /// the priority of such <see cref="SuggestedActionSet"/>s is set to <see cref="SuggestedActionSetPriority.None"/> so that suppression fixes
            /// always show up last after all other fixes (and refactorings) for the selected line of code.
            /// </remarks>
            private static IEnumerable<SuggestedActionSet> PrioritizeFixGroups(IDictionary<CodeFixGroupKey, IList<SuggestedAction>> map, IList<CodeFixGroupKey> order)
            {
                var sets = ImmutableArray.CreateBuilder<SuggestedActionSet>();
                foreach (var diag in order)
                {
                    var actions = map[diag];
                    foreach (var group in actions.GroupBy(a => a.Priority))
                    {
                        var priority = GetSuggestedActionSetPriority(group.Key);

                        // diagnostic from things like build shouldn't reach here since we don't support LB for those diagnostics
                        Contract.Requires(diag.Item1.HasTextSpan);
                        sets.Add(new SuggestedActionSet(group, priority, diag.Item1.TextSpan.ToSpan()));
                    }
                }

                return sets.ToImmutable();
            }

            /// <summary>
            /// Maps a code-action priority onto the editor's suggested-action-set priority.
            /// </summary>
            private static SuggestedActionSetPriority GetSuggestedActionSetPriority(CodeActionPriority key)
            {
                switch (key)
                {
                    case CodeActionPriority.None:
                        return SuggestedActionSetPriority.None;
                    case CodeActionPriority.Low:
                        return SuggestedActionSetPriority.Low;
                    case CodeActionPriority.Medium:
                        return SuggestedActionSetPriority.Medium;
                    case CodeActionPriority.High:
                        return SuggestedActionSetPriority.High;
                    default:
                        throw new InvalidOperationException();
                }
            }

            /// <summary>
            /// Computes, filters and organizes refactorings for the current selection.
            /// Returns null when refactorings are disabled, unsupported, or not requested.
            /// </summary>
            private IEnumerable<SuggestedActionSet> GetRefactorings(
                IDocumentSupportsFeatureService supportsFeatureService,
                ISuggestedActionCategorySet requestedActionCategories,
                Workspace workspace,
                Document document,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                this.AssertIsForeground();

                var optionService = workspace.Services.GetService<IOptionService>();
                if (optionService.GetOption(EditorComponentOnOffOptions.CodeRefactorings) &&
                    _owner._codeRefactoringService != null &&
                    supportsFeatureService.SupportsRefactorings(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Refactoring))
                {
                    // Get the selection while on the UI thread.
                    var selection = TryGetCodeRefactoringSelection(_subjectBuffer, _textView, range);
                    if (!selection.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return null;
                    }

                    var refactorings = Task.Run(
                        async () =>
                        {
                            var stream = await _owner._codeRefactoringService.GetRefactoringsAsync(
                                document, selection.Value, cancellationToken).ConfigureAwait(false);
                            return stream.ToList();
                        },
                        cancellationToken).WaitAndGetResult(cancellationToken);

                    var filteredRefactorings = FilterOnUIThread(refactorings, workspace);
                    return filteredRefactorings.Select(r => OrganizeRefactorings(workspace, r));
                }

                return null;
            }

            /// <summary>
            /// Arrange refactorings into groups.
            /// </summary>
            /// <remarks>
            /// Refactorings are returned in priority order determined based on <see cref="ExtensionOrderAttribute"/>.
            /// Priority for all <see cref="SuggestedActionSet"/>s containing refactorings is set to <see cref="SuggestedActionSetPriority.Low"/>
            /// and should show up after fixes but before suppression fixes in the light bulb menu.
            /// </remarks>
            private SuggestedActionSet OrganizeRefactorings(Workspace workspace, CodeRefactoring refactoring)
            {
                var refactoringSuggestedActions = ImmutableArray.CreateBuilder<SuggestedAction>();
                foreach (var a in refactoring.Actions)
                {
                    refactoringSuggestedActions.Add(new CodeRefactoringSuggestedAction(
                        workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator, a, refactoring.Provider, _owner._listener));
                }

                return new SuggestedActionSet(refactoringSuggestedActions.ToImmutable(), SuggestedActionSetPriority.Low);
            }

            /// <summary>
            /// Cheap asynchronous check used by the editor to decide whether to show the
            /// light bulb at all for <paramref name="range"/>.
            /// </summary>
            public async Task<bool> HasSuggestedActionsAsync(ISuggestedActionCategorySet requestedActionCategories, SnapshotSpan range, CancellationToken cancellationToken)
            {
                // Explicitly hold onto below fields in locals and use these locals throughout this code path to avoid crashes
                // if these fields happen to be cleared by Dispose() below. This is required since this code path involves
                // code that can run asynchronously from background thread.
                var view = _textView;
                var buffer = _subjectBuffer;
                var provider = _owner;

                if (view == null || buffer == null || provider == null)
                {
                    return false;
                }

                using (var asyncToken = provider._listener.BeginAsyncOperation("HasSuggestedActionsAsync"))
                {
                    var documentAndSnapshot = await GetMatchingDocumentAndSnapshotAsync(range.Snapshot, cancellationToken).ConfigureAwait(false);
                    if (!documentAndSnapshot.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return false;
                    }

                    var document = documentAndSnapshot.Value.Item1;
                    var workspace = document.Project.Solution.Workspace;
                    var supportsFeatureService = workspace.Services.GetService<IDocumentSupportsFeatureService>();

                    // Short-circuits: refactorings are only probed when no fix is found.
                    return await HasFixesAsync(
                        supportsFeatureService, requestedActionCategories, provider, document, range, cancellationToken).ConfigureAwait(false) ||
                        await HasRefactoringsAsync(
                            supportsFeatureService, requestedActionCategories, provider, document, buffer, view, range, cancellationToken).ConfigureAwait(false);
                }
            }

            /// <summary>
            /// True when at least one diagnostic in range has an associated fix.
            /// </summary>
            private async Task<bool> HasFixesAsync(
                IDocumentSupportsFeatureService supportsFeatureService,
                ISuggestedActionCategorySet requestedActionCategories,
                SuggestedActionsSourceProvider provider,
                Document document,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                if (provider._codeFixService != null &&
                    supportsFeatureService.SupportsCodeFixes(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.CodeFix))
                {
                    // We only consider suppressions if lightbulb is asking for everything.
                    // If the light bulb is only asking for code fixes, then we don't consider suppressions.
                    var considerSuppressionFixes = requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Any);
                    var result = await Task.Run(
                        async () => await provider._codeFixService.GetFirstDiagnosticWithFixAsync(
                            document, range.Span.ToTextSpan(), considerSuppressionFixes, cancellationToken).ConfigureAwait(false),
                        cancellationToken).ConfigureAwait(false);

                    if (result.HasFix)
                    {
                        Logger.Log(FunctionId.SuggestedActions_HasSuggestedActionsAsync);
                        return true;
                    }

                    if (result.PartialResult)
                    {
                        // reset solution version number so that we can raise suggested action changed event
                        Volatile.Write(ref _lastSolutionVersionReported, InvalidSolutionVersion);
                        return false;
                    }
                }

                return false;
            }

            /// <summary>
            /// True when the current single-span selection has at least one refactoring.
            /// May hop to the UI thread to read the selection.
            /// </summary>
            private async Task<bool> HasRefactoringsAsync(
                IDocumentSupportsFeatureService supportsFeatureService,
                ISuggestedActionCategorySet requestedActionCategories,
                SuggestedActionsSourceProvider provider,
                Document document,
                ITextBuffer buffer,
                ITextView view,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                var optionService = document.Project.Solution.Workspace.Services.GetService<IOptionService>();
                if (optionService.GetOption(EditorComponentOnOffOptions.CodeRefactorings) &&
                    provider._codeRefactoringService != null &&
                    supportsFeatureService.SupportsRefactorings(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Refactoring))
                {
                    TextSpan? selection = null;
                    if (IsForeground())
                    {
                        // This operation needs to happen on UI thread because it needs to access textView.Selection.
                        selection = TryGetCodeRefactoringSelection(buffer, view, range);
                    }
                    else
                    {
                        await InvokeBelowInputPriority(() =>
                        {
                            // This operation needs to happen on UI thread because it needs to access textView.Selection.
                            selection = TryGetCodeRefactoringSelection(buffer, view, range);
                        }).ConfigureAwait(false);
                    }

                    if (!selection.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return false;
                    }

                    return await Task.Run(
                        async () => await provider._codeRefactoringService.HasRefactoringsAsync(
                            document, selection.Value, cancellationToken).ConfigureAwait(false),
                        cancellationToken).ConfigureAwait(false);
                }

                return false;
            }

            /// <summary>
            /// Maps the view's selection down to the subject buffer and returns it as a
            /// text span, or null when the selection is unusable for refactorings.
            /// Must be called on the UI thread (reads view.Selection).
            /// </summary>
            private static TextSpan? TryGetCodeRefactoringSelection(ITextBuffer buffer, ITextView view, SnapshotSpan range)
            {
                var selectedSpans = view.Selection.SelectedSpans
                    .SelectMany(ss => view.BufferGraph.MapDownToBuffer(ss, SpanTrackingMode.EdgeExclusive, buffer))
                    .Where(ss => !view.IsReadOnlyOnSurfaceBuffer(ss))
                    .ToList();

                // We only support refactorings when there is a single selection in the document.
                if (selectedSpans.Count != 1)
                {
                    return null;
                }

                var translatedSpan = selectedSpans[0].TranslateTo(range.Snapshot, SpanTrackingMode.EdgeInclusive);

                // We only support refactorings when selected span intersects with the span that the light bulb is asking for.
                if (!translatedSpan.IntersectsWith(range))
                {
                    return null;
                }

                return translatedSpan.Span.ToTextSpan();
            }

            /// <summary>
            /// Resolves the document corresponding to <paramref name="givenSnapshot"/>,
            /// or null when the buffer/workspace/document is missing or the editor
            /// snapshot no longer matches the given snapshot's reiterated version.
            /// </summary>
            private static async Task<ValueTuple<Document, ITextSnapshot>?> GetMatchingDocumentAndSnapshotAsync(ITextSnapshot givenSnapshot, CancellationToken cancellationToken)
            {
                var buffer = givenSnapshot.TextBuffer;
                if (buffer == null)
                {
                    return null;
                }

                var workspace = buffer.GetWorkspace();
                if (workspace == null)
                {
                    return null;
                }

                var documentId = workspace.GetDocumentIdInCurrentContext(buffer.AsTextContainer());
                if (documentId == null)
                {
                    return null;
                }

                var document = workspace.CurrentSolution.GetDocument(documentId);
                if (document == null)
                {
                    return null;
                }

                var sourceText = await document.GetTextAsync(cancellationToken).ConfigureAwait(false);
                cancellationToken.ThrowIfCancellationRequested();

                var snapshot = sourceText.FindCorrespondingEditorTextSnapshot();
                if (snapshot == null || snapshot.Version.ReiteratedVersionNumber != givenSnapshot.Version.ReiteratedVersionNumber)
                {
                    return null;
                }

                return ValueTuple.Create(document, snapshot);
            }

            private void OnTextViewClosed(object sender, EventArgs e)
            {
                Dispose();
            }

            private void OnWorkspaceChanged(object sender, EventArgs e)
            {
                // REVIEW: this event should give both old and new workspace as argument so that
                // one doesn't need to hold onto workspace in field.

                // remove existing event registration
                if (_workspace != null)
                {
                    _workspace.DocumentActiveContextChanged -= OnActiveContextChanged;
                }

                // REVIEW: why one need to get new workspace from registration? why not just pass in the new workspace?
                // add new event registration
                _workspace = _registration.Workspace;
                if (_workspace != null)
                {
                    _workspace.DocumentActiveContextChanged += OnActiveContextChanged;
                }
            }

            private void OnActiveContextChanged(object sender, DocumentEventArgs e)
            {
                // REVIEW: it would be nice for changed event to pass in both old and new document.
                OnSuggestedActionsChanged(e.Document.Project.Solution.Workspace, e.Document.Id, e.Document.Project.Solution.WorkspaceVersion);
            }

            private void OnDiagnosticsUpdated(object sender, DiagnosticsUpdatedArgs e)
            {
                // document removed case. no reason to raise event
                if (e.Solution == null)
                {
                    return;
                }

                OnSuggestedActionsChanged(e.Workspace, e.DocumentId, e.Solution.WorkspaceVersion);
            }

            /// <summary>
            /// Raises <see cref="SuggestedActionsChanged"/> once per solution version for
            /// the document currently shown in this buffer.
            /// </summary>
            private void OnSuggestedActionsChanged(Workspace currentWorkspace, DocumentId currentDocumentId, int solutionVersion, DiagnosticsUpdatedArgs args = null)
            {
                // Explicitly hold onto the _subjectBuffer field in a local and use this local in this function to avoid crashes
                // if this field happens to be cleared by Dispose() below. This is required since this code path involves code
                // that can run on background thread.
                var buffer = _subjectBuffer;
                if (buffer == null)
                {
                    return;
                }

                var workspace = buffer.GetWorkspace();

                // workspace is not ready, nothing to do.
                if (workspace == null || workspace != currentWorkspace)
                {
                    return;
                }

                if (currentDocumentId != workspace.GetDocumentIdInCurrentContext(buffer.AsTextContainer()) ||
                    solutionVersion == Volatile.Read(ref _lastSolutionVersionReported))
                {
                    return;
                }

                this.SuggestedActionsChanged?.Invoke(this, EventArgs.Empty);

                Volatile.Write(ref _lastSolutionVersionReported, solutionVersion);
            }

            /// <summary>
            /// Unhooks every event registration made in the constructor and in
            /// OnWorkspaceChanged, then clears the fields so pending async work bails out.
            /// </summary>
            public void Dispose()
            {
                if (_owner != null)
                {
                    var updateSource = (IDiagnosticUpdateSource)_owner._diagnosticService;
                    updateSource.DiagnosticsUpdated -= OnDiagnosticsUpdated;
                    _owner = null;
                }

                if (_workspace != null)
                {
                    _workspace.DocumentActiveContextChanged -= OnActiveContextChanged;
                    _workspace = null;
                }

                if (_registration != null)
                {
                    _registration.WorkspaceChanged -= OnWorkspaceChanged;
                    _registration = null;
                }

                if (_textView != null)
                {
                    _textView.Closed -= OnTextViewClosed;
                    _textView = null;
                }

                if (_subjectBuffer != null)
                {
                    _subjectBuffer = null;
                }
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.IO;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using PiHexScreensaver;

namespace PiHexScreensaver
{
    /// <summary>
    /// Configuration dialog for the Pi hex-digit screensaver. All persistent state is
    /// read and written through the static <c>PiHexSession</c> helper; the Apply button
    /// and <see cref="changesApplied"/> track whether the controls match the saved state.
    /// </summary>
    public partial class Settings : Form
    {
        // Combo-box choices that are only offered while logging is enabled.
        private static readonly string[] loggingTimeDisplayItems =
            { "Days", "Hours", "Minutes", "Day/Hour/Min/Sec/Ms" };

        // True when every control matches the persisted session state.
        private bool changesApplied = true;

        public Settings()
        {
            InitializeComponent();
        }

        // ----- shared helpers (extracted from previously duplicated handler code) -----

        /// <summary>Pushes the current control state into the session and persists it.</summary>
        private void saveCurrentSettings()
        {
            PiHexSession.enableLogging(enableLoggingCheckbox.Checked);
            PiHexSession.enableResumeCalculation(enableResumeCheckbox.Checked);
            PiHexSession.enableErrorChecking(enableErrorCheckingCheckbox.Checked);
            PiHexSession.enableScrolling(enableScrollingCheckbox.Checked);
            PiHexSession.setTimeDisplayMode(CalcTimeComboBox.Text);
            PiHexSession.saveSession();
        }

        /// <summary>Ensures every logging-only time-display choice is present in the combo box.</summary>
        private void addTimeDisplayItems()
        {
            foreach (string item in loggingTimeDisplayItems)
            {
                if (!CalcTimeComboBox.Items.Contains(item))
                {
                    CalcTimeComboBox.Items.Add(item);
                }
            }
        }

        /// <summary>Removes every logging-only time-display choice from the combo box.</summary>
        private void removeTimeDisplayItems()
        {
            foreach (string item in loggingTimeDisplayItems)
            {
                if (CalcTimeComboBox.Items.Contains(item))
                {
                    CalcTimeComboBox.Items.Remove(item);
                }
            }
        }

        /// <summary>Advanced options are only meaningful when both logging and resume are on.</summary>
        private void updateAdvancedOptionsAvailability()
        {
            AdvancedOptionsButton.Enabled = enableLoggingCheckbox.Checked && enableResumeCheckbox.Checked;
        }

        /// <summary>Marks the dialog as having unapplied edits.</summary>
        private void markDirty()
        {
            ApplyButton.Enabled = true;
            changesApplied = false;
        }

        // ----- event handlers (names/signatures fixed by the designer wiring) -----

        private void weblinkLabel_LinkClicked(object sender, LinkLabelLinkClickedEventArgs e)
        {
            // BUG FIX: Process.Start requires an explicit scheme; a bare "www..." string
            // is treated as a file path by ShellExecute and fails to launch the browser.
            System.Diagnostics.Process.Start("http://www.benjaminsbox.com");
        }

        private void okButton_Click(object sender, EventArgs e)
        {
            saveCurrentSettings();
            this.Close();
            System.Environment.Exit(0);
        }

        private void cancelButton_Click(object sender, EventArgs e)
        {
            // Discard edits; the session on disk is left untouched.
            this.Close();
            System.Environment.Exit(0);
        }

        private void enableResumeCheckbox_CheckedChanged(object sender, EventArgs e)
        {
            markDirty();

            // Resume depends on logging: un-check it again when logging is off.
            // (Assigning Checked re-enters this handler once with the corrected state.)
            if (enableLoggingCheckbox.Checked == false && enableResumeCheckbox.Checked == true)
            {
                enableResumeCheckbox.Checked = false;
            }

            updateAdvancedOptionsAvailability();
        }

        private void enableLoggingCheckbox_CheckedChanged(object sender, EventArgs e)
        {
            markDirty();

            if (enableLoggingCheckbox.Checked)
            {
                addTimeDisplayItems();
            }
            else
            {
                // Logging off: resume is meaningless and the timed display modes go away.
                enableResumeCheckbox.Checked = false;
                CalcTimeComboBox.Text = "No Display";
                removeTimeDisplayItems();
            }

            updateAdvancedOptionsAvailability();
        }

        private void Settings_Load(object sender, EventArgs e)
        {
            PiHexSession.restoreSession();

            // Note: the checkboxes still hold their designer defaults here; refresh()
            // below copies the restored session state into the controls (firing the
            // CheckedChanged handlers, which adjust the combo box again).
            if (enableLoggingCheckbox.Checked)
            {
                addTimeDisplayItems();
            }
            else
            {
                enableResumeCheckbox.Checked = false;
                CalcTimeComboBox.Text = "No Display";
                if (!CalcTimeComboBox.Items.Contains("No Display"))
                {
                    CalcTimeComboBox.Items.Add("No Display");
                }
                removeTimeDisplayItems();
            }

            refresh();
            ApplyButton.Enabled = false;
            changesApplied = true;
        }

        /// <summary>Copies the persisted session state into the dialog's controls.</summary>
        private void refresh()
        {
            CalcTimeComboBox.Text = PiHexSession.getTimeDisplayMode();
            enableLoggingCheckbox.Checked = PiHexSession.isLogging();
            enableResumeCheckbox.Checked = PiHexSession.isResumeCalc();
            enableErrorCheckingCheckbox.Checked = PiHexSession.isErrorChecking();
            enableScrollingCheckbox.Checked = PiHexSession.isScrolling();
        }

        private void CalcTimeComboBox_SelectedIndexChanged(object sender, EventArgs e)
        {
            markDirty();
            PiHexSession.setTimeDisplayMode(CalcTimeComboBox.Text);
        }

        private void defautButton_Click(object sender, EventArgs e)
        {
            if (MessageBox.Show("Are you sure that you would like to reset all properties to default values? All information including current Pi calculations will be lost in the process.",
                                "Notification", MessageBoxButtons.OKCancel, MessageBoxIcon.Warning) == DialogResult.OK)
            {
                PiHexSession.createEmptyXMLSession();
                PiHexSession.restoreSession();

                // Same control updates the original performed inline.
                refresh();
            }
        }

        private void AdvancedOptionsButton_Click(object sender, EventArgs e)
        {
            if (changesApplied == false)
            {
                MessageBox.Show("All changes must be applied before continuing.", "Unsaved Changes", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
            }
            else
            {
                AdvancedOptions options = new AdvancedOptions();
                options.ShowDialog();
            }
        }

        private void ExportSessionButton_Click(object sender, EventArgs e)
        {
            if (SettingsSaveFileDialog.ShowDialog() == DialogResult.Cancel)
            {
                return;
            }

            // Persist the current settings first so the exported copy matches the UI.
            saveCurrentSettings();
            PiHexSession.saveSessionCopy(SettingsSaveFileDialog.FileName);
        }

        private void ImportSessionButton_Click(object sender, EventArgs e)
        {
            if (MessageBox.Show("Are you sure that you would like to import new session properties? All information including current Pi calculations will be overwritten in the process.",
                                "Notification", MessageBoxButtons.OKCancel, MessageBoxIcon.Warning) == DialogResult.OK)
            {
                if (SettingsOpenFileDialog.ShowDialog() == DialogResult.Cancel)
                {
                    return;
                }

                PiHexSession.loadSession(SettingsOpenFileDialog.FileName);
                refresh();
            }
        }

        private void ApplyButton_Click(object sender, EventArgs e)
        {
            saveCurrentSettings();
            ApplyButton.Enabled = false;
            changesApplied = true;
        }

        private void enableErrorCheckingCheckbox_CheckedChanged(object sender, EventArgs e)
        {
            markDirty();
        }

        private void enableScrollingCheckbox_CheckedChanged(object sender, EventArgs e)
        {
            markDirty();
        }
    }
}
using System;
using System.Collections.Generic;
using Carrot.Configuration;
using Moq;
using RabbitMQ.Client;
using Xunit;

namespace Carrot.Tests
{
    /// <summary>
    /// Exercises declaration, equality and binding behaviour of the AMQP
    /// entities (exchanges, queues and bindings) exposed by the broker API.
    /// </summary>
    public class AmqpEntities
    {
        [Fact]
        public void ExchangeDeclarationWithDefaultDurability()
        {
            var modelMock = new Mock<IModel>();
            var exchange = FakeBroker(modelMock.Object).DeclareDirectExchange("e");

            exchange.Declare(modelMock.Object);

            // A plain declaration must be non-durable and non-auto-delete.
            modelMock.Verify(m => m.ExchangeDeclare(exchange.Name,
                                                    exchange.Type,
                                                    false,
                                                    false,
                                                    It.IsAny<IDictionary<String, Object>>()));
        }

        [Fact]
        public void ExchangeDeclarationWithExplicitDurability()
        {
            var modelMock = new Mock<IModel>();
            var exchange = FakeBroker(modelMock.Object).DeclareDurableTopicExchange("e");

            exchange.Declare(modelMock.Object);

            // The "durable" factory method must flow durable: true to the model.
            modelMock.Verify(m => m.ExchangeDeclare(exchange.Name,
                                                    exchange.Type,
                                                    true,
                                                    false,
                                                    It.IsAny<IDictionary<String, Object>>()));
        }

        [Fact]
        public void BuildingDirectExchange()
        {
            const String name = "one_exchange";
            var broker = FakeBroker(new Mock<IModel>().Object);

            var exchange = broker.DeclareDirectExchange(name);

            Assert.Equal(name, exchange.Name);
            Assert.Equal("direct", exchange.Type);
            Assert.False(exchange.IsDurable);
        }

        [Fact]
        public void BuildingFanoutExchange()
        {
            const String name = "one_exchange";
            var broker = FakeBroker(new Mock<IModel>().Object);

            var exchange = broker.DeclareFanoutExchange(name);

            Assert.Equal(name, exchange.Name);
            Assert.Equal("fanout", exchange.Type);
            Assert.False(exchange.IsDurable);
        }

        [Fact]
        public void BuildingTopicExchange()
        {
            const String name = "one_exchange";
            var broker = FakeBroker(new Mock<IModel>().Object);

            var exchange = broker.DeclareTopicExchange(name);

            Assert.Equal(name, exchange.Name);
            Assert.Equal("topic", exchange.Type);
            Assert.False(exchange.IsDurable);
        }

        [Fact]
        public void BuildingHeadersExchange()
        {
            const String name = "one_exchange";
            var broker = FakeBroker(new Mock<IModel>().Object);

            var exchange = broker.DeclareHeadersExchange(name);

            Assert.Equal(name, exchange.Name);
            Assert.Equal("headers", exchange.Type);
            Assert.False(exchange.IsDurable);
        }

        [Fact]
        public void ExchangeEquality()
        {
            const String name = "one_exchange";

            // Equality is driven by the exchange name only, not by its type.
            var first = new Exchange(name, "direct");
            var second = new Exchange(name, "topic");
            Assert.Equal(first, second);

            var third = new Exchange("another_name", "direct");
            Assert.NotEqual(first, third);
        }

        [Fact]
        public void MultipleBinding()
        {
            var broker = FakeBroker(new Mock<IModel>().Object);
            var exchange = broker.DeclareDirectExchange("exchange");
            var queue = broker.DeclareQueue("queue");
            broker.DeclareExchangeBinding(exchange, queue, "key");

            // Declaring the very same binding twice is rejected.
            Assert.Throws<ArgumentException>(() => broker.DeclareExchangeBinding(exchange, queue, "key"));
        }

        [Fact]
        public void QueueDeclarationWithDefaultDurability()
        {
            var modelMock = new Mock<IModel>();
            var queue = FakeBroker(modelMock.Object).DeclareQueue("q");

            queue.Declare(modelMock.Object);

            // Defaults: non-durable, non-exclusive, non-auto-delete.
            modelMock.Verify(m => m.QueueDeclare(queue.Name,
                                                 false,
                                                 false,
                                                 false,
                                                 It.IsAny<IDictionary<String, Object>>()));
        }

        [Fact]
        public void QueueEquality()
        {
            const String name = "one_queue";

            // Queues with the same name compare equal.
            var first = new Queue(name);
            var second = new Queue(name);
            Assert.Equal(first, second);

            var third = new Queue("another_name");
            Assert.NotEqual(first, third);
        }

        [Fact]
        public void ExchangeBindingArguments()
        {
            var modelMock = new Mock<IModel>();
            var broker = FakeBroker(modelMock.Object);
            var exchange = broker.DeclareDirectExchange("exchange");
            var queue = broker.DeclareQueue("queue");
            var arguments = new Dictionary<String, Object> { { "key", "value" } };
            broker.DeclareExchangeBinding(exchange, queue, "key", arguments);

            exchange.Declare(modelMock.Object);

            // The very same argument dictionary instance must reach QueueBind.
            using (broker.Connect())
                modelMock.Verify(m => m.QueueBind(It.IsAny<String>(),
                                                  It.IsAny<String>(),
                                                  It.IsAny<String>(),
                                                  It.Is<IDictionary<String, Object>>(args => args == arguments)));
        }

        [Fact]
        public void QueueArguments()
        {
            var modelMock = new Mock<IModel>();
            var broker = FakeBroker(modelMock.Object);
            var arguments = new Dictionary<String, Object> { { "key", "value" } };
            broker.DeclareQueue("queue", arguments);

            // The argument dictionary instance must reach QueueDeclare.
            using (broker.Connect())
                modelMock.Verify(m => m.QueueDeclare(It.Is<String>(name => name == "queue"),
                                                     It.IsAny<Boolean>(),
                                                     It.IsAny<Boolean>(),
                                                     It.IsAny<Boolean>(),
                                                     It.Is<IDictionary<String, Object>>(args => args == arguments)));
        }

        [Fact]
        public void ExchangeArguments()
        {
            var modelMock = new Mock<IModel>();
            var broker = FakeBroker(modelMock.Object);
            var arguments = new Dictionary<String, Object> { { "key", "value" } };
            broker.DeclareDirectExchange("exchange", arguments);

            // The argument dictionary instance must reach ExchangeDeclare.
            using (broker.Connect())
                modelMock.Verify(m => m.ExchangeDeclare(It.Is<String>(name => name == "exchange"),
                                                        It.IsAny<String>(),
                                                        It.IsAny<Boolean>(),
                                                        It.IsAny<Boolean>(),
                                                        It.Is<IDictionary<String, Object>>(args => args == arguments)));
        }

        // Builds a broker whose connection factory hands back the supplied
        // model, so declarations can be verified against the mock.
        private static IBroker FakeBroker(IModel model)
        {
            var connectionMock = new Mock<RabbitMQ.Client.IConnection>();
            connectionMock.Setup(c => c.CreateModel()).Returns(model);

            var builderMock = new Mock<IConnectionBuilder>();
            builderMock.Setup(b => b.CreateConnection(It.IsAny<Uri>()))
                       .Returns(connectionMock.Object);

            return Broker.New(configuration =>
                              {
                                  configuration.ResolveMessageTypeBy(new Mock<IMessageTypeResolver>().Object);
                              },
                              builderMock.Object);
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using LibGit2Sharp.Core;
using LibGit2Sharp.Core.Handles;

namespace LibGit2Sharp
{
    /// <summary>
    /// A Commit
    /// </summary>
    [DebuggerDisplay("{DebuggerDisplay,nq}")]
    public class Commit : GitObject
    {
        // Lazy groups batch the libgit2 lookups: each group reads several
        // attributes from the same underlying git object in one resolution.
        private readonly GitObjectLazyGroup group1;
        private readonly GitObjectLazyGroup group2;

        private readonly ILazy<Tree> lazyTree;
        private readonly ILazy<Signature> lazyAuthor;
        private readonly ILazy<Signature> lazyCommitter;
        private readonly ILazy<string> lazyMessage;
        private readonly ILazy<string> lazyMessageShort;
        private readonly ILazy<string> lazyEncoding;

        private readonly ParentsCollection parents;
        private readonly Lazy<IEnumerable<Note>> lazyNotes;

        /// <summary>
        /// Needed for mocking purposes.
        /// </summary>
        protected Commit()
        { }

        internal Commit(Repository repo, ObjectId id)
            : base(repo, id)
        {
            // Tree is resolved on its own; author/committer share group1 and
            // message/summary/encoding share group2.
            lazyTree = GitObjectLazyGroup.Singleton<Tree>(this.repo, id, obj => new Tree(this.repo, Proxy.git_commit_tree_id(obj), null));

            group1 = new GitObjectLazyGroup(this.repo, id);
            lazyAuthor = group1.AddLazy<Signature>(Proxy.git_commit_author);
            lazyCommitter = group1.AddLazy<Signature>(Proxy.git_commit_committer);

            group2 = new GitObjectLazyGroup(this.repo, id);
            lazyMessage = group2.AddLazy<string>(Proxy.git_commit_message);
            lazyMessageShort = group2.AddLazy<string>(Proxy.git_commit_summary);
            lazyEncoding = group2.AddLazy<string>(RetrieveEncodingOf);

            // Notes are materialized (ToList) on first access so later
            // repository changes don't alter an already-returned sequence.
            lazyNotes = new Lazy<IEnumerable<Note>>(() => RetrieveNotesOfCommit(id).ToList());

            parents = new ParentsCollection(repo, id);
        }

        /// <summary>
        /// Gets the <see cref="TreeEntry"/> pointed at by the <paramref name="relativePath"/> in the <see cref="Tree"/>.
        /// </summary>
        /// <param name="relativePath">The relative path to the <see cref="TreeEntry"/> from the <see cref="Commit"/> working directory.</param>
        /// <returns><c>null</c> if nothing has been found, the <see cref="TreeEntry"/> otherwise.</returns>
        public virtual TreeEntry this[string relativePath]
        {
            get { return Tree[relativePath]; }
        }

        /// <summary>
        /// Gets the commit message.
        /// </summary>
        public virtual string Message { get { return lazyMessage.Value; } }

        /// <summary>
        /// Gets the short commit message which is usually the first line of the commit.
        /// </summary>
        public virtual string MessageShort { get { return lazyMessageShort.Value; } }

        /// <summary>
        /// Gets the encoding of the message.
        /// </summary>
        public virtual string Encoding { get { return lazyEncoding.Value; } }

        /// <summary>
        /// Gets the author of this commit.
        /// </summary>
        public virtual Signature Author { get { return lazyAuthor.Value; } }

        /// <summary>
        /// Gets the committer.
        /// </summary>
        public virtual Signature Committer { get { return lazyCommitter.Value; } }

        /// <summary>
        /// Gets the Tree associated to this commit.
        /// </summary>
        public virtual Tree Tree { get { return lazyTree.Value; } }

        /// <summary>
        /// Gets the parents of this commit. This property is lazy loaded and can throw an exception if the commit no longer exists in the repo.
        /// </summary>
        public virtual IEnumerable<Commit> Parents { get { return parents; } }

        /// <summary>
        /// Gets the notes of this commit.
        /// </summary>
        public virtual IEnumerable<Note> Notes { get { return lazyNotes.Value; } }

        // Delegates to the repository's notes collection for this commit id.
        private IEnumerable<Note> RetrieveNotesOfCommit(ObjectId oid)
        {
            return repo.Notes[oid];
        }

        // libgit2 reports null when the commit carries no encoding header;
        // fall back to "UTF-8" in that case.
        private static string RetrieveEncodingOf(GitObjectSafeHandle obj)
        {
            string encoding = Proxy.git_commit_message_encoding(obj);

            return encoding ?? "UTF-8";
        }

        // Debugger display: abbreviated (7 character) sha plus the summary line.
        private string DebuggerDisplay
        {
            get
            {
                return string.Format(CultureInfo.InvariantCulture,
                                     "{0} {1}", Id.ToString(7), MessageShort);
            }
        }

        // Read-only view over the commit's parents. The count is fetched
        // separately so Count doesn't force materialization of the parents.
        private class ParentsCollection : ICollection<Commit>
        {
            private readonly Lazy<ICollection<Commit>> _parents;
            private readonly Lazy<int> _count;

            public ParentsCollection(Repository repo, ObjectId commitId)
            {
                _count = new Lazy<int>(() => Proxy.git_commit_parentcount(repo.Handle, commitId));
                _parents = new Lazy<ICollection<Commit>>(() => RetrieveParentsOfCommit(repo, commitId));
            }

            // Builds one Commit per parent id reported by libgit2.
            private ICollection<Commit> RetrieveParentsOfCommit(Repository repo, ObjectId commitId)
            {
                using (var obj = new ObjectSafeWrapper(commitId, repo.Handle))
                {
                    int parentsCount = _count.Value;
                    var parents = new List<Commit>(parentsCount);

                    for (uint i = 0; i < parentsCount; i++)
                    {
                        ObjectId parentCommitId = Proxy.git_commit_parent_id(obj.ObjectPtr, i);
                        parents.Add(new Commit(repo, parentCommitId));
                    }

                    return parents;
                }
            }

            public IEnumerator<Commit> GetEnumerator()
            {
                return _parents.Value.GetEnumerator();
            }

            IEnumerator IEnumerable.GetEnumerator()
            {
                return GetEnumerator();
            }

            // Mutating members throw: the collection is intentionally read-only.
            public void Add(Commit item)
            {
                throw new NotSupportedException();
            }

            public void Clear()
            {
                throw new NotSupportedException();
            }

            public bool Contains(Commit item)
            {
                return _parents.Value.Contains(item);
            }

            public void CopyTo(Commit[] array, int arrayIndex)
            {
                _parents.Value.CopyTo(array, arrayIndex);
            }

            public bool Remove(Commit item)
            {
                throw new NotSupportedException();
            }

            public int Count
            {
                get { return _count.Value; }
            }

            public bool IsReadOnly
            {
                get { return true; }
            }
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.ObjectModel;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.ExceptionServices;
using Microsoft.VisualStudio.Debugger;
using Microsoft.VisualStudio.Debugger.Clr;
using Microsoft.VisualStudio.Debugger.ComponentInterfaces;
using Microsoft.VisualStudio.Debugger.Evaluation;
using Microsoft.VisualStudio.Debugger.Evaluation.ClrCompilation;
using Xunit;

namespace Microsoft.CodeAnalysis.ExpressionEvaluator
{
    // Shared infrastructure for testing an expression-evaluator ResultProvider
    // against the test doubles of the Dkm* debugger object model.
    public abstract class ResultProviderTestBase
    {
        private readonly IDkmClrFormatter _formatter;
        private readonly IDkmClrResultProvider _resultProvider;

        internal readonly DkmInspectionContext DefaultInspectionContext;

        internal ResultProviderTestBase(ResultProvider resultProvider, DkmInspectionContext defaultInspectionContext)
        {
            _formatter = resultProvider.Formatter;
            _resultProvider = resultProvider;
            this.DefaultInspectionContext = defaultInspectionContext;
        }

        // Wraps an arbitrary CLR object in a DkmClrValue; when no type is
        // supplied, the value's runtime type is used.
        internal DkmClrValue CreateDkmClrValue(
            object value,
            Type type = null,
            string alias = null,
            DkmEvaluationResultFlags evalFlags = DkmEvaluationResultFlags.None,
            DkmClrValueFlags valueFlags = DkmClrValueFlags.None)
        {
            if (type == null)
            {
                type = value.GetType();
            }
            return new DkmClrValue(
                value,
                DkmClrValue.GetHostObjectValue((TypeImpl)type, value),
                new DkmClrType((TypeImpl)type),
                alias,
                _formatter,
                evalFlags,
                valueFlags);
        }

        // Overload taking an explicit DkmClrType (and optional COM flag).
        internal DkmClrValue CreateDkmClrValue(
            object value,
            DkmClrType type,
            string alias = null,
            DkmEvaluationResultFlags evalFlags = DkmEvaluationResultFlags.None,
            DkmClrValueFlags valueFlags = DkmClrValueFlags.None,
            bool isComObject = false)
        {
            return new DkmClrValue(
                value,
                DkmClrValue.GetHostObjectValue(type.GetLmrType(), value),
                type,
                alias,
                _formatter,
                evalFlags,
                valueFlags,
                isComObject);
        }

        // Builds a value representing an evaluation error: the message travels
        // in hostObjectValue and the Error flag is set.
        internal DkmClrValue CreateErrorValue(
            DkmClrType type,
            string message)
        {
            return new DkmClrValue(
                value: null,
                hostObjectValue: message,
                type: type,
                alias: null,
                formatter: _formatter,
                evalFlags: DkmEvaluationResultFlags.None,
                valueFlags: DkmClrValueFlags.Error);
        }

        #region Formatter Tests

        // Formats a null value of the given static type.
        internal string FormatNull<T>(bool useHexadecimal = false)
        {
            return FormatValue(null, typeof(T), useHexadecimal);
        }

        internal string FormatValue(object value, bool useHexadecimal = false)
        {
            return FormatValue(value, value.GetType(), useHexadecimal);
        }

        // Formats a value via the formatter under test, with radix 16 or 10.
        internal string FormatValue(object value, Type type, bool useHexadecimal = false)
        {
            var clrValue = CreateDkmClrValue(value, type);
            var inspectionContext = CreateDkmInspectionContext(_formatter, DkmEvaluationFlags.None, radix: useHexadecimal ? 16u : 10u);
            return clrValue.GetValueString(inspectionContext, Formatter.NoFormatSpecifiers);
        }

        internal bool HasUnderlyingString(object value)
        {
            return HasUnderlyingString(value, value.GetType());
        }

        internal bool HasUnderlyingString(object value, Type type)
        {
            var clrValue = GetValueForUnderlyingString(value, type);
            return clrValue.HasUnderlyingString(DefaultInspectionContext);
        }

        internal string GetUnderlyingString(object value)
        {
            var clrValue = GetValueForUnderlyingString(value, value.GetType());
            return clrValue.GetUnderlyingString(DefaultInspectionContext);
        }

        // Creates a value flagged RawString, as used by the string visualizer.
        internal DkmClrValue GetValueForUnderlyingString(object value, Type type)
        {
            return CreateDkmClrValue(
                value,
                type,
                evalFlags: DkmEvaluationResultFlags.RawString);
        }

        #endregion

        #region ResultProvider Tests

        internal DkmInspectionContext CreateDkmInspectionContext(
            DkmEvaluationFlags flags = DkmEvaluationFlags.None,
            uint radix = 10,
            DkmRuntimeInstance runtimeInstance = null)
        {
            return CreateDkmInspectionContext(_formatter, flags, radix, runtimeInstance);
        }

        internal static DkmInspectionContext CreateDkmInspectionContext(
            IDkmClrFormatter formatter,
            DkmEvaluationFlags flags,
            uint radix,
            DkmRuntimeInstance runtimeInstance = null)
        {
            return new DkmInspectionContext(formatter, flags, radix, runtimeInstance);
        }

        internal DkmEvaluationResult FormatResult(string name, DkmClrValue value, DkmClrType declaredType = null, DkmInspectionContext inspectionContext = null)
        {
            return FormatResult(name, name, value, declaredType, inspectionContext);
        }

        // Runs the result provider synchronously (via a DkmWorkList) and
        // returns the produced evaluation result.
        // NOTE(review): the fullName parameter is not forwarded — the call
        // below passes resultFullName: null; confirm whether that is intended.
        internal DkmEvaluationResult FormatResult(string name, string fullName, DkmClrValue value, DkmClrType declaredType = null, DkmInspectionContext inspectionContext = null)
        {
            DkmEvaluationResult evaluationResult = null;
            var workList = new DkmWorkList();
            _resultProvider.GetResult(
                value,
                workList,
                declaredType: declaredType ?? value.Type,
                inspectionContext: inspectionContext ?? DefaultInspectionContext,
                formatSpecifiers: Formatter.NoFormatSpecifiers,
                resultName: name,
                resultFullName: null,
                completionRoutine: asyncResult => evaluationResult = asyncResult.Result);
            workList.Execute();
            return evaluationResult;
        }

        // Fetches ALL children of a result, deliberately varying the request
        // sizes to exercise the enumeration-context paging logic.
        internal DkmEvaluationResult[] GetChildren(DkmEvaluationResult evalResult, DkmInspectionContext inspectionContext = null)
        {
            DkmEvaluationResultEnumContext enumContext;
            var builder = ArrayBuilder<DkmEvaluationResult>.GetInstance();
            // Request 0-3 children.
            int size;
            DkmEvaluationResult[] items;
            for (size = 0; size < 3; size++)
            {
                items = GetChildren(evalResult, size, inspectionContext, out enumContext);
                var totalChildCount = enumContext.Count;
                Assert.InRange(totalChildCount, 0, int.MaxValue);
                var expectedSize = (size < totalChildCount) ? size : totalChildCount;
                Assert.Equal(expectedSize, items.Length);
            }
            // Request items (increasing the size of the request with each iteration).
            size = 1;
            items = GetChildren(evalResult, size, inspectionContext, out enumContext);
            while (items.Length > 0)
            {
                builder.AddRange(items);
                Assert.True(builder.Count <= enumContext.Count);
                int offset = builder.Count;
                // Request 0 items.
                // NOTE(review): arguments are swapped — xUnit expects
                // Assert.Equal(expected, actual), i.e. Assert.Equal(0, items.Length).
                items = GetItems(enumContext, offset, 0);
                Assert.Equal(items.Length, 0);
                // Request >0 items.
                size++;
                items = GetItems(enumContext, offset, size);
            }
            Assert.Equal(builder.Count, enumContext.Count);
            return builder.ToArrayAndFree();
        }

        // Single GetChildren call; rethrows any captured exception with its
        // original stack trace via ExceptionDispatchInfo.
        internal DkmEvaluationResult[] GetChildren(DkmEvaluationResult evalResult, int initialRequestSize, DkmInspectionContext inspectionContext, out DkmEvaluationResultEnumContext enumContext)
        {
            DkmGetChildrenAsyncResult getChildrenResult = default(DkmGetChildrenAsyncResult);
            var workList = new DkmWorkList();
            _resultProvider.GetChildren(evalResult, workList, initialRequestSize, inspectionContext ?? DefaultInspectionContext, r => { getChildrenResult = r; });
            workList.Execute();
            var exception = getChildrenResult.Exception;
            if (exception != null)
            {
                ExceptionDispatchInfo.Capture(exception).Throw();
            }
            enumContext = getChildrenResult.EnumContext;
            return getChildrenResult.InitialChildren;
        }

        // Single GetItems call against an enumeration context; same
        // exception-rethrow pattern as GetChildren.
        internal DkmEvaluationResult[] GetItems(DkmEvaluationResultEnumContext enumContext, int startIndex, int count)
        {
            DkmEvaluationEnumAsyncResult getItemsResult = default(DkmEvaluationEnumAsyncResult);
            var workList = new DkmWorkList();
            _resultProvider.GetItems(enumContext, workList, startIndex, count, r => { getItemsResult = r; });
            workList.Execute();
            var exception = getItemsResult.Exception;
            if (exception != null)
            {
                ExceptionDispatchInfo.Capture(exception).Throw();
            }
            return getItemsResult.Items;
        }

        // Factory for an expected "success" evaluation result used in asserts.
        internal static DkmEvaluationResult EvalResult(
            string name,
            string value,
            string type,
            string fullName,
            DkmEvaluationResultFlags flags = DkmEvaluationResultFlags.None,
            DkmEvaluationResultCategory category = DkmEvaluationResultCategory.Other,
            string editableValue = null,
            DkmCustomUIVisualizerInfo[] customUIVisualizerInfo = null)
        {
            return DkmSuccessEvaluationResult.Create(
                null,
                null,
                name,
                fullName,
                flags,
                value,
                editableValue,
                type,
                category,
                default(DkmEvaluationResultAccessType),
                default(DkmEvaluationResultStorageType),
                default(DkmEvaluationResultTypeModifierFlags),
                null,
                (customUIVisualizerInfo != null) ? new ReadOnlyCollection<DkmCustomUIVisualizerInfo>(customUIVisualizerInfo) : null,
                null,
                null);
        }

        // Factory for an expected "intermediate" (cross-language) result.
        internal static DkmIntermediateEvaluationResult EvalIntermediateResult(
            string name,
            string fullName,
            string expression,
            DkmLanguage language)
        {
            return DkmIntermediateEvaluationResult.Create(
                InspectionContext: null,
                StackFrame: null,
                Name: name,
                FullName: fullName,
                Expression: expression,
                IntermediateLanguage: language,
                TargetRuntime: null,
                DataItem: null);
        }

        // Factory for an expected "failed" evaluation result.
        internal static DkmEvaluationResult EvalFailedResult(
            string name,
            string message,
            string type = null,
            string fullName = null,
            DkmEvaluationResultFlags flags = DkmEvaluationResultFlags.None)
        {
            return DkmFailedEvaluationResult.Create(
                null,
                null,
                name,
                fullName,
                message,
                flags,
                type,
                null);
        }

        // Compares actual vs expected element-wise; on failure, dumps the
        // actual results as ready-to-paste EvalResult(...) calls, then rethrows.
        internal static void Verify(IReadOnlyList<DkmEvaluationResult> actual, params DkmEvaluationResult[] expected)
        {
            try
            {
                int n = actual.Count;
                Assert.Equal(expected.Length, n);
                for (int i = 0; i < n; i++)
                {
                    Verify(actual[i], expected[i]);
                }
            }
            catch
            {
                foreach (DkmSuccessEvaluationResult result in actual)
                {
                    // Only include optional arguments that differ from defaults.
                    var optionalArgumentsTemplate = string.Empty;
                    if (result.Flags != DkmEvaluationResultFlags.None)
                    {
                        optionalArgumentsTemplate += ", {4}";
                    }
                    if (result.Category != DkmEvaluationResultCategory.Other)
                    {
                        optionalArgumentsTemplate += ", {5}";
                    }
                    if (result.EditableValue != null)
                    {
                        optionalArgumentsTemplate += ", editableValue: {6}";
                    }
                    var evalResultTemplate = "EvalResult({0}, {1}, {2}, {3}" + optionalArgumentsTemplate + "),";
                    var resultValue = (result.Value == null) ? "null" : Quote(Escape(result.Value));
                    Console.WriteLine(evalResultTemplate,
                                      Quote(result.Name),
                                      resultValue,
                                      Quote(result.Type),
                                      (result.FullName != null) ? Quote(Escape(result.FullName)) : "null",
                                      FormatEnumValue(result.Flags),
                                      FormatEnumValue(result.Category),
                                      Quote(result.EditableValue));
                }
                throw;
            }
        }

        // Escapes embedded quotes for the code-dump above.
        private static string Escape(string str)
        {
            return str.Replace("\"", "\\\"");
        }

        private static string Quote(string str)
        {
            return '"' + str + '"';
        }

        // "A, B" -> "EnumType.A | EnumType.B" for the code-dump above.
        private static string FormatEnumValue(Enum e)
        {
            var parts = e.ToString().Split(new[] { ", " }, StringSplitOptions.RemoveEmptyEntries);
            var enumTypeName = e.GetType().Name;
            return string.Join(" | ", parts.Select(p => enumTypeName + "." + p));
        }

        // Asserts a single result matches, dispatching on the expected kind
        // (success / intermediate / failed).
        internal static void Verify(DkmEvaluationResult actual, DkmEvaluationResult expected)
        {
            Assert.Equal(expected.Name, actual.Name);
            Assert.Equal(expected.FullName, actual.FullName);
            var expectedSuccess = expected as DkmSuccessEvaluationResult;
            var expectedIntermediate = expected as DkmIntermediateEvaluationResult;
            if (expectedSuccess != null)
            {
                var actualSuccess = (DkmSuccessEvaluationResult)actual;
                Assert.Equal(expectedSuccess.Value, actualSuccess.Value);
                Assert.Equal(expectedSuccess.Type, actualSuccess.Type);
                Assert.Equal(expectedSuccess.Flags, actualSuccess.Flags);
                Assert.Equal(expectedSuccess.Category, actualSuccess.Category);
                Assert.Equal(expectedSuccess.EditableValue, actualSuccess.EditableValue);
                // Verify the DebuggerVisualizerAttribute
                Assert.True(
                    (expectedSuccess.CustomUIVisualizers == actualSuccess.CustomUIVisualizers) ||
                    (expectedSuccess.CustomUIVisualizers != null && actualSuccess.CustomUIVisualizers != null &&
                    expectedSuccess.CustomUIVisualizers.SequenceEqual(actualSuccess.CustomUIVisualizers, CustomUIVisualizerInfoComparer.Instance)));
            }
            else if (expectedIntermediate != null)
            {
                var actualIntermediate = (DkmIntermediateEvaluationResult)actual;
                Assert.Equal(expectedIntermediate.Expression, actualIntermediate.Expression);
                Assert.Equal(expectedIntermediate.IntermediateLanguage.Id.LanguageId, actualIntermediate.IntermediateLanguage.Id.LanguageId);
                Assert.Equal(expectedIntermediate.IntermediateLanguage.Id.VendorId, actualIntermediate.IntermediateLanguage.Id.VendorId);
            }
            else
            {
                var actualFailed = (DkmFailedEvaluationResult)actual;
                var expectedFailed = (DkmFailedEvaluationResult)expected;
                Assert.Equal(expectedFailed.ErrorMessage, actualFailed.ErrorMessage);
                Assert.Equal(expectedFailed.Type, actualFailed.Type);
                Assert.Equal(expectedFailed.Flags, actualFailed.Flags);
            }
        }

        #endregion

        // Field-wise equality for visualizer descriptors; GetHashCode is not
        // needed by SequenceEqual and deliberately throws.
        private sealed class CustomUIVisualizerInfoComparer : IEqualityComparer<DkmCustomUIVisualizerInfo>
        {
            internal static readonly CustomUIVisualizerInfoComparer Instance = new CustomUIVisualizerInfoComparer();

            bool IEqualityComparer<DkmCustomUIVisualizerInfo>.Equals(DkmCustomUIVisualizerInfo x, DkmCustomUIVisualizerInfo y)
            {
                return x == y ||
                    (x != null && y != null &&
                    x.Id == y.Id &&
                    x.MenuName == y.MenuName &&
                    x.Description == y.Description &&
                    x.Metric == y.Metric &&
                    x.UISideVisualizerTypeName == y.UISideVisualizerTypeName &&
                    x.UISideVisualizerAssemblyName == y.UISideVisualizerAssemblyName &&
                    x.UISideVisualizerAssemblyLocation == y.UISideVisualizerAssemblyLocation &&
                    x.DebuggeeSideVisualizerTypeName == y.DebuggeeSideVisualizerTypeName &&
                    x.DebuggeeSideVisualizerAssemblyName == y.DebuggeeSideVisualizerAssemblyName);
            }

            int IEqualityComparer<DkmCustomUIVisualizerInfo>.GetHashCode(DkmCustomUIVisualizerInfo obj)
            {
                throw new NotImplementedException();
            }
        }
    }
}
using System; using System.Collections.Generic; using Mapbox.VectorTile.Geometry; using UnityEngine; namespace Assets.Mapbox.Unity.MeshGeneration.Modifiers.MeshModifiers { public static class EarcutLibrary { public static List<int> Earcut(List<float> data, List<int> holeIndices, int dim) { dim = Math.Max(dim, 2); var hasHoles = holeIndices.Count; var outerLen = hasHoles > 0 ? holeIndices[0] * dim : data.Count; var outerNode = linkedList(data, 0, outerLen, dim, true); var triangles = new List<int>(); if (outerNode == null) return triangles; var minX = 0f; var minY = 0f; var maxX = 0f; var maxY = 0f; var x = 0f; var y = 0f; var size = 0f; if (hasHoles > 0) outerNode = EliminateHoles(data, holeIndices, outerNode, dim); // if the shape is not too simple, we'll use z-order curve hash later; calculate polygon bbox if (data.Count > 80 * dim) { minX = maxX = data[0]; minY = maxY = data[1]; for (var i = dim; i < outerLen; i += dim) { x = data[i]; y = data[i + 1]; if (x < minX) minX = x; if (y < minY) minY = y; if (x > maxX) maxX = x; if (y > maxY) maxY = y; } // minX, minY and size are later used to transform coords into integers for z-order calculation size = Math.Max(maxX - minX, maxY - minY); } earcutLinked(outerNode, triangles, dim, minX, minY, size); return triangles; } private static void earcutLinked(Node ear, List<int> triangles, int dim, float minX, float minY, float size, int pass = 0) { if (ear == null) return; // interlink polygon nodes in z-order if (pass == 0 && size > 0) indexCurve(ear, minX, minY, size); var stop = ear; Node prev; Node next; // iterate through ears, slicing them one by one while (ear.prev != ear.next) { prev = ear.prev; next = ear.next; if (size > 0 ? 
isEarHashed(ear, minX, minY, size) : isEar(ear)) { // cut off the triangle triangles.Add(prev.i / dim); triangles.Add(next.i / dim); triangles.Add(ear.i / dim); removeNode(ear); // skipping the next vertice leads to less sliver triangles ear = next.next; stop = next.next; continue; } ear = next; // if we looped through the whole remaining polygon and can't find any more ears if (ear == stop) { // try filtering points and slicing again if (pass == 0) { earcutLinked(FilterPoints(ear, null), triangles, dim, minX, minY, size, 1); // if this didn't work, try curing all small self-intersections locally } else if (pass == 1) { ear = cureLocalIntersections(ear, triangles, dim); earcutLinked(ear, triangles, dim, minX, minY, size, 2); // as a last resort, try splitting the remaining polygon into two } else if (pass == 2) { splitEarcut(ear, triangles, dim, minX, minY, size); } break; } } } private static bool isEarHashed(Node ear, float minX, float minY, float size) { var a = ear.prev; var b = ear; var c = ear.next; if (area(a, b, c) >= 0) return false; // reflex, can't be an ear // triangle bbox; min & max are calculated like this for speed var minTX = a.x < b.x ? (a.x < c.x ? a.x : c.x) : (b.x < c.x ? b.x : c.x); var minTY = a.y < b.y ? (a.y < c.y ? a.y : c.y) : (b.y < c.y ? b.y : c.y); var maxTX = a.x > b.x ? (a.x > c.x ? a.x : c.x) : (b.x > c.x ? b.x : c.x); var maxTY = a.y > b.y ? (a.y > c.y ? a.y : c.y) : (b.y > c.y ? 
b.y : c.y); // z-order range for the current triangle bbox; var minZ = zOrder(minTX, minTY, minX, minY, size); var maxZ = zOrder(maxTX, maxTY, minX, minY, size); // first look for points inside the triangle in increasing z-order var p = ear.nextZ; while (p != null && p.mZOrder <= maxZ) { if (p != ear.prev && p != ear.next && pointInTriangle(a.x, a.y, b.x, b.y, c.x, c.y, p.x, p.y) && area(p.prev, p, p.next) >= 0) return false; p = p.nextZ; } // then look for points in decreasing z-order p = ear.prevZ; while (p != null && p.mZOrder >= minZ) { if (p != ear.prev && p != ear.next && pointInTriangle(a.x, a.y, b.x, b.y, c.x, c.y, p.x, p.y) && area(p.prev, p, p.next) >= 0) return false; p = p.prevZ; } return true; } private static int zOrder(float x, float y, float minX, float minY, float size) { //TODO casting here might be wrong x = 32767 * (x - minX) / size; y = 32767 * (y - minY) / size; x = ((int)x | ((int)x << 8)) & 0x00FF00FF; x = ((int)x | ((int)x << 4)) & 0x0F0F0F0F; x = ((int)x | ((int)x << 2)) & 0x33333333; x = ((int)x | ((int)x << 1)) & 0x55555555; y = ((int)y | ((int)y << 8)) & 0x00FF00FF; y = ((int)y | ((int)y << 4)) & 0x0F0F0F0F; y = ((int)y | ((int)y << 2)) & 0x33333333; y = ((int)y | ((int)y << 1)) & 0x55555555; return (int)x | ((int)y << 1); } private static void splitEarcut(Node start, List<int> triangles, int dim, float minX, float minY, float size) { var a = start; do { var b = a.next.next; while (b != a.prev) { if (a.i != b.i && isValidDiagonal(a, b)) { // split the polygon in two by the diagonal var c = SplitPolygon(a, b); // filter colinear points around the cuts a = FilterPoints(a, a.next); c = FilterPoints(c, c.next); // run earcut on each half earcutLinked(a, triangles, dim, minX, minY, size); earcutLinked(c, triangles, dim, minX, minY, size); return; } b = b.next; } a = a.next; } while (a != start); } private static bool isValidDiagonal(Node a, Node b) { return a.next.i != b.i && a.prev.i != b.i && !intersectsPolygon(a, b) && locallyInside(a, b) 
&& locallyInside(b, a) && middleInside(a, b); } private static bool middleInside(Node a, Node b) { var p = a; var inside = false; var px = (a.x + b.x) / 2; var py = (a.y + b.y) / 2; do { if (((p.y > py) != (p.next.y > py)) && p.next.y != p.y && (px < (p.next.x - p.x) * (py - p.y) / (p.next.y - p.y) + p.x)) inside = !inside; p = p.next; } while (p != a); return inside; } private static bool intersectsPolygon(Node a, Node b) { var p = a; do { if (p.i != a.i && p.next.i != a.i && p.i != b.i && p.next.i != b.i && intersects(p, p.next, a, b)) return true; p = p.next; } while (p != a); return false; } private static Node cureLocalIntersections(Node start, List<int> triangles, int dim) { var p = start; do { var a = p.prev; var b = p.next.next; if (!equals(a, b) && intersects(a, p, p.next, b) && locallyInside(a, b) && locallyInside(b, a)) { triangles.Add(a.i / dim); triangles.Add(p.i / dim); triangles.Add(b.i / dim); // remove two nodes involved removeNode(p); removeNode(p.next); p = start = b; } p = p.next; } while (p != start); return p; } private static bool intersects(Node p1, Node q1, Node p2, Node q2) { if ((equals(p1, q1) && equals(p2, q2)) || (equals(p1, q2) && equals(p2, q1))) return true; return area(p1, q1, p2) > 0 != area(p1, q1, q2) > 0 && area(p2, q2, p1) > 0 != area(p2, q2, q1) > 0; } private static bool isEar(Node ear) { var a = ear.prev; var b = ear; var c = ear.next; if (area(a, b, c) >= 0) return false; // reflex, can't be an ear // now make sure we don't have other points inside the potential ear var p = ear.next.next; while (p != ear.prev) { if (pointInTriangle(a.x, a.y, b.x, b.y, c.x, c.y, p.x, p.y) && area(p.prev, p, p.next) >= 0) return false; p = p.next; } return true; } private static void indexCurve(Node start, float minX, float minY, float size) { var p = start; do { if (p.mZOrder == 0) p.mZOrder = zOrder(p.x, p.y, minX, minY, size); p.prevZ = p.prev; p.nextZ = p.next; p = p.next; } while (p != start); p.prevZ.nextZ = null; p.prevZ = null; 
sortLinked(p); } private static Node sortLinked(Node list) { var i = 0; Node p; Node q; Node e; Node tail; var numMerges = 0; ; var pSize = 0; var qSize = 0; var inSize = 1; do { p = list; list = null; tail = null; numMerges = 0; while (p != null) { numMerges++; q = p; pSize = 0; for (i = 0; i < inSize; i++) { pSize++; q = q.nextZ; if (q == null) break; } qSize = inSize; while (pSize > 0 || (qSize > 0 && q != null)) { if (pSize != 0 && (qSize == 0 || q == null || p.mZOrder <= q.mZOrder)) { e = p; p = p.nextZ; pSize--; } else { e = q; q = q.nextZ; qSize--; } if (tail != null) tail.nextZ = e; else list = e; e.prevZ = tail; tail = e; } p = q; } tail.nextZ = null; inSize *= 2; } while (numMerges > 1); return list; } private static Node EliminateHoles(List<float> data, List<int> holeIndices, Node outerNode, int dim) { var i = 0; var len = holeIndices.Count; var start = 0; var end = 0; Node list = null; var queue = new List<Node>(); for (i = 0; i < len; i++) { start = holeIndices[i] * dim; end = i < len - 1 ? 
holeIndices[i + 1] * dim : data.Count; list = linkedList(data, start, end, dim, false); if (list == list.next) list.steiner = true; queue.Add(getLeftmost(list)); } queue.Sort(delegate (Node a, Node b) { return (int)Math.Ceiling(a.x - b.x); }); // process holes from left to right for (i = 0; i < queue.Count; i++) { EliminateHole(queue[i], outerNode); outerNode = FilterPoints(outerNode, outerNode.next); } return outerNode; } private static void EliminateHole(Node hole, Node outerNode) { outerNode = FindHoleBridge(hole, outerNode); if (outerNode != null) { var b = SplitPolygon(outerNode, hole); FilterPoints(b, b.next); } } private static Node FilterPoints(Node start, Node end) { if (start == null) return start; if (end == null) end = start; var p = start; bool again = true; do { again = false; if (!p.steiner && (equals(p, p.next) || area(p.prev, p, p.next) == 0)) { removeNode(p); p = end = p.prev; if (p == p.next) return null; again = true; } else { p = p.next; } } while (again || p != end); return end; } private static Node SplitPolygon(Node a, Node b) { var a2 = new Node(a.i, a.x, a.y); var b2 = new Node(b.i, b.x, b.y); var an = a.next; var bp = b.prev; a.next = b; b.prev = a; a2.next = an; an.prev = a2; b2.next = a2; a2.prev = b2; bp.next = b2; b2.prev = bp; return b2; } private static Node FindHoleBridge(Node hole, Node outerNode) { var p = outerNode; var hx = hole.x; var hy = hole.y; var qx = float.MinValue; Node m = null; // find a segment intersected by a ray from the hole's leftmost point to the left; // segment's endpoint with lesser x will be potential connection point do { if (p == null || p.next == null) Debug.Log("here"); if (hy <= p.y && hy >= p.next.y && p.next.y != p.y) { var x = p.x + (hy - p.y) * (p.next.x - p.x) / (p.next.y - p.y); if (x <= hx && x > qx) { qx = x; if (x == hx) { if (hy == p.y) return p; if (hy == p.next.y) return p.next; } m = p.x < p.next.x ? 
p : p.next; } } p = p.next; } while (p != outerNode); if (m == null) return null; if (hx == qx) return m.prev; // hole touches outer segment; pick lower endpoint // look for points inside the triangle of hole point, segment intersection and endpoint; // if there are no points found, we have a valid connection; // otherwise choose the point of the minimum angle with the ray as connection point var stop = m; var mx = m.x; var my = m.y; var tanMin = float.MaxValue; float tan = 0f; p = m.next; while (p != stop) { if (hx >= p.x && p.x >= mx && hx != p.x && pointInTriangle(hy < my ? hx : qx, hy, mx, my, hy < my ? qx : hx, hy, p.x, p.y)) { tan = Math.Abs(hy - p.y) / (hx - p.x); // tangential if ((tan < tanMin || (tan == tanMin && p.x > m.x)) && locallyInside(p, hole)) { m = p; tanMin = tan; } } p = p.next; } return m; } private static bool locallyInside(Node a, Node b) { return area(a.prev, a, a.next) < 0 ? area(a, b, a.next) >= 0 && area(a, a.prev, b) >= 0 : area(a, b, a.prev) < 0 || area(a, a.next, b) < 0; } private static float area(Node p, Node q, Node r) { return (q.y - p.y) * (r.x - q.x) - (q.x - p.x) * (r.y - q.y); } private static bool pointInTriangle(float ax, float ay, float bx, float by, float cx, float cy, float px, float py) { return (cx - px) * (ay - py) - (ax - px) * (cy - py) >= 0 && (ax - px) * (by - py) - (bx - px) * (ay - py) >= 0 && (bx - px) * (cy - py) - (cx - px) * (by - py) >= 0; } private static Node getLeftmost(Node start) { var p = start; var leftmost = start; do { if (p.x < leftmost.x) leftmost = p; p = p.next; } while (p != start); return leftmost; } // create a circular doubly linked list from polygon points in the specified winding order private static Node linkedList(List<float> data, int start, int end, int dim, bool clockwise) { var i = 0; Node last = null; if (clockwise == (signedArea(data, start, end, dim) > 0)) { for (i = start; i < end; i += dim) last = insertNode(i, data[i], data[i + 1], last); } else { for (i = end - dim; i >= 
start; i -= dim) last = insertNode(i, data[i], data[i + 1], last); } if (last != null && equals(last, last.next)) { removeNode(last); last = last.next; } return last; } private static void removeNode(Node p) { p.next.prev = p.prev; p.prev.next = p.next; if (p.prevZ != null) p.prevZ.nextZ = p.nextZ; if (p.nextZ != null) p.nextZ.prevZ = p.prevZ; } private static bool equals(Node p1, Node p2) { return p1.x == p2.x && p1.y == p2.y; } private static float signedArea(List<float> data, int start, int end, int dim) { var sum = 0f; var j = end - dim; for (var i = start; i < end; i += dim) { sum += (data[j] - data[i]) * (data[i + 1] + data[j + 1]); j = i; } return sum; } private static Node insertNode(int i, float x, float y, Node last) { var p = new Node(i, x, y); if (last == null) { p.prev = p; p.next = p; } else { p.next = last.next; p.prev = last; last.next.prev = p; last.next = p; } return p; } public static Data Flatten(List<List<float[]>> data) { var dim = data[0][0].Length; var result = new Data() { Dim = dim }; var holeIndex = 0; for (var i = 0; i < data.Count; i++) { for (var j = 0; j < data[i].Count; j++) { for (var d = 0; d < dim; d++) result.Vertices.Add(data[i][j][d]); } if (i > 0) { holeIndex += data[i - 1].Count; result.Holes.Add(holeIndex); } } return result; } public static Data Flatten(List<List<Vector3>> data) { var dim = 2; var result = new Data() { Dim = dim }; var holeIndex = 0; for (var i = 0; i < data.Count; i++) { for (var j = 0; j < data[i].Count; j++) { result.Vertices.Add(data[i][j][0]); result.Vertices.Add(data[i][j][2]); } if (i > 0) { holeIndex += data[i - 1].Count; result.Holes.Add(holeIndex); } } return result; } } public class Data { public List<float> Vertices; public List<int> Holes; public int Dim; public Data() { Vertices = new List<float>(); Holes = new List<int>(); Dim = 2; } } public class Node { /* Member Variables. 
*/ public int i; public float x; public float y; public int mZOrder; public Node prev; public Node next; public Node prevZ; public Node nextZ; public bool steiner; public Node(int ind, float pX, float pY) { /* Initialize Member Variables. */ this.i = ind; this.x = pX; this.y = pY; this.mZOrder = 0; this.prev = null; this.next = null; this.prevZ = null; this.nextZ = null; } protected void setPreviousNode(Node pNode) { this.prev = pNode; } protected Node getPreviousNode() { return this.prev; } protected void setNextNode(Node pNode) { this.next = pNode; } protected Node getNextNode() { return this.next; } protected void setZOrder(int pZOrder) { this.mZOrder = pZOrder; } protected int getZOrder() { return this.mZOrder; } protected void setPreviousZNode(Node pNode) { this.prevZ = pNode; } protected Node getPreviousZNode() { return this.prevZ; } protected void setNextZNode(Node pNode) { this.nextZ = pNode; } protected Node getNextZNode() { return this.nextZ; } } }
//-----------------------------------------------------------------------
// <copyright file="AreaDescriptionPicker.cs" company="Google">
//
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// </copyright>
//-----------------------------------------------------------------------
using System.Collections;
using System.Collections.Generic;
using System.IO;
using Tango;
using UnityEngine;
using UnityEngine.EventSystems;
using UnityEngine.UI;

/// <summary>
/// List controller of the scrolling list.
///
/// This list controller presents a toggle group of Tango space Area Descriptions. The list class also has an interface
/// to start the game and connect to the Tango Service.
/// </summary>
public class AreaDescriptionPicker : MonoBehaviour, ITangoLifecycle
{
    /// <summary>
    /// The prefab of a standard button in the scrolling list.
    /// </summary>
    public GameObject m_listElement;

    /// <summary>
    /// The container panel of the Tango space Area Description scrolling list.
    /// </summary>
    public RectTransform m_listContentParent;

    /// <summary>
    /// Toggle group for the Area Description list.
    ///
    /// You can only toggle one Area Description at a time. After we get the list of Area Descriptions from Tango,
    /// they are all added to this toggle group.
    /// </summary>
    public ToggleGroup m_toggleGroup;

    /// <summary>
    /// Enable learning mode toggle.
    ///
    /// Learning Mode allows the loaded Area Description to be extended with more knowledge about the area.
    /// </summary>
    public Toggle m_enableLearningToggle;

    /// <summary>
    /// The reference of the TangoDeltaPoseController object.
    ///
    /// TangoDeltaPoseController listens to pose updates and applies the correct pose to itself and its built-in camera.
    /// </summary>
    public TangoARPoseController m_poseController;

    /// <summary>
    /// Control panel game object.
    ///
    /// The panel will be enabled when the game starts.
    /// </summary>
    public GameObject m_gameControlPanel;

    /// <summary>
    /// The GUI controller.
    ///
    /// GUI controller will be enabled when the game starts.
    /// </summary>
    public AreaLearningInGameController m_guiController;

    /// <summary>
    /// A reference to TangoApplication instance.
    /// </summary>
    private TangoApplication m_tangoApplication;

    /// <summary>
    /// The UUID of the selected Area Description.
    /// </summary>
    private string m_curAreaDescriptionUUID;

    /// <summary>
    /// Start the game.
    ///
    /// This will start the service connection, and start pose estimation from Tango Service.
    /// </summary>
    /// <param name="isNewAreaDescription">If set to <c>true</c> game will start learning a new Area
    /// Description.</param>
    public void StartGame(bool isNewAreaDescription)
    {
        // The game has to be started with an Area Description.
        if (!isNewAreaDescription)
        {
            if (string.IsNullOrEmpty(m_curAreaDescriptionUUID))
            {
                AndroidHelper.ShowAndroidToastMessage("Please choose an Area Description.");
                return;
            }
        }
        else
        {
            // Learning a brand new area: no pre-selected UUID applies.
            m_curAreaDescriptionUUID = null;
        }

        // Dismiss Area Description list, footer and header UI panel.
        gameObject.SetActive(false);

        if (isNewAreaDescription)
        {
            // Completely new area description.
            m_guiController.m_curAreaDescription = null;
            m_tangoApplication.m_enableAreaLearning = true;
        }
        else
        {
            // Load up an existing Area Description; learning is optional and driven by the toggle.
            AreaDescription areaDescription = AreaDescription.ForUUID(m_curAreaDescriptionUUID);
            m_guiController.m_curAreaDescription = areaDescription;
            m_tangoApplication.m_enableAreaLearning = m_enableLearningToggle.isOn;
        }

        // Connect to the Tango Service with the chosen Area Description (null means start fresh).
        m_tangoApplication.Startup(m_guiController.m_curAreaDescription);

        // Enable the GUI controller to let the user tap and interact with the environment.
        m_poseController.gameObject.SetActive(true);
        m_guiController.enabled = true;
        m_gameControlPanel.SetActive(true);
    }

    /// <summary>
    /// Internal callback when a permissions event happens.
    /// </summary>
    /// <param name="permissionsGranted">If set to <c>true</c> permissions granted.</param>
    public void OnTangoPermissions(bool permissionsGranted)
    {
        if (permissionsGranted)
        {
            // Permissions are in place; the Area Description list can now be queried and shown.
            _PopulateList();
        }
        else
        {
            AndroidHelper.ShowAndroidToastMessage("Motion Tracking and Area Learning Permissions Needed");
            Application.Quit();
        }
    }

    /// <summary>
    /// This is called when successfully connected to the Tango service.
    /// </summary>
    public void OnTangoServiceConnected()
    {
    }

    /// <summary>
    /// This is called when disconnected from the Tango service.
    /// </summary>
    public void OnTangoServiceDisconnected()
    {
    }

    /// <summary>
    /// Unity Start function.
    ///
    /// This function is responsible for connecting callbacks, setting up TangoApplication and initializing the data list.
    /// </summary>
    public void Start()
    {
        m_tangoApplication = FindObjectOfType<TangoApplication>();

        if (m_tangoApplication != null)
        {
            m_tangoApplication.Register(this);
            if (AndroidHelper.IsTangoCorePresent())
            {
                // Permission result arrives asynchronously via OnTangoPermissions.
                m_tangoApplication.RequestPermissions();
            }
        }
        else
        {
            Debug.Log("No Tango Manager found in scene.");
        }
    }

    /// <summary>
    /// Unity Update function.
    ///
    /// Application will be closed when the back button is clicked.
    /// </summary>
    public void Update()
    {
        // On Android the hardware back button maps to KeyCode.Escape.
        if (Input.GetKey(KeyCode.Escape))
        {
            Application.Quit();
        }
    }

    /// <summary>
    /// Refresh the scrolling list's content for both lists.
    ///
    /// This function will query from the Tango API for the Tango space Area Descriptions. Also, when it populates
    /// the scrolling list content, it will connect the delegate for each button in the list. The delegate is
    /// responsible for the actual import/export through the Tango API.
    /// </summary>
    private void _PopulateList()
    {
        // Clear out any previously instantiated list rows before repopulating.
        foreach (Transform t in m_listContentParent.transform)
        {
            Destroy(t.gameObject);
        }

        // Update Tango space Area Description list.
        AreaDescription[] areaDescriptionList = AreaDescription.GetList();

        if (areaDescriptionList == null)
        {
            return;
        }

        foreach (AreaDescription areaDescription in areaDescriptionList)
        {
            GameObject newElement = Instantiate(m_listElement) as GameObject;
            AreaDescriptionListElement listElement = newElement.GetComponent<AreaDescriptionListElement>();
            listElement.m_toggle.group = m_toggleGroup;
            listElement.m_areaDescriptionName.text = areaDescription.GetMetadata().m_name;
            listElement.m_areaDescriptionUUID.text = areaDescription.m_uuid;

            // Ensure the lambda makes a copy of areaDescription (avoids capturing the loop variable).
            AreaDescription lambdaParam = areaDescription;
            listElement.m_toggle.onValueChanged.AddListener((value) => _OnToggleChanged(lambdaParam, value));
            newElement.transform.SetParent(m_listContentParent.transform, false);
        }
    }

    /// <summary>
    /// Callback function when toggle button is selected.
    /// </summary>
    /// <param name="item">Caller item object.</param>
    /// <param name="value">Selected value of the toggle button.</param>
    private void _OnToggleChanged(AreaDescription item, bool value)
    {
        if (value)
        {
            m_curAreaDescriptionUUID = item.m_uuid;
        }
    }
}
// Copyright 2017, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Generated by the protocol buffer compiler. DO NOT EDIT! 
// source: Greeter/protos/greeter.proto
// NOTE(review): machine-generated by the protocol buffer compiler — do NOT hand-edit
// this code; change Greeter/protos/greeter.proto and regenerate instead.
#pragma warning disable 1591, 0612, 3021
#region Designer generated code

using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;

namespace Com.Example.Grpc {

  /// <summary>Holder for reflection information generated from Greeter/protos/greeter.proto</summary>
  public static partial class GreeterReflection {

    #region Descriptor
    /// <summary>File descriptor for Greeter/protos/greeter.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    static GreeterReflection() {
      // Base64-encoded serialized FileDescriptorProto for greeter.proto; decoded once
      // at type-initialization time to build the runtime descriptor.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "ChxHcmVldGVyL3Byb3Rvcy9ncmVldGVyLnByb3RvEhBjb20uZXhhbXBsZS5n",
            "cnBjIuQBCgxIZWxsb1JlcXVlc3QSDAoEbmFtZRgBIAEoCRILCgNhZ2UYAiAB",
            "KAUSDwoHaG9iYmllcxgDIAMoCRJECgtiYWdPZlRyaWNrcxgEIAMoCzIvLmNv",
            "bS5leGFtcGxlLmdycGMuSGVsbG9SZXF1ZXN0LkJhZ09mVHJpY2tzRW50cnkS",
            "LgoJc2VudGltZW50GAUgASgOMhsuY29tLmV4YW1wbGUuZ3JwYy5TZW50aW1l",
            "bnQaMgoQQmFnT2ZUcmlja3NFbnRyeRILCgNrZXkYASABKAkSDQoFdmFsdWUY",
            "AiABKAk6AjgBIiEKDUhlbGxvUmVzcG9uc2USEAoIZ3JlZXRpbmcYASABKAkq",
            "LQoJU2VudGltZW50EgkKBUhBUFBZEAASCgoGU0xFRVBZEAESCQoFQU5HUlkQ",
            "AjJeCg9HcmVldGluZ1NlcnZpY2USSwoIZ3JlZXRpbmcSHi5jb20uZXhhbXBs",
            "ZS5ncnBjLkhlbGxvUmVxdWVzdBofLmNvbS5leGFtcGxlLmdycGMuSGVsbG9S",
            "ZXNwb25zZUI2Chtpby5ncnBjLmV4YW1wbGVzLmhlbGxvd29ybGRCD0hlbGxv",
            "V29ybGRQcm90b1ABogIDSExXYgZwcm90bzM="));
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { },
          new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Com.Example.Grpc.Sentiment), }, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::Com.Example.Grpc.HelloRequest), global::Com.Example.Grpc.HelloRequest.Parser, new[]{ "Name", "Age", "Hobbies", "BagOfTricks", "Sentiment" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, }),
            new pbr::GeneratedClrTypeInfo(typeof(global::Com.Example.Grpc.HelloResponse), global::Com.Example.Grpc.HelloResponse.Parser, new[]{ "Greeting" }, null, null, null)
          }));
    }
    #endregion

  }
  #region Enums
  public enum Sentiment {
    [pbr::OriginalName("HAPPY")] Happy = 0,
    [pbr::OriginalName("SLEEPY")] Sleepy = 1,
    [pbr::OriginalName("ANGRY")] Angry = 2,
  }

  #endregion

  #region Messages
  // Message for greeter.proto's HelloRequest.
  // Field tags: name=1, age=2, hobbies=3 (repeated), bagOfTricks=4 (map), sentiment=5.
  public sealed partial class HelloRequest : pb::IMessage<HelloRequest> {
    private static readonly pb::MessageParser<HelloRequest> _parser = new pb::MessageParser<HelloRequest>(() => new HelloRequest());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<HelloRequest> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Com.Example.Grpc.GreeterReflection.Descriptor.MessageTypes[0]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public HelloRequest() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public HelloRequest(HelloRequest other) : this() {
      name_ = other.name_;
      age_ = other.age_;
      hobbies_ = other.hobbies_.Clone();
      bagOfTricks_ = other.bagOfTricks_.Clone();
      sentiment_ = other.sentiment_;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public HelloRequest Clone() {
      return new HelloRequest(this);
    }

    /// <summary>Field number for the "name" field.</summary>
    public const int NameFieldNumber = 1;
    private string name_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Name {
      get { return name_; }
      set {
        name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "age" field.</summary>
    public const int AgeFieldNumber = 2;
    private int age_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int Age {
      get { return age_; }
      set {
        age_ = value;
      }
    }

    /// <summary>Field number for the "hobbies" field.</summary>
    public const int HobbiesFieldNumber = 3;
    private static readonly pb::FieldCodec<string> _repeated_hobbies_codec
        = pb::FieldCodec.ForString(26);
    private readonly pbc::RepeatedField<string> hobbies_ = new pbc::RepeatedField<string>();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::RepeatedField<string> Hobbies {
      get { return hobbies_; }
    }

    /// <summary>Field number for the "bagOfTricks" field.</summary>
    public const int BagOfTricksFieldNumber = 4;
    private static readonly pbc::MapField<string, string>.Codec _map_bagOfTricks_codec
        = new pbc::MapField<string, string>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForString(18), 34);
    private readonly pbc::MapField<string, string> bagOfTricks_ = new pbc::MapField<string, string>();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::MapField<string, string> BagOfTricks {
      get { return bagOfTricks_; }
    }

    /// <summary>Field number for the "sentiment" field.</summary>
    public const int SentimentFieldNumber = 5;
    private global::Com.Example.Grpc.Sentiment sentiment_ = 0;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Com.Example.Grpc.Sentiment Sentiment {
      get { return sentiment_; }
      set {
        sentiment_ = value;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as HelloRequest);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(HelloRequest other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Name != other.Name) return false;
      if (Age != other.Age) return false;
      if(!hobbies_.Equals(other.hobbies_)) return false;
      if (!BagOfTricks.Equals(other.BagOfTricks)) return false;
      if (Sentiment != other.Sentiment) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Name.Length != 0) hash ^= Name.GetHashCode();
      if (Age != 0) hash ^= Age.GetHashCode();
      hash ^= hobbies_.GetHashCode();
      hash ^= BagOfTricks.GetHashCode();
      if (Sentiment != 0) hash ^= Sentiment.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      // Raw tags encode (field number << 3) | wire type: 10 = name, 16 = age, 40 = sentiment.
      if (Name.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(Name);
      }
      if (Age != 0) {
        output.WriteRawTag(16);
        output.WriteInt32(Age);
      }
      hobbies_.WriteTo(output, _repeated_hobbies_codec);
      bagOfTricks_.WriteTo(output, _map_bagOfTricks_codec);
      if (Sentiment != 0) {
        output.WriteRawTag(40);
        output.WriteEnum((int) Sentiment);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Name.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
      }
      if (Age != 0) {
        size += 1 + pb::CodedOutputStream.ComputeInt32Size(Age);
      }
      size += hobbies_.CalculateSize(_repeated_hobbies_codec);
      size += bagOfTricks_.CalculateSize(_map_bagOfTricks_codec);
      if (Sentiment != 0) {
        size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Sentiment);
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(HelloRequest other) {
      if (other == null) {
        return;
      }
      if (other.Name.Length != 0) {
        Name = other.Name;
      }
      if (other.Age != 0) {
        Age = other.Age;
      }
      hobbies_.Add(other.hobbies_);
      bagOfTricks_.Add(other.bagOfTricks_);
      if (other.Sentiment != 0) {
        Sentiment = other.Sentiment;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            Name = input.ReadString();
            break;
          }
          case 16: {
            Age = input.ReadInt32();
            break;
          }
          case 26: {
            hobbies_.AddEntriesFrom(input, _repeated_hobbies_codec);
            break;
          }
          case 34: {
            bagOfTricks_.AddEntriesFrom(input, _map_bagOfTricks_codec);
            break;
          }
          case 40: {
            sentiment_ = (global::Com.Example.Grpc.Sentiment) input.ReadEnum();
            break;
          }
        }
      }
    }

  }

  // Message for greeter.proto's HelloResponse. Single field: greeting=1.
  public sealed partial class HelloResponse : pb::IMessage<HelloResponse> {
    private static readonly pb::MessageParser<HelloResponse> _parser = new pb::MessageParser<HelloResponse>(() => new HelloResponse());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<HelloResponse> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Com.Example.Grpc.GreeterReflection.Descriptor.MessageTypes[1]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public HelloResponse() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public HelloResponse(HelloResponse other) : this() {
      greeting_ = other.greeting_;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public HelloResponse Clone() {
      return new HelloResponse(this);
    }

    /// <summary>Field number for the "greeting" field.</summary>
    public const int GreetingFieldNumber = 1;
    private string greeting_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Greeting {
      get { return greeting_; }
      set {
        greeting_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as HelloResponse);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(HelloResponse other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Greeting != other.Greeting) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Greeting.Length != 0) hash ^= Greeting.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (Greeting.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(Greeting);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Greeting.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Greeting);
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(HelloResponse other) {
      if (other == null) {
        return;
      }
      if (other.Greeting.Length != 0) {
        Greeting = other.Greeting;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            Greeting = input.ReadString();
            break;
          }
        }
      }
    }

  }

  #endregion

}

#endregion Designer generated code
using GenFx.Validation;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Linq;
using System.Runtime.Serialization;
using System.Threading.Tasks;

namespace GenFx
{
    /// <summary>
    /// Represents a collection of <see cref="GeneticEntity"/> objects which interact locally with each other. A population is
    /// the unit from which the <see cref="SelectionOperator"/> selects its genetic entities.
    /// </summary>
    /// <remarks>
    /// Populations can be isolated or interactive with one another through migration depending on
    /// which <see cref="GeneticAlgorithm"/> is used.
    /// </remarks>
    [DataContract]
    public abstract class Population : GeneticComponentWithAlgorithm
    {
        private const int DefaultPopulationSize = 1;

        [DataMember]
        private readonly ObservableCollection<GeneticEntity> geneticEntities = new ObservableCollection<GeneticEntity>();

        [DataMember]
        private int index;

        [DataMember]
        private double? rawMean;

        [DataMember]
        private double? rawStandardDeviation;

        [DataMember]
        private double? rawMax;

        [DataMember]
        private double? rawMin;

        [DataMember]
        private int minimumPopulationSize = DefaultPopulationSize;

        /// <summary>
        /// Gets or sets the minimum number of <see cref="GeneticEntity"/> objects that are contained by a population.
        /// </summary>
        /// <remarks>
        /// This value is defaulted to 1 and must be greater or equal to 1 to be valid for executing
        /// a genetic algorithm.
        /// </remarks>
        /// <exception cref="ValidationException">Value is invalid.</exception>
        [ConfigurationProperty]
        [IntegerValidator(MinValue = 1)]
        public int MinimumPopulationSize
        {
            get { return this.minimumPopulationSize; }
            set { this.SetProperty(ref this.minimumPopulationSize, value); }
        }

        /// <summary>
        /// Gets the minimum <see cref="GeneticEntity.RawFitnessValue"/> in the entire population of genetic entities.
        /// </summary>
        /// <value>
        /// The minimum <see cref="GeneticEntity.RawFitnessValue"/> in the entire population of genetic entities.
        /// </value>
        /// <remarks>
        /// This value is not set if the algorithm is not configured to use metrics or a fitness scaling strategy.
        /// </remarks>
        public double? RawMin
        {
            get { return this.rawMin; }
        }

        /// <summary>
        /// Gets the maximum <see cref="GeneticEntity.RawFitnessValue"/> in the entire population of genetic entities.
        /// </summary>
        /// <value>
        /// The maximum <see cref="GeneticEntity.RawFitnessValue"/> in the entire population of genetic entities.
        /// </value>
        /// <remarks>
        /// This value is not set if the algorithm is not configured to use metrics or a fitness scaling strategy.
        /// </remarks>
        public double? RawMax
        {
            get { return this.rawMax; }
        }

        /// <summary>
        /// Gets the standard deviation of all the <see cref="GeneticEntity.RawFitnessValue"/> values in the entire population of genetic entities.
        /// </summary>
        /// <value>
        /// The standard deviation of all the <see cref="GeneticEntity.RawFitnessValue"/> values in the entire population of genetic entities.
        /// </value>
        /// <remarks>
        /// This value is not set if the algorithm is not configured to use metrics or a fitness scaling strategy.
        /// </remarks>
        public double? RawStandardDeviation
        {
            get { return this.rawStandardDeviation; }
        }

        /// <summary>
        /// Gets the mean of all the <see cref="GeneticEntity.RawFitnessValue"/> values in the entire population of genetic entities.
        /// </summary>
        /// <value>
        /// The mean of all the <see cref="GeneticEntity.RawFitnessValue"/> values in the entire population of genetic entities.
        /// </value>
        /// <remarks>
        /// This value is not set if the algorithm is not configured to use metrics or a fitness scaling strategy.
        /// </remarks>
        public double? RawMean
        {
            get { return this.rawMean; }
        }

        /// <summary>
        /// Gets the collection of <see cref="GeneticEntity"/> objects contained by the population.
        /// </summary>
        [Browsable(false)]
        public ObservableCollection<GeneticEntity> Entities
        {
            get { return this.geneticEntities; }
        }

        /// <summary>
        /// Gets or sets the index of this population in the <see cref="GeneticEnvironment"/>.
        /// </summary>
        public int Index
        {
            get { return this.index; }
            set { this.index = value; }
        }

        /// <summary>
        /// Gets the size of the population.
        /// </summary>
        public int Size
        {
            get { return this.Entities.Count; }
        }

        /// <summary>
        /// Evaluates the <see cref="GeneticEntity.RawFitnessValue"/> of all the <see cref="GeneticEntity"/> objects
        /// within the population followed by evaluation of the <see cref="GeneticEntity.ScaledFitnessValue"/>
        /// using the <see cref="FitnessScalingStrategy"/>.
        /// </summary>
        public virtual async Task EvaluateFitnessAsync()
        {
            this.AssertIsInitialized();

            List<Task> fitnessEvalTasks = new List<Task>();
            foreach (GeneticEntity entity in this.geneticEntities)
            {
                fitnessEvalTasks.Add(entity.EvaluateFitnessAsync());
            }

            // Wait for all entities to evaluate their fitness values
            await Task.WhenAll(fitnessEvalTasks);

            // There's no need to perform these calculations if there aren't any metrics or a fitness scaling strategy.
            if (this.Algorithm!.Metrics.Any() || this.Algorithm.FitnessScalingStrategy != null)
            {
                int entityCount = this.geneticEntities.Count;

                // FIX: guard against an empty population. Previously rawSum / Count evaluated
                // 0.0 / 0, turning RawMean into NaN and passing an empty list to
                // GetStandardDeviation. With no entities the raw metrics are simply left unset.
                if (entityCount > 0)
                {
                    double rawSum = 0;
                    for (int i = 0; i < entityCount; i++)
                    {
                        // Calculate the metrics based on raw fitness value
                        double rawValue = this.geneticEntities[i].RawFitnessValue;
                        rawSum += rawValue;

                        // i == 0 seeds the min/max so values from a previous generation don't leak in.
                        if (i == 0 || rawValue > this.rawMax)
                        {
                            this.rawMax = rawValue;
                        }

                        if (i == 0 || rawValue < this.rawMin)
                        {
                            this.rawMin = rawValue;
                        }
                    }

                    // Calculate the metrics based on raw fitness value
                    this.rawMean = rawSum / entityCount;
                    this.rawStandardDeviation = MathHelper.GetStandardDeviation(this.geneticEntities, this.rawMean.Value, FitnessType.Raw);
                }
            }

            if (this.Algorithm.FitnessScalingStrategy != null)
            {
                // Scale the fitness values of the population.
                this.Algorithm.FitnessScalingStrategy.Scale(this);
            }
        }

        /// <summary>
        /// Creates the collection of <see cref="GeneticEntity"/> objects contained by this population.
        /// </summary>
        public Task InitializeAsync()
        {
            return this.InitializeCoreAsync();
        }

        /// <summary>
        /// Creates the collection of <see cref="GeneticEntity"/> objects contained by this population.
        /// </summary>
        /// <remarks>
        /// <para>The default implementation of this method creates X <see cref="GeneticEntity"/> objects
        /// where X is equal to <see cref="MinimumPopulationSize"/>.</para>
        /// <para><b>Notes to implementers:</b> This method can be overriden in a derived class
        /// to customize how a population is filled with <see cref="GeneticEntity"/> objects
        /// or how those <see cref="GeneticEntity"/> objects are created.</para>
        /// </remarks>
        protected virtual Task InitializeCoreAsync()
        {
            this.AssertIsInitialized();

            for (int i = 0; i < this.Algorithm!.PopulationSeed!.MinimumPopulationSize; i++)
            {
                GeneticEntity entity = (GeneticEntity)this.Algorithm.GeneticEntitySeed!.CreateNewAndInitialize();
                this.geneticEntities.Add(entity);
            }

            // Synchronous completion; Task.CompletedTask is the idiomatic non-generic result.
            return Task.CompletedTask;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Web.Security;
using NUnit.Framework;
using Appleseed.Framework.Providers.AppleseedMembershipProvider;
using Appleseed.Framework.Providers.AppleseedRoleProvider;

namespace Appleseed.Tests
{
    /// <summary>
    /// Integration tests for the Appleseed role provider. The fixture tears down and
    /// recreates the test database schema once before any test runs, so tests share
    /// mutable database state (e.g. several depend on roles created by
    /// <see cref="CreateRoleTest1"/>).
    /// </summary>
    [TestFixture]
    public class RoleProviderTest
    {
        [TestFixtureSetUp]
        public void FixtureSetUp()
        {
            // Set up initial database environment for testing purposes
            TestHelper.TearDownDB();
            TestHelper.RecreateDBSchema();
        }

        [Test]
        public void Foo()
        {
            Console.WriteLine("This should pass. It only writes to the Console.");
        }

        #region Config properties

        [Test]
        public void ApplicationNameTest()
        {
            try
            {
                string appName = Membership.ApplicationName;
                Assert.AreEqual(appName, "Appleseed");
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error retrieving ApplicationName property" + ex.Message, ex);
            }
        }

        #endregion

        [Test]
        public void GetAllRolesTest1()
        {
            try
            {
                string[] roles = Roles.GetAllRoles();
                Assert.AreEqual(4, roles.Length);
                Assert.AreEqual("All Users", roles[0]);
                Assert.AreEqual("Authenticated Users", roles[1]);
                Assert.AreEqual("Unauthenticated Users", roles[2]);
                Assert.AreEqual("Admins", roles[3]);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in GetAllRolesTest1" + ex.Message, ex);
            }
        }

        [Test]
        public void GetAllRolesTest2()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                IList<AppleseedRole> roles = provider.GetAllRoles(Roles.ApplicationName);
                Assert.AreEqual(4, roles.Count);
                Assert.AreEqual("All Users", roles[0].Name);
                Assert.AreEqual("Authenticated Users", roles[1].Name);
                Assert.AreEqual("Unauthenticated Users", roles[2].Name);
                Assert.AreEqual("Admins", roles[3].Name);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in GetAllRolesTest2" + ex.Message, ex);
            }
        }

        [Test]
        public void GetRolesForUserTest1()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                Guid userId = new Guid("BE7DC028-7238-45D3-AF35-DD3FE4AEFB7E"); //"admin@Appleseedportal.net"
                IList<AppleseedRole> roles = provider.GetRolesForUser(Roles.ApplicationName, userId);
                Assert.AreEqual(1, roles.Count);
                Assert.AreEqual("Admins", roles[0].Name);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in GetRolesForUserTest1" + ex.Message, ex);
            }
        }

        [Test]
        public void GetRolesForUserTest2()
        {
            // An empty Guid identifies no user, so the provider is expected to throw.
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                IList<AppleseedRole> roles = provider.GetRolesForUser(Roles.ApplicationName, new Guid());
                Assert.Fail();
            }
            // BUGFIX: this catch was commented out, so the expected provider exception fell
            // through to the generic Exception handler below and the test could never pass.
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in GetRolesForUserTest2" + ex.Message, ex);
            }
        }

        [Test]
        public void GetRolesForUserTest3()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                Guid userId = new Guid("34ADB714-92B0-47ff-B5AF-5DB2E0D124A9"); //"user@user.com"
                IList<AppleseedRole> roles = provider.GetRolesForUser(Roles.ApplicationName, userId);
                Assert.AreEqual(roles.Count, 0);
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in GetRolesForUserTest3" + ex.Message, ex);
            }
        }

        [Test]
        public void CreateRoleTest1()
        {
            // Creates the roles that several later tests rely on.
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                provider.CreateRole(Roles.ApplicationName, "editors");
                provider.CreateRole(Roles.ApplicationName, "clerks");
                provider.CreateRole(Roles.ApplicationName, "salesman");
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in CreateRoleTest1" + ex.Message, ex);
            }
        }

        [Test]
        public void CreateRoleTest2()
        {
            // Creating a duplicate role must throw.
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                provider.CreateRole(Roles.ApplicationName, "Admins");
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in CreateRoleTest2" + ex.Message, ex);
            }
        }

        [Test]
        public void CreateRoleTest3()
        {
            // A role name containing a comma is invalid and must throw.
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                provider.CreateRole(Roles.ApplicationName, "Admins,editors");
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in CreateRoleTest3" + ex.Message, ex);
            }
        }

        [Test]
        public void IsUserInRoleTest1()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                bool isInRole = provider.IsUserInRole("admin@Appleseedportal.net", "Admins");
                Assert.IsTrue(isInRole);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in IsUserInRoleTest1" + ex.Message, ex);
            }
        }

        [Test]
        public void IsUserInRoleTest2()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                bool isInRole = provider.IsUserInRole("invalid@user.com", "Admins");
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in IsUserInRoleTest2" + ex.Message, ex);
            }
        }

        [Test]
        public void IsUserInRoleTest3()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                bool isInRole = provider.IsUserInRole("admin@Appleseedportal.net", "invalidRole");
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in IsUserInRoleTest3" + ex.Message, ex);
            }
        }

        [Test]
        public void IsUserInRoleTest4()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                bool isInRole = provider.IsUserInRole("admin@Appleseedportal.net", "editors");
                Assert.IsFalse(isInRole);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in IsUserInRoleTest4" + ex.Message, ex);
            }
        }

        [Test]
        public void RoleExistsTest1()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                bool isInRole = provider.RoleExists("editors");
                Assert.IsTrue(isInRole);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in RoleExistsTest1" + ex.Message, ex);
            }
        }

        [Test]
        public void RoleExistsTestInvalidRoleName()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                bool isInRole = provider.RoleExists("invalidRole");
                Assert.IsFalse(isInRole);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                // Message updated: it previously referenced the old name "RoleExistsTest2".
                Assert.Fail("Error in RoleExistsTestInvalidRoleName" + ex.Message, ex);
            }
        }

        [Test]
        public void GetUsersInRoleTest1()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                string[] names = provider.GetUsersInRole("Admins");
                Assert.AreEqual(1, names.Length);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in GetUsersInRoleTest1" + ex.Message, ex);
            }
        }

        [Test]
        public void GetUsersInRoleTest2()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                string[] names = provider.GetUsersInRole("editors");
                Assert.AreEqual(0, names.Length);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in GetUsersInRoleTest2" + ex.Message, ex);
            }
        }

        [Test]
        public void GetUsersInRoleTest3()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                string[] names = provider.GetUsersInRole("invalidRole");
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in GetUsersInRoleTest3" + ex.Message, ex);
            }
        }

        [Test]
        public void AddUsersToRolesTest1()
        {
            // Unknown user name must raise a membership exception.
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                string[] users = new string[1];
                users[0] = "invalid@user.com";
                string[] roles = new string[1];
                roles[0] = "Admins";
                provider.AddUsersToRoles(users, roles);
                Assert.Fail();
            }
            catch (AppleseedMembershipProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in AddUsersToRolesTest1" + ex.Message, ex);
            }
        }

        [Test]
        public void AddUsersToRolesTest2()
        {
            // Unknown role name must raise a role-provider exception.
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                string[] users = new string[1];
                users[0] = "admin@Appleseedportal.net";
                string[] roles = new string[1];
                roles[0] = "invalidRole";
                provider.AddUsersToRoles(users, roles);
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in AddUsersToRolesTest2" + ex.Message, ex);
            }
        }

        [Test]
        public void AddUsersToRolesTest3()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                string[] users = new string[1];
                users[0] = "admin@Appleseedportal.net";
                string[] roles = new string[1];
                roles[0] = "editors";
                provider.AddUsersToRoles(users, roles);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in AddUsersToRolesTest3" + ex.Message, ex);
            }
        }

        [Test]
        public void AddUsersToRolesTest4()
        {
            // Unknown user id must raise a membership exception.
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                Guid[] users = new Guid[1];
                users[0] = Guid.NewGuid();
                Guid[] roles = new Guid[1];
                roles[0] = new Guid("F6A4ADDA-8450-4F9A-BE86-D0719B239A8D"); // Admins
                provider.AddUsersToRoles("Appleseed", users, roles);
                Assert.Fail();
            }
            catch (AppleseedMembershipProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in AddUsersToRolesTest4" + ex.Message, ex);
            }
        }

        [Test]
        public void AddUsersToRolesTest5()
        {
            // Unknown role id must raise a role-provider exception.
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                Guid[] users = new Guid[1];
                users[0] = new Guid("BE7DC028-7238-45D3-AF35-DD3FE4AEFB7E"); //"admin@Appleseedportal.net";
                Guid[] roles = new Guid[1];
                roles[0] = Guid.NewGuid();
                provider.AddUsersToRoles("Appleseed", users, roles);
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in AddUsersToRolesTest5" + ex.Message, ex);
            }
        }

        [Test]
        public void AddUsersToRolesTest6()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                AppleseedUser user = (AppleseedUser)Membership.GetUser("admin@Appleseedportal.net");
                Guid[] users = new Guid[1];
                users[0] = user.ProviderUserKey;
                AppleseedRole role = provider.GetRoleByName("Appleseed", "clerks");
                Guid[] roles = new Guid[1];
                roles[0] = role.Id;
                provider.AddUsersToRoles("Appleseed", users, roles);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in AddUsersToRolesTest6" + ex.Message, ex);
            }
        }

        [Test]
        public void RemoveUsersFromRolesTest1()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                string[] users = new string[1];
                users[0] = "invalid@user.com";
                string[] roles = new string[1];
                roles[0] = "Admins";
                provider.RemoveUsersFromRoles(users, roles);
                Assert.Fail();
            }
            catch (AppleseedMembershipProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in RemoveUsersFromRolesTest1" + ex.Message, ex);
            }
        }

        [Test]
        public void RemoveUsersFromRolesTest2()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                string[] users = new string[1];
                users[0] = "admin@Appleseedportal.net";
                string[] roles = new string[1];
                roles[0] = "invalidRole";
                provider.RemoveUsersFromRoles(users, roles);
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in RemoveUsersFromRolesTest2" + ex.Message, ex);
            }
        }

        [Test]
        public void RemoveUsersFromRolesTest3()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                string[] users = new string[1];
                users[0] = "admin@Appleseedportal.net";
                string[] roles = new string[1];
                roles[0] = "editors";
                provider.RemoveUsersFromRoles(users, roles); // admin is in editors role
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in RemoveUsersFromRolesTest3" + ex.Message, ex);
            }
        }

        [Test]
        [Ignore("Temporarily until it will be fixed")]
        public void RemoveUsersFromRolesTest4()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                Guid[] users = new Guid[1];
                users[0] = Guid.NewGuid();
                Guid[] roles = new Guid[1];
                roles[0] = new Guid("F6A4ADDA-8450-4F9A-BE86-D0719B239A8D"); // Admins
                provider.RemoveUsersFromRoles("Appleseed", users, roles);
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in RemoveUsersFromRolesTest4" + ex.Message, ex);
            }
        }

        [Test]
        public void RemoveUsersFromRolesTest5()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                Guid[] users = new Guid[1];
                users[0] = new Guid("BE7DC028-7238-45D3-AF35-DD3FE4AEFB7E"); //"admin@Appleseedportal.net";
                Guid[] roles = new Guid[1];
                roles[0] = Guid.NewGuid();
                provider.RemoveUsersFromRoles("Appleseed", users, roles);
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in RemoveUsersFromRolesTest5" + ex.Message, ex);
            }
        }

        [Test]
        [Ignore("Temporarily until it will be fixed")]
        public void RemoveUsersFromRolesTest6()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                Guid[] users = new Guid[1];
                users[0] = new Guid("BE7DC028-7238-45D3-AF35-DD3FE4AEFB7E"); //"admin@Appleseedportal.net";
                AppleseedRole editors = provider.GetRoleByName("Appleseed", "salesman");
                Guid[] roles = new Guid[1];
                roles[0] = editors.Id;
                provider.RemoveUsersFromRoles("Appleseed", users, roles);
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in RemoveUsersFromRolesTest6" + ex.Message, ex);
            }
        }

        [Test]
        [Ignore("Temporarily until it will be fixed")]
        public void RemoveUsersFromRolesTest7()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                Guid userId = new Guid("34ADB714-92B0-47ff-B5AF-5DB2E0D124A9"); // user@user.com;
                Guid[] users = new Guid[] { userId };
                AppleseedRole editors = provider.GetRoleByName("Appleseed", "editors");
                Guid[] roles = new Guid[1];
                roles[0] = editors.Id;
                provider.AddUsersToRoles("Appleseed", users, roles);
                Assert.IsTrue(provider.IsUserInRole("Appleseed", userId, editors.Id));
                provider.RemoveUsersFromRoles("Appleseed", users, roles);
                Assert.IsFalse(provider.IsUserInRole("Appleseed", userId, editors.Id));
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in RemoveUsersFromRolesTest7" + ex.Message, ex);
            }
        }

        [Test]
        [Ignore("Temporarily until it will be fixed")]
        public void RemoveUsersFromRolesTest8()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                string[] users = new string[] { "user@user.com" };
                string[] roles = new string[] { "editors" };
                provider.AddUsersToRoles(users, roles);
                Assert.IsTrue(provider.IsUserInRole("user@user.com", "editors"));
                provider.RemoveUsersFromRoles(users, roles);
                Assert.IsFalse(provider.IsUserInRole("user@user.com", "editors"));
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in RemoveUsersFromRolesTest8" + ex.Message, ex);
            }
        }

        [Test]
        public void DeleteRoleTest1()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                provider.DeleteRole("Admins", true);
                Assert.Fail(); // Admins has users
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in DeleteRoleTest1" + ex.Message, ex);
            }
        }

        [Test]
        public void DeleteRoleTest2()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                provider.CreateRole("tempRole1");
                provider.DeleteRole("tempRole1", true);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in DeleteRoleTest2" + ex.Message, ex);
            }
        }

        [Test]
        public void DeleteRoleTest3()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                provider.CreateRole("tempRole2");
                provider.AddUsersToRoles(new string[] { "user@user.com" }, new string[] { "tempRole2" });
                provider.DeleteRole("tempRole2", false);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in DeleteRoleTest3" + ex.Message, ex);
            }
        }

        [Test]
        public void DeleteRoleTest4()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                AppleseedRole editors = provider.GetRoleByName("Appleseed", "editors");
                provider.DeleteRole("invalidApp", editors.Id, true);
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in DeleteRoleTest4" + ex.Message, ex);
            }
        }

        [Test]
        public void DeleteRoleTest5()
        {
            try
            {
                AppleseedRoleProvider provider = Roles.Provider as AppleseedRoleProvider;
                provider.DeleteRole("invalidRole", true);
                Assert.Fail();
            }
            catch (AppleseedRoleProviderException) { }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                Assert.Fail("Error in DeleteRoleTest5" + ex.Message, ex);
            }
        }

        /*
        public abstract string[] FindUsersInRole( string portalAlias, string roleName, string usernameToMatch );

        rename role
        */
    }
}
/* Copyright (c) 2012 Ant Micro <www.antmicro.com> Authors: * Konrad Kruczynski (kkruczynski@antmicro.com) * Piotr Zierhoffer (pzierhoffer@antmicro.com) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/
using Antmicro.Migrant.Hooks;
using NUnit.Framework;
using System.IO;
using System;
using System.Threading;

namespace Antmicro.Migrant.Tests
{
    /// <summary>
    /// Tests for Migrant's serialization/deserialization hook attributes, run for every
    /// combination of generated/reflection-based serializer and deserializer.
    /// </summary>
    [TestFixture(false, false)]
    [TestFixture(true, false)]
    [TestFixture(false, true)]
    [TestFixture(true, true)]
    public class HooksTests : BaseTestWithSettings
    {
        public HooksTests(bool useGeneratedSerializer, bool useGeneratedDeserializer)
            : base(useGeneratedSerializer, useGeneratedDeserializer, false, false, false)
        {
        }

        [Test]
        public void ShouldInvokePreSerialization()
        {
            var mock = new PreSerializationMock();
            var copy = SerializerClone(mock);
            Assert.IsTrue(mock.Invoked);
            Assert.IsTrue(copy.Invoked);
        }

        [Test]
        public void ShouldInvokeDerivedPreSerialization()
        {
            var mock = new PreSerializationMockDerived();
            var copy = SerializerClone(mock);
            Assert.IsTrue(mock.Invoked);
            Assert.IsTrue(mock.DerivedInvoked);
            Assert.IsTrue(copy.Invoked);
            Assert.IsTrue(copy.DerivedInvoked);
        }

        [Test]
        public void ShouldInvokePostSerialization()
        {
            // The flag is set after serialization, so only the original sees it.
            var mock = new ImmediatePostSerializationMock();
            var copy = SerializerClone(mock);
            Assert.IsTrue(mock.Invoked);
            Assert.IsFalse(copy.Invoked);
        }

        [Test]
        public void ShouldInvokeDerivedPostSerialization()
        {
            var mock = new ImmediatePostSerializationMockDerived();
            var copy = SerializerClone(mock);
            Assert.IsTrue(mock.Invoked);
            Assert.IsTrue(mock.DerivedInvoked);
            Assert.IsFalse(copy.Invoked);
            Assert.IsFalse(copy.DerivedInvoked);
        }

        [Test]
        public void ShouldInvokeStaticPostSerialization()
        {
            var mock = new StaticImmediatePostSerializationMock();
            SerializerClone(mock);
            Assert.IsTrue(StaticImmediatePostSerializationMock.Invoked);
        }

        [Test]
        public void ShouldInvokePostDeserialization()
        {
            var mock = new PostDeserializationMock();
            var copy = SerializerClone(mock);
            Assert.IsFalse(mock.Invoked);
            Assert.IsTrue(copy.Invoked);
        }

        [Test]
        public void ShouldInvokeDerivedPostDeserialization()
        {
            var mock = new PostDeserializationMockDerived();
            var copy = SerializerClone(mock);
            Assert.IsFalse(mock.Invoked);
            Assert.IsFalse(mock.DerivedInvoked);
            Assert.IsTrue(copy.Invoked);
            Assert.IsTrue(copy.DerivedInvoked);
        }

        [Test]
        public void ShouldInvokeGlobalHooks()
        {
            var memoryStream = new MemoryStream();
            var serializer = new Serializer(GetSettings());
            var preSerializationCounter = 0;
            var postSerializationCounter = 0;
            var postDeserializationCounter = 0;
            serializer.OnPreSerialization += x => preSerializationCounter++;
            serializer.OnPostSerialization += x => postSerializationCounter++;
            serializer.OnPostDeserialization += x => postDeserializationCounter++;
            // Three objects are serialized: the array itself plus its two strings.
            serializer.Serialize(new [] { "One", "Two" }, memoryStream);
            Assert.AreEqual(3, preSerializationCounter);
            Assert.AreEqual(3, postSerializationCounter);
            Assert.AreEqual(0, postDeserializationCounter);
            memoryStream.Seek(0, SeekOrigin.Begin);
            serializer.Deserialize<string[]>(memoryStream);
            Assert.AreEqual(3, postDeserializationCounter);
        }

        [Test]
        public void ShouldHaveDeserializedReferencedObjectsWhenHookIsInvoked()
        {
            // The mock throws from its late hook if the referenced object is not ready.
            var referencing = new ReferencingPostDeserializationMock();
            SerializerClone(referencing);
        }

        [Test]
        public void ShouldInvokePostHookAfterBothObjectsInCyclicReferenceAreDeserialized()
        {
            var a = new CyclicReferenceMockA();
            a.B = new CyclicReferenceMockB();
            a.B.A = a;
            SerializerClone(a);
        }

        [Test]
        public void ShouldInvokeLatePostSerializationHookAfterImmediate()
        {
            var late = new LatePostSerializationMockA();
            SerializerClone(late);
        }

        [Test]
        public void ShouldInvokeLatePostDeserializationHookAfterImmediate()
        {
            // BUGFIX: this test previously cloned LatePostSerializationMockA — a copy-paste
            // of the serialization test above — so the late *deserialization* ordering was
            // never exercised. LateDeserializationMockA is the mock that verifies it.
            var late = new LateDeserializationMockA();
            SerializerClone(late);
        }

        [Test]
        public void ShouldInvokeImmediateHooksInCorrectOrder()
        {
            var forOrderTest = new ForOrderTestA { B = new ForOrderTestB() };
            var copy = SerializerClone(forOrderTest);
            Assert.Less(forOrderTest.B.HookInvokedOn, forOrderTest.HookInvokedOn);
            Assert.Less(copy.B.HookInvokedOn, copy.HookInvokedOn);
        }

        [Test]
        public void ShouldInvokeLateHooksInCorrectOrder()
        {
            var forLateOrderTest = new ForLateOrderTestA { B = new ForLateOrderTestB() };
            var copy = SerializerClone(forLateOrderTest);
            Assert.Less(forLateOrderTest.B.HookInvokedOn, forLateOrderTest.HookInvokedOn);
            Assert.Less(copy.B.HookInvokedOn, copy.HookInvokedOn);
        }

        [Test]
        public void ShouldInvokePostDeserializationEvenIfExceptionWasThrownDuringSerializationEarly()
        {
            ShouldInvokePostDeserializationEvenIfExceptionWasThrownDuringSerialization(new PrePostSerializationMock());
        }

        [Test]
        public void ShouldInvokePostDeserializationEvenIfExceptionWasThrownDuringSerializationLate()
        {
            ShouldInvokePostDeserializationEvenIfExceptionWasThrownDuringSerialization(new LatePrePostSerializationMock());
        }

        [Test]
        public void ShouldInvokeHooksOnDerivedClassesInCorrectOrder()
        {
            var obj = new DerivedFromClassB();
            var copy = SerializerClone(obj);
            Assert.IsTrue(copy.FlagC);
        }

        [Test]
        public void ShouldProperlyExecuteHooksOnVirtualMethod()
        {
            var mockWithVirtual = new MockWithVirtualDerived();
            var copy = SerializerClone(mockWithVirtual);
            Assert.AreEqual(1, mockWithVirtual.BasePreSerializationCounter);
            Assert.AreEqual(2, mockWithVirtual.DerivedPreSerializationCounter);
            Assert.AreEqual(3, mockWithVirtual.BasePostSerializationCounter);
            Assert.AreEqual(4, mockWithVirtual.DerivedPostSerializationCounter);
            Assert.AreEqual(3, copy.BasePostDeserializationCounter);
            Assert.AreEqual(4, copy.DerivedPostDeserializationCounter);
        }

        [Test]
        public void ShouldFailWithSurrogateAndLatePostDeserializationHook()
        {
            try
            {
                var lateHook = new LateDeserializationMockA();
                PseudoClone(lateHook, serializer => serializer.ForSurrogate<LateDeserializationMockA>().SetObject(x => new object()));
                Assert.Fail("Serialization finished while it should fail.");
            }
            catch(InvalidOperationException)
            {
            }
        }

        // Serializes a mock whose referenced object throws during serialization, then checks
        // that the post-serialization hook still ran despite the propagated exception.
        private void ShouldInvokePostDeserializationEvenIfExceptionWasThrownDuringSerialization<T>(T prePostMock) where T : IPrePostMock
        {
            try
            {
                SerializerClone(prePostMock);
                Assert.Fail("The exception has not propagated.");
            }
            catch(InvalidOperationException)
            {
            }
            Assert.AreEqual(true, prePostMock.PreExecuted);
            Assert.AreEqual(true, prePostMock.PostExecuted);
        }
    }

    public class PreSerializationMock
    {
        [PreSerialization]
        private void PreSerialization() { Invoked = true; }

        public bool Invoked { get; private set; }
    }

    public class PreSerializationMockDerived : PreSerializationMock
    {
        [PreSerialization]
        private void PreSerializationDerived() { DerivedInvoked = true; }

        public bool DerivedInvoked { get; private set; }
    }

    public class ImmediatePostSerializationMock
    {
        [PostSerializationAttribute]
        private void PostSerialization() { Invoked = true; }

        public bool Invoked { get; private set; }
    }

    public class ImmediatePostSerializationMockDerived : ImmediatePostSerializationMock
    {
        [PostSerializationAttribute]
        private void PostSerializationDerived() { DerivedInvoked = true; }

        public bool DerivedInvoked { get; private set; }
    }

    public class StaticImmediatePostSerializationMock
    {
        [PostSerializationAttribute]
        private static void PostSerialization() { Invoked = true; }

        public static bool Invoked { get; private set; }
    }

    public class PostDeserializationMock
    {
        [LatePostDeserializationAttribute]
        private void PostDeserialization() { Invoked = true; }

        public bool Invoked { get; private set; }
    }

    public class PostDeserializationMockDerived : PostDeserializationMock
    {
        [LatePostDeserializationAttribute]
        private void PostDeserializationDerived() { DerivedInvoked = true; }

        public bool DerivedInvoked { get; private set; }
    }

    public class ReferencingPostDeserializationMock
    {
        public ReferencingPostDeserializationMock()
        {
            mock = new ReferencedPostDeserializationMock();
        }

        [LatePostDeserializationAttribute]
        private void PostDeserialization()
        {
            if(mock.TestObject == null)
            {
                throw new InvalidOperationException("Referenced mock was still not deserialized when invoking hook.");
            }
        }

        private readonly ReferencedPostDeserializationMock mock;
    }

    public class ReferencedPostDeserializationMock
    {
        public ReferencedPostDeserializationMock()
        {
            TestObject = new object();
        }

        public object TestObject { get; private set; }
    }

    public class CyclicReferenceMockA
    {
        public CyclicReferenceMockB B { get; set; }
        public string Str { get; set; }

        public CyclicReferenceMockA()
        {
            Str = "Something";
        }

        [LatePostDeserializationAttribute]
        public void TestIfBIsReady()
        {
            if(B == null || B.Str == null)
            {
                throw new InvalidOperationException("B is not ready after deserialization.");
            }
        }
    }

    public class CyclicReferenceMockB
    {
        public CyclicReferenceMockA A { get; set; }
        public string Str { get; set; }

        public CyclicReferenceMockB()
        {
            Str = "Something different";
        }

        [LatePostDeserializationAttribute]
        public void TestIfAIsReady()
        {
            if(A == null || A.Str == null)
            {
                throw new InvalidOperationException("A is not ready after deserialization.");
            }
        }
    }

    public class LatePostSerializationMockA
    {
        public LatePostSerializationMockA()
        {
            B = new PostSerializationMockB();
        }

        public PostSerializationMockB B { get; set; }

        [LatePostSerialization]
        private void PostSerialization()
        {
            if(!B.PostSerialized)
            {
                throw new InvalidOperationException("Late post serialization hook happened earlier than immediate one on referenced class.");
            }
        }
    }

    public class PostSerializationMockB
    {
        public bool PostSerialized { get; private set; }

        [PostSerializationAttribute]
        private void PostSerialization() { PostSerialized = true; }
    }

    public class LateDeserializationMockA
    {
        public LateDeserializationMockA()
        {
            B = new DeserializationMockB();
        }

        public DeserializationMockB B { get; set; }

        [LatePostDeserializationAttribute]
        private void PostDeserialization()
        {
            if(!B.PostDeserialized)
            {
                throw new InvalidOperationException("Late post serialization hook happened earlier than immediate one on referenced class.");
            }
        }
    }

    public class DeserializationMockB
    {
        public bool PostDeserialized { get; private set; }

        [PostDeserializationAttribute]
        private void PostDeserialization() { PostDeserialized = true; }
    }

    public class ForOrderTestA : ForOrderTestB
    {
        public ForOrderTestB B { get; set; }
    }

    public class ForOrderTestB
    {
        public DateTime HookInvokedOn { get; private set; }

        [PostDeserialization]
        [PostSerialization]
        public void AfterDeOrSerialization()
        {
            HookInvokedOn = DateTime.Now;
            Thread.Sleep(100);
        }
    }

    public class ForLateOrderTestA : ForLateOrderTestB
    {
        public ForLateOrderTestB B { get; set; }
    }

    public class ForLateOrderTestB
    {
        public DateTime HookInvokedOn { get; private set; }

        [LatePostDeserialization]
        [LatePostSerialization]
        public void AfterDeOrSerialization()
        {
            HookInvokedOn = DateTime.Now;
            Thread.Sleep(100);
        }
    }

    public class ClassSendingExcetpionDuringSerialization
    {
        [PreSerialization]
        private void SendException()
        {
            throw new InvalidOperationException();
        }
    }

    public interface IPrePostMock
    {
        bool PreExecuted { get; }
        bool PostExecuted { get; }
    }

    public class PrePostSerializationMock : IPrePostMock
    {
        public PrePostSerializationMock()
        {
            sendingException = new ClassSendingExcetpionDuringSerialization();
        }

        public bool PreExecuted { get; private set; }
        public bool PostExecuted { get; private set; }

        [PreSerialization]
        private void BeforeSerialization() { PreExecuted = true; }

        [PostSerialization]
        private void AfterSerialization() { PostExecuted = true; }

#pragma warning disable 0414
        private ClassSendingExcetpionDuringSerialization sendingException;
#pragma warning restore 0414
    }

    public class LatePrePostSerializationMock : IPrePostMock
    {
        public LatePrePostSerializationMock()
        {
            sendingException = new ClassSendingExcetpionDuringSerialization();
        }

        public bool PreExecuted { get; private set; }
        public bool PostExecuted { get; private set; }

        [PreSerialization]
        private void BeforeSerialization() { PreExecuted = true; }

        [LatePostSerialization]
        private void AfterSerialization() { PostExecuted = true; }

#pragma warning disable 0414
        private ClassSendingExcetpionDuringSerialization sendingException;
#pragma warning restore 0414
    }

    public class BaseClassA
    {
        [Transient]
        protected bool FlagA;

        [PostDeserialization]
        private void AfterDeserialization() { FlagA = true; }
    }

    public class DerivedFromBaseClassA : BaseClassA
    {
        [Transient]
        protected bool FlagB;

        [PostDeserialization]
        private void AfterDeserialization()
        {
            Assert.IsTrue(FlagA);
            FlagB = true;
        }
    }

    public class DerivedFromClassB : DerivedFromBaseClassA
    {
        [Transient]
        public bool FlagC;

        [PostDeserialization]
        private void AfterDeserialization()
        {
            Assert.IsTrue(FlagA);
            Assert.IsTrue(FlagB);
            FlagC = true;
        }
    }

    public class MockWithVirtualBase
    {
        public int BasePreSerializationCounter { get; private set; }
        public int BasePostSerializationCounter { get; private set; }
        public int BasePostDeserializationCounter { get; private set; }

        [PreSerialization]
        protected virtual void BeforeSerialization() { BasePreSerializationCounter = Counter; }

        [PostSerialization]
        protected virtual void AfterSerialization() { BasePostSerializationCounter = Counter; }

        [PostDeserialization]
        protected virtual void AfterDeserialization() { BasePostDeserializationCounter = Counter; }

        // Incrementing counter used to record the relative order of hook invocations.
        protected int Counter { get { return ++counterValue; } }

        private int counterValue;
    }

    public class MockWithVirtualDerived : MockWithVirtualBase
    {
        public int DerivedPreSerializationCounter { get; private set; }
        public int DerivedPostSerializationCounter { get; private set; }
        public int DerivedPostDeserializationCounter { get; private set; }

        protected override void BeforeSerialization()
        {
            base.BeforeSerialization();
            DerivedPreSerializationCounter = Counter;
        }

        protected override void AfterSerialization()
        {
            base.AfterSerialization();
            DerivedPostSerializationCounter = Counter;
        }

        protected override void AfterDeserialization()
        {
            base.AfterDeserialization();
            DerivedPostDeserializationCounter = Counter;
        }
    }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using System.Collections;
using System.Collections.ObjectModel;
using System.Management.Automation.Internal;
using System.Management.Automation.Language;

using Dbg = System.Management.Automation.Diagnostics;

namespace System.Management.Automation
{
    /// <summary>
    /// The base class for all command processor classes. It provides
    /// abstract methods to execute a command.
    /// </summary>
    internal abstract class CommandProcessorBase : IDisposable
    {
        #region ctor

        /// <summary>
        /// Default constructor.
        /// </summary>
        internal CommandProcessorBase()
        {
        }

        /// <summary>
        /// Initializes the base command processor class with the command metadata.
        /// </summary>
        /// <param name="commandInfo">
        /// The metadata about the command to run.
        /// </param>
        /// <exception cref="CmdletInvocationException">
        /// The command is a script command hidden by an experimental feature.
        /// </exception>
        internal CommandProcessorBase(CommandInfo commandInfo)
        {
            if (commandInfo == null)
            {
                throw PSTraceSource.NewArgumentNullException(nameof(commandInfo));
            }

            // A script command may be hidden by the experimental-feature system;
            // refuse to construct a processor for it and report which feature
            // state disables it.
            if (commandInfo is IScriptCommandInfo scriptCommand)
            {
                ExperimentalAttribute expAttribute = scriptCommand.ScriptBlock.ExperimentalAttribute;
                if (expAttribute != null && expAttribute.ToHide)
                {
                    string errorTemplate = expAttribute.ExperimentAction == ExperimentAction.Hide
                        ? DiscoveryExceptions.ScriptDisabledWhenFeatureOn
                        : DiscoveryExceptions.ScriptDisabledWhenFeatureOff;

                    string errorMsg = StringUtil.Format(errorTemplate, expAttribute.ExperimentName);
                    ErrorRecord errorRecord = new ErrorRecord(
                        new InvalidOperationException(errorMsg),
                        "ScriptCommandDisabled",
                        ErrorCategory.InvalidOperation,
                        commandInfo);
                    throw new CmdletInvocationException(errorRecord);
                }
            }

            CommandInfo = commandInfo;
        }

        #endregion ctor

        #region properties

        private InternalCommand _command;

        // Marker of whether BeginProcessing() has already run,
        // also used by CommandProcessor.
        internal bool RanBeginAlready;

        // Marker of whether this command has already been added to
        // a PipelineProcessor. It is an error to add the same command
        // more than once.
        internal bool AddedToPipelineAlready
        {
            get { return _addedToPipelineAlready; }

            set { _addedToPipelineAlready = value; }
        }

        internal bool _addedToPipelineAlready;

        /// <summary>
        /// Gets the CommandInfo for the command this command processor represents.
        /// </summary>
        /// <value></value>
        internal CommandInfo CommandInfo { get; set; }

        /// <summary>
        /// This indicates whether this command processor is created from
        /// a script file.
        /// </summary>
        /// <remarks>
        /// Script command processor created from a script file is special
        /// in following two perspectives,
        ///
        ///  1. New scope created needs to be a 'script' scope in the
        ///     sense that it needs to handle $script: variables.
        ///     For normal functions or scriptblocks, script scope
        ///     variables are not supported.
        ///
        ///  2. ExitException will be handled by setting lastExitCode.
        ///     For normal functions or scriptblocks, exit command will
        ///     kill current powershell session.
        /// </remarks>
        public bool FromScriptFile { get { return _fromScriptFile; } }

        protected bool _fromScriptFile = false;

        /// <summary>
        /// If this flag is true, the commands in this Pipeline will redirect
        /// the global error output pipe to the command's error output pipe.
        /// (See the comment in Pipeline.RedirectShellErrorOutputPipe for an
        /// explanation of why this flag is needed).
        /// </summary>
        internal bool RedirectShellErrorOutputPipe { get; set; } = false;

        /// <summary>
        /// Gets or sets the command object. Setting a non-null value wires the
        /// command to this processor's runtime, command info, and execution
        /// context so the two objects stay consistent.
        /// </summary>
        internal InternalCommand Command
        {
            get { return _command; }

            set
            {
                // The command runtime needs to be set up...
                if (value != null)
                {
                    value.commandRuntime = this.commandRuntime;
                    if (_command != null)
                        value.CommandInfo = _command.CommandInfo;

                    // Set the execution context for the command it's currently
                    // null and our context has already been set up.
                    if (value.Context == null && _context != null)
                        value.Context = _context;
                }

                _command = value;
            }
        }

        /// <summary>
        /// Get the ObsoleteAttribute of the current command. Base implementation
        /// returns null (not obsolete); derived processors override.
        /// </summary>
        internal virtual ObsoleteAttribute ObsoleteAttribute
        {
            get { return null; }
        }

        // Fully qualified ID for the obsolete command warning.
        private const string FQIDCommandObsolete = "CommandObsolete";

        /// <summary>
        /// The command runtime used for this instance of a command processor.
        /// </summary>
        protected MshCommandRuntime commandRuntime;

        internal MshCommandRuntime CommandRuntime
        {
            get { return commandRuntime; }

            set { commandRuntime = value; }
        }

        /// <summary>
        /// For commands that use the scope stack, if this flag is
        /// true, don't create a new scope when running this command.
        /// </summary>
        /// <value></value>
        internal bool UseLocalScope
        {
            get { return _useLocalScope; }

            set { _useLocalScope = value; }
        }

        protected bool _useLocalScope;

        /// <summary>
        /// Ensures that the provided script block is compatible with the current language mode - to
        /// be used when a script block is being dotted.
        /// </summary>
        /// <param name="scriptBlock">The script block being dotted.</param>
        /// <param name="languageMode">The current language mode.</param>
        /// <param name="invocationInfo">The invocation info about the command.</param>
        /// <exception cref="CmdletInvocationException">
        /// Dot-sourcing the script block across the language-mode boundary is not allowed.
        /// </exception>
        protected static void ValidateCompatibleLanguageMode(
            ScriptBlock scriptBlock,
            PSLanguageMode languageMode,
            InvocationInfo invocationInfo)
        {
            // If we are in a constrained language mode (Core or Restricted), block it.
            // We are currently restricting in one direction:
            //    - Can't dot something from a more permissive mode, since that would probably expose
            //      functions that were never designed to handle untrusted data.
            // This function won't be called for NoLanguage mode so the only direction checked is trusted
            // (FullLanguage mode) script running in a constrained/restricted session.
            if ((scriptBlock.LanguageMode.HasValue) &&
                (scriptBlock.LanguageMode != languageMode) &&
                ((languageMode == PSLanguageMode.RestrictedLanguage) ||
                (languageMode == PSLanguageMode.ConstrainedLanguage)))
            {
                // Finally check if script block is really just PowerShell commands plus parameters.
                // If so then it is safe to dot source across language mode boundaries.
                bool isSafeToDotSource = false;
                try
                {
                    // GetPowerShell() throws when the script block contains
                    // anything beyond simple command invocations.
                    scriptBlock.GetPowerShell();
                    isSafeToDotSource = true;
                }
                catch (Exception)
                {
                }

                if (!isSafeToDotSource)
                {
                    ErrorRecord errorRecord = new ErrorRecord(
                    new NotSupportedException(
                        DiscoveryExceptions.DotSourceNotSupported),
                        "DotSourceNotSupported",
                        ErrorCategory.InvalidOperation,
                        null);
                    errorRecord.SetInvocationInfo(invocationInfo);
                    throw new CmdletInvocationException(errorRecord);
                }
            }
        }

        /// <summary>
        /// The execution context used by the system.
        /// </summary>
        protected ExecutionContext _context;

        internal ExecutionContext Context
        {
            get { return _context; }

            set { _context = value; }
        }

        /// <summary>
        /// Etw activity for this pipeline.
        /// </summary>
        internal Guid PipelineActivityId { get; set; } = Guid.Empty;

        #endregion properties

        #region methods

        #region handling of -? parameter

        /// <summary>
        /// Checks if user has requested help (for example passing "-?" parameter for a cmdlet)
        /// and if yes, then returns the help target to display.
        /// </summary>
        /// <param name="helpTarget">Help target to request.</param>
        /// <param name="helpCategory">Help category to request.</param>
        /// <returns><see langword="true"/> if user requested help; <see langword="false"/> otherwise.</returns>
        internal virtual bool IsHelpRequested(out string helpTarget, out HelpCategory helpCategory)
        {
            // by default we don't handle "-?" parameter at all
            // (we want to do the checks only for cmdlets - this method is overridden in CommandProcessor)
            helpTarget = null;
            helpCategory = HelpCategory.None;
            return false;
        }

        /// <summary>
        /// Creates a command processor for "get-help [helpTarget]".
        /// </summary>
        /// <param name="context">Context for the command processor.</param>
        /// <param name="helpTarget">Help target.</param>
        /// <param name="helpCategory">Help category.</param>
        /// <returns>Command processor for "get-help [helpTarget]".</returns>
        internal static CommandProcessorBase CreateGetHelpCommandProcessor(
            ExecutionContext context,
            string helpTarget,
            HelpCategory helpCategory)
        {
            if (context == null)
            {
                throw PSTraceSource.NewArgumentNullException(nameof(context));
            }

            if (string.IsNullOrEmpty(helpTarget))
            {
                throw PSTraceSource.NewArgumentNullException(nameof(helpTarget));
            }

            CommandProcessorBase helpCommandProcessor = context.CreateCommand("get-help", false);
            var cpi = CommandParameterInternal.CreateParameterWithArgument(
                /*parameterAst*/null, "Name", "-Name:",
                /*argumentAst*/null, helpTarget,
                false);
            helpCommandProcessor.AddParameter(cpi);
            cpi = CommandParameterInternal.CreateParameterWithArgument(
                /*parameterAst*/null, "Category", "-Category:",
                /*argumentAst*/null, helpCategory.ToString(),
                false);
            helpCommandProcessor.AddParameter(cpi);
            return helpCommandProcessor;
        }

        #endregion

        /// <summary>
        /// Tells whether pipeline input is expected or not.
        /// </summary>
        /// <returns>A bool indicating whether pipeline input is expected.</returns>
        internal bool IsPipelineInputExpected()
        {
            return commandRuntime.IsPipelineInputExpected;
        }

        /// <summary>
        /// If you want this command to execute in other than the default session
        /// state, use this API to get and set that session state instance...
        /// </summary>
        internal SessionStateInternal CommandSessionState { get; set; }

        /// <summary>
        /// Gets or sets the session state scope for this command processor object.
        /// </summary>
        protected internal SessionStateScope CommandScope { get; protected set; }

        // Extension point invoked at the end of SetCurrentScopeToExecutionScope().
        protected virtual void OnSetCurrentScope()
        {
        }

        // Extension point invoked at the start of RestorePreviousScope().
        protected virtual void OnRestorePreviousScope()
        {
        }

        /// <summary>
        /// This method sets the current session state scope to the execution scope for the pipeline
        /// that was stored in the pipeline manager when it was first invoked.
        /// The previous scope and session state are saved so RestorePreviousScope()
        /// can undo this exactly.
        /// </summary>
        internal void SetCurrentScopeToExecutionScope()
        {
            // Make sure we have a session state instance for this command.
            // If one hasn't been explicitly set, then use the session state
            // available on the engine execution context...
            if (CommandSessionState == null)
            {
                CommandSessionState = Context.EngineSessionState;
            }

            // Store off the current scope
            _previousScope = CommandSessionState.CurrentScope;
            _previousCommandSessionState = Context.EngineSessionState;
            Context.EngineSessionState = CommandSessionState;

            // Set the current scope to the pipeline execution scope
            CommandSessionState.CurrentScope = CommandScope;

            OnSetCurrentScope();
        }

        /// <summary>
        /// Restores the current session state scope to the scope which was active when SetCurrentScopeToExecutionScope
        /// was called.
        /// </summary>
        internal void RestorePreviousScope()
        {
            OnRestorePreviousScope();

            Context.EngineSessionState = _previousCommandSessionState;

            if (_previousScope != null)
            {
                // Restore the scope but use the same session state instance we
                // got it from because the command may have changed the execution context
                // session state...
                CommandSessionState.CurrentScope = _previousScope;
            }
        }

        private SessionStateScope _previousScope;
        private SessionStateInternal _previousCommandSessionState;

        /// <summary>
        /// A collection of arguments that have been added by the parser or
        /// host interfaces. These will be sent to the parameter binder controller
        /// for processing.
        /// </summary>
        internal Collection<CommandParameterInternal> arguments = new Collection<CommandParameterInternal>();

        /// <summary>
        /// Adds an unbound parameter.
        /// </summary>
        /// <param name="parameter">
        /// The parameter to add to the unbound arguments list
        /// </param>
        internal void AddParameter(CommandParameterInternal parameter)
        {
            Diagnostics.Assert(parameter != null, "Caller to verify parameter argument");
            arguments.Add(parameter);
        }

        /// <summary>
        /// Prepares the command for execution.
        /// This should be called once before ProcessRecord().
        /// </summary>
        internal abstract void Prepare(IDictionary psDefaultParameterValues);

        /// <summary>
        /// Write warning message for an obsolete command.
        /// </summary>
        /// <param name="obsoleteAttr">The attribute carrying the obsolescence message.</param>
        private void HandleObsoleteCommand(ObsoleteAttribute obsoleteAttr)
        {
            string commandName =
                string.IsNullOrEmpty(CommandInfo.Name)
                    ? "script block"
                    : string.Format(System.Globalization.CultureInfo.InvariantCulture,
                                    CommandBaseStrings.ObsoleteCommand,
                                    CommandInfo.Name);

            string warningMsg = string.Format(
                System.Globalization.CultureInfo.InvariantCulture,
                CommandBaseStrings.UseOfDeprecatedCommandWarning,
                commandName,
                obsoleteAttr.Message);

            // We ignore the IsError setting because we don't want to break people when obsoleting a command
            using (this.CommandRuntime.AllowThisCommandToWrite(false))
            {
                this.CommandRuntime.WriteWarning(new WarningRecord(FQIDCommandObsolete, warningMsg));
            }
        }

        /// <summary>
        /// Sets the execution scope for the pipeline and then calls the Prepare
        /// abstract method which gets overridden by derived classes.
        /// </summary>
        internal void DoPrepare(IDictionary psDefaultParameterValues)
        {
            CommandProcessorBase oldCurrentCommandProcessor = _context.CurrentCommandProcessor;
            try
            {
                Context.CurrentCommandProcessor = this;
                SetCurrentScopeToExecutionScope();
                Prepare(psDefaultParameterValues);

                // Check obsolete attribute after Prepare so that -WarningAction will be respected for cmdlets
                if (ObsoleteAttribute != null)
                {
                    // Obsolete command is rare. Put the IF here to avoid method call overhead
                    HandleObsoleteCommand(ObsoleteAttribute);
                }
            }
            catch (Exception)
            {
                if (_useLocalScope)
                {
                    // If we had an exception during Prepare, we're done trying to execute the command
                    // so the scope we created needs to release any resources it holds.
                    CommandSessionState.RemoveScope(CommandScope);
                }

                throw;
            }
            finally
            {
                Context.CurrentCommandProcessor = oldCurrentCommandProcessor;
                RestorePreviousScope();
            }
        }

        /// <summary>
        /// Called once before ProcessRecord(). Internally it calls
        /// BeginProcessing() of the InternalCommand.
        /// </summary>
        /// <exception cref="PipelineStoppedException">
        /// a terminating error occurred, or the pipeline was otherwise stopped
        /// </exception>
        internal virtual void DoBegin()
        {
            // Note that DoPrepare() and DoBegin() should NOT be combined.
            // Reason: Encoding of commandline parameters happen as part
            // of DoPrepare(). If they are combined, the first command's
            // DoBegin() will be called before the next command's
            // DoPrepare(). Since BeginProcessing() can write objects
            // to the downstream commandlet, it will end up calling
            // DoExecute() (from Pipe.Add()) before DoPrepare.
            if (!RanBeginAlready)
            {
                RanBeginAlready = true;
                Pipe oldErrorOutputPipe = _context.ShellFunctionErrorOutputPipe;
                CommandProcessorBase oldCurrentCommandProcessor = _context.CurrentCommandProcessor;
                try
                {
                    //
                    // On V1 the output pipe was redirected to the command's output pipe only when it
                    // was already redirected. This is the original comment explaining this behaviour:
                    //
                    //      NTRAID#Windows Out of Band Releases-926183-2005-12-15
                    //      MonadTestHarness has a bad dependency on an artifact of the current implementation
                    //      The following code only redirects the output pipe if it's already redirected
                    //      to preserve the artifact. The test suites need to be fixed and then this
                    //      the check can be removed and the assignment always done.
                    //
                    // However, this makes the hosting APIs behave differently than commands executed
                    // from the command-line host (for example, see bugs Win7:415915 and Win7:108670).
                    // The RedirectShellErrorOutputPipe flag is used by the V2 hosting API to force the
                    // redirection.
                    //
                    if (this.RedirectShellErrorOutputPipe || _context.ShellFunctionErrorOutputPipe != null)
                    {
                        _context.ShellFunctionErrorOutputPipe = this.commandRuntime.ErrorOutputPipe;
                    }

                    _context.CurrentCommandProcessor = this;
                    using (commandRuntime.AllowThisCommandToWrite(true))
                    {
                        using (ParameterBinderBase.bindingTracer.TraceScope(
                            "CALLING BeginProcessing"))
                        {
                            SetCurrentScopeToExecutionScope();

                            if (Context._debuggingMode > 0 && Command is not PSScriptCmdlet)
                            {
                                Context.Debugger.CheckCommand(this.Command.MyInvocation);
                            }

                            Command.DoBeginProcessing();
                        }
                    }
                }
                catch (Exception e)
                {
                    // This cmdlet threw an exception, so
                    // wrap it and bubble it up.
                    throw ManageInvocationException(e);
                }
                finally
                {
                    _context.ShellFunctionErrorOutputPipe = oldErrorOutputPipe;
                    _context.CurrentCommandProcessor = oldCurrentCommandProcessor;
                    RestorePreviousScope();
                }
            }
        }

        /// <summary>
        /// This calls the command. It assumes that DoPrepare() has already been called.
        /// </summary>
        internal abstract void ProcessRecord();

        /// <summary>
        /// This method sets the execution scope to the
        /// appropriate scope for the pipeline and then calls
        /// the ProcessRecord abstract method that derived command processors
        /// override.
        /// </summary>
        internal void DoExecute()
        {
            ExecutionContext.CheckStackDepth();
            CommandProcessorBase oldCurrentCommandProcessor = _context.CurrentCommandProcessor;
            try
            {
                Context.CurrentCommandProcessor = this;
                SetCurrentScopeToExecutionScope();
                ProcessRecord();
            }
            finally
            {
                Context.CurrentCommandProcessor = oldCurrentCommandProcessor;
                RestorePreviousScope();
            }
        }

        /// <summary>
        /// Called once after ProcessRecord().
        /// Internally it calls EndProcessing() of the InternalCommand.
        /// </summary>
        /// <exception cref="PipelineStoppedException">
        /// A terminating error occurred, or the pipeline was otherwise stopped.
        /// </exception>
        internal virtual void Complete()
        {
            // Call ProcessRecord once from complete. Don't call DoExecute...
            ProcessRecord();

            try
            {
                using (commandRuntime.AllowThisCommandToWrite(true))
                {
                    using (ParameterBinderBase.bindingTracer.TraceScope(
                        "CALLING EndProcessing"))
                    {
                        this.Command.DoEndProcessing();
                    }
                }
            }
            // 2004/03/18-JonN This is understood to be
            // an FXCOP violation, cleared by KCwalina.
            catch (Exception e)
            {
                // This cmdlet threw an exception, so
                // wrap it and bubble it up.
                throw ManageInvocationException(e);
            }
        }

        /// <summary>
        /// Calls the virtual Complete method after setting the appropriate session state scope.
        /// </summary>
        internal void DoComplete()
        {
            Pipe oldErrorOutputPipe = _context.ShellFunctionErrorOutputPipe;
            CommandProcessorBase oldCurrentCommandProcessor = _context.CurrentCommandProcessor;
            try
            {
                //
                // On V1 the output pipe was redirected to the command's output pipe only when it
                // was already redirected. This is the original comment explaining this behaviour:
                //
                //      NTRAID#Windows Out of Band Releases-926183-2005-12-15
                //      MonadTestHarness has a bad dependency on an artifact of the current implementation
                //      The following code only redirects the output pipe if it's already redirected
                //      to preserve the artifact. The test suites need to be fixed and then this
                //      the check can be removed and the assignment always done.
                //
                // However, this makes the hosting APIs behave differently than commands executed
                // from the command-line host (for example, see bugs Win7:415915 and Win7:108670).
                // The RedirectShellErrorOutputPipe flag is used by the V2 hosting API to force the
                // redirection.
                //
                if (this.RedirectShellErrorOutputPipe || _context.ShellFunctionErrorOutputPipe != null)
                {
                    _context.ShellFunctionErrorOutputPipe = this.commandRuntime.ErrorOutputPipe;
                }

                _context.CurrentCommandProcessor = this;

                SetCurrentScopeToExecutionScope();
                Complete();
            }
            finally
            {
                OnRestorePreviousScope();

                _context.ShellFunctionErrorOutputPipe = oldErrorOutputPipe;
                _context.CurrentCommandProcessor = oldCurrentCommandProcessor;

                // Destroy the local scope at this point if there is one...
                if (_useLocalScope && CommandScope != null)
                {
                    CommandSessionState.RemoveScope(CommandScope);
                }

                // and the previous scope...
                if (_previousScope != null)
                {
                    // Restore the scope but use the same session state instance we
                    // got it from because the command may have changed the execution context
                    // session state...
                    CommandSessionState.CurrentScope = _previousScope;
                }

                // Restore the previous session state
                if (_previousCommandSessionState != null)
                {
                    Context.EngineSessionState = _previousCommandSessionState;
                }
            }
        }

        /// <summary>
        /// For diagnostic purposes.
        /// </summary>
        public override string ToString()
        {
            if (CommandInfo != null)
                return CommandInfo.ToString();

            return "<NullCommandInfo>"; // does not require localization
        }

        /// <summary>
        /// True if Read() has not been called, false otherwise.
        /// </summary>
        private bool _firstCallToRead = true;

        /// <summary>
        /// Entry point used by the engine to reads the input pipeline object
        /// and binds the parameters.
        ///
        /// This default implementation reads the next pipeline object and sets
        /// it as the CurrentPipelineObject in the InternalCommand.
        /// Does not throw.
        /// </summary>
        /// <returns>
        /// True if read succeeds.
        /// </returns>
        internal virtual bool Read()
        {
            // Prepare the default value parameter list if this is the first call to Read
            if (_firstCallToRead)
            {
                _firstCallToRead = false;
            }

            // Retrieve the object from the input pipeline
            object inputObject = this.commandRuntime.InputPipe.Retrieve();

            if (inputObject == AutomationNull.Value)
            {
                return false;
            }

            // If we are reading input for the first command in the pipeline increment PipelineIterationInfo[0], which is the number of items read from the input
            if (this.Command.MyInvocation.PipelinePosition == 1)
            {
                this.Command.MyInvocation.PipelineIterationInfo[0]++;
            }

            Command.CurrentPipelineObject = LanguagePrimitives.AsPSObjectOrNull(inputObject);

            return true;
        }

        /// <summary>
        /// Wraps the exception which occurred during cmdlet invocation,
        /// stores that as the exception to be returned from
        /// PipelineProcessor.SynchronousExecute, and writes it to
        /// the error variable.
        /// </summary>
        /// <param name="e">
        /// The exception to wrap in a CmdletInvocationException or
        /// CmdletProviderInvocationException.
        /// </param>
        /// <returns>
        /// Always returns PipelineStoppedException. The caller should
        /// throw this exception.
        /// </returns>
        /// <remarks>
        /// Almost all exceptions which occur during pipeline invocation
        /// are wrapped in CmdletInvocationException before they are stored
        /// in the pipeline. However, there are several exceptions:
        ///
        /// AccessViolationException, StackOverflowException:
        /// These are considered to be such severe errors that we
        /// FailFast the process immediately.
        ///
        /// ProviderInvocationException: In this case, we assume that the
        /// cmdlet is get-item or the like, a thin wrapper around the
        /// provider API.  We discard the original ProviderInvocationException
        /// and re-wrap its InnerException (the real error) in
        /// CmdletProviderInvocationException. This makes it easier to reach
        /// the real error.
        ///
        /// CmdletInvocationException, ActionPreferenceStopException:
        /// This indicates that the cmdlet itself ran a command which failed.
        /// We could go ahead and wrap the original exception in multiple
        /// layers of CmdletInvocationException, but this makes it difficult
        /// for the caller to access the root problem, plus the serialization
        /// layer might not communicate properties beyond some fixed depth.
        /// Instead, we choose to not re-wrap the exception.
        ///
        /// PipelineStoppedException: This could mean one of two things.
        /// It usually means that this pipeline has already stopped,
        /// in which case the pipeline already stores the original error.
        /// It could also mean that the cmdlet ran a command which was
        /// stopped by CTRL-C etc, in which case we choose not to
        /// re-wrap the exception as with CmdletInvocationException.
        /// </remarks>
        internal PipelineStoppedException ManageInvocationException(Exception e)
        {
            try
            {
                if (Command != null)
                {
                    do // false loop
                    {
                        ProviderInvocationException pie = e as ProviderInvocationException;
                        if (pie != null)
                        {
                            // If a ProviderInvocationException occurred,
                            // discard the ProviderInvocationException and
                            // re-wrap in CmdletProviderInvocationException
                            e = new CmdletProviderInvocationException(
                                pie,
                                Command.MyInvocation);
                            break;
                        }

                        // 1021203-2005/05/09-JonN
                        // HaltCommandException will cause the command
                        // to stop, but not be reported as an error.
                        // 906445-2005/05/16-JonN
                        // FlowControlException should not be wrapped
                        if (e is PipelineStoppedException ||
                            e is CmdletInvocationException ||
                            e is ActionPreferenceStopException ||
                            e is HaltCommandException ||
                            e is FlowControlException ||
                            e is ScriptCallDepthException)
                        {
                            // do nothing; do not rewrap these exceptions
                            break;
                        }

                        RuntimeException rte = e as RuntimeException;
                        if (rte != null && rte.WasThrownFromThrowStatement)
                        {
                            // do not rewrap a script based throw
                            break;
                        }

                        // wrap all other exceptions
                        e = new CmdletInvocationException(
                                e,
                                Command.MyInvocation);
                    } while (false);

                    // commandRuntime.ManageException will always throw PipelineStoppedException
                    // Otherwise, just return this exception...

                    // If this exception happened in a transacted cmdlet,
                    // rollback the transaction
                    if (commandRuntime.UseTransaction)
                    {
                        // The "transaction timed out" exception is
                        // exceedingly obtuse. We clarify things here.
                        bool isTimeoutException = false;
                        Exception tempException = e;
                        while (tempException != null)
                        {
                            if (tempException is System.TimeoutException)
                            {
                                isTimeoutException = true;
                                break;
                            }

                            tempException = tempException.InnerException;
                        }

                        if (isTimeoutException)
                        {
                            ErrorRecord errorRecord = new ErrorRecord(
                                new InvalidOperationException(
                                    TransactionStrings.TransactionTimedOut),
                                "TRANSACTION_TIMEOUT",
                                ErrorCategory.InvalidOperation,
                                e);
                            errorRecord.SetInvocationInfo(Command.MyInvocation);

                            e = new CmdletInvocationException(errorRecord);
                        }

                        // Rollback the transaction in the case of errors.
                        if (
                            _context.TransactionManager.HasTransaction
                            &&
                            _context.TransactionManager.RollbackPreference != RollbackSeverity.Never
                           )
                        {
                            Context.TransactionManager.Rollback(true);
                        }
                    }

                    return (PipelineStoppedException)this.commandRuntime.ManageException(e);
                }

                // Upstream cmdlets see only that execution stopped
                // This should only happen if Command is null
                return new PipelineStoppedException();
            }
            catch (Exception)
            {
                // this method should not throw exceptions; warn about any violations on checked builds and re-throw
                Diagnostics.Assert(false, "This method should not throw exceptions!");
                throw;
            }
        }

        /// <summary>
        /// Stores the exception to be returned from
        /// PipelineProcessor.SynchronousExecute, and writes it to
        /// the error variable.
        /// </summary>
        /// <param name="e">
        /// The exception which occurred during script execution
        /// </param>
        /// <exception cref="PipelineStoppedException">
        /// ManageScriptException throws PipelineStoppedException if-and-only-if
        /// the exception is a RuntimeException, otherwise it returns.
        /// This allows the caller to rethrow unexpected exceptions.
        /// </exception>
        internal void ManageScriptException(RuntimeException e)
        {
            if (Command != null && commandRuntime.PipelineProcessor != null)
            {
                commandRuntime.PipelineProcessor.RecordFailure(e, Command);

                // An explicit throw is written to $error as an ErrorRecord, so we
                // skip adding what is more or less a duplicate.
                if (e is not PipelineStoppedException && !e.WasThrownFromThrowStatement)
                    commandRuntime.AppendErrorToVariables(e);
            }

            // Upstream cmdlets see only that execution stopped
            throw new PipelineStoppedException();
        }

        /// <summary>
        /// Sometimes we shouldn't rethrow the exception we previously caught,
        /// such as when the exception is handled by a trap.
        /// </summary>
        internal void ForgetScriptException()
        {
            if (Command != null && commandRuntime.PipelineProcessor != null)
            {
                commandRuntime.PipelineProcessor.ForgetFailure();
            }
        }

        #endregion methods

        #region IDispose

        // 2004/03/05-JonN BrucePay has suggested that the IDispose
        // implementations in PipelineProcessor and CommandProcessor can be
        // removed.
        private bool _disposed;

        /// <summary>
        /// IDisposable implementation
        /// When the command is complete, the CommandProcessorBase should be disposed.
        /// This enables cmdlets to reliably release file handles etc.
        /// without waiting for garbage collection.
        /// </summary>
        /// <remarks>We use the standard IDispose pattern</remarks>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        private void Dispose(bool disposing)
        {
            if (_disposed)
                return;

            if (disposing)
            {
                // 2004/03/05-JonN Look into using metadata to check
                // whether IDisposable is implemented, in order to avoid
                // this expensive reflection cast.
                IDisposable id = Command as IDisposable;
                if (id != null)
                {
                    id.Dispose();
                }
            }

            _disposed = true;
        }

        #endregion IDispose
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.ComponentModel.Composition; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis.CodeActions; using Microsoft.CodeAnalysis.Diagnostics; using Microsoft.CodeAnalysis.Editor.Host; using Microsoft.CodeAnalysis.ErrorReporting; using Microsoft.CodeAnalysis.Shared.TestHooks; using Microsoft.CodeAnalysis.Text; using Microsoft.VisualStudio.Text; using Microsoft.VisualStudio.Text.Editor; using Microsoft.VisualStudio.Text.Operations; using Microsoft.VisualStudio.Text.Tagging; using Microsoft.VisualStudio.Utilities; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.Editor.Implementation.RenameTracking { /// <summary> /// Also known as "rename smart tag," this watches text changes in open buffers, determines /// whether they can be interpreted as an identifier rename, and if so displays a smart tag /// that can perform a rename on that symbol. Each text buffer is tracked independently. 
/// </summary>
[Export(typeof(ITaggerProvider))]
[TagType(typeof(RenameTrackingTag))]
[TagType(typeof(IErrorTag))]
[ContentType(ContentTypeNames.RoslynContentType)]
[TextViewRole(PredefinedTextViewRoles.Editable)]
internal sealed partial class RenameTrackingTaggerProvider : ITaggerProvider
{
    private readonly ITextUndoHistoryRegistry _undoHistoryRegistry;
    private readonly IAsynchronousOperationListener _asyncListener;
    private readonly IWaitIndicator _waitIndicator;
    private readonly IInlineRenameService _inlineRenameService;
    private readonly IEnumerable<IRefactorNotifyService> _refactorNotifyServices;
    private readonly IDiagnosticAnalyzerService _diagnosticAnalyzerService;

    // MEF-imported constructor; all collaborators are supplied by composition.
    [ImportingConstructor]
    public RenameTrackingTaggerProvider(
        ITextUndoHistoryRegistry undoHistoryRegistry,
        IWaitIndicator waitIndicator,
        IInlineRenameService inlineRenameService,
        IDiagnosticAnalyzerService diagnosticAnalyzerService,
        [ImportMany] IEnumerable<IRefactorNotifyService> refactorNotifyServices,
        [ImportMany] IEnumerable<Lazy<IAsynchronousOperationListener, FeatureMetadata>> asyncListeners)
    {
        _undoHistoryRegistry = undoHistoryRegistry;
        _waitIndicator = waitIndicator;
        _inlineRenameService = inlineRenameService;
        _refactorNotifyServices = refactorNotifyServices;
        _diagnosticAnalyzerService = diagnosticAnalyzerService;
        _asyncListener = new AggregateAsynchronousOperationListener(asyncListeners, FeatureAttribute.RenameTracking);
    }

    /// <summary>
    /// Creates (or reuses) the per-buffer StateMachine stored in the buffer's
    /// property bag and wraps it in a Tagger. Returns null when the requested
    /// tag type T is not one the Tagger implements (the 'as' cast fails).
    /// </summary>
    public ITagger<T> CreateTagger<T>(ITextBuffer buffer) where T : ITag
    {
        var stateMachine = buffer.Properties.GetOrCreateSingletonProperty(() => new StateMachine(buffer, _inlineRenameService, _asyncListener, _diagnosticAnalyzerService));
        return new Tagger(stateMachine, _undoHistoryRegistry, _waitIndicator, _refactorNotifyServices) as ITagger<T>;
    }

    // Clears any tracking session for the document, visible or not.
    internal static void ResetRenameTrackingState(Workspace workspace, DocumentId documentId)
    {
        ResetRenameTrackingStateWorker(workspace, documentId, visible: false);
    }

    // Clears only a visible tracking session; returns whether one was cleared.
    internal static bool ResetVisibleRenameTrackingState(Workspace workspace, DocumentId documentId)
    {
        return ResetRenameTrackingStateWorker(workspace, documentId, visible: true);
    }

    /// <summary>
    /// Locates the StateMachine hanging off the document's text buffer (if the
    /// document is open) and clears its tracking session.
    /// </summary>
    /// <returns>True if a session was cleared; false otherwise.</returns>
    internal static bool ResetRenameTrackingStateWorker(Workspace workspace, DocumentId documentId, bool visible)
    {
        if (workspace.IsDocumentOpen(documentId))
        {
            var document = workspace.CurrentSolution.GetDocument(documentId);
            SourceText text;
            StateMachine stateMachine;
            ITextBuffer textBuffer;
            if (document != null && document.TryGetText(out text))
            {
                textBuffer = text.Container.TryGetTextBuffer();
                if (textBuffer == null)
                {
                    // An open document should always have a buffer; crash loudly with
                    // diagnostic detail rather than limping on in a corrupt state.
                    Environment.FailFast(string.Format("document with name {0} is open but textBuffer is null. Textcontainer is of type {1}. SourceText is: {2}",
                                                        document.Name, text.Container.GetType().FullName, text.ToString()));
                }

                if (textBuffer.Properties.TryGetProperty(typeof(StateMachine), out stateMachine))
                {
                    if (visible)
                    {
                        return stateMachine.ClearVisibleTrackingSession();
                    }
                    else
                    {
                        return stateMachine.ClearTrackingSession();
                    }
                }
            }
        }

        return false;
    }

    /// <summary>
    /// Asks the buffer's StateMachine (if any) for rename-tracking diagnostics
    /// on the given tree. Safe to call from a background thread; returns an
    /// empty sequence when no buffer/state machine is attached.
    /// </summary>
    internal static async Task<IEnumerable<Diagnostic>> GetDiagnosticsAsync(SyntaxTree tree, DiagnosticDescriptor diagnosticDescriptor, CancellationToken cancellationToken)
    {
        try
        {
            // This can run on a background thread.
            SourceText text;
            StateMachine stateMachine;
            if (tree != null && tree.TryGetText(out text))
            {
                var textBuffer = text.Container.TryGetTextBuffer();
                if (textBuffer != null && textBuffer.Properties.TryGetProperty(typeof(StateMachine), out stateMachine))
                {
                    return await stateMachine.GetDiagnostic(tree, diagnosticDescriptor, cancellationToken).ConfigureAwait(false);
                }
            }

            return SpecializedCollections.EmptyEnumerable<Diagnostic>();
        }
        catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
        {
            throw ExceptionUtilities.Unreachable;
        }
    }

    /// <summary>
    /// Builds the "Rename X to Y" code action from the identifiers recorded in
    /// the diagnostic's property bag.
    /// </summary>
    internal static CodeAction CreateCodeAction(
        Document document,
        Diagnostic diagnostic,
        IWaitIndicator waitIndicator,
        IEnumerable<IRefactorNotifyService> refactorNotifyServices,
        ITextUndoHistoryRegistry undoHistoryRegistry)
    {
        // This can run on a background thread.
        var message = string.Format(
            EditorFeaturesResources.Rename_0_to_1,
            diagnostic.Properties[RenameTrackingDiagnosticAnalyzer.RenameFromPropertyKey],
            diagnostic.Properties[RenameTrackingDiagnosticAnalyzer.RenameToPropertyKey]);

        return new RenameTrackingCodeAction(document, message, refactorNotifyServices, undoHistoryRegistry);
    }

    /// <summary>
    /// Interprets the pending is-renamable computation: completed → use result,
    /// canceled → not renamable, still running → optionally block for the answer
    /// (only when waitForResult is true), otherwise report not renamable.
    /// </summary>
    internal static bool IsRenamableIdentifier(Task<TriggerIdentifierKind> isRenamableIdentifierTask, bool waitForResult, CancellationToken cancellationToken)
    {
        if (isRenamableIdentifierTask.Status == TaskStatus.RanToCompletion && isRenamableIdentifierTask.Result != TriggerIdentifierKind.NotRenamable)
        {
            return true;
        }
        else if (isRenamableIdentifierTask.Status == TaskStatus.Canceled)
        {
            return false;
        }
        else if (waitForResult)
        {
            return WaitForIsRenamableIdentifier(isRenamableIdentifierTask, cancellationToken);
        }
        else
        {
            return false;
        }
    }

    // Blocks on the task with the caller's cancellation token; a race where the
    // task itself was cancelled surfaces as AggregateException and means "no".
    internal static bool WaitForIsRenamableIdentifier(Task<TriggerIdentifierKind> isRenamableIdentifierTask, CancellationToken cancellationToken)
    {
        try
        {
            return isRenamableIdentifierTask.WaitAndGetResult(cancellationToken) != TriggerIdentifierKind.NotRenamable;
        }
        catch (AggregateException e) when (e.InnerException is OperationCanceledException)
        {
            // We passed in a different cancellationToken, so if there's a race and
            // isRenamableIdentifierTask was cancelled, we'll get an AggregateException
            return false;
        }
    }

    /// <summary>
    /// True when the document has a buffer with an attached StateMachine that
    /// currently has an invokable rename session.
    /// </summary>
    internal static bool CanInvokeRename(Document document)
    {
        SourceText text;
        StateMachine stateMachine;
        ITextBuffer textBuffer;
        TrackingSession unused;

        if (document == null || !document.TryGetText(out text))
        {
            return false;
        }

        textBuffer = text.Container.TryGetTextBuffer();
        return textBuffer != null &&
            textBuffer.Properties.TryGetProperty(typeof(StateMachine), out stateMachine) &&
            stateMachine.CanInvokeRename(out unused);
    }
}
}
using System;
using System.Collections;
using System.Data;
using System.Data.OleDb;
using System.Diagnostics;
using System.Drawing;
using System.Drawing.Printing;
using System.IO;
using System.Reflection;
using System.Text;
using System.Windows.Forms;
using C1.Win.C1Preview;
using C1.C1Report;
using Microsoft.Office.Interop.Excel;
using PCSUtils.Utils;
using C1PrintPreviewDialog = PCSUtils.Framework.ReportFrame.C1PrintPreviewDialog;
using DataTable = System.Data.DataTable;

namespace CASReport
{
    /// <summary>
    /// "Compare Actual And Standard Capacity" dynamic report. Builds a per-day
    /// capacity table for one month (standard, required, remaining, effectiveness),
    /// renders it through a C1Report layout plus Excel-generated charts, and shows
    /// it in a print-preview dialog.
    /// </summary>
    [Serializable]
    public class CASReport : MarshalByRefObject, IDynamicReport
    {
        private ReportBuilder mReportBuilder;
        private string mConnectionString;
        private bool mUseReportViewerRenderEngine = false;
        private string mReportFolder = string.Empty;
        private object mResult;
        private C1PrintPreviewControl mPreview;

        #region IDynamicReport Members

        public ReportBuilder PCSReportBuilder
        {
            get { return this.mReportBuilder; }
            set { mReportBuilder = value; }
        }

        public string PCSConnectionString
        {
            get { return mConnectionString; }
            set { mConnectionString = value; }
        }

        public object Result
        {
            get { return mResult; }
            set { mResult = value; }
        }

        /// <summary>
        /// Notify PCS whether the rendering report process is run by
        /// this IDynamicReport
        /// or the ReportViewer Engine (in the ReportViewer form)
        /// </summary>
        public bool UseReportViewerRenderEngine
        {
            get { return mUseReportViewerRenderEngine; }
            set { mUseReportViewerRenderEngine = value; }
        }

        /// <summary>
        /// Inform External Process where to find out the ReportLayout ( the PCS' ReportDefinition Folder Path )
        /// </summary>
        public string ReportDefinitionFolder
        {
            get { return mReportFolder; }
            set { mReportFolder = value; }
        }

        private string mLayoutFile = string.Empty;

        /// <summary>
        /// Inform External Process about the Layout file
        /// in which PCS instruct to use
        /// (PCS will assign this property while ReportViewer Form execute,
        /// ReportVIewer form will use the layout file in the report config entry to put in this property)
        /// </summary>
        public string ReportLayoutFile
        {
            get { return mLayoutFile; }
            set { mLayoutFile = value; }
        }

        /// <summary>Late-bound entry point used by the PCS host to call report methods by name.</summary>
        public object Invoke(string pstrMethod, object[] pobjParameters)
        {
            return this.GetType().InvokeMember(pstrMethod, BindingFlags.InvokeMethod, null, this, pobjParameters);
        }

        public C1PrintPreviewControl PCSReportViewer
        {
            get { return mPreview; }
            set { mPreview = value; }
        }

        #endregion

        /// <summary>
        /// Execute Report
        /// </summary>
        /// <param name="pstrCCNID">CCN</param>
        /// <param name="pstrYear">Year</param>
        /// <param name="pstrMonth">Month</param>
        /// <param name="pstrProLineID">Production Line</param>
        /// <param name="pstrWorkCenterID">Work Center</param>
        /// <returns>Result table with one row per measure (standard/required/remain/effective)
        /// and one column per day of the selected month.</returns>
        public DataTable ExecuteReport(string pstrCCNID, string pstrYear, string pstrMonth, string pstrProLineID, string pstrWorkCenterID)
        {
            const string WORKING_DATE = "WorkingDate";
            const string BEGIN_DATE = "BeginDate";
            const string END_DATE = "EndDate";
            const double FIELD_WIDTH = 585;
            const string FLD = "fld";
            int intCCNID = 0;
            int intYear = 0;
            int intMonth = 0;
            int intProductionLineID = 0;
            int intWorkCenterID = 0;
            // Best-effort parsing: invalid IDs silently fall back to 0 (existing contract).
            // NOTE(review): if pstrYear/pstrMonth are unparsable, the DateTime ctor below
            // will still throw ArgumentOutOfRangeException — callers appear to pass valid values.
            try { intCCNID = int.Parse(pstrCCNID); } catch {}
            try { intYear = int.Parse(pstrYear); } catch {}
            try { intMonth = int.Parse(pstrMonth); } catch {}
            try { intProductionLineID = int.Parse(pstrProLineID); } catch {}
            try { intWorkCenterID = int.Parse(pstrWorkCenterID); } catch {}
            DateTime dtmStartDate = new DateTime(intYear, intMonth, 1);
            DateTime dtmEndDate = dtmStartDate.AddMonths(1).AddDays(-1);
            string strExpression = string.Empty;
            // Per-day values keyed by column name ("D01".."D31") plus a "Total" entry.
            Hashtable arrStandardCapacity = new Hashtable();
            Hashtable arrActual = new Hashtable();
            Hashtable arrRemain = new Hashtable();
            Hashtable arrEffective = new Hashtable();
            C1Report rptReport = new C1Report();
            string strMonth = dtmStartDate.ToString("MMM");
            // planning offset
            DataTable dtbPlanningOffset = GetPlanningOffset(pstrCCNID);
            // get all cycles in selected year
            DataTable dtbCycles = GetCycles(pstrCCNID);
            // refine cycles as of date based on production line
            dtbCycles = RefineCycle(pstrProLineID, dtbPlanningOffset, dtbCycles);
            // all planning period
            ArrayList arrPlanningPeriod = GetPlanningPeriod(pstrCCNID);
            StringBuilder sbCycleIDs;
            DataTable dtbCyclesCurrentMonth = ArrangeCycles(dtmStartDate, dtmEndDate, dtbCycles, arrPlanningPeriod, out sbCycleIDs);
            DataTable dtbStandard = GetStandardCapacity(intWorkCenterID, intCCNID, intProductionLineID);
            DataTable dtbTRC = GetTotalRequiredCapacity(intProductionLineID, sbCycleIDs.ToString(), dtmStartDate, dtmEndDate);
            DataTable dtbValidWorkDay = GetWorkingDateFromWCCapacity(intProductionLineID);
            decimal decTotalStandard = 0;
            decimal decTotalActual = 0;
            decimal decTotalRemain = 0;
            decimal decTotalEffective = 0;
            DataRow[] drowStandard = null;
            // Accumulate per-day figures for every day of the month.
            for (int i = dtmStartDate.Day; i <= dtmEndDate.Day; i++)
            {
                DateTime dtmDate = new DateTime(dtmStartDate.Year, dtmStartDate.Month, i);
                string strColName = "D" + i.ToString("00");
                decimal decSC = 0;
                decimal decActual = 0;
                decimal decRemain = 0;
                decimal decEffective = 0;
                strExpression = BEGIN_DATE + "<='" + dtmDate.ToString("G") + "' AND " + END_DATE + ">='" + dtmDate.ToString("G") + "'";
                DataRow[] drowValidWorkDay = dtbValidWorkDay.Select(strExpression);
                if (drowValidWorkDay.Length == 0)
                {
                    // Non-working day: record zeroes and move on.
                    arrStandardCapacity.Add(strColName, decSC);
                    arrActual.Add(strColName, decActual);
                    arrRemain.Add(strColName, decRemain);
                    arrEffective.Add(strColName, decimal.Round(decEffective, 2));
                    continue;
                }
                string strCycleID = GetCycleOfDate(dtmDate, dtbCyclesCurrentMonth);
                string strFilter = "WorkingDate = '" + dtmDate.ToString() + "'" + " AND DCOptionMasterID = '" + strCycleID + "'";

                #region Standard Capacity
                drowStandard = dtbStandard.Select(strExpression);
                foreach (DataRow drowData in drowStandard)
                {
                    try { decSC += (decimal) drowData["Capacity"]; } catch { }
                }
                arrStandardCapacity.Add(strColName, decimal.Round(decSC, 0));
                decTotalStandard += decSC;
                #endregion

                #region Total Required Capacity
                DataRow[] drowTotalRequired = dtbTRC.Select(strFilter);
                foreach (DataRow drowData in drowTotalRequired)
                {
                    try { decActual += (decimal)drowData["TotalSecond"]; } catch {}
                }
                arrActual.Add(strColName, decimal.Round(decActual, 0));
                decTotalActual += decActual;
                #endregion

                #region Effective = Required Cap / Standard Cap
                // DivideByZeroException when decSC == 0 leaves decEffective at 0 (intentional).
                try { decEffective = decActual / decSC; } catch {}
                arrEffective.Add(strColName, decimal.Round(decEffective, 2));
                #endregion

                #region Remain Capacity
                // remain capacity
                decRemain = decSC - decActual;
                arrRemain.Add(strColName, decimal.Round(decRemain, 0));
                #endregion
            }
            arrStandardCapacity.Add("Total", decimal.Round(decTotalStandard, 0));
            arrActual.Add("Total", decimal.Round(decTotalActual, 0));
            decTotalRemain = decTotalStandard - decTotalActual;
            arrRemain.Add("Total", decimal.Round(decTotalRemain, 0));
            try { decTotalEffective = decTotalActual / decTotalStandard; } catch {}
            arrEffective.Add("Total", decimal.Round(decTotalEffective, 2));

            // column Name in the dtbResult
            const string STANDARD_CAPACITY = "StandardCapacity";
            const string TOTAL_REQUIRED_CAPACITY = "TotalRequiredCapacity";
            const string REMAIN_CAPACITY = "RemainCapacity";
            const string EFFECTIVE = "Effective";
            DataTable dtbResult = new DataTable();
            dtbResult.Columns.Add(new DataColumn("RowType", typeof (string)));

            #region Report layout
            mLayoutFile = "CASReport.xml";
            string[] arrstrReportInDefinitionFile = rptReport.GetReportInfo(mReportFolder + "\\" + mLayoutFile);
            rptReport.Load(mReportFolder + "\\" + mLayoutFile, arrstrReportInDefinitionFile[0]);
            arrstrReportInDefinitionFile = null;
            rptReport.Layout.PaperSize = PaperKind.A3;
            #endregion

            for (int i = dtmStartDate.Day; i <= dtmEndDate.Day; i++)
            {
                string strColumnName = "D" + i.ToString("00");
                dtbResult.Columns.Add(new DataColumn(strColumnName, typeof (decimal)));

                #region Report layout
                DateTime dtmDay = new DateTime(intYear, intMonth, i);
                string strDate = "fldD" + i.ToString("00");
                string strDay = "fldDay" + i.ToString("00");
                try { rptReport.Fields[strDate].Text = i + "-" + strMonth; } catch { }
                try { rptReport.Fields[strDay].Text = dtmDay.DayOfWeek.ToString().Substring(0, 3); } catch { }
                DataRow[] drowValidWorkDay = dtbValidWorkDay.Select("BeginDate <= '" + dtmDay.ToString("G") + "'" + " AND EndDate >='" + dtmDay.ToString("G") + "'");
                if (drowValidWorkDay.Length == 0)
                {
                    // Color non-working day headers: blue-on-yellow for Saturday, red-on-yellow otherwise.
                    try
                    {
                        if (dtmDay.DayOfWeek == DayOfWeek.Saturday)
                        {
                            rptReport.Fields[strDate].ForeColor = Color.Blue;
                            rptReport.Fields[strDate].BackColor = Color.Yellow;
                        }
                        else
                        {
                            rptReport.Fields[strDate].ForeColor = Color.Red;
                            rptReport.Fields[strDate].BackColor = Color.Yellow;
                        }
                    }
                    catch { }
                    try
                    {
                        if (dtmDay.DayOfWeek == DayOfWeek.Saturday)
                        {
                            rptReport.Fields[strDay].ForeColor = Color.Blue;
                            rptReport.Fields[strDay].BackColor = Color.Yellow;
                        }
                        else
                        {
                            rptReport.Fields[strDay].ForeColor = Color.Red;
                            rptReport.Fields[strDay].BackColor = Color.Yellow;
                        }
                    }
                    catch { }
                }
                #endregion
            }

            #region Layout the format based on days in month
            int intDaysInMonth = DateTime.DaysInMonth(dtmStartDate.Year, dtmStartDate.Month);
            if (intDaysInMonth < 31)
            {
                // Hide the unused trailing day columns in the 31-day layout.
                for (int i = intDaysInMonth + 1; i <= 31; i++)
                {
                    #region field name
                    string strDate = "fldD" + i.ToString("00");
                    string strDayOfWeek = "fldDay" + i.ToString("00");
                    string strDiv = "div" + i.ToString("00");
                    #endregion

                    #region Report Header
                    try { rptReport.Fields[strDate].Visible = false; } catch { }
                    try { rptReport.Fields[strDayOfWeek].Visible = false; } catch { }
                    try { rptReport.Fields[strDiv].Visible = false; } catch { }
                    #endregion
                }
                try
                {
                    #region Resize all line
                    //double dWidth = rptReport.Fields["line1"].Width;
                    for (int i = 1; i <= 7; i++)
                        rptReport.Fields["line" + i].Width = rptReport.Fields["line" + i].Width - (31 - intDaysInMonth)*FIELD_WIDTH;
                    #endregion
                    double dWidthToChange = (31 - intDaysInMonth)*FIELD_WIDTH;
                    #region Total columns
                    rptReport.Fields["fldDTotal"].Left =
                        rptReport.Fields["fldStandardCapacityD"].Left =
                        rptReport.Fields["fldTotalRequiredCapacityD"].Left =
                        rptReport.Fields["fldEffectiveD"].Left =
                        rptReport.Fields["fldRemainCapacityD"].Left =
                        rptReport.Fields["fldDTotal"].Left - dWidthToChange;
                    rptReport.Fields["divTotal"].Left = rptReport.Fields["fldDTotal"].Left + FIELD_WIDTH;
                    #endregion
                }
                catch (Exception)
                {
                    // FIX: was "throw ex;", which resets the stack trace; "throw;" preserves it.
                    throw;
                }
            }
            #endregion

            DataRow drowSC = dtbResult.NewRow();
            drowSC["RowType"] = STANDARD_CAPACITY;
            DataRow drowTR = dtbResult.NewRow();
            drowTR["RowType"] = TOTAL_REQUIRED_CAPACITY;
            DataRow drowRC = dtbResult.NewRow();
            drowRC["RowType"] = REMAIN_CAPACITY;
            DataRow drowEff = dtbResult.NewRow();
            drowEff["RowType"] = EFFECTIVE;
            for (int i = dtmStartDate.Day; i <= dtmEndDate.Day; i++)
            {
                string strColumnName = "D" + i.ToString("00");
                drowSC[strColumnName] = arrStandardCapacity[strColumnName];
                drowTR[strColumnName] = arrActual[strColumnName];
                drowRC[strColumnName] = arrRemain[strColumnName];
                drowEff[strColumnName] = arrEffective[strColumnName];
            }
            dtbResult.Rows.Add(drowSC);
            dtbResult.Rows.Add(drowTR);
            dtbResult.Rows.Add(drowRC);
            dtbResult.Rows.Add(drowEff);

            #region RENDER REPORT
            const string REPORTFLD_CHART = "fldChart";
            const string REPORTFLD_TOTALCHART = "fldTotalChart";
            if (dtbResult.Rows.Count > 0)
            {
                #region BUILD CHART, save to image in clipboard, and then put in the report field fldChart
                Field fldChart = rptReport.Fields[REPORTFLD_CHART];
                Field fldTotalChart = rptReport.Fields[REPORTFLD_TOTALCHART];

                #region INIT
                string EXCEL_FILE = "CASReport.xls";
                string strTemplateFilePath = mReportFolder + Path.DirectorySeparatorChar + EXCEL_FILE;
                string strDestinationFilePath = mReportFolder + Path.DirectorySeparatorChar + Path.GetFileNameWithoutExtension(EXCEL_FILE) + FormControlComponents.NowToUTCString() + ".XLS";
                // Copy layout excel report file to ExcelReport folder with a UTC datetime name
                File.Copy(strTemplateFilePath, strDestinationFilePath, true);
                ExcelReportBuilder objXLS = new ExcelReportBuilder(strDestinationFilePath);
                #endregion

                try
                {
                    #region BUILD THE REPORT ON EXCEL FILE
                    string[] arrExcelColumnHeading = new string[DateTime.DaysInMonth(intYear, intMonth)];
                    for (int i = 1; i <= intDaysInMonth; i++)
                    {
                        DateTime dtmDate = new DateTime(intYear, intMonth, i);
                        string strColHeading = i + "-" + strMonth + "\n" + dtmDate.DayOfWeek.ToString().Substring(0, 3);
                        arrExcelColumnHeading[i - 1] = strColHeading;
                    }
                    double[,] arrExcelStandard = new double[1,intDaysInMonth];
                    double[,] arrExcelActual = new double[1,intDaysInMonth];
                    for (int i = dtmStartDate.Day; i <= dtmEndDate.Day; i++)
                    {
                        string strSC = "fldStandardCapacityD" + i.ToString("00");
                        string strTR = "fldTotalRequiredCapacityD" + i.ToString("00");
                        string strRC = "fldRemainCapacityD" + i.ToString("00");
                        string strEff = "fldEffectiveD" + i.ToString("00");
                        string strColName = "D" + i.ToString("00");
                        rptReport.Fields[strSC].Text = arrStandardCapacity[strColName].ToString();
                        rptReport.Fields[strTR].Text = arrActual[strColName].ToString();
                        rptReport.Fields[strRC].Text = arrRemain[strColName].ToString();
                        rptReport.Fields[strEff].Text = arrEffective[strColName].ToString();
                        try { arrExcelStandard[0, i - 1] = Decimal.ToDouble((decimal) arrStandardCapacity[strColName]); } catch {}
                        try { arrExcelActual[0, i - 1] = Decimal.ToDouble((decimal) arrActual[strColName]); } catch {}
                    }
                    // total field
                    rptReport.Fields["fldStandardCapacityD"].Text = arrStandardCapacity["Total"].ToString();
                    rptReport.Fields["fldTotalRequiredCapacityD"].Text = arrActual["Total"].ToString();
                    rptReport.Fields["fldRemainCapacityD"].Text = arrRemain["Total"].ToString();
                    rptReport.Fields["fldEffectiveD"].Text = arrEffective["Total"].ToString();
                    // Pick the Excel range matching the number of days (28..31 columns).
                    switch (intDaysInMonth)
                    {
                        case 28:
                            objXLS.GetRange("A1", "AB1").Value2 = arrExcelColumnHeading;
                            objXLS.GetRange("A2", "AB2").Value2 = arrExcelStandard;
                            objXLS.GetRange("A3", "AB3").Value2 = arrExcelActual;
                            break;
                        case 29:
                            objXLS.GetRange("A1", "AC1").Value2 = arrExcelColumnHeading;
                            objXLS.GetRange("A2", "AC2").Value2 = arrExcelStandard;
                            objXLS.GetRange("A3", "AC3").Value2 = arrExcelActual;
                            break;
                        case 30:
                            objXLS.GetRange("A1", "AD1").Value2 = arrExcelColumnHeading;
                            objXLS.GetRange("A2", "AD2").Value2 = arrExcelStandard;
                            objXLS.GetRange("A3", "AD3").Value2 = arrExcelActual;
                            break;
                        default:
                            objXLS.GetRange("A1", "AE1").Value2 = arrExcelColumnHeading;
                            objXLS.GetRange("A2", "AE2").Value2 = arrExcelStandard;
                            objXLS.GetRange("A3", "AE3").Value2 = arrExcelActual;
                            break;
                    }
                    // Copy each Excel chart to the clipboard as a bitmap and paste into the report field.
                    ChartObject chart = objXLS.GetChart("DetailChart");
                    // SeriesCollection serieSC = (SeriesCollection) chart.Chart.SeriesCollection(0);
                    chart.Chart.CopyPicture(XlPictureAppearance.xlScreen, XlCopyPictureFormat.xlBitmap, XlPictureAppearance.xlScreen);
                    Image image = (Image) Clipboard.GetDataObject().GetData(typeof (Bitmap));
                    fldChart.Visible = true;
                    fldChart.Text = "";
                    fldChart.Picture = image;
                    chart = objXLS.GetChart("Chart 12");
                    chart.Chart.CopyPicture(XlPictureAppearance.xlScreen, XlCopyPictureFormat.xlBitmap, XlPictureAppearance.xlScreen);
                    image = (Image) Clipboard.GetDataObject().GetData(typeof (Bitmap));
                    fldTotalChart.Visible = true;
                    fldTotalChart.Text = "";
                    fldTotalChart.Picture = image;
                    #endregion
                }
                catch (Exception ex)
                {
                    // Chart generation is best-effort: log and continue without charts.
                    Debug.WriteLine(ex.ToString());
                }
                finally
                {
                    #region SAVE, CLOSE EXCEL FILE CONTAIN REPORT
                    objXLS.CloseWorkbook();
                    objXLS.Dispose();
                    objXLS = null;
                    #endregion
                }
                #endregion BUILD CHART
            }

            #region MODIFY THE REPORT LAYOUT
            #region PUSH PARAMETER VALUE
            const string REPORTFLD_PARAMETER_CCN = "fldParameterCCN";
            const string REPORTFLD_PARAMETER_MONTH = "fldParameterMonth";
            const string REPORTFLD_PARAMETER_YEAR = "fldParameterYear";
            const string REPORTFLD_PARAMETER_PRODUCTIONLINE = "fldParameterProductionLine";
            const string REPORTFLD_PARAMETER_WORKCENTER = "fldParameterWorkCenter";
            string strCCN = GetCCNCode(intCCNID);
            rptReport.Fields[REPORTFLD_PARAMETER_CCN].Text = strCCN;
            rptReport.Fields[REPORTFLD_PARAMETER_MONTH].Text = pstrMonth;
            rptReport.Fields[REPORTFLD_PARAMETER_YEAR].Text = pstrYear;
            string strProductionLine = GetProCodeAndName(intProductionLineID);
            rptReport.Fields[REPORTFLD_PARAMETER_PRODUCTIONLINE].Text = strProductionLine;
            string strWorkCenter = GetWCCodeAndName(intWorkCenterID);
            rptReport.Fields[REPORTFLD_PARAMETER_WORKCENTER].Text = strWorkCenter;
            #endregion
            #endregion

            rptReport.DataSource.Recordset = dtbResult;
            rptReport.Render();
            C1PrintPreviewDialog ppvViewer = new C1PrintPreviewDialog();
            ppvViewer.FormTitle = "Compare Actual And Standard Capacity";
            ppvViewer.ReportViewer.Document = rptReport.Document;
            ppvViewer.Show();
            #endregion

            return dtbResult;
        }

        /// <summary>
        /// Gets standard capacity of work center in a day
        /// </summary>
        /// <param name="pintWorkCenterID">Selected Work Center</param>
        /// <param name="pintCCNID">CCN</param>
        /// <param name="pintProductionLineID">Production Line</param>
        /// <returns>Standard Capacity</returns>
        private DataTable GetStandardCapacity(int pintWorkCenterID, int pintCCNID, int pintProductionLineID)
        {
            OleDbConnection oconPCS = null;
            OleDbCommand ocmdPCS = null;
            try
            {
                oconPCS = new OleDbConnection(mConnectionString);
                string strSql = "SELECT ISNULL(SUM(ISNULL(PRO_WCCapacity.Capacity, 0)), 0) AS 'Capacity',"
                    + " PRO_WCCapacity.BeginDate, PRO_WCCapacity.EndDate"
                    + " FROM PRO_WCCapacity JOIN MST_WorkCenter"
                    + " ON PRO_WCCapacity.WorkCenterID = MST_WorkCenter.WorkCenterID"
                    + " LEFT JOIN PRO_ProductionLine"
                    + " ON MST_WorkCenter.ProductionLineID = PRO_ProductionLine.ProductionLineID"
                    + " WHERE PRO_WCCapacity.WorkCenterID = " + pintWorkCenterID
                    + " AND ISNULL(MST_WorkCenter.IsMain, 0) = 1"
                    + " AND PRO_ProductionLine.ProductionLineID = " + pintProductionLineID
                    + " AND PRO_WCCapacity.CCNID = " + pintCCNID
                    + " GROUP BY PRO_WCCapacity.BeginDate, PRO_WCCapacity.EndDate";
                ocmdPCS = new OleDbCommand(strSql, oconPCS);
                ocmdPCS.Connection.Open();
                OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS);
                DataTable dtbData = new DataTable();
                odadPCS.Fill(dtbData);
                return dtbData;
            }
            finally
            {
                if (oconPCS != null)
                    if (oconPCS.State != ConnectionState.Closed)
                        oconPCS.Close();
            }
        }

        /// <summary>
        /// Gets total required capacity
        /// </summary>
        /// <param name="pintProductionLineID">Production Line</param>
        /// <param name="pstrOptionIDs">Cycle Option go thru selected month</param>
        /// <param name="pdtmFromDate">From Date</param>
        /// <param name="pdtmToDate">To Date</param>
        /// <returns>Total Required Capacity</returns>
        private DataTable GetTotalRequiredCapacity(int pintProductionLineID, string pstrOptionIDs, DateTime pdtmFromDate, DateTime pdtmToDate)
        {
            OleDbConnection oconPCS = null;
            try
            {
                oconPCS = new OleDbConnection(mConnectionString);
                string strSql = "SELECT SUM(ISNULL(TotalSecond, 0)) AS TotalSecond, WorkingDate, DCOptionMasterID"
                    + " FROM PRO_DCPResultDetail JOIN PRO_DCPResultMaster"
                    + " ON PRO_DCPResultDetail.DCPResultMasterID = PRO_DCPResultMaster.DCPResultMasterID"
                    + " JOIN MST_WorkCenter"
                    + " ON PRO_DCPResultMaster.WorkCenterID = MST_WorkCenter.WorkCenterID"
                    + " WHERE MST_WorkCenter.ProductionLineID = " + pintProductionLineID
                    + " AND DCOptionMasterID IN (" + pstrOptionIDs + ")"
                    + " AND IsMain = 1"
                    + " AND WorkingDate >= ? AND WorkingDate <= ?"
                    + " GROUP BY DCOptionMasterID, WorkingDate";
                OleDbCommand cmdPCS = new OleDbCommand(strSql, oconPCS);
                cmdPCS.Parameters.Add(new OleDbParameter("FromDate", OleDbType.Date)).Value = pdtmFromDate;
                cmdPCS.Parameters.Add(new OleDbParameter("ToDate", OleDbType.Date)).Value = pdtmToDate;
                cmdPCS.Connection.Open();
                DataTable dtbTRC = new DataTable();
                OleDbDataAdapter odadPCS = new OleDbDataAdapter(cmdPCS);
                odadPCS.Fill(dtbTRC);
                return dtbTRC;
            }
            finally
            {
                if (oconPCS != null)
                    if (oconPCS.State != ConnectionState.Closed)
                        oconPCS.Close();
            }
        }

        /// <summary>
        /// Get CCN Code from ID
        /// </summary>
        /// <param name="pintCCNID">CCN ID</param>
        /// <returns>CCN Code, or empty string when not found</returns>
        private string GetCCNCode(int pintCCNID)
        {
            OleDbConnection oconPCS = null;
            try
            {
                oconPCS = new OleDbConnection(mConnectionString);
                string strSql = "SELECT Code FROM MST_CCN WHERE CCNID = " + pintCCNID;
                OleDbCommand cmdData = new OleDbCommand(strSql, oconPCS);
                cmdData.Connection.Open();
                object objResult = cmdData.ExecuteScalar();
                // ExecuteScalar returns null when no row matches; treat that as empty.
                try { return objResult.ToString(); }
                catch { return string.Empty; }
            }
            finally
            {
                if (oconPCS != null)
                    if (oconPCS.State != ConnectionState.Closed)
                        oconPCS.Close();
            }
        }

        /// <summary>
        /// Get Production Line Code and Name from ID
        /// </summary>
        /// <param name="pintProID">Production Line ID</param>
        /// <returns>Pro Code (Pro Name)</returns>
        private string GetProCodeAndName(int pintProID)
        {
            OleDbConnection oconPCS = null;
            try
            {
                oconPCS = new OleDbConnection(mConnectionString);
                string strSql = "SELECT Code + ' (' + Name + ')' FROM PRO_ProductionLine WHERE ProductionLineID = " + pintProID;
                OleDbCommand cmdData = new OleDbCommand(strSql, oconPCS);
                cmdData.Connection.Open();
                object objResult = cmdData.ExecuteScalar();
                try { return objResult.ToString(); }
                catch { return string.Empty; }
            }
            finally
            {
                if (oconPCS != null)
                    if (oconPCS.State != ConnectionState.Closed)
                        oconPCS.Close();
            }
        }

        /// <summary>
        /// Get Workcenter Code and Name from ID
        /// </summary>
        /// <param name="pintWorkCenterID">Work Center ID</param>
        /// <returns>Code (Name)</returns>
        private string GetWCCodeAndName(int pintWorkCenterID)
        {
            OleDbConnection oconPCS = null;
            try
            {
                oconPCS = new OleDbConnection(mConnectionString);
                string strSql = "SELECT Code + ' (' + Name + ')' FROM MST_WorkCenter WHERE WorkCenterID = " + pintWorkCenterID;
                OleDbCommand cmdData = new OleDbCommand(strSql, oconPCS);
                cmdData.Connection.Open();
                object objResult = cmdData.ExecuteScalar();
                try { return objResult.ToString(); }
                catch { return string.Empty; }
            }
            finally
            {
                if (oconPCS != null)
                    if (oconPCS.State != ConnectionState.Closed)
                        oconPCS.Close();
            }
        }

        /// <summary>
        /// Gets working date of main work center from work center capactity
        /// </summary>
        /// <param name="pintProductionLineID">Production Line ID</param>
        /// <returns>DataTable</returns>
        private DataTable GetWorkingDateFromWCCapacity(int pintProductionLineID)
        {
            OleDbConnection oconPCS = null;
            OleDbCommand ocmdPCS = null;
            DataTable dtbData = new DataTable();
            try
            {
                string strSql = "SELECT BeginDate, EndDate"
                    + " FROM PRO_WCCapacity JOIN MST_WorkCenter"
                    + " ON PRO_WCCapacity.WorkCenterID = MST_WorkCenter.WorkCenterID"
                    + " JOIN PRO_ProductionLine ON MST_WorkCenter.ProductionLineID = PRO_ProductionLine.ProductionLineID"
                    + " WHERE MST_WorkCenter.IsMain = 1"
                    + " AND MST_WorkCenter.ProductionLineID = " + pintProductionLineID;
                oconPCS = new OleDbConnection(mConnectionString);
                ocmdPCS = new OleDbCommand(strSql, oconPCS);
                ocmdPCS.Connection.Open();
                OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS);
                odadPCS.Fill(dtbData);
                return dtbData;
            }
            finally
            {
                if (oconPCS != null)
                {
                    if (oconPCS.State != ConnectionState.Closed)
                    {
                        oconPCS.Close();
                    }
                }
            }
        }

        /// <summary>
        /// Gets all cycles of CCN
        /// </summary>
        /// <param name="pstrCCNID">CCN</param>
        /// <returns>All cycles</returns>
        public DataTable GetCycles(string pstrCCNID)
        {
            OleDbConnection oconPCS = null;
            OleDbDataAdapter odadPCS = null;
            try
            {
                DataTable dtbData = new DataTable();
                oconPCS = new OleDbConnection(mConnectionString);
                string strSql = "SELECT DCOptionMasterID, PlanningPeriod, Version,"
                    + " AsOfDate AS FromDate, DATEADD(dd, PlanHorizon, AsOfDate) AS ToDate"
                    + " FROM PRO_DCOptionMaster"
                    + " WHERE CCNID = " + pstrCCNID;
                OleDbCommand cmdData = new OleDbCommand(strSql, oconPCS);
                odadPCS = new OleDbDataAdapter(cmdData);
                cmdData.Connection.Open();
                odadPCS.Fill(dtbData);
                return dtbData;
            }
            finally
            {
                if (oconPCS != null)
                    if (oconPCS.State != ConnectionState.Closed)
                        oconPCS.Close();
            }
        }

        /// <summary>
        /// Arrange cycles by as of date and planning period
        /// </summary>
        /// <param name="pdtmFromMonth">From Month</param>
        /// <param name="pdtmToMonth">To Month</param>
        /// <param name="pdtbCycles">All cycles</param>
        /// <param name="parrPlanningPeriod">All Planning Period</param>
        /// <param name="sbCycleIDs">out: cycle ids in range</param>
        /// <returns>Arranged cycles</returns>
        private DataTable ArrangeCycles(DateTime pdtmFromMonth, DateTime pdtmToMonth, DataTable pdtbCycles, ArrayList parrPlanningPeriod, out StringBuilder sbCycleIDs)
        {
            DataTable dtbResult = pdtbCycles.Clone();
            DateTime dtmFromDate = new DateTime(pdtmFromMonth.Year, pdtmFromMonth.Month, 1);
            DateTime dtmToDate = dtmFromDate.AddMonths(1).AddDays(-1);
            if (pdtmToMonth > DateTime.MinValue)
                dtmToDate = pdtmToMonth;
            sbCycleIDs = new StringBuilder();
            DataTable dtbTemp = pdtbCycles.Clone();
            ArrayList arrMonths = GetAllMonthInRange(dtmFromDate, dtmToDate);

            #region find all cycle go thru the date range
            foreach (DateTime dtmPeriod in parrPlanningPeriod)
            {
                DataRow[] drowPeriod = pdtbCycles.Select("PlanningPeriod = '" + dtmPeriod.ToString("G") + "'", "Version DESC");
                foreach (DataRow period in drowPeriod)
                {
                    DateTime dtmFromDateCycle = (DateTime)period["FromDate"];
                    DateTime dtmToDateCycle = (DateTime)period["ToDate"];
                    ArrayList arrCycleMonths = GetAllMonthInRange(dtmFromDateCycle, dtmToDateCycle);
                    foreach (DateTime dtmDate in arrMonths)
                    {
                        if (arrCycleMonths.Contains(dtmDate))
                            dtbTemp.ImportRow(period);
                    }
                }
            }
            #endregion

            #region sorting all cycle
            // order by planning period, from date and version
            DataRow[] drowCycles = dtbTemp.Select("", "PlanningPeriod ASC, FromDate ASC, Version DESC");
            DateTime dtmPreFromDate = DateTime.MinValue;
            int intPreVersion = -1;
            DateTime dtmPlanningPeriod = DateTime.MinValue;
            if (drowCycles.Length > 0)
                dtmPlanningPeriod = (DateTime)drowCycles[0]["PlanningPeriod"];
            for (int i = 0; i < drowCycles.Length; i++)
            {
                DataRow drowCycle = drowCycles[i];
                // from date of current cycle
                DateTime dtmCurFromDate = (DateTime)drowCycle["FromDate"];
                // version of current cycle
                int intVersion = Convert.ToInt32(drowCycle["Version"]);
                // this cycle is old version of period, from date is greater than new version then ignore it
                if (intVersion < intPreVersion && dtmCurFromDate > dtmPreFromDate && dtmPlanningPeriod.Equals(drowCycle["PlanningPeriod"]))
                    continue;
                // re-assign value
                intPreVersion = intVersion;
                dtmPreFromDate = dtmCurFromDate;
                dtmPlanningPeriod = (DateTime)drowCycle["PlanningPeriod"];
                // update ToDate of previous cycle
                if (i > 0)
                {
                    // previous cycle
                    DataRow drowPreCycle = drowCycles[i-1];
                    // as of date of current cycle
                    DateTime dtmAsOfDate = (DateTime)drowCycle["FromDate"];
                    // update to date of previous cycle
                    drowPreCycle["ToDate"] = dtmAsOfDate.AddDays(-1);
                }
            }
            if (drowCycles.Length > 0)
                drowCycles[drowCycles.Length - 1]["ToDate"] = dtmToDate;
            // import to result table
            foreach (DataRow drowCycle in drowCycles)
            {
                sbCycleIDs.Append(drowCycle["DCOptionMasterID"].ToString() + ",");
                dtbResult.ImportRow(drowCycle);
            }
            #endregion

            // Trailing "0" keeps the SQL IN (...) list valid even when no cycle matched.
            sbCycleIDs.Append("0");
            return dtbResult;
        }

        /// <summary>
        /// Gets all months appears in range of date
        /// </summary>
        /// <param name="pdtmFromDate">From Date</param>
        /// <param name="pdtmToDate">To Date</param>
        /// <returns>List of Month</returns>
        private ArrayList GetAllMonthInRange(DateTime pdtmFromDate, DateTime pdtmToDate)
        {
            pdtmFromDate = new DateTime(pdtmFromDate.Year, pdtmFromDate.Month, 1);
            pdtmToDate = new DateTime(pdtmToDate.Year, pdtmToDate.Month, 1);
            ArrayList arrMonths = new ArrayList();
            for (DateTime dtmDate = pdtmFromDate; dtmDate <= pdtmToDate; dtmDate = dtmDate.AddMonths(1))
            {
                arrMonths.Add(dtmDate);
            }
            return arrMonths;
        }

        /// <summary>
        /// Gets cycle of given date
        /// </summary>
        /// <param name="pdtmDate">Date</param>
        /// <param name="pdtbCycles">All cycles</param>
        /// <returns>Cycle ID, or "0" when no cycle covers the date</returns>
        private string GetCycleOfDate(DateTime pdtmDate, DataTable pdtbCycles)
        {
            string strCycleID = "0";
            foreach (DataRow drowCycle in pdtbCycles.Rows)
            {
                DateTime dtmFromDate = (DateTime)drowCycle["FromDate"];
                DateTime dtmToDate = (DateTime)drowCycle["ToDate"];
                if (pdtmDate >= dtmFromDate && pdtmDate <= dtmToDate)
                {
                    strCycleID = drowCycle["DCOptionMasterID"].ToString();
                    break;
                }
            }
            return strCycleID;
        }

        /// <summary>
        /// Gets all planning period of CCN
        /// </summary>
        /// <param name="pstrCCNID">CCNID</param>
        /// <returns>List of Planning Period</returns>
        private ArrayList GetPlanningPeriod(string pstrCCNID)
        {
            OleDbConnection oconPCS = null;
            OleDbCommand ocmdPCS = null;
            try
            {
                string strSql = String.Empty;
                strSql = "SELECT DISTINCT PlanningPeriod FROM PRO_DCOptionMaster WHERE CCNID = " + pstrCCNID;
                oconPCS = new OleDbConnection(mConnectionString);
                ocmdPCS = new OleDbCommand(strSql, oconPCS);
                ocmdPCS.Connection.Open();
                OleDbDataReader reader = ocmdPCS.ExecuteReader();
                ArrayList arrDate = new ArrayList();
                while (reader.Read())
                    arrDate.Add((DateTime)reader["PlanningPeriod"]);
                return arrDate;
            }
            finally
            {
                if (oconPCS != null)
                {
                    if (oconPCS.State != ConnectionState.Closed)
                    {
                        oconPCS.Close();
                    }
                }
            }
        }

        /// <summary>
        /// Gets planning offsets (per production line) for all cycles of a CCN.
        /// </summary>
        /// <param name="pstrCCNID">CCN ID</param>
        /// <returns>PlanningStartDate / DCOptionMasterID / ProductionLineID rows</returns>
        private DataTable GetPlanningOffset(string pstrCCNID)
        {
            OleDbConnection oconPCS = null;
            OleDbCommand ocmdPCS = null;
            try
            {
                string strSql = "SELECT PRO_PlanningOffset.PlanningStartDate, PRO_PlanningOffset.DCOptionMasterID,"
                    + " PRO_PlanningOffset.ProductionLineID"
                    + " FROM PRO_PlanningOffset JOIN PRO_DCOptionMaster"
                    + " ON PRO_PlanningOffset.DCOptionMasterID = PRO_DCOptionMaster.DCOptionMasterID"
                    + " WHERE PRO_DCOptionMaster.CCNID = " + pstrCCNID;
                oconPCS = new OleDbConnection(mConnectionString);
                ocmdPCS = new OleDbCommand(strSql, oconPCS);
                ocmdPCS.Connection.Open();
                DataTable dtbData = new DataTable();
                OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS);
                odadPCS.Fill(dtbData);
                return dtbData;
            }
            finally
            {
                if (oconPCS != null)
                {
                    if (oconPCS.State != ConnectionState.Closed)
                    {
                        oconPCS.Close();
                    }
                }
            }
        }

        /// <summary>
        /// Overwrites each cycle's FromDate with the planning-offset start date
        /// configured for the given production line (when one exists).
        /// </summary>
        private DataTable RefineCycle(string pstrProductionLineID, DataTable pdtbPlanningOffset, DataTable pdtbCycles)
        {
            foreach (DataRow drowData in pdtbCycles.Rows)
            {
                string strCycleID = drowData["DCOptionMasterID"].ToString();
                string strFilter = "DCOptionMasterID = '" + strCycleID + "' AND ProductionLineID = '" + pstrProductionLineID + "'";
                DataRow[] drowOffset = pdtbPlanningOffset.Select(strFilter);
                // refine as of date of the cycle based on planning offset of current production line
                if (drowOffset.Length > 0)
                {
                    DateTime dtmStartDate = (DateTime) drowOffset[0]["PlanningStartDate"];
                    // Truncate to date-only before overwriting the cycle's FromDate.
                    dtmStartDate = new DateTime(dtmStartDate.Year, dtmStartDate.Month, dtmStartDate.Day);
                    drowData["FromDate"] = dtmStartDate;
                }
            }
            return pdtbCycles;
        }
    }
}
using UnityEngine;
using System;
using UnityEngine.Events;

namespace UnityStandardAssets.ImageEffects
{
    // TODO: Retina support for the wheels (not sure how Unity handles Retina)
    // TODO: Cleanup all the temp stuff
    [ExecuteInEditMode]
    [RequireComponent(typeof(Camera))]
    [AddComponentMenu("Image Effects/Color Adjustments/Tonemapping and Color Grading")]
    public class TonemappingColorGrading : MonoBehaviour
    {
        #region Temp stuff, should be removed before release
        [NonSerialized]
        public bool fastMode = true;    // true = bake curves into a 1D LUT instead of a 3D LUT
        public bool debugClamp = false; // true = use the debug shader passes
        #endregion

#if UNITY_EDITOR
        // EDITOR ONLY call for allowing the editor to update
        // the histogram
        public UnityAction<RenderTexture, Material> onFrameEndEditorOnly;

        [SerializeField]
        public ComputeShader histogramComputeShader;

        [SerializeField]
        public Shader histogramShader;
#endif

        [AttributeUsage(AttributeTargets.Field)]
        public class SettingsGroup : Attribute
        { }

        public class DrawFilmicCurveAttribute : Attribute
        { }

        // Shader pass indices; must match the pass order in the tonemapping shader.
        public enum Passes
        {
            ThreeD = 0,
            OneD = 1,
            ThreeDDebug = 2,
            OneDDebug = 3
        }

        /// <summary>Parameters of the filmic tonemapping curve.</summary>
        [Serializable]
        public struct FilmicCurve
        {
            public bool enabled;

            // LUT
            [Range(-4f, 4f)][Tooltip("Exposure Bias|Adjusts the overall exposure of the scene")]
            public float exposureBias;

            [Range(0f, 2f)][Tooltip("Contrast|Contrast adjustment (log-space)")]
            public float contrast;

            [Range(0f, 1f)][Tooltip("Toe|Toe of the filmic curve; affects the darker areas of the scene")]
            public float toe;

            [Range(0f, 1f)][Tooltip("Shoulder|Shoulder of the filmic curve; brings overexposed highlights back into range")]
            public float lutShoulder;

            public static FilmicCurve defaultFilmicCurve = new FilmicCurve
            {
                enabled = false,
                exposureBias = 0.0f,
                contrast = 1.0f,
                toe = 0.0f,
                lutShoulder = 0.0f
            };
        }

        // Inspector hint controlling how the color wheels are laid out.
        public class ColorWheelGroup : PropertyAttribute
        {
            public int minSizePerWheel = 60;
            public int maxSizePerWheel = 150;

            public ColorWheelGroup()
            { }

            public ColorWheelGroup(int minSizePerWheel, int maxSizePerWheel)
            {
                this.minSizePerWheel = minSizePerWheel;
                this.maxSizePerWheel = maxSizePerWheel;
            }
        }

        [Serializable]
        public struct ColorGradingColors
        {
            [Tooltip("Shadows|Shadows color")]
            public Color shadows;

            [Tooltip("Midtones|Midtones color")]
            public Color midtones;

            [Tooltip("Highlights|Highlights color")]
            public Color highlights;

            public static ColorGradingColors defaultGradingColors = new ColorGradingColors
            {
                shadows = new Color(1, 1, 1),
                midtones = new Color(1, 1, 1),
                highlights = new Color(1, 1, 1)
            };
        }

        /// <summary>Parameters of the color-grading stage (white balance, saturation, gamma, wheels).</summary>
        [Serializable]
        public struct ColorGrading
        {
            public bool enabled;

            [ColorUsage(false)][Tooltip("White Balance|Adjusts the white color before tonemapping")]
            public Color whiteBalance;

            [Range(0f, 2f)][Tooltip("Vibrance|Pushes the intensity of all colors")]
            public float saturation;

            [Range(0f, 5f)][Tooltip("Gamma|Adjusts the gamma")]
            public float gamma;

            [ColorWheelGroup]
            public ColorGradingColors lutColors;

            public static ColorGrading defaultColorGrading = new ColorGrading
            {
                whiteBalance = Color.white,
                enabled = false,
                saturation = 1.0f,
                gamma = 1.0f,
                lutColors = ColorGradingColors.defaultGradingColors
            };
        }

        // Set whenever a parameter changes; forces the LUTs to be rebuilt on next render.
        [NonSerialized]
        private bool m_Dirty = true;

        public void SetDirty()
        {
            m_Dirty = true;
        }

        [SerializeField][SettingsGroup][DrawFilmicCurve]
        private FilmicCurve m_FilmicCurve = FilmicCurve.defaultFilmicCurve;

        public FilmicCurve filmicCurve
        {
            get { return m_FilmicCurve; }
            set { m_FilmicCurve = value; SetDirty(); }
        }

        [SerializeField][SettingsGroup]
        private ColorGrading m_ColorGrading = ColorGrading.defaultColorGrading;

        public ColorGrading colorGrading
        {
            get { return m_ColorGrading; }
            set { m_ColorGrading = value; SetDirty(); }
        }

        // called in editor when UI is changed
        private void OnValidate()
        {
            SetDirty();
        }

        // The actual texture that we build
        private Texture3D m_LutTex;

        // 1D curves
        private Texture2D m_LutCurveTex1D;

        private bool isLinearColorSpace
        {
            get { return QualitySettings.activeColorSpace == ColorSpace.Linear; }
        }

        [SerializeField][Tooltip("Lookup Texture|Custom lookup texture")]
        private Texture2D m_UserLutTexture;

        public Texture2D userLutTexture
        {
            get { return m_UserLutTexture; }
            set { m_UserLutTexture = value; SetDirty(); }
        }

        /// <summary>
        /// Piecewise power function used for the toe/linear/shoulder segments of the
        /// filmic curve.
        /// </summary>
        public struct SimplePolyFunc
        {
            // f(x) = signY * A * (signX * x - x0) ^ b + y0
            public float A;
            public float B;
            public float x0;
            public float y0;
            public float signX;
            public float signY;
            public float logA;

            public float Eval(float x)
            {
                // Standard function
                //return signY * A * Mathf.Pow(signX * x - x0, B) + y0;

                // Slightly more complicated but numerically stable function
                return signY * Mathf.Exp(logA + B * Mathf.Log(signX * x - x0)) + y0;
            }

            // Create a function going from (0,0) to (x_end,y_end) where the
            // derivative at x_end is m
            public void Initialize(float x_end, float y_end, float m)
            {
                A = 0.0f;
                B = 1.0f;
                x0 = 0.0f;
                y0 = 0.0f;
                signX = 1.0f;
                signY = 1.0f;

                // Invalid case, slope must be positive and the
                // y that we are trying to hit must be positive.
                if (m <= 0.0f || y_end <= 0.0f)
                {
                    return;
                }

                // Also invalid
                if (x_end <= 0.0f)
                {
                    return;
                }

                B = (m * x_end) / y_end;

                float p = Mathf.Pow(x_end, B);
                A = y_end / p;
                logA = Mathf.Log(y_end) - B * Mathf.Log(x_end);
            }
        };

        // Usual & internal stuff
        public Shader tonemapShader = null;
        public bool validRenderTextureFormat = true;
        private Material m_TonemapMaterial;

        public Material tonemapMaterial
        {
            get
            {
                // NOTE(review): material creation is commented out, so this getter
                // currently always returns null and OnRenderImage falls back to a
                // plain Blit — confirm whether this is intentional temp state.
                //if (m_TonemapMaterial == null)
                //    m_TonemapMaterial = ImageEffectHelper.CheckShaderAndCreateMaterial(tonemapShader);
                return m_TonemapMaterial;
            }
        }

        private RenderTexture m_RtDebug = null;
        private RenderTextureFormat m_RtFormat = RenderTextureFormat.ARGBHalf;

        // User LUT loaded from m_UserLutTexture (or an identity LUT); dim^3 colors.
        private int m_UserLutDim = 16;
        private Color[] m_UserLutData;

        protected void OnEnable()
        {
            if (tonemapShader == null)
                tonemapShader = Shader.Find("Hidden/TonemappingColorGrading");

            // NOTE(review): with the support check commented out, the component
            // unconditionally disables itself here — confirm this temp state.
            //if (ImageEffectHelper.IsSupported(tonemapShader, false, true, this))
            //    return;

            enabled = false;
            Debug.LogWarning("The image effect " + ToString() + " has been disabled as it's not supported on the current platform.");
        }

        float GetHighlightRecovery()
        {
            return Mathf.Max(0.0f, m_FilmicCurve.lutShoulder * 3.0f);
        }

        public float GetWhitePoint()
        {
            return Mathf.Pow(2.0f, Mathf.Max(0.0f, GetHighlightRecovery()));
        }

        // Maps a LUT-space coordinate back to linear space: inverse of A*x/(x+1).
        static float LutToLin(float x, float lutA)
        {
            x = (x >= 1.0f) ? 1.0f : x;
            float temp = x / lutA;
            return temp / (1.0f - temp);
        }

        static float LinToLut(float x, float lutA)
        {
            return Mathf.Sqrt(x / (x + lutA));
        }

        // Lift/gamma/gain applied in an approximately perceptual (sqrt) space.
        static float LiftGammaGain(float x, float lift, float invGamma, float gain)
        {
            float xx = Mathf.Sqrt(x);
            float ret = gain * (lift * (1.0f - xx) + Mathf.Pow(xx, invGamma));
            return ret * ret;
        }

        // Contrast adjustment around linRef, performed in log space.
        static float LogContrast(float x, float linRef, float contrast)
        {
            x = Mathf.Max(x, 1e-5f);

            float logRef = Mathf.Log(linRef);
            float logVal = Mathf.Log(x);
            float logAdj = logRef + (logVal - logRef) * contrast;

            float dstVal = Mathf.Exp(logAdj);
            return dstVal;
        }

        // Scales a color so its channel average is 1; white for an all-zero input.
        static Color NormalizeColor(Color c)
        {
            float sum = (c.r + c.g + c.b) / 3.0f;

            if (sum == 0.0f)
                return new Color(1.0f, 1.0f, 1.0f, 1.0f);

            Color ret = new Color();
            ret.r = c.r / sum;
            ret.g = c.g / sum;
            ret.b = c.b / sum;
            ret.a = 1.0f;
            return ret;
        }

        static public float GetLutA()
        {
            // Our basic function is f(x) = A * x / (x + 1)
            // We want the function to actually be able to hit 1.0f (to use
            // the full range of the 3D lut) and that's what A is for.
            // Tried a bunch numbers and 1.05 seems to work pretty well.
            return 1.05f;
        }

        // Fills m_UserLutData with a 16^3 identity LUT (output == input).
        void SetIdentityLut()
        {
            int dim = 16;
            Color[] newC = new Color[dim * dim * dim];
            float oneOverDim = 1.0f / (1.0f * dim - 1.0f);

            for (int i = 0; i < dim; i++)
            {
                for (int j = 0; j < dim; j++)
                {
                    for (int k = 0; k < dim; k++)
                    {
                        newC[i + (j * dim) + (k * dim * dim)] = new Color((i * 1.0f) * oneOverDim, (j * 1.0f) * oneOverDim, (k * 1.0f) * oneOverDim, 1.0f);
                    }
                }
            }

            m_UserLutData = newC;
            m_UserLutDim = dim;
        }

        int ClampLutDim(int src)
        {
            return Mathf.Clamp(src, 0, m_UserLutDim - 1);
        }

        Color SampleLutNearest(int r, int g, int b)
        {
            r = ClampLutDim(r);
            g = ClampLutDim(g);
            // BUGFIX: was "g = ClampLutDim(b);" which overwrote the clamped green
            // index with the (clamped) blue one and left b itself unclamped,
            // producing a wrong sample and a possible out-of-range index.
            b = ClampLutDim(b);
            return m_UserLutData[r + (g * m_UserLutDim) + (b * m_UserLutDim * m_UserLutDim)];
        }

        // Does the lookup without bounds checking
        Color SampleLutNearestUnsafe(int r, int g, int b)
        {
            return m_UserLutData[r + (g * m_UserLutDim) + (b * m_UserLutDim * m_UserLutDim)];
        }

        // Trilinear sample of the user LUT at normalized (srcR, srcG, srcB).
        Color SampleLutLinear(float srcR, float srcG, float srcB)
        {
            float sampleOffset = 0.0f;
            float sampleScale = (float)(m_UserLutDim - 1);

            float r = srcR * sampleScale + sampleOffset;
            float g = srcG * sampleScale + sampleOffset;
            float b = srcB * sampleScale + sampleOffset;

            int r0 = Mathf.FloorToInt(r);
            int g0 = Mathf.FloorToInt(g);
            int b0 = Mathf.FloorToInt(b);

            r0 = ClampLutDim(r0);
            g0 = ClampLutDim(g0);
            b0 = ClampLutDim(b0);

            int r1 = ClampLutDim(r0 + 1);
            int g1 = ClampLutDim(g0 + 1);
            int b1 = ClampLutDim(b0 + 1);

            float tr = (r) - (float)r0;
            float tg = (g) - (float)g0;
            float tb = (b) - (float)b0;

            // All eight corner indices are clamped above, so unsafe lookups are fine.
            Color c000 = SampleLutNearestUnsafe(r0, g0, b0);
            Color c001 = SampleLutNearestUnsafe(r0, g0, b1);
            Color c010 = SampleLutNearestUnsafe(r0, g1, b0);
            Color c011 = SampleLutNearestUnsafe(r0, g1, b1);
            Color c100 = SampleLutNearestUnsafe(r1, g0, b0);
            Color c101 = SampleLutNearestUnsafe(r1, g0, b1);
            Color c110 = SampleLutNearestUnsafe(r1, g1, b0);
            Color c111 = SampleLutNearestUnsafe(r1, g1, b1);

            Color c00 = Color.Lerp(c000, c001, tb);
            Color c01 = Color.Lerp(c010, c011, tb);
            Color c10 = Color.Lerp(c100, c101, tb);
            Color c11 = Color.Lerp(c110, c111, tb);

            Color c0 = Color.Lerp(c00, c01, tg);
            Color c1 = Color.Lerp(c10, c11, tg);

            Color c = Color.Lerp(c0, c1, tr);

            return c;
        }

        // Unpacks the user-supplied 2D LUT strip into m_UserLutData, or falls back
        // to the identity LUT when it is missing or mis-sized.
        void UpdateUserLut()
        {
            // Conversion fun: the given 2D texture needs to be of the format
            // w * h, whereas h is the 'depth' (or 3d dimension 'dim') and w = dim * dim
            if (userLutTexture == null)
            {
                SetIdentityLut();
                return;
            }

            if (!ValidDimensions(userLutTexture))
            {
                Debug.LogWarning("The given 2D texture " + userLutTexture.name + " cannot be used as a 3D LUT. Reverting to identity.");
                SetIdentityLut();
                return;
            }

            int dim = userLutTexture.height;

            Color[] c = userLutTexture.GetPixels();
            Color[] newC = new Color[c.Length];

            for (int i = 0; i < dim; i++)
            {
                for (int j = 0; j < dim; j++)
                {
                    for (int k = 0; k < dim; k++)
                    {
                        // Flip vertically: texture rows run top-down.
                        int j_ = dim - j - 1;
                        Color dst = c[k * dim + i + j_ * dim * dim];
                        newC[i + (j * dim) + (k * dim * dim)] = dst;
                    }
                }
            }

            m_UserLutDim = dim;
            m_UserLutData = newC;
        }

        /// <summary>
        /// Converts one LUT-space channel value to linear, then applies contrast and
        /// the three-segment (toe/linear/shoulder) filmic curve when enabled.
        /// </summary>
        public float EvalFilmicHelper(float src, float lutA,
            SimplePolyFunc polyToe,
            SimplePolyFunc polyLinear,
            SimplePolyFunc polyShoulder,
            float x0, float x1, float linearW)
        {
            // Figure out the linear value of this 3d texel
            float dst = LutToLin(src, lutA);

            if (m_FilmicCurve.enabled)
            {
                // We could allow this to be customized, but most people probably
                // would not understand it and it would just create complexity.
                // 18% grey is the standard film reference grey so let's just go with that.
                float linRef = .18f;
                dst = LogContrast(dst, linRef, m_FilmicCurve.contrast);

                // Pick the curve segment by where dst falls relative to the knees.
                SimplePolyFunc polyR = polyToe;
                if (dst >= x0)
                    polyR = polyLinear;
                if (dst >= x1)
                    polyR = polyShoulder;

                dst = Mathf.Min(dst, linearW);
                dst = polyR.Eval(dst);
            }

            return dst;
        }

        // Applies lift/gamma/gain plus the overall grading gamma to one channel.
        float EvalCurveGradingHelper(float src, float lift, float invGamma, float gain)
        {
            float dst = src;

            if (m_ColorGrading.enabled)
            {
                // lift/gamma/gain
                dst = LiftGammaGain(dst, lift, invGamma, gain);
            }

            // Max with zero
            dst = Mathf.Max(dst, 0.0f);

            if (m_ColorGrading.enabled)
            {
                // Overall gamma
                dst = Mathf.Pow(dst, m_ColorGrading.gamma);
            }

            return dst;
        }

        // Bakes the full pipeline (filmic curve + user LUT + grading + saturation)
        // into a 32^3 Texture3D (m_LutTex).
        void Create3DLut(float lutA,
            SimplePolyFunc polyToe,
            SimplePolyFunc polyLinear,
            SimplePolyFunc polyShoulder,
            float x0, float x1, float linearW,
            float liftR, float invGammaR, float gainR,
            float liftG, float invGammaG, float gainG,
            float liftB, float invGammaB, float gainB)
        {
            int dim = 32;
            Color[] newC = new Color[dim * dim * dim];
            float oneOverDim = 1.0f / (1.0f * dim - 1.0f);

            for (int i = 0; i < dim; i++)
            {
                for (int j = 0; j < dim; j++)
                {
                    for (int k = 0; k < dim; k++)
                    {
                        float srcR = (i * 1.0f) * oneOverDim;
                        float srcG = (j * 1.0f) * oneOverDim;
                        float srcB = (k * 1.0f) * oneOverDim;

                        float dstR = EvalFilmicHelper(srcR, lutA, polyToe, polyLinear, polyShoulder, x0, x1, linearW);
                        float dstG = EvalFilmicHelper(srcG, lutA, polyToe, polyLinear, polyShoulder, x0, x1, linearW);
                        float dstB = EvalFilmicHelper(srcB, lutA, polyToe, polyLinear, polyShoulder, x0, x1, linearW);

                        Color c = SampleLutLinear(dstR, dstG, dstB);
                        dstR = c.r;
                        dstG = c.g;
                        dstB = c.b;

                        dstR = EvalCurveGradingHelper(dstR, liftR, invGammaR, gainR);
                        dstG = EvalCurveGradingHelper(dstG, liftG, invGammaG, gainG);
                        dstB = EvalCurveGradingHelper(dstB, liftB, invGammaB, gainB);

                        if (m_ColorGrading.enabled)
                        {
                            // Saturation
                            float lum = dstR * 0.2125f + dstG * 0.7154f + dstB * 0.0721f;
                            dstR = lum + (dstR - lum) * m_ColorGrading.saturation;
                            dstG = lum + (dstG - lum) * m_ColorGrading.saturation;
                            dstB = lum + (dstB - lum) * m_ColorGrading.saturation;
                        }

                        newC[i + (j * dim) + (k * dim * dim)] = new Color(dstR, dstG, dstB, 1.0f);
                    }
                }
            }

            if (m_LutTex == null)
            {
                m_LutTex = new Texture3D(dim, dim, dim, TextureFormat.RGB24, false);
                m_LutTex.filterMode = FilterMode.Bilinear;
                m_LutTex.wrapMode = TextureWrapMode.Clamp;
                m_LutTex.hideFlags = HideFlags.DontSave;
            }

            m_LutTex.SetPixels(newC);
            m_LutTex.Apply();
        }

        // Bakes per-channel response curves into a 128x2 Texture2D (m_LutCurveTex1D).
        // Saturation is not baked here; the shader applies it.
        void Create1DLut(float lutA,
            SimplePolyFunc polyToe,
            SimplePolyFunc polyLinear,
            SimplePolyFunc polyShoulder,
            float x0, float x1, float linearW,
            float liftR, float invGammaR, float gainR,
            float liftG, float invGammaG, float gainG,
            float liftB, float invGammaB, float gainB)
        {
            int curveLen = 128;
            Color[] newC = new Color[curveLen * 2];
            float oneOverDim = 1.0f / (1.0f * curveLen - 1.0f);

            for (int i = 0; i < curveLen; i++)
            {
                float srcR = (i * 1.0f) * oneOverDim;
                float srcG = (i * 1.0f) * oneOverDim;
                float srcB = (i * 1.0f) * oneOverDim;

                float dstR = EvalFilmicHelper(srcR, lutA, polyToe, polyLinear, polyShoulder, x0, x1, linearW);
                float dstG = EvalFilmicHelper(srcG, lutA, polyToe, polyLinear, polyShoulder, x0, x1, linearW);
                float dstB = EvalFilmicHelper(srcB, lutA, polyToe, polyLinear, polyShoulder, x0, x1, linearW);

                Color c = SampleLutLinear(dstR, dstG, dstB);
                dstR = c.r;
                dstG = c.g;
                dstB = c.b;

                dstR = EvalCurveGradingHelper(dstR, liftR, invGammaR, gainR);
                dstG = EvalCurveGradingHelper(dstG, liftG, invGammaG, gainG);
                dstB = EvalCurveGradingHelper(dstB, liftB, invGammaB, gainB);

                // Saturation is done in the shader as it can't be baked into color curves
                if (isLinearColorSpace)
                {
                    dstR = Mathf.LinearToGammaSpace(dstR);
                    dstG = Mathf.LinearToGammaSpace(dstG);
                    dstB = Mathf.LinearToGammaSpace(dstB);
                }

                // Two identical rows so bilinear filtering never bleeds off-texture.
                newC[i + 0 * curveLen] = new Color(dstR, dstG, dstB, 1.0f);
                newC[i + 1 * curveLen] = new Color(dstR, dstG, dstB, 1.0f);
            }

            if (m_LutCurveTex1D == null)
            {
                m_LutCurveTex1D = new Texture2D(curveLen, 2, TextureFormat.RGB24, false);
                m_LutCurveTex1D.filterMode = FilterMode.Bilinear;
                m_LutCurveTex1D.wrapMode = TextureWrapMode.Clamp;
                m_LutCurveTex1D.hideFlags = HideFlags.DontSave;
            }

            m_LutCurveTex1D.SetPixels(newC);
            m_LutCurveTex1D.Apply();
        }

        // Rebuilds the user LUT and derives the filmic curve segments and grading
        // coefficients, then bakes everything into the 3D or 1D LUT.
        void UpdateLut()
        {
            UpdateUserLut();

            float lutA = GetLutA();

            SimplePolyFunc polyToe;
            SimplePolyFunc polyLinear;
            SimplePolyFunc polyShoulder;

            float gammaSpace = 2.2f;

            float x0 = Mathf.Pow(1.0f / 3.0f, gammaSpace);
            float shoulderBase = .7f;
            float x1 = Mathf.Pow(shoulderBase, gammaSpace);
            float gammaHighY = Mathf.Pow(shoulderBase, 1.0f + (m_FilmicCurve.lutShoulder) * 1.0f);
            float y1 = Mathf.Pow(gammaHighY, gammaSpace);

            float y0;
            {
                float t = x0 / x1;
                float lin = t * y1;
                float low = lin * (1.0f - m_FilmicCurve.toe * .5f);
                y0 = low;
            }

            float dx = x1 - x0;
            float dy = y1 - y0;
            float m = 0.0f;
            if (dx > 0 && dy > 0)
                m = dy / dx;

            // Linear section, power is 1, slope is m
            polyLinear.x0 = x0;
            polyLinear.y0 = y0;
            polyLinear.A = m;
            polyLinear.B = 1.0f;
            polyLinear.signX = 1.0f;
            polyLinear.signY = 1.0f;
            polyLinear.logA = Mathf.Log(m);

            // Toe
            polyToe = polyLinear;
            polyToe.Initialize(x0, y0, m);

            float linearW = GetWhitePoint();

            {
                // Shoulder, first think about it "backwards"
                float offsetX = linearW - x1;
                float offsetY = 1.0f - y1;

                polyShoulder = polyLinear;
                polyShoulder.Initialize(offsetX, offsetY, m);

                // Flip horizontal
                polyShoulder.signX = -1.0f;
                polyShoulder.x0 = -linearW;

                // Flip vertical
                polyShoulder.signY = -1.0f;
                polyShoulder.y0 = 1.0f;
            }

            Color normS = NormalizeColor(m_ColorGrading.lutColors.shadows);
            Color normM = NormalizeColor(m_ColorGrading.lutColors.midtones);
            Color normH = NormalizeColor(m_ColorGrading.lutColors.highlights);

            float avgS = (normS.r + normS.g + normS.b) / 3.0f;
            float avgM = (normM.r + normM.g + normM.b) / 3.0f;
            float avgH = (normH.r + normH.g + normH.b) / 3.0f;

            // These are magic numbers
            float liftScale = .1f;
            float gammaScale = .5f;
            float gainScale = .5f;

            float liftR = (normS.r - avgS) * liftScale;
            float liftG = (normS.g - avgS) * liftScale;
            float liftB = (normS.b - avgS) * liftScale;

            float gammaR = Mathf.Pow(2.0f, (normM.r - avgM) * gammaScale);
            float gammaG = Mathf.Pow(2.0f, (normM.g - avgM) * gammaScale);
            float gammaB = Mathf.Pow(2.0f, (normM.b - avgM) * gammaScale);

            float gainR = Mathf.Pow(2.0f, (normH.r - avgH) * gainScale);
            float gainG = Mathf.Pow(2.0f, (normH.g - avgH) * gainScale);
            float gainB = Mathf.Pow(2.0f, (normH.b - avgH) * gainScale);

            float minGamma = .01f;
            float invGammaR = 1.0f / Mathf.Max(minGamma, gammaR);
            float invGammaG = 1.0f / Mathf.Max(minGamma, gammaG);
            float invGammaB = 1.0f / Mathf.Max(minGamma, gammaB);

            if (!fastMode)
            {
                Create3DLut(lutA, polyToe, polyLinear, polyShoulder, x0, x1, linearW,
                    liftR, invGammaR, gainR,
                    liftG, invGammaG, gainG,
                    liftB, invGammaB, gainB);
            }
            else
            {
                // Instead of doing a single 3D lut, I tried doing this as 3x 1D luts. Or rather,
                // a single lut with separate curves baked into RGB channels. It wasn't actually faster
                // so it's disabled. But there are two reasons why in the future it might be useful:
                // 1. If it turns out that 3x 1D luts are faster on some hardware, it might be worth it.
                // 2. Updating the 3D LUT is quite slow so you can't change it every frame. If the
                //    parameters need to lerp than the 1D version might be worthwhile.
                Create1DLut(lutA, polyToe, polyLinear, polyShoulder, x0, x1, linearW,
                    liftR, invGammaR, gainR,
                    liftG, invGammaG, gainG,
                    liftB, invGammaB, gainB);
            }
        }

        // A strip texture is a valid LUT when width == height^2 (checked via sqrt).
        public bool ValidDimensions(Texture2D tex2d)
        {
            if (!tex2d)
                return false;

            int h = tex2d.height;
            if (h != Mathf.FloorToInt(Mathf.Sqrt(tex2d.width)))
                return false;

            return true;
        }

        public void Convert(Texture2D temp2DTex)
        {
#if false
            // Conversion fun: the given 2D texture needs to be of the format
            // w * h, whereas h is the 'depth' (or 3d dimension 'dim') and w = dim * dim
            if (temp2DTex)
            {
                int dim = temp2DTex.width * temp2DTex.height;
                dim = temp2DTex.height;

                if (!ValidDimensions(temp2DTex))
                {
                    Debug.LogWarning("The given 2D texture " + temp2DTex.name + " cannot be used as a 3D LUT.");
                    //basedOnTempTex = "";
                    return;
                }

                Color[] c = temp2DTex.GetPixels();
                Color[] newC = new Color[c.Length];

                for (int i = 0; i < dim; i++)
                {
                    for (int j = 0; j < dim; j++)
                    {
                        for (int k = 0; k < dim; k++)
                        {
                            int j_ = dim - j - 1;
                            newC[i + (j * dim) + (k * dim * dim)] = c[k * dim + i + j_ * dim * dim];
                        }
                    }
                }

                if (converted3DLut)
                    DestroyImmediate(converted3DLut);
                converted3DLut = new Texture3D(dim, dim, dim, TextureFormat.ARGB32, false);
                converted3DLut.SetPixels(newC);
                converted3DLut.Apply();
                userLutTexName = temp2DTex.name;
            }
            else
            {
                // error, something went terribly wrong
                //Debug.LogError("Couldn't color correct with 3D LUT texture. Image Effect will be disabled.");
                SetIdentityLut();
                userLutTexName = "";
            }
#endif
        }

        void OnDisable()
        {
            // Destroy everything we built so nothing leaks in the editor.
            if (m_TonemapMaterial)
            {
                DestroyImmediate(m_TonemapMaterial);
                m_TonemapMaterial = null;
            }

            if (m_LutTex)
            {
                DestroyImmediate(m_LutTex);
                m_LutTex = null;
            }

            if (m_LutCurveTex1D)
            {
                DestroyImmediate(m_LutCurveTex1D);
                m_LutCurveTex1D = null;
            }
        }

        // The image filter chain will continue in LDR
        [ImageEffectTransformsToLDR]
        void OnRenderImage(RenderTexture source, RenderTexture destination)
        {
            if (tonemapMaterial == null)
            {
                Graphics.Blit(source, destination);
                return;
            }

            // Rebuild the LUTs lazily, only when parameters changed.
            if (m_LutTex == null || m_Dirty)
            {
                UpdateLut();
                m_Dirty = false;
            }

#if UNITY_EDITOR
            validRenderTextureFormat = true;

            if (source.format != RenderTextureFormat.ARGBHalf)
                validRenderTextureFormat = false;
#endif

            if (fastMode)
                tonemapMaterial.SetTexture("_LutTex1D", m_LutCurveTex1D);
            else
                tonemapMaterial.SetTexture("_LutTex", m_LutTex);

            float lutA = GetLutA();

            float exposureBias = Mathf.Pow(2.0f, m_FilmicCurve.enabled ? m_FilmicCurve.exposureBias : 0.0f);
            Vector4 exposureMult = new Vector4(exposureBias, exposureBias, exposureBias, 1.0f);

            Color linWB = new Color(1.0f, 1.0f, 1.0f, 1.0f);
            if (m_ColorGrading.enabled)
            {
                // Fold the white-balance color (approx. linearized) into the exposure multiplier.
                linWB.r = Mathf.Pow(m_ColorGrading.whiteBalance.r, 2.2f);
                linWB.g = Mathf.Pow(m_ColorGrading.whiteBalance.g, 2.2f);
                linWB.b = Mathf.Pow(m_ColorGrading.whiteBalance.b, 2.2f);

                Color normWB = NormalizeColor(linWB);
                exposureMult.x *= normWB.r;
                exposureMult.y *= normWB.g;
                exposureMult.z *= normWB.b;
            }

            tonemapMaterial.SetFloat("_LutA", lutA);
            tonemapMaterial.SetVector("_LutExposureMult", exposureMult);
            tonemapMaterial.SetFloat("_Vibrance", m_ColorGrading.enabled ? m_ColorGrading.saturation : 1f);

            int pass;
            if (debugClamp)
                pass = (int)(fastMode ? Passes.OneDDebug : Passes.ThreeDDebug);
            else
                pass = (int)(fastMode ? Passes.OneD : Passes.ThreeD);

            Graphics.Blit(source, destination, tonemapMaterial, pass);

#if UNITY_EDITOR
            // if we have an on frame end callback
            // we need to pass a valid result texture
            // if destination is null we wrote to the
            // backbuffer so we need to copy that out.
            // It's slow and not amazing, but editor only
            if (onFrameEndEditorOnly != null)
            {
                if (destination == null)
                {
                    var temp = RenderTexture.GetTemporary(source.width, source.height);
                    Graphics.Blit(null, temp);
                    onFrameEndEditorOnly(temp, tonemapMaterial);
                    RenderTexture.ReleaseTemporary(temp);
                }
                else
                    onFrameEndEditorOnly(destination, tonemapMaterial);
            }
#endif
        }
    }
}
#region License, Terms and Author(s)
//
// ELMAH - Error Logging Modules and Handlers for ASP.NET
// Copyright (c) 2004-9 Atif Aziz. All rights reserved.
//
//  Author(s):
//
//      James Driscoll, mailto:jamesdriscoll@btinternet.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion

[assembly: Elmah.Scc("$Id$")]

namespace Elmah
{
    #region Imports

    using System;
    using System.Data;
    using System.Data.OleDb;
    using System.Diagnostics;
    using System.Globalization;
    using System.IO;
    using System.Text;

    using IDictionary = System.Collections.IDictionary;
    using System.Collections.Generic;

    #endregion

    /// <summary>
    /// An <see cref="ErrorLog"/> implementation that uses Microsoft Access
    /// as its backing store.
    /// </summary>
    /// <remarks>
    /// The MDB file is automatically created at the path specified in the
    /// connection string if it does not already exist.
    /// </remarks>
    public class AccessErrorLog : ErrorLog
    {
        private readonly string _connectionString;

        private const int _maxAppNameLength = 60;
        private const string _scriptResourceName = "mkmdb.vbs";
        private static readonly object _mdbInitializationLock = new object();

        /// <summary>
        /// Initializes a new instance of the <see cref="AccessErrorLog"/> class
        /// using a dictionary of configured settings.
        /// </summary>
        public AccessErrorLog(IDictionary config)
        {
            if (config == null)
                throw new ArgumentNullException("config");

            string connectionString = ConnectionStringHelper.GetConnectionString(config);

            //
            // If there is no connection string to use then throw an
            // exception to abort construction.
            //

            if (connectionString.Length == 0)
                throw new ApplicationException("Connection string is missing for the Access error log.");

            _connectionString = connectionString;

            InitializeDatabase();

            //
            // Set the application name as this implementation provides
            // per-application isolation over a single store.
            //

            string appName = config.Find("applicationName", string.Empty);

            if (appName.Length > _maxAppNameLength)
            {
                throw new ApplicationException(string.Format(
                    "Application name is too long. Maximum length allowed is {0} characters.",
                    _maxAppNameLength.ToString("N0")));
            }

            ApplicationName = appName;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="AccessErrorLog"/> class
        /// to use a specific connection string for connecting to the database.
        /// </summary>
        public AccessErrorLog(string connectionString)
        {
            if (connectionString == null)
                throw new ArgumentNullException("connectionString");

            if (connectionString.Length == 0)
                throw new ArgumentException(null, "connectionString");

            _connectionString = connectionString;

            //
            // BUGFIX: this constructor previously skipped database
            // initialization, so the MDB file was never created on this
            // construction path, contradicting the class remarks. Keep it
            // consistent with the IDictionary-based constructor.
            //
            InitializeDatabase();
        }

        /// <summary>
        /// Gets the name of this error log implementation.
        /// </summary>
        public override string Name
        {
            get { return "Microsoft Access Error Log"; }
        }

        /// <summary>
        /// Gets the connection string used by the log to connect to the database.
        /// </summary>
        public virtual string ConnectionString
        {
            get { return _connectionString; }
        }

        /// <summary>
        /// Logs an error to the database.
        /// </summary>
        /// <remarks>
        /// Errors are inserted via an inline, parameterized INSERT statement
        /// and are kept for an indefinite time; the identity of the inserted
        /// row is returned as the error id.
        /// </remarks>
        public override string Log(Error error)
        {
            if (error == null)
                throw new ArgumentNullException("error");

            string errorXml = ErrorXml.EncodeString(error);

            using (OleDbConnection connection = new OleDbConnection(this.ConnectionString))
            using (OleDbCommand command = connection.CreateCommand())
            {
                connection.Open();
                command.CommandType = CommandType.Text;
                command.CommandText = @"INSERT INTO ELMAH_Error (Application, Host, Type, Source, Message, UserName, StatusCode, TimeUtc, AllXml) VALUES (@Application, @Host, @Type, @Source, @Message, @UserName, @StatusCode, @TimeUtc, @AllXml)";

                //
                // NB OLE DB binds parameters by position, but the names below
                // are kept in sync with the placeholders in the statement for
                // readability (previously "@User" vs. the "@UserName" placeholder).
                //
                OleDbParameterCollection parameters = command.Parameters;
                parameters.Add("@Application", OleDbType.VarChar, _maxAppNameLength).Value = ApplicationName;
                parameters.Add("@Host", OleDbType.VarChar, 30).Value = error.HostName;
                parameters.Add("@Type", OleDbType.VarChar, 100).Value = error.Type;
                parameters.Add("@Source", OleDbType.VarChar, 60).Value = error.Source;
                parameters.Add("@Message", OleDbType.LongVarChar, error.Message.Length).Value = error.Message;
                parameters.Add("@UserName", OleDbType.VarChar, 50).Value = error.User;
                parameters.Add("@StatusCode", OleDbType.Integer).Value = error.StatusCode;
                parameters.Add("@TimeUtc", OleDbType.Date).Value = error.Time.ToUniversalTime();
                parameters.Add("@AllXml", OleDbType.LongVarChar, errorXml.Length).Value = errorXml;

                command.ExecuteNonQuery();

                using (OleDbCommand identityCommand = connection.CreateCommand())
                {
                    identityCommand.CommandType = CommandType.Text;
                    identityCommand.CommandText = "SELECT @@IDENTITY";

                    return Convert.ToString(identityCommand.ExecuteScalar(), CultureInfo.InvariantCulture);
                }
            }
        }

        /// <summary>
        /// Returns a page of errors from the database in descending order
        /// of logged time.
        /// </summary>
        public override int GetErrors(int pageIndex, int pageSize, IList<ErrorLogEntry> errorEntryList)
        {
            if (pageIndex < 0)
                throw new ArgumentOutOfRangeException("pageIndex", pageIndex, null);

            if (pageSize < 0)
                throw new ArgumentOutOfRangeException("pageSize", pageSize, null);

            using (OleDbConnection connection = new OleDbConnection(this.ConnectionString))
            using (OleDbCommand command = connection.CreateCommand())
            {
                command.CommandType = CommandType.Text;
                command.CommandText = "SELECT COUNT(*) FROM ELMAH_Error";
                connection.Open();

                int totalCount = (int)command.ExecuteScalar();

                if (errorEntryList != null && pageIndex * pageSize < totalCount)
                {
                    int maxRecords = pageSize * (pageIndex + 1);

                    if (maxRecords > totalCount)
                    {
                        maxRecords = totalCount;
                        // Only the partial last page reaches here, so this is
                        // the number of records remaining on that page.
                        pageSize = totalCount - pageSize * (totalCount / pageSize);
                    }

                    //
                    // Access has no OFFSET clause, so page by nesting TOP
                    // queries: take the newest maxRecords, flip the order,
                    // take the requested page, then join back for full rows.
                    //
                    StringBuilder sql = new StringBuilder(1000);
                    sql.Append("SELECT e.* FROM (");
                    sql.Append("SELECT TOP ");
                    sql.Append(pageSize.ToString());
                    sql.Append(" TimeUtc, ErrorId FROM (");
                    sql.Append("SELECT TOP ");
                    sql.Append(maxRecords.ToString());
                    sql.Append(" TimeUtc, ErrorId FROM ELMAH_Error ");
                    sql.Append("ORDER BY TimeUtc DESC, ErrorId DESC) ");
                    sql.Append("ORDER BY TimeUtc ASC, ErrorId ASC) AS i ");
                    sql.Append("INNER JOIN Elmah_Error AS e ON i.ErrorId = e.ErrorId ");
                    sql.Append("ORDER BY e.TimeUtc DESC, e.ErrorId DESC");

                    command.CommandText = sql.ToString();

                    using (OleDbDataReader reader = command.ExecuteReader())
                    {
                        Debug.Assert(reader != null);

                        while (reader.Read())
                        {
                            var id = Convert.ToString(reader["ErrorId"], CultureInfo.InvariantCulture);

                            var error = new Error
                            {
                                ApplicationName = reader["Application"].ToString(),
                                HostName = reader["Host"].ToString(),
                                Type = reader["Type"].ToString(),
                                Source = reader["Source"].ToString(),
                                Message = reader["Message"].ToString(),
                                User = reader["UserName"].ToString(),
                                StatusCode = Convert.ToInt32(reader["StatusCode"]),
                                Time = Convert.ToDateTime(reader["TimeUtc"]).ToLocalTime()
                            };

                            errorEntryList.Add(new ErrorLogEntry(this, id, error));
                        }

                        reader.Close();
                    }
                }

                return totalCount;
            }
        }

        /// <summary>
        /// Returns the specified error from the database, or null
        /// if it does not exist.
        /// </summary>
        public override ErrorLogEntry GetError(string id)
        {
            if (id == null)
                throw new ArgumentNullException("id");

            if (id.Length == 0)
                throw new ArgumentException(null, "id");

            int errorId;
            try
            {
                errorId = int.Parse(id, CultureInfo.InvariantCulture);
            }
            catch (FormatException e)
            {
                throw new ArgumentException(e.Message, "id", e);
            }
            catch (OverflowException e)
            {
                throw new ArgumentException(e.Message, "id", e);
            }

            string errorXml;

            using (OleDbConnection connection = new OleDbConnection(this.ConnectionString))
            using (OleDbCommand command = connection.CreateCommand())
            {
                command.CommandText = @"SELECT AllXml FROM ELMAH_Error WHERE ErrorId = @ErrorId";
                command.CommandType = CommandType.Text;

                OleDbParameterCollection parameters = command.Parameters;
                parameters.Add("@ErrorId", OleDbType.Integer).Value = errorId;

                connection.Open();
                errorXml = (string)command.ExecuteScalar();
            }

            if (errorXml == null)
                return null;

            Error error = ErrorXml.DecodeString(errorXml);
            return new ErrorLogEntry(this, id, error);
        }

        /// <summary>
        /// Creates the MDB file on disk (via the embedded mkmdb.vbs script)
        /// if it does not already exist.
        /// </summary>
        private void InitializeDatabase()
        {
            string connectionString = ConnectionString;
            Debug.AssertStringNotEmpty(connectionString);

            string dbFilePath = ConnectionStringHelper.GetDataSourceFilePath(connectionString);

            if (File.Exists(dbFilePath))
                return;

            //
            // Make sure that we don't have multiple instances trying to create the database.
            //

            lock (_mdbInitializationLock)
            {
                //
                // Just double-check that no other thread has created the database while
                // we were waiting for the lock.
                //

                if (File.Exists(dbFilePath))
                    return;

                //
                // Create a temporary copy of the mkmdb.vbs script.
                // We do this in the same directory as the resulting database for security permission purposes.
                //

                string scriptPath = Path.Combine(Path.GetDirectoryName(dbFilePath), _scriptResourceName);

                using (FileStream scriptStream = new FileStream(scriptPath, FileMode.Create, FileAccess.Write, FileShare.None))
                {
                    ManifestResourceHelper.WriteResourceToStream(scriptStream, _scriptResourceName);
                }

                //
                // Run the script file to create the database using batch
                // mode (//B), which suppresses script errors and prompts
                // from displaying.
                //

                ProcessStartInfo psi = new ProcessStartInfo(
                    "cscript", scriptPath + " \"" + dbFilePath + "\" //B //NoLogo");
                psi.UseShellExecute = false;    // i.e. CreateProcess
                psi.CreateNoWindow = true;      // Stay lean, stay mean

                try
                {
                    using (Process process = Process.Start(psi))
                    {
                        //
                        // A few seconds should be plenty of time to create the database.
                        //

                        TimeSpan tolerance = TimeSpan.FromSeconds(2);

                        if (!process.WaitForExit((int) tolerance.TotalMilliseconds))
                        {
                            //
                            // but it wasn't, so clean up and throw an exception!
                            // Realistically, I don't expect to ever get here!
                            //

                            process.Kill();

                            throw new Exception(string.Format(
                                "The Microsoft Access database creation script took longer than the allocated time of {0} seconds to execute. " +
                                "The script was terminated prematurely.",
                                tolerance.TotalSeconds));
                        }

                        if (process.ExitCode != 0)
                        {
                            throw new Exception(string.Format(
                                "The Microsoft Access database creation script failed with exit code {0}.",
                                process.ExitCode));
                        }
                    }
                }
                finally
                {
                    //
                    // Clean up after ourselves!!
                    //

                    File.Delete(scriptPath);
                }
            }
        }
    }
}
#region Copyright notice and license
// Copyright 2015 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion

using System;
using System.IO;
using System.Reflection;
using Grpc.Core.Logging;

namespace Grpc.Core.Internal
{
    /// <summary>
    /// Takes care of loading C# native extension and provides access to PInvoke calls the library exports.
    /// </summary>
    internal sealed class NativeExtension
    {
        static readonly ILogger Logger = GrpcEnvironment.Logger.ForType<NativeExtension>();
        static readonly object staticLock = new object();
        // volatile: required for the double-checked locking pattern in Get().
        static volatile NativeExtension instance;

        readonly NativeMethods nativeMethods;

        private NativeExtension()
        {
            this.nativeMethods = LoadNativeMethods();

            // Redirect the native logs as the very first thing after loading the native extension
            // to make sure we don't lose any logs.
            NativeLogRedirector.Redirect(this.nativeMethods);

            // Initialize
            NativeCallbackDispatcher.Init(this.nativeMethods);

            DefaultSslRootsOverride.Override(this.nativeMethods);

            Logger.Debug("gRPC native library loaded successfully.");
        }

        /// <summary>
        /// Gets singleton instance of this class.
        /// The native extension is loaded when called for the first time.
        /// </summary>
        public static NativeExtension Get()
        {
            if (instance == null)
            {
                lock (staticLock)
                {
                    if (instance == null)
                    {
                        instance = new NativeExtension();
                    }
                }
            }
            return instance;
        }

        /// <summary>
        /// Provides access to the exported native methods.
        /// </summary>
        public NativeMethods NativeMethods
        {
            get { return this.nativeMethods; }
        }

        /// <summary>
        /// Detects which configuration of native extension to load and load it.
        /// </summary>
        private static UnmanagedLibrary LoadUnmanagedLibrary()
        {
            // TODO: allow customizing path to native extension (possibly through exposing a GrpcEnvironment property).
            // See https://github.com/grpc/grpc/pull/7303 for one option.
            var assemblyDirectory = Path.GetDirectoryName(GetAssemblyPath());

            // With "classic" VS projects, the native libraries get copied using a .targets rule to the build output folder
            // alongside the compiled assembly.
            // With dotnet cli projects targeting net45 framework, the native libraries (just the required ones)
            // are similarly copied to the built output folder, through the magic of Microsoft.NETCore.Platforms.
            var classicPath = Path.Combine(assemblyDirectory, GetNativeLibraryFilename());

            // With dotnet cli project targeting netcoreappX.Y, projects will use Grpc.Core assembly directly in the location where it got restored
            // by nuget. We locate the native libraries based on known structure of Grpc.Core nuget package.
            // When "dotnet publish" is used, the runtimes directory is copied next to the published assemblies.
            string runtimesDirectory = string.Format("runtimes/{0}/native", GetPlatformString());
            var netCorePublishedAppStylePath = Path.Combine(assemblyDirectory, runtimesDirectory, GetNativeLibraryFilename());
            var netCoreAppStylePath = Path.Combine(assemblyDirectory, "../..", runtimesDirectory, GetNativeLibraryFilename());

            // Look for the native library in all possible locations in given order.
            string[] paths = new[] { classicPath, netCorePublishedAppStylePath, netCoreAppStylePath };
            return new UnmanagedLibrary(paths);
        }

        /// <summary>
        /// Loads native extension and return native methods delegates.
        /// </summary>
        private static NativeMethods LoadNativeMethods()
        {
            if (PlatformApis.IsUnity)
            {
                return LoadNativeMethodsUnity();
            }
            if (PlatformApis.IsXamarin)
            {
                return LoadNativeMethodsXamarin();
            }
            return new NativeMethods(LoadUnmanagedLibrary());
        }

        /// <summary>
        /// Return native method delegates when running on Unity platform.
        /// Unity does not use standard NuGet packages and the native library is treated
        /// there as a "native plugin" which is (provided it has the right metadata)
        /// automatically made available to <c>[DllImport]</c> loading logic.
        /// WARNING: Unity support is experimental and work-in-progress. Don't expect it to work.
        /// </summary>
        private static NativeMethods LoadNativeMethodsUnity()
        {
            switch (PlatformApis.GetUnityRuntimePlatform())
            {
                case "IPhonePlayer":
                    // On iOS, plugins are statically linked into the app binary.
                    return new NativeMethods(new NativeMethods.DllImportsFromStaticLib());
                default:
                    // most other platforms load unity plugins as a shared library
                    return new NativeMethods(new NativeMethods.DllImportsFromSharedLib());
            }
        }

        /// <summary>
        /// Return native method delegates when running on the Xamarin platform.
        /// WARNING: Xamarin support is experimental and work-in-progress. Don't expect it to work.
        /// </summary>
        private static NativeMethods LoadNativeMethodsXamarin()
        {
            if (PlatformApis.IsXamarinAndroid)
            {
                return new NativeMethods(new NativeMethods.DllImportsFromSharedLib());
            }
            // not tested yet
            return new NativeMethods(new NativeMethods.DllImportsFromStaticLib());
        }

        /// <summary>
        /// Returns the path of the Grpc.Core assembly on disk, preferring the original
        /// (non-shadow-copied) location where available so the native libraries next to it can be found.
        /// </summary>
        private static string GetAssemblyPath()
        {
            var assembly = typeof(NativeExtension).GetTypeInfo().Assembly;
#if NETSTANDARD1_5 || NETSTANDARD2_0
            // Assembly.EscapedCodeBase does not exist under CoreCLR, but assemblies imported from a nuget package
            // don't seem to be shadowed by DNX-based projects at all.
            return assembly.Location;
#else
            // If assembly is shadowed (e.g. in a webapp), EscapedCodeBase is pointing
            // to the original location of the assembly, and Location is pointing
            // to the shadow copy. We care about the original location because
            // the native dlls don't get shadowed.
            var escapedCodeBase = assembly.EscapedCodeBase;
            if (IsFileUri(escapedCodeBase))
            {
                return new Uri(escapedCodeBase).LocalPath;
            }
            return assembly.Location;
#endif
        }

#if !NETSTANDARD1_5 && !NETSTANDARD2_0
        /// <summary>
        /// Returns true if the given URI string uses the "file" scheme.
        /// </summary>
        private static bool IsFileUri(string uri)
        {
            // Use an ordinal case-insensitive prefix check instead of
            // ToLowerInvariant().StartsWith(...): the original form allocated a lowered
            // copy of the string and then performed a culture-sensitive comparison (CA1310).
            return uri.StartsWith(Uri.UriSchemeFile, StringComparison.OrdinalIgnoreCase);
        }
#endif

        /// <summary>
        /// Returns the platform component ("win", "linux" or "osx") of the nuget
        /// "runtimes/{platform}/native" directory layout.
        /// </summary>
        private static string GetPlatformString()
        {
            if (PlatformApis.IsWindows)
            {
                return "win";
            }
            if (PlatformApis.IsLinux)
            {
                return "linux";
            }
            if (PlatformApis.IsMacOSX)
            {
                return "osx";
            }
            throw new InvalidOperationException("Unsupported platform.");
        }

        // Currently, only Intel platform is supported.
        private static string GetArchitectureString()
        {
            if (PlatformApis.Is64Bit)
            {
                return "x64";
            }
            else
            {
                return "x86";
            }
        }

        // platform specific file name of the extension library
        private static string GetNativeLibraryFilename()
        {
            string architecture = GetArchitectureString();
            if (PlatformApis.IsWindows)
            {
                return string.Format("grpc_csharp_ext.{0}.dll", architecture);
            }
            if (PlatformApis.IsLinux)
            {
                return string.Format("libgrpc_csharp_ext.{0}.so", architecture);
            }
            if (PlatformApis.IsMacOSX)
            {
                return string.Format("libgrpc_csharp_ext.{0}.dylib", architecture);
            }
            throw new InvalidOperationException("Unsupported platform.");
        }
    }
}
/** * (C) Copyright IBM Corp. 2017, 2021. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /** * IBM OpenAPI SDK Code Generator Version: 3.38.0-07189efd-20210827-205025 */ using System.Collections.Generic; using System.IO; using System.Net.Http; using System.Text; using IBM.Cloud.SDK.Core.Authentication; using IBM.Cloud.SDK.Core.Http; using IBM.Cloud.SDK.Core.Http.Extensions; using IBM.Cloud.SDK.Core.Service; using IBM.Watson.NaturalLanguageUnderstanding.v1.Model; using Newtonsoft.Json; using Newtonsoft.Json.Linq; using System; namespace IBM.Watson.NaturalLanguageUnderstanding.v1 { public partial class NaturalLanguageUnderstandingService : IBMService, INaturalLanguageUnderstandingService { const string defaultServiceName = "natural_language_understanding"; private const string defaultServiceUrl = "https://api.us-south.natural-language-understanding.watson.cloud.ibm.com"; public string Version { get; set; } public NaturalLanguageUnderstandingService(string version) : this(version, defaultServiceName, ConfigBasedAuthenticatorFactory.GetAuthenticator(defaultServiceName)) { } public NaturalLanguageUnderstandingService(string version, IAuthenticator authenticator) : this(version, defaultServiceName, authenticator) {} public NaturalLanguageUnderstandingService(string version, string serviceName) : this(version, serviceName, ConfigBasedAuthenticatorFactory.GetAuthenticator(serviceName)) { } public NaturalLanguageUnderstandingService(IClient httpClient) : base(defaultServiceName, 
httpClient) { } public NaturalLanguageUnderstandingService(string version, string serviceName, IAuthenticator authenticator) : base(serviceName, authenticator) { if (string.IsNullOrEmpty(version)) { throw new ArgumentNullException("`version` is required"); } Version = version; if (string.IsNullOrEmpty(ServiceUrl)) { SetServiceUrl(defaultServiceUrl); } } /// <summary> /// Analyze text. /// /// Analyzes text, HTML, or a public webpage for the following features: /// - Categories /// - Classifications /// - Concepts /// - Emotion /// - Entities /// - Keywords /// - Metadata /// - Relations /// - Semantic roles /// - Sentiment /// - Syntax /// - Summarization (Experimental) /// /// If a language for the input text is not specified with the `language` parameter, the service [automatically /// detects the /// language](https://cloud.ibm.com/docs/natural-language-understanding?topic=natural-language-understanding-detectable-languages). /// </summary> /// <param name="features">Specific features to analyze the document for.</param> /// <param name="text">The plain text to analyze. One of the `text`, `html`, or `url` parameters is required. /// (optional)</param> /// <param name="html">The HTML file to analyze. One of the `text`, `html`, or `url` parameters is required. /// (optional)</param> /// <param name="url">The webpage to analyze. One of the `text`, `html`, or `url` parameters is required. /// (optional)</param> /// <param name="clean">Set this to `false` to disable webpage cleaning. For more information about webpage /// cleaning, see [Analyzing /// webpages](https://cloud.ibm.com/docs/natural-language-understanding?topic=natural-language-understanding-analyzing-webpages). /// (optional, default to true)</param> /// <param name="xpath">An [XPath /// query](https://cloud.ibm.com/docs/natural-language-understanding?topic=natural-language-understanding-analyzing-webpages#xpath) /// to perform on `html` or `url` input. 
Results of the query will be appended to the cleaned webpage text /// before it is analyzed. To analyze only the results of the XPath query, set the `clean` parameter to `false`. /// (optional)</param> /// <param name="fallbackToRaw">Whether to use raw HTML content if text cleaning fails. (optional, default to /// true)</param> /// <param name="returnAnalyzedText">Whether or not to return the analyzed text. (optional, default to /// false)</param> /// <param name="language">ISO 639-1 code that specifies the language of your text. This overrides automatic /// language detection. Language support differs depending on the features you include in your analysis. For /// more information, see [Language /// support](https://cloud.ibm.com/docs/natural-language-understanding?topic=natural-language-understanding-language-support). /// (optional)</param> /// <param name="limitTextCharacters">Sets the maximum number of characters that are processed by the service. /// (optional)</param> /// <returns><see cref="AnalysisResults" />AnalysisResults</returns> public DetailedResponse<AnalysisResults> Analyze(Features features, string text = null, string html = null, string url = null, bool? clean = null, string xpath = null, bool? fallbackToRaw = null, bool? returnAnalyzedText = null, string language = null, long? 
limitTextCharacters = null) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (features == null) { throw new ArgumentNullException("`features` is required for `Analyze`"); } DetailedResponse<AnalysisResults> result = null; try { IClient client = this.Client; SetAuthentication(); var restRequest = client.PostAsync($"{this.Endpoint}/v1/analyze"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithHeader("Content-Type", "application/json"); JObject bodyObject = new JObject(); if (features != null) { bodyObject["features"] = JToken.FromObject(features); } if (!string.IsNullOrEmpty(text)) { bodyObject["text"] = text; } if (!string.IsNullOrEmpty(html)) { bodyObject["html"] = html; } if (!string.IsNullOrEmpty(url)) { bodyObject["url"] = url; } if (clean != null) { bodyObject["clean"] = JToken.FromObject(clean); } if (!string.IsNullOrEmpty(xpath)) { bodyObject["xpath"] = xpath; } if (fallbackToRaw != null) { bodyObject["fallback_to_raw"] = JToken.FromObject(fallbackToRaw); } if (returnAnalyzedText != null) { bodyObject["return_analyzed_text"] = JToken.FromObject(returnAnalyzedText); } if (!string.IsNullOrEmpty(language)) { bodyObject["language"] = language; } if (limitTextCharacters != null) { bodyObject["limit_text_characters"] = JToken.FromObject(limitTextCharacters); } var httpContent = new StringContent(JsonConvert.SerializeObject(bodyObject), Encoding.UTF8, HttpMediaType.APPLICATION_JSON); restRequest.WithBodyContent(httpContent); restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "Analyze")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<AnalysisResults>().Result; if (result == null) { result = new DetailedResponse<AnalysisResults>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// 
List models. /// /// Lists Watson Knowledge Studio [custom entities and relations /// models](https://cloud.ibm.com/docs/natural-language-understanding?topic=natural-language-understanding-customizing) /// that are deployed to your Natural Language Understanding service. /// </summary> /// <returns><see cref="ListModelsResults" />ListModelsResults</returns> public DetailedResponse<ListModelsResults> ListModels() { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } DetailedResponse<ListModelsResults> result = null; try { IClient client = this.Client; SetAuthentication(); var restRequest = client.GetAsync($"{this.Endpoint}/v1/models"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "ListModels")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<ListModelsResults>().Result; if (result == null) { result = new DetailedResponse<ListModelsResults>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Delete model. /// /// Deletes a custom model. 
/// </summary> /// <param name="modelId">Model ID of the model to delete.</param> /// <returns><see cref="DeleteModelResults" />DeleteModelResults</returns> public DetailedResponse<DeleteModelResults> DeleteModel(string modelId) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (string.IsNullOrEmpty(modelId)) { throw new ArgumentNullException("`modelId` is required for `DeleteModel`"); } else { modelId = Uri.EscapeDataString(modelId); } DetailedResponse<DeleteModelResults> result = null; try { IClient client = this.Client; SetAuthentication(); var restRequest = client.DeleteAsync($"{this.Endpoint}/v1/models/{modelId}"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "DeleteModel")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<DeleteModelResults>().Result; if (result == null) { result = new DetailedResponse<DeleteModelResults>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Create sentiment model. /// /// (Beta) Creates a custom sentiment model by uploading training data and associated metadata. The model begins /// the training and deploying process and is ready to use when the `status` is `available`. /// </summary> /// <param name="language">The 2-letter language code of this model.</param> /// <param name="trainingData">Training data in CSV format. For more information, see [Sentiment training data /// requirements](https://cloud.ibm.com/docs/natural-language-understanding?topic=natural-language-understanding-custom-sentiment#sentiment-training-data-requirements).</param> /// <param name="name">An optional name for the model. (optional)</param> /// <param name="description">An optional description of the model. 
(optional)</param> /// <param name="modelVersion">An optional version string. (optional)</param> /// <param name="workspaceId">ID of the Watson Knowledge Studio workspace that deployed this model to Natural /// Language Understanding. (optional)</param> /// <param name="versionDescription">The description of the version. (optional)</param> /// <returns><see cref="SentimentModel" />SentimentModel</returns> public DetailedResponse<SentimentModel> CreateSentimentModel(string language, System.IO.MemoryStream trainingData, string name = null, string description = null, string modelVersion = null, string workspaceId = null, string versionDescription = null) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (string.IsNullOrEmpty(language)) { throw new ArgumentNullException("`language` is required for `CreateSentimentModel`"); } if (trainingData == null) { throw new ArgumentNullException("`trainingData` is required for `CreateSentimentModel`"); } DetailedResponse<SentimentModel> result = null; try { var formData = new MultipartFormDataContent(); if (language != null) { var languageContent = new StringContent(language, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); languageContent.Headers.ContentType = null; formData.Add(languageContent, "language"); } if (trainingData != null) { var trainingDataContent = new ByteArrayContent(trainingData.ToArray()); System.Net.Http.Headers.MediaTypeHeaderValue contentType; System.Net.Http.Headers.MediaTypeHeaderValue.TryParse("text/csv", out contentType); trainingDataContent.Headers.ContentType = contentType; formData.Add(trainingDataContent, "training_data", "filename"); } if (name != null) { var nameContent = new StringContent(name, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); nameContent.Headers.ContentType = null; formData.Add(nameContent, "name"); } if (description != null) { var descriptionContent = new StringContent(description, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); 
descriptionContent.Headers.ContentType = null; formData.Add(descriptionContent, "description"); } if (modelVersion != null) { var modelVersionContent = new StringContent(modelVersion, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); modelVersionContent.Headers.ContentType = null; formData.Add(modelVersionContent, "model_version"); } if (workspaceId != null) { var workspaceIdContent = new StringContent(workspaceId, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); workspaceIdContent.Headers.ContentType = null; formData.Add(workspaceIdContent, "workspace_id"); } if (versionDescription != null) { var versionDescriptionContent = new StringContent(versionDescription, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); versionDescriptionContent.Headers.ContentType = null; formData.Add(versionDescriptionContent, "version_description"); } IClient client = this.Client; SetAuthentication(); var restRequest = client.PostAsync($"{this.Endpoint}/v1/models/sentiment"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithBodyContent(formData); restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "CreateSentimentModel")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<SentimentModel>().Result; if (result == null) { result = new DetailedResponse<SentimentModel>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// List sentiment models. /// /// (Beta) Returns all custom sentiment models associated with this service instance. 
/// </summary> /// <returns><see cref="ListSentimentModelsResponse" />ListSentimentModelsResponse</returns> public DetailedResponse<ListSentimentModelsResponse> ListSentimentModels() { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } DetailedResponse<ListSentimentModelsResponse> result = null; try { IClient client = this.Client; SetAuthentication(); var restRequest = client.GetAsync($"{this.Endpoint}/v1/models/sentiment"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "ListSentimentModels")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<ListSentimentModelsResponse>().Result; if (result == null) { result = new DetailedResponse<ListSentimentModelsResponse>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Get sentiment model details. /// /// (Beta) Returns the status of the sentiment model with the given model ID. 
/// </summary> /// <param name="modelId">ID of the model.</param> /// <returns><see cref="SentimentModel" />SentimentModel</returns> public DetailedResponse<SentimentModel> GetSentimentModel(string modelId) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (string.IsNullOrEmpty(modelId)) { throw new ArgumentNullException("`modelId` is required for `GetSentimentModel`"); } else { modelId = Uri.EscapeDataString(modelId); } DetailedResponse<SentimentModel> result = null; try { IClient client = this.Client; SetAuthentication(); var restRequest = client.GetAsync($"{this.Endpoint}/v1/models/sentiment/{modelId}"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "GetSentimentModel")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<SentimentModel>().Result; if (result == null) { result = new DetailedResponse<SentimentModel>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Update sentiment model. /// /// (Beta) Overwrites the training data associated with this custom sentiment model and retrains the model. The /// new model replaces the current deployment. /// </summary> /// <param name="modelId">ID of the model.</param> /// <param name="language">The 2-letter language code of this model.</param> /// <param name="trainingData">Training data in CSV format. For more information, see [Sentiment training data /// requirements](https://cloud.ibm.com/docs/natural-language-understanding?topic=natural-language-understanding-custom-sentiment#sentiment-training-data-requirements).</param> /// <param name="name">An optional name for the model. (optional)</param> /// <param name="description">An optional description of the model. 
(optional)</param> /// <param name="modelVersion">An optional version string. (optional)</param> /// <param name="workspaceId">ID of the Watson Knowledge Studio workspace that deployed this model to Natural /// Language Understanding. (optional)</param> /// <param name="versionDescription">The description of the version. (optional)</param> /// <returns><see cref="SentimentModel" />SentimentModel</returns> public DetailedResponse<SentimentModel> UpdateSentimentModel(string modelId, string language, System.IO.MemoryStream trainingData, string name = null, string description = null, string modelVersion = null, string workspaceId = null, string versionDescription = null) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (string.IsNullOrEmpty(modelId)) { throw new ArgumentNullException("`modelId` is required for `UpdateSentimentModel`"); } else { modelId = Uri.EscapeDataString(modelId); } if (string.IsNullOrEmpty(language)) { throw new ArgumentNullException("`language` is required for `UpdateSentimentModel`"); } if (trainingData == null) { throw new ArgumentNullException("`trainingData` is required for `UpdateSentimentModel`"); } DetailedResponse<SentimentModel> result = null; try { var formData = new MultipartFormDataContent(); if (language != null) { var languageContent = new StringContent(language, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); languageContent.Headers.ContentType = null; formData.Add(languageContent, "language"); } if (trainingData != null) { var trainingDataContent = new ByteArrayContent(trainingData.ToArray()); System.Net.Http.Headers.MediaTypeHeaderValue contentType; System.Net.Http.Headers.MediaTypeHeaderValue.TryParse("text/csv", out contentType); trainingDataContent.Headers.ContentType = contentType; formData.Add(trainingDataContent, "training_data", "filename"); } if (name != null) { var nameContent = new StringContent(name, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); nameContent.Headers.ContentType 
= null; formData.Add(nameContent, "name"); } if (description != null) { var descriptionContent = new StringContent(description, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); descriptionContent.Headers.ContentType = null; formData.Add(descriptionContent, "description"); } if (modelVersion != null) { var modelVersionContent = new StringContent(modelVersion, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); modelVersionContent.Headers.ContentType = null; formData.Add(modelVersionContent, "model_version"); } if (workspaceId != null) { var workspaceIdContent = new StringContent(workspaceId, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); workspaceIdContent.Headers.ContentType = null; formData.Add(workspaceIdContent, "workspace_id"); } if (versionDescription != null) { var versionDescriptionContent = new StringContent(versionDescription, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); versionDescriptionContent.Headers.ContentType = null; formData.Add(versionDescriptionContent, "version_description"); } IClient client = this.Client; SetAuthentication(); var restRequest = client.PutAsync($"{this.Endpoint}/v1/models/sentiment/{modelId}"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithBodyContent(formData); restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "UpdateSentimentModel")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<SentimentModel>().Result; if (result == null) { result = new DetailedResponse<SentimentModel>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Delete sentiment model. /// /// (Beta) Un-deploys the custom sentiment model with the given model ID and deletes all associated customer /// data, including any training data or binary artifacts. 
/// </summary> /// <param name="modelId">ID of the model.</param> /// <returns><see cref="DeleteModelResults" />DeleteModelResults</returns> public DetailedResponse<DeleteModelResults> DeleteSentimentModel(string modelId) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (string.IsNullOrEmpty(modelId)) { throw new ArgumentNullException("`modelId` is required for `DeleteSentimentModel`"); } else { modelId = Uri.EscapeDataString(modelId); } DetailedResponse<DeleteModelResults> result = null; try { IClient client = this.Client; SetAuthentication(); var restRequest = client.DeleteAsync($"{this.Endpoint}/v1/models/sentiment/{modelId}"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "DeleteSentimentModel")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<DeleteModelResults>().Result; if (result == null) { result = new DetailedResponse<DeleteModelResults>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Create categories model. /// /// (Beta) Creates a custom categories model by uploading training data and associated metadata. The model /// begins the training and deploying process and is ready to use when the `status` is `available`. /// </summary> /// <param name="language">The 2-letter language code of this model.</param> /// <param name="trainingData">Training data in JSON format. For more information, see [Categories training data /// requirements](https://cloud.ibm.com/docs/natural-language-understanding?topic=natural-language-understanding-categories##categories-training-data-requirements).</param> /// <param name="trainingDataContentType">The content type of trainingData. 
(optional)</param> /// <param name="name">An optional name for the model. (optional)</param> /// <param name="description">An optional description of the model. (optional)</param> /// <param name="modelVersion">An optional version string. (optional)</param> /// <param name="workspaceId">ID of the Watson Knowledge Studio workspace that deployed this model to Natural /// Language Understanding. (optional)</param> /// <param name="versionDescription">The description of the version. (optional)</param> /// <returns><see cref="CategoriesModel" />CategoriesModel</returns>
        public DetailedResponse<CategoriesModel> CreateCategoriesModel(string language, System.IO.MemoryStream trainingData, string trainingDataContentType = null, string name = null, string description = null, string modelVersion = null, string workspaceId = null, string versionDescription = null)
        {
            if (string.IsNullOrEmpty(Version))
            {
                throw new ArgumentNullException("`Version` is required");
            }
            if (string.IsNullOrEmpty(language))
            {
                throw new ArgumentNullException("`language` is required for `CreateCategoriesModel`");
            }
            if (trainingData == null)
            {
                throw new ArgumentNullException("`trainingData` is required for `CreateCategoriesModel`");
            }

            DetailedResponse<CategoriesModel> result = null;

            try
            {
                // Form construction is shared with the other create/update model
                // operations so the four methods cannot drift apart.
                var formData = BuildModelTrainingFormData(language, trainingData, trainingDataContentType, name, description, modelVersion, workspaceId, versionDescription);

                IClient client = this.Client;
                SetAuthentication();

                var restRequest = client.PostAsync($"{this.Endpoint}/v1/models/categories");

                restRequest.WithHeader("Accept", "application/json");
                if (!string.IsNullOrEmpty(Version))
                {
                    restRequest.WithArgument("version", Version);
                }
                restRequest.WithBodyContent(formData);
                restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "CreateCategoriesModel"));
                restRequest.WithHeaders(customRequestHeaders);
                ClearCustomRequestHeaders();

                result = restRequest.As<CategoriesModel>().Result;
                if (result == null)
                {
                    result = new DetailedResponse<CategoriesModel>();
                }
            }
            catch (AggregateException ae)
            {
                // Unwrap the task's AggregateException so callers see a flat exception tree.
                throw ae.Flatten();
            }

            return result;
        }

        /// <summary>
        /// Builds the multipart form body shared by the categories and classifications
        /// create/update operations. Optional parts are added only when non-null.
        /// Text parts have their Content-Type header cleared after construction,
        /// reproducing the wire format of the original generated code; the training
        /// data part carries whatever media type (if any) parses from
        /// <paramref name="trainingDataContentType"/>.
        /// </summary>
        private MultipartFormDataContent BuildModelTrainingFormData(string language, System.IO.MemoryStream trainingData, string trainingDataContentType, string name, string description, string modelVersion, string workspaceId, string versionDescription)
        {
            var formData = new MultipartFormDataContent();

            if (language != null)
            {
                var languageContent = new StringContent(language, Encoding.UTF8, HttpMediaType.TEXT_PLAIN);
                languageContent.Headers.ContentType = null;
                formData.Add(languageContent, "language");
            }

            if (trainingData != null)
            {
                var trainingDataContent = new ByteArrayContent(trainingData.ToArray());
                System.Net.Http.Headers.MediaTypeHeaderValue contentType;
                // TryParse tolerates a null/invalid content type; contentType stays null in that case.
                System.Net.Http.Headers.MediaTypeHeaderValue.TryParse(trainingDataContentType, out contentType);
                trainingDataContent.Headers.ContentType = contentType;
                formData.Add(trainingDataContent, "training_data", "filename");
            }

            if (name != null)
            {
                var nameContent = new StringContent(name, Encoding.UTF8, HttpMediaType.TEXT_PLAIN);
                nameContent.Headers.ContentType = null;
                formData.Add(nameContent, "name");
            }

            if (description != null)
            {
                var descriptionContent = new StringContent(description, Encoding.UTF8, HttpMediaType.TEXT_PLAIN);
                descriptionContent.Headers.ContentType = null;
                formData.Add(descriptionContent, "description");
            }

            if (modelVersion != null)
            {
                var modelVersionContent = new StringContent(modelVersion, Encoding.UTF8, HttpMediaType.TEXT_PLAIN);
                modelVersionContent.Headers.ContentType = null;
                formData.Add(modelVersionContent, "model_version");
            }

            if (workspaceId != null)
            {
                var workspaceIdContent = new StringContent(workspaceId, Encoding.UTF8, HttpMediaType.TEXT_PLAIN);
                workspaceIdContent.Headers.ContentType = null;
                formData.Add(workspaceIdContent, "workspace_id");
            }

            if (versionDescription != null)
            {
                var versionDescriptionContent = new StringContent(versionDescription, Encoding.UTF8, HttpMediaType.TEXT_PLAIN);
                versionDescriptionContent.Headers.ContentType = null;
                formData.Add(versionDescriptionContent, "version_description");
            }

            return formData;
        }

        /// <summary>
        /// Enum values for CreateCategoriesModel.
        /// </summary>
        public class CreateCategoriesModelEnums
        {
            /// <summary>
            /// The content type of trainingData.
            /// </summary>
            public class TrainingDataContentTypeValue
            {
                /// <summary>
                /// Constant JSON for json
                /// </summary>
                public const string JSON = "json";
                /// <summary>
                /// Constant APPLICATION_JSON for application/json
                /// </summary>
                public const string APPLICATION_JSON = "application/json";
            }
        }

        /// <summary>
        /// List categories models.
        ///
        /// (Beta) Returns all custom categories models associated with this service instance.
        /// </summary>
        /// <returns><see cref="CategoriesModelList" />CategoriesModelList</returns>
        public DetailedResponse<CategoriesModelList> ListCategoriesModels()
        {
            if (string.IsNullOrEmpty(Version))
            {
                throw new ArgumentNullException("`Version` is required");
            }

            DetailedResponse<CategoriesModelList> result = null;

            try
            {
                IClient client = this.Client;
                SetAuthentication();

                var restRequest = client.GetAsync($"{this.Endpoint}/v1/models/categories");

                restRequest.WithHeader("Accept", "application/json");
                if (!string.IsNullOrEmpty(Version))
                {
                    restRequest.WithArgument("version", Version);
                }
                restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "ListCategoriesModels"));
                restRequest.WithHeaders(customRequestHeaders);
                ClearCustomRequestHeaders();

                result = restRequest.As<CategoriesModelList>().Result;
                if (result == null)
                {
                    result = new DetailedResponse<CategoriesModelList>();
                }
            }
            catch (AggregateException ae)
            {
                throw ae.Flatten();
            }

            return result;
        }

        /// <summary>
        /// Get categories model details.
        ///
        /// (Beta) Returns the status of the categories model with the given model ID.
        /// </summary>
        /// <param name="modelId">ID of the model.</param>
        /// <returns><see cref="CategoriesModel" />CategoriesModel</returns>
        public DetailedResponse<CategoriesModel> GetCategoriesModel(string modelId)
        {
            if (string.IsNullOrEmpty(Version))
            {
                throw new ArgumentNullException("`Version` is required");
            }
            if (string.IsNullOrEmpty(modelId))
            {
                throw new ArgumentNullException("`modelId` is required for `GetCategoriesModel`");
            }
            else
            {
                // Escape so a user-supplied ID cannot alter the request path.
                modelId = Uri.EscapeDataString(modelId);
            }

            DetailedResponse<CategoriesModel> result = null;

            try
            {
                IClient client = this.Client;
                SetAuthentication();

                var restRequest = client.GetAsync($"{this.Endpoint}/v1/models/categories/{modelId}");

                restRequest.WithHeader("Accept", "application/json");
                if (!string.IsNullOrEmpty(Version))
                {
                    restRequest.WithArgument("version", Version);
                }
                restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "GetCategoriesModel"));
                restRequest.WithHeaders(customRequestHeaders);
                ClearCustomRequestHeaders();

                result = restRequest.As<CategoriesModel>().Result;
                if (result == null)
                {
                    result = new DetailedResponse<CategoriesModel>();
                }
            }
            catch (AggregateException ae)
            {
                throw ae.Flatten();
            }

            return result;
        }

        /// <summary>
        /// Update categories model.
        ///
        /// (Beta) Overwrites the training data associated with this custom categories model and retrains the model. The
        /// new model replaces the current deployment.
        /// </summary>
        /// <param name="modelId">ID of the model.</param>
        /// <param name="language">The 2-letter language code of this model.</param>
        /// <param name="trainingData">Training data in JSON format. For more information, see [Categories training data
        /// requirements](https://cloud.ibm.com/docs/natural-language-understanding?topic=natural-language-understanding-categories#categories-training-data-requirements).</param>
        /// <param name="trainingDataContentType">The content type of trainingData. (optional)</param>
        /// <param name="name">An optional name for the model. (optional)</param>
        /// <param name="description">An optional description of the model. (optional)</param>
        /// <param name="modelVersion">An optional version string. (optional)</param>
        /// <param name="workspaceId">ID of the Watson Knowledge Studio workspace that deployed this model to Natural
        /// Language Understanding. (optional)</param>
        /// <param name="versionDescription">The description of the version. (optional)</param>
        /// <returns><see cref="CategoriesModel" />CategoriesModel</returns>
        public DetailedResponse<CategoriesModel> UpdateCategoriesModel(string modelId, string language, System.IO.MemoryStream trainingData, string trainingDataContentType = null, string name = null, string description = null, string modelVersion = null, string workspaceId = null, string versionDescription = null)
        {
            if (string.IsNullOrEmpty(Version))
            {
                throw new ArgumentNullException("`Version` is required");
            }
            if (string.IsNullOrEmpty(modelId))
            {
                throw new ArgumentNullException("`modelId` is required for `UpdateCategoriesModel`");
            }
            else
            {
                modelId = Uri.EscapeDataString(modelId);
            }
            if (string.IsNullOrEmpty(language))
            {
                throw new ArgumentNullException("`language` is required for `UpdateCategoriesModel`");
            }
            if (trainingData == null)
            {
                throw new ArgumentNullException("`trainingData` is required for `UpdateCategoriesModel`");
            }

            DetailedResponse<CategoriesModel> result = null;

            try
            {
                var formData = BuildModelTrainingFormData(language, trainingData, trainingDataContentType, name, description, modelVersion, workspaceId, versionDescription);

                IClient client = this.Client;
                SetAuthentication();

                var restRequest = client.PutAsync($"{this.Endpoint}/v1/models/categories/{modelId}");

                restRequest.WithHeader("Accept", "application/json");
                if (!string.IsNullOrEmpty(Version))
                {
                    restRequest.WithArgument("version", Version);
                }
                restRequest.WithBodyContent(formData);
                restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "UpdateCategoriesModel"));
                restRequest.WithHeaders(customRequestHeaders);
                ClearCustomRequestHeaders();

                result = restRequest.As<CategoriesModel>().Result;
                if (result == null)
                {
                    result = new DetailedResponse<CategoriesModel>();
                }
            }
            catch (AggregateException ae)
            {
                throw ae.Flatten();
            }

            return result;
        }

        /// <summary>
        /// Enum values for UpdateCategoriesModel.
        /// </summary>
        public class UpdateCategoriesModelEnums
        {
            /// <summary>
            /// The content type of trainingData.
            /// </summary>
            public class TrainingDataContentTypeValue
            {
                /// <summary>
                /// Constant JSON for json
                /// </summary>
                public const string JSON = "json";
                /// <summary>
                /// Constant APPLICATION_JSON for application/json
                /// </summary>
                public const string APPLICATION_JSON = "application/json";
            }
        }

        /// <summary>
        /// Delete categories model.
        ///
        /// (Beta) Un-deploys the custom categories model with the given model ID and deletes all associated customer
        /// data, including any training data or binary artifacts.
        /// </summary>
        /// <param name="modelId">ID of the model.</param>
        /// <returns><see cref="DeleteModelResults" />DeleteModelResults</returns>
        public DetailedResponse<DeleteModelResults> DeleteCategoriesModel(string modelId)
        {
            if (string.IsNullOrEmpty(Version))
            {
                throw new ArgumentNullException("`Version` is required");
            }
            if (string.IsNullOrEmpty(modelId))
            {
                throw new ArgumentNullException("`modelId` is required for `DeleteCategoriesModel`");
            }
            else
            {
                modelId = Uri.EscapeDataString(modelId);
            }

            DetailedResponse<DeleteModelResults> result = null;

            try
            {
                IClient client = this.Client;
                SetAuthentication();

                var restRequest = client.DeleteAsync($"{this.Endpoint}/v1/models/categories/{modelId}");

                restRequest.WithHeader("Accept", "application/json");
                if (!string.IsNullOrEmpty(Version))
                {
                    restRequest.WithArgument("version", Version);
                }
                restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "DeleteCategoriesModel"));
                restRequest.WithHeaders(customRequestHeaders);
                ClearCustomRequestHeaders();

                result = restRequest.As<DeleteModelResults>().Result;
                if (result == null)
                {
                    result = new DetailedResponse<DeleteModelResults>();
                }
            }
            catch (AggregateException ae)
            {
                throw ae.Flatten();
            }

            return result;
        }

        /// <summary>
        /// Create classifications model.
        ///
        /// Creates a custom classifications model by uploading training data and associated metadata. The model begins
        /// the training and deploying process and is ready to use when the `status` is `available`.
        /// </summary>
        /// <param name="language">The 2-letter language code of this model.</param>
        /// <param name="trainingData">Training data in JSON format. For more information, see [Classifications training
        /// data
        /// requirements](https://cloud.ibm.com/docs/natural-language-understanding?topic=natural-language-understanding-classifications#classification-training-data-requirements).</param>
        /// <param name="trainingDataContentType">The content type of trainingData. (optional)</param>
        /// <param name="name">An optional name for the model. (optional)</param>
        /// <param name="description">An optional description of the model. (optional)</param>
        /// <param name="modelVersion">An optional version string. (optional)</param>
        /// <param name="workspaceId">ID of the Watson Knowledge Studio workspace that deployed this model to Natural
        /// Language Understanding. (optional)</param>
        /// <param name="versionDescription">The description of the version. (optional)</param>
        /// <returns><see cref="ClassificationsModel" />ClassificationsModel</returns>
        public DetailedResponse<ClassificationsModel> CreateClassificationsModel(string language, System.IO.MemoryStream trainingData, string trainingDataContentType = null, string name = null, string description = null, string modelVersion = null, string workspaceId = null, string versionDescription = null)
        {
            if (string.IsNullOrEmpty(Version))
            {
                throw new ArgumentNullException("`Version` is required");
            }
            if (string.IsNullOrEmpty(language))
            {
                throw new ArgumentNullException("`language` is required for `CreateClassificationsModel`");
            }
            if (trainingData == null)
            {
                throw new ArgumentNullException("`trainingData` is required for `CreateClassificationsModel`");
            }

            DetailedResponse<ClassificationsModel> result = null;

            try
            {
                var formData = BuildModelTrainingFormData(language, trainingData, trainingDataContentType, name, description, modelVersion, workspaceId, versionDescription);

                IClient client = this.Client;
                SetAuthentication();

                var restRequest = client.PostAsync($"{this.Endpoint}/v1/models/classifications");

                restRequest.WithHeader("Accept", "application/json");
                if (!string.IsNullOrEmpty(Version))
                {
                    restRequest.WithArgument("version", Version);
                }
                restRequest.WithBodyContent(formData);
                restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "CreateClassificationsModel"));
                restRequest.WithHeaders(customRequestHeaders);
                ClearCustomRequestHeaders();

                result = restRequest.As<ClassificationsModel>().Result;
                if (result == null)
                {
                    result = new DetailedResponse<ClassificationsModel>();
                }
            }
            catch (AggregateException ae)
            {
                throw ae.Flatten();
            }

            return result;
        }

        /// <summary>
        /// Enum values for CreateClassificationsModel.
        /// </summary>
        public class CreateClassificationsModelEnums
        {
            /// <summary>
            /// The content type of trainingData.
            /// </summary>
            public class TrainingDataContentTypeValue
            {
                /// <summary>
                /// Constant JSON for json
                /// </summary>
                public const string JSON = "json";
                /// <summary>
                /// Constant APPLICATION_JSON for application/json
                /// </summary>
                public const string APPLICATION_JSON = "application/json";
            }
        }

        /// <summary>
        /// List classifications models.
        ///
        /// Returns all custom classifications models associated with this service instance.
        /// </summary>
        /// <returns><see cref="ClassificationsModelList" />ClassificationsModelList</returns>
        public DetailedResponse<ClassificationsModelList> ListClassificationsModels()
        {
            if (string.IsNullOrEmpty(Version))
            {
                throw new ArgumentNullException("`Version` is required");
            }

            DetailedResponse<ClassificationsModelList> result = null;

            try
            {
                IClient client = this.Client;
                SetAuthentication();

                var restRequest = client.GetAsync($"{this.Endpoint}/v1/models/classifications");

                restRequest.WithHeader("Accept", "application/json");
                if (!string.IsNullOrEmpty(Version))
                {
                    restRequest.WithArgument("version", Version);
                }
                restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "ListClassificationsModels"));
                restRequest.WithHeaders(customRequestHeaders);
                ClearCustomRequestHeaders();

                result = restRequest.As<ClassificationsModelList>().Result;
                if (result == null)
                {
                    result = new DetailedResponse<ClassificationsModelList>();
                }
            }
            catch (AggregateException ae)
            {
                throw ae.Flatten();
            }

            return result;
        }

        /// <summary>
        /// Get classifications model details.
        ///
        /// Returns the status of the classifications model with the given model ID.
        /// </summary>
        /// <param name="modelId">ID of the model.</param>
        /// <returns><see cref="ClassificationsModel" />ClassificationsModel</returns>
        public DetailedResponse<ClassificationsModel> GetClassificationsModel(string modelId)
        {
            if (string.IsNullOrEmpty(Version))
            {
                throw new ArgumentNullException("`Version` is required");
            }
            if (string.IsNullOrEmpty(modelId))
            {
                throw new ArgumentNullException("`modelId` is required for `GetClassificationsModel`");
            }
            else
            {
                modelId = Uri.EscapeDataString(modelId);
            }

            DetailedResponse<ClassificationsModel> result = null;

            try
            {
                IClient client = this.Client;
                SetAuthentication();

                var restRequest = client.GetAsync($"{this.Endpoint}/v1/models/classifications/{modelId}");

                restRequest.WithHeader("Accept", "application/json");
                if (!string.IsNullOrEmpty(Version))
                {
                    restRequest.WithArgument("version", Version);
                }
                restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "GetClassificationsModel"));
                restRequest.WithHeaders(customRequestHeaders);
                ClearCustomRequestHeaders();

                result = restRequest.As<ClassificationsModel>().Result;
                if (result == null)
                {
                    result = new DetailedResponse<ClassificationsModel>();
                }
            }
            catch (AggregateException ae)
            {
                throw ae.Flatten();
            }

            return result;
        }

        /// <summary>
        /// Update classifications model.
        ///
        /// Overwrites the training data associated with this custom classifications model and retrains the model. The
        /// new model replaces the current deployment.
        /// </summary>
        /// <param name="modelId">ID of the model.</param>
        /// <param name="language">The 2-letter language code of this model.</param>
        /// <param name="trainingData">Training data in JSON format. For more information, see [Classifications training
        /// data
        /// requirements](https://cloud.ibm.com/docs/natural-language-understanding?topic=natural-language-understanding-classifications#classification-training-data-requirements).</param>
        /// <param name="trainingDataContentType">The content type of trainingData. (optional)</param>
        /// <param name="name">An optional name for the model. (optional)</param>
        /// <param name="description">An optional description of the model. (optional)</param>
        /// <param name="modelVersion">An optional version string. (optional)</param>
        /// <param name="workspaceId">ID of the Watson Knowledge Studio workspace that deployed this model to Natural
        /// Language Understanding. (optional)</param>
        /// <param name="versionDescription">The description of the version. (optional)</param>
        /// <returns><see cref="ClassificationsModel" />ClassificationsModel</returns>
        public DetailedResponse<ClassificationsModel> UpdateClassificationsModel(string modelId, string language, System.IO.MemoryStream trainingData, string trainingDataContentType = null, string name = null, string description = null, string modelVersion = null, string workspaceId = null, string versionDescription = null)
        {
            if (string.IsNullOrEmpty(Version))
            {
                throw new ArgumentNullException("`Version` is required");
            }
            if (string.IsNullOrEmpty(modelId))
            {
                throw new ArgumentNullException("`modelId` is required for `UpdateClassificationsModel`");
            }
            else
            {
                modelId = Uri.EscapeDataString(modelId);
            }
            if (string.IsNullOrEmpty(language))
            {
                throw new ArgumentNullException("`language` is required for `UpdateClassificationsModel`");
            }
            if (trainingData == null)
            {
                throw new ArgumentNullException("`trainingData` is required for `UpdateClassificationsModel`");
            }

            DetailedResponse<ClassificationsModel> result = null;

            try
            {
                var formData = BuildModelTrainingFormData(language, trainingData, trainingDataContentType, name, description, modelVersion, workspaceId, versionDescription);

                IClient client = this.Client;
                SetAuthentication();

                var restRequest = client.PutAsync($"{this.Endpoint}/v1/models/classifications/{modelId}");

                restRequest.WithHeader("Accept", "application/json");
                if (!string.IsNullOrEmpty(Version))
                {
                    restRequest.WithArgument("version", Version);
                }
                restRequest.WithBodyContent(formData);
                restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "UpdateClassificationsModel"));
                restRequest.WithHeaders(customRequestHeaders);
                ClearCustomRequestHeaders();

                result = restRequest.As<ClassificationsModel>().Result;
                if (result == null)
                {
                    result = new DetailedResponse<ClassificationsModel>();
                }
            }
            catch (AggregateException ae)
            {
                throw ae.Flatten();
            }

            return result;
        }

        /// <summary>
        /// Enum values for UpdateClassificationsModel.
        /// </summary>
        public class UpdateClassificationsModelEnums
        {
            /// <summary>
            /// The content type of trainingData.
            /// </summary>
            public class TrainingDataContentTypeValue
            {
                /// <summary>
                /// Constant JSON for json
                /// </summary>
                public const string JSON = "json";
                /// <summary>
                /// Constant APPLICATION_JSON for application/json
                /// </summary>
                public const string APPLICATION_JSON = "application/json";
            }
        }

        /// <summary>
        /// Delete classifications model.
        ///
        /// Un-deploys the custom classifications model with the given model ID and deletes all associated customer
        /// data, including any training data or binary artifacts.
        /// </summary>
        /// <param name="modelId">ID of the model.</param>
        /// <returns><see cref="DeleteModelResults" />DeleteModelResults</returns>
        public DetailedResponse<DeleteModelResults> DeleteClassificationsModel(string modelId)
        {
            if (string.IsNullOrEmpty(Version))
            {
                throw new ArgumentNullException("`Version` is required");
            }
            if (string.IsNullOrEmpty(modelId))
            {
                throw new ArgumentNullException("`modelId` is required for `DeleteClassificationsModel`");
            }
            else
            {
                modelId = Uri.EscapeDataString(modelId);
            }

            DetailedResponse<DeleteModelResults> result = null;

            try
            {
                IClient client = this.Client;
                SetAuthentication();

                var restRequest = client.DeleteAsync($"{this.Endpoint}/v1/models/classifications/{modelId}");

                restRequest.WithHeader("Accept", "application/json");
                if (!string.IsNullOrEmpty(Version))
                {
                    restRequest.WithArgument("version", Version);
                }
                restRequest.WithHeaders(Common.GetSdkHeaders("natural-language-understanding", "v1", "DeleteClassificationsModel"));
                restRequest.WithHeaders(customRequestHeaders);
                ClearCustomRequestHeaders();

                result = restRequest.As<DeleteModelResults>().Result;
                if (result == null)
                {
                    result = new DetailedResponse<DeleteModelResults>();
                }
            }
            catch (AggregateException ae)
            {
                throw ae.Flatten();
            }

            return result;
        }
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------

namespace System.ServiceModel.Channels
{
    using System.Runtime;
    using System.ServiceModel;

    // Channel factory that layers on top of an inner IChannelFactory,
    // forwarding the open/close/abort lifecycle to the inner factory.
    abstract class LayeredChannelFactory<TChannel> : ChannelFactoryBase<TChannel>
    {
        IChannelFactory innerChannelFactory;

        public LayeredChannelFactory(IDefaultCommunicationTimeouts timeouts, IChannelFactory innerChannelFactory)
            : base(timeouts)
        {
            this.innerChannelFactory = innerChannelFactory;
        }

        // The wrapped factory that lifecycle calls are delegated to.
        protected IChannelFactory InnerChannelFactory
        {
            get { return this.innerChannelFactory; }
        }

        // Resolution order: this factory for IChannelFactory<TChannel>,
        // then the base implementation, then the inner factory.
        public override T GetProperty<T>()
        {
            if (typeof(T) == typeof(IChannelFactory<TChannel>))
            {
                return (T)(object)this;
            }

            T baseProperty = base.GetProperty<T>();
            if (baseProperty != null)
            {
                return baseProperty;
            }

            return this.innerChannelFactory.GetProperty<T>();
        }

        // Open delegates straight to the inner factory (async path).
        protected override IAsyncResult OnBeginOpen(TimeSpan timeout, AsyncCallback callback, object state)
        {
            return this.innerChannelFactory.BeginOpen(timeout, callback, state);
        }

        protected override void OnEndOpen(IAsyncResult result)
        {
            this.innerChannelFactory.EndOpen(result);
        }

        // Async close chains the base close with the inner factory's close.
        protected override IAsyncResult OnBeginClose(TimeSpan timeout, AsyncCallback callback, object state)
        {
            return new ChainedCloseAsyncResult(timeout, callback, state, base.OnBeginClose, base.OnEndClose, this.innerChannelFactory);
        }

        protected override void OnEndClose(IAsyncResult result)
        {
            ChainedCloseAsyncResult.End(result);
        }

        // Sync close: base first, then the inner factory, both drawing from
        // the same remaining-timeout budget.
        protected override void OnClose(TimeSpan timeout)
        {
            TimeoutHelper timeoutHelper = new TimeoutHelper(timeout);
            base.OnClose(timeoutHelper.RemainingTime());
            this.innerChannelFactory.Close(timeoutHelper.RemainingTime());
        }

        protected override void OnOpen(TimeSpan timeout)
        {
            this.innerChannelFactory.Open(timeout);
        }

        protected override void OnAbort()
        {
            base.OnAbort();
            this.innerChannelFactory.Abort();
        }
    }

    // Input channel that delegates all receive operations to an inner
    // IInputChannel and exposes an OnReceive hook for derived classes.
    class LayeredInputChannel : LayeredChannel<IInputChannel>, IInputChannel
    {
        public LayeredInputChannel(ChannelManagerBase channelManager, IInputChannel innerChannel)
            : base(channelManager, innerChannel)
        {
        }

        public virtual EndpointAddress LocalAddress
        {
            get { return InnerChannel.LocalAddress; }
        }

        // Invokes the OnReceive hook only for non-null messages, so callers
        // can pass through the null result of a timed-out TryReceive.
        void InternalOnReceive(Message message)
        {
            if (message != null)
            {
                this.OnReceive(message);
            }
        }

        // Hook for derived classes to observe each received message; no-op here.
        protected virtual void OnReceive(Message message)
        {
        }

        public Message Receive()
        {
            Message message = InnerChannel.Receive();
            this.InternalOnReceive(message);
            return message;
        }

        public Message Receive(TimeSpan timeout)
        {
            Message message = InnerChannel.Receive(timeout);
            this.InternalOnReceive(message);
            return message;
        }

        public IAsyncResult BeginReceive(AsyncCallback callback, object state)
        {
            return InnerChannel.BeginReceive(callback, state);
        }

        public IAsyncResult BeginReceive(TimeSpan timeout, AsyncCallback callback, object state)
        {
            return InnerChannel.BeginReceive(timeout, callback, state);
        }

        // The OnReceive hook fires in the End* methods, not the Begin* ones.
        public Message EndReceive(IAsyncResult result)
        {
            Message message = InnerChannel.EndReceive(result);
            this.InternalOnReceive(message);
            return message;
        }

        public IAsyncResult BeginTryReceive(TimeSpan timeout, AsyncCallback callback, object state)
        {
            return InnerChannel.BeginTryReceive(timeout, callback, state);
        }

        public bool EndTryReceive(IAsyncResult result, out Message message)
        {
            bool retVal = InnerChannel.EndTryReceive(result, out message);
            this.InternalOnReceive(message);
            return retVal;
        }

        public bool TryReceive(TimeSpan timeout, out Message message)
        {
            bool retVal = InnerChannel.TryReceive(timeout, out message);
            this.InternalOnReceive(message);
            return retVal;
        }

        public bool WaitForMessage(TimeSpan timeout)
        {
            return InnerChannel.WaitForMessage(timeout);
        }

        public IAsyncResult BeginWaitForMessage(TimeSpan timeout, AsyncCallback callback, object state)
        {
            return InnerChannel.BeginWaitForMessage(timeout, callback, state);
        }

        public bool EndWaitForMessage(IAsyncResult result)
        {
            return InnerChannel.EndWaitForMessage(result);
        }
    }

    // Duplex channel built from a layered input channel plus a separate inner
    // output channel; faults on the output channel are propagated to this channel.
    class LayeredDuplexChannel : LayeredInputChannel, IDuplexChannel
    {
        IOutputChannel innerOutputChannel;
        EndpointAddress localAddress;
        // Cached delegate so the Faulted subscription can be removed in OnClosing.
        EventHandler onInnerOutputChannelFaulted;

        public LayeredDuplexChannel(ChannelManagerBase channelManager, IInputChannel innerInputChannel, EndpointAddress localAddress, IOutputChannel innerOutputChannel)
            : base(channelManager, innerInputChannel)
        {
            this.localAddress = localAddress;
            this.innerOutputChannel = innerOutputChannel;
            this.onInnerOutputChannelFaulted = new EventHandler(OnInnerOutputChannelFaulted);
            this.innerOutputChannel.Faulted += this.onInnerOutputChannelFaulted;
        }

        // Unlike the base input channel, the local address is supplied explicitly.
        public override EndpointAddress LocalAddress
        {
            get { return this.localAddress; }
        }

        public EndpointAddress RemoteAddress
        {
            get { return this.innerOutputChannel.RemoteAddress; }
        }

        public Uri Via
        {
            get { return innerOutputChannel.Via; }
        }

        // Detach the fault handler before closing so a fault during shutdown
        // does not re-fault this channel.
        protected override void OnClosing()
        {
            this.innerOutputChannel.Faulted -= this.onInnerOutputChannelFaulted;
            base.OnClosing();
        }

        protected override void OnAbort()
        {
            this.innerOutputChannel.Abort();
            base.OnAbort();
        }

        protected override IAsyncResult OnBeginClose(TimeSpan timeout, AsyncCallback callback, object state)
        {
            return new ChainedCloseAsyncResult(timeout, callback, state, base.OnBeginClose, base.OnEndClose, this.innerOutputChannel);
        }

        protected override void OnEndClose(IAsyncResult result)
        {
            ChainedCloseAsyncResult.End(result);
        }

        // Close the output side before the base (input) side, sharing one timeout.
        protected override void OnClose(TimeSpan timeout)
        {
            TimeoutHelper timeoutHelper = new TimeoutHelper(timeout);
            this.innerOutputChannel.Close(timeoutHelper.RemainingTime());
            base.OnClose(timeoutHelper.RemainingTime());
        }

        protected override IAsyncResult OnBeginOpen(TimeSpan timeout, AsyncCallback callback, object state)
        {
            return new ChainedOpenAsyncResult(timeout, callback, state, base.OnBeginOpen, base.OnEndOpen, this.innerOutputChannel);
        }

        protected override void OnEndOpen(IAsyncResult result)
        {
            ChainedOpenAsyncResult.End(result);
        }

        // Open order is the reverse of close: base (input) first, then output.
        protected override void OnOpen(TimeSpan timeout)
        {
            TimeoutHelper timeoutHelper = new TimeoutHelper(timeout);
            base.OnOpen(timeoutHelper.RemainingTime());
            innerOutputChannel.Open(timeoutHelper.RemainingTime());
        }

        public void Send(Message message)
        {
            this.Send(message, this.DefaultSendTimeout);
        }

        public void Send(Message message, TimeSpan timeout)
        {
            this.innerOutputChannel.Send(message, timeout);
        }

        public IAsyncResult BeginSend(Message message, AsyncCallback callback, object state)
        {
            return this.BeginSend(message, this.DefaultSendTimeout, callback, state);
        }

        public IAsyncResult BeginSend(Message message, TimeSpan timeout, AsyncCallback callback, object state)
        {
            return this.innerOutputChannel.BeginSend(message, timeout, callback, state);
        }

        public void EndSend(IAsyncResult result)
        {
            this.innerOutputChannel.EndSend(result);
        }

        // A fault on the inner output channel faults this channel as a whole.
        void OnInnerOutputChannelFaulted(object sender, EventArgs e)
        {
            this.Fault();
        }
    }
}
namespace Boo.Lang.Parser.Tests
{
	using NUnit.Framework;

	/// <summary>
	/// Round-trip tests for the white-space-agnostic (WSA) Boo parser: each test
	/// feeds one .boo file from the "parser/wsa" test-case directory to
	/// <c>RunParserTestCase</c> (inherited from <c>AbstractWSAParserTestFixture</c>).
	/// NOTE(review): the one-test-per-file shape strongly suggests this fixture is
	/// generated from the test-case directory listing -- edit the generator, not
	/// this file, when adding cases (confirm against the build scripts).
	/// </summary>
	[TestFixture]
	public class WSAParserRoundtripTestFixture : AbstractWSAParserTestFixture
	{
		// Single indirection point shared by all generated test bodies.
		void RunCompilerTestCase(string fname)
		{
			RunParserTestCase(fname);
		}

		[Test] public void and_or_1() { RunCompilerTestCase(@"and-or-1.boo"); }
		[Test] public void arrays_1() { RunCompilerTestCase(@"arrays-1.boo"); }
		[Test] public void arrays_2() { RunCompilerTestCase(@"arrays-2.boo"); }
		[Test] public void arrays_3() { RunCompilerTestCase(@"arrays-3.boo"); }
		[Test] public void arrays_4() { RunCompilerTestCase(@"arrays-4.boo"); }
		[Test] public void arrays_5() { RunCompilerTestCase(@"arrays-5.boo"); }
		[Test] public void arrays_6() { RunCompilerTestCase(@"arrays-6.boo"); }
		[Test] public void as_1() { RunCompilerTestCase(@"as-1.boo"); }
		[Test] public void assignment_1() { RunCompilerTestCase(@"assignment-1.boo"); }
		[Test] public void ast_literal_enum() { RunCompilerTestCase(@"ast-literal-enum.boo"); }
		[Test] public void ast_literal_varargs_method() { RunCompilerTestCase(@"ast-literal-varargs-method.boo"); }
		[Test] public void ast_literals_1() { RunCompilerTestCase(@"ast-literals-1.boo"); }
		[Test] public void ast_literals_10() { RunCompilerTestCase(@"ast-literals-10.boo"); }
		[Test] public void ast_literals_11() { RunCompilerTestCase(@"ast-literals-11.boo"); }
		[Test] public void ast_literals_2() { RunCompilerTestCase(@"ast-literals-2.boo"); }
		[Test] public void ast_literals_3() { RunCompilerTestCase(@"ast-literals-3.boo"); }
		[Test] public void ast_literals_4() { RunCompilerTestCase(@"ast-literals-4.boo"); }
		[Test] public void ast_literals_5() { RunCompilerTestCase(@"ast-literals-5.boo"); }
		[Test] public void ast_literals_6() { RunCompilerTestCase(@"ast-literals-6.boo"); }
		[Test] public void ast_literals_7() { RunCompilerTestCase(@"ast-literals-7.boo"); }
		[Test] public void ast_literals_8() { RunCompilerTestCase(@"ast-literals-8.boo"); }
		[Test] public void ast_literals_9() { RunCompilerTestCase(@"ast-literals-9.boo"); }
		[Test] public void ast_literals_if_it_looks_like_a_block_1() { RunCompilerTestCase(@"ast-literals-if-it-looks-like-a-block-1.boo"); }
		[Test] public void at_operator() { RunCompilerTestCase(@"at-operator.boo"); }
		[Test] public void attributes_1() { RunCompilerTestCase(@"attributes-1.boo"); }
		[Test] public void attributes_2() { RunCompilerTestCase(@"attributes-2.boo"); }
		[Test] public void bool_literals_1() { RunCompilerTestCase(@"bool-literals-1.boo"); }
		[Test] public void callables_1() { RunCompilerTestCase(@"callables-1.boo"); }
		[Test] public void callables_2() { RunCompilerTestCase(@"callables-2.boo"); }
		// NOTE(review): "varags" typo below matches the on-disk test-case file name.
		[Test] public void callables_with_varags() { RunCompilerTestCase(@"callables-with-varags.boo"); }
		[Test] public void cast_1() { RunCompilerTestCase(@"cast-1.boo"); }
		[Test] public void char_1() { RunCompilerTestCase(@"char-1.boo"); }
		[Test] public void char_2() { RunCompilerTestCase(@"char-2.boo"); }
		[Test] public void class_1() { RunCompilerTestCase(@"class-1.boo"); }
		[Test] public void class_2() { RunCompilerTestCase(@"class-2.boo"); }
		[Test] public void class_3() { RunCompilerTestCase(@"class-3.boo"); }
		[Test] public void closures_1() { RunCompilerTestCase(@"closures-1.boo"); }
		[Test] public void closures_10() { RunCompilerTestCase(@"closures-10.boo"); }
		[Test] public void closures_11() { RunCompilerTestCase(@"closures-11.boo"); }
		[Test] public void closures_12() { RunCompilerTestCase(@"closures-12.boo"); }
		[Test] public void closures_13() { RunCompilerTestCase(@"closures-13.boo"); }
		[Test] public void closures_14() { RunCompilerTestCase(@"closures-14.boo"); }
		[Test] public void closures_15() { RunCompilerTestCase(@"closures-15.boo"); }
		[Test] public void closures_16() { RunCompilerTestCase(@"closures-16.boo"); }
		[Test] public void closures_17() { RunCompilerTestCase(@"closures-17.boo"); }
		[Test] public void closures_18() { RunCompilerTestCase(@"closures-18.boo"); }
		[Test] public void closures_19() { RunCompilerTestCase(@"closures-19.boo"); }
		[Test] public void closures_2() { RunCompilerTestCase(@"closures-2.boo"); }
		[Test] public void closures_20() { RunCompilerTestCase(@"closures-20.boo"); }
		[Test] public void closures_21() { RunCompilerTestCase(@"closures-21.boo"); }
		[Test] public void closures_22() { RunCompilerTestCase(@"closures-22.boo"); }
		[Test] public void closures_3() { RunCompilerTestCase(@"closures-3.boo"); }
		[Test] public void closures_4() { RunCompilerTestCase(@"closures-4.boo"); }
		[Test] public void closures_5() { RunCompilerTestCase(@"closures-5.boo"); }
		[Test] public void closures_6() { RunCompilerTestCase(@"closures-6.boo"); }
		[Test] public void closures_7() { RunCompilerTestCase(@"closures-7.boo"); }
		[Test] public void closures_8() { RunCompilerTestCase(@"closures-8.boo"); }
		[Test] public void closures_9() { RunCompilerTestCase(@"closures-9.boo"); }
		[Test] public void collection_initializer() { RunCompilerTestCase(@"collection-initializer.boo"); }
		[Test] public void comments_1() { RunCompilerTestCase(@"comments-1.boo"); }
		[Test] public void comments_2() { RunCompilerTestCase(@"comments-2.boo"); }
		[Test] public void comments_3() { RunCompilerTestCase(@"comments-3.boo"); }
		[Test] public void comments_4() { RunCompilerTestCase(@"comments-4.boo"); }
		[Test] public void conditional_1() { RunCompilerTestCase(@"conditional-1.boo"); }
		[Test] public void declarations_1() { RunCompilerTestCase(@"declarations-1.boo"); }
		[Test] public void declarations_2() { RunCompilerTestCase(@"declarations-2.boo"); }
		[Test] public void declarations_3() { RunCompilerTestCase(@"declarations-3.boo"); }
		[Test] public void double_literals_1() { RunCompilerTestCase(@"double-literals-1.boo"); }
		[Test] public void dsl_1() { RunCompilerTestCase(@"dsl-1.boo"); }
		[Test] public void elif_1() { RunCompilerTestCase(@"elif-1.boo"); }
		[Test] public void elif_2() { RunCompilerTestCase(@"elif-2.boo"); }
		[Test] public void enumerable_type_shortcut() { RunCompilerTestCase(@"enumerable-type-shortcut.boo"); }
		[Test] public void enums_1() { RunCompilerTestCase(@"enums-1.boo"); }
		[Test] public void events_1() { RunCompilerTestCase(@"events-1.boo"); }
		[Test] public void explode_1() { RunCompilerTestCase(@"explode-1.boo"); }
		[Test] public void explode_2() { RunCompilerTestCase(@"explode-2.boo"); }
		[Test] public void expressions_1() { RunCompilerTestCase(@"expressions-1.boo"); }
		[Test] public void expressions_2() { RunCompilerTestCase(@"expressions-2.boo"); }
		[Test] public void expressions_3() { RunCompilerTestCase(@"expressions-3.boo"); }
		[Test] public void expressions_4() { RunCompilerTestCase(@"expressions-4.boo"); }
		[Test] public void expressions_5() { RunCompilerTestCase(@"expressions-5.boo"); }
		[Test] public void extensions_1() { RunCompilerTestCase(@"extensions-1.boo"); }
		[Test] public void fields_1() { RunCompilerTestCase(@"fields-1.boo"); }
		[Test] public void fields_2() { RunCompilerTestCase(@"fields-2.boo"); }
		[Test] public void fields_3() { RunCompilerTestCase(@"fields-3.boo"); }
		[Test] public void fields_4() { RunCompilerTestCase(@"fields-4.boo"); }
		[Test] public void fields_5() { RunCompilerTestCase(@"fields-5.boo"); }
		[Test] public void fields_6() { RunCompilerTestCase(@"fields-6.boo"); }
		// NOTE(review): the for_or/while_or family of file names uses underscores,
		// unlike the hyphenated names above -- matches the on-disk test cases.
		[Test] public void for_or_1() { RunCompilerTestCase(@"for_or-1.boo"); }
		[Test] public void for_or_then_1() { RunCompilerTestCase(@"for_or_then-1.boo"); }
		[Test] public void for_then_1() { RunCompilerTestCase(@"for_then-1.boo"); }
		[Test] public void generators_1() { RunCompilerTestCase(@"generators-1.boo"); }
		[Test] public void generators_2() { RunCompilerTestCase(@"generators-2.boo"); }
		[Test] public void generators_3() { RunCompilerTestCase(@"generators-3.boo"); }
		[Test] public void generic_method_1() { RunCompilerTestCase(@"generic-method-1.boo"); }
		[Test] public void generic_method_2() { RunCompilerTestCase(@"generic-method-2.boo"); }
		[Test] public void generic_method_3() { RunCompilerTestCase(@"generic-method-3.boo"); }
		[Test] public void generic_parameter_constraints() { RunCompilerTestCase(@"generic-parameter-constraints.boo"); }
		[Test] public void generics_1() { RunCompilerTestCase(@"generics-1.boo"); }
		[Test] public void generics_2() { RunCompilerTestCase(@"generics-2.boo"); }
		[Test] public void generics_3() { RunCompilerTestCase(@"generics-3.boo"); }
		[Test] public void generics_4() { RunCompilerTestCase(@"generics-4.boo"); }
		[Test] public void generics_5() { RunCompilerTestCase(@"generics-5.boo"); }
		[Test] public void getset_1() { RunCompilerTestCase(@"getset-1.boo"); }
		[Test] public void goto_1() { RunCompilerTestCase(@"goto-1.boo"); }
		[Test] public void goto_2() { RunCompilerTestCase(@"goto-2.boo"); }
		[Test] public void hash_1() { RunCompilerTestCase(@"hash-1.boo"); }
		[Test] public void hash_initializer() { RunCompilerTestCase(@"hash-initializer.boo"); }
		[Test] public void iif_1() { RunCompilerTestCase(@"iif-1.boo"); }
		[Test] public void import_1() { RunCompilerTestCase(@"import-1.boo"); }
		[Test] public void import_2() { RunCompilerTestCase(@"import-2.boo"); }
		[Test] public void in_not_in_1() { RunCompilerTestCase(@"in-not-in-1.boo"); }
		[Test] public void in_not_in_2() { RunCompilerTestCase(@"in-not-in-2.boo"); }
		[Test] public void in_not_in_3() { RunCompilerTestCase(@"in-not-in-3.boo"); }
		[Test] public void inplace_1() { RunCompilerTestCase(@"inplace-1.boo"); }
		[Test] public void internal_generic_callable_type_1() { RunCompilerTestCase(@"internal-generic-callable-type-1.boo"); }
		[Test] public void internal_generic_type_1() { RunCompilerTestCase(@"internal-generic-type-1.boo"); }
		[Test] public void internal_generic_type_2() { RunCompilerTestCase(@"internal-generic-type-2.boo"); }
		[Test] public void internal_generic_type_3() { RunCompilerTestCase(@"internal-generic-type-3.boo"); }
		[Test] public void internal_generic_type_4() { RunCompilerTestCase(@"internal-generic-type-4.boo"); }
		[Test] public void internal_generic_type_5() { RunCompilerTestCase(@"internal-generic-type-5.boo"); }
		[Test] public void internal_generic_type_6() { RunCompilerTestCase(@"internal-generic-type-6.boo"); }
		[Test] public void interpolation_1() { RunCompilerTestCase(@"interpolation-1.boo"); }
		[Test] public void interpolation_2() { RunCompilerTestCase(@"interpolation-2.boo"); }
		[Test] public void interpolation_3() { RunCompilerTestCase(@"interpolation-3.boo"); }
		[Test] public void interpolation_4() { RunCompilerTestCase(@"interpolation-4.boo"); }
		[Test] public void invocation_1() { RunCompilerTestCase(@"invocation-1.boo"); }
		[Test] public void isa_1() { RunCompilerTestCase(@"isa-1.boo"); }
		[Test] public void keywords_as_members_1() { RunCompilerTestCase(@"keywords-as-members-1.boo"); }
		[Test] public void line_continuation_1() { RunCompilerTestCase(@"line-continuation-1.boo"); }
		[Test] public void list_1() { RunCompilerTestCase(@"list-1.boo"); }
		[Test] public void long_literals_1() { RunCompilerTestCase(@"long-literals-1.boo"); }
		[Test] public void macro_doc() { RunCompilerTestCase(@"macro-doc.boo"); }
		[Test] public void macros_1() { RunCompilerTestCase(@"macros-1.boo"); }
		[Test] public void macros_2() { RunCompilerTestCase(@"macros-2.boo"); }
		[Test] public void macros_3() { RunCompilerTestCase(@"macros-3.boo"); }
		[Test] public void macros_anywhere_1() { RunCompilerTestCase(@"macros-anywhere-1.boo"); }
		[Test] public void member_references_1() { RunCompilerTestCase(@"member-references-1.boo"); }
		[Test] public void method_declaration_in_macro_application() { RunCompilerTestCase(@"method-declaration-in-macro-application.boo"); }
		[Test] public void method_declarations_in_nested_macro_application() { RunCompilerTestCase(@"method-declarations-in-nested-macro-application.boo"); }
		[Test] public void module_1() { RunCompilerTestCase(@"module-1.boo"); }
		[Test] public void module_2() { RunCompilerTestCase(@"module-2.boo"); }
		[Test] public void module_3() { RunCompilerTestCase(@"module-3.boo"); }
		[Test] public void named_arguments_1() { RunCompilerTestCase(@"named-arguments-1.boo"); }
		[Test] public void named_arguments_2() { RunCompilerTestCase(@"named-arguments-2.boo"); }
		[Test] public void new_1() { RunCompilerTestCase(@"new-1.boo"); }
		[Test] public void not_1() { RunCompilerTestCase(@"not-1.boo"); }
		[Test] public void not_2() { RunCompilerTestCase(@"not-2.boo"); }
		[Test] public void null_1() { RunCompilerTestCase(@"null-1.boo"); }
		[Test] public void omitted_member_target_1() { RunCompilerTestCase(@"omitted-member-target-1.boo"); }
		[Test] public void ones_complement_1() { RunCompilerTestCase(@"ones-complement-1.boo"); }
		[Test] public void regex_literals_1() { RunCompilerTestCase(@"regex-literals-1.boo"); }
		[Test] public void regex_literals_2() { RunCompilerTestCase(@"regex-literals-2.boo"); }
		[Test] public void return_1() { RunCompilerTestCase(@"return-1.boo"); }
		[Test] public void return_2() { RunCompilerTestCase(@"return-2.boo"); }
		[Test] public void self_1() { RunCompilerTestCase(@"self-1.boo"); }
		[Test] public void semicolons_1() { RunCompilerTestCase(@"semicolons-1.boo"); }
		[Test] public void slicing_1() { RunCompilerTestCase(@"slicing-1.boo"); }
		[Test] public void slicing_2() { RunCompilerTestCase(@"slicing-2.boo"); }
		[Test] public void splicing_1() { RunCompilerTestCase(@"splicing-1.boo"); }
		[Test] public void splicing_class_body() { RunCompilerTestCase(@"splicing-class-body.boo"); }
		[Test] public void splicing_enum_body() { RunCompilerTestCase(@"splicing-enum-body.boo"); }
		[Test] public void string_literals_1() { RunCompilerTestCase(@"string-literals-1.boo"); }
		[Test] public void struct_1() { RunCompilerTestCase(@"struct-1.boo"); }
		[Test] public void timespan_literals_1() { RunCompilerTestCase(@"timespan-literals-1.boo"); }
		[Test] public void try_1() { RunCompilerTestCase(@"try-1.boo"); }
		[Test] public void try_2() { RunCompilerTestCase(@"try-2.boo"); }
		[Test] public void try_3() { RunCompilerTestCase(@"try-3.boo"); }
		[Test] public void type_references_1() { RunCompilerTestCase(@"type-references-1.boo"); }
		[Test] public void unless_1() { RunCompilerTestCase(@"unless-1.boo"); }
		[Test] public void varargs_1() { RunCompilerTestCase(@"varargs-1.boo"); }
		[Test] public void while_or_1() { RunCompilerTestCase(@"while_or-1.boo"); }
		[Test] public void while_or_then_1() { RunCompilerTestCase(@"while_or_then-1.boo"); }
		[Test] public void while_then_1() { RunCompilerTestCase(@"while_then-1.boo"); }
		[Test] public void xor_1() { RunCompilerTestCase(@"xor-1.boo"); }
		[Test] public void yield_1() { RunCompilerTestCase(@"yield-1.boo"); }

		// Root of the .boo files above, relative to the shared test-case directory.
		override protected string GetRelativeTestCasesPath()
		{
			return "parser/wsa";
		}
	}
}
/*
 * Copyright (c) 2006-2016, openmetaverse.co
 * All rights reserved.
 *
 * - Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions are met:
 *
 * - Redistributions of source code must retain the above copyright notice, this
 *   list of conditions and the following disclaimer.
 * - Neither the name of the openmetaverse.co nor the names
 *   of its contributors may be used to endorse or promote products derived from
 *   this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;

namespace OpenMetaverse
{
    /// <summary>
    /// Disposable handle around an instance of <typeparamref name="T"/> checked out
    /// of an <see cref="ObjectPoolBase{T}"/>. Disposing the wrapper checks the
    /// instance back into the segment it came from.
    /// </summary>
    public sealed class WrappedObject<T> : IDisposable where T : class
    {
        private T _instance;
        internal readonly ObjectPoolSegment<T> _owningSegment;
        internal readonly ObjectPoolBase<T> _owningObjectPool;
        private bool _disposed = false;

        internal WrappedObject(ObjectPoolBase<T> owningPool, ObjectPoolSegment<T> ownerSegment, T activeInstance)
        {
            _owningObjectPool = owningPool;
            _owningSegment = ownerSegment;
            _instance = activeInstance;
        }

        /// <summary>
        /// Safety net for callers that leak the wrapper without disposing it:
        /// the finalizer resurrects the wrapper and returns the pooled instance
        /// to its owning segment so the instance is not lost.
        /// </summary>
        ~WrappedObject()
        {
#if !PocketPC
            // If the AppDomain is being unloaded, or the CLR is
            // shutting down, just exit gracefully
            if (Environment.HasShutdownStarted)
                return;
#endif
            // Object Resurrection in Action!
            GC.ReRegisterForFinalize(this);

            // Return this instance back to the owning queue
            _owningObjectPool.CheckIn(_owningSegment, _instance);
        }

        /// <summary>
        /// Returns an instance of the class that has been checked out of the Object Pool.
        /// </summary>
        /// <exception cref="ObjectDisposedException">Thrown if the wrapper has been disposed.</exception>
        public T Instance
        {
            get
            {
                if (_disposed)
                    throw new ObjectDisposedException("WrappedObject");
                return _instance;
            }
        }

        /// <summary>
        /// Checks the instance back into the object pool
        /// </summary>
        public void Dispose()
        {
            if (_disposed)
                return;

            _disposed = true;
            _owningObjectPool.CheckIn(_owningSegment, _instance);
            // The finalizer exists only to recover leaked instances; a clean
            // Dispose makes it unnecessary.
            GC.SuppressFinalize(this);
        }
    }

    /// <summary>
    /// Base class for a segment-based object pool. Objects are allocated in
    /// batches ("segments") so that instances created together are likely to be
    /// close together on the managed heap; segments beyond the configured
    /// minimum are reclaimed by a periodic cleanup timer once they are fully
    /// checked in and old enough.
    /// </summary>
    public abstract class ObjectPoolBase<T> : IDisposable where T : class
    {
        private int _itemsPerSegment = 32;
        private int _minimumSegmentCount = 1;

        // A segment won't be eligible for cleanup unless it's at least this old...
        private TimeSpan _minimumAgeToCleanup = new TimeSpan(0, 5, 0);

        // ever increasing segment counter; also serves as the dictionary key
        private int _activeSegment = 0;

        private bool _gc = true;
        private volatile bool _disposed = false;

        private Dictionary<int, ObjectPoolSegment<T>> _segments = new Dictionary<int, ObjectPoolSegment<T>>();
        private object _syncRoot = new object();
        private object _timerLock = new object();

        // periodic cleanup timer, created in Initialize
        System.Threading.Timer _timer;
        int _cleanupFrequency;

        /// <summary>
        /// Creates a new instance of the ObjectPoolBase class. Initialize MUST be called
        /// after using this constructor.
        /// </summary>
        protected ObjectPoolBase()
        {
        }

        /// <summary>
        /// Creates a new instance of the ObjectPool Base class.
        /// </summary>
        /// <param name="itemsPerSegment">The object pool is composed of segments, which
        /// are allocated whenever the size of the pool is exceeded. The number of items
        /// in a segment should be large enough that allocating a new segment is a rare
        /// thing.</param>
        /// <param name="minimumSegmentCount">The minimum number of segments that may exist.</param>
        /// <param name="gcOnPoolGrowth">Perform a full GC.Collect whenever a segment is allocated, and then again after allocation to compact the heap.</param>
        /// <param name="cleanupFrequenceMS">The frequency which segments are checked to see if they're eligible for cleanup.</param>
        protected ObjectPoolBase(int itemsPerSegment, int minimumSegmentCount, bool gcOnPoolGrowth, int cleanupFrequenceMS)
        {
            Initialize(itemsPerSegment, minimumSegmentCount, gcOnPoolGrowth, cleanupFrequenceMS);
        }

        /// <summary>
        /// Configures the pool, pre-allocates the minimum number of segments,
        /// and starts the cleanup timer.
        /// </summary>
        protected void Initialize(int itemsPerSegment, int minimumSegmentCount, bool gcOnPoolGrowth, int cleanupFrequenceMS)
        {
            _itemsPerSegment = itemsPerSegment;
            _minimumSegmentCount = minimumSegmentCount;
            _gc = gcOnPoolGrowth;
            // FIX: record the frequency so CleanupFrequencyMilliseconds reports the
            // value the timer was actually started with (it was never assigned before,
            // so the getter returned 0 until the setter was used).
            _cleanupFrequency = cleanupFrequenceMS;

            // force garbage collection to make sure these new long lived objects
            // cause as little fragmentation as possible
            if (_gc)
                System.GC.Collect();

            lock (_syncRoot)
            {
                while (_segments.Count < this.MinimumSegmentCount)
                {
                    // Initial segments are never eligible for cleanup.
                    ObjectPoolSegment<T> segment = CreateSegment(false);
                    _segments.Add(segment.SegmentNumber, segment);
                }
            }

            // This forces a compact, to make sure our objects fill in any holes in the heap.
            if (_gc)
            {
                System.GC.Collect();
            }

            _timer = new Timer(CleanupThreadCallback, null, cleanupFrequenceMS, cleanupFrequenceMS);
        }

        /// <summary>
        /// Forces the segment cleanup algorithm to be run. This method is intended
        /// primarly for use from the Unit Test libraries.
        /// </summary>
        internal void ForceCleanup()
        {
            CleanupThreadCallback(null);
        }

        /// <summary>
        /// Timer callback: removes and disposes segments that are fully checked in
        /// and older than the minimum cleanup age, as long as more than the minimum
        /// number of segments exist.
        /// </summary>
        private void CleanupThreadCallback(object state)
        {
            if (_disposed)
                return;

            // Skip this tick if a previous cleanup is still running.
            if (Monitor.TryEnter(_timerLock) == false)
                return;

            try
            {
                lock (_syncRoot)
                {
                    // If we're below, or at, or minimum segment count threshold,
                    // there's no point in going any further.
                    if (_segments.Count <= _minimumSegmentCount)
                        return;

                    for (int i = _activeSegment; i > 0; i--)
                    {
                        ObjectPoolSegment<T> segment;
                        if (_segments.TryGetValue(i, out segment) == true)
                        {
                            // For the "old" segments that were allocated at startup, this will
                            // always be false, as their expiration dates are set at infinity.
                            if (segment.CanBeCleanedUp())
                            {
                                _segments.Remove(i);
                                segment.Dispose();
                            }
                        }
                    }
                }
            }
            finally
            {
                Monitor.Exit(_timerLock);
            }
        }

        /// <summary>
        /// Responsible for allocating 1 instance of an object that will be stored in a segment.
        /// </summary>
        /// <returns>An instance of whatever object the pool is pooling.</returns>
        protected abstract T GetObjectInstance();

        /// <summary>
        /// Allocates a segment full of fresh instances. Must be called while
        /// holding <c>_syncRoot</c> (relies on that for <c>_activeSegment</c>).
        /// </summary>
        /// <param name="allowSegmentToBeCleanedUp">False for the startup segments,
        /// which get an expiration of <see cref="DateTime.MaxValue"/> so they are
        /// never reclaimed.</param>
        private ObjectPoolSegment<T> CreateSegment(bool allowSegmentToBeCleanedUp)
        {
            if (_disposed)
                throw new ObjectDisposedException("ObjectPoolBase");

            if (allowSegmentToBeCleanedUp)
                Logger.Log("Creating new object pool segment", Helpers.LogLevel.Info);

            // This method is called inside a lock, so no interlocked stuff required.
            int segmentToAdd = _activeSegment;
            _activeSegment++;

            Queue<T> buffers = new Queue<T>();
            for (int i = 1; i <= this._itemsPerSegment; i++)
            {
                T obj = GetObjectInstance();
                buffers.Enqueue(obj);
            }

            // certain segments we don't want to ever be cleaned up (the initial segments)
            DateTime cleanupTime = (allowSegmentToBeCleanedUp) ? DateTime.Now.Add(this._minimumAgeToCleanup) : DateTime.MaxValue;

            ObjectPoolSegment<T> segment = new ObjectPoolSegment<T>(segmentToAdd, buffers, cleanupTime);
            return segment;
        }

        /// <summary>
        /// Checks in an instance of T owned by the object pool. This method is only intended to be called
        /// by the <c>WrappedObject</c> class.
        /// </summary>
        /// <param name="owningSegment">The segment from which the instance is checked out.</param>
        /// <param name="instance">The instance of <c>T</c> to check back into the segment.</param>
        internal void CheckIn(ObjectPoolSegment<T> owningSegment, T instance)
        {
            lock (_syncRoot)
            {
                owningSegment.CheckInObject(instance);
            }
        }

        /// <summary>
        /// Checks an instance of <c>T</c> from the pool. If the pool is not sufficient to
        /// allow the checkout, a new segment is created.
        /// </summary>
        /// <returns>A <c>WrappedObject</c> around the instance of <c>T</c>. To check
        /// the instance back into the segment, be sure to dispose the WrappedObject
        /// when finished.</returns>
        public WrappedObject<T> CheckOut()
        {
            if (_disposed)
                throw new ObjectDisposedException("ObjectPoolBase");

            // It's key that this CheckOut always, always, uses a pooled object
            // from the oldest available segment. This will help keep the "newer"
            // segments from being used - which in turn, makes them eligible
            // for deletion.
            lock (_syncRoot)
            {
                ObjectPoolSegment<T> targetSegment = null;

                // find the oldest segment that has items available for checkout
                for (int i = 0; i < _activeSegment; i++)
                {
                    ObjectPoolSegment<T> segment;
                    if (_segments.TryGetValue(i, out segment) == true)
                    {
                        if (segment.AvailableItems > 0)
                        {
                            targetSegment = segment;
                            break;
                        }
                    }
                }

                if (targetSegment == null)
                {
                    // We couldn't find a segment that had any available space in it,
                    // so it's time to create a new segment.
                    // Before creating the segment, do a GC to make sure the heap
                    // is compacted.
                    if (_gc)
                        GC.Collect();

                    targetSegment = CreateSegment(true);

                    if (_gc)
                        GC.Collect();

                    _segments.Add(targetSegment.SegmentNumber, targetSegment);
                }

                WrappedObject<T> obj = new WrappedObject<T>(this, targetSegment, targetSegment.CheckOutObject());
                return obj;
            }
        }

        /// <summary>
        /// The total number of segments created. Intended to be used by the Unit Tests.
        /// </summary>
        public int TotalSegments
        {
            get
            {
                if (_disposed)
                    throw new ObjectDisposedException("ObjectPoolBase");
                lock (_syncRoot)
                {
                    return _segments.Count;
                }
            }
        }

        /// <summary>
        /// The number of items that are in a segment. Items in a segment
        /// are all allocated at the same time, and are hopefully close to
        /// each other in the managed heap.
        /// </summary>
        public int ItemsPerSegment
        {
            get
            {
                if (_disposed)
                    throw new ObjectDisposedException("ObjectPoolBase");
                return _itemsPerSegment;
            }
        }

        /// <summary>
        /// The minimum number of segments. When segments are reclaimed,
        /// this number of segments will always be left alone. These
        /// segments are allocated at startup.
        /// </summary>
        public int MinimumSegmentCount
        {
            get
            {
                if (_disposed)
                    throw new ObjectDisposedException("ObjectPoolBase");
                return _minimumSegmentCount;
            }
        }

        /// <summary>
        /// The age a segment must be before it's eligible for cleanup.
        /// This is used to prevent thrash, and typical values are in
        /// the 5 minute range.
        /// </summary>
        public TimeSpan MinimumSegmentAgePriorToCleanup
        {
            get
            {
                if (_disposed)
                    throw new ObjectDisposedException("ObjectPoolBase");
                return _minimumAgeToCleanup;
            }
            set
            {
                if (_disposed)
                    throw new ObjectDisposedException("ObjectPoolBase");
                _minimumAgeToCleanup = value;
            }
        }

        /// <summary>
        /// The frequency which the cleanup thread runs. This is typically
        /// expected to be in the 5 minute range. Setting this reschedules
        /// the cleanup timer.
        /// </summary>
        public int CleanupFrequencyMilliseconds
        {
            get
            {
                if (_disposed)
                    throw new ObjectDisposedException("ObjectPoolBase");
                return _cleanupFrequency;
            }
            set
            {
                if (_disposed)
                    throw new ObjectDisposedException("ObjectPoolBase");
                Interlocked.Exchange(ref _cleanupFrequency, value);
                _timer.Change(_cleanupFrequency, _cleanupFrequency);
            }
        }

        #region IDisposable Members

        public void Dispose()
        {
            if (_disposed)
                return;

            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// Stops the cleanup timer and disposes every segment. Exceptions from
        /// individual segment disposal are deliberately swallowed (best-effort).
        /// </summary>
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                lock (_syncRoot)
                {
                    if (_disposed)
                        return;

                    _timer.Dispose();
                    _disposed = true;

                    foreach (KeyValuePair<int, ObjectPoolSegment<T>> kvp in _segments)
                    {
                        try { kvp.Value.Dispose(); }
                        catch (Exception) { }
                    }
                    _segments.Clear();
                }
            }
        }

        #endregion
    }

    /// <summary>
    /// One batch of pooled instances. A segment can be reclaimed once every
    /// instance has been checked back in and its deletion time has passed.
    /// </summary>
    internal class ObjectPoolSegment<T> : IDisposable where T : class
    {
        private Queue<T> _liveInstances = new Queue<T>();
        private int _segmentNumber;
        private int _originalCount;
        private bool _isDisposed = false;
        private DateTime _eligibleForDeletionAt;

        public int SegmentNumber { get { return _segmentNumber; } }
        public int AvailableItems { get { return _liveInstances.Count; } }
        public DateTime DateEligibleForDeletion { get { return _eligibleForDeletionAt; } }

        public ObjectPoolSegment(int segmentNumber, Queue<T> liveInstances, DateTime eligibleForDeletionAt)
        {
            _segmentNumber = segmentNumber;
            _liveInstances = liveInstances;
            _originalCount = liveInstances.Count;
            _eligibleForDeletionAt = eligibleForDeletionAt;
        }

        /// <summary>
        /// True when all instances are checked in and the deletion time has passed.
        /// </summary>
        public bool CanBeCleanedUp()
        {
            if (_isDisposed == true)
                throw new ObjectDisposedException("ObjectPoolSegment");

            return ((_originalCount == _liveInstances.Count) &&
                (DateTime.Now > _eligibleForDeletionAt));
        }

        public void Dispose()
        {
            if (_isDisposed)
                return;

            _isDisposed = true;

            // FIX: the original tested "typeof(T) is IDisposable", which asks whether
            // the System.Type *object* implements IDisposable - that is always false,
            // so pooled disposable instances were never disposed. Test whether T
            // itself is assignable to IDisposable instead.
            bool shouldDispose = typeof(IDisposable).IsAssignableFrom(typeof(T));

            while (_liveInstances.Count != 0)
            {
                T instance = _liveInstances.Dequeue();
                if (shouldDispose)
                {
                    try { (instance as IDisposable).Dispose(); }
                    catch (Exception) { }
                }
            }
        }

        internal void CheckInObject(T o)
        {
            if (_isDisposed == true)
                throw new ObjectDisposedException("ObjectPoolSegment");
            _liveInstances.Enqueue(o);
        }

        internal T CheckOutObject()
        {
            if (_isDisposed == true)
                throw new ObjectDisposedException("ObjectPoolSegment");
            if (0 == _liveInstances.Count)
                throw new InvalidOperationException("No Objects Available for Checkout");

            T o = _liveInstances.Dequeue();
            return o;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using Umbraco.Cms.Core.Models;
using Umbraco.Cms.Core.Models.PublishedContent;
using Umbraco.Cms.Core.Services;
using Umbraco.Extensions;

namespace Umbraco.Cms.Core.Routing
{
    /// <summary>
    /// Mutable builder that accumulates routing state and produces an
    /// immutable <see cref="PublishedRequest"/> via <see cref="Build"/>.
    /// </summary>
    public class PublishedRequestBuilder : IPublishedRequestBuilder
    {
        private readonly IFileService _fileService;

        private IReadOnlyDictionary<string, string> _responseHeaders;
        private bool _setNoCacheHeader;
        private IReadOnlyList<string> _cacheExtensionList;
        private string _redirectTarget;
        private HttpStatusCode? _statusCode;
        private IPublishedContent _content;
        private bool _ignoreContentCollisions;

        /// <summary>
        /// Initializes a new instance of the <see cref="PublishedRequestBuilder"/> class.
        /// </summary>
        public PublishedRequestBuilder(Uri uri, IFileService fileService)
        {
            Uri = uri;
            AbsolutePathDecoded = uri.GetAbsolutePathDecoded();
            _fileService = fileService;
        }

        /// <inheritdoc/>
        public Uri Uri { get; }

        /// <inheritdoc/>
        public string AbsolutePathDecoded { get; }

        /// <inheritdoc/>
        public DomainAndUri Domain { get; private set; }

        /// <inheritdoc/>
        public string Culture { get; private set; }

        /// <inheritdoc/>
        public ITemplate Template { get; private set; }

        /// <inheritdoc/>
        public bool IsInternalRedirect { get; private set; }

        /// <inheritdoc/>
        // A null nullable enum casts to a null nullable int, so no HasValue check is needed.
        public int? ResponseStatusCode => (int?)_statusCode;

        /// <inheritdoc/>
        public IPublishedContent PublishedContent
        {
            get => _content;
            private set
            {
                // Assigning content resets anything derived from the previous content.
                _content = value;
                IsInternalRedirect = false;
                Template = null;
            }
        }

        /// <inheritdoc/>
        public IPublishedRequest Build() => new PublishedRequest(
            Uri,
            AbsolutePathDecoded,
            PublishedContent,
            IsInternalRedirect,
            Template,
            Domain,
            Culture,
            _redirectTarget,
            (int?)_statusCode,
            _cacheExtensionList,
            _responseHeaders,
            _setNoCacheHeader,
            _ignoreContentCollisions);

        /// <inheritdoc/>
        public IPublishedRequestBuilder SetNoCacheHeader(bool cacheability)
        {
            _setNoCacheHeader = cacheability;
            return this;
        }

        /// <inheritdoc/>
        public IPublishedRequestBuilder SetCacheExtensions(IEnumerable<string> cacheExtensions)
        {
            _cacheExtensionList = cacheExtensions.ToList();
            return this;
        }

        /// <inheritdoc/>
        public IPublishedRequestBuilder SetCulture(string culture)
        {
            Culture = culture;
            return this;
        }

        /// <inheritdoc/>
        public IPublishedRequestBuilder SetDomain(DomainAndUri domain)
        {
            // The domain carries its own culture, which becomes the request culture.
            Domain = domain;
            SetCulture(domain.Culture);
            return this;
        }

        /// <inheritdoc/>
        public IPublishedRequestBuilder SetHeaders(IReadOnlyDictionary<string, string> headers)
        {
            _responseHeaders = headers;
            return this;
        }

        /// <inheritdoc/>
        public IPublishedRequestBuilder SetInternalRedirect(IPublishedContent content)
        {
            // unless a template has been set already by the finder,
            // template should be null at that point.

            // When redirecting to the same content, keep the current content (and
            // therefore the current template); otherwise assigning PublishedContent
            // resets the template. Either way this becomes an internal redirect.
            bool redirectingToSelf = PublishedContent != null && content.Id == PublishedContent.Id;
            if (!redirectingToSelf)
            {
                PublishedContent = content;
            }

            IsInternalRedirect = true;
            return this;
        }

        /// <inheritdoc/>
        public IPublishedRequestBuilder SetPublishedContent(IPublishedContent content)
        {
            PublishedContent = content;
            IsInternalRedirect = false;
            return this;
        }

        /// <inheritdoc/>
        public IPublishedRequestBuilder SetRedirect(string url, int status = (int)HttpStatusCode.Redirect)
        {
            _redirectTarget = url;
            _statusCode = (HttpStatusCode)status;
            return this;
        }

        /// <inheritdoc/>
        public IPublishedRequestBuilder SetRedirectPermanent(string url)
        {
            _redirectTarget = url;
            _statusCode = HttpStatusCode.Moved;
            return this;
        }

        /// <inheritdoc/>
        public IPublishedRequestBuilder SetResponseStatus(int code)
        {
            _statusCode = (HttpStatusCode)code;
            return this;
        }

        /// <inheritdoc/>
        public IPublishedRequestBuilder SetTemplate(ITemplate template)
        {
            Template = template;
            return this;
        }

        /// <inheritdoc/>
        public bool TrySetTemplate(string alias)
        {
            if (string.IsNullOrWhiteSpace(alias))
            {
                Template = null;
                return true;
            }

            // NOTE - can we still get it with whitespaces in it due to old legacy bugs?
            ITemplate found = _fileService.GetTemplate(alias.Replace(" ", string.Empty));
            if (found == null)
            {
                return false;
            }

            Template = found;
            return true;
        }

        /// <inheritdoc/>
        public void IgnorePublishedContentCollisions() => _ignoreContentCollisions = true;
    }
}