context
stringlengths
2.52k
185k
gt
stringclasses
1 value
//
// KeyInfoTest.cs - Test Cases for KeyInfo
//
// Author:
//  Sebastien Pouliot <sebastien@ximian.com>
//
// (C) 2002, 2003 Motus Technologies Inc. (http://www.motus.com)
// Copyright (C) 2004-2005 Novell, Inc (http://www.novell.com)
//
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.

using System.Linq;
using System.Security.Cryptography.X509Certificates;
using System.Xml;
using Xunit;

namespace System.Security.Cryptography.Xml.Tests
{
    // Tests for KeyInfo: round-tripping each KeyInfoClause subtype through
    // AddClause/GetXml, plus LoadXml and edge-case (null/invalid input) behavior.
    public class KeyInfoTest
    {
        // xUnit constructs a fresh test-class instance for every [Fact],
        // so each test starts from an empty KeyInfo.
        private KeyInfo info;

        public KeyInfoTest()
        {
            info = new KeyInfo();
        }

        [Fact]
        public void EmptyKeyInfo()
        {
            // With no clauses, KeyInfo serializes to a single self-closed element.
            Assert.Equal("<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\" />", (info.GetXml().OuterXml));
            Assert.Equal(0, info.Count);
        }

        [Fact]
        public void KeyInfoName()
        {
            // A KeyInfoName clause serializes as a <KeyName> child.
            KeyInfoName name = new KeyInfoName();
            name.Value = "Mono::";
            info.AddClause(name);
            Assert.Equal("<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><KeyName>Mono::</KeyName></KeyInfo>", (info.GetXml().OuterXml));
            Assert.Equal(1, info.Count);
        }

        [Fact]
        public void KeyInfoNode()
        {
            // Arbitrary XML wrapped in a KeyInfoNode keeps its own (empty) namespace.
            string test = "<Test>KeyInfoNode</Test>";
            XmlDocument doc = new XmlDocument();
            doc.LoadXml(test);
            KeyInfoNode node = new KeyInfoNode(doc.DocumentElement);
            info.AddClause(node);
            Assert.Equal("<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><Test xmlns=\"\">KeyInfoNode</Test></KeyInfo>", (info.GetXml().OuterXml));
            Assert.Equal(1, info.Count);
        }

        // Base64-encoded DSA parameters shared by DSAKeyValue() and Complex().
        private static string dsaP = "rjxsMU368YOCTQejWkiuO9e/vUVwkLtq1jKiU3TtJ53hBJqjFRuTa228vZe+BH2su9RPn/vYFWfQDv6zgBYe3eNdu4Afw+Ny0FatX6dl3E77Ra6Tsd3MmLXBiGSQ1mMNd5G2XQGpbt9zsGlUaexXekeMLxIufgfZLwYp67M+2WM=";
        private static string dsaQ = "tf0K9rMyvUrU4cIkwbCrDRhQAJk=";
        private static string dsaG = "S8Z+1pGCed00w6DtVcqZLKjfqlCJ7JsugEFIgSy/Vxtu9YGCMclV4ijGEbPo/jU8YOSMuD7E9M7UaopMRcmKQjoKZzoJjkgVFP48Ohxl1f08lERnButsxanx3+OstFwUGQ8XNaGg3KrIoZt1FUnfxN3RHHTvVhjzNSHxMGULGaU=";
        private static string dsaY = "LnrxxRGLYeV2XLtK3SYz8RQHlHFZYrtznDZyMotuRfO5uC5YODhSFyLXvb1qB3WeGtF4h3Eo4KzHgMgfN2ZMlffxFRhJgTtH3ctbL8lfQoDkjeiPPnYGhspdJxr0tyZmiy0gkjJG3vwHYrLnvZWx9Wm/unqiOlGBPNuxJ+hOeP8=";
        // The optional DSA values (J, Seed, PgenCounter) are kept for reference
        // but are not currently exercised by these tests.
        //private static string dsaJ = "9RhE5TycDtdEIXxS3HfxFyXYgpy81zY5lVjwD6E9JP37MWEi80BlX6ab1YPm6xYSEoqReMPP9RgGiW6DuACpgI7+8vgCr4i/7VhzModJAA56PwvTu6UMt9xxKU/fT672v8ucREkMWoc7lEey";
        //private static string dsaSeed = "HxW3N4RHWVgqDQKuGg7iJTUTiCs=";
        //private static string dsaPgenCounter = "Asw=";
        // private static string xmlDSA = "<DSAKeyValue><P>" + dsaP + "</P><Q>" + dsaQ + "</Q><G>" + dsaG + "</G><Y>" + dsaY + "</Y><J>" + dsaJ + "</J><Seed>" + dsaSeed + "</Seed><PgenCounter>" + dsaPgenCounter + "</PgenCounter></DSAKeyValue>";
        private static string xmlDSA = "<DSAKeyValue><P>" + dsaP + "</P><Q>" + dsaQ + "</Q><G>" + dsaG + "</G><Y>" + dsaY + "</Y></DSAKeyValue>";

        [Fact]
        public void DSAKeyValue()
        {
            using (DSA key = DSA.Create())
            {
                key.ImportParameters(new DSAParameters
                {
                    P = Convert.FromBase64String(dsaP),
                    Q = Convert.FromBase64String(dsaQ),
                    G = Convert.FromBase64String(dsaG),
                    Y = Convert.FromBase64String(dsaY),
                    //J = Convert.FromBase64String(dsaJ),
                    //Seed = Convert.FromBase64String(dsaSeed),
                    //Counter = BitConverter.ToUInt16(Convert.FromBase64String(dsaPgenCounter), 0)
                });
                DSAKeyValue dsa = new DSAKeyValue(key);
                info.AddClause(dsa);
                AssertCrypto.AssertXmlEquals("dsa", "<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><KeyValue xmlns=\"http://www.w3.org/2000/09/xmldsig#\">" + xmlDSA + "</KeyValue></KeyInfo>", (info.GetXml().OuterXml));
                Assert.Equal(1, info.Count);
            }
        }

        // Base64-encoded RSA public key parameters shared by RSAKeyValue() and Complex().
        private static string rsaModulus = "9DC4XNdQJwMRnz5pP2a6U51MHCODRilaIoVXqUPhCUb0lJdGroeqVYT84ZyIVrcarzD7Tqs3aEOIa3rKox0N1bxQpZPqayVQeLAkjLLtzJW/ScRJx3uEDJdgT1JnM1FH0GZTinmEdCUXdLc7+Y/c/qqIkTfbwHbRZjW0bBJyExM=";
        private static string rsaExponent = "AQAB";
        private static string xmlRSA = "<RSAKeyValue><Modulus>" + rsaModulus + "</Modulus><Exponent>" + rsaExponent + "</Exponent></RSAKeyValue>";

        [Fact]
        public void RSAKeyValue()
        {
            using (RSA key = RSA.Create())
            {
                key.ImportParameters(new RSAParameters()
                {
                    Modulus = Convert.FromBase64String(rsaModulus),
                    Exponent = Convert.FromBase64String(rsaExponent)
                });
                RSAKeyValue rsa = new RSAKeyValue(key);
                info.AddClause(rsa);
                AssertCrypto.AssertXmlEquals("rsa", "<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><KeyValue xmlns=\"http://www.w3.org/2000/09/xmldsig#\">" + xmlRSA + "</KeyValue></KeyInfo>", (info.GetXml().OuterXml));
                Assert.Equal(1, info.Count);
            }
        }

        [Fact]
        public void RetrievalMethod()
        {
            // A KeyInfoRetrievalMethod clause serializes its Uri as the URI attribute.
            KeyInfoRetrievalMethod retrieval = new KeyInfoRetrievalMethod();
            retrieval.Uri = "http://www.go-mono.org/";
            info.AddClause(retrieval);
            Assert.Equal("<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><RetrievalMethod URI=\"http://www.go-mono.org/\" /></KeyInfo>", (info.GetXml().OuterXml));
            Assert.Equal(1, info.Count);
        }

        // DER-encoded X.509 certificate (expired test certificate) used by
        // X509Data() and Complex(); its base64 form appears in the expected XML below.
        static byte[] cert = {
            0x30,0x82,0x02,0x1D,0x30,0x82,0x01,0x86,0x02,0x01,0x14,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,0x58,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x43,0x41,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x4B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,0x73,0x20,0x43,0x61,0x6E,0x61,0x64,0x61,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x0A,0x2B,0x06,0x01,0x04,0x01,0x2A,0x02,0x0B,0x02,0x01,0x13,0x18,0x6B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,
            0x73,0x40,0x6B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,0x73,0x2E,0x63,0x61,0x30,0x1E,0x17,0x0D,0x39,0x36,0x30,0x35,0x30,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x39,0x39,0x30,0x35,0x30,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x58,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x43,0x41,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x4B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,0x73,0x20,0x43,0x61,0x6E,0x61,0x64,0x61,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,
            0x0A,0x2B,0x06,0x01,0x04,0x01,0x2A,0x02,0x0B,0x02,0x01,0x13,0x18,0x6B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,0x73,0x40,0x6B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,0x73,0x2E,0x63,0x61,0x30,0x81,0x9D,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8B,0x00,0x30,0x81,0x87,0x02,0x81,0x81,0x00,0xCD,0x23,0xFA,0x2A,0xE1,0xED,0x98,0xF4,0xE9,0xD0,0x93,0x3E,0xD7,0x7A,0x80,0x02,0x4C,0xCC,0xC1,0x02,0xAF,0x5C,0xB6,0x1F,0x7F,0xFA,0x57,0x42,0x6F,0x30,0xD1,0x20,0xC5,0xB5,
            0x21,0x07,0x40,0x2C,0xA9,0x86,0xC2,0xF3,0x64,0x84,0xAE,0x3D,0x85,0x2E,0xED,0x85,0xBD,0x54,0xB0,0x18,0x28,0xEF,0x6A,0xF8,0x1B,0xE7,0x0B,0x16,0x1F,0x93,0x25,0x4F,0xC7,0xF8,0x8E,0xC3,0xB9,0xCA,0x98,0x84,0x0E,0x55,0xD0,0x2F,0xEF,0x78,0x77,0xC5,0x72,0x28,0x5F,0x60,0xBF,0x19,0x2B,0xD1,0x72,0xA2,0xB7,0xD8,0x3F,0xE0,0x97,0x34,0x5A,0x01,0xBD,0x04,0x9C,0xC8,0x78,0x45,0xCD,0x93,0x8D,0x15,0xF2,0x76,0x10,0x11,0xAB,0xB8,0x5B,0x2E,0x9E,0x52,0xDD,0x81,0x3E,0x9C,0x64,0xC8,0x29,0x93,0x02,0x01,0x03,0x30,0x0D,0x06,
            0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x03,0x81,0x81,0x00,0x32,0x1A,0x35,0xBA,0xBF,0x43,0x27,0xD6,0xB4,0xD4,0xB8,0x76,0xE5,0xE3,0x9B,0x4D,0x6C,0xC0,0x86,0xC9,0x77,0x35,0xBA,0x6B,0x16,0x2D,0x13,0x46,0x4A,0xB0,0x32,0x53,0xA1,0x5B,0x5A,0xE9,0x99,0xE2,0x0C,0x86,0x88,0x17,0x4E,0x0D,0xFE,0x82,0xAC,0x4E,0x47,0xEF,0xFB,0xFF,0x39,0xAC,0xEE,0x35,0xC8,0xFA,0x52,0x37,0x0A,0x49,0xAD,0x59,0xAD,0xE2,0x8A,0xA9,0x1C,0xC6,0x5F,0x1F,0xF8,0x6F,0x73,0x7E,0xCD,0xA0,0x31,0xE8,0x0C,0xBE,0xF5,0x4D,
            0xD9,0xB2,0xAB,0x8A,0x12,0xB6,0x30,0x78,0x68,0x11,0x7C,0x0D,0xF1,0x49,0x4D,0xA3,0xFD,0xB2,0xE9,0xFF,0x1D,0xF0,0x91,0xFA,0x54,0x85,0xFF,0x33,0x90,0xE8,0xC1,0xBF,0xA4,0x9B,0xA4,0x62,0x46,0xBD,0x61,0x12,0x59,0x98,0x41,0x89 };

        [Fact]
        public void X509Data()
        {
            using (X509Certificate x509 = new X509Certificate(cert))
            {
                KeyInfoX509Data x509data = new KeyInfoX509Data(x509);
                info.AddClause(x509data);
                AssertCrypto.AssertXmlEquals("X509Data", "<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><X509Data xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><X509Certificate>MIICHTCCAYYCARQwDQYJKoZIhvcNAQEEBQAwWDELMAkGA1UEBhMCQ0ExHzAdBgNVBAMTFktleXdpdG5lc3MgQ2FuYWRhIEluYy4xKDAmBgorBgEEASoCCwIBExhrZXl3aXRuZXNzQGtleXdpdG5lc3MuY2EwHhcNOTYwNTA3MDAwMDAwWhcNOTkwNTA3MDAwMDAwWjBYMQswCQYDVQQGEwJDQTEfMB0GA1UEAxMWS2V5d2l0bmVzcyBDYW5hZGEgSW5jLjEoMCYGCisGAQQBKgILAgETGGtleXdpdG5lc3NAa2V5d2l0bmVzcy5jYTCBnTANBgkqhkiG9w0BAQEFAAOBiwAwgYcCgYEAzSP6KuHtmPTp0JM+13qAAkzMwQKvXLYff/pXQm8w0SDFtSEHQCyphsLzZISuPYUu7YW9VLAYKO9q+BvnCxYfkyVPx/iOw7nKmIQOVdAv73h3xXIoX2C/GSvRcqK32D/glzRaAb0EnMh4Rc2TjRXydhARq7hbLp5S3YE+nGTIKZMCAQMwDQYJKoZIhvcNAQEEBQADgYEAMho1ur9DJ9a01Lh25eObTWzAhsl3NbprFi0TRkqwMlOhW1rpmeIMhogXTg3+gqxOR+/7/zms7jXI+lI3CkmtWa3iiqkcxl8f+G9zfs2gMegMvvVN2bKrihK2MHhoEXwN8UlNo/2y6f8d8JH6VIX/M5Dowb+km6RiRr1hElmYQYk=</X509Certificate></X509Data></KeyInfo>", (info.GetXml().OuterXml));
                Assert.Equal(1, info.Count);
            }
        }

        [Fact]
        public void Complex()
        {
            // Adds one clause of every kind tested above and verifies the
            // combined serialization (and that Count reflects all five clauses).
            KeyInfoName name = new KeyInfoName();
            name.Value = "CoreFx::";
            info.AddClause(name);
            using (DSA keyDSA = DSA.Create())
            {
                keyDSA.ImportParameters(new DSAParameters
                {
                    P = Convert.FromBase64String(dsaP),
                    Q = Convert.FromBase64String(dsaQ),
                    G = Convert.FromBase64String(dsaG),
                    Y = Convert.FromBase64String(dsaY),
                });
                DSAKeyValue dsa = new DSAKeyValue(keyDSA);
                info.AddClause(dsa);
                using (RSA keyRSA = RSA.Create())
                {
                    keyRSA.ImportParameters(new RSAParameters()
                    {
                        Modulus = Convert.FromBase64String(rsaModulus),
                        Exponent = Convert.FromBase64String(rsaExponent)
                    });
                    RSAKeyValue rsa = new RSAKeyValue(keyRSA);
                    info.AddClause(rsa);
                    KeyInfoRetrievalMethod retrieval = new KeyInfoRetrievalMethod();
                    retrieval.Uri = "https://github.com/dotnet/corefx";
                    info.AddClause(retrieval);
                    using (X509Certificate x509 = new X509Certificate(cert))
                    {
                        KeyInfoX509Data x509data = new KeyInfoX509Data(x509);
                        info.AddClause(x509data);
                        string s = "<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><KeyName>CoreFx::</KeyName><KeyValue xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><DSAKeyValue><P>rjxsMU368YOCTQejWkiuO9e/vUVwkLtq1jKiU3TtJ53hBJqjFRuTa228vZe+BH2su9RPn/vYFWfQDv6zgBYe3eNdu4Afw+Ny0FatX6dl3E77Ra6Tsd3MmLXBiGSQ1mMNd5G2XQGpbt9zsGlUaexXekeMLxIufgfZLwYp67M+2WM=</P><Q>tf0K9rMyvUrU4cIkwbCrDRhQAJk=</Q><G>S8Z+1pGCed00w6DtVcqZLKjfqlCJ7JsugEFIgSy/Vxtu9YGCMclV4ijGEbPo/jU8YOSMuD7E9M7UaopMRcmKQjoKZzoJjkgVFP48Ohxl1f08lERnButsxanx3+OstFwUGQ8XNaGg3KrIoZt1FUnfxN3RHHTvVhjzNSHxMGULGaU=</G><Y>LnrxxRGLYeV2XLtK3SYz8RQHlHFZYrtznDZyMotuRfO5uC5YODhSFyLXvb1qB3WeGtF4h3Eo4KzHgMgfN2ZMlffxFRhJgTtH3ctbL8lfQoDkjeiPPnYGhspdJxr0tyZmiy0gkjJG3vwHYrLnvZWx9Wm/unqiOlGBPNuxJ+hOeP8=</Y></DSAKeyValue></KeyValue>";
                        s += "<KeyValue xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><RSAKeyValue><Modulus>9DC4XNdQJwMRnz5pP2a6U51MHCODRilaIoVXqUPhCUb0lJdGroeqVYT84ZyIVrcarzD7Tqs3aEOIa3rKox0N1bxQpZPqayVQeLAkjLLtzJW/ScRJx3uEDJdgT1JnM1FH0GZTinmEdCUXdLc7+Y/c/qqIkTfbwHbRZjW0bBJyExM=</Modulus><Exponent>AQAB</Exponent></RSAKeyValue></KeyValue>";
                        s += "<RetrievalMethod URI=\"https://github.com/dotnet/corefx\" />";
                        s += "<X509Data xmlns=\"http://www.w3.org/2000/09/xmldsig#\">";
                        s += "<X509Certificate>MIICHTCCAYYCARQwDQYJKoZIhvcNAQEEBQAwWDELMAkGA1UEBhMCQ0ExHzAdBgNVBAMTFktleXdpdG5lc3MgQ2FuYWRhIEluYy4xKDAmBgorBgEEASoCCwIBExhrZXl3aXRuZXNzQGtleXdpdG5lc3MuY2EwHhcNOTYwNTA3MDAwMDAwWhcNOTkwNTA3MDAwMDAwWjBYMQswCQYDVQQGEwJDQTEfMB0GA1UEAxMWS2V5d2l0bmVzcyBDYW5hZGEgSW5jLjEoMCYGCisGAQQBKgILAgETGGtleXdpdG5lc3NAa2V5d2l0bmVzcy5jYTCBnTANBgkqhkiG9w0BAQEFAAOBiwAwgYcCgYEAzSP6KuHtmPTp0JM+13qAAkzMwQKvXLYff/pXQm8w0SDFtSEHQCyphsLzZISuPYUu7YW9VLAYKO9q+BvnCxYfkyVPx/iOw7nKmIQOVdAv73h3xXIoX2C/GSvRcqK32D/glzRaAb0EnMh4Rc2TjRXydhARq7hbLp5S3YE+nGTIKZMCAQMwDQYJKoZIhvcNAQEEBQADgYEAMho1ur9DJ9a01Lh25eObTWzAhsl3NbprFi0TRkqwMlOhW1rpmeIMhogXTg3+gqxOR+/7/zms7jXI+lI3CkmtWa3iiqkcxl8f+G9zfs2gMegMvvVN2bKrihK2MHhoEXwN8UlNo/2y6f8d8JH6VIX/M5Dowb+km6RiRr1hElmYQYk=</X509Certificate></X509Data></KeyInfo>";
                        AssertCrypto.AssertXmlEquals("Complex", s, (info.GetXml().OuterXml));
                        Assert.Equal(5, info.Count);
                    }
                }
            }
        }

        [Fact(Skip = "https://github.com/dotnet/corefx/issues/16779")]
        public void ImportKeyNode()
        {
            // LoadXml round-trip of a full KeyInfo (note: <RetrievalElement> is
            // deliberately not a known clause name, so it imports as a generic node).
            string value = "<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><KeyName>Mono::</KeyName><KeyValue xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><DSAKeyValue><P>rjxsMU368YOCTQejWkiuO9e/vUVwkLtq1jKiU3TtJ53hBJqjFRuTa228vZe+BH2su9RPn/vYFWfQDv6zgBYe3eNdu4Afw+Ny0FatX6dl3E77Ra6Tsd3MmLXBiGSQ1mMNd5G2XQGpbt9zsGlUaexXekeMLxIufgfZLwYp67M+2WM=</P><Q>tf0K9rMyvUrU4cIkwbCrDRhQAJk=</Q><G>S8Z+1pGCed00w6DtVcqZLKjfqlCJ7JsugEFIgSy/Vxtu9YGCMclV4ijGEbPo/jU8YOSMuD7E9M7UaopMRcmKQjoKZzoJjkgVFP48Ohxl1f08lERnButsxanx3+OstFwUGQ8XNaGg3KrIoZt1FUnfxN3RHHTvVhjzNSHxMGULGaU=</G><Y>LnrxxRGLYeV2XLtK3SYz8RQHlHFZYrtznDZyMotuRfO5uC5YODhSFyLXvb1qB3WeGtF4h3Eo4KzHgMgfN2ZMlffxFRhJgTtH3ctbL8lfQoDkjeiPPnYGhspdJxr0tyZmiy0gkjJG3vwHYrLnvZWx9Wm/unqiOlGBPNuxJ+hOeP8=</Y><J>9RhE5TycDtdEIXxS3HfxFyXYgpy81zY5lVjwD6E9JP37MWEi80BlX6ab1YPm6xYSEoqReMPP9RgGiW6DuACpgI7+8vgCr4i/7VhzModJAA56PwvTu6UMt9xxKU/fT672v8ucREkMWoc7lEey</J><Seed>HxW3N4RHWVgqDQKuGg7iJTUTiCs=</Seed><PgenCounter>Asw=</PgenCounter></DSAKeyValue></KeyValue>";
            value += "<KeyValue xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><RSAKeyValue><Modulus>9DC4XNdQJwMRnz5pP2a6U51MHCODRilaIoVXqUPhCUb0lJdGroeqVYT84ZyIVrcarzD7Tqs3aEOIa3rKox0N1bxQpZPqayVQeLAkjLLtzJW/ScRJx3uEDJdgT1JnM1FH0GZTinmEdCUXdLc7+Y/c/qqIkTfbwHbRZjW0bBJyExM=</Modulus><Exponent>AQAB</Exponent></RSAKeyValue></KeyValue><RetrievalElement URI=\"http://www.go-mono.org/\" /><X509Data xmlns=\"http://www.w3.org/2000/09/xmldsig#\">";
            value += "<X509Certificate>MIICHTCCAYYCARQwDQYJKoZIhvcNAQEEBQAwWDELMAkGA1UEBhMCQ0ExHzAdBgNVBAMTFktleXdpdG5lc3MgQ2FuYWRhIEluYy4xKDAmBgorBgEEASoCCwIBExhrZXl3aXRuZXNzQGtleXdpdG5lc3MuY2EwHhcNOTYwNTA3MDAwMDAwWhcNOTkwNTA3MDAwMDAwWjBYMQswCQYDVQQGEwJDQTEfMB0GA1UEAxMWS2V5d2l0bmVzcyBDYW5hZGEgSW5jLjEoMCYGCisGAQQBKgILAgETGGtleXdpdG5lc3NAa2V5d2l0bmVzcy5jYTCBnTANBgkqhkiG9w0BAQEFAAOBiwAwgYcCgYEAzSP6KuHtmPTp0JM+13qAAkzMwQKvXLYff/pXQm8w0SDFtSEHQCyphsLzZISuPYUu7YW9VLAYKO9q+BvnCxYfkyVPx/iOw7nKmIQOVdAv73h3xXIoX2C/GSvRcqK32D/glzRaAb0EnMh4Rc2TjRXydhARq7hbLp5S3YE+nGTIKZMCAQMwDQYJKoZIhvcNAQEEBQADgYEAMho1ur9DJ9a01Lh25eObTWzAhsl3NbprFi0TRkqwMlOhW1rpmeIMhogXTg3+gqxOR+/7/zms7jXI+lI3CkmtWa3iiqkcxl8f+G9zfs2gMegMvvVN2bKrihK2MHhoEXwN8UlNo/2y6f8d8JH6VIX/M5Dowb+km6RiRr1hElmYQYk=</X509Certificate></X509Data></KeyInfo>";
            XmlDocument doc = new XmlDocument();
            doc.LoadXml(value);
            info.LoadXml(doc.DocumentElement);
            AssertCrypto.AssertXmlEquals("Import", value, (info.GetXml().OuterXml));
            Assert.Equal(5, info.Count);
        }

        [Fact]
        public void NullClause()
        {
            Assert.Equal(0, info.Count);
            // null is accepted...
            info.AddClause(null);
            Assert.Equal(1, info.Count);
            // ...but can't get XML out of it!
            Assert.Throws<NullReferenceException>(() => info.GetXml());
        }

        [Fact]
        public void NullXml()
        {
            Assert.Throws<ArgumentNullException>(() => info.LoadXml(null));
        }

        [Fact]
        public void InvalidXml()
        {
            string bad = "<Test></Test>";
            XmlDocument doc = new XmlDocument();
            doc.LoadXml(bad);
            info.LoadXml(doc.DocumentElement);
            // LAMESPEC: no exception is thrown, but the XML isn't loaded either
            Assert.Equal("<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\" />", (info.GetXml().OuterXml));
            Assert.Equal(0, info.Count);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.IO;
using System.Linq;

namespace System.Net.NetworkInformation
{
    /// <summary>
    /// Implements a NetworkInterface on Linux.
    /// </summary>
    internal class LinuxNetworkInterface : UnixNetworkInterface
    {
        // Snapshot of /sys/class/net/<name>/* taken once at construction time.
        // Nullable fields mean "could not be determined"; the corresponding
        // properties surface that as PlatformNotSupportedException.
        private readonly OperationalStatus _operationalStatus;
        private readonly bool? _supportsMulticast;
        private readonly long? _speed;
        private readonly LinuxIPInterfaceProperties _ipProperties;

        internal LinuxNetworkInterface(string name) : base(name)
        {
            _operationalStatus = GetOperationalStatus(name);
            _supportsMulticast = GetSupportsMulticast(name);
            _speed = GetSpeed(name);
            _ipProperties = new LinuxIPInterfaceProperties(this);
        }

        // Enumerates all interfaces and their addresses via native callbacks,
        // retrying up to MaxTries times if the enumeration fails part-way.
        public static unsafe NetworkInterface[] GetLinuxNetworkInterfaces()
        {
            Dictionary<string, LinuxNetworkInterface> interfacesByName = new Dictionary<string, LinuxNetworkInterface>();
            List<Exception> exceptions = null;
            const int MaxTries = 3;
            for (int attempt = 0; attempt < MaxTries; attempt++)
            {
                // Because these callbacks are executed in a reverse-PInvoke, we do not want any exceptions
                // to propagate out, because they will not be catchable. Instead, we track all the exceptions
                // that are thrown in these callbacks, and aggregate them at the end.
                int result = Interop.Sys.EnumerateInterfaceAddresses(
                    (name, ipAddr, maskAddr) =>
                    {
                        try
                        {
                            LinuxNetworkInterface lni = GetOrCreate(interfacesByName, name);
                            lni.ProcessIpv4Address(ipAddr, maskAddr);
                        }
                        catch (Exception e)
                        {
                            if (exceptions == null)
                            {
                                exceptions = new List<Exception>();
                            }
                            exceptions.Add(e);
                        }
                    },
                    (name, ipAddr, scopeId) =>
                    {
                        try
                        {
                            LinuxNetworkInterface lni = GetOrCreate(interfacesByName, name);
                            lni.ProcessIpv6Address(ipAddr, *scopeId);
                        }
                        catch (Exception e)
                        {
                            if (exceptions == null)
                            {
                                exceptions = new List<Exception>();
                            }
                            exceptions.Add(e);
                        }
                    },
                    (name, llAddr) =>
                    {
                        try
                        {
                            LinuxNetworkInterface lni = GetOrCreate(interfacesByName, name);
                            lni.ProcessLinkLayerAddress(llAddr);
                        }
                        catch (Exception e)
                        {
                            if (exceptions == null)
                            {
                                exceptions = new List<Exception>();
                            }
                            exceptions.Add(e);
                        }
                    });

                if (exceptions != null)
                {
                    throw new NetworkInformationException(SR.net_PInvokeError, new AggregateException(exceptions));
                }
                else if (result == 0)
                {
                    return interfacesByName.Values.ToArray();
                }
                else
                {
                    // Native enumeration failed mid-way; discard partial results and retry.
                    interfacesByName.Clear();
                }
            }

            throw new NetworkInformationException(SR.net_PInvokeError);
        }

        /// <summary>
        /// Gets or creates a LinuxNetworkInterface, based on whether it already exists in the given Dictionary.
        /// If created, it is added to the Dictionary.
        /// </summary>
        /// <param name="interfaces">The Dictionary of existing interfaces.</param>
        /// <param name="name">The name of the interface.</param>
        /// <returns>The cached or new LinuxNetworkInterface with the given name.</returns>
        private static LinuxNetworkInterface GetOrCreate(Dictionary<string, LinuxNetworkInterface> interfaces, string name)
        {
            LinuxNetworkInterface lni;
            if (!interfaces.TryGetValue(name, out lni))
            {
                lni = new LinuxNetworkInterface(name);
                interfaces.Add(name, lni);
            }

            return lni;
        }

        public override bool SupportsMulticast
        {
            get
            {
                if (_supportsMulticast.HasValue)
                {
                    return _supportsMulticast.Value;
                }
                else
                {
                    throw new PlatformNotSupportedException(SR.net_InformationUnavailableOnPlatform);
                }
            }
        }

        // Reads the IFF_MULTICAST bit from the interface flags; null when the
        // flags file is missing or unreadable.
        private static bool? GetSupportsMulticast(string name)
        {
            // /sys/class/net/<interface_name>/flags
            string path = Path.Combine(NetworkFiles.SysClassNetFolder, name, NetworkFiles.FlagsFileName);
            if (File.Exists(path))
            {
                try
                {
                    Interop.LinuxNetDeviceFlags flags = (Interop.LinuxNetDeviceFlags)StringParsingHelpers.ParseRawHexFileAsInt(path);
                    return (flags & Interop.LinuxNetDeviceFlags.IFF_MULTICAST) == Interop.LinuxNetDeviceFlags.IFF_MULTICAST;
                }
                catch (Exception) // Ignore any problems accessing or parsing the file.
                {
                }
            }

            return null;
        }

        public override IPInterfaceProperties GetIPProperties()
        {
            return _ipProperties;
        }

        public override IPInterfaceStatistics GetIPStatistics()
        {
            return new LinuxIPInterfaceStatistics(_name);
        }

        public override IPv4InterfaceStatistics GetIPv4Statistics()
        {
            return new LinuxIPv4InterfaceStatistics(_name);
        }

        public override OperationalStatus OperationalStatus { get { return _operationalStatus; } }

        public override long Speed
        {
            get
            {
                if (_speed.HasValue)
                {
                    return _speed.Value;
                }
                else
                {
                    throw new PlatformNotSupportedException(SR.net_InformationUnavailableOnPlatform);
                }
            }
        }

        public override bool IsReceiveOnly { get { throw new PlatformNotSupportedException(SR.net_InformationUnavailableOnPlatform); } }

        // Reads /sys/class/net/<name>/speed (megabits per second); -1 is passed
        // through unchanged (kernel's "speed unknown" sentinel), and null means
        // the file could not be read at all.
        private static long? GetSpeed(string name)
        {
            try
            {
                string path = Path.Combine(NetworkFiles.SysClassNetFolder, name, NetworkFiles.SpeedFileName);
                long megabitsPerSecond = StringParsingHelpers.ParseRawLongFile(path);
                return megabitsPerSecond == -1
                    ? megabitsPerSecond
                    : megabitsPerSecond * 1_000_000; // Value must be returned in bits per second, not megabits.
            }
            catch (Exception) // Ignore any problems accessing or parsing the file.
            {
                return null;
            }
        }

        private static OperationalStatus GetOperationalStatus(string name)
        {
            // /sys/class/net/<name>/operstate
            string path = Path.Combine(NetworkFiles.SysClassNetFolder, name, NetworkFiles.OperstateFileName);
            if (File.Exists(path))
            {
                try
                {
                    string state = File.ReadAllText(path).Trim();
                    return MapState(state);
                }
                catch (Exception) // Ignore any problems accessing or parsing the file.
                {
                }
            }

            return OperationalStatus.Unknown;
        }

        // Maps values from /sys/class/net/<interface>/operstate to OperationalStatus values.
        // See: http://users.sosdg.org/~qiyong/lxr/source/Documentation/networking/operstates.txt?a=um#L41
        private static OperationalStatus MapState(string state)
        {
            switch (state)
            {
                case "unknown":
                    return OperationalStatus.Unknown;
                case "notpresent":
                    return OperationalStatus.NotPresent;
                case "down":
                    return OperationalStatus.Down;
                case "lowerlayerdown":
                    return OperationalStatus.LowerLayerDown;
                case "testing":
                    return OperationalStatus.Testing;
                case "dormant":
                    return OperationalStatus.Dormant;
                case "up":
                    return OperationalStatus.Up;
                default:
                    return OperationalStatus.Unknown;
            }
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace AzureBrowserApp.Areas.HelpPage
{
    /// <summary>
    /// This class will create an object of a given type and populate it with sample data.
    /// </summary>
    /// <remarks>
    /// Supported shapes: simple types (<see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>,
    /// <see cref="DateTime"/>, <see cref="Uri"/>, etc.), <see cref="Nullable{T}"/>, arrays,
    /// <see cref="KeyValuePair{TKey,TValue}"/>, tuples, dictionaries
    /// (<see cref="IDictionary{TKey,TValue}"/> or derived), collections (<see cref="IList{T}"/>,
    /// <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>,
    /// <see cref="IEnumerable"/>, <see cref="ICollection"/> or derived), queryables
    /// (<see cref="IQueryable"/>, <see cref="IQueryable{T}"/>), and public POCO types with a
    /// public default constructor and settable public properties/fields.
    /// </remarks>
    public class ObjectGenerator
    {
        internal const int DefaultCollectionSize = 2;
        private readonly SimpleTypeObjectGenerator _simpleGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public
        /// default constructor and settable public properties/fields.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type, or null when no sample can be produced.</returns>
        public object GenerateObject(Type type)
        {
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> seenObjects)
        {
            // Dispatch on the shape of the requested type; any failure yields null.
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return _simpleGenerator.GenerateObject(type);
                }

                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, seenObjects);
                }

                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, seenObjects);
                }

                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, seenObjects);
                }

                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, seenObjects);
                }

                if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, seenObjects);
                }

                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, seenObjects);
                }

                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, seenObjects);
                }

                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }

                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, seenObjects);
                }
            }
            catch
            {
                // Sample generation is best-effort: swallow and fall through to null.
                return null;
            }

            return null;
        }

        // Handles any generic type: Nullable<T>, KeyValuePair<,>, tuples, generic
        // collection/dictionary interfaces and implementations, and generic POCOs.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> seenObjects)
        {
            Type definition = type.GetGenericTypeDefinition();
            if (definition == typeof(Nullable<>))
            {
                return GenerateNullable(type, seenObjects);
            }

            if (definition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, seenObjects);
            }

            if (IsTuple(definition))
            {
                return GenerateTuple(type, seenObjects);
            }

            Type[] typeArgs = type.GetGenericArguments();
            if (typeArgs.Length == 1)
            {
                if (definition == typeof(IList<>) || definition == typeof(IEnumerable<>) || definition == typeof(ICollection<>))
                {
                    // For collection interfaces, materialize a List<T>.
                    return GenerateCollection(typeof(List<>).MakeGenericType(typeArgs), collectionSize, seenObjects);
                }

                if (definition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, seenObjects);
                }

                if (typeof(ICollection<>).MakeGenericType(typeArgs[0]).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, seenObjects);
                }
            }
            else if (typeArgs.Length == 2)
            {
                if (definition == typeof(IDictionary<,>))
                {
                    // For the dictionary interface, materialize a Dictionary<K,V>.
                    return GenerateDictionary(typeof(Dictionary<,>).MakeGenericType(typeArgs), collectionSize, seenObjects);
                }

                if (typeof(IDictionary<,>).MakeGenericType(typeArgs[0], typeArgs[1]).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, seenObjects);
                }
            }

            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, seenObjects);
            }

            return null;
        }

        // Builds a Tuple<...> from samples of its component types; null only
        // when no component could be generated at all.
        private static object GenerateTuple(Type type, Dictionary<Type, object> seenObjects)
        {
            Type[] typeArgs = type.GetGenericArguments();
            object[] componentValues = new object[typeArgs.Length];
            ObjectGenerator generator = new ObjectGenerator();
            bool allComponentsNull = true;
            for (int i = 0; i < typeArgs.Length; i++)
            {
                componentValues[i] = generator.GenerateObject(typeArgs[i], seenObjects);
                allComponentsNull &= componentValues[i] == null;
            }

            if (allComponentsNull)
            {
                return null;
            }

            return Activator.CreateInstance(type, componentValues);
        }

        // True for any of the Tuple<...> generic definitions (1 through 8 components).
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                   genericTypeDefinition == typeof(Tuple<,>) ||
                   genericTypeDefinition == typeof(Tuple<,,>) ||
                   genericTypeDefinition == typeof(Tuple<,,,>) ||
                   genericTypeDefinition == typeof(Tuple<,,,,>) ||
                   genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                   genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                   genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        // Builds a KeyValuePair<K,V>; null only when neither component could be generated.
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> seenObjects)
        {
            Type[] typeArgs = keyValuePairType.GetGenericArguments();
            ObjectGenerator generator = new ObjectGenerator();
            object key = generator.GenerateObject(typeArgs[0], seenObjects);
            object value = generator.GenerateObject(typeArgs[1], seenObjects);
            if (key == null && value == null)
            {
                // Failed to create both the key and the value.
                return null;
            }

            return Activator.CreateInstance(keyValuePairType, key, value);
        }

        // Builds an array of sample elements; null when every element came back null.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> seenObjects)
        {
            Type elementType = arrayType.GetElementType();
            Array array = Array.CreateInstance(elementType, size);
            ObjectGenerator generator = new ObjectGenerator();
            bool allElementsNull = true;
            for (int i = 0; i < size; i++)
            {
                object element = generator.GenerateObject(elementType, seenObjects);
                array.SetValue(element, i);
                allElementsNull &= element == null;
            }

            if (allElementsNull)
            {
                return null;
            }

            return array;
        }

        // Populates a dictionary via reflection (Add/TryAdd and Contains/ContainsKey),
        // skipping duplicate keys; null when a usable key cannot be generated.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> seenObjects)
        {
            Type keyType = typeof(object);
            Type valueType = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] typeArgs = dictionaryType.GetGenericArguments();
                keyType = typeArgs[0];
                valueType = typeArgs[1];
            }

            object dictionary = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator generator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object key = generator.GenerateObject(keyType, seenObjects);
                if (key == null)
                {
                    // Cannot generate a valid key.
                    return null;
                }

                bool alreadyPresent = (bool)containsMethod.Invoke(dictionary, new object[] { key });
                if (!alreadyPresent)
                {
                    object value = generator.GenerateObject(valueType, seenObjects);
                    addMethod.Invoke(dictionary, new object[] { key, value });
                }
            }

            return dictionary;
        }

        // Returns the first declared value of the enum, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array values = Enum.GetValues(enumType);
            return values.Length > 0 ? values.GetValue(0) : null;
        }

        // Builds an IQueryable (generic or not) by wrapping a generated list/array
        // with Queryable.AsQueryable.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> seenObjects)
        {
            bool isGeneric = queryableType.IsGenericType;
            object source = isGeneric
                ? GenerateCollection(typeof(List<>).MakeGenericType(queryableType.GetGenericArguments()), size, seenObjects)
                : GenerateArray(typeof(object[]), size, seenObjects);
            if (source == null)
            {
                return null;
            }

            if (isGeneric)
            {
                Type enumerableType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryable = typeof(Queryable).GetMethod("AsQueryable", new[] { enumerableType });
                return asQueryable.Invoke(null, new[] { source });
            }

            return Queryable.AsQueryable((IEnumerable)source);
        }

        // Fills any type exposing an Add method; null when every element came back null.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> seenObjects)
        {
            Type elementType = collectionType.IsGenericType
                ? collectionType.GetGenericArguments()[0]
                : typeof(object);
            object collection = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            ObjectGenerator generator = new ObjectGenerator();
            bool allElementsNull = true;
            for (int i = 0; i < size; i++)
            {
                object element = generator.GenerateObject(elementType, seenObjects);
                addMethod.Invoke(collection, new object[] { element });
                allElementsNull &= element == null;
            }

            if (allElementsNull)
            {
                return null;
            }

            return collection;
        }

        // A Nullable<T> sample is simply a (boxed) sample of its underlying type.
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> seenObjects)
        {
            Type underlyingType = nullableType.GetGenericArguments()[0];
            return new ObjectGenerator().GenerateObject(underlyingType, seenObjects);
        }

        // Instantiates a POCO (value type or default-constructible class) and fills
        // its public settable properties and fields with sample data.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> seenObjects)
        {
            object instance;
            if (seenObjects.TryGetValue(type, out instance))
            {
                // The object has been created already; reusing it handles circular references.
                return instance;
            }

            if (type.IsValueType)
            {
                instance = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor.
                    return null;
                }

                instance = defaultCtor.Invoke(new object[0]);
            }

            // Register before populating so recursive members resolve to this instance.
            seenObjects.Add(type, instance);
            SetPublicProperties(type, instance, seenObjects);
            SetPublicFields(type, instance, seenObjects);
            return instance;
        }

        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> seenObjects)
        {
            ObjectGenerator generator = new ObjectGenerator();
            foreach (PropertyInfo property in type.GetProperties(BindingFlags.Public | BindingFlags.Instance))
            {
                if (property.CanWrite)
                {
                    object sample = generator.GenerateObject(property.PropertyType, seenObjects);
                    property.SetValue(obj, sample, null);
                }
            }
        }

        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> seenObjects)
        {
            ObjectGenerator generator = new ObjectGenerator();
            foreach (FieldInfo field in type.GetFields(BindingFlags.Public | BindingFlags.Instance))
            {
                object sample = generator.GenerateObject(field.FieldType, seenObjects);
                field.SetValue(obj, sample);
            }
        }

        // Produces deterministic sample values for framework "simple" types;
        // a per-instance counter makes successive samples distinct.
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), i => true },
                    { typeof(Byte), i => (Byte)64 },
                    { typeof(Char), i => (Char)65 },
                    { typeof(DateTime), i => DateTime.Now },
                    { typeof(DateTimeOffset), i => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), i => DBNull.Value },
                    { typeof(Decimal), i => (Decimal)i },
                    { typeof(Double), i => (Double)(i + 0.1) },
                    { typeof(Guid), i => Guid.NewGuid() },
                    { typeof(Int16), i => (Int16)(i % Int16.MaxValue) },
                    { typeof(Int32), i => (Int32)(i % Int32.MaxValue) },
                    { typeof(Int64), i => (Int64)i },
                    { typeof(Object), i => new object() },
                    { typeof(SByte), i => (SByte)64 },
                    { typeof(Single), i => (Single)(i + 0.1) },
                    { typeof(String), i => String.Format(CultureInfo.CurrentCulture, "sample string {0}", i) },
                    { typeof(TimeSpan), i => TimeSpan.FromTicks(1234567) },
                    { typeof(UInt16), i => (UInt16)(i % UInt16.MaxValue) },
                    { typeof(UInt32), i => (UInt32)(i % UInt32.MaxValue) },
                    { typeof(UInt64), i => (UInt64)i },
                    { typeof(Uri), i => new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", i)) },
                };
            }

            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                // ++_index makes "sample string 1", "sample string 2", ... across calls.
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
// // ContentValues.cs // // Author: // Zachary Gramana <zack@xamarin.com> // // Copyright (c) 2014 Xamarin Inc // Copyright (c) 2014 .NET Foundation // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // // // Copyright (c) 2014 Couchbase, Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file // except in compliance with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the // License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, // either express or implied. See the License for the specific language governing permissions // and limitations under the License. 
//

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

using Couchbase.Lite.Util;

#if SQLITE
namespace Couchbase.Lite.Storage.SystemSQLite
#elif CUSTOM_SQLITE
namespace Couchbase.Lite.Storage.CustomSQLite
#else
namespace Couchbase.Lite.Storage.SQLCipher
#endif
{
    /// <summary>
    /// A class for holding arbitrary values for binding to SQL statements and such
    /// </summary>
    public sealed class ContentValues // TODO: Create Add override and refactor to use initializer syntax.
    {
        #region Constants

        // Tag used for logging; not referenced in this file's visible code.
        private const string Tag = "ContentValues";

        #endregion

        #region Variables

        //The actual container for storing the values
        private readonly Dictionary<string, object> mValues;

        #endregion

        #region Properties

        // Direct read/write access to a stored value; getter throws
        // KeyNotFoundException for a missing key (use Get for null-on-missing).
        internal object this[string key]
        {
            get { return mValues[key]; }
            set { mValues[key] = value; }
        }

        #endregion

        #region Constructors

        /// <summary>
        /// Creates an empty set of values using the default initial size
        /// </summary>
        public ContentValues()
        {
            // COPY: Copied from android.content.ContentValues
            // Choosing a default size of 8 based on analysis of typical
            // consumption by applications.
            mValues = new Dictionary<string, object>(8);
        }

        /// <summary>
        /// Creates an empty set of values using the given initial size
        /// </summary>
        /// <param name="size">the initial size of the set of values</param>
        public ContentValues(int size)
        {
            mValues = new Dictionary<String, Object>(size);
        }

        /// <summary>
        /// Creates a set of values copied from the given set
        /// </summary>
        /// <param name="from">The values to copy</param>
        public ContentValues(ContentValues from)
        {
            // Shallow copy: values themselves are shared between the two instances.
            mValues = new Dictionary<string, object>(from.mValues);
        }

        #endregion

        #region Public Methods

        /// <summary>Adds a value to the set.</summary>
        /// <param name="key">the name of the value to put</param>
        /// <param name="value">the data for the value to put</param>
        public void Put(string key, string value)
        {
            mValues[key] = value;
        }

        /// <summary>Adds all values from the passed in ContentValues.</summary>
        /// <param name="other">the ContentValues from which to copy</param>
        public void PutAll(ContentValues other)
        {
            mValues.PutAll(other.mValues);
        }

        /// <summary>Adds a value to the set.</summary>
        /// <param name="key">the name of the value to put</param>
        /// <param name="value">the data for the value to put</param>
        public void Put(string key, byte value)
        {
            mValues[key] = value;
        }

        /// <summary>Adds a value to the set.</summary>
        /// <param name="key">the name of the value to put</param>
        /// <param name="value">the data for the value to put</param>
        public void Put(string key, short value)
        {
            mValues[key] = value;
        }

        /// <summary>Adds a value to the set.</summary>
        /// <param name="key">the name of the value to put</param>
        /// <param name="value">the data for the value to put</param>
        public void Put(string key, int value)
        {
            mValues[key] = value;
        }

        /// <summary>Adds a value to the set.</summary>
        /// <param name="key">the name of the value to put</param>
        /// <param name="value">the data for the value to put</param>
        public void Put(string key, long value)
        {
            mValues[key] = value;
        }

        /// <summary>Adds a value to the set.</summary>
        /// <param name="key">the name of the value to put</param>
        /// <param name="value">the data for the value to put</param>
        public void Put(string key, float value)
        {
            mValues[key] = value;
        }

        /// <summary>Adds a value to the set.</summary>
        /// <param name="key">the name of the value to put</param>
        /// <param name="value">the data for the value to put</param>
        public void Put(string key, double value)
        {
            mValues[key] = value;
        }

        /// <summary>Adds a value to the set.</summary>
        /// <param name="key">the name of the value to put</param>
        /// <param name="value">the data for the value to put</param>
        public void Put(string key, bool value)
        {
            mValues[key] = value;
        }

        /// <summary>Adds a value to the set.</summary>
        /// <param name="key">the name of the value to put</param>
        /// <param name="value">the data for the value to put</param>
        public void Put(string key, IEnumerable<Byte> value)
        {
            mValues[key] = value;
        }

        /// <summary>Adds a null value to the set.</summary>
        /// <param name="key">the name of the value to make null</param>
        public void PutNull(string key)
        {
            mValues[key] = null;
        }

        /// <summary>Returns the number of values.</summary>
        /// <returns>the number of values</returns>
        public int Size()
        {
            return mValues.Count;
        }

        /// <summary>Remove a single value.</summary>
        /// <param name="key">the name of the value to remove</param>
        public void Remove(string key)
        {
            mValues.Remove(key);
        }

        /// <summary>Removes all values.</summary>
        public void Clear()
        {
            mValues.Clear();
        }

        /// <summary>Returns true if this object has the named value.</summary>
        /// <param name="key">the value to check for</param>
        /// <returns>
        ///
        /// <code>true</code>
        /// if the value is present,
        /// <code>false</code>
        /// otherwise
        /// </returns>
        public bool ContainsKey(string key)
        {
            return mValues.ContainsKey(key);
        }

        /// <summary>
        /// Returns the value of the specified key, or null if not present
        /// </summary>
        /// <param name="key">The key to check</param>
        public object Get(string key)
        {
            return mValues.Get(key);
        }

        /// <summary>
        /// Gets a value and converts it to a String.
        /// </summary>
        /// <param name="key">the value to get</param>
        /// <returns>the String for the value</returns>
        public string GetAsString(string key)
        {
            object value = mValues.Get(key);
            return value != null ? value.ToString() : null;
        }

        /// <summary>
        /// Gets a value and converts it to a Long.
        /// </summary>
        /// <param name="key">the value to get</param>
        /// <returns>the Long value, or null if the value is missing or cannot be converted</returns>
        public long? GetAsLong(string key)
        {
            return mValues.GetNullable<long>(key);
        }

        /// <summary>
        /// Gets a value and converts it to an Integer.
        /// </summary>
        /// <param name="key">the value to get</param>
        /// <returns>the Integer value, or null if the value is missing or cannot be converted</returns>
        public int? GetAsInteger(string key)
        {
            return mValues.GetNullable<int>(key);
        }

        /// <summary>
        /// Gets a value and converts it to a Short.
        /// </summary>
        /// <param name="key">the value to get</param>
        /// <returns>the Short value, or null if the value is missing or cannot be converted</returns>
        public short? GetAsShort(string key)
        {
            return mValues.GetNullable<short>(key);
        }

        /// <summary>
        /// Gets a value and converts it to a Byte.
        /// </summary>
        /// <param name="key">the value to get</param>
        /// <returns>the Byte value, or null if the value is missing or cannot be converted</returns>
        public byte? GetAsByte(string key)
        {
            return mValues.GetNullable<byte>(key);
        }

        /// <summary>
        /// Gets a value and converts it to a Double.
        /// </summary>
        /// <param name="key">the value to get</param>
        /// <returns>the Double value, or null if the value is missing or cannot be converted</returns>
        public double? GetAsDouble(string key)
        {
            return mValues.GetNullable<double>(key);
        }

        /// <summary>
        /// Gets a value and converts it to a Float.
        /// </summary>
        /// <param name="key">the value to get</param>
        /// <returns>the Float value, or null if the value is missing or cannot be converted</returns>
        public float? GetAsFloat(string key)
        {
            return mValues.GetNullable<float>(key);
        }

        /// <summary>
        /// Gets a value and converts it to a Boolean.
        /// </summary>
        /// <param name="key">the value to get</param>
        /// <returns>the Boolean value, or null if the value is missing or cannot be converted</returns>
        public bool? GetAsBoolean(string key)
        {
            return mValues.GetNullable<bool>(key);
        }

        /// <summary>
        /// Gets a value that is a byte array.
        /// </summary>
        /// <remarks>
        /// Gets a value that is a byte array. Note that this method will not convert
        /// any other types to byte arrays.
        /// </remarks>
        /// <param name="key">the value to get</param>
        /// <returns>the byte[] value, or null is the value is missing or not a byte[]</returns>
        public byte[] GetAsByteArray(string key)
        {
            return mValues.GetCast<byte[]>(key);
        }

        /// <summary>
        /// Returns a set of all of the keys and values
        /// </summary>
        /// <returns>a set of all of the keys and values</returns>
        public ICollection<KeyValuePair<string, object>> ValueSet()
        {
            // Materialized to an array so callers get a stable snapshot.
            return mValues.AsSafeEnumerable().ToArray();
        }

        /// <summary>Returns a set of all of the keys</summary>
        /// <returns>a set of all of the keys</returns>
        public ICollection<string> KeySet()
        {
            return mValues.Keys;
        }

        #endregion

        #region Overrides
        #pragma warning disable 1591

        // NOTE(review): Dictionary<TKey,TValue> does not override Equals/GetHashCode, so this
        // delegates to reference equality — two ContentValues with identical contents compare
        // unequal. Android's ContentValues (which this is ported from) compares contents via
        // HashMap.equals. Confirm whether reference semantics are intentional here.
        public override bool Equals(object obj)
        {
            if (!(obj is ContentValues))
            {
                return false;
            }

            return mValues.Equals(((ContentValues)obj).mValues);
        }

        public override int GetHashCode()
        {
            return mValues.GetHashCode();
        }

        public override string ToString()
        {
            // Space-separated "key=value" pairs; values are wrapped in SecureLogString so
            // potentially sensitive data is redacted according to log sensitivity settings.
            StringBuilder sb = new StringBuilder();
            foreach (string name in mValues.Keys)
            {
                string value = GetAsString(name);
                if (sb.Length > 0)
                {
                    sb.Append(" ");
                }

                sb.AppendFormat("{0}={1}", name, new SecureLogString(value, LogMessageSensitivity.PotentiallyInsecure));
            }

            return sb.ToString();
        }

        #pragma warning restore 1591
        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;

namespace System.Xml
{
    // Helper that implements XmlReader.ReadContentAsBase64/BinHex and the
    // ReadElementContentAs* variants as an incremental decoding state machine
    // layered on top of an XmlReader.
    internal partial class ReadContentAsBinaryHelper
    {
        // Private enums
        enum State
        {
            None,                 // not currently decoding
            InReadContent,        // mid-way through ReadContentAs* decoding
            InReadElementContent, // mid-way through ReadElementContentAs* decoding
        }

        // Fields
        XmlReader reader;
        State state;
        int valueOffset;          // position within the current text value already consumed
        bool isEnd;               // true once the content run has been fully decoded
        bool canReadValueChunk;   // whether the reader supports streaming via ReadValueChunk
        char[] valueChunk;        // scratch buffer for ReadValueChunk (only when supported)
        int valueChunkLength;
        IncrementalReadDecoder decoder;      // currently active decoder (base64 or binhex)
        Base64Decoder base64Decoder;         // lazily created, reused across calls
        BinHexDecoder binHexDecoder;         // lazily created, reused across calls

        // Constants
        const int ChunkSize = 256;

        // Constructor
        internal ReadContentAsBinaryHelper(XmlReader reader)
        {
            this.reader = reader;
            this.canReadValueChunk = reader.CanReadValueChunk;
            if (canReadValueChunk)
            {
                valueChunk = new char[ChunkSize];
            }
        }

        // Static methods

        // Returns a fresh helper for 'reader', or resets and reuses 'helper' if one exists.
        internal static ReadContentAsBinaryHelper CreateOrReset(ReadContentAsBinaryHelper helper, XmlReader reader)
        {
            if (helper == null)
            {
                return new ReadContentAsBinaryHelper(reader);
            }
            else
            {
                helper.Reset();
                return helper;
            }
        }

        // Internal methods

        // Decodes up to 'count' bytes of base64 text content into 'buffer' starting at 'index';
        // returns the number of bytes written (0 at end of content).
        internal int ReadContentAsBase64(byte[] buffer, int index, int count)
        {
            // check arguments
            if (buffer == null)
            {
                throw new ArgumentNullException("buffer");
            }
            if (count < 0)
            {
                throw new ArgumentOutOfRangeException("count");
            }
            if (index < 0)
            {
                throw new ArgumentOutOfRangeException("index");
            }
            if (buffer.Length - index < count)
            {
                throw new ArgumentOutOfRangeException("count");
            }

            switch (state)
            {
                case State.None:
                    if (!reader.CanReadContentAs())
                    {
                        throw reader.CreateReadContentAsException("ReadContentAsBase64");
                    }
                    if (!Init())
                    {
                        return 0;
                    }
                    break;
                case State.InReadContent:
                    // if we have a correct decoder, go read
                    if (decoder == base64Decoder)
                    {
                        // read more binary data
                        return ReadContentAsBinary(buffer, index, count);
                    }
                    break;
                case State.InReadElementContent:
                    // mixing element-content and content reads is disallowed
                    throw new InvalidOperationException(SR.Xml_MixingBinaryContentMethods);
                default:
                    Debug.Fail("Unmatched state in switch");
                    return 0;
            }

            Debug.Assert(state == State.InReadContent);

            // setup base64 decoder
            InitBase64Decoder();

            // read more binary data
            return ReadContentAsBinary(buffer, index, count);
        }

        // Decodes up to 'count' bytes of BinHex text content into 'buffer' starting at 'index';
        // returns the number of bytes written (0 at end of content).
        internal int ReadContentAsBinHex(byte[] buffer, int index, int count)
        {
            // check arguments
            if (buffer == null)
            {
                throw new ArgumentNullException("buffer");
            }
            if (count < 0)
            {
                throw new ArgumentOutOfRangeException("count");
            }
            if (index < 0)
            {
                throw new ArgumentOutOfRangeException("index");
            }
            if (buffer.Length - index < count)
            {
                throw new ArgumentOutOfRangeException("count");
            }

            switch (state)
            {
                case State.None:
                    if (!reader.CanReadContentAs())
                    {
                        throw reader.CreateReadContentAsException("ReadContentAsBinHex");
                    }
                    if (!Init())
                    {
                        return 0;
                    }
                    break;
                case State.InReadContent:
                    // if we have a correct decoder, go read
                    if (decoder == binHexDecoder)
                    {
                        // read more binary data
                        return ReadContentAsBinary(buffer, index, count);
                    }
                    break;
                case State.InReadElementContent:
                    throw new InvalidOperationException(SR.Xml_MixingBinaryContentMethods);
                default:
                    Debug.Fail("Unmatched state in switch");
                    return 0;
            }

            Debug.Assert(state == State.InReadContent);

            // setup binhex decoder
            InitBinHexDecoder();

            // read more binary data
            return ReadContentAsBinary(buffer, index, count);
        }

        // Element-scoped variant of ReadContentAsBase64: must start positioned on an Element
        // node; consumes the element's text content and moves past its EndElement when done.
        internal int ReadElementContentAsBase64(byte[] buffer, int index, int count)
        {
            // check arguments
            if (buffer == null)
            {
                throw new ArgumentNullException("buffer");
            }
            if (count < 0)
            {
                throw new ArgumentOutOfRangeException("count");
            }
            if (index < 0)
            {
                throw new ArgumentOutOfRangeException("index");
            }
            if (buffer.Length - index < count)
            {
                throw new ArgumentOutOfRangeException("count");
            }

            switch (state)
            {
                case State.None:
                    if (reader.NodeType != XmlNodeType.Element)
                    {
                        throw reader.CreateReadElementContentAsException("ReadElementContentAsBase64");
                    }
                    if (!InitOnElement())
                    {
                        return 0;
                    }
                    break;
                case State.InReadContent:
                    throw new InvalidOperationException(SR.Xml_MixingBinaryContentMethods);
                case State.InReadElementContent:
                    // if we have a correct decoder, go read
                    if (decoder == base64Decoder)
                    {
                        // read more binary data
                        return ReadElementContentAsBinary(buffer, index, count);
                    }
                    break;
                default:
                    Debug.Fail("Unmatched state in switch");
                    return 0;
            }

            Debug.Assert(state == State.InReadElementContent);

            // setup base64 decoder
            InitBase64Decoder();

            // read more binary data
            return ReadElementContentAsBinary(buffer, index, count);
        }

        // Element-scoped variant of ReadContentAsBinHex; see ReadElementContentAsBase64.
        internal int ReadElementContentAsBinHex(byte[] buffer, int index, int count)
        {
            // check arguments
            if (buffer == null)
            {
                throw new ArgumentNullException("buffer");
            }
            if (count < 0)
            {
                throw new ArgumentOutOfRangeException("count");
            }
            if (index < 0)
            {
                throw new ArgumentOutOfRangeException("index");
            }
            if (buffer.Length - index < count)
            {
                throw new ArgumentOutOfRangeException("count");
            }

            switch (state)
            {
                case State.None:
                    if (reader.NodeType != XmlNodeType.Element)
                    {
                        throw reader.CreateReadElementContentAsException("ReadElementContentAsBinHex");
                    }
                    if (!InitOnElement())
                    {
                        return 0;
                    }
                    break;
                case State.InReadContent:
                    throw new InvalidOperationException(SR.Xml_MixingBinaryContentMethods);
                case State.InReadElementContent:
                    // if we have a correct decoder, go read
                    if (decoder == binHexDecoder)
                    {
                        // read more binary data
                        return ReadElementContentAsBinary(buffer, index, count);
                    }
                    break;
                default:
                    Debug.Fail("Unmatched state in switch");
                    return 0;
            }

            Debug.Assert(state == State.InReadElementContent);

            // setup binhex decoder
            InitBinHexDecoder();

            // read more binary data
            return ReadElementContentAsBinary(buffer, index, count);
        }

        // Skips any remaining undecoded content (and the closing EndElement when in
        // element-content mode), then resets the helper to the idle state.
        internal void Finish()
        {
            if (state != State.None)
            {
                while (MoveToNextContentNode(true))
                    ;
                if (state == State.InReadElementContent)
                {
                    if (reader.NodeType != XmlNodeType.EndElement)
                    {
                        throw new XPath.XPathException(SR.Xml_InvalidNodeType, reader.NodeType.ToString(), reader as IXmlLineInfo);
                    }
                    // move off the EndElement
                    reader.Read();
                }
            }
            Reset();
        }

        // Returns the helper to the idle state without touching the underlying reader.
        internal void Reset()
        {
            state = State.None;
            isEnd = false;
            valueOffset = 0;
        }

        // Private methods

        // Positions on the first content node for a ReadContentAs* call;
        // returns false when there is no content to decode.
        private bool Init()
        {
            // make sure we are on a content node
            if (!MoveToNextContentNode(false))
            {
                return false;
            }

            state = State.InReadContent;
            isEnd = false;
            return true;
        }

        // Moves from the start Element into its content for a ReadElementContentAs* call;
        // returns false for an empty element or an element with no textual content.
        private bool InitOnElement()
        {
            Debug.Assert(reader.NodeType == XmlNodeType.Element);
            bool isEmpty = reader.IsEmptyElement;

            // move to content or off the empty element
            reader.Read();
            if (isEmpty)
            {
                return false;
            }

            // make sure we are on a content node
            if (!MoveToNextContentNode(false))
            {
                if (reader.NodeType != XmlNodeType.EndElement)
                {
                    throw new XPath.XPathException(SR.Xml_InvalidNodeType, reader.NodeType.ToString(), reader as IXmlLineInfo);
                }
                // move off end element
                reader.Read();
                return false;
            }

            state = State.InReadElementContent;
            isEnd = false;
            return true;
        }

        // Lazily creates (or resets) the base64 decoder and makes it the active decoder.
        private void InitBase64Decoder()
        {
            if (base64Decoder == null)
            {
                base64Decoder = new Base64Decoder();
            }
            else
            {
                base64Decoder.Reset();
            }
            decoder = base64Decoder;
        }

        // Lazily creates (or resets) the BinHex decoder and makes it the active decoder.
        private void InitBinHexDecoder()
        {
            if (binHexDecoder == null)
            {
                binHexDecoder = new BinHexDecoder();
            }
            else
            {
                binHexDecoder.Reset();
            }
            decoder = binHexDecoder;
        }

        // Core decode loop: feeds text content into the active decoder until the output
        // buffer is full or the content run ends; returns the number of decoded bytes.
        private int ReadContentAsBinary(byte[] buffer, int index, int count)
        {
            Debug.Assert(decoder != null);

            if (isEnd)
            {
                Reset();
                return 0;
            }
            decoder.SetNextOutputBuffer(buffer, index, count);

            for (; ;)
            {
                // use streaming ReadValueChunk if the reader supports it
                if (canReadValueChunk)
                {
                    for (; ;)
                    {
                        if (valueOffset < valueChunkLength)
                        {
                            int decodedCharsCount = decoder.Decode(valueChunk, valueOffset, valueChunkLength - valueOffset);
                            valueOffset += decodedCharsCount;
                        }
                        if (decoder.IsFull)
                        {
                            return decoder.DecodedCount;
                        }
                        Debug.Assert(valueOffset == valueChunkLength);
                        if ((valueChunkLength = reader.ReadValueChunk(valueChunk, 0, ChunkSize)) == 0)
                        {
                            break;
                        }
                        valueOffset = 0;
                    }
                }
                else
                {
                    // read what is reader.Value
                    string value = reader.Value;
                    int decodedCharsCount = decoder.Decode(value, valueOffset, value.Length - valueOffset);
                    valueOffset += decodedCharsCount;

                    if (decoder.IsFull)
                    {
                        return decoder.DecodedCount;
                    }
                }

                valueOffset = 0;

                // move to next textual node in the element content; throw on sub elements
                if (!MoveToNextContentNode(true))
                {
                    isEnd = true;
                    return decoder.DecodedCount;
                }
            }
        }

        // Element-content wrapper around ReadContentAsBinary: when decoding is exhausted,
        // verifies we are on the closing EndElement, consumes it, and returns to State.None.
        private int ReadElementContentAsBinary(byte[] buffer, int index, int count)
        {
            if (count == 0)
            {
                return 0;
            }
            // read binary
            int decoded = ReadContentAsBinary(buffer, index, count);
            if (decoded > 0)
            {
                return decoded;
            }

            // if 0 bytes returned check if we are on a closing EndElement, throw exception if not
            if (reader.NodeType != XmlNodeType.EndElement)
            {
                throw new XPath.XPathException(SR.Xml_InvalidNodeType, reader.NodeType.ToString(), reader as IXmlLineInfo);
            }

            // move off the EndElement
            reader.Read();
            state = State.None;
            return 0;
        }

        // Advances to the next textual content node. When 'moveIfOnContentNode' is false,
        // stays put if already on content; returns false on a non-content node (e.g. a
        // child Element), which callers treat as end of the decodable run.
        bool MoveToNextContentNode(bool moveIfOnContentNode)
        {
            do
            {
                switch (reader.NodeType)
                {
                    case XmlNodeType.Attribute:
                        return !moveIfOnContentNode;
                    case XmlNodeType.Text:
                    case XmlNodeType.Whitespace:
                    case XmlNodeType.SignificantWhitespace:
                    case XmlNodeType.CDATA:
                        if (!moveIfOnContentNode)
                        {
                            return true;
                        }
                        break;
                    case XmlNodeType.ProcessingInstruction:
                    case XmlNodeType.Comment:
                    case XmlNodeType.EndEntity:
                        // skip comments, pis and end entity nodes
                        break;
                    case XmlNodeType.EntityReference:
                        if (reader.CanResolveEntity)
                        {
                            reader.ResolveEntity();
                            break;
                        }
                        goto default;
                    default:
                        return false;
                }
                moveIfOnContentNode = false;
            } while (reader.Read());
            return false;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Sandbox.ModAPI;
using VRage;
using VRage.Game.ModAPI;
using VRageMath;
using VRage.ModAPI;
using VRage.Game.Entity;
using Sandbox.Common.ObjectBuilders;
using Sandbox.Game.Entities;
using Sandbox.Game;
using Sandbox.Definitions;
using Ingame = Sandbox.ModAPI.Ingame;
using VRage.Game;
using VRage.Voxels;
using VRage.ObjectBuilders;
using NaniteConstructionSystem.Particles;
using NaniteConstructionSystem.Extensions;
using NaniteConstructionSystem.Entities.Beacons;

namespace NaniteConstructionSystem.Entities.Targets
{
    // Per-target bookkeeping used while a mining nanite is "carrying" ore:
    // particle animation counter plus timing for the simulated travel/carry delay.
    public class NaniteMiningTarget
    {
        public int ParticleCount { get; set; }
        public double StartTime { get; set; }
        public double CarryTime { get; set; }
        public double LastUpdate { get; set; }
    }

    // Target handler that lets a nanite construction block mine voxel positions
    // advertised by nearby mining hammer blocks (NaniteConstructionManager.MiningList).
    public class NaniteMiningTargets : NaniteTargetBlocksBase
    {
        public override string TargetName
        {
            get { return "Mining"; }
        }

        // Maximum factory-to-target distance in meters (overwritten from settings in ctor).
        private float m_maxDistance = 500f;
        private Dictionary<NaniteMiningItem, NaniteMiningTarget> m_targetTracker;
        // NOTE(review): static, but re-assigned in every instance constructor — constructing
        // a new factory wipes the position reservations of all existing factories. Confirm
        // whether this reset is intentional.
        private static HashSet<Vector3D> m_globalPositionList;
        private Random rnd;

        public NaniteMiningTargets(NaniteConstructionBlock constructionBlock) : base(constructionBlock)
        {
            m_maxDistance = NaniteConstructionManager.Settings.MiningMaxDistance;
            m_targetTracker = new Dictionary<NaniteMiningItem, NaniteMiningTarget>();
            m_globalPositionList = new HashSet<Vector3D>();
            rnd = new Random();
        }

        // Maximum simultaneous mining streams, scaled by installed "MiningNanites"
        // upgrades and capped by the global MiningMaxStreams setting.
        public override int GetMaximumTargets()
        {
            MyCubeBlock block = (MyCubeBlock)m_constructionBlock.ConstructionBlock;
            return (int)Math.Min(NaniteConstructionManager.Settings.MiningNanitesNoUpgrade + (block.UpgradeValues["MiningNanites"] * NaniteConstructionManager.Settings.MiningNanitesPerUpgrade), NaniteConstructionManager.Settings.MiningMaxStreams);
        }

        // Power per stream, reduced by "PowerNanites" upgrades, never below 1.
        public override float GetPowerUsage()
        {
            MyCubeBlock block = (MyCubeBlock)m_constructionBlock.ConstructionBlock;
            return Math.Max(1, NaniteConstructionManager.Settings.MiningPowerPerStream - (int)(block.UpgradeValues["PowerNanites"] * NaniteConstructionManager.Settings.PowerDecreasePerUpgrade));
        }

        // Minimum travel time in seconds, reduced by "SpeedNanites" upgrades, never below 1s.
        public override float GetMinTravelTime()
        {
            MyCubeBlock block = (MyCubeBlock)m_constructionBlock.ConstructionBlock;
            return Math.Max(1f, NaniteConstructionManager.Settings.MiningMinTravelTime - (block.UpgradeValues["SpeedNanites"] * NaniteConstructionManager.Settings.MinTravelTimeReductionPerUpgrade));
        }

        // Travel speed factor, increased by "SpeedNanites" upgrades.
        public override float GetSpeed()
        {
            MyCubeBlock block = (MyCubeBlock)m_constructionBlock.ConstructionBlock;
            return NaniteConstructionManager.Settings.MiningDistanceDivisor + (block.UpgradeValues["SpeedNanites"] * (float)NaniteConstructionManager.Settings.SpeedIncreasePerUpgrade);
        }

        // Mining is enabled unless the block is off/damaged, its name contains "NoMining",
        // or the per-block terminal settings disallow mining.
        public override bool IsEnabled()
        {
            bool result = true;
            if (!((IMyFunctionalBlock)m_constructionBlock.ConstructionBlock).Enabled ||
                !((IMyFunctionalBlock)m_constructionBlock.ConstructionBlock).IsFunctional ||
                m_constructionBlock.ConstructionBlock.CustomName.ToLower().Contains("NoMining".ToLower()))
                result = false;

            if (NaniteConstructionManager.TerminalSettings.ContainsKey(m_constructionBlock.ConstructionBlock.EntityId))
            {
                if (!NaniteConstructionManager.TerminalSettings[m_constructionBlock.ConstructionBlock.EntityId].AllowMining)
                    return false;
            }

            return result;
        }

        // Rebuilds PotentialTargetList from up to 6 nearby, friendly, working mining hammers,
        // interleaving ore items round-robin across material groups (capped at 1000 items).
        public override void ParallelUpdate(List<IMyCubeGrid> gridList, List<IMySlimBlock> gridBlocks)
        {
            using (Lock.AcquireExclusiveUsing())
                PotentialTargetList.Clear();

            DateTime start = DateTime.Now;
            List<object> finalAddList = new List<object>();
            int listCount = 0;
            // Hammers are shuffled (OrderBy random) so no single hammer monopolizes the list.
            foreach (var miningBlock in NaniteConstructionManager.MiningList.Where(x => x.IsWorking && Vector3D.DistanceSquared(m_constructionBlock.ConstructionBlock.GetPosition(), x.MiningBlock.GetPosition()) < m_maxDistance * m_maxDistance).OrderBy(x => rnd.Next(100)))
            {
                IMyCubeBlock item = (IMyCubeBlock)miningBlock.MiningBlock;
                MyRelationsBetweenPlayerAndBlock relation = item.GetUserRelationToOwner(m_constructionBlock.ConstructionBlock.OwnerId);
                if (!(relation == MyRelationsBetweenPlayerAndBlock.Owner || relation == MyRelationsBetweenPlayerAndBlock.FactionShare || (MyAPIGateway.Session.CreativeMode && relation == MyRelationsBetweenPlayerAndBlock.NoOwnership)))
                    continue;
                if (!((IMyFunctionalBlock)item).Enabled)
                    continue;
                if (miningBlock.OreList == null || miningBlock.OreList.Count < 1)
                    continue;

                int sum = miningBlock.OreList.Sum(x => x.Value.Count);
                // Snapshot the ore list under the hammer's lock before iterating it.
                Dictionary<MyVoxelMaterialDefinition, List<NaniteMiningItem>> lookup = null;
                using (miningBlock.Lock.AcquireExclusiveUsing())
                {
                    lookup = miningBlock.OreList.ToDictionary(x => x.Key, x => x.Value);
                }

                // Round-robin across material groups; Insert(0, ...) builds the list in
                // reverse, which the final Reverse() below undoes.
                List<object> addList = new List<object>();
                int count = 0;
                int pos = 0;
                while (true)
                {
                    var group = lookup.ElementAt(count % miningBlock.OreList.Count);
                    if (pos < group.Value.Count)
                    {
                        addList.Insert(0, group.Value[pos]);
                    }
                    count++;
                    if (count % miningBlock.OreList.Count == 0)
                        pos++;
                    if (count >= 1000)
                        break;
                    if (count >= sum)
                        break;
                }

                DistributeList(addList, finalAddList, listCount);
                listCount++;
                if (listCount > 5)
                    break;
            }

            var listToAdd = finalAddList.Take(1000).ToList();
            listToAdd.Reverse();
            using (Lock.AcquireExclusiveUsing())
            {
                PotentialTargetList.AddRange(listToAdd);
            }
            //Logging.Instance.WriteLine(string.Format("ParallelUpdate() took {0} ms", (DateTime.Now - start).TotalMilliseconds));
        }

        // Interleaves 'listToAdd' into 'finalList' with a stride based on how many
        // hammer lists ('count') have already been merged, so hammers alternate.
        private void DistributeList(List<object> listToAdd, List<object> finalList, int count)
        {
            if(count < 1)
            {
                finalList.AddRange(listToAdd);
                return;
            }

            for(int r = listToAdd.Count - 1; r >= 0; r--)
            {
                var item = listToAdd[r];
                var realPos = r * (count + 1);
                if (realPos >= finalList.Count)
                    realPos = finalList.Count - 1;
                finalList.Insert(realPos, item);
            }
        }

        // Promotes potential targets to active targets up to the stream/power limits,
        // skipping voxels already claimed by this or any other factory.
        public override void FindTargets(ref Dictionary<string, int> available)
        {
            if (!IsEnabled())
                return;

            if (TargetList.Count >= GetMaximumTargets())
            {
                if (PotentialTargetList.Count > 0)
                    m_lastInvalidTargetReason = "Maximum targets reached. Add more upgrades!";
                return;
            }

            DateTime start = DateTime.Now;
            using (Lock.AcquireExclusiveUsing())
            {
                if (m_constructionBlock.IsUserDefinedLimitReached())
                {
                    m_lastInvalidTargetReason = "User defined maximum nanite limit reached";
                    return;
                }

                //foreach (NaniteMiningItem item in PotentialTargetList)
                for (int r = PotentialTargetList.Count - 1; r >= 0; r--)
                {
                    var item = (NaniteMiningItem)PotentialTargetList[r];
                    if (TargetList.Contains(item))
                        continue;
                    if (m_globalPositionList.Contains(item.Position))
                    {
                        m_lastInvalidTargetReason = "Another factory has this voxel as a target";
                        continue;
                    }

                    var blockList = NaniteConstructionManager.GetConstructionBlocks((IMyCubeGrid)m_constructionBlock.ConstructionBlock.CubeGrid);
                    bool found = false;
                    foreach (var block in blockList)
                    {
                        // This can be sped up if necessary by indexing items by position
                        if (block.GetTarget<NaniteMiningTargets>().TargetList.FirstOrDefault(x => ((NaniteMiningItem)x).Position == item.Position) != null)
                        {
                            found = true;
                            break;
                        }
                    }

                    if (found)
                    {
                        m_lastInvalidTargetReason = "Another factory has this voxel as a target";
                        continue;
                    }

                    // Skip voxels that no longer contain ore (e.g. already mined).
                    if (!NaniteMining.CheckVoxelContent(item.VoxelId, item.Position))
                    {
                        continue;
                    }

                    if (Vector3D.DistanceSquared(m_constructionBlock.ConstructionBlock.GetPosition(), item.Position) < m_maxDistance * m_maxDistance &&
                        NaniteConstructionPower.HasRequiredPowerForNewTarget((IMyFunctionalBlock)m_constructionBlock.ConstructionBlock, this))
                    {
                        Logging.Instance.WriteLine(string.Format("ADDING Mining Target: conid={0} pos={1} type={2}", m_constructionBlock.ConstructionBlock.EntityId, item.Position, MyDefinitionManager.Static.GetVoxelMaterialDefinition(item.VoxelMaterial).MinedOre));

                        //PotentialTargetList.Remove(item);
                        TargetList.Add(item);
                        m_globalPositionList.Add(item.Position);
                        if (TargetList.Count >= GetMaximumTargets())
                            break;
                    }
                }
            }
            //Logging.Instance.WriteLine(string.Format("FindTargets took {0}ms", (DateTime.Now - start).TotalMilliseconds));
        }

        // Per-tick processing of all active targets (snapshot via ToList so targets
        // can be removed while iterating).
        public override void Update()
        {
            foreach(var item in TargetList.ToList())
            {
                ProcessItem(item);
            }
        }

        // Server-side: validates the target each tick and, once the carry delay has
        // elapsed, attempts the ore transfer; always drives the client-visible particles.
        private void ProcessItem(object miningTarget)
        {
            var target = miningTarget as NaniteMiningItem;
            if (target == null)
                return;

            if (Sync.IsServer)
            {
                if (!IsEnabled())
                {
                    Logging.Instance.WriteLine("CANCELLING Mining Target due to being disabled");
                    CancelTarget(target);
                    return;
                }

                if (m_constructionBlock.FactoryState != NaniteConstructionBlock.FactoryStates.Active)
                    return;

                if(!target.MiningHammer.IsWorking)
                {
                    Logging.Instance.WriteLine("CANCELLING Mining Target due to hammer functionality change");
                    CancelTarget(target);
                    return;
                }

                if (!NaniteConstructionPower.HasRequiredPowerForCurrentTarget((IMyFunctionalBlock)m_constructionBlock.ConstructionBlock))
                {
                    Logging.Instance.WriteLine("CANCELLING Mining Target due to power shortage");
                    CancelTarget(target);
                    return;
                }

                if (!m_targetTracker.ContainsKey(target))
                {
                    m_constructionBlock.SendAddTarget(target);
                }

                if (m_targetTracker.ContainsKey(target))
                {
                    var trackedItem = m_targetTracker[target];
                    // Only attempt the transfer after the carry time has elapsed, and at
                    // most once every 2 seconds.
                    if (MyAPIGateway.Session.ElapsedPlayTime.TotalMilliseconds - trackedItem.StartTime >= trackedItem.CarryTime &&
                        MyAPIGateway.Session.ElapsedPlayTime.TotalMilliseconds - trackedItem.LastUpdate > 2000)
                    {
                        trackedItem.LastUpdate = MyAPIGateway.Session.ElapsedPlayTime.TotalMilliseconds;
                        if (!TransferFromTarget(target))
                            CancelTarget(target);
                        else
                            CompleteTarget(target);
                    }
                }
            }

            CreateMiningParticles(target);
        }

        // Emits a nanite particle stream toward the target, throttled by the global
        // particle budget.
        private void CreateMiningParticles(NaniteMiningItem target)
        {
            if (!m_targetTracker.ContainsKey(target))
                CreateTrackerItem(target);
            if (NaniteParticleManager.TotalParticleCount > NaniteParticleManager.MaxTotalParticles)
                return;
            m_targetTracker[target].ParticleCount++;
            int size = (int)Math.Max(60f, NaniteParticleManager.TotalParticleCount);
            if ((float)m_targetTracker[target].ParticleCount / size < 1f)
                return;
            m_targetTracker[target].ParticleCount = 0;

            // Create Particle
            Vector4 startColor = new Vector4(0.7f, 0.2f, 0.0f, 1f);
            Vector4 endColor = new Vector4(0.2f, 0.05f, 0.0f, 0.35f);
            m_constructionBlock.ParticleManager.AddParticle(startColor, endColor, GetMinTravelTime() * 1000f, GetSpeed(), target, (IMyTerminalBlock)target.MiningHammer.MiningBlock);
        }

        // Creates tracker state for a new target; carry time is travel time (distance /
        // speed, floored at the minimum travel time) minus one second.
        private void CreateTrackerItem(NaniteMiningItem target)
        {
            double distance = Vector3D.Distance(m_constructionBlock.ConstructionBlock.GetPosition(), target.Position);
            int time = (int)Math.Max(GetMinTravelTime() * 1000f, (distance / GetSpeed()) * 1000f);

            NaniteMiningTarget miningTarget = new NaniteMiningTarget();
            miningTarget.ParticleCount = 0;
            miningTarget.StartTime = MyAPIGateway.Session.ElapsedPlayTime.TotalMilliseconds;
            miningTarget.LastUpdate = MyAPIGateway.Session.ElapsedPlayTime.TotalMilliseconds;
            miningTarget.CarryTime = time - 1000;
            m_targetTracker.Add(target, miningTarget);
        }

        // Removes the voxel content and deposits the mined ore into the factory's cargo;
        // returns false when nothing was mined or no cargo space is available.
        // NOTE(review): the voxel is removed BEFORE the cargo check, so a full inventory
        // still destroys the voxel — confirm whether that loss is intended.
        private bool TransferFromTarget(NaniteMiningItem target)
        {
            byte material = 0;
            float amount = 0;
            //NaniteMining.CheckVoxelContent(target.VoxelId, target.Position, target.LocalPos);
            NaniteMining.RemoveVoxelContent(target.VoxelId, target.Position, out material, out amount);
            //NaniteMining.TestRemoveVoxel(target.VoxelId, target.Position, out material, out amount);
            //gging.Instance.WriteLine($"Removing: {target.Position} ({material} {amount})");
            /*
            if (material == 0)
            {
                Logging.Instance.WriteLine(string.Format("Material is 0", target.VoxelId));
                return false;
            }
            */

            if (amount == 0f)
            {
                Logging.Instance.WriteLine(string.Format("Amount is 0", target.VoxelId));
                return false;
            }

            var def = MyDefinitionManager.Static.GetVoxelMaterialDefinition(target.VoxelMaterial);
            var builder = MyObjectBuilderSerializer.CreateNewObject<MyObjectBuilder_Ore>(def.MinedOre);
            if (!GridHelper.FindFreeCargo((MyCubeBlock)m_constructionBlock.ConstructionBlock, builder, (int)amount, false))
            {
                Logging.Instance.WriteLine(string.Format("Can not find free cargo space!"));
                return false;
            }

            return true;
        }

        // Aborts a target: notifies peers, stops particles, and releases all bookkeeping.
        public override void CancelTarget(object obj)
        {
            var target = obj as NaniteMiningItem;
            Logging.Instance.WriteLine(string.Format("CANCELLED Mining Target: {0} - {1} (VoxelID={2},Position={3})", m_constructionBlock.ConstructionBlock.EntityId, obj.GetType().Name, target.VoxelId, target.Position));
            if (Sync.IsServer)
            {
                m_constructionBlock.SendCompleteTarget((NaniteMiningItem)obj);
            }

            m_constructionBlock.ParticleManager.CancelTarget(target);
            if (m_targetTracker.ContainsKey(target))
                m_targetTracker.Remove(target);
            m_globalPositionList.Remove(target.Position);
            Remove(obj);
        }

        // Finishes a target successfully; same cleanup path as CancelTarget apart from
        // the particle-manager call and log text.
        public override void CompleteTarget(object obj)
        {
            var target = obj as NaniteMiningItem;
            Logging.Instance.WriteLine(string.Format("COMPLETED Mining Target: {0} - {1} (VoxelID={2},Position={3})", m_constructionBlock.ConstructionBlock.EntityId, obj.GetType().Name, target.VoxelId, target.Position));
            if (Sync.IsServer)
            {
                m_constructionBlock.SendCompleteTarget((NaniteMiningItem)obj);
            }

            m_constructionBlock.ParticleManager.CompleteTarget(target);
            if (m_targetTracker.ContainsKey(target))
                m_targetTracker.Remove(target);
            m_globalPositionList.Remove(target.Position);
            Remove(obj);
        }
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

namespace Google.Cloud.ArtifactRegistry.V1.Snippets
{
    using Google.Api.Gax;
    using Google.Api.Gax.ResourceNames;
    using System;
    using System.Linq;
    using System.Threading.Tasks;

    /// <summary>Generated snippets.</summary>
    // NOTE: This class is produced by the snippet generator; each method demonstrates
    // one ArtifactRegistryClient call pattern. Edit the generator, not this file.
    public sealed class GeneratedArtifactRegistryClientSnippets
    {
        /// <summary>Snippet for ListDockerImages</summary>
        public void ListDockerImagesRequestObject()
        {
            // Snippet: ListDockerImages(ListDockerImagesRequest, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = ArtifactRegistryClient.Create();
            // Initialize request argument(s)
            ListDockerImagesRequest request = new ListDockerImagesRequest
            {
                Parent = "",
            };
            // Make the request
            PagedEnumerable<ListDockerImagesResponse, DockerImage> response = artifactRegistryClient.ListDockerImages(request);

            // Iterate over all response items, lazily performing RPCs as required
            foreach (DockerImage item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }

            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (ListDockerImagesResponse page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (DockerImage item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<DockerImage> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (DockerImage item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListDockerImagesAsync</summary>
        public async Task ListDockerImagesRequestObjectAsync()
        {
            // Snippet: ListDockerImagesAsync(ListDockerImagesRequest, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = await ArtifactRegistryClient.CreateAsync();
            // Initialize request argument(s)
            ListDockerImagesRequest request = new ListDockerImagesRequest
            {
                Parent = "",
            };
            // Make the request
            PagedAsyncEnumerable<ListDockerImagesResponse, DockerImage> response = artifactRegistryClient.ListDockerImagesAsync(request);

            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((DockerImage item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });

            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((ListDockerImagesResponse page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (DockerImage item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<DockerImage> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (DockerImage item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListDockerImages</summary>
        public void ListDockerImages()
        {
            // Snippet: ListDockerImages(string, string, int?, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = ArtifactRegistryClient.Create();
            // Initialize request argument(s)
            string parent = "";
            // Make the request
            PagedEnumerable<ListDockerImagesResponse, DockerImage> response = artifactRegistryClient.ListDockerImages(parent);

            // Iterate over all response items, lazily performing RPCs as required
            foreach (DockerImage item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }

            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (ListDockerImagesResponse page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (DockerImage item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<DockerImage> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (DockerImage item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListDockerImagesAsync</summary>
        public async Task ListDockerImagesAsync()
        {
            // Snippet: ListDockerImagesAsync(string, string, int?, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = await ArtifactRegistryClient.CreateAsync();
            // Initialize request argument(s)
            string parent = "";
            // Make the request
            PagedAsyncEnumerable<ListDockerImagesResponse, DockerImage> response = artifactRegistryClient.ListDockerImagesAsync(parent);

            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((DockerImage item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });

            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((ListDockerImagesResponse page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (DockerImage item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<DockerImage> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (DockerImage item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListRepositories</summary>
        public void ListRepositoriesRequestObject()
        {
            // Snippet: ListRepositories(ListRepositoriesRequest, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = ArtifactRegistryClient.Create();
            // Initialize request argument(s)
            ListRepositoriesRequest request = new ListRepositoriesRequest
            {
                ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
            };
            // Make the request
            PagedEnumerable<ListRepositoriesResponse, Repository> response = artifactRegistryClient.ListRepositories(request);

            // Iterate over all response items, lazily performing RPCs as required
            foreach (Repository item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }

            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (ListRepositoriesResponse page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (Repository item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<Repository> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (Repository item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListRepositoriesAsync</summary>
        public async Task ListRepositoriesRequestObjectAsync()
        {
            // Snippet: ListRepositoriesAsync(ListRepositoriesRequest, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = await ArtifactRegistryClient.CreateAsync();
            // Initialize request argument(s)
            ListRepositoriesRequest request = new ListRepositoriesRequest
            {
                ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
            };
            // Make the request
            PagedAsyncEnumerable<ListRepositoriesResponse, Repository> response = artifactRegistryClient.ListRepositoriesAsync(request);

            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((Repository item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });

            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((ListRepositoriesResponse page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (Repository item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<Repository> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (Repository item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListRepositories</summary>
        public void ListRepositories()
        {
            // Snippet: ListRepositories(string, string, int?, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = ArtifactRegistryClient.Create();
            // Initialize request argument(s)
            string parent = "projects/[PROJECT]/locations/[LOCATION]";
            // Make the request
            PagedEnumerable<ListRepositoriesResponse, Repository> response = artifactRegistryClient.ListRepositories(parent);

            // Iterate over all response items, lazily performing RPCs as required
            foreach (Repository item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }

            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (ListRepositoriesResponse page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (Repository item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<Repository> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (Repository item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListRepositoriesAsync</summary>
        public async Task ListRepositoriesAsync()
        {
            // Snippet: ListRepositoriesAsync(string, string, int?, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = await ArtifactRegistryClient.CreateAsync();
            // Initialize request argument(s)
            string parent = "projects/[PROJECT]/locations/[LOCATION]";
            // Make the request
            PagedAsyncEnumerable<ListRepositoriesResponse, Repository> response = artifactRegistryClient.ListRepositoriesAsync(parent);

            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((Repository item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });

            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((ListRepositoriesResponse page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (Repository item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<Repository> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (Repository item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListRepositories</summary>
        public void ListRepositoriesResourceNames()
        {
            // Snippet: ListRepositories(LocationName, string, int?, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = ArtifactRegistryClient.Create();
            // Initialize request argument(s)
            LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]");
            // Make the request
            PagedEnumerable<ListRepositoriesResponse, Repository> response = artifactRegistryClient.ListRepositories(parent);

            // Iterate over all response items, lazily performing RPCs as required
            foreach (Repository item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }

            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (ListRepositoriesResponse page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (Repository item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<Repository> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (Repository item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListRepositoriesAsync</summary>
        public async Task ListRepositoriesResourceNamesAsync()
        {
            // Snippet: ListRepositoriesAsync(LocationName, string, int?, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = await ArtifactRegistryClient.CreateAsync();
            // Initialize request argument(s)
            LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]");
            // Make the request
            PagedAsyncEnumerable<ListRepositoriesResponse, Repository> response = artifactRegistryClient.ListRepositoriesAsync(parent);

            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((Repository item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });

            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((ListRepositoriesResponse page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (Repository item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<Repository> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (Repository item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for GetRepository</summary>
        public void GetRepositoryRequestObject()
        {
            // Snippet: GetRepository(GetRepositoryRequest, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = ArtifactRegistryClient.Create();
            // Initialize request argument(s)
            GetRepositoryRequest request = new GetRepositoryRequest
            {
                RepositoryName = RepositoryName.FromProjectLocationRepository("[PROJECT]", "[LOCATION]", "[REPOSITORY]"),
            };
            // Make the request
            Repository response = artifactRegistryClient.GetRepository(request);
            // End snippet
        }

        /// <summary>Snippet for GetRepositoryAsync</summary>
        public async Task GetRepositoryRequestObjectAsync()
        {
            // Snippet: GetRepositoryAsync(GetRepositoryRequest, CallSettings)
            // Additional: GetRepositoryAsync(GetRepositoryRequest, CancellationToken)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = await ArtifactRegistryClient.CreateAsync();
            // Initialize request argument(s)
            GetRepositoryRequest request = new GetRepositoryRequest
            {
                RepositoryName = RepositoryName.FromProjectLocationRepository("[PROJECT]", "[LOCATION]", "[REPOSITORY]"),
            };
            // Make the request
            Repository response = await artifactRegistryClient.GetRepositoryAsync(request);
            // End snippet
        }

        /// <summary>Snippet for GetRepository</summary>
        public void GetRepository()
        {
            // Snippet: GetRepository(string, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = ArtifactRegistryClient.Create();
            // Initialize request argument(s)
            string name = "projects/[PROJECT]/locations/[LOCATION]/repositories/[REPOSITORY]";
            // Make the request
            Repository response = artifactRegistryClient.GetRepository(name);
            // End snippet
        }

        /// <summary>Snippet for GetRepositoryAsync</summary>
        public async Task GetRepositoryAsync()
        {
            // Snippet: GetRepositoryAsync(string, CallSettings)
            // Additional: GetRepositoryAsync(string, CancellationToken)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = await ArtifactRegistryClient.CreateAsync();
            // Initialize request argument(s)
            string name = "projects/[PROJECT]/locations/[LOCATION]/repositories/[REPOSITORY]";
            // Make the request
            Repository response = await artifactRegistryClient.GetRepositoryAsync(name);
            // End snippet
        }

        /// <summary>Snippet for GetRepository</summary>
        public void GetRepositoryResourceNames()
        {
            // Snippet: GetRepository(RepositoryName, CallSettings)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = ArtifactRegistryClient.Create();
            // Initialize request argument(s)
            RepositoryName name = RepositoryName.FromProjectLocationRepository("[PROJECT]", "[LOCATION]", "[REPOSITORY]");
            // Make the request
            Repository response = artifactRegistryClient.GetRepository(name);
            // End snippet
        }

        /// <summary>Snippet for GetRepositoryAsync</summary>
        public async Task GetRepositoryResourceNamesAsync()
        {
            // Snippet: GetRepositoryAsync(RepositoryName, CallSettings)
            // Additional: GetRepositoryAsync(RepositoryName, CancellationToken)
            // Create client
            ArtifactRegistryClient artifactRegistryClient = await ArtifactRegistryClient.CreateAsync();
            // Initialize request argument(s)
            RepositoryName name = RepositoryName.FromProjectLocationRepository("[PROJECT]", "[LOCATION]", "[REPOSITORY]");
            // Make the request
            Repository response = await artifactRegistryClient.GetRepositoryAsync(name);
            // End snippet
        }
    }
}
using System;
using System.Collections.Generic;
using System.Windows.Forms;
using System.Xml;
using System.IO;

namespace Krystals4ObjectLibrary
{
    /// <summary>
    /// A krystal whose strands are produced by modulating two input krystals
    /// (an x-input and a y-input) through a two-dimensional Modulator array.
    /// </summary>
    public sealed class ModulationKrystal : Krystal
    {
        #region constructors
        /// <summary>
        /// constructor for loading a complete modulated krystal from a file
        /// </summary>
        /// <param name="filepath"></param>
        public ModulationKrystal(string filepath)
            : base(filepath)
        {
            string modulatorName = "";
            using(XmlReader r = XmlReader.Create(filepath))
            {
                // check that this is a modulation (the other checks have been done in base()
                K.ReadToXmlElementTag(r, "modulation");
                // the "modulation" element carries exactly three attributes: x, y and modulator
                for(int attr = 0; attr < 3; attr++)
                {
                    r.MoveToAttribute(attr);
                    switch(r.Name)
                    {
                        case "x":
                            this._xInputFilename = r.Value;
                            break;
                        case "y":
                            this._yInputFilename = r.Value;
                            break;
                        case "modulator":
                            modulatorName = r.Value;
                            break;
                    }
                }
            }
            // resolve the stored filenames against the well-known folders
            string xInputFilepath = K.KrystalsFolder + @"\" + _xInputFilename;
            string yInputFilepath = K.KrystalsFolder + @"\" + _yInputFilename;
            string modulatorFilepath = K.ModulationOperatorsFolder + @"\" + modulatorName;

            _xInputKrystal = new ModulationInputKrystal(xInputFilepath);
            _yInputKrystal = new ModulationInputKrystal(yInputFilepath);
            _modulator = new Modulator(modulatorFilepath);
            _modulationNodeList = GetModulationNodeList();

            SetRedundantQualifierCoordinates();
        }
        /// <summary>
        /// Constructor used when beginning to edit a new modulated krystal (which has no modulator or strands yet).
        /// </summary>
        /// <param name="xInputFilepath">The file path to the x input</param>
        /// <param name="yInputFilepath">The file path to the y input</param>
        /// <param name="modulatorFilepath">The file path to the krystal containing the modulator (may be null or empty)</param>
        public ModulationKrystal(string xInputFilepath, string yInputFilepath, string modulatorFilepath)
            : base()
        {
            _xInputFilename = Path.GetFileName(xInputFilepath);
            _yInputFilename = Path.GetFileName(yInputFilepath);
            _xInputKrystal = new ModulationInputKrystal(xInputFilepath);
            _yInputKrystal = new ModulationInputKrystal(yInputFilepath);
            _modulationNodeList = GetModulationNodeList();

            // the result's level is the deeper of the two input levels
            this._level = _yInputKrystal.Level > _xInputKrystal.Level ? _yInputKrystal.Level : _xInputKrystal.Level;
            if(_yInputKrystal.Level == _xInputKrystal.Level && _yInputKrystal.NumValues != _xInputKrystal.NumValues)
                throw new ApplicationException("Error: the two input krystals are not of compatible size.");
            if(_yInputKrystal.Level == 0 && _xInputKrystal.Level == 0)
                throw new ApplicationException("Error: the two input krystals cannot both be constants.");

            if(string.IsNullOrEmpty(modulatorFilepath))
            {
                // no modulator given: create an empty one sized to the inputs' maximum values
                _modulator = new Modulator((int)_xInputKrystal.MaxValue, (int)_yInputKrystal.MaxValue);
            }
            else
            {
                _modulator = new Modulator(modulatorFilepath);
                if(_modulator.XDim < _xInputKrystal.MaxValue || _modulator.YDim < _yInputKrystal.MaxValue)
                    throw new ApplicationException("Error: One or more input values exceed the bounds of the modulator.");
            }

            SetRedundantQualifierCoordinates();
        }
        #endregion
        #region public functions
        /// <summary>
        /// Builds the flat list of ModulationNodes (one per moment) by aligning
        /// the shallower input krystal's values with the deeper ("master") one.
        /// Throws if either input contains a zero value.
        /// </summary>
        private List<ModulationNode> GetModulationNodeList()
        {
            //InputKrystal xInputKrystal, InputKrystal yInputKrystal
            List<ModulationNode> modulationNodeList = new List<ModulationNode>();
            InputKrystal master, slave;
            // the deeper krystal drives the iteration; note that on equal levels
            // the y-input becomes the master
            if(_xInputKrystal.Level > _yInputKrystal.Level)
            {
                master = _xInputKrystal;
                slave = _yInputKrystal;
            }
            else
            {
                slave = _xInputKrystal;
                master = _yInputKrystal;
            }
            int[] alignedSlaveValues = slave.AlignedValues(master);
            // first construct a flat list of modulation nodes (the leaf nodes of the final tree)
            int momentIndex = 0;
            foreach(LeveledValue leveledValue in master.LeveledValues)
            {
                int level = leveledValue.level;
                int mVal = leveledValue.value;
                if(mVal == 0 || alignedSlaveValues[momentIndex] == 0)
                {
                    string msg = "Error: An input krystal contained a value of zero.";
                    throw new ApplicationException(msg);
                }
                ModulationNode mn;
                // keep (X, Y) in the node oriented as (x-input, y-input) regardless of which is master
                if(master == _xInputKrystal)
                    mn = new ModulationNode(momentIndex + 1, level, mVal, alignedSlaveValues[momentIndex]);
                else
                    mn = new ModulationNode(momentIndex + 1, level, alignedSlaveValues[momentIndex], mVal);
                modulationNodeList.Add(mn);
                momentIndex++;
            }
            return modulationNodeList;
        }
        /// <summary>
        /// Saves the krystal to a file.
        /// Throws an exception if an error occurs.
        /// If 'overwrite' is false, or the krystal has no name yet, a new unique
        /// name is generated before saving; if 'overwrite' is true, an existing
        /// named krystal is saved in place — unless its maximum value has changed,
        /// in which case the old file is deleted and the krystal is re-saved
        /// under a freshly generated name.
        /// </summary>
        public override void Save(bool overwrite)
        {
            string pathname;
            if(string.IsNullOrEmpty(_name) || overwrite == false) // this is a new or changed krystal, so generate a new name
            {
                if(_name != null && _name == "") // used by Krystals4
                    _name = base.GetNameOfEquivalentSavedKrystal("mk");
                if(string.IsNullOrEmpty(_name)) // null is used by Moritz
                {
                    // probe mk{level}({max})-{index} filenames until an unused one is found
                    int fileIndex = 1;
                    do
                    {
                        _name = String.Format("mk{0}({1})-{2}{3}",
                            _level, _maxValue, fileIndex, K.KrystalFilenameSuffix);
                        pathname = K.KrystalsFolder + @"\" + _name;
                        fileIndex++;
                    } while(File.Exists(pathname));
                }
                else
                    pathname = K.KrystalsFolder + @"\" + _name;
            }
            else
                pathname = K.KrystalsFolder + @"\" + _name;

            if(MaxValueHasChanged())
            {
                // the max value is encoded in the filename, so the old file is stale
                File.Delete(pathname);
                _name = "";
                Save(false); // false means do not overwrite. This (recursive) call saves under a new name
            }
            else
            {
                if(_modulator == null || string.IsNullOrEmpty(_modulator.Name))
                {
                    throw new ApplicationException("Program error: The modulation krystal's modulator has not been set!");
                }
                XmlWriter w = base.BeginSaveKrystal(); // disposed of in EndSaveKrystal
                #region save heredity info
                w.WriteStartElement("modulation");
                w.WriteAttributeString("x", this._xInputFilename);
                w.WriteAttributeString("y", this._yInputFilename);
                w.WriteAttributeString("modulator", this._modulator.Name);
                w.WriteEndElement(); // modulation
                #endregion
                base.EndSaveKrystal(w); // saves the strands, closes the document, disposes of w
            }
        }
        /// <summary>
        /// True if the MaxValue property no longer matches the maximum value
        /// encoded in the current filename ("mk{level}({max})-{index}...").
        /// </summary>
        private bool MaxValueHasChanged()
        {
            // segs[1] is the "{max}" component between the parentheses
            string[] segs = _name.Split('(', ')');
            uint max = uint.Parse(segs[1]);
            if(max == MaxValue)
                return false;
            else
                return true;
        }
        /// <summary>
        /// Looks up each node's modulation result in the modulator array
        /// (1-based X/Y coordinates), then regroups the results into strands
        /// (one strand per run of values at or below this krystal's level)
        /// and updates the krystal's strand list.
        /// </summary>
        public void Modulate()
        {
            foreach(ModulationNode m in _modulationNodeList)
                m.ModResult = _modulator.Array[m.X - 1, m.Y - 1];
            #region convert to a list of strands
            List<Strand> strandList = new List<Strand>();
            List<uint> valueList = new List<uint>();
            uint strandValueLevel = this.Level + 1;
            Strand s = new Strand(1, valueList);
            ModulationNode mNode;
            for(int index = 0; index < _modulationNodeList.Count; index++)
            {
                mNode = _modulationNodeList[index];
                // a node at a shallower level than the strand-value level starts a new strand
                if(mNode.ModLevel < strandValueLevel && index > 0)
                {
                    strandList.Add(s);
                    valueList = new List<uint>();
                    s = new Strand((uint)mNode.ModLevel, valueList);
                }
                s.Values.Add((uint)mNode.ModResult);
            }
            strandList.Add(s);
            #endregion convert to a list of strands
            this.Update(strandList);
        }
        /// <summary>
        /// Re-modulates this krystal (using the existing input krystals and modulator), then saves it,
        /// overwriting the existing file.
        /// All the krystals in the krystals folder are rebuilt, when one of them has been changed.
        /// </summary>
        public override void Rebuild()
        {
            this.Modulate();
            Save(true); // true means overwrite
        }
        #endregion public functions
        #region Properties
        /// <summary>
        /// A string of uints separated by spaces, containing the x-values which occur in the
        /// modulator, but which do not have corresponding values in the X-input krystal.
        /// </summary>
        public List<uint> RedundantQualifierXInputValues { get { return _redundantModifierXInputValues; } }
        /// <summary>
        /// A string of uints separated by spaces, containing the y-values which occur in the
        /// modulator, but which do not have corresponding values in the Y-input krystal.
        /// </summary>
        public List<uint> RedundantQualifierYInputValues { get { return _redundantModifierYInputValues; } }
        public string XInputFilename { get { return _xInputFilename; } }
        public string YInputFilename { get { return _yInputFilename; } }
        public string ModulatorFilename
        {
            get
            {
                if(_modulator != null)
                    return _modulator.Name;
                else
                    return "";
            }
        }
        public ModulationInputKrystal XInputKrystal
        {
            get { return _xInputKrystal; }
            set { _xInputKrystal = value; }
        }
        public ModulationInputKrystal YInputKrystal
        {
            get { return _yInputKrystal; }
            set { _yInputKrystal = value; }
        }
        public Modulator Modulator
        {
            get { return _modulator; }
            set { _modulator = value; }
        }
        public List<ModulationNode> ModulationNodeList { get { return _modulationNodeList; } }
        private List<ModulationNode> _modulationNodeList;
        // Builds a fresh list of StrandNodes each time it is read, pairing the
        // deeper ("density") input krystal's values with the aligned values of
        // the shallower ("point") input. Throws if the inputs are incompatible
        // or contain zeros.
        public List<StrandNode> StrandNodeList
        {
            get
            {
                ModulationInputKrystal dKrystal;
                ModulationInputKrystal pKrystal;
                if(_xInputKrystal.Level >= _yInputKrystal.Level)
                {
                    dKrystal = this._xInputKrystal;
                    pKrystal = this._yInputKrystal;
                }
                else
                {
                    dKrystal = this._yInputKrystal;
                    pKrystal = this._xInputKrystal;
                }
                int[] alignedInputPointValues = pKrystal.AlignedValues(dKrystal);
                if(dKrystal.NumValues != alignedInputPointValues.Length)
                {
                    string msg = "Error: The input krystals must belong to the same density family.\n";
                    throw new ApplicationException(msg);
                }
                List<LeveledValue> leveledValues = new List<LeveledValue>();
                foreach(LeveledValue leveledValue in dKrystal.LeveledValues)
                    leveledValues.Add(leveledValue);
                // construct the list of StrandNodes
                List<StrandNode> strandNodeList = new List<StrandNode>();
                int momentIndex = 0;
                foreach(LeveledValue leveledValue in leveledValues)
                {
                    int level = leveledValue.level;
                    int mVal = leveledValue.value;
                    if(mVal == 0 || alignedInputPointValues[momentIndex] == 0)
                    {
                        string msg = "Error: An input krystal contained a value of zero.";
                        throw new ApplicationException(msg);
                    }
                    StrandNode sn = new StrandNode(momentIndex + 1, level, mVal, alignedInputPointValues[momentIndex]);
                    strandNodeList.Add(sn);
                    momentIndex++;
                }
                return (strandNodeList);
            }
        }
        #endregion Properties
        #region private functions
        /// <summary>
        /// Called from ModulationKrystal constructors, this function sets the private variables
        ///     _redundantQualifierXInputValues and
        ///     _redundantQualifierYInputValues
        /// These are lists of integers containing the legal modulator coordinates which are not actually used
        /// by the x-input or y-input krystals
        /// </summary>
        private void SetRedundantQualifierCoordinates()
        {
            List<int> kValuesList = _xInputKrystal.AbsoluteValues; // set by the InputKrystal constructor
            _redundantModifierXInputValues.Clear();
            for(uint x = 1; x <= _modulator.XDim; x++)
                if(!kValuesList.Contains((int)x))
                    _redundantModifierXInputValues.Add(x);
            kValuesList = _yInputKrystal.AbsoluteValues; // set by the InputKrystal constructor
            _redundantModifierYInputValues.Clear();
            for(uint y = 1; y <= _modulator.YDim; y++)
                if(!kValuesList.Contains((int)y))
                    _redundantModifierYInputValues.Add(y);
        }
        #endregion private functions
        #region private variables
        private List<uint> _redundantModifierXInputValues = new List<uint>();
        private List<uint> _redundantModifierYInputValues = new List<uint>();
        private readonly string _xInputFilename;
        private readonly string _yInputFilename;
        //private string _modulatorFilename;
        private ModulationInputKrystal _xInputKrystal;
        private ModulationInputKrystal _yInputKrystal;
        private Modulator _modulator;
        #endregion private variables
    }
    /// <summary>
    /// The X- and Y-Input krystals for a modulation both have this class
    /// </summary>
    public sealed class ModulationInputKrystal : InputKrystal
    {
        public ModulationInputKrystal(string filepath)
            : base(filepath)
        {
        }
    }
    /// <summary>
    /// This class contains modulation parameters, and is used to build the _modulationNodeList for a modulation.
    /// The _modulationNodeList is used as a parameter in the following constructor:
    ///     ModulationEditor.ModulationTreeView
    /// and in the function:
    ///     ModulationEditor.Modulate()
    /// </summary>
    public class ModulationNode : TreeNode
    {
        public ModulationNode(int moment, int level, int x, int y)
        {
            ModMoment = moment;
            ModLevel = level;
            X = x;
            Y = y;
        }
        // 1-based moment index within the modulation
        public int ModMoment;
        // the level of this moment in the master input krystal
        public int ModLevel;
        // 1-based x-input coordinate into the modulator array
        public int X;
        // 1-based y-input coordinate into the modulator array
        public int Y;
        // value looked up at Array[X - 1, Y - 1] by ModulationKrystal.Modulate()
        public int ModResult;
    }
}
// This file was produced by the Twilio API code generator (v1.0.0).
using System;
using System.Collections.Generic;
using Twilio.Base;
using Twilio.Converters;

namespace Twilio.Rest.Preview.Understand.Assistant.Task
{
    /// <summary>
    /// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
    /// currently do not have developer preview access, please contact help@twilio.com.
    ///
    /// FetchFieldOptions
    /// </summary>
    public class FetchFieldOptions : IOptions<FieldResource>
    {
        /// <summary>
        /// The unique ID of the Assistant.
        /// </summary>
        public string PathAssistantSid { get; }

        /// <summary>
        /// The unique ID of the Task associated with this Field.
        /// </summary>
        public string PathTaskSid { get; }

        /// <summary>
        /// A 34 character string that uniquely identifies this resource.
        /// </summary>
        public string PathSid { get; }

        /// <summary>
        /// Construct a new FetchFieldOptions
        /// </summary>
        /// <param name="pathAssistantSid"> The unique ID of the Assistant. </param>
        /// <param name="pathTaskSid"> The unique ID of the Task associated with this Field. </param>
        /// <param name="pathSid"> A 34 character string that uniquely identifies this resource. </param>
        public FetchFieldOptions(string pathAssistantSid, string pathTaskSid, string pathSid)
        {
            PathAssistantSid = pathAssistantSid;
            PathTaskSid = pathTaskSid;
            PathSid = pathSid;
        }

        /// <summary>
        /// Generate the necessary parameters
        /// </summary>
        public List<KeyValuePair<string, string>> GetParams()
        {
            // A fetch is addressed entirely through the URL path; no request
            // parameters are required.
            return new List<KeyValuePair<string, string>>();
        }
    }

    /// <summary>
    /// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
    /// currently do not have developer preview access, please contact help@twilio.com.
    ///
    /// ReadFieldOptions
    /// </summary>
    public class ReadFieldOptions : ReadOptions<FieldResource>
    {
        /// <summary>
        /// The unique ID of the Assistant.
        /// </summary>
        public string PathAssistantSid { get; }

        /// <summary>
        /// The unique ID of the Task associated with this Field.
        /// </summary>
        public string PathTaskSid { get; }

        /// <summary>
        /// Construct a new ReadFieldOptions
        /// </summary>
        /// <param name="pathAssistantSid"> The unique ID of the Assistant. </param>
        /// <param name="pathTaskSid"> The unique ID of the Task associated with this Field. </param>
        public ReadFieldOptions(string pathAssistantSid, string pathTaskSid)
        {
            PathAssistantSid = pathAssistantSid;
            PathTaskSid = pathTaskSid;
        }

        /// <summary>
        /// Generate the necessary parameters
        /// </summary>
        public override List<KeyValuePair<string, string>> GetParams()
        {
            var queryParams = new List<KeyValuePair<string, string>>();

            // PageSize is inherited from ReadOptions and is optional.
            if (PageSize != null)
            {
                queryParams.Add(new KeyValuePair<string, string>("PageSize", PageSize.ToString()));
            }

            return queryParams;
        }
    }

    /// <summary>
    /// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
    /// currently do not have developer preview access, please contact help@twilio.com.
    ///
    /// CreateFieldOptions
    /// </summary>
    public class CreateFieldOptions : IOptions<FieldResource>
    {
        /// <summary>
        /// The unique ID of the parent Assistant.
        /// </summary>
        public string PathAssistantSid { get; }

        /// <summary>
        /// The unique ID of the Task associated with this Field.
        /// </summary>
        public string PathTaskSid { get; }

        /// <summary>
        /// The unique name or sid of the FieldType. It can be any Built-in Field Type or the unique_name or sid of a custom Field Type.
        /// </summary>
        public string FieldType { get; }

        /// <summary>
        /// A user-provided string that uniquely identifies this resource as an alternative to the sid. Unique up to 64 characters long.
        /// </summary>
        public string UniqueName { get; }

        /// <summary>
        /// Construct a new CreateFieldOptions
        /// </summary>
        /// <param name="pathAssistantSid"> The unique ID of the parent Assistant. </param>
        /// <param name="pathTaskSid"> The unique ID of the Task associated with this Field. </param>
        /// <param name="fieldType"> The unique name or sid of the FieldType. It can be any Built-in Field Type or the
        ///                 unique_name or sid of a custom Field Type. </param>
        /// <param name="uniqueName"> A user-provided string that uniquely identifies this resource as an alternative to the
        ///                   sid. Unique up to 64 characters long. </param>
        public CreateFieldOptions(string pathAssistantSid, string pathTaskSid, string fieldType, string uniqueName)
        {
            PathAssistantSid = pathAssistantSid;
            PathTaskSid = pathTaskSid;
            FieldType = fieldType;
            UniqueName = uniqueName;
        }

        /// <summary>
        /// Generate the necessary parameters
        /// </summary>
        public List<KeyValuePair<string, string>> GetParams()
        {
            var postParams = new List<KeyValuePair<string, string>>();

            if (FieldType != null)
            {
                postParams.Add(new KeyValuePair<string, string>("FieldType", FieldType));
            }

            if (UniqueName != null)
            {
                postParams.Add(new KeyValuePair<string, string>("UniqueName", UniqueName));
            }

            return postParams;
        }
    }

    /// <summary>
    /// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
    /// currently do not have developer preview access, please contact help@twilio.com.
    ///
    /// DeleteFieldOptions
    /// </summary>
    public class DeleteFieldOptions : IOptions<FieldResource>
    {
        /// <summary>
        /// The unique ID of the Assistant.
        /// </summary>
        public string PathAssistantSid { get; }

        /// <summary>
        /// The unique ID of the Task associated with this Field.
        /// </summary>
        public string PathTaskSid { get; }

        /// <summary>
        /// A 34 character string that uniquely identifies this resource.
        /// </summary>
        public string PathSid { get; }

        /// <summary>
        /// Construct a new DeleteFieldOptions
        /// </summary>
        /// <param name="pathAssistantSid"> The unique ID of the Assistant. </param>
        /// <param name="pathTaskSid"> The unique ID of the Task associated with this Field. </param>
        /// <param name="pathSid"> A 34 character string that uniquely identifies this resource. </param>
        public DeleteFieldOptions(string pathAssistantSid, string pathTaskSid, string pathSid)
        {
            PathAssistantSid = pathAssistantSid;
            PathTaskSid = pathTaskSid;
            PathSid = pathSid;
        }

        /// <summary>
        /// Generate the necessary parameters
        /// </summary>
        public List<KeyValuePair<string, string>> GetParams()
        {
            // A delete is addressed entirely through the URL path; no request
            // parameters are required.
            return new List<KeyValuePair<string, string>>();
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.TeamFoundation.DistributedTask.WebApi;
using Microsoft.VisualStudio.Services.Agent.Util;
using Microsoft.VisualStudio.Services.Agent.Worker;
using Microsoft.VisualStudio.Services.Agent.Worker.Release.Artifacts;
using Microsoft.VisualStudio.Services.Agent.Worker.Release.Artifacts.Definition;
using Microsoft.VisualStudio.Services.ReleaseManagement.WebApi.Contracts;
using Microsoft.VisualStudio.Services.WebApi;
using Moq;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Xunit;

namespace Microsoft.VisualStudio.Services.Agent.Tests.Worker.Release
{
    // L0 tests for JenkinsArtifact commit-range download behavior.
    //
    // Fixes applied in review:
    //  - All test methods now return `async Task` instead of `async void`. xUnit
    //    cannot await an async-void test, so exceptions/assertion failures inside
    //    them could go unobserved and the test could pass spuriously.
    //  - `[TraitAttribute]` normalized to `[Trait]` for consistency.
    //  - Unused locals removed.
    public sealed class JenkinsArtifactL0
    {
        private Mock<IExecutionContext> _ec;
        private Mock<IGenericHttpClient> _httpClient;
        private Mock<IExtensionManager> _extensionManager;
        private ArtifactDefinition _artifactDefinition;

        // No start/end commit versions configured => no warning should be raised.
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public async Task IfNoCommitVersionExistsInArtifactDetailsNoIssueShouldBeAdded()
        {
            using (TestHostContext tc = Setup())
            {
                tc.GetTrace(); // initialize tracing for this host context (return value unused)
                var artifact = new JenkinsArtifact();
                artifact.Initialize(tc);

                await artifact.DownloadCommitsAsync(_ec.Object, _artifactDefinition, "test");

                _ec.Verify(x => x.AddIssue(It.Is<Issue>(y => y.Type == IssueType.Warning)), Times.Never);
            }
        }

        // A non-numeric end build version should surface exactly one warning issue.
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public async Task ShouldLogAnIssueIfEndVersionIsInvalidInArtifactDetail()
        {
            using (TestHostContext tc = Setup())
            {
                tc.GetTrace(); // initialize tracing for this host context (return value unused)
                JenkinsArtifactDetails details = _artifactDefinition.Details as JenkinsArtifactDetails;
                details.EndCommitArtifactVersion = "xx";

                var artifact = new JenkinsArtifact();
                artifact.Initialize(tc);

                await artifact.DownloadCommitsAsync(_ec.Object, _artifactDefinition, "test");

                _ec.Verify(x => x.AddIssue(It.Is<Issue>(y => y.Type == IssueType.Warning)), Times.Once);
            }
        }

        // With only an end version, commits should be queried from that single build.
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public async Task MissingStartVersionShouldDownloadCommitsFromSingleBuild()
        {
            using (TestHostContext tc = Setup())
            {
                JenkinsArtifactDetails details = _artifactDefinition.Details as JenkinsArtifactDetails;
                details.EndCommitArtifactVersion = "10";

                var artifact = new JenkinsArtifact();
                artifact.Initialize(tc);

                string expectedUrl = $"{details.Url}/job/{details.JobName}/{details.EndCommitArtifactVersion}/api/json?tree=number,result,changeSet[items[commitId,date,msg,author[fullName]]]";

                await artifact.DownloadCommitsAsync(_ec.Object, _artifactDefinition, tc.GetDirectory(WellKnownDirectory.Root));

                _httpClient.Verify(x => x.GetStringAsync(It.Is<string>(y => y.StartsWith(expectedUrl)), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Once);
            }
        }

        // With both start and end versions, commits should be fetched over the build range.
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public async Task JenkinsCommitsShouldBeFetchedBetweenBuildRange()
        {
            using (TestHostContext tc = Setup())
            {
                JenkinsArtifactDetails details = _artifactDefinition.Details as JenkinsArtifactDetails;
                details.StartCommitArtifactVersion = "10";
                details.EndCommitArtifactVersion = "20";

                var artifact = new JenkinsArtifact();
                artifact.Initialize(tc);
                SetupBuildRangeQuery(details, "{ \"allBuilds\": [{ \"number\": 20 }, { \"number\": 10 }, { \"number\": 2 } ] }");

                string expectedUrl = $"{details.Url}/job/{details.JobName}/api/json?tree=builds[number,result,changeSet[items[commitId,date,msg,author[fullName]]]]{{0,1}}";

                await artifact.DownloadCommitsAsync(_ec.Object, _artifactDefinition, tc.GetDirectory(WellKnownDirectory.Root));

                _httpClient.Verify(x => x.GetStringAsync(It.Is<string>(y => y.StartsWith(expectedUrl)), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Once);
            }
        }

        // Start version greater than end version (rollback) should still query the range.
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public async Task JenkinsRollbackCommitsShouldBeFetched()
        {
            using (TestHostContext tc = Setup())
            {
                JenkinsArtifactDetails details = _artifactDefinition.Details as JenkinsArtifactDetails;
                details.StartCommitArtifactVersion = "20";
                details.EndCommitArtifactVersion = "10";

                var artifact = new JenkinsArtifact();
                artifact.Initialize(tc);
                SetupBuildRangeQuery(details, "{ \"allBuilds\": [{ \"number\": 20 }, { \"number\": 10 }, { \"number\": 2 } ] }");

                string expectedUrl = $"{details.Url}/job/{details.JobName}/api/json?tree=builds[number,result,changeSet[items[commitId,date,msg,author[fullName]]]]{{0,1}}";

                await artifact.DownloadCommitsAsync(_ec.Object, _artifactDefinition, tc.GetDirectory(WellKnownDirectory.Root));

                _httpClient.Verify(x => x.GetStringAsync(It.Is<string>(y => y.StartsWith(expectedUrl)), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Once);
            }
        }

        // If the requested builds are no longer in the server's build list, a warning is expected.
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public async Task JenkinsCommitsShouldLogAnIssueIfBuildIsDeleted()
        {
            using (TestHostContext tc = Setup())
            {
                JenkinsArtifactDetails details = _artifactDefinition.Details as JenkinsArtifactDetails;
                details.StartCommitArtifactVersion = "10";
                details.EndCommitArtifactVersion = "20";

                var artifact = new JenkinsArtifact();
                artifact.Initialize(tc);
                SetupBuildRangeQuery(details, "{ \"allBuilds\": [{ \"number\": 30 }, { \"number\": 29 }, { \"number\": 28 } ] }");

                await artifact.DownloadCommitsAsync(_ec.Object, _artifactDefinition, tc.GetDirectory(WellKnownDirectory.Root));

                _ec.Verify(x => x.AddIssue(It.Is<Issue>(y => y.Type == IssueType.Warning)), Times.Once);
            }
        }

        // Downloaded commits should be queued as a file attachment on the execution context.
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public async Task CommitsShouldBeUploadedAsAttachment()
        {
            using (TestHostContext tc = Setup())
            {
                string commitRootDirectory = Path.Combine(tc.GetDirectory(WellKnownDirectory.Work), Guid.NewGuid().ToString("D"));
                Directory.CreateDirectory(commitRootDirectory);
                try
                {
                    JenkinsArtifactDetails details = _artifactDefinition.Details as JenkinsArtifactDetails;
                    details.StartCommitArtifactVersion = "10";
                    details.EndCommitArtifactVersion = "20";

                    var artifact = new JenkinsArtifact();
                    artifact.Initialize(tc);
                    SetupBuildRangeQuery(details, "{ \"allBuilds\": [{ \"number\": 20 }, { \"number\": 10 }, { \"number\": 2 } ] }");

                    string commitResult = " {\"builds\": [{ \"number\":9, \"result\":\"SUCCESS\", \"changeSet\": { \"items\": [{ \"commitId\" : \"2869c7ccd0b1b649ba6765e89ee5ff36ef6d4805\", \"author\": { \"fullName\" : \"testuser\" }, \"msg\":\"test\" }]}}]}";
                    string commitsUrl = $"{details.Url}/job/{details.JobName}/api/json?tree=builds[number,result,changeSet[items[commitId,date,msg,author[fullName]]]]{{0,1}}";
                    _httpClient.Setup(x => x.GetStringAsync(It.Is<string>(y => y.StartsWith(commitsUrl)), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()))
                        .Returns(Task.FromResult(commitResult));

                    await artifact.DownloadCommitsAsync(_ec.Object, _artifactDefinition, commitRootDirectory);

                    _ec.Verify(x => x.QueueAttachFile(It.Is<string>(y => y.Equals(CoreAttachmentType.FileAttachment)), It.IsAny<string>(), It.IsAny<string>()), Times.Once);
                }
                finally
                {
                    IOUtil.DeleteDirectory(commitRootDirectory, CancellationToken.None);
                }
            }
        }

        // For a git-backed Jenkins job, the attached commit file should carry full commit URLs.
        // NOTE(review): method name has a pre-existing typo ("Shoul"); kept to avoid breaking test filters.
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public async Task CommitsShoulHaveUrlIfItsGitRepo()
        {
            using (TestHostContext tc = Setup())
            {
                string commitRootDirectory = Path.Combine(tc.GetDirectory(WellKnownDirectory.Work), Guid.NewGuid().ToString("D"));
                Directory.CreateDirectory(commitRootDirectory);
                try
                {
                    JenkinsArtifactDetails details = _artifactDefinition.Details as JenkinsArtifactDetails;
                    details.StartCommitArtifactVersion = "10";
                    details.EndCommitArtifactVersion = "20";

                    var artifact = new JenkinsArtifact();
                    artifact.Initialize(tc);
                    SetupBuildRangeQuery(details, "{ \"allBuilds\": [{ \"number\": 20 }, { \"number\": 10 }, { \"number\": 2 } ] }");

                    string commitResult = " {\"builds\": [{ \"number\":9, \"result\":\"SUCCESS\", \"changeSet\": { \"items\": [{ \"commitId\" : \"2869c7ccd0b1b649ba6765e89ee5ff36ef6d4805\", \"author\": { \"fullName\" : \"testuser\" }, \"msg\":\"test\" }]}}]}";
                    string commitsUrl = $"{details.Url}/job/{details.JobName}/api/json?tree=builds[number,result,changeSet[items[commitId,date,msg,author[fullName]]]]{{0,1}}";
                    _httpClient.Setup(x => x.GetStringAsync(It.Is<string>(y => y.StartsWith(commitsUrl)), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()))
                        .Returns(Task.FromResult(commitResult));

                    string repoUrl = $"{details.Url}/job/{details.JobName}/{details.EndCommitArtifactVersion}/api/json?tree=actions[remoteUrls],changeSet[kind]";
                    string repoResult = "{ \"actions\": [ { \"remoteUrls\": [ \"https://github.com/TestUser/TestRepo\" ] }, ], \"changeSet\": { \"kind\": \"git\" } }";
                    _httpClient.Setup(x => x.GetStringAsync(It.Is<string>(y => y.StartsWith(repoUrl)), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()))
                        .Returns(Task.FromResult(repoResult));

                    string expectedCommitUrl = "https://github.com/TestUser/TestRepo/commit/2869c7ccd0b1b649ba6765e89ee5ff36ef6d4805";

                    await artifact.DownloadCommitsAsync(_ec.Object, _artifactDefinition, commitRootDirectory);

                    _ec.Verify(x => x.QueueAttachFile(It.Is<string>(y => y.Equals(CoreAttachmentType.FileAttachment)), It.IsAny<string>(), It.Is<string>(z => string.Join("", File.ReadAllLines(z)).Contains(expectedCommitUrl))), Times.Once);
                }
                finally
                {
                    IOUtil.DeleteDirectory(commitRootDirectory, CancellationToken.None);
                }
            }
        }

        // Stubs the "allBuilds" index query so range resolution sees the given build list.
        private void SetupBuildRangeQuery(JenkinsArtifactDetails details, string result)
        {
            string buildIndexUrl = $"{details.Url}/job/{details.JobName}/api/json?tree=allBuilds[number]";
            _httpClient.Setup(x => x.GetStringAsync(It.Is<string>(y => y.StartsWith(buildIndexUrl)), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()))
                .Returns(Task.FromResult(result));
        }

        // Creates a fresh TestHostContext with mocked execution context, HTTP client,
        // extension manager, and a default Jenkins artifact definition.
        private TestHostContext Setup([CallerMemberName] string name = "")
        {
            TestHostContext hc = new TestHostContext(this, name);
            _ec = new Mock<IExecutionContext>();
            _httpClient = new Mock<IGenericHttpClient>();
            _artifactDefinition = new ArtifactDefinition
            {
                Details = new JenkinsArtifactDetails
                {
                    Url = new Uri("http://localhost"),
                    JobName = "jenkins",
                    Alias = "jenkins"
                }
            };

            _extensionManager = new Mock<IExtensionManager>();
            hc.SetSingleton<IExtensionManager>(_extensionManager.Object);
            hc.SetSingleton<IGenericHttpClient>(_httpClient.Object);
            return hc;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Globalization;
using Xunit;

namespace System.Tests
{
    // Unit tests for System.UInt64 (ulong): construction, comparison, equality,
    // hashing, formatting (ToString) and parsing (Parse/TryParse).
    public static class UInt64Tests
    {
        [Fact]
        public static void Ctor_Empty()
        {
            // Default construction yields zero.
            var i = new ulong();
            Assert.Equal((ulong)0, i);
        }

        [Fact]
        public static void Ctor_Value()
        {
            ulong i = 41;
            Assert.Equal((ulong)41, i);
        }

        [Fact]
        public static void MaxValue()
        {
            Assert.Equal(0xFFFFFFFFFFFFFFFF, ulong.MaxValue);
        }

        [Fact]
        public static void MinValue()
        {
            Assert.Equal((ulong)0, ulong.MinValue);
        }

        // Verifies both the strongly-typed CompareTo(ulong) and the boxed
        // IComparable.CompareTo(object); only the sign of the result matters.
        [Theory]
        [InlineData((ulong)234, (ulong)234, 0)]
        [InlineData((ulong)234, ulong.MinValue, 1)]
        [InlineData((ulong)234, (ulong)0, 1)]
        [InlineData((ulong)234, (ulong)123, 1)]
        [InlineData((ulong)234, (ulong)456, -1)]
        [InlineData((ulong)234, ulong.MaxValue, -1)]
        [InlineData((ulong)234, null, 1)] // any value compares greater than null
        public static void CompareTo(ulong i, object value, int expected)
        {
            if (value is ulong)
            {
                Assert.Equal(expected, Math.Sign(i.CompareTo((ulong)value)));
            }
            IComparable comparable = i;
            Assert.Equal(expected, Math.Sign(comparable.CompareTo(value)));
        }

        [Fact]
        public static void CompareTo_ObjectNotULong_ThrowsArgumentException()
        {
            IComparable comparable = (ulong)234;
            AssertExtensions.Throws<ArgumentException>(null, () => comparable.CompareTo("a")); // Obj is not a ulong
            AssertExtensions.Throws<ArgumentException>(null, () => comparable.CompareTo(234)); // Obj is not a ulong
        }

        [Theory]
        [InlineData((ulong)789, (ulong)789, true)]
        [InlineData((ulong)788, (ulong)0, false)]
        [InlineData((ulong)0, (ulong)0, true)]
        [InlineData((ulong)789, null, false)]
        [InlineData((ulong)789, "789", false)] // string is never equal to a ulong
        [InlineData((ulong)789, 789, false)]   // boxed int is never equal to a ulong
        public static void Equals(ulong i1, object obj, bool expected)
        {
            if (obj is ulong)
            {
                ulong i2 = (ulong)obj;
                Assert.Equal(expected, i1.Equals(i2));
                Assert.Equal(expected, i1.GetHashCode().Equals(i2.GetHashCode()));
                // All test values here fit in 32 bits, so the hash is expected to
                // equal the value itself cast to int.
                Assert.Equal((int)i1, i1.GetHashCode());
            }
            Assert.Equal(expected, i1.Equals(obj));
        }

        // Test data: (value, format, provider, expected string).
        public static IEnumerable<object[]> ToStringTestData()
        {
            NumberFormatInfo emptyFormat = NumberFormatInfo.CurrentInfo;
            yield return new object[] { (ulong)0, "G", emptyFormat, "0" };
            yield return new object[] { (ulong)4567, "G", emptyFormat, "4567" };
            yield return new object[] { ulong.MaxValue, "G", emptyFormat, "18446744073709551615" };

            yield return new object[] { (ulong)0x2468, "x", emptyFormat, "2468" };
            yield return new object[] { (ulong)2468, "N", emptyFormat, string.Format("{0:N}", 2468.00) };

            NumberFormatInfo customFormat = new NumberFormatInfo();
            customFormat.NegativeSign = "#";
            customFormat.NumberDecimalSeparator = "~";
            customFormat.NumberGroupSeparator = "*";
            yield return new object[] { (ulong)2468, "N", customFormat, "2*468~00" };
        }

        [Theory]
        [MemberData(nameof(ToStringTestData))]
        public static void ToString(ulong i, string format, IFormatProvider provider, string expected)
        {
            // Format is case insensitive
            string upperFormat = format.ToUpperInvariant();
            string lowerFormat = format.ToLowerInvariant();

            string upperExpected = expected.ToUpperInvariant();
            string lowerExpected = expected.ToLowerInvariant();

            bool isDefaultProvider = (provider == null || provider == NumberFormatInfo.CurrentInfo);
            if (string.IsNullOrEmpty(format) || format.ToUpperInvariant() == "G")
            {
                if (isDefaultProvider)
                {
                    Assert.Equal(upperExpected, i.ToString());
                    Assert.Equal(upperExpected, i.ToString((IFormatProvider)null));
                }
                Assert.Equal(upperExpected, i.ToString(provider));
            }
            if (isDefaultProvider)
            {
                Assert.Equal(upperExpected, i.ToString(upperFormat));
                Assert.Equal(lowerExpected, i.ToString(lowerFormat));
                Assert.Equal(upperExpected, i.ToString(upperFormat, null));
                Assert.Equal(lowerExpected, i.ToString(lowerFormat, null));
            }
            Assert.Equal(upperExpected, i.ToString(upperFormat, provider));
            Assert.Equal(lowerExpected, i.ToString(lowerFormat, provider));
        }

        [Fact]
        public static void ToString_InvalidFormat_ThrowsFormatException()
        {
            ulong i = 123;
            Assert.Throws<FormatException>(() => i.ToString("Y")); // Invalid format
            Assert.Throws<FormatException>(() => i.ToString("Y", null)); // Invalid format
        }

        // Test data: (string, style, provider, expected parsed value).
        public static IEnumerable<object[]> Parse_Valid_TestData()
        {
            NumberStyles defaultStyle = NumberStyles.Integer;
            NumberFormatInfo emptyFormat = new NumberFormatInfo();

            NumberFormatInfo customFormat = new NumberFormatInfo();
            customFormat.CurrencySymbol = "$";

            yield return new object[] { "0", defaultStyle, null, (ulong)0 };
            yield return new object[] { "123", defaultStyle, null, (ulong)123 };
            yield return new object[] { "+123", defaultStyle, null, (ulong)123 };
            yield return new object[] { "  123  ", defaultStyle, null, (ulong)123 };
            yield return new object[] { "18446744073709551615", defaultStyle, null, 18446744073709551615 };

            yield return new object[] { "12", NumberStyles.HexNumber, null, (ulong)0x12 };
            yield return new object[] { "1000", NumberStyles.AllowThousands, null, (ulong)1000 };

            yield return new object[] { "123", defaultStyle, emptyFormat, (ulong)123 };

            yield return new object[] { "123", NumberStyles.Any, emptyFormat, (ulong)123 };
            yield return new object[] { "12", NumberStyles.HexNumber, emptyFormat, (ulong)0x12 };
            yield return new object[] { "abc", NumberStyles.HexNumber, emptyFormat, (ulong)0xabc };
            yield return new object[] { "ABC", NumberStyles.HexNumber, null, (ulong)0xabc };
            yield return new object[] { "$1,000", NumberStyles.Currency, customFormat, (ulong)1000 };
        }

        [Theory]
        [MemberData(nameof(Parse_Valid_TestData))]
        public static void Parse(string value, NumberStyles style, IFormatProvider provider, ulong expected)
        {
            ulong result;
            // If no style is specified, use the (String) or (String, IFormatProvider) overload
            if (style == NumberStyles.Integer)
            {
                Assert.True(ulong.TryParse(value, out result));
                Assert.Equal(expected, result);

                Assert.Equal(expected, ulong.Parse(value));

                // If a format provider is specified, but the style is the default, use the (String, IFormatProvider) overload
                if (provider != null)
                {
                    Assert.Equal(expected, ulong.Parse(value, provider));
                }
            }

            // If a format provider isn't specified, test the default one, using a new instance of NumberFormatInfo
            Assert.True(ulong.TryParse(value, style, provider ?? new NumberFormatInfo(), out result));
            Assert.Equal(expected, result);

            // If a format provider isn't specified, test the default one, using the (String, NumberStyles) overload
            if (provider == null)
            {
                Assert.Equal(expected, ulong.Parse(value, style));
            }
            Assert.Equal(expected, ulong.Parse(value, style, provider ?? new NumberFormatInfo()));
        }

        // Test data: (string, style, provider, expected exception type).
        public static IEnumerable<object[]> Parse_Invalid_TestData()
        {
            NumberStyles defaultStyle = NumberStyles.Integer;

            NumberFormatInfo customFormat = new NumberFormatInfo();
            customFormat.CurrencySymbol = "$";
            customFormat.NumberDecimalSeparator = ".";

            yield return new object[] { null, defaultStyle, null, typeof(ArgumentNullException) };
            yield return new object[] { "", defaultStyle, null, typeof(FormatException) };
            yield return new object[] { " \t \n \r ", defaultStyle, null, typeof(FormatException) };
            yield return new object[] { "Garbage", defaultStyle, null, typeof(FormatException) };

            yield return new object[] { "abc", defaultStyle, null, typeof(FormatException) }; // Hex value
            yield return new object[] { "1E23", defaultStyle, null, typeof(FormatException) }; // Exponent
            yield return new object[] { "(123)", defaultStyle, null, typeof(FormatException) }; // Parentheses
            yield return new object[] { 100.ToString("C0"), defaultStyle, null, typeof(FormatException) }; // Currency
            yield return new object[] { 1000.ToString("N0"), defaultStyle, null, typeof(FormatException) }; // Thousands
            yield return new object[] { 678.90.ToString("F2"), defaultStyle, null, typeof(FormatException) }; // Decimal

            yield return new object[] { "+-123", defaultStyle, null, typeof(FormatException) };
            yield return new object[] { "-+123", defaultStyle, null, typeof(FormatException) };
            yield return new object[] { "+abc", NumberStyles.HexNumber, null, typeof(FormatException) };
            yield return new object[] { "-abc", NumberStyles.HexNumber, null, typeof(FormatException) };

            yield return new object[] { "- 123", defaultStyle, null, typeof(FormatException) };
            yield return new object[] { "+ 123", defaultStyle, null, typeof(FormatException) };

            yield return new object[] { "abc", NumberStyles.None, null, typeof(FormatException) }; // Hex value
            yield return new object[] { "  123  ", NumberStyles.None, null, typeof(FormatException) }; // Trailing and leading whitespace

            yield return new object[] { "678.90", defaultStyle, customFormat, typeof(FormatException) }; // Decimal

            yield return new object[] { "-1", defaultStyle, null, typeof(OverflowException) }; // < min value
            yield return new object[] { "18446744073709551616", defaultStyle, null, typeof(OverflowException) }; // > max value
            yield return new object[] { "(123)", NumberStyles.AllowParentheses, null, typeof(OverflowException) }; // Parentheses = negative
        }

        [Theory]
        [MemberData(nameof(Parse_Invalid_TestData))]
        public static void Parse_Invalid(string value, NumberStyles style, IFormatProvider provider, Type exceptionType)
        {
            ulong result;
            // If no style is specified, use the (String) or (String, IFormatProvider) overload
            if (style == NumberStyles.Integer)
            {
                Assert.False(ulong.TryParse(value, out result));
                Assert.Equal(default(ulong), result);

                Assert.Throws(exceptionType, () => ulong.Parse(value));

                // If a format provider is specified, but the style is the default, use the (String, IFormatProvider) overload
                if (provider != null)
                {
                    Assert.Throws(exceptionType, () => ulong.Parse(value, provider));
                }
            }

            // If a format provider isn't specified, test the default one, using a new instance of NumberFormatInfo
            Assert.False(ulong.TryParse(value, style, provider ?? new NumberFormatInfo(), out result));
            Assert.Equal(default(ulong), result);

            // If a format provider isn't specified, test the default one, using the (String, NumberStyles) overload
            if (provider == null)
            {
                Assert.Throws(exceptionType, () => ulong.Parse(value, style));
            }
            Assert.Throws(exceptionType, () => ulong.Parse(value, style, provider ?? new NumberFormatInfo()));
        }

        // Invalid style flag combinations must throw ArgumentException from both
        // TryParse and Parse; `paramName` is the expected parameter name (null when
        // the implementation does not report one).
        [Theory]
        [InlineData(NumberStyles.HexNumber | NumberStyles.AllowParentheses, null)]
        [InlineData(unchecked((NumberStyles)0xFFFFFC00), "style")]
        public static void TryParse_InvalidNumberStyle_ThrowsArgumentException(NumberStyles style, string paramName)
        {
            ulong result = 0;
            AssertExtensions.Throws<ArgumentException>(paramName, () => ulong.TryParse("1", style, null, out result));
            Assert.Equal(default(ulong), result);

            AssertExtensions.Throws<ArgumentException>(paramName, () => ulong.Parse("1", style));
            AssertExtensions.Throws<ArgumentException>(paramName, () => ulong.Parse("1", style, null));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// AssociativeAggregationOperator.cs
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-

using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Threading;

namespace System.Linq.Parallel
{
    /// <summary>
    /// The aggregation operator is a little unique, in that the enumerators it returns
    /// yield intermediate results instead of the final results. That's because there is
    /// one last Aggregate operation that must occur in order to perform the final reduction
    /// over the intermediate streams. In other words, the intermediate enumerators produced
    /// by this operator are never seen by other query operators or consumers directly.
    ///
    /// An aggregation performs parallel prefixing internally. Given a binary operator O,
    /// it will generate intermediate results by folding O across partitions; then it
    /// performs a final reduction by folding O across the intermediate results. The
    /// analysis engine knows about associativity and commutativity, and will ensure the
    /// style of partitioning inserted into the tree is compatible with the operator.
    ///
    /// For instance, say O is + (meaning it is AC), our input is {1,2,...,8}, and we
    /// use 4 partitions to calculate the aggregation. Sequentially this would look
    /// like this O(O(O(1,2),...),8), in other words ((1+2)+...)+8. The parallel prefix
    /// of this (w/ 4 partitions) instead calculates the intermediate aggregations, i.e.:
    /// t1 = O(1,2), t2 = O(3,4), ... t4 = O(7,8), aka t1 = 1+2, t2 = 3+4, t4 = 7+8.
    /// The final step is to aggregate O over these intermediaries, i.e.
    /// O(O(O(t1,t2),t3),t4), or ((t1+t2)+t3)+t4. This generalizes to any binary operator.
    ///
    /// Because some aggregations use a different input, intermediate, and output types,
    /// we support an even more generalized aggregation type. In this model, we have
    /// three operators, an intermediate (used for the incremental aggregations), a
    /// final (used for the final summary of intermediate results), and a result selector
    /// (used to perform whatever transformation is needed on the final summary).
    /// </summary>
    /// <typeparam name="TInput"></typeparam>
    /// <typeparam name="TIntermediate"></typeparam>
    /// <typeparam name="TOutput"></typeparam>
    internal sealed class AssociativeAggregationOperator<TInput, TIntermediate, TOutput> : UnaryQueryOperator<TInput, TIntermediate>
    {
        private readonly TIntermediate _seed; // A seed used during aggregation.
        private readonly bool _seedIsSpecified; // Whether a seed was specified. If not, the first element will be used.
        private readonly bool _throwIfEmpty; // Whether to throw an exception if the data source is empty.

        // An intermediate reduction function.
        private readonly Func<TIntermediate, TInput, TIntermediate> _intermediateReduce;

        // A final reduction function.
        private readonly Func<TIntermediate, TIntermediate, TIntermediate> _finalReduce;

        // The result selector function.
        private readonly Func<TIntermediate, TOutput> _resultSelector;

        // A function that constructs seed instances
        private readonly Func<TIntermediate> _seedFactory;

        //---------------------------------------------------------------------------------------
        // Constructs a new instance of an associative operator.
        //
        // Assumptions:
        //     This operator must be associative.
        //

        internal AssociativeAggregationOperator(IEnumerable<TInput> child, TIntermediate seed, Func<TIntermediate> seedFactory, bool seedIsSpecified,
                                                Func<TIntermediate, TInput, TIntermediate> intermediateReduce,
                                                Func<TIntermediate, TIntermediate, TIntermediate> finalReduce,
                                                Func<TIntermediate, TOutput> resultSelector, bool throwIfEmpty, QueryAggregationOptions options)
            : base(child)
        {
            Debug.Assert(child != null, "child data source cannot be null");
            Debug.Assert(intermediateReduce != null, "need an intermediate reduce function");
            Debug.Assert(finalReduce != null, "need a final reduce function");
            Debug.Assert(resultSelector != null, "need a result selector function");
            Debug.Assert(options.IsValidQueryAggregationOption(), "enum out of valid range");
            Debug.Assert((options & QueryAggregationOptions.Associative) == QueryAggregationOptions.Associative, "expected an associative operator");
            Debug.Assert(typeof(TIntermediate) == typeof(TInput) || seedIsSpecified, "seed must be specified if TIntermediate differs from TInput");

            _seed = seed;
            _seedFactory = seedFactory;
            _seedIsSpecified = seedIsSpecified;
            _intermediateReduce = intermediateReduce;
            _finalReduce = finalReduce;
            _resultSelector = resultSelector;
            _throwIfEmpty = throwIfEmpty;
        }

        //---------------------------------------------------------------------------------------
        // Executes the entire query tree, and aggregates the intermediate results into the
        // final result based on the binary operators and final reduction.
        //
        // Return Value:
        //     The single result of aggregation.
        //

        internal TOutput Aggregate()
        {
            Debug.Assert(_finalReduce != null);
            Debug.Assert(_resultSelector != null);

            TIntermediate accumulator = default(TIntermediate);
            bool hadElements = false;

            // Because the final reduction is typically much cheaper than the intermediate
            // reductions over the individual partitions, and because each parallel partition
            // will do a lot of work to produce a single output element, we prefer to turn off
            // pipelining, and process the final reductions serially.
            using (IEnumerator<TIntermediate> enumerator = GetEnumerator(ParallelMergeOptions.FullyBuffered, true))
            {
                // We just reduce the elements in each output partition. If the operation is associative,
                // this will yield the correct answer. If not, we should never be calling this routine.
                while (enumerator.MoveNext())
                {
                    if (hadElements)
                    {
                        // Accumulate results by passing the current accumulation and current element to
                        // the reduction operation.
                        try
                        {
                            accumulator = _finalReduce(accumulator, enumerator.Current);
                        }
#if SUPPORT_THREAD_ABORT
                        catch (ThreadAbortException)
                        {
                            // Do not wrap ThreadAbortExceptions
                            throw;
                        }
#endif
                        catch (Exception ex)
                        {
                            // We need to wrap all exceptions into an aggregate.
                            throw new AggregateException(ex);
                        }
                    }
                    else
                    {
                        // This is the first element. Just set the accumulator to the first element.
                        accumulator = enumerator.Current;
                        hadElements = true;
                    }
                }

                // If there were no elements, we must throw an exception.
                if (!hadElements)
                {
                    if (_throwIfEmpty)
                    {
                        throw new InvalidOperationException(SR.NoElements);
                    }
                    else
                    {
                        // Empty source with no throw requested: fall back to the seed
                        // (or a freshly constructed seed if a factory was supplied).
                        accumulator = _seedFactory == null ? _seed : _seedFactory();
                    }
                }
            }

            // Finally, run the selection routine to yield the final element.
            try
            {
                return _resultSelector(accumulator);
            }
#if SUPPORT_THREAD_ABORT
            catch (ThreadAbortException)
            {
                // Do not wrap ThreadAbortExceptions
                throw;
            }
#endif
            catch (Exception ex)
            {
                // We need to wrap all exceptions into an aggregate.
                throw new AggregateException(ex);
            }
        }

        //---------------------------------------------------------------------------------------
        // Just opens the current operator, including opening the child and wrapping it with
        // partitions as needed.
        //

        internal override QueryResults<TIntermediate> Open(QuerySettings settings, bool preferStriping)
        {
            // We just open the child operator.
            QueryResults<TInput> childQueryResults = Child.Open(settings, preferStriping);
            return new UnaryQueryOperatorResults(childQueryResults, this, settings, preferStriping);
        }

        internal override void WrapPartitionedStream<TKey>(
            PartitionedStream<TInput, TKey> inputStream, IPartitionedStreamRecipient<TIntermediate> recipient, bool preferStriping, QuerySettings settings)
        {
            int partitionCount = inputStream.PartitionCount;
            PartitionedStream<TIntermediate, int> outputStream = new PartitionedStream<TIntermediate, int>(
                partitionCount, Util.GetDefaultComparer<int>(), OrdinalIndexState.Correct);
            for (int i = 0; i < partitionCount; i++)
            {
                outputStream[i] = new AssociativeAggregationOperatorEnumerator<TKey>(inputStream[i], this, i,
                                                                                     settings.CancellationState.MergedCancellationToken);
            }

            recipient.Receive(outputStream);
        }

        //---------------------------------------------------------------------------------------
        // Returns an enumerable that represents the query executing sequentially.
        //

        [ExcludeFromCodeCoverage]
        internal override IEnumerable<TIntermediate> AsSequentialQuery(CancellationToken token)
        {
            Debug.Fail("This method should never be called. Associative aggregation can always be parallelized.");
            throw new NotSupportedException();
        }

        //---------------------------------------------------------------------------------------
        // Whether this operator performs a premature merge that would not be performed in
        // a similar sequential operation (i.e., in LINQ to Objects).
// internal override bool LimitsParallelism { get { return false; } } //--------------------------------------------------------------------------------------- // This enumerator type encapsulates the intermediary aggregation over the underlying // (possibly partitioned) data source. // private class AssociativeAggregationOperatorEnumerator<TKey> : QueryOperatorEnumerator<TIntermediate, int> { private readonly QueryOperatorEnumerator<TInput, TKey> _source; // The source data. private readonly AssociativeAggregationOperator<TInput, TIntermediate, TOutput> _reduceOperator; // The operator. private readonly int _partitionIndex; // The index of this partition. private readonly CancellationToken _cancellationToken; private bool _accumulated; // Whether we've accumulated already. (false-sharing risk, but only written once) //--------------------------------------------------------------------------------------- // Instantiates a new aggregation operator. // internal AssociativeAggregationOperatorEnumerator(QueryOperatorEnumerator<TInput, TKey> source, AssociativeAggregationOperator<TInput, TIntermediate, TOutput> reduceOperator, int partitionIndex, CancellationToken cancellationToken) { Debug.Assert(source != null); Debug.Assert(reduceOperator != null); _source = source; _reduceOperator = reduceOperator; _partitionIndex = partitionIndex; _cancellationToken = cancellationToken; } //--------------------------------------------------------------------------------------- // This API, upon the first time calling it, walks the entire source query tree. It begins // with an accumulator value set to the aggregation operator's seed, and always passes // the accumulator along with the current element from the data source to the binary // intermediary aggregation operator. The return value is kept in the accumulator. At // the end, we will have our intermediate result, ready for final aggregation. 
// internal override bool MoveNext(ref TIntermediate currentElement, ref int currentKey) { Debug.Assert(_reduceOperator != null); Debug.Assert(_reduceOperator._intermediateReduce != null, "expected a compiled operator"); // Only produce a single element. Return false if MoveNext() was already called before. if (_accumulated) { return false; } _accumulated = true; bool hadNext = false; TIntermediate accumulator = default(TIntermediate); // Initialize the accumulator. if (_reduceOperator._seedIsSpecified) { // If the seed is specified, initialize accumulator to the seed value. accumulator = _reduceOperator._seedFactory == null ? _reduceOperator._seed : _reduceOperator._seedFactory(); } else { // If the seed is not specified, then we take the first element as the seed. // Seed may be unspecified only if TInput is the same as TIntermediate. Debug.Assert(typeof(TInput) == typeof(TIntermediate)); TInput acc = default(TInput); TKey accKeyUnused = default(TKey); if (!_source.MoveNext(ref acc, ref accKeyUnused)) return false; hadNext = true; accumulator = (TIntermediate)((object)acc); } // Scan through the source and accumulate the result. TInput input = default(TInput); TKey keyUnused = default(TKey); int i = 0; while (_source.MoveNext(ref input, ref keyUnused)) { if ((i++ & CancellationState.POLL_INTERVAL) == 0) CancellationState.ThrowIfCanceled(_cancellationToken); hadNext = true; accumulator = _reduceOperator._intermediateReduce(accumulator, input); } if (hadNext) { currentElement = accumulator; currentKey = _partitionIndex; // A reduction's "index" is just its partition number. return true; } return false; } protected override void Dispose(bool disposing) { Debug.Assert(_source != null); _source.Dispose(); } } } }
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Generic;
using System.Reflection;
using System.Timers;
using OpenMetaverse;
using log4net;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Region.CoreModules.Framework.InterfaceCommander;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using System.Xml;
using System.Xml.Serialization;
using System.IO;

namespace OpenSim.Region.OptionalModules.World.TreePopulator
{
    /// <summary>
    /// Region module that plants, grows, seeds and kills groups ("copses") of
    /// in-world tree prims, driven by console commands and a periodic timer.
    /// Version 2.02 - Still hacky
    /// </summary>
    public class TreePopulatorModule : IRegionModule, ICommandableModule, IVegetationModule
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
        private readonly Commander m_commander = new Commander("tree");
        private Scene m_scene;

        /// <summary>
        /// Serializable definition of one group of trees: its type, growth limits,
        /// planting area and the UUIDs of the tree prims currently in the scene.
        /// </summary>
        [XmlRootAttribute(ElementName = "Copse", IsNullable = false)]
        public class Copse
        {
            public string m_name;
            public Boolean m_frozen;           // Frozen copses are skipped by grow/seed/kill.
            public Tree m_tree_type;
            public int m_tree_quantity;        // Target population for the copse.
            public float m_treeline_low;       // Minimum terrain height at which trees may be created.
            public float m_treeline_high;      // Maximum terrain height at which trees may be created.
            public Vector3 m_seed_point;       // Centre of the planting area.
            public double m_range;             // Planting radius around the seed point.
            public Vector3 m_initial_scale;
            public Vector3 m_maximum_scale;
            public Vector3 m_rate;             // Scale increment applied on each growth tick.

            // Runtime-only state; not persisted to XML.
            [XmlIgnore]
            public Boolean m_planted;
            [XmlIgnore]
            public List<UUID> m_trees;

            // Parameterless constructor required by XmlSerializer.
            public Copse()
            {
            }

            /// <summary>
            /// Loads a copse definition from an XML file (see SerializeObject/DeserializeObject).
            /// </summary>
            public Copse(string fileName, Boolean planted)
            {
                Copse cp = (Copse)DeserializeObject(fileName);

                this.m_name = cp.m_name;
                this.m_frozen = cp.m_frozen;
                this.m_tree_quantity = cp.m_tree_quantity;
                this.m_treeline_high = cp.m_treeline_high;
                this.m_treeline_low = cp.m_treeline_low;
                this.m_range = cp.m_range;
                this.m_tree_type = cp.m_tree_type;
                this.m_seed_point = cp.m_seed_point;
                this.m_initial_scale = cp.m_initial_scale;
                this.m_maximum_scale = cp.m_maximum_scale;
                // NOTE(review): duplicate assignment of m_initial_scale (already set above);
                // harmless but redundant.
                this.m_initial_scale = cp.m_initial_scale;
                this.m_rate = cp.m_rate;
                this.m_planted = planted;
                this.m_trees = new List<UUID>();
            }

            /// <summary>
            /// Parses a copse definition from the in-scene encoding produced by ToString()
            /// and used as the root prim name ("ATPM:..."/"FTPM:..." prefix).
            /// </summary>
            public Copse(string copsedef)
            {
                char[] delimiterChars = {':', ';'};
                string[] field = copsedef.Split(delimiterChars);

                this.m_name = field[1].Trim();
                this.m_frozen = (copsedef[0] == 'F'); // 'F' = frozen, 'A' = active.
                this.m_tree_quantity = int.Parse(field[2]);
                this.m_treeline_high = float.Parse(field[3], Culture.NumberFormatInfo);
                this.m_treeline_low = float.Parse(field[4], Culture.NumberFormatInfo);
                this.m_range = double.Parse(field[5], Culture.NumberFormatInfo);
                this.m_tree_type = (Tree) Enum.Parse(typeof(Tree),field[6]);
                this.m_seed_point = Vector3.Parse(field[7]);
                this.m_initial_scale = Vector3.Parse(field[8]);
                this.m_maximum_scale = Vector3.Parse(field[9]);
                this.m_rate = Vector3.Parse(field[10]);
                this.m_planted = true;
                this.m_trees = new List<UUID>();
            }

            /// <summary>
            /// Constructs a copse definition directly from its parameters.
            /// </summary>
            public Copse(string name, int quantity, float high, float low, double range,
                         Vector3 point, Tree type, Vector3 scale, Vector3 max_scale, Vector3 rate, List<UUID> trees)
            {
                this.m_name = name;
                this.m_frozen = false;
                this.m_tree_quantity = quantity;
                this.m_treeline_high = high;
                this.m_treeline_low = low;
                this.m_range = range;
                this.m_tree_type = type;
                this.m_seed_point = point;
                this.m_initial_scale = scale;
                this.m_maximum_scale = max_scale;
                this.m_rate = rate;
                this.m_planted = false;
                this.m_trees = trees;
            }

            /// <summary>
            /// Encodes the copse definition as a single string; this is stored as the
            /// tree prim's name and re-parsed by Copse(string copsedef) on reload.
            /// </summary>
            public override string ToString()
            {
                string frozen = (this.m_frozen ? "F" : "A");

                return string.Format("{0}TPM: {1}; {2}; {3:0.0}; {4:0.0}; {5:0.0}; {6}; {7:0.0}; {8:0.0}; {9:0.0}; {10:0.00};",
                    frozen,
                    this.m_name,
                    this.m_tree_quantity,
                    this.m_treeline_high,
                    this.m_treeline_low,
                    this.m_range,
                    this.m_tree_type,
                    this.m_seed_point.ToString(),
                    this.m_initial_scale.ToString(),
                    this.m_maximum_scale.ToString(),
                    this.m_rate.ToString());
            }
        }

        private List<Copse> m_copse;
        private double m_update_ms = 1000.0; // msec between updates
        private bool m_active_trees = false;

        // Periodic timer driving growTrees/seedTrees/killTrees; created on activation.
        // NOTE(review): only assigned when activeizeTreeze(true) runs — callers that
        // Stop()/Start() it rely on m_active_trees being true first.
        Timer CalculateTrees;

        #region ICommandableModule Members

        public ICommander CommandInterface
        {
            get { return m_commander; }
        }

        #endregion

        #region IRegionModule Members

        /// <summary>
        /// Reads the [Trees] config section (active_trees, update_rate) and installs
        /// the console command handlers.  Config read failures fall back to defaults.
        /// </summary>
        public void Initialise(Scene scene, IConfigSource config)
        {
            m_scene = scene;
            m_scene.RegisterModuleInterface<IRegionModule>(this);
            m_scene.EventManager.OnPluginConsole += EventManager_OnPluginConsole;

            // ini file settings
            try
            {
                m_active_trees = config.Configs["Trees"].GetBoolean("active_trees", m_active_trees);
            }
            catch (Exception)
            {
                m_log.Debug("[TREES]: ini failure for active_trees - using default");
            }

            try
            {
                m_update_ms = config.Configs["Trees"].GetDouble("update_rate", m_update_ms);
            }
            catch (Exception)
            {
                m_log.Debug("[TREES]: ini failure for update_rate - using default");
            }

            InstallCommands();

            m_log.Debug("[TREES]: Initialised tree module");
        }

        /// <summary>
        /// Rebuilds copse definitions from existing in-scene trees and, if configured,
        /// starts the growth timer.
        /// </summary>
        public void PostInitialise()
        {
            ReloadCopse();
            if (m_copse.Count > 0)
                m_log.Info("[TREES]: Copse load complete");

            if (m_active_trees)
                activeizeTreeze(true);
        }

        public void Close()
        {
        }

        public string Name
        {
            get { return "TreePopulatorModule"; }
        }

        public bool IsSharedModule
        {
            get { return false; }
        }

        #endregion

        //--------------------------------------------------------------

        #region ICommandableModule Members

        // "tree active <bool>": enables/disables the periodic update timer.
        private void HandleTreeActive(Object[] args)
        {
            if ((Boolean)args[0] && !m_active_trees)
            {
                m_log.InfoFormat("[TREES]: Activating Trees");
                m_active_trees = true;
                activeizeTreeze(m_active_trees);
            }
            else if (!(Boolean)args[0] && m_active_trees)
            {
                m_log.InfoFormat("[TREES]: Trees module is no longer active");
                m_active_trees = false;
                activeizeTreeze(m_active_trees);
            }
            else
            {
                m_log.InfoFormat("[TREES]: Trees module is already in the required state");
            }
        }

        // "tree freeze <copse> <bool>": toggles a copse's frozen flag and renames its
        // trees between the ATPM/FTPM prefixes so the state survives a reload.
        private void HandleTreeFreeze(Object[] args)
        {
            string copsename = ((string)args[0]).Trim();
            Boolean freezeState = (Boolean) args[1];

            foreach (Copse cp in m_copse)
            {
                if (cp.m_name == copsename && (!cp.m_frozen && freezeState || cp.m_frozen && !freezeState))
                {
                    cp.m_frozen = freezeState;
                    foreach (UUID tree in cp.m_trees)
                    {
                        SceneObjectPart sop = ((SceneObjectGroup)m_scene.Entities[tree]).RootPart;
                        sop.Name = (freezeState ? sop.Name.Replace("ATPM", "FTPM") : sop.Name.Replace("FTPM", "ATPM"));
                        sop.ParentGroup.HasGroupChanged = true;
                    }

                    m_log.InfoFormat("[TREES]: Activity for copse {0} is frozen {1}", copsename, freezeState);
                    return;
                }
                else if (cp.m_name == copsename && (cp.m_frozen && freezeState || !cp.m_frozen && !freezeState))
                {
                    m_log.InfoFormat("[TREES]: Copse {0} is already in the requested freeze state", copsename);
                    return;
                }
            }
            m_log.InfoFormat("[TREES]: Copse {0} was not found - command failed", copsename);
        }

        // "tree load <filename>": loads a copse definition from an XML file, rejecting
        // duplicates by name.
        private void HandleTreeLoad(Object[] args)
        {
            Copse copse;

            m_log.InfoFormat("[TREES]: Loading copse definition....");

            copse = new Copse(((string)args[0]), false);
            foreach (Copse cp in m_copse)
            {
                if (cp.m_name == copse.m_name)
                {
                    m_log.InfoFormat("[TREES]: Copse: {0} is already defined - command failed", copse.m_name);
                    return;
                }
            }

            m_copse.Add(copse);
            m_log.InfoFormat("[TREES]: Loaded copse: {0}", copse.ToString());
        }

        // "tree plant <copse>": creates the first tree of a copse at its seed point,
        // owned by the estate owner.
        private void HandleTreePlant(Object[] args)
        {
            string copsename = ((string)args[0]).Trim();

            m_log.InfoFormat("[TREES]: New tree planting for copse {0}", copsename);
            UUID uuid = m_scene.RegionInfo.EstateSettings.EstateOwner;

            foreach (Copse copse in m_copse)
            {
                if (copse.m_name == copsename)
                {
                    if (!copse.m_planted)
                    {
                        // The first tree for a copse is created here
                        CreateTree(uuid, copse, copse.m_seed_point);
                        copse.m_planted = true;
                        return;
                    }
                    else
                    {
                        m_log.InfoFormat("[TREES]: Copse {0} has already been planted", copsename);
                    }
                }
            }
            m_log.InfoFormat("[TREES]: Copse {0} not found for planting", copsename);
        }

        // "tree rate <ms>": changes the update period (minimum 1000 ms) and restarts
        // the timer if it is running.
        private void HandleTreeRate(Object[] args)
        {
            m_update_ms = (double)args[0];
            if (m_update_ms >= 1000.0)
            {
                if (m_active_trees)
                {
                    activeizeTreeze(false);
                    activeizeTreeze(true);
                }
                m_log.InfoFormat("[TREES]: Update rate set to {0} mSec", m_update_ms);
            }
            else
            {
                m_log.InfoFormat("[TREES]: minimum rate is 1000.0 mSec - command failed");
            }
        }

        // "tree reload": rebuilds copse definitions from the in-scene trees, pausing
        // the timer around the scan.
        private void HandleTreeReload(Object[] args)
        {
            if (m_active_trees)
            {
                CalculateTrees.Stop();
            }

            ReloadCopse();

            if (m_active_trees)
            {
                CalculateTrees.Start();
            }
        }

        // "tree remove <copse>": deletes all of a copse's trees from the scene and
        // forgets the copse definition.
        private void HandleTreeRemove(Object[] args)
        {
            string copsename = ((string)args[0]).Trim();
            Copse copseIdentity = null;

            foreach (Copse cp in m_copse)
            {
                if (cp.m_name == copsename)
                {
                    copseIdentity = cp;
                }
            }

            if (copseIdentity != null)
            {
                foreach (UUID tree in copseIdentity.m_trees)
                {
                    if (m_scene.Entities.ContainsKey(tree))
                    {
                        SceneObjectPart selectedTree = ((SceneObjectGroup)m_scene.Entities[tree]).RootPart;
                        // Delete tree and alert clients (not silent)
                        m_scene.DeleteSceneObject(selectedTree.ParentGroup, false);
                        m_scene.ForEachClient(delegate(IClientAPI controller)
                        {
                            controller.SendKillObject(m_scene.RegionInfo.RegionHandle, selectedTree.LocalId);
                        });
                    }
                    else
                    {
                        m_log.DebugFormat("[TREES]: Tree not in scene {0}", tree);
                    }
                }
                copseIdentity.m_trees = new List<UUID>();
                m_copse.Remove(copseIdentity);
                m_log.InfoFormat("[TREES]: Copse {0} has been removed", copsename);
            }
            else
            {
                m_log.InfoFormat("[TREES]: Copse {0} was not found - command failed", copsename);
            }
        }

        // "tree statistics": logs the module state and per-copse tree counts.
        private void HandleTreeStatistics(Object[] args)
        {
            m_log.InfoFormat("[TREES]: Activity State: {0};  Update Rate: {1}", m_active_trees, m_update_ms);
            foreach (Copse cp in m_copse)
            {
                m_log.InfoFormat("[TREES]: Copse {0}; {1} trees; frozen {2}", cp.m_name, cp.m_trees.Count, cp.m_frozen);
            }
        }

        /// <summary>
        /// Registers all "tree ..." console commands with the module commander.
        /// </summary>
        private void InstallCommands()
        {
            Command treeActiveCommand =
                new Command("active", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeActive, "Change activity state for the trees module");
            treeActiveCommand.AddArgument("activeTF", "The required activity state", "Boolean");

            Command treeFreezeCommand =
                new Command("freeze", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeFreeze, "Freeze/Unfreeze activity for a defined copse");
            treeFreezeCommand.AddArgument("copse", "The required copse", "String");
            treeFreezeCommand.AddArgument("freezeTF", "The required freeze state", "Boolean");

            Command treeLoadCommand =
                new Command("load", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeLoad, "Load a copse definition from an xml file");
            treeLoadCommand.AddArgument("filename", "The (xml) file you wish to load", "String");

            Command treePlantCommand =
                new Command("plant", CommandIntentions.COMMAND_HAZARDOUS, HandleTreePlant, "Start the planting on a copse");
            treePlantCommand.AddArgument("copse", "The required copse", "String");

            Command treeRateCommand =
                new Command("rate", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeRate, "Reset the tree update rate (mSec)");
            treeRateCommand.AddArgument("updateRate", "The required update rate (minimum 1000.0)", "Double");

            Command treeReloadCommand =
                new Command("reload", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeReload, "Reload copse definitions from the in-scene trees");

            Command treeRemoveCommand =
                new Command("remove", CommandIntentions.COMMAND_HAZARDOUS, HandleTreeRemove, "Remove a copse definition and all its in-scene trees");
            treeRemoveCommand.AddArgument("copse", "The required copse", "String");

            Command treeStatisticsCommand =
                new Command("statistics", CommandIntentions.COMMAND_STATISTICAL, HandleTreeStatistics, "Log statistics about the trees");

            m_commander.RegisterCommand("active", treeActiveCommand);
            m_commander.RegisterCommand("freeze", treeFreezeCommand);
            m_commander.RegisterCommand("load", treeLoadCommand);
            m_commander.RegisterCommand("plant", treePlantCommand);
            m_commander.RegisterCommand("rate", treeRateCommand);
            m_commander.RegisterCommand("reload", treeReloadCommand);
            m_commander.RegisterCommand("remove", treeRemoveCommand);
            m_commander.RegisterCommand("statistics", treeStatisticsCommand);

            m_scene.RegisterModuleCommander(m_commander);
        }

        /// <summary>
        /// Processes commandline input. Do not call directly.
        /// </summary>
        /// <param name="args">Commandline arguments</param>
        private void EventManager_OnPluginConsole(string[] args)
        {
            if (args[0] == "tree")
            {
                if (args.Length == 1)
                {
                    // Bare "tree" prints command help.
                    m_commander.ProcessConsoleCommand("help", new string[0]);
                    return;
                }

                string[] tmpArgs = new string[args.Length - 2];
                int i;
                for (i = 2; i < args.Length; i++)
                {
                    tmpArgs[i - 2] = args[i];
                }

                m_commander.ProcessConsoleCommand(args[1], tmpArgs);
            }
        }
        #endregion

        #region IVegetationModule Members

        /// <summary>
        /// Creates a tree prim in the scene with the given scale, rotation and position.
        /// </summary>
        public SceneObjectGroup AddTree(
            UUID uuid, UUID groupID, Vector3 scale, Quaternion rotation, Vector3 position, Tree treeType, bool newTree)
        {
            PrimitiveBaseShape treeShape = new PrimitiveBaseShape();
            treeShape.PathCurve = 16;
            treeShape.PathEnd = 49900;
            treeShape.PCode = newTree ? (byte)PCode.NewTree : (byte)PCode.Tree;
            treeShape.Scale = scale;
            treeShape.State = (byte)treeType; // Tree species is encoded in the prim state byte.

            return m_scene.AddNewPrim(uuid, groupID, position, rotation, treeShape);
        }

        #endregion

        #region IEntityCreator Members

        protected static readonly PCode[] creationCapabilities = new PCode[] { PCode.NewTree, PCode.Tree };
        public PCode[] CreationCapabilities { get { return creationCapabilities; } }

        /// <summary>
        /// Creates a phantom tree entity for a shape whose PCode this module handles;
        /// returns null for unsupported PCodes.
        /// </summary>
        public SceneObjectGroup CreateEntity(
            UUID ownerID, UUID groupID, Vector3 pos, Quaternion rot, PrimitiveBaseShape shape)
        {
            if (Array.IndexOf(creationCapabilities, (PCode)shape.PCode) < 0)
            {
                m_log.DebugFormat("[VEGETATION]: PCode {0} not handled by {1}", shape.PCode, Name);
                return null;
            }

            SceneObjectGroup sceneObject = new SceneObjectGroup(ownerID, pos, rot, shape);
            SceneObjectPart rootPart = sceneObject.GetChildPart(sceneObject.UUID);

            rootPart.AddFlag(PrimFlags.Phantom);

            m_scene.AddNewSceneObject(sceneObject, true);
            sceneObject.SetGroup(groupID, null);

            return sceneObject;
        }

        #endregion

        //--------------------------------------------------------------

        #region Tree Utilities

        /// <summary>
        /// Serializes a Copse to an XML file.  Wraps any failure in ApplicationException.
        /// </summary>
        static public void SerializeObject(string fileName, Object obj)
        {
            try
            {
                XmlSerializer xs = new XmlSerializer(typeof(Copse));

                using (XmlTextWriter writer = new XmlTextWriter(fileName, Util.UTF8))
                {
                    writer.Formatting = Formatting.Indented;
                    xs.Serialize(writer, obj);
                }
            }
            catch (SystemException ex)
            {
                throw new ApplicationException("Unexpected failure in Tree serialization", ex);
            }
        }

        /// <summary>
        /// Deserializes a Copse from an XML file.  Wraps any failure in ApplicationException.
        /// </summary>
        static public object DeserializeObject(string fileName)
        {
            try
            {
                XmlSerializer xs = new XmlSerializer(typeof(Copse));

                using (FileStream fs = new FileStream(fileName, FileMode.Open, FileAccess.Read))
                    return xs.Deserialize(fs);
            }
            catch (SystemException ex)
            {
                throw new ApplicationException("Unexpected failure in Tree de-serialization", ex);
            }
        }

        /// <summary>
        /// Rebuilds m_copse by scanning scene objects whose names carry the
        /// "ATPM:"/"FTPM:" copse encoding (see Copse.ToString()).
        /// </summary>
        private void ReloadCopse()
        {
            m_copse = new List<Copse>();

            EntityBase[] objs = m_scene.GetEntities();
            foreach (EntityBase obj in objs)
            {
                if (obj is SceneObjectGroup)
                {
                    SceneObjectGroup grp = (SceneObjectGroup)obj;

                    if (grp.Name.Length > 5 && (grp.Name.Substring(0, 5) == "ATPM:" || grp.Name.Substring(0, 5) == "FTPM:"))
                    {
                        // Create a new copse definition or add uuid to an existing definition
                        try
                        {
                            Boolean copsefound = false;
                            Copse copse = new Copse(grp.Name);

                            foreach (Copse cp in m_copse)
                            {
                                if (cp.m_name == copse.m_name)
                                {
                                    copsefound = true;
                                    cp.m_trees.Add(grp.UUID);
                                    //m_log.DebugFormat("[TREES]: Found tree {0}", grp.UUID);
                                }
                            }

                            if (!copsefound)
                            {
                                m_log.InfoFormat("[TREES]: Found copse {0}", grp.Name);
                                m_copse.Add(copse);
                                copse.m_trees.Add(grp.UUID);
                            }
                        }
                        catch
                        {
                            // Best-effort: a malformed name just means this object is not ours.
                            m_log.InfoFormat("[TREES]: Ill formed copse definition {0} - ignoring", grp.Name);
                        }
                    }
                }
            }
        }
        #endregion

        // Starts or stops the periodic update timer.
        // NOTE(review): activeizeTreeze(false) assumes the timer was previously created
        // by activeizeTreeze(true); callers guard this with m_active_trees.
        private void activeizeTreeze(bool activeYN)
        {
            if (activeYN)
            {
                CalculateTrees = new Timer(m_update_ms);
                CalculateTrees.Elapsed += CalculateTrees_Elapsed;
                CalculateTrees.Start();
            }
            else
            {
                CalculateTrees.Stop();
            }
        }

        // Grows every unfrozen tree by its copse's rate until it reaches maximum scale.
        private void growTrees()
        {
            foreach (Copse copse in m_copse)
            {
                if (!copse.m_frozen)
                {
                    foreach (UUID tree in copse.m_trees)
                    {
                        if (m_scene.Entities.ContainsKey(tree))
                        {
                            SceneObjectPart s_tree = ((SceneObjectGroup)m_scene.Entities[tree]).RootPart;

                            if (s_tree.Scale.X < copse.m_maximum_scale.X && s_tree.Scale.Y < copse.m_maximum_scale.Y && s_tree.Scale.Z < copse.m_maximum_scale.Z)
                            {
                                s_tree.Scale += copse.m_rate;
                                s_tree.ParentGroup.HasGroupChanged = true;
                                s_tree.ScheduleFullUpdate();
                            }
                        }
                        else
                        {
                            m_log.DebugFormat("[TREES]: Tree not in scene {0}", tree);
                        }
                    }
                }
            }
        }

        // Gives sufficiently grown trees in under-populated copses a chance to spawn children.
        private void seedTrees()
        {
            foreach (Copse copse in m_copse)
            {
                if (!copse.m_frozen)
                {
                    foreach (UUID tree in copse.m_trees)
                    {
                        if (m_scene.Entities.ContainsKey(tree))
                        {
                            SceneObjectPart s_tree = ((SceneObjectGroup)m_scene.Entities[tree]).RootPart;

                            if (copse.m_trees.Count < copse.m_tree_quantity)
                            {
                                // Tree has grown enough to seed if it has grown by at least 25% of seeded to full grown height
                                if (s_tree.Scale.Z > copse.m_initial_scale.Z + (copse.m_maximum_scale.Z - copse.m_initial_scale.Z) / 4.0)
                                {
                                    if (Util.RandomClass.NextDouble() > 0.75)
                                    {
                                        SpawnChild(copse, s_tree);
                                    }
                                }
                            }
                        }
                        else
                        {
                            m_log.DebugFormat("[TREES]: Tree not in scene {0}", tree);
                        }
                    }
                }
            }
        }

        // Once a copse is at capacity, randomly kills trees, weighting towards trees
        // that are large relative to close neighbours (crowding pressure).
        private void killTrees()
        {
            foreach (Copse copse in m_copse)
            {
                if (!copse.m_frozen && copse.m_trees.Count >= copse.m_tree_quantity)
                {
                    foreach (UUID tree in copse.m_trees)
                    {
                        double killLikelyhood = 0.0;

                        if (m_scene.Entities.ContainsKey(tree))
                        {
                            SceneObjectPart selectedTree = ((SceneObjectGroup)m_scene.Entities[tree]).RootPart;
                            double selectedTreeScale = Math.Sqrt(Math.Pow(selectedTree.Scale.X, 2) +
                                                                 Math.Pow(selectedTree.Scale.Y, 2) +
                                                                 Math.Pow(selectedTree.Scale.Z, 2));

                            foreach (UUID picktree in copse.m_trees)
                            {
                                if (picktree != tree)
                                {
                                    SceneObjectPart pickedTree = ((SceneObjectGroup)m_scene.Entities[picktree]).RootPart;

                                    double pickedTreeScale = Math.Sqrt(Math.Pow(pickedTree.Scale.X, 2) +
                                                                       Math.Pow(pickedTree.Scale.Y, 2) +
                                                                       Math.Pow(pickedTree.Scale.Z, 2));

                                    double pickedTreeDistance = Vector3.Distance(pickedTree.AbsolutePosition, selectedTree.AbsolutePosition);

                                    killLikelyhood += (selectedTreeScale / (pickedTreeScale * pickedTreeDistance)) * 0.1;
                                }
                            }

                            if (Util.RandomClass.NextDouble() < killLikelyhood)
                            {
                                m_scene.DeleteSceneObject(selectedTree.ParentGroup, false);
                                copse.m_trees.Remove(selectedTree.ParentGroup.UUID);

                                m_scene.ForEachClient(delegate(IClientAPI controller)
                                {
                                    controller.SendKillObject(m_scene.RegionInfo.RegionHandle, selectedTree.LocalId);
                                });

                                // Stop enumerating this copse: the collection was modified above.
                                break;
                            }
                        }
                        else
                        {
                            m_log.DebugFormat("[TREES]: Tree not in scene {0}", tree);
                        }
                    }
                }
            }
        }

        // Plants a child tree at a random offset around the parent, clamped to the
        // region bounds and the copse's planting range.
        // NOTE(review): randY is scaled by Scale.X (not Scale.Y) — looks like a
        // copy/paste slip, but behavior is preserved here; confirm before changing.
        private void SpawnChild(Copse copse, SceneObjectPart s_tree)
        {
            Vector3 position = new Vector3();

            double randX = ((Util.RandomClass.NextDouble() * 2.0) - 1.0) * (s_tree.Scale.X * 3);
            double randY = ((Util.RandomClass.NextDouble() * 2.0) - 1.0) * (s_tree.Scale.X * 3);

            position.X = s_tree.AbsolutePosition.X + (float)randX;
            position.Y = s_tree.AbsolutePosition.Y + (float)randY;

            if (position.X <= ((int)Constants.RegionSize - 1) && position.X >= 0 &&
                position.Y <= ((int)Constants.RegionSize - 1) && position.Y >= 0 &&
                Util.GetDistanceTo(position, copse.m_seed_point) <= copse.m_range)
            {
                UUID uuid = m_scene.RegionInfo.EstateSettings.EstateOwner;

                CreateTree(uuid, copse, position);
            }
        }

        // Creates a tree at ground level if the terrain height there is within the
        // copse's treeline band.
        private void CreateTree(UUID uuid, Copse copse, Vector3 position)
        {
            // Snap the tree to the terrain surface at (X, Y).
            position.Z = (float)m_scene.Voxels.GetDoubles()[(int)position.X, (int)position.Y];
            if (position.Z >= copse.m_treeline_low && position.Z <= copse.m_treeline_high)
            {
                SceneObjectGroup tree = AddTree(uuid, UUID.Zero, copse.m_initial_scale, Quaternion.Identity, position, copse.m_tree_type, false);

                // The prim name carries the encoded copse definition so ReloadCopse can find it.
                tree.Name = copse.ToString();
                copse.m_trees.Add(tree.UUID);
                tree.SendGroupFullUpdate();
            }
        }

        // Timer callback: one tick of the tree life cycle.
        private void CalculateTrees_Elapsed(object sender, ElapsedEventArgs e)
        {
            growTrees();
            seedTrees();
            killTrees();
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 * */

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Reflection;
using System.Text;
using System.Xml;
using log4net;
using Nini.Config;
using OpenMetaverse;
using Mono.Addins;
using OpenSim.Framework;
using OpenSim.Region.DataSnapshot.Interfaces;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;

namespace OpenSim.Region.DataSnapshot
{
    /// <summary>
    /// Shared region module that periodically exposes XML "snapshots" of region data
    /// (collected from IDataSnapshotProvider plugins) over the region HTTP server, and
    /// notifies external data services when the region comes online or goes offline.
    /// </summary>
    [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "DataSnapshotManager")]
    public class DataSnapshotManager : ISharedRegionModule, IDataSnapshot
    {
        #region Class members
        //Information from config
        private bool m_enabled = false;
        private bool m_configLoaded = false;               // guards one-time config parse across scenes
        private List<string> m_disabledModules = new List<string>();
        private Dictionary<string, string> m_gridinfo = new Dictionary<string, string>();
        private string m_snapsDir = "DataSnapshot";        // snapshot cache directory (config: snapshot_cache_directory)
        private string m_exposure_level = "minimum";       // how much data providers should expose (config: data_exposure)

        //Lists of stuff we need
        private List<Scene> m_scenes = new List<Scene>();
        private List<IDataSnapshotProvider> m_dataproviders = new List<IDataSnapshotProvider>();

        //Various internal objects
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
        internal object m_syncInit = new object();

        //DataServices and networking
        // Semicolon-separated list of external service URLs; "noservices" is the sentinel for "none configured".
        private string m_dataServices = "noservices";
        public string m_listener_port = ConfigSettings.DefaultRegionHttpPort.ToString();
        public string m_hostname = "127.0.0.1";
        // Random per-instance secret handed to data services so they can authenticate callbacks.
        private UUID m_Secret = UUID.Random();
        private bool m_servicesNotified = false;

        //Update timers
        private int m_period = 20; // in seconds
        private int m_maxStales = 500;   // stale-change count that forces an early refresh
        private int m_stales = 0;
        private int m_lastUpdate = 0;    // Environment.TickCount of the last refresh (ms, wraps ~24.9 days)

        //Program objects
        private SnapshotStore m_snapStore = null;

        #endregion

        #region Properties

        /// <summary>Configured data exposure level ("minimum" by default).</summary>
        public string ExposureLevel
        {
            get { return m_exposure_level; }
        }

        /// <summary>Per-run secret shared with notified data services.</summary>
        public UUID Secret
        {
            get { return m_Secret; }
        }

        #endregion

        #region Region Module interface

        /// <summary>
        /// Reads the [DataSnapshot] (and related) config sections once, on the first call.
        /// Any exception while reading disables the module entirely.
        /// </summary>
        public void Initialise(IConfigSource config)
        {
            if (!m_configLoaded)
            {
                m_configLoaded = true;
                //m_log.Debug("[DATASNAPSHOT]: Loading configuration");
                //Read from the config for options
                lock (m_syncInit)
                {
                    try
                    {
                        m_enabled = config.Configs["DataSnapshot"].GetBoolean("index_sims", m_enabled);
                        // GatekeeperURI may live in any of these sections; first match wins.
                        string gatekeeper = Util.GetConfigVarFromSections<string>(config, "GatekeeperURI",
                            new string[] { "Startup", "Hypergrid", "GridService" }, String.Empty);
                        // Legacy. Remove soon!
                        if (string.IsNullOrEmpty(gatekeeper))
                        {
                            IConfig conf = config.Configs["GridService"];
                            if (conf != null)
                                gatekeeper = conf.GetString("Gatekeeper", gatekeeper);
                        }
                        if (!string.IsNullOrEmpty(gatekeeper))
                            m_gridinfo.Add("gatekeeperURL", gatekeeper);

                        m_gridinfo.Add(
                            "name", config.Configs["DataSnapshot"].GetString("gridname", "the lost continent of hippo"));
                        m_exposure_level = config.Configs["DataSnapshot"].GetString("data_exposure", m_exposure_level);
                        m_period = config.Configs["DataSnapshot"].GetInt("default_snapshot_period", m_period);
                        m_maxStales = config.Configs["DataSnapshot"].GetInt("max_changes_before_update", m_maxStales);
                        m_snapsDir = config.Configs["DataSnapshot"].GetString("snapshot_cache_directory", m_snapsDir);
                        m_listener_port = config.Configs["Network"].GetString("http_listener_port", m_listener_port);

                        m_dataServices = config.Configs["DataSnapshot"].GetString("data_services", m_dataServices);
                        // New way of spec'ing data services, one per line
                        AddDataServicesVars(config.Configs["DataSnapshot"]);

                        // NOTE(review): splitting on "." looks suspicious for a list of module names
                        // (module names commonly contain dots) — confirm the intended delimiter.
                        String[] annoying_string_array = config.Configs["DataSnapshot"].GetString("disable_modules", "").Split(".".ToCharArray());
                        foreach (String bloody_wanker in annoying_string_array)
                        {
                            m_disabledModules.Add(bloody_wanker);
                        }
                        m_lastUpdate = Environment.TickCount;
                    }
                    catch (Exception)
                    {
                        m_log.Warn("[DATASNAPSHOT]: Could not load configuration. DataSnapshot will be disabled.");
                        m_enabled = false;
                        return;
                    }
                }
            }
        }

        /// <summary>
        /// Registers a scene: on the first scene, sets up the snapshot store, the HTTP
        /// request handler and the "online" notification; then instantiates every public,
        /// concrete IDataSnapshotProvider found in this assembly for the scene.
        /// </summary>
        public void AddRegion(Scene scene)
        {
            if (!m_enabled)
                return;

            m_log.DebugFormat("[DATASNAPSHOT]: Module added to Scene {0}.", scene.RegionInfo.RegionName);

            if (!m_servicesNotified)
            {
                m_hostname = scene.RegionInfo.ExternalHostName;
                m_snapStore = new SnapshotStore(m_snapsDir, m_gridinfo, m_listener_port, m_hostname);

                //Hand it the first scene, assuming that all scenes have the same BaseHTTPServer
                new DataRequestHandler(scene, this);

                if (m_dataServices != "" && m_dataServices != "noservices")
                    NotifyDataServices(m_dataServices, "online");

                m_servicesNotified = true;
            }

            m_scenes.Add(scene);
            m_snapStore.AddScene(scene);

            Assembly currentasm = Assembly.GetExecutingAssembly();

            // Reflection-based plugin discovery: one fresh provider instance per scene.
            foreach (Type pluginType in currentasm.GetTypes())
            {
                if (pluginType.IsPublic)
                {
                    if (!pluginType.IsAbstract)
                    {
                        if (pluginType.GetInterface("IDataSnapshotProvider") != null)
                        {
                            IDataSnapshotProvider module = (IDataSnapshotProvider)Activator.CreateInstance(pluginType);
                            module.Initialize(scene, this);
                            module.OnStale += MarkDataStale;

                            m_dataproviders.Add(module);
                            m_snapStore.AddProvider(module);

                            m_log.Debug("[DATASNAPSHOT]: Added new data provider type: " + pluginType.Name);
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Deregisters a scene and all of its data providers from indexing.
        /// </summary>
        public void RemoveRegion(Scene scene)
        {
            if (!m_enabled)
                return;

            m_log.Info("[DATASNAPSHOT]: Region " + scene.RegionInfo.RegionName + " is being removed, removing from indexing");
            Scene restartedScene = SceneForUUID(scene.RegionInfo.RegionID);

            m_scenes.Remove(restartedScene);
            m_snapStore.RemoveScene(restartedScene);

            //Getting around the fact that we can't remove objects from a collection we are enumerating over
            List<IDataSnapshotProvider> providersToRemove = new List<IDataSnapshotProvider>();

            foreach (IDataSnapshotProvider provider in m_dataproviders)
            {
                if (provider.GetParentScene == restartedScene)
                {
                    providersToRemove.Add(provider);
                }
            }

            foreach (IDataSnapshotProvider provider in providersToRemove)
            {
                m_dataproviders.Remove(provider);
                m_snapStore.RemoveProvider(provider);
            }

            // NOTE(review): RemoveScene was already called above — this second call looks redundant;
            // confirm against SnapshotStore.RemoveScene whether it is idempotent/intentional.
            m_snapStore.RemoveScene(restartedScene);
        }

        public void PostInitialise()
        {
        }

        /// <summary>Marks a freshly loaded scene stale so its first snapshot is generated.</summary>
        public void RegionLoaded(Scene scene)
        {
            if (!m_enabled)
                return;

            m_log.DebugFormat("[DATASNAPSHOT]: Marking scene {0} as stale.", scene.RegionInfo.RegionName);
            m_snapStore.ForceSceneStale(scene);
        }

        /// <summary>Sends the "offline" notification to configured data services on shutdown.</summary>
        public void Close()
        {
            if (!m_enabled)
                return;

            if (m_enabled && m_dataServices != "" && m_dataServices != "noservices")
                NotifyDataServices(m_dataServices, "offline");
        }

        public string Name
        {
            get { return "External Data Generator"; }
        }

        public Type ReplaceableInterface
        {
            get { return null; }
        }

        #endregion

        #region Associated helper functions

        /// <summary>Finds a registered scene by region name; null if not registered.</summary>
        public Scene SceneForName(string name)
        {
            foreach (Scene scene in m_scenes)
                if (scene.RegionInfo.RegionName == name)
                    return scene;

            return null;
        }

        /// <summary>Finds a registered scene by region UUID; null if not registered.</summary>
        public Scene SceneForUUID(UUID id)
        {
            foreach (Scene scene in m_scenes)
                if (scene.RegionInfo.RegionID == id)
                    return scene;

            return null;
        }

        /// <summary>
        /// Merges per-key service URLs (config keys prefixed "DATA_SRV_") into the
        /// semicolon-separated m_dataServices list, skipping URLs already present.
        /// </summary>
        private void AddDataServicesVars(IConfig config)
        {
            // Make sure the services given this way aren't in m_dataServices already
            List<string> servs = new List<string>(m_dataServices.Split(new char[] { ';' }));

            StringBuilder sb = new StringBuilder();
            string[] keys = config.GetKeys();

            if (keys.Length > 0)
            {
                IEnumerable<string> serviceKeys = keys.Where(value => value.StartsWith("DATA_SRV_"));
                foreach (string serviceKey in serviceKeys)
                {
                    string keyValue = config.GetString(serviceKey, string.Empty).Trim();
                    if (!servs.Contains(keyValue))
                        sb.Append(keyValue).Append(";");
                }
            }

            // "noservices" sentinel is dropped; otherwise new entries are prepended to the old list.
            m_dataServices = (m_dataServices == "noservices") ? sb.ToString() : sb.Append(m_dataServices).ToString();
        }

        #endregion

        #region [Public] Snapshot storage functions

        /**
         * Reply to the http request
         */
        /// <summary>
        /// Builds the XML snapshot document for one region (by name) or, when
        /// <paramref name="regionName"/> is empty, for all registered regions plus an
        /// &lt;expire&gt; hint. Errors are returned as an XML error document.
        /// </summary>
        public XmlDocument GetSnapshot(string regionName)
        {
            // Opportunistically refresh stale scenes before serving.
            CheckStale();

            XmlDocument requestedSnap = new XmlDocument();
            requestedSnap.AppendChild(requestedSnap.CreateXmlDeclaration("1.0", null, null));
            requestedSnap.AppendChild(requestedSnap.CreateWhitespace("\r\n"));

            XmlNode regiondata = requestedSnap.CreateNode(XmlNodeType.Element, "regiondata", "");

            try
            {
                if (string.IsNullOrEmpty(regionName))
                {
                    // <expire> tells the consumer how long (seconds) this snapshot stays fresh.
                    XmlNode timerblock = requestedSnap.CreateNode(XmlNodeType.Element, "expire", "");
                    timerblock.InnerText = m_period.ToString();
                    regiondata.AppendChild(timerblock);

                    regiondata.AppendChild(requestedSnap.CreateWhitespace("\r\n"));
                    foreach (Scene scene in m_scenes)
                    {
                        regiondata.AppendChild(m_snapStore.GetScene(scene, requestedSnap));
                    }
                }
                else
                {
                    // NOTE(review): SceneForName may return null for an unknown region name;
                    // the resulting exception is converted to an error document below.
                    Scene scene = SceneForName(regionName);
                    regiondata.AppendChild(m_snapStore.GetScene(scene, requestedSnap));
                }
                requestedSnap.AppendChild(regiondata);
                regiondata.AppendChild(requestedSnap.CreateWhitespace("\r\n"));
            }
            catch (XmlException e)
            {
                m_log.Warn("[DATASNAPSHOT]: XmlException while trying to load snapshot: " + e.ToString());
                requestedSnap = GetErrorMessage(regionName, e);
            }
            catch (Exception e)
            {
                m_log.Warn("[DATASNAPSHOT]: Caught unknown exception while trying to load snapshot: " + e.StackTrace);
                requestedSnap = GetErrorMessage(regionName, e);
            }

            return requestedSnap;
        }

        /// <summary>Wraps a region name and exception into an &lt;error&gt; XML document.</summary>
        private XmlDocument GetErrorMessage(string regionName, Exception e)
        {
            XmlDocument errorMessage = new XmlDocument();
            XmlNode error = errorMessage.CreateNode(XmlNodeType.Element, "error", "");
            XmlNode region = errorMessage.CreateNode(XmlNodeType.Element, "region", "");
            region.InnerText = regionName;

            XmlNode exception = errorMessage.CreateNode(XmlNodeType.Element, "exception", "");
            exception.InnerText = e.ToString();

            error.AppendChild(region);
            error.AppendChild(exception);
            errorMessage.AppendChild(error);

            return errorMessage;
        }

        #endregion

        #region External data services

        /// <summary>
        /// GETs each configured service URL with service/host/port/secret query parameters
        /// to announce this region going "online" or "offline". Failures are logged and
        /// ignored — notification is best-effort.
        /// </summary>
        private void NotifyDataServices(string servicesStr, string serviceName)
        {
            Stream reply = null;
            string delimStr = ";";
            char [] delimiter = delimStr.ToCharArray();

            string[] services = servicesStr.Split(delimiter, StringSplitOptions.RemoveEmptyEntries);

            for (int i = 0; i < services.Length; i++)
            {
                string url = services[i].Trim();
                using (RestClient cli = new RestClient(url))
                {
                    cli.AddQueryParameter("service", serviceName);
                    cli.AddQueryParameter("host", m_hostname);
                    cli.AddQueryParameter("port", m_listener_port);
                    cli.AddQueryParameter("secret", m_Secret.ToString());
                    cli.RequestMethod = "GET";
                    try
                    {
                        reply = cli.Request(null);
                    }
                    catch (WebException)
                    {
                        m_log.Warn("[DATASNAPSHOT]: Unable to notify " + url);
                    }
                    catch (Exception e)
                    {
                        m_log.Warn("[DATASNAPSHOT]: Ignoring unknown exception " + e.ToString());
                    }

                    byte[] response = new byte[1024];
                    // int n = 0;
                    try
                    {
                        // NOTE(review): if the request above threw, reply is null here and this
                        // Read raises NullReferenceException — masked by the catch below.
                        // n = reply.Read(response, 0, 1024);
                        reply.Read(response, 0, 1024);
                    }
                    catch (Exception e)
                    {
                        m_log.WarnFormat("[DATASNAPSHOT]: Unable to decode reply from data service. Ignoring. {0}", e.StackTrace);
                    }
                    // This is not quite working, so...
                    // string responseStr = Util.UTF8.GetString(response);
                    m_log.Info("[DATASNAPSHOT]: data service " + url + " notified. Secret: " + m_Secret);
                }
            }
        }

        #endregion

        #region Latency-based update functions

        /// <summary>Called by providers when their data changes; counts toward the refresh threshold.</summary>
        public void MarkDataStale(IDataSnapshotProvider provider)
        {
            //Behavior here: Wait m_period seconds, then update if there has not been a request in m_period seconds
            //or m_maxStales has been exceeded
            // NOTE(review): m_stales is incremented without synchronization — confirm provider
            // callbacks cannot fire concurrently.
            m_stales++;
        }

        /// <summary>
        /// Decides on each snapshot request whether everything should be refreshed:
        /// either the stale-change threshold was hit, or m_period seconds elapsed.
        /// </summary>
        private void CheckStale()
        {
            // Wrap check
            if (Environment.TickCount < m_lastUpdate)
            {
                m_lastUpdate = Environment.TickCount;
            }

            if (m_stales >= m_maxStales)
            {
                // NOTE(review): 20000 ms is hard-coded here while the other branch uses
                // 1000 * m_period — confirm whether the threshold path should honor m_period too.
                if (Environment.TickCount - m_lastUpdate >= 20000)
                {
                    m_stales = 0;
                    m_lastUpdate = Environment.TickCount;
                    MakeEverythingStale();
                }
            }
            else
            {
                if (m_lastUpdate + 1000 * m_period < Environment.TickCount)
                {
                    m_stales = 0;
                    m_lastUpdate = Environment.TickCount;
                    MakeEverythingStale();
                }
            }
        }

        /// <summary>Forces every registered scene to be re-snapshotted on next request.</summary>
        public void MakeEverythingStale()
        {
            m_log.Debug("[DATASNAPSHOT]: Marking all scenes as stale.");
            foreach (Scene scene in m_scenes)
            {
                m_snapStore.ForceSceneStale(scene);
            }
        }

        #endregion
    }
}
#region License // Copyright (C) 2015 by Wm. Barrett Simms (wbsimms) // http://wbsimms.com // // MIT Licence // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
//
// Updated version of original code from Jakub Bartkowiak (Gralin)
#endregion

using System;
using System.Threading;
using Microsoft.SPOT;
using Microsoft.SPOT.Hardware;

namespace Gadgeteer.Modules.WBSimms
{
    /// <summary>
    /// Driver class for Nordic nRF24L01+ tranceiver.
    /// Talks to the chip over SPI; CE (chip enable) gates RX/TX, and the IRQ pin
    /// signals data-received / data-sent / max-retransmit events.
    /// </summary>
    public class NRF24L01Plus
    {
        #region Delegates

        public delegate void EventHandler();

        public delegate void OnDataRecievedHandler(byte[] data);

        public delegate void OnInterruptHandler(Status status);

        #endregion

        private byte[] _slot0Address;      // address of pipe 0; restored on every switch to receive mode
        private OutputPort _cePin;         // chip enable: high = radio active (RX or TX)
        private bool _initialized;         // set by Initialize(); guarded by CheckIsInitialized()
        private InterruptPort _irqPin;     // active-low interrupt from the module
        private SPI _spiPort;
        private bool _enabled;             // desired RX state, as set via Enable()/Disable()
        private readonly ManualResetEvent _transmitSuccessFlag;
        private readonly ManualResetEvent _transmitFailedFlag;

        /// <summary>
        ///   Gets a value indicating whether module is enabled (RX or TX mode).
        /// </summary>
        /// <remarks>Reads the CE pin state, not the _enabled field.</remarks>
        public bool IsEnabled
        {
            get { return _cePin.Read(); }
        }

        public NRF24L01Plus()
        {
            _transmitSuccessFlag = new ManualResetEvent(false);
            _transmitFailedFlag = new ManualResetEvent(false);
        }

        /// <summary>
        ///   Enables the module
        /// </summary>
        public void Enable()
        {
            _enabled = true;
            SetEnabled();
        }

        /// <summary>
        ///   Disables the module
        /// </summary>
        public void Disable()
        {
            _enabled = false;
            SetDisabled();
        }

        /// <summary>
        ///   Initializes SPI connection and control pins
        /// </summary>
        public void Initialize(SPI.SPI_module spi, Cpu.Pin chipSelectPin, Cpu.Pin chipEnablePin, Cpu.Pin interruptPin)
        {
            // Chip Select : Active Low
            // Clock : Active High, Data clocked in on rising edge
            _spiPort = new SPI(new SPI.Configuration(chipSelectPin, false, 0, 0, false, true, 2000, spi));

            // Initialize IRQ Port (active low, so trigger on falling edge)
            _irqPin = new InterruptPort(interruptPin, false, Port.ResistorMode.PullUp, Port.InterruptMode.InterruptEdgeLow);
            _irqPin.OnInterrupt += HandleInterrupt;

            // Initialize Chip Enable Port (start disabled)
            _cePin = new OutputPort(chipEnablePin, false);

            // Module reset time
            Thread.Sleep(100);

            _initialized = true;
        }

        /// <summary>
        ///   Configure the module basic settings. Module needs to be initialized.
        ///   Defaults to the 2 Mbps data rate.
        /// </summary>
        /// <param name="address">RF address (3-5 bytes). The width of this address determines the width of all addresses used for sending/receiving.</param>
        /// <param name="channel">RF channel (0-127)</param>
        public void Configure(byte[] address, byte channel)
        {
            Configure( address, channel, NRFDataRate.DR2Mbps );
        }

        /// <summary>
        ///   Configure the module basic settings. Module needs to be initialized.
        /// </summary>
        /// <param name="address">RF address (3-5 bytes). The width of this address determines the width of all addresses used for sending/receiving.</param>
        /// <param name="channel">RF channel (0-127)</param>
        /// <param name="dataRate">Data Rate to use</param>
        public void Configure(byte[] address, byte channel, NRFDataRate dataRate)
        {
            CheckIsInitialized();
            AddressWidth.Check(address);

            // Set radio channel
            Execute(Commands.W_REGISTER, Registers.RF_CH,
                    new[]
                        {
                            (byte) (channel & 0x7F) // channel is 7 bits
                        });

            // Set Data rate — encoded in two RF_SETUP bits (RF_DR_LOW, RF_DR_HIGH);
            // read-modify-write so other RF_SETUP bits are preserved.
            var regValue = Execute(Commands.R_REGISTER, Registers.RF_SETUP, new byte[1])[1];

            switch ( dataRate )
            {
                case NRFDataRate.DR1Mbps:
                    regValue &= (byte)~(1 << Bits.RF_DR_LOW);  // 0
                    regValue &= (byte)~(1 << Bits.RF_DR_HIGH); // 0
                    break;

                case NRFDataRate.DR2Mbps:
                    regValue &= (byte)~(1 << Bits.RF_DR_LOW);  // 0
                    regValue |= (byte)(1 << Bits.RF_DR_HIGH);  // 1
                    break;

                case NRFDataRate.DR250kbps:
                    regValue |= (byte)(1 << Bits.RF_DR_LOW);   // 1
                    regValue &= (byte)~(1 << Bits.RF_DR_HIGH); // 0
                    break;

                default:
                    throw new ArgumentOutOfRangeException("dataRate");
            }

            Execute(Commands.W_REGISTER, Registers.RF_SETUP, new[]{regValue});

            // Enable dynamic payload length
            Execute(Commands.W_REGISTER, Registers.FEATURE,
                    new[]
                        {
                            (byte) (1 << Bits.EN_DPL)
                        });

            // Set auto-ack (pipes 0 and 1)
            Execute(Commands.W_REGISTER, Registers.EN_AA,
                    new[]
                        {
                            (byte) (1 << Bits.ENAA_P0 | 1 << Bits.ENAA_P1)
                        });

            // Set dynamic payload length for pipes 0 and 1
            Execute(Commands.W_REGISTER, Registers.DYNPD,
                    new[]
                        {
                            (byte) (1 << Bits.DPL_P0 | 1 << Bits.DPL_P1)
                        });

            // Flush RX FIFO
            Execute(Commands.FLUSH_RX, 0x00, new byte[0]);

            // Flush TX FIFO
            Execute(Commands.FLUSH_TX, 0x00, new byte[0]);

            // Clear IRQ Masks (writing 1 to the STATUS flag bits clears them)
            Execute(Commands.W_REGISTER, Registers.STATUS,
                    new[]
                        {
                            (byte) (1 << Bits.MASK_RX_DR |
                                    1 << Bits.MASK_TX_DS |
                                    1 << Bits.MAX_RT)
                        });

            // Set default address width (derived from the supplied address length)
            Execute(Commands.W_REGISTER, Registers.SETUP_AW,
                    new[]
                        {
                            AddressWidth.Get(address)
                        });

            // Set module address
            _slot0Address = address;
            Execute(Commands.W_REGISTER, (byte)AddressSlot.Zero, address);

            // Set retransmission values (max delay, max retries)
            Execute(Commands.W_REGISTER, Registers.SETUP_RETR,
                    new[]
                        {
                            (byte) (0x0F << Bits.ARD |
                                    0x0F << Bits.ARC)
                        });

            // Setup, CRC enabled, Power Up, PRX
            SetReceiveMode();
        }

        /// <summary>
        ///   Set one of 6 available module addresses
        /// </summary>
        public void SetAddress(AddressSlot slot, byte[] address)
        {
            CheckIsInitialized();
            AddressWidth.Check(address);
            Execute(Commands.W_REGISTER, (byte)slot, address);

            // Remember pipe-0 address so SetReceiveMode() can restore it after a transmit.
            if (slot == AddressSlot.Zero)
            {
                _slot0Address = address;
            }
        }

        /// <summary>
        ///   Read 1 of 6 available module addresses
        /// </summary>
        public byte[] GetAddress(AddressSlot slot, int width)
        {
            CheckIsInitialized();
            AddressWidth.Check(width);
            var read = Execute(Commands.R_REGISTER, (byte)slot, new byte[width]);
            // First response byte is the status register; strip it.
            var result = new byte[read.Length - 1];
            Array.Copy(read, 1, result, 0, result.Length);
            return result;
        }

        /// <summary>
        ///   Executes a command in NRF24L01+ (for details see module datasheet)
        /// </summary>
        /// <param name = "command">Command</param>
        /// <param name = "addres">Register to write to</param>
        /// <param name = "data">Data to write</param>
        /// <returns>Response byte array. First byte is the status register</returns>
        public byte[] Execute(byte command, byte addres, byte[] data)
        {
            CheckIsInitialized();

            // This command requires module to be in power down or standby mode
            if (command == Commands.W_REGISTER)
                SetDisabled();

            // Create SPI Buffers with Size of Data + 1 (For Command)
            var writeBuffer = new byte[data.Length + 1];
            var readBuffer = new byte[data.Length + 1];

            // Add command and address to SPI buffer
            writeBuffer[0] = (byte) (command | addres);

            // Add data to SPI buffer
            Array.Copy(data, 0, writeBuffer, 1, data.Length);

            // Do SPI Read/Write (full duplex: status + register data clocked back)
            _spiPort.WriteRead(writeBuffer, readBuffer);

            // Enable module back if it was disabled
            if (command == Commands.W_REGISTER && _enabled)
                SetEnabled();

            // Return ReadBuffer
            return readBuffer;
        }

        /// <summary>
        ///   Gets module basic status information
        /// </summary>
        public Status GetStatus()
        {
            CheckIsInitialized();

            // NOP command clocks out just the status register.
            var readBuffer = new byte[1];
            _spiPort.WriteRead(new[] {Commands.NOP}, readBuffer);

            return new Status(readBuffer[0]);
        }

        /// <summary>
        ///   Reads the current rf channel value set in module
        /// </summary>
        /// <returns></returns>
        public byte GetChannel()
        {
            CheckIsInitialized();

            var result = Execute(Commands.R_REGISTER, Registers.RF_CH, new byte[1]);
            return (byte) (result[1] & 0x7F);
        }

        /// <summary>
        ///   Gets the module radio frequency [MHz]
        /// </summary>
        /// <returns>Frequency in MHz</returns>
        public int GetFrequency()
        {
            // Channel n maps to 2400 + n MHz.
            return 2400 + GetChannel();
        }

        /// <summary>
        ///   Sets the rf channel value used by all data pipes
        /// </summary>
        /// <param name="channel">7 bit channel value</param>
        public void SetChannel(byte channel)
        {
            CheckIsInitialized();

            var writeBuffer = new[] {(byte) (channel & 0x7F)};
            Execute(Commands.W_REGISTER, Registers.RF_CH, writeBuffer);
        }

        /// <summary>
        ///   Send <param name = "bytes">bytes</param> to given <param name = "address">address</param>
        ///   This is a non blocking method.
        /// </summary>
        public void SendTo(byte[] address, byte[] bytes, Acknowledge acknowledge = Acknowledge.Yes)
        {
            // Chip enable low
            SetDisabled();

            // Setup PTX (Primary TX)
            SetTransmitMode();

            // Write transmit address to TX_ADDR register.
            Execute(Commands.W_REGISTER, Registers.TX_ADDR, address);

            // Write transmit address to RX_ADDRESS_P0 (Pipe0) (For Auto ACK)
            Execute(Commands.W_REGISTER, Registers.RX_ADDR_P0, address);

            // Send payload
            Execute(acknowledge == Acknowledge.Yes ? Commands.W_TX_PAYLOAD : Commands.W_TX_PAYLOAD_NO_ACK, 0x00, bytes);

            // Pulse for CE -> starts the transmission.
            SetEnabled();
        }

        /// <summary>
        ///   Sends <param name = "bytes">bytes</param> to given <param name = "address">address</param>
        ///   This is a blocking method that returns true if data was received by the recipient or false if timeout occured.
        ///   Retries every 200 ms until <paramref name="timeout"/> (ms) elapses.
        /// </summary>
        public bool SendTo(byte[] address, byte[] bytes, int timeout)
        {
            var startTime = DateTime.Now;

            while (true)
            {
                _transmitSuccessFlag.Reset();
                _transmitFailedFlag.Reset();

                SendTo(address, bytes);

                // Index 0 == success flag signalled by the IRQ handler.
                if (WaitHandle.WaitAny(new[] { _transmitSuccessFlag, _transmitFailedFlag }, 200, true) == 0)
                    return true;

                if (DateTime.Now.CompareTo(startTime.AddMilliseconds(timeout)) > 0)
                    return false;

                Debug.Print("Retransmitting packet...");
            }
        }

        /// <summary>
        ///   IRQ handler: drains the RX FIFO, clears status flags, re-arms receive mode,
        ///   then dispatches OnDataReceived / transmit success / failure events.
        /// </summary>
        private void HandleInterrupt(uint data1, uint data2, DateTime dateTime)
        {
            if (!_initialized)
                return;

            if (!_enabled)
            {
                // Module disabled by user: just discard whatever triggered the IRQ.
                // Flush RX FIFO
                Execute(Commands.FLUSH_RX, 0x00, new byte[0]);
                // Flush TX FIFO
                Execute(Commands.FLUSH_TX, 0x00, new byte[0]);
                return;
            }

            // Disable RX/TX
            SetDisabled();

            // Set PRX
            SetReceiveMode();

            // there are 3 rx pipes in rf module so 3 arrays should be enough to store incoming data
            // sometimes though more than 3 data packets are received somehow
            var payloads = new byte[6][];

            var status = GetStatus();
            byte payloadCount = 0;
            var payloadCorrupted = false;

            OnInterrupt(status);

            if (status.DataReady)
            {
                while (!status.RxEmpty)
                {
                    // Read payload size
                    var payloadLength = Execute(Commands.R_RX_PL_WID, 0x00, new byte[1]);

                    // this indicates corrupted data (nRF24L01+ max payload is 32 bytes)
                    if (payloadLength[1] > 32)
                    {
                        payloadCorrupted = true;

                        // Flush anything that remains in buffer
                        Execute(Commands.FLUSH_RX, 0x00, new byte[0]);
                    }
                    else
                    {
                        if (payloadCount >= payloads.Length)
                        {
                            Debug.Print("Unexpected payloadCount value = " + payloadCount);
                            Execute(Commands.FLUSH_RX, 0x00, new byte[0]);
                        }
                        else
                        {
                            // Read payload data
                            payloads[payloadCount] = Execute(Commands.R_RX_PAYLOAD, 0x00, new byte[payloadLength[1]]);
                            payloadCount++;
                        }
                    }

                    // Clear RX_DR bit (write-1-to-clear); response byte 0 is the fresh status
                    var result = Execute(Commands.W_REGISTER, Registers.STATUS, new[] {(byte) (1 << Bits.RX_DR)});
                    status.Update(result[0]);
                }
            }

            if (status.ResendLimitReached)
            {
                // Flush TX FIFO
                Execute(Commands.FLUSH_TX, 0x00, new byte[0]);

                // Clear MAX_RT bit in status register
                Execute(Commands.W_REGISTER, Registers.STATUS, new[] {(byte) (1 << Bits.MAX_RT)});
            }

            if (status.TxFull)
            {
                // Flush TX FIFO
                Execute(Commands.FLUSH_TX, 0x00, new byte[0]);
            }

            if (status.DataSent)
            {
                // Clear TX_DS bit in status register
                Execute(Commands.W_REGISTER, Registers.STATUS, new[] {(byte) (1 << Bits.TX_DS)});
            }

            // Enable RX
            SetEnabled();

            if (payloadCorrupted)
            {
                Debug.Print("Corrupted data received");
            }
            else if (payloadCount > 0)
            {
                if (payloadCount > payloads.Length)
                    Debug.Print("Unexpected payloadCount value = " + payloadCount);

                for (var i = 0; i < System.Math.Min(payloadCount, payloads.Length); i++)
                {
                    var payload = payloads[i];
                    // First byte of each payload buffer is the status echoed by Execute(); strip it.
                    var payloadWithoutCommand = new byte[payload.Length - 1];
                    Array.Copy(payload, 1, payloadWithoutCommand, 0, payload.Length - 1);
                    OnDataReceived(payloadWithoutCommand);
                }
            }
            else if (status.DataSent)
            {
                _transmitSuccessFlag.Set();
                OnTransmitSuccess();
            }
            else
            {
                _transmitFailedFlag.Set();
                OnTransmitFailed();
            }
        }

        // Raise CE (radio active) and re-arm the IRQ line.
        private void SetEnabled()
        {
            _irqPin.EnableInterrupt();
            _cePin.Write(true);
        }

        // Drop CE (standby) and mask the IRQ line.
        private void SetDisabled()
        {
            _cePin.Write(false);
            _irqPin.DisableInterrupt();
        }

        // CONFIG := power up + CRC setting, PRIM_RX clear => primary transmitter.
        private void SetTransmitMode()
        {
            Execute(Commands.W_REGISTER, Registers.CONFIG,
                    new[]
                        {
                            (byte) (1 << Bits.PWR_UP |
                                    1 << Bits.CRCO)
                        });
        }

        // Restore pipe-0 address (clobbered by SendTo for auto-ACK), then
        // CONFIG := power up + CRC setting + PRIM_RX => primary receiver.
        private void SetReceiveMode()
        {
            Execute(Commands.W_REGISTER, Registers.RX_ADDR_P0, _slot0Address);

            Execute(Commands.W_REGISTER, Registers.CONFIG,
                    new[]
                        {
                            (byte) (1 << Bits.PWR_UP |
                                    1 << Bits.CRCO |
                                    1 << Bits.PRIM_RX)
                        });
        }

        // Guard for every public operation that touches the hardware.
        private void CheckIsInitialized()
        {
            if (!_initialized)
            {
                throw new InvalidOperationException("Initialize method needs to be called before this call");
            }
        }

        /// <summary>
        ///   Called on every IRQ interrupt
        /// </summary>
        public event OnInterruptHandler OnInterrupt = delegate { };

        /// <summary>
        ///   Occurs when data packet has been received
        /// </summary>
        public event OnDataRecievedHandler OnDataReceived = delegate { };

        /// <summary>
        ///   Occurs when ack has been received for send packet
        /// </summary>
        public event EventHandler OnTransmitSuccess = delegate { };

        /// <summary>
        ///   Occurs when no ack has been received for send packet
        /// </summary>
        public event EventHandler OnTransmitFailed = delegate { };
    }
}
/*
 *  Copyright 2012-2016 The Pkcs11Interop Project
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

/*
 *  Written for the Pkcs11Interop project by:
 *  Jaroslav IMRICH <jimrich@jimrich.sk>
 */

using System;
using System.IO;
using Net.Pkcs11Interop.Common;
using Net.Pkcs11Interop.LowLevelAPI81;
using Microsoft.VisualStudio.TestTools.UnitTesting;

namespace Net.Pkcs11Interop.Tests.LowLevelAPI81
{
    /// <summary>
    /// C_SignInit, C_Sign, C_SignUpdate, C_SignFinal, C_VerifyInit, C_Verify, C_VerifyUpdate and C_VerifyFinal tests.
    /// LowLevelAPI81 targets platforms with 8-byte unmanaged longs and 1-byte struct packing;
    /// each test is inconclusive elsewhere.
    /// </summary>
    [TestClass]
    public class _21_SignAndVerifyTest
    {
        /// <summary>
        /// C_SignInit, C_Sign, C_VerifyInit and C_Verify test with CKM_RSA_PKCS mechanism.
        /// Exercises the single-part path: sign "Hello world" with a freshly generated
        /// RSA key pair, then verify the signature with the public key.
        /// </summary>
        [TestMethod]
        public void _01_SignAndVerifySinglePartTest()
        {
            if (Platform.UnmanagedLongSize != 8 || Platform.StructPackingSize != 1)
                Assert.Inconclusive("Test cannot be executed on this platform");

            CKR rv = CKR.CKR_OK;

            using (Pkcs11 pkcs11 = new Pkcs11(Settings.Pkcs11LibraryPath))
            {
                rv = pkcs11.C_Initialize(Settings.InitArgs81);
                if ((rv != CKR.CKR_OK) && (rv != CKR.CKR_CRYPTOKI_ALREADY_INITIALIZED))
                    Assert.Fail(rv.ToString());

                // Find first slot with token present
                ulong slotId = Helpers.GetUsableSlot(pkcs11);

                ulong session = CK.CK_INVALID_HANDLE;
                rv = pkcs11.C_OpenSession(slotId, (CKF.CKF_SERIAL_SESSION | CKF.CKF_RW_SESSION), IntPtr.Zero, IntPtr.Zero, ref session);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                // Login as normal user
                rv = pkcs11.C_Login(session, CKU.CKU_USER, Settings.NormalUserPinArray, Convert.ToUInt64(Settings.NormalUserPinArray.Length));
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                // Generate asymetric key pair
                ulong pubKeyId = CK.CK_INVALID_HANDLE;
                ulong privKeyId = CK.CK_INVALID_HANDLE;
                rv = Helpers.GenerateKeyPair(pkcs11, session, ref pubKeyId, ref privKeyId);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                // Specify signing mechanism (needs no parameter => no unamanaged memory is needed)
                CK_MECHANISM mechanism = CkmUtils.CreateMechanism(CKM.CKM_SHA1_RSA_PKCS);

                // Initialize signing operation
                rv = pkcs11.C_SignInit(session, ref mechanism, privKeyId);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                byte[] sourceData = ConvertUtils.Utf8StringToBytes("Hello world");

                // Get length of signature in first call
                // (standard PKCS#11 two-call convention: null buffer => length query)
                ulong signatureLen = 0;
                rv = pkcs11.C_Sign(session, sourceData, Convert.ToUInt64(sourceData.Length), null, ref signatureLen);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                Assert.IsTrue(signatureLen > 0);

                // Allocate array for signature
                byte[] signature = new byte[signatureLen];

                // Get signature in second call
                rv = pkcs11.C_Sign(session, sourceData, Convert.ToUInt64(sourceData.Length), signature, ref signatureLen);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                // Do something interesting with signature

                // Initialize verification operation
                rv = pkcs11.C_VerifyInit(session, ref mechanism, pubKeyId);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                // Verify signature
                rv = pkcs11.C_Verify(session, sourceData, Convert.ToUInt64(sourceData.Length), signature, Convert.ToUInt64(signature.Length));
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                // Do something interesting with verification result

                // Clean up: destroy generated keys, log out and close the session.
                rv = pkcs11.C_DestroyObject(session, privKeyId);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                rv = pkcs11.C_DestroyObject(session, pubKeyId);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                rv = pkcs11.C_Logout(session);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                rv = pkcs11.C_CloseSession(session);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                rv = pkcs11.C_Finalize(IntPtr.Zero);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());
            }
        }

        /// <summary>
        /// C_SignInit, C_SignUpdate, C_SignFinal, C_VerifyInit, C_VerifyUpdate and C_VerifyFinal test.
        /// Exercises the multi-part (streaming) path: the same data is fed in 8-byte chunks
        /// through the Update calls before finalizing.
        /// </summary>
        [TestMethod]
        public void _02_SignAndVerifyMultiPartTest()
        {
            if (Platform.UnmanagedLongSize != 8 || Platform.StructPackingSize != 1)
                Assert.Inconclusive("Test cannot be executed on this platform");

            CKR rv = CKR.CKR_OK;

            using (Pkcs11 pkcs11 = new Pkcs11(Settings.Pkcs11LibraryPath))
            {
                rv = pkcs11.C_Initialize(Settings.InitArgs81);
                if ((rv != CKR.CKR_OK) && (rv != CKR.CKR_CRYPTOKI_ALREADY_INITIALIZED))
                    Assert.Fail(rv.ToString());

                // Find first slot with token present
                ulong slotId = Helpers.GetUsableSlot(pkcs11);

                ulong session = CK.CK_INVALID_HANDLE;
                rv = pkcs11.C_OpenSession(slotId, (CKF.CKF_SERIAL_SESSION | CKF.CKF_RW_SESSION), IntPtr.Zero, IntPtr.Zero, ref session);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                // Login as normal user
                rv = pkcs11.C_Login(session, CKU.CKU_USER, Settings.NormalUserPinArray, Convert.ToUInt64(Settings.NormalUserPinArray.Length));
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                // Generate asymetric key pair
                ulong pubKeyId = CK.CK_INVALID_HANDLE;
                ulong privKeyId = CK.CK_INVALID_HANDLE;
                rv = Helpers.GenerateKeyPair(pkcs11, session, ref pubKeyId, ref privKeyId);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                // Specify signing mechanism (needs no parameter => no unamanaged memory is needed)
                CK_MECHANISM mechanism = CkmUtils.CreateMechanism(CKM.CKM_SHA1_RSA_PKCS);

                byte[] sourceData = ConvertUtils.Utf8StringToBytes("Hello world");
                byte[] signature = null;

                // Multipart signature functions C_SignUpdate and C_SignFinal can be used i.e. for signing of streamed data
                using (MemoryStream inputStream = new MemoryStream(sourceData))
                {
                    // Initialize signing operation
                    rv = pkcs11.C_SignInit(session, ref mechanism, privKeyId);
                    if (rv != CKR.CKR_OK)
                        Assert.Fail(rv.ToString());

                    // Prepare buffer for source data part
                    // Note that in real world application we would rather use bigger buffer i.e. 4096 bytes long
                    byte[] part = new byte[8];

                    // Read input stream with source data
                    int bytesRead = 0;
                    while ((bytesRead = inputStream.Read(part, 0, part.Length)) > 0)
                    {
                        // Process each individual source data part
                        rv = pkcs11.C_SignUpdate(session, part, Convert.ToUInt64(bytesRead));
                        if (rv != CKR.CKR_OK)
                            Assert.Fail(rv.ToString());
                    }

                    // Get the length of signature in first call
                    ulong signatureLen = 0;
                    rv = pkcs11.C_SignFinal(session, null, ref signatureLen);
                    if (rv != CKR.CKR_OK)
                        Assert.Fail(rv.ToString());

                    Assert.IsTrue(signatureLen > 0);

                    // Allocate array for signature
                    signature = new byte[signatureLen];

                    // Get signature in second call
                    rv = pkcs11.C_SignFinal(session, signature, ref signatureLen);
                    if (rv != CKR.CKR_OK)
                        Assert.Fail(rv.ToString());
                }

                // Do something interesting with signature

                // Multipart verification functions C_VerifyUpdate and C_VerifyFinal can be used i.e. for signature verification of streamed data
                using (MemoryStream inputStream = new MemoryStream(sourceData))
                {
                    // Initialize verification operation
                    rv = pkcs11.C_VerifyInit(session, ref mechanism, pubKeyId);
                    if (rv != CKR.CKR_OK)
                        Assert.Fail(rv.ToString());

                    // Prepare buffer for source data part
                    // Note that in real world application we would rather use bigger buffer i.e. 4096 bytes long
                    byte[] part = new byte[8];

                    // Read input stream with source data
                    int bytesRead = 0;
                    while ((bytesRead = inputStream.Read(part, 0, part.Length)) > 0)
                    {
                        // Process each individual source data part
                        rv = pkcs11.C_VerifyUpdate(session, part, Convert.ToUInt64(bytesRead));
                        if (rv != CKR.CKR_OK)
                            Assert.Fail(rv.ToString());
                    }

                    // Verify signature
                    rv = pkcs11.C_VerifyFinal(session, signature, Convert.ToUInt64(signature.Length));
                    if (rv != CKR.CKR_OK)
                        Assert.Fail(rv.ToString());
                }

                // Do something interesting with verification result

                // Clean up: destroy generated keys, log out and close the session.
                rv = pkcs11.C_DestroyObject(session, privKeyId);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                rv = pkcs11.C_DestroyObject(session, pubKeyId);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                rv = pkcs11.C_Logout(session);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                rv = pkcs11.C_CloseSession(session);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());

                rv = pkcs11.C_Finalize(IntPtr.Zero);
                if (rv != CKR.CKR_OK)
                    Assert.Fail(rv.ToString());
            }
        }
    }
}
#region Copyright and license information
// Copyright 2001-2009 Stephen Colebourne
// Copyright 2009-2011 Jon Skeet
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion

using System;
using System.Collections.Generic;
using System.IO;
using System.Text;

namespace NodaTime.TimeZones
{
    /// <summary>
    ///   Provides an <see cref="DateTimeZone" /> writer that simply writes the values
    ///   without any compression. Can be used as a base for implementing specific
    ///   compression writers by overriding the methods for the types to be compressed.
    /// </summary>
    /// <remarks>
    ///   Multi-byte integers are emitted big-endian (most significant byte first) via the
    ///   WriteInt8/16/32/64 chain; the matching reader must use the same byte order.
    /// </remarks>
    internal class DateTimeZoneWriter
    {
        // One-byte tags identifying the concrete time zone type in WriteTimeZone.
        internal const byte FlagTimeZoneCached = 0;
        internal const byte FlagTimeZoneDst = 1;
        internal const byte FlagTimeZoneFixed = 2;
        internal const byte FlagTimeZoneNull = 3;
        internal const byte FlagTimeZonePrecalculated = 4;
        internal const byte FlagTimeZoneUser = 5;       // user-defined type; followed by its AQN string

        protected readonly Stream Output;

        /// <summary>
        ///   Constructs a DateTimeZoneWriter.
        /// </summary>
        /// <param name="output">Where to send the serialized output.</param>
        internal DateTimeZoneWriter(Stream output)
        {
            Output = output;
        }

        #region DateTimeZoneWriter Members
        /// <summary>
        ///   Writes a boolean value to the stream (one byte: 1 for true, 0 for false).
        /// </summary>
        /// <param name="value">The value to write.</param>
        internal void WriteBoolean(bool value)
        {
            WriteInt8((byte)(value ? 1 : 0));
        }

        /// <summary>
        ///   Writes the given non-negative integer value to the stream.
        ///   Virtual so compressing subclasses can use a variable-length encoding.
        /// </summary>
        /// <param name="value">The value to write.</param>
        internal virtual void WriteCount(int value)
        {
            WriteInt32(value);
        }

        /// <summary>
        ///   Writes the given dictionary of string to string to the stream,
        ///   as a count followed by alternating key/value strings.
        /// </summary>
        /// <param name="dictionary">The <see cref="IDictionary{TKey,TValue}" /> to write.</param>
        internal void WriteDictionary(IDictionary<string, string> dictionary)
        {
            if (dictionary == null)
            {
                throw new ArgumentNullException("dictionary");
            }
            WriteCount(dictionary.Count);
            foreach (var entry in dictionary)
            {
                WriteString(entry.Key);
                WriteString(entry.Value);
            }
        }

        /// <summary>
        ///   Writes an enumeration's integer value to the stream.
        /// </summary>
        /// <param name="value">The value to write.</param>
        internal void WriteEnum(int value)
        {
            WriteInteger(value);
        }

        /// <summary>
        ///   Writes the <see cref="Instant" /> value to the stream (as its tick count).
        /// </summary>
        /// <param name="value">The value to write.</param>
        internal void WriteInstant(Instant value)
        {
            WriteTicks(value.Ticks);
        }

        /// <summary>
        ///   Writes the integer value to the stream.
        /// </summary>
        /// <param name="value">The value to write.</param>
        internal void WriteInteger(int value)
        {
            WriteInt32(value);
        }

        /// <summary>
        ///   Writes the <see cref="LocalInstant" /> value to the stream (as its tick count).
        /// </summary>
        /// <param name="value">The value to write.</param>
        internal void WriteLocalInstant(LocalInstant value)
        {
            WriteTicks(value.Ticks);
        }

        /// <summary>
        ///   Writes the integer milliseconds value to the stream.
        ///   Virtual so compressing subclasses can pack common offsets tighter.
        /// </summary>
        /// <param name="value">The value to write.</param>
        internal virtual void WriteMilliseconds(int value)
        {
            WriteInt32(value);
        }

        /// <summary>
        ///   Writes the <see cref="Offset" /> value to the stream (as total milliseconds).
        /// </summary>
        /// <param name="value">The value to write.</param>
        internal void WriteOffset(Offset value)
        {
            WriteMilliseconds(value.TotalMilliseconds);
        }

        /// <summary>
        ///   Writes the string value to the stream, as a UTF-8 byte count followed by
        ///   the UTF-8 bytes (note: the count is bytes, not characters).
        /// </summary>
        /// <param name="value">The value to write.</param>
        internal void WriteString(string value)
        {
            byte[] data = Encoding.UTF8.GetBytes(value);
            int length = data.Length;
            WriteCount(length);
            Output.Write(data, 0, data.Length);
        }

        /// <summary>
        ///   Writes the long ticks value to the stream.
        ///   Virtual so compressing subclasses can use a variable-length encoding.
        /// </summary>
        /// <param name="value">The value to write.</param>
        internal virtual void WriteTicks(long value)
        {
            WriteInt64(value);
        }

        /// <summary>
        ///   Writes the <see cref="DateTimeZone" /> value to the stream: a one-byte type
        ///   tag (see the FlagTimeZone* constants), the assembly-qualified type name for
        ///   user-defined zones, then the zone's own payload via its Write method.
        ///   A null zone writes only the null tag.
        /// </summary>
        /// <param name="value">The value to write.</param>
        internal void WriteTimeZone(DateTimeZone value)
        {
            if (value == null)
            {
                WriteInt8(FlagTimeZoneNull);
                return;
            }
            else if (value is FixedDateTimeZone)
            {
                WriteInt8(FlagTimeZoneFixed);
            }
            else if (value is PrecalculatedDateTimeZone)
            {
                WriteInt8(FlagTimeZonePrecalculated);
            }
            else if (value is CachedDateTimeZone)
            {
                WriteInt8(FlagTimeZoneCached);
            }
            else if (value is DaylightSavingsTimeZone)
            {
                WriteInt8(FlagTimeZoneDst);
            }
            else
            {
                // Unknown (user-supplied) zone type: tag + AQN so the reader can reflect it back.
                WriteInt8(FlagTimeZoneUser);
                WriteString(value.GetType().AssemblyQualifiedName);
            }
            value.Write(this);
        }
        #endregion

        /// <summary>
        ///   Writes the given 16 bit integer value to the stream, big-endian.
        /// </summary>
        /// <param name="value">The value to write.</param>
        protected void WriteInt16(short value)
        {
            unchecked
            {
                WriteInt8((byte)((value >> 8) & 0xff));
                WriteInt8((byte)(value & 0xff));
            }
        }

        /// <summary>
        ///   Writes the given 32 bit integer value to the stream, big-endian.
        /// </summary>
        /// <param name="value">The value to write.</param>
        protected void WriteInt32(int value)
        {
            unchecked
            {
                WriteInt16((short)(value >> 16));
                WriteInt16((short)value);
            }
        }

        /// <summary>
        ///   Writes the given 64 bit integer value to the stream, big-endian.
        /// </summary>
        /// <param name="value">The value to write.</param>
        protected void WriteInt64(long value)
        {
            unchecked
            {
                WriteInt32((int)(value >> 32));
                WriteInt32((int)value);
            }
        }

        /// <summary>
        ///   Writes the given 8 bit integer value to the stream.
        /// </summary>
        /// <param name="value">The value to write.</param>
        protected void WriteInt8(byte value)
        {
            unchecked
            {
                Output.WriteByte(value);
            }
        }
    }
}
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using System.Data;
using org.swyn.foundation.utils;

namespace tdbadmin
{
	/// <summary>
	/// Administration form for "action" records: lets the user select, insert,
	/// update, delete and clear actions via the <c>tdbgui.GUIact</c> wrapper.
	/// (Original summary said "Country" — this form actually edits actions.)
	/// </summary>
	public class FAct : System.Windows.Forms.Form
	{
		// Designer-managed controls: description group, button bar and edit fields.
		private System.Windows.Forms.GroupBox groupBox1;
		private System.Windows.Forms.GroupBox TDB_abgrp;
		private System.Windows.Forms.Button TDB_ab_clr;
		private System.Windows.Forms.Button TDB_ab_sel;
		private System.Windows.Forms.Button TDB_ab_exit;
		private System.Windows.Forms.Button TDB_ab_del;
		private System.Windows.Forms.Button TDB_ab_upd;
		private System.Windows.Forms.Button TDB_ab_ins;
		private System.Windows.Forms.Label tdb_e_id;
		private System.Windows.Forms.TextBox tdb_e_bez;
		private System.Windows.Forms.TextBox tdb_e_text;
		private System.Windows.Forms.Label tdb_l_text;
		private System.Windows.Forms.Label tdb_l_bez;
		private System.Windows.Forms.Label tdb_l_id;
		// Backend accessor for action records (presumably wraps DB access — confirm in tdbgui).
		private tdbgui.GUIact AT;
		private System.Windows.Forms.ComboBox Act_e_fromres;
		private System.Windows.Forms.ComboBox Act_e_tores;
		private System.Windows.Forms.Label Act_l_exeflag;
		private System.Windows.Forms.ComboBox Act_e_exeflag;
		private System.Windows.Forms.Label Act_l_dev;
		private System.Windows.Forms.ComboBox Act_e_dev;
		private System.Windows.Forms.ComboBox Act_e_actt;
		private System.Windows.Forms.Label Act_l_actt;
		private System.Windows.Forms.Label Act_l_ressta;
		private System.Windows.Forms.ComboBox Act_e_parent;
		private System.Windows.Forms.Label Act_l_parent;
		private System.Windows.Forms.Label Act_l_ord;
		private System.Windows.Forms.NumericUpDown Act_e_ord;
		private System.Windows.Forms.CheckBox Act_e_top;
		// True while the current action is a top/root entry (ObjParent == -1);
		// mirrors the Act_e_top checkbox and hides the parent combo when set.
		private bool ishost;

		/// <summary>
		/// Required designer variable.
		/// </summary>
		private System.ComponentModel.Container components = null;

		public FAct()
		{
			//
			// Required for Windows Form Designer support
			//
			InitializeComponent();

			//
			// TODO: Add any constructor code after InitializeComponent call
			//
			AT = new tdbgui.GUIact();
			ishost = false;
		}

		/// <summary>
		/// Clean up any resources being used.
		/// </summary>
		protected override void Dispose( bool disposing )
		{
			if( disposing )
			{
				if(components != null)
				{
					components.Dispose();
				}
			}
			base.Dispose( disposing );
		}

		#region Windows Form Designer generated code
		/// <summary>
		/// Required method for Designer support - do not modify
		/// the contents of this method with the code editor.
		/// </summary>
		private void InitializeComponent()
		{
			this.groupBox1 = new System.Windows.Forms.GroupBox();
			this.Act_l_actt = new System.Windows.Forms.Label();
			this.Act_e_actt = new System.Windows.Forms.ComboBox();
			this.Act_e_dev = new System.Windows.Forms.ComboBox();
			this.Act_l_dev = new System.Windows.Forms.Label();
			this.Act_e_exeflag = new System.Windows.Forms.ComboBox();
			this.Act_l_exeflag = new System.Windows.Forms.Label();
			this.Act_e_tores = new System.Windows.Forms.ComboBox();
			this.Act_e_fromres = new System.Windows.Forms.ComboBox();
			this.Act_l_ressta = new System.Windows.Forms.Label();
			this.tdb_e_id = new System.Windows.Forms.Label();
			this.tdb_e_bez = new System.Windows.Forms.TextBox();
			this.tdb_e_text = new System.Windows.Forms.TextBox();
			this.tdb_l_text = new System.Windows.Forms.Label();
			this.tdb_l_bez = new System.Windows.Forms.Label();
			this.tdb_l_id = new System.Windows.Forms.Label();
			this.TDB_abgrp = new System.Windows.Forms.GroupBox();
			this.TDB_ab_clr = new System.Windows.Forms.Button();
			this.TDB_ab_sel = new System.Windows.Forms.Button();
			this.TDB_ab_exit = new System.Windows.Forms.Button();
			this.TDB_ab_del = new System.Windows.Forms.Button();
			this.TDB_ab_upd = new System.Windows.Forms.Button();
			this.TDB_ab_ins = new System.Windows.Forms.Button();
			this.Act_e_parent = new System.Windows.Forms.ComboBox();
			this.Act_l_parent = new System.Windows.Forms.Label();
			this.Act_l_ord = new System.Windows.Forms.Label();
			this.Act_e_ord = new System.Windows.Forms.NumericUpDown();
			this.Act_e_top = new System.Windows.Forms.CheckBox();
			this.groupBox1.SuspendLayout();
			this.TDB_abgrp.SuspendLayout();
			((System.ComponentModel.ISupportInitialize)(this.Act_e_ord)).BeginInit();
			this.SuspendLayout();
			// 
			// groupBox1
			// 
			this.groupBox1.Controls.Add(this.Act_e_top);
			this.groupBox1.Controls.Add(this.Act_e_ord);
			this.groupBox1.Controls.Add(this.Act_l_ord);
			this.groupBox1.Controls.Add(this.Act_l_parent);
			this.groupBox1.Controls.Add(this.Act_e_parent);
			this.groupBox1.Controls.Add(this.Act_l_actt);
			this.groupBox1.Controls.Add(this.Act_e_actt);
			this.groupBox1.Controls.Add(this.Act_e_dev);
			this.groupBox1.Controls.Add(this.Act_l_dev);
			this.groupBox1.Controls.Add(this.Act_e_exeflag);
			this.groupBox1.Controls.Add(this.Act_l_exeflag);
			this.groupBox1.Controls.Add(this.Act_e_tores);
			this.groupBox1.Controls.Add(this.Act_e_fromres);
			this.groupBox1.Controls.Add(this.Act_l_ressta);
			this.groupBox1.Controls.Add(this.tdb_e_id);
			this.groupBox1.Controls.Add(this.tdb_e_bez);
			this.groupBox1.Controls.Add(this.tdb_e_text);
			this.groupBox1.Controls.Add(this.tdb_l_text);
			this.groupBox1.Controls.Add(this.tdb_l_bez);
			this.groupBox1.Controls.Add(this.tdb_l_id);
			this.groupBox1.Dock = System.Windows.Forms.DockStyle.Top;
			this.groupBox1.Location = new System.Drawing.Point(0, 0);
			this.groupBox1.Name = "groupBox1";
			this.groupBox1.Size = new System.Drawing.Size(608, 296);
			this.groupBox1.TabIndex = 13;
			this.groupBox1.TabStop = false;
			this.groupBox1.Text = "Description";
			// 
			// Act_l_actt
			// 
			this.Act_l_actt.Location = new System.Drawing.Point(368, 232);
			this.Act_l_actt.Name = "Act_l_actt";
			this.Act_l_actt.Size = new System.Drawing.Size(100, 16);
			this.Act_l_actt.TabIndex = 18;
			this.Act_l_actt.Text = "Actiontype";
			// 
			// Act_e_actt
			// 
			this.Act_e_actt.Location = new System.Drawing.Point(368, 248);
			this.Act_e_actt.Name = "Act_e_actt";
			this.Act_e_actt.Size = new System.Drawing.Size(224, 21);
			this.Act_e_actt.TabIndex = 17;
			// 
			// Act_e_dev
			// 
			this.Act_e_dev.Location = new System.Drawing.Point(136, 248);
			this.Act_e_dev.Name = "Act_e_dev";
			this.Act_e_dev.Size = new System.Drawing.Size(216, 21);
			this.Act_e_dev.TabIndex = 16;
			// 
			// Act_l_dev
			// 
			this.Act_l_dev.Location = new System.Drawing.Point(8, 248);
			this.Act_l_dev.Name = "Act_l_dev";
			this.Act_l_dev.TabIndex = 15;
			this.Act_l_dev.Text = "Device";
			// 
			// Act_e_exeflag
			// 
			this.Act_e_exeflag.Location = new System.Drawing.Point(136, 224);
			this.Act_e_exeflag.Name = "Act_e_exeflag";
			this.Act_e_exeflag.Size = new System.Drawing.Size(216, 21);
			this.Act_e_exeflag.TabIndex = 14;
			// 
			// Act_l_exeflag
			// 
			this.Act_l_exeflag.Location = new System.Drawing.Point(8, 224);
			this.Act_l_exeflag.Name = "Act_l_exeflag";
			this.Act_l_exeflag.TabIndex = 13;
			this.Act_l_exeflag.Text = "Execution flag";
			// 
			// Act_e_tores
			// 
			this.Act_e_tores.Location = new System.Drawing.Point(368, 192);
			this.Act_e_tores.Name = "Act_e_tores";
			this.Act_e_tores.Size = new System.Drawing.Size(224, 21);
			this.Act_e_tores.TabIndex = 12;
			// 
			// Act_e_fromres
			// 
			this.Act_e_fromres.Location = new System.Drawing.Point(136, 192);
			this.Act_e_fromres.Name = "Act_e_fromres";
			this.Act_e_fromres.Size = new System.Drawing.Size(216, 21);
			this.Act_e_fromres.TabIndex = 11;
			// 
			// Act_l_ressta
			// 
			this.Act_l_ressta.Location = new System.Drawing.Point(8, 192);
			this.Act_l_ressta.Name = "Act_l_ressta";
			this.Act_l_ressta.Size = new System.Drawing.Size(120, 32);
			this.Act_l_ressta.TabIndex = 10;
			this.Act_l_ressta.Text = "Set from / to reservation status";
			// 
			// tdb_e_id
			// 
			this.tdb_e_id.Location = new System.Drawing.Point(136, 56);
			this.tdb_e_id.Name = "tdb_e_id";
			this.tdb_e_id.Size = new System.Drawing.Size(64, 16);
			this.tdb_e_id.TabIndex = 9;
			// 
			// tdb_e_bez
			// 
			this.tdb_e_bez.Location = new System.Drawing.Point(136, 72);
			this.tdb_e_bez.Name = "tdb_e_bez";
			this.tdb_e_bez.Size = new System.Drawing.Size(456, 20);
			this.tdb_e_bez.TabIndex = 0;
			this.tdb_e_bez.Text = "";
			// 
			// tdb_e_text
			// 
			this.tdb_e_text.Location = new System.Drawing.Point(136, 96);
			this.tdb_e_text.Multiline = true;
			this.tdb_e_text.Name = "tdb_e_text";
			this.tdb_e_text.Size = new System.Drawing.Size(456, 88);
			this.tdb_e_text.TabIndex = 2;
			this.tdb_e_text.Text = "";
			// 
			// tdb_l_text
			// 
			this.tdb_l_text.Location = new System.Drawing.Point(8, 128);
			this.tdb_l_text.Name = "tdb_l_text";
			this.tdb_l_text.RightToLeft = System.Windows.Forms.RightToLeft.No;
			this.tdb_l_text.TabIndex = 4;
			this.tdb_l_text.Text = "Description";
			// 
			// tdb_l_bez
			// 
			this.tdb_l_bez.Location = new System.Drawing.Point(8, 72);
			this.tdb_l_bez.Name = "tdb_l_bez";
			this.tdb_l_bez.TabIndex = 2;
			this.tdb_l_bez.Text = "Title";
			// 
			// tdb_l_id
			// 
			this.tdb_l_id.Location = new System.Drawing.Point(8, 56);
			this.tdb_l_id.Name = "tdb_l_id";
			this.tdb_l_id.TabIndex = 1;
			this.tdb_l_id.Text = "ID";
			// 
			// TDB_abgrp
			// 
			this.TDB_abgrp.Controls.Add(this.TDB_ab_clr);
			this.TDB_abgrp.Controls.Add(this.TDB_ab_sel);
			this.TDB_abgrp.Controls.Add(this.TDB_ab_exit);
			this.TDB_abgrp.Controls.Add(this.TDB_ab_del);
			this.TDB_abgrp.Controls.Add(this.TDB_ab_upd);
			this.TDB_abgrp.Controls.Add(this.TDB_ab_ins);
			this.TDB_abgrp.Dock = System.Windows.Forms.DockStyle.Bottom;
			this.TDB_abgrp.Location = new System.Drawing.Point(0, 296);
			this.TDB_abgrp.Name = "TDB_abgrp";
			this.TDB_abgrp.Size = new System.Drawing.Size(608, 53);
			this.TDB_abgrp.TabIndex = 15;
			this.TDB_abgrp.TabStop = false;
			this.TDB_abgrp.Text = "Actions";
			// 
			// TDB_ab_clr
			// 
			this.TDB_ab_clr.Dock = System.Windows.Forms.DockStyle.Right;
			this.TDB_ab_clr.Location = new System.Drawing.Point(455, 16);
			this.TDB_ab_clr.Name = "TDB_ab_clr";
			this.TDB_ab_clr.Size = new System.Drawing.Size(75, 34);
			this.TDB_ab_clr.TabIndex = 10;
			this.TDB_ab_clr.Text = "Clear";
			this.TDB_ab_clr.Click += new System.EventHandler(this.TDB_ab_clr_Click);
			// 
			// TDB_ab_sel
			// 
			this.TDB_ab_sel.BackColor = System.Drawing.Color.FromArgb(((System.Byte)(192)), ((System.Byte)(192)), ((System.Byte)(255)));
			this.TDB_ab_sel.Dock = System.Windows.Forms.DockStyle.Left;
			this.TDB_ab_sel.Location = new System.Drawing.Point(228, 16);
			this.TDB_ab_sel.Name = "TDB_ab_sel";
			this.TDB_ab_sel.Size = new System.Drawing.Size(80, 34);
			this.TDB_ab_sel.TabIndex = 8;
			this.TDB_ab_sel.Text = "Select";
			this.TDB_ab_sel.Click += new System.EventHandler(this.TDB_ab_sel_Click);
			// 
			// TDB_ab_exit
			// 
			this.TDB_ab_exit.BackColor = System.Drawing.Color.FromArgb(((System.Byte)(0)), ((System.Byte)(192)), ((System.Byte)(192)));
			this.TDB_ab_exit.Dock = System.Windows.Forms.DockStyle.Right;
			this.TDB_ab_exit.Location = new System.Drawing.Point(530, 16);
			this.TDB_ab_exit.Name = "TDB_ab_exit";
			this.TDB_ab_exit.Size = new System.Drawing.Size(75, 34);
			this.TDB_ab_exit.TabIndex = 9;
			this.TDB_ab_exit.Text = "Exit";
			this.TDB_ab_exit.Click += new System.EventHandler(this.TDB_ab_exit_Click);
			// 
			// TDB_ab_del
			// 
			this.TDB_ab_del.BackColor = System.Drawing.Color.FromArgb(((System.Byte)(255)), ((System.Byte)(192)), ((System.Byte)(192)));
			this.TDB_ab_del.Dock = System.Windows.Forms.DockStyle.Left;
			this.TDB_ab_del.Location = new System.Drawing.Point(153, 16);
			this.TDB_ab_del.Name = "TDB_ab_del";
			this.TDB_ab_del.Size = new System.Drawing.Size(75, 34);
			this.TDB_ab_del.TabIndex = 7;
			this.TDB_ab_del.Text = "Delete";
			this.TDB_ab_del.Click += new System.EventHandler(this.TDB_ab_del_Click);
			// 
			// TDB_ab_upd
			// 
			this.TDB_ab_upd.BackColor = System.Drawing.Color.FromArgb(((System.Byte)(255)), ((System.Byte)(192)), ((System.Byte)(192)));
			this.TDB_ab_upd.Dock = System.Windows.Forms.DockStyle.Left;
			this.TDB_ab_upd.Location = new System.Drawing.Point(78, 16);
			this.TDB_ab_upd.Name = "TDB_ab_upd";
			this.TDB_ab_upd.Size = new System.Drawing.Size(75, 34);
			this.TDB_ab_upd.TabIndex = 6;
			this.TDB_ab_upd.Text = "Update";
			this.TDB_ab_upd.Click += new System.EventHandler(this.TDB_ab_upd_Click);
			// 
			// TDB_ab_ins
			// 
			this.TDB_ab_ins.BackColor = System.Drawing.Color.FromArgb(((System.Byte)(255)), ((System.Byte)(192)), ((System.Byte)(192)));
			this.TDB_ab_ins.Dock = System.Windows.Forms.DockStyle.Left;
			this.TDB_ab_ins.Location = new System.Drawing.Point(3, 16);
			this.TDB_ab_ins.Name = "TDB_ab_ins";
			this.TDB_ab_ins.Size = new System.Drawing.Size(75, 34);
			this.TDB_ab_ins.TabIndex = 5;
			this.TDB_ab_ins.Text = "Insert";
			this.TDB_ab_ins.Click += new System.EventHandler(this.TDB_ab_ins_Click);
			// 
			// Act_e_parent
			// 
			this.Act_e_parent.Location = new System.Drawing.Point(136, 16);
			this.Act_e_parent.Name = "Act_e_parent";
			this.Act_e_parent.Size = new System.Drawing.Size(264, 21);
			this.Act_e_parent.TabIndex = 19;
			// 
			// Act_l_parent
			// 
			this.Act_l_parent.Location = new System.Drawing.Point(8, 16);
			this.Act_l_parent.Name = "Act_l_parent";
			this.Act_l_parent.Size = new System.Drawing.Size(64, 16);
			this.Act_l_parent.TabIndex = 20;
			this.Act_l_parent.Text = "Parent";
			// 
			// Act_l_ord
			// 
			this.Act_l_ord.Location = new System.Drawing.Point(256, 47);
			this.Act_l_ord.Name = "Act_l_ord";
			this.Act_l_ord.Size = new System.Drawing.Size(80, 23);
			this.Act_l_ord.TabIndex = 21;
			this.Act_l_ord.Text = "Order number";
			// 
			// Act_e_ord
			// 
			this.Act_e_ord.Location = new System.Drawing.Point(352, 48);
			this.Act_e_ord.Maximum = new System.Decimal(new int[] {
						1000,
						0,
						0,
						0});
			this.Act_e_ord.Name = "Act_e_ord";
			this.Act_e_ord.Size = new System.Drawing.Size(48, 20);
			this.Act_e_ord.TabIndex = 22;
			// 
			// Act_e_top
			// 
			this.Act_e_top.Location = new System.Drawing.Point(440, 32);
			this.Act_e_top.Name = "Act_e_top";
			this.Act_e_top.TabIndex = 23;
			this.Act_e_top.Text = "Top / Root ?";
			this.Act_e_top.CheckedChanged += new System.EventHandler(this.Act_e_top_CheckedChanged);
			// 
			// FAct
			// 
			this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
			this.ClientSize = new System.Drawing.Size(608, 349);
			this.Controls.Add(this.TDB_abgrp);
			this.Controls.Add(this.groupBox1);
			this.Name = "FAct";
			this.Text = "Actions";
			this.Load += new System.EventHandler(this.FAct_Load);
			this.groupBox1.ResumeLayout(false);
			this.TDB_abgrp.ResumeLayout(false);
			((System.ComponentModel.ISupportInitialize)(this.Act_e_ord)).EndInit();
			this.ResumeLayout(false);
		}
		#endregion

		#region Form callbacks
		/// <summary>
		/// "Select" button: opens a SelForm selection dialog populated via AT.Sel
		/// and wires its Accept event to <see cref="TDB_ab_sel_Click_Return"/>.
		/// </summary>
		private void TDB_ab_sel_Click(object sender, System.EventArgs e)
		{
			SelForm Fsel = new SelForm();
			AT.Sel(Fsel.GetLV);
			Fsel.Accept += new EventHandler(TDB_ab_sel_Click_Return);
			Fsel.ShowDialog(this);
		}

		/// <summary>
		/// Accept callback from the selection dialog: loads the chosen record via
		/// AT.Get and copies its fields into the form controls. A parent id of -1
		/// marks a top/root action (parent combo hidden, ishost set).
		/// </summary>
		void TDB_ab_sel_Click_Return(object sender, EventArgs e)
		{
			int id = -1, rows = 0;
			SelForm Fsel = (SelForm)sender;
			id = Fsel.GetID;
			AT.Get(id, ref rows);
			tdb_e_id.Text = AT.ObjId.ToString();
			tdb_e_bez.Text = AT.ObjBez;
			tdb_e_text.Text = AT.ObjText;
			this.Act_e_exeflag.SelectedValue = AT.ObjExeflag;
			this.Act_e_fromres.SelectedValue = AT.ObjFrom;
			this.Act_e_tores.SelectedValue = AT.ObjTo;
			this.Act_e_dev.SelectedValue = AT.ObjDev;
			this.Act_e_actt.SelectedValue = AT.ObjActt;
			this.Act_e_ord.Value = AT.ObjOrd;
			if (AT.ObjParent == -1)
			{
				this.Act_e_top.CheckState = CheckState.Checked;
				this.Act_e_parent.Visible = false;
				ishost = true;
			}
			else
			{
				this.Act_e_parent.SelectedValue = AT.ObjParent;
				this.Act_e_top.CheckState = CheckState.Unchecked;
				this.Act_e_parent.Visible = true;
				ishost = false;
			}
		}

		/// <summary>"Exit" button: closes the form.</summary>
		private void TDB_ab_exit_Click(object sender, System.EventArgs e)
		{
			Close();
		}

		/// <summary>
		/// "Insert" button: creates a new record from the form fields via
		/// AT.InsUpd(true, ...) and shows the new id.
		/// NOTE(review): the (int)SelectedValue casts throw if a combo has no
		/// selection — presumably FAct_Load guarantees a selection; confirm.
		/// </summary>
		private void TDB_ab_ins_Click(object sender, System.EventArgs e)
		{
			AT.InsUpd(true, tdb_e_bez.Text, tdb_e_text.Text, (int)this.Act_e_exeflag.SelectedValue, (int)this.Act_e_fromres.SelectedValue, (int)this.Act_e_tores.SelectedValue, (int)this.Act_e_dev.SelectedValue, (int)this.Act_e_actt.SelectedValue);
			tdb_e_id.Text = AT.ObjId.ToString();
		}

		/// <summary>
		/// "Update" button: saves the form fields over the current record via
		/// AT.InsUpd(false, ...). Same unguarded SelectedValue casts as insert.
		/// </summary>
		private void TDB_ab_upd_Click(object sender, System.EventArgs e)
		{
			AT.InsUpd(false, tdb_e_bez.Text, tdb_e_text.Text, (int)this.Act_e_exeflag.SelectedValue, (int)this.Act_e_fromres.SelectedValue, (int)this.Act_e_tores.SelectedValue, (int)this.Act_e_dev.SelectedValue, (int)this.Act_e_actt.SelectedValue);
		}

		/// <summary>
		/// "Delete" button: re-fetches the record whose id is shown in tdb_e_id,
		/// then deletes it. NOTE(review): Convert.ToInt32 throws if the id field
		/// is empty (e.g. right after Clear) — confirm intended behavior.
		/// </summary>
		private void TDB_ab_del_Click(object sender, System.EventArgs e)
		{
			int rows = 0;
			AT.Get(Convert.ToInt32(tdb_e_id.Text), ref rows);
			AT.Delete();
		}

		/// <summary>
		/// "Clear" button: blanks the text fields and resets the combos/order
		/// spinner to their neutral values (-1 / 1).
		/// </summary>
		private void TDB_ab_clr_Click(object sender, System.EventArgs e)
		{
			tdb_e_id.Text = "";
			tdb_e_bez.Text = "";
			tdb_e_text.Text = "";
			this.Act_e_fromres.SelectedValue = -1;
			this.Act_e_tores.SelectedValue = -1;
			this.Act_e_exeflag.SelectedValue = -1;
			this.Act_e_dev.SelectedValue = -1;
			this.Act_e_ord.Value = 1;
		}

		/// <summary>
		/// Toggles top/root mode: a checked box hides the parent combo and sets
		/// ishost; unchecked shows it again.
		/// </summary>
		private void Act_e_top_CheckedChanged(object sender, System.EventArgs e)
		{
			if (this.Act_e_top.CheckState == CheckState.Checked)
			{
				this.Act_e_parent.Visible = false;
				ishost = true;
			}
			else
			{
				this.Act_e_parent.Visible = true;
				ishost = false;
			}
		}

		/// <summary>
		/// Form load: fills every combo box through the tdbgui helper objects —
		/// execution flags, from/to reservation status, devices, action types and
		/// the parent-action list.
		/// </summary>
		private void FAct_Load(object sender, System.EventArgs e)
		{
			tdbgui.GUIsta Sexe = new tdbgui.GUIsta();
			Sexe.ObjOptional = true;
			Sexe.ObjTyp = tdb.StatusTypes.exeflag;
			Sexe.SetCombo(this.Act_e_exeflag);
			tdbgui.GUIsta From = new tdbgui.GUIsta();
			From.ObjOptional = true;
			From.ObjTyp = tdb.StatusTypes.stares;
			From.SetCombo(this.Act_e_fromres);
			tdbgui.GUIsta To = new tdbgui.GUIsta();
			To.ObjTyp = tdb.StatusTypes.stares;
			To.ObjOptional = true;
			To.SetCombo(this.Act_e_tores);
			tdbgui.GUIsta Sdev = new tdbgui.GUIsta();
			Sdev.ObjOptional = true;
			Sdev.ObjTyp = tdb.StatusTypes.device;
			Sdev.SetCombo(this.Act_e_dev);
			tdbgui.GUIactt At = new tdbgui.GUIactt();
			At.SetCombo(this.Act_e_actt);
			tdbgui.GUIact A = new tdbgui.GUIact();
			A.SetCombo(this.Act_e_parent);
		}
		#endregion
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace Apache.Ignite.Core.Impl.Binary { using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.IO; using Apache.Ignite.Core.Binary; using Apache.Ignite.Core.Impl.Binary.IO; using Apache.Ignite.Core.Impl.Binary.Structure; using Apache.Ignite.Core.Impl.Common; /// <summary> /// Binary reader implementation. /// </summary> internal class BinaryReader : IBinaryReader, IBinaryRawReader { /** Marshaller. */ private readonly Marshaller _marsh; /** Parent builder. */ private readonly BinaryObjectBuilder _builder; /** Handles. */ private BinaryReaderHandleDictionary _hnds; /** Current position. */ private int _curPos; /** Current raw flag. */ private bool _curRaw; /** Detach flag. */ private bool _detach; /** Binary read mode. */ private BinaryMode _mode; /** Current type structure tracker. */ private BinaryStructureTracker _curStruct; /** Current schema. */ private int[] _curSchema; /** Current schema with positions. */ private Dictionary<int, int> _curSchemaMap; /** Current header. */ private BinaryObjectHeader _curHdr; /// <summary> /// Constructor. 
/// </summary> /// <param name="marsh">Marshaller.</param> /// <param name="stream">Input stream.</param> /// <param name="mode">The mode.</param> /// <param name="builder">Builder.</param> public BinaryReader (Marshaller marsh, IBinaryStream stream, BinaryMode mode, BinaryObjectBuilder builder) { _marsh = marsh; _mode = mode; _builder = builder; _curPos = stream.Position; Stream = stream; } /// <summary> /// Gets the marshaller. /// </summary> public Marshaller Marshaller { get { return _marsh; } } /** <inheritdoc /> */ public IBinaryRawReader GetRawReader() { MarkRaw(); return this; } /** <inheritdoc /> */ public bool ReadBoolean(string fieldName) { return ReadField(fieldName, r => r.ReadBoolean(), BinaryUtils.TypeBool); } /** <inheritdoc /> */ public bool ReadBoolean() { return Stream.ReadBool(); } /** <inheritdoc /> */ public bool[] ReadBooleanArray(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadBooleanArray, BinaryUtils.TypeArrayBool); } /** <inheritdoc /> */ public bool[] ReadBooleanArray() { return Read(BinaryUtils.ReadBooleanArray, BinaryUtils.TypeArrayBool); } /** <inheritdoc /> */ public byte ReadByte(string fieldName) { return ReadField(fieldName, ReadByte, BinaryUtils.TypeByte); } /** <inheritdoc /> */ public byte ReadByte() { return Stream.ReadByte(); } /** <inheritdoc /> */ public byte[] ReadByteArray(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadByteArray, BinaryUtils.TypeArrayByte); } /** <inheritdoc /> */ public byte[] ReadByteArray() { return Read(BinaryUtils.ReadByteArray, BinaryUtils.TypeArrayByte); } /** <inheritdoc /> */ public short ReadShort(string fieldName) { return ReadField(fieldName, ReadShort, BinaryUtils.TypeShort); } /** <inheritdoc /> */ public short ReadShort() { return Stream.ReadShort(); } /** <inheritdoc /> */ public short[] ReadShortArray(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadShortArray, BinaryUtils.TypeArrayShort); } /** <inheritdoc /> */ public short[] 
ReadShortArray() { return Read(BinaryUtils.ReadShortArray, BinaryUtils.TypeArrayShort); } /** <inheritdoc /> */ public char ReadChar(string fieldName) { return ReadField(fieldName, ReadChar, BinaryUtils.TypeChar); } /** <inheritdoc /> */ public char ReadChar() { return Stream.ReadChar(); } /** <inheritdoc /> */ public char[] ReadCharArray(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadCharArray, BinaryUtils.TypeArrayChar); } /** <inheritdoc /> */ public char[] ReadCharArray() { return Read(BinaryUtils.ReadCharArray, BinaryUtils.TypeArrayChar); } /** <inheritdoc /> */ public int ReadInt(string fieldName) { return ReadField(fieldName, ReadInt, BinaryUtils.TypeInt); } /** <inheritdoc /> */ public int ReadInt() { return Stream.ReadInt(); } /** <inheritdoc /> */ public int[] ReadIntArray(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadIntArray, BinaryUtils.TypeArrayInt); } /** <inheritdoc /> */ public int[] ReadIntArray() { return Read(BinaryUtils.ReadIntArray, BinaryUtils.TypeArrayInt); } /** <inheritdoc /> */ public long ReadLong(string fieldName) { return ReadField(fieldName, ReadLong, BinaryUtils.TypeLong); } /** <inheritdoc /> */ public long ReadLong() { return Stream.ReadLong(); } /** <inheritdoc /> */ public long[] ReadLongArray(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadLongArray, BinaryUtils.TypeArrayLong); } /** <inheritdoc /> */ public long[] ReadLongArray() { return Read(BinaryUtils.ReadLongArray, BinaryUtils.TypeArrayLong); } /** <inheritdoc /> */ public float ReadFloat(string fieldName) { return ReadField(fieldName, ReadFloat, BinaryUtils.TypeFloat); } /** <inheritdoc /> */ public float ReadFloat() { return Stream.ReadFloat(); } /** <inheritdoc /> */ public float[] ReadFloatArray(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadFloatArray, BinaryUtils.TypeArrayFloat); } /** <inheritdoc /> */ public float[] ReadFloatArray() { return Read(BinaryUtils.ReadFloatArray, 
BinaryUtils.TypeArrayFloat); } /** <inheritdoc /> */ public double ReadDouble(string fieldName) { return ReadField(fieldName, ReadDouble, BinaryUtils.TypeDouble); } /** <inheritdoc /> */ public double ReadDouble() { return Stream.ReadDouble(); } /** <inheritdoc /> */ public double[] ReadDoubleArray(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadDoubleArray, BinaryUtils.TypeArrayDouble); } /** <inheritdoc /> */ public double[] ReadDoubleArray() { return Read(BinaryUtils.ReadDoubleArray, BinaryUtils.TypeArrayDouble); } /** <inheritdoc /> */ public decimal? ReadDecimal(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadDecimal, BinaryUtils.TypeDecimal); } /** <inheritdoc /> */ public decimal? ReadDecimal() { return Read(BinaryUtils.ReadDecimal, BinaryUtils.TypeDecimal); } /** <inheritdoc /> */ public decimal?[] ReadDecimalArray(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadDecimalArray, BinaryUtils.TypeArrayDecimal); } /** <inheritdoc /> */ public decimal?[] ReadDecimalArray() { return Read(BinaryUtils.ReadDecimalArray, BinaryUtils.TypeArrayDecimal); } /** <inheritdoc /> */ public DateTime? ReadTimestamp(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadTimestamp, BinaryUtils.TypeTimestamp); } /** <inheritdoc /> */ public DateTime? 
ReadTimestamp() { return Read(BinaryUtils.ReadTimestamp, BinaryUtils.TypeTimestamp); } /** <inheritdoc /> */ public DateTime?[] ReadTimestampArray(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadTimestampArray, BinaryUtils.TypeArrayTimestamp); } /** <inheritdoc /> */ public DateTime?[] ReadTimestampArray() { return Read(BinaryUtils.ReadTimestampArray, BinaryUtils.TypeArrayTimestamp); } /** <inheritdoc /> */ public string ReadString(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadString, BinaryUtils.TypeString); } /** <inheritdoc /> */ public string ReadString() { return Read(BinaryUtils.ReadString, BinaryUtils.TypeString); } /** <inheritdoc /> */ public string[] ReadStringArray(string fieldName) { return ReadField(fieldName, r => BinaryUtils.ReadArray<string>(r, false), BinaryUtils.TypeArrayString); } /** <inheritdoc /> */ public string[] ReadStringArray() { return Read(r => BinaryUtils.ReadArray<string>(r, false), BinaryUtils.TypeArrayString); } /** <inheritdoc /> */ public Guid? ReadGuid(string fieldName) { return ReadField(fieldName, BinaryUtils.ReadGuid, BinaryUtils.TypeGuid); } /** <inheritdoc /> */ public Guid? ReadGuid() { return Read(BinaryUtils.ReadGuid, BinaryUtils.TypeGuid); } /** <inheritdoc /> */ public Guid?[] ReadGuidArray(string fieldName) { return ReadField(fieldName, r => BinaryUtils.ReadArray<Guid?>(r, false), BinaryUtils.TypeArrayGuid); } /** <inheritdoc /> */ public Guid?[] ReadGuidArray() { return Read(r => BinaryUtils.ReadArray<Guid?>(r, false), BinaryUtils.TypeArrayGuid); } /** <inheritdoc /> */ public T ReadEnum<T>(string fieldName) { return SeekField(fieldName) ? 
ReadEnum<T>() : default(T); } /** <inheritdoc /> */ public T ReadEnum<T>() { var hdr = ReadByte(); switch (hdr) { case BinaryUtils.HdrNull: return default(T); case BinaryUtils.TypeEnum: // Never read enums in binary mode when reading a field (we do not support half-binary objects) return ReadEnum0<T>(this, false); case BinaryUtils.HdrFull: // Unregistered enum written as serializable Stream.Seek(-1, SeekOrigin.Current); return ReadObject<T>(); default: throw new BinaryObjectException( string.Format("Invalid header on enum deserialization. Expected: {0} or {1} but was: {2}", BinaryUtils.TypeEnum, BinaryUtils.HdrFull, hdr)); } } /** <inheritdoc /> */ public T[] ReadEnumArray<T>(string fieldName) { return ReadField(fieldName, r => BinaryUtils.ReadArray<T>(r, true), BinaryUtils.TypeArrayEnum); } /** <inheritdoc /> */ public T[] ReadEnumArray<T>() { return Read(r => BinaryUtils.ReadArray<T>(r, true), BinaryUtils.TypeArrayEnum); } /** <inheritdoc /> */ public T ReadObject<T>(string fieldName) { if (_curRaw) throw new BinaryObjectException("Cannot read named fields after raw data is read."); if (SeekField(fieldName)) return Deserialize<T>(); return default(T); } /** <inheritdoc /> */ public T ReadObject<T>() { return Deserialize<T>(); } /** <inheritdoc /> */ public T[] ReadArray<T>(string fieldName) { return ReadField(fieldName, r => BinaryUtils.ReadArray<T>(r, true), BinaryUtils.TypeArray); } /** <inheritdoc /> */ public T[] ReadArray<T>() { return Read(r => BinaryUtils.ReadArray<T>(r, true), BinaryUtils.TypeArray); } /** <inheritdoc /> */ public ICollection ReadCollection(string fieldName) { return ReadCollection(fieldName, null, null); } /** <inheritdoc /> */ public ICollection ReadCollection() { return ReadCollection(null, null); } /** <inheritdoc /> */ public ICollection ReadCollection(string fieldName, Func<int, ICollection> factory, Action<ICollection, object> adder) { return ReadField(fieldName, r => BinaryUtils.ReadCollection(r, factory, adder), 
BinaryUtils.TypeCollection); } /** <inheritdoc /> */ public ICollection ReadCollection(Func<int, ICollection> factory, Action<ICollection, object> adder) { return Read(r => BinaryUtils.ReadCollection(r, factory, adder), BinaryUtils.TypeCollection); } /** <inheritdoc /> */ public IDictionary ReadDictionary(string fieldName) { return ReadDictionary(fieldName, null); } /** <inheritdoc /> */ public IDictionary ReadDictionary() { return ReadDictionary((Func<int, IDictionary>) null); } /** <inheritdoc /> */ public IDictionary ReadDictionary(string fieldName, Func<int, IDictionary> factory) { return ReadField(fieldName, r => BinaryUtils.ReadDictionary(r, factory), BinaryUtils.TypeDictionary); } /** <inheritdoc /> */ public IDictionary ReadDictionary(Func<int, IDictionary> factory) { return Read(r => BinaryUtils.ReadDictionary(r, factory), BinaryUtils.TypeDictionary); } /// <summary> /// Enable detach mode for the next object read. /// </summary> public BinaryReader DetachNext() { _detach = true; return this; } /// <summary> /// Deserialize object. /// </summary> /// <returns>Deserialized object.</returns> public T Deserialize<T>() { T res; // ReSharper disable once CompareNonConstrainedGenericWithNull if (!TryDeserialize(out res) && default(T) != null) throw new BinaryObjectException(string.Format("Invalid data on deserialization. " + "Expected: '{0}' But was: null", typeof (T))); return res; } /// <summary> /// Deserialize object. 
/// </summary> /// <returns>Deserialized object.</returns> public bool TryDeserialize<T>(out T res) { int pos = Stream.Position; byte hdr = Stream.ReadByte(); var doDetach = _detach; // save detach flag into a var and reset so it does not go deeper _detach = false; switch (hdr) { case BinaryUtils.HdrNull: res = default(T); return false; case BinaryUtils.HdrHnd: res = ReadHandleObject<T>(pos); return true; case BinaryUtils.HdrFull: res = ReadFullObject<T>(pos); return true; case BinaryUtils.TypeBinary: res = ReadBinaryObject<T>(doDetach); return true; case BinaryUtils.TypeEnum: res = ReadEnum0<T>(this, _mode != BinaryMode.Deserialize); return true; } if (BinarySystemHandlers.TryReadSystemType(hdr, this, out res)) return true; throw new BinaryObjectException("Invalid header on deserialization [pos=" + pos + ", hdr=" + hdr + ']'); } /// <summary> /// Reads the binary object. /// </summary> private T ReadBinaryObject<T>(bool doDetach) { var len = Stream.ReadInt(); var binaryBytesPos = Stream.Position; if (_mode != BinaryMode.Deserialize) return TypeCaster<T>.Cast(ReadAsBinary(binaryBytesPos, len, doDetach)); Stream.Seek(len, SeekOrigin.Current); var offset = Stream.ReadInt(); var retPos = Stream.Position; Stream.Seek(binaryBytesPos + offset, SeekOrigin.Begin); _mode = BinaryMode.KeepBinary; try { return Deserialize<T>(); } finally { _mode = BinaryMode.Deserialize; Stream.Seek(retPos, SeekOrigin.Begin); } } /// <summary> /// Reads the binary object in binary form. 
/// </summary> private BinaryObject ReadAsBinary(int binaryBytesPos, int dataLen, bool doDetach) { try { Stream.Seek(dataLen + binaryBytesPos, SeekOrigin.Begin); var offs = Stream.ReadInt(); // offset inside data var pos = binaryBytesPos + offs; var hdr = BinaryObjectHeader.Read(Stream, pos); if (!doDetach) return new BinaryObject(_marsh, Stream.GetArray(), pos, hdr); Stream.Seek(pos, SeekOrigin.Begin); return new BinaryObject(_marsh, Stream.ReadByteArray(hdr.Length), 0, hdr); } finally { Stream.Seek(binaryBytesPos + dataLen + 4, SeekOrigin.Begin); } } /// <summary> /// Reads the full object. /// </summary> [SuppressMessage("Microsoft.Performance", "CA1804:RemoveUnusedLocals", MessageId = "hashCode")] private T ReadFullObject<T>(int pos) { var hdr = BinaryObjectHeader.Read(Stream, pos); // Validate protocol version. BinaryUtils.ValidateProtocolVersion(hdr.Version); try { // Already read this object? object hndObj; if (_hnds != null && _hnds.TryGetValue(pos, out hndObj)) return (T) hndObj; if (hdr.IsUserType && _mode == BinaryMode.ForceBinary) { BinaryObject portObj; if (_detach) { Stream.Seek(pos, SeekOrigin.Begin); portObj = new BinaryObject(_marsh, Stream.ReadByteArray(hdr.Length), 0, hdr); } else portObj = new BinaryObject(_marsh, Stream.GetArray(), pos, hdr); T obj = _builder == null ? TypeCaster<T>.Cast(portObj) : TypeCaster<T>.Cast(_builder.Child(portObj)); AddHandle(pos, obj); return obj; } else { // Find descriptor. var desc = _marsh.GetDescriptor(hdr.IsUserType, hdr.TypeId); // Instantiate object. if (desc.Type == null) { if (desc is BinarySurrogateTypeDescriptor) throw new BinaryObjectException("Unknown type ID: " + hdr.TypeId); throw new BinaryObjectException("No matching type found for object [typeId=" + desc.TypeId + ", typeName=" + desc.TypeName + ']'); } // Preserve old frame. var oldHdr = _curHdr; int oldPos = _curPos; var oldStruct = _curStruct; bool oldRaw = _curRaw; var oldSchema = _curSchema; var oldSchemaMap = _curSchemaMap; // Set new frame. 
_curHdr = hdr; _curPos = pos; SetCurSchema(desc); _curStruct = new BinaryStructureTracker(desc, desc.ReaderTypeStructure); _curRaw = false; // Read object. Stream.Seek(pos + BinaryObjectHeader.Size, SeekOrigin.Begin); var obj = desc.Serializer.ReadBinary<T>(this, desc.Type, pos); _curStruct.UpdateReaderStructure(); // Restore old frame. _curHdr = oldHdr; _curPos = oldPos; _curStruct = oldStruct; _curRaw = oldRaw; _curSchema = oldSchema; _curSchemaMap = oldSchemaMap; return obj; } } finally { // Advance stream pointer. Stream.Seek(pos + hdr.Length, SeekOrigin.Begin); } } /// <summary> /// Sets the current schema. /// </summary> private void SetCurSchema(IBinaryTypeDescriptor desc) { if (_curHdr.HasSchema) { _curSchema = desc.Schema.Get(_curHdr.SchemaId); if (_curSchema == null) { _curSchema = ReadSchema(); desc.Schema.Add(_curHdr.SchemaId, _curSchema); } } } /// <summary> /// Reads the schema. /// </summary> private int[] ReadSchema() { if (_curHdr.IsCompactFooter) { // Get schema from Java var schema = Marshaller.Ignite.BinaryProcessor.GetSchema(_curHdr.TypeId, _curHdr.SchemaId); if (schema == null) throw new BinaryObjectException("Cannot find schema for object with compact footer [" + "typeId=" + _curHdr.TypeId + ", schemaId=" + _curHdr.SchemaId + ']'); return schema; } Stream.Seek(_curPos + _curHdr.SchemaOffset, SeekOrigin.Begin); var count = _curHdr.SchemaFieldCount; var offsetSize = _curHdr.SchemaFieldOffsetSize; var res = new int[count]; for (int i = 0; i < count; i++) { res[i] = Stream.ReadInt(); Stream.Seek(offsetSize, SeekOrigin.Current); } return res; } /// <summary> /// Reads the handle object. /// </summary> private T ReadHandleObject<T>(int pos) { // Get handle position. int hndPos = pos - Stream.ReadInt(); int retPos = Stream.Position; try { object hndObj; if (_builder == null || !_builder.TryGetCachedField(hndPos, out hndObj)) { if (_hnds == null || !_hnds.TryGetValue(hndPos, out hndObj)) { // No such handler, i.e. 
we trying to deserialize inner object before deserializing outer. Stream.Seek(hndPos, SeekOrigin.Begin); hndObj = Deserialize<T>(); } // Notify builder that we deserialized object on other location. if (_builder != null) _builder.CacheField(hndPos, hndObj); } return (T) hndObj; } finally { // Position stream to correct place. Stream.Seek(retPos, SeekOrigin.Begin); } } /// <summary> /// Adds a handle to the dictionary. /// </summary> /// <param name="pos">Position.</param> /// <param name="obj">Object.</param> internal void AddHandle(int pos, object obj) { if (_hnds == null) _hnds = new BinaryReaderHandleDictionary(pos, obj); else _hnds.Add(pos, obj); } /// <summary> /// Underlying stream. /// </summary> public IBinaryStream Stream { get; private set; } /// <summary> /// Mark current output as raw. /// </summary> private void MarkRaw() { if (!_curRaw) { _curRaw = true; Stream.Seek(_curPos + _curHdr.GetRawOffset(Stream, _curPos), SeekOrigin.Begin); } } /// <summary> /// Seeks the field by name. /// </summary> private bool SeekField(string fieldName) { if (_curRaw) throw new BinaryObjectException("Cannot read named fields after raw data is read."); if (!_curHdr.HasSchema) return false; var actionId = _curStruct.CurStructAction; var fieldId = _curStruct.GetFieldId(fieldName); if (_curSchema == null || actionId >= _curSchema.Length || fieldId != _curSchema[actionId]) { _curSchemaMap = _curSchemaMap ?? BinaryObjectSchemaSerializer.ReadSchema(Stream, _curPos, _curHdr, () => _curSchema).ToDictionary(); _curSchema = null; // read order is different, ignore schema for future reads int pos; if (!_curSchemaMap.TryGetValue(fieldId, out pos)) return false; Stream.Seek(pos + _curPos, SeekOrigin.Begin); } return true; } /// <summary> /// Seeks specified field and invokes provided func. /// </summary> private T ReadField<T>(string fieldName, Func<IBinaryStream, T> readFunc, byte expHdr) { return SeekField(fieldName) ? 
Read(readFunc, expHdr) : default(T); } /// <summary> /// Seeks specified field and invokes provided func. /// </summary> private T ReadField<T>(string fieldName, Func<BinaryReader, T> readFunc, byte expHdr) { return SeekField(fieldName) ? Read(readFunc, expHdr) : default(T); } /// <summary> /// Seeks specified field and invokes provided func. /// </summary> private T ReadField<T>(string fieldName, Func<T> readFunc, byte expHdr) { return SeekField(fieldName) ? Read(readFunc, expHdr) : default(T); } /// <summary> /// Reads header and invokes specified func if the header is not null. /// </summary> private T Read<T>(Func<BinaryReader, T> readFunc, byte expHdr) { return Read(() => readFunc(this), expHdr); } /// <summary> /// Reads header and invokes specified func if the header is not null. /// </summary> private T Read<T>(Func<IBinaryStream, T> readFunc, byte expHdr) { return Read(() => readFunc(Stream), expHdr); } /// <summary> /// Reads header and invokes specified func if the header is not null. /// </summary> private T Read<T>(Func<T> readFunc, byte expHdr) { var hdr = ReadByte(); if (hdr == BinaryUtils.HdrNull) return default(T); if (hdr == BinaryUtils.HdrHnd) return ReadHandleObject<T>(Stream.Position - 1); if (expHdr != hdr) throw new BinaryObjectException(string.Format("Invalid header on deserialization. " + "Expected: {0} but was: {1}", expHdr, hdr)); return readFunc(); } /// <summary> /// Reads the enum. /// </summary> private static T ReadEnum0<T>(BinaryReader reader, bool keepBinary) { var enumType = reader.ReadInt(); var enumValue = reader.ReadInt(); if (!keepBinary) return BinaryUtils.GetEnumValue<T>(enumValue, enumType, reader.Marshaller); return TypeCaster<T>.Cast(new BinaryEnum(enumType, enumValue, reader.Marshaller)); } } }
using System;
using System.Collections.Generic;
using VersionOne.SDK.ObjectModel.Filters;
using VersionOne.SDK.ObjectModel.List;

namespace VersionOne.SDK.ObjectModel
{
    /// <summary>
    /// Represents an issue in the VersionOne system.
    /// </summary>
    [MetaData("Issue")]
    public class Issue : ProjectAsset
    {
        internal Issue(V1Instance instance) : base(instance) { }

        internal Issue(AssetID id, V1Instance instance) : base(id, instance) { }

        /// <summary>
        /// This Issue rank order among all Issues.
        /// </summary>
        [MetaRenamed("Order")]
        public Rank<Issue> RankOrder
        {
            get { return GetRank<Issue>("Order"); }
        }

        /// <summary>
        /// Stories and Defects associated with this Issue.
        /// </summary>
        /// <param name="filter">Criteria to filter stories and defects on. Pass a DefectFilter or StoryFilter to get only Defects or Stories, respectively.</param>
        public ICollection<PrimaryWorkitem> GetPrimaryWorkitems(PrimaryWorkitemFilter filter)
        {
            filter = filter ?? new PrimaryWorkitemFilter();
            filter.Issues.Clear();
            filter.Issues.Add(this);
            return Instance.Get.PrimaryWorkitems(filter);
        }

        /// <summary>
        /// Requests associated with this Issue.
        /// </summary>
        /// <param name="filter">Criteria to filter requests on.</param>
        public ICollection<Request> GetRequests(RequestFilter filter)
        {
            filter = filter ?? new RequestFilter();
            filter.Issues.Clear();
            filter.Issues.Add(this);
            return Instance.Get.Requests(filter);
        }

        /// <summary>
        /// Stories and Defects that cannot be completed because of this Issue.
        /// </summary>
        /// <param name="filter">Criteria to filter stories and defects on. Pass a DefectFilter or StoryFilter to get only Defects or Stories, respectively.</param>
        public ICollection<PrimaryWorkitem> GetBlockedPrimaryWorkitems(PrimaryWorkitemFilter filter)
        {
            filter = filter ?? new PrimaryWorkitemFilter();
            // BUG FIX: this method previously filtered on Issues (identical to
            // GetPrimaryWorkitems), so it returned *associated* workitems rather than
            // *blocked* ones. Filter on BlockingIssues, consistent with GetBlockedEpics.
            filter.BlockingIssues.Clear();
            filter.BlockingIssues.Add(this);
            return Instance.Get.PrimaryWorkitems(filter);
        }

        /// <summary>
        /// Epics associated with this Issue.
        /// </summary>
        /// <param name="filter">Criteria to filter epics on.</param>
        /// <returns> A collection epics that belong to this issue filtered by the
        /// passed in filter.</returns>
        public ICollection<Epic> GetEpics(EpicFilter filter)
        {
            filter = filter ?? new EpicFilter();
            filter.Issues.Clear();
            filter.Issues.Add(this);
            return Instance.Get.Epics(filter);
        }

        /// <summary>
        /// Epics that cannot be completed because of this Issue.
        /// </summary>
        /// <param name="filter">Criteria to filter epics on.</param>
        /// <returns> A collection epics cannot be completed because of
        /// this Issue filtered by the passed in filter.</returns>
        public ICollection<Epic> GetBlockedEpics(EpicFilter filter)
        {
            filter = filter ?? new EpicFilter();
            filter.BlockingIssues.Clear();
            filter.BlockingIssues.Add(this);
            return Instance.Get.Epics(filter);
        }

        /// <summary>
        /// This Issue's Source
        /// </summary>
        public IListValueProperty Source
        {
            get { return GetListValue<WorkitemSource>("Source"); }
        }

        /// <summary>
        /// This Issue's Type
        /// </summary>
        public IListValueProperty Type
        {
            // Underlying meta attribute is named "Category".
            get { return GetListValue<IssueType>("Category"); }
        }

        /// <summary>
        /// This Issue's Priority
        /// </summary>
        public IListValueProperty Priority
        {
            get { return GetListValue<IssuePriority>("Priority"); }
        }

        /// <summary>
        /// Reason this Issue was resolved.
        /// </summary>
        public IListValueProperty ResolutionReason
        {
            get { return GetListValue<IssueResolutionReason>("ResolutionReason"); }
        }

        /// <summary>
        /// Text field for the description of how this Issue was resolved.
        /// </summary>
        public string ResolutionDetails
        {
            get { return Get<string>("Resolution"); }
            set { Set("Resolution", value); }
        }

        /// <summary>
        /// Name of person or organization originating this Issue.
        /// </summary>
        public string IdentifiedBy
        {
            get { return Get<string>("IdentifiedBy"); }
            set { Set("IdentifiedBy", value); }
        }

        /// <summary>
        /// Cross-reference of this Issue with an external system.
        /// </summary>
        public string Reference
        {
            get { return Get<string>("Reference"); }
            set { Set("Reference", value); }
        }

        /// <summary>
        /// Date this Issue brings the system down to a screeching halt
        /// </summary>
        public DateTime? TargetDate
        {
            get { return Get<DateTime?>("EndDate"); }
            set
            {
                // Store only the date portion; time-of-day is not meaningful here.
                if (value.HasValue)
                    Set("EndDate", value.Value.Date);
                else
                    Set("EndDate", value);
            }
        }

        /// <summary>
        /// The Team this Issue is assigned to.
        /// </summary>
        public Team Team
        {
            get { return GetRelation<Team>("Team"); }
            set { SetRelation("Team", value); }
        }

        /// <summary>
        /// The Member who owns this Issue.
        /// </summary>
        public Member Owner
        {
            get { return GetRelation<Member>("Owner"); }
            set { SetRelation("Owner", value); }
        }

        /// <summary>
        /// The Retrospectives related to this Issue
        /// </summary>
        public ICollection<Retrospective> Retrospectives
        {
            get { return GetMultiRelation<Retrospective>("Retrospectives"); }
        }

        /// <summary>
        /// Inactivates the Issue
        /// </summary>
        /// <exception cref="InvalidOperationException">The Issue is an invalid state for the Operation, e.g. it is already closed.</exception>
        internal override void CloseImpl()
        {
            Instance.ExecuteOperation<Issue>(this, "Inactivate");
        }

        /// <summary>
        /// Reactivates the Issue
        /// </summary>
        internal override void ReactivateImpl()
        {
            Instance.ExecuteOperation<Issue>(this, "Reactivate");
        }

        /// <summary>
        /// Creates a Story from this Issue.
        /// </summary>
        /// <returns>A Story in the VersionOne system related to this Issue.</returns>
        public Story GenerateStory()
        {
            return GenerateStory(null);
        }

        /// <summary>
        /// Creates a Story from this Issue.
        /// </summary>
        /// <param name="attributes">required attributes</param>
        /// <returns>A Story in the VersionOne system related to this Issue.</returns>
        public Story GenerateStory(IDictionary<string, object> attributes)
        {
            Story story = Instance.New<Story>(this);
            Instance.Create.AddAttributes(story, attributes);
            story.Save();
            return story;
        }

        /// <summary>
        /// Creates a Defect from this Issue.
        /// </summary>
        /// <returns>A Defect in the VersionOne system related to this Issue.</returns>
        public Defect GenerateDefect()
        {
            return GenerateDefect(null);
        }

        /// <summary>
        /// Creates a Defect from this Issue.
        /// </summary>
        /// <param name="attributes">required attributes</param>
        /// <returns>A Defect in the VersionOne system related to this Issue.</returns>
        public Defect GenerateDefect(IDictionary<string, object> attributes)
        {
            Defect defect = Instance.New<Defect>(this);
            Instance.Create.AddAttributes(defect, attributes);
            defect.Save();
            return defect;
        }
    }
}
/* Project Orleans Cloud Service SDK ver. 1.0 Copyright (c) Microsoft Corporation All rights reserved. MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/
using System;
using System.Threading;
using System.Threading.Tasks;

namespace Orleans.Runtime
{
    /// <summary>
    /// A timer that fires inside a grain activation. Each tick is dispatched via
    /// the Orleans runtime into the activation context captured at construction,
    /// so callbacks run under the grain's single-threaded execution guarantee.
    /// </summary>
    internal class GrainTimer : IDisposable
    {
        private Func<object, Task> asyncCallback;
        private AsyncTaskSafeTimer timer;
        private readonly TimeSpan dueTime;
        private readonly TimeSpan timerFrequency;
        private DateTime previousTickTime;
        private int totalNumTicks;
        private static readonly TraceLogger logger = TraceLogger.GetLogger("GrainTimer", TraceLogger.LoggerType.Runtime);
        private Task currentlyExecutingTickTask;
        private readonly ActivationData activationData;

        internal string Name { get; private set; }

        // The timer counts as stopped once either the underlying timer has been
        // disposed or the callback has been cleared by Stop()/Dispose().
        private bool TimerAlreadyStopped
        {
            get { return timer == null || asyncCallback == null; }
        }

        private GrainTimer(Func<object, Task> asyncCallback, object state, TimeSpan dueTime, TimeSpan period, string name)
        {
            // Capture the activation context up front so every tick is routed back into it.
            var context = RuntimeContext.Current.ActivationContext;
            activationData = (ActivationData) RuntimeClient.Current.CurrentActivationData;
            Name = name;
            this.asyncCallback = asyncCallback;
            timer = new AsyncTaskSafeTimer(
                tickState =>
                {
                    if (TimerAlreadyStopped)
                        return TaskDone.Done;
                    return RuntimeClient.Current.ExecAsync(() => ForwardToAsyncCallback(tickState), context);
                },
                state);
            this.dueTime = dueTime;
            timerFrequency = period;
            previousTickTime = DateTime.UtcNow;
            totalNumTicks = 0;
        }

        /// <summary>
        /// Creates a grain timer from a synchronous <see cref="TimerCallback"/>.
        /// </summary>
        internal static GrainTimer FromTimerCallback(
            TimerCallback callback,
            object state,
            TimeSpan dueTime,
            TimeSpan period,
            string name = null)
        {
            Func<object, Task> adapter = timerState =>
            {
                if (callback != null)
                    callback(timerState);
                return TaskDone.Done;
            };
            return new GrainTimer(adapter, state, dueTime, period, name);
        }

        /// <summary>
        /// Creates a grain timer from an asynchronous callback.
        /// </summary>
        internal static GrainTimer FromTaskCallback(
            Func<object, Task> asyncCallback,
            object state,
            TimeSpan dueTime,
            TimeSpan period,
            string name = null)
        {
            return new GrainTimer(asyncCallback, state, dueTime, period, name);
        }

        /// <summary>
        /// Starts the timer with the due time and period given at construction.
        /// </summary>
        /// <exception cref="ObjectDisposedException">The timer was already stopped or disposed.</exception>
        public void Start()
        {
            if (TimerAlreadyStopped)
                throw new ObjectDisposedException(String.Format("The timer {0} was already disposed.", GetFullName()));

            timer.Start(dueTime, timerFrequency);
        }

        /// <summary>
        /// Stops the timer by clearing its callback; subsequent ticks become no-ops.
        /// The underlying timer resource is released on Dispose.
        /// </summary>
        public void Stop()
        {
            asyncCallback = null;
        }

        private async Task ForwardToAsyncCallback(object state)
        {
            // AsyncSafeTimer guarantees invocations of this method are serialized,
            // so no additional locking is required here.
            if (TimerAlreadyStopped)
                return;

            totalNumTicks++;

            if (logger.IsVerbose3)
                logger.Verbose3(ErrorCode.TimerBeforeCallback, "About to make timer callback for timer {0}", GetFullName());

            try
            {
                currentlyExecutingTickTask = asyncCallback(state);
                await currentlyExecutingTickTask;

                if (logger.IsVerbose3)
                    logger.Verbose3(ErrorCode.TimerAfterCallback, "Completed timer callback for timer {0}", GetFullName());
            }
            catch (Exception exception)
            {
                // Callback failures are logged and swallowed so the timer keeps ticking.
                // (The "mesagge" typo in the log text is preserved intentionally.)
                logger.Error(
                    ErrorCode.Timer_GrainTimerCallbackError,
                    string.Format(
                        "Caught and ignored exception: {0} with mesagge: {1} thrown from timer callback {2}",
                        exception.GetType(),
                        exception.Message,
                        GetFullName()),
                    exception);
            }
            finally
            {
                previousTickTime = DateTime.UtcNow;
                currentlyExecutingTickTask = null;
                // A one-shot timer (infinite period) can release its resources
                // as soon as its single tick has completed.
                if (timerFrequency == Constants.INFINITE_TIMESPAN)
                    DisposeTimer();
            }
        }

        /// <summary>
        /// Returns the task of the tick currently in flight, or a completed task if idle.
        /// </summary>
        internal Task GetCurrentlyExecutingTickTask()
        {
            return currentlyExecutingTickTask ?? TaskDone.Done;
        }

        private string GetFullName()
        {
            // Cache the delegate locally so the null checks and the reads agree.
            var callback = asyncCallback;
            string targetText = (callback != null && callback.Target != null) ? callback.Target.ToString() : "";
            string methodText = (callback != null && callback.Method != null) ? callback.Method.ToString() : "";
            return String.Format("GrainTimer.{0} TimerCallbackHandler:{1}->{2}",
                Name == null ? "" : Name + ".",
                targetText,
                methodText);
        }

        /// <summary>
        /// Number of ticks delivered so far.
        /// </summary>
        internal int GetNumTicks()
        {
            return totalNumTicks;
        }

        // The reason we need to check CheckTimerFreeze on both the SafeTimer and this GrainTimer
        // is that SafeTimer may tick OK (no starvation by .NET thread pool), but then scheduler.QueueWorkItem
        // may not execute and starve this GrainTimer callback.
        internal bool CheckTimerFreeze(DateTime lastCheckTime)
        {
            if (TimerAlreadyStopped)
                return true;

            // First: is the underlying SafeTimer healthy (thread pool not starving it)?
            if (!timer.CheckTimerFreeze(lastCheckTime, () => Name))
                return false;

            // Second: is this GrainTimer's callback actually being run by the scheduler?
            return SafeTimerBase.CheckTimerDelay(previousTickTime, totalNumTicks,
                dueTime, timerFrequency, logger, GetFullName, ErrorCode.Timer_TimerInsideGrainIsNotTicking, true);
        }

        internal bool CheckTimerDelay()
        {
            return SafeTimerBase.CheckTimerDelay(previousTickTime, totalNumTicks,
                dueTime, timerFrequency, logger, GetFullName, ErrorCode.Timer_TimerInsideGrainIsNotTicking, false);
        }

        #region IDisposable Members

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        // Maybe called by finalizer thread with disposing=false. As per guidelines, in such a case do not touch other objects.
        // Dispose() may be called multiple times
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
                DisposeTimer();

            asyncCallback = null;
        }

        private void DisposeTimer()
        {
            var timerToDispose = timer;
            if (timerToDispose == null)
                return;

            Utils.SafeExecute(timerToDispose.Dispose);
            timer = null;
            asyncCallback = null;
            if (activationData != null)
                activationData.OnTimerDisposed(this);
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Data.Common;
using System.Diagnostics;
using System.Threading.Tasks;

namespace System.Data.SqlClient.SNI
{
    /// <summary>
    /// TdsParser state object backed by the fully managed SNI implementation.
    /// Routes packet and connection operations through <see cref="SNIProxy"/>.
    /// </summary>
    internal class TdsParserStateObjectManaged : TdsParserStateObject
    {
        private SNIMarsConnection _marsConnection;
        private SNIHandle _sessionHandle;
        private SspiClientContextStatus _sspiClientContextStatus;

        public TdsParserStateObjectManaged(TdsParser parser) : base(parser) { }

        internal TdsParserStateObjectManaged(TdsParser parser, TdsParserStateObject physicalConnection, bool async) :
            base(parser, physicalConnection, async)
        { }

        /// <summary>The managed SNI session handle for this state object (null when disposed).</summary>
        internal SNIHandle Handle => _sessionHandle;

        internal override uint Status => _sessionHandle != null ? _sessionHandle.Status : TdsEnums.SNI_UNINITIALIZED;

        internal override SessionHandle SessionHandle => SessionHandle.FromManagedSession(_sessionHandle);

        protected override bool CheckPacket(PacketHandle packet, TaskCompletionSource<object> source)
        {
            SNIPacket managedPacket = packet.ManagedPacket;
            return managedPacket.IsInvalid || source != null;
        }

        protected override void CreateSessionHandle(TdsParserStateObject physicalConnection, bool async)
        {
            Debug.Assert(physicalConnection is TdsParserStateObjectManaged, "Expected a stateObject of type " + this.GetType());
            TdsParserStateObjectManaged managedConnection = physicalConnection as TdsParserStateObjectManaged;
            _sessionHandle = managedConnection.CreateMarsSession(this, async);
        }

        /// <summary>Creates a MARS session on the underlying MARS connection.</summary>
        internal SNIMarsHandle CreateMarsSession(object callbackObject, bool async)
        {
            return _marsConnection.CreateMarsSession(callbackObject, async);
        }

        protected override uint SNIPacketGetData(PacketHandle packet, byte[] _inBuff, ref uint dataSize)
            => SNIProxy.Singleton.PacketGetData(packet.ManagedPacket, _inBuff, ref dataSize);

        internal override void CreatePhysicalSNIHandle(string serverName, bool ignoreSniOpenTimeout, long timerExpire, out byte[] instanceName, ref byte[] spnBuffer, bool flushCache, bool async, bool parallel, bool isIntegratedSecurity)
        {
            _sessionHandle = SNIProxy.Singleton.CreateConnectionHandle(this, serverName, ignoreSniOpenTimeout, timerExpire, out instanceName, ref spnBuffer, flushCache, async, parallel, isIntegratedSecurity);

            if (_sessionHandle == null)
            {
                _parser.ProcessSNIError(this);
            }
            else if (async)
            {
                // Wire up read/write completion callbacks on the new session handle.
                SNIAsyncCallback receiveCallback = new SNIAsyncCallback(ReadAsyncCallback);
                SNIAsyncCallback sendCallback = new SNIAsyncCallback(WriteAsyncCallback);
                _sessionHandle.SetAsyncCallbacks(receiveCallback, sendCallback);
            }
        }

        // Adapters from the managed-SNI callback shape to the shared base-class handlers.
        internal void ReadAsyncCallback(SNIPacket packet, uint error)
            => ReadAsyncCallback(IntPtr.Zero, PacketHandle.FromManagedPacket(packet), error);

        internal void WriteAsyncCallback(SNIPacket packet, uint sniError)
            => WriteAsyncCallback(IntPtr.Zero, PacketHandle.FromManagedPacket(packet), sniError);

        protected override void RemovePacketFromPendingList(PacketHandle packet)
        {
            // No-Op
        }

        internal override void Dispose()
        {
            SNIHandle handleToDispose = _sessionHandle;
            _sessionHandle = null;
            _marsConnection = null;

            DisposeCounters();

            if (handleToDispose != null)
            {
                handleToDispose.Dispose();
                DecrementPendingCallbacks(true); // Will dispose of GC handle.
            }

            DisposePacketCache();
        }

        internal override void DisposePacketCache()
        {
            // Managed SNI does not maintain a packet cache.
        }

        protected override void FreeGcHandle(int remaining, bool release)
        {
            // No - op
        }

        internal override bool IsFailedHandle() => _sessionHandle.Status != TdsEnums.SNI_SUCCESS;

        internal override PacketHandle ReadSyncOverAsync(int timeoutRemaining, out uint error)
        {
            SNIHandle sessionHandle = Handle;
            if (sessionHandle == null)
            {
                throw ADP.ClosedConnectionError();
            }

            SNIPacket packet = null;
            error = SNIProxy.Singleton.ReadSyncOverAsync(sessionHandle, out packet, timeoutRemaining);
            return PacketHandle.FromManagedPacket(packet);
        }

        protected override PacketHandle EmptyReadPacket => PacketHandle.FromManagedPacket(null);

        internal override bool IsPacketEmpty(PacketHandle packet) => packet.ManagedPacket == null;

        internal override void ReleasePacket(PacketHandle syncReadPacket)
        {
            syncReadPacket.ManagedPacket?.Release();
        }

        internal override uint CheckConnection()
        {
            SNIHandle sessionHandle = Handle;
            // A disposed state object reports success rather than an error.
            return sessionHandle == null ? TdsEnums.SNI_SUCCESS : SNIProxy.Singleton.CheckConnection(sessionHandle);
        }

        internal override PacketHandle ReadAsync(SessionHandle handle, out uint error)
        {
            SNIPacket packet;
            error = SNIProxy.Singleton.ReadAsync(handle.ManagedHandle, out packet);
            return PacketHandle.FromManagedPacket(packet);
        }

        internal override PacketHandle CreateAndSetAttentionPacket()
        {
            PacketHandle attentionPacket = GetResetWritePacket(TdsEnums.HEADER_LEN);
            SetPacketData(attentionPacket, SQL.AttentionHeader, TdsEnums.HEADER_LEN);
            return attentionPacket;
        }

        internal override uint WritePacket(PacketHandle packet, bool sync)
            => SNIProxy.Singleton.WritePacket(Handle, packet.ManagedPacket, sync);

        internal override PacketHandle AddPacketToPendingList(PacketHandle packet)
        {
            // No-Op
            return packet;
        }

        internal override bool IsValidPacket(PacketHandle packet)
        {
            Debug.Assert(packet.Type == PacketHandle.ManagedPacketType, "unexpected packet type when requiring ManagedPacket");

            return (
                packet.Type == PacketHandle.ManagedPacketType &&
                packet.ManagedPacket != null &&
                !packet.ManagedPacket.IsInvalid
            );
        }

        internal override PacketHandle GetResetWritePacket(int dataSize)
        {
            var freshPacket = new SNIPacket(headerSize: _sessionHandle.ReserveHeaderSize, dataSize: dataSize);
            Debug.Assert(freshPacket.ReservedHeaderSize == _sessionHandle.ReserveHeaderSize, "failed to reserve header");
            return PacketHandle.FromManagedPacket(freshPacket);
        }

        internal override void ClearAllWritePackets()
        {
            Debug.Assert(_asyncWriteCount == 0, "Should not clear all write packets if there are packets pending");
        }

        internal override void SetPacketData(PacketHandle packet, byte[] buffer, int bytesUsed)
            => SNIProxy.Singleton.PacketSetData(packet.ManagedPacket, buffer, bytesUsed);

        internal override uint SniGetConnectionId(ref Guid clientConnectionId)
            => SNIProxy.Singleton.GetConnectionId(Handle, ref clientConnectionId);

        // NOTE(review): "DisabeSsl" spelling comes from the abstract base member and
        // cannot be corrected here without changing the override's external interface.
        internal override uint DisabeSsl() => SNIProxy.Singleton.DisableSsl(Handle);

        internal override uint EnableMars(ref uint info)
        {
            _marsConnection = new SNIMarsConnection(Handle);
            return _marsConnection.StartReceive() == TdsEnums.SNI_SUCCESS_IO_PENDING
                ? TdsEnums.SNI_SUCCESS
                : TdsEnums.SNI_ERROR;
        }

        internal override uint EnableSsl(ref uint info) => SNIProxy.Singleton.EnableSsl(Handle, info);

        internal override uint SetConnectionBufferSize(ref uint unsignedPacketSize)
            => SNIProxy.Singleton.SetConnectionBufferSize(Handle, unsignedPacketSize);

        internal override uint GenerateSspiClientContext(byte[] receivedBuff, uint receivedLength, ref byte[] sendBuff, ref uint sendLength, byte[] _sniSpnBuffer)
        {
            if (_sspiClientContextStatus == null)
            {
                _sspiClientContextStatus = new SspiClientContextStatus();
            }

            SNIProxy.Singleton.GenSspiClientContext(_sspiClientContextStatus, receivedBuff, ref sendBuff, _sniSpnBuffer);
            sendLength = (uint)(sendBuff != null ? sendBuff.Length : 0);
            return 0;
        }

        internal override uint WaitForSSLHandShakeToComplete() => 0;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using NUnit.Framework;
using IndexReader = Lucene.Net.Index.IndexReader;
using Directory = Lucene.Net.Store.Directory;

namespace Lucene.Net.Search
{
    /// <summary>
    /// Test-support utilities that verify search results and score explanations
    /// for a <c>Query</c> against a <c>Searcher</c>. Used by the search unit tests.
    /// </summary>
    public class CheckHits
    {
        /// <summary>
        /// Tolerance used when comparing an Explanation's value to the actual score.
        /// Some Explain() implementations compute their value through a slightly
        /// different order of floating-point operations than the scorer, so a small
        /// amount of variation is allowed.
        ///
        /// NOTE(port): Java Lucene uses 0.00005f; that value caused failures in
        /// TestCustomScoreQuery when comparing scorer and explanation scores with
        /// wrapped indexes and query boosts greater than 2.0. Comparison of the Java
        /// and .NET explanations showed a floating-point precision difference, not a
        /// hidden defect, hence the larger delta here.
        /// </summary>
        public static float EXPLAIN_SCORE_TOLERANCE_DELTA = 0.00025f; // {{See: LUCENENET-288}} Intentional diversion from Java Lucene per above comment

        /// <summary>
        /// Asserts that every document up to <c>searcher.MaxDoc()</c> which is *not*
        /// in the expected result set has an Explanation indicating a non-match
        /// (i.e. an Explanation value of exactly 0.0f).
        /// </summary>
        /// <param name="q">the query to test</param>
        /// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
        /// <param name="searcher">the searcher to test the query against</param>
        /// <param name="results">document ids that ARE expected to match (these are skipped)</param>
        public static void CheckNoMatchExplanations(Query q, System.String defaultFieldName, Searcher searcher, int[] results)
        {
            System.String d = q.ToString(defaultFieldName);

            // Build the set of expected matches; every other doc must explain to 0.0f.
            System.Collections.Hashtable ignore = new System.Collections.Hashtable();
            for (int i = 0; i < results.Length; i++)
            {
                SupportClass.CollectionsHelper.AddIfNotContains(ignore, (System.Int32) results[i]);
            }

            int maxDoc = searcher.MaxDoc();
            for (int doc = 0; doc < maxDoc; doc++)
            {
                if (ignore.Contains((System.Int32) doc))
                    continue;

                Explanation exp = searcher.Explain(q, doc);
                Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
                // Delta of 0.0f: a non-match must explain to exactly zero.
                Assert.AreEqual(0.0f, exp.GetValue(), 0.0f, "Explanation of [[" + d + "]] for #" + doc + " doesn't indicate non-match: " + exp.ToString());
            }
        }

        /// <summary>
        /// Asserts that a query matches the expected set of documents using a
        /// Collector, both directly and through the QueryUtils searcher/reader
        /// wrappers (with doc-base offsets -1, 0 and +1).
        ///
        /// <p/>
        /// Note that when using the Collector API, documents are collected if they
        /// "match" regardless of their score.
        /// <p/>
        /// </summary>
        /// <param name="query">the query to test</param>
        /// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
        /// <param name="searcher">the searcher to test the query against</param>
        /// <param name="results">a list of documentIds that must match the query</param>
        /// <seealso cref="Searcher.Search(Query,HitCollector)"></seealso>
        /// <seealso cref="checkHits"></seealso>
        public static void CheckHitCollector(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
        {
            QueryUtils.Check(query, searcher);

            System.Collections.Hashtable correct = new System.Collections.Hashtable();
            for (int i = 0; i < results.Length; i++)
            {
                SupportClass.CollectionsHelper.AddIfNotContains(correct, (System.Int32) results[i]);
            }

            System.Collections.Hashtable actual = new System.Collections.Hashtable();
            Collector c = new SetCollector(actual);

            searcher.Search(query, c);
            Assert.AreEqual(correct, actual, "Simple: " + query.ToString(defaultFieldName));

            // Re-run through wrapped searchers with shifted doc bases to catch
            // collectors that mishandle SetNextReader/docBase.
            for (int i = - 1; i < 2; i++)
            {
                actual.Clear();
                QueryUtils.WrapSearcher(searcher, i).Search(query, c);
                Assert.AreEqual(correct, actual, "Wrap Searcher " + i + ": " + query.ToString(defaultFieldName));
            }

            if (!(searcher is IndexSearcher))
                return ;

            for (int i = - 1; i < 2; i++)
            {
                actual.Clear();
                QueryUtils.WrapUnderlyingReader((IndexSearcher) searcher, i).Search(query, c);
                Assert.AreEqual(correct, actual, "Wrap Reader " + i + ": " + query.ToString(defaultFieldName));
            }
        }

        /// <summary>
        /// Collector that records each collected doc id (offset by the current
        /// reader's docBase) into the supplied bag. Scores are ignored.
        /// </summary>
        public class SetCollector:Collector
        {
            // Shared with the caller; collected absolute doc ids are added here.
            internal System.Collections.Hashtable bag;

            public SetCollector(System.Collections.Hashtable bag)
            {
                this.bag = bag;
            }

            // docBase of the reader currently being collected.
            private int base_Renamed = 0;

            public override void SetScorer(Scorer scorer)
            {
                // Scores are irrelevant for membership checking.
            }

            public override void Collect(int doc)
            {
                SupportClass.CollectionsHelper.AddIfNotContains(bag, (System.Int32)(doc + base_Renamed));
            }

            public override void SetNextReader(IndexReader reader, int docBase)
            {
                base_Renamed = docBase;
            }

            public override bool AcceptsDocsOutOfOrder()
            {
                return true;
            }
        }

        /// <summary>
        /// Asserts that a query matches the expected set of documents using the
        /// top-docs Search API.
        ///
        /// <p/>
        /// Note that when using this API, documents are only returned if they have
        /// a positive normalized score.
        /// <p/>
        /// </summary>
        /// <param name="query">the query to test</param>
        /// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
        /// <param name="searcher">the searcher to test the query against</param>
        /// <param name="results">a list of documentIds that must match the query</param>
        /// <seealso cref="Searcher.Search(Query)"></seealso>
        /// <seealso cref="checkHitCollector"></seealso>
        public static void CheckHits_Renamed_Method(Query query, System.String defaultFieldName, Searcher searcher, int[] results)
        {
            if (searcher is IndexSearcher)
            {
                QueryUtils.Check(query, searcher);
            }

            ScoreDoc[] hits = searcher.Search(query, null, 1000).ScoreDocs;

            // Compare sorted doc-id lists so hit order does not matter.
            System.Collections.ArrayList correct = new System.Collections.ArrayList();
            for (int i = 0; i < results.Length; i++)
            {
                SupportClass.CollectionsHelper.AddIfNotContains(correct, results[i]);
            }
            correct.Sort();

            System.Collections.ArrayList actual = new System.Collections.ArrayList();
            for (int i = 0; i < hits.Length; i++)
            {
                SupportClass.CollectionsHelper.AddIfNotContains(actual, hits[i].doc);
            }
            actual.Sort();

            Assert.AreEqual(correct, actual, query.ToString(defaultFieldName));

            QueryUtils.Check(query, searcher);
        }

        /// <summary>Asserts that the hits appear in exactly the expected document order.</summary>
        public static void CheckDocIds(System.String mes, int[] results, ScoreDoc[] hits)
        {
            Assert.AreEqual(hits.Length, results.Length, mes + " nr of hits");
            for (int i = 0; i < results.Length; i++)
            {
                Assert.AreEqual(results[i], hits[i].doc, mes + " doc nrs for hit " + i);
            }
        }

        /// <summary>
        /// Asserts that two hit lists both have the expected order of documents,
        /// and that the two lists have the same score values.
        /// </summary>
        public static void CheckHitsQuery(Query query, ScoreDoc[] hits1, ScoreDoc[] hits2, int[] results)
        {
            CheckDocIds("hits1", results, hits1);
            CheckDocIds("hits2", results, hits2);
            CheckEqual(query, hits1, hits2);
        }

        /// <summary>
        /// Asserts that two hit lists are pairwise equal: same length, same doc ids,
        /// and scores equal within a small tolerance.
        /// </summary>
        public static void CheckEqual(Query query, ScoreDoc[] hits1, ScoreDoc[] hits2)
        {
            float scoreTolerance = 1.0e-6f;
            if (hits1.Length != hits2.Length)
            {
                Assert.Fail("Unequal lengths: hits1=" + hits1.Length + ",hits2=" + hits2.Length);
            }
            for (int i = 0; i < hits1.Length; i++)
            {
                if (hits1[i].doc != hits2[i].doc)
                {
                    Assert.Fail("Hit " + i + " docnumbers don't match\n" + Hits2str(hits1, hits2, 0, 0) + "for query:" + query.ToString());
                }

                if ((hits1[i].doc != hits2[i].doc) || System.Math.Abs(hits1[i].score - hits2[i].score) > scoreTolerance)
                {
                    Assert.Fail("Hit " + i + ", doc nrs " + hits1[i].doc + " and " + hits2[i].doc + "\nunequal : " + hits1[i].score + "\n and: " + hits2[i].score + "\nfor query:" + query.ToString());
                }
            }
        }

        /// <summary>
        /// Renders two hit lists side by side for failure messages. An
        /// <paramref name="end"/> of 0 or less means "to the end of the longer list".
        /// </summary>
        public static System.String Hits2str(ScoreDoc[] hits1, ScoreDoc[] hits2, int start, int end)
        {
            System.Text.StringBuilder sb = new System.Text.StringBuilder();
            int len1 = hits1 == null?0:hits1.Length;
            int len2 = hits2 == null?0:hits2.Length;
            if (end <= 0)
            {
                end = System.Math.Max(len1, len2);
            }

            sb.Append("Hits length1=").Append(len1).Append("\tlength2=").Append(len2);

            sb.Append('\n');
            for (int i = start; i < end; i++)
            {
                sb.Append("hit=").Append(i).Append(':');
                if (i < len1)
                {
                    sb.Append(" doc").Append(hits1[i].doc).Append('=').Append(hits1[i].score);
                }
                else
                {
                    sb.Append(" ");
                }
                sb.Append(",\t");
                if (i < len2)
                {
                    sb.Append(" doc").Append(hits2[i].doc).Append('=').Append(hits2[i].score);
                }
                sb.Append('\n');
            }
            return sb.ToString();
        }

        /// <summary>
        /// Renders a TopDocs range as a string for failure messages. An
        /// <paramref name="end"/> of 0 or less means "to the end of ScoreDocs".
        /// </summary>
        public static System.String TopdocsString(TopDocs docs, int start, int end)
        {
            System.Text.StringBuilder sb = new System.Text.StringBuilder();
            sb.Append("TopDocs totalHits=").Append(docs.TotalHits).Append(" top=").Append(docs.ScoreDocs.Length).Append('\n');
            if (end <= 0)
                end = docs.ScoreDocs.Length;
            else
                end = System.Math.Min(end, docs.ScoreDocs.Length);
            for (int i = start; i < end; i++)
            {
                sb.Append('\t');
                sb.Append(i);
                sb.Append(") doc=");
                sb.Append(docs.ScoreDocs[i].doc);
                sb.Append("\tscore=");
                sb.Append(docs.ScoreDocs[i].score);
                sb.Append('\n');
            }
            return sb.ToString();
        }

        /// <summary>
        /// Asserts that the explanation value for every document matching a query
        /// corresponds with the true score (shallow check only).
        /// </summary>
        /// <seealso cref="ExplanationAsserter"></seealso>
        /// <seealso cref="CheckExplanations(Query, String, Searcher, boolean) for a">
        /// "deep" testing of the explanation details.
        /// </seealso>
        /// <param name="query">the query to test</param>
        /// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
        /// <param name="searcher">the searcher to test the query against</param>
        public static void CheckExplanations(Query query, System.String defaultFieldName, Searcher searcher)
        {
            CheckExplanations(query, defaultFieldName, searcher, false);
        }

        /// <summary>
        /// Asserts that the explanation value for every document matching a query
        /// corresponds with the true score. Optionally does "deep" testing of the
        /// explanation details.
        /// </summary>
        /// <seealso cref="ExplanationAsserter"></seealso>
        /// <param name="query">the query to test</param>
        /// <param name="defaultFieldName">used for displaying the query in assertion messages</param>
        /// <param name="searcher">the searcher to test the query against</param>
        /// <param name="deep">indicates whether a deep comparison of sub-Explanation details should be executed</param>
        public static void CheckExplanations(Query query, System.String defaultFieldName, Searcher searcher, bool deep)
        {
            searcher.Search(query, new ExplanationAsserter(query, defaultFieldName, searcher, deep));
        }

        /// <summary>
        /// Asserts that an explanation has the expected score and, when
        /// <paramref name="deep"/> is true, recursively verifies that its
        /// sub-details combine (product/sum/max, or "max plus x times others")
        /// to that score.
        /// </summary>
        /// <param name="q">String representation of the query for assertion messages</param>
        /// <param name="doc">Document ID for assertion messages</param>
        /// <param name="score">Real score value of doc with query q</param>
        /// <param name="deep">indicates whether a deep comparison of sub-Explanation details should be executed</param>
        /// <param name="expl">The Explanation to match against score</param>
        public static void VerifyExplanation(System.String q, int doc, float score, bool deep, Explanation expl)
        {
            float value_Renamed = expl.GetValue();
            Assert.AreEqual(score, value_Renamed, EXPLAIN_SCORE_TOLERANCE_DELTA, q + ": score(doc=" + doc + ")=" + score + " != explanationScore=" + value_Renamed + " Explanation: " + expl);

            if (!deep)
                return ;

            Explanation[] detail = expl.GetDetails();
            if (detail != null)
            {
                if (detail.Length == 1)
                {
                    // Simple containment: no matter what the description says,
                    // just verify the contained explanation has the same score.
                    VerifyExplanation(q, doc, score, deep, detail[0]);
                }
                else
                {
                    // A multi-detail explanation's description must either:
                    //  - end with one of: "product of:", "sum of:", "max of:", or
                    //  - contain "max plus <x> times others of:" (where <x> is a float).
                    float x = 0;
                    System.String descr = expl.GetDescription().ToLower();
                    bool productOf = descr.EndsWith("product of:");
                    bool sumOf = descr.EndsWith("sum of:");
                    bool maxOf = descr.EndsWith("max of:");
                    bool maxTimesOthers = false;
                    if (!(productOf || sumOf || maxOf))
                    {
                        // Maybe 'max plus x times others': parse x out of the description.
                        int k1 = descr.IndexOf("max plus ");
                        if (k1 >= 0)
                        {
                            k1 += "max plus ".Length;
                            int k2 = descr.IndexOf(" ", k1);
                            try
                            {
                                x = SupportClass.Single.Parse(descr.Substring(k1, (k2) - (k1)).Trim());
                                if (descr.Substring(k2).Trim().Equals("times others of:"))
                                {
                                    maxTimesOthers = true;
                                }
                            }
                            catch (System.FormatException e)
                            {
                                // Best-effort parse: a non-numeric token simply means the
                                // description is not of the 'max plus x times others' form,
                                // and the assertion below will report the mismatch.
                            }
                        }
                    }
                    Assert.IsTrue(productOf || sumOf || maxOf || maxTimesOthers, q + ": multi valued explanation description=\"" + descr + "\" must be 'max of plus x times others' or end with 'product of'" + " or 'sum of:' or 'max of:' - " + expl);

                    // Recursively verify each sub-detail while accumulating all
                    // three possible combinations in a single pass.
                    float sum = 0;
                    float product = 1;
                    float max = 0;
                    for (int i = 0; i < detail.Length; i++)
                    {
                        float dval = detail[i].GetValue();
                        VerifyExplanation(q, doc, dval, deep, detail[i]);
                        product *= dval;
                        sum += dval;
                        max = System.Math.Max(max, dval);
                    }
                    float combined = 0;
                    if (productOf)
                    {
                        combined = product;
                    }
                    else if (sumOf)
                    {
                        combined = sum;
                    }
                    else if (maxOf)
                    {
                        combined = max;
                    }
                    else if (maxTimesOthers)
                    {
                        combined = max + x * (sum - max);
                    }
                    else
                    {
                        // Unreachable: guarded by the Assert.IsTrue above.
                        Assert.IsTrue(false, "should never get here!");
                    }
                    Assert.AreEqual(combined, value_Renamed, EXPLAIN_SCORE_TOLERANCE_DELTA, q + ": actual subDetails combined==" + combined + " != value=" + value_Renamed + " Explanation: " + expl);
                }
            }
        }

        /// <summary>
        /// An IndexSearcher that implicitly checks the explanation of every match
        /// whenever it executes a search.
        /// </summary>
        /// <seealso cref="ExplanationAsserter"></seealso>
        public class ExplanationAssertingSearcher:IndexSearcher
        {
            public ExplanationAssertingSearcher(Directory d):base(d)
            {
            }

            public ExplanationAssertingSearcher(IndexReader r):base(r)
            {
            }

            // Runs the query once with an ExplanationAsserter before the real
            // search, so every matching doc's explanation is verified.
            protected internal virtual void CheckExplanations(Query q)
            {
                base.Search(q, null, new ExplanationAsserter(q, null, this));
            }

            public override TopFieldDocs Search(Query query, Filter filter, int n, Sort sort)
            {
                CheckExplanations(query);
                return base.Search(query, filter, n, sort);
            }

            /// <deprecated> use {@link #Search(Query, Collector)} instead.
            /// </deprecated>
            [Obsolete("use Search(Query, Collector) instead. ")]
            public override void Search(Query query, HitCollector results)
            {
                Search(query, new HitCollectorWrapper(results));
            }

            public override void Search(Query query, Collector results)
            {
                CheckExplanations(query);
                base.Search(query, results);
            }

            /// <deprecated> use {@link #Search(Query, Filter, Collector)} instead.
            /// </deprecated>
            [Obsolete("use Search(Query, Filter, Collector) instead. ")]
            public override void Search(Query query, Filter filter, HitCollector results)
            {
                Search(query, filter, new HitCollectorWrapper(results));
            }

            public override void Search(Query query, Filter filter, Collector results)
            {
                CheckExplanations(query);
                base.Search(query, filter, results);
            }

            public override TopDocs Search(Query query, Filter filter, int n)
            {
                CheckExplanations(query);
                return base.Search(query, filter, n);
            }
        }

        /// <summary>
        /// Collector that asserts, for every collected document, that the score
        /// explanation corresponds with the true score.
        ///
        /// NOTE: this collector should only be used with the Query and Searcher
        /// specified when it is constructed.
        /// </summary>
        /// <seealso cref="CheckHits.verifyExplanation"></seealso>
        public class ExplanationAsserter:Collector
        {
            /// <deprecated>
            /// </deprecated>
            /// <seealso cref="CheckHits.EXPLAIN_SCORE_TOLERANCE_DELTA"></seealso>
            public static float SCORE_TOLERANCE_DELTA = 0.00005f;

            internal Query q;
            internal Searcher s;
            internal System.String d;
            internal bool deep;

            internal Scorer scorer;
            // docBase of the reader currently being collected.
            private int base_Renamed = 0;

            /// <summary>Constructs an instance which does shallow tests on the Explanation.</summary>
            public ExplanationAsserter(Query q, System.String defaultFieldName, Searcher s):this(q, defaultFieldName, s, false)
            {
            }

            public ExplanationAsserter(Query q, System.String defaultFieldName, Searcher s, bool deep)
            {
                this.q = q;
                this.s = s;
                this.d = q.ToString(defaultFieldName);
                this.deep = deep;
            }

            public override void SetScorer(Scorer scorer)
            {
                this.scorer = scorer;
            }

            public override void Collect(int doc)
            {
                Explanation exp = null;
                doc = doc + base_Renamed;
                try
                {
                    exp = s.Explain(q, doc);
                }
                catch (System.IO.IOException e)
                {
                    throw new System.SystemException("exception in hitcollector of [[" + d + "]] for #" + doc, e);
                }

                Assert.IsNotNull(exp, "Explanation of [[" + d + "]] for #" + doc + " is null");
                // Compare the live scorer's value against the explanation, deeply if requested.
                Lucene.Net.Search.CheckHits.VerifyExplanation(d, doc, scorer.Score(), deep, exp);
            }

            public override void SetNextReader(IndexReader reader, int docBase)
            {
                base_Renamed = docBase;
            }

            public override bool AcceptsDocsOutOfOrder()
            {
                return true;
            }
        }
    }
}
using Lucene.Net.Documents;
using NUnit.Framework;
using Assert = Lucene.Net.TestFramework.Assert;

namespace Lucene.Net.Search.Spans
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements. See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License. You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using Directory = Lucene.Net.Store.Directory;
    using Document = Documents.Document;
    using Field = Field;
    using IndexReader = Lucene.Net.Index.IndexReader;
    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
    using Term = Lucene.Net.Index.Term;

    /// <summary>
    /// Tests for <seealso cref="SpanMultiTermQueryWrapper"/>, wrapping a few MultiTermQueries
    /// (wildcard, prefix, fuzzy, regexp) and combining them with span queries.
    /// </summary>
    [TestFixture]
    public class TestSpanMultiTermQueryWrapper : LuceneTestCase
    {
        private Directory directory;
        private IndexReader reader;
        private IndexSearcher searcher;

        /// <summary>
        /// Builds a three-document index over a single "field":
        ///   doc 0: "quick brown fox"
        ///   doc 1: "jumps over lazy broun dog"
        ///   doc 2: "jumps over extremely very lazy broxn dog"
        /// The deliberate misspellings ("broun", "broxn") exercise fuzzy/wildcard matching.
        /// </summary>
        [SetUp]
        public override void SetUp()
        {
            base.SetUp();
            directory = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, directory);
            Document doc = new Document();
            Field field = NewTextField("field", "", Field.Store.NO);
            doc.Add(field);

            // The same Document/Field instance is reused; only the value changes per doc.
            field.SetStringValue("quick brown fox");
            iw.AddDocument(doc);

            field.SetStringValue("jumps over lazy broun dog");
            iw.AddDocument(doc);

            field.SetStringValue("jumps over extremely very lazy broxn dog");
            iw.AddDocument(doc);

            reader = iw.GetReader();
            iw.Dispose();
            searcher = NewSearcher(reader);
        }

        [TearDown]
        public override void TearDown()
        {
            reader.Dispose();
            directory.Dispose();
            base.TearDown();
        }

        /// <summary>Wrapped wildcard restricted by SpanFirst to the first 2 positions.</summary>
        [Test]
        public virtual void TestWildcard()
        {
            WildcardQuery wq = new WildcardQuery(new Term("field", "bro?n"));
            SpanQuery swq = new SpanMultiTermQueryWrapper<MultiTermQuery>(wq);
            // will only match quick brown fox
            SpanFirstQuery sfq = new SpanFirstQuery(swq, 2);
            Assert.AreEqual(1, searcher.Search(sfq, 10).TotalHits);
        }

        /// <summary>Wrapped prefix query restricted by SpanFirst to the first 3 positions.</summary>
        [Test]
        public virtual void TestPrefix()
        {
            WildcardQuery wq = new WildcardQuery(new Term("field", "extrem*"));
            SpanQuery swq = new SpanMultiTermQueryWrapper<MultiTermQuery>(wq);
            // will only match "jumps over extremely very lazy broxn dog"
            SpanFirstQuery sfq = new SpanFirstQuery(swq, 3);
            Assert.AreEqual(1, searcher.Search(sfq, 10).TotalHits);
        }

        /// <summary>Wrapped fuzzy query restricted to span positions 3..6.</summary>
        [Test]
        public virtual void TestFuzzy()
        {
            FuzzyQuery fq = new FuzzyQuery(new Term("field", "broan"));
            SpanQuery sfq = new SpanMultiTermQueryWrapper<MultiTermQuery>(fq);
            // will not match quick brown fox
            SpanPositionRangeQuery sprq = new SpanPositionRangeQuery(sfq, 3, 6);
            Assert.AreEqual(2, searcher.Search(sprq, 10).TotalHits);
        }

        /// <summary>Wrapped fuzzy query limited to a single term expansion.</summary>
        [Test]
        public virtual void TestFuzzy2()
        {
            // maximum of 1 term expansion
            FuzzyQuery fq = new FuzzyQuery(new Term("field", "broan"), 1, 0, 1, false);
            SpanQuery sfq = new SpanMultiTermQueryWrapper<MultiTermQuery>(fq);
            // will only match jumps over lazy broun dog
            SpanPositionRangeQuery sprq = new SpanPositionRangeQuery(sfq, 0, 100);
            Assert.AreEqual(1, searcher.Search(sprq, 10).TotalHits);
        }

        /// <summary>
        /// Non-existent wrapped multi-terms inside SpanNearQuery must yield zero
        /// hits rather than throwing.
        /// </summary>
        [Test]
        public virtual void TestNoSuchMultiTermsInNear()
        {
            //test to make sure non existent multiterms aren't throwing null pointer exceptions
            FuzzyQuery fuzzyNoSuch = new FuzzyQuery(new Term("field", "noSuch"), 1, 0, 1, false);
            SpanQuery spanNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(fuzzyNoSuch);
            SpanQuery term = new SpanTermQuery(new Term("field", "brown"));
            SpanQuery near = new SpanNearQuery(new SpanQuery[] { term, spanNoSuch }, 1, true);
            Assert.AreEqual(0, searcher.Search(near, 10).TotalHits);

            //flip order
            near = new SpanNearQuery(new SpanQuery[] { spanNoSuch, term }, 1, true);
            Assert.AreEqual(0, searcher.Search(near, 10).TotalHits);

            WildcardQuery wcNoSuch = new WildcardQuery(new Term("field", "noSuch*"));
            SpanQuery spanWCNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(wcNoSuch);
            near = new SpanNearQuery(new SpanQuery[] { term, spanWCNoSuch }, 1, true);
            Assert.AreEqual(0, searcher.Search(near, 10).TotalHits);

            RegexpQuery rgxNoSuch = new RegexpQuery(new Term("field", "noSuch"));
            SpanQuery spanRgxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(rgxNoSuch);
            near = new SpanNearQuery(new SpanQuery[] { term, spanRgxNoSuch }, 1, true);
            Assert.AreEqual(0, searcher.Search(near, 10).TotalHits);

            PrefixQuery prfxNoSuch = new PrefixQuery(new Term("field", "noSuch"));
            SpanQuery spanPrfxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(prfxNoSuch);
            near = new SpanNearQuery(new SpanQuery[] { term, spanPrfxNoSuch }, 1, true);
            Assert.AreEqual(0, searcher.Search(near, 10).TotalHits);

            //test single noSuch
            near = new SpanNearQuery(new SpanQuery[] { spanPrfxNoSuch }, 1, true);
            Assert.AreEqual(0, searcher.Search(near, 10).TotalHits);

            //test double noSuch
            near = new SpanNearQuery(new SpanQuery[] { spanPrfxNoSuch, spanPrfxNoSuch }, 1, true);
            Assert.AreEqual(0, searcher.Search(near, 10).TotalHits);
        }

        /// <summary>
        /// Non-existent wrapped multi-terms inside SpanNotQuery: an include clause
        /// that matches must still match, while a non-existent include yields zero
        /// hits — and neither case may throw a non-matching-field exception.
        /// </summary>
        [Test]
        public virtual void TestNoSuchMultiTermsInNotNear()
        {
            //test to make sure non existent multiterms aren't throwing non-matching field exceptions
            FuzzyQuery fuzzyNoSuch = new FuzzyQuery(new Term("field", "noSuch"), 1, 0, 1, false);
            SpanQuery spanNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(fuzzyNoSuch);
            SpanQuery term = new SpanTermQuery(new Term("field", "brown"));
            SpanNotQuery notNear = new SpanNotQuery(term, spanNoSuch, 0, 0);
            Assert.AreEqual(1, searcher.Search(notNear, 10).TotalHits);

            //flip
            notNear = new SpanNotQuery(spanNoSuch, term, 0, 0);
            Assert.AreEqual(0, searcher.Search(notNear, 10).TotalHits);

            //both noSuch
            notNear = new SpanNotQuery(spanNoSuch, spanNoSuch, 0, 0);
            Assert.AreEqual(0, searcher.Search(notNear, 10).TotalHits);

            WildcardQuery wcNoSuch = new WildcardQuery(new Term("field", "noSuch*"));
            SpanQuery spanWCNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(wcNoSuch);
            notNear = new SpanNotQuery(term, spanWCNoSuch, 0, 0);
            Assert.AreEqual(1, searcher.Search(notNear, 10).TotalHits);

            RegexpQuery rgxNoSuch = new RegexpQuery(new Term("field", "noSuch"));
            SpanQuery spanRgxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(rgxNoSuch);
            notNear = new SpanNotQuery(term, spanRgxNoSuch, 1, 1);
            Assert.AreEqual(1, searcher.Search(notNear, 10).TotalHits);

            PrefixQuery prfxNoSuch = new PrefixQuery(new Term("field", "noSuch"));
            SpanQuery spanPrfxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(prfxNoSuch);
            notNear = new SpanNotQuery(term, spanPrfxNoSuch, 1, 1);
            Assert.AreEqual(1, searcher.Search(notNear, 10).TotalHits);
        }

        /// <summary>
        /// Non-existent wrapped multi-terms inside SpanOrQuery: the OR still
        /// matches via the existing term, and an OR of only non-existent clauses
        /// yields zero hits without throwing.
        /// </summary>
        [Test]
        public virtual void TestNoSuchMultiTermsInOr()
        {
            //test to make sure non existent multiterms aren't throwing null pointer exceptions
            FuzzyQuery fuzzyNoSuch = new FuzzyQuery(new Term("field", "noSuch"), 1, 0, 1, false);
            SpanQuery spanNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(fuzzyNoSuch);
            SpanQuery term = new SpanTermQuery(new Term("field", "brown"));
            SpanOrQuery near = new SpanOrQuery(new SpanQuery[] { term, spanNoSuch });
            Assert.AreEqual(1, searcher.Search(near, 10).TotalHits);

            //flip
            near = new SpanOrQuery(new SpanQuery[] { spanNoSuch, term });
            Assert.AreEqual(1, searcher.Search(near, 10).TotalHits);

            WildcardQuery wcNoSuch = new WildcardQuery(new Term("field", "noSuch*"));
            SpanQuery spanWCNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(wcNoSuch);
            near = new SpanOrQuery(new SpanQuery[] { term, spanWCNoSuch });
            Assert.AreEqual(1, searcher.Search(near, 10).TotalHits);

            RegexpQuery rgxNoSuch = new RegexpQuery(new Term("field", "noSuch"));
            SpanQuery spanRgxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(rgxNoSuch);
            near = new SpanOrQuery(new SpanQuery[] { term, spanRgxNoSuch });
            Assert.AreEqual(1, searcher.Search(near, 10).TotalHits);

            PrefixQuery prfxNoSuch = new PrefixQuery(new Term("field", "noSuch"));
            SpanQuery spanPrfxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(prfxNoSuch);
            near = new SpanOrQuery(new SpanQuery[] { term, spanPrfxNoSuch });
            Assert.AreEqual(1, searcher.Search(near, 10).TotalHits);

            near = new SpanOrQuery(new SpanQuery[] { spanPrfxNoSuch });
            Assert.AreEqual(0, searcher.Search(near, 10).TotalHits);

            near = new SpanOrQuery(new SpanQuery[] { spanPrfxNoSuch, spanPrfxNoSuch });
            Assert.AreEqual(0, searcher.Search(near, 10).TotalHits);
        }

        /// <summary>
        /// Non-existent wrapped multi-terms inside SpanFirstQuery simply match
        /// nothing (this case has historically not been a problem).
        /// </summary>
        [Test]
        public virtual void TestNoSuchMultiTermsInSpanFirst()
        {
            //this hasn't been a problem
            FuzzyQuery fuzzyNoSuch = new FuzzyQuery(new Term("field", "noSuch"), 1, 0, 1, false);
            SpanQuery spanNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(fuzzyNoSuch);
            SpanQuery spanFirst = new SpanFirstQuery(spanNoSuch, 10);

            Assert.AreEqual(0, searcher.Search(spanFirst, 10).TotalHits);

            WildcardQuery wcNoSuch = new WildcardQuery(new Term("field", "noSuch*"));
            SpanQuery spanWCNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(wcNoSuch);
            spanFirst = new SpanFirstQuery(spanWCNoSuch, 10);
            Assert.AreEqual(0, searcher.Search(spanFirst, 10).TotalHits);

            RegexpQuery rgxNoSuch = new RegexpQuery(new Term("field", "noSuch"));
            SpanQuery spanRgxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(rgxNoSuch);
            spanFirst = new SpanFirstQuery(spanRgxNoSuch, 10);
            Assert.AreEqual(0, searcher.Search(spanFirst, 10).TotalHits);

            PrefixQuery prfxNoSuch = new PrefixQuery(new Term("field", "noSuch"));
            SpanQuery spanPrfxNoSuch = new SpanMultiTermQueryWrapper<MultiTermQuery>(prfxNoSuch);
            spanFirst = new SpanFirstQuery(spanPrfxNoSuch, 10);
            Assert.AreEqual(0, searcher.Search(spanFirst, 10).TotalHits);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Concurrent; using System.Collections.Generic; using Xunit; namespace System.Linq.Parallel.Tests { public static class SelectSelectManyTests { [Theory] [InlineData(0)] [InlineData(1)] [InlineData(2)] [InlineData(16)] public static void Select_Unordered(int count) { IntegerRangeSet seen = new IntegerRangeSet(0, count); foreach (var p in UnorderedSources.Default(count).Select(x => KeyValuePair.Create(x, x * x))) { seen.Add(p.Key); Assert.Equal(p.Key * p.Key, p.Value); } seen.AssertComplete(); } [Fact] [OuterLoop] public static void Select_Unordered_Longrunning() { Select_Unordered(Sources.OuterLoopCount / 64); } [Theory] [MemberData(nameof(Sources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(Sources))] public static void Select(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; int seen = 0; foreach (var p in query.Select(x => KeyValuePair.Create(x, x * x))) { Assert.Equal(seen++, p.Key); Assert.Equal(p.Key * p.Key, p.Value); } Assert.Equal(count, seen); } [Theory] [OuterLoop] [MemberData(nameof(Sources.Ranges), new[] { 1024 * 4 }, MemberType = typeof(Sources))] public static void Select_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Select(labeled, count); } [Theory] [InlineData(0)] [InlineData(1)] [InlineData(2)] [InlineData(16)] public static void Select_Unordered_NotPipelined(int count) { IntegerRangeSet seen = new IntegerRangeSet(0, count); Assert.All(UnorderedSources.Default(count).Select(x => KeyValuePair.Create(x, x * x)).ToList(), p => { seen.Add(p.Key); Assert.Equal(p.Key * p.Key, p.Value); }); seen.AssertComplete(); } [Fact] [OuterLoop] public static void Select_Unordered_NotPipelined_Longrunning() { Select_Unordered_NotPipelined(Sources.OuterLoopCount / 64); } [Theory] 
[MemberData(nameof(Sources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(Sources))] public static void Select_NotPipelined(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; int seen = 0; Assert.All(query.Select(x => KeyValuePair.Create(x, x * x)).ToList(), p => { Assert.Equal(seen++, p.Key); Assert.Equal(p.Key * p.Key, p.Value); }); Assert.Equal(count, seen); } [Theory] [OuterLoop] [MemberData(nameof(Sources.Ranges), new[] { 1024 * 4 }, MemberType = typeof(Sources))] public static void Select_NotPipelined_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Select_NotPipelined(labeled, count); } // Uses an element's index to calculate an output value. If order preservation isn't // working, this would PROBABLY fail. Unfortunately, this isn't deterministic. But choosing // larger input sizes increases the probability that it will. [Theory] [InlineData(0)] [InlineData(1)] [InlineData(2)] [InlineData(16)] public static void Select_Indexed_Unordered(int count) { // For unordered collections, which element is at which index isn't actually guaranteed, but an effect of the implementation. // If this test starts failing it should be updated, and possibly mentioned in release notes. 
IntegerRangeSet seen = new IntegerRangeSet(0, count); foreach (var p in UnorderedSources.Default(count).Select((x, index) => KeyValuePair.Create(x, index))) { seen.Add(p.Key); Assert.Equal(p.Key, p.Value); } seen.AssertComplete(); } [Fact] [OuterLoop] public static void Select_Indexed_Unordered_Longrunning() { Select_Indexed_Unordered(Sources.OuterLoopCount / 64); } [Theory] [MemberData(nameof(Sources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(Sources))] public static void Select_Indexed(Labeled<ParallelQuery<int>> labeled, int count) { ParallelQuery<int> query = labeled.Item; int seen = 0; foreach (var p in query.Select((x, index) => KeyValuePair.Create(x, index))) { Assert.Equal(seen++, p.Key); Assert.Equal(p.Key, p.Value); } Assert.Equal(count, seen); } [Theory] [OuterLoop] [MemberData(nameof(Sources.Ranges), new[] { 1024 * 4 }, MemberType = typeof(Sources))] public static void Select_Indexed_Longrunning(Labeled<ParallelQuery<int>> labeled, int count) { Select_Indexed(labeled, count); } [Theory] [InlineData(0)] [InlineData(1)] [InlineData(2)] [InlineData(16)] public static void Select_Indexed_Unordered_NotPipelined(int count) { // For unordered collections, which element is at which index isn't actually guaranteed, but an effect of the implementation. // If this test starts failing it should be updated, and possibly mentioned in release notes. 
// NOTE(review): this chunk is the interior/tail of a PLINQ Select/SelectMany xUnit test class;
// the class header and the signature of the first method below begin before this chunk.
// Tail of Select_Indexed_Unordered_NotPipelined: materializes the indexed projection (ToList => not
// pipelined) and checks every element's value equals the index PLINQ paired it with.
IntegerRangeSet seen = new IntegerRangeSet(0, count);
Assert.All(UnorderedSources.Default(count).Select((x, index) => KeyValuePair.Create(x, index)).ToList(), p => { seen.Add(p.Key); Assert.Equal(p.Key, p.Value); });
seen.AssertComplete();
}

[Fact]
[OuterLoop]
public static void Select_Indexed_Unordered_NotPipelined_Longrunning()
{
    Select_Indexed_Unordered_NotPipelined(Sources.OuterLoopCount / 64);
}

// Ordered + not pipelined: results must come back in source order, with value == index.
[Theory]
[MemberData(nameof(Sources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(Sources))]
public static void Select_Indexed_NotPipelined(Labeled<ParallelQuery<int>> labeled, int count)
{
    ParallelQuery<int> query = labeled.Item;
    int seen = 0;
    Assert.All(query.Select((x, index) => KeyValuePair.Create(x, index)).ToList(), p => { Assert.Equal(seen++, p.Key); Assert.Equal(p.Key, p.Value); });
    Assert.Equal(count, seen);
}

[Theory]
[OuterLoop]
[MemberData(nameof(Sources.Ranges), new[] { 1024 * 4 }, MemberType = typeof(Sources))]
public static void Select_Indexed_NotPipelined_Longrunning(Labeled<ParallelQuery<int>> labeled, int count)
{
    Select_Indexed_NotPipelined(labeled, count);
}

// Argument validation: the xUnit overload asserts the ParamName of the thrown exception.
[Fact]
public static void Select_ArgumentNullException()
{
    Assert.Throws<ArgumentNullException>("source", () => ((ParallelQuery<bool>)null).Select(x => x));
    Assert.Throws<ArgumentNullException>("source", () => ((ParallelQuery<bool>)null).Select((x, index) => x));
    Assert.Throws<ArgumentNullException>("selector", () => ParallelEnumerable.Empty<bool>().Select((Func<bool, bool>)null));
    Assert.Throws<ArgumentNullException>("selector", () => ParallelEnumerable.Empty<bool>().Select((Func<bool, int, bool>)null));
}

// AsOrdered over a custom orderable partitioner whose keys may arrive out of order and/or
// non-normalized: Select must still yield elements in key order.
[Theory]
[InlineData(true, true)]
[InlineData(true, false)]
[InlineData(false, true)]
[InlineData(false, false)]
public static void Select_OrderablePartitionerWithOutOfOrderInputs_AsOrdered_CorrectOrder(bool keysOrderedInEachPartition, bool keysNormalized)
{
    var range = new RangeOrderablePartitioner(0, 1024, keysOrderedInEachPartition, keysNormalized);
    int next = 0;
    foreach (int i in range.AsParallel().AsOrdered().Select(i => i))
    {
        Assert.Equal(next++, i);
    }
}

//
// SelectMany
//

// [Regression Test]
// An issue occurred because the QuerySettings structure was not being deep-cloned during
// query-opening. As a result, the concurrent inner-enumerators (for the RHS operators)
// that occur in SelectMany were sharing CancellationState that they should not have.
// The result was that enumerators could falsely believe they had been canceled when
// another inner-enumerator was disposed.
//
// Note: the failure was intermittent. this test would fail about 1 in 2 times on mikelid1 (4-core).

// Theory data: (count, expander, expansion) tuples for unordered sources.
// An empty `counts` falls back to the outer-loop element count.
public static IEnumerable<object[]> SelectManyUnorderedData(int[] counts)
{
    foreach (int count in counts.DefaultIfEmpty(Sources.OuterLoopCount / 64))
    {
        foreach (Labeled<Func<int, int, IEnumerable<int>>> expander in Expanders())
        {
            foreach (int expandCount in new[] { 0, 1, 2, 8 })
            {
                yield return new object[] { count, expander, expandCount };
            }
        }
    }
}

// Theory data: as above, but crossed with each labeled ordered source from Sources.Ranges.
public static IEnumerable<object[]> SelectManyData(int[] counts)
{
    foreach (object[] results in Sources.Ranges(counts.DefaultIfEmpty(Sources.OuterLoopCount / 64)))
    {
        foreach (Labeled<Func<int, int, IEnumerable<int>>> expander in Expanders())
        {
            foreach (int count in new[] { 0, 1, 2, 8 })
            {
                yield return new object[] { results[0], results[1], expander, count };
            }
        }
    }
}

// Expanders produce the inner sequence for element `start`: the contiguous range
// [start * count, start * count + count), realized as an array, a lazy enumerable,
// or an ordered PLINQ query — so element i of the flattened result divides back to
// its source via `value / expansion`.
private static IEnumerable<Labeled<Func<int, int, IEnumerable<int>>>> Expanders()
{
    yield return Labeled.Label("Array", (Func<int, int, IEnumerable<int>>)((start, count) => Enumerable.Range(start * count, count).ToArray()));
    yield return Labeled.Label("Enumerable.Range", (Func<int, int, IEnumerable<int>>)((start, count) => Enumerable.Range(start * count, count)));
    yield return Labeled.Label("ParallelEnumerable.Range", (Func<int, int, IEnumerable<int>>)((start, count) => ParallelEnumerable.Range(start * count, count).AsOrdered().Select(x => x)));
}

[Theory]
[MemberData(nameof(SelectManyUnorderedData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Unordered(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    IntegerRangeSet seen = new IntegerRangeSet(0, count * expansion);
    foreach (int i in UnorderedSources.Default(count).SelectMany(x => expand(x, expansion)))
    {
        seen.Add(i);
    }
    seen.AssertComplete();
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyUnorderedData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Unordered_Longrunning(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Unordered(count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyData), new[] { 0, 1, 2, 16 })]
public static void SelectMany(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    ParallelQuery<int> query = labeled.Item;
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    int seen = 0;
    foreach (int i in query.SelectMany(x => expand(x, expansion)))
    {
        Assert.Equal(seen++, i);
    }
    Assert.Equal(count * expansion, seen);
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany(labeled, count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyUnorderedData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Unordered_NotPipelined(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    IntegerRangeSet seen = new IntegerRangeSet(0, count * expansion);
    Assert.All(UnorderedSources.Default(count).SelectMany(x => expand(x, expansion)).ToList(), x => seen.Add(x));
    seen.AssertComplete();
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyUnorderedData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Unordered_NotPipelined_Longrunning(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Unordered_NotPipelined(count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_NotPipelined(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    ParallelQuery<int> query = labeled.Item;
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    int seen = 0;
    Assert.All(query.SelectMany(x => expand(x, expansion)).ToList(), x => Assert.Equal(seen++, x));
    Assert.Equal(count * expansion, seen);
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_NotPipelined_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_NotPipelined(labeled, count, expander, expansion);
}

// ResultSelector variants: the (source, collection-element) pair is surfaced so the test can
// verify `original == expanded / expansion` in addition to set/order completeness.
[Theory]
[MemberData(nameof(SelectManyUnorderedData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Unordered_ResultSelector(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    IntegerRangeSet seen = new IntegerRangeSet(0, count * expansion);
    foreach (var p in UnorderedSources.Default(count).SelectMany(x => expand(x, expansion), (original, expanded) => KeyValuePair.Create(original, expanded)))
    {
        seen.Add(p.Value);
        Assert.Equal(p.Key, p.Value / expansion);
    }
    seen.AssertComplete();
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyUnorderedData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Unordered_ResultSelector_Longrunning(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Unordered_ResultSelector(count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyUnorderedData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Unordered_ResultSelector_NotPipelined(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    IntegerRangeSet seen = new IntegerRangeSet(0, count * expansion);
    Assert.All(UnorderedSources.Default(count).SelectMany(x => expand(x, expansion), (original, expanded) => KeyValuePair.Create(original, expanded)).ToList(), p => { seen.Add(p.Value); Assert.Equal(p.Key, p.Value / expansion); });
    seen.AssertComplete();
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyUnorderedData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Unordered_ResultSelector_NotPipelined_Longrunning(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Unordered_ResultSelector_NotPipelined(count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_ResultSelector(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    ParallelQuery<int> query = labeled.Item;
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    int seen = 0;
    foreach (var p in query.SelectMany(x => expand(x, expansion), (original, expanded) => KeyValuePair.Create(original, expanded)))
    {
        Assert.Equal(seen++, p.Value);
        Assert.Equal(p.Key, p.Value / expansion);
    }
    Assert.Equal(count * expansion, seen);
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_ResultSelector_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_ResultSelector(labeled, count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_ResultSelector_NotPipelined(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    ParallelQuery<int> query = labeled.Item;
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    int seen = 0;
    Assert.All(query.SelectMany(x => expand(x, expansion), (original, expanded) => KeyValuePair.Create(original, expanded)).ToList(), p => { Assert.Equal(seen++, p.Value); Assert.Equal(p.Key, p.Value / expansion); });
    Assert.Equal(count * expansion, seen);
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_ResultSelector_NotPipelined_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_ResultSelector_NotPipelined(labeled, count, expander, expansion);
}

// Indexed variants: the index handed to the collection selector is captured into the inner
// elements so the test can verify index/value agreement after flattening.
[Theory]
[MemberData(nameof(SelectManyUnorderedData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Indexed_Unordered(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    // For unordered collections, which element is at which index isn't actually guaranteed, but an effect of the implementation.
    // If this test starts failing it should be updated, and possibly mentioned in release notes.
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    IntegerRangeSet seen = new IntegerRangeSet(0, count * expansion);
    foreach (var pIndex in UnorderedSources.Default(count).SelectMany((x, index) => expand(x, expansion).Select(y => KeyValuePair.Create(index, y))))
    {
        seen.Add(pIndex.Value);
        Assert.Equal(pIndex.Key, pIndex.Value / expansion);
    }
    seen.AssertComplete();
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyUnorderedData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Indexed_Unordered_Longrunning(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Indexed_Unordered(count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyUnorderedData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Indexed_Unordered_NotPipelined(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    // For unordered collections, which element is at which index isn't actually guaranteed, but an effect of the implementation.
    // If this test starts failing it should be updated, and possibly mentioned in release notes.
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    IntegerRangeSet seen = new IntegerRangeSet(0, count * expansion);
    Assert.All(UnorderedSources.Default(count).SelectMany((x, index) => expand(x, expansion).Select(y => KeyValuePair.Create(index, y))).ToList(), pIndex => { seen.Add(pIndex.Value); Assert.Equal(pIndex.Key, pIndex.Value / expansion); });
    seen.AssertComplete();
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyUnorderedData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Indexed_Unordered_NotPipelined_Longrunning(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Indexed_Unordered_NotPipelined(count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Indexed(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    ParallelQuery<int> query = labeled.Item;
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    int seen = 0;
    foreach (var pIndex in query.SelectMany((x, index) => expand(x, expansion).Select(y => KeyValuePair.Create(index, y))))
    {
        Assert.Equal(seen++, pIndex.Value);
        Assert.Equal(pIndex.Key, pIndex.Value / expansion);
    }
    Assert.Equal(count * expansion, seen);
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Indexed_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Indexed(labeled, count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Indexed_NotPipelined(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    ParallelQuery<int> query = labeled.Item;
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    int seen = 0;
    Assert.All(query.SelectMany((x, index) => expand(x, expansion).Select(y => KeyValuePair.Create(index, y))).ToList(), pIndex => { Assert.Equal(seen++, pIndex.Value); Assert.Equal(pIndex.Key, pIndex.Value / expansion); });
    Assert.Equal(count * expansion, seen);
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Indexed_NotPipelined_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Indexed_NotPipelined(labeled, count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyUnorderedData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Indexed_Unordered_ResultSelector(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    // For unordered collections, which element is at which index isn't actually guaranteed, but an effect of the implementation.
    // If this test starts failing it should be updated, and possibly mentioned in release notes.
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    IntegerRangeSet seen = new IntegerRangeSet(0, count * expansion);
    foreach (var pOuter in UnorderedSources.Default(count).SelectMany((x, index) => expand(x, expansion).Select(y => KeyValuePair.Create(index, y)), (original, expanded) => KeyValuePair.Create(original, expanded)))
    {
        var pInner = pOuter.Value;
        Assert.Equal(pOuter.Key, pInner.Key);
        seen.Add(pInner.Value);
        Assert.Equal(pOuter.Key, pInner.Value / expansion);
    }
    seen.AssertComplete();
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyUnorderedData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Indexed_Unordered_ResultSelector_Longrunning(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Indexed_Unordered_ResultSelector(count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyUnorderedData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Indexed_Unordered_ResultSelector_NotPipelined(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    // For unordered collections, which element is at which index isn't actually guaranteed, but an effect of the implementation.
    // If this test starts failing it should be updated, and possibly mentioned in release notes.
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    IntegerRangeSet seen = new IntegerRangeSet(0, count * expansion);
    Assert.All(UnorderedSources.Default(count).SelectMany((x, index) => expand(x, expansion).Select(y => KeyValuePair.Create(index, y)), (original, expanded) => KeyValuePair.Create(original, expanded)).ToList(), pOuter => { var pInner = pOuter.Value; Assert.Equal(pOuter.Key, pInner.Key); seen.Add(pInner.Value); Assert.Equal(pOuter.Key, pInner.Value / expansion); });
    seen.AssertComplete();
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyUnorderedData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Indexed_Unordered_ResultSelector_NotPipelined_Longrunning(int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Indexed_Unordered_ResultSelector_NotPipelined(count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Indexed_ResultSelector(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    ParallelQuery<int> query = labeled.Item;
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    int seen = 0;
    foreach (var pOuter in query.SelectMany((x, index) => expand(x, expansion).Select(y => KeyValuePair.Create(index, y)), (original, expanded) => KeyValuePair.Create(original, expanded)))
    {
        var pInner = pOuter.Value;
        Assert.Equal(pOuter.Key, pInner.Key);
        Assert.Equal(seen++, pInner.Value);
        Assert.Equal(pOuter.Key, pInner.Value / expansion);
    }
    Assert.Equal(count * expansion, seen);
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Indexed_ResultSelector_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Indexed_ResultSelector(labeled, count, expander, expansion);
}

[Theory]
[MemberData(nameof(SelectManyData), new[] { 0, 1, 2, 16 })]
public static void SelectMany_Indexed_ResultSelector_NotPipelined(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    ParallelQuery<int> query = labeled.Item;
    Func<int, int, IEnumerable<int>> expand = expander.Item;
    int seen = 0;
    Assert.All(query.SelectMany((x, index) => expand(x, expansion).Select(y => KeyValuePair.Create(index, y)), (original, expanded) => KeyValuePair.Create(original, expanded)).ToList(), pOuter => { var pInner = pOuter.Value; Assert.Equal(pOuter.Key, pInner.Key); Assert.Equal(seen++, pInner.Value); Assert.Equal(pOuter.Key, pInner.Value / expansion); });
    Assert.Equal(count * expansion, seen);
}

[Theory]
[OuterLoop]
[MemberData(nameof(SelectManyData), new int[] { /* Sources.OuterLoopCount */ })]
public static void SelectMany_Indexed_ResultSelector_NotPipelined_Longrunning(Labeled<ParallelQuery<int>> labeled, int count, Labeled<Func<int, int, IEnumerable<int>>> expander, int expansion)
{
    SelectMany_Indexed_ResultSelector_NotPipelined(labeled, count, expander, expansion);
}

// Argument validation for every SelectMany overload; asserts the exception's ParamName.
[Fact]
public static void SelectMany_ArgumentNullException()
{
    Assert.Throws<ArgumentNullException>("source", () => ((ParallelQuery<bool>)null).SelectMany(x => new[] { x }));
    Assert.Throws<ArgumentNullException>("source", () => ((ParallelQuery<bool>)null).SelectMany((x, index) => new[] { x }));
    Assert.Throws<ArgumentNullException>("selector", () => ParallelEnumerable.Empty<bool>().SelectMany((Func<bool, IEnumerable<bool>>)null));
    Assert.Throws<ArgumentNullException>("selector", () => ParallelEnumerable.Empty<bool>().SelectMany((Func<bool, int, IEnumerable<bool>>)null));
    Assert.Throws<ArgumentNullException>("source", () => ((ParallelQuery<bool>)null).SelectMany(x => new[] { x }, (x, y) => x));
    Assert.Throws<ArgumentNullException>("source", () => ((ParallelQuery<bool>)null).SelectMany((x, index) => new[] { x }, (x, y) => x));
    Assert.Throws<ArgumentNullException>("collectionSelector", () => ParallelEnumerable.Empty<bool>().SelectMany((Func<bool, IEnumerable<bool>>)null, (x, y) => x));
    Assert.Throws<ArgumentNullException>("collectionSelector", () => ParallelEnumerable.Empty<bool>().SelectMany((Func<bool, int, IEnumerable<bool>>)null, (x, y) => x));
    Assert.Throws<ArgumentNullException>("resultSelector", () => ParallelEnumerable.Empty<bool>().SelectMany(x => new[] { x }, (Func<bool, bool, bool>)null));
    Assert.Throws<ArgumentNullException>("resultSelector", () => ParallelEnumerable.Empty<bool>().SelectMany((x, index) => new[] { x }, (Func<bool, bool, bool>)null));
}
}
}
#region License

/*
 * Copyright 2002-2008 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#endregion

using System;

namespace Spring.Threading.AtomicTypes {
    /// <summary>
    /// An <see cref="AtomicMarkableReference{T}"/> maintains an object reference
    /// along with a mark bit, that can be updated atomically.
    /// <p/>
    /// <b>Note:</b> This implementation maintains markable references by creating
    /// internal immutable objects representing "boxed" [reference, boolean] pairs,
    /// and atomically swapping the pair.
    /// <p/>
    /// Based on the back port of JCP JSR-166.
    /// </summary>
    /// <author>Doug Lea</author>
    /// <author>Griffin Caprio (.NET)</author>
    /// <author>Andreas Doehring (.NET)</author>
    [Serializable]
    public class AtomicMarkableReference<T> {
        /// <summary>
        /// Holds the <see cref="Spring.Threading.AtomicTypes.AtomicReference{T}"/> reference.
        /// The pair object is immutable, so an atomic swap of the pair updates the
        /// reference and the mark together.
        /// </summary>
        private readonly AtomicReference<ReferenceBooleanPair<T>> _atomicReference;

        /// <summary>
        /// Immutable [reference, mark] pair; a new instance is created for every update.
        /// </summary>
        [Serializable]
        private class ReferenceBooleanPair<TI> {
            private readonly TI _reference;
            private readonly bool _markBit;

            internal ReferenceBooleanPair(TI reference, bool markBit) {
                _reference = reference;
                _markBit = markBit;
            }

            public TI Reference {
                get { return _reference; }
            }

            public bool MarkBit {
                get { return _markBit; }
            }
        }

        /// <summary>
        /// Creates a new <see cref="Spring.Threading.AtomicTypes.AtomicMarkableReference{T}"/>
        /// with the given initial values.
        /// </summary>
        /// <param name="initialReference">the initial reference</param>
        /// <param name="initialMark">the initial mark</param>
        public AtomicMarkableReference(T initialReference, bool initialMark) {
            _atomicReference = new AtomicReference<ReferenceBooleanPair<T>>(new ReferenceBooleanPair<T>(initialReference, initialMark));
        }

        /// <summary>
        /// Returns the <see cref="ReferenceBooleanPair{TI}"/> held by this instance.
        /// </summary>
        private ReferenceBooleanPair<T> Pair {
            get { return _atomicReference.Reference; }
        }

        /// <summary>
        /// Returns the current value of the reference.
        /// </summary>
        /// <returns>
        /// The current value of the reference
        /// </returns>
        // FIX: was typed 'object', which boxed value types and forced callers to cast;
        // returning T is source-compatible for existing callers.
        public T Reference {
            get { return Pair.Reference; }
        }

        /// <summary>
        /// Returns the current value of the mark.
        /// </summary>
        /// <returns>
        /// The current value of the mark
        /// </returns>
        public bool IsReferenceMarked {
            get { return Pair.MarkBit; }
        }

        /// <summary>
        /// Returns the current values of both the reference and the mark.
        /// Typical usage is:
        /// <code>
        /// bool[] holder = new bool[1];
        /// T reference = v.GetReference(ref holder);
        /// </code>
        /// </summary>
        /// <param name="markHolder">
        /// An array of size of at least one. On return,
        /// markHolder[0] will hold the value of the mark.
        /// </param>
        /// <returns>
        /// The current value of the reference
        /// </returns>
        public T GetReference(ref bool[] markHolder) {
            // Read the pair once so the returned reference and mark are consistent.
            ReferenceBooleanPair<T> p = Pair;
            markHolder[0] = p.MarkBit;
            return p.Reference;
        }

        /// <summary>
        /// Atomically sets the value of both the reference and mark
        /// to the given update values if the
        /// current reference is equal to <paramref name="expectedReference"/>
        /// and the current mark is equal to the <paramref name="expectedMark"/>.
        /// May fail spuriously; this implementation simply delegates to
        /// <see cref="CompareAndSet"/>.
        /// </summary>
        /// <param name="expectedReference">The expected value of the reference</param>
        /// <param name="newReference">The new value for the reference</param>
        /// <param name="expectedMark">The expected value of the mark</param>
        /// <param name="newMark">The new value for the mark</param>
        /// <returns>
        /// <see lang="true"/> if successful, <see lang="false"/> otherwise
        /// </returns>
        public virtual bool WeakCompareAndSet(T expectedReference, T newReference, bool expectedMark, bool newMark) {
            // FIX: was a verbatim copy of CompareAndSet; delegate instead of duplicating.
            return CompareAndSet(expectedReference, newReference, expectedMark, newMark);
        }

        /// <summary>
        /// Atomically sets the value of both the reference and mark
        /// to the given update values if the
        /// current reference is equal to <paramref name="expectedReference"/>
        /// and the current mark is equal to the <paramref name="expectedMark"/>.
        /// </summary>
        /// <param name="expectedReference">The expected value of the reference</param>
        /// <param name="newReference">The new value for the reference</param>
        /// <param name="expectedMark">The expected value of the mark</param>
        /// <param name="newMark">The new value for the mark</param>
        /// <returns>
        /// <see lang="true"/> if successful, <see lang="false"/> otherwise
        /// </returns>
        public bool CompareAndSet(T expectedReference, T newReference, bool expectedMark, bool newMark) {
            ReferenceBooleanPair<T> current = Pair;
            // FIX: use the null-safe static Equals instead of instance Equals, which threw
            // NullReferenceException when expectedReference was null.
            return Equals(expectedReference, current.Reference)
                && expectedMark == current.MarkBit
                && ((Equals(newReference, current.Reference) && newMark == current.MarkBit)
                    || _atomicReference.CompareAndSet(current, new ReferenceBooleanPair<T>(newReference, newMark)));
        }

        /// <summary>
        /// Unconditionally sets the value of both the reference and mark.
        /// </summary>
        /// <param name="newReference">the new value for the reference</param>
        /// <param name="newMark">the new value for the mark</param>
        public void SetNewAtomicValue(T newReference, bool newMark) {
            ReferenceBooleanPair<T> current = Pair;
            // Skip the allocation and swap when nothing would change.
            // FIX: null-safe comparison (was newReference.Equals, which threw on null).
            if (!Equals(newReference, current.Reference) || newMark != current.MarkBit)
                _atomicReference.SetNewAtomicValue(new ReferenceBooleanPair<T>(newReference, newMark));
        }

        /// <summary>
        /// Atomically sets the value of the mark to the given update value
        /// if the current reference is equal to the expected
        /// reference. Any given invocation of this operation may fail
        /// (return false) spuriously, but repeated invocation
        /// when the current value holds the expected value and no other
        /// thread is also attempting to set the value will eventually
        /// succeed.
        /// </summary>
        /// <param name="expectedReference">The expected value of the reference</param>
        /// <param name="newMark">The new value for the mark</param>
        /// <returns>
        /// <see lang="true"/> if successful, <see lang="false"/> otherwise
        /// </returns>
        public bool AttemptMark(T expectedReference, bool newMark) {
            ReferenceBooleanPair<T> current = Pair;
            // FIX: null-safe comparison (was expectedReference.Equals, which threw on null).
            return Equals(expectedReference, current.Reference)
                && (newMark == current.MarkBit
                    || _atomicReference.CompareAndSet(current, new ReferenceBooleanPair<T>(expectedReference, newMark)));
        }
    }
}
using UnityEngine;
using System.Collections;
using System.Collections.Generic;

/// <summary>
/// DropDown Menu Control
/// </summary>
[AddComponentMenu("2D Toolkit/UI/tk2dUIDropDownMenu")]
public class tk2dUIDropDownMenu : MonoBehaviour
{
    /// <summary>
    /// Button that controls, dropdown list from appearing
    /// </summary>
    public tk2dUIItem dropDownButton;

    /// <summary>
    /// Primary textMesh, this will read what DropDownItem is selected
    /// </summary>
    public tk2dTextMesh selectedTextMesh;

    /// <summary>
    /// Visual height of this ui item, used for spacing
    /// </summary>
    [HideInInspector]
    public float height;

    /// <summary>
    /// Template for each drop down item. Will be cloned.
    /// </summary>
    public tk2dUIDropDownItem dropDownItemTemplate;

    /// <summary>
    /// List all all the text for the dropdown list
    /// </summary>
    [SerializeField]
#pragma warning disable 649
    private string[] startingItemList;
#pragma warning restore 649

    /// <summary>
    /// Index of which item in the dropdown list will be selected first
    /// </summary>
    [SerializeField]
    private int startingIndex = 0;

    // Runtime copy of the item texts; seeded from startingItemList in Awake().
    private List<string> itemList = new List<string>();

    /// <summary>
    /// List of all text item in dropdown menu.
    /// NOTE(review): mutating this list does not refresh the visuals — call UpdateList() afterwards.
    /// </summary>
    public List<string> ItemList
    {
        get { return itemList; }
        set { itemList = value; }
    }

    /// <summary>
    /// Event, if different item is selected
    /// </summary>
    public event System.Action OnSelectedItemChange;

    // Optional SendMessage hook fired (with this menu as the argument) whenever the selection changes.
    public string SendMessageOnSelectedItemChangeMethodName = "";

    private int index;

    /// <summary>
    /// Which list index is currently selected.
    /// Setting clamps to [0, ItemList.Count - 1] and triggers SetSelectedItem().
    /// </summary>
    public int Index
    {
        get { return index; }
        set
        {
            index = Mathf.Clamp(value, 0, ItemList.Count - 1);
            SetSelectedItem();
        }
    }

    /// <summary>
    /// Text of the currently selected dropdown list item
    /// </summary>
    public string SelectedItem
    {
        get
        {
            if (index >= 0 && index < itemList.Count)
            {
                return itemList[index];
            }
            else
            {
                return "";
            }
        }
    }

    // Target GameObject for the SendMessage notification; stored on the dropdown button.
    public GameObject SendMessageTarget
    {
        get
        {
            if (dropDownButton != null)
            {
                return dropDownButton.sendMessageTarget;
            }
            else return null;
        }
        set
        {
            if (dropDownButton != null && dropDownButton.sendMessageTarget != value)
            {
                dropDownButton.sendMessageTarget = value;
#if UNITY_EDITOR
                // keep the serialized button asset in sync when assigned from editor code
                UnityEditor.EditorUtility.SetDirty(dropDownButton);
#endif
            }
        }
    }

    // Cloned item instances (pooled: hidden rather than destroyed when the list shrinks).
    private List<tk2dUIDropDownItem> dropDownItems = new List<tk2dUIDropDownItem>();

    private bool isExpanded = false; //is currently in expanded state

    [SerializeField]
    [HideInInspector]
    private tk2dUILayout menuLayoutItem = null;
    public tk2dUILayout MenuLayoutItem
    {
        get { return menuLayoutItem; }
        set { menuLayoutItem = value; }
    }

    [SerializeField]
    [HideInInspector]
    private tk2dUILayout templateLayoutItem = null;
    public tk2dUILayout TemplateLayoutItem
    {
        get { return templateLayoutItem; }
        set { templateLayoutItem = value; }
    }

    // Seeds the runtime list from the serialized array, hides the template, and builds the items.
    void Awake()
    {
        foreach (string itemStr in startingItemList)
        {
            itemList.Add(itemStr);
        }
        index = startingIndex;
#if UNITY_3_5
        //disable all items in template, do make it so Unity 4.x works nicely
        dropDownItemTemplate.gameObject.SetActiveRecursively(false);
#else
        dropDownItemTemplate.gameObject.SetActive(false);
#endif
        UpdateList();
    }

    void OnEnable()
    {
        dropDownButton.OnDown += ExpandButtonPressed;
    }

    void OnDisable()
    {
        dropDownButton.OnDown -= ExpandButtonPressed;
    }

    /// <summary>
    /// Updates all items in list. Need to call this after manipulating strings
    /// </summary>
    public void UpdateList()
    {
        Vector3 localPos;
        tk2dUIDropDownItem item;

        // hide surplus pooled items when the list shrank
        if (dropDownItems.Count > ItemList.Count)
        {
            for (int n = ItemList.Count; n < dropDownItems.Count; n++)
            {
#if UNITY_3_5
                dropDownItems[n].gameObject.SetActiveRecursively(false);
#else
                dropDownItems[n].gameObject.SetActive(false);
#endif
            }
        }

        // clone more items from the template when the list grew
        while (dropDownItems.Count < ItemList.Count)
        {
            dropDownItems.Add(CreateAnotherDropDownItem());
        }

        // position and label each visible item; layout metrics win over raw heights when available
        for (int p = 0; p < ItemList.Count; p++)
        {
            item = dropDownItems[p];
            localPos = item.transform.localPosition;
            if (menuLayoutItem != null && templateLayoutItem != null)
                localPos.y = menuLayoutItem.bMin.y - (p * (templateLayoutItem.bMax.y - templateLayoutItem.bMin.y));
            else
                localPos.y = -height - (p * item.height);
            item.transform.localPosition = localPos;
            if (item.label != null)
            {
                item.LabelText = itemList[p];
            }
            item.Index = p;
        }

        SetSelectedItem();
    }

    /// <summary>
    /// Sets the selected item (based on index)
    /// </summary>
    public void SetSelectedItem()
    {
        // out-of-range index resets to 0; an empty list leaves the label blank
        if (index < 0 || index >= ItemList.Count)
        {
            index = 0;
        }
        if (index >= 0 && index < ItemList.Count)
        {
            selectedTextMesh.text = ItemList[index];
            selectedTextMesh.Commit();
        }
        else
        {
            selectedTextMesh.text = "";
            selectedTextMesh.Commit();
        }

        if (OnSelectedItemChange != null)
        {
            OnSelectedItemChange();
        }

        if (SendMessageTarget != null && SendMessageOnSelectedItemChangeMethodName.Length > 0)
        {
            SendMessageTarget.SendMessage(SendMessageOnSelectedItemChangeMethodName, this, SendMessageOptions.RequireReceiver);
        }
    }

    //clones another dropdown item from template
    private tk2dUIDropDownItem CreateAnotherDropDownItem()
    {
        GameObject go = Instantiate(dropDownItemTemplate.gameObject) as GameObject;
        go.name = "DropDownItem";
        go.transform.parent = transform;
        go.transform.localPosition = dropDownItemTemplate.transform.localPosition;
        go.transform.localRotation = dropDownItemTemplate.transform.localRotation;
        go.transform.localScale = dropDownItemTemplate.transform.localScale;
        tk2dUIDropDownItem item = go.GetComponent<tk2dUIDropDownItem>();
        // subscriptions are balanced in OnDestroy
        item.OnItemSelected += ItemSelected;
        tk2dUIUpDownHoverButton itemUpDownHoverBtn = go.GetComponent<tk2dUIUpDownHoverButton>();
        item.upDownHoverBtn = itemUpDownHoverBtn;
        itemUpDownHoverBtn.OnToggleOver += DropDownItemHoverBtnToggle;
#if UNITY_3_5
        go.SetActiveRecursively(false);
#endif
        return item;
    }

    //when an item in list is selected
    private void ItemSelected(tk2dUIDropDownItem item)
    {
        if (isExpanded)
        {
            CollapseList();
        }
        Index = item.Index;
    }

    // Toggles the dropdown when the main button is pressed.
    private void ExpandButtonPressed()
    {
        if (isExpanded)
        {
            CollapseList();
        }
        else
        {
            ExpandList();
        }
    }

    //drops list down
    private void ExpandList()
    {
        isExpanded = true;
        int count = Mathf.Min(ItemList.Count, dropDownItems.Count);
        for (int i = 0; i < count; ++i)
        {
#if UNITY_3_5
            dropDownItems[i].gameObject.SetActiveRecursively(true);
            dropDownItems[i].upDownHoverBtn.SetState(); //deals with how active recursive needs to work in Unity 3.x
#else
            dropDownItems[i].gameObject.SetActive(true);
#endif
        }
        // highlight the currently-selected item
        tk2dUIDropDownItem selectedItem = dropDownItems[index];
        if (selectedItem.upDownHoverBtn != null)
        {
            selectedItem.upDownHoverBtn.IsOver = true;
        }
    }

    //collapses list on selecting item or closing
    private void CollapseList()
    {
        isExpanded = false;
        foreach (tk2dUIDropDownItem item in dropDownItems)
        {
#if UNITY_3_5
            item.gameObject.SetActiveRecursively(false);
#else
            item.gameObject.SetActive(false);
#endif
        }
    }

    // Keeps at most one item highlighted: clears IsOver on every other item when one gains hover.
    private void DropDownItemHoverBtnToggle(tk2dUIUpDownHoverButton upDownHoverButton)
    {
        if (upDownHoverButton.IsOver)
        {
            foreach (tk2dUIDropDownItem item in dropDownItems)
            {
                if (item.upDownHoverBtn != upDownHoverButton && item.upDownHoverBtn != null)
                {
                    item.upDownHoverBtn.IsOver = false;
                }
            }
        }
    }

    // Unsubscribes from every cloned item's events to avoid dangling handlers.
    void OnDestroy()
    {
        foreach (tk2dUIDropDownItem item in dropDownItems)
        {
            item.OnItemSelected -= ItemSelected;
            if (item.upDownHoverBtn != null)
            {
                item.upDownHoverBtn.OnToggleOver -= DropDownItemHoverBtnToggle;
            }
        }
    }
}
#region MigraDoc - Creating Documents on the Fly
//
// Authors:
//   Stefan Lange (mailto:Stefan.Lange@pdfsharp.com)
//   Klaus Potzesny (mailto:Klaus.Potzesny@pdfsharp.com)
//   David Stephensen (mailto:David.Stephensen@pdfsharp.com)
//
// Copyright (c) 2001-2009 empira Software GmbH, Cologne (Germany)
//
// http://www.pdfsharp.com
// http://www.migradoc.com
// http://sourceforge.net/projects/pdfsharp
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
#endregion

using System;
using System.Diagnostics;
using System.Reflection;
using System.IO;
using MigraDoc.DocumentObjectModel.Internals;

namespace MigraDoc.DocumentObjectModel.Shapes
{
    /// <summary>
    /// Represents an image in the document or paragraph.
    /// </summary>
    public class Image : Shape, IDisposable
    {
        /// <summary>
        /// Initializes a new instance of the Image class.
        /// </summary>
        public Image()
        {
        }

        /// <summary>
        /// Initializes a new instance of the Image class from an in-memory image stream.
        /// The stream's content is copied, so the caller retains ownership of
        /// <paramref name="imageStream"/> and may dispose it afterwards.
        /// </summary>
        public Image(MemoryStream imageStream)
            : this()
        {
            // FIX: copy only the stream's actual content. The previous code wrote
            // imageStream.GetBuffer() for GetBuffer().Length bytes, but GetBuffer()
            // returns the internal buffer whose length is the stream's *capacity*,
            // so unused trailing padding bytes were appended to the image data.
            byte[] data = imageStream.ToArray();
            ImageStream = new MemoryStream();
            ImageStream.Write(data, 0, data.Length);
            Name = String.Empty; // When no filename, we'll look for the stream.
        }

        /// <summary>
        /// Gets a value indicating whether this image is backed by an in-memory
        /// stream (true) rather than a file name (false).
        /// </summary>
        public bool StreamBased
        {
            get { return ImageStream != null; }
        }

        // Holds the image bytes when the image was constructed from a stream;
        // null for file-based images. Disposed via Dispose().
        public MemoryStream ImageStream;

        /// <summary>
        /// Initializes a new instance of the Image class with the specified parent.
        /// </summary>
        internal Image(DocumentObject parent)
            : base(parent)
        {
        }

        /// <summary>
        /// Initializes a new instance of the Image class from the specified (file) name.
        /// </summary>
        public Image(string name)
            : this()
        {
            Name = name;
        }

        //#region Methods
        /// <summary>
        /// Creates a deep copy of this object.
        /// </summary>
        public new Image Clone()
        {
            return (Image)DeepCopy();
        }

        /// <summary>
        /// Implements the deep copy of the object. The cloned PictureFormat (if any)
        /// is re-parented to the copy so the two objects do not share children.
        /// </summary>
        protected override object DeepCopy()
        {
            Image image = (Image)base.DeepCopy();
            if (image.pictureFormat != null)
            {
                image.pictureFormat = image.pictureFormat.Clone();
                image.pictureFormat.parent = image;
            }
            return image;
        }
        //#endregion

        //#region Properties
        /// <summary>
        /// Gets or sets the name of the image.
        /// </summary>
        public string Name
        {
            get { return this.name.Value; }
            set { this.name.Value = value; }
        }
        [DV]
        internal NString name = NString.NullValue;

        /// <summary>
        /// Gets or sets the ScaleWidth of the image.
        /// If the Width is set too, the resulting image width is ScaleWidth * Width.
        /// </summary>
        public double ScaleWidth
        {
            get { return this.scaleWidth.Value; }
            set { this.scaleWidth.Value = value; }
        }
        [DV]
        internal NDouble scaleWidth = NDouble.NullValue;

        /// <summary>
        /// Gets or sets the ScaleHeight of the image.
        /// If the Height is set too, the resulting image height is ScaleHeight * Height.
        /// </summary>
        public double ScaleHeight
        {
            get { return this.scaleHeight.Value; }
            set { this.scaleHeight.Value = value; }
        }
        [DV]
        internal NDouble scaleHeight = NDouble.NullValue;

        /// <summary>
        /// Gets or sets whether the AspectRatio of the image is kept unchanged.
        /// If both Width and Height are set, this property is ignored.
        /// </summary>
        public bool LockAspectRatio
        {
            get { return this.lockAspectRatio.Value; }
            set { this.lockAspectRatio.Value = value; }
        }
        [DV]
        internal NBool lockAspectRatio = NBool.NullValue;

        /// <summary>
        /// Gets or sets the PictureFormat for the image.
        /// The getter lazily creates the object; the setter re-parents the assigned value.
        /// </summary>
        public PictureFormat PictureFormat
        {
            get
            {
                if (this.pictureFormat == null)
                    this.pictureFormat = new PictureFormat(this);
                return this.pictureFormat;
            }
            set
            {
                SetParent(value);
                this.pictureFormat = value;
            }
        }
        [DV]
        internal PictureFormat pictureFormat;

        /// <summary>
        /// Gets or sets a user defined resolution for the image in dots per inch.
        /// </summary>
        public double Resolution
        {
            get { return this.resolution.Value; }
            set { this.resolution.Value = value; }
        }
        [DV]
        internal NDouble resolution = NDouble.NullValue;
        //#endregion

        #region Internal
        /// <summary>
        /// Converts Image into DDL. Backslashes and quotes in the image name are
        /// escaped; only non-null attributes are serialized.
        /// </summary>
        internal override void Serialize(Serializer serializer)
        {
            serializer.WriteLine("\\image(\"" + this.name.Value.Replace("\\", "\\\\").Replace("\"", "\\\"") + "\")");

            int pos = serializer.BeginAttributes();
            base.Serialize(serializer);

            if (!this.scaleWidth.IsNull)
                serializer.WriteSimpleAttribute("ScaleWidth", this.ScaleWidth);
            if (!this.scaleHeight.IsNull)
                serializer.WriteSimpleAttribute("ScaleHeight", this.ScaleHeight);
            if (!this.lockAspectRatio.IsNull)
                serializer.WriteSimpleAttribute("LockAspectRatio", this.LockAspectRatio);
            if (!this.resolution.IsNull)
                serializer.WriteSimpleAttribute("Resolution", this.Resolution);
            if (!this.IsNull("PictureFormat"))
                this.pictureFormat.Serialize(serializer);

            serializer.EndAttributes(pos);
        }

        /// <summary>
        /// Gets the concrete image path, taking into account the DOM document's DdlFile and
        /// ImagePath properties as well as the given working directory (which can be null).
        /// Returns String.Empty for stream-based images and null when resolution fails.
        /// </summary>
        public string GetFilePath(string workingDir)
        {
            string filePath = "";

            // Stream-based images have no file path by definition.
            if (StreamBased)
            {
                return String.Empty;
            }

            try
            {
                if (!String.IsNullOrEmpty(workingDir))
                    filePath = workingDir;
                else
                    filePath = Directory.GetCurrentDirectory() + "\\";

                if (!Document.IsNull("ImagePath"))
                {
                    // Search the document's image path list first; fall back to the
                    // working directory if the image cannot be found there.
                    string foundfile = ImageHelper.GetImageName(filePath, this.Name, Document.ImagePath);
                    if (foundfile != null)
                        filePath = foundfile;
                    else
                        filePath = Path.Combine(filePath, Name);
                }
                else
                    filePath = Path.Combine(filePath, Name);
            }
            catch (Exception ex)
            {
                Debug.Assert(false, "Should never occur with properly formatted Wiki texts. " + ex);
                return null;
                //throw;
            }

            return filePath;
        }

        /// <summary>
        /// Returns the meta object of this instance.
        /// </summary>
        internal override Meta Meta
        {
            get
            {
                if (meta == null)
                    meta = new Meta(typeof(Image));
                return meta;
            }
        }
        static Meta meta;
        #endregion

        /// <summary>
        /// Releases the in-memory image stream, if any. Safe to call multiple times.
        /// </summary>
        public void Dispose()
        {
            if (ImageStream != null)
            {
                ImageStream.Close();
                ImageStream.Dispose();
                ImageStream = null;
            }
        }
    }
}
/********************************************************************

The Multiverse Platform is made available under the MIT License.

Copyright (c) 2012 The Multiverse Foundation

Permission is hereby granted, free of charge, to any person 
obtaining a copy of this software and associated documentation 
files (the "Software"), to deal in the Software without restriction, 
including without limitation the rights to use, copy, modify, 
merge, publish, distribute, sublicense, and/or sell copies 
of the Software, and to permit persons to whom the Software 
is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be 
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 
OTHER DEALINGS IN THE SOFTWARE.

*********************************************************************/

using System;
using System.IO;
using System.ComponentModel;
using System.Runtime.InteropServices;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Package;

namespace Microsoft.MultiverseInterfaceStudio
{
    /// <summary>
    /// General property settings page for Multiverse Interface addon projects.
    /// Each setter marks the page dirty so the IDE knows to persist the change;
    /// BindProperties/ApplyChanges round-trip the values to the project file.
    /// </summary>
    [ComVisible(true)]
    [Guid(GuidStrings.GeneralPropertyPage)]
    public class GeneralPropertyPage : SettingsPage, ICustomTypeDescriptor
    {
        // Backing fields for the project properties shown on this page.
        private string wowPath;
        private string interfaceVersion;
        private string addonTitle;
        private string addonNotes;
        private string dependencies;
        private string savedVariables;
        private string savedVariablesPerCharacter;
        private string author;
        private string addonVersion;
        private string authorEmail;

        /// <summary>
        /// Initializes a new instance of the <see cref="GeneralPropertyPage"/> class.
        /// </summary>
        public GeneralPropertyPage()
        {
            // Get the localized general caption for the page's name
            this.Name = Resources.GetString(Resources.GeneralCaption);
        }

        #region Project Properties
        /// <summary>
        /// Gets or sets the path to the local World of Warcraft installation.
        /// Note: persisted in the project file under the MultiversePath tag
        /// (see BindProperties/ApplyChanges).
        /// </summary>
        [LocalizedCategory(Resources.MultiverseInterface)]
        [LocalizedDisplayName("WowPath_Name")]
        [LocalizedDescription("WowPath_Description")]
        public string WowPath
        {
            get { return this.wowPath; }
            set
            {
                this.wowPath = value;
                this.IsDirty = true;
            }
        }

        /// <summary>
        /// Gets or sets the interface version the addon targets.
        /// </summary>
        [LocalizedCategory("Category_Details")]
        [LocalizedDisplayName("InterfaceVersion_Name")]
        [LocalizedDescription("InterfaceVersion_Description")]
        public string InterfaceVersion
        {
            get { return this.interfaceVersion; }
            set
            {
                this.interfaceVersion = value;
                this.IsDirty = true;
            }
        }

        /// <summary>
        /// Gets or sets the addon's display title.
        /// </summary>
        [LocalizedCategory("Category_AddonInfo")]
        [LocalizedDisplayName("AddonTitle_Name")]
        [LocalizedDescription("AddonTitle_Description")]
        public string AddonTitle
        {
            get { return this.addonTitle; }
            set
            {
                this.addonTitle = value;
                this.IsDirty = true;
            }
        }

        /// <summary>
        /// Gets or sets the addon's descriptive notes.
        /// </summary>
        [LocalizedCategory("Category_AddonInfo")]
        [LocalizedDisplayName("AddonNotes_Name")]
        [LocalizedDescription("AddonNotes_Description")]
        public string AddonNotes
        {
            get { return this.addonNotes; }
            set
            {
                this.addonNotes = value;
                this.IsDirty = true;
            }
        }

        /// <summary>
        /// Gets or sets the addon's dependency list.
        /// </summary>
        [LocalizedCategory("Category_Details")]
        [LocalizedDisplayName("Dependencies_Name")]
        [LocalizedDescription("Dependencies_Description")]
        public string Dependencies
        {
            get { return this.dependencies; }
            set
            {
                this.dependencies = value;
                this.IsDirty = true;
            }
        }

        /// <summary>
        /// Gets or sets the addon's saved-variables list.
        /// </summary>
        [LocalizedCategory("Category_Details")]
        [LocalizedDisplayName("SavedVariables_Name")]
        [LocalizedDescription("SavedVariables_Description")]
        public string SavedVariables
        {
            get { return this.savedVariables; }
            set
            {
                this.savedVariables = value;
                this.IsDirty = true;
            }
        }

        /// <summary>
        /// Gets or sets the addon's per-character saved-variables list.
        /// </summary>
        [LocalizedCategory("Category_Details")]
        [LocalizedDisplayName("SavedVariablesPerCharacter_Name")]
        [LocalizedDescription("SavedVariablesPerCharacter_Description")]
        public string SavedVariablesPerCharacter
        {
            get { return this.savedVariablesPerCharacter; }
            set
            {
                this.savedVariablesPerCharacter = value;
                this.IsDirty = true;
            }
        }

        /// <summary>
        /// Gets or sets the addon's author name.
        /// </summary>
        [LocalizedCategory("Category_AddonInfo")]
        [LocalizedDisplayName("Author_Name")]
        [LocalizedDescription("Author_Description")]
        public string Author
        {
            get { return this.author; }
            set
            {
                this.author = value;
                this.IsDirty = true;
            }
        }

        /// <summary>
        /// Gets or sets the addon's version string.
        /// </summary>
        [LocalizedCategory("Category_AddonInfo")]
        [LocalizedDisplayName("AddonVersion_Name")]
        [LocalizedDescription("AddonVersion_Description")]
        public string AddonVersion
        {
            get { return this.addonVersion; }
            set
            {
                this.addonVersion = value;
                this.IsDirty = true;
            }
        }

        /// <summary>
        /// Gets or sets the author's e-mail address.
        /// </summary>
        [LocalizedCategory("Category_AddonInfo")]
        [LocalizedDisplayName("AuthorEmail_Name")]
        [LocalizedDescription("AuthorEmail_Description")]
        public string AuthorEmail
        {
            get { return this.authorEmail; }
            set
            {
                this.authorEmail = value;
                this.IsDirty = true;
            }
        }
        #endregion

        /// <summary>
        /// Returns the name of the class.
        /// </summary>
        public override string GetClassName()
        {
            return this.GetType().FullName;
        }

        /// <summary>
        /// Binds the properties: loads each backing field from the corresponding
        /// project property. Note WowPath is stored under the MultiversePath tag.
        /// No-op when no project manager is attached.
        /// </summary>
        protected override void BindProperties()
        {
            if (this.ProjectMgr == null)
                return;

            this.wowPath = this.ProjectMgr.GetProjectProperty(GeneralPropertyPageTag.MultiversePath.ToString(), false);
            this.interfaceVersion = this.ProjectMgr.GetProjectProperty(GeneralPropertyPageTag.InterfaceVersion.ToString(), false);
            this.addonTitle = this.ProjectMgr.GetProjectProperty(GeneralPropertyPageTag.AddonTitle.ToString(), false);
            this.addonNotes = this.ProjectMgr.GetProjectProperty(GeneralPropertyPageTag.AddonNotes.ToString(), false);
            this.addonVersion = this.ProjectMgr.GetProjectProperty(GeneralPropertyPageTag.AddonVersion.ToString(), false);
            this.author = this.ProjectMgr.GetProjectProperty(GeneralPropertyPageTag.Author.ToString(), false);
            this.authorEmail = this.ProjectMgr.GetProjectProperty(GeneralPropertyPageTag.AuthorEmail.ToString(), false);
            this.dependencies = this.ProjectMgr.GetProjectProperty(GeneralPropertyPageTag.Dependencies.ToString(), false);
            this.savedVariables = this.ProjectMgr.GetProjectProperty(GeneralPropertyPageTag.SavedVariables.ToString(), false);
            this.savedVariablesPerCharacter = this.ProjectMgr.GetProjectProperty(GeneralPropertyPageTag.SavedVariablesPerCharacter.ToString(), false);
        }

        /// <summary>
        /// Apply the changes made to the project: writes every backing field back to
        /// the project file and clears the dirty flag. Returns E_INVALIDARG when no
        /// project manager is attached, S_OK otherwise.
        /// </summary>
        protected override int ApplyChanges()
        {
            if (this.ProjectMgr == null)
                return VSConstants.E_INVALIDARG;

            this.ProjectMgr.SetProjectProperty(GeneralPropertyPageTag.MultiversePath.ToString(), this.wowPath);
            this.ProjectMgr.SetProjectProperty(GeneralPropertyPageTag.InterfaceVersion.ToString(), this.interfaceVersion);
            this.ProjectMgr.SetProjectProperty(GeneralPropertyPageTag.AddonTitle.ToString(), this.addonTitle);
            this.ProjectMgr.SetProjectProperty(GeneralPropertyPageTag.AddonNotes.ToString(), this.addonNotes);
            this.ProjectMgr.SetProjectProperty(GeneralPropertyPageTag.Dependencies.ToString(), this.dependencies);
            this.ProjectMgr.SetProjectProperty(GeneralPropertyPageTag.SavedVariables.ToString(), this.savedVariables);
            this.ProjectMgr.SetProjectProperty(GeneralPropertyPageTag.SavedVariablesPerCharacter.ToString(), this.savedVariablesPerCharacter);
            this.ProjectMgr.SetProjectProperty(GeneralPropertyPageTag.Author.ToString(), this.author);
            this.ProjectMgr.SetProjectProperty(GeneralPropertyPageTag.AddonVersion.ToString(), this.addonVersion);
            this.ProjectMgr.SetProjectProperty(GeneralPropertyPageTag.AuthorEmail.ToString(), this.authorEmail);

            this.IsDirty = false;

            return VSConstants.S_OK;
        }
    }
}
using System;
using System.Globalization;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Controls.Primitives;
using System.Windows.Data;
using System.Windows.Input;
using System.Windows.Threading;
using MaterialDesignThemes.Wpf.Converters;

namespace MaterialDesignThemes.Wpf
{
    /// <summary>
    /// Time selection control combining a text entry box with a popup clock.
    /// Text typed into the box is parsed into <see cref="SelectedTime"/>; choices
    /// made on the clock are pushed back into the text.
    /// </summary>
    [TemplatePart(Name = ButtonPartName, Type = typeof(Button))]
    [TemplatePart(Name = PopupPartName, Type = typeof(Popup))]
    [TemplatePart(Name = TextBoxPartName, Type = typeof(TimePickerTextBox))]
    public class TimePicker : Control
    {
        public const string ButtonPartName = "PART_Button";
        public const string PopupPartName = "PART_Popup";
        public const string TextBoxPartName = "PART_TextBox";

        private readonly ContentControl _clockHostContentControl;
        private readonly Clock _clock;
        private TextBox? _textBox;
        private Popup? _popup;
        private Button? _dropDownButton;
        private bool _disablePopupReopen;
        // Last time that parsed successfully; used to revert invalid text entry.
        private DateTime? _lastValidTime;
        // Guards against re-entrant Text updates while we mutate Text ourselves.
        private bool _isManuallyMutatingText;

        static TimePicker()
        {
            DefaultStyleKeyProperty.OverrideMetadata(typeof(TimePicker), new FrameworkPropertyMetadata(typeof(TimePicker)));
            EventManager.RegisterClassHandler(typeof(TimePicker), UIElement.GotFocusEvent, new RoutedEventHandler(OnGotFocus));
        }

        /// <summary>
        /// Called when this element gets focus.
        /// </summary>
        private static void OnGotFocus(object sender, RoutedEventArgs e)
        {
            // When TimePicker gets focus move it to the TextBox
            TimePicker picker = (TimePicker)sender;
            if ((!e.Handled) && (picker._textBox != null))
            {
                if (e.OriginalSource == picker)
                {
                    picker._textBox.Focus();
                    e.Handled = true;
                }
                else if (e.OriginalSource == picker._textBox)
                {
                    picker._textBox.SelectAll();
                    e.Handled = true;
                }
            }
        }

        public TimePicker()
        {
            _clock = new Clock
            {
                DisplayAutomation = ClockDisplayAutomation.ToMinutesOnly
            };
            _clockHostContentControl = new ContentControl
            {
                Content = _clock
            };
            InitializeClock();
        }

        public static readonly DependencyProperty TextProperty = DependencyProperty.Register(
            nameof(Text), typeof(string), typeof(TimePicker), new FrameworkPropertyMetadata(default(string?), FrameworkPropertyMetadataOptions.BindsTwoWayByDefault, TextPropertyChangedCallback));

        private static void TextPropertyChangedCallback(DependencyObject dependencyObject, DependencyPropertyChangedEventArgs dependencyPropertyChangedEventArgs)
        {
            var timePicker = (TimePicker)dependencyObject;
            // Only re-parse when the change came from outside (binding/user code),
            // not from our own SetCurrentValue calls.
            if (!timePicker._isManuallyMutatingText)
            {
                timePicker.SetSelectedTime();
            }

            if (timePicker._textBox != null)
            {
                timePicker.UpdateTextBoxText(dependencyPropertyChangedEventArgs.NewValue as string ?? "");
            }
        }

        /// <summary>
        /// Gets or sets the raw text displayed in the picker's text box.
        /// </summary>
        public string? Text
        {
            get => (string?)GetValue(TextProperty);
            set => SetValue(TextProperty, value);
        }

        public static readonly DependencyProperty SelectedTimeProperty = DependencyProperty.Register(
            nameof(SelectedTime), typeof(DateTime?), typeof(TimePicker), new FrameworkPropertyMetadata(default(DateTime?), FrameworkPropertyMetadataOptions.BindsTwoWayByDefault, SelectedTimePropertyChangedCallback));

        private static void SelectedTimePropertyChangedCallback(DependencyObject dependencyObject, DependencyPropertyChangedEventArgs dependencyPropertyChangedEventArgs)
        {
            var timePicker = (TimePicker)dependencyObject;

            timePicker._isManuallyMutatingText = true;
            timePicker.SetCurrentValue(TextProperty, timePicker.DateTimeToString(timePicker.SelectedTime));
            timePicker._isManuallyMutatingText = false;

            timePicker._lastValidTime = timePicker.SelectedTime;

            OnSelectedTimeChanged(timePicker, dependencyPropertyChangedEventArgs);
        }

        /// <summary>
        /// Gets or sets the currently selected time (date part preserved from the
        /// previous value, or today's date).
        /// </summary>
        public DateTime? SelectedTime
        {
            get => (DateTime?)GetValue(SelectedTimeProperty);
            set => SetValue(SelectedTimeProperty, value);
        }

        // NOTE(review): the routed event is registered under the name "SelectedTime"
        // (nameof(SelectedTime)) rather than "SelectedTimeChanged" — left unchanged
        // because external code may reference the event by that registered name.
        public static readonly RoutedEvent SelectedTimeChangedEvent = EventManager.RegisterRoutedEvent(
            nameof(SelectedTime), RoutingStrategy.Bubble, typeof(RoutedPropertyChangedEventHandler<DateTime?>), typeof(TimePicker));

        /// <summary>
        /// Raised whenever <see cref="SelectedTime"/> changes.
        /// </summary>
        public event RoutedPropertyChangedEventHandler<DateTime?> SelectedTimeChanged
        {
            add => AddHandler(SelectedTimeChangedEvent, value);
            remove => RemoveHandler(SelectedTimeChangedEvent, value);
        }

        private static void OnSelectedTimeChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
        {
            var instance = (TimePicker)d;
            var args = new RoutedPropertyChangedEventArgs<DateTime?>(
                (DateTime?)e.OldValue,
                (DateTime?)e.NewValue)
            {
                RoutedEvent = SelectedTimeChangedEvent
            };
            instance.RaiseEvent(args);
        }

        public static readonly DependencyProperty SelectedTimeFormatProperty = DependencyProperty.Register(
            nameof(SelectedTimeFormat), typeof(DatePickerFormat), typeof(TimePicker), new PropertyMetadata(DatePickerFormat.Short));

        /// <summary>
        /// Gets or sets the display format (Short = "h:mm", Long = "hh:mm").
        /// </summary>
        public DatePickerFormat SelectedTimeFormat
        {
            get => (DatePickerFormat)GetValue(SelectedTimeFormatProperty);
            set => SetValue(SelectedTimeFormatProperty, value);
        }

        public static readonly DependencyProperty IsDropDownOpenProperty = DependencyProperty.Register(
            nameof(IsDropDownOpen), typeof(bool), typeof(TimePicker), new FrameworkPropertyMetadata(false, FrameworkPropertyMetadataOptions.BindsTwoWayByDefault, OnIsDropDownOpenChanged, OnCoerceIsDropDownOpen));

        /// <summary>
        /// Gets or sets whether the clock popup is open. Coerced to false while disabled.
        /// </summary>
        public bool IsDropDownOpen
        {
            get => (bool)GetValue(IsDropDownOpenProperty);
            set => SetValue(IsDropDownOpenProperty, value);
        }

        public static readonly DependencyProperty Is24HoursProperty = DependencyProperty.Register(
            nameof(Is24Hours), typeof(bool), typeof(TimePicker), new PropertyMetadata(default(bool), Is24HoursChanged));

        private static void Is24HoursChanged(DependencyObject dependencyObject, DependencyPropertyChangedEventArgs e)
        {
            var timePicker = (TimePicker)dependencyObject;

            // Reformat the displayed text to match the new clock convention.
            timePicker._isManuallyMutatingText = true;
            timePicker.SetCurrentValue(TextProperty, timePicker.DateTimeToString(timePicker.SelectedTime));
            timePicker._isManuallyMutatingText = false;
        }

        /// <summary>
        /// Gets or sets whether times are displayed and entered in 24-hour format.
        /// </summary>
        public bool Is24Hours
        {
            get => (bool)GetValue(Is24HoursProperty);
            set => SetValue(Is24HoursProperty, value);
        }

        public static readonly DependencyProperty IsHeaderVisibleProperty = DependencyProperty.Register(
            nameof(IsHeaderVisible), typeof(bool), typeof(TimePicker), new PropertyMetadata(default(bool)));

        /// <summary>
        /// Gets or sets whether the picker's header is visible (consumed by the template).
        /// </summary>
        public bool IsHeaderVisible
        {
            get => (bool)GetValue(IsHeaderVisibleProperty);
            set => SetValue(IsHeaderVisibleProperty, value);
        }

        private static object OnCoerceIsDropDownOpen(DependencyObject d, object baseValue)
        {
            var timePicker = (TimePicker)d;
            return timePicker.IsEnabled ? baseValue : false;
        }

        /// <summary>
        /// IsDropDownOpenProperty property changed handler.
        /// </summary>
        /// <param name="d">DatePicker that changed its IsDropDownOpen.</param>
        /// <param name="e">DependencyPropertyChangedEventArgs.</param>
        private static void OnIsDropDownOpenChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
        {
            var timePicker = (TimePicker)d;
            var newValue = (bool)e.NewValue;
            if (timePicker._popup == null || timePicker._popup.IsOpen == newValue) return;

            timePicker._popup.IsOpen = newValue;
            if (newValue)
            {
                //TODO set time
                //dp._originalSelectedDate = dp.SelectedDate;

                timePicker.Dispatcher?.BeginInvoke(DispatcherPriority.Input, new Action(() =>
                {
                    timePicker._clock.Focus();
                }));
            }
        }

        public static readonly DependencyProperty ClockStyleProperty = DependencyProperty.Register(
            nameof(ClockStyle), typeof(Style), typeof(TimePicker), new PropertyMetadata(default(Style?)));

        /// <summary>
        /// Gets or sets the style applied to the popup's <see cref="Clock"/>.
        /// </summary>
        public Style? ClockStyle
        {
            get => (Style?)GetValue(ClockStyleProperty);
            set => SetValue(ClockStyleProperty, value);
        }

        public static readonly DependencyProperty ClockHostContentControlStyleProperty = DependencyProperty.Register(
            nameof(ClockHostContentControlStyle), typeof(Style), typeof(TimePicker), new PropertyMetadata(default(Style?)));

        /// <summary>
        /// Gets or sets the style applied to the ContentControl hosting the clock.
        /// </summary>
        public Style? ClockHostContentControlStyle
        {
            get => (Style?)GetValue(ClockHostContentControlStyleProperty);
            set => SetValue(ClockHostContentControlStyleProperty, value);
        }

        public static readonly DependencyProperty IsInvalidTextAllowedProperty = DependencyProperty.Register(
            "IsInvalidTextAllowed", typeof(bool), typeof(TimePicker), new PropertyMetadata(default(bool)));

        /// <summary>
        /// Set to true to stop invalid text reverting back to previous valid value. Useful in cases where you
        /// want to display validation messages and allow the user to correct the data without it reverting.
        /// </summary>
        public bool IsInvalidTextAllowed
        {
            get => (bool)GetValue(IsInvalidTextAllowedProperty);
            set => SetValue(IsInvalidTextAllowedProperty, value);
        }

        public static readonly DependencyProperty WithSecondsProperty = DependencyProperty.Register(
            nameof(WithSeconds), typeof(bool), typeof(TimePicker), new PropertyMetadata(default(bool), WithSecondsPropertyChanged));

        /// <summary>
        /// Set to true to display seconds in the time and allow the user to select seconds.
        /// </summary>
        public bool WithSeconds
        {
            get => (bool)GetValue(WithSecondsProperty);
            set => SetValue(WithSecondsProperty, value);
        }

        private static void WithSecondsPropertyChanged(DependencyObject sender, DependencyPropertyChangedEventArgs e)
        {
            if (sender is TimePicker picker)
            {
                // update the clock's behavior as needed when the WithSeconds value changes
                picker._clock.DisplayAutomation = picker.WithSeconds
                    ? ClockDisplayAutomation.ToSeconds
                    : ClockDisplayAutomation.ToMinutesOnly;
            }
        }

        /// <summary>
        /// Detaches handlers from the previous template's parts, then locates and
        /// wires up the parts of the newly applied template.
        /// </summary>
        public override void OnApplyTemplate()
        {
            if (_popup != null)
            {
                _popup.RemoveHandler(PreviewMouseLeftButtonDownEvent, new MouseButtonEventHandler(PopupOnPreviewMouseLeftButtonDown));
                _popup.Opened -= PopupOnOpened;
                _popup.Closed -= PopupOnClosed;
                _popup.Child = null;
            }
            if (_dropDownButton != null)
            {
                _dropDownButton.Click -= DropDownButtonOnClick;
            }
            if (_textBox != null)
            {
                _textBox.RemoveHandler(KeyDownEvent, new KeyEventHandler(TextBoxOnKeyDown));
                _textBox.RemoveHandler(TextBoxBase.TextChangedEvent, new TextChangedEventHandler(TextBoxOnTextChanged));
                // FIX: this teardown branch previously called AddHandler, leaking a
                // LostFocus subscription on the discarded text box on every re-template.
                _textBox.RemoveHandler(LostFocusEvent, new RoutedEventHandler(TextBoxOnLostFocus));
            }

            _textBox = GetTemplateChild(TextBoxPartName) as TextBox;
            if (_textBox != null)
            {
                _textBox.AddHandler(KeyDownEvent, new KeyEventHandler(TextBoxOnKeyDown));
                _textBox.AddHandler(TextBoxBase.TextChangedEvent, new TextChangedEventHandler(TextBoxOnTextChanged));
                _textBox.AddHandler(LostFocusEvent, new RoutedEventHandler(TextBoxOnLostFocus));
                _textBox.Text = Text;
            }

            _popup = GetTemplateChild(PopupPartName) as Popup;
            if (_popup != null)
            {
                _popup.AddHandler(PreviewMouseLeftButtonDownEvent, new MouseButtonEventHandler(PopupOnPreviewMouseLeftButtonDown));
                _popup.Opened += PopupOnOpened;
                _popup.Closed += PopupOnClosed;
                _popup.Child = _clockHostContentControl;
                if (IsDropDownOpen)
                {
                    _popup.IsOpen = true;
                }
            }

            _dropDownButton = GetTemplateChild(ButtonPartName) as Button;
            if (_dropDownButton != null)
            {
                _dropDownButton.Click += DropDownButtonOnClick;
            }

            base.OnApplyTemplate();
        }

        private void TextBoxOnLostFocus(object sender, RoutedEventArgs routedEventArgs)
        {
            string? text = _textBox?.Text;
            if (string.IsNullOrEmpty(text))
            {
                SetCurrentValue(SelectedTimeProperty, null);
                return;
            }

            if (IsTimeValid(text!, out DateTime time))
            {
                SetSelectedTime(time);
                UpdateTextBoxTextIfNeeded(text!);
            }
            else // Invalid time, jump back to previous good time
            {
                SetInvalidTime();
            }
        }

        // Reverts the text box to the last valid time (or clears it) unless the
        // consumer opted into keeping invalid text via IsInvalidTextAllowed.
        private void SetInvalidTime()
        {
            if (IsInvalidTextAllowed) return;

            if (_textBox is { } textBox)
            {
                if (_lastValidTime != null)
                {
                    textBox.Text = DateTimeToString(_lastValidTime.Value, DatePickerFormat.Short);
                }
                else
                {
                    SetCurrentValue(SelectedTimeProperty, null);
                    textBox.Text = "";
                }
            }
        }

        private void TextBoxOnKeyDown(object sender, KeyEventArgs keyEventArgs)
            => keyEventArgs.Handled = ProcessKey(keyEventArgs) || keyEventArgs.Handled;

        // Alt+Down toggles the popup; Enter commits the typed time.
        private bool ProcessKey(KeyEventArgs keyEventArgs)
        {
            switch (keyEventArgs.Key)
            {
                case Key.System:
                {
                    switch (keyEventArgs.SystemKey)
                    {
                        case Key.Down:
                        {
                            if ((Keyboard.Modifiers & ModifierKeys.Alt) == ModifierKeys.Alt)
                            {
                                TogglePopup();
                                return true;
                            }
                            break;
                        }
                    }
                    break;
                }
                case Key.Enter:
                {
                    SetSelectedTime();
                    return true;
                }
            }
            return false;
        }

        private void TextBoxOnTextChanged(object sender, TextChangedEventArgs textChangedEventArgs)
        {
            if (_textBox is { } textBox && (_popup?.IsOpen == true || IsInvalidTextAllowed))
            {
                _isManuallyMutatingText = true;
                SetCurrentValue(TextProperty, textBox.Text);
                _isManuallyMutatingText = false;
            }

            if (_popup?.IsOpen == false)
            {
                SetSelectedTime(true);
            }
        }

        private void UpdateTextBoxText(string? text)
        {
            // Save and restore the cursor position
            if (_textBox is { } textBox)
            {
                int caretIndex = textBox.CaretIndex;
                textBox.Text = text;
                textBox.CaretIndex = caretIndex;
            }
        }

        // Normalizes the text box content to the canonical format, but only if the
        // user has not typed something new since lastText was captured.
        private void UpdateTextBoxTextIfNeeded(string lastText)
        {
            if (_textBox?.Text == lastText)
            {
                string? formattedText = DateTimeToString(SelectedTime);
                if (formattedText != lastText)
                {
                    UpdateTextBoxText(formattedText);
                }
            }
        }

        // Applies a new time-of-day while preserving the current date component.
        private void SetSelectedTime(in DateTime time)
            => SetCurrentValue(SelectedTimeProperty, (SelectedTime?.Date ?? DateTime.Today).Add(time.TimeOfDay));

        // Parses the text box into SelectedTime. With beCautious, only commits when
        // the parsed value round-trips to exactly the typed text.
        private void SetSelectedTime(bool beCautious = false)
        {
            string? currentText = _textBox?.Text;
            if (!string.IsNullOrEmpty(currentText))
            {
                ParseTime(currentText!, t =>
                {
                    if (!beCautious || DateTimeToString(t) == currentText)
                    {
                        SetSelectedTime(t);
                    }
                    if (!beCautious)
                    {
                        UpdateTextBoxTextIfNeeded(currentText!);
                    }
                });
            }
            else
            {
                SetCurrentValue(SelectedTimeProperty, null);
            }
        }

        private void ParseTime(string s, Action<DateTime> successContinuation)
        {
            if (IsTimeValid(s, out DateTime time))
            {
                successContinuation(time);
            }
        }

        private bool IsTimeValid(string s, out DateTime time)
        {
            CultureInfo culture = Language.GetSpecificCulture();
            return DateTime.TryParse(s, culture,
                DateTimeStyles.AssumeLocal | DateTimeStyles.AllowWhiteSpaces | DateTimeStyles.NoCurrentDateDefault,
                out time);
        }

        private string? DateTimeToString(DateTime? d)
            => d.HasValue ? DateTimeToString(d.Value) : null;

        private string DateTimeToString(DateTime d)
            => DateTimeToString(d, SelectedTimeFormat);

        // Builds a culture-aware format string (12/24h, optional seconds, AM/PM
        // designator) and formats the given time with it.
        private string DateTimeToString(DateTime datetime, DatePickerFormat format)
        {
            CultureInfo culture = Language.GetSpecificCulture();
            DateTimeFormatInfo dtfi = culture.GetDateFormat();

            string hourFormatChar = Is24Hours ? "H" : "h";

            var sb = new StringBuilder();
            sb.Append(hourFormatChar);
            if (format == DatePickerFormat.Long)
            {
                sb.Append(hourFormatChar);
            }

            sb.Append(dtfi.TimeSeparator);
            sb.Append("mm");

            if (WithSeconds)
            {
                sb.Append(dtfi.TimeSeparator);
                sb.Append("ss");
            }

            if (!Is24Hours && (!string.IsNullOrEmpty(dtfi.AMDesignator) || !string.IsNullOrEmpty(dtfi.PMDesignator)))
            {
                sb.Append(" tt");
            }

            return datetime.ToString(sb.ToString(), culture);
        }

        private void PopupOnPreviewMouseLeftButtonDown(object sender, MouseButtonEventArgs mouseButtonEventArgs)
        {
            if (sender is not Popup popup || popup.StaysOpen) return;

            if (_dropDownButton?.InputHitTest(mouseButtonEventArgs.GetPosition(_dropDownButton)) != null)
            {
                // This popup is being closed by a mouse press on the drop down button
                // The following mouse release will cause the closed popup to immediately reopen.
                // Raise a flag to block re-opening the popup
                _disablePopupReopen = true;
            }
        }

        private void PopupOnClosed(object? sender, EventArgs eventArgs)
        {
            if (IsDropDownOpen)
            {
                SetCurrentValue(IsDropDownOpenProperty, false);
            }

            if (_clock.IsKeyboardFocusWithin)
            {
                MoveFocus(new TraversalRequest(FocusNavigationDirection.First));
            }

            //TODO Clock closed event
            //OnCalendarClosed(new RoutedEventArgs());
        }

        private void PopupOnOpened(object? sender, EventArgs eventArgs)
        {
            if (!IsDropDownOpen)
            {
                SetCurrentValue(IsDropDownOpenProperty, true);
            }

            if (_clock != null)
            {
                _clock.DisplayMode = ClockDisplayMode.Hours;
                _clock.MoveFocus(new TraversalRequest(FocusNavigationDirection.First));
            }

            //TODO ClockOpenedEvent
            //this.OnCalendarOpened(new RoutedEventArgs());
        }

        // Binds the hosted Clock and its host ContentControl to this picker's
        // properties so they stay in sync for the control's lifetime.
        private void InitializeClock()
        {
            _clock.AddHandler(Clock.ClockChoiceMadeEvent, new ClockChoiceMadeEventHandler(ClockChoiceMadeHandler));
            _clock.SetBinding(ForegroundProperty, GetBinding(ForegroundProperty));
            _clock.SetBinding(StyleProperty, GetBinding(ClockStyleProperty));
            _clock.SetBinding(Clock.TimeProperty, GetBinding(SelectedTimeProperty, new NullableDateTimeToCurrentDateConverter()));
            _clock.SetBinding(Clock.Is24HoursProperty, GetBinding(Is24HoursProperty));
            _clockHostContentControl.SetBinding(StyleProperty, GetBinding(ClockHostContentControlStyleProperty));
        }

        // Closes the popup once the final choice (minutes, or seconds when
        // WithSeconds) has been made on the clock face.
        private void ClockChoiceMadeHandler(object sender, ClockChoiceMadeEventArgs clockChoiceMadeEventArgs)
        {
            if (WithSeconds && clockChoiceMadeEventArgs.Mode == ClockDisplayMode.Seconds
                || !WithSeconds && clockChoiceMadeEventArgs.Mode == ClockDisplayMode.Minutes)
            {
                TogglePopup();
                if (SelectedTime == null)
                {
                    SelectedTime = _clock.Time;
                }
            }
        }

        private void DropDownButtonOnClick(object sender, RoutedEventArgs routedEventArgs)
            => TogglePopup();

        private void TogglePopup()
        {
            if (IsDropDownOpen)
            {
                SetCurrentValue(IsDropDownOpenProperty, false);
            }
            else
            {
                if (_disablePopupReopen)
                {
                    _disablePopupReopen = false;
                }
                else
                {
                    SetSelectedTime();
                    SetCurrentValue(IsDropDownOpenProperty, true);
                }
            }
        }

        // Creates a one-way-to-source binding from this picker's property,
        // optionally through a converter.
        private BindingBase GetBinding(DependencyProperty property, IValueConverter? converter = null)
        {
            var binding = new Binding(property.Name)
            {
                Source = this,
                Converter = converter
            };
            return binding;
        }
    }
}
// Copyright (C) 2014 dot42
//
// Original filename: Javax.Security.Auth.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// NOTE(review): Dot42-generated interop stubs (see the [Dot42.DexImport]
// attributes and the "/* MethodBuilder.Create */" markers). Method bodies only
// return default values; the real implementations are supplied by the imported
// Dex code at build time. Do not hand-edit logic in this file.

#pragma warning disable 1717

namespace Javax.Security.Auth
{
    /// <summary>
    /// <para>Legacy security code; do not use. </para>
    /// </summary>
    /// <java-name>
    /// javax/security/auth/PrivateCredentialPermission
    /// </java-name>
    [Dot42.DexImport("javax/security/auth/PrivateCredentialPermission", AccessFlags = 49)]
    public sealed partial class PrivateCredentialPermission : global::Java.Security.Permission /* scope: __dot42__ */
    {
        [Dot42.DexImport("<init>", "(Ljava/lang/String;Ljava/lang/String;)V", AccessFlags = 1)]
        public PrivateCredentialPermission(string name, string action) /* MethodBuilder.Create */
        {
        }

        /// <java-name>
        /// getPrincipals
        /// </java-name>
        [Dot42.DexImport("getPrincipals", "()[[Ljava/lang/String;", AccessFlags = 1)]
        public string[][] GetPrincipals() /* MethodBuilder.Create */
        {
            return default(string[][]);
        }

        /// <java-name>
        /// getCredentialClass
        /// </java-name>
        [Dot42.DexImport("getCredentialClass", "()Ljava/lang/String;", AccessFlags = 1)]
        public string GetCredentialClass() /* MethodBuilder.Create */
        {
            return default(string);
        }

        /// <java-name>
        /// getActions
        /// </java-name>
        [Dot42.DexImport("getActions", "()Ljava/lang/String;", AccessFlags = 1)]
        public override string GetActions() /* MethodBuilder.Create */
        {
            return default(string);
        }

        /// <java-name>
        /// implies
        /// </java-name>
        [Dot42.DexImport("implies", "(Ljava/security/Permission;)Z", AccessFlags = 1)]
        public override bool Implies(global::Java.Security.Permission permission) /* MethodBuilder.Create */
        {
            return default(bool);
        }

        [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
        internal PrivateCredentialPermission() /* TypeBuilder.AddDefaultConstructor */
        {
        }

        /// <java-name>
        /// getPrincipals
        /// </java-name>
        public string[][] Principals
        {
            [Dot42.DexImport("getPrincipals", "()[[Ljava/lang/String;", AccessFlags = 1)]
            get{ return GetPrincipals(); }
        }

        /// <java-name>
        /// getCredentialClass
        /// </java-name>
        public string CredentialClass
        {
            [Dot42.DexImport("getCredentialClass", "()Ljava/lang/String;", AccessFlags = 1)]
            get{ return GetCredentialClass(); }
        }

        /// <java-name>
        /// getActions
        /// </java-name>
        public string Actions
        {
            [Dot42.DexImport("getActions", "()Ljava/lang/String;", AccessFlags = 1)]
            get{ return GetActions(); }
        }
    }

    /// <summary>
    /// <para>The central class of the <c> javax.security.auth </c> package representing an
    /// authenticated user or entity (both referred to as "subject"). It also defines the
    /// static methods that allow code to be run, and do modifications according to the
    /// subject's permissions. </para>
    /// <para>A subject has the following features:
    /// <ul><li><para>A set of <c> Principal </c> objects specifying the identities bound to a
    /// <c> Subject </c> that distinguish it. </para></li>
    /// <li><para>Credentials (public and private) such as certificates, keys, or
    /// authentication proofs such as tickets </para></li></ul></para>
    /// </summary>
    /// <java-name>
    /// javax/security/auth/Subject
    /// </java-name>
    [Dot42.DexImport("javax/security/auth/Subject", AccessFlags = 49)]
    public sealed partial class Subject : global::Java.Io.ISerializable /* scope: __dot42__ */
    {
        /// <summary>
        /// <para>The default constructor initializing the sets of public and private
        /// credentials and principals with the empty set. </para>
        /// </summary>
        [Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
        public Subject() /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>The constructor for the subject, setting its public and private credentials
        /// and principals according to the arguments.</para><para></para>
        /// </summary>
        [Dot42.DexImport("<init>", "(ZLjava/util/Set;Ljava/util/Set;Ljava/util/Set;)V", AccessFlags = 1, Signature = "(ZLjava/util/Set<+Ljava/security/Principal;>;Ljava/util/Set<*>;Ljava/util/Set<*>;" + ")V")]
        public Subject(bool readOnly, global::Java.Util.ISet<global::Java.Security.IPrincipal> subjPrincipals, global::Java.Util.ISet<object> pubCredentials, global::Java.Util.ISet<object> privCredentials) /* MethodBuilder.Create */
        {
        }

        /// <java-name>
        /// doAs
        /// </java-name>
        [Dot42.DexImport("doAs", "(Ljavax/security/auth/Subject;Ljava/security/PrivilegedAction;)Ljava/lang/Object;" + "", AccessFlags = 9, Signature = "<T:Ljava/lang/Object;>(Ljavax/security/auth/Subject;Ljava/security/PrivilegedActi" + "on<TT;>;)TT;")]
        public static T DoAs<T>(global::Javax.Security.Auth.Subject subject, global::Java.Security.IPrivilegedAction<T> privilegedAction) /* MethodBuilder.Create */
        {
            return default(T);
        }

        /// <java-name>
        /// doAsPrivileged
        /// </java-name>
        [Dot42.DexImport("doAsPrivileged", "(Ljavax/security/auth/Subject;Ljava/security/PrivilegedAction;Ljava/security/Acce" + "ssControlContext;)Ljava/lang/Object;", AccessFlags = 9, Signature = "<T:Ljava/lang/Object;>(Ljavax/security/auth/Subject;Ljava/security/PrivilegedActi" + "on<TT;>;Ljava/security/AccessControlContext;)TT;")]
        public static T DoAsPrivileged<T>(global::Javax.Security.Auth.Subject subject, global::Java.Security.IPrivilegedAction<T> privilegedAction, global::Java.Security.AccessControlContext accessControlContext) /* MethodBuilder.Create */
        {
            return default(T);
        }

        /// <java-name>
        /// doAs
        /// </java-name>
        [Dot42.DexImport("doAs", "(Ljavax/security/auth/Subject;Ljava/security/PrivilegedExceptionAction;)Ljava/lan" + "g/Object;", AccessFlags = 9, Signature = "<T:Ljava/lang/Object;>(Ljavax/security/auth/Subject;Ljava/security/PrivilegedExce" + "ptionAction<TT;>;)TT;")]
        public static T DoAs<T>(global::Javax.Security.Auth.Subject subject, global::Java.Security.IPrivilegedExceptionAction<T> privilegedExceptionAction) /* MethodBuilder.Create */
        {
            return default(T);
        }

        /// <java-name>
        /// doAsPrivileged
        /// </java-name>
        [Dot42.DexImport("doAsPrivileged", "(Ljavax/security/auth/Subject;Ljava/security/PrivilegedExceptionAction;Ljava/secu" + "rity/AccessControlContext;)Ljava/lang/Object;", AccessFlags = 9, Signature = "<T:Ljava/lang/Object;>(Ljavax/security/auth/Subject;Ljava/security/PrivilegedExce" + "ptionAction<TT;>;Ljava/security/AccessControlContext;)TT;")]
        public static T DoAsPrivileged<T>(global::Javax.Security.Auth.Subject subject, global::Java.Security.IPrivilegedExceptionAction<T> privilegedExceptionAction, global::Java.Security.AccessControlContext accessControlContext) /* MethodBuilder.Create */
        {
            return default(T);
        }

        /// <summary>
        /// <para>Checks two Subjects for equality. More specifically if the principals, public
        /// and private credentials are equal, equality for two <c> Subjects </c> is
        /// implied.</para><para></para>
        /// </summary>
        /// <returns>
        /// <para><c> true </c> if the specified <c> Subject </c> is equal to this one. </para>
        /// </returns>
        /// <java-name>
        /// equals
        /// </java-name>
        [Dot42.DexImport("equals", "(Ljava/lang/Object;)Z", AccessFlags = 1)]
        public override bool Equals(object obj) /* MethodBuilder.Create */
        {
            return default(bool);
        }

        /// <summary>
        /// <para>Returns this <c> Subject </c> 's Principal.</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>this <c> Subject </c> 's Principal. </para>
        /// </returns>
        /// <java-name>
        /// getPrincipals
        /// </java-name>
        [Dot42.DexImport("getPrincipals", "()Ljava/util/Set;", AccessFlags = 1, Signature = "()Ljava/util/Set<Ljava/security/Principal;>;")]
        public global::Java.Util.ISet<global::Java.Security.IPrincipal> GetPrincipals() /* MethodBuilder.Create */
        {
            return default(global::Java.Util.ISet<global::Java.Security.IPrincipal>);
        }

        /// <summary>
        /// <para>Returns this <c> Subject </c> 's Principal which is a subclass of the
        /// <c> Class </c> provided.</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>this <c> Subject </c> 's Principal. Modifications to the returned set of
        /// <c> Principal </c> s do not affect this <c> Subject </c> 's set. </para>
        /// </returns>
        /// <java-name>
        /// getPrincipals
        /// </java-name>
        [Dot42.DexImport("getPrincipals", "(Ljava/lang/Class;)Ljava/util/Set;", AccessFlags = 1, Signature = "<T::Ljava/security/Principal;>(Ljava/lang/Class<TT;>;)Ljava/util/Set<TT;>;")]
        public global::Java.Util.ISet<T> GetPrincipals<T>(global::System.Type c) /* MethodBuilder.Create */
        {
            return default(global::Java.Util.ISet<T>);
        }

        /// <summary>
        /// <para>Returns the private credentials associated with this
        /// <c> Subject </c> .</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>the private credentials associated with this <c> Subject </c> . </para>
        /// </returns>
        /// <java-name>
        /// getPrivateCredentials
        /// </java-name>
        [Dot42.DexImport("getPrivateCredentials", "()Ljava/util/Set;", AccessFlags = 1, Signature = "()Ljava/util/Set<Ljava/lang/Object;>;")]
        public global::Java.Util.ISet<object> GetPrivateCredentials() /* MethodBuilder.Create */
        {
            return default(global::Java.Util.ISet<object>);
        }

        /// <summary>
        /// <para>Returns this <c> Subject </c> 's private credentials which are a subclass of
        /// the <c> Class </c> provided.</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>this <c> Subject </c> 's private credentials. Modifications to the returned
        /// set of credentials do not affect this <c> Subject </c> 's credentials. </para>
        /// </returns>
        /// <java-name>
        /// getPrivateCredentials
        /// </java-name>
        [Dot42.DexImport("getPrivateCredentials", "(Ljava/lang/Class;)Ljava/util/Set;", AccessFlags = 1, Signature = "<T:Ljava/lang/Object;>(Ljava/lang/Class<TT;>;)Ljava/util/Set<TT;>;")]
        public global::Java.Util.ISet<T> GetPrivateCredentials<T>(global::System.Type c) /* MethodBuilder.Create */
        {
            return default(global::Java.Util.ISet<T>);
        }

        /// <summary>
        /// <para>Returns the public credentials associated with this
        /// <c> Subject </c> .</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>the public credentials associated with this <c> Subject </c> . </para>
        /// </returns>
        /// <java-name>
        /// getPublicCredentials
        /// </java-name>
        [Dot42.DexImport("getPublicCredentials", "()Ljava/util/Set;", AccessFlags = 1, Signature = "()Ljava/util/Set<Ljava/lang/Object;>;")]
        public global::Java.Util.ISet<object> GetPublicCredentials() /* MethodBuilder.Create */
        {
            return default(global::Java.Util.ISet<object>);
        }

        /// <summary>
        /// <para>Returns this <c> Subject </c> 's public credentials which are a subclass of
        /// the <c> Class </c> provided.</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>this <c> Subject </c> 's public credentials. Modifications to the returned
        /// set of credentials do not affect this <c> Subject </c> 's credentials. </para>
        /// </returns>
        /// <java-name>
        /// getPublicCredentials
        /// </java-name>
        [Dot42.DexImport("getPublicCredentials", "(Ljava/lang/Class;)Ljava/util/Set;", AccessFlags = 1, Signature = "<T:Ljava/lang/Object;>(Ljava/lang/Class<TT;>;)Ljava/util/Set<TT;>;")]
        public global::Java.Util.ISet<T> GetPublicCredentials<T>(global::System.Type c) /* MethodBuilder.Create */
        {
            return default(global::Java.Util.ISet<T>);
        }

        /// <summary>
        /// <para>Returns a hash code of this <c> Subject </c> .</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>a hash code of this <c> Subject </c> . </para>
        /// </returns>
        /// <java-name>
        /// hashCode
        /// </java-name>
        [Dot42.DexImport("hashCode", "()I", AccessFlags = 1)]
        public override int GetHashCode() /* MethodBuilder.Create */
        {
            return default(int);
        }

        /// <summary>
        /// <para>Prevents from modifications being done to the credentials and Principal sets.
        /// After setting it to read-only this <c> Subject </c> can not be made writable again.
        /// The destroy method on the credentials still works though. </para>
        /// </summary>
        /// <java-name>
        /// setReadOnly
        /// </java-name>
        [Dot42.DexImport("setReadOnly", "()V", AccessFlags = 1)]
        public void SetReadOnly() /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Returns whether this <c> Subject </c> is read-only or
        /// not.</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>whether this <c> Subject </c> is read-only or not. </para>
        /// </returns>
        /// <java-name>
        /// isReadOnly
        /// </java-name>
        [Dot42.DexImport("isReadOnly", "()Z", AccessFlags = 1)]
        public bool IsReadOnly() /* MethodBuilder.Create */
        {
            return default(bool);
        }

        /// <summary>
        /// <para>Returns a <c> String </c> representation of this
        /// <c> Subject </c> .</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>a <c> String </c> representation of this <c> Subject </c> . </para>
        /// </returns>
        /// <java-name>
        /// toString
        /// </java-name>
        [Dot42.DexImport("toString", "()Ljava/lang/String;", AccessFlags = 1)]
        public override string ToString() /* MethodBuilder.Create */
        {
            return default(string);
        }

        /// <summary>
        /// <para>Returns the <c> Subject </c> that was last associated with the
        /// <c> context </c> provided as argument.</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>the <c> Subject </c> that was last associated with the <c> context </c>
        /// provided as argument. </para>
        /// </returns>
        /// <java-name>
        /// getSubject
        /// </java-name>
        [Dot42.DexImport("getSubject", "(Ljava/security/AccessControlContext;)Ljavax/security/auth/Subject;", AccessFlags = 9)]
        public static global::Javax.Security.Auth.Subject GetSubject(global::Java.Security.AccessControlContext context) /* MethodBuilder.Create */
        {
            return default(global::Javax.Security.Auth.Subject);
        }

        /// <summary>
        /// <para>Returns this <c> Subject </c> 's Principal.</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>this <c> Subject </c> 's Principal. </para>
        /// </returns>
        /// <java-name>
        /// getPrincipals
        /// </java-name>
        public global::Java.Util.ISet<global::Java.Security.IPrincipal> Principals
        {
            [Dot42.DexImport("getPrincipals", "()Ljava/util/Set;", AccessFlags = 1, Signature = "()Ljava/util/Set<Ljava/security/Principal;>;")]
            get{ return GetPrincipals(); }
        }

        /// <summary>
        /// <para>Returns the private credentials associated with this
        /// <c> Subject </c> .</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>the private credentials associated with this <c> Subject </c> . </para>
        /// </returns>
        /// <java-name>
        /// getPrivateCredentials
        /// </java-name>
        public global::Java.Util.ISet<object> PrivateCredentials
        {
            [Dot42.DexImport("getPrivateCredentials", "()Ljava/util/Set;", AccessFlags = 1, Signature = "()Ljava/util/Set<Ljava/lang/Object;>;")]
            get{ return GetPrivateCredentials(); }
        }

        /// <summary>
        /// <para>Returns the public credentials associated with this
        /// <c> Subject </c> .</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>the public credentials associated with this <c> Subject </c> . </para>
        /// </returns>
        /// <java-name>
        /// getPublicCredentials
        /// </java-name>
        public global::Java.Util.ISet<object> PublicCredentials
        {
            [Dot42.DexImport("getPublicCredentials", "()Ljava/util/Set;", AccessFlags = 1, Signature = "()Ljava/util/Set<Ljava/lang/Object;>;")]
            get{ return GetPublicCredentials(); }
        }
    }

    /// <summary>
    /// <para>Signals that the Destroyable#destroy() method failed. </para>
    /// </summary>
    /// <java-name>
    /// javax/security/auth/DestroyFailedException
    /// </java-name>
    [Dot42.DexImport("javax/security/auth/DestroyFailedException", AccessFlags = 33)]
    public partial class DestroyFailedException : global::System.Exception /* scope: __dot42__ */
    {
        /// <summary>
        /// <para>Creates an exception of type <c> DestroyFailedException </c> . </para>
        /// </summary>
        [Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
        public DestroyFailedException() /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Creates an exception of type
        /// <c> DestroyFailedException </c> .</para><para></para>
        /// </summary>
        [Dot42.DexImport("<init>", "(Ljava/lang/String;)V", AccessFlags = 1)]
        public DestroyFailedException(string message) /* MethodBuilder.Create */
        {
        }
    }

    /// <summary>
    /// <para>Legacy security code; do not use. </para>
    /// </summary>
    /// <java-name>
    /// javax/security/auth/AuthPermission
    /// </java-name>
    [Dot42.DexImport("javax/security/auth/AuthPermission", AccessFlags = 49)]
    public sealed partial class AuthPermission : global::Java.Security.BasicPermission /* scope: __dot42__ */
    {
        [Dot42.DexImport("<init>", "(Ljava/lang/String;)V", AccessFlags = 1)]
        public AuthPermission(string name) /* MethodBuilder.Create */
        {
        }

        [Dot42.DexImport("<init>", "(Ljava/lang/String;Ljava/lang/String;)V", AccessFlags = 1)]
        public AuthPermission(string name, string actions) /* MethodBuilder.Create */
        {
        }

        /// <java-name>
        /// getActions
        /// </java-name>
        [Dot42.DexImport("getActions", "()Ljava/lang/String;", AccessFlags = 1)]
        public override string GetActions() /* MethodBuilder.Create */
        {
            return default(string);
        }

        /// <java-name>
        /// implies
        /// </java-name>
        [Dot42.DexImport("implies", "(Ljava/security/Permission;)Z", AccessFlags = 1)]
        public override bool Implies(global::Java.Security.Permission permission) /* MethodBuilder.Create */
        {
            return default(bool);
        }

        [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
        internal AuthPermission() /* TypeBuilder.AddDefaultConstructor */
        {
        }

        /// <java-name>
        /// getActions
        /// </java-name>
        public string Actions
        {
            [Dot42.DexImport("getActions", "()Ljava/lang/String;", AccessFlags = 1)]
            get{ return GetActions(); }
        }
    }

    /// <summary>
    /// <para>Legacy security code; do not use. </para>
    /// </summary>
    /// <java-name>
    /// javax/security/auth/SubjectDomainCombiner
    /// </java-name>
    [Dot42.DexImport("javax/security/auth/SubjectDomainCombiner", AccessFlags = 33)]
    public partial class SubjectDomainCombiner : global::Java.Security.IDomainCombiner /* scope: __dot42__ */
    {
        [Dot42.DexImport("<init>", "(Ljavax/security/auth/Subject;)V", AccessFlags = 1)]
        public SubjectDomainCombiner(global::Javax.Security.Auth.Subject subject) /* MethodBuilder.Create */
        {
        }

        /// <java-name>
        /// getSubject
        /// </java-name>
        [Dot42.DexImport("getSubject", "()Ljavax/security/auth/Subject;", AccessFlags = 1)]
        public virtual global::Javax.Security.Auth.Subject GetSubject() /* MethodBuilder.Create */
        {
            return default(global::Javax.Security.Auth.Subject);
        }

        /// <summary>
        /// <para>Returns a combination of the two provided <c> ProtectionDomain </c> arrays.
        /// Implementers can simply merge the two arrays into one, remove duplicates and
        /// perform other optimizations.</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>a single <c> ProtectionDomain </c> array computed from the two provided
        /// arrays. </para>
        /// </returns>
        /// <java-name>
        /// combine
        /// </java-name>
        [Dot42.DexImport("combine", "([Ljava/security/ProtectionDomain;[Ljava/security/ProtectionDomain;)[Ljava/securi" + "ty/ProtectionDomain;", AccessFlags = 1)]
        public virtual global::Java.Security.ProtectionDomain[] Combine(global::Java.Security.ProtectionDomain[] current, global::Java.Security.ProtectionDomain[] assigned) /* MethodBuilder.Create */
        {
            return default(global::Java.Security.ProtectionDomain[]);
        }

        [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
        internal SubjectDomainCombiner() /* TypeBuilder.AddDefaultConstructor */
        {
        }

        /// <java-name>
        /// getSubject
        /// </java-name>
        public global::Javax.Security.Auth.Subject Subject
        {
            [Dot42.DexImport("getSubject", "()Ljavax/security/auth/Subject;", AccessFlags = 1)]
            get{ return GetSubject(); }
        }
    }

    /// <summary>
    /// <para>Allows for special treatment of sensitive information, when it comes to
    /// destroying or clearing of the data. </para>
    /// </summary>
    /// <java-name>
    /// javax/security/auth/Destroyable
    /// </java-name>
    [Dot42.DexImport("javax/security/auth/Destroyable", AccessFlags = 1537)]
    public partial interface IDestroyable /* scope: __dot42__ */
    {
        /// <summary>
        /// <para>Erases the sensitive information. Once an object is destroyed any calls to
        /// its methods will throw an <c> IllegalStateException </c> . If it does not succeed
        /// a DestroyFailedException is thrown.</para><para></para>
        /// </summary>
        /// <java-name>
        /// destroy
        /// </java-name>
        [Dot42.DexImport("destroy", "()V", AccessFlags = 1025)]
        void Destroy() /* MethodBuilder.Create */ ;

        /// <summary>
        /// <para>Returns <c> true </c> once an object has been safely
        /// destroyed.</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>whether the object has been safely destroyed. </para>
        /// </returns>
        /// <java-name>
        /// isDestroyed
        /// </java-name>
        [Dot42.DexImport("isDestroyed", "()Z", AccessFlags = 1025)]
        bool IsDestroyed() /* MethodBuilder.Create */ ;
    }
}
/* Copyright (c) 2013 DEVSENSE The use and distribution terms for this software are contained in the file named License.txt, which can be found in the root of the Phalanger distribution. By using this software in any fashion, you are agreeing to be bound by the terms of this license. You must not remove this notice from this software. */ using System; using System.IO; using System.Text; using System.Collections.Generic; using System.Reflection; using System.Reflection.Emit; using System.Diagnostics; using System.Collections; using PHP.Core.AST; using PHP.Core.Emit; using PHP.Core.Parsers; using PHP.Core.Reflection; namespace PHP.Core.Compiler.AST { partial class NodeCompilers { #region ActualParam [NodeCompiler(typeof(ActualParam), Singleton = true)] sealed class ActualParamCompiler : INodeCompiler, IActualParamCompiler { public void Analyze(ActualParam/*!*/node, Analyzer/*!*/ analyzer, bool isBaseCtorCallConstrained) { // TODO: isBaseCtorCallConstrained ExInfoFromParent info = new ExInfoFromParent(node); analyzer.EnterActParam(); if (node.IsVariadic) throw new NotImplementedException(); if (analyzer.ActParamDeclIsUnknown()) { // we don't know whether the parameter will be passed by reference at run-time: if (node.Expression.AllowsPassByReference) { info.Access = AccessType.ReadUnknown; // Although we prepare to pass reference, value can be really passed. // That's why we report warning when user use '&' in calling, // because it has no influence. 
if (node.Ampersand) analyzer.ErrorSink.Add(Warnings.ActualParamWithAmpersand, analyzer.SourceUnit, node.Span); } else { info.Access = AccessType.Read; } } else { if (analyzer.ActParamPassedByRef()) { if (node.Expression.AllowsPassByReference) { info.Access = AccessType.ReadRef; } else { analyzer.ErrorSink.Add(Errors.NonVariablePassedByRef, analyzer.SourceUnit, node.Expression.Span); analyzer.LeaveActParam(); return; } } else { info.Access = AccessType.Read; if (node.Ampersand) analyzer.ErrorSink.Add(Warnings.ActualParamWithAmpersand, analyzer.SourceUnit, node.Span); } } node._expression = node.Expression.Analyze(analyzer, info).Literalize(); // TODO: if signature is known, act. param has type hint and expression has known type; check if type hint matches expression analyzer.LeaveActParam(); } public PhpTypeCode Emit(ActualParam/*!*/node, CodeGenerator/*!*/ codeGenerator, bool ensureChainWritable = false) { codeGenerator.ChainBuilder.Create(); if (ensureChainWritable) codeGenerator.ChainBuilder.EnsureWritable = true; try { return node.Expression.Emit(codeGenerator); } finally { codeGenerator.ChainBuilder.End(); } } } #endregion #region NamedActualParam [NodeCompiler(typeof(NamedActualParam))] public sealed class NamedActualParamCompiler : INodeCompiler, INamedActualParamCompiler { public DProperty Property { get { return property; } } private DProperty property; public void Analyze(NamedActualParam/*!*/node, Analyzer/*!*/ analyzer, DType/*!*/ propertiesDeclarer) { // TODO: Named parameters can target the non-static, public, and read-write fields // or properties of the attribute class bool visibility_check; if (!propertiesDeclarer.IsUnknown) { property = analyzer.ResolveProperty(propertiesDeclarer, node.Name, node.Span, false, null, null, out visibility_check); } node.expression = node.Expression.Analyze(analyzer, ExInfoFromParent.DefaultExInfo).Literalize(); } } #endregion #region CallSignature [NodeCompiler(typeof(CallSignature), Singleton = true)] sealed class 
CallSignatureCompiler : INodeCompiler, ICallSignatureCompiler { /// <summary> /// Gets true if all the Parameters (after the analysis) have the value and could be evaluated during the compilation time. /// </summary> public bool AllParamsHaveValue(CallSignature/*!*/node) { foreach (var p in node.Parameters) if (!p.Expression.HasValue()) return false; return true; } public void Analyze(CallSignature/*!*/node, Analyzer/*!*/ analyzer, RoutineSignature/*!*/ signature, ExInfoFromParent info, bool isBaseCtorCallConstrained) { // generic: foreach (var p in node.GenericParams) TypeRefHelper.Analyze(p, analyzer); // regular: analyzer.EnterActualParams(signature, node.Parameters.Length); foreach (var p in node.Parameters) p.NodeCompiler<ActualParamCompiler>().Analyze(p, analyzer, isBaseCtorCallConstrained); analyzer.LeaveActualParams(); } /// <summary> /// Builds <see cref="ArrayEx"/> with call signature parameters. /// </summary> /// <returns></returns> public ArrayEx/*!*/BuildPhpArray(CallSignature/*!*/node) { Debug.Assert(node.GenericParams.Empty()); List<Item> arrayItems = new List<Item>(node.Parameters.Length); var pos = Text.Span.Invalid; foreach (var p in node.Parameters) { arrayItems.Add(new ValueItem(null, p.Expression)); if (pos.IsValid) pos = p.Span; else pos = Text.Span.FromBounds(pos.Start, p.Span.End); } return new ArrayEx(pos, arrayItems); } #region Emission /// <summary> /// Emits IL instructions that load actual parameters and optionally add a new stack frame to /// current <see cref="PHP.Core.ScriptContext.Stack"/>. /// </summary> /// <param name="node">Instance.</param> /// <param name="codeGenerator">Code generator.</param> /// <remarks> /// Nothing is expected on the evaluation stack. Nothing is left on the evaluation stack. 
/// </remarks> public void EmitLoadOnPhpStack(CallSignature/*!*/node, CodeGenerator/*!*/ codeGenerator) { var parameters = node.Parameters; var genericParams = node.GenericParams; PhpStackBuilder.EmitAddFrame(codeGenerator.IL, codeGenerator.ScriptContextPlace, genericParams.Length, parameters.Length, delegate(ILEmitter il, int i) { // generic arguments: genericParams[i].EmitLoadTypeDesc(codeGenerator, ResolveTypeFlags.UseAutoload | ResolveTypeFlags.ThrowErrors); }, delegate(ILEmitter il, int i) { // regular arguments: var p = parameters[i]; codeGenerator.EmitBoxing(p.NodeCompiler<ActualParamCompiler>().Emit(p, codeGenerator)); } ); } /// <summary> /// Emits IL instructions that load actual parameters on the evaluation stack. /// </summary> /// <param name="node">Instance.</param> /// <param name="codeGenerator">Code generator.</param> /// <param name="routine">PHP method being called.</param> /// <remarks> /// <para> /// The function has mandatory and optional formal arguments. /// Mandatory arguments are those formal arguments which are not preceded by /// any formal argument with default value. The others are optional. /// If a formal argument without default value is declared beyond the last mandatory argument /// it is treated as optional one by the caller. The callee checks this and throws warning. 
/// </para> /// Missing arguments handling: /// <list type="bullet"> /// <item>missing mandatory argument - WARNING; LOAD(null);</item> /// <item>missing optional argument - LOAD(Arg.Default);</item> /// <item>superfluous arguments are ignored</item> /// </list> /// </remarks> public void EmitLoadOnEvalStack(CallSignature/*!*/node, CodeGenerator/*!*/ codeGenerator, PhpRoutine/*!*/ routine) { EmitLoadTypeArgsOnEvalStack(node, codeGenerator, routine); EmitLoadArgsOnEvalStack(node, codeGenerator, routine); } internal void EmitLoadTypeArgsOnEvalStack(CallSignature/*!*/node, CodeGenerator/*!*/ codeGenerator, PhpRoutine/*!*/ routine) { ILEmitter il = codeGenerator.IL; int mandatory_count = (routine.Signature != null) ? routine.Signature.MandatoryGenericParamCount : 0; int formal_count = (routine.Signature != null) ? routine.Signature.GenericParamCount : 0; int actual_count = node.GenericParams.Length; // loads all actual parameters which are not superfluous: for (int i = 0; i < Math.Min(actual_count, formal_count); i++) node.GenericParams[i].EmitLoadTypeDesc(codeGenerator, ResolveTypeFlags.UseAutoload | ResolveTypeFlags.ThrowErrors); // loads missing mandatory arguments: for (int i = actual_count; i < mandatory_count; i++) { // CALL PhpException.MissingTypeArgument(<i+1>,<name>); il.LdcI4(i + 1); il.Emit(OpCodes.Ldstr, routine.FullName); codeGenerator.EmitPhpException(Methods.PhpException.MissingTypeArgument); // LOAD DTypeDesc.ObjectTypeDesc; il.Emit(OpCodes.Ldsfld, Fields.DTypeDesc.ObjectTypeDesc); } // loads missing optional arguments: for (int i = Math.Max(mandatory_count, actual_count); i < formal_count; i++) { // LOAD Arg.DefaultType; il.Emit(OpCodes.Ldsfld, Fields.Arg_DefaultType); } } internal void EmitLoadArgsOnEvalStack(CallSignature/*!*/node, CodeGenerator/*!*/ codeGenerator, PhpRoutine/*!*/ routine) { ILEmitter il = codeGenerator.IL; int mandatory_count = (routine.Signature != null) ? 
routine.Signature.MandatoryParamCount : 0; int formal_count = (routine.Signature != null) ? routine.Signature.ParamCount : 0; int actual_count = node.Parameters.Length; PhpTypeCode param_type; // loads all actual parameters which are not superfluous: for (int i = 0; i < Math.Min(actual_count, formal_count); i++) { var p = node.Parameters[i]; codeGenerator.EmitBoxing(param_type = p.NodeCompiler<ActualParamCompiler>().Emit(p, codeGenerator)); // Actual param emitter should emit "boxing" to a reference if its access type is ReadRef. // That's why no operation is needed here and references should match. Debug.Assert((routine.Signature == null || routine.Signature.IsAlias(i)) == (param_type == PhpTypeCode.PhpReference)); } // loads missing mandatory arguments: for (int i = actual_count; i < mandatory_count; i++) { // CALL PhpException.MissingArgument(<i+1>,<name>); il.LdcI4(i + 1); il.Emit(OpCodes.Ldstr, routine.FullName); codeGenerator.EmitPhpException(Methods.PhpException.MissingArgument); // LOAD null; if (routine.Signature.IsAlias(i)) il.Emit(OpCodes.Newobj, Constructors.PhpReference_Void); else il.Emit(OpCodes.Ldnull); } // loads missing optional arguments: for (int i = Math.Max(mandatory_count, actual_count); i < formal_count; i++) { // LOAD Arg.Default; il.Emit(OpCodes.Ldsfld, Fields.Arg_Default); } } /// <summary> /// Emits parameter loading. 
/// </summary> /// <param name="node">Instance.</param> /// <param name="il">Emitter.</param> /// <param name="index">The index of the parameter starting from 0.</param> /// <param name="codeGenerator">Code generator.</param> /// <param name="param">Target <see cref="ParameterInfo"/>.</param> /// <returns>The type of the actual argument or its value if it is a leteral.</returns> public object EmitLibraryLoadArgument(CallSignature/*!*/node, ILEmitter/*!*/ il, int index, object/*!*/ codeGenerator, ParameterInfo param) { Debug.Assert(codeGenerator != null); Debug.Assert(index < node.Parameters.Length, "Missing arguments prevents code generation"); // returns value if the parameter is evaluable at compile time: if (node.Parameters[index].Expression.HasValue()) return node.Parameters[index].Expression.GetValue(); // emits parameter evaluation: var p = node.Parameters[index]; return PhpTypeCodeEnum.ToType(p.NodeCompiler<ActualParamCompiler>().Emit(p, (CodeGenerator)codeGenerator, PhpRwAttribute.IsDefined(param))); } /// <summary> /// Emits load of optional parameters array on the evaluation stack. /// </summary> /// <param name="node">Instance.</param> /// <param name="builder">An overloads builder.</param> /// <param name="start">An index of the first optional parameter to be loaded into the array (indices start from 0).</param> /// <param name="param"> /// A <see cref="ParameterInfo"/> of the formal parameter of the target method where the array will be passed. /// This information influences conversions all optional parameters. 
/// </param>
/// <param name="optArgCount">Optional argument count (unused).</param>
public void EmitLibraryLoadOptArguments(CallSignature/*!*/node, OverloadsBuilder/*!*/ builder, int start, ParameterInfo/*!*/ param, IPlace optArgCount)
{
    Debug.Assert(start >= 0 && builder != null && param != null && builder.Aux is CodeGenerator);

    ILEmitter il = builder.IL;

    // Element type of the array expected by the target formal parameter
    // (param.ParameterType is an array type here; assumed — TODO confirm against callers):
    Type elem_type = param.ParameterType.GetElementType();
    // NOTE(review): the original also computed elem_type.MakeArrayType() into an unused
    // local (array_type); that dead code has been removed.

    // NEW <elem_type>[<parameters count - start>]
    il.LdcI4(node.Parameters.Length - start);
    il.Emit(OpCodes.Newarr, elem_type);

    // loads each optional parameter into the appropriate bucket of the array:
    for (int i = start; i < node.Parameters.Length; i++)
    {
        // <arr>[i - start]
        il.Emit(OpCodes.Dup);
        il.LdcI4(i - start);

        // <parameter value>
        object type_or_value = EmitLibraryLoadArgument(node, il, i, builder.Aux, param);
        builder.EmitArgumentConversion(elem_type, type_or_value, false, param, 3);

        // <arr>[i - start] = <parameter value>;
        il.Stelem(elem_type);
    }

    // <arr>
}

#endregion
}

#endregion
}

#region INamedActualParamCompiler

/// <summary>
/// Compile-time services for a named actual parameter.
/// </summary>
internal interface INamedActualParamCompiler
{
    DProperty Property { get; }
    void Analyze(NamedActualParam/*!*/node, Analyzer/*!*/ analyzer, DType/*!*/ propertiesDeclarer);
}

/// <summary>
/// Extension-method facade delegating to the node's <see cref="INamedActualParamCompiler"/>.
/// </summary>
internal static class NamedActualParamCompilerHelper
{
    public static DProperty GetProperty(this NamedActualParam node)
    {
        return node.NodeCompiler<INamedActualParamCompiler>().Property;
    }

    public static void Analyze(this NamedActualParam/*!*/node, Analyzer/*!*/ analyzer, DType/*!*/ propertiesDeclarer)
    {
        node.NodeCompiler<INamedActualParamCompiler>().Analyze(node, analyzer, propertiesDeclarer);
    }
}

#endregion

#region IActualParamCompiler

/// <summary>
/// Compile-time services for an actual parameter of a call.
/// </summary>
internal interface IActualParamCompiler
{
    PhpTypeCode Emit(ActualParam/*!*/node, CodeGenerator/*!*/ codeGenerator, bool ensureChainWritable);
}

/// <summary>
/// Extension-method facade delegating to the node's <see cref="IActualParamCompiler"/>.
/// </summary>
internal static class ActualParamCompilerHelper
{
    public static PhpTypeCode Emit(this ActualParam/*!*/node, CodeGenerator/*!*/ codeGenerator, bool ensureChainWritable = false)
    {
        return node.NodeCompiler<IActualParamCompiler>().Emit(node, codeGenerator, ensureChainWritable);
    }
}

#endregion

#region ICallSignatureCompiler

/// <summary>
/// Compile-time services for a call signature: analysis and IL emission of arguments.
/// </summary>
internal interface ICallSignatureCompiler
{
    bool AllParamsHaveValue(CallSignature/*!*/node);
    ArrayEx/*!*/BuildPhpArray(CallSignature/*!*/node);
    void Analyze(CallSignature/*!*/node, Analyzer/*!*/ analyzer, RoutineSignature/*!*/ signature, ExInfoFromParent info, bool isBaseCtorCallConstrained);
    void EmitLoadOnPhpStack(CallSignature/*!*/node, CodeGenerator/*!*/ codeGenerator);
    void EmitLoadOnEvalStack(CallSignature/*!*/node, CodeGenerator/*!*/ codeGenerator, PhpRoutine/*!*/ routine);
    object EmitLibraryLoadArgument(CallSignature/*!*/node, ILEmitter/*!*/ il, int index, object/*!*/ codeGenerator, ParameterInfo param);
    void EmitLibraryLoadOptArguments(CallSignature/*!*/node, OverloadsBuilder/*!*/ builder, int start, ParameterInfo/*!*/ param, IPlace optArgCount);
}

/// <summary>
/// Extension-method facade delegating to the node's <see cref="ICallSignatureCompiler"/>.
/// </summary>
internal static class CallSignatureHelpers
{
    public static bool AllParamsHaveValue(this CallSignature/*!*/node)
    {
        return node.NodeCompiler<ICallSignatureCompiler>().AllParamsHaveValue(node);
    }

    public static ArrayEx/*!*/BuildPhpArray(this CallSignature/*!*/node)
    {
        return node.NodeCompiler<ICallSignatureCompiler>().BuildPhpArray(node);
    }

    public static void Analyze(this CallSignature/*!*/node, Analyzer/*!*/ analyzer, RoutineSignature/*!*/ signature, ExInfoFromParent info, bool isBaseCtorCallConstrained)
    {
        node.NodeCompiler<ICallSignatureCompiler>().Analyze(node, analyzer, signature, info, isBaseCtorCallConstrained);
    }

    public static void EmitLoadOnPhpStack(this CallSignature/*!*/node, CodeGenerator/*!*/ codeGenerator)
    {
        node.NodeCompiler<ICallSignatureCompiler>().EmitLoadOnPhpStack(node, codeGenerator);
    }

    public static void EmitLoadOnEvalStack(this CallSignature/*!*/node, CodeGenerator/*!*/ codeGenerator, PhpRoutine/*!*/ routine)
    {
        node.NodeCompiler<ICallSignatureCompiler>().EmitLoadOnEvalStack(node, codeGenerator, routine);
    }

    public static object EmitLibraryLoadArgument(this CallSignature/*!*/node, ILEmitter/*!*/ il, int index, object/*!*/ codeGenerator, ParameterInfo param)
    {
        return node.NodeCompiler<ICallSignatureCompiler>().EmitLibraryLoadArgument(node, il, index, codeGenerator, param);
    }

    public static void EmitLibraryLoadOptArguments(this CallSignature/*!*/node, OverloadsBuilder/*!*/ builder, int start, ParameterInfo/*!*/ param, IPlace optArgCount)
    {
        node.NodeCompiler<ICallSignatureCompiler>().EmitLibraryLoadOptArguments(node, builder, start, param, optArgCount);
    }
}

#endregion
}
// NUnit tests for recurring VTODO (to-do) handling in Ical.Net: each case loads an
// .ics fixture (IcsFiles.TodoN) and asserts IsActive()/IsCompleted() for a series of
// date-times, all evaluated in the US-Eastern time zone.
using System.Collections.Generic; using System.Linq; using Ical.Net.DataTypes; using NUnit.Framework; using NUnit.Framework.Interfaces; namespace Ical.Net.FrameworkUnitTests { [TestFixture] public class TodoTest { private const string _tzid = "US-Eastern"; [Test, TestCaseSource(nameof(ActiveTodo_TestCases)), Category("Todo")] public void ActiveTodo_Tests(string calendarString, IList<KeyValuePair<CalDateTime, bool>> incoming) { var iCal = Calendar.Load(calendarString); ProgramTest.TestCal(iCal); var todo = iCal.Todos; foreach (var calDateTime in incoming) { var dt = calDateTime.Key; dt.TzId = _tzid; Assert.AreEqual(calDateTime.Value, todo[0].IsActive(dt)); } } public static IEnumerable<ITestCaseData> ActiveTodo_TestCases() { var testVals = new List<KeyValuePair<CalDateTime, bool>> { new KeyValuePair<CalDateTime, bool>(new CalDateTime(2200, 12, 31, 0, 0, 0), true) }; yield return new TestCaseData(IcsFiles.Todo1, testVals) .SetName("Todo1"); testVals = new List<KeyValuePair<CalDateTime, bool>> { new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 28, 8, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 28, 8, 59, 59), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 28, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2200, 12, 31, 0, 0, 0), true), }; yield return new TestCaseData(IcsFiles.Todo2, testVals) .SetName("Todo2"); testVals = new List<KeyValuePair<CalDateTime, bool>> { new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 28, 8, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2200, 12, 31, 0, 0, 0), false), }; yield return new TestCaseData(IcsFiles.Todo3, testVals).SetName("Todo3"); testVals = new List<KeyValuePair<CalDateTime, bool>> { new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 28, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 29, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 
// (Todo5 expectations continue: the to-do first becomes active on 2006-08-04 09:00)
30, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 31, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 1, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 2, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 3, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 4, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 5, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 6, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 7, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 8, 9, 0, 0), true), }; yield return new TestCaseData(IcsFiles.Todo5, testVals).SetName("Todo5"); testVals = new List<KeyValuePair<CalDateTime, bool>> { new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 28, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 29, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 30, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 31, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 1, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 2, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 3, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 4, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 5, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 6, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 7, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 8, 9, 0, 0), true), }; yield return new TestCaseData(IcsFiles.Todo6, testVals).SetName("Todo6"); testVals = new List<KeyValuePair<CalDateTime, 
// (Todo7 expectations: inactive through 2006-08-31, active from 2006-09-01 09:00)
bool>> { new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 28, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 29, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 30, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 31, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 1, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 2, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 3, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 4, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 5, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 6, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 30, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 31, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 9, 1, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 9, 2, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 9, 3, 9, 0, 0), true), }; yield return new TestCaseData(IcsFiles.Todo7, testVals).SetName("Todo7"); testVals = new List<KeyValuePair<CalDateTime, bool>> { new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 28, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 29, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 30, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 31, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 1, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 2, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 3, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new 
// (Todo8 expectations continue: first active occurrence is 2007-02-02 09:00)
CalDateTime(2006, 8, 4, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 5, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 6, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 30, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 31, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 9, 1, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 9, 2, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 9, 3, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 10, 10, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 11, 15, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 12, 5, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2007, 1, 3, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2007, 1, 4, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2007, 1, 5, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2007, 1, 6, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2007, 1, 7, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2007, 2, 1, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2007, 2, 2, 8, 59, 59), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2007, 2, 2, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2007, 2, 3, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2007, 2, 4, 9, 0, 0), true), }; yield return new TestCaseData(IcsFiles.Todo8, testVals).SetName("Todo8"); testVals = new List<KeyValuePair<CalDateTime, bool>> { new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 28, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 29, 9, 0, 0), false), new 
// (Todo9 expectations continue: active from 2006-09-08 09:00)
KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 7, 30, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 17, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 18, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 19, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 9, 7, 9, 0, 0), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 9, 8, 8, 59, 59), false), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 9, 8, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 9, 9, 9, 0, 0), true), }; yield return new TestCaseData(IcsFiles.Todo9, testVals).SetName("Todo9"); } [Test, TestCaseSource(nameof(CompletedTodo_TestCases)), Category("Todo")] public void CompletedTodo_Tests(string calendarString, IList<KeyValuePair<CalDateTime, bool>> incoming) { var iCal = Calendar.Load(calendarString); ProgramTest.TestCal(iCal); var todo = iCal.Todos; foreach (var calDateTime in incoming) { var dt = calDateTime.Key; dt.TzId = _tzid; Assert.AreEqual(calDateTime.Value, todo[0].IsCompleted(dt)); } } public static IEnumerable<ITestCaseData> CompletedTodo_TestCases() { var testVals = new List<KeyValuePair<CalDateTime, bool>> { new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 07, 28, 8, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 07, 28, 9, 0, 0), true), new KeyValuePair<CalDateTime, bool>(new CalDateTime(2006, 8, 1, 0, 0, 0), true), }; yield return new TestCaseData(IcsFiles.Todo4, testVals).SetName("Todo4"); } [Test, Category("Todo")] public void Todo7_1() { var iCal = Calendar.Load(IcsFiles.Todo7); var todo = iCal.Todos; var items = new List<CalDateTime> { new CalDateTime(2006, 7, 28, 9, 0, 0, _tzid), new CalDateTime(2006, 8, 4, 9, 0, 0, _tzid), new CalDateTime(2006, 9, 1, 9, 0, 0, _tzid), new CalDateTime(2006, 10, 6, 9, 0, 0, _tzid), new CalDateTime(2006, 11, 3, 9, 0, 0, _tzid), new 
// (expected monthly occurrence start times continue through 2007-04-06)
CalDateTime(2006, 12, 1, 9, 0, 0, _tzid), new CalDateTime(2007, 1, 5, 9, 0, 0, _tzid), new CalDateTime(2007, 2, 2, 9, 0, 0, _tzid), new CalDateTime(2007, 3, 2, 9, 0, 0, _tzid), new CalDateTime(2007, 4, 6, 9, 0, 0, _tzid) }; var occurrences = todo[0].GetOccurrences( new CalDateTime(2006, 7, 1, 9, 0, 0), new CalDateTime(2007, 7, 1, 9, 0, 0)).OrderBy(o => o.Period.StartTime).ToList(); // FIXME: Count is not properly restricting recurrences to 10. // What's going wrong here? Assert.AreEqual( items.Count, occurrences.Count, "TODO should have " + items.Count + " occurrences; it has " + occurrences.Count); for (var i = 0; i < items.Count; i++) { Assert.AreEqual(items[i], occurrences[i].Period.StartTime, "TODO should occur at " + items[i] + ", but does not."); } } } }
// Code generated by Microsoft (R) AutoRest Code Generator 1.2.1.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace ApplicationGateway { using Microsoft.Rest; using Microsoft.Rest.Azure; using Models; using System.Threading; using System.Threading.Tasks; /// <summary> /// Extension methods for SecurityRulesOperations. /// </summary> public static partial class SecurityRulesOperationsExtensions { /// <summary> /// Deletes the specified network security rule. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='securityRuleName'> /// The name of the security rule. /// </param> public static void Delete(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName, string securityRuleName) { /* NOTE(review): generated sync wrapper — blocks the calling thread via GetAwaiter().GetResult(); prefer the *Async overloads in async contexts. */ operations.DeleteAsync(resourceGroupName, networkSecurityGroupName, securityRuleName).GetAwaiter().GetResult(); } /// <summary> /// Deletes the specified network security rule. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='securityRuleName'> /// The name of the security rule. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// It is forwarded to the underlying DeleteWithHttpMessagesAsync call.
/// </param> public static async Task DeleteAsync(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName, string securityRuleName, CancellationToken cancellationToken = default(CancellationToken)) { (await operations.DeleteWithHttpMessagesAsync(resourceGroupName, networkSecurityGroupName, securityRuleName, null, cancellationToken).ConfigureAwait(false)).Dispose(); } /// <summary> /// Get the specified network security rule. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='securityRuleName'> /// The name of the security rule. /// </param> public static SecurityRule Get(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName, string securityRuleName) { return operations.GetAsync(resourceGroupName, networkSecurityGroupName, securityRuleName).GetAwaiter().GetResult(); } /// <summary> /// Get the specified network security rule. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='securityRuleName'> /// The name of the security rule. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// It is forwarded to the underlying GetWithHttpMessagesAsync call.
/// </param> public static async Task<SecurityRule> GetAsync(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName, string securityRuleName, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, networkSecurityGroupName, securityRuleName, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Creates or updates a security rule in the specified network security group. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='securityRuleName'> /// The name of the security rule. /// </param> /// <param name='securityRuleParameters'> /// Parameters supplied to the create or update network security rule /// operation. /// </param> public static SecurityRule CreateOrUpdate(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName, string securityRuleName, SecurityRule securityRuleParameters) { return operations.CreateOrUpdateAsync(resourceGroupName, networkSecurityGroupName, securityRuleName, securityRuleParameters).GetAwaiter().GetResult(); } /// <summary> /// Creates or updates a security rule in the specified network security group. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='securityRuleName'> /// The name of the security rule. 
/// </param> /// <param name='securityRuleParameters'> /// Parameters supplied to the create or update network security rule /// operation. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<SecurityRule> CreateOrUpdateAsync(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName, string securityRuleName, SecurityRule securityRuleParameters, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, networkSecurityGroupName, securityRuleName, securityRuleParameters, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Gets all security rules in a network security group. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> public static IPage<SecurityRule> List(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName) { return operations.ListAsync(resourceGroupName, networkSecurityGroupName).GetAwaiter().GetResult(); } /// <summary> /// Gets all security rules in a network security group. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// It is forwarded to the underlying ListWithHttpMessagesAsync call.
/// </param> public static async Task<IPage<SecurityRule>> ListAsync(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.ListWithHttpMessagesAsync(resourceGroupName, networkSecurityGroupName, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Deletes the specified network security rule. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='securityRuleName'> /// The name of the security rule. /// </param> public static void BeginDelete(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName, string securityRuleName) { operations.BeginDeleteAsync(resourceGroupName, networkSecurityGroupName, securityRuleName).GetAwaiter().GetResult(); } /// <summary> /// Deletes the specified network security rule. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='securityRuleName'> /// The name of the security rule. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// It is forwarded to the underlying BeginDeleteWithHttpMessagesAsync call.
/// </param> public static async Task BeginDeleteAsync(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName, string securityRuleName, CancellationToken cancellationToken = default(CancellationToken)) { (await operations.BeginDeleteWithHttpMessagesAsync(resourceGroupName, networkSecurityGroupName, securityRuleName, null, cancellationToken).ConfigureAwait(false)).Dispose(); } /// <summary> /// Creates or updates a security rule in the specified network security group. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='securityRuleName'> /// The name of the security rule. /// </param> /// <param name='securityRuleParameters'> /// Parameters supplied to the create or update network security rule /// operation. /// </param> public static SecurityRule BeginCreateOrUpdate(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName, string securityRuleName, SecurityRule securityRuleParameters) { return operations.BeginCreateOrUpdateAsync(resourceGroupName, networkSecurityGroupName, securityRuleName, securityRuleParameters).GetAwaiter().GetResult(); } /// <summary> /// Creates or updates a security rule in the specified network security group. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='networkSecurityGroupName'> /// The name of the network security group. /// </param> /// <param name='securityRuleName'> /// The name of the security rule. 
/// </param> /// <param name='securityRuleParameters'> /// Parameters supplied to the create or update network security rule /// operation. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<SecurityRule> BeginCreateOrUpdateAsync(this ISecurityRulesOperations operations, string resourceGroupName, string networkSecurityGroupName, string securityRuleName, SecurityRule securityRuleParameters, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.BeginCreateOrUpdateWithHttpMessagesAsync(resourceGroupName, networkSecurityGroupName, securityRuleName, securityRuleParameters, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } /// <summary> /// Gets all security rules in a network security group. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> public static IPage<SecurityRule> ListNext(this ISecurityRulesOperations operations, string nextPageLink) { return operations.ListNextAsync(nextPageLink).GetAwaiter().GetResult(); } /// <summary> /// Gets all security rules in a network security group. /// </summary> /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='nextPageLink'> /// The NextLink from the previous successful call to List operation. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<IPage<SecurityRule>> ListNextAsync(this ISecurityRulesOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.ListNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false)) { return _result.Body; } } } }
// Define this to enable the dispatching of load events. The implementation // of load events requires that a complete implementation of SvgDocument.Load // be supplied rather than relying on the base XmlDocument.Load behaviour. // This is required because I know of no way to hook into the key stages of // XML document creation in order to throw events at the right times during // the load process. //#define ENABLE_LOAD_EVENTS using System; using System.Collections; using System.Diagnostics; using System.Drawing; using System.Drawing.Drawing2D; using System.Reflection; using System.Text.RegularExpressions; using System.Xml; using System.IO; using SharpVectors.Collections; using SharpVectors.Dom.Css; using SharpVectors.Xml; using SharpVectors.Dom.Svg.Rendering; namespace SharpVectors.Dom.Svg { /// <summary> /// The root object in the document object hierarchy of an Svg document. /// </summary> /// <remarks> /// <p> /// When an 'svg' element is embedded inline as a component of a /// document from another namespace, such as when an 'svg' element is /// embedded inline within an XHTML document /// [<a href="http://www.w3.org/TR/SVG/refs.html#ref-XHTML">XHTML</a>], /// then an /// <see cref="ISvgDocument">ISvgDocument</see> object will not exist; /// instead, the root object in the /// document object hierarchy will be a Document object of a different /// type, such as an HTMLDocument object. /// </p> /// <p> /// However, an <see cref="ISvgDocument">ISvgDocument</see> object will /// indeed exist when the root /// element of the XML document hierarchy is an 'svg' element, such as /// when viewing a stand-alone SVG file (i.e., a file with MIME type /// "image/svg+xml"). In this case, the /// <see cref="ISvgDocument">ISvgDocument</see> object will be the /// root object of the document object model hierarchy. 
/// </p> /// <p> /// In the case where an SVG document is embedded by reference, such as /// when an XHTML document has an 'object' element whose href attribute /// references an SVG document (i.e., a document whose MIME type is /// "image/svg+xml" and whose root element is thus an 'svg' element), /// there will exist two distinct DOM hierarchies. The first DOM hierarchy /// will be for the referencing document (e.g., an XHTML document). The /// second DOM hierarchy will be for the referenced SVG document. In this /// second DOM hierarchy, the root object of the document object model /// hierarchy is an <see cref="ISvgDocument">ISvgDocument</see> object. /// </p> /// <p> /// The <see cref="ISvgDocument">ISvgDocument</see> interface contains a /// similar list of attributes and /// methods to the HTMLDocument interface described in the /// <a href="http://www.w3.org/TR/REC-DOM-Level-1/level-one-html.html">Document /// Object Model (HTML) Level 1</a> chapter of the /// [<a href="http://www.w3.org/TR/SVG/refs.html#ref-DOM1">DOM1</a>] specification. 
/// </p>
/// </remarks>
/// <developer>niklas@protocol7.com</developer>
/// <completed>60</completed>
public class SvgDocument : CssXmlDocument, ISvgDocument
{
    #region Fields

    // The SvgWindow that owns this document; set once in the constructor.
    private SvgWindow window;

    // Maps "namespace:localName" keys to the element Type instantiated by
    // CreateElement. Populated by buildTypeDictionary().
    private TypeDictionary nodeByTagName = new TypeDictionary();

    #endregion

    #region Constructors

    /// <summary>
    /// Creates an SvgDocument bound to the given window and registers the
    /// document back on the window (two-way link).
    /// </summary>
    /// <param name="window">The owning SvgWindow.</param>
    public SvgDocument(SvgWindow window)
    {
        this.window = window;
        this.window.Document = this;

        // set up CSS properties
        AddStyleElement(SvgDocument.SvgNamespace, "style");
        CssPropertyProfile = CssPropertyProfile.SvgProfile;
        //this.XmlResolver = new CachingXmlUrlResolver();

        // build tagName to type dictionary
        buildTypeDictionary();
    }

    #endregion

    #region NamespaceManager

    private XmlNamespaceManager namespaceManager;

    /// <summary>
    /// Namespace manager preloaded with the default (svg) namespace plus the
    /// "svg" and "xlink" prefixes, for use in XPath queries against this
    /// document. Lazily created on first access and cached.
    /// </summary>
    public XmlNamespaceManager NamespaceManager
    {
        get
        {
            if(namespaceManager == null)
            {
                // Setup namespace manager and add default namespaces
                namespaceManager = new XmlNamespaceManager(this.NameTable);
                namespaceManager.AddNamespace(String.Empty, SvgDocument.SvgNamespace);
                namespaceManager.AddNamespace("svg", SvgDocument.SvgNamespace);
                namespaceManager.AddNamespace("xlink", SvgDocument.XLinkNamespace);
            }
            return namespaceManager;
        }
    }

    #endregion

    #region Type handling and creation of elements

    /// <summary>
    /// Registers the concrete element type for each supported SVG tag name so
    /// that CreateElement can instantiate the specialised classes. Virtual so
    /// subclasses can register additional tags.
    /// </summary>
    protected virtual void buildTypeDictionary()
    {
        SetTagNameNodeType(SvgNamespace, "a", typeof(SvgTransformableElement));
        SetTagNameNodeType(SvgNamespace, "circle", typeof(SvgCircleElement));
        SetTagNameNodeType(SvgNamespace, "clipPath", typeof(SvgClipPathElement));
        SetTagNameNodeType(SvgNamespace, "defs", typeof(SvgDefsElement));
        SetTagNameNodeType(SvgNamespace, "desc", typeof(SvgDescElement));
        SetTagNameNodeType(SvgNamespace, "ellipse", typeof(SvgEllipseElement));
        SetTagNameNodeType(SvgNamespace, "g", typeof(SvgGElement));
        SetTagNameNodeType(SvgNamespace, "image", typeof(SvgImageElement));
        SetTagNameNodeType(SvgNamespace, "line", typeof(SvgLineElement));
        SetTagNameNodeType(SvgNamespace, "linearGradient", typeof(SvgLinearGradientElement));
        SetTagNameNodeType(SvgNamespace, "marker", typeof(SvgMarkerElement));
        SetTagNameNodeType(SvgNamespace, "mask", typeof(SvgMaskElement));
        SetTagNameNodeType(SvgNamespace, "metadata", typeof(SvgMetadataElement));
        SetTagNameNodeType(SvgNamespace, "rect", typeof(SvgRectElement));
        SetTagNameNodeType(SvgNamespace, "path", typeof(SvgPathElement));
        SetTagNameNodeType(SvgNamespace, "pattern", typeof(SvgPatternElement));
        SetTagNameNodeType(SvgNamespace, "polyline", typeof(SvgPolylineElement));
        SetTagNameNodeType(SvgNamespace, "polygon", typeof(SvgPolygonElement));
        SetTagNameNodeType(SvgNamespace, "radialGradient", typeof(SvgRadialGradientElement));
        SetTagNameNodeType(SvgNamespace, "script", typeof(SvgScriptElement));
        SetTagNameNodeType(SvgNamespace, "stop", typeof(SvgStopElement));
        SetTagNameNodeType(SvgNamespace, "svg", typeof(SvgSvgElement));
        SetTagNameNodeType(SvgNamespace, "switch", typeof(SvgSwitchElement));
        SetTagNameNodeType(SvgNamespace, "symbol", typeof(SvgSymbolElement));
        SetTagNameNodeType(SvgNamespace, "text", typeof(SvgTextElement));
        SetTagNameNodeType(SvgNamespace, "title", typeof(SvgTitleElement));
        SetTagNameNodeType(SvgNamespace, "tref", typeof(SvgTRefElement));
        SetTagNameNodeType(SvgNamespace, "tspan", typeof(SvgTSpanElement));
        SetTagNameNodeType(SvgNamespace, "use", typeof(SvgUseElement));
    }

    /// <summary>
    /// Registers <paramref name="type"/> as the element class for the given
    /// tag. NOTE(review): despite the parameter name, every caller passes the
    /// namespace URI (SvgNamespace) as <paramref name="prefix"/>, matching the
    /// "ns:localName" key that CreateElement builds — the parameter is
    /// effectively a namespace, not an XML prefix.
    /// </summary>
    /// <param name="prefix">Namespace URI used as the key prefix.</param>
    /// <param name="localName">Local tag name.</param>
    /// <param name="type">Concrete XmlElement-derived type to instantiate.</param>
    public void SetTagNameNodeType(string prefix, string localName, Type type)
    {
        nodeByTagName[prefix + ":" + localName] = type;
    }

    /// <summary>
    /// Creates an element, dispatching on "ns:localName": a registered
    /// specialised type if one exists, a generic SvgElement for unregistered
    /// names in the SVG namespace, otherwise the base XmlDocument behaviour.
    /// </summary>
    /// <param name="prefix">The XML prefix of the new element.</param>
    /// <param name="localName">The local name of the new element.</param>
    /// <param name="ns">The namespace URI of the new element.</param>
    /// <returns>The newly created element.</returns>
    public override XmlElement CreateElement(string prefix, string localName, string ns)
    {
        string name = ns + ":" + localName;
        XmlElement result;

        if ( nodeByTagName.ContainsKey(name) )
        {
            // NOTE(review): 'type' is looked up but never used; CreateInstance
            // resolves the type again from 'name'.
            Type type = nodeByTagName[name];
            object[] args = new object[] { prefix, localName, ns, this };
            // Element constructors are non-public, hence the binding flags.
            result = (XmlElement) nodeByTagName.CreateInstance(
                name,
                args,
                BindingFlags.Instance | BindingFlags.NonPublic);
        }
        else if(ns == SvgNamespace)
        {
            result = new SvgElement(prefix, localName, ns, this);
        }
        else
        {
            result = base.CreateElement(prefix, localName, ns);
        }

        return result;
    }

    #endregion

    #region Static properties

    // Well-known namespace URIs. NOTE(review): mutable public static fields;
    // treated as constants by the rest of the class.
    public static string SvgNamespace = "http://www.w3.org/2000/svg";
    public static string XLinkNamespace = "http://www.w3.org/1999/xlink";

    #endregion

    #region Support collections

    // Feature strings this implementation claims to support (see Supports()).
    private string[] supportedFeatures = new string[]
    {
        "org.w3c.svg.static",
        "http://www.w3.org/TR/Svg11/feature#Shape",
        "http://www.w3.org/TR/Svg11/feature#BasicText",
        "http://www.w3.org/TR/Svg11/feature#OpacityAttribute"
    };

    private string[] supportedExtensions = new string[] {};

    /// <summary>
    /// Returns true if <paramref name="feature"/> appears in the supported
    /// feature or extension lists; otherwise defers to the base class.
    /// The <paramref name="version"/> argument is not checked here.
    /// </summary>
    public override bool Supports( string feature, string version)
    {
        foreach(string supportedFeature in supportedFeatures)
        {
            if(supportedFeature == feature)
            {
                return true;
            }
        }
        foreach(string supportedExtension in supportedExtensions)
        {
            if(supportedExtension == feature)
            {
                return true;
            }
        }
        return base.Supports(feature, version);
    }

    #endregion

    #region Overrides of Load()

    /// <summary>
    /// Disables DTD validation on the given reader (when it is an
    /// XmlValidatingReader). The commented-out code below previously routed
    /// well-known SVG DTD URIs to local copies via LocalDtdXmlUrlResolver.
    /// </summary>
    /// <param name="reader">Reader to configure; may be null.</param>
    private void prepareXmlResolver( XmlReader reader)
    {
        // TODO: 1.2 has removed the DTD, can we do this safely?
        if (reader != null && reader is XmlValidatingReader)
        {
            XmlValidatingReader valReader = (XmlValidatingReader)reader;
            valReader.ValidationType = ValidationType.None;
        }
        return;
        /*LocalDtdXmlUrlResolver localDtdXmlUrlResolver = new LocalDtdXmlUrlResolver();
        localDtdXmlUrlResolver.AddDtd("http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd", @"dtd\svg10.dtd");
        localDtdXmlUrlResolver.AddDtd("http://www.w3.org/TR/SVG/DTD/svg10.dtd", @"dtd\svg10.dtd");
        localDtdXmlUrlResolver.AddDtd("http://www.w3.org/Graphics/SVG/1.1/DTD/svg11-tiny.dtd", @"dtd\svg11-tiny.dtd");
        localDtdXmlUrlResolver.AddDtd("http://www.w3.org/Graphics/SVG/1.1/DTD/svg11-basic.dtd", @"dtd\svg11-basic.dtd");
        localDtdXmlUrlResolver.AddDtd("http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd", @"dtd\svg11.dtd");
        if (reader != null && reader is XmlValidatingReader)
        {
            XmlValidatingReader valReader = (XmlValidatingReader)reader;
            valReader.ValidationType = ValidationType.None;
            valReader.XmlResolver = localDtdXmlUrlResolver;
        }
        this.XmlResolver = localDtdXmlUrlResolver;*/
    }

    /// <overloads>
    /// Loads an XML document.Loads the specified XML data.
    /// <blockquote>
    /// <b>Note</b> The Load method always preserves significant white
    /// space. The PreserveWhitespace property determines whether or not
    /// white space is preserved. The default is false, whites space is
    /// not preserved.
    /// </blockquote>
    /// </overloads>
    /// <summary>
    /// Loads the XML document from the specified URL.
    /// NOTE(review): downloads the bytes with WebClient and decodes them as
    /// UTF-8 unconditionally — documents in other encodings will be garbled;
    /// the XML-declaration encoding is ignored. Also bypasses
    /// prepareXmlResolver (see the commented-out reader-based path below).
    /// </summary>
    /// <param name="url">
    /// URL for the file containing the XML document to load.
    /// </param>
    public override void Load(string url)
    {
        System.Net.WebClient client = new System.Net.WebClient ();
        byte[] data = client.DownloadData(url);
        base.LoadXml (System.Text.Encoding.UTF8.GetString(data));
        /*
        Console.WriteLine ("1");
        XmlTextReader reader = new XmlTextReader(url);
        Console.WriteLine ("2");
        XmlValidatingReader vr = new XmlValidatingReader(reader);
        prepareXmlResolver(vr);
        LoadAndFire(vr);
        reader.Close();
        Console.WriteLine ("3");
        */
    }

    /// <summary>
    /// Loads the XML document from the specified stream but with the
    /// specified base URL
    /// </summary>
    /// <param name="url">
    /// Base URL for the stream from which the XML document is loaded.
    /// </param>
    /// <param name="stream">
    /// The stream containing the XML document to load.
    /// </param>
    public void Load( string url, Stream stream)
    {
        XmlValidatingReader vr = new XmlValidatingReader( new XmlTextReader(url, stream));
        prepareXmlResolver(vr);
        LoadAndFire(vr);
    }

    /// <summary>
    /// Loads the XML document from the specified
    /// <see cref="XmlReader">XmlReader</see>.
    /// </summary>
    /// <param name="reader">
    /// The <see cref="XmlReader">XmlReader</see> used to feed the XML
    /// data into the document.
    /// </param>
    public override void Load( XmlReader reader)
    {
        prepareXmlResolver(reader);
        LoadAndFire(reader);
    }

    /// <summary>
    /// Loads the XML document from the specified
    /// <see cref="TextReader">TextReader</see>.
    /// </summary>
    /// <param name="reader">The reader supplying the XML text.</param>
    public override void Load( TextReader reader)
    {
        XmlValidatingReader vr = new XmlValidatingReader( new XmlTextReader(reader));
        prepareXmlResolver(vr);
        LoadAndFire(vr);
    }

    /// <summary>
    /// Loads the XML document from the specified stream.
    /// </summary>
    /// <param name="stream">
    /// The stream containing the XML document to load.
    /// </param>
    public override void Load( Stream stream)
    {
        prepareXmlResolver(null);
        // Delegate to the (url, stream) overload with an empty base URL.
        Load("", stream);
    }

    /// <summary>
    /// Loads the specified XML data and fires load events.
    /// When ENABLE_LOAD_EVENTS is not defined (the default) this is just
    /// base.Load; otherwise the document tree is built node-by-node so that
    /// events can be raised during the load.
    /// </summary>
    /// <param name="reader">
    /// The <see cref="XmlReader">XmlReader</see> describing the document
    /// to be loaded.
    /// </param>
    private void LoadAndFire( XmlReader reader)
    {
#if !ENABLE_LOAD_EVENTS
        base.Load(reader);
#else
        RemoveAll();
        // Seed the document with a throwaway root, then clear it again before
        // replaying the reader's node stream into this document.
        base.Load(new XmlTextReader(reader.BaseURI,
            new MemoryStream(System.Text.Encoding.UTF8.GetBytes(
                "<?xml version=\"1.0\"?><xmldoc/>"))));
        RemoveAll();

        XmlNode currentNode = this;

        // Read each node in the tree.
        while (reader.Read())
        {
            Console.WriteLine (reader.NodeType);
            switch (reader.NodeType)
            {
                case XmlNodeType.Element:
                    XmlElement xmlElement = CreateElement(
                        reader.Prefix,
                        reader.LocalName,
                        reader.NamespaceURI);
                    currentNode.AppendChild(xmlElement);
                    if (!reader.IsEmptyElement)
                    {
                        // Non-empty element: descend so children attach here.
                        currentNode = xmlElement;
                    }
                    while (reader.MoveToNextAttribute())
                    {
                        XmlAttribute xmlAttribute = CreateAttribute(
                            reader.Prefix,
                            reader.LocalName,
                            reader.NamespaceURI);
                        xmlAttribute.Value = reader.Value;
                        xmlElement.SetAttributeNode(xmlAttribute);
                    }
                    break;
                case XmlNodeType.Text:
                    currentNode.AppendChild(CreateTextNode(reader.Value));
                    break;
                case XmlNodeType.CDATA:
                    currentNode.AppendChild(CreateCDataSection(reader.Value));
                    break;
                case XmlNodeType.ProcessingInstruction:
                    currentNode.AppendChild(CreateProcessingInstruction(
                        reader.Name,
                        reader.Value));
                    break;
                case XmlNodeType.Comment:
                    currentNode.AppendChild(CreateComment(reader.Value));
                    break;
                case XmlNodeType.Document:
                    currentNode = this;
                    break;
                case XmlNodeType.Whitespace:
                    if (PreserveWhitespace)
                    {
                        currentNode.AppendChild(CreateWhitespace(reader.Value));
                    }
                    break;
                case XmlNodeType.SignificantWhitespace:
                    currentNode.AppendChild(CreateSignificantWhitespace(reader.Value));
                    break;
                case XmlNodeType.EndElement:
                    // Pop back to the parent scope.
                    currentNode = currentNode.ParentNode;
                    break;
                case XmlNodeType.Attribute:
                    currentNode.AppendChild(CreateAttribute(
                        reader.Prefix,
                        reader.LocalName,
                        reader.NamespaceURI));
                    break;
                case XmlNodeType.EntityReference:
                    currentNode.AppendChild(this.CreateEntityReference(reader.Value));
                    break;
                case XmlNodeType.XmlDeclaration:
                    XmlDeclaration xmlDeclaration = CreateXmlDeclaration(
                        "1.0",
                        String.Empty,
                        String.Empty);
                    xmlDeclaration.Value = reader.Value;
                    currentNode.AppendChild(xmlDeclaration);
                    break;
                case XmlNodeType.DocumentType:
                    currentNode.AppendChild(CreateDocumentType(
                        reader.Name,
                        reader["PUBLIC"],
                        reader["SYSTEM"],
                        reader.Value));
                    break;
            }
        }
        reader.Close();
#endif//ENABLE_LOAD_EVENTS
    }

    #endregion

    #region Resource handling

    /// <summary>
    /// Resolves the given absolute URI to a node; see the string overload.
    /// </summary>
    public XmlNode GetNodeByUri(Uri absoluteUri)
    {
        return GetNodeByUri(absoluteUri.AbsoluteUri);
    }

    /// <summary>
    /// Resolves an absolute URL to a node. A bare fragment ("#id") is looked
    /// up in this document; a URL without a fragment returns this document
    /// (same path) or a freshly loaded SvgDocument (different path); a URL
    /// with a fragment loads the target document and returns the element
    /// whose id matches the fragment.
    /// </summary>
    /// <param name="absoluteUrl">Absolute URL, or a "#fragment" reference.</param>
    /// <returns>The resolved node, or whatever GetElementById returns for a
    /// missing id.</returns>
    public XmlNode GetNodeByUri(string absoluteUrl)
    {
        absoluteUrl = absoluteUrl.Trim();
        if(absoluteUrl.StartsWith("#"))
        {
            return GetElementById(absoluteUrl.Substring(1));
        }
        else
        {
            Uri docUri = ResolveUri("");
            Uri absoluteUri = new Uri(absoluteUrl);
            string fragment = absoluteUri.Fragment;

            if(fragment.Length == 0)
            {
                // no fragment => return entire document
                if(docUri.AbsolutePath == absoluteUri.AbsolutePath)
                {
                    return this;
                }
                else
                {
                    // External document: load it into a new SvgDocument that
                    // shares this document's window.
                    SvgDocument doc = new SvgDocument((SvgWindow)Window);

                    XmlTextReader xtr = new XmlTextReader(absoluteUri.AbsolutePath,
                        GetResource(absoluteUri).GetResponseStream() );
                    XmlValidatingReader vr = new XmlValidatingReader(xtr);
                    vr.ValidationType = ValidationType.None;
                    doc.Load(vr);
                    return doc;
                }
            }
            else
            {
                // got a fragment => return XmlElement
                // NOTE(review): strips the fragment by string replacement and
                // uses the obsolete Uri(string, bool) "dontEscape" ctor.
                string noFragment = absoluteUri.AbsoluteUri.Replace(fragment, "");
                SvgDocument doc = (SvgDocument)GetNodeByUri(new Uri(noFragment, true));
                return doc.GetElementById(fragment.Substring(1));
            }
        }
    }

    /// <summary>
    /// Resolves <paramref name="uri"/> against this document's BaseURI,
    /// falling back to a file URI rooted at the application's executable
    /// directory when no base URI is set.
    /// </summary>
    public Uri ResolveUri(string uri)
    {
        string baseUri = BaseURI;
        if(baseUri.Length == 0)
        {
            baseUri = "file:///" + SharpVectors.ApplicationContext.ExecutableDirectory.FullName.Replace('\\', '/');
        }
        return new Uri(new Uri(baseUri), uri);
    }

    #endregion

    #region Implementation of ISvgDocument

    /// <summary>
    /// The title of the document which is the text content of the first child title element of the 'svg' root element.
    /// Runs of whitespace in the title are collapsed to single spaces.
    /// </summary>
    public string Title
    {
        get
        {
            string result = "";

            XmlNode node = SelectSingleNode("/svg:svg/svg:title[text()!='']", NamespaceManager);

            if(node != null)
            {
                node.Normalize();
                // NOTE: should probably use spec-defined whitespace
                result = Regex.Replace(node.InnerText, @"\s\s+", " ");
            }

            return result;
        }
    }

    /// <summary>
    /// Returns the URI of the page that linked to this page. The value is an empty string if the user navigated to the page directly (not through a link, but, for example, via a bookmark).
    /// This implementation always returns an empty string.
    /// </summary>
    public string Referrer
    {
        get
        {
            return String.Empty;
        }
    }

    /// <summary>
    /// The domain name of the server that served the document, or a null string if the server cannot be identified by a domain name.
    /// </summary>
    public string Domain
    {
        get
        {
            if(Url.Length == 0 || Url.StartsWith(Uri.UriSchemeFile))
            {
                return null;
            }
            else
            {
                return new Uri(Url).Host;
            }
        }
    }

    /// <summary>
    /// The root 'svg' element in the document hierarchy
    /// </summary>
    public ISvgSvgElement RootElement
    {
        get
        {
            return DocumentElement as ISvgSvgElement;
        }
    }

    // Cache of id -> element, built lazily by GetElementById. Never
    // invalidated, so elements added or re-identified after the first lookup
    // will not be found (see TODO below).
    internal Hashtable collectedIds = null;

    /// <summary>
    /// Looks up an element by its "id" attribute using a lazily built,
    /// document-wide cache. On duplicate ids the first occurrence wins.
    /// </summary>
    /// <param name="elementId">The id value to find.</param>
    /// <returns>The matching element, or null if not found.</returns>
    public override XmlElement GetElementById(string elementId)
    {
        // TODO: handle element and attribute updates globally to watch for id changes.
        if (collectedIds == null)
        {
            collectedIds = new Hashtable();
            // TODO: handle xml:id, handle duplicate ids?
            XmlNodeList ids = this.SelectNodes("//*/@id");
            foreach (XmlAttribute node in ids)
            {
                try
                {
                    collectedIds.Add(node.Value, node.OwnerElement);
                }
                catch (Exception)
                {
                    // Duplicate ID... what to do?
                }
            }
        }

        // Find the item
        object res = collectedIds[elementId];
        if (res == null)
            return null;
        else
            // NOTE(review): cast target is 'Element', not 'XmlElement' — this
            // only compiles if a project type named Element (deriving from
            // XmlElement) is in scope; the cache stores XmlAttribute.OwnerElement.
            // Verify whether (XmlElement) was intended.
            return (Element)res;
    }

    #endregion

    #region Implementation of ISvgDocument from SVG 1.2

    /// <summary>
    /// The window this document belongs to (set in the constructor).
    /// </summary>
    public ISvgWindow Window
    {
        get
        {
            return window;
        }
    }

    #endregion

    #region Other public properties

    /// <summary>
    /// Shadows XmlDocument.OwnerDocument with an SvgDocument-typed view
    /// (null when the base owner is not an SvgDocument).
    /// </summary>
    public new SvgDocument OwnerDocument
    {
        get
        {
            return base.OwnerDocument as SvgDocument;
        }
    }

    #endregion

    #region Rendering

    /// <summary>
    /// Renders the document by delegating to the root 'svg' element; a no-op
    /// when there is no SvgSvgElement root.
    /// </summary>
    /// <param name="renderer">The renderer to draw with.</param>
    public void Render(ISvgRenderer renderer)
    {
        SvgSvgElement root = RootElement as SvgSvgElement;
        if ( root != null )
            root.Render(renderer);
    }

    #endregion
}
}
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using Newtonsoft.Json;
using DG.Tweening;
using Jammer.Events;
using Jammer.Scenes;

namespace Jammer.UI {

  /// <summary>
  /// Light weight menu manager fake singleton. Listens for menu events, maps
  /// them, handles loading and unload the menus as needed. Hang this off the
  /// GameManager.
  /// </summary>
  public class MenuManager : EventHandler {

    /// <summary>
    /// Main menu container. Set in IDE.
    /// </summary>
    public GameObject menuContainer;

    /// <summary>
    /// CanvasGroup convenience property. Lazy loaded. Cached.
    /// </summary>
    public CanvasGroup CanvasGroup {
      get {
        if (canvasGroup == null) {
          canvasGroup = menuContainer.GetComponent<CanvasGroup>();
        }
        return canvasGroup;
      }
    }
    private CanvasGroup canvasGroup;

    /// <summary>
    /// There should be just one manager. If not found return null
    /// </summary>
    public static MenuManager Instance {
      get {
        if (instance == null) {
          Log.Verbose(string.Format("MenuManager.Instance.get looking for object"));
          instance = (MenuManager) GameObject.FindObjectOfType(typeof(MenuManager));
        }
        return instance;
      }
    }
    private static MenuManager instance = null;

    /// <summary>
    /// Dictionary of available menus, populated at runtime
    /// </summary>
    public Dictionary<MenuId, Menu> Menus { get; set; }

    /// <summary>
    /// The currently open menu. Defaults to MenuId.Main unless we are in play
    /// mode on this scene, then the loaded design time menu is current.
    /// Null until the first OpenMenu call succeeds.
    /// </summary>
    public Menu CurrentMenu { get; private set; }

    /// <summary>
    /// Menu state. Setting it kicks off the open/close fade coroutine and
    /// raises a handled MenuCommandEvent notification.
    /// </summary>
    public MenuState State { get { return GetState(); } protected set { SetState(value); }}
    private MenuState state = MenuState.Closed;

    /// <summary>
    /// The name of the currently loaded scene or nil none
    /// </summary>
    public string CurrentScene { get; set; }

    /// <summary>
    /// Collects all child Menu components into the Menus dictionary and
    /// forces every menu (and the container) inactive so we always start
    /// from the known Closed state.
    /// </summary>
    protected override void OnEnable() {
      Log.Verbose(string.Format("MenuManager.OnEnable()"));
      base.OnEnable();

      // always turn off design time menus so we start from the initial know state of closed
      menuContainer.SetActive(false);

      Menus = new Dictionary<MenuId, Menu>();

      // collect all the child menus into a dictionary
      Menus.Clear();
      Menu[] menus = transform.GetComponentsInChildren<Menu>(includeInactive: true);
      foreach (Menu menu in menus) {
        if (!Menus.ContainsKey(menu.Id)) {
          Log.Verbose(string.Format("MenuManager.OnEnable() add menu={0} to Menus", menu));
          Menus[menu.Id] = menu;
          // make sure all design time active menus start inactive
          menu.gameObject.SetActive(false);
        }
        else {
          Log.Error(string.Format("MenuManager.OnEnable() menu={0} duplicate id assigned to menu gameObject", menu));
        }
      }
    }

    public override void SubscribeEvents() {
      Log.Verbose(string.Format("MenuManager.SubscribeEvents()"));

      Events.AddListener<MenuCommandEvent>(OnMainMenuCommand);
      Events.AddListener<LoadSceneCommandEvent>(OnLoadSceneCommand);
      Events.AddListener<UnloadSceneCommandEvent>(OnUnloadSceneCommand);
    }

    public override void UnsubscribeEvents() {
      Log.Verbose(string.Format("MenuManager.UnsubscribeEvents()"));

      Events.RemoveListener<MenuCommandEvent>(OnMainMenuCommand);
      Events.RemoveListener<LoadSceneCommandEvent>(OnLoadSceneCommand);
      Events.RemoveListener<UnloadSceneCommandEvent>(OnUnloadSceneCommand);
    }

    /// <summary>
    /// Handles an unhandled MenuCommandEvent: switches to the requested menu
    /// and applies the requested open/close state.
    /// </summary>
    public void OnMainMenuCommand(MenuCommandEvent e) {
      if (!e.Handled) {
        Log.Debug(string.Format("MenuManager.OnMainMenuCommand({0})", e));

        OpenMenu(id: e.MenuId);

        // open or close depending on the request
        State = e.State;
      }
    }

    /// <summary>
    /// Swaps CurrentMenu to the menu registered under <paramref name="id"/>.
    /// Logs an error and leaves the current menu alone if the id is unknown.
    /// </summary>
    private void OpenMenu(MenuId id) {
      Log.Debug(string.Format("MenuManager.OpenMenu(id: {0})", id));

      Menu newMenu = null;
      if (Menus.ContainsKey(id)) {
        newMenu = Menus[id];
      }
      else {
        Log.Error(string.Format("MenuManager.OpenMenu({0}) unable to find requested menu", id));
        // stop the handler here
        return;
      }

      // TODO: swapping current menu should be a coroutine so it can be animated
      if (CurrentMenu != null) {
        Log.Verbose(string.Format("MenuManager.OpenMenu({0}) disabling CurrentMenu {1}", id, CurrentMenu), gameObject);
        CurrentMenu.gameObject.SetActive(false);
      }

      CurrentMenu = newMenu;
      Log.Verbose(string.Format("MenuManager.OpenMenu({0}) enable CurrentMenu {1}", id, CurrentMenu), gameObject);
      CurrentMenu.gameObject.SetActive(true);
    }

    /// <summary>
    /// Request to unload a scene
    /// </summary>
    private void OnUnloadSceneCommand(UnloadSceneCommandEvent e) {
      if (!e.Handled) {
        Log.Debug(string.Format("MenuManager.OnUnloadSceneCommand({0})", e));

        UnityEngine.SceneManagement.SceneManager.UnloadScene(sceneName: e.SceneName);

        // notify event
        Events.Raise(new UnloadSceneCommandEvent() { Handled=true, SceneName=e.SceneName });
      }
    }

    /// <summary>
    /// Request/Announce scene loading
    /// </summary>
    private void OnLoadSceneCommand(LoadSceneCommandEvent e) {
      if (!e.Handled) {
        Log.Debug(string.Format("MenuManager.OnLoadSceneCommand({0})", e));

        // turn off menus
        State = MenuState.Closed;

        if (CurrentScene == e.SceneName) {
          // TODO: Add e.Force to reload if needed
          Log.Debug(string.Format("MenuManager.OnLoadSceneCommand({0}) skipping, already loaded", e));
          return;
        }

        switch (e.Mode) {
          case LoadSceneMode.Additive:
            UnityEngine.SceneManagement.SceneManager.LoadScene(sceneName: e.SceneName, mode: UnityEngine.SceneManagement.LoadSceneMode.Additive);
            break;
          default:
            UnityEngine.SceneManagement.SceneManager.LoadScene(sceneName: e.SceneName, mode: UnityEngine.SceneManagement.LoadSceneMode.Single);
            break;
        }
        CurrentScene = e.SceneName;
      }
      else {
        Log.Verbose(string.Format("MenuManager.OnLoadSceneCommand({0}) scene loaded", e));
        // do nothing
      }
    }

    private MenuState GetState() {
      return state;
    }

    /// <summary>
    /// Applies the requested state by starting the fade coroutine, then
    /// raises a handled MenuCommandEvent notification.
    /// </summary>
    private void SetState(MenuState value) {
      Log.Verbose(string.Format("MenuManager.SetState(value: {0})", value));

      switch(value) {
        case MenuState.Closed:
          StartCoroutine(ToggleMenu(on: false));
          break;
        case MenuState.Open:
          StartCoroutine(ToggleMenu(on: true));
          break;
        case MenuState.Opening:
        case MenuState.Closing:
          // nothing to do
          break;
        default:
          Log.Error(string.Format("MenuManager.SetState(value: {0}) unhandled state", value));
          break;
      }

      // BUGFIX: CurrentMenu is null until the first OpenMenu call; scene
      // loading sets State = MenuState.Closed before any menu has ever been
      // opened, which used to throw a NullReferenceException here. Fall back
      // to the default MenuId when no menu is current.
      MenuId currentMenuId = (CurrentMenu != null) ? CurrentMenu.Id : default(MenuId);

      // notify event
      Events.Raise(new MenuCommandEvent() { Handled=true, MenuId=currentMenuId, State=State });
    }

    /// <summary>
    /// Fades the menu container on or off over half a second, tracking the
    /// transitional Opening/Closing states while the tween runs.
    /// </summary>
    private IEnumerator ToggleMenu(bool on) {
      Log.Verbose(string.Format("MenuManager.ToggleMenus(on: {0})", on));

      float duration = 0.5f;

      if (on) {
        state = MenuState.Opening;
        if (menuContainer.activeSelf) {
          // already active, just in case, make sure the alpha is all the way up
          CanvasGroup.alpha = 1f;
        }
        else {
          // alpha off immediately
          CanvasGroup.alpha = 0f;
          // enable container
          menuContainer.SetActive(true);
          // fade on over duration
          yield return CanvasGroup.DOFade(1f, duration: duration).WaitForCompletion();
        }
        state = MenuState.Open;
      }
      else {
        if (menuContainer.activeSelf) {
          state = MenuState.Closing;
          // fade off over duration,
          yield return CanvasGroup.DOFade(0f, duration: duration).WaitForCompletion();
          menuContainer.SetActive(false);
          state = MenuState.Closed;
        }
      }
    }

    protected virtual void Update() {
      // only handle input if we are already open, ignore closed and transitioning states
      if (State == MenuState.Open) {
        HandleInput();
      }
    }

    /// <summary>
    /// Escape closes the menus (unless on the start scene's main menu) or
    /// navigates back to the main menu. Only called while State is Open, so
    /// CurrentMenu is expected to be set by then — NOTE(review): confirm no
    /// code path reaches Open without OpenMenu having run.
    /// </summary>
    protected virtual void HandleInput() {
      if (Input.GetKeyDown(KeyCode.Escape)) {
        Log.Debug(string.Format("MenuManager.HandleInput() KeyCode.Escape"));

        switch(CurrentMenu.Id) {
          case MenuId.Main:
            // close menus if we are not on the start menu
            if (ActiveScene.name != ApplicationConstants.StartScene) {
              StartCoroutine(ToggleMenu(on: false));
            }
            break;
          default:
            // back to main menu
            OpenMenu(id: MenuId.Main);
            break;
        }
      }
    }
  }
}
using System; using System.IO; using System.Windows.Forms; using System.Drawing; using System.Collections; using System.Xml; using Microsoft.DirectX; using D3D = Microsoft.DirectX.Direct3D; using Voyage.Terraingine.DataCore; namespace Voyage.Terraingine.ExportTerrainProject { /// <summary> /// Driver class for the ExportTerrainProject plug-in. /// </summary> public class Driver : PlugIn { #region Data Members private SaveFileDialog _dlgSave; private XmlDocument _xmlDoc; private string _projectName; private bool _fileExists; #endregion #region Properties /// <summary> /// Gets the name of the project exported. /// </summary> public string ProjectName { get { return _projectName; } } #endregion #region Basic Plug-In Methods /// <summary> /// Creates the Driver class. /// </summary> public Driver() { base.InitializeBase(); _name = "Export Terrain Project"; this.CenterToParent(); _xmlDoc = null; _projectName = null; _fileExists = false; _dlgSave = new SaveFileDialog(); _dlgSave.Filter = "XML Files (*.xml)|*.xml|All files (*.*)|*.*" ; _dlgSave.InitialDirectory = Path.GetDirectoryName( Application.ExecutablePath ) + "\\Projects"; } /// <summary> /// Runs the plug-in. /// </summary> public override void Run() { if ( _page != null ) { DialogResult result = _dlgSave.ShowDialog( _owner ); if ( result == DialogResult.OK && _dlgSave.FileName != null ) { FileInfo file = new FileInfo( _dlgSave.FileName ); if ( File.Exists( file.FullName ) ) _fileExists = true; _xmlDoc = new XmlDocument(); CreateXML(); FileStorage(); _success = true; } } } /// <summary> /// Runs the plug-in with a pre-set filename. 
/// </summary> public void Run( string filename ) { if ( _page != null ) { FileInfo file = new FileInfo( _dlgSave.FileName ); if ( File.Exists( file.FullName ) ) _fileExists = true; _dlgSave.FileName = filename; _xmlDoc = new XmlDocument(); CreateXML(); FileStorage(); _success = true; } } #endregion #region XML Handling /// <summary> /// Creates and saves XML data to the chosen file in the SaveFileDialog. /// </summary> private void CreateXML() { if ( _dlgSave.FileName != null ) { XmlNode xmlNode; XmlElement xmlParent; // Create the XML declaration xmlNode = _xmlDoc.CreateNode( XmlNodeType.XmlDeclaration, "", "" ); _xmlDoc.AppendChild( xmlNode ); // Create the root node xmlParent = _xmlDoc.CreateElement( "", "TerrainProject", "Voyage" ); _xmlDoc.AppendChild( xmlParent ); // Add version data to the root node xmlParent.AppendChild( CreateTextNode( "Version", "Version 1.0" ) ); // Create the TerrainPage(s) WriteTerrainPage( xmlParent ); } } /// <summary> /// Stores data about the TerrainPage into the XML document. 
/// </summary> /// <param name="xmlParent">The XML node to insert data at.</param> private void WriteTerrainPage( XmlElement xmlParent ) { XmlElement xmlTPPar; // Create the TerrainPage element xmlTPPar = _xmlDoc.CreateElement( "", "TerrainPage", "" ); // Store the TerrainPage name xmlTPPar.AppendChild( CreateTextNode( "Name", _page.Name ) ); // Store the TerrainPage position xmlTPPar.AppendChild( CreateVector3Node( "Position", _page.Position ) ); // Store the TerrainPage rotation xmlTPPar.AppendChild( CreateQuaternionNode( "Rotation", _page.Rotation ) ); // Store the TerrainPage scale xmlTPPar.AppendChild( CreateVector3Node( "Scale", _page.Scale ) ); // Store the maximum vertex height of the TerrainPage xmlTPPar.AppendChild( CreateTextNode( "MaximumVertexHeight", _page.MaximumVertexHeight.ToString() ) ); // Write the TerrainPatch data to the XML file WriteTerrainPatch( xmlTPPar ); // Store the TerrainPage data xmlParent.AppendChild( xmlTPPar ); } /// <summary> /// Stores data about the TerrainPatch into the XML document. 
/// </summary> /// <param name="xmlParent">The XML node to insert data at.</param> private void WriteTerrainPatch( XmlElement xmlParent ) { XmlElement xmlTPPar, xmlElem; // Create the TerrainPatch element xmlTPPar = _xmlDoc.CreateElement( "", "TerrainPatch", "" ); // Store the number of rows in the TerrainPatch xmlTPPar.AppendChild( CreateTextNode( "Rows", _page.TerrainPatch.Rows.ToString() ) ); // Store the number of columns in the TerrainPatch xmlTPPar.AppendChild( CreateTextNode( "Columns", _page.TerrainPatch.Columns.ToString() ) ); // Store the height of the TerrainPatch xmlTPPar.AppendChild( CreateTextNode( "Height", _page.TerrainPatch.Height.ToString() ) ); // Store the width of the TerrainPatch xmlTPPar.AppendChild( CreateTextNode( "Width", _page.TerrainPatch.Width.ToString() ) ); if ( _page.TerrainPatch.NumTextures > 0 ) { xmlElem = _xmlDoc.CreateElement( "", "Textures", "" ); for ( int i = 0; i < _page.TerrainPatch.NumTextures; i++ ) WriteTexture( xmlElem, (DataCore.Texture) _page.TerrainPatch.Textures[i] ); xmlTPPar.AppendChild( xmlElem ); } // Store the vertices of the TerrainPatch xmlElem = _xmlDoc.CreateElement( "", "Vertices", "" ); for ( int i = 0; i < _page.TerrainPatch.NumVertices; i++ ) WriteVertex( xmlElem, i ); xmlTPPar.AppendChild( xmlElem ); // Store the TerrainPatch data xmlParent.AppendChild( xmlTPPar ); } /// <summary> /// Stores data about a texture into the XML document. 
/// </summary> /// <param name="xmlParent">The XML node to insert data at.</param> /// <param name="tex">The DataCore.Texture to get data from.</param> private void WriteTexture( XmlElement xmlParent, DataCore.Texture tex ) { XmlElement xmlTPPar; string filepath, filename = null; // Create the texture element xmlTPPar = _xmlDoc.CreateElement( "", "Texture", "" ); // Store the texture name xmlTPPar.AppendChild( CreateTextNode( "Name", Path.GetFileName( tex.Name ) ) ); // Store the texture filename if ( _fileExists ) { filepath = Path.GetDirectoryName( _dlgSave.FileName ); filename = filepath + "\\Textures\\"; filename += Path.GetFileNameWithoutExtension( tex.Name ) + Path.GetExtension( tex.FileName ); } else { filepath = Path.GetDirectoryName( _dlgSave.FileName ); filepath += "\\" + Path.GetFileNameWithoutExtension( _dlgSave.FileName ); filename = filepath + "\\Textures\\"; filename += Path.GetFileNameWithoutExtension( tex.Name ) + Path.GetExtension( tex.FileName ); } xmlTPPar.AppendChild( CreateTextNode( "FileName", filename ) ); // Store the texture texture operation xmlTPPar.AppendChild( CreateTextNode( "TextureOperation", tex.OperationText ) ); // Store if the texture is a mask xmlTPPar.AppendChild( CreateTextNode( "IsMask", tex.Mask.ToString() ) ); // Store the texture scale values xmlTPPar.AppendChild( CreateVector2Node( "Scale", tex.Scale ) ); // Store the texture shift values xmlTPPar.AppendChild( CreateVector2Node( "Shift", tex.Shift ) ); // Store the texture element xmlParent.AppendChild( xmlTPPar ); } /// <summary> /// Stores data about a vertex into the XML document. 
/// </summary> /// <param name="xmlParent">The XML node to insert data at.</param> /// <param name="index">Index of the vertex to get data from.</param> private void WriteVertex( XmlElement xmlParent, int index ) { XmlElement xmlElem, xmlTPPar; // Create the vertex element xmlTPPar = _xmlDoc.CreateElement( "", "Vertex", "" ); // Store the vertex position xmlTPPar.AppendChild( CreateVector3Node( "Position", _page.TerrainPatch.Vertices[index].Position ) ); // Store the vertex normal xmlTPPar.AppendChild( CreateVector3Node( "Normal", _page.TerrainPatch.Vertices[index].Normal ) ); // Store the texture coordinates for each texture if ( _page.TerrainPatch.NumTextures > 0 ) { xmlElem = _xmlDoc.CreateElement( "", "TextureCoordinates", "" ); for ( int i = 0; i < _page.TerrainPatch.NumTextures; i++ ) { xmlElem.AppendChild( CreateVector2Node( "TextureCoordinate" + (i + 1), ( (Vector2[]) _page.TerrainPatch.TextureCoordinates[i] )[index] ) ); } xmlTPPar.AppendChild( xmlElem ); } // Store the vertex element xmlParent.AppendChild( xmlTPPar ); } #endregion #region Element Creation /// <summary> /// Creates an XmlElement for text data. /// </summary> /// <param name="label">The label for the XmlElement.</param> /// <param name="data">The text data to insert into the XmlElement.</param> /// <returns>The XmlElement with packaged data.</returns> private XmlElement CreateTextNode( string label, string data ) { // Create the XML node XmlElement xmlElem = _xmlDoc.CreateElement( "", label, "" ); XmlText xmlText = _xmlDoc.CreateTextNode( data ); xmlElem.AppendChild( xmlText ); return xmlElem; } /// <summary> /// Creates an XmlElement for Vector2 data. 
/// </summary> /// <param name="label">The label for the XmlElement.</param> /// <param name="data">The Vector2 data to insert into the XmlElement.</param> /// <returns>The XmlElement with packaged data.</returns> private XmlElement CreateVector2Node( string label, Vector2 data ) { // Create the XML node XmlElement xmlElem = _xmlDoc.CreateElement( "", label, "" ); XmlElement xmlElem2; XmlText xmlText; // X-element xmlElem2 = _xmlDoc.CreateElement( "", "X", "" ); xmlText = _xmlDoc.CreateTextNode( data.X.ToString() ); xmlElem2.AppendChild( xmlText ); xmlElem.AppendChild( xmlElem2 ); // Y-element xmlElem2 = _xmlDoc.CreateElement( "", "Y", "" ); xmlText = _xmlDoc.CreateTextNode( data.Y.ToString() ); xmlElem2.AppendChild( xmlText ); xmlElem.AppendChild( xmlElem2 ); return xmlElem; } /// <summary> /// Creates an XmlElement for Vector3 data. /// </summary> /// <param name="label">The label for the XmlElement.</param> /// <param name="data">The Vector3 data to insert into the XmlElement.</param> /// <returns>The XmlElement with packaged data.</returns> private XmlElement CreateVector3Node( string label, Vector3 data ) { // Create the XML node XmlElement xmlElem = _xmlDoc.CreateElement( "", label, "" ); XmlElement xmlElem2; XmlText xmlText; // X-element xmlElem2 = _xmlDoc.CreateElement( "", "X", "" ); xmlText = _xmlDoc.CreateTextNode( data.X.ToString() ); xmlElem2.AppendChild( xmlText ); xmlElem.AppendChild( xmlElem2 ); // Y-element xmlElem2 = _xmlDoc.CreateElement( "", "Y", "" ); xmlText = _xmlDoc.CreateTextNode( data.Y.ToString() ); xmlElem2.AppendChild( xmlText ); xmlElem.AppendChild( xmlElem2 ); // Z-element xmlElem2 = _xmlDoc.CreateElement( "", "Z", "" ); xmlText = _xmlDoc.CreateTextNode( data.Z.ToString() ); xmlElem2.AppendChild( xmlText ); xmlElem.AppendChild( xmlElem2 ); return xmlElem; } /// <summary> /// Creates an XmlElement for Quaternion data. 
/// </summary>
/// <param name="label">The label for the XmlElement.</param>
/// <param name="data">The Quaternion data to insert into the XmlElement.</param>
/// <returns>The XmlElement with packaged data.</returns>
private XmlElement CreateQuaternionNode( string label, Quaternion data )
{
    // Package the four quaternion components as <X>, <Y>, <Z>, <W>
    // child elements, in that order, under a single parent element.
    XmlElement quatElem = _xmlDoc.CreateElement( "", label, "" );

    string[] names = new string[] { "X", "Y", "Z", "W" };
    string[] values = new string[]
    {
        data.X.ToString(), data.Y.ToString(), data.Z.ToString(), data.W.ToString()
    };

    for ( int i = 0; i < names.Length; i++ )
    {
        XmlElement compElem = _xmlDoc.CreateElement( "", names[i], "" );
        compElem.AppendChild( _xmlDoc.CreateTextNode( values[i] ) );
        quatElem.AppendChild( compElem );
    }

    return quatElem;
}
#endregion

#region File Storage
/// <summary>
/// Stores the files used into a project sub-directory.
/// </summary>
private void FileStorage()
{
    string filename;
    DataCore.Texture tex;
    FileInfo file = new FileInfo( _dlgSave.FileName );
    FileInfo texFile;
    DirectoryInfo dir = Directory.GetParent( file.FullName );
    StreamWriter writer;

    // NOTE(review): paths below are built with "\\" concatenation, which is
    // Windows-only; Path.Combine would be more portable — confirm intent.
    if ( File.Exists( file.FullName ) )
    {
        // Overwrite an existing project file in place; the project name
        // stays the user-chosen path.
        file.Delete();
        _xmlDoc.Save( file.FullName );
        _projectName = file.FullName;
    }
    else
    {
        filename = Path.GetFileNameWithoutExtension( file.FullName );

        // Create a new project sub-directory named after the file
        dir = Directory.CreateDirectory( dir.FullName + "\\" + filename );

        // Save the XML document into the new directory (overwrite, not append)
        writer = new StreamWriter( dir.FullName + "\\" + filename + Path.GetExtension( file.FullName ), false );
        _xmlDoc.Save( writer );
        writer.Close();
        _projectName = dir.FullName + "\\" + filename + Path.GetExtension( file.FullName );
    }

    // Create the textures sub-directory if it doesn't already exist
    if ( !Directory.Exists( dir.FullName + "\\Textures" ) )
        dir = Directory.CreateDirectory( dir.FullName + "\\Textures" );
    else
        dir = new DirectoryInfo( dir.FullName + "\\Textures" );

    // Copy texture files into the "Textures" folder, overwriting duplicates
    for ( int i = 0; i < _page.TerrainPatch.NumTextures; i++ )
    {
        // Get the new path and name for the texture
        tex = (DataCore.Texture) _page.TerrainPatch.Textures[i];
        texFile = new FileInfo( tex.FileName );

        // Rename the file when the texture has been given a display name.
        // NOTE(review): this compares tex.Name against the full tex.FileName
        // (not just the base name) — confirm that is the intended condition.
        if ( tex.Name.Length > 0 && tex.Name != tex.FileName )
            filename = dir.FullName + "\\" + tex.Name + Path.GetExtension( texFile.FullName );
        else
            filename = dir.FullName + "\\" + Path.GetFileName( texFile.FullName );

        // Copy the file to the new folder (true = overwrite existing)
        texFile.CopyTo( filename, true );
    }
}
#endregion
}
}
using System;
using System.Collections.Generic;
using System.Composition;
using System.IO;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Internal;
using Microsoft.CodeAnalysis;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using OmniSharp.Mef;
using OmniSharp.Middleware;
using OmniSharp.Models;
using OmniSharp.Models.v1;
using OmniSharp.Services;
using Xunit;

namespace OmniSharp.Tests
{
    /// <summary>
    /// Tests for <see cref="EndpointMiddleware"/>: routing of requests to the
    /// MEF-exported handlers below, and pass-through to the next delegate for
    /// unknown paths. (Fix: removed five unused <c>memoryStream</c> locals that
    /// were allocated but never used — the request body is assigned its own
    /// <see cref="MemoryStream"/> separately.)
    /// </summary>
    public class EndpointMiddlewareFacts
    {
        // Stub handler for /gotodefinition; always yields a null response.
        [OmniSharpHandler(OmnisharpEndpoints.GotoDefinition, LanguageNames.CSharp)]
        public class GotoDefinitionService : RequestHandler<GotoDefinitionRequest, GotoDefinitionResponse>
        {
            [Import]
            public OmnisharpWorkspace Workspace { get; set; }

            public Task<GotoDefinitionResponse> Handle(GotoDefinitionRequest request)
            {
                return Task.FromResult<GotoDefinitionResponse>(null);
            }
        }

        // Stub handler for /findsymbols; always yields a null response.
        [OmniSharpHandler(OmnisharpEndpoints.FindSymbols, LanguageNames.CSharp)]
        public class FindSymbolsService : RequestHandler<FindSymbolsRequest, QuickFixResponse>
        {
            [Import]
            public OmnisharpWorkspace Workspace { get; set; }

            public Task<QuickFixResponse> Handle(FindSymbolsRequest request)
            {
                return Task.FromResult<QuickFixResponse>(null);
            }
        }

        // Stub handler for /updatebuffer; always yields a null response.
        [OmniSharpHandler(OmnisharpEndpoints.UpdateBuffer, LanguageNames.CSharp)]
        public class UpdateBufferService : RequestHandler<UpdateBufferRequest, object>
        {
            [Import]
            public OmnisharpWorkspace Workspace { get; set; }

            public Task<object> Handle(UpdateBufferRequest request)
            {
                return Task.FromResult<object>(null);
            }
        }

        class Response { }

        // Minimal IProjectSystem so the plugin host can compose a project system.
        [Export(typeof(IProjectSystem))]
        class FakeProjectSystem : IProjectSystem
        {
            public string Key { get { return "Fake"; } }
            public string Language { get { return LanguageNames.CSharp; } }
            public IEnumerable<string> Extensions { get; } = new[] { ".cs" };

            public Task<object> GetInformationModel(WorkspaceInformationRequest request)
            {
                throw new NotImplementedException();
            }

            public Task<object> GetProjectModel(string path)
            {
                throw new NotImplementedException();
            }

            // Name matches the (misspelled) IProjectSystem member; do not rename here.
            public void Initalize(IConfiguration configuration) { }
        }

        // No-op logger infrastructure so the middleware can be constructed
        // without a real logging pipeline.
        class LoggerFactory : ILoggerFactory
        {
            public LogLevel MinimumLevel { get; set; }
            public void AddProvider(ILoggerProvider provider) { }
            public ILogger CreateLogger(string categoryName) => new Logger();
            public void Dispose() { }
        }

        class Disposable : IDisposable
        {
            public void Dispose() { }
        }

        class Logger : ILogger
        {
            public IDisposable BeginScopeImpl(object state) => new Disposable();
            public bool IsEnabled(LogLevel logLevel) => true;
            public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception exception, Func<TState, Exception, string> formatter) { }
        }

        /// <summary>
        /// An unknown path must fall through to the next delegate (which throws here).
        /// </summary>
        [Fact]
        public async Task Passes_through_for_invalid_path()
        {
            RequestDelegate _next = (ctx) => Task.Run(() => { throw new NotImplementedException(); });

            var host = TestHelpers.CreatePluginHost(new[] { typeof(EndpointMiddlewareFacts).GetTypeInfo().Assembly });
            var middleware = new EndpointMiddleware(_next, host, new LoggerFactory());

            var context = new DefaultHttpContext();
            context.Request.Path = PathString.FromUriComponent("/notvalid");

            await Assert.ThrowsAsync<NotImplementedException>(() => middleware.Invoke(context));
        }

        /// <summary>
        /// A known endpoint path must be handled by the middleware and never
        /// reach the (throwing) next delegate.
        /// </summary>
        [Fact]
        public async Task Does_not_throw_for_valid_path()
        {
            RequestDelegate _next = (ctx) => Task.Run(() => { throw new NotImplementedException(); });

            var host = TestHelpers.CreatePluginHost(new[]
            {
                typeof(EndpointMiddlewareFacts).GetTypeInfo().Assembly,
                typeof(EndpointDescriptor).GetTypeInfo().Assembly
            });
            var middleware = new EndpointMiddleware(_next, host, new LoggerFactory());

            var context = new DefaultHttpContext();
            context.Request.Path = PathString.FromUriComponent("/gotodefinition");
            context.Request.Body = new MemoryStream(
                Encoding.UTF8.GetBytes(
                    JsonConvert.SerializeObject(new GotoDefinitionRequest
                    {
                        FileName = "bar.cs",
                        Line = 2,
                        Column = 14,
                        Timeout = 60000
                    })
                )
            );

            await middleware.Invoke(context);

            // Reaching this point without an exception is the assertion.
            Assert.True(true);
        }

        /// <summary>
        /// A request routed to a single registered handler completes without error.
        /// </summary>
        [Fact]
        public async Task Passes_through_to_services()
        {
            RequestDelegate _next = (ctx) => Task.Run(() => { throw new NotImplementedException(); });

            var host = TestHelpers.CreatePluginHost(new[]
            {
                typeof(EndpointMiddlewareFacts).GetTypeInfo().Assembly,
                typeof(EndpointDescriptor).GetTypeInfo().Assembly
            });
            var middleware = new EndpointMiddleware(_next, host, new LoggerFactory());

            var context = new DefaultHttpContext();
            context.Request.Path = PathString.FromUriComponent("/gotodefinition");
            context.Request.Body = new MemoryStream(
                Encoding.UTF8.GetBytes(
                    JsonConvert.SerializeObject(new GotoDefinitionRequest
                    {
                        FileName = "bar.cs",
                        Line = 2,
                        Column = 14,
                        Timeout = 60000
                    })
                )
            );

            await middleware.Invoke(context);

            Assert.True(true);
        }

        /// <summary>
        /// A request without a Language is fanned out to all matching handlers.
        /// </summary>
        [Fact]
        public async Task Passes_through_to_all_services_with_delegate()
        {
            RequestDelegate _next = (ctx) => Task.Run(() => { throw new NotImplementedException(); });

            var host = TestHelpers.CreatePluginHost(new[]
            {
                typeof(EndpointMiddlewareFacts).GetTypeInfo().Assembly,
                typeof(EndpointDescriptor).GetTypeInfo().Assembly
            });
            var middleware = new EndpointMiddleware(_next, host, new LoggerFactory());

            var context = new DefaultHttpContext();
            context.Request.Path = PathString.FromUriComponent("/findsymbols");
            context.Request.Body = new MemoryStream(
                Encoding.UTF8.GetBytes(
                    JsonConvert.SerializeObject(new FindSymbolsRequest { })
                )
            );

            await middleware.Invoke(context);

            Assert.True(true);
        }

        /// <summary>
        /// A request that names a Language is routed to that language's handler only.
        /// </summary>
        [Fact]
        public async Task Passes_through_to_specific_service_with_delegate()
        {
            RequestDelegate _next = (ctx) => Task.Run(() => { throw new NotImplementedException(); });

            var host = TestHelpers.CreatePluginHost(new[]
            {
                typeof(EndpointMiddlewareFacts).GetTypeInfo().Assembly,
                typeof(EndpointDescriptor).GetTypeInfo().Assembly
            });
            var middleware = new EndpointMiddleware(_next, host, new LoggerFactory());

            var context = new DefaultHttpContext();
            context.Request.Path = PathString.FromUriComponent("/findsymbols");
            context.Request.Body = new MemoryStream(
                Encoding.UTF8.GetBytes(
                    JsonConvert.SerializeObject(new FindSymbolsRequest
                    {
                        Language = LanguageNames.CSharp
                    })
                )
            );

            await middleware.Invoke(context);

            Assert.True(true);
        }

        // Endpoint registered below whose response type cannot be merged across handlers.
        public Func<ThrowRequest, Task<ThrowResponse>> ThrowDelegate = (request) =>
        {
            return Task.FromResult<ThrowResponse>(null);
        };

        [OmniSharpEndpoint("/throw", typeof(ThrowRequest), typeof(ThrowResponse))]
        public class ThrowRequest : IRequest { }

        public class ThrowResponse { }

        /// <summary>
        /// Aggregating responses of a non-mergeable type must surface NotSupportedException.
        /// </summary>
        [Fact]
        public async Task Should_throw_if_type_is_not_mergeable()
        {
            RequestDelegate _next = async (ctx) => await Task.Run(() => { throw new NotImplementedException(); });

            var host = TestHelpers.CreatePluginHost(new[] { typeof(EndpointMiddlewareFacts).GetTypeInfo().Assembly });
            var middleware = new EndpointMiddleware(_next, host, new LoggerFactory());

            var context = new DefaultHttpContext();
            context.Request.Path = PathString.FromUriComponent("/throw");
            context.Request.Body = new MemoryStream(
                Encoding.UTF8.GetBytes(
                    JsonConvert.SerializeObject(new ThrowRequest())
                )
            );

            await Assert.ThrowsAsync<NotSupportedException>(async () => await middleware.Invoke(context));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Razor.Language.CodeGeneration;
using Microsoft.AspNetCore.Razor.Language.Extensions;
using Microsoft.AspNetCore.Razor.Language.Intermediate;

namespace Microsoft.AspNetCore.Razor.Language.Components
{
    /// <summary>
    /// Base writer for Razor component code generation. Shared logic between the
    /// design-time and runtime writers lives here; the abstract members are the
    /// points where the two differ.
    /// </summary>
    internal abstract class ComponentNodeWriter : IntermediateNodeWriter, ITemplateTargetExtension
    {
        protected abstract void BeginWriteAttribute(CodeRenderingContext context, string key);

        protected abstract void BeginWriteAttribute(CodeRenderingContext context, IntermediateNode expression);

        protected abstract void WriteReferenceCaptureInnards(CodeRenderingContext context, ReferenceCaptureIntermediateNode node, bool shouldTypeCheck);

        public abstract void WriteTemplate(CodeRenderingContext context, TemplateIntermediateNode node);

        // Writer scopes are not a concept in component code generation.
        public sealed override void BeginWriterScope(CodeRenderingContext context, string writer)
        {
            throw new NotImplementedException(nameof(BeginWriterScope));
        }

        public sealed override void EndWriterScope(CodeRenderingContext context)
        {
            throw new NotImplementedException(nameof(EndWriterScope));
        }

        public sealed override void WriteCSharpCodeAttributeValue(CodeRenderingContext context, CSharpCodeAttributeValueIntermediateNode node)
        {
            // We used to support syntaxes like <elem onsomeevent=@{ /* some C# code */ } /> but this is no longer the
            // case.
            //
            // We provide an error for this case just to be friendly.
            var content = string.Join("", node.Children.OfType<IntermediateToken>().Select(t => t.Content));
            context.Diagnostics.Add(ComponentDiagnosticFactory.Create_CodeBlockInAttribute(node.Source, content));
            return;
        }

        // Currently the same for design time and runtime
        public override void WriteComponentTypeInferenceMethod(CodeRenderingContext context, ComponentTypeInferenceMethodIntermediateNode node)
        {
            if (context == null)
            {
                throw new ArgumentNullException(nameof(context));
            }

            if (node == null)
            {
                throw new ArgumentNullException(nameof(node));
            }

            var parameters = GetTypeInferenceMethodParameters(node);

            // This is really similar to the code in WriteComponentAttribute and WriteComponentChildContent - except simpler because
            // attributes and child contents look like variables.
            //
            // Looks like:
            //
            //  public static void CreateFoo_0<T1, T2>(RenderTreeBuilder __builder, int seq, int __seq0, T1 __arg0, int __seq1, global::System.Collections.Generic.List<T2> __arg1, int __seq2, string __arg2)
            //  {
            //      builder.OpenComponent<Foo<T1, T2>>();
            //      builder.AddAttribute(__seq0, "Attr0", __arg0);
            //      builder.AddAttribute(__seq1, "Attr1", __arg1);
            //      builder.AddAttribute(__seq2, "Attr2", __arg2);
            //      builder.CloseComponent();
            //  }
            //
            // As a special case, we need to generate a thunk for captures in this block instead of using
            // them verbatim.
            //
            // The problem is that RenderTreeBuilder wants an Action<object>. The caller can't write the type
            // name if it contains generics, and we can't write the variable they want to assign to.
            var writer = context.CodeWriter;

            // Emit the method signature: name, generic type parameters, then the
            // builder/seq parameters followed by one (seq, value) pair per attribute.
            writer.Write("public static void ");
            writer.Write(node.MethodName);
            writer.Write("<");
            writer.Write(string.Join(", ", node.Component.Component.GetTypeParameters().Select(a => a.Name)));
            writer.Write(">");

            writer.Write("(");
            writer.Write("global::");
            writer.Write(ComponentsApi.RenderTreeBuilder.FullTypeName);
            writer.Write(" ");
            writer.Write(ComponentsApi.RenderTreeBuilder.BuilderParameter);
            writer.Write(", ");
            writer.Write("int seq");

            if (parameters.Count > 0)
            {
                writer.Write(", ");
            }

            for (var i = 0; i < parameters.Count; i++)
            {
                // Synthetic parameters (cascaded generics) have no seq argument.
                if (!string.IsNullOrEmpty(parameters[i].SeqName))
                {
                    writer.Write("int ");
                    writer.Write(parameters[i].SeqName);
                    writer.Write(", ");
                }

                writer.Write(parameters[i].TypeName);
                writer.Write(" ");
                writer.Write(parameters[i].ParameterName);

                if (i < parameters.Count - 1)
                {
                    writer.Write(", ");
                }
            }

            writer.Write(")");

            // Writes out a list of generic type constraints with indentation
            // public void Foo<T, U>(T t, U u)
            //     where T: new()
            //     where U: Foo, notnull
            foreach (var constraint in node.GenericTypeConstraints)
            {
                writer.WriteLine();
                writer.Indent(writer.CurrentIndent + writer.TabSize);
                writer.Write(constraint);
            }

            writer.WriteLine();
            writer.WriteLine("{");

            // _builder.OpenComponent<TComponent>(42);
            context.CodeWriter.Write(ComponentsApi.RenderTreeBuilder.BuilderParameter);
            context.CodeWriter.Write(".");
            context.CodeWriter.Write(ComponentsApi.RenderTreeBuilder.OpenComponent);
            context.CodeWriter.Write("<");
            context.CodeWriter.Write(node.Component.TypeName);
            context.CodeWriter.Write(">(");
            context.CodeWriter.Write("seq");
            context.CodeWriter.Write(");");
            context.CodeWriter.WriteLine();

            // One builder call per parameter, dispatched on the intermediate node
            // kind the parameter was derived from.
            foreach (var parameter in parameters)
            {
                switch (parameter.Source)
                {
                    case ComponentAttributeIntermediateNode attribute:
                        context.CodeWriter.WriteStartInstanceMethodInvocation(ComponentsApi.RenderTreeBuilder.BuilderParameter, ComponentsApi.RenderTreeBuilder.AddAttribute);
                        context.CodeWriter.Write(parameter.SeqName);
                        context.CodeWriter.Write(", ");
                        context.CodeWriter.Write($"\"{attribute.AttributeName}\"");
                        context.CodeWriter.Write(", ");
                        context.CodeWriter.Write(parameter.ParameterName);
                        context.CodeWriter.WriteEndMethodInvocation();
                        break;

                    case SplatIntermediateNode:
                        context.CodeWriter.WriteStartInstanceMethodInvocation(ComponentsApi.RenderTreeBuilder.BuilderParameter, ComponentsApi.RenderTreeBuilder.AddMultipleAttributes);
                        context.CodeWriter.Write(parameter.SeqName);
                        context.CodeWriter.Write(", ");
                        context.CodeWriter.Write(parameter.ParameterName);
                        context.CodeWriter.WriteEndMethodInvocation();
                        break;

                    case ComponentChildContentIntermediateNode childContent:
                        context.CodeWriter.WriteStartInstanceMethodInvocation(ComponentsApi.RenderTreeBuilder.BuilderParameter, ComponentsApi.RenderTreeBuilder.AddAttribute);
                        context.CodeWriter.Write(parameter.SeqName);
                        context.CodeWriter.Write(", ");
                        context.CodeWriter.Write($"\"{childContent.AttributeName}\"");
                        context.CodeWriter.Write(", ");
                        context.CodeWriter.Write(parameter.ParameterName);
                        context.CodeWriter.WriteEndMethodInvocation();
                        break;

                    case SetKeyIntermediateNode:
                        context.CodeWriter.WriteStartInstanceMethodInvocation(ComponentsApi.RenderTreeBuilder.BuilderParameter, ComponentsApi.RenderTreeBuilder.SetKey);
                        context.CodeWriter.Write(parameter.ParameterName);
                        context.CodeWriter.WriteEndMethodInvocation();
                        break;

                    case ReferenceCaptureIntermediateNode capture:
                        context.CodeWriter.WriteStartInstanceMethodInvocation(ComponentsApi.RenderTreeBuilder.BuilderParameter, capture.IsComponentCapture ? ComponentsApi.RenderTreeBuilder.AddComponentReferenceCapture : ComponentsApi.RenderTreeBuilder.AddElementReferenceCapture);
                        context.CodeWriter.Write(parameter.SeqName);
                        context.CodeWriter.Write(", ");

                        // Thunk the capture so RenderTreeBuilder's Action<object> can be satisfied.
                        var cast = capture.IsComponentCapture ? $"({capture.ComponentCaptureTypeName})" : string.Empty;
                        context.CodeWriter.Write($"(__value) => {{ {parameter.ParameterName}({cast}__value); }}");
                        context.CodeWriter.WriteEndMethodInvocation();
                        break;

                    case CascadingGenericTypeParameter:
                        // We only use the synthetic cascading parameters for type inference
                        break;

                    default:
                        throw new InvalidOperationException($"Not implemented: type inference method parameter from source {parameter.Source}");
                }
            }

            context.CodeWriter.WriteInstanceMethodInvocation(ComponentsApi.RenderTreeBuilder.BuilderParameter, ComponentsApi.RenderTreeBuilder.CloseComponent);

            writer.WriteLine("}");

            if (node.Component.Component.SuppliesCascadingGenericParameters())
            {
                // If this component cascades any generic parameters, we'll need to be able to capture its type inference
                // args at the call site. The point of this is to ensure that:
                //
                // [1] We only evaluate each expression once
                // [2] We evaluate them in the correct order matching the developer's source
                // [3] We can even make variables for lambdas or other expressions that can't just be assigned to implicitly-typed vars.
                //
                // We do that by emitting a method like the following. It has exactly the same generic type inference
                // characteristics as the regular CreateFoo_0 method emitted earlier
                //
                // public static void CreateFoo_0_CaptureParameters<T1, T2>(T1 __arg0, out T1 __arg0_out, global::System.Collections.Generic.List<T2> __arg1, out global::System.Collections.Generic.List<T2> __arg1_out, int __seq2, string __arg2, out string __arg2_out)
                // {
                //     __arg0_out = __arg0;
                //     __arg1_out = __arg1;
                //     __arg2_out = __arg2;
                // }
                //
                writer.WriteLine();
                writer.Write("public static void ");
                writer.Write(node.MethodName);
                writer.Write("_CaptureParameters<");
                writer.Write(string.Join(", ", node.Component.Component.GetTypeParameters().Select(a => a.Name)));
                writer.Write(">");

                writer.Write("(");
                var isFirst = true;
                foreach (var parameter in parameters.Where(p => p.UsedForTypeInference))
                {
                    if (isFirst)
                    {
                        isFirst = false;
                    }
                    else
                    {
                        writer.Write(", ");
                    }

                    // Emit the input parameter and its matching "_out" out-parameter.
                    writer.Write(parameter.TypeName);
                    writer.Write(" ");
                    writer.Write(parameter.ParameterName);
                    writer.Write(", out ");
                    writer.Write(parameter.TypeName);
                    writer.Write(" ");
                    writer.Write(parameter.ParameterName);
                    writer.Write("_out");
                }

                writer.WriteLine(")");
                writer.WriteLine("{");
                foreach (var parameter in parameters.Where(p => p.UsedForTypeInference))
                {
                    writer.Write(" ");
                    writer.Write(parameter.ParameterName);
                    writer.Write("_out = ");
                    writer.Write(parameter.ParameterName);
                    writer.WriteLine(";");
                }
                writer.WriteLine("}");
            }
        }

        /// <summary>
        /// Builds the ordered parameter list (seq name, type, parameter name, source node)
        /// for a type inference method: attributes/splats first (source order), then child
        /// contents, set-keys and captures, with synthetic cascaded-generic args inserted
        /// at the front.
        /// </summary>
        protected List<TypeInferenceMethodParameter> GetTypeInferenceMethodParameters(ComponentTypeInferenceMethodIntermediateNode node)
        {
            var p = new List<TypeInferenceMethodParameter>();

            // Preserve order between attributes and splats
            foreach (var child in node.Component.Children)
            {
                if (child is ComponentAttributeIntermediateNode attribute)
                {
                    string typeName;
                    if (attribute.GloballyQualifiedTypeName != null)
                    {
                        typeName = attribute.GloballyQualifiedTypeName;
                    }
                    else
                    {
                        typeName = attribute.TypeName;
                        if (attribute.BoundAttribute != null && !attribute.BoundAttribute.IsGenericTypedProperty())
                        {
                            // Non-generic bound attributes get an explicit global:: qualification.
                            typeName = "global::" + typeName;
                        }
                    }

                    p.Add(new TypeInferenceMethodParameter($"__seq{p.Count}", typeName, $"__arg{p.Count}", usedForTypeInference: true, attribute));
                }
                else if (child is SplatIntermediateNode splat)
                {
                    var typeName = ComponentsApi.AddMultipleAttributesTypeFullName;
                    p.Add(new TypeInferenceMethodParameter($"__seq{p.Count}", typeName, $"__arg{p.Count}", usedForTypeInference: false, splat));
                }
            }

            foreach (var childContent in node.Component.ChildContents)
            {
                var typeName = childContent.TypeName;
                if (childContent.BoundAttribute != null && !childContent.BoundAttribute.IsGenericTypedProperty())
                {
                    typeName = "global::" + typeName;
                }
                p.Add(new TypeInferenceMethodParameter($"__seq{p.Count}", typeName, $"__arg{p.Count}", usedForTypeInference: false, childContent));
            }

            foreach (var capture in node.Component.SetKeys)
            {
                p.Add(new TypeInferenceMethodParameter($"__seq{p.Count}", "object", $"__arg{p.Count}", usedForTypeInference: false, capture));
            }

            foreach (var capture in node.Component.Captures)
            {
                // The capture type name should already contain the global:: prefix.
                p.Add(new TypeInferenceMethodParameter($"__seq{p.Count}", capture.TypeName, $"__arg{p.Count}", usedForTypeInference: false, capture));
            }

            // Insert synthetic args for cascaded type inference at the start of the list
            // We do this last so that the indices above aren't affected
            if (node.ReceivesCascadingGenericTypes != null)
            {
                var i = 0;
                foreach (var cascadingGenericType in node.ReceivesCascadingGenericTypes)
                {
                    p.Insert(i, new TypeInferenceMethodParameter(null, cascadingGenericType.ValueType, $"__syntheticArg{i}", usedForTypeInference: true, cascadingGenericType));
                    i++;
                }
            }

            return p;
        }

        /// <summary>
        /// Records that <paramref name="variableName"/> now holds the evaluated value of the
        /// given parameter, both for cascading to descendants and for later references.
        /// </summary>
        protected static void UseCapturedCascadingGenericParameterVariable(ComponentIntermediateNode node, TypeInferenceMethodParameter parameter, string variableName)
        {
            // If this captured variable corresponds to a generic type we want to cascade to
            // descendants, supply that info to descendants
            if (node.ProvidesCascadingGenericTypes != null)
            {
                foreach (var cascadeGeneric in node.ProvidesCascadingGenericTypes.Values)
                {
                    if (cascadeGeneric.ValueSourceNode == parameter.Source)
                    {
                        cascadeGeneric.ValueExpression = variableName;
                    }
                }
            }

            // Since we've now evaluated and captured this expression, use the variable
            // instead of the expression from now on
            parameter.ReplaceSourceWithCapturedVariable(variableName);
        }

        /// <summary>
        /// One parameter of a generated type inference method.
        /// </summary>
        protected class TypeInferenceMethodParameter
        {
            public string SeqName { get; private set; }
            public string TypeName { get; private set; }
            public string ParameterName { get; private set; }
            public bool UsedForTypeInference { get; private set; }
            public object Source { get; private set; }

            public TypeInferenceMethodParameter(string seqName, string typeName, string parameterName, bool usedForTypeInference, object source)
            {
                SeqName = seqName;
                TypeName = typeName;
                ParameterName = parameterName;
                UsedForTypeInference = usedForTypeInference;
                Source = source;
            }

            public void ReplaceSourceWithCapturedVariable(string variableName)
            {
                Source = new TypeInferenceCapturedVariable(variableName);
            }
        }

        /// <summary>
        /// Marker source indicating a parameter's value now lives in a captured variable.
        /// </summary>
        protected class TypeInferenceCapturedVariable
        {
            public string VariableName { get; private set; }

            public TypeInferenceCapturedVariable(string variableName)
            {
                VariableName = variableName;
            }
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Diagnostics;
using System.IO;

namespace Microsoft.CodeAnalysis.BuildTasks
{
    /// <summary>
    /// Reads an assembly's MVID directly from its PE image by walking the
    /// DOS/COFF headers to the ".mvid" section (which holds a single 16-byte GUID).
    /// Returns <see cref="Guid.Empty"/> for anything that is not a PE image with
    /// such a section.
    /// </summary>
    public static class MvidReader
    {
        private static readonly Guid s_empty = Guid.Empty;

        public static Guid ReadAssemblyMvidOrEmpty(Stream stream)
        {
            return ReadAssemblyMvidOrEmpty(new BinaryReader(stream));
        }

        private static Guid ReadAssemblyMvidOrEmpty(BinaryReader reader)
        {
            // DOS Header: Magic number (2) — "MZ"
            if (!ReadUInt16(reader, out ushort magicNumber) || magicNumber != 0x5a4d)
            {
                return s_empty;
            }

            // DOS Header: Address of PE Signature (at 0x3C)
            if (!MoveTo(0x3C, reader))
            {
                return s_empty;
            }
            if (!ReadUInt32(reader, out uint pointerToPeSignature))
            {
                return s_empty;
            }

            // jump over the MS DOS Stub to the PE Signature
            if (!MoveTo(pointerToPeSignature, reader))
            {
                return s_empty;
            }

            // PE Signature ('P' 'E' null null)
            if (!ReadUInt32(reader, out uint peSig) || peSig != 0x00004550)
            {
                return s_empty;
            }

            // COFF Header: Machine (2)
            if (!Skip(2, reader))
            {
                return s_empty;
            }

            // COFF Header: NumberOfSections (2)
            if (!ReadUInt16(reader, out ushort sections))
            {
                return s_empty;
            }

            // COFF Header: TimeDateStamp (4), PointerToSymbolTable (4), NumberOfSymbols (4)
            if (!Skip(12, reader))
            {
                return s_empty;
            }

            // COFF Header: OptionalHeaderSize (2)
            if (!ReadUInt16(reader, out ushort optionalHeaderSize))
            {
                return s_empty;
            }

            // COFF Header: Characteristics (2)
            if (!Skip(2, reader))
            {
                return s_empty;
            }

            // Optional header
            if (!Skip(optionalHeaderSize, reader))
            {
                return s_empty;
            }

            // Section headers
            return FindMvidInSections(sections, reader);
        }

        private static Guid FindMvidInSections(ushort count, BinaryReader reader)
        {
            for (int i = 0; i < count; i++)
            {
                // Section: Name (8)
                if (!ReadBytes(reader, 8, out byte[] name))
                {
                    return s_empty;
                }

                // Match ".mvid\0" (the remaining two name bytes are padding).
                if (name.Length == 8 && name[0] == '.' &&
                    name[1] == 'm' && name[2] == 'v' && name[3] == 'i' && name[4] == 'd' && name[5] == '\0')
                {
                    // Section: VirtualSize (4)
                    if (!ReadUInt32(reader, out uint virtualSize) || virtualSize != 16)
                    {
                        // The .mvid section only stores a Guid
                        return s_empty;
                    }

                    // Section: VirtualAddress (4), SizeOfRawData (4)
                    if (!Skip(8, reader))
                    {
                        return s_empty;
                    }

                    // Section: PointerToRawData (4)
                    if (!ReadUInt32(reader, out uint pointerToRawData))
                    {
                        return s_empty;
                    }

                    return ReadMvidSection(reader, pointerToRawData);
                }
                else
                {
                    // Section: VirtualSize (4), VirtualAddress (4), SizeOfRawData (4),
                    // PointerToRawData (4), PointerToRelocations (4), PointerToLineNumbers (4),
                    // NumberOfRelocations (2), NumberOfLineNumbers (2), Characteristics (4)
                    if (!Skip(4 + 4 + 4 + 4 + 4 + 4 + 2 + 2 + 4, reader))
                    {
                        return s_empty;
                    }
                }
            }

            return s_empty;
        }

        private static Guid ReadMvidSection(BinaryReader reader, uint pointerToMvidSection)
        {
            if (!MoveTo(pointerToMvidSection, reader))
            {
                return s_empty;
            }

            if (!ReadBytes(reader, 16, out byte[] guidBytes))
            {
                return s_empty;
            }

            return new Guid(guidBytes);
        }

        // Bounds checks below use '>' (not '>='): a read or skip that ends exactly
        // at end-of-stream is valid. The previous '>=' checks were off by one and
        // rejected data stored in the last bytes of the image (e.g. an .mvid GUID
        // at the very end of the file).
        private static bool ReadUInt16(BinaryReader reader, out ushort output)
        {
            if (reader.BaseStream.Position + 2 > reader.BaseStream.Length)
            {
                output = 0;
                return false;
            }

            output = reader.ReadUInt16();
            return true;
        }

        private static bool ReadUInt32(BinaryReader reader, out uint output)
        {
            if (reader.BaseStream.Position + 4 > reader.BaseStream.Length)
            {
                output = 0;
                return false;
            }

            output = reader.ReadUInt32();
            return true;
        }

        private static bool ReadBytes(BinaryReader reader, int count, out byte[] output)
        {
            if (reader.BaseStream.Position + count > reader.BaseStream.Length)
            {
                output = null;
                return false;
            }

            output = reader.ReadBytes(count);
            return true;
        }

        private static bool Skip(int bytes, BinaryReader reader)
        {
            if (reader.BaseStream.Position + bytes > reader.BaseStream.Length)
            {
                return false;
            }

            reader.BaseStream.Seek(bytes, SeekOrigin.Current);
            return true;
        }

        private static bool MoveTo(uint position, BinaryReader reader)
        {
            // Seeking to the exact stream length is rejected: nothing can be read there.
            if (position >= reader.BaseStream.Length)
            {
                return false;
            }

            reader.BaseStream.Seek(position, SeekOrigin.Begin);
            return true;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Immutable; using System.Diagnostics; using System.IO; using System.Reflection.Internal; using System.Reflection.Metadata; using System.Runtime.ExceptionServices; using System.Threading; namespace System.Reflection.PortableExecutable { /// <summary> /// Portable Executable format reader. /// </summary> /// <remarks> /// The implementation is thread-safe, that is multiple threads can read data from the reader in parallel. /// Disposal of the reader is not thread-safe (see <see cref="Dispose"/>). /// </remarks> public sealed partial class PEReader : IDisposable { /// <summary> /// True if the PE image has been loaded into memory by the OS loader. /// </summary> public bool IsLoadedImage { get; } // May be null in the event that the entire image is not // deemed necessary and we have been instructed to read // the image contents without being lazy. // // _lazyPEHeaders are not null in that case. private MemoryBlockProvider _peImage; // If we read the data from the image lazily (peImage != null) we defer reading the PE headers. private PEHeaders _lazyPEHeaders; private AbstractMemoryBlock _lazyMetadataBlock; private AbstractMemoryBlock _lazyImageBlock; private AbstractMemoryBlock[] _lazyPESectionBlocks; /// <summary> /// Creates a Portable Executable reader over a PE image stored in memory. 
/// </summary> /// <param name="peImage">Pointer to the start of the PE image.</param> /// <param name="size">The size of the PE image.</param> /// <exception cref="ArgumentNullException"><paramref name="peImage"/> is <see cref="IntPtr.Zero"/>.</exception> /// <exception cref="ArgumentOutOfRangeException"><paramref name="size"/> is negative.</exception> /// <remarks> /// The memory is owned by the caller and not released on disposal of the <see cref="PEReader"/>. /// The caller is responsible for keeping the memory alive and unmodified throughout the lifetime of the <see cref="PEReader"/>. /// The content of the image is not read during the construction of the <see cref="PEReader"/> /// </remarks> public unsafe PEReader(byte* peImage, int size) : this(peImage, size, isLoadedImage: false) { } /// <summary> /// Creates a Portable Executable reader over a PE image stored in memory. /// </summary> /// <param name="peImage">Pointer to the start of the PE image.</param> /// <param name="size">The size of the PE image.</param> /// <param name="isLoadedImage">True if the PE image has been loaded into memory by the OS loader.</param> /// <exception cref="ArgumentNullException"><paramref name="peImage"/> is <see cref="IntPtr.Zero"/>.</exception> /// <exception cref="ArgumentOutOfRangeException"><paramref name="size"/> is negative.</exception> /// <remarks> /// The memory is owned by the caller and not released on disposal of the <see cref="PEReader"/>. /// The caller is responsible for keeping the memory alive and unmodified throughout the lifetime of the <see cref="PEReader"/>. 
/// The content of the image is not read during the construction of the <see cref="PEReader"/> /// </remarks> public unsafe PEReader(byte* peImage, int size, bool isLoadedImage) { if (peImage == null) { throw new ArgumentNullException(nameof(peImage)); } if (size < 0) { throw new ArgumentOutOfRangeException(nameof(size)); } _peImage = new ExternalMemoryBlockProvider(peImage, size); IsLoadedImage = isLoadedImage; } /// <summary> /// Creates a Portable Executable reader over a PE image stored in a stream. /// </summary> /// <param name="peStream">PE image stream.</param> /// <exception cref="ArgumentNullException"><paramref name="peStream"/> is null.</exception> /// <remarks> /// Ownership of the stream is transferred to the <see cref="PEReader"/> upon successful validation of constructor arguments. It will be /// disposed by the <see cref="PEReader"/> and the caller must not manipulate it. /// </remarks> public PEReader(Stream peStream) : this(peStream, PEStreamOptions.Default) { } /// <summary> /// Creates a Portable Executable reader over a PE image stored in a stream beginning at its current position and ending at the end of the stream. /// </summary> /// <param name="peStream">PE image stream.</param> /// <param name="options"> /// Options specifying how sections of the PE image are read from the stream. /// /// Unless <see cref="PEStreamOptions.LeaveOpen"/> is specified, ownership of the stream is transferred to the <see cref="PEReader"/> /// upon successful argument validation. It will be disposed by the <see cref="PEReader"/> and the caller must not manipulate it. /// /// Unless <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/> is specified no data /// is read from the stream during the construction of the <see cref="PEReader"/>. Furthermore, the stream must not be manipulated /// by caller while the <see cref="PEReader"/> is alive and undisposed. 
///
/// If <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/>, the <see cref="PEReader"/>
/// will have read all of the data requested during construction. As such, if <see cref="PEStreamOptions.LeaveOpen"/> is also
/// specified, the caller retains full ownership of the stream and is assured that it will not be manipulated by the <see cref="PEReader"/>
/// after construction.
/// </param>
/// <exception cref="ArgumentNullException"><paramref name="peStream"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="options"/> has an invalid value.</exception>
/// <exception cref="IOException">Error reading from the stream (only when prefetching data).</exception>
/// <exception cref="BadImageFormatException"><see cref="PEStreamOptions.PrefetchMetadata"/> is specified and the PE headers of the image are invalid.</exception>
// A size of 0 is forwarded to the size-validating overload; per the summary above the
// image then extends to the end of the stream.
public PEReader(Stream peStream, PEStreamOptions options)
    : this(peStream, options, 0)
{
}

/// <summary>
/// Creates a Portable Executable reader over a PE image of the given size beginning at the stream's current position.
/// </summary>
/// <param name="peStream">PE image stream.</param>
/// <param name="size">PE image size.</param>
/// <param name="options">
/// Options specifying how sections of the PE image are read from the stream.
///
/// Unless <see cref="PEStreamOptions.LeaveOpen"/> is specified, ownership of the stream is transferred to the <see cref="PEReader"/>
/// upon successful argument validation. It will be disposed by the <see cref="PEReader"/> and the caller must not manipulate it.
///
/// Unless <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/> is specified no data
/// is read from the stream during the construction of the <see cref="PEReader"/>. Furthermore, the stream must not be manipulated
/// by caller while the <see cref="PEReader"/> is alive and undisposed.
///
/// If <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/>, the <see cref="PEReader"/>
/// will have read all of the data requested during construction. As such, if <see cref="PEStreamOptions.LeaveOpen"/> is also
/// specified, the caller retains full ownership of the stream and is assured that it will not be manipulated by the <see cref="PEReader"/>
/// after construction.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">Size is negative or extends past the end of the stream.</exception>
/// <exception cref="IOException">Error reading from the stream (only when prefetching data).</exception>
/// <exception cref="BadImageFormatException"><see cref="PEStreamOptions.PrefetchMetadata"/> is specified and the PE headers of the image are invalid.</exception>
public unsafe PEReader(Stream peStream, PEStreamOptions options, int size)
{
    if (peStream == null)
    {
        throw new ArgumentNullException(nameof(peStream));
    }

    if (!peStream.CanRead || !peStream.CanSeek)
    {
        throw new ArgumentException(SR.StreamMustSupportReadAndSeek, nameof(peStream));
    }

    if (!options.IsValid())
    {
        throw new ArgumentOutOfRangeException(nameof(options));
    }

    IsLoadedImage = (options & PEStreamOptions.IsLoadedImage) != 0;

    long start = peStream.Position;
    int actualSize = StreamExtensions.GetAndValidateSize(peStream, size, nameof(peStream));

    // Tracks whether we still need to close the stream ourselves on exit:
    // once a StreamMemoryBlockProvider takes ownership, it becomes responsible for it.
    bool closeStream = true;
    try
    {
        bool isFileStream = FileStreamReadLightUp.IsFileStream(peStream);

        if ((options & (PEStreamOptions.PrefetchMetadata | PEStreamOptions.PrefetchEntireImage)) == 0)
        {
            // No prefetch requested: the provider reads lazily and owns the stream
            // (unless LeaveOpen was specified).
            _peImage = new StreamMemoryBlockProvider(peStream, start, actualSize, isFileStream, (options & PEStreamOptions.LeaveOpen) != 0);
            closeStream = false;
        }
        else
        {
            // Read in the entire image or metadata blob:
            if ((options & PEStreamOptions.PrefetchEntireImage) != 0)
            {
                var imageBlock = StreamMemoryBlockProvider.ReadMemoryBlockNoLock(peStream, isFileStream, start, actualSize);
                _lazyImageBlock = imageBlock;
                _peImage = new ExternalMemoryBlockProvider(imageBlock.Pointer, imageBlock.Size);

                // if the caller asked for metadata initialize the PE headers (calculates metadata offset):
                if ((options & PEStreamOptions.PrefetchMetadata) != 0)
                {
                    InitializePEHeaders();
                }
            }
            else
            {
                // The peImage is left null, but the lazyMetadataBlock is initialized up front.
                _lazyPEHeaders = new PEHeaders(peStream);
                _lazyMetadataBlock = StreamMemoryBlockProvider.ReadMemoryBlockNoLock(peStream, isFileStream, _lazyPEHeaders.MetadataStartOffset, _lazyPEHeaders.MetadataSize);
            }

            // We read all we need, the stream is going to be closed.
        }
    }
    finally
    {
        if (closeStream && (options & PEStreamOptions.LeaveOpen) == 0)
        {
            peStream.Dispose();
        }
    }
}

/// <summary>
/// Creates a Portable Executable reader over a PE image stored in a byte array.
/// </summary>
/// <param name="peImage">PE image.</param>
/// <remarks>
/// The content of the image is not read during the construction of the <see cref="PEReader"/>
/// </remarks>
/// <exception cref="ArgumentNullException"><paramref name="peImage"/> is null.</exception>
public PEReader(ImmutableArray<byte> peImage)
{
    // A default(ImmutableArray<byte>) carries no array and is treated as null.
    if (peImage.IsDefault)
    {
        throw new ArgumentNullException(nameof(peImage));
    }

    _peImage = new ByteArrayMemoryProvider(peImage);
}

/// <summary>
/// Disposes all memory allocated by the reader.
/// </summary>
/// <remarks>
/// <see cref="Dispose"/> can be called multiple times (but not in parallel).
/// It is not safe to call <see cref="Dispose"/> in parallel with any other operation on the <see cref="PEReader"/>
/// or reading from <see cref="PEMemoryBlock"/>s retrieved from the reader.
/// </remarks>
public void Dispose()
{
    // Null out _lazyPEHeaders first: GetPEImage uses it to distinguish
    // "disposed" from "image not available".
    _lazyPEHeaders = null;

    _peImage?.Dispose();
    _peImage = null;

    _lazyImageBlock?.Dispose();
    _lazyImageBlock = null;

    _lazyMetadataBlock?.Dispose();
    _lazyMetadataBlock = null;

    var peSectionBlocks = _lazyPESectionBlocks;
    if (peSectionBlocks != null)
    {
        foreach (var block in peSectionBlocks)
        {
            block?.Dispose();
        }

        _lazyPESectionBlocks = null;
    }
}

// Returns the image provider, translating its absence into the appropriate error:
// ObjectDisposedException-style failure when the reader was disposed, otherwise
// "PE image not available" (e.g. only metadata was prefetched from a stream).
private MemoryBlockProvider GetPEImage()
{
    var peImage = _peImage;
    if (peImage == null)
    {
        if (_lazyPEHeaders == null)
        {
            Throw.PEReaderDisposed();
        }

        Throw.InvalidOperation_PEImageNotAvailable();
    }

    return peImage;
}

/// <summary>
/// Gets the PE headers.
/// </summary>
/// <exception cref="BadImageFormatException">The headers contain invalid data.</exception>
/// <exception cref="IOException">Error reading from the stream.</exception>
public PEHeaders PEHeaders
{
    get
    {
        if (_lazyPEHeaders == null)
        {
            InitializePEHeaders();
        }

        return _lazyPEHeaders;
    }
}

/// <exception cref="IOException">Error reading from the stream.</exception>
private void InitializePEHeaders()
{
    StreamConstraints constraints;
    Stream stream = GetPEImage().GetStream(out constraints);

    PEHeaders headers;
    if (constraints.GuardOpt != null)
    {
        // The provider requires serialized access to its stream.
        lock (constraints.GuardOpt)
        {
            headers = ReadPEHeadersNoLock(stream, constraints.ImageStart, constraints.ImageSize, IsLoadedImage);
        }
    }
    else
    {
        headers = ReadPEHeadersNoLock(stream, constraints.ImageStart, constraints.ImageSize, IsLoadedImage);
    }

    // First writer wins; losing headers are simply discarded.
    Interlocked.CompareExchange(ref _lazyPEHeaders, headers, null);
}

/// <exception cref="IOException">Error reading from the stream.</exception>
private static PEHeaders ReadPEHeadersNoLock(Stream stream, long imageStartPosition, int imageSize, bool isLoadedImage)
{
    Debug.Assert(imageStartPosition >= 0 && imageStartPosition <= stream.Length);
    stream.Seek(imageStartPosition, SeekOrigin.Begin);
    return new PEHeaders(stream, imageSize, isLoadedImage);
}

/// <summary>
/// Returns a view of the entire image as a pointer and length.
/// </summary>
/// <exception cref="InvalidOperationException">PE image not available.</exception>
private AbstractMemoryBlock GetEntireImageBlock()
{
    if (_lazyImageBlock == null)
    {
        var newBlock = GetPEImage().GetMemoryBlock();
        if (Interlocked.CompareExchange(ref _lazyImageBlock, newBlock, null) != null)
        {
            // another thread created the block already, we need to dispose ours:
            newBlock.Dispose();
        }
    }

    return _lazyImageBlock;
}

/// <exception cref="IOException">IO error while reading from the underlying stream.</exception>
/// <exception cref="InvalidOperationException">PE image doesn't have metadata.</exception>
private AbstractMemoryBlock GetMetadataBlock()
{
    if (!HasMetadata)
    {
        throw new InvalidOperationException(SR.PEImageDoesNotHaveMetadata);
    }

    if (_lazyMetadataBlock == null)
    {
        var newBlock = GetPEImage().GetMemoryBlock(PEHeaders.MetadataStartOffset, PEHeaders.MetadataSize);
        if (Interlocked.CompareExchange(ref _lazyMetadataBlock, newBlock, null) != null)
        {
            // another thread created the block already, we need to dispose ours:
            newBlock.Dispose();
        }
    }

    return _lazyMetadataBlock;
}

/// <exception cref="IOException">IO error while reading from the underlying stream.</exception>
/// <exception cref="InvalidOperationException">PE image not available.</exception>
private AbstractMemoryBlock GetPESectionBlock(int index)
{
    Debug.Assert(index >= 0 && index < PEHeaders.SectionHeaders.Length);

    var peImage = GetPEImage();

    // Lazily allocate the per-section cache array; first writer wins.
    if (_lazyPESectionBlocks == null)
    {
        Interlocked.CompareExchange(ref _lazyPESectionBlocks, new AbstractMemoryBlock[PEHeaders.SectionHeaders.Length], null);
    }

    AbstractMemoryBlock newBlock;
    if (IsLoadedImage)
    {
        // In a loaded image sections reside at their virtual addresses.
        newBlock = peImage.GetMemoryBlock(
            PEHeaders.SectionHeaders[index].VirtualAddress,
            PEHeaders.SectionHeaders[index].VirtualSize);
    }
    else
    {
        // Virtual size can be smaller than size in the image
        // since the size in the image is aligned.
        // Trim the alignment.
        //
        // Virtual size can also be larger than size in the image.
        // When loaded sizeInImage bytes are mapped from the image
        // and the rest of the bytes are zeroed out.
        // Only return data stored in the image.

        int size = Math.Min(
            PEHeaders.SectionHeaders[index].VirtualSize,
            PEHeaders.SectionHeaders[index].SizeOfRawData);

        newBlock = peImage.GetMemoryBlock(PEHeaders.SectionHeaders[index].PointerToRawData, size);
    }

    if (Interlocked.CompareExchange(ref _lazyPESectionBlocks[index], newBlock, null) != null)
    {
        // another thread created the block already, we need to dispose ours:
        newBlock.Dispose();
    }

    return _lazyPESectionBlocks[index];
}

/// <summary>
/// Return true if the reader can access the entire PE image.
/// </summary>
/// <remarks>
/// Returns false if the <see cref="PEReader"/> is constructed from a stream and only part of it is prefetched into memory.
/// </remarks>
public bool IsEntireImageAvailable => _lazyImageBlock != null || _peImage != null;

/// <summary>
/// Gets a pointer to and size of the PE image if available (<see cref="IsEntireImageAvailable"/>).
/// </summary>
/// <exception cref="InvalidOperationException">The entire PE image is not available.</exception>
public PEMemoryBlock GetEntireImage()
{
    return new PEMemoryBlock(GetEntireImageBlock());
}

/// <summary>
/// Returns true if the PE image contains CLI metadata.
/// </summary>
/// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception>
/// <exception cref="IOException">Error reading from the underlying stream.</exception>
public bool HasMetadata
{
    get { return PEHeaders.MetadataSize > 0; }
}

/// <summary>
/// Loads PE section that contains CLI metadata.
/// </summary>
/// <exception cref="InvalidOperationException">The PE image doesn't contain metadata (<see cref="HasMetadata"/> returns false).</exception>
/// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception>
/// <exception cref="IOException">IO error while reading from the underlying stream.</exception>
public PEMemoryBlock GetMetadata()
{
    return new PEMemoryBlock(GetMetadataBlock());
}

/// <summary>
/// Loads PE section that contains the specified <paramref name="relativeVirtualAddress"/> into memory
/// and returns a memory block that starts at <paramref name="relativeVirtualAddress"/> and ends at the end of the containing section.
/// </summary>
/// <param name="relativeVirtualAddress">Relative Virtual Address of the data to read.</param>
/// <returns>
/// An empty block if <paramref name="relativeVirtualAddress"/> doesn't represent a location in any of the PE sections of this PE image.
/// </returns>
/// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception>
/// <exception cref="IOException">IO error while reading from the underlying stream.</exception>
/// <exception cref="InvalidOperationException">PE image not available.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="relativeVirtualAddress"/> is negative.</exception>
public PEMemoryBlock GetSectionData(int relativeVirtualAddress)
{
    if (relativeVirtualAddress < 0)
    {
        Throw.ArgumentOutOfRange(nameof(relativeVirtualAddress));
    }

    int sectionIndex = PEHeaders.GetContainingSectionIndex(relativeVirtualAddress);
    if (sectionIndex < 0)
    {
        return default(PEMemoryBlock);
    }

    var block = GetPESectionBlock(sectionIndex);

    int relativeOffset = relativeVirtualAddress - PEHeaders.SectionHeaders[sectionIndex].VirtualAddress;
    if (relativeOffset > block.Size)
    {
        // The RVA points past the data actually stored for the section.
        return default(PEMemoryBlock);
    }

    return new PEMemoryBlock(block, relativeOffset);
}

/// <summary>
/// Loads PE section of the specified name into memory and returns a memory block that spans the section.
/// </summary>
/// <param name="sectionName">Name of the section.</param>
/// <returns>
/// An empty block if no section of the given <paramref name="sectionName"/> exists in this PE image.
/// </returns>
/// <exception cref="ArgumentNullException"><paramref name="sectionName"/> is null.</exception>
/// <exception cref="InvalidOperationException">PE image not available.</exception>
public PEMemoryBlock GetSectionData(string sectionName)
{
    if (sectionName == null)
    {
        Throw.ArgumentNull(nameof(sectionName));
    }

    int sectionIndex = PEHeaders.IndexOfSection(sectionName);
    if (sectionIndex < 0)
    {
        return default(PEMemoryBlock);
    }

    return new PEMemoryBlock(GetPESectionBlock(sectionIndex));
}

/// <summary>
/// Reads all Debug Directory table entries.
/// </summary>
/// <exception cref="BadImageFormatException">Bad format of the entry.</exception>
/// <exception cref="IOException">IO error while reading from the underlying stream.</exception>
/// <exception cref="InvalidOperationException">PE image not available.</exception>
public ImmutableArray<DebugDirectoryEntry> ReadDebugDirectory()
{
    var debugDirectory = PEHeaders.PEHeader.DebugTableDirectory;
    if (debugDirectory.Size == 0)
    {
        return ImmutableArray<DebugDirectoryEntry>.Empty;
    }

    int position;
    if (!PEHeaders.TryGetDirectoryOffset(debugDirectory, out position))
    {
        throw new BadImageFormatException(SR.InvalidDirectoryRVA);
    }

    // The directory must hold a whole number of fixed-size entries.
    if (debugDirectory.Size % DebugDirectoryEntry.Size != 0)
    {
        throw new BadImageFormatException(SR.InvalidDirectorySize);
    }

    using (AbstractMemoryBlock block = GetPEImage().GetMemoryBlock(position, debugDirectory.Size))
    {
        return ReadDebugDirectoryEntries(block.GetReader());
    }
}

// internal for testing
internal static ImmutableArray<DebugDirectoryEntry> ReadDebugDirectoryEntries(BlobReader reader)
{
    int entryCount = reader.Length / DebugDirectoryEntry.Size;
    var builder = ImmutableArray.CreateBuilder<DebugDirectoryEntry>(entryCount);
    for (int i = 0; i < entryCount; i++)
    {
        // Reserved, must be zero.
        int characteristics = reader.ReadInt32();
        if (characteristics != 0)
        {
            throw new BadImageFormatException(SR.InvalidDebugDirectoryEntryCharacteristics);
        }

        uint stamp = reader.ReadUInt32();
        ushort majorVersion = reader.ReadUInt16();
        ushort minorVersion = reader.ReadUInt16();

        var type = (DebugDirectoryEntryType)reader.ReadInt32();

        int dataSize = reader.ReadInt32();
        int dataRva = reader.ReadInt32();
        int dataPointer = reader.ReadInt32();

        builder.Add(new DebugDirectoryEntry(stamp, majorVersion, minorVersion, type, dataSize, dataRva, dataPointer));
    }

    return builder.MoveToImmutable();
}

// Maps a debug directory entry to the memory holding its data: by RVA for a
// loaded image, by file pointer for a flat image.
private AbstractMemoryBlock GetDebugDirectoryEntryDataBlock(DebugDirectoryEntry entry)
{
    int dataOffset = IsLoadedImage ? entry.DataRelativeVirtualAddress : entry.DataPointer;
    return GetPEImage().GetMemoryBlock(dataOffset, entry.DataSize);
}

/// <summary>
/// Reads the data pointed to by the specified Debug Directory entry and interprets them as CodeView.
/// </summary>
/// <exception cref="ArgumentException"><paramref name="entry"/> is not a CodeView entry.</exception>
/// <exception cref="BadImageFormatException">Bad format of the data.</exception>
/// <exception cref="IOException">IO error while reading from the underlying stream.</exception>
/// <exception cref="InvalidOperationException">PE image not available.</exception>
public CodeViewDebugDirectoryData ReadCodeViewDebugDirectoryData(DebugDirectoryEntry entry)
{
    if (entry.Type != DebugDirectoryEntryType.CodeView)
    {
        Throw.InvalidArgument(SR.Format(SR.UnexpectedDebugDirectoryType, nameof(DebugDirectoryEntryType.CodeView)), nameof(entry));
    }

    using (var block = GetDebugDirectoryEntryDataBlock(entry))
    {
        return DecodeCodeViewDebugDirectoryData(block);
    }
}

// internal for testing
internal static CodeViewDebugDirectoryData DecodeCodeViewDebugDirectoryData(AbstractMemoryBlock block)
{
    var reader = block.GetReader();

    // CodeView data starts with the "RSDS" signature.
    if (reader.ReadByte() != (byte)'R' || reader.ReadByte() != (byte)'S' || reader.ReadByte() != (byte)'D' || reader.ReadByte() != (byte)'S')
    {
        throw new BadImageFormatException(SR.UnexpectedCodeViewDataSignature);
    }

    Guid guid = reader.ReadGuid();
    int age = reader.ReadInt32();
    string path = reader.ReadUtf8NullTerminated();

    return new CodeViewDebugDirectoryData(guid, age, path);
}

/// <summary>
/// Opens a Portable PDB associated with this PE image.
/// </summary>
/// <param name="peImagePath">
/// The path to the PE image. The path is used to locate the PDB file located in the directory containing the PE file.
/// </param>
/// <param name="pdbFileStreamProvider">
/// If specified, called to open a <see cref="Stream"/> for a given file path.
/// The provider is expected to either return a readable and seekable <see cref="Stream"/>,
/// or <c>null</c> if the target file doesn't exist or should be ignored for some reason.
///
/// The provider shall throw <see cref="IOException"/> if it fails to open the file due to an unexpected IO error.
/// </param>
/// <param name="pdbReaderProvider">
/// If successful, a new instance of <see cref="MetadataReaderProvider"/> to be used to read the Portable PDB.
/// </param>
/// <param name="pdbPath">
/// If successful and the PDB is found in a file, the path to the file. Returns <c>null</c> if the PDB is embedded in the PE image itself.
/// </param>
/// <returns>
/// True if the PE image has a PDB associated with it and the PDB has been successfully opened.
/// </returns>
/// <remarks>
/// Implements a simple PDB file lookup based on the content of the PE image Debug Directory.
/// A sophisticated tool might need to follow up with additional lookup on search paths or symbol server.
///
/// The method looks the PDB up in the following steps in the listed order:
/// 1) Check for a matching PDB file of the name found in the CodeView entry in the directory containing the PE file (the directory of <paramref name="peImagePath"/>).
/// 2) Check for a PDB embedded in the PE image itself.
///
/// The first PDB that matches the information specified in the Debug Directory is returned.
/// </remarks>
/// <exception cref="ArgumentNullException"><paramref name="peImagePath"/> or <paramref name="pdbFileStreamProvider"/> is null.</exception>
/// <exception cref="InvalidOperationException">The stream returned from <paramref name="pdbFileStreamProvider"/> doesn't support read and seek operations.</exception>
/// <exception cref="BadImageFormatException">No matching PDB file is found due to an error: The PE image or the PDB is invalid.</exception>
/// <exception cref="IOException">No matching PDB file is found due to an error: An IO error occurred while reading the PE image or the PDB.</exception>
public bool TryOpenAssociatedPortablePdb(string peImagePath, Func<string, Stream> pdbFileStreamProvider, out MetadataReaderProvider pdbReaderProvider, out string pdbPath)
{
    if (peImagePath == null)
    {
        Throw.ArgumentNull(nameof(peImagePath));
    }

    if (pdbFileStreamProvider == null)
    {
        Throw.ArgumentNull(nameof(pdbFileStreamProvider));
    }

    pdbReaderProvider = null;
    pdbPath = null;

    string peImageDirectory;
    try
    {
        peImageDirectory = Path.GetDirectoryName(peImagePath);
    }
    catch (Exception e)
    {
        // Surface an invalid path as an argument error rather than the raw Path failure.
        throw new ArgumentException(e.Message, nameof(peImagePath));
    }

    Exception errorToReport = null;
    var entries = ReadDebugDirectory();

    // First try .pdb file specified in CodeView data (we prefer .pdb file on disk over embedded PDB
    // since embedded PDB needs decompression which is less efficient than memory-mapping the file).
    var codeViewEntry = entries.FirstOrDefault(e => e.IsPortableCodeView);
    if (codeViewEntry.DataSize != 0 && TryOpenCodeViewPortablePdb(codeViewEntry, peImageDirectory, pdbFileStreamProvider, out pdbReaderProvider, out pdbPath, ref errorToReport))
    {
        return true;
    }

    // if it failed try Embedded Portable PDB (if available):
    var embeddedPdbEntry = entries.FirstOrDefault(e => e.Type == DebugDirectoryEntryType.EmbeddedPortablePdb);
    if (embeddedPdbEntry.DataSize != 0)
    {
        bool openedEmbeddedPdb = false;
        pdbReaderProvider = null;
        TryOpenEmbeddedPortablePdb(embeddedPdbEntry, ref openedEmbeddedPdb, ref pdbReaderProvider, ref errorToReport);
        if (openedEmbeddedPdb)
            return true;
    }

    // Report any metadata and IO errors. PDB might exist but we couldn't read some metadata.
    // The caller might choose to ignore the failure or report it to the user.
    if (errorToReport != null)
    {
        Debug.Assert(errorToReport is BadImageFormatException || errorToReport is IOException);
        ExceptionDispatchInfo.Capture(errorToReport).Throw();
    }

    return false;
}

// Attempts step 1 of the lookup: open the .pdb named in the CodeView entry from the
// PE image's directory and validate its id against the entry's GUID/stamp.
private bool TryOpenCodeViewPortablePdb(DebugDirectoryEntry codeViewEntry, string peImageDirectory, Func<string, Stream> pdbFileStreamProvider, out MetadataReaderProvider provider, out string pdbPath, ref Exception errorToReport)
{
    pdbPath = null;
    provider = null;

    CodeViewDebugDirectoryData data;

    try
    {
        data = ReadCodeViewDebugDirectoryData(codeViewEntry);
    }
    catch (Exception e) when (e is BadImageFormatException || e is IOException)
    {
        errorToReport = errorToReport ?? e;
        return false;
    }

    if (data.Age != 1)
    {
        // not a portable code view:
        return false;
    }

    var id = new BlobContentId(data.Guid, codeViewEntry.Stamp);

    // The interpretation of the path in the CodeView needs to be platform agnostic,
    // so that PDBs built on Windows work on Unix-like systems and vice versa.
    // System.IO.Path.GetFileName() on Unix-like systems doesn't treat '\' as a file name separator,
    // so we need a custom implementation. Also avoid throwing an exception if the path contains invalid characters,
    // they might not be invalid on the other platform. It's up to the FS APIs to deal with that when opening the stream.
    string collocatedPdbPath = PathUtilities.CombinePathWithRelativePath(peImageDirectory, PathUtilities.GetFileName(data.Path));

    if (TryOpenPortablePdbFile(collocatedPdbPath, id, pdbFileStreamProvider, out provider, ref errorToReport))
    {
        pdbPath = collocatedPdbPath;
        return true;
    }

    return false;
}

// Opens a candidate PDB file and accepts it only if its DebugMetadataHeader id
// matches the id recorded in the PE image's CodeView entry.
private bool TryOpenPortablePdbFile(string path, BlobContentId id, Func<string, Stream> pdbFileStreamProvider, out MetadataReaderProvider provider, ref Exception errorToReport)
{
    provider = null;
    MetadataReaderProvider candidate = null;

    try
    {
        Stream pdbStream;

        try
        {
            pdbStream = pdbFileStreamProvider(path);
        }
        catch (FileNotFoundException)
        {
            // Not an unexpected IO exception, continue without reporting the error.
            pdbStream = null;
        }

        if (pdbStream == null)
        {
            return false;
        }

        if (!pdbStream.CanRead || !pdbStream.CanSeek)
        {
            throw new InvalidOperationException(SR.StreamMustSupportReadAndSeek);
        }

        candidate = MetadataReaderProvider.FromPortablePdbStream(pdbStream);

        // Validate that the PDB matches the assembly version
        if (new BlobContentId(candidate.GetMetadataReader().DebugMetadataHeader.Id) != id)
        {
            return false;
        }

        provider = candidate;
        return true;
    }
    catch (Exception e) when (e is BadImageFormatException || e is IOException)
    {
        errorToReport = errorToReport ?? e;
        return false;
    }
    finally
    {
        // Dispose the candidate unless ownership was transferred to the caller via 'provider'.
        if (provider == null)
        {
            candidate?.Dispose();
        }
    }
}

// Implemented in a partial class when embedded PDB support is compiled in;
// otherwise the call is a no-op and 'openedEmbeddedPdb' stays false.
partial void TryOpenEmbeddedPortablePdb(DebugDirectoryEntry embeddedPdbEntry, ref bool openedEmbeddedPdb, ref MetadataReaderProvider provider, ref Exception errorToReport);
}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Runtime.CompilerServices;

// Exercises float/double -> integral cast behavior. Every conversion comes in two
// flavors: a [NoInlining] variant and an inlinable variant, so callers can compare
// the results produced by the two codegen paths.
internal class FloatOvfToInt
{
    // ---- float -> 64-bit integral ----

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static long FloatToLong(float f) => (long)f;

    public static long FloatToLongInline(float f) => (long)f;

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static ulong FloatToUlong(float f) => (ulong)f;

    public static ulong FloatToUlongInline(float f) => (ulong)f;

    // ---- float -> 32-bit integral ----

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static int FloatToInt(float f) => (int)f;

    public static int FloatToIntInline(float f) => (int)f;

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static uint FloatToUint(float f) => (uint)f;

    public static uint FloatToUintInline(float f) => (uint)f;

    // ---- float -> 16-bit integral ----

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static short FloatToShort(float f) => (short)f;

    public static short FloatToShortInline(float f) => (short)f;

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static ushort FloatToUshort(float f) => (ushort)f;

    public static ushort FloatToUshortInline(float f) => (ushort)f;

    // ---- float -> 8-bit integral ----

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static sbyte FloatToSbyte(float f) => (sbyte)f;

    public static sbyte FloatToSbyteInline(float f) => (sbyte)f;

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static byte FloatToByte(float f) => (byte)f;

    public static byte FloatToByteInline(float f) => (byte)f;

    // ---- double -> 64-bit integral ----

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static long DoubleToLong(double d) => (long)d;

    public static long DoubleToLongInline(double d) => (long)d;

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static ulong DoubleToUlong(double d) => (ulong)d;

    public static ulong DoubleToUlongInline(double d) => (ulong)d;
    // ---- double -> 32/16/8-bit integral conversions, NoInlining vs inlinable pairs ----

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static int DoubleToInt(double d) { return (int)d; }
    public static int DoubleToIntInline(double d) { return (int)d; }

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static uint DoubleToUint(double d) { return (uint)d; }
    public static uint DoubleToUintInline(double d) { return (uint)d; }

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static short DoubleToShort(double d) { return (short)d; }
    public static short DoubleToShortInline(double d) { return (short)d; }

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static ushort DoubleToUshort(double d) { return (ushort)d; }
    public static ushort DoubleToUshortInline(double d) { return (ushort)d; }

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static sbyte DoubleToSbyte(double d) { return (sbyte)d; }
    public static sbyte DoubleToSbyteInline(double d) { return (sbyte)d; }

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static byte DoubleToByte(double d) { return (byte)d; }
    public static byte DoubleToByteInline(double d) { return (byte)d; }

    // Dumps the (non-inlined) conversion results of an out-of-range value in hex,
    // for manual inspection of the overflow behavior of each target width.
    public static void PrintValues()
    {
        float bigf = 100000000000000000000000000000.0f;
        Console.WriteLine("F to long = 0x{0}", FloatToLong(bigf).ToString("x"));
        Console.WriteLine("F to ulong = 0x{0}", FloatToUlong(bigf).ToString("x"));
        Console.WriteLine("-F to long = 0x{0}", FloatToLong(-bigf).ToString("x"));
        Console.WriteLine("-F to ulong = 0x{0}", FloatToUlong(-bigf).ToString("x"));
        Console.WriteLine("");
        Console.WriteLine("F to int = 0x{0}", FloatToInt(bigf).ToString("x"));
        Console.WriteLine("F to uint = 0x{0}", FloatToUint(bigf).ToString("x"));
        Console.WriteLine("-F to int = 0x{0}", FloatToInt(-bigf).ToString("x"));
        Console.WriteLine("-F to uint = 0x{0}", FloatToUint(-bigf).ToString("x"));
        Console.WriteLine("");
        Console.WriteLine("F to short = 0x{0}", FloatToShort(bigf).ToString("x"));
        Console.WriteLine("F to ushort = 0x{0}", FloatToUshort(bigf).ToString("x"));
        Console.WriteLine("-F to short = 0x{0}", FloatToShort(-bigf).ToString("x"));
        Console.WriteLine("-F to ushort = 0x{0}", FloatToUshort(-bigf).ToString("x"));
        Console.WriteLine("");
        Console.WriteLine("F to sbyte = 0x{0}", FloatToSbyte(bigf).ToString("x"));
        Console.WriteLine("F to byte = 0x{0}", FloatToByte(bigf).ToString("x"));
        Console.WriteLine("-F to sbyte = 0x{0}", FloatToSbyte(-bigf).ToString("x"));
        Console.WriteLine("-F to byte = 0x{0}", FloatToByte(-bigf).ToString("x"));
        Console.WriteLine("");

        double bigd = 100000000000000000000000000000.0;
        Console.WriteLine("D to long = 0x{0}", DoubleToLong(bigd).ToString("x"));
        Console.WriteLine("D to ulong = 0x{0}", DoubleToUlong(bigd).ToString("x"));
        Console.WriteLine("-D to long = 0x{0}", DoubleToLong(-bigd).ToString("x"));
        Console.WriteLine("-D to ulong = 0x{0}", DoubleToUlong(-bigd).ToString("x"));
        Console.WriteLine("");
        Console.WriteLine("D to int = 0x{0}", DoubleToInt(bigd).ToString("x"));
        Console.WriteLine("D to uint = 0x{0}", DoubleToUint(bigd).ToString("x"));
        Console.WriteLine("-D to int = 0x{0}", DoubleToInt(-bigd).ToString("x"));
        Console.WriteLine("-D to uint = 0x{0}", DoubleToUint(-bigd).ToString("x"));
        Console.WriteLine("");
        Console.WriteLine("D to short = 0x{0}", DoubleToShort(bigd).ToString("x"));
        Console.WriteLine("D to ushort = 0x{0}", DoubleToUshort(bigd).ToString("x"));
        Console.WriteLine("-D to short = 0x{0}", DoubleToShort(-bigd).ToString("x"));
        Console.WriteLine("-D to ushort = 0x{0}", DoubleToUshort(-bigd).ToString("x"));
        Console.WriteLine("");
        Console.WriteLine("D to sbyte = 0x{0}", DoubleToSbyte(bigd).ToString("x"));
        Console.WriteLine("D to byte = 0x{0}", DoubleToByte(bigd).ToString("x"));
        Console.WriteLine("-D to sbyte = 0x{0}", DoubleToSbyte(-bigd).ToString("x"));
        Console.WriteLine("-D to byte = 0x{0}", DoubleToByte(-bigd).ToString("x"));
        Console.WriteLine("");
    }

    // Each TestValues* method checks, for three large out-of-range magnitudes,
    // that the NoInlining and inlinable conversion paths agree; returns 100 on
    // success, a distinct error code on the first mismatch.
    public static int TestValuesFloatLong()
    {
        float bigf = 100000000000000000000000000000.0f;
        if (FloatToLong(bigf) != FloatToLongInline(bigf)) return 101;
        if (FloatToUlong(bigf) != FloatToUlongInline(bigf)) return 102;
        if (FloatToLong(-bigf) != FloatToLongInline(-bigf)) return 103;
        if (FloatToUlong(-bigf) != FloatToUlongInline(-bigf)) return 104;

        bigf = 987654321001234567899876543210.0f;
        if (FloatToLong(bigf) != FloatToLongInline(bigf)) return 101;
        if (FloatToUlong(bigf) != FloatToUlongInline(bigf)) return 102;
        if (FloatToLong(-bigf) != FloatToLongInline(-bigf)) return 103;
        if (FloatToUlong(-bigf) != FloatToUlongInline(-bigf)) return 104;

        bigf = 254783961024896571038054632179.0f;
        if (FloatToLong(bigf) != FloatToLongInline(bigf)) return 101;
        if (FloatToUlong(bigf) != FloatToUlongInline(bigf)) return 102;
        if (FloatToLong(-bigf) != FloatToLongInline(-bigf)) return 103;
        if (FloatToUlong(-bigf) != FloatToUlongInline(-bigf)) return 104;
        return 100;
    }

    public static int TestValuesFloatInt()
    {
        float bigf = 100000000000000000000000000000.0f;
        if (FloatToInt(bigf) != FloatToIntInline(bigf)) return 111;
        if (FloatToUint(bigf) != FloatToUintInline(bigf)) return 112;
        if (FloatToInt(-bigf) != FloatToIntInline(-bigf)) return 113;
        if (FloatToUint(-bigf) != FloatToUintInline(-bigf)) return 114;

        bigf = 987654321001234567899876543210.0f;
        if (FloatToInt(bigf) != FloatToIntInline(bigf)) return 111;
        if (FloatToUint(bigf) != FloatToUintInline(bigf)) return 112;
        if (FloatToInt(-bigf) != FloatToIntInline(-bigf)) return 113;
        if (FloatToUint(-bigf) != FloatToUintInline(-bigf)) return 114;

        bigf = 254783961024896571038054632179.0f;
        if (FloatToInt(bigf) != FloatToIntInline(bigf)) return 111;
        if (FloatToUint(bigf) != FloatToUintInline(bigf)) return 112;
        if (FloatToInt(-bigf) != FloatToIntInline(-bigf)) return 113;
        if (FloatToUint(-bigf) != FloatToUintInline(-bigf)) return 114;
        return 100;
    }

    public static int TestValuesFloatShort()
    {
        float bigf = 100000000000000000000000000000.0f;
        if (FloatToShort(bigf) != FloatToShortInline(bigf)) return 121;
        if (FloatToUshort(bigf) != FloatToUshortInline(bigf)) return 122;
        if (FloatToShort(-bigf) != FloatToShortInline(-bigf)) return 123;
        if (FloatToUshort(-bigf) != FloatToUshortInline(-bigf)) return 124;

        bigf = 987654321001234567899876543210.0f;
        if (FloatToShort(bigf) != FloatToShortInline(bigf)) return 121;
        if (FloatToUshort(bigf) != FloatToUshortInline(bigf)) return 122;
        if (FloatToShort(-bigf) != FloatToShortInline(-bigf)) return 123;
        if (FloatToUshort(-bigf) != FloatToUshortInline(-bigf)) return 124;

        bigf = 254783961024896571038054632179.0f;
        if (FloatToShort(bigf) != FloatToShortInline(bigf)) return 121;
        if (FloatToUshort(bigf) != FloatToUshortInline(bigf)) return 122;
        if (FloatToShort(-bigf) != FloatToShortInline(-bigf)) return 123;
        if (FloatToUshort(-bigf) != FloatToUshortInline(-bigf)) return 124;
        return 100;
    }

    public static int TestValuesFloatByte()
    {
        float bigf = 100000000000000000000000000000.0f;
        if (FloatToSbyte(bigf) != FloatToSbyteInline(bigf)) return 141;
        if (FloatToByte(bigf) != FloatToByteInline(bigf)) return 142;
        if (FloatToSbyte(-bigf) != FloatToSbyteInline(-bigf)) return 143;
        if (FloatToByte(-bigf) != FloatToByteInline(-bigf)) return 144;

        bigf = 987654321001234567899876543210.0f;
        if (FloatToSbyte(bigf) != FloatToSbyteInline(bigf)) return 141;
        if (FloatToByte(bigf) != FloatToByteInline(bigf)) return 142;
        if (FloatToSbyte(-bigf) != FloatToSbyteInline(-bigf)) return 143;
        if (FloatToByte(-bigf) != FloatToByteInline(-bigf)) return 144;

        bigf = 254783961024896571038054632179.0f;
        if (FloatToSbyte(bigf) != FloatToSbyteInline(bigf)) return 141;
        if (FloatToByte(bigf) != FloatToByteInline(bigf)) return 142;
        if (FloatToSbyte(-bigf) != FloatToSbyteInline(-bigf)) return 143;
        if (FloatToByte(-bigf) != FloatToByteInline(-bigf)) return 144;
        return 100;
    }

    public static int TestValuesDoubleLong()
    {
        double bigd = 100000000000000000000000000000.0;
        if (DoubleToLong(bigd) != DoubleToLongInline(bigd)) return 201;
        if (DoubleToUlong(bigd) != DoubleToUlongInline(bigd)) return 202;
        if (DoubleToLong(-bigd) != DoubleToLongInline(-bigd)) return 203;
        if (DoubleToUlong(-bigd) != DoubleToUlongInline(-bigd)) return 204;

        bigd = 987654321001234567899876543210.0;
        if (DoubleToLong(bigd) != DoubleToLongInline(bigd)) return 201;
        if (DoubleToUlong(bigd) != DoubleToUlongInline(bigd)) return 202;
        if (DoubleToLong(-bigd) != DoubleToLongInline(-bigd)) return 203;
        if (DoubleToUlong(-bigd) != DoubleToUlongInline(-bigd)) return 204;

        bigd = 254783961024896571038054632179.0;
        if (DoubleToLong(bigd) != DoubleToLongInline(bigd)) return 201;
        if (DoubleToUlong(bigd) != DoubleToUlongInline(bigd)) return 202;
        if (DoubleToLong(-bigd) != DoubleToLongInline(-bigd)) return 203;
        if (DoubleToUlong(-bigd) != DoubleToUlongInline(-bigd)) return 204;
        return 100;
    }

    public static int TestValuesDoubleInt()
    {
        double bigd = 100000000000000000000000000000.0;
        if (DoubleToInt(bigd) != DoubleToIntInline(bigd)) return 211;
        if (DoubleToUint(bigd) != DoubleToUintInline(bigd)) return 212;
        if (DoubleToInt(-bigd) != DoubleToIntInline(-bigd)) return 213;
        if (DoubleToUint(-bigd) != DoubleToUintInline(-bigd)) return 214;

        bigd = 987654321001234567899876543210.0;
        if (DoubleToInt(bigd) != DoubleToIntInline(bigd)) return 211;
        if (DoubleToUint(bigd) != DoubleToUintInline(bigd)) return 212;
        if (DoubleToInt(-bigd) != DoubleToIntInline(-bigd)) return 213;
        if (DoubleToUint(-bigd) != DoubleToUintInline(-bigd)) return 214;

        bigd = 254783961024896571038054632179.0;
        if (DoubleToInt(bigd) != DoubleToIntInline(bigd)) return 211;
        if (DoubleToUint(bigd) != DoubleToUintInline(bigd)) return 212;
        if (DoubleToInt(-bigd) != DoubleToIntInline(-bigd)) return 213;
        if (DoubleToUint(-bigd) != DoubleToUintInline(-bigd)) return 214;
        return 100;
    }

    public static int TestValuesDoubleShort()
    {
        double bigd = 100000000000000000000000000000.0;
        if (DoubleToShort(bigd) != DoubleToShortInline(bigd)) return 221;
        if (DoubleToUshort(bigd) != DoubleToUshortInline(bigd)) return 222;
        if (DoubleToShort(-bigd) != DoubleToShortInline(-bigd)) return 223;
        if (DoubleToUshort(-bigd) != DoubleToUshortInline(-bigd)) return 224;

        bigd = 987654321001234567899876543210.0;
        if
(DoubleToShort(bigd) != DoubleToShortInline(bigd)) return 221; if (DoubleToUshort(bigd) != DoubleToUshortInline(bigd)) return 222; if (DoubleToShort(-bigd) != DoubleToShortInline(-bigd)) return 223; if (DoubleToUshort(-bigd) != DoubleToUshortInline(-bigd)) return 224; bigd = 254783961024896571038054632179.0; if (DoubleToShort(bigd) != DoubleToShortInline(bigd)) return 221; if (DoubleToUshort(bigd) != DoubleToUshortInline(bigd)) return 222; if (DoubleToShort(-bigd) != DoubleToShortInline(-bigd)) return 223; if (DoubleToUshort(-bigd) != DoubleToUshortInline(-bigd)) return 224; return 100; } public static int TestValuesDoubleByte() { double bigd = 100000000000000000000000000000.0; if (DoubleToSbyte(bigd) != DoubleToSbyteInline(bigd)) return 241; if (DoubleToByte(bigd) != DoubleToByteInline(bigd)) return 242; if (DoubleToSbyte(-bigd) != DoubleToSbyteInline(-bigd)) return 243; if (DoubleToByte(-bigd) != DoubleToByteInline(-bigd)) return 244; bigd = 987654321001234567899876543210.0; if (DoubleToSbyte(bigd) != DoubleToSbyteInline(bigd)) return 241; if (DoubleToByte(bigd) != DoubleToByteInline(bigd)) return 242; if (DoubleToSbyte(-bigd) != DoubleToSbyteInline(-bigd)) return 243; if (DoubleToByte(-bigd) != DoubleToByteInline(-bigd)) return 244; bigd = 254783961024896571038054632179.0; if (DoubleToSbyte(bigd) != DoubleToSbyteInline(bigd)) return 241; if (DoubleToByte(bigd) != DoubleToByteInline(bigd)) return 242; if (DoubleToSbyte(-bigd) != DoubleToSbyteInline(-bigd)) return 243; if (DoubleToByte(-bigd) != DoubleToByteInline(-bigd)) return 244; return 100; } public static int TestValues() { int res = TestValuesFloatLong(); if (res != 100) return res; res = TestValuesFloatInt(); if (res != 100) return res; res = TestValuesFloatShort(); if (res != 100) return res; res = TestValuesFloatByte(); if (res != 100) return res; res = TestValuesDoubleLong(); if (res != 100) return res; res = TestValuesDoubleInt(); if (res != 100) return res; res = TestValuesDoubleShort(); if (res != 
100) return res; res = TestValuesDoubleByte(); if (res != 100) return res; return res; } public static void Usage() { Console.WriteLine("FloatOvfToInt [print|test]"); } public static int Main(String[] args) { if (args.Length != 1) { int res = TestValues(); Console.WriteLine("Test " + (res == 100 ? "passed" : "failed")); return res; } switch (args[0]) { case "print": PrintValues(); break; case "test": int res = TestValues(); Console.WriteLine("Test " + (res == 100 ? "passed" : "failed")); return res; default: Usage(); break; } return 0; } }
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.Scripting.Hosting
{
    using static ObjectFormatterHelpers;
    using TypeInfo = System.Reflection.TypeInfo;

    internal abstract partial class CommonObjectFormatter
    {
        // Walks an object graph and renders it as display text for the interactive host.
        // NOTE(review): an instance represents a single visit — _memberDisplayFormat and
        // _primitiveOptions are mutated during recursion and restored afterwards, so an
        // instance is not safe to share across concurrent formats; confirm against callers.
        private sealed partial class Visitor
        {
            private readonly CommonObjectFormatter _formatter;
            private readonly BuilderOptions _builderOptions;

            // Mutable: temporarily swapped while evaluating DebuggerDisplay embedded expressions.
            private CommonPrimitiveFormatterOptions _primitiveOptions;
            private CommonTypeNameFormatterOptions _typeNameOptions;

            // Mutable: demoted from SeparateLines to SingleLine for non-root objects
            // (see the TODO in FormatObjectRecursive).
            private MemberDisplayFormat _memberDisplayFormat;

            // Cycle-detection set, allocated lazily; compares by reference identity.
            private HashSet<object> _lazyVisitedObjects;

            private HashSet<object> VisitedObjects
            {
                get
                {
                    if (_lazyVisitedObjects == null)
                    {
                        _lazyVisitedObjects = new HashSet<object>(ReferenceEqualityComparer.Instance);
                    }

                    return _lazyVisitedObjects;
                }
            }

            public Visitor(
                CommonObjectFormatter formatter,
                BuilderOptions builderOptions,
                CommonPrimitiveFormatterOptions primitiveOptions,
                CommonTypeNameFormatterOptions typeNameOptions,
                MemberDisplayFormat memberDisplayFormat)
            {
                _formatter = formatter;
                _builderOptions = builderOptions;
                _primitiveOptions = primitiveOptions;
                _typeNameOptions = typeNameOptions;
                _memberDisplayFormat = memberDisplayFormat;
            }

            // Creates the builder used for a single member's value: capped at the smaller
            // of the configured line length and the remaining budget, with the ellipsis
            // suppressed (the caller decides how to truncate).
            private Builder MakeMemberBuilder(int limit)
            {
                return new Builder(_builderOptions.WithMaximumOutputLength(Math.Min(_builderOptions.MaximumLineLength, limit)), suppressEllipsis: true);
            }

            // Entry point: formats one object graph; converts a runaway recursion
            // (detected via EnsureSufficientExecutionStack) into a readable message.
            public string FormatObject(object obj)
            {
                try
                {
                    var builder = new Builder(_builderOptions, suppressEllipsis: false);
                    string _;
                    return FormatObjectRecursive(builder, obj, isRoot: true, debuggerDisplayName: out _).ToString();
                }
                catch (InsufficientExecutionStackException)
                {
                    return ScriptingResources.StackOverflowWhileEvaluating;
                }
            }

            // Core dispatcher: primitives, then KeyValuePair<,>, arrays, DebuggerDisplay,
            // overridden ToString, plain type name — and finally (optionally) members.
            // debuggerDisplayName receives the DebuggerDisplayAttribute.Name template, if any.
            private Builder FormatObjectRecursive(Builder result, object obj, bool isRoot, out string debuggerDisplayName)
            {
                // TODO (https://github.com/dotnet/roslyn/issues/6689): remove this
                if (!isRoot && _memberDisplayFormat == MemberDisplayFormat.SeparateLines)
                {
                    _memberDisplayFormat = MemberDisplayFormat.SingleLine;
                }

                debuggerDisplayName = null;

                string primitive = _formatter.PrimitiveFormatter.FormatPrimitive(obj, _primitiveOptions);
                if (primitive != null)
                {
                    result.Append(primitive);
                    return result;
                }

                Type type = obj.GetType();
                TypeInfo typeInfo = type.GetTypeInfo();

                //
                // Override KeyValuePair<,>.ToString() to get better dictionary elements formatting:
                //
                // { { format(key), format(value) }, ... }
                // instead of
                // { [key.ToString(), value.ToString()], ... }
                //
                // This is more general than overriding Dictionary<,> debugger proxy attribute since it applies on all
                // types that return an array of KeyValuePair in their DebuggerDisplay to display items.
                //
                if (typeInfo.IsGenericType && typeInfo.GetGenericTypeDefinition() == typeof(KeyValuePair<,>))
                {
                    if (isRoot)
                    {
                        result.Append(_formatter.TypeNameFormatter.FormatTypeName(type, _typeNameOptions));
                        result.Append(' ');
                    }

                    FormatKeyValuePair(result, obj);
                    return result;
                }

                if (typeInfo.IsArray)
                {
                    // Guard against self-referencing arrays (e.g. a[0] == a).
                    if (VisitedObjects.Add(obj))
                    {
                        FormatArray(result, (Array)obj);
                        VisitedObjects.Remove(obj);
                    }
                    else
                    {
                        result.AppendInfiniteRecursionMarker();
                    }

                    return result;
                }

                DebuggerDisplayAttribute debuggerDisplay = GetApplicableDebuggerDisplayAttribute(typeInfo);
                if (debuggerDisplay != null)
                {
                    debuggerDisplayName = debuggerDisplay.Name;
                }

                // Suppresses members if inlineMembers is true,
                // does nothing otherwise.
                bool suppressInlineMembers = false;

                //
                // TypeName(count) for ICollection implementers
                // or
                // TypeName([[DebuggerDisplay.Value]])    // Inline
                // [[DebuggerDisplay.Value]]              // Inline && !isRoot
                // or
                // [[ToString()]] if ToString overridden
                // or
                // TypeName
                //
                ICollection collection;
                if ((collection = obj as ICollection) != null)
                {
                    FormatCollectionHeader(result, collection);
                }
                else if (debuggerDisplay != null && !string.IsNullOrEmpty(debuggerDisplay.Value))
                {
                    if (isRoot)
                    {
                        result.Append(_formatter.TypeNameFormatter.FormatTypeName(type, _typeNameOptions));
                        result.Append('(');
                    }

                    FormatWithEmbeddedExpressions(result, debuggerDisplay.Value, obj);

                    if (isRoot)
                    {
                        result.Append(')');
                    }

                    suppressInlineMembers = true;
                }
                else if (HasOverriddenToString(typeInfo))
                {
                    ObjectToString(result, obj);
                    suppressInlineMembers = true;
                }
                else
                {
                    result.Append(_formatter.TypeNameFormatter.FormatTypeName(type, _typeNameOptions));
                }

                MemberDisplayFormat memberFormat = _memberDisplayFormat;
                if (memberFormat == MemberDisplayFormat.Hidden)
                {
                    if (collection != null)
                    {
                        // NB: Collections specifically ignore MemberDisplayFormat.Hidden.
                        memberFormat = MemberDisplayFormat.SingleLine;
                    }
                    else
                    {
                        return result;
                    }
                }

                bool includeNonPublic = memberFormat == MemberDisplayFormat.SeparateLines;
                bool inlineMembers = memberFormat == MemberDisplayFormat.SingleLine;

                // A debugger type proxy replaces the object for member enumeration.
                object proxy = GetDebuggerTypeProxy(obj);
                if (proxy != null)
                {
                    includeNonPublic = false;
                    suppressInlineMembers = false;
                }

                if (!suppressInlineMembers || !inlineMembers)
                {
                    FormatMembers(result, obj, proxy, includeNonPublic, inlineMembers);
                }

                return result;
            }

            #region Members

            // Appends either specialized (dictionary/sequence) or reflection-based member
            // output for obj, guarding against cycles via VisitedObjects.
            private void FormatMembers(Builder result, object obj, object proxy, bool includeNonPublic, bool inlineMembers)
            {
                // TODO (tomat): we should not use recursion
                RuntimeHelpers.EnsureSufficientExecutionStack();

                result.Append(' ');

                // Note: Even if we've seen it before, we show a header
                if (!VisitedObjects.Add(obj))
                {
                    result.AppendInfiniteRecursionMarker();
                    return;
                }

                bool membersFormatted = false;

                // handle special types only if a proxy isn't defined
                if (proxy == null)
                {
                    IDictionary dictionary;
                    IEnumerable enumerable;
                    if ((dictionary = obj as IDictionary) != null)
                    {
                        FormatDictionaryMembers(result, dictionary, inlineMembers);
                        membersFormatted = true;
                    }
                    else if ((enumerable = obj as IEnumerable) != null)
                    {
                        FormatSequenceMembers(result, enumerable, inlineMembers);
                        membersFormatted = true;
                    }
                }

                if (!membersFormatted)
                {
                    FormatObjectMembers(result, proxy ?? obj, obj.GetType().GetTypeInfo(), includeNonPublic, inlineMembers);
                }

                VisitedObjects.Remove(obj);
            }

            /// <summary>
            /// Formats object members to a list.
            ///
            /// Inline == true:
            /// <code>
            /// { A=true, B=false, C=new int[3] { 1, 2, 3 } }
            /// </code>
            ///
            /// Inline == false:
            /// <code>
            /// {
            ///   A: true,
            ///   B: false,
            ///   C: new int[3] { 1, 2, 3 }
            /// }
            /// </code>
            /// (Example labels corrected to match the <c>inline ? "=" : ": "</c> separator below.)
            /// </summary>
            private void FormatObjectMembers(Builder result, object obj, TypeInfo preProxyTypeInfo, bool includeNonPublic, bool inline)
            {
                int lengthLimit = result.Remaining;
                if (lengthLimit < 0)
                {
                    return;
                }

                var members = new List<FormattedMember>();

                // Limits the number of members added into the result. Some more members may be added than it will fit into the result
                // and will be thrown away later but not many more.
                FormatObjectMembersRecursive(members, obj, includeNonPublic, ref lengthLimit);
                bool useCollectionFormat = UseCollectionFormat(members, preProxyTypeInfo);

                result.AppendGroupOpening();

                for (int i = 0; i < members.Count; i++)
                {
                    result.AppendCollectionItemSeparator(isFirst: i == 0, inline: inline);
                    if (useCollectionFormat)
                    {
                        members[i].AppendAsCollectionEntry(result);
                    }
                    else
                    {
                        members[i].Append(result, inline ? "=" : ": ");
                    }

                    if (result.Remaining <= 0)
                    {
                        break;
                    }
                }

                result.AppendGroupClosing(inline);
            }

            // An IEnumerable whose displayed members are all indexed entries renders as a
            // collection ("{ a, b }") rather than a property bag ("{ A=a, B=b }").
            private static bool UseCollectionFormat(IEnumerable<FormattedMember> members, TypeInfo originalType)
            {
                return typeof(IEnumerable).GetTypeInfo().IsAssignableFrom(originalType) && members.All(member => member.Index >= 0);
            }

            /// <summary>
            /// Enumerates sorted object members to display.
            /// </summary>
            private void FormatObjectMembersRecursive(List<FormattedMember> result, object obj, bool includeNonPublic, ref int lengthLimit)
            {
                Debug.Assert(obj != null);

                var members = new List<MemberInfo>();

                // Gather instance fields and readable instance properties up the inheritance chain.
                var type = obj.GetType().GetTypeInfo();
                while (type != null)
                {
                    members.AddRange(type.DeclaredFields.Where(f => !f.IsStatic));
                    members.AddRange(type.DeclaredProperties.Where(f => f.GetMethod != null && !f.GetMethod.IsStatic));
                    type = type.BaseType?.GetTypeInfo();
                }

                members.Sort((x, y) =>
                {
                    // Need case-sensitive comparison here so that the order of members is
                    // always well-defined (members can differ by case only). And we don't want to
                    // depend on that order.
                    int comparisonResult = StringComparer.OrdinalIgnoreCase.Compare(x.Name, y.Name);
                    if (comparisonResult == 0)
                    {
                        comparisonResult = StringComparer.Ordinal.Compare(x.Name, y.Name);
                    }

                    return comparisonResult;
                });

                foreach (var member in members)
                {
                    if (!_formatter.Filter.Include(member))
                    {
                        continue;
                    }

                    bool rootHidden = false, ignoreVisibility = false;
                    var browsable = (DebuggerBrowsableAttribute)member.GetCustomAttributes(typeof(DebuggerBrowsableAttribute), false).FirstOrDefault();
                    if (browsable != null)
                    {
                        if (browsable.State == DebuggerBrowsableState.Never)
                        {
                            continue;
                        }

                        ignoreVisibility = true;
                        rootHidden = browsable.State == DebuggerBrowsableState.RootHidden;
                    }

                    FieldInfo field = member as FieldInfo;
                    if (field != null)
                    {
                        if (!(includeNonPublic || ignoreVisibility || field.IsPublic || field.IsFamily || field.IsFamilyOrAssembly))
                        {
                            continue;
                        }
                    }
                    else
                    {
                        PropertyInfo property = (PropertyInfo)member;

                        var getter = property.GetMethod;
                        if (getter == null)
                        {
                            continue;
                        }

                        var setter = property.SetMethod;

                        // If not ignoring visibility include properties that has a visible getter or setter.
                        if (!(includeNonPublic || ignoreVisibility ||
                            getter.IsPublic || getter.IsFamily || getter.IsFamilyOrAssembly ||
                            (setter != null && (setter.IsPublic || setter.IsFamily || setter.IsFamilyOrAssembly))))
                        {
                            continue;
                        }

                        // Indexers and other parameterized getters can't be evaluated here.
                        if (getter.GetParameters().Length > 0)
                        {
                            continue;
                        }
                    }

                    var debuggerDisplay = GetApplicableDebuggerDisplayAttribute(member);
                    if (debuggerDisplay != null)
                    {
                        string k = FormatWithEmbeddedExpressions(lengthLimit, debuggerDisplay.Name, obj) ?? member.Name;
                        string v = FormatWithEmbeddedExpressions(lengthLimit, debuggerDisplay.Value, obj) ?? string.Empty; // TODO: ?
                        if (!AddMember(result, new FormattedMember(-1, k, v), ref lengthLimit))
                        {
                            return;
                        }

                        continue;
                    }

                    Exception exception;
                    object value = GetMemberValue(member, obj, out exception);
                    if (exception != null)
                    {
                        // Evaluation threw: surface the exception type as the member's value.
                        var memberValueBuilder = MakeMemberBuilder(lengthLimit);
                        FormatException(memberValueBuilder, exception);
                        if (!AddMember(result, new FormattedMember(-1, member.Name, memberValueBuilder.ToString()), ref lengthLimit))
                        {
                            return;
                        }

                        continue;
                    }

                    if (rootHidden)
                    {
                        // [DebuggerBrowsable(RootHidden)]: splice the member's own
                        // elements/members directly into this level.
                        if (value != null && !VisitedObjects.Contains(value))
                        {
                            Array array;
                            if ((array = value as Array) != null)  // TODO (tomat): n-dim arrays
                            {
                                int i = 0;
                                foreach (object item in array)
                                {
                                    string name;
                                    Builder valueBuilder = MakeMemberBuilder(lengthLimit);
                                    FormatObjectRecursive(valueBuilder, item, isRoot: false, debuggerDisplayName: out name);

                                    if (!string.IsNullOrEmpty(name))
                                    {
                                        name = FormatWithEmbeddedExpressions(MakeMemberBuilder(lengthLimit), name, item).ToString();
                                    }

                                    if (!AddMember(result, new FormattedMember(i, name, valueBuilder.ToString()), ref lengthLimit))
                                    {
                                        return;
                                    }

                                    i++;
                                }
                            }
                            else if (_formatter.PrimitiveFormatter.FormatPrimitive(value, _primitiveOptions) == null && VisitedObjects.Add(value))
                            {
                                FormatObjectMembersRecursive(result, value, includeNonPublic, ref lengthLimit);
                                VisitedObjects.Remove(value);
                            }
                        }
                    }
                    else
                    {
                        string name;
                        Builder valueBuilder = MakeMemberBuilder(lengthLimit);
                        FormatObjectRecursive(valueBuilder, value, isRoot: false, debuggerDisplayName: out name);

                        if (string.IsNullOrEmpty(name))
                        {
                            name = member.Name;
                        }
                        else
                        {
                            name = FormatWithEmbeddedExpressions(MakeMemberBuilder(lengthLimit), name, value).ToString();
                        }

                        if (!AddMember(result, new FormattedMember(-1, name, valueBuilder.ToString()), ref lengthLimit))
                        {
                            return;
                        }
                    }
                }
            }

            // Appends one formatted member and decrements the remaining output budget;
            // returns false once the budget is exhausted (callers then stop enumerating).
            private bool AddMember(List<FormattedMember> members, FormattedMember member, ref int remainingLength)
            {
                // Add this item even if we exceed the limit - its prefix might be appended to the result.
                members.Add(member);

                // We don't need to calculate an exact length, just a lower bound on the size.
                // We can add more members to the result than it will eventually fit, we shouldn't add less.
                // Add 2 more, even if only one or half of it fit, so that the separator is included in edge cases.
                if (remainingLength == int.MinValue)
                {
                    return false;
                }

                remainingLength -= member.MinimalLength;
                if (remainingLength <= 0)
                {
                    // Sentinel: budget exhausted; the next call bails immediately.
                    remainingLength = int.MinValue;
                }

                return true;
            }

            // Renders "!<ExceptionType>" in place of a value that threw during evaluation.
            private void FormatException(Builder result, Exception exception)
            {
                result.Append("!<");
                result.Append(_formatter.TypeNameFormatter.FormatTypeName(exception.GetType(), _typeNameOptions));
                result.Append('>');
            }

            #endregion

            #region Collections

            // Renders a KeyValuePair as "{ key, value }" using the full formatter for both parts.
            private void FormatKeyValuePair(Builder result, object obj)
            {
                TypeInfo type = obj.GetType().GetTypeInfo();
                object key = type.GetDeclaredProperty("Key").GetValue(obj, Array.Empty<object>());
                object value = type.GetDeclaredProperty("Value").GetValue(obj, Array.Empty<object>());
                string _;
                result.AppendGroupOpening();
                result.AppendCollectionItemSeparator(isFirst: true, inline: true);
                FormatObjectRecursive(result, key, isRoot: false, debuggerDisplayName: out _);
                result.AppendCollectionItemSeparator(isFirst: false, inline: true);
                FormatObjectRecursive(result, value, isRoot: false, debuggerDisplayName: out _);
                result.AppendGroupClosing(inline: true);
            }

            // Array type name (with dimensions) for arrays, otherwise "TypeName(count)";
            // a throwing Count is silently omitted.
            private void FormatCollectionHeader(Builder result, ICollection collection)
            {
                Array array = collection as Array;
                if (array != null)
                {
                    result.Append(_formatter.TypeNameFormatter.FormatArrayTypeName(array.GetType(), array, _typeNameOptions));
                    return;
                }

                result.Append(_formatter.TypeNameFormatter.FormatTypeName(collection.GetType(), _typeNameOptions));
                try
                {
                    result.Append('(');
                    result.Append(collection.Count.ToString());
                    result.Append(')');
                }
                catch (Exception)
                {
                    // skip
                }
            }

            private void FormatArray(Builder result, Array array)
            {
                FormatCollectionHeader(result, array);

                // NB: Arrays specifically ignore MemberDisplayFormat.Hidden.
                if (array.Rank > 1)
                {
                    FormatMultidimensionalArrayElements(result, array, inline: _memberDisplayFormat != MemberDisplayFormat.SeparateLines);
                }
                else
                {
                    result.Append(' ');
                    FormatSequenceMembers(result, array, inline: _memberDisplayFormat != MemberDisplayFormat.SeparateLines);
                }
            }

            // "{ { k, v }, ... }"; an exception thrown while enumerating is rendered
            // inline followed by the configured ellipsis rather than propagated.
            private void FormatDictionaryMembers(Builder result, IDictionary dict, bool inline)
            {
                result.AppendGroupOpening();

                int i = 0;
                try
                {
                    IDictionaryEnumerator enumerator = dict.GetEnumerator();
                    IDisposable disposable = enumerator as IDisposable;
                    try
                    {
                        while (enumerator.MoveNext())
                        {
                            var entry = enumerator.Entry;
                            string _;
                            result.AppendCollectionItemSeparator(isFirst: i == 0, inline: inline);
                            result.AppendGroupOpening();
                            result.AppendCollectionItemSeparator(isFirst: true, inline: true);
                            FormatObjectRecursive(result, entry.Key, isRoot: false, debuggerDisplayName: out _);
                            result.AppendCollectionItemSeparator(isFirst: false, inline: true);
                            FormatObjectRecursive(result, entry.Value, isRoot: false, debuggerDisplayName: out _);
                            result.AppendGroupClosing(inline: true);
                            i++;
                        }
                    }
                    finally
                    {
                        if (disposable != null)
                        {
                            disposable.Dispose();
                        }
                    }
                }
                catch (Exception e)
                {
                    result.AppendCollectionItemSeparator(isFirst: i == 0, inline: inline);
                    FormatException(result, e);
                    result.Append(' ');
                    result.Append(_builderOptions.Ellipsis);
                }

                result.AppendGroupClosing(inline);
            }

            private void FormatSequenceMembers(Builder result, IEnumerable sequence, bool inline)
            {
                result.AppendGroupOpening();
                int i = 0;
                try
                {
                    foreach (var item in sequence)
                    {
                        string _;
                        result.AppendCollectionItemSeparator(isFirst: i == 0, inline: inline);
                        FormatObjectRecursive(result, item, isRoot: false, debuggerDisplayName: out _);
                        i++;
                    }
                }
                catch (Exception e)
                {
                    result.AppendCollectionItemSeparator(isFirst: i == 0, inline: inline);
                    FormatException(result, e);
                    // NOTE(review): uses a literal " ..." whereas FormatDictionaryMembers
                    // uses _builderOptions.Ellipsis — confirm whether intentional.
                    result.Append(" ...");
                }

                result.AppendGroupClosing(inline);
            }

            // Walks a multi-dimensional array, emitting one nested group per dimension;
            // indices are incremented manually with carry from the last dimension upwards.
            private void FormatMultidimensionalArrayElements(Builder result, Array array, bool inline)
            {
                Debug.Assert(array.Rank > 1);

                if (array.Length == 0)
                {
                    result.AppendCollectionItemSeparator(isFirst: true, inline: true);
                    result.AppendGroupOpening();
                    result.AppendGroupClosing(inline: true);
                    return;
                }

                int[] indices = new int[array.Rank];
                for (int i = array.Rank - 1; i >= 0; i--)
                {
                    indices[i] = array.GetLowerBound(i);
                }

                int nesting = 0;
                int flatIndex = 0;
                while (true)
                {
                    // increment indices (lower index overflows to higher):
                    int i = indices.Length - 1;
                    while (indices[i] > array.GetUpperBound(i))
                    {
                        indices[i] = array.GetLowerBound(i);
                        result.AppendGroupClosing(inline: inline || nesting != 1);
                        nesting--;

                        i--;
                        if (i < 0)
                        {
                            return;
                        }

                        indices[i]++;
                    }

                    result.AppendCollectionItemSeparator(isFirst: flatIndex == 0, inline: inline || nesting != 1);

                    i = indices.Length - 1;
                    while (i >= 0 && indices[i] == array.GetLowerBound(i))
                    {
                        result.AppendGroupOpening();
                        nesting++;

                        // array isn't empty, so there is always an element following this separator
                        result.AppendCollectionItemSeparator(isFirst: true, inline: inline || nesting != 1);

                        i--;
                    }

                    string _;
                    FormatObjectRecursive(result, array.GetValue(indices), isRoot: false, debuggerDisplayName: out _);

                    indices[indices.Length - 1]++;
                    flatIndex++;
                }
            }

            #endregion

            #region Scalars

            // Calls the object's overridden ToString and wraps the text in brackets;
            // a throwing ToString is rendered via FormatException instead of propagating.
            private void ObjectToString(Builder result, object obj)
            {
                try
                {
                    string str = obj.ToString();
                    result.Append('[');
                    result.Append(str);
                    result.Append(']');
                }
                catch (Exception e)
                {
                    FormatException(result, e);
                }
            }

            #endregion

            #region DebuggerDisplay Embedded Expressions

            /// <summary>
            /// Evaluate a format string with possible member references enclosed in braces.
            /// E.g. "foo = {GetFooString(),nq}, bar = {Bar}".
            /// </summary>
            /// <remarks>
            /// Although in theory any expression is allowed to be embedded in the string such behavior is in practice fundamentally broken.
            /// The attribute doesn't specify what language (VB, C#, F#, etc.) to use to parse these expressions. Even if it did all languages
            /// would need to be able to evaluate each other language's expressions, which is not viable and the Expression Evaluator doesn't
            /// work that way today. Instead it evaluates the embedded expressions in the language of the current method frame. When consuming
            /// VB objects from C#, for example, the evaluation might fail due to language mismatch (evaluating VB expression using C# parser).
            ///
            /// Therefore we limit the expressions to a simple language independent syntax: {clr-member-name} '(' ')' ',nq',
            /// where parentheses and ,nq suffix (no-quotes) are optional and the name is an arbitrary CLR field, property, or method name.
            /// We then resolve the member by name using case-sensitive lookup first with fallback to case insensitive and evaluate it.
            /// If parentheses are present we only look for methods.
            /// Only parameterless members are considered.
            /// </remarks>
            private string FormatWithEmbeddedExpressions(int lengthLimit, string format, object obj)
            {
                if (string.IsNullOrEmpty(format))
                {
                    return null;
                }

                var builder = new Builder(_builderOptions.WithMaximumOutputLength(lengthLimit), suppressEllipsis: true);
                return FormatWithEmbeddedExpressions(builder, format, obj).ToString();
            }

            private Builder FormatWithEmbeddedExpressions(Builder result, string format, object obj)
            {
                int i = 0;
                while (i < format.Length)
                {
                    char c = format[i++];
                    if (c == '{')
                    {
                        // "\{" escapes the brace. NOTE(review): the backslash itself was
                        // already appended by the previous iteration, so the output contains
                        // "\{" rather than "{" — confirm whether that is intentional.
                        if (i >= 2 && format[i - 2] == '\\')
                        {
                            result.Append('{');
                        }
                        else
                        {
                            int expressionEnd = format.IndexOf('}', i);

                            bool noQuotes, callableOnly;
                            string memberName;
                            if (expressionEnd == -1 || (memberName = ParseSimpleMemberName(format, i, expressionEnd, out noQuotes, out callableOnly)) == null)
                            {
                                // the expression isn't properly formatted
                                result.Append(format, i - 1, format.Length - i + 1);
                                break;
                            }

                            MemberInfo member = ResolveMember(obj, memberName, callableOnly);
                            if (member == null)
                            {
                                result.AppendFormat(callableOnly ? "!<Method '{0}' not found>" : "!<Member '{0}' not found>", memberName);
                            }
                            else
                            {
                                Exception exception;
                                object value = GetMemberValue(member, obj, out exception);
                                if (exception != null)
                                {
                                    FormatException(result, exception);
                                }
                                else
                                {
                                    // Temporarily hide members and honor the ",nq" (no-quotes)
                                    // suffix while formatting the embedded value; restored below.
                                    MemberDisplayFormat oldMemberDisplayFormat = _memberDisplayFormat;
                                    CommonPrimitiveFormatterOptions oldPrimitiveOptions = _primitiveOptions;

                                    _memberDisplayFormat = MemberDisplayFormat.Hidden;
                                    _primitiveOptions = new CommonPrimitiveFormatterOptions(
                                        _primitiveOptions.NumberRadix,
                                        _primitiveOptions.IncludeCharacterCodePoints,
                                        quoteStringsAndCharacters: !noQuotes,
                                        escapeNonPrintableCharacters: _primitiveOptions.EscapeNonPrintableCharacters,
                                        cultureInfo: _primitiveOptions.CultureInfo);

                                    string _;
                                    FormatObjectRecursive(result, value, isRoot: false, debuggerDisplayName: out _);

                                    _primitiveOptions = oldPrimitiveOptions;
                                    _memberDisplayFormat = oldMemberDisplayFormat;
                                }
                            }

                            i = expressionEnd + 1;
                        }
                    }
                    else
                    {
                        result.Append(c);
                    }
                }

                return result;
            }

            #endregion
        }
    }
}
/**
 * Copyright 2016 Dartmouth-Hitchcock
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Configuration;
using System.Data;
using System.Data.Linq;
using System.Data.SqlClient;
using System.Linq;
using System.Reflection;
using System.Threading;
using System.Xml;

using Legion.Core.Caching;
using Legion.Core.Clients;
using Legion.Core.Databases;
using Legion.Core.DataStructures;
using Legion.Core.Extensions;
using Legion.Core.Modules;

namespace Legion.Core.Services {

    /// <summary>
    /// A Legion Method
    /// </summary>
    public class Method {
        private const string CACHING_LOCK_TYPE = "MethodResultCache";
        // Cache key is "{methodId}:{parameterHash}".
        private const string CACHING_KEY_FORMAT = "{0}:{1}";

        private int _id, _serviceid;
        private string _name, _key, _servicekey;
        private bool _isLogged, _isPublic, _isResultCacheable, _isAuthenticatedUserRequired, _isAuthorizedUserRequired, _isLogReplayDetailsOnException;
        private int? _cachedResultLifetime;
        private MethodInfo _mi;

        // Shared buffer for call-log records awaiting a database flush.
        private static ConcurrentQueue<LogMethodParams> _bufferedCallLogQueue = new ConcurrentQueue<LogMethodParams>();

        // Reserved method keys handled by the framework itself rather than user code.
        internal static Dictionary<string, string> SPECIAL_METHODS = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase) {
            {"__status", "status"}
        };

        #region accessors

        /// <summary>
        /// The method's Legion ID
        /// </summary>
        internal int Id {
            get { return _id; }
        }

        /// <summary>
        /// The method's key
        /// </summary>
        internal string Key {
            get { return _key; }
        }

        /// <summary>
        /// The method's name
        /// </summary>
        internal string Name {
            get { return _name; }
        }

        /// <summary>
        /// The id of the service this method belongs to
        /// </summary>
        internal int ServiceId {
            get { return _serviceid; }
        }

        /// <summary>
        /// The key of the service this method belongs to
        /// </summary>
        internal string ServiceKey {
            get { return _servicekey; }
        }

        /// <summary>
        /// Is this method public
        /// </summary>
        internal bool IsPublic {
            get { return _isPublic; }
        }

        /// <summary>
        /// Are calls to this method logged
        /// </summary>
        internal bool IsLogged {
            get { return _isLogged; }
        }

        /// <summary>
        /// Is an authenticated user required to call this method
        /// </summary>
        internal bool IsAuthenticatedUserRequired {
            get { return _isAuthenticatedUserRequired; }
        }

        /// <summary>
        /// Is authorization required to call this method
        /// </summary>
        internal bool IsAuthorizedUserRequired {
            get { return _isAuthorizedUserRequired; }
        }

        /// <summary>
        /// Is the method result cacheable
        /// </summary>
        internal bool IsResultCacheable {
            get { return _isResultCacheable; }
        }

        /// <summary>
        /// Log the details of this method call for replay when an exception occurs
        /// </summary>
        internal bool IsLogReplayDetailsOnException {
            // Fixed: previously hardcoded `true`, ignoring the value supplied to the constructor.
            get { return _isLogReplayDetailsOnException; }
        }

        /// <summary>
        /// The lifetime of a cached result
        /// </summary>
        internal int? CachedResultLifetime {
            get { return _cachedResultLifetime; }
        }

        /// <summary>
        /// The reflection info associated with the method
        /// </summary>
        internal MethodInfo Info {
            get { return _mi; }
        }

        #endregion

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="id">The method's Legion ID</param>
        /// <param name="key">The method's key</param>
        /// <param name="name">The method's name</param>
        /// <param name="serviceid">The id of the Service this Method belongs to</param>
        /// <param name="servicekey">The key of the Service this Method belongs to</param>
        /// <param name="isPublic">Is this method publicly available</param>
        /// <param name="isLogged">Is this a logged method</param>
        /// <param name="isAuthenticatedUserRequired">is an authenticated user required to call this method</param>
        /// <param name="isAuthorizedUserRequired">is an authorized user required to call this method</param>
        /// <param name="isResultCacheable">is this result cacheable</param>
        /// <param name="isLogReplayDetailsOnException">is the method replayable in the event of an exception</param>
        /// <param name="cachedResultLifetime">the lifetime this method's results are valid for</param>
        /// <param name="mi">The method's info</param>
        internal Method(int id, string key, string name, int serviceid, string servicekey, bool isPublic, bool isLogged, bool isAuthenticatedUserRequired, bool isAuthorizedUserRequired, bool isResultCacheable, bool isLogReplayDetailsOnException, int? cachedResultLifetime, MethodInfo mi) {
            _id = id;
            _key = key;
            _name = name;
            _serviceid = serviceid;
            _servicekey = servicekey;
            _isPublic = isPublic;
            _isLogged = isLogged;
            _isAuthenticatedUserRequired = isAuthenticatedUserRequired;
            _isAuthorizedUserRequired = isAuthorizedUserRequired;
            _isResultCacheable = isResultCacheable;
            _isLogReplayDetailsOnException = isLogReplayDetailsOnException;
            _cachedResultLifetime = cachedResultLifetime;
            _mi = mi;
        }

        /// <summary>
        /// Invokes the method
        /// </summary>
        /// <param name="request">The Request object to pass to the Method</param>
        /// <param name="reply">The Reply to the client</param>
        internal void Invoke(Request request, Reply reply) {
            bool invoke = true;
            Binary resultKey = null;
            string cachingKey = null;

            if (IsResultCacheable) {
                // Results are keyed by method id + a hash of the parameter set.
                resultKey = request.ParameterSet.GetHash();
                cachingKey = string.Format(CACHING_KEY_FORMAT, _id, resultKey);
                CachedResult cachedResult = ResultCache.GetCachedResult(_id, resultKey);
                if (cachedResult.Found) {
                    // Serve the (possibly stale) cached result; if expired and no other
                    // thread is refreshing it, kick off a background refresh.
                    if (cachedResult.IsExpired && !Monitor.IsEntered(DynamicLock.Get(CACHING_LOCK_TYPE, cachingKey))) {
                        request.MakeThreadSafe();
                        ParameterizedThreadStart work = new ParameterizedThreadStart(CacheMethodCall);
                        Thread thread = new Thread(work);
                        thread.Start(new CacheMethodCallParams() {
                            Method = this,
                            ResultKey = resultKey,
                            Request = request,
                            Reply = new Reply(request.FormatKey)
                        });
                    }

                    reply.Result.Raw.InnerXml = cachedResult.Result.GetInnerXml();
                    XmlElement xCached = reply.Response.AddElement(Settings.GetString("NodeNameCachedResult"));
                    xCached.SetAttribute(Settings.GetString("NodeAttributeCachedResultUpdatedOn"), Settings.FormatDateTime(cachedResult.UpdatedOn));
                    xCached.SetAttribute(Settings.GetString("NodeAttributeCachedResultExpiresOn"), Settings.FormatDateTime(cachedResult.ExpiresOn));
                    invoke = false;
                }
                else {
                    //if the current result is caching, sleep
                    while (Monitor.IsEntered(DynamicLock.Get(CACHING_LOCK_TYPE, cachingKey)))
                        Thread.Sleep(Settings.GetInt("ResultCachingConcurrentCallSleepInterval"));

                    // Re-check: the concurrent caller may have populated the cache while we slept.
                    cachedResult = ResultCache.GetCachedResult(_id, resultKey);
                    if (cachedResult.Found) {
                        reply.Result.Raw.InnerXml = cachedResult.Result.GetInnerXml();
                        XmlElement xCached = reply.Response.AddElement(Settings.GetString("NodeNameCachedResult"));
                        xCached.SetAttribute(Settings.GetString("NodeAttributeCachedResultUpdatedOn"), Settings.FormatDateTime(cachedResult.UpdatedOn));
                        xCached.SetAttribute(Settings.GetString("NodeAttributeCachedResultExpiresOn"), Settings.FormatDateTime(cachedResult.ExpiresOn));
                        invoke = false;
                    }
                }
            }

            if (invoke) {
                if (SPECIAL_METHODS.ContainsKey(_key))
                    InvokeSpecialMethod(reply.Result);
                else {
                    if (IsResultCacheable) {
                        // Invoke synchronously through the caching wrapper so the fresh
                        // result is stored for subsequent calls.
                        request.MakeThreadSafe();
                        CacheMethodCall(new CacheMethodCallParams() {
                            Method = this,
                            ResultKey = resultKey,
                            CachingKey = cachingKey,
                            Request = request,
                            Reply = reply
                        });
                    }
                    else
                        _mi.Invoke(null, (new object[] { request, reply.Result, reply.Error }));
                }
            }
        }

        /// <summary>
        /// Logs the method call to database
        /// </summary>
        /// <param name="calledAt">The DateTime the Request was made</param>
        /// <param name="executeStart">The DateTime the Request was passed to the Method</param>
        /// <param name="executeEnd">The DateTime the Method finished processing and control was passed back to the framework</param>
        /// <param name="application">The application calling the method</param>
        /// <param name="serviceKey">The key of the service which was called</param>
        /// <param name="parameters">The parameters which were passed into the method</param>
        /// <param name="hostIPAddress">The IP address of the host calling the method</param>
        /// <param name="userIPAddress">The IP address of the user calling the method</param>
        /// <param name="permanentLog">Should a record of this call be made in the permanent log</param>
        internal void LogCall(DateTime calledAt, DateTime executeStart, DateTime executeEnd, Application application, string serviceKey, ParameterSet parameters, string hostIPAddress, string userIPAddress, bool permanentLog) {
            LogMethodParams call = new
LogMethodParams() { MethodId = (_id == -1 ? null : (int?)_id), ExecutionDuration = (executeEnd - executeStart).TotalMilliseconds, CalledAt = calledAt, ApplicationId = application.Id, APIKey = application.APIKey, HandledByIpAddress = ServerDetails.IPv4Addresses.First().ToString(), HostIpAddress = hostIPAddress, UserIpAddress = userIPAddress, PermanentLog = permanentLog, ParameterSet = parameters }; if (Settings.GetBool("CallLogBufferEnabled")) { _bufferedCallLogQueue.Enqueue(call); } else { ParameterizedThreadStart work = new ParameterizedThreadStart(LogMethodCallThread); Thread thread = new Thread(work); thread.Start(call); } if (permanentLog) { string entry = Settings.GetString("LogFormatMethodCall", new Dictionary<string, string>(){ {"ApplicationId", application.Id.ToString()}, {"ApplicationName", application.Name}, {"MethodId", _id.ToString()}, {"ServiceKey", serviceKey}, {"MethodKey", _key}, {"Parameters", parameters.ToXml().OuterXml} }); Logging.Module.WriteEvent(new LoggedEvent(EventLevel.Event, string.Format("{0}.{1}", serviceKey, _key), entry) { Async = true, Group = "ServiceCallAudit", ApplicationType = ApplicationType.Application, ApplicationName = application.Name, ClientIp = userIPAddress, HostIp = hostIPAddress }); } } internal static void FlushCallBuffer() { DataTable tblCallLogBuffer = GetCallLogBufferTable(); LogMethodParams callLog; while (_bufferedCallLogQueue.TryDequeue(out callLog)) { DataRow row = tblCallLogBuffer.NewRow(); row["MethodId"] = callLog.MethodId; row["ExecutionDuration"] = callLog.ExecutionDuration; row["CalledAt"] = callLog.CalledAt; row["ApplicationId"] = callLog.ApplicationId; row["HandledByIpAddress"] = callLog.HandledByIpAddress; row["HostIpAddress"] = callLog.HostIpAddress; row["UserIpAddress"] = callLog.UserIpAddress; tblCallLogBuffer.Rows.Add(row); } using (LegionLinqDataContext db = new LegionLinqDataContext(ConfigurationManager.ConnectionStrings["LegionConnectionString"].ToString())) using (SqlBulkCopy bulkCopy = new 
SqlBulkCopy(ConfigurationManager.ConnectionStrings["LegionConnectionString"].ToString())) { bulkCopy.DestinationTableName = "dbo.tblCallLogBuffer"; try { bulkCopy.WriteToServer(tblCallLogBuffer); } catch (Exception ex) { Console.WriteLine(ex.Message); } } } private void InvokeSpecialMethod(object oDetailsNode) { ReplyNode detailsNode = (ReplyNode)oDetailsNode; bool r = (bool)_mi.Invoke(null, new object[1] { oDetailsNode }); detailsNode.AddElement(_name, (r ? "true" : "false")); } private static void LogMethodCallThread(object oParameters){ LogMethodParams parameters = (LogMethodParams)oParameters; try { using (LegionLinqDataContext legion = new LegionLinqDataContext(ConfigurationManager.ConnectionStrings["LegionConnectionString"].ToString())) { legion.xspLogMethodCall( parameters.MethodId, parameters.ExecutionDuration, parameters.CalledAt, parameters.ApplicationId, parameters.HandledByIpAddress, parameters.HostIpAddress, parameters.UserIpAddress, parameters.PermanentLog ); } } catch (SqlException) { //TODO: Log to flat file } } private static void CacheMethodCall(object oParameters) { CacheMethodCallParams parameters = (CacheMethodCallParams)oParameters; try { Monitor.Enter(DynamicLock.Get(CACHING_LOCK_TYPE, parameters.CachingKey)); parameters.Method._mi.Invoke(null, (new object[] { parameters.Request, parameters.Reply.Result, parameters.Reply.Error })); if (!parameters.Reply.Error.Exists) { ResultCache.CacheResult( parameters.Method.Id, parameters.ResultKey, parameters.Request.ParameterSet.ToXml().ToXElement(), parameters.Reply.Result.Raw.ToXElement() ); } } catch (Exception e) { Manager.LogFault(parameters.Request, parameters.Reply, parameters.Method, "AsynchronousInvocationException", e); } finally { lock (DynamicLock.Lock) { Monitor.Exit(DynamicLock.Get(CACHING_LOCK_TYPE, parameters.CachingKey)); DynamicLock.Remove(CACHING_LOCK_TYPE, parameters.CachingKey); } } } private static DataTable GetCallLogBufferTable() { DataTable tblCallLogBuffer = new 
DataTable("tblCallLogBuffer"); Dictionary<string, string> columns = new Dictionary<string, string>() { {"MethodId", "System.Int32" }, {"ExecutionDuration", "System.Float" }, {"CalledAt", "System.DateTime" }, {"ApplicationId", "System.Int32" }, {"HandledByIpAddress", "System.String" }, {"HostIpAddress", "System.String" }, {"ClientIpAddress", "System.String" } }; foreach (KeyValuePair<string, string> column in columns) { tblCallLogBuffer.AddColumn(column.Key, column.Value); } return tblCallLogBuffer; } private struct LogMethodParams { public int? MethodId; public double ExecutionDuration; public DateTime CalledAt; public int ApplicationId; public string APIKey; public string HandledByIpAddress; public string HostIpAddress; public string UserIpAddress; public bool PermanentLog; public ParameterSet ParameterSet; } private struct CacheMethodCallParams { public Binary ResultKey; public string CachingKey; public Method Method; public Request Request; public Reply Reply; } } }
/*
 * This file is part of AceQL C# Client SDK.
 * AceQL C# Client SDK: Remote SQL access over HTTP with AceQL HTTP.
 * Copyright (C) 2020, KawanSoft SAS
 * (http://www.kawansoft.com). All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
using AceQL.Client.Api.File;
using Newtonsoft.Json;
using PCLStorage;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace AceQL.Client.Api.Util
{
    /// <summary>
    /// Class OutParamBuilder.
    /// Parses the result out JSON file and extracts the callable statement / stored procedure
    /// out parameter dictionaries (per parameter index and per parameter name).
    /// <seealso cref="System.IDisposable" />
    /// </summary>
    internal class OutParamBuilder : IDisposable
    {
        private StreamReader streamReader;
        private JsonTextReader reader;

        /// <summary>
        /// The trace on
        /// </summary>
        private bool traceOn;

        /// <summary>
        /// The out parameter values keyed by parameter index
        /// </summary>
        private readonly Dictionary<int, string> valuesPerParamIndex = new Dictionary<int, string>();

        /// <summary>
        /// The out parameter values keyed by parameter name
        /// </summary>
        private readonly Dictionary<string, string> valuesPerParamName = new Dictionary<string, string>();

        /// <summary>
        /// Constructor. Parses the stream eagerly: both dictionaries are fully
        /// populated when the constructor returns.
        /// </summary>
        /// <param name="readStream">The reading stream on file.</param>
        public OutParamBuilder(Stream readStream)
        {
            streamReader = new StreamReader(readStream);
            reader = new JsonTextReader(streamReader);
            BuildOutParametersDicts();
        }

        /// <summary>
        /// Builds the out parameter dictionaries by reading the JSON stream.
        /// </summary>
        private void BuildOutParametersDicts()
        {
            // Note: a SQL column could itself be named "parameters_out_per_index" or
            // "parameters_out_per_name"; these properties are only expected in the
            // trailer of the JSON file, after the result-set array.
            // (A dead "isInsideArray" flag that was never set has been removed.)
            while (reader.Read())
            {
                if (reader.TokenType != JsonToken.PropertyName || !reader.Value.Equals("parameters_out_per_index"))
                {
                    continue;
                }

                string typeParameter = "per_index";

                string paramName = null;
                string paramValue = null;

                while (reader.Read())
                {
                    // Once the "parameters_out_per_name" property is seen, subsequent
                    // entries are keyed by name instead of by index.
                    if (reader.Value != null && reader.TokenType == JsonToken.PropertyName && reader.Value.Equals("parameters_out_per_name"))
                    {
                        typeParameter = "per_name";
                    }

                    if (reader.TokenType == JsonToken.PropertyName)
                    {
                        paramName = reader.Value.ToString();
                        paramName = paramName.Trim();

                        Trace("property: " + paramName + ":");
                    }
                    else if (reader.TokenType == JsonToken.String)
                    {
                        paramValue = reader.Value.ToString();
                        Trace("paramIndex: " + paramName + ":");
                        Trace("value    : " + paramValue + ":");

                        // The server encodes SQL NULL as the literal string "NULL".
                        if (paramValue.Equals("NULL"))
                        {
                            paramValue = null;
                        }

                        if (typeParameter.Equals("per_index"))
                        {
                            int paramIndexInt = Int32.Parse(paramName);
                            valuesPerParamIndex.Add(paramIndexInt, paramValue);
                        }
                        else
                        {
                            valuesPerParamName.Add(paramName, paramValue);
                        }
                    }
                }
                // We are 2 or 3 lines near end of Json file, go to end for file without tests...
            }
        }

        /// <summary>
        /// Returns the dict of parameter indexes/values.
        /// </summary>
        /// <returns>The dict of parameter indexes/values</returns>
        public Dictionary<int, string> GetvaluesPerParamIndex()
        {
            return valuesPerParamIndex;
        }

        /// <summary>
        /// Returns the dict of parameter name/values.
        /// </summary>
        /// <returns>The dict of parameter name/values.</returns>
        public Dictionary<string, string> GetvaluesPerParamName()
        {
            return valuesPerParamName;
        }

        /// <summary>
        /// Determines whether trace is on.
        /// </summary>
        /// <returns><c>true</c> if trace is on; otherwise, <c>false</c>.</returns>
        internal bool IsTraceOn()
        {
            return traceOn;
        }

        /// <summary>
        /// Sets the trace on/off.
        /// </summary>
        /// <param name="traceOn">if set to <c>true</c>, trace will be on.</param>
        internal void SetTraceOn(bool traceOn)
        {
            this.traceOn = traceOn;
        }

        /// <summary>
        /// Traces an empty line, if trace is on.
        /// </summary>
        internal void Trace()
        {
            if (traceOn)
            {
                ConsoleEmul.WriteLine();
            }
        }

        /// <summary>
        /// Traces the specified string, if trace is on.
        /// </summary>
        /// <param name="s">The string to trace.</param>
        private void Trace(String s)
        {
            if (traceOn)
            {
                ConsoleEmul.WriteLine(DateTime.Now + " " + s);
            }
        }

        /// <summary>
        /// Releases all resources used by the current instance of the <see cref="OutParamBuilder"/> class.
        /// This is recommended in order to delete the local corresponding temporary files.
        /// </summary>
        public void Dispose()
        {
            // FIX: the JSON reader cleanup was nested inside the streamReader null
            // check, so the reader leaked when streamReader was null, and the
            // underlying stream was disposed before the reader was closed.
            // Close the reader first, then dispose the stream it wraps.
            if (this.reader != null)
            {
                this.reader.Close();
                this.reader = null;
            }
            if (this.streamReader != null)
            {
                this.streamReader.Dispose();
                this.streamReader = null;
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Net;
using System.Net.Security;
using System.Net.Sockets;
using System.Runtime.CompilerServices;
using System.Security.Authentication;
using System.Security.Cryptography.X509Certificates;
using System.Threading;
using System.Threading.Tasks;

namespace System.Data.SqlClient.SNI
{
    /// <summary>
    /// TCP connection handle. Wraps a Socket/NetworkStream pair and optionally
    /// layers TLS (SslStream over SslOverTdsStream) on top of it.
    /// </summary>
    internal class SNITCPHandle : SNIHandle
    {
        private readonly string _targetServer;
        private readonly Socket _socket;
        private NetworkStream _tcpStream;
        // Points at the currently active stream: _tcpStream normally, _sslStream after EnableSsl.
        private Stream _stream;
        private SslStream _sslStream;
        private SslOverTdsStream _sslOverTdsStream;
        private SNIAsyncCallback _receiveCallback;
        private SNIAsyncCallback _sendCallback;

        private bool _validateCert = true;
        private int _bufferSize = TdsEnums.DEFAULT_LOGIN_PACKET_SIZE;
        // Single reusable receive packet; reallocated per Receive/ReceiveAsync call.
        private SNIPacket _sniPacket = new SNIPacket();
        private uint _status = TdsEnums.SNI_UNINITIALIZED;
        private Guid _connectionId = Guid.NewGuid();

        private const int MaxParallelIpAddresses = 64;

        /// <summary>
        /// Dispose object. Streams are disposed outermost-first; disposing
        /// _tcpStream also closes the socket (it was created with ownsSocket=true).
        /// </summary>
        public override void Dispose()
        {
            if (_sslOverTdsStream != null)
            {
                _sslOverTdsStream.Dispose();
                _sslOverTdsStream = null;
            }

            if (_sslStream != null)
            {
                _sslStream.Dispose();
                _sslStream = null;
            }

            if (_tcpStream != null)
            {
                _tcpStream.Dispose();
                _tcpStream = null;
            }

            if (_sniPacket != null)
            {
                _sniPacket.Dispose();
                _sniPacket = null;
            }

            //Release any references held by _stream.
            _stream = null;
        }

        /// <summary>
        /// Connection ID
        /// </summary>
        public override Guid ConnectionId
        {
            get
            {
                return _connectionId;
            }
        }

        /// <summary>
        /// Connection status
        /// </summary>
        public override uint Status
        {
            get
            {
                return _status;
            }
        }

        /// <summary>
        /// Constructor. On any failure an SNI error is reported and _status is left
        /// at SNI_UNINITIALIZED; callers must check Status before using the handle.
        /// </summary>
        /// <param name="serverName">Server name</param>
        /// <param name="port">TCP port number</param>
        /// <param name="timerExpire">Connection timer expiration (file-time ticks, or long.MaxValue for infinite)</param>
        /// <param name="callbackObject">Callback object</param>
        /// <param name="parallel">If true, attempt all resolved addresses in parallel (MultiSubnetFailover)</param>
        public SNITCPHandle(string serverName, int port, long timerExpire, object callbackObject, bool parallel)
        {
            _targetServer = serverName;

            try
            {
                TimeSpan ts = default(TimeSpan);
                // In case the Timeout is Infinite, we will receive the max value of Int64 as the tick count
                // The infinite Timeout is a function of ConnectionString Timeout=0
                bool isInfiniteTimeOut = long.MaxValue == timerExpire;
                if (!isInfiniteTimeOut)
                {
                    ts = DateTime.FromFileTime(timerExpire) - DateTime.Now;
                    ts = ts.Ticks < 0 ? TimeSpan.FromTicks(0) : ts;
                }

                Task<Socket> connectTask;
                if (parallel)
                {
                    Task<IPAddress[]> serverAddrTask = Dns.GetHostAddressesAsync(serverName);
                    serverAddrTask.Wait(ts);
                    IPAddress[] serverAddresses = serverAddrTask.Result;

                    if (serverAddresses.Length > MaxParallelIpAddresses)
                    {
                        // Fail if above 64 to match legacy behavior
                        ReportTcpSNIError(0, SNICommon.MultiSubnetFailoverWithMoreThan64IPs, string.Empty);
                        return;
                    }

                    connectTask = ParallelConnectAsync(serverAddresses, port);

                    if (!(isInfiniteTimeOut ? connectTask.Wait(-1) : connectTask.Wait(ts)))
                    {
                        ReportTcpSNIError(0, SNICommon.ConnOpenFailedError, string.Empty);
                        return;
                    }

                    _socket = connectTask.Result;
                }
                else
                {
                    _socket = Connect(serverName, port, ts);
                }

                if (_socket == null || !_socket.Connected)
                {
                    if (_socket != null)
                    {
                        _socket.Dispose();
                        _socket = null;
                    }
                    ReportTcpSNIError(0, SNICommon.ConnOpenFailedError, string.Empty);
                    return;
                }

                _socket.NoDelay = true;
                // ownsSocket=true: disposing _tcpStream closes _socket.
                _tcpStream = new NetworkStream(_socket, true);

                _sslOverTdsStream = new SslOverTdsStream(_tcpStream);
                _sslStream = new SslStream(_sslOverTdsStream, true, new RemoteCertificateValidationCallback(ValidateServerCertificate), null);
            }
            catch (SocketException se)
            {
                ReportTcpSNIError(se);
                return;
            }
            catch (Exception e)
            {
                ReportTcpSNIError(e);
                return;
            }

            _stream = _tcpStream;
            _status = TdsEnums.SNI_SUCCESS;
        }

        /// <summary>
        /// Synchronous connect: tries the first resolved IPv4 address and then the
        /// first resolved IPv6 address, with a cancellation timer that disposes
        /// not-yet-connected sockets when the timeout elapses.
        /// </summary>
        /// <param name="serverName">Server name to resolve</param>
        /// <param name="port">TCP port</param>
        /// <param name="timeout">Overall connect timeout</param>
        /// <returns>The first connected socket, or null on failure/timeout</returns>
        private static Socket Connect(string serverName, int port, TimeSpan timeout)
        {
            IPAddress[] ipAddresses = Dns.GetHostAddresses(serverName);
            IPAddress serverIPv4 = null;
            IPAddress serverIPv6 = null;
            foreach (IPAddress ipAdress in ipAddresses)
            {
                if (ipAdress.AddressFamily == AddressFamily.InterNetwork)
                {
                    serverIPv4 = ipAdress;
                }
                else if (ipAdress.AddressFamily == AddressFamily.InterNetworkV6)
                {
                    serverIPv6 = ipAdress;
                }
            }
            // NOTE(review): only the LAST IPv4 and LAST IPv6 address from DNS are kept here.
            ipAddresses = new IPAddress[] { serverIPv4, serverIPv6 };
            Socket[] sockets = new Socket[2];

            CancellationTokenSource cts = new CancellationTokenSource();
            cts.CancelAfter(timeout);

            // Disposes any socket that has not connected yet; runs when the timeout fires.
            void Cancel()
            {
                for (int i = 0; i < sockets.Length; ++i)
                {
                    try
                    {
                        if (sockets[i] != null && !sockets[i].Connected)
                        {
                            sockets[i].Dispose();
                            sockets[i] = null;
                        }
                    }
                    catch { }
                }
            }
            cts.Token.Register(Cancel);

            Socket availableSocket = null;
            for (int i = 0; i < sockets.Length; ++i)
            {
                try
                {
                    if (ipAddresses[i] != null)
                    {
                        sockets[i] = new Socket(ipAddresses[i].AddressFamily, SocketType.Stream, ProtocolType.Tcp);
                        sockets[i].Connect(ipAddresses[i], port);
                        if (sockets[i] != null) // sockets[i] can be null if cancel callback is executed during connect()
                        {
                            if (sockets[i].Connected)
                            {
                                availableSocket = sockets[i];
                                break;
                            }
                            else
                            {
                                sockets[i].Dispose();
                                sockets[i] = null;
                            }
                        }
                    }
                }
                catch { }
            }

            return availableSocket;
        }

        /// <summary>
        /// Starts connect attempts to all addresses at once; the returned task
        /// completes with the first socket to connect (all others are disposed).
        /// </summary>
        /// <param name="serverAddresses">Resolved server addresses</param>
        /// <param name="port">TCP port</param>
        /// <returns>Task producing the winning connected socket</returns>
        private static Task<Socket> ParallelConnectAsync(IPAddress[] serverAddresses, int port)
        {
            if (serverAddresses == null)
            {
                throw new ArgumentNullException(nameof(serverAddresses));
            }
            if (serverAddresses.Length == 0)
            {
                throw new ArgumentOutOfRangeException(nameof(serverAddresses));
            }

            var sockets = new List<Socket>(serverAddresses.Length);
            var connectTasks = new List<Task>(serverAddresses.Length);
            var tcs = new TaskCompletionSource<Socket>();
            var lastError = new StrongBox<Exception>();
            var pendingCompleteCount = new StrongBox<int>(serverAddresses.Length);

            foreach (IPAddress address in serverAddresses)
            {
                var socket = new Socket(address.AddressFamily, SocketType.Stream, ProtocolType.Tcp);
                sockets.Add(socket);

                // Start all connection tasks now, to prevent possible race conditions with
                // calling ConnectAsync on disposed sockets.
                try
                {
                    connectTasks.Add(socket.ConnectAsync(address, port));
                }
                catch (Exception e)
                {
                    connectTasks.Add(Task.FromException(e));
                }
            }

            for (int i = 0; i < sockets.Count; i++)
            {
                ParallelConnectHelper(sockets[i], connectTasks[i], tcs, pendingCompleteCount, lastError, sockets);
            }

            return tcs.Task;
        }

        /// <summary>
        /// Awaits one connect attempt; the first to succeed wins the TCS and
        /// disposes the losers, the last to fail publishes the final error.
        /// </summary>
        private static async void ParallelConnectHelper(
            Socket socket,
            Task connectTask,
            TaskCompletionSource<Socket> tcs,
            StrongBox<int> pendingCompleteCount,
            StrongBox<Exception> lastError,
            List<Socket> sockets)
        {
            bool success = false;
            try
            {
                // Try to connect.  If we're successful, store this task into the result task.
                await connectTask.ConfigureAwait(false);
                success = tcs.TrySetResult(socket);
                if (success)
                {
                    // Whichever connection completes the return task is responsible for disposing
                    // all of the sockets (except for whichever one is stored into the result task).
                    // This ensures that only one thread will attempt to dispose of a socket.
                    // This is also the closest thing we have to canceling connect attempts.
                    foreach (Socket otherSocket in sockets)
                    {
                        if (otherSocket != socket)
                        {
                            otherSocket.Dispose();
                        }
                    }
                }
            }
            catch (Exception e)
            {
                // Store an exception to be published if no connection succeeds
                Interlocked.Exchange(ref lastError.Value, e);
            }
            finally
            {
                // If we didn't successfully transition the result task to completed,
                // then someone else did and they would have cleaned up, so there's nothing
                // more to do.  Otherwise, no one completed it yet or we failed; either way,
                // see if we're the last outstanding connection, and if we are, try to complete
                // the task, and if we're successful, it's our responsibility to dispose all of the sockets.
                if (!success && Interlocked.Decrement(ref pendingCompleteCount.Value) == 0)
                {
                    if (lastError.Value != null)
                    {
                        tcs.TrySetException(lastError.Value);
                    }
                    else
                    {
                        tcs.TrySetCanceled();
                    }

                    foreach (Socket s in sockets)
                    {
                        s.Dispose();
                    }
                }
            }
        }

        /// <summary>
        /// Enable SSL: performs the TLS handshake and makes _sslStream the active stream.
        /// </summary>
        /// <param name="options">SNI options; SNI_SSL_VALIDATE_CERTIFICATE toggles server cert validation</param>
        /// <returns>SNI error code</returns>
        public override uint EnableSsl(uint options)
        {
            _validateCert = (options & TdsEnums.SNI_SSL_VALIDATE_CERTIFICATE) != 0;

            try
            {
                _sslStream.AuthenticateAsClient(_targetServer);
                _sslOverTdsStream.FinishHandshake();
            }
            catch (AuthenticationException aue)
            {
                return ReportTcpSNIError(aue);
            }
            catch (InvalidOperationException ioe)
            {
                return ReportTcpSNIError(ioe);
            }

            _stream = _sslStream;
            return TdsEnums.SNI_SUCCESS;
        }

        /// <summary>
        /// Disable SSL: tears down the TLS streams and reverts to the plain TCP stream.
        /// </summary>
        public override void DisableSsl()
        {
            _sslStream.Dispose();
            _sslStream = null;
            _sslOverTdsStream.Dispose();
            _sslOverTdsStream = null;

            _stream = _tcpStream;
        }

        /// <summary>
        /// Validate server certificate callback
        /// </summary>
        /// <param name="sender">Sender object</param>
        /// <param name="cert">X.509 certificate</param>
        /// <param name="chain">X.509 chain</param>
        /// <param name="policyErrors">Policy errors</param>
        /// <returns>True if certificate is valid</returns>
        private bool ValidateServerCertificate(object sender, X509Certificate cert, X509Chain chain, SslPolicyErrors policyErrors)
        {
            // Validation can be switched off via EnableSsl options (e.g. TrustServerCertificate).
            if (!_validateCert)
            {
                return true;
            }

            return SNICommon.ValidateSslServerCertificate(_targetServer, sender, cert, chain, policyErrors);
        }

        /// <summary>
        /// Set buffer size
        /// </summary>
        /// <param name="bufferSize">Buffer size</param>
        public override void SetBufferSize(int bufferSize)
        {
            _bufferSize = bufferSize;
        }

        /// <summary>
        /// Send a packet synchronously
        /// </summary>
        /// <param name="packet">SNI packet</param>
        /// <returns>SNI error code</returns>
        public override uint Send(SNIPacket packet)
        {
            try
            {
                packet.WriteToStream(_stream);
                return TdsEnums.SNI_SUCCESS;
            }
            catch (ObjectDisposedException ode)
            {
                return ReportTcpSNIError(ode);
            }
            catch (SocketException se)
            {
                return ReportTcpSNIError(se);
            }
            catch (IOException ioe)
            {
                return ReportTcpSNIError(ioe);
            }
        }

        /// <summary>
        /// Receive a packet synchronously
        /// </summary>
        /// <param name="packet">SNI packet (out; null unless the read succeeds)</param>
        /// <param name="timeoutInMilliseconds">Timeout in Milliseconds</param>
        /// <returns>SNI error code</returns>
        public override uint Receive(out SNIPacket packet, int timeoutInMilliseconds)
        {
            packet = null;
            if (timeoutInMilliseconds > 0)
            {
                _socket.ReceiveTimeout = timeoutInMilliseconds;
            }
            else if (timeoutInMilliseconds == -1)
            {   // SqlCient internally represents infinite timeout by -1, and for TcpClient this is translated to a timeout of 0
                _socket.ReceiveTimeout = 0;
            }
            else
            {
                // otherwise it is timeout for 0 or less than -1
                ReportTcpSNIError(0, SNICommon.ConnTimeoutError, string.Empty);
                return TdsEnums.SNI_WAIT_TIMEOUT;
            }

            _sniPacket.Allocate(_bufferSize);

            try
            {
                _sniPacket.ReadFromStream(_stream);

                if (_sniPacket.Length == 0)
                {
                    // Zero-length read: surface the underlying Win32 error.
                    Win32Exception e = new Win32Exception();
                    return ReportErrorAndReleasePacket(_sniPacket, (uint)e.NativeErrorCode, 0, e.Message);
                }

                packet = _sniPacket;
                return TdsEnums.SNI_SUCCESS;
            }
            catch (ObjectDisposedException ode)
            {
                // NOTE(review): in these catch blocks 'packet' is still null, so
                // ReportErrorAndReleasePacket does not release _sniPacket — confirm intended.
                return ReportErrorAndReleasePacket(packet, ode);
            }
            catch (SocketException se)
            {
                return ReportErrorAndReleasePacket(packet, se);
            }
            catch (IOException ioe)
            {
                if (ioe.InnerException is SocketException && ((SocketException)(ioe.InnerException)).SocketErrorCode == SocketError.TimedOut)
                {
                    return TdsEnums.SNI_WAIT_TIMEOUT;
                }
                else
                {
                    return ReportErrorAndReleasePacket(packet, ioe);
                }
            }
            finally
            {
                // Always restore the default (infinite) receive timeout.
                _socket.ReceiveTimeout = 0;
            }
        }

        /// <summary>
        /// Set async callbacks
        /// </summary>
        /// <param name="receiveCallback">Receive callback</param>
        /// <param name="sendCallback">Send callback</param>
        public override void SetAsyncCallbacks(SNIAsyncCallback receiveCallback, SNIAsyncCallback sendCallback)
        {
            _receiveCallback = receiveCallback;
            _sendCallback = sendCallback;
        }

        /// <summary>
        /// Send a packet asynchronously
        /// </summary>
        /// <param name="packet">SNI packet</param>
        /// <param name="callback">Completion callback; falls back to the callback set via SetAsyncCallbacks</param>
        /// <returns>SNI error code</returns>
        public override uint SendAsync(SNIPacket packet, SNIAsyncCallback callback = null)
        {
            Task writeTask = packet.WriteToStreamAsync(_stream);
            writeTask.ContinueWith((t) =>
            {
                SNIAsyncCallback cb = callback ?? _sendCallback;
                uint status = TdsEnums.SNI_SUCCESS;
                if (t.IsFaulted)
                {
                    SNILoadHandle.SingletonInstance.LastError = new SNIError(SNIProviders.TCP_PROV, SNICommon.InternalExceptionError, t.Exception);
                    status = TdsEnums.SNI_ERROR;
                }
                cb(packet, status);
            }
            );
            return TdsEnums.SNI_SUCCESS_IO_PENDING;
        }

        /// <summary>
        /// Receive a packet asynchronously; completion is delivered through the
        /// receive callback set via SetAsyncCallbacks.
        /// </summary>
        /// <param name="packet">SNI packet (ref; set to the pending packet)</param>
        /// <param name="isMars">Whether the connection is in MARS mode</param>
        /// <returns>SNI error code</returns>
        public override uint ReceiveAsync(ref SNIPacket packet, bool isMars = false)
        {
            packet = null;
            _sniPacket.Allocate(_bufferSize);

            try
            {
                _sniPacket.ReadFromStreamAsync(_stream, _receiveCallback, isMars);
                packet = _sniPacket;
                return TdsEnums.SNI_SUCCESS_IO_PENDING;
            }
            catch (ObjectDisposedException ode)
            {
                return ReportErrorAndReleasePacket(packet, ode);
            }
            catch (SocketException se)
            {
                return ReportErrorAndReleasePacket(packet, se);
            }
            catch (IOException ioe)
            {
                return ReportErrorAndReleasePacket(packet, ioe);
            }
        }

        /// <summary>
        /// Check SNI handle connection
        /// </summary>
        /// <returns>SNI error status</returns>
        public override uint CheckConnection()
        {
            try
            {
                // Poll with SelectError detects a socket-level error without blocking.
                if (!_socket.Connected || _socket.Poll(0, SelectMode.SelectError))
                {
                    return TdsEnums.SNI_ERROR;
                }
            }
            catch (SocketException se)
            {
                return ReportTcpSNIError(se);
            }
            catch (ObjectDisposedException ode)
            {
                return ReportTcpSNIError(ode);
            }

            return TdsEnums.SNI_SUCCESS;
        }

        /// <summary>
        /// Marks the handle failed and records the exception in the SNI last-error slot.
        /// </summary>
        private uint ReportTcpSNIError(Exception sniException)
        {
            _status = TdsEnums.SNI_ERROR;
            return SNICommon.ReportSNIError(SNIProviders.TCP_PROV, SNICommon.InternalExceptionError, sniException);
        }

        /// <summary>
        /// Marks the handle failed and records the given error codes/message.
        /// </summary>
        private uint ReportTcpSNIError(uint nativeError, uint sniError, string errorMessage)
        {
            _status = TdsEnums.SNI_ERROR;
            return SNICommon.ReportSNIError(SNIProviders.TCP_PROV, nativeError, sniError, errorMessage);
        }

        /// <summary>
        /// Releases the packet (if any) and reports the exception as an SNI error.
        /// </summary>
        private uint ReportErrorAndReleasePacket(SNIPacket packet, Exception sniException)
        {
            if (packet != null)
            {
                packet.Release();
            }
            return ReportTcpSNIError(sniException);
        }

        /// <summary>
        /// Releases the packet (if any) and reports the given error codes/message as an SNI error.
        /// </summary>
        private uint ReportErrorAndReleasePacket(SNIPacket packet, uint nativeError, uint sniError, string errorMessage)
        {
            if (packet != null)
            {
                packet.Release();
            }
            return ReportTcpSNIError(nativeError, sniError, errorMessage);
        }

#if DEBUG
        /// <summary>
        /// Test handle for killing underlying connection
        /// </summary>
        public override void KillConnection()
        {
            _socket.Shutdown(SocketShutdown.Both);
        }
#endif
    }
}
// Copyright (c) 2003, Paul Welter
// All rights reserved.

using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using System.Data;
using System.IO;
using System.Text;
using NetSpell.SpellChecker;
using NetSpell.SpellChecker.Dictionary;
using NetSpell.SpellChecker.Dictionary.Affix;
using NetSpell.SpellChecker.Dictionary.Phonetic;

namespace NetSpell.DictionaryBuild
{
    /// <summary>
    ///     MDI parent window for the dictionary-build tool. Hosts one
    ///     <c>DictionaryForm</c> child per open dictionary and routes the
    ///     File/Edit/Window menu and toolbar commands to the active child.
    /// </summary>
    public class MainForm : System.Windows.Forms.Form
    {
        // NOTE(review): never read or written in the visible code — presumably
        // intended to track open dictionary files; confirm before removing.
        private ArrayList _fileList = new ArrayList();

        // Designer-managed component container (disposed in Dispose).
        private System.ComponentModel.IContainer components;

        // Toolbar buttons mirroring the Edit/File menu commands.
        private System.Windows.Forms.ToolBarButton copyBarButton;
        private System.Windows.Forms.ToolBarButton cutBarButton;
        private System.Windows.Forms.ToolBar editToolBar;

        // Main menu and its items.
        private System.Windows.Forms.MainMenu mainMenu;
        private System.Windows.Forms.MenuItem menuEdit;
        private System.Windows.Forms.MenuItem menuEditCopy;
        private System.Windows.Forms.MenuItem menuEditCut;
        private System.Windows.Forms.MenuItem menuEditPaste;
        private System.Windows.Forms.MenuItem menuEditSelect;
        private System.Windows.Forms.MenuItem menuEditUndo;
        private System.Windows.Forms.MenuItem menuFile;
        private System.Windows.Forms.MenuItem menuFileClose;
        private System.Windows.Forms.MenuItem menuFileCloseAll;
        private System.Windows.Forms.MenuItem menuFileExit;
        private System.Windows.Forms.MenuItem menuFileNew;
        private System.Windows.Forms.MenuItem menuFileOpen;
        private System.Windows.Forms.MenuItem menuFileSave;
        private System.Windows.Forms.MenuItem menuFileSaveAll;
        private System.Windows.Forms.MenuItem menuHelp;
        private System.Windows.Forms.MenuItem menuHelpAbout;

        // Separator menu items (Text = "-").
        private System.Windows.Forms.MenuItem menuItem3;
        private System.Windows.Forms.MenuItem menuItem5;
        private System.Windows.Forms.MenuItem menuItem8;
        private System.Windows.Forms.MenuItem menuItem9;

        private System.Windows.Forms.MenuItem menuWindow;
        private System.Windows.Forms.MenuItem menuWindowCascade;
        private System.Windows.Forms.MenuItem menuWindowHorizontal;
        private System.Windows.Forms.MenuItem menuWindowVertical;

        private System.Windows.Forms.ToolBarButton newBarButton;
        private System.Windows.Forms.ToolBarButton openBarButton;
        private System.Windows.Forms.ToolBarButton pasteBarButton;
        private System.Windows.Forms.ToolBarButton saveBarButton;

        // Separator toolbar buttons (Style = Separator).
        private System.Windows.Forms.ToolBarButton toolBarButton11;
        private System.Windows.Forms.ToolBarButton toolBarButton4;
        private System.Windows.Forms.ToolBarButton toolBarButton8;

        private System.Windows.Forms.ImageList toolBarImages;

        // Internal so MDI children can write status text directly.
        internal System.Windows.Forms.StatusBar statusBar;
        private System.Windows.Forms.ToolBarButton undoBarButton;

        /// <summary>
        ///     Creates the main window and wires up all designer-created controls.
        /// </summary>
        public MainForm()
        {
            //
            // Required for Windows Form Designer support
            //
            InitializeComponent();
        }

        /// <summary>
        ///     Dispatches toolbar clicks to the matching menu handler so the
        ///     toolbar and menu share one implementation per command.
        /// </summary>
        private void editToolBar_ButtonClick(object sender, System.Windows.Forms.ToolBarButtonClickEventArgs e)
        {
            if(e.Button == newBarButton)
            {
                this.menuFileNew_Click(sender, new EventArgs());
            }
            else if(e.Button == openBarButton)
            {
                this.menuFileOpen_Click(sender, new EventArgs());
            }
            else if(e.Button == saveBarButton)
            {
                this.menuFileSave_Click(sender, new EventArgs());
            }
            else if(e.Button == cutBarButton)
            {
                this.menuEditCut_Click(sender, new EventArgs());
            }
            else if(e.Button == copyBarButton)
            {
                this.menuEditCopy_Click(sender, new EventArgs());
            }
            else if(e.Button == pasteBarButton)
            {
                this.menuEditPaste_Click(sender, new EventArgs());
            }
            else if(e.Button == undoBarButton)
            {
                this.menuEditUndo_Click(sender, new EventArgs());
            }
        }

        /// <summary>
        ///     Returns the focused TextBox of the active MDI child, or null when
        ///     there is no child or the focused control is not a TextBox.
        ///     The Edit commands no-op when this returns null.
        /// </summary>
        private TextBox GetActiveTextBox()
        {
            if (this.ActiveMdiChild != null)
            {
                if (this.ActiveMdiChild.ActiveControl != null)
                {
                    // Exact type check: a TextBox-derived control would not match.
                    if (this.ActiveMdiChild.ActiveControl.GetType() == typeof(TextBox))
                    {
                        return (TextBox)this.ActiveMdiChild.ActiveControl;
                    }
                }
            }
            return null;
        }

        /// <summary>
        /// The main entry point for the application.
        /// </summary>
        [STAThread]
        static void Main()
        {
            Application.Run(new MainForm());
        }

        // Form Closing handler; the close-all call is intentionally disabled
        // (closing the MDI parent already closes the children).
        private void MainForm_Closing(object sender, System.ComponentModel.CancelEventArgs e)
        {
            //this.menuFileCloseAll_Click(sender, new EventArgs());
        }

        // Edit > Copy: copies the selection of the active child's TextBox.
        private void menuEditCopy_Click(object sender, System.EventArgs e)
        {
            TextBox current = GetActiveTextBox();
            if (current != null)
            {
                current.Copy();
            }
        }

        // Edit > Cut: cuts the selection of the active child's TextBox.
        private void menuEditCut_Click(object sender, System.EventArgs e)
        {
            TextBox current = GetActiveTextBox();
            if (current != null)
            {
                current.Cut();
            }
        }

        // Edit > Paste: pastes into the active child's TextBox.
        private void menuEditPaste_Click(object sender, System.EventArgs e)
        {
            TextBox current = GetActiveTextBox();
            if (current != null)
            {
                current.Paste();
            }
        }

        // Edit > Select All.
        private void menuEditSelect_Click(object sender, System.EventArgs e)
        {
            TextBox current = GetActiveTextBox();
            if (current != null)
            {
                current.SelectAll();
            }
        }

        // Edit > Undo.
        private void menuEditUndo_Click(object sender, System.EventArgs e)
        {
            TextBox current = GetActiveTextBox();
            if (current != null)
            {
                current.Undo();
            }
        }

        // File > Close: closes only the active MDI child.
        private void menuFileClose_Click(object sender, System.EventArgs e)
        {
            if (this.ActiveMdiChild != null)
            {
                this.ActiveMdiChild.Close();
            }
        }

        // File > Close All: closes every MDI child.
        private void menuFileCloseAll_Click(object sender, System.EventArgs e)
        {
            foreach (Form child in this.MdiChildren)
            {
                child.Close();
            }
        }

        // File > Exit: closes all children, then shuts down the application.
        private void menuFileExit_Click(object sender, System.EventArgs e)
        {
            this.menuFileCloseAll_Click(sender, e);
            Application.Exit();
        }

        // File > New: opens an empty DictionaryForm as an MDI child.
        private void menuFileNew_Click(object sender, System.EventArgs e)
        {
            DictionaryForm newForm = new DictionaryForm();
            newForm.MdiParent = this;
            newForm.Show();
        }

        // File > Open: only shows the child if the user actually picked a
        // dictionary (OpenDictionary returns false on cancel/failure).
        private void menuFileOpen_Click(object sender, System.EventArgs e)
        {
            DictionaryForm newForm = new DictionaryForm();
            if (newForm.OpenDictionary())
            {
                newForm.MdiParent = this;
                newForm.Show();
            }
        }

        // File > Save: saves the active child's dictionary.
        private void menuFileSave_Click(object sender, System.EventArgs e)
        {
            if (this.ActiveMdiChild != null)
            {
                DictionaryForm child = (DictionaryForm)this.ActiveMdiChild;
                child.SaveDictionary();
            }
        }

        // File > Save All: saves every open dictionary.
        // NOTE(review): casts every MDI child to DictionaryForm — would throw
        // if a child of another type were ever hosted; confirm all children
        // are DictionaryForm instances.
        private void menuFileSaveAll_Click(object sender, System.EventArgs e)
        {
            foreach (DictionaryForm child in this.MdiChildren)
            {
                child.SaveDictionary();
            }
        }

        // Help > About: shows the modal about dialog.
        private void menuHelpAbout_Click(object sender, System.EventArgs e)
        {
            AboutForm about = new AboutForm();
            about.ShowDialog(this);
        }

        // Window > Cascade.
        private void menuWindowCascade_Click(object sender, System.EventArgs e)
        {
            this.LayoutMdi(MdiLayout.Cascade);
        }

        // Window > Tile Horizontal.
        private void menuWindowHorizontal_Click(object sender, System.EventArgs e)
        {
            this.LayoutMdi(MdiLayout.TileHorizontal);
        }

        // Window > Tile Vertical.
        private void menuWindowVertical_Click(object sender, System.EventArgs e)
        {
            this.LayoutMdi(MdiLayout.TileVertical);
        }

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        protected override void Dispose( bool disposing )
        {
            if( disposing )
            {
                if (components != null)
                {
                    components.Dispose();
                }
            }
            base.Dispose( disposing );
        }

        #region Windows Form Designer generated code
        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(MainForm));
            this.statusBar = new System.Windows.Forms.StatusBar();
            this.mainMenu = new System.Windows.Forms.MainMenu();
            this.menuFile = new System.Windows.Forms.MenuItem();
            this.menuFileNew = new System.Windows.Forms.MenuItem();
            this.menuFileOpen = new System.Windows.Forms.MenuItem();
            this.menuFileClose = new System.Windows.Forms.MenuItem();
            this.menuFileCloseAll = new System.Windows.Forms.MenuItem();
            this.menuItem5 = new System.Windows.Forms.MenuItem();
            this.menuFileSave = new System.Windows.Forms.MenuItem();
            this.menuFileSaveAll = new System.Windows.Forms.MenuItem();
            this.menuItem9 = new System.Windows.Forms.MenuItem();
            this.menuFileExit = new System.Windows.Forms.MenuItem();
            this.menuEdit = new System.Windows.Forms.MenuItem();
            this.menuEditUndo = new System.Windows.Forms.MenuItem();
            this.menuItem3 = new System.Windows.Forms.MenuItem();
            this.menuEditCut = new System.Windows.Forms.MenuItem();
            this.menuEditCopy = new System.Windows.Forms.MenuItem();
            this.menuEditPaste = new System.Windows.Forms.MenuItem();
            this.menuItem8 = new System.Windows.Forms.MenuItem();
            this.menuEditSelect = new System.Windows.Forms.MenuItem();
            this.menuWindow = new System.Windows.Forms.MenuItem();
            this.menuWindowHorizontal = new System.Windows.Forms.MenuItem();
            this.menuWindowVertical = new System.Windows.Forms.MenuItem();
            this.menuWindowCascade = new System.Windows.Forms.MenuItem();
            this.menuHelp = new System.Windows.Forms.MenuItem();
            this.menuHelpAbout = new System.Windows.Forms.MenuItem();
            this.toolBarImages = new System.Windows.Forms.ImageList(this.components);
            this.editToolBar = new System.Windows.Forms.ToolBar();
            this.newBarButton = new System.Windows.Forms.ToolBarButton();
            this.openBarButton = new System.Windows.Forms.ToolBarButton();
            this.saveBarButton = new System.Windows.Forms.ToolBarButton();
            this.toolBarButton4 = new System.Windows.Forms.ToolBarButton();
            this.cutBarButton = new System.Windows.Forms.ToolBarButton();
            this.copyBarButton = new System.Windows.Forms.ToolBarButton();
            this.pasteBarButton = new System.Windows.Forms.ToolBarButton();
            this.toolBarButton8 = new System.Windows.Forms.ToolBarButton();
            this.undoBarButton = new System.Windows.Forms.ToolBarButton();
            this.toolBarButton11 = new System.Windows.Forms.ToolBarButton();
            this.SuspendLayout();
            //
            // statusBar
            //
            this.statusBar.Location = new System.Drawing.Point(0, 401);
            this.statusBar.Name = "statusBar";
            this.statusBar.Size = new System.Drawing.Size(648, 16);
            this.statusBar.TabIndex = 0;
            //
            // mainMenu
            //
            this.mainMenu.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
                this.menuFile,
                this.menuEdit,
                this.menuWindow,
                this.menuHelp});
            //
            // menuFile
            //
            this.menuFile.Index = 0;
            this.menuFile.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
                this.menuFileNew,
                this.menuFileOpen,
                this.menuFileClose,
                this.menuFileCloseAll,
                this.menuItem5,
                this.menuFileSave,
                this.menuFileSaveAll,
                this.menuItem9,
                this.menuFileExit});
            this.menuFile.Text = "File";
            //
            // menuFileNew
            //
            this.menuFileNew.Index = 0;
            this.menuFileNew.Text = "New";
            this.menuFileNew.Click += new System.EventHandler(this.menuFileNew_Click);
            //
            // menuFileOpen
            //
            this.menuFileOpen.Index = 1;
            this.menuFileOpen.Text = "Open...";
            this.menuFileOpen.Click += new System.EventHandler(this.menuFileOpen_Click);
            //
            // menuFileClose
            //
            this.menuFileClose.Index = 2;
            this.menuFileClose.Text = "Close";
            this.menuFileClose.Click += new System.EventHandler(this.menuFileClose_Click);
            //
            // menuFileCloseAll
            //
            this.menuFileCloseAll.Index = 3;
            this.menuFileCloseAll.Text = "Close All";
            this.menuFileCloseAll.Click += new System.EventHandler(this.menuFileCloseAll_Click);
            //
            // menuItem5
            //
            this.menuItem5.Index = 4;
            this.menuItem5.Text = "-";
            //
            // menuFileSave
            //
            this.menuFileSave.Index = 5;
            this.menuFileSave.Text = "Save";
            this.menuFileSave.Click += new System.EventHandler(this.menuFileSave_Click);
            //
            // menuFileSaveAll
            //
            this.menuFileSaveAll.Index = 6;
            this.menuFileSaveAll.Text = "Save All";
            this.menuFileSaveAll.Click += new System.EventHandler(this.menuFileSaveAll_Click);
            //
            // menuItem9
            //
            this.menuItem9.Index = 7;
            this.menuItem9.Text = "-";
            //
            // menuFileExit
            //
            this.menuFileExit.Index = 8;
            this.menuFileExit.Text = "Exit";
            this.menuFileExit.Click += new System.EventHandler(this.menuFileExit_Click);
            //
            // menuEdit
            //
            this.menuEdit.Index = 1;
            this.menuEdit.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
                this.menuEditUndo,
                this.menuItem3,
                this.menuEditCut,
                this.menuEditCopy,
                this.menuEditPaste,
                this.menuItem8,
                this.menuEditSelect});
            this.menuEdit.Text = "Edit";
            //
            // menuEditUndo
            //
            this.menuEditUndo.Index = 0;
            this.menuEditUndo.Text = "Undo";
            this.menuEditUndo.Click += new System.EventHandler(this.menuEditUndo_Click);
            //
            // menuItem3
            //
            this.menuItem3.Index = 1;
            this.menuItem3.Text = "-";
            //
            // menuEditCut
            //
            this.menuEditCut.Index = 2;
            this.menuEditCut.Text = "Cut";
            this.menuEditCut.Click += new System.EventHandler(this.menuEditCut_Click);
            //
            // menuEditCopy
            //
            this.menuEditCopy.Index = 3;
            this.menuEditCopy.Text = "Copy";
            this.menuEditCopy.Click += new System.EventHandler(this.menuEditCopy_Click);
            //
            // menuEditPaste
            //
            this.menuEditPaste.Index = 4;
            this.menuEditPaste.Text = "Paste";
            this.menuEditPaste.Click += new System.EventHandler(this.menuEditPaste_Click);
            //
            // menuItem8
            //
            this.menuItem8.Index = 5;
            this.menuItem8.Text = "-";
            //
            // menuEditSelect
            //
            this.menuEditSelect.Index = 6;
            this.menuEditSelect.Text = "Select All";
            this.menuEditSelect.Click += new System.EventHandler(this.menuEditSelect_Click);
            //
            // menuWindow
            //
            this.menuWindow.Index = 2;
            this.menuWindow.MdiList = true;
            this.menuWindow.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
                this.menuWindowHorizontal,
                this.menuWindowVertical,
                this.menuWindowCascade});
            this.menuWindow.MergeOrder = 7;
            this.menuWindow.Text = "Window";
            //
            // menuWindowHorizontal
            //
            this.menuWindowHorizontal.Index = 0;
            this.menuWindowHorizontal.Text = "Tile Horizontal";
            this.menuWindowHorizontal.Click += new System.EventHandler(this.menuWindowHorizontal_Click);
            //
            // menuWindowVertical
            //
            this.menuWindowVertical.Index = 1;
            this.menuWindowVertical.Text = "Tile Vertical";
            this.menuWindowVertical.Click += new System.EventHandler(this.menuWindowVertical_Click);
            //
            // menuWindowCascade
            //
            this.menuWindowCascade.Index = 2;
            this.menuWindowCascade.Text = "Cascade";
            this.menuWindowCascade.Click += new System.EventHandler(this.menuWindowCascade_Click);
            //
            // menuHelp
            //
            this.menuHelp.Index = 3;
            this.menuHelp.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
                this.menuHelpAbout});
            this.menuHelp.MergeOrder = 8;
            this.menuHelp.Text = "Help";
            //
            // menuHelpAbout
            //
            this.menuHelpAbout.Index = 0;
            this.menuHelpAbout.Text = "About";
            this.menuHelpAbout.Click += new System.EventHandler(this.menuHelpAbout_Click);
            //
            // toolBarImages
            //
            this.toolBarImages.ImageSize = new System.Drawing.Size(16, 16);
            this.toolBarImages.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("toolBarImages.ImageStream")));
            this.toolBarImages.TransparentColor = System.Drawing.Color.Transparent;
            //
            // editToolBar
            //
            this.editToolBar.AutoSize = false;
            this.editToolBar.Buttons.AddRange(new System.Windows.Forms.ToolBarButton[] {
                this.newBarButton,
                this.openBarButton,
                this.saveBarButton,
                this.toolBarButton4,
                this.cutBarButton,
                this.copyBarButton,
                this.pasteBarButton,
                this.toolBarButton8,
                this.undoBarButton,
                this.toolBarButton11});
            this.editToolBar.ButtonSize = new System.Drawing.Size(24, 24);
            this.editToolBar.DropDownArrows = true;
            this.editToolBar.ImageList = this.toolBarImages;
            this.editToolBar.Location = new System.Drawing.Point(0, 0);
            this.editToolBar.Name = "editToolBar";
            this.editToolBar.ShowToolTips = true;
            this.editToolBar.Size = new System.Drawing.Size(648, 32);
            this.editToolBar.TabIndex = 1;
            this.editToolBar.TextAlign = System.Windows.Forms.ToolBarTextAlign.Right;
            this.editToolBar.ButtonClick += new System.Windows.Forms.ToolBarButtonClickEventHandler(this.editToolBar_ButtonClick);
            //
            // newBarButton
            //
            this.newBarButton.ImageIndex = 4;
            this.newBarButton.ToolTipText = "New";
            //
            // openBarButton
            //
            this.openBarButton.ImageIndex = 5;
            this.openBarButton.ToolTipText = "Open";
            //
            // saveBarButton
            //
            this.saveBarButton.ImageIndex = 8;
            this.saveBarButton.ToolTipText = "Save";
            //
            // toolBarButton4
            //
            this.toolBarButton4.Style = System.Windows.Forms.ToolBarButtonStyle.Separator;
            //
            // cutBarButton
            //
            this.cutBarButton.ImageIndex = 1;
            this.cutBarButton.ToolTipText = "Cut";
            //
            // copyBarButton
            //
            this.copyBarButton.ImageIndex = 0;
            this.copyBarButton.ToolTipText = "Copy";
            //
            // pasteBarButton
            //
            this.pasteBarButton.ImageIndex = 6;
            this.pasteBarButton.ToolTipText = "Paste";
            //
            // toolBarButton8
            //
            this.toolBarButton8.Style = System.Windows.Forms.ToolBarButtonStyle.Separator;
            //
            // undoBarButton
            //
            this.undoBarButton.ImageIndex = 10;
            this.undoBarButton.ToolTipText = "Undo";
            //
            // toolBarButton11
            //
            this.toolBarButton11.Style = System.Windows.Forms.ToolBarButtonStyle.Separator;
            //
            // MainForm
            //
            this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
            this.ClientSize = new System.Drawing.Size(648, 417);
            this.Controls.Add(this.editToolBar);
            this.Controls.Add(this.statusBar);
            this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
            this.IsMdiContainer = true;
            this.Menu = this.mainMenu;
            this.Name = "MainForm";
            this.Text = "Dictionary Build";
            this.Closing += new System.ComponentModel.CancelEventHandler(this.MainForm_Closing);
            this.ResumeLayout(false);
        }
        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.

using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Xunit;

namespace System.Json.Tests
{
    /// <summary>
    /// Unit tests for <see cref="JsonObject" />: construction, dictionary
    /// semantics (indexer, Add/Remove/Clear/ContainsKey/CopyTo), enumeration,
    /// and Save serialization.
    /// </summary>
    public class JsonObjectTests
    {
        [Fact]
        public void Ctor_EmptyArray_Works()
        {
            Assert.Equal(0, new JsonObject(new KeyValuePair<string, JsonValue>[0]).Count);
            Assert.Equal(0, new JsonObject(Enumerable.Empty<KeyValuePair<string, JsonValue>>()).Count);
        }

        [Fact]
        public void Ctor_Array()
        {
            // Workaround xunit/xunit#987: InvalidOperationException thrown if this is in MemberData
            KeyValuePair<string, JsonValue>[] items = new KeyValuePair<string, JsonValue>[] { new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true)) };

            JsonObject obj = new JsonObject(items);
            Assert.Equal(items.Length, obj.Count);
            for (int i = 0; i < items.Length; i++)
            {
                // Compare by ToString: JsonValue has no value equality.
                Assert.Equal(items[i].Value.ToString(), obj[items[i].Key].ToString());

                JsonValue value;
                Assert.True(obj.TryGetValue(items[i].Key, out value));
                Assert.Equal(items[i].Value.ToString(), value.ToString());
            }
        }

        [Fact]
        public void Ctor_IEnumerable()
        {
            // Workaround xunit/xunit#987: InvalidOperationException thrown if this is in MemberData
            KeyValuePair<string, JsonValue>[] items = new KeyValuePair<string, JsonValue>[] { new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true)) };

            // Cast forces the IEnumerable<T> constructor overload rather than params.
            JsonObject obj = new JsonObject((IEnumerable<KeyValuePair<string, JsonValue>>)items);
            Assert.Equal(items.Length, obj.Count);
            for (int i = 0; i < items.Length; i++)
            {
                Assert.Equal(items[i].Value.ToString(), obj[items[i].Key].ToString());

                JsonValue value;
                Assert.True(obj.TryGetValue(items[i].Key, out value));
                Assert.Equal(items[i].Value.ToString(), value.ToString());
            }
        }

        [Fact]
        public void Ctor_NullArray_Works()
        {
            // null binds to the params-array overload, which tolerates null.
            JsonObject obj = new JsonObject(null);
            Assert.Equal(0, obj.Count);
        }

        [Fact]
        public void Ctor_NullIEnumerable_ThrowsArgumentNullException()
        {
            // The explicit IEnumerable<T> overload, by contrast, rejects null.
            AssertExtensions.Throws<ArgumentNullException>("items", () => new JsonObject((IEnumerable<KeyValuePair<string, JsonValue>>)null));
        }

        [Fact]
        public void JsonType_ReturnsObject()
        {
            Assert.Equal(JsonType.Object, new JsonObject().JsonType);
        }

        [Fact]
        public void IsReadOnly_ReturnsFalse()
        {
            // IsReadOnly is only exposed through the ICollection<T> interface.
            ICollection<KeyValuePair<string, JsonValue>> iCollection = new JsonObject();
            Assert.False(iCollection.IsReadOnly);
        }

        [Fact]
        public void Item_Set_Get()
        {
            JsonObject obj = new JsonObject();
            string key = "key";
            JsonValue value = new JsonPrimitive(true);

            obj[key] = value;
            Assert.Equal(1, obj.Count);
            // Indexer stores the reference, not a copy.
            Assert.Same(value, obj[key]);
        }

        [Fact]
        public void Item_NoSuchKey_ThrowsKeyNotFoundException()
        {
            JsonObject obj = new JsonObject();
            Assert.Throws<KeyNotFoundException>(() => obj["no-such-key"]);
        }

        [Fact]
        public void TryGetValue_NoSuchKey_ReturnsNull()
        {
            JsonObject obj = new JsonObject();

            JsonValue value;
            Assert.False(obj.TryGetValue("no-such-key", out value));
            Assert.Null(value);
        }

        [Fact]
        public void Add()
        {
            JsonObject obj = new JsonObject();
            KeyValuePair<string, JsonValue> item = new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true));

            obj.Add(item);
            Assert.Equal(1, obj.Count);
            Assert.Equal(item.Key, obj.Keys.First());
            Assert.Equal(item.Value.ToString(), obj.Values.First().ToString());
        }

        [Fact]
        public void Add_NullKey_ThrowsArgumentNullException()
        {
            JsonObject obj = new JsonObject();
            AssertExtensions.Throws<ArgumentNullException>("key", () => obj.Add(null, new JsonPrimitive(true)));
        }

        [Fact]
        public void Add_NullKeyInKeyValuePair_ThrowsArgumentNullException()
        {
            JsonObject obj = new JsonObject();
            KeyValuePair<string, JsonValue> item = new KeyValuePair<string, JsonValue>(null, new JsonPrimitive(true));
            AssertExtensions.Throws<ArgumentNullException>("key", () => obj.Add(item));
        }

        [Fact]
        public void AddRange_Array()
        {
            KeyValuePair<string, JsonValue>[] items = new KeyValuePair<string, JsonValue>[] { new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true)) };

            JsonObject obj = new JsonObject();
            obj.AddRange(items);
            Assert.Equal(items.Length, obj.Count);
            for (int i = 0; i < items.Length; i++)
            {
                Assert.Equal(items[i].Value.ToString(), obj[items[i].Key].ToString());
            }
        }

        [Fact]
        public void AddRange_IEnumerable()
        {
            KeyValuePair<string, JsonValue>[] items = new KeyValuePair<string, JsonValue>[] { new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true)) };

            JsonObject obj = new JsonObject();
            obj.AddRange((IEnumerable<KeyValuePair<string, JsonValue>>)items);
            Assert.Equal(items.Length, obj.Count);
            for (int i = 0; i < items.Length; i++)
            {
                Assert.Equal(items[i].Value.ToString(), obj[items[i].Key].ToString());
            }
        }

        [Fact]
        public void AddRange_NullItems_ThrowsArgumentNullException()
        {
            JsonObject obj = new JsonObject();
            AssertExtensions.Throws<ArgumentNullException>("items", () => obj.AddRange(null));
            AssertExtensions.Throws<ArgumentNullException>("items", () => obj.AddRange((IEnumerable<KeyValuePair<string, JsonValue>>)null));
        }

        [Fact]
        public void Clear()
        {
            JsonObject obj = new JsonObject(new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true)));
            obj.Clear();
            Assert.Equal(0, obj.Count);

            // Clearing an already-empty object is a no-op, not an error.
            obj.Clear();
            Assert.Equal(0, obj.Count);
        }

        [Fact]
        public void ContainsKey()
        {
            KeyValuePair<string, JsonValue> item = new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true));
            JsonObject obj = new JsonObject(item);
            Assert.True(obj.ContainsKey(item.Key));
            Assert.False(obj.ContainsKey("abc"));

            // ICollection<T>.Contains matches on the full key/value pair.
            ICollection<KeyValuePair<string, JsonValue>> iCollection = obj;
            Assert.True(iCollection.Contains(item));
            Assert.False(iCollection.Contains(new KeyValuePair<string, JsonValue>()));
        }

        [Fact]
        public void ContainsKey_NullKey_ThrowsArgumentNullException()
        {
            JsonObject obj = new JsonObject();
            AssertExtensions.Throws<ArgumentNullException>("key", () => obj.ContainsKey(null));
        }

        [Theory]
        [InlineData(0)]
        [InlineData(1)]
        public void CopyTo(int arrayIndex)
        {
            KeyValuePair<string, JsonValue>[] items = new KeyValuePair<string, JsonValue>[] { new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true)) };
            JsonObject array = new JsonObject(items);

            KeyValuePair<string, JsonValue>[] copy = new KeyValuePair<string, JsonValue>[array.Count + arrayIndex];
            array.CopyTo(copy, arrayIndex);

            // Slots before arrayIndex must be untouched (default pairs).
            for (int i = 0; i < arrayIndex; i++)
            {
                Assert.Equal(default(KeyValuePair<string, JsonValue>), copy[i]);
            }
            for (int i = arrayIndex; i < copy.Length; i++)
            {
                Assert.Equal(items[i - arrayIndex], copy[i]);
            }
        }

        [Fact]
        public void Remove()
        {
            KeyValuePair<string, JsonValue> item = new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true));
            JsonObject obj = new JsonObject(item);

            obj.Remove(item.Key);
            Assert.Equal(0, obj.Count);
            Assert.False(obj.ContainsKey(item.Key));

            // Removing a missing key is a no-op, not an error.
            obj.Remove(item.Key);
            Assert.Equal(0, obj.Count);
        }

        [Fact]
        public void Remove_NullKey_ThrowsArgumentNullException()
        {
            JsonObject obj = new JsonObject();
            AssertExtensions.Throws<ArgumentNullException>("key", () => obj.Remove(null));
        }

        [Fact]
        public void ICollection_Remove()
        {
            KeyValuePair<string, JsonValue> item = new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true));
            JsonObject obj = new JsonObject(item);

            ICollection<KeyValuePair<string, JsonValue>> iCollection = obj;
            iCollection.Remove(item);
            Assert.Equal(0, obj.Count);
            Assert.False(obj.ContainsKey(item.Key));

            iCollection.Remove(item);
            Assert.Equal(0, obj.Count);
        }

        [Fact]
        public void Save_Stream()
        {
            JsonObject obj = new JsonObject(new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true)), new KeyValuePair<string, JsonValue>("key2", null));

            using (MemoryStream stream = new MemoryStream())
            {
                obj.Save(stream);
                string result = Encoding.UTF8.GetString(stream.ToArray());
                // NOTE(review): this expected string (", " separators, no comma
                // between members) differs from the TextWriter output below —
                // it appears to pin the current Save(Stream) behavior rather
                // than well-formed JSON; confirm against the implementation
                // before "fixing" either side.
                Assert.Equal("{\"key\", true\"key2\", null}", result);
            }
        }

        [Fact]
        public void Save_TextWriter()
        {
            JsonObject obj = new JsonObject(new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true)), new KeyValuePair<string, JsonValue>("key2", null));

            using (StringWriter writer = new StringWriter())
            {
                obj.Save(writer);
                Assert.Equal("{\"key\": true, \"key2\": null}", writer.ToString());
            }
        }

        [Fact]
        public void Save_NullStream_ThrowsArgumentNullException()
        {
            JsonObject obj = new JsonObject();
            AssertExtensions.Throws<ArgumentNullException>("stream", () => obj.Save((Stream)null));
            AssertExtensions.Throws<ArgumentNullException>("textWriter", () => obj.Save((TextWriter)null));
        }

        [Fact]
        public void GetEnumerator_GenericIEnumerable()
        {
            KeyValuePair<string, JsonValue>[] items = new KeyValuePair<string, JsonValue>[] { new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true)) };
            JsonObject obj = new JsonObject(items);

            IEnumerator<KeyValuePair<string, JsonValue>> enumerator = ((IEnumerable<KeyValuePair<string, JsonValue>>)obj).GetEnumerator();
            // Two passes to verify Reset() restores the enumerator.
            for (int i = 0; i < 2; i++)
            {
                int counter = 0;
                while (enumerator.MoveNext())
                {
                    Assert.Equal(items[counter].Key, enumerator.Current.Key);
                    Assert.Equal(items[counter].Value.ToString(), enumerator.Current.Value.ToString());
                    counter++;
                }
                Assert.Equal(obj.Count, counter);

                enumerator.Reset();
            }
        }

        [Fact]
        public void GetEnumerator_NonGenericIEnumerable()
        {
            KeyValuePair<string, JsonValue>[] items = new KeyValuePair<string, JsonValue>[] { new KeyValuePair<string, JsonValue>("key", new JsonPrimitive(true)) };
            JsonObject obj = new JsonObject(items);

            IEnumerator enumerator = ((IEnumerable)obj).GetEnumerator();
            // Two passes to verify Reset() restores the enumerator.
            for (int i = 0; i < 2; i++)
            {
                int counter = 0;
                while (enumerator.MoveNext())
                {
                    KeyValuePair<string, JsonValue> current = (KeyValuePair<string, JsonValue>)enumerator.Current;
                    Assert.Equal(items[counter].Key, current.Key);
                    Assert.Equal(items[counter].Value.ToString(), current.Value.ToString());
                    counter++;
                }
                Assert.Equal(obj.Count, counter);

                enumerator.Reset();
            }
        }
    }
}
using System; using System.Diagnostics; using System.Linq; using System.Linq.Expressions; using System.Web.Http.Controllers; using Aranasoft.Cobweb.Extentions; using Aranasoft.Cobweb.Extentions.ObjectExtentions; using Aranasoft.Cobweb.Reflection.Extensions; using FluentAssertions; using FluentAssertions.Execution; using FluentAssertions.Primitives; namespace Aranasoft.Cobweb.Http.Validation.Assertions { /// <summary> /// Contains a number of methods to assert that a <see cref="HttpActionContext" /> is in the expected state. /// </summary> [DebuggerNonUserCode] public class HttpActionContextAssertions : ReferenceTypeAssertions<HttpActionContext, HttpActionContextAssertions> { public HttpActionContextAssertions(HttpActionContext value) { Subject = value; } /// <summary> /// Returns the type of the subject the assertion applies on. /// </summary> protected override string Context => "HttpActionContext"; /// <summary> /// Asserts that a <see cref="HttpActionContext">actionContext</see> ActionArguments dictionary does not contain any items. /// </summary> /// <param name="because"> /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically. /// </param> /// <param name="reasonArgs"> /// Zero or more objects to format using the placeholders in <see cref="because" />. 
/// </param> public AndConstraint<HttpActionContextAssertions> HaveNoArguments(string because = "", params object[] reasonArgs) { if (ReferenceEquals(Subject, null)) { Execute.Assertion .BecauseOf(because, reasonArgs) .FailWith("Expected {context:actioncontext} to be empty{reason}, but found {0}.", Subject); } if (ReferenceEquals(Subject.ActionArguments, null)) { Execute.Assertion .BecauseOf(because, reasonArgs) .FailWith("Expected {context:actioncontext} to be empty{reason}, but found {0}.", Subject.ActionArguments); } Execute.Assertion .ForCondition(!Subject.ActionArguments.Any()) .BecauseOf(because, reasonArgs) .FailWith("Expected {context:actioncontext} to not have any arguments{reason}, but found {0}.", Subject.ActionArguments.Count); return new AndConstraint<HttpActionContextAssertions>(this); } /// <summary> /// Asserts that a <see cref="HttpActionContext">actionContext</see> ActionArguments dictionary contains at least 1 item. /// </summary> /// <param name="because"> /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically. /// </param> /// <param name="reasonArgs"> /// Zero or more objects to format using the placeholders in <see cref="because" />. 
/// </param> public AndConstraint<HttpActionContextAssertions> HaveArguments(string because = "", params object[] reasonArgs) { if (ReferenceEquals(Subject, null)) { Execute.Assertion .BecauseOf(because, reasonArgs) .FailWith("Expected {context:actioncontext} not to be empty{reason}, but found {0}.", Subject); } if (ReferenceEquals(Subject.ActionArguments, null)) { Execute.Assertion .BecauseOf(because, reasonArgs) .FailWith("Expected {context:actioncontext} not to be empty{reason}, but found {0}.", Subject.ActionArguments); } Execute.Assertion .ForCondition(Subject.ActionArguments.Any()) .BecauseOf(because, reasonArgs) .FailWith("Expected {context:actioncontext} to have one or more items{reason}, but found none."); return new AndConstraint<HttpActionContextAssertions>(this); } /// <summary> /// Asserts that a <see cref="HttpActionContext">actionContext</see> ActionArguments has a specified argument. /// </summary> /// <param name="key">The action argument key.</param> /// <param name="expectedValue">The expected action argument value.</param> /// <param name="because"> /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically. /// </param> /// <param name="reasonArgs"> /// Zero or more objects to format using the placeholders in <see cref="because" />. 
/// </param> public AndConstraint<HttpActionContextAssertions> HaveEquivalentActionArgument(string key, string expectedValue, string because = "", params object[] reasonArgs) { if (ReferenceEquals(Subject, null)) { Execute.Assertion .BecauseOf(because, reasonArgs) .FailWith( "Expected {context:actioncontext} to to have argument {0}{reason}, but {context:actioncontext} was <null>.", key); } Execute.Assertion .BecauseOf(because, reasonArgs) .ForCondition(Subject.ActionArguments.ContainsKey(key)) .FailWith( "Expected {context:actioncontext} to to have argument {0}{reason}, but argument {0} was not found in {context:actioncontext}.", key ); var actualValue = Subject.ActionArguments.ContainsKey(key) && !string.IsNullOrEmpty(Subject.ActionArguments[key].IfExists(val => val.ToString())) ? Subject.ActionArguments[key].IfExists(val => val.ToString()) : null; Execute.Assertion .BecauseOf(because, reasonArgs) .ForCondition( string.Compare(expectedValue, actualValue, StringComparison.InvariantCultureIgnoreCase) == 0) .FailWith( "Expected {context:actioncontext} to to have argument {0} with value {1}{reason}, but value was {2}.", key, expectedValue, actualValue ); return new AndConstraint<HttpActionContextAssertions>(this); } /// <summary> /// Asserts that a <see cref="HttpActionContext">actionContext</see> has a specified route parameter. /// </summary> /// <typeparam name="TValue">The type of <paramref name="expectedValue" /></typeparam> /// <param name="key">The route parameter key.</param> /// <param name="expectedValue">The expected route parameter value.</param> /// <param name="because"> /// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion /// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically. /// </param> /// <param name="reasonArgs"> /// Zero or more objects to format using the placeholders in <see cref="because" />. 
/// </param>
public AndConstraint<HttpActionContextAssertions> HaveActionArgument<TValue>(string key, TValue expectedValue,
    string because = "", params object[] reasonArgs) where TValue : class
{
    return HaveActionArgument(key, expectedValue, typeof(TValue), because, reasonArgs);
}

/// <summary>
/// Asserts that a <see cref="HttpActionContext">actionContext</see> has a specified action argument.
/// </summary>
/// <param name="key">The action argument key.</param>
/// <param name="expectedValue">The expected action argument value.</param>
/// <param name="expectedValueType">The type of <paramref name="expectedValue" /></param>
/// <param name="because">
/// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
/// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
/// </param>
/// <param name="reasonArgs">
/// Zero or more objects to format using the placeholders in <see cref="because" />.
/// </param>
public AndConstraint<HttpActionContextAssertions> HaveActionArgument(string key, object expectedValue,
    Type expectedValueType, string because = "", params object[] reasonArgs)
{
    // Guard: a null subject can never hold an argument.
    // (Fixed doubled word "to to" in the failure messages below.)
    if (ReferenceEquals(Subject, null))
    {
        Execute.Assertion
            .BecauseOf(because, reasonArgs)
            .FailWith(
                "Expected {context:actioncontext} to have argument {0}{reason}, but {context:actioncontext} was <null>.",
                key);
    }

    Execute.Assertion
        .BecauseOf(because, reasonArgs)
        .ForCondition(Subject.ActionArguments.ContainsKey(key))
        .FailWith(
            "Expected {context:actioncontext} to have argument {0}{reason}, but argument {0} was not found in {context:actioncontext}.",
            key);

    object actualValue;
    Subject.ActionArguments.TryGetValue(key, out actualValue);

    // Unwrap Nullable<T> so that e.g. an int? argument matches an expected int.
    actualValue.Should().BeOfType(Nullable.GetUnderlyingType(expectedValueType) ?? expectedValueType);
    actualValue.ShouldBeEquivalentTo(expectedValue, options => options.RespectingRuntimeTypes());

    return new AndConstraint<HttpActionContextAssertions>(this);
}

/// <summary>
/// Asserts that a <see cref="HttpActionContext">actionContext</see> has an optional specified action argument.
/// </summary>
/// <typeparam name="TValue">The type of <paramref name="expectedValue" /></typeparam>
/// <param name="key">The action argument key.</param>
/// <param name="expectedValue">The expected action argument value.</param>
/// <param name="because">
/// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
/// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
/// </param>
/// <param name="reasonArgs">
/// Zero or more objects to format using the placeholders in <see cref="because" />.
/// </param>
public AndConstraint<HttpActionContextAssertions> HaveOptionalActionArgument<TValue>(string key,
    TValue expectedValue, string because = "", params object[] reasonArgs) where TValue : class
{
    return HaveOptionalActionArgument(key, expectedValue, typeof(TValue), because, reasonArgs);
}

/// <summary>
/// Asserts that a <see cref="HttpActionContext">actionContext</see> has an optional specified action argument.
/// </summary>
/// <param name="key">The action argument key.</param>
/// <param name="expectedValue">The expected action argument value.</param>
/// <param name="expectedValueType">The type of <paramref name="expectedValue" /></param>
/// <param name="because">
/// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
/// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
/// </param>
/// <param name="reasonArgs">
/// Zero or more objects to format using the placeholders in <see cref="because" />.
/// </param>
public AndConstraint<HttpActionContextAssertions> HaveOptionalActionArgument(string key, object expectedValue,
    Type expectedValueType, string because = "", params object[] reasonArgs)
{
    // Guard: a null subject can never hold an argument.
    if (ReferenceEquals(Subject, null))
    {
        Execute.Assertion
            .BecauseOf(because, reasonArgs)
            .FailWith(
                "Expected {context:actioncontext} to have optional argument {0}{reason}, but {context:actioncontext} was <null>.",
                key);
    }

    // Only insist the argument is present when a non-null value is expected;
    // an optional argument may legitimately be absent or null.
    if (!ReferenceEquals(expectedValue, null))
    {
        Execute.Assertion
            .BecauseOf(because, reasonArgs)
            .ForCondition(Subject.ActionArguments.ContainsKey(key))
            .FailWith(
                "Expected {context:actioncontext} to have argument {0} with value {1}{reason}, but argument {0} was not found in {context:actioncontext}.",
                key, expectedValue);
    }

    object actualValue;
    Subject.ActionArguments.TryGetValue(key, out actualValue);

    if (ReferenceEquals(expectedValue, null))
    {
        // Expected null: the bound value (missing counts as null) must also be null.
        Execute.Assertion
            .BecauseOf(because, reasonArgs)
            .ForCondition(actualValue == null)
            .FailWith(
                "Expected {context:actioncontext} to have argument {0} with value <null>{reason}, but value was {1}.",
                key, actualValue);
    }
    else
    {
        if (!ReferenceEquals(actualValue, null))
        {
            // Unwrap Nullable<T> so that e.g. an int? argument matches an expected int.
            actualValue.Should().BeOfType(Nullable.GetUnderlyingType(expectedValueType) ?? expectedValueType);
            actualValue.ShouldBeEquivalentTo(expectedValue, options => options.RespectingRuntimeTypes());
        }
    }

    return new AndConstraint<HttpActionContextAssertions>(this);
}

/// <summary>
/// Asserts that a <see cref="HttpActionContext">actionContext</see> maps to a specified
/// <typeparamref name="THttpController">controller</typeparamref>.
/// </summary>
/// <typeparam name="THttpController">The type of Controller.</typeparam>
/// <param name="because">
/// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
/// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
/// </param>
/// <param name="reasonArgs">
/// Zero or more objects to format using the placeholders in <see cref="because" />.
/// </param>
public AndConstraint<HttpActionContextAssertions> MapTo<THttpController>(string because = "",
    params object[] reasonArgs) where THttpController : IHttpController
{
    if (ReferenceEquals(Subject, null))
    {
        Execute.Assertion
            .BecauseOf(because, reasonArgs)
            .FailWith("Expected {context:actioncontext} to not be <null>{reason}.");
    }

    // Resolve the conventional name for the controller type, then delegate to the
    // name-based assertion.
    string controllerName = HttpRouteDataFactory.HttpControllerName<THttpController>();
    Subject.Should().MapToController(controllerName, because, reasonArgs);

    return new AndConstraint<HttpActionContextAssertions>(this);
}

/// <summary>
/// Asserts that a <see cref="HttpActionContext">actionContext</see> maps to a specified
/// <paramref name="expectedController">controller</paramref>.
/// </summary>
/// <param name="expectedController">The name of the controller.</param>
/// <param name="because">
/// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
/// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
/// </param>
/// <param name="reasonArgs">
/// Zero or more objects to format using the placeholders in <see cref="because" />.
/// </param>
public AndConstraint<HttpActionContextAssertions> MapToController(string expectedController, string because = "",
    params object[] reasonArgs)
{
    if (ReferenceEquals(Subject, null))
    {
        Execute.Assertion
            .BecauseOf(because, reasonArgs)
            .FailWith("Expected {context:actioncontext} to not be <null>{reason}.");
    }

    // Compare against the runtime type name of the controller selected for this context.
    string actualController = Subject.ControllerContext.Controller.GetType().Name;
    actualController.Should().Be(expectedController, because, reasonArgs);

    return new AndConstraint<HttpActionContextAssertions>(this);
}

/// <summary>
/// Asserts that a <see cref="HttpActionContext">actionContext</see> maps to a specified
/// <paramref name="expectedAction">action</paramref>.
/// </summary>
/// <param name="expectedAction">The name of the action.</param>
/// <param name="because">
/// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
/// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
/// </param>
/// <param name="reasonArgs">
/// Zero or more objects to format using the placeholders in <see cref="because" />.
/// </param>
public AndConstraint<HttpActionContextAssertions> MapToAction(string expectedAction, string because = "",
    params object[] reasonArgs)
{
    if (ReferenceEquals(Subject, null))
    {
        Execute.Assertion
            .BecauseOf(because, reasonArgs)
            .FailWith("Expected {context:actioncontext} to not be <null>{reason}.");
    }

    // Compare against the action name the route machinery selected for this context.
    string actualAction = Subject.ActionDescriptor.ActionName;
    actualAction.Should().Be(expectedAction, because, reasonArgs);

    return new AndConstraint<HttpActionContextAssertions>(this);
}

/// <summary>
/// Asserts that a <see cref="HttpActionContext">actionContext</see> maps to a specified action.
/// </summary>
/// <typeparam name="THttpController">The type of controller.</typeparam>
/// <param name="action">The action to call on <typeparamref name="THttpController" />.</param>
/// <param name="because">
/// A formatted phrase as is supported by <see cref="string.Format(string,object[])" /> explaining why the assertion
/// is needed. If the phrase does not start with the word <i>because</i>, it is prepended automatically.
/// </param>
/// <param name="reasonArgs">
/// Zero or more objects to format using the placeholders in <see cref="because" />.
/// </param>
/// <remarks>Assertions are performed against the specified controller, action, and action parameters.</remarks>
public AndConstraint<HttpActionContextAssertions> MapTo<THttpController>(
    Expression<Func<THttpController, object>> action, string because = "", params object[] reasonArgs)
    where THttpController : IHttpController
{
    if (ReferenceEquals(Subject, null))
    {
        Execute.Assertion
            .BecauseOf(because, reasonArgs)
            .FailWith("Expected {context:actioncontext} to not be <null>{reason}.");
    }

    // First verify the context resolved to the expected controller and action.
    Subject.Should().MapTo<THttpController>(because, reasonArgs);
    Subject.Should().MapToAction(action.ActionName(), because, reasonArgs);

    // Then verify every argument value captured from the call expression.
    foreach (var argument in action.GetMethodArgumentValues())
    {
        var parameter = argument.Key;
        var expectedValue = argument.Value;

        if (parameter.ParameterType.CanBeNull())
        {
            Subject.Should().HaveOptionalActionArgument(parameter.Name, expectedValue, parameter.ParameterType);
        }
        else
        {
            Subject.Should().HaveActionArgument(parameter.Name, expectedValue, parameter.ParameterType);
        }
    }

    return new AndConstraint<HttpActionContextAssertions>(this);
}
}
}
using System;
using System.Drawing;
using System.Collections;
using System.Windows.Forms;
using ChartDirector;

namespace CSharpChartExplorer
{
    /// <summary>
    /// ChartDirector demo form: a zoomable and scrollable line chart hosted in a
    /// WinChartViewer, with a WinViewPortControl thumbnail chart for navigation.
    /// </summary>
    public partial class FrmViewPortControlDemo : Form
    {
        // Data arrays (populated once in loadData, then sliced per viewport in drawChart)
        private DateTime[] timeStamps;
        private double[] dataSeriesA;
        private double[] dataSeriesB;
        private double[] dataSeriesC;

        public FrmViewPortControlDemo()
        {
            InitializeComponent();
        }

        // Form load: wire up data, viewer state, and both charts.
        private void FrmViewPortControlDemo_Load(object sender, EventArgs e)
        {
            // Load the data
            loadData();

            // Initialize the WinChartViewer
            initChartViewer(winChartViewer1);

            // Trigger the ViewPortChanged event to draw the chart
            winChartViewer1.updateViewPort(true, true);

            // Draw the full thumbnail chart for the ViewPortControl
            drawFullChart(viewPortControl1, winChartViewer1);
        }

        //
        // Load the data
        //
        private void loadData()
        {
            // In this example, we just use random numbers as data.
            // 1827 daily points (86400 s apart) starting 2010-01-01.
            RanSeries r = new RanSeries(127);
            timeStamps = r.getDateSeries(1827, new DateTime(2010, 1, 1), 86400);
            dataSeriesA = r.getSeries2(1827, 150, -10, 10);
            dataSeriesB = r.getSeries2(1827, 200, -10, 10);
            dataSeriesC = r.getSeries2(1827, 250, -8, 8);
        }

        //
        // Initialize the WinChartViewer
        //
        private void initChartViewer(WinChartViewer viewer)
        {
            // Set the full x range to be the duration of the data
            viewer.setFullRange("x", timeStamps[0], timeStamps[timeStamps.Length - 1]);

            // Initialize the view port to show the latest 20% of the time range
            viewer.ViewPortWidth = 0.2;
            viewer.ViewPortLeft = 1 - viewer.ViewPortWidth;

            // Set the maximum zoom to 10 points
            viewer.ZoomInWidthLimit = 10.0 / timeStamps.Length;

            // Enable mouse wheel zooming by setting the zoom ratio to 1.1 per wheel event
            viewer.MouseWheelZoomRatio = 1.1;

            // Initially set the mouse usage to "Pointer" mode (Drag to Scroll mode)
            pointerPB.Checked = true;
        }

        //
        // The ViewPortChanged event handler. This event occurs if the user scrolls or zooms in
        // or out the chart by dragging or clicking on the chart. It can also be triggered by
        // calling WinChartViewer.updateViewPort.
        //
        private void winChartViewer1_ViewPortChanged(object sender, WinViewPortEventArgs e)
        {
            // Update the chart if necessary
            if (e.NeedUpdateChart)
                drawChart(winChartViewer1);
        }

        //
        // Draw the chart.
        //
        private void drawChart(WinChartViewer viewer)
        {
            // Get the start date and end date that are visible on the chart.
            DateTime viewPortStartDate = Chart.NTime(viewer.getValueAtViewPort("x", viewer.ViewPortLeft));
            DateTime viewPortEndDate = Chart.NTime(viewer.getValueAtViewPort("x",
                viewer.ViewPortLeft + viewer.ViewPortWidth));

            // Get the array indexes that correspond to the visible start and end dates
            int startIndex = (int)Math.Floor(Chart.bSearch(timeStamps, viewPortStartDate));
            int endIndex = (int)Math.Ceiling(Chart.bSearch(timeStamps, viewPortEndDate));
            int noOfPoints = endIndex - startIndex + 1;

            // Extract the part of the data array that are visible.
            DateTime[] viewPortTimeStamps = (DateTime[])Chart.arraySlice(timeStamps, startIndex, noOfPoints);
            double[] viewPortDataSeriesA = (double[])Chart.arraySlice(dataSeriesA, startIndex, noOfPoints);
            double[] viewPortDataSeriesB = (double[])Chart.arraySlice(dataSeriesB, startIndex, noOfPoints);
            double[] viewPortDataSeriesC = (double[])Chart.arraySlice(dataSeriesC, startIndex, noOfPoints);

            //
            // At this stage, we have extracted the visible data. We can use those data to plot the chart.
            //

            //================================================================================
            // Configure overall chart appearance.
            //================================================================================

            // Create an XYChart object of size 640 x 400 pixels
            XYChart c = new XYChart(640, 400);

            // Set the plotarea at (55, 55) with width 80 pixels less than chart width, and height 90 pixels
            // less than chart height. Use a vertical gradient from light blue (f0f6ff) to sky blue (a0c0ff)
            // as background. Set border to transparent and grid lines to white (ffffff).
            c.setPlotArea(55, 55, c.getWidth() - 80, c.getHeight() - 90, c.linearGradientColor(0, 55, 0,
                c.getHeight() - 35, 0xf0f6ff, 0xa0c0ff), -1, Chart.Transparent, 0xffffff, 0xffffff);

            // As the data can lie outside the plotarea in a zoomed chart, we need enable clipping.
            c.setClipping();

            // Add a title to the chart using 15pt Arial Bold font
            c.addTitle(" Zooming and Scrolling with Viewport Control", "Arial Bold", 15);

            // Set legend icon style to use line style icon, sized for 10pt font
            c.getLegend().setLineStyleKey();
            c.getLegend().setFontSize(10);

            // Set the x and y axis stems to transparent and the label font to 10pt Arial
            c.xAxis().setColors(Chart.Transparent);
            c.yAxis().setColors(Chart.Transparent);
            c.xAxis().setLabelStyle("Arial", 10);
            c.yAxis().setLabelStyle("Arial", 10);

            // Add axis title using 10pt Arial Bold font
            c.yAxis().setTitle("Ionic Temperature (C)", "Arial Bold", 10);

            //================================================================================
            // Add data to chart
            //================================================================================

            //
            // In this example, we represent the data by lines. You may modify the code below to use other
            // representations (areas, scatter plot, etc).
            //

            // Add a line layer for the lines, using a line width of 2 pixels
            LineLayer layer = c.addLineLayer2();
            layer.setLineWidth(2);

            // In this demo, we do not have too many data points. In real code, the chart may contain a lot
            // of data points when fully zoomed out - much more than the number of horizontal pixels in this
            // plot area. So it is a good idea to use fast line mode.
            layer.setFastLineMode();

            // Now we add the 3 data series to a line layer, using the color red (ff33333), green (008800)
            // and blue (3333cc)
            layer.setXData(viewPortTimeStamps);
            layer.addDataSet(viewPortDataSeriesA, 0xff3333, "Alpha");
            layer.addDataSet(viewPortDataSeriesB, 0x008800, "Beta");
            layer.addDataSet(viewPortDataSeriesC, 0x3333cc, "Gamma");

            //================================================================================
            // Configure axis scale and labelling
            //================================================================================

            // Set the x-axis as a date/time axis with the scale according to the view port x range.
            viewer.syncDateAxisWithViewPort("x", c.xAxis());

            // For the automatic y-axis labels, set the minimum spacing to 30 pixels.
            c.yAxis().setTickDensity(30);

            //
            // In this demo, the time range can be from a few years to a few days. We demonstrate how to set
            // up different date/time format based on the time range.
            //

            // If all ticks are yearly aligned, then we use "yyyy" as the label format.
            c.xAxis().setFormatCondition("align", 360 * 86400);
            c.xAxis().setLabelFormat("{value|yyyy}");

            // If all ticks are monthly aligned, then we use "mmm yyyy" in bold font as the first label of a
            // year, and "mmm" for other labels.
            c.xAxis().setFormatCondition("align", 30 * 86400);
            c.xAxis().setMultiFormat(Chart.StartOfYearFilter(), "<*font=bold*>{value|mmm<*br*>yyyy}",
                Chart.AllPassFilter(), "{value|mmm}");

            // If all ticks are daily aligned, then we use "mmm dd<*br*>yyyy" in bold font as the first
            // label of a year, and "mmm dd" in bold font as the first label of a month, and "dd" for other
            // labels.
            c.xAxis().setFormatCondition("align", 86400);
            c.xAxis().setMultiFormat(Chart.StartOfYearFilter(),
                "<*block,halign=left*><*font=bold*>{value|mmm dd<*br*>yyyy}",
                Chart.StartOfMonthFilter(), "<*font=bold*>{value|mmm dd}");
            c.xAxis().setMultiFormat2(Chart.AllPassFilter(), "{value|dd}");

            // For all other cases (sub-daily ticks), use "hh:nn<*br*>mmm dd" for the first label of a day,
            // and "hh:nn" for other labels.
            c.xAxis().setFormatCondition("else");
            c.xAxis().setMultiFormat(Chart.StartOfDayFilter(), "<*font=bold*>{value|hh:nn<*br*>mmm dd}",
                Chart.AllPassFilter(), "{value|hh:nn}");

            //================================================================================
            // Output the chart
            //================================================================================

            // We need to update the track line too. If the mouse is moving on the chart (eg. if
            // the user drags the mouse on the chart to scroll it), the track line will be updated
            // in the MouseMovePlotArea event. Otherwise, we need to update the track line here.
            if (!viewer.IsInMouseMoveEvent)
            {
                trackLineLegend(c, (null == viewer.Chart) ? c.getPlotArea().getRightX() :
                    viewer.PlotAreaMouseX);
            }

            viewer.Chart = c;
        }

        // Draw the static full-range thumbnail chart shown inside the ViewPortControl.
        private void drawFullChart(WinViewPortControl vpc, WinChartViewer viewer)
        {
            // Create an XYChart object of size 640 x 60 pixels
            XYChart c = new XYChart(640, 60);

            // Set the plotarea with the same horizontal position as that in the main chart for alignment.
            c.setPlotArea(55, 0, c.getWidth() - 80, c.getHeight() - 1, 0xc0d8ff, -1, 0x888888,
                Chart.Transparent, 0xffffff);

            // Set the x axis stem to transparent and the label font to 10pt Arial
            c.xAxis().setColors(Chart.Transparent);
            c.xAxis().setLabelStyle("Arial", 10);

            // Put the x-axis labels inside the plot area by setting a negative label gap. Use
            // setLabelAlignment to put the label at the right side of the tick.
            c.xAxis().setLabelGap(-1);
            c.xAxis().setLabelAlignment(1);

            // Set the y axis stem and labels to transparent (that is, hide the labels)
            c.yAxis().setColors(Chart.Transparent, Chart.Transparent);

            // Add a line layer for the lines with fast line mode enabled
            LineLayer layer = c.addLineLayer();
            layer.setFastLineMode();

            // Now we add the 3 data series to a line layer, using the color red (0xff3333), green
            // (0x008800) and blue (0x3333cc)
            layer.setXData(timeStamps);
            layer.addDataSet(dataSeriesA, 0xff3333);
            layer.addDataSet(dataSeriesB, 0x008800);
            layer.addDataSet(dataSeriesC, 0x3333cc);

            // The x axis scales should reflect the full range of the view port
            c.xAxis().setDateScale(viewer.getValueAtViewPort("x", 0), viewer.getValueAtViewPort("x", 1));

            // For the automatic x-axis labels, set the minimum spacing to 75 pixels.
            c.xAxis().setTickDensity(75);

            // For the auto-scaled y-axis, as we hide the labels, we can disable axis rounding. This can
            // make the axis scale fit the data tighter.
            c.yAxis().setRounding(false, false);

            // Output the chart
            vpc.Chart = c;
        }

        //
        // Pointer (Drag to Scroll) button event handler
        //
        private void pointerPB_CheckedChanged(object sender, EventArgs e)
        {
            if (((RadioButton)sender).Checked)
                winChartViewer1.MouseUsage = WinChartMouseUsage.ScrollOnDrag;
        }

        //
        // Zoom In button event handler
        //
        private void zoomInPB_CheckedChanged(object sender, EventArgs e)
        {
            if (((RadioButton)sender).Checked)
                winChartViewer1.MouseUsage = WinChartMouseUsage.ZoomIn;
        }

        //
        // Zoom Out button event handler
        //
        private void zoomOutPB_CheckedChanged(object sender, EventArgs e)
        {
            if (((RadioButton)sender).Checked)
                winChartViewer1.MouseUsage = WinChartMouseUsage.ZoomOut;
        }

        //
        // Save button event handler
        //
        private void savePB_Click(object sender, EventArgs e)
        {
            // The standard Save File dialog
            SaveFileDialog fileDlg = new SaveFileDialog();
            fileDlg.Filter = "PNG (*.png)|*.png|JPG (*.jpg)|*.jpg|GIF (*.gif)|*.gif|BMP (*.bmp)|*.bmp|" +
                "SVG (*.svg)|*.svg|PDF (*.pdf)|*.pdf";
            fileDlg.FileName = "chartdirector_demo";
            if (fileDlg.ShowDialog() != DialogResult.OK)
                return;

            // Save the chart
            if (null != winChartViewer1.Chart)
                winChartViewer1.Chart.makeChart(fileDlg.FileName);
        }

        //
        // Draw track cursor when mouse is moving over plotarea
        //
        private void winChartViewer1_MouseMovePlotArea(object sender, MouseEventArgs e)
        {
            WinChartViewer viewer = (WinChartViewer)sender;
            trackLineLegend((XYChart)viewer.Chart, viewer.PlotAreaMouseX);
            viewer.updateDisplay();
        }

        //
        // Draw the track line with legend
        //
        private void trackLineLegend(XYChart c, int mouseX)
        {
            // Clear the current dynamic layer and get the DrawArea object to draw on it.
            DrawArea d = c.initDynamicLayer();

            // The plot area object
            PlotArea plotArea = c.getPlotArea();

            // Get the data x-value that is nearest to the mouse, and find its pixel coordinate.
            double xValue = c.getNearestXValue(mouseX);
            int xCoor = c.getXCoor(xValue);

            // Draw a vertical track line at the x-position
            d.vline(plotArea.getTopY(), plotArea.getBottomY(), xCoor, 0xaaaaaa);

            // Container to hold the legend entries
            ArrayList legendEntries = new ArrayList();

            // Iterate through all layers to build the legend array
            for (int i = 0; i < c.getLayerCount(); ++i)
            {
                Layer layer = c.getLayerByZ(i);

                // The data array index of the x-value
                int xIndex = layer.getXIndexOf(xValue);

                // Iterate through all the data sets in the layer
                for (int j = 0; j < layer.getDataSetCount(); ++j)
                {
                    ChartDirector.DataSet dataSet = layer.getDataSetByZ(j);

                    // We are only interested in visible data sets with names
                    string dataName = dataSet.getDataName();
                    int color = dataSet.getDataColor();
                    if ((!string.IsNullOrEmpty(dataName)) && (color != Chart.Transparent))
                    {
                        // Build the legend entry, consist of the legend icon, name and data value.
                        double dataValue = dataSet.getValue(xIndex);
                        legendEntries.Add("<*block*>" + dataSet.getLegendIcon() + " " + dataName + ": " +
                            ((dataValue == Chart.NoValue) ? "N/A" : c.formatValue(dataValue, "{value|P4}")) +
                            "<*/*>");

                        // Draw a track dot for data points within the plot area
                        int yCoor = c.getYCoor(dataSet.getPosition(xIndex), dataSet.getUseYAxis());
                        if ((yCoor >= plotArea.getTopY()) && (yCoor <= plotArea.getBottomY()))
                        {
                            d.circle(xCoor, yCoor, 4, 4, color, color);
                        }
                    }
                }
            }

            // Create the legend by joining the legend entries
            legendEntries.Reverse();
            string legendText = "<*block,maxWidth=" + plotArea.getWidth() +
                "*><*block*><*font=Arial Bold*>[" + c.xAxis().getFormattedLabel(xValue, "mmm dd, yyyy") +
                "]<*/*> " + String.Join(" ", (string[])legendEntries.ToArray(typeof(string))) + "<*/*>";

            // Display the legend on the top of the plot area
            TTFText t = d.text(legendText, "Arial Bold", 10);
            t.draw(plotArea.getLeftX() + 5, plotArea.getTopY() - 3, 0x000000, Chart.BottomLeft);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /****************************************************************************** * This file is auto-generated from a template file by the GenerateTests.csx * * script in tests\src\JIT\HardwareIntrinsics.Arm\Shared. In order to make * * changes, please update the corresponding template and run according to the * * directions listed in the file. * ******************************************************************************/ using System; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.Arm; namespace JIT.HardwareIntrinsics.Arm { public static partial class Program { private static void Add_Vector64_Byte() { var test = new SimpleBinaryOpTest__Add_Vector64_Byte(); if (test.IsSupported) { // Validates basic functionality works, using Unsafe.Read test.RunBasicScenario_UnsafeRead(); if (AdvSimd.IsSupported) { // Validates basic functionality works, using Load test.RunBasicScenario_Load(); } // Validates calling via reflection works, using Unsafe.Read test.RunReflectionScenario_UnsafeRead(); if (AdvSimd.IsSupported) { // Validates calling via reflection works, using Load test.RunReflectionScenario_Load(); } // Validates passing a static member works test.RunClsVarScenario(); if (AdvSimd.IsSupported) { // Validates passing a static member works, using pinning and Load test.RunClsVarScenario_Load(); } // Validates passing a local works, using Unsafe.Read test.RunLclVarScenario_UnsafeRead(); if (AdvSimd.IsSupported) { // Validates passing a local works, using Load test.RunLclVarScenario_Load(); } // Validates passing the field of a local class works test.RunClassLclFldScenario(); if (AdvSimd.IsSupported) { // Validates passing the field of a local class works, using pinning and Load 
test.RunClassLclFldScenario_Load(); } // Validates passing an instance member of a class works test.RunClassFldScenario(); if (AdvSimd.IsSupported) { // Validates passing an instance member of a class works, using pinning and Load test.RunClassFldScenario_Load(); } // Validates passing the field of a local struct works test.RunStructLclFldScenario(); if (AdvSimd.IsSupported) { // Validates passing the field of a local struct works, using pinning and Load test.RunStructLclFldScenario_Load(); } // Validates passing an instance member of a struct works test.RunStructFldScenario(); if (AdvSimd.IsSupported) { // Validates passing an instance member of a struct works, using pinning and Load test.RunStructFldScenario_Load(); } } else { // Validates we throw on unsupported hardware test.RunUnsupportedScenario(); } if (!test.Succeeded) { throw new Exception("One or more scenarios did not complete as expected."); } } } public sealed unsafe class SimpleBinaryOpTest__Add_Vector64_Byte { private struct DataTable { private byte[] inArray1; private byte[] inArray2; private byte[] outArray; private GCHandle inHandle1; private GCHandle inHandle2; private GCHandle outHandle; private ulong alignment; public DataTable(Byte[] inArray1, Byte[] inArray2, Byte[] outArray, int alignment) { int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Byte>(); int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Byte>(); int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Byte>(); if ((alignment != 16 && alignment != 8) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray) { throw new ArgumentException("Invalid value of alignment"); } this.inArray1 = new byte[alignment * 2]; this.inArray2 = new byte[alignment * 2]; this.outArray = new byte[alignment * 2]; this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned); this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned); this.outHandle = GCHandle.Alloc(this.outArray, 
GCHandleType.Pinned); this.alignment = (ulong)alignment; Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Byte, byte>(ref inArray1[0]), (uint)sizeOfinArray1); Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Byte, byte>(ref inArray2[0]), (uint)sizeOfinArray2); } public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment); public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment); public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment); public void Dispose() { inHandle1.Free(); inHandle2.Free(); outHandle.Free(); } private static unsafe void* Align(byte* buffer, ulong expectedAlignment) { return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1)); } } private struct TestStruct { public Vector64<Byte> _fld1; public Vector64<Byte> _fld2; public static TestStruct Create() { var testStruct = new TestStruct(); for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref testStruct._fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref testStruct._fld2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>()); return testStruct; } public void RunStructFldScenario(SimpleBinaryOpTest__Add_Vector64_Byte testClass) { var result = AdvSimd.Add(_fld1, _fld2); Unsafe.Write(testClass._dataTable.outArrayPtr, result); testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr); } public void RunStructFldScenario_Load(SimpleBinaryOpTest__Add_Vector64_Byte testClass) { fixed (Vector64<Byte>* pFld1 = &_fld1) fixed (Vector64<Byte>* pFld2 = &_fld2) { var result = 
AdvSimd.Add( AdvSimd.LoadVector64((Byte*)(pFld1)), AdvSimd.LoadVector64((Byte*)(pFld2)) ); Unsafe.Write(testClass._dataTable.outArrayPtr, result); testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr); } } } private static readonly int LargestVectorSize = 8; private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte); private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte); private static readonly int RetElementCount = Unsafe.SizeOf<Vector64<Byte>>() / sizeof(Byte); private static Byte[] _data1 = new Byte[Op1ElementCount]; private static Byte[] _data2 = new Byte[Op2ElementCount]; private static Vector64<Byte> _clsVar1; private static Vector64<Byte> _clsVar2; private Vector64<Byte> _fld1; private Vector64<Byte> _fld2; private DataTable _dataTable; static SimpleBinaryOpTest__Add_Vector64_Byte() { for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _clsVar1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _clsVar2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>()); } public SimpleBinaryOpTest__Add_Vector64_Byte() { Succeeded = true; for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetByte(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _fld1), ref Unsafe.As<Byte, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>()); for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); } Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector64<Byte>, byte>(ref _fld2), ref Unsafe.As<Byte, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector64<Byte>>()); for (var i = 0; i < Op1ElementCount; i++) 
{ _data1[i] = TestLibrary.Generator.GetByte(); } for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetByte(); } _dataTable = new DataTable(_data1, _data2, new Byte[RetElementCount], LargestVectorSize); } public bool IsSupported => AdvSimd.IsSupported; public bool Succeeded { get; set; } public void RunBasicScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead)); var result = AdvSimd.Add( Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunBasicScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load)); var result = AdvSimd.Add( AdvSimd.LoadVector64((Byte*)(_dataTable.inArray1Ptr)), AdvSimd.LoadVector64((Byte*)(_dataTable.inArray2Ptr)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead)); var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.Add), new Type[] { typeof(Vector64<Byte>), typeof(Vector64<Byte>) }) .Invoke(null, new object[] { Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr), Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Byte>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunReflectionScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load)); var result = typeof(AdvSimd).GetMethod(nameof(AdvSimd.Add), new Type[] { typeof(Vector64<Byte>), typeof(Vector64<Byte>) }) .Invoke(null, new object[] { AdvSimd.LoadVector64((Byte*)(_dataTable.inArray1Ptr)), 
AdvSimd.LoadVector64((Byte*)(_dataTable.inArray2Ptr)) }); Unsafe.Write(_dataTable.outArrayPtr, (Vector64<Byte>)(result)); ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr); } public void RunClsVarScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario)); var result = AdvSimd.Add( _clsVar1, _clsVar2 ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr); } public void RunClsVarScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load)); fixed (Vector64<Byte>* pClsVar1 = &_clsVar1) fixed (Vector64<Byte>* pClsVar2 = &_clsVar2) { var result = AdvSimd.Add( AdvSimd.LoadVector64((Byte*)(pClsVar1)), AdvSimd.LoadVector64((Byte*)(pClsVar2)) ); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr); } } public void RunLclVarScenario_UnsafeRead() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead)); var op1 = Unsafe.Read<Vector64<Byte>>(_dataTable.inArray1Ptr); var op2 = Unsafe.Read<Vector64<Byte>>(_dataTable.inArray2Ptr); var result = AdvSimd.Add(op1, op2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(op1, op2, _dataTable.outArrayPtr); } public void RunLclVarScenario_Load() { TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load)); var op1 = AdvSimd.LoadVector64((Byte*)(_dataTable.inArray1Ptr)); var op2 = AdvSimd.LoadVector64((Byte*)(_dataTable.inArray2Ptr)); var result = AdvSimd.Add(op1, op2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(op1, op2, _dataTable.outArrayPtr); } public void RunClassLclFldScenario() { TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario)); var test = new SimpleBinaryOpTest__Add_Vector64_Byte(); var result = AdvSimd.Add(test._fld1, test._fld2); Unsafe.Write(_dataTable.outArrayPtr, result); ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr); } public void 
RunClassLclFldScenario_Load()
{
    // Same as the class-local-field scenario, but reads both fields through
    // AdvSimd.LoadVector64 from pinned pointers instead of passing them by value.
    TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));

    var test = new SimpleBinaryOpTest__Add_Vector64_Byte();

    fixed (Vector64<Byte>* pFld1 = &test._fld1)
    fixed (Vector64<Byte>* pFld2 = &test._fld2)
    {
        var result = AdvSimd.Add(
            AdvSimd.LoadVector64((Byte*)(pFld1)),
            AdvSimd.LoadVector64((Byte*)(pFld2))
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
    }
}

// Validates passing the instance fields of the test class directly to the intrinsic.
public void RunClassFldScenario()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

    var result = AdvSimd.Add(_fld1, _fld2);

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}

// Validates loading the instance fields via AdvSimd.LoadVector64 from pinned pointers.
public void RunClassFldScenario_Load()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));

    fixed (Vector64<Byte>* pFld1 = &_fld1)
    fixed (Vector64<Byte>* pFld2 = &_fld2)
    {
        var result = AdvSimd.Add(
            AdvSimd.LoadVector64((Byte*)(pFld1)),
            AdvSimd.LoadVector64((Byte*)(pFld2))
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
    }
}

// Validates passing the fields of a local struct directly to the intrinsic.
public void RunStructLclFldScenario()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));

    var test = TestStruct.Create();
    var result = AdvSimd.Add(test._fld1, test._fld2);

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}

// Validates loading the fields of a local struct via AdvSimd.LoadVector64.
// Note: a struct local is already pinned by virtue of living on the stack,
// so no fixed statement is needed here.
public void RunStructLclFldScenario_Load()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));

    var test = TestStruct.Create();
    var result = AdvSimd.Add(
        AdvSimd.LoadVector64((Byte*)(&test._fld1)),
        AdvSimd.LoadVector64((Byte*)(&test._fld2))
    );

    Unsafe.Write(_dataTable.outArrayPtr, result);
    ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}

// Validates the scenario where the struct itself invokes the intrinsic on its own fields.
public void RunStructFldScenario()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));

    var test = TestStruct.Create();
    test.RunStructFldScenario(this);
}

// Load variant of the struct-field scenario.
public
void RunStructFldScenario_Load()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));

    var test = TestStruct.Create();
    test.RunStructFldScenario_Load(this);
}

// When AdvSimd is not supported on the current hardware, the intrinsic must
// throw PlatformNotSupportedException; anything else is a failure.
public void RunUnsupportedScenario()
{
    TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));

    bool succeeded = false;

    try
    {
        RunBasicScenario_UnsafeRead();
    }
    catch (PlatformNotSupportedException)
    {
        succeeded = true;
    }

    if (!succeeded)
    {
        Succeeded = false;
    }
}

// Copies two vector operands and the raw result buffer into managed arrays,
// then defers to the array-based validator.
private void ValidateResult(Vector64<Byte> op1, Vector64<Byte> op2, void* result, [CallerMemberName] string method = "")
{
    Byte[] inArray1 = new Byte[Op1ElementCount];
    Byte[] inArray2 = new Byte[Op2ElementCount];
    Byte[] outArray = new Byte[RetElementCount];

    Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), op1);
    Unsafe.WriteUnaligned(ref Unsafe.As<Byte, byte>(ref inArray2[0]), op2);
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Byte>>());

    ValidateResult(inArray1, inArray2, outArray, method);
}

// Pointer-based overload: copies both operands and the result from unmanaged
// memory into managed arrays before validating.
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
    Byte[] inArray1 = new Byte[Op1ElementCount];
    Byte[] inArray2 = new Byte[Op2ElementCount];
    Byte[] outArray = new Byte[RetElementCount];

    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector64<Byte>>());
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector64<Byte>>());
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Byte, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector64<Byte>>());

    ValidateResult(inArray1, inArray2, outArray, method);
}

// Element-wise oracle: result[i] must equal the wrapping byte sum left[i] + right[i].
// On mismatch, logs the operands/result and marks the whole test as failed.
private void ValidateResult(Byte[] left, Byte[] right, Byte[] result, [CallerMemberName] string method = "")
{
    bool succeeded = true;

    if ((byte)(left[0] + right[0]) != result[0])
    {
        succeeded = false;
    }
    else
    {
        for (var i = 1; i < RetElementCount; i++)
        {
            if ((byte)(left[i] + right[i]) != result[i])
            {
                succeeded = false;
                break;
            }
        }
    }

    if (!succeeded)
    {
        TestLibrary.TestFramework.LogInformation($"{nameof(AdvSimd)}.{nameof(AdvSimd.Add)}<Byte>(Vector64<Byte>, Vector64<Byte>): {method} failed:");
        TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
        TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
        TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
        TestLibrary.TestFramework.LogInformation(string.Empty);

        Succeeded = false;
    }
}
// end of test class
}
// end of namespace
}
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.Versioning;
using System.Windows;
using EnvDTE;
using Microsoft.VisualStudio.ExtensionsExplorer;
using Microsoft.VisualStudio.ExtensionsExplorer.UI;
using NuGet.VisualStudio;
using NuGetConsole;
using NuGetConsole.Host.PowerShellProvider;

namespace NuGet.Dialog.Providers
{
    /// <summary>
    /// Base class for all package provider types shown in the Add Package dialog.
    /// Owns the provider's tree of package nodes, search handling, and the
    /// background execution of install/uninstall/update operations.
    /// </summary>
    internal abstract class PackagesProviderBase : VsExtensionsProvider, ILogger, IDisposable
    {
        private PackagesSearchNode _searchNode;
        private PackagesTreeNodeBase _lastSelectedNode;
        private readonly ResourceDictionary _resources;
        private readonly Lazy<IConsole> _outputConsole;
        private readonly IPackageRepository _localRepository;
        private readonly ProviderServices _providerServices;
        // Projects that failed during the current operation, keyed by project.
        private Dictionary<Project, Exception> _failedProjects;
        private string _readmeFile, _originalPackageId;
        private object _mediumIconDataTemplate;
        private object _detailViewDataTemplate;
        private IList<IVsSortDescriptor> _sortDescriptors;
        private readonly IProgressProvider _progressProvider;
        // UI-thread cultures captured in Execute() and re-applied on the worker thread.
        private CultureInfo _uiCulture, _culture;
        private readonly ISolutionManager _solutionManager;
        // Token returned by SaveSolutionExplorerNodeStates; disposing it collapses
        // any Solution Explorer nodes that the operation expanded.
        private IDisposable _expandedNodesDisposable;

        protected PackagesProviderBase(
            IPackageRepository localRepository,
            ResourceDictionary resources,
            ProviderServices providerServices,
            IProgressProvider progressProvider,
            ISolutionManager solutionManager)
        {
            if (resources == null)
            {
                throw new ArgumentNullException("resources");
            }
            if (providerServices == null)
            {
                throw new ArgumentNullException("providerServices");
            }
            if (solutionManager == null)
            {
                throw new ArgumentNullException("solutionManager");
            }

            _localRepository = localRepository;
            _providerServices = providerServices;
            _progressProvider = progressProvider;
            _solutionManager = solutionManager;
            _resources = resources;
            // Lazy so the output console is only created when something is written.
            _outputConsole = new Lazy<IConsole>(() => providerServices.OutputConsoleProvider.CreateOutputConsole(requirePowerShellHost: false));
        }

        /// <summary>
        /// Copy constructor for PackagesProviderBase
        /// </summary>
        protected PackagesProviderBase(PackagesProviderBase other)
            : this(other._localRepository, other._resources, other._providerServices, other._progressProvider, other._solutionManager)
        {
        }

        /// <summary>
        /// Returns either the solution repository or the active project repository, depending on whether we are targeting solution.
        /// </summary>
        protected IPackageRepository LocalRepository
        {
            get { return _localRepository; }
        }

        public virtual bool RefreshOnNodeSelection
        {
            get { return false; }
        }

        public PackagesTreeNodeBase SelectedNode { get; set; }

        public bool SuppressNextRefresh { get; private set; }

        internal bool SuppressLoad { get; set; }

        /// <summary>
        /// Gets the root node of the tree
        /// </summary>
        protected IVsExtensionsTreeNode RootNode { get; set; }

        public PackageSortDescriptor CurrentSort { get; set; }

        public virtual bool ShowPrereleaseComboBox
        {
            get { return true; }
        }

        internal virtual bool IncludePrerelease { get; set; }

        public virtual IEnumerable<string> SupportedFrameworks
        {
            get { yield break; }
        }

        // Lazily builds the tree on first access.
        public override IVsExtensionsTreeNode ExtensionsTree
        {
            get
            {
                if (RootNode == null)
                {
                    RootNode = new RootPackagesTreeNode(null, String.Empty);
                    CreateExtensionsTree();
                }
                return RootNode;
            }
        }

        public override object MediumIconDataTemplate
        {
            get
            {
                if (_mediumIconDataTemplate == null)
                {
                    _mediumIconDataTemplate = _resources["PackageItemTemplate"];
                }
                return _mediumIconDataTemplate;
            }
        }

        public override object DetailViewDataTemplate
        {
            get
            {
                if (_detailViewDataTemplate == null)
                {
                    _detailViewDataTemplate = _resources["PackageDetailTemplate"];
                }
                return _detailViewDataTemplate;
            }
        }

        // hook for unit test
        internal Action ExecuteCompletedCallback { get; set; }

        public IList<IVsSortDescriptor> SortDescriptors
        {
            get
            {
                if (_sortDescriptors == null)
                {
                    _sortDescriptors = CreateSortDescriptors();
                }
                return _sortDescriptors;
            }
        }

        // Default sort options offered by the dialog; subclasses may override.
        protected virtual IList<IVsSortDescriptor> CreateSortDescriptors()
        {
            return new[]
            {
                new PackageSortDescriptor(Resources.Dialog_SortOption_MostDownloads, "DownloadCount", ListSortDirection.Descending),
                new PackageSortDescriptor(Resources.Dialog_SortOption_PublishedDate, "Published", ListSortDirection.Descending),
                new PackageSortDescriptor(String.Format(CultureInfo.CurrentCulture, "{0}: {1}", Resources.Dialog_SortOption_Name, Resources.Dialog_SortAscending), new[] { "Title", "Id" }, ListSortDirection.Ascending),
                new PackageSortDescriptor(String.Format(CultureInfo.CurrentCulture, "{0}: {1}", Resources.Dialog_SortOption_Name, Resources.Dialog_SortDescending), new[] { "Title", "Id" }, ListSortDirection.Descending)
            };
        }

        public override string ToString()
        {
            return Name;
        }

        // Creates (or updates) the transient search node; clearing the search box
        // removes the node and restores the previously selected node.
        public override IVsExtensionsTreeNode Search(string searchText)
        {
            if (OperationCoordinator.IsBusy)
            {
                return null;
            }

            if (!String.IsNullOrWhiteSpace(searchText) && SelectedNode != null)
            {
                searchText = searchText.Trim();
                if (_searchNode != null)
                {
                    _searchNode.Extensions.Clear();
                    _searchNode.SetSearchText(searchText);
                }
                else
                {
                    var provider = GetSearchProvider();
                    _searchNode = new PackagesSearchNode(provider, RootNode, SelectedNode, searchText);
                    AddSearchNode();
                }
            }
            else
            {
                RemoveSearchNode();
            }

            return _searchNode;
        }

        protected internal virtual void RemoveSearchNode()
        {
            if (_searchNode != null)
            {
                // When remove the search node, the dialog will automatically select the first node (All node)
                // Since we are going to restore the previously selected node anyway, we don't want the first node
                // to refresh. Hence suppress it here.
                SuppressNextRefresh = true;

                try
                {
                    // dispose any search results
                    RootNode.Nodes.Remove(_searchNode);
                }
                finally
                {
                    _searchNode = null;
                    SuppressNextRefresh = false;
                }

                if (_lastSelectedNode != null)
                {
                    // after search, we want to reset the original node to page 1 (Work Item #461)
                    _lastSelectedNode.CurrentPage = 1;
                    SelectNode(_lastSelectedNode);
                }
            }
        }

        private void AddSearchNode()
        {
            if (_searchNode != null && !RootNode.Nodes.Contains(_searchNode))
            {
                // remember the currently selected node so that when search term is cleared, we can restore it.
                _lastSelectedNode = SelectedNode;
                RootNode.Nodes.Add(_searchNode);
                SelectNode(_searchNode);
            }
        }

        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope", Justification = "Need to pass it to the base type. Can't dispose here")]
        protected virtual PackagesProviderBase GetSearchProvider()
        {
            return this;
        }

        protected void SelectNode(PackagesTreeNodeBase node)
        {
            node.IsSelected = true;
            SelectedNode = node;
        }

        private void CreateExtensionsTree()
        {
            // The user may have done a search before we finished getting the category list; temporarily remove it
            RemoveSearchNode();
            // give subclass a chance to populate the child nodes under Root node
            FillRootNodes();
            // Re-add the search node and select it if the user was doing a search
            AddSearchNode();
        }

        /// <summary>
        /// Kicks off the package operation for <paramref name="item"/> on a
        /// BackgroundWorker thread. Completion is handled in OnRunWorkerCompleted.
        /// </summary>
        [System.Diagnostics.CodeAnalysis.SuppressMessage(
            "Microsoft.Globalization",
            "CA1303:Do not pass literals as localized parameters",
            MessageId = "NuGet.Dialog.Providers.PackagesProviderBase.WriteLineToOutputWindow(System.String)",
            Justification = "No need to localize the --- strings"),
        System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope")]
        public virtual void Execute(PackageItem item)
        {
            if (OperationCoordinator.IsBusy)
            {
                return;
            }

            // disable all operations while this install is in progress
            OperationCoordinator.IsBusy = true;

            _readmeFile = null;
            _originalPackageId = item.Id;
            _progressProvider.ProgressAvailable += OnProgressAvailable;

            // Capture the UI thread's cultures so the worker thread can mirror them.
            _uiCulture = System.Threading.Thread.CurrentThread.CurrentUICulture;
            _culture = System.Threading.Thread.CurrentThread.CurrentCulture;

            _failedProjects = new Dictionary<Project, Exception>();

            ClearProgressMessages();
            SaveExpandedNodes();

            var worker = new BackgroundWorker();
            worker.DoWork += OnRunWorkerDoWork;
            worker.RunWorkerCompleted += OnRunWorkerCompleted;
            worker.RunWorkerAsync(item);

            // write an introductory sentence before every operation starts to make the console easier to read
            string progressMessage = GetProgressMessage(item.PackageIdentity);
            WriteLineToOutputWindow("------- " + progressMessage + " -------");
        }

        private void OnProgressAvailable(object sender, ProgressEventArgs e)
        {
            _providerServices.ProgressWindow.ShowProgress(e.Operation, e.PercentComplete);
        }

        // Runs on the BackgroundWorker thread.
        private void OnRunWorkerDoWork(object sender, DoWorkEventArgs e)
        {
            // make sure the new thread has the same cultures as the UI thread's cultures
            System.Threading.Thread.CurrentThread.CurrentUICulture = _uiCulture;
            System.Threading.Thread.CurrentThread.CurrentCulture = _culture;

            var item = (PackageItem)e.Argument;
            bool succeeded = ExecuteCore(item);
            // A false result surfaces as e.Cancelled in the completion handler.
            e.Cancel = !succeeded;
            e.Result = item;
        }

        // Runs back on the UI thread after the operation finishes.
        private void OnRunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
        {
            OperationCoordinator.IsBusy = false;
            _progressProvider.ProgressAvailable -= OnProgressAvailable;

            if (e.Error == null)
            {
                if (e.Cancelled)
                {
                    CloseProgressWindow();
                }
                else
                {
                    OnExecuteCompleted((PackageItem)e.Result);
                    _providerServices.ProgressWindow.SetCompleted(successful: true);
                    OpenReadMeFile();
                    CollapseNodes();
                }
            }
            else
            {
                // show error message in the progress window in case of error
                Log(MessageLevel.Error, ExceptionUtility.Unwrap(e.Error).Message);
                _providerServices.ProgressWindow.SetCompleted(successful: false);
            }

            if (_failedProjects != null && _failedProjects.Count > 0)
            {
                // BUG 1401: if we are going to show the Summary window,
                // then hide the progress window.
                _providerServices.ProgressWindow.Close();
                _providerServices.UserNotifierServices.ShowSummaryWindow(_failedProjects);
            }

            // write a blank line into the output window to separate entries from different operations
            WriteLineToOutputWindow(new string('=', 30));
            WriteLineToOutputWindow();

            if (ExecuteCompletedCallback != null)
            {
                ExecuteCompletedCallback();
            }
        }

        private void ClearProgressMessages()
        {
            _providerServices.ProgressWindow.ClearMessages();
        }

        protected void ShowProgressWindow()
        {
            _providerServices.ProgressWindow.Show(ProgressWindowTitle, PackageManagerWindow.CurrentInstance);
        }

        protected void HideProgressWindow()
        {
            _providerServices.ProgressWindow.Hide();
        }

        protected void CloseProgressWindow()
        {
            _providerServices.ProgressWindow.Close();
        }

        // Subclasses populate the category nodes under RootNode here.
        protected virtual void FillRootNodes()
        {
        }

        // Records a per-project failure to be shown in the summary window afterwards.
        protected void AddFailedProject(Project project, Exception exception)
        {
            if (project == null)
            {
                throw new ArgumentNullException("project");
            }
            if (exception == null)
            {
                throw new ArgumentNullException("exception");
            }

            _failedProjects[project] = ExceptionUtility.Unwrap(exception);
        }

        public abstract IVsExtension CreateExtension(IPackage package);

        public abstract bool CanExecute(PackageItem item);

        protected virtual string GetProgressMessage(IPackage package)
        {
            return package.ToString();
        }

        /// <summary>
        /// This method is called on background thread.
        /// </summary>
        /// <returns><c>true</c> if the method succeeded. <c>false</c> otherwise.</returns>
        protected virtual bool ExecuteCore(PackageItem item)
        {
            return true;
        }

        protected virtual void OnExecuteCompleted(PackageItem item)
        {
            // After every operation, just update the status of all packages in the current node.
            // Strictly speaking, this is not required; only affected packages need to be updated.
            // But doing so would require us to keep a Dictionary<IPackage, PackageItem> which is not worth it.
            if (SelectedNode != null)
            {
                foreach (PackageItem node in SelectedNode.Extensions)
                {
                    node.UpdateEnabledStatus();
                }
            }
        }

        public virtual string NoItemsMessage
        {
            get { return String.Empty; }
        }

        public virtual string ProgressWindowTitle
        {
            get { return String.Empty; }
        }

        // ILogger implementation: mirrors messages to the progress window and the output console.
        public void Log(MessageLevel level, string message, params object[] args)
        {
            string formattedMessage = String.Format(CultureInfo.CurrentCulture, message, args);

            // for the dialog we ignore debug messages
            if (_providerServices.ProgressWindow.IsOpen && level != MessageLevel.Debug)
            {
                _providerServices.ProgressWindow.AddMessage(level, formattedMessage);
            }

            WriteLineToOutputWindow(formattedMessage);
        }

        protected void WriteLineToOutputWindow(string message = "")
        {
            _outputConsole.Value.WriteLine(message);
        }

        protected void ShowProgress(string operation, int percentComplete)
        {
            if (_providerServices.ProgressWindow.IsOpen)
            {
                _providerServices.ProgressWindow.ShowProgress(operation, percentComplete);
            }
        }

        // Hooks the install/reference events so init.ps1/install.ps1/uninstall.ps1 run.
        // Always pair with UnregisterPackageOperationEvents.
        protected void RegisterPackageOperationEvents(IPackageManager packageManager, IProjectManager projectManager)
        {
            if (packageManager != null)
            {
                packageManager.PackageInstalled += OnPackageInstalled;
            }
            if (projectManager != null)
            {
                projectManager.PackageReferenceAdded += OnPackageReferenceAdded;
                projectManager.PackageReferenceRemoving += OnPackageReferenceRemoving;
            }
        }

        protected void UnregisterPackageOperationEvents(IPackageManager packageManager, IProjectManager projectManager)
        {
            if (packageManager != null)
            {
                packageManager.PackageInstalled -= OnPackageInstalled;
            }
            if (projectManager != null)
            {
                projectManager.PackageReferenceAdded -= OnPackageReferenceAdded;
                projectManager.PackageReferenceRemoving -= OnPackageReferenceRemoving;
            }
        }

        private void OnPackageInstalled(object sender, PackageOperationEventArgs e)
        {
            _providerServices.ScriptExecutor.ExecuteInitScript(e.InstallPath, e.Package, this);
            PrepareOpenReadMeFile(e);
        }

        private void OnPackageReferenceAdded(object sender, PackageOperationEventArgs e)
        {
            Project project = FindProjectFromFileSystem(e.FileSystem);
            Debug.Assert(project != null);

            _providerServices.ScriptExecutor.ExecuteScript(
                e.InstallPath,
                PowerShellScripts.Install,
                e.Package,
                project,
                project.GetTargetFrameworkName(),
                this);
        }

        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
        private void OnPackageReferenceRemoving(object sender, PackageOperationEventArgs e)
        {
            Project project = FindProjectFromFileSystem(e.FileSystem);
            Debug.Assert(project != null);

            try
            {
                _providerServices.ScriptExecutor.ExecuteScript(
                    e.InstallPath,
                    PowerShellScripts.Uninstall,
                    e.Package,
                    project,
                    GetTargetFrameworkForPackage(e.Package.Id) ?? project.GetTargetFrameworkName(),
                    this);
            }
            catch (Exception ex)
            {
                // Swallow exception for uninstall.ps1. Otherwise, there is no way to uninstall a package.
                // But we log it as a warning.
                Log(MessageLevel.Warning, ExceptionUtility.Unwrap(ex).Message);
            }
        }

        private FrameworkName GetTargetFrameworkForPackage(string packageId)
        {
            var packageReferenceRepository = LocalRepository as PackageReferenceRepository;
            if (packageReferenceRepository != null)
            {
                return packageReferenceRepository.GetPackageTargetFramework(packageId);
            }
            return null;
        }

        private Project FindProjectFromFileSystem(IFileSystem fileSystem)
        {
            // assumes the file system is project-backed — TODO confirm callers never pass a plain IFileSystem
            var projectSystem = fileSystem as IVsProjectSystem;
            return _solutionManager.GetProject(projectSystem.UniqueName);
        }

        // Convenience overload that resolves against this provider's LocalRepository.
        protected void CheckInstallPSScripts(
            IPackage package,
            IPackageRepository sourceRepository,
            FrameworkName targetFramework,
            bool includePrerelease,
            out IList<PackageOperation> operations)
        {
            CheckInstallPSScripts(
                package,
                LocalRepository,
                sourceRepository,
                targetFramework,
                includePrerelease,
                out operations);
        }

        // Resolves the install plan for the package and throws if any package in the
        // plan carries a PowerShell script while PowerShell 2.0 is not installed.
        protected void CheckInstallPSScripts(
            IPackage package,
            IPackageRepository localRepository,
            IPackageRepository sourceRepository,
            FrameworkName targetFramework,
            bool includePrerelease,
            out IList<PackageOperation> operations)
        {
            // Review: Is there any way the user could get into a position that we would need to allow pre release versions here?
            var walker = new InstallWalker(
                localRepository,
                sourceRepository,
                targetFramework,
                this,
                ignoreDependencies: false,
                allowPrereleaseVersions: includePrerelease);

            operations = walker.ResolveOperations(package).ToList();
            var scriptPackages = from o in operations
                                 where o.Package.HasPowerShellScript()
                                 select o.Package;
            if (scriptPackages.Any())
            {
                if (!RegistryHelper.CheckIfPowerShell2Installed())
                {
                    throw new InvalidOperationException(Resources.Dialog_PackageHasPSScript);
                }
            }
        }

        // Returns false if the user declined the license terms for any package being installed.
        protected bool ShowLicenseAgreement(IVsPackageManager packageManager, IEnumerable<PackageOperation> operations)
        {
            var licensePackages = from o in operations
                                  where o.Action == PackageAction.Install &&
                                        o.Package.RequireLicenseAcceptance &&
                                        !packageManager.LocalRepository.Exists(o.Package)
                                  select o.Package;

            // display license window if necessary
            if (licensePackages.Any())
            {
                // hide the progress window if we are going to show license window
                HideProgressWindow();

                bool accepted = _providerServices.UserNotifierServices.ShowLicenseWindow(licensePackages);
                if (!accepted)
                {
                    return false;
                }

                ShowProgressWindow();
            }

            return true;
        }

        private void PrepareOpenReadMeFile(PackageOperationEventArgs e)
        {
            // only open the read me file for the first package that initiates this operation.
            if (e.Package.Id.Equals(_originalPackageId, StringComparison.OrdinalIgnoreCase) && e.Package.HasReadMeFileAtRoot())
            {
                _readmeFile = Path.Combine(e.InstallPath, NuGetConstants.ReadmeFileName);
            }
        }

        private void OpenReadMeFile()
        {
            if (_readmeFile != null)
            {
                _providerServices.VsCommonOperations.OpenFile(_readmeFile);
            }
        }

        private void SaveExpandedNodes()
        {
            // remember which nodes are currently open so that we can keep them open after the operation
            _expandedNodesDisposable = _providerServices.VsCommonOperations.SaveSolutionExplorerNodeStates(_solutionManager);
        }

        private void CollapseNodes()
        {
            // collapse all nodes in solution explorer that we expanded during the operation
            if (_expandedNodesDisposable != null)
            {
                _expandedNodesDisposable.Dispose();
                _expandedNodesDisposable = null;
            }
        }

        public virtual void Dispose()
        {
        }
    }
}
using System;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
using System.ComponentModel;
using System.Runtime.InteropServices;
using System.IO;
using System.Text;
using System.Diagnostics.CodeAnalysis;
using System.Collections.Generic;

// To simplify the process of finding the toolbox bitmap resource:
// #1 Create an internal class called "resfinder" outside of the root namespace.
// #2 Use "resfinder" in the toolbox bitmap attribute instead of the control name.
// #3 use the "<default namespace>.<resourcename>" string to locate the resource.
// See: http://www.bobpowell.net/toolboxbitmap.htm
internal class resfinder
{
}

namespace WeifenLuo.WinFormsUI.Docking
{
    // Factory callback used when deserializing a saved layout: maps a persisted
    // string back to the IDockContent instance it represents.
    [SuppressMessage("Microsoft.Naming", "CA1720:AvoidTypeNamesInParameters", MessageId = "0#")]
    public delegate IDockContent DeserializeDockContent(string persistString);

    /// <summary>
    /// The docking container control. This is a partial class; additional members
    /// (MDI client handling, layout, persistence) live in sibling files.
    /// </summary>
    [LocalizedDescription("DockPanel_Description")]
    [Designer("System.Windows.Forms.Design.ControlDesigner, System.Design")]
    [ToolboxBitmap(typeof(resfinder), "WeifenLuo.WinFormsUI.Docking.DockPanel.bmp")]
    [DefaultProperty("DocumentStyle")]
    [DefaultEvent("ActiveContentChanged")]
    public partial class DockPanel : Panel
    {
        private readonly FocusManagerImpl m_focusManager;
        private readonly DockPanelExtender m_extender;
        private readonly DockPaneCollection m_panes;
        private readonly FloatWindowCollection m_floatWindows;
        private readonly AutoHideWindowControl m_autoHideWindow;
        private readonly DockWindowCollection m_dockWindows;
        private readonly DockContent m_dummyContent;
        private readonly Control m_dummyControl;

        public DockPanel()
        {
            ShowAutoHideContentOnHover = true;

            m_focusManager = new FocusManagerImpl(this);
            m_extender = new DockPanelExtender(this);
            m_panes = new DockPaneCollection();
            m_floatWindows = new FloatWindowCollection();

            SuspendLayout();

            m_autoHideWindow = new AutoHideWindowControl(this);
            m_autoHideWindow.Visible = false;
            m_autoHideWindow.ActiveContentChanged += m_autoHideWindow_ActiveContentChanged;
            SetAutoHideWindowParent();

            // Invisible 1x1 control used as a z-order/layout anchor.
            m_dummyControl = new DummyControl();
            m_dummyControl.Bounds = new Rectangle(0, 0, 1, 1);
            Controls.Add(m_dummyControl);

            m_dockWindows = new DockWindowCollection(this);
            Controls.AddRange(new Control[] {
                DockWindows[DockState.Document],
                DockWindows[DockState.DockLeft],
                DockWindows[DockState.DockRight],
                DockWindows[DockState.DockTop],
                DockWindows[DockState.DockBottom]
            });

            m_dummyContent = new DockContent();
            ResumeLayout();
        }

        private Color m_BackColor;

        /// <summary>
        /// Determines the color with which the client rectangle will be drawn.
        /// If this property is used instead of the BackColor it will not have any influence on the borders to the surrounding controls (DockPane).
        /// The BackColor property changes the borders of surrounding controls (DockPane).
        /// Alternatively both properties may be used (BackColor to draw and define the color of the borders and DockBackColor to define the color of the client rectangle).
        /// For Backgroundimages: Set your prefered Image, then set the DockBackColor and the BackColor to the same Color (Control)
        /// </summary>
        [Description("Determines the color with which the client rectangle will be drawn.\r\n" +
            "If this property is used instead of the BackColor it will not have any influence on the borders to the surrounding controls (DockPane).\r\n" +
            "The BackColor property changes the borders of surrounding controls (DockPane).\r\n" +
            "Alternatively both properties may be used (BackColor to draw and define the color of the borders and DockBackColor to define the color of the client rectangle).\r\n" +
            "For Backgroundimages: Set your prefered Image, then set the DockBackColor and the BackColor to the same Color (Control).")]
        public Color DockBackColor
        {
            get
            {
                // Fall back to the inherited BackColor when no explicit color is set.
                return !m_BackColor.IsEmpty ? m_BackColor : base.BackColor;
            }
            set
            {
                if (m_BackColor != value)
                {
                    m_BackColor = value;
                    this.Refresh();
                }
            }
        }

        // Designer serialization helpers for DockBackColor.
        private bool ShouldSerializeDockBackColor()
        {
            return !m_BackColor.IsEmpty;
        }

        private void ResetDockBackColor()
        {
            DockBackColor = Color.Empty;
        }

        private AutoHideStripBase m_autoHideStripControl = null;

        // Lazily created strip that hosts the auto-hide tabs along the panel edges.
        internal AutoHideStripBase AutoHideStripControl
        {
            get
            {
                if (m_autoHideStripControl == null)
                {
                    m_autoHideStripControl = AutoHideStripFactory.CreateAutoHideStrip(this);
                    Controls.Add(m_autoHideStripControl);
                }
                return m_autoHideStripControl;
            }
        }

        internal void ResetAutoHideStripControl()
        {
            if (m_autoHideStripControl != null)
                m_autoHideStripControl.Dispose();

            m_autoHideStripControl = null;
        }

        private void MdiClientHandleAssigned(object sender, EventArgs e)
        {
            SetMdiClient();
            PerformLayout();
        }

        private void MdiClient_Layout(object sender, LayoutEventArgs e)
        {
            if (DocumentStyle != DocumentStyle.DockingMdi)
                return;

            foreach (DockPane pane in Panes)
                if (pane.DockState == DockState.Document)
                    pane.SetContentBounds();

            InvalidateWindowRegion();
        }

        private bool m_disposed = false;

        protected override void Dispose(bool disposing)
        {
            if (!m_disposed && disposing)
            {
                m_focusManager.Dispose();
                // m_mdiClientController is declared in another part of this partial class.
                if (m_mdiClientController != null)
                {
                    m_mdiClientController.HandleAssigned -= new EventHandler(MdiClientHandleAssigned);
                    m_mdiClientController.MdiChildActivate -= new EventHandler(ParentFormMdiChildActivate);
                    m_mdiClientController.Layout -= new LayoutEventHandler(MdiClient_Layout);
                    m_mdiClientController.Dispose();
                }
                FloatWindows.Dispose();
                Panes.Dispose();
                DummyContent.Dispose();

                m_disposed = true;
            }

            base.Dispose(disposing);
        }

        [Browsable(false)]
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        public IDockContent ActiveAutoHideContent
        {
            get { return AutoHideWindow.ActiveContent; }
            set { AutoHideWindow.ActiveContent = value; }
        }

        // End-user docking is unsupported on Mono, so the default is off there.
        private bool m_allowEndUserDocking = !Win32Helper.IsRunningOnMono;

        [LocalizedCategory("Category_Docking")]
[LocalizedDescription("DockPanel_AllowEndUserDocking_Description")]
[DefaultValue(true)]
public bool AllowEndUserDocking
{
    get
    {
        // Drag-and-drop docking is not supported on Mono; force the flag off there.
        if (Win32Helper.IsRunningOnMono && m_allowEndUserDocking)
            m_allowEndUserDocking = false;

        return m_allowEndUserDocking;
    }
    set
    {
        if (Win32Helper.IsRunningOnMono && value)
            throw new InvalidOperationException("AllowEndUserDocking can only be false if running on Mono");

        m_allowEndUserDocking = value;
    }
}

private bool m_raiseTabsOnDragOver = true;

[LocalizedCategory("Category_Docking")]
[Description("Raises tabs in a document pane when dragging over them")]
[DefaultValue(true)]
public bool RaiseTabsOnDragOver
{
    get { return m_raiseTabsOnDragOver; }
    set { m_raiseTabsOnDragOver = value; }
}

private bool m_closeTabsToLeft = true;

[LocalizedCategory("Category_Docking")]
[Description("When closing the active tab, select next to the left")]
[DefaultValue(true)]
public bool CloseTabsToLeft
{
    get { return m_closeTabsToLeft; }
    set { m_closeTabsToLeft = value; }
}

private bool m_allowEndUserNestedDocking = !Win32Helper.IsRunningOnMono;

[LocalizedCategory("Category_Docking")]
[LocalizedDescription("DockPanel_AllowEndUserNestedDocking_Description")]
[DefaultValue(true)]
public bool AllowEndUserNestedDocking
{
    get
    {
        // FIX: this getter previously tested and reset m_allowEndUserDocking
        // (copy-paste from AllowEndUserDocking above) while returning
        // m_allowEndUserNestedDocking. Mirror the AllowEndUserDocking pattern
        // on the correct backing field so the Mono guard applies to it.
        if (Win32Helper.IsRunningOnMono && m_allowEndUserNestedDocking)
            m_allowEndUserNestedDocking = false;

        return m_allowEndUserNestedDocking;
    }
    set
    {
        if (Win32Helper.IsRunningOnMono && value)
            throw new InvalidOperationException("AllowEndUserNestedDocking can only be false if running on Mono");

        m_allowEndUserNestedDocking = value;
    }
}

private DockContentCollection m_contents = new DockContentCollection();

// All contents (documents and tool windows) registered with this panel.
[Browsable(false)]
public DockContentCollection Contents
{
    get { return m_contents; }
}

internal DockContent DummyContent
{
    get { return m_dummyContent; }
}

private bool m_rightToLeftLayout = false;

[DefaultValue(false)]
[LocalizedCategory("Appearance")]
[LocalizedDescription("DockPanel_RightToLeftLayout_Description")]
public bool RightToLeftLayout
{
    get
    {
return m_rightToLeftLayout;
    }
    set
    {
        if (m_rightToLeftLayout == value)
            return;

        m_rightToLeftLayout = value;
        // Propagate the layout direction to every floating window.
        foreach (FloatWindow floatWindow in FloatWindows)
            floatWindow.RightToLeftLayout = value;
    }
}

protected override void OnRightToLeftChanged(EventArgs e)
{
    base.OnRightToLeftChanged(e);
    // Keep floating windows' RightToLeft in sync with the panel.
    foreach (FloatWindow floatWindow in FloatWindows)
    {
        if (floatWindow.RightToLeft != RightToLeft)
            floatWindow.RightToLeft = RightToLeft;
    }
}

private bool m_showDocumentIcon = false;

[DefaultValue(false)]
[LocalizedCategory("Category_Docking")]
[LocalizedDescription("DockPanel_ShowDocumentIcon_Description")]
public bool ShowDocumentIcon
{
    get { return m_showDocumentIcon; }
    set
    {
        if (m_showDocumentIcon == value)
            return;

        m_showDocumentIcon = value;
        Refresh();
    }
}

private DocumentTabStripLocation m_documentTabStripLocation = DocumentTabStripLocation.Top;

[DefaultValue(DocumentTabStripLocation.Top)]
[LocalizedCategory("Category_Docking")]
[LocalizedDescription("DockPanel_DocumentTabStripLocation")]
public DocumentTabStripLocation DocumentTabStripLocation
{
    get { return m_documentTabStripLocation; }
    set { m_documentTabStripLocation = value; }
}

// Extension point for replacing the factories that create panes, strips, etc.
[Browsable(false)]
public DockPanelExtender Extender
{
    get { return m_extender; }
}

[Browsable(false)]
public DockPanelExtender.IDockPaneFactory DockPaneFactory
{
    get { return Extender.DockPaneFactory; }
}

[Browsable(false)]
public DockPanelExtender.IFloatWindowFactory FloatWindowFactory
{
    get { return Extender.FloatWindowFactory; }
}

internal DockPanelExtender.IDockPaneCaptionFactory DockPaneCaptionFactory
{
    get { return Extender.DockPaneCaptionFactory; }
}

internal DockPanelExtender.IDockPaneStripFactory DockPaneStripFactory
{
    get { return Extender.DockPaneStripFactory; }
}

internal DockPanelExtender.IAutoHideStripFactory AutoHideStripFactory
{
    get { return Extender.AutoHideStripFactory; }
}

[Browsable(false)]
public DockPaneCollection Panes
{
    get { return m_panes; }
}

// The client area available for docking, i.e. the panel minus its DockPadding.
internal Rectangle DockArea
{
    get
    {
        return new Rectangle(DockPadding.Left, DockPadding.Top,
            ClientRectangle.Width - DockPadding.Left - DockPadding.Right,
            ClientRectangle.Height - DockPadding.Top - DockPadding.Bottom);
    }
}

// The four Dock*Portion values are either a fraction of the client size
// (value < 1) or an absolute pixel size (value >= 1). Opposite edges are
// clamped so their fractional portions never sum past 1.
private double m_dockBottomPortion = 0.25;

[LocalizedCategory("Category_Docking")]
[LocalizedDescription("DockPanel_DockBottomPortion_Description")]
[DefaultValue(0.25)]
public double DockBottomPortion
{
    get { return m_dockBottomPortion; }
    set
    {
        if (value <= 0)
            throw new ArgumentOutOfRangeException("value");

        if (value == m_dockBottomPortion)
            return;

        m_dockBottomPortion = value;

        if (m_dockBottomPortion < 1 && m_dockTopPortion < 1)
        {
            if (m_dockTopPortion + m_dockBottomPortion > 1)
                m_dockTopPortion = 1 - m_dockBottomPortion;
        }

        PerformLayout();
    }
}

private double m_dockLeftPortion = 0.25;

[LocalizedCategory("Category_Docking")]
[LocalizedDescription("DockPanel_DockLeftPortion_Description")]
[DefaultValue(0.25)]
public double DockLeftPortion
{
    get { return m_dockLeftPortion; }
    set
    {
        if (value <= 0)
            throw new ArgumentOutOfRangeException("value");

        if (value == m_dockLeftPortion)
            return;

        m_dockLeftPortion = value;

        if (m_dockLeftPortion < 1 && m_dockRightPortion < 1)
        {
            if (m_dockLeftPortion + m_dockRightPortion > 1)
                m_dockRightPortion = 1 - m_dockLeftPortion;
        }

        PerformLayout();
    }
}

private double m_dockRightPortion = 0.25;

[LocalizedCategory("Category_Docking")]
[LocalizedDescription("DockPanel_DockRightPortion_Description")]
[DefaultValue(0.25)]
public double DockRightPortion
{
    get { return m_dockRightPortion; }
    set
    {
        if (value <= 0)
            throw new ArgumentOutOfRangeException("value");

        if (value == m_dockRightPortion)
            return;

        m_dockRightPortion = value;

        if (m_dockLeftPortion < 1 && m_dockRightPortion < 1)
        {
            if (m_dockLeftPortion + m_dockRightPortion > 1)
                m_dockLeftPortion = 1 - m_dockRightPortion;
        }

        PerformLayout();
    }
}

private double m_dockTopPortion = 0.25;

[LocalizedCategory("Category_Docking")]
[LocalizedDescription("DockPanel_DockTopPortion_Description")]
[DefaultValue(0.25)]
public double DockTopPortion
{
    get { return m_dockTopPortion; }
    set
    {
        if (value <= 0)
            throw new ArgumentOutOfRangeException("value");

        if (value == m_dockTopPortion)
            return;

        m_dockTopPortion = value;

        if (m_dockTopPortion < 1 && m_dockBottomPortion < 1)
        {
            if (m_dockTopPortion + m_dockBottomPortion > 1)
                m_dockBottomPortion = 1 - m_dockTopPortion;
        }

        PerformLayout();
    }
}

[Browsable(false)]
public DockWindowCollection DockWindows
{
    get { return m_dockWindows; }
}

// Moves the dock window on the given edge to the back or front of the z-order,
// which controls whether that edge spans the full panel.
public void UpdateDockWindowZOrder(DockStyle dockStyle, bool fullPanelEdge)
{
    if (dockStyle == DockStyle.Left)
    {
        if (fullPanelEdge)
            DockWindows[DockState.DockLeft].SendToBack();
        else
            DockWindows[DockState.DockLeft].BringToFront();
    }
    else if (dockStyle == DockStyle.Right)
    {
        if (fullPanelEdge)
            DockWindows[DockState.DockRight].SendToBack();
        else
            DockWindows[DockState.DockRight].BringToFront();
    }
    else if (dockStyle == DockStyle.Top)
    {
        if (fullPanelEdge)
            DockWindows[DockState.DockTop].SendToBack();
        else
            DockWindows[DockState.DockTop].BringToFront();
    }
    else if (dockStyle == DockStyle.Bottom)
    {
        if (fullPanelEdge)
            DockWindows[DockState.DockBottom].SendToBack();
        else
            DockWindows[DockState.DockBottom].BringToFront();
    }
}

// Counts by enumerating Documents (an iterator), so this is O(n) per call.
[Browsable(false)]
public int DocumentsCount
{
    get
    {
        int count = 0;
        foreach (IDockContent content in Documents)
            count++;

        return count;
    }
}

public IDockContent[] DocumentsToArray()
{
    int count = DocumentsCount;
    IDockContent[] documents = new IDockContent[count];
    int i = 0;
    foreach (IDockContent content in Documents)
    {
        documents[i] = content;
        i++;
    }

    return documents;
}

// Lazily yields only the contents currently in the Document dock state.
[Browsable(false)]
public IEnumerable<IDockContent> Documents
{
    get
    {
        foreach (IDockContent content in Contents)
        {
            if (content.DockHandler.DockState == DockState.Document)
                yield return content;
        }
    }
}

private Control DummyControl
{
    get { return m_dummyControl; }
}

[Browsable(false)]
public FloatWindowCollection FloatWindows
{
    get { return m_floatWindows; }
}

private Size m_defaultFloatWindowSize = new Size(300, 300);

[Category("Layout")]
[LocalizedDescription("DockPanel_DefaultFloatWindowSize_Description")]
public Size DefaultFloatWindowSize
{
    get { return m_defaultFloatWindowSize; }
    set { m_defaultFloatWindowSize = value; }
}

// Designer serialization helpers for DefaultFloatWindowSize.
private bool ShouldSerializeDefaultFloatWindowSize()
{
    return DefaultFloatWindowSize != new Size(300, 300);
}

private void ResetDefaultFloatWindowSize()
{
    DefaultFloatWindowSize = new Size(300, 300);
}

private DocumentStyle m_documentStyle = DocumentStyle.DockingMdi;

[LocalizedCategory("Category_Docking")]
[LocalizedDescription("DockPanel_DocumentStyle_Description")]
[DefaultValue(DocumentStyle.DockingMdi)]
public DocumentStyle DocumentStyle
{
    get { return m_documentStyle; }
    set
    {
        if (value == m_documentStyle)
            return;

        if (!Enum.IsDefined(typeof(DocumentStyle), value))
            throw new InvalidEnumArgumentException();

        // Cannot switch to SystemMdi while documents are still docked.
        if (value == DocumentStyle.SystemMdi && DockWindows[DockState.Document].VisibleNestedPanes.Count > 0)
            throw new InvalidEnumArgumentException();

        m_documentStyle = value;

        SuspendLayout(true);

        SetAutoHideWindowParent();
        SetMdiClient();
        InvalidateWindowRegion();

        // Re-parent every document so it picks up the new style.
        foreach (IDockContent content in Contents)
        {
            if (content.DockHandler.DockState == DockState.Document)
                content.DockHandler.SetPaneAndVisible(content.DockHandler.Pane);
        }

        PerformMdiClientLayout();

        ResumeLayout(true, true);
    }
}

private bool _supprtDeeplyNestedContent = false;

[LocalizedCategory("Category_Performance")]
[LocalizedDescription("DockPanel_SupportDeeplyNestedContent_Description")]
[DefaultValue(false)]
public bool SupportDeeplyNestedContent
{
    get { return _supprtDeeplyNestedContent; }
    set { _supprtDeeplyNestedContent = value; }
}

[LocalizedCategory("Category_Docking")]
[LocalizedDescription("DockPanel_ShowAutoHideContentOnHover_Description")]
[DefaultValue(true)]
public bool ShowAutoHideContentOnHover { get; set; }

// Computes the pixel size of the dock window on the given edge, clamping each
// edge to MeasurePane.MinSize and shrinking both opposing edges evenly when
// they would not leave room for the center area.
private int GetDockWindowSize(DockState dockState)
{
    if (dockState == DockState.DockLeft || dockState == DockState.DockRight)
    {
        int width = ClientRectangle.Width - DockPadding.Left - DockPadding.Right;
        // Portion >= 1 means absolute pixels, otherwise a fraction of the width.
        int dockLeftSize = m_dockLeftPortion >= 1 ? (int)m_dockLeftPortion : (int)(width * m_dockLeftPortion);
        int dockRightSize = m_dockRightPortion >= 1 ? (int)m_dockRightPortion : (int)(width * m_dockRightPortion);

        if (dockLeftSize < MeasurePane.MinSize)
            dockLeftSize = MeasurePane.MinSize;
        if (dockRightSize < MeasurePane.MinSize)
            dockRightSize = MeasurePane.MinSize;

        if (dockLeftSize + dockRightSize > width - MeasurePane.MinSize)
        {
            int adjust = (dockLeftSize + dockRightSize) - (width - MeasurePane.MinSize);
            dockLeftSize -= adjust / 2;
            dockRightSize -= adjust / 2;
        }

        return dockState == DockState.DockLeft ? dockLeftSize : dockRightSize;
    }
    else if (dockState == DockState.DockTop || dockState == DockState.DockBottom)
    {
        int height = ClientRectangle.Height - DockPadding.Top - DockPadding.Bottom;
        int dockTopSize = m_dockTopPortion >= 1 ? (int)m_dockTopPortion : (int)(height * m_dockTopPortion);
        int dockBottomSize = m_dockBottomPortion >= 1 ? (int)m_dockBottomPortion : (int)(height * m_dockBottomPortion);

        if (dockTopSize < MeasurePane.MinSize)
            dockTopSize = MeasurePane.MinSize;
        if (dockBottomSize < MeasurePane.MinSize)
            dockBottomSize = MeasurePane.MinSize;

        if (dockTopSize + dockBottomSize > height - MeasurePane.MinSize)
        {
            int adjust = (dockTopSize + dockBottomSize) - (height - MeasurePane.MinSize);
            dockTopSize -= adjust / 2;
            dockBottomSize -= adjust / 2;
        }

        return dockState == DockState.DockTop ?
dockTopSize : dockBottomSize; } else return 0; } protected override void OnLayout(LayoutEventArgs levent) { SuspendLayout(true); AutoHideStripControl.Bounds = ClientRectangle; CalculateDockPadding(); DockWindows[DockState.DockLeft].Width = GetDockWindowSize(DockState.DockLeft); DockWindows[DockState.DockRight].Width = GetDockWindowSize(DockState.DockRight); DockWindows[DockState.DockTop].Height = GetDockWindowSize(DockState.DockTop); DockWindows[DockState.DockBottom].Height = GetDockWindowSize(DockState.DockBottom); AutoHideWindow.Bounds = GetAutoHideWindowBounds(AutoHideWindowRectangle); DockWindows[DockState.Document].BringToFront(); AutoHideWindow.BringToFront(); base.OnLayout(levent); if (DocumentStyle == DocumentStyle.SystemMdi && MdiClientExists) { SetMdiClientBounds(SystemMdiClientBounds); InvalidateWindowRegion(); } else if (DocumentStyle == DocumentStyle.DockingMdi) InvalidateWindowRegion(); ResumeLayout(true, true); } internal Rectangle GetTabStripRectangle(DockState dockState) { return AutoHideStripControl.GetTabStripRectangle(dockState); } protected override void OnPaint(PaintEventArgs e) { base.OnPaint(e); if (DockBackColor == BackColor) return; Graphics g = e.Graphics; SolidBrush bgBrush = new SolidBrush(DockBackColor); g.FillRectangle(bgBrush, ClientRectangle); } internal void AddContent(IDockContent content) { if (content == null) throw(new ArgumentNullException()); if (!Contents.Contains(content)) { Contents.Add(content); OnContentAdded(new DockContentEventArgs(content)); } } internal void AddPane(DockPane pane) { if (Panes.Contains(pane)) return; Panes.Add(pane); } internal void AddFloatWindow(FloatWindow floatWindow) { if (FloatWindows.Contains(floatWindow)) return; FloatWindows.Add(floatWindow); } private void CalculateDockPadding() { DockPadding.All = 0; int height = AutoHideStripControl.MeasureHeight(); if (AutoHideStripControl.GetNumberOfPanes(DockState.DockLeftAutoHide) > 0) DockPadding.Left = height; if 
(AutoHideStripControl.GetNumberOfPanes(DockState.DockRightAutoHide) > 0) DockPadding.Right = height; if (AutoHideStripControl.GetNumberOfPanes(DockState.DockTopAutoHide) > 0) DockPadding.Top = height; if (AutoHideStripControl.GetNumberOfPanes(DockState.DockBottomAutoHide) > 0) DockPadding.Bottom = height; } internal void RemoveContent(IDockContent content) { if (content == null) throw(new ArgumentNullException()); if (Contents.Contains(content)) { Contents.Remove(content); OnContentRemoved(new DockContentEventArgs(content)); } } internal void RemovePane(DockPane pane) { if (!Panes.Contains(pane)) return; Panes.Remove(pane); } internal void RemoveFloatWindow(FloatWindow floatWindow) { if (!FloatWindows.Contains(floatWindow)) return; FloatWindows.Remove(floatWindow); if (FloatWindows.Count != 0) return; if (ParentForm == null) return; ParentForm.Focus(); } public void SetPaneIndex(DockPane pane, int index) { int oldIndex = Panes.IndexOf(pane); if (oldIndex == -1) throw(new ArgumentException(Strings.DockPanel_SetPaneIndex_InvalidPane)); if (index < 0 || index > Panes.Count - 1) if (index != -1) throw(new ArgumentOutOfRangeException(Strings.DockPanel_SetPaneIndex_InvalidIndex)); if (oldIndex == index) return; if (oldIndex == Panes.Count - 1 && index == -1) return; Panes.Remove(pane); if (index == -1) Panes.Add(pane); else if (oldIndex < index) Panes.AddAt(pane, index - 1); else Panes.AddAt(pane, index); } public void SuspendLayout(bool allWindows) { FocusManager.SuspendFocusTracking(); SuspendLayout(); if (allWindows) SuspendMdiClientLayout(); } public void ResumeLayout(bool performLayout, bool allWindows) { FocusManager.ResumeFocusTracking(); ResumeLayout(performLayout); if (allWindows) ResumeMdiClientLayout(performLayout); } internal Form ParentForm { get { if (!IsParentFormValid()) throw new InvalidOperationException(Strings.DockPanel_ParentForm_Invalid); return GetMdiClientController().ParentForm; } } private bool IsParentFormValid() { if (DocumentStyle == 
DocumentStyle.DockingSdi || DocumentStyle == DocumentStyle.DockingWindow) return true; if (!MdiClientExists) GetMdiClientController().RenewMdiClient(); return (MdiClientExists); } protected override void OnParentChanged(EventArgs e) { SetAutoHideWindowParent(); GetMdiClientController().ParentForm = (this.Parent as Form); base.OnParentChanged (e); } private void SetAutoHideWindowParent() { Control parent; if (DocumentStyle == DocumentStyle.DockingMdi || DocumentStyle == DocumentStyle.SystemMdi) parent = this.Parent; else parent = this; if (AutoHideWindow.Parent != parent) { AutoHideWindow.Parent = parent; AutoHideWindow.BringToFront(); } } protected override void OnVisibleChanged(EventArgs e) { base.OnVisibleChanged (e); if (Visible) SetMdiClient(); } private Rectangle SystemMdiClientBounds { get { if (!IsParentFormValid() || !Visible) return Rectangle.Empty; Rectangle rect = ParentForm.RectangleToClient(RectangleToScreen(DocumentWindowBounds)); return rect; } } internal Rectangle DocumentWindowBounds { get { Rectangle rectDocumentBounds = DisplayRectangle; if (DockWindows[DockState.DockLeft].Visible) { rectDocumentBounds.X += DockWindows[DockState.DockLeft].Width; rectDocumentBounds.Width -= DockWindows[DockState.DockLeft].Width; } if (DockWindows[DockState.DockRight].Visible) rectDocumentBounds.Width -= DockWindows[DockState.DockRight].Width; if (DockWindows[DockState.DockTop].Visible) { rectDocumentBounds.Y += DockWindows[DockState.DockTop].Height; rectDocumentBounds.Height -= DockWindows[DockState.DockTop].Height; } if (DockWindows[DockState.DockBottom].Visible) rectDocumentBounds.Height -= DockWindows[DockState.DockBottom].Height; return rectDocumentBounds; } } private PaintEventHandler m_dummyControlPaintEventHandler = null; private void InvalidateWindowRegion() { if (DesignMode) return; if (m_dummyControlPaintEventHandler == null) m_dummyControlPaintEventHandler = new PaintEventHandler(DummyControl_Paint); DummyControl.Paint += 
m_dummyControlPaintEventHandler; DummyControl.Invalidate(); } void DummyControl_Paint(object sender, PaintEventArgs e) { DummyControl.Paint -= m_dummyControlPaintEventHandler; UpdateWindowRegion(); } private void UpdateWindowRegion() { if (this.DocumentStyle == DocumentStyle.DockingMdi) UpdateWindowRegion_ClipContent(); else if (this.DocumentStyle == DocumentStyle.DockingSdi || this.DocumentStyle == DocumentStyle.DockingWindow) UpdateWindowRegion_FullDocumentArea(); else if (this.DocumentStyle == DocumentStyle.SystemMdi) UpdateWindowRegion_EmptyDocumentArea(); } private void UpdateWindowRegion_FullDocumentArea() { SetRegion(null); } private void UpdateWindowRegion_EmptyDocumentArea() { Rectangle rect = DocumentWindowBounds; SetRegion(new Rectangle[] { rect }); } private void UpdateWindowRegion_ClipContent() { int count = 0; foreach (DockPane pane in this.Panes) { if (!pane.Visible || pane.DockState != DockState.Document) continue; count ++; } if (count == 0) { SetRegion(null); return; } Rectangle[] rects = new Rectangle[count]; int i = 0; foreach (DockPane pane in this.Panes) { if (!pane.Visible || pane.DockState != DockState.Document) continue; rects[i] = RectangleToClient(pane.RectangleToScreen(pane.ContentRectangle)); i++; } SetRegion(rects); } private Rectangle[] m_clipRects = null; private void SetRegion(Rectangle[] clipRects) { if (!IsClipRectsChanged(clipRects)) return; m_clipRects = clipRects; if (m_clipRects == null || m_clipRects.GetLength(0) == 0) Region = null; else { Region region = new Region(new Rectangle(0, 0, this.Width, this.Height)); foreach (Rectangle rect in m_clipRects) region.Exclude(rect); Region = region; } } private bool IsClipRectsChanged(Rectangle[] clipRects) { if (clipRects == null && m_clipRects == null) return false; else if ((clipRects == null) != (m_clipRects == null)) return true; foreach (Rectangle rect in clipRects) { bool matched = false; foreach (Rectangle rect2 in m_clipRects) { if (rect == rect2) { matched = true; break; } } 
if (!matched) return true; } foreach (Rectangle rect2 in m_clipRects) { bool matched = false; foreach (Rectangle rect in clipRects) { if (rect == rect2) { matched = true; break; } } if (!matched) return true; } return false; } private static readonly object ActiveAutoHideContentChangedEvent = new object(); [LocalizedCategory("Category_DockingNotification")] [LocalizedDescription("DockPanel_ActiveAutoHideContentChanged_Description")] public event EventHandler ActiveAutoHideContentChanged { add { Events.AddHandler(ActiveAutoHideContentChangedEvent, value); } remove { Events.RemoveHandler(ActiveAutoHideContentChangedEvent, value); } } protected virtual void OnActiveAutoHideContentChanged(EventArgs e) { EventHandler handler = (EventHandler)Events[ActiveAutoHideContentChangedEvent]; if (handler != null) handler(this, e); } private void m_autoHideWindow_ActiveContentChanged(object sender, EventArgs e) { OnActiveAutoHideContentChanged(e); } private static readonly object ContentAddedEvent = new object(); [LocalizedCategory("Category_DockingNotification")] [LocalizedDescription("DockPanel_ContentAdded_Description")] public event EventHandler<DockContentEventArgs> ContentAdded { add { Events.AddHandler(ContentAddedEvent, value); } remove { Events.RemoveHandler(ContentAddedEvent, value); } } protected virtual void OnContentAdded(DockContentEventArgs e) { EventHandler<DockContentEventArgs> handler = (EventHandler<DockContentEventArgs>)Events[ContentAddedEvent]; if (handler != null) handler(this, e); } private static readonly object ContentRemovedEvent = new object(); [LocalizedCategory("Category_DockingNotification")] [LocalizedDescription("DockPanel_ContentRemoved_Description")] public event EventHandler<DockContentEventArgs> ContentRemoved { add { Events.AddHandler(ContentRemovedEvent, value); } remove { Events.RemoveHandler(ContentRemovedEvent, value); } } protected virtual void OnContentRemoved(DockContentEventArgs e) { EventHandler<DockContentEventArgs> handler = 
(EventHandler<DockContentEventArgs>)Events[ContentRemovedEvent]; if (handler != null) handler(this, e); } } }
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using TDTK;
using BEERPath;

namespace TDTK {

    // Singleton that manages tower building for a TDTK level: the buildable
    // tower list, grid-aligned platforms, placement indicators, build-point
    // validation and the actual tower construction.
    public class BuildManager : MonoBehaviour {

        public delegate void AddNewTowerHandler(UnitTower tower);
        public static event AddNewTowerHandler onAddNewTowerE;    //add new tower in runtime

        //prefabID of towers unavailable in this level
        public List<int> unavailableTowerIDList = new List<int>();
        //only used in runtime, filled up using info from unavailableTowerIDList
        public List<UnitTower> towerList = new List<UnitTower>();

        // Static mirror of gridSize so static helpers (GetTilePos, etc.) can
        // read it without an instance lookup.
        private static float _gridSize = 0;
        public float gridSize = 1.5f;

        public List<PlatformTD> buildPlatforms = new List<PlatformTD>();

        public bool AutoAdjustTextureToGrid = true;

        public enum _CursorIndicatorMode { All, ValidOnly, None }
        public _CursorIndicatorMode cursorIndicatorMode = _CursorIndicatorMode.All;

        public bool autoSearchForPlatform = false;

        public static BuildManager instance;

        // Currently selected, validated build point; null when none is active.
        private static BuildInfo buildInfo;

        // Running counter used to assign unique IDs to built towers.
        private int towerCount = 0;
        public static int GetTowerCount() { return instance.towerCount; }

        // Called by the game setup code; wires up the singleton and prepares
        // towers, platforms and the path finder.
        public void Init() {
            instance = this;

            gridSize = Mathf.Max(0.25f, gridSize);    // clamp to a sane minimum
            _gridSize = gridSize;

            buildInfo = null;

            InitTower();
            InitPlatform();
            InitPathFinder();
        }

        // Prefabs in the inspector; replaced by scene instances in Start().
        public GameObject indicatorBuildPoint;
        public GameObject indicatorCursor;
        private Renderer indicatorBuildPointRen;
        private Renderer indicatorCursorRen;

        // Instantiates the two tile indicators (selected build point and
        // hover cursor), scaled to the grid, and builds the sample towers.
        void Start() {
            if (cursorIndicatorMode != _CursorIndicatorMode.None) {
                if (indicatorBuildPoint != null) {
                    indicatorBuildPoint = (GameObject)Instantiate(indicatorBuildPoint);
                    indicatorBuildPoint.transform.localScale = new Vector3(gridSize, 1, gridSize);
                    indicatorBuildPoint.transform.parent = transform;
                    indicatorBuildPoint.SetActive(false);
                    // NOTE(review): this renames indicatorCursor from inside the
                    // indicatorBuildPoint branch — looks like a copy-paste slip;
                    // presumably it should be indicatorBuildPoint.name. Confirm
                    // before changing, since object names are visible in the scene.
                    indicatorCursor.name = "TileIndicator_BuildPoint";
                    foreach (Transform child in indicatorBuildPoint.transform)
                        indicatorBuildPointRen = child.GetComponent<Renderer>();
                }
                if (indicatorCursor != null) {
                    indicatorCursor = (GameObject)Instantiate(indicatorCursor);
                    indicatorCursor.transform.localScale = new Vector3(gridSize, 1, gridSize);
                    indicatorCursor.transform.parent = transform;
                    indicatorCursor.SetActive(false);
                    indicatorCursor.name = "TileIndicator_Cursor";
                    foreach (Transform child in indicatorCursor.transform)
                        indicatorCursorRen = child.GetComponent<Renderer>();
                }
            }

            InitiateSampleTowerList();
        }

        // Builds towerList from the tower database, skipping towers disabled
        // for the build manager or blacklisted for this level.
        public void InitTower() {
            List<UnitTower> towerListDB = TowerDB.Load();

            towerList = new List<UnitTower>();
            for (int i = 0; i < towerListDB.Count; i++) {
                if (towerListDB[i].disableInBuildManager) continue;
                if (!unavailableTowerIDList.Contains(towerListDB[i].prefabID)) {
                    towerList.Add(towerListDB[i]);
                }
            }
        }

        // Use this for initialization
        // Collects (or auto-discovers) the platforms, snaps them to the grid
        // and lets each platform verify which towers it can host.
        void InitPlatform() {
            if (autoSearchForPlatform) {
                buildPlatforms = new List<PlatformTD>();
                PlatformTD[] platList = FindObjectsOfType(typeof(PlatformTD)) as PlatformTD[];
                for (int i = 0; i < platList.Length; i++) {
                    buildPlatforms.Add(platList[i]);
                }
            }

            for (int i = 0; i < buildPlatforms.Count; i++) {
                FormatPlatform(buildPlatforms[i].transform);
                buildPlatforms[i].VerifyTowers(towerList);
            }
        }

        // Normalizes a platform: removes disabled colliders, flattens its
        // rotation, rounds its scale to grid multiples and (optionally)
        // tiles its texture to match the grid.
        void FormatPlatform(Transform platformT) {
            //clear the platform of any unneeded collider
            ClearPlatformColliderRecursively(platformT);

            //make sure the plane is perfectly horizontal, rotation around the y-axis is preserved
            platformT.eulerAngles = new Vector3(0, platformT.rotation.eulerAngles.y, 0);

            //adjusting the scale
            float scaleX = Mathf.Max(1, Mathf.Round(Utility.GetWorldScale(platformT).x / gridSize)) * gridSize;
            float scaleZ = Mathf.Max(1, Mathf.Round(Utility.GetWorldScale(platformT).z / gridSize)) * gridSize;
            platformT.localScale = new Vector3(scaleX, 1, scaleZ);

            //adjusting the texture
            if (AutoAdjustTextureToGrid) {
                Material mat = platformT.GetComponent<Renderer>().material;
                float x = (Utility.GetWorldScale(platformT).x) / gridSize;
                float z = (Utility.GetWorldScale(platformT).z) / gridSize;
                mat.mainTextureOffset = new Vector2(0, 0);
                mat.mainTextureScale = new Vector2(x, z);
            }
        }

        // Recursively destroys colliders that are present but disabled on the
        // platform hierarchy (enabled colliders are kept for raycasting).
        void ClearPlatformColliderRecursively(Transform t) {
            foreach (Transform child in t) {
                ClearPlatformColliderRecursively(child);
                Collider col = child.gameObject.GetComponent<Collider>();
                if (col != null && !col.enabled) {
                    Destroy(col);
                }
            }
        }

        // Runtime entry point to unlock an additional tower; also creates its
        // sample model and notifies subscribers.
        public static void AddNewTower(UnitTower newTower) {
            if (instance.towerList.Contains(newTower)) return;
            instance.towerList.Add(newTower);
            instance.AddNewSampleTower(newTower);
            if (onAddNewTowerE != null) onAddNewTowerE(newTower);
        }

        // Update is called once per frame
        void Update() {

        }

        static public void ClearBuildPoint() {
            //Debug.Log("ClearBuildPoint");
            buildInfo = null;
            ClearIndicator();
        }

        static public void ClearIndicator() {
            if (instance.indicatorBuildPoint != null) instance.indicatorBuildPoint.SetActive(false);
        }

        // Snaps a world-space hit position on a platform to the center of the
        // grid tile it falls in, respecting the platform's y-rotation and
        // whether its tile count is odd or even along each axis.
        static public Vector3 GetTilePos(Transform platformT, Vector3 hitPos) {
            //check if the row count is odd or even number
            float remainderX = Utility.GetWorldScale(platformT).x / _gridSize % 2;
            float remainderZ = Utility.GetWorldScale(platformT).z / _gridSize % 2;

            //get the rotation offset of the plane
            Quaternion rot = Quaternion.LookRotation(hitPos - platformT.position);

            //get the x and z distance from the centre of the plane in the baseplane orientation
            //from this point on all x and z will be in reference to the basePlane orientation
            float dist = Vector3.Distance(hitPos, platformT.position);
            float distX = Mathf.Sin((rot.eulerAngles.y - platformT.rotation.eulerAngles.y) * Mathf.Deg2Rad) * dist;
            float distZ = Mathf.Cos((rot.eulerAngles.y - platformT.rotation.eulerAngles.y) * Mathf.Deg2Rad) * dist;

            //get the sign (1/-1) of the x and z direction
            float signX = distX != 0 ? distX / Mathf.Abs(distX) : 1;
            float signZ = distZ != 0 ? distZ / Mathf.Abs(distZ) : 1;

            //calculate the tile number selected in x and z direction
            float numX = Mathf.Round((distX + (remainderX - 1) * (signX * _gridSize / 2)) / _gridSize);
            float numZ = Mathf.Round((distZ + (remainderZ - 1) * (signZ * _gridSize / 2)) / _gridSize);

            //calculate offset in x-axis,
            float offsetX = -(remainderX - 1) * signX * _gridSize / 2;
            float offsetZ = -(remainderZ - 1) * signZ * _gridSize / 2;

            //get the pos and apply the offset
            Vector3 p = platformT.TransformDirection(new Vector3(numX, 0, numZ) * _gridSize);
            p += platformT.TransformDirection(new Vector3(offsetX, 0, offsetZ));

            //set the position;
            Vector3 pos = p + platformT.position;

            return pos;
        }

        //called to set indicator to a particular node, set the color as well
        //not iOS performance friendly
        public static void SetIndicator(Vector3 pointer) { instance._SetIndicator(pointer); }

        // Raycasts the screen-space pointer against the platform layer and
        // positions/colors the hover cursor: green when the tile is free, red
        // (or hidden, depending on cursorIndicatorMode) when it is blocked.
        public void _SetIndicator(Vector3 pointer) {
            //~ if(!buildManager.enableTileIndicator) return;
            if (cursorIndicatorMode == _CursorIndicatorMode.None) return;

            // A build point is already selected — hide the hover cursor.
            if (buildInfo != null) {
                indicatorCursor.SetActive(false);
                return;
            }

            //layerMask for platform only
            LayerMask maskPlatform = 1 << LayerManager.LayerPlatform();
            //layerMask to detect all colliders within the build point
            LayerMask maskAll = 1 << LayerManager.LayerPlatform();
            int terrainLayer = LayerManager.LayerTerrain();
            if (terrainLayer >= 0) maskAll |= 1 << terrainLayer;

            Camera mainCam = Camera.main;
            if (mainCam != null) {
                Ray ray = mainCam.ScreenPointToRay(pointer);
                RaycastHit hit;
                if (Physics.Raycast(ray, out hit, Mathf.Infinity, maskPlatform)) {
                    for (int i = 0; i < buildPlatforms.Count; i++) {
                        if (hit.transform == buildPlatforms[i].thisT) {
                            //calculating the build center point base on the input position
                            Vector3 pos = GetTilePos(buildPlatforms[i].thisT, hit.point);
                            //Debug.Log(new Vector3(remainderX, 0, remainderZ)+" "+new Vector3(signX, 0, signZ)+" "+p+" "+basePlane.position);

                            indicatorCursor.transform.position = pos;
                            indicatorCursor.transform.rotation = buildPlatforms[i].thisT.rotation;

                            // Anything (other than platform/terrain) occupying the tile?
                            Collider[] cols = Physics.OverlapSphere(pos, _gridSize / 2 * 0.9f, ~maskAll);
                            if (cols.Length > 0) {
                                if (cursorIndicatorMode == _CursorIndicatorMode.All) {
                                    indicatorCursor.SetActive(true);
                                    indicatorCursorRen.material.SetColor("_TintColor", Color.red);
                                }
                                else indicatorCursor.SetActive(false);
                            }
                            else {
                                indicatorCursor.SetActive(true);
                                indicatorCursorRen.material.SetColor("_TintColor", Color.green);
                            }
                        }
                    }
                }
                else {
                    indicatorCursor.SetActive(false);
                }
            }
            else {
                indicatorCursor.SetActive(false);
            }
        }

        //not in use outside this script
        public static void HideCursorIndicator() {
            instance.indicatorCursor.SetActive(false);
        }

        public static void ShowIndicator(UnitTower tower) {
            instance.indicatorCursor.SetActive(true);
            instance.indicatorCursor.transform.position = tower.thisT.position;
            instance.indicatorCursor.transform.rotation = tower.thisT.rotation;
        }

        public static void HideIndicator() {
            instance.indicatorCursor.SetActive(false);
        }

        public static _TileStatus CheckBuildPoint(Vector3 pointer, int footprint = -1, int ID = -1) {
            return instance._CheckBuildPoint(pointer, footprint, ID);
        }

        // Validates the pointed-at tile as a build point. On success, caches a
        // BuildInfo (position, platform, buildable-tower indices) in buildInfo
        // and shows the build-point indicator. footprint enlarges the overlap
        // check; ID (prefabID) restricts the check to one tower (drag & drop).
        public _TileStatus _CheckBuildPoint(Vector3 pointer, int footprint = -1, int ID = -1) {
            _TileStatus status = _TileStatus.Available;
            BuildInfo newBuildInfo = new BuildInfo();

            //disable indicator first (for dragNdrop mode), it will be re-enabled if the build-point is valid
            indicatorBuildPoint.SetActive(false);

            //layerMask for platform only
            LayerMask maskPlatform = 1 << LayerManager.LayerPlatform();
            //layerMask to detect all colliders within the build point
            LayerMask maskAll = 1 << LayerManager.LayerPlatform();
            int terrainLayer = LayerManager.LayerTerrain();
            if (terrainLayer >= 0) maskAll |= 1 << terrainLayer;
            //int creepLayer=LayerManager.layerCreep();
            //if(creepLayer>=0) maskAll|=1<<creepLayer;

            Camera mainCam = Camera.main;
            if (mainCam != null) {
                Ray ray = mainCam.ScreenPointToRay(pointer);
                RaycastHit hit;
                if (Physics.Raycast(ray, out hit, Mathf.Infinity, maskPlatform)) {
                    for (int i = 0; i < buildPlatforms.Count; i++) {
                        if (hit.transform == buildPlatforms[i].thisT) {
                            PlatformTD platform = buildPlatforms[i];

                            //checking if tower can be built on the platform, for dragNdrop mode
                            if (ID >= 0 && !platform.availableTowerIDList.Contains(ID)) return _TileStatus.Unavailable;

                            //calculating the build center point base on the input position
                            Vector3 pos = GetTilePos(platform.thisT, hit.point);

                            //check if the position is blocked, by any other obstacle other than the baseplane itself
                            Collider[] cols = Physics.OverlapSphere(pos, _gridSize / 2 * 0.9f + footprint * _gridSize, ~maskAll);
                            if (cols.Length > 0) {
                                //Debug.Log("something's in the way "+cols[0]);
                                return _TileStatus.Unavailable;
                            }
                            else {
                                //confirm that we can build here
                                newBuildInfo.position = pos;
                                newBuildInfo.platform = platform;
                            }

                            //newBuildInfo.availableTowerIDList=platform.availableTowerIDList;
                            //map platform availableTowerIDList (which is the towers' prefabID) to the list elements' ID in towerList
                            newBuildInfo.availableTowerIDList = new List<int>();
                            for (int m = 0; m < platform.availableTowerIDList.Count; m++) {
                                for (int n = 0; n < towerList.Count; n++) {
                                    if (platform.availableTowerIDList[m] == towerList[n].prefabID) {
                                        newBuildInfo.availableTowerIDList.Add(n);
                                        break;
                                    }
                                }
                            }

                            //List<int> tempList=new List<int>();
                            //for(int n=0; n<towerList.Count; n++) tempList.Add(towerList[n].prefabID);
                            //newBuildInfo.availableTowerIDList=tempList;

                            buildInfo = newBuildInfo;
                            break;
                        }
                    }
                }
                else return _TileStatus.NoPlatform;
            }
            else return _TileStatus.NoPlatform;

            if (buildInfo != null && cursorIndicatorMode != _CursorIndicatorMode.None) {
                if (status == _TileStatus.Available) indicatorBuildPointRen.material.SetColor("_TintColor", new Color(0, 1, 0, 1));
                else indicatorBuildPointRen.material.SetColor("_TintColor", new Color(1, 0, 0, 1));

                indicatorBuildPoint.SetActive(true);
                indicatorBuildPoint.transform.position = buildInfo.position;
                if (buildInfo.platform != null) {
                    indicatorBuildPoint.transform.rotation = buildInfo.platform.thisT.rotation;
                }

                HideCursorIndicator();
            }

            return status;
        }

        //called when a tower building is initiated in DragNDrop, use the sample tower as the model and set it in DragNDrop mode
        public static string BuildTowerDragNDrop(UnitTower tower) { return instance._BuildTowerDragNDrop(tower); }

        // Returns "" on success, otherwise a user-facing error message.
        public string _BuildTowerDragNDrop(UnitTower tower) {
            UnitTower sampleTower = GetSampleTower(tower);

            List<int> cost = sampleTower.GetCost();
            int suffCost = ResourceManager.HasSufficientResource(cost);
            if (suffCost == -1) {
                sampleTower.thisObj.SetActive(true);
                GameControl.SelectTower(sampleTower);

                UnitTower towerInstance = sampleTower;
                towerInstance.StartCoroutine(towerInstance.DragNDropRoutine());
                return "";
            }

            return "Insufficient Resource " + suffCost;
        }

        //called by any external component to build tower, uses buildInfo
        // Checks electricity coverage (when the tower needs it) and resources,
        // then spends the cost, instantiates and initializes the tower, and
        // wires electricity links in both directions. Returns "" on success,
        // otherwise a user-facing error message.
        public static string BuildTower(UnitTower tower) {
            if (buildInfo == null) return "Select a Build Point First";

            UnitTower sampleTower = GetSampleTower(tower);

            /***/
            // check if there's an energy-receiving tower in range
            if (tower.electricityNeeded && !tower.electricityReciever && !tower.electricityFacility) {
                LayerMask maskTarget = 1 << LayerManager.LayerTower();
                Collider[] cols = Physics.OverlapSphere(buildInfo.position, 1000 /*GetRange()*/, maskTarget);
                if (cols.Length > 0) {
                    tower.electicitySources.Clear();
                    // find all electric facility
                    for (int i = 0; i < cols.Length; i++) {
                        // if it's not electric receiver skip
                        if (!cols[i].gameObject.GetComponent<UnitTower>().electricityReciever) continue;
                        //float test = cols[i].gameObject.GetComponent<UnitTower>().GetRange();
                        //float test2 = Vector3.Distance(cols[i].gameObject.GetComponent<UnitTower>().transform.position, buildInfo.position);
                        // if this tower is in range of electricityReciever
                        if (Vector3.Distance(cols[i].gameObject.GetComponent<UnitTower>().transform.position, buildInfo.position) <= cols[i].gameObject.GetComponent<UnitTower>().GetRange()) {
                            tower.electicitySources.Add(cols[i].gameObject.GetComponent<UnitTower>());
                        }
                    }
                    if (tower.electicitySources.Count == 0) {
                        // set electricity source for tower weapon
                        return "There is not enough electricity";
                    }
                }
                else return "There is not enough electricity";
            }
            /***/

            //check if there are sufficient resources
            List<int> cost = sampleTower.GetCost();
            int suffCost = ResourceManager.HasSufficientResource(cost);
            if (suffCost == -1) {
                ResourceManager.SpendResource(cost);

                GameObject towerObj = (GameObject)Instantiate(tower.gameObject, buildInfo.position, buildInfo.platform.thisT.rotation);
                UnitTower towerInstance = towerObj.GetComponent<UnitTower>();
                towerInstance.InitTower(instance.towerCount += 1, buildInfo.platform);
                towerInstance.Build();

                // if a new electricity receiver is placed, search for all towers in its range and add itself as electricity source
                if (tower.electricityReciever) {
                    LayerMask maskTarget = 1 << LayerManager.LayerTower();
                    Collider[] cols = Physics.OverlapSphere(buildInfo.position, tower.GetRange(), maskTarget);
                    if (cols.Length > 0) {
                        UnitTower tmp_tow;
                        for (int i = 0; i < cols.Length; i++) {
                            tmp_tow = cols[i].gameObject.GetComponent<UnitTower>();
                            if (tmp_tow.electricityReciever || tmp_tow.electricityFacility) continue;
                            tmp_tow.electicitySources.Add(towerInstance);
                        }
                    }
                }

                //clear the build info and indicator for build manager
                ClearBuildPoint();

                return "";
            }

            return "Insufficient Resource";
        }

        // Snaps a pre-placed (scene-authored) tower onto the nearest platform
        // tile and initializes it; logs a warning when no platform is found.
        public static void PreBuildTower(UnitTower tower) {
            PlatformTD platform = null;

            LayerMask mask = 1 << LayerManager.LayerPlatform();
            Collider[] cols = Physics.OverlapSphere(tower.thisT.position, _gridSize, mask);
            if (cols.Length > 0) platform = cols[0].gameObject.GetComponent<PlatformTD>();

            if (platform != null) {
                Vector3 buildPos = GetTilePos(platform.thisT, tower.thisT.position);
                tower.thisT.position = buildPos;
                tower.thisT.rotation = platform.thisT.rotation;
            }
            else Debug.Log("no platform found for pre-placed tower");

            tower.InitTower(instance.towerCount += 1, platform);
        }
        // Inactive, collider-free preview copies of each buildable tower,
        // shown while the player selects where to build.
        private List<UnitTower> sampleTowerList = new List<UnitTower>();
        private int currentSampleID = -1;   // index of the sample currently shown, -1 = none

        // Creates one sample (preview) instance per tower in towerList.
        public void InitiateSampleTowerList() {
            sampleTowerList = new List<UnitTower>();
            for (int i = 0; i < towerList.Count; i++) {
                UnitTower towerInstance = CreateSampleTower(towerList[i]);
                sampleTowerList.Add(towerInstance);
            }
        }

        public void AddNewSampleTower(UnitTower newTower) {
            UnitTower towerInstance = CreateSampleTower(newTower);
            sampleTowerList.Add(towerInstance);
        }

        // Instantiates a tower prefab as an inactive child with all colliders
        // stripped, flagged as a sample of the given prefab.
        public UnitTower CreateSampleTower(UnitTower towerPrefab) {
            GameObject towerObj = (GameObject)Instantiate(towerPrefab.gameObject);
            towerObj.transform.parent = transform;
            if (towerObj.GetComponent<Collider>() != null) Destroy(towerObj.GetComponent<Collider>());
            Utility.DestroyColliderRecursively(towerObj.transform);
            towerObj.SetActive(false);
            UnitTower towerInstance = towerObj.GetComponent<UnitTower>();
            towerInstance.SetAsSampleTower(towerPrefab);
            return towerInstance;
        }

        public static UnitTower GetSampleTower(int ID) { return instance.sampleTowerList[ID]; }

        // Looks up the sample matching the given tower's prefabID; null if none.
        public static UnitTower GetSampleTower(UnitTower tower) {
            for (int i = 0; i < instance.sampleTowerList.Count; i++) {
                if (instance.sampleTowerList[i].prefabID == tower.prefabID) return instance.sampleTowerList[i];
            }
            return null;
        }

        public static void ShowSampleTower(int ID) { instance._ShowsampleTowerList(ID); }

        // Shows the sample with the given towerList index at the current build
        // point (requires an active buildInfo); hides any previous sample.
        public void _ShowsampleTowerList(int ID) {
            if (currentSampleID == ID || buildInfo == null) return;
            if (currentSampleID >= 0) ClearSampleTower();

            currentSampleID = ID;
            sampleTowerList[ID].thisT.position = buildInfo.position;
            sampleTowerList[ID].thisT.rotation = buildInfo.platform.transform.rotation;
            GameControl.SelectTower(sampleTowerList[ID]);
            sampleTowerList[ID].thisObj.SetActive(true);
        }

        public static void ClearSampleTower() { instance._ClearSampleTower(); }
        public void _ClearSampleTower() {
            if (currentSampleID < 0) return;
            sampleTowerList[currentSampleID].thisObj.SetActive(false);
            GameControl.ClearSelectedTower();
            currentSampleID = -1;
        }

        public static BuildInfo GetBuildInfo() { return buildInfo; }
        public static int GetTowerListCount() { return instance.towerList.Count; }
        public static List<UnitTower> GetTowerList() { return instance.towerList; }

        // Finds a buildable tower by prefabID; null if not in towerList.
        public static UnitTower GetTower(int ID) {
            foreach (UnitTower tower in instance.towerList) {
                if (tower.prefabID == ID) return tower;
            }
            return null;
        }

        public static float GetGridSize() { return _gridSize; }

        // init beermap and nodemap
        // Walkability grid and the platform object occupying each cell.
        // NOTE(review): the 39x36 dimensions are hard-coded to this level's
        // platform layout — confirm against the scene before reusing.
        private bool[,] beerMap = new bool[39, 36];
        private Object[,] nodeMap = new Object[39, 36];
        PathFindingParameters parameters;
        PathFinder pf;
        private List<Point> lastPathOne;
        private List<Point> lastPathTwo;
        Point pt_spawnOne = new Point(0, 0);
        Point pt_spawnTwo = new Point(0, 0);
        Point pt_goal = new Point(0, 0);
        float platform_min_z = 0;
        float platform_min_x = 0;
        float platform_max_z = 0;

        // Builds beerMap/nodeMap from tagged platform objects (world position
        // divided by the 2-unit cell pitch), records the two spawn points and
        // the goal, then computes the initial global paths.
        public void InitPathFinder() {
            GameObject[] platformsMain = GameObject.FindGameObjectsWithTag("Grid");
            // First pass: find the map extents so positions can be indexed.
            foreach (GameObject item in platformsMain) {
                if (item.transform.position.z < platform_min_z) platform_min_z = item.transform.position.z;
                else if (item.transform.position.z > platform_max_z) platform_max_z = item.transform.position.z;
                if (item.transform.position.x < platform_min_x) platform_min_x = item.transform.position.x;
            }
            // Second pass: mark every grid platform walkable (z shifted by 1
            // to leave row 0 for the goal platforms).
            foreach (GameObject item in platformsMain) {
                int index_z = (int)Mathf.Round(item.transform.position.z - platform_min_z) / 2;
                int index_x = (int)Mathf.Round(item.transform.position.x - platform_min_x) / 2;
                beerMap[index_x, index_z + 1] = true;
                nodeMap[index_x, index_z + 1] = item;
            }

            // Spawn Platforms (row 35); x < 0 is spawn one, otherwise spawn two.
            GameObject[] platformsStart = GameObject.FindGameObjectsWithTag("StartPlatform");
            foreach (GameObject item in platformsStart) {
                int index_x = (int)Mathf.Round(item.transform.position.x - platform_min_x) / 2;
                beerMap[index_x, 35] = true;
                nodeMap[index_x, 35] = item;
                if (item.transform.position.x < 0f) pt_spawnOne = new Point(index_x, 35);
                else pt_spawnTwo = new Point(index_x, 35);
            }

            // Goal Platforms (row 0).
            GameObject[] platformsGoal = GameObject.FindGameObjectsWithTag("GoalPlatform");
            foreach (GameObject item in platformsGoal) {
                int index_x = (int)Mathf.Round(item.transform.position.x - platform_min_x) / 2;
                beerMap[index_x, 0] = true;
                nodeMap[index_x, 0] = item;
                pt_goal = new Point(index_x, 0);
            }

            //string msg = "";
            //for (int x = 0; x < 39; x++)
            //{
            //    for (int z = 0; z < 36; z++)
            //    {
            //        if (beerMap[x, z] == true)
            //            msg += "*";
            //        else
            //            msg += "-";
            //    }
            //    msg += "\r\n";
            //}
            //Debug.Log(msg);

            GenerateGlobalPaths();
        }

        // Recomputes both creep paths (spawn one/two -> goal), replacing any
        // existing PathTD objects. Returns false when either path is empty
        // (i.e. the goal is unreachable); otherwise pushes the new paths into
        // the SpawnManager and returns true.
        public bool GenerateGlobalPaths() {
            // Delete the old paths from the previous calculations
            Object[] existingPaths = GameObject.FindObjectsOfType(typeof(PathTD));
            foreach (PathTD item in existingPaths) {
                Destroy(item.gameObject);
            }

            parameters = new PathFindingParameters(pt_spawnOne, pt_goal, beerMap, nodeMap);
            pf = new PathFinder(parameters);
            Transform spawnOneTf = (nodeMap[pt_spawnOne.X, pt_spawnOne.Y] as GameObject).transform;
            PathTD pathOne = pf.FindPathTD(spawnOneTf, "GlobalPathOne");
            lastPathOne = pf.getLastPath();

            parameters = new PathFindingParameters(pt_spawnTwo, pt_goal, beerMap, nodeMap);
            pf = new PathFinder(parameters);
            Transform spawnTwoTf = (nodeMap[pt_spawnTwo.X, pt_spawnTwo.Y] as GameObject).transform;
            PathTD pathTwo = pf.FindPathTD(spawnTwoTf, "GlobalPathTwo");
            lastPathTwo = pf.getLastPath();

            if (pathOne.wpList.Count == 0 || pathTwo.wpList.Count == 0) {
                return false;
            }

            // TODO better encapsulation from the SpawnManager
            // and also check the initialization of the SpawnManager
            SpawnManager.instance.defaultPath = pathOne;
            SpawnManager.instance.waveGenerator.pathList.Clear();
            SpawnManager.instance.waveGenerator.pathList.Add(pathOne);
            SpawnManager.instance.waveGenerator.pathList.Add(pathTwo);

            return true;
        }

        // Updates beerMap when a tower is built on (or removed from, when
        // destroy is true) a platform cell.
        // NOTE(review): this method is truncated at the end of this chunk; the
        // remainder of its body lies outside the visible source.
        public bool UpdatePathMaps(UnitTower tow, bool destroy = false) {
            int index_z = (int)Mathf.Round(tow.Platform.transform.position.z - platform_min_z) / 2;
            int index_x = (int)Mathf.Round(tow.Platform.transform.position.x - platform_min_x) / 2;
            if (index_z < 0) return
true; beerMap[index_x, index_z + 1] = destroy; // false -> new tower build there // Check if the last paths are still walkable // exit if true if (lastPathOne.Count >= 0 && lastPathTwo.Count >= 0) { bool lastPathOK = true; for (int i = 0; i < lastPathOne.Count; i++) { if (beerMap[lastPathOne[i].X, lastPathOne[i].Y] == false) lastPathOK = false; } for (int i = 0; i < lastPathTwo.Count; i++) { if (beerMap[lastPathTwo[i].X, lastPathTwo[i].Y] == false) lastPathOK = false; } if (lastPathOK) return true; } if (!GenerateGlobalPaths()) { beerMap[index_x, index_z + 1] = !destroy; GenerateGlobalPaths(); return false; } // TODO position of this SpawnManager functions ? // Here becazse GenerateGlobalPaths is called at startup // Waves not generated at that time for (int i = 0; i < SpawnManager.instance.waveList.Count; i++) { SpawnManager.instance.waveGenerator.UpdateWavePath(SpawnManager.instance.waveList[i]); } UnitCreep[] creeps = ObjectPoolManager.FindObjectsOfType<UnitCreep>(); foreach (UnitCreep creep in creeps) { // Problem with creeps not in the maze -> between start and normal platforms // or between normal platforms and goal // No new Path, if creep is already past the last tower row if (creep.transform.position.z > platform_min_z) { int c_z; if (creep.transform.position.z > platform_max_z) c_z = 34; else c_z = Mathf.CeilToInt(creep.transform.position.z - platform_min_z) / 2; // Changed to CeilToInt and not round int c_x = (int)Mathf.Round(creep.transform.position.x - platform_min_x) / 2; parameters = new PathFindingParameters(new Point(c_x, c_z), pt_goal, beerMap, nodeMap); pf = new PathFinder(parameters); PathTD pt = pf.FindPathTD(creep.transform, "CreepCustomPath"); creep.SetNewPath(pt); } } //Object[] towers = GameObject.FindObjectsOfType(typeof(UnitTower)); //Object[] platforms = GameObject.FindObjectsOfType(typeof(PlatformTD)); //Object[] paths = GameObject.FindObjectsOfType(typeof(PathTD)); //SpawnManager.ChangeDefaultPath(paths[0] as PathTD); return true; } } }
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Threading.Tasks;
using LibGit2Sharp;
using NuKeeper.Abstractions;
using NuKeeper.Abstractions.CollaborationModels;
using NuKeeper.Abstractions.Git;
using NuKeeper.Abstractions.Inspections.Files;
using NuKeeper.Abstractions.Logging;
using GitCommands = LibGit2Sharp.Commands;
using Repository = LibGit2Sharp.Repository;

namespace NuKeeper.Git
{
    /// <summary>
    /// IGitDriver implementation backed by LibGit2Sharp. Each operation opens
    /// a fresh <see cref="Repository"/> over <see cref="WorkingFolder"/> and is
    /// wrapped in Task.Run, because the underlying LibGit2Sharp calls are synchronous.
    /// </summary>
    public class LibGit2SharpDriver : IGitDriver
    {
        private readonly INuKeeperLogger _logger;
        // Credentials handed to every network operation (clone/push).
        private readonly Credentials _gitCredentials;
        // Commit identity from the user profile; null falls back to .gitconfig.
        private readonly Identity _identity;
        // Set once the clone transfer reports all objects received, to stop
        // further progress logging.
        private bool _fetchFinished;

        /// <summary>Local folder the repository is cloned into / operated on.</summary>
        public IFolder WorkingFolder { get; }

        /// <summary>
        /// Creates the driver. Throws ArgumentNullException when workingFolder
        /// or credentials are null; a null/incomplete user only disables the
        /// explicit commit identity (see <see cref="GetUserIdentity"/>).
        /// </summary>
        public LibGit2SharpDriver(INuKeeperLogger logger, IFolder workingFolder,
            GitUsernamePasswordCredentials credentials, User user)
        {
            if (workingFolder == null)
            {
                throw new ArgumentNullException(nameof(workingFolder));
            }

            if (credentials == null)
            {
                throw new ArgumentNullException(nameof(credentials));
            }

            _logger = logger;
            WorkingFolder = workingFolder;
            _gitCredentials = new UsernamePasswordCredentials
            { Password = credentials.Password, Username = credentials.Username };
            _identity = GetUserIdentity(user);
        }

        /// <summary>Clones the default branch of the remote.</summary>
        public async Task Clone(Uri pullEndpoint)
        {
            await Clone(pullEndpoint, null);
        }

        /// <summary>
        /// Clones the given branch (or the remote default when branchName is
        /// null) into WorkingFolder, reporting transfer progress to the log.
        /// </summary>
        public Task Clone(Uri pullEndpoint, string branchName)
        {
            return Task.Run(() =>
            {
                _logger.Normal($"Git clone {pullEndpoint}, branch {branchName ?? "default"}, to {WorkingFolder.FullPath}");
                Repository.Clone(pullEndpoint.AbsoluteUri, WorkingFolder.FullPath,
                    new CloneOptions
                    {
                        CredentialsProvider = UsernamePasswordCredentials,
                        OnTransferProgress = OnTransferProgress,
                        BranchName = branchName
                    });
                _logger.Detailed("Git clone complete");
            });
        }

        // Logs transfer progress roughly every 10% of total objects; returning
        // true tells LibGit2Sharp to continue the transfer.
        private bool OnTransferProgress(TransferProgress progress)
        {
            if (progress.ReceivedObjects % (Math.Max(progress.TotalObjects / 10, 1)) == 0 && !_fetchFinished)
            {
                _logger.Detailed($"{progress.ReceivedObjects} / {progress.TotalObjects}");
                _fetchFinished = progress.ReceivedObjects == progress.TotalObjects;
            }

            return true;
        }

        /// <summary>Registers a named remote pointing at the given endpoint.</summary>
        public Task AddRemote(string name, Uri endpoint)
        {
            return Task.Run(() =>
            {
                using (var repo = MakeRepo())
                {
                    repo.Network.Remotes.Add(name, endpoint.AbsoluteUri);
                }
            });
        }

        /// <summary>
        /// Checks out an existing local branch; throws NuKeeperException when
        /// no local branch of that name exists.
        /// </summary>
        public Task Checkout(string branchName)
        {
            return Task.Run(() =>
            {
                _logger.Detailed($"Git checkout '{branchName}'");
                using (var repo = MakeRepo())
                {
                    if (BranchExists(branchName))
                    {
                        _logger.Normal($"Git checkout local branch '{branchName}'");
                        GitCommands.Checkout(repo, repo.Branches[branchName]);
                    }
                    else
                    {
                        throw new NuKeeperException(
                            $"Git Cannot checkout branch: the branch named '{branchName}' doesn't exist");
                    }
                }
            });
        }

        /// <summary>
        /// Creates a local branch from origin/&lt;branchName&gt;, sets it to track
        /// the remote branch, and checks it out. Throws when the remote branch
        /// is missing or a local branch of that name already exists.
        /// </summary>
        public Task CheckoutRemoteToLocal(string branchName)
        {
            return Task.Run(() =>
            {
                var qualifiedBranchName = "origin/" + branchName;
                _logger.Detailed($"Git checkout '{qualifiedBranchName}'");
                using (var repo = MakeRepo())
                {
                    if (!BranchExists(qualifiedBranchName))
                    {
                        throw new NuKeeperException(
                            $"Git Cannot checkout branch: the branch named '{qualifiedBranchName}' doesn't exist");
                    }

                    if (BranchExists(branchName))
                    {
                        throw new NuKeeperException(
                            $"Git Cannot checkout branch '{qualifiedBranchName}' to '{branchName}': the branch named '{branchName}' does already exist");
                    }

                    _logger.Normal($"Git checkout existing branch '{qualifiedBranchName}' to '{branchName}'");
                    // Get a reference on the remote tracking branch
                    var trackedBranch = repo.Branches[qualifiedBranchName];
                    // Create a local branch pointing at the same Commit
                    var branch = repo.CreateBranch(branchName, trackedBranch.Tip);
                    // Configure the local branch to track the remote one.
                    repo.Branches.Update(branch, b => b.TrackedBranch = trackedBranch.CanonicalName);
                    // go to the just created branch
                    // NOTE(review): Checkout returns a Task that is not awaited
                    // here, so the checkout may still be in flight when this
                    // Task completes — confirm this is intentional.
                    Checkout(branchName);
                }
            });
        }

        /// <summary>
        /// Creates and checks out a new local branch. Returns false (without
        /// creating anything) when origin/&lt;branchName&gt; already exists.
        /// </summary>
        public Task<bool> CheckoutNewBranch(string branchName)
        {
            return Task.Run(() =>
            {
                _logger.Detailed($"Git checkout new branch '{branchName}'");
                var qualifiedBranchName = "origin/" + branchName;
                if (BranchExists(qualifiedBranchName))
                {
                    _logger.Normal($"Git Cannot checkout new branch: a branch named '{qualifiedBranchName}' already exists");
                    return false;
                }

                _logger.Detailed($"Git checkout new branch '{branchName}'");
                using (var repo = MakeRepo())
                {
                    var branch = repo.CreateBranch(branchName);
                    GitCommands.Checkout(repo, branch);
                }

                return true;
            });
        }

        // Case-sensitive match against branch friendly names (covers both
        // local names and "origin/..." remote-tracking names).
        private bool BranchExists(string branchName)
        {
            using (var repo = MakeRepo())
            {
                var branchFound = repo.Branches.Any(
                    br => string.Equals(br.FriendlyName, branchName, StringComparison.Ordinal));
                return branchFound;
            }
        }

        /// <summary>Stages everything and commits with the resolved signature.</summary>
        public Task Commit(string message)
        {
            return Task.Run(() =>
            {
                _logger.Detailed($"Git commit with message '{message}'");
                using (var repo = MakeRepo())
                {
                    var signature = GetSignature(repo);
                    GitCommands.Stage(repo, "*");
                    repo.Commit(message, signature, signature);
                }
            });
        }

        // Prefers the identity built from the platform user profile; otherwise
        // falls back to the repo/.gitconfig signature, throwing if neither exists.
        private Signature GetSignature(Repository repo)
        {
            if (_identity != null)
            {
                return new Signature(_identity, DateTimeOffset.Now);
            }

            var repoSignature = repo.Config.BuildSignature(DateTimeOffset.Now);

            if (repoSignature == null)
            {
                throw new NuKeeperException(
                    "Failed to build signature, did not get valid git user identity from token or from repo config");
            }

            return repoSignature;
        }

        /// <summary>
        /// Pushes the local branch to the named remote, first wiring up the
        /// branch's remote and upstream configuration.
        /// </summary>
        public Task Push(string remoteName, string branchName)
        {
            return Task.Run(() =>
            {
                _logger.Detailed($"Git push to {remoteName}/{branchName}");

                using (var repo = MakeRepo())
                {
                    var localBranch = repo.Branches
                        .Single(b => b.CanonicalName.EndsWith(branchName, StringComparison.OrdinalIgnoreCase) && !b.IsRemote);
                    var remote = repo.Network.Remotes
                        .Single(r => r.Name.EndsWith(remoteName, StringComparison.OrdinalIgnoreCase));

                    repo.Branches.Update(localBranch,
                        b => b.Remote = remote.Name,
                        b => b.UpstreamBranch = localBranch.CanonicalName);

                    repo.Network.Push(localBranch, new PushOptions
                    {
                        CredentialsProvider = UsernamePasswordCredentials
                    });
                }
            });
        }

        /// <summary>Returns the friendly name of the branch HEAD points at.</summary>
        public Task<string> GetCurrentHead()
        {
            return Task.Run(() =>
            {
                using (var repo = MakeRepo())
                {
                    return repo.Branches.Single(b => b.IsCurrentRepositoryHead).FriendlyName;
                }
            });
        }

        // A new Repository handle per operation; callers dispose via using.
        private Repository MakeRepo()
        {
            return new Repository(WorkingFolder.FullPath);
        }

        // CredentialsHandler callback: always answers with the stored
        // username/password regardless of url or requested type.
        private Credentials UsernamePasswordCredentials(
            string url, string usernameFromUrl,
            SupportedCredentialTypes types)
        {
            return _gitCredentials;
        }

        // Builds a commit identity from the user profile; returns null (and
        // logs) when name or email is missing so .gitconfig is used instead.
        private Identity GetUserIdentity(User user)
        {
            if (string.IsNullOrWhiteSpace(user?.Name))
            {
                _logger.Minimal("User name missing from profile, falling back to .gitconfig");
                return null;
            }

            if (string.IsNullOrWhiteSpace(user?.Email))
            {
                _logger.Minimal("Email missing from profile, falling back to .gitconfig");
                return null;
            }

            return new Identity(user.Name, user.Email);
        }

        /// <summary>
        /// Returns the short messages (trailing CR/LF trimmed) of commits
        /// reachable from headBranchName but not from baseBranchName, newest
        /// first. Throws when either branch does not exist.
        /// </summary>
        public Task<IReadOnlyCollection<string>> GetNewCommitMessages(string baseBranchName, string headBranchName)
        {
            return Task.Run(() =>
            {
                if (!BranchExists(baseBranchName))
                {
                    throw new NuKeeperException(
                        $"Git Cannot compare branches: the branch named '{baseBranchName}' doesn't exist");
                }

                if (!BranchExists(headBranchName))
                {
                    throw new NuKeeperException(
                        $"Git Cannot compare branches: the branch named '{headBranchName}' doesn't exist");
                }

                using (var repo = MakeRepo())
                {
                    var baseBranch = repo.Branches[baseBranchName];
                    var headBranch = repo.Branches[headBranchName];
                    var filter = new CommitFilter
                    {
                        SortBy = CommitSortStrategies.Time,
                        ExcludeReachableFrom = baseBranch,
                        IncludeReachableFrom = headBranch
                    };
                    return (IReadOnlyCollection<string>)repo.Commits.QueryBy(filter)
                        .Select(c => c.MessageShort.TrimEnd(new[] { '\r', '\n' })).ToList().AsReadOnly();
                }
            });
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Linq;
using Hyak.Common;
using Microsoft.Azure.Gallery;

namespace Microsoft.Azure.Gallery
{
    /// <summary>
    /// Data model for a single Azure gallery item: backing fields with
    /// get/set property pairs, plus a constructor that pre-populates all
    /// collection properties with lazy containers.
    /// </summary>
    public partial class GalleryItem
    {
        private IList<Artifact> _artifacts;

        /// <summary>
        /// Optional. Gets or sets gallery item artifacts.
        /// </summary>
        public IList<Artifact> Artifacts
        {
            get { return this._artifacts; }
            set { this._artifacts = value; }
        }

        private IList<string> _categories;

        /// <summary>
        /// Optional. Gets or sets gallery item category identifiers.
        /// </summary>
        public IList<string> Categories
        {
            get { return this._categories; }
            set { this._categories = value; }
        }

        private string _description;

        /// <summary>
        /// Optional. Gets or sets gallery item description.
        /// </summary>
        public string Description
        {
            get { return this._description; }
            set { this._description = value; }
        }

        private string _displayName;

        /// <summary>
        /// Optional. Gets or sets gallery item display name.
        /// </summary>
        public string DisplayName
        {
            get { return this._displayName; }
            set { this._displayName = value; }
        }

        private IList<Filter> _filters;

        /// <summary>
        /// Optional. Gets or sets gallery item filters.
        /// </summary>
        public IList<Filter> Filters
        {
            get { return this._filters; }
            set { this._filters = value; }
        }

        private IDictionary<string, string> _iconFileUris;

        /// <summary>
        /// Optional. Gets or sets gallery item icon file Uris.
        /// </summary>
        public IDictionary<string, string> IconFileUris
        {
            get { return this._iconFileUris; }
            set { this._iconFileUris = value; }
        }

        private string _identity;

        /// <summary>
        /// Optional. Gets or sets gallery item identity.
        /// </summary>
        public string Identity
        {
            get { return this._identity; }
            set { this._identity = value; }
        }

        private IList<Link> _links;

        /// <summary>
        /// Optional. Gets or sets gallery item links.
        /// </summary>
        public IList<Link> Links
        {
            get { return this._links; }
            set { this._links = value; }
        }

        private string _longSummary;

        /// <summary>
        /// Optional. Gets or sets gallery item long summary.
        /// </summary>
        public string LongSummary
        {
            get { return this._longSummary; }
            set { this._longSummary = value; }
        }

        private MarketingMaterial _marketingMaterial;

        /// <summary>
        /// Optional. Gets or sets gallery item marketing information.
        /// </summary>
        public MarketingMaterial MarketingMaterial
        {
            get { return this._marketingMaterial; }
            set { this._marketingMaterial = value; }
        }

        private IDictionary<string, string> _metadata;

        /// <summary>
        /// Optional. Gets or sets gallery item metadata.
        /// </summary>
        public IDictionary<string, string> Metadata
        {
            get { return this._metadata; }
            set { this._metadata = value; }
        }

        private string _name;

        /// <summary>
        /// Optional. Gets or sets gallery item name.
        /// </summary>
        public string Name
        {
            get { return this._name; }
            set { this._name = value; }
        }

        private IList<Product> _products;

        /// <summary>
        /// Optional. Gets or sets gallery item product definition.
        /// </summary>
        public IList<Product> Products
        {
            get { return this._products; }
            set { this._products = value; }
        }

        private IDictionary<string, string> _properties;

        /// <summary>
        /// Optional. Gets or sets gallery item user visible properties.
        /// </summary>
        public IDictionary<string, string> Properties
        {
            get { return this._properties; }
            set { this._properties = value; }
        }

        private string _publisher;

        /// <summary>
        /// Optional. Gets or sets gallery item publisher.
        /// </summary>
        public string Publisher
        {
            get { return this._publisher; }
            set { this._publisher = value; }
        }

        private string _publisherDisplayName;

        /// <summary>
        /// Optional. Gets or sets gallery item publisher display name.
        /// </summary>
        public string PublisherDisplayName
        {
            get { return this._publisherDisplayName; }
            set { this._publisherDisplayName = value; }
        }

        private IList<string> _screenshotUris;

        /// <summary>
        /// Optional. Gets or sets gallery item screenshot Uris.
        /// </summary>
        public IList<string> ScreenshotUris
        {
            get { return this._screenshotUris; }
            set { this._screenshotUris = value; }
        }

        private string _summary;

        /// <summary>
        /// Optional. Gets or sets gallery item summary.
        /// </summary>
        public string Summary
        {
            get { return this._summary; }
            set { this._summary = value; }
        }

        private string _uiDefinitionUri;

        /// <summary>
        /// Optional. Gets or sets Azure Portal User Interface Definition
        /// artifact Uri.
        /// </summary>
        public string UiDefinitionUri
        {
            get { return this._uiDefinitionUri; }
            set { this._uiDefinitionUri = value; }
        }

        private string _version;

        /// <summary>
        /// Optional. Gets or sets gallery item version.
        /// </summary>
        public string Version
        {
            get { return this._version; }
            set { this._version = value; }
        }

        /// <summary>
        /// Initializes a new instance of the GalleryItem class.
        /// Collection-typed properties start as empty lazy containers so
        /// callers can add to them without null checks.
        /// </summary>
        public GalleryItem()
        {
            this.Artifacts = new LazyList<Artifact>();
            this.Categories = new LazyList<string>();
            this.Filters = new LazyList<Filter>();
            this.IconFileUris = new LazyDictionary<string, string>();
            this.Links = new LazyList<Link>();
            this.Metadata = new LazyDictionary<string, string>();
            this.Products = new LazyList<Product>();
            this.Properties = new LazyDictionary<string, string>();
            this.ScreenshotUris = new LazyList<string>();
        }
    }
}
// -------------------------------------
// Domain : Avariceonline.com
// Author : Nicholas Ventimiglia
// Product : Unity3d Foundation
// Published : 2015
// -------------------------------------
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.UI;

namespace Foundation.Debuging.Internal
{
    /// <summary>
    /// renders the Terminal using new 4.6 uGUI
    /// </summary>
    [AddComponentMenu("Foundation/Terminal/TerminalView")]
    public class TerminalView : MonoBehaviour
    {
        /// <summary>
        /// Option
        /// </summary>
        public bool DoDontDestoryOnLoad = true;

        /// <summary>
        /// For Hide / show
        /// </summary>
        public GameObject DisplayRoot;

        /// <summary>
        /// For Input
        /// </summary>
        public InputField TextInput;

        /// <summary>
        /// Button View Template
        /// </summary>
        public TerminalCommandView CommandPrototype;

        /// <summary>
        /// Log View Template
        /// </summary>
        public TerminalItemView ItemPrototype;

        /// <summary>
        /// Parent for Command Views
        /// </summary>
        public Transform CommandLayout;

        /// <summary>
        /// Parent for Log Views
        /// </summary>
        public Transform ItemLayout;

        /// <summary>
        /// Snap to bottom
        /// </summary>
        public Scrollbar ItemScrollBar;

        // Live view instances mirroring Terminal.Instance.Commands / Items.
        [HideInInspector]
        public List<TerminalCommandView> CommandItems = new List<TerminalCommandView>();

        [HideInInspector]
        public List<TerminalItemView> TextItems = new List<TerminalItemView>();

        /// <summary>Shows/hides the terminal by toggling DisplayRoot.</summary>
        public bool IsVisible
        {
            get { return DisplayRoot.activeSelf; }
            set { DisplayRoot.SetActive(value); }
        }

        public KeyCode VisiblityKey = KeyCode.BackQuote;

        public Color LogColor = Color.white;
        public Color WarningColor = Color.yellow;
        public Color ErrorColor = Color.red;
        public Color SuccessColor = Color.green;
        public Color InputColor = Color.cyan;
        public Color ImportantColor = Color.yellow;

        /// <summary>
        /// Pushes the configured colors into the Terminal singleton, hides the
        /// view prototypes, subscribes to Terminal and Unity log events,
        /// replays any pre-existing items/commands, and (optionally) marks the
        /// root object as persistent across scene loads.
        /// </summary>
        protected void Awake()
        {
            // Display
            Terminal.Instance.LogColor = LogColor;
            Terminal.Instance.WarningColor = WarningColor;
            Terminal.Instance.ErrorColor = ErrorColor;
            Terminal.Instance.SuccessColor = SuccessColor;
            Terminal.Instance.InputColor = InputColor;
            Terminal.Instance.ImportantColor = ImportantColor;

            //Hide prototypes
            CommandPrototype.gameObject.SetActive(false);
            ItemPrototype.gameObject.SetActive(false);

            //wire
            Terminal.Instance.Items.OnAdd += Items_OnAdd;
            Terminal.Instance.Items.OnClear += Items_OnClear;
            Terminal.Instance.Items.OnRemove += Items_OnRemove;
            Terminal.Instance.Commands.OnAdd += Commands_OnAdd;
            Terminal.Instance.Commands.OnClear += Commands_OnClear;
            Terminal.Instance.Commands.OnRemove += Commands_OnRemove;

            //add items preadded
            foreach (var item in Terminal.Instance.Items)
            {
                Items_OnAdd(item);
            }
            foreach (var item in Terminal.Instance.Commands)
            {
                Commands_OnAdd(item);
            }

            Application.logMessageReceived += HandlerLog;

            if (DoDontDestoryOnLoad)
            {
                // DontDestroyOnLoad only persists root objects, so walk up to
                // the top-most transform first.
                // BUGFIX: was 't = transform.parent', which never advances past
                // the immediate parent and spins forever when this object is
                // nested more than one level deep.
                var t = transform;
                while (t.parent != null)
                {
                    t = t.parent;
                }

                DontDestroyOnLoad(t);
            }

            Debug.Log("Console Ready");
        }

        /// <summary>Unsubscribes every handler wired up in Awake.</summary>
        protected void OnDestroy()
        {
            //remove handlers
            Terminal.Instance.Items.OnAdd -= Items_OnAdd;
            Terminal.Instance.Items.OnClear -= Items_OnClear;
            Terminal.Instance.Items.OnRemove -= Items_OnRemove;
            Terminal.Instance.Commands.OnAdd -= Commands_OnAdd;
            Terminal.Instance.Commands.OnClear -= Commands_OnClear;
            Terminal.Instance.Commands.OnRemove -= Commands_OnRemove;

            Application.logMessageReceived -= HandlerLog;
        }

        // Routes Unity's log callback into the Terminal with matching severity.
        private void HandlerLog(string condition, string stackTrace, LogType type)
        {
            switch (type)
            {
                case LogType.Error:
                case LogType.Exception:
                    Terminal.LogError(condition);
                    break;
                case LogType.Warning:
                    Terminal.LogWarning(condition);
                    break;
                case LogType.Log:
                case LogType.Assert:
                    Terminal.Log(condition);
                    break;
            }
        }

        // Removes and destroys the view for a removed command.
        void Commands_OnRemove(TerminalCommand obj)
        {
            var item = CommandItems.FirstOrDefault(o => o.Model.Equals(obj));
            if (item != null)
            {
                CommandItems.Remove(item);
                Destroy(item.gameObject);
            }
        }

        // Destroys all command views.
        void Commands_OnClear()
        {
            foreach (var item in CommandItems)
            {
                Destroy(item.gameObject);
            }
            CommandItems.Clear();
        }

        // Instantiates a command button view from the prototype and parents it
        // into the command layout.
        void Commands_OnAdd(TerminalCommand obj)
        {
            //inst
            var instance = Instantiate(CommandPrototype.gameObject);
            var script = instance.GetComponent<TerminalCommandView>();
            script.Label.text = obj.Label;
            script.Handler = obj.Method;
            script.Model = obj;

            //parent
            instance.transform.SetParent(CommandLayout);
            // Re-parenting under a canvas can mangle scale; reset it.
            instance.transform.localScale = new Vector3(1, 1, 1);
            instance.SetActive(true);
            CommandItems.Add(script);
        }

        // Removes and destroys the view for a removed log item.
        void Items_OnRemove(TerminalItem obj)
        {
            var item = TextItems.FirstOrDefault(o => o.Model.Equals(obj));
            if (item != null)
            {
                TextItems.Remove(item);
                Destroy(item.gameObject);
            }
        }

        // Destroys all log item views.
        void Items_OnClear()
        {
            foreach (var item in TextItems)
            {
                Destroy(item.gameObject);
            }
            TextItems.Clear();
        }

        void Items_OnAdd(TerminalItem obj)
        {
            StartCoroutine(AddItemAsync(obj));
        }

        // Adds the item view, then (one frame later) snaps the scrollbar to
        // the bottom so the newest entry is visible.
        IEnumerator AddItemAsync(TerminalItem obj)
        {
            //inst
            var instance = Instantiate(ItemPrototype.gameObject);
            var script = instance.GetComponent<TerminalItemView>();
            script.Label.text = obj.Text;
            script.Label.color = obj.Color;
            script.Model = obj;

            //parent
            instance.transform.SetParent(ItemLayout);
            instance.SetActive(true);
            // Re-parenting under a canvas can mangle scale; reset it.
            instance.transform.localScale = new Vector3(1, 1, 1);
            TextItems.Add(script);

            yield return 1;

            if (ItemScrollBar)
                ItemScrollBar.value = 0;
        }

        /// <summary>Toggles visibility on the configured hotkey.</summary>
        protected void Update()
        {
            if (Input.GetKeyUp(VisiblityKey))
            {
                IsVisible = !IsVisible;
            }
        }

        /// <summary>Submits the input field's text to the Terminal and clears it.</summary>
        public void DoSend()
        {
            var text = TextInput.text.Replace(Environment.NewLine, "");

            if (string.IsNullOrEmpty(text))
                return;

            Terminal.Submit(text);
            TextInput.text = string.Empty;
        }

        /// <summary>Clears the Terminal's item list (views follow via OnClear).</summary>
        public void DoClear()
        {
            Terminal.Clear();
        }
    }
}
using Cinemachine.Utility; using System; using System.Collections.Generic; using UnityEngine; namespace Cinemachine { /// <summary> /// This is a virtual camera "manager" that owns and manages a collection /// of child Virtual Cameras. These child vcams are mapped to individual states in /// an animation state machine, allowing you to associate specific vcams to specific /// animation states. When that state is active in the state machine, then the /// associated camera will be activated. /// /// You can define custom blends and transitions between child cameras. /// /// In order to use this behaviour, you must have an animated target (i.e. an object /// animated with a state machine) to drive the behaviour. /// </summary> [DocumentationSorting(13, DocumentationSortingAttribute.Level.UserRef)] [ExecuteInEditMode, DisallowMultipleComponent] [AddComponentMenu("Cinemachine/CinemachineStateDrivenCamera")] public class CinemachineStateDrivenCamera : CinemachineVirtualCameraBase { /// <summary>Default object for the camera children to look at (the aim target), if not specified in a child rig. May be empty</summary> [Tooltip("Default object for the camera children to look at (the aim target), if not specified in a child camera. May be empty if all of the children define targets of their own.")] public Transform m_LookAt = null; /// <summary>Default object for the camera children wants to move with (the body target), if not specified in a child rig. May be empty</summary> [Tooltip("Default object for the camera children wants to move with (the body target), if not specified in a child camera. 
May be empty if all of the children define targets of their own.")] public Transform m_Follow = null; /// <summary>When enabled, the current camera and blend will be indicated in the game window, for debugging</summary> [Tooltip("When enabled, the current child camera and blend will be indicated in the game window, for debugging")] public bool m_ShowDebugText = false; /// <summary>Force all child cameras to be enabled. This is useful if animating them in Timeline, but consumes extra resources.</summary> [Tooltip("Force all child cameras to be enabled. This is useful if animating them in Timeline, but consumes extra resources")] public bool m_EnableAllChildCameras; // This is just for the inspector editor. // Probably it can be implemented without this serialized property [HideInInspector][NoSaveDuringPlay] public CinemachineVirtualCameraBase[] m_ChildCameras = null; /// <summary>The state machine whose state changes will drive this camera's choice of active child</summary> [Tooltip("The state machine whose state changes will drive this camera's choice of active child")] public Animator m_AnimatedTarget; /// <summary>Which layer in the target FSM to observe</summary> [Tooltip("Which layer in the target state machine to observe")] public int m_LayerIndex; /// <summary>This represents a single instrunction to the StateDrivenCamera. It associates /// an state from the state machine with a child Virtual Camera, and also holds /// activation tuning parameters.</summary> [Serializable] public struct Instruction { [Tooltip("The full hash of the animation state")] public int m_FullHash; [Tooltip("The virtual camera to activate whrn the animation state becomes active")] public CinemachineVirtualCameraBase m_VirtualCamera; [Tooltip("How long to wait (in seconds) before activating the virtual camera. 
This filters out very short state durations")] public float m_ActivateAfter; [Tooltip("The minimum length of time (in seconds) to keep a virtual camera active")] public float m_MinDuration; }; [Tooltip("The set of instructions associating virtual cameras with states. These instructions are used to choose the live child at any given moment")] public Instruction[] m_Instructions; /// <summary> /// The blend which is used if you don't explicitly define a blend between two Virtual Camera children. /// </summary> [CinemachineBlendDefinitionProperty] [Tooltip("The blend which is used if you don't explicitly define a blend between two Virtual Camera children")] public CinemachineBlendDefinition m_DefaultBlend = new CinemachineBlendDefinition(CinemachineBlendDefinition.Style.EaseInOut, 0.5f); /// <summary> /// This is the asset which contains custom settings for specific child blends. /// </summary> [HideInInspector] [Tooltip("This is the asset which contains custom settings for specific child blends")] public CinemachineBlenderSettings m_CustomBlends = null; /// <summary>Internal API for the Inspector editor. This implements nested states.</summary> [Serializable] [DocumentationSorting(13.2f, DocumentationSortingAttribute.Level.Undoc)] public struct ParentHash { public int m_Hash; public int m_ParentHash; public ParentHash(int h, int p) { m_Hash = h; m_ParentHash = p; } } [HideInInspector][SerializeField] public ParentHash[] m_ParentHash = null; /// <summary>Get the current "best" child virtual camera, that would be chosen /// if the State Driven Camera were active.</summary> public ICinemachineCamera LiveChild { set; get; } /// <summary>Return the live child.</summary> public override ICinemachineCamera LiveChildOrSelf { get { return LiveChild; } } /// <summary>The State of the current live child</summary> public override CameraState State { get { return m_State; } } /// <summary>Get the current LookAt target. 
Returns parent's LookAt if parent /// is non-null and no specific LookAt defined for this camera</summary> override public Transform LookAt { get { return ResolveLookAt(m_LookAt); } set { if (m_LookAt != value) PreviousStateInvalid = true; m_LookAt = value; } } /// <summary>Get the current Follow target. Returns parent's Follow if parent /// is non-null and no specific Follow defined for this camera</summary> override public Transform Follow { get { return ResolveFollow(m_Follow); } set { if (m_Follow != value) PreviousStateInvalid = true; m_Follow = value; } } /// <summary>Remove a Pipeline stage hook callback. /// Make sure it is removed from all the children.</summary> /// <param name="d">The delegate to remove.</param> public override void RemovePostPipelineStageHook(OnPostPipelineStageDelegate d) { base.RemovePostPipelineStageHook(d); UpdateListOfChildren(); foreach (var vcam in m_ChildCameras) vcam.RemovePostPipelineStageHook(d); } /// <summary>Called by CinemachineCore at designated update time /// so the vcam can position itself and track its targets. 
This implementation /// updates all the children, chooses the best one, and implements any required blending.</summary> /// <param name="worldUp">Default world Up, set by the CinemachineBrain</param> /// <param name="deltaTime">Delta time for time-based effects (ignore if less than or equal to 0)</param> public override void UpdateCameraState(Vector3 worldUp, float deltaTime) { if (PreviousStateInvalid) deltaTime = -1; PreviousStateInvalid = false; UpdateListOfChildren(); CinemachineVirtualCameraBase best = ChooseCurrentCamera(deltaTime); if (m_ChildCameras != null) { foreach (CinemachineVirtualCameraBase vcam in m_ChildCameras) { if (vcam != null) { vcam.gameObject.SetActive(m_EnableAllChildCameras || vcam == best); if (vcam.VirtualCameraGameObject.activeInHierarchy) { vcam.AddPostPipelineStageHook(OnPostPipelineStage); CinemachineCore.Instance.UpdateVirtualCamera(vcam, worldUp, deltaTime); } } } } ICinemachineCamera previousCam = LiveChild; LiveChild = best; // Are we transitioning cameras? if (previousCam != null && LiveChild != null && previousCam != LiveChild) { // Create a blend (will be null if a cut) mActiveBlend = CreateBlend( previousCam, LiveChild, LookupBlendCurve(previousCam, LiveChild), mActiveBlend, deltaTime); // Notify incoming camera of transition LiveChild.OnTransitionFromCamera(previousCam); // Generate Camera Activation event if live CinemachineCore.Instance.GenerateCameraActivationEvent(LiveChild); // If cutting, generate a camera cut event if live if (mActiveBlend == null) CinemachineCore.Instance.GenerateCameraCutEvent(LiveChild); } // Advance the current blend (if any) if (mActiveBlend != null) { mActiveBlend.TimeInBlend += (deltaTime > 0) ? 
deltaTime : mActiveBlend.Duration; if (mActiveBlend.IsComplete) mActiveBlend = null; } if (mActiveBlend != null) { mActiveBlend.UpdateCameraState(worldUp, deltaTime); m_State = mActiveBlend.State; } else if (LiveChild != null) m_State = LiveChild.State; else m_State = CameraState.Default; // Push the raw position back to the game object's transform, so it // moves along with the camera. Leave the orientation alone, because it // screws up camera dragging when there is a LookAt behaviour. if (Follow != null) transform.position = State.RawPosition; } /// <summary>Makes sure the internal child cache is up to date</summary> protected override void OnEnable() { base.OnEnable(); InvalidateListOfChildren(); mActiveBlend = null; } /// <summary>Makes sure the internal child cache is up to date</summary> public void OnTransformChildrenChanged() { InvalidateListOfChildren(); } #if UNITY_EDITOR /// <summary>Displays the current active camera on the game screen, if requested</summary> protected override void OnGUI() { base.OnGUI(); if (!m_ShowDebugText) CinemachineGameWindowDebug.ReleaseScreenPos(this); else { // Show the active camera and blend ICinemachineCamera vcam = LiveChild; string text = "CM " + gameObject.name + ": "; if (mActiveBlend == null) text += (vcam != null) ? vcam.Name : "(none)"; else text += mActiveBlend.Description; Rect r = CinemachineGameWindowDebug.GetScreenPos(this, text, GUI.skin.box); GUI.Label(r, text, GUI.skin.box); } } #endif CameraState m_State = CameraState.Default; /// <summary>The list of child cameras. These are just the immediate children in the hierarchy.</summary> public CinemachineVirtualCameraBase[] ChildCameras { get { UpdateListOfChildren(); return m_ChildCameras; }} /// <summary>API for the inspector editor. 
Animation module does not have hashes /// for state parents, so we have to invent them in order to implement nested state /// handling</summary> public static string CreateFakeHashName(int parentHash, string stateName) { return parentHash.ToString() + "_" + stateName; } float mActivationTime = 0; Instruction mActiveInstruction; float mPendingActivationTime = 0; Instruction mPendingInstruction; private CinemachineBlend mActiveBlend = null; void InvalidateListOfChildren() { m_ChildCameras = null; LiveChild = null; } void UpdateListOfChildren() { if (m_ChildCameras != null && mInstructionDictionary != null && mStateParentLookup != null) return; List<CinemachineVirtualCameraBase> list = new List<CinemachineVirtualCameraBase>(); CinemachineVirtualCameraBase[] kids = GetComponentsInChildren<CinemachineVirtualCameraBase>(true); foreach (CinemachineVirtualCameraBase k in kids) if (k.transform.parent == transform) list.Add(k); m_ChildCameras = list.ToArray(); ValidateInstructions(); } private Dictionary<int, int> mInstructionDictionary; private Dictionary<int, int> mStateParentLookup; /// <summary>Internal API for the inspector editor.</summary> public void ValidateInstructions() { if (m_Instructions == null) m_Instructions = new Instruction[0]; mInstructionDictionary = new Dictionary<int, int>(); for (int i = 0; i < m_Instructions.Length; ++i) { if (m_Instructions[i].m_VirtualCamera != null && m_Instructions[i].m_VirtualCamera.transform.parent != transform) { m_Instructions[i].m_VirtualCamera = null; } mInstructionDictionary[m_Instructions[i].m_FullHash] = i; } // Create the parent lookup mStateParentLookup = new Dictionary<int, int>(); if (m_ParentHash != null) foreach (var i in m_ParentHash) mStateParentLookup[i.m_Hash] = i.m_ParentHash; // Zap the cached current instructions mActivationTime = mPendingActivationTime = 0; mActiveBlend = null; } private CinemachineVirtualCameraBase ChooseCurrentCamera(float deltaTime) { if (m_ChildCameras == null || m_ChildCameras.Length 
== 0) { mActivationTime = 0; return null; } CinemachineVirtualCameraBase defaultCam = m_ChildCameras[0]; if (m_AnimatedTarget == null || m_LayerIndex < 0 || m_LayerIndex >= m_AnimatedTarget.layerCount) { mActivationTime = 0; return defaultCam; } // Get the current state AnimatorStateInfo info = m_AnimatedTarget.GetCurrentAnimatorStateInfo(m_LayerIndex); int hash = info.fullPathHash; // Is there an animation clip substate? AnimatorClipInfo[] clips = m_AnimatedTarget.GetCurrentAnimatorClipInfo(m_LayerIndex); if (clips.Length > 1) { // Find the strongest-weighted one int bestClip = -1; for (int i = 0; i < clips.Length; ++i) if (bestClip < 0 || clips[i].weight > clips[bestClip].weight) bestClip = i; // Use its hash if (bestClip >= 0 && clips[bestClip].weight > 0) hash = Animator.StringToHash(CreateFakeHashName(hash, clips[bestClip].clip.name)); } // If we don't have an instruction for this state, find a suitable default while (hash != 0 && !mInstructionDictionary.ContainsKey(hash)) hash = mStateParentLookup.ContainsKey(hash) ? mStateParentLookup[hash] : 0; float now = Time.time; if (mActivationTime != 0) { // Is it active now? if (mActiveInstruction.m_FullHash == hash) { // Yes, cancel any pending mPendingActivationTime = 0; return mActiveInstruction.m_VirtualCamera; } // Is it pending? if (deltaTime > 0) { if (mPendingActivationTime != 0 && mPendingInstruction.m_FullHash == hash) { // Has it been pending long enough, and are we allowed to switch away // from the active action? if ((now - mPendingActivationTime) > mPendingInstruction.m_ActivateAfter && ((now - mActivationTime) > mActiveInstruction.m_MinDuration || mPendingInstruction.m_VirtualCamera.Priority > mActiveInstruction.m_VirtualCamera.Priority)) { // Yes, activate it now mActiveInstruction = mPendingInstruction; mActivationTime = now; mPendingActivationTime = 0; } return mActiveInstruction.m_VirtualCamera; } } } // Neither active nor pending. 
mPendingActivationTime = 0; // cancel the pending, if any if (!mInstructionDictionary.ContainsKey(hash)) { // No defaults set, we just ignore this state if (mActivationTime != 0) return mActiveInstruction.m_VirtualCamera; return defaultCam; } // Can we activate it now? Instruction newInstr = m_Instructions[mInstructionDictionary[hash]]; if (newInstr.m_VirtualCamera == null) newInstr.m_VirtualCamera = defaultCam; if (deltaTime > 0 && mActivationTime > 0) { if (newInstr.m_ActivateAfter > 0 || ((now - mActivationTime) < mActiveInstruction.m_MinDuration && newInstr.m_VirtualCamera.Priority <= mActiveInstruction.m_VirtualCamera.Priority)) { // Too early - make it pending mPendingInstruction = newInstr; mPendingActivationTime = now; if (mActivationTime != 0) return mActiveInstruction.m_VirtualCamera; return defaultCam; } } // Activate now mActiveInstruction = newInstr; mActivationTime = now; return mActiveInstruction.m_VirtualCamera; } private AnimationCurve LookupBlendCurve( ICinemachineCamera fromKey, ICinemachineCamera toKey) { // Get the blend curve that's most appropriate for these cameras AnimationCurve blendCurve = m_DefaultBlend.BlendCurve; if (m_CustomBlends != null) { string fromCameraName = (fromKey != null) ? fromKey.Name : string.Empty; string toCameraName = (toKey != null) ? toKey.Name : string.Empty; blendCurve = m_CustomBlends.GetBlendCurveForVirtualCameras( fromCameraName, toCameraName, blendCurve); } return blendCurve; } private CinemachineBlend CreateBlend( ICinemachineCamera camA, ICinemachineCamera camB, AnimationCurve blendCurve, CinemachineBlend activeBlend, float deltaTime) { if (blendCurve == null || blendCurve.keys.Length <= 1 || (camA == null && camB == null)) return null; if (camA == null || activeBlend != null) { // Blend from the current camera position CameraState state = (activeBlend != null) ? activeBlend.State : State; camA = new StaticPointVirtualCamera(state, (activeBlend != null) ? 
"Mid-blend" : "(none)"); } return new CinemachineBlend(camA, camB, blendCurve, 0); } } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.12.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Fixtures.MirrorRecursiveTypes
{
    using System;
    using System.Linq;
    using System.Collections.Generic;
    using System.Net;
    using System.Net.Http;
    using System.Net.Http.Headers;
    using System.Text;
    using System.Text.RegularExpressions;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;
    using Microsoft.Rest.Serialization;
    using Newtonsoft.Json;
    using Models;

    /// <summary>
    /// Some cool documentation.
    /// </summary>
    public partial class RecursiveTypesAPI : ServiceClient<RecursiveTypesAPI>, IRecursiveTypesAPI
    {
        /// <summary>
        /// The base URI of the service.
        /// </summary>
        public Uri BaseUri { get; set; }

        /// <summary>
        /// Gets or sets json serialization settings.
        /// </summary>
        public JsonSerializerSettings SerializationSettings { get; private set; }

        /// <summary>
        /// Gets or sets json deserialization settings.
        /// </summary>
        public JsonSerializerSettings DeserializationSettings { get; private set; }

        /// <summary>
        /// Initializes a new instance of the RecursiveTypesAPI class.
        /// </summary>
        public RecursiveTypesAPI() : base()
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the RecursiveTypesAPI class.
        /// </summary>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        public RecursiveTypesAPI(params DelegatingHandler[] handlers) : base(handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the RecursiveTypesAPI class.
        /// </summary>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        public RecursiveTypesAPI(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the RecursiveTypesAPI class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        public RecursiveTypesAPI(Uri baseUri, params DelegatingHandler[] handlers) : this(handlers)
        {
            if (baseUri == null)
            {
                throw new ArgumentNullException("baseUri");
            }
            this.BaseUri = baseUri;
        }

        /// <summary>
        /// Initializes client properties.
        /// </summary>
        private void Initialize()
        {
            this.BaseUri = new Uri("https://management.azure.com/");
            SerializationSettings = new JsonSerializerSettings
            {
                Formatting = Formatting.Indented,
                DateFormatHandling = DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = DateTimeZoneHandling.Utc,
                NullValueHandling = NullValueHandling.Ignore,
                ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
                ContractResolver = new ReadOnlyJsonContractResolver(),
                Converters = new List<JsonConverter>
                {
                    new Iso8601TimeSpanConverter()
                }
            };
            DeserializationSettings = new JsonSerializerSettings
            {
                DateFormatHandling = DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = DateTimeZoneHandling.Utc,
                NullValueHandling = NullValueHandling.Ignore,
                ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
                ContractResolver = new ReadOnlyJsonContractResolver(),
                Converters = new List<JsonConverter>
                {
                    new Iso8601TimeSpanConverter()
                }
            };
        }

        /// <summary>
        /// Products
        /// </summary>
        /// The Products endpoint returns information about the Uber products offered
        /// at a given location. The response includes the display name and other
        /// details about each product, and lists the products in the proper display
        /// order.
        /// <param name='subscriptionId'>
        /// Subscription Id.
        /// </param>
        /// <param name='resourceGroupName'>
        /// Resource Group Id.
        /// </param>
        /// <param name='apiVersion'>
        /// API Id.
        /// </param>
        /// <param name='body'>
        /// API body mody.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>
        /// A response object containing the response body and response headers.
        /// </returns>
        public async Task<HttpOperationResponse<Product>> PostWithHttpMessagesAsync(string subscriptionId, string resourceGroupName, string apiVersion, Product body = default(Product), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (subscriptionId == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "subscriptionId");
            }
            if (resourceGroupName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
            }
            if (apiVersion == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "apiVersion");
            }
            // Tracing
            bool shouldTrace = ServiceClientTracing.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("subscriptionId", subscriptionId);
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("apiVersion", apiVersion);
                tracingParameters.Add("body", body);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(invocationId, this, "Post", tracingParameters);
            }
            // Construct URL
            var baseUrl = this.BaseUri.AbsoluteUri;
            // Ordinal comparison: the separator check is not a linguistic comparison.
            var url = new Uri(new Uri(baseUrl + (baseUrl.EndsWith("/", StringComparison.Ordinal) ? "" : "/")), "subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/Microsoft.Cache/Redis?api-version={apiVersion}").ToString();
            url = url.Replace("{subscriptionId}", Uri.EscapeDataString(subscriptionId));
            url = url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName));
            url = url.Replace("{apiVersion}", Uri.EscapeDataString(apiVersion));
            // Create HTTP transport objects
            HttpRequestMessage httpRequest = new HttpRequestMessage();
            httpRequest.Method = new HttpMethod("POST");
            httpRequest.RequestUri = new Uri(url);
            // Set Headers
            if (customHeaders != null)
            {
                foreach (var header in customHeaders)
                {
                    if (httpRequest.Headers.Contains(header.Key))
                    {
                        httpRequest.Headers.Remove(header.Key);
                    }
                    httpRequest.Headers.TryAddWithoutValidation(header.Key, header.Value);
                }
            }
            // Serialize Request. Only attach content when a body was supplied;
            // the previous unconditional serialization sent the literal string
            // "null" as the request payload when body was omitted.
            if (body != null)
            {
                string requestContent = JsonConvert.SerializeObject(body, this.SerializationSettings);
                httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
                httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
            }
            // Send Request
            if (shouldTrace)
            {
                ServiceClientTracing.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            HttpResponseMessage httpResponse = await this.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            if (statusCode != HttpStatusCode.OK)
            {
                var ex = new HttpOperationException(string.Format("Operation returned an invalid status code '{0}'", statusCode));
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    Error errorBody = JsonConvert.DeserializeObject<Error>(responseContent, this.DeserializationSettings);
                    if (errorBody != null)
                    {
                        ex.Body = errorBody;
                    }
                }
                catch (JsonException)
                {
                    // A non-JSON error payload must not mask the HTTP failure;
                    // surface the status-code exception without a typed body.
                }
                ex.Request = httpRequest;
                ex.Response = httpResponse;
                if (shouldTrace)
                {
                    ServiceClientTracing.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            var result = new HttpOperationResponse<Product>();
            result.Request = httpRequest;
            result.Response = httpResponse;
            // Deserialize Response
            if (statusCode == HttpStatusCode.OK)
            {
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result.Body = JsonConvert.DeserializeObject<Product>(responseContent, this.DeserializationSettings);
            }
            if (shouldTrace)
            {
                ServiceClientTracing.Exit(invocationId, result);
            }
            return result;
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.ResourceManager
{
    using System.Linq;
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Models;

    /// <summary>
    /// Azure resources can be linked together to form logical relationships.
    /// Links may span resource groups, but every linked resource must live in
    /// the same subscription. A resource can be linked to at most 50 other
    /// resources; when a linked resource is deleted or moved, the link owner
    /// is responsible for cleaning up the remaining link.
    /// </summary>
    public partial class ManagementLinkClient : Microsoft.Rest.ServiceClient<ManagementLinkClient>, IManagementLinkClient, IAzureClient
    {
        /// <summary>
        /// The base URI of the service.
        /// </summary>
        public System.Uri BaseUri { get; set; }

        /// <summary>
        /// Gets or sets json serialization settings.
        /// </summary>
        public Newtonsoft.Json.JsonSerializerSettings SerializationSettings { get; private set; }

        /// <summary>
        /// Gets or sets json deserialization settings.
        /// </summary>
        public Newtonsoft.Json.JsonSerializerSettings DeserializationSettings { get; private set; }

        /// <summary>
        /// Credentials needed for the client to connect to Azure.
        /// </summary>
        public Microsoft.Rest.ServiceClientCredentials Credentials { get; private set; }

        /// <summary>
        /// The ID of the target subscription.
        /// </summary>
        public string SubscriptionId { get; set; }

        /// <summary>
        /// The API version to use for the operation.
        /// </summary>
        public string ApiVersion { get; private set; }

        /// <summary>
        /// Gets or sets the preferred language for the response.
        /// </summary>
        public string AcceptLanguage { get; set; }

        /// <summary>
        /// Gets or sets the retry timeout in seconds for Long Running Operations.
        /// Default value is 30.
        /// </summary>
        public int? LongRunningOperationRetryTimeout { get; set; }

        /// <summary>
        /// When set to true a unique x-ms-client-request-id value is generated and
        /// included in each request. Default is true.
        /// </summary>
        public bool? GenerateClientRequestId { get; set; }

        /// <summary>
        /// Gets the IResourceLinksOperations.
        /// </summary>
        public virtual IResourceLinksOperations ResourceLinks { get; private set; }

        /// <summary>
        /// Initializes a new instance of the ManagementLinkClient class.
        /// </summary>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected ManagementLinkClient(params System.Net.Http.DelegatingHandler[] handlers) : base(handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the ManagementLinkClient class.
        /// </summary>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected ManagementLinkClient(System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : base(rootHandler, handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the ManagementLinkClient class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected ManagementLinkClient(System.Uri baseUri, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            this.BaseUri = baseUri ?? throw new System.ArgumentNullException("baseUri");
        }

        /// <summary>
        /// Initializes a new instance of the ManagementLinkClient class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected ManagementLinkClient(System.Uri baseUri, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            this.BaseUri = baseUri ?? throw new System.ArgumentNullException("baseUri");
        }

        /// <summary>
        /// Initializes a new instance of the ManagementLinkClient class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public ManagementLinkClient(Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            this.Credentials = credentials ?? throw new System.ArgumentNullException("credentials");
            this.Credentials.InitializeServiceClient(this);
        }

        /// <summary>
        /// Initializes a new instance of the ManagementLinkClient class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public ManagementLinkClient(Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            this.Credentials = credentials ?? throw new System.ArgumentNullException("credentials");
            this.Credentials.InitializeServiceClient(this);
        }

        /// <summary>
        /// Initializes a new instance of the ManagementLinkClient class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public ManagementLinkClient(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            this.BaseUri = baseUri ?? throw new System.ArgumentNullException("baseUri");
            this.Credentials = credentials ?? throw new System.ArgumentNullException("credentials");
            this.Credentials.InitializeServiceClient(this);
        }

        /// <summary>
        /// Initializes a new instance of the ManagementLinkClient class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public ManagementLinkClient(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            this.BaseUri = baseUri ?? throw new System.ArgumentNullException("baseUri");
            this.Credentials = credentials ?? throw new System.ArgumentNullException("credentials");
            this.Credentials.InitializeServiceClient(this);
        }

        /// <summary>
        /// An optional partial-method to perform custom initialization.
        /// </summary>
        partial void CustomInitialize();

        /// <summary>
        /// Initializes client properties.
        /// </summary>
        private void Initialize()
        {
            this.ResourceLinks = new ResourceLinksOperations(this);
            this.BaseUri = new System.Uri("https://management.azure.com");
            this.ApiVersion = "2016-09-01";
            this.AcceptLanguage = "en-US";
            this.LongRunningOperationRetryTimeout = 30;
            this.GenerateClientRequestId = true;
            // Requests are serialized with indentation; responses are parsed
            // with otherwise identical settings.
            SerializationSettings = BuildJsonSettings(indented: true);
            DeserializationSettings = BuildJsonSettings(indented: false);
            CustomInitialize();
            // Registered after CustomInitialize so custom code sees the same
            // converter ordering the generator produced.
            DeserializationSettings.Converters.Add(new Microsoft.Rest.Azure.CloudErrorJsonConverter());
        }

        /// <summary>
        /// Creates the shared Json.NET settings used for both serialization and
        /// deserialization of service payloads.
        /// </summary>
        /// <param name='indented'>
        /// When true, serialized output is indented.
        /// </param>
        private static Newtonsoft.Json.JsonSerializerSettings BuildJsonSettings(bool indented)
        {
            var settings = new Newtonsoft.Json.JsonSerializerSettings
            {
                DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
                NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
                ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
                ContractResolver = new Microsoft.Rest.Serialization.ReadOnlyJsonContractResolver(),
                Converters = new System.Collections.Generic.List<Newtonsoft.Json.JsonConverter>
                {
                    new Microsoft.Rest.Serialization.Iso8601TimeSpanConverter()
                }
            };
            if (indented)
            {
                settings.Formatting = Newtonsoft.Json.Formatting.Indented;
            }
            return settings;
        }
    }
}
using System; using System.Data; using Csla; using Csla.Data; using ParentLoad.DataAccess; using ParentLoad.DataAccess.ERCLevel; namespace ParentLoad.Business.ERCLevel { /// <summary> /// B10_City (editable child object).<br/> /// This is a generated base class of <see cref="B10_City"/> business object. /// </summary> /// <remarks> /// This class contains one child collection:<br/> /// - <see cref="B11_CityRoadObjects"/> of type <see cref="B11_CityRoadColl"/> (1:M relation to <see cref="B12_CityRoad"/>)<br/> /// This class is an item of <see cref="B09_CityColl"/> collection. /// </remarks> [Serializable] public partial class B10_City : BusinessBase<B10_City> { #region Static Fields private static int _lastID; #endregion #region State Fields [NotUndoable] [NonSerialized] internal int parent_Region_ID = 0; #endregion #region Business Properties /// <summary> /// Maintains metadata about <see cref="City_ID"/> property. /// </summary> public static readonly PropertyInfo<int> City_IDProperty = RegisterProperty<int>(p => p.City_ID, "City ID"); /// <summary> /// Gets the City ID. /// </summary> /// <value>The City ID.</value> public int City_ID { get { return GetProperty(City_IDProperty); } } /// <summary> /// Maintains metadata about <see cref="City_Name"/> property. /// </summary> public static readonly PropertyInfo<string> City_NameProperty = RegisterProperty<string>(p => p.City_Name, "City Name"); /// <summary> /// Gets or sets the City Name. /// </summary> /// <value>The City Name.</value> public string City_Name { get { return GetProperty(City_NameProperty); } set { SetProperty(City_NameProperty, value); } } /// <summary> /// Maintains metadata about child <see cref="B11_City_SingleObject"/> property. 
/// </summary> public static readonly PropertyInfo<B11_City_Child> B11_City_SingleObjectProperty = RegisterProperty<B11_City_Child>(p => p.B11_City_SingleObject, "B11 City Single Object", RelationshipTypes.Child); /// <summary> /// Gets the B11 City Single Object ("parent load" child property). /// </summary> /// <value>The B11 City Single Object.</value> public B11_City_Child B11_City_SingleObject { get { return GetProperty(B11_City_SingleObjectProperty); } private set { LoadProperty(B11_City_SingleObjectProperty, value); } } /// <summary> /// Maintains metadata about child <see cref="B11_City_ASingleObject"/> property. /// </summary> public static readonly PropertyInfo<B11_City_ReChild> B11_City_ASingleObjectProperty = RegisterProperty<B11_City_ReChild>(p => p.B11_City_ASingleObject, "B11 City ASingle Object", RelationshipTypes.Child); /// <summary> /// Gets the B11 City ASingle Object ("parent load" child property). /// </summary> /// <value>The B11 City ASingle Object.</value> public B11_City_ReChild B11_City_ASingleObject { get { return GetProperty(B11_City_ASingleObjectProperty); } private set { LoadProperty(B11_City_ASingleObjectProperty, value); } } /// <summary> /// Maintains metadata about child <see cref="B11_CityRoadObjects"/> property. /// </summary> public static readonly PropertyInfo<B11_CityRoadColl> B11_CityRoadObjectsProperty = RegisterProperty<B11_CityRoadColl>(p => p.B11_CityRoadObjects, "B11 CityRoad Objects", RelationshipTypes.Child); /// <summary> /// Gets the B11 City Road Objects ("parent load" child property). /// </summary> /// <value>The B11 City Road Objects.</value> public B11_CityRoadColl B11_CityRoadObjects { get { return GetProperty(B11_CityRoadObjectsProperty); } private set { LoadProperty(B11_CityRoadObjectsProperty, value); } } #endregion #region Factory Methods /// <summary> /// Factory method. Creates a new <see cref="B10_City"/> object. 
/// </summary> /// <returns>A reference to the created <see cref="B10_City"/> object.</returns> internal static B10_City NewB10_City() { return DataPortal.CreateChild<B10_City>(); } /// <summary> /// Factory method. Loads a <see cref="B10_City"/> object from the given SafeDataReader. /// </summary> /// <param name="dr">The SafeDataReader to use.</param> /// <returns>A reference to the fetched <see cref="B10_City"/> object.</returns> internal static B10_City GetB10_City(SafeDataReader dr) { B10_City obj = new B10_City(); // show the framework that this is a child object obj.MarkAsChild(); obj.Fetch(dr); obj.LoadProperty(B11_CityRoadObjectsProperty, B11_CityRoadColl.NewB11_CityRoadColl()); obj.MarkOld(); return obj; } #endregion #region Constructor /// <summary> /// Initializes a new instance of the <see cref="B10_City"/> class. /// </summary> /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks> [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)] public B10_City() { // Use factory methods and do not use direct creation. // show the framework that this is a child object MarkAsChild(); } #endregion #region Data Access /// <summary> /// Loads default values for the <see cref="B10_City"/> object properties. /// </summary> [Csla.RunLocal] protected override void Child_Create() { LoadProperty(City_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID)); LoadProperty(B11_City_SingleObjectProperty, DataPortal.CreateChild<B11_City_Child>()); LoadProperty(B11_City_ASingleObjectProperty, DataPortal.CreateChild<B11_City_ReChild>()); LoadProperty(B11_CityRoadObjectsProperty, DataPortal.CreateChild<B11_CityRoadColl>()); var args = new DataPortalHookArgs(); OnCreate(args); base.Child_Create(); } /// <summary> /// Loads a <see cref="B10_City"/> object from the given SafeDataReader. 
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
private void Fetch(SafeDataReader dr)
{
    // Value properties
    LoadProperty(City_IDProperty, dr.GetInt32("City_ID"));
    LoadProperty(City_NameProperty, dr.GetString("City_Name"));
    // parent properties
    parent_Region_ID = dr.GetInt32("Parent_Region_ID");
    var args = new DataPortalHookArgs(dr);
    // Hook fires while the reader is still open so extension code can read extra columns.
    OnFetchRead(args);
}

/// <summary>
/// Loads child <see cref="B11_City_Child"/> object.
/// </summary>
/// <param name="child">The child object to load.</param>
internal void LoadChild(B11_City_Child child)
{
    LoadProperty(B11_City_SingleObjectProperty, child);
}

/// <summary>
/// Loads child <see cref="B11_City_ReChild"/> object.
/// </summary>
/// <param name="child">The child object to load.</param>
internal void LoadChild(B11_City_ReChild child)
{
    LoadProperty(B11_City_ASingleObjectProperty, child);
}

/// <summary>
/// Inserts a new <see cref="B10_City"/> object in the database.
/// </summary>
/// <param name="parent">The parent object.</param>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Insert(B08_Region parent)
{
    using (var dalManager = DalFactoryParentLoad.GetManager())
    {
        var args = new DataPortalHookArgs();
        OnInsertPre(args);
        var dal = dalManager.GetProvider<IB10_CityDal>();
        using (BypassPropertyChecks)
        {
            int city_ID = -1;
            dal.Insert(
                parent.Region_ID,
                out city_ID,
                City_Name
                );
            // Replace the temporary negative ID with the database-assigned identity.
            LoadProperty(City_IDProperty, city_ID);
        }
        OnInsertPost(args);
        // flushes all pending data operations
        FieldManager.UpdateChildren(this);
    }
}

/// <summary>
/// Updates in the database all changes made to the <see cref="B10_City"/> object.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Update()
{
    // Skip the round-trip entirely when nothing changed.
    if (!IsDirty)
        return;

    using (var dalManager = DalFactoryParentLoad.GetManager())
    {
        var args = new DataPortalHookArgs();
        OnUpdatePre(args);
        var dal = dalManager.GetProvider<IB10_CityDal>();
        using (BypassPropertyChecks)
        {
            dal.Update(
                City_ID,
                City_Name
                );
        }
        OnUpdatePost(args);
        // flushes all pending data operations
        FieldManager.UpdateChildren(this);
    }
}

/// <summary>
/// Self deletes the <see cref="B10_City"/> object from database.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_DeleteSelf()
{
    using (var dalManager = DalFactoryParentLoad.GetManager())
    {
        var args = new DataPortalHookArgs();
        // flushes all pending data operations
        // (children are persisted/deleted BEFORE the parent row is removed)
        FieldManager.UpdateChildren(this);
        OnDeletePre(args);
        var dal = dalManager.GetProvider<IB10_CityDal>();
        using (BypassPropertyChecks)
        {
            dal.Delete(ReadProperty(City_IDProperty));
        }
        OnDeletePost(args);
    }
}

#endregion

#region DataPortal Hooks

/// <summary>
/// Occurs after setting all defaults for object creation.
/// </summary>
partial void OnCreate(DataPortalHookArgs args);

/// <summary>
/// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
/// </summary>
partial void OnDeletePre(DataPortalHookArgs args);

/// <summary>
/// Occurs in DataPortal_Delete, after the delete operation, before Commit().
/// </summary>
partial void OnDeletePost(DataPortalHookArgs args);

/// <summary>
/// Occurs after setting query parameters and before the fetch operation.
/// </summary>
partial void OnFetchPre(DataPortalHookArgs args);

/// <summary>
/// Occurs after the fetch operation (object or collection is fully loaded and set up).
/// </summary>
partial void OnFetchPost(DataPortalHookArgs args);

/// <summary>
/// Occurs after the low level fetch operation, before the data reader is destroyed.
/// </summary>
partial void OnFetchRead(DataPortalHookArgs args);

/// <summary>
/// Occurs after setting query parameters and before the update operation.
/// </summary>
partial void OnUpdatePre(DataPortalHookArgs args);

/// <summary>
/// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
/// </summary>
partial void OnUpdatePost(DataPortalHookArgs args);

/// <summary>
/// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
/// </summary>
partial void OnInsertPre(DataPortalHookArgs args);

/// <summary>
/// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
/// </summary>
partial void OnInsertPost(DataPortalHookArgs args);

#endregion

}
}
using Core.IO;
using System;
using System.Diagnostics;
using System.Text;

namespace Core
{
    /// <summary>
    /// Abstract virtual file system (VFS) layer -- a C# port of SQLite's sqlite3_vfs.
    /// Concrete subclasses supply the platform-specific file operations.
    /// </summary>
    public abstract partial class VSystem
    {
        // Head of the global singly-linked list of registered VFS implementations.
        internal static VSystem _vfsList;
        //internal static bool _isInit = false;

        // Mirrors SQLITE_OPEN_* flags; values must stay bit-identical to SQLite's.
        public enum OPEN : uint
        {
            READONLY = 0x00000001,        // Ok for sqlite3_open_v2()
            READWRITE = 0x00000002,       // Ok for sqlite3_open_v2()
            CREATE = 0x00000004,          // Ok for sqlite3_open_v2()
            DELETEONCLOSE = 0x00000008,   // VFS only
            EXCLUSIVE = 0x00000010,       // VFS only
            AUTOPROXY = 0x00000020,       // VFS only
            URI = 0x00000040,             // Ok for sqlite3_open_v2()
            MEMORY = 0x00000080,          // Ok for sqlite3_open_v2()
            MAIN_DB = 0x00000100,         // VFS only
            TEMP_DB = 0x00000200,         // VFS only
            TRANSIENT_DB = 0x00000400,    // VFS only
            MAIN_JOURNAL = 0x00000800,    // VFS only
            TEMP_JOURNAL = 0x00001000,    // VFS only
            SUBJOURNAL = 0x00002000,      // VFS only
            MASTER_JOURNAL = 0x00004000,  // VFS only
            NOMUTEX = 0x00008000,         // Ok for sqlite3_open_v2()
            FULLMUTEX = 0x00010000,       // Ok for sqlite3_open_v2()
            SHAREDCACHE = 0x00020000,     // Ok for sqlite3_open_v2()
            PRIVATECACHE = 0x00040000,    // Ok for sqlite3_open_v2()
            WAL = 0x00080000,             // VFS only
        }

        // Mirrors SQLITE_ACCESS_* modes used by the Access() method.
        public enum ACCESS : byte
        {
            EXISTS = 0,
            READWRITE = 1,                // Used by PRAGMA temp_store_directory
            READ = 2,                     // Unused
        }

        public VSystem Next;              // Next registered VFS
        public string Name = "win32";     // Name of this virtual file system
        public object Tag;                // Pointer to application-specific data
        public int SizeOsFile = -1;       // Size of subclassed VirtualFile
        public int MaxPathname = 256;     // Maximum file pathname length
        public Func<VFile> CreateOsFile;  // Factory for platform VFile instances

        /// <summary>
        /// Shallow field-by-field copy of this VFS descriptor into <paramref name="ct"/>
        /// (name kept from the C original's memcpy of the sqlite3_vfs struct).
        /// </summary>
        public void _memcpy(VSystem ct)
        {
            ct.SizeOsFile = this.SizeOsFile;
            ct.MaxPathname = this.MaxPathname;
            ct.Next = this.Next;
            ct.Name = this.Name;
            ct.Tag = this.Tag;
        }

        // Platform primitives -- analogous to the xOpen/xDelete/xAccess/xFullPathname
        // members of sqlite3_vfs; implemented by concrete subclasses.
        public abstract RC Open(string path, VFile file, OPEN flags, out OPEN outFlags);
        public abstract RC Delete(string path, bool syncDirectory);
        public abstract RC Access(string path, ACCESS flags, out int outRC);
        public abstract RC FullPathname(string path, out string outPath);
        public abstract
object DlOpen(string filename);
        public abstract void DlError(int bufLength, string buf);
        public abstract object DlSym(object handle, string symbol);
        public abstract void DlClose(object handle);
        public abstract int Randomness(int bufLength, byte[] buf);
        public abstract int Sleep(int microseconds);
        public abstract RC CurrentTimeInt64(ref long now);
        public abstract RC CurrentTime(ref double now);
        public abstract RC GetLastError(int bufLength, ref string buf);

        /// <summary>
        /// Allocates a VFile via <see cref="CreateOsFile"/> and opens it; on failure the
        /// allocation is released and <paramref name="file"/> stays null.
        /// </summary>
        public RC OpenAndAlloc(string path, out VFile file, OPEN flags, out OPEN outFlags)
        {
            file = null;
            outFlags = 0;
            VFile file2 = CreateOsFile();
            if (file2 == null)
                return RC.NOMEM;
            RC rc = Open(path, file2, flags, out outFlags);
            if (rc != RC.OK)
                C._free(ref file2);
            else
                file = file2;
            return rc;
        }

        /// <summary>
        /// Finds a registered VFS by name, walking the global list under the
        /// STATIC_MASTER mutex. Returns null when no match is found.
        /// </summary>
        public static VSystem FindVfs(string name)
        {
            VSystem vfs = null;
            var mutex = MutexEx.Alloc(MutexEx.MUTEX.STATIC_MASTER);
            MutexEx.Enter(mutex);
            for (vfs = _vfsList; vfs != null && name != vfs.Name; vfs = vfs.Next) { }
            MutexEx.Leave(mutex);
            return vfs;
        }

        /// <summary>
        /// Unlinks <paramref name="vfs"/> from the global list. Caller must already
        /// hold the STATIC_MASTER mutex (asserted below).
        /// </summary>
        internal static void UnlinkVfs(VSystem vfs)
        {
            Debug.Assert(MutexEx.Held(MutexEx.Alloc(MutexEx.MUTEX.STATIC_MASTER)));
            if (vfs == null) { }
            else if (_vfsList == vfs)
                _vfsList = vfs.Next;
            else if (_vfsList != null)
            {
                var p = _vfsList;
                while (p.Next != null && p.Next != vfs)
                    p = p.Next;
                if (p.Next == vfs)
                    p.Next = vfs.Next;
            }
        }

        /// <summary>
        /// Registers a VFS (mirrors sqlite3_vfs_register): the default VFS goes to the
        /// head of the list, otherwise it is inserted in second position.
        /// </summary>
        public static RC RegisterVfs(VSystem vfs, bool default_, Func<VFile> createOsFile)
        {
            var mutex = MutexEx.Alloc(MutexEx.MUTEX.STATIC_MASTER);
            MutexEx.Enter(mutex);
            // Re-registering moves an already-listed VFS rather than duplicating it.
            UnlinkVfs(vfs);
            vfs.CreateOsFile = createOsFile;
            if (default_ || _vfsList == null)
            {
                vfs.Next = _vfsList;
                _vfsList = vfs;
            }
            else
            {
                vfs.Next = _vfsList.Next;
                _vfsList.Next = vfs;
            }
            Debug.Assert(_vfsList != null);
            MutexEx.Leave(mutex);
            return RC.OK;
        }

        /// <summary>
        /// Removes a VFS from the registration list (mirrors sqlite3_vfs_unregister).
        /// </summary>
        public static RC UnregisterVfs(VSystem vfs)
        {
            var mutex = MutexEx.Alloc(MutexEx.MUTEX.STATIC_MASTER);
            MutexEx.Enter(mutex);
            UnlinkVfs(vfs);
            MutexEx.Leave(mutex);
            return RC.OK;
        }

        #region File

#if ENABLE_8_3_NAMES
        public static void FileSuffix3(string
baseFilename, ref string z)
        {
#if ENABLE_8_3_NAMESx2
            if (!UriBoolean(baseFilename, "8_3_names", 0)) return;
#endif
            int sz = z.Length;
            int i;
            // Scan backwards for the last '/' or '.' in the name.
            for (i = sz - 1; i > 0 && z[i] != '/' && z[i] != '.'; i--) { }
            // NOTE(review): the truncation to an 8.3 name is still commented out, so this
            // build of FileSuffix3 only scans and never rewrites z -- confirm intended.
            //if (z[i] == '.' && C._ALWAYS(sz > i + 4)) C._memmove(&z[i + 1], &z[sz - 3], 4);
            //if (z[i] == '.' && C._ALWAYS(sz > i + 4)) C._memcpy(&z[i + 1], &z[sz - 3], 4);
        }
#else
        // No-op when 8.3 filename support is compiled out.
        public static void FileSuffix3(string baseFilename, ref string z) { }
#endif

        // Pairs a URI query-parameter value ("shared", "ro", ...) with its OPEN flag.
        class OpenMode
        {
            public string Z;
            public VSystem.OPEN Mode;
            public OpenMode(string z, VSystem.OPEN mode)
            {
                Z = z;
                Mode = mode;
            }
        }

        // Recognized values for the "cache=" URI parameter (null entry terminates).
        static readonly OpenMode[] _cacheModes = new OpenMode[]
        {
            new OpenMode("shared", VSystem.OPEN.SHAREDCACHE),
            new OpenMode("private", VSystem.OPEN.PRIVATECACHE),
            new OpenMode(null, (VSystem.OPEN)0)
        };

        // Recognized values for the "mode=" URI parameter (null entry terminates).
        static readonly OpenMode[] _openModes = new OpenMode[]
        {
            new OpenMode("ro", VSystem.OPEN.READONLY),
            new OpenMode("rw", VSystem.OPEN.READWRITE),
            new OpenMode("rwc", VSystem.OPEN.READWRITE | VSystem.OPEN.CREATE),
            new OpenMode(null, (VSystem.OPEN)0)
        };

        /// <summary>
        /// Parses a database filename that may be a "file:" URI (port of SQLite's
        /// sqlite3ParseUri). Extracts the VFS name, the plain filename, and adjusts
        /// the open flags from any "vfs=", "cache=" or "mode=" query parameters.
        /// </summary>
        public static RC ParseUri(string defaultVfsName, string uri, ref VSystem.OPEN flagsRef, out VSystem vfsOut, out string fileNameOut, out string errMsgOut)
        {
            vfsOut = null;
            fileNameOut = null;
            errMsgOut = null;

            VSystem.OPEN flags = flagsRef;
            string vfsName = defaultVfsName;
            int uriLength = uri.Length;

            RC rc = RC.OK;
            StringBuilder fileName = null;
            if (((flags & VSystem.OPEN.URI) != 0 || SysEx._GlobalStatics.OpenUri) && uriLength >= 5 && uri.StartsWith("file:"))
            {
                // Make sure the SQLITE_OPEN_URI flag is set to indicate to the VFS xOpen method that there may be extra parameters following the file-name.
                flags |= VSystem.OPEN.URI;

                int bytes = uriLength + 2; // Bytes of space to allocate
                int uriIdx; // Input character index
                for (uriIdx = 0; uriIdx < uriLength; uriIdx++)
                    bytes += (uri[uriIdx] == '&' ? 1 : 0);
                fileName = new StringBuilder(bytes);
                if (fileName == null)
                    return RC.NOMEM;

                // Discard the scheme and authority segments of the URI.
                // NOTE(review): uri[6] is read unguarded -- a 5- or 6-character input
                // ("file:", "file:/") would throw IndexOutOfRangeException; verify callers.
                if (uri[5] == '/' && uri[6] == '/')
                {
                    uriIdx = 7;
                    while (uriIdx < uriLength && uri[uriIdx] != '/')
                        uriIdx++;
                    // Only an empty authority or the literal "localhost" is accepted.
                    if (uriIdx != 7 && (uriIdx != 16 || !string.Equals("localhost", uri.Substring(7, 9), StringComparison.InvariantCultureIgnoreCase)))
                    {
                        errMsgOut = C._mprintf("invalid uri authority: %.*s", uriIdx - 7, uri.Substring(7));
                        rc = RC.ERROR;
                        goto parse_uri_out;
                    }
                }
                else
                    uriIdx = 5;

                // Copy the filename and any query parameters into the zFile buffer. Decode %HH escape codes along the way.
                //
                // Within this loop, variable eState may be set to 0, 1 or 2, depending on the parsing context. As follows:
                //
                //   0: Parsing file-name.
                //   1: Parsing name section of a name=value query parameter.
                //   2: Parsing value section of a name=value query parameter.
                int state = 0; // Parser state when parsing URI
                char c;
                //int fileNameIdx = 0; // Output character index
                while (uriIdx < uriLength && (c = uri[uriIdx]) != 0 && c != '#')
                {
                    uriIdx++;
                    if (c == '%' && C._isxdigit(uri[uriIdx]) && C._isxdigit(uri[uriIdx + 1]))
                    {
                        int octet = (C._hextobyte(uri[uriIdx++]) << 4);
                        octet += C._hextobyte(uri[uriIdx++]);
                        Debug.Assert(octet >= 0 && octet < 256);
                        if (octet == 0)
                        {
                            // This branch is taken when "%00" appears within the URI. In this case we ignore all text in the remainder of the path, name or
                            // value currently being parsed. So ignore the current character and skip to the next "?", "=" or "&", as appropriate.
                            while (uriIdx < uriLength && (c = uri[uriIdx]) != 0 && c != '#' &&
                                (state != 0 || c != '?') &&
                                (state != 1 || (c != '=' && c != '&')) &&
                                (state != 2 || c != '&'))
                                uriIdx++;
                            continue;
                        }
                        c = (char)octet;
                    }
                    else if (state == 1 && (c == '&' || c == '='))
                    {
                        if (fileName[fileName.Length - 1] == '\0')
                        {
                            // An empty option name. Ignore this option altogether.
                            while (uri[uriIdx] != '\0' && uri[uriIdx] != '#' && uri[uriIdx - 1] != '&')
                                uriIdx++;
                            continue;
                        }
                        if (c == '&')
                            fileName.Append('\0'); // NUL separates name/value pairs, as in the C original
                        else
                            state = 2;
                        c = '\0';
                    }
                    else if ((state == 0 && c == '?') || (state == 2 && c == '&'))
                    {
                        c = '\0';
                        state = 1;
                    }
                    fileName.Append(c);
                }
                if (state == 1)
                    fileName.Append('\0');
                fileName.Append('\0');
                fileName.Append('\0');

                // Check if there were any options specified that should be interpreted here. Options that are interpreted here include "vfs" and those that
                // correspond to flags that may be passed to the sqlite3_open_v2() method.
                //
                // NOTE(review): this Substring-based walk looks like a broken translation of
                // the C original's pointer arithmetic (zOpt = &zFile[strlen(zFile)+1]):
                //   - Substring(fileName.Length + 1) starts past the end of the string and
                //     throws ArgumentOutOfRangeException;
                //   - opt.Substring(optLength) is always the empty string, so val/valLength
                //     never hold the option value.
                // Confirm against sqlite3ParseUri before relying on URI query parameters.
                string opt = fileName.ToString().Substring(fileName.Length + 1);
                while (opt.Length > 0)
                {
                    int optLength = opt.Length;
                    string val = opt.Substring(optLength);
                    int valLength = val.Length;
                    if (optLength == 3 && opt.StartsWith("vfs"))
                        // The VFS option. Select the requested VFS.
                        vfsName = val;
                    else
                    {
                        OpenMode[] modes = null;
                        string modeType = null;
                        VSystem.OPEN mask = (VSystem.OPEN)0;
                        VSystem.OPEN limit = (VSystem.OPEN)0;
                        if (optLength == 5 && opt.StartsWith("cache"))
                        {
                            mask = VSystem.OPEN.SHAREDCACHE | VSystem.OPEN.PRIVATECACHE;
                            modes = _cacheModes;
                            limit = mask;
                            modeType = "cache";
                        }
                        if (optLength == 4 && opt.StartsWith("mode"))
                        {
                            mask = VSystem.OPEN.READONLY | VSystem.OPEN.READWRITE | VSystem.OPEN.CREATE;
                            modes = _openModes;
                            // "mode=" may never grant more access than the caller's flags allow.
                            limit = mask & flags;
                            modeType = "access";
                        }
                        if (modes != null)
                        {
                            VSystem.OPEN mode = 0;
                            for (int i = 0; modes[i].Z != null; i++)
                            {
                                string z = modes[i].Z;
                                if (valLength == z.Length && z.StartsWith(val))
                                {
                                    mode = modes[i].Mode;
                                    break;
                                }
                            }
                            if (mode == 0)
                            {
                                errMsgOut = C._mprintf("no such %s mode: %s", modeType, val);
                                rc = RC.ERROR;
                                goto parse_uri_out;
                            }
                            if (mode > limit)
                            {
                                errMsgOut = C._mprintf("%s mode not allowed: %s", modeType, val);
                                rc = RC.PERM;
                                goto parse_uri_out;
                            }
                            flags = ((flags & ~mask) | mode);
                        }
                    }
                    opt = val.Substring(valLength + 1);
                }
            }
            else
            {
                // Not a "file:" URI -- treat the whole input as a plain filename.
                fileName = (uri == null ?
new StringBuilder() : new StringBuilder(uri.Substring(0, uriLength)));
                if (fileName == null)
                    return RC.NOMEM;
                // Double NUL terminator marks "no query parameters", as in the C original.
                fileName.Append('\0');
                fileName.Append('\0');
            }

            vfsOut = FindVfs(vfsName);
            if (vfsOut == null)
            {
                errMsgOut = C._mprintf("no such vfs: %s", vfsName);
                rc = RC.ERROR;
            }
        parse_uri_out:
            if (rc != RC.OK)
            {
                C._free(ref fileName);
                fileName = null;
            }
            flagsRef = flags;
            fileNameOut = (fileName == null ? null : fileName.ToString().Substring(0, fileName.Length));
            return rc;
        }

        /// <summary>
        /// Looks up a query parameter attached to a parsed URI filename (port of
        /// sqlite3_uri_parameter).
        /// NOTE(review): the lookup loop is still commented out, so this method always
        /// returns null -- UriBoolean/UriInt64 below therefore always return their
        /// defaults. Confirm whether the port is intentionally incomplete.
        /// </summary>
        public static string UriParameter(string filename, string param)
        {
            if (filename == null)
                return null;
            //int filenameIdx _strlen30(filename) + 1;
            //while (filename[0])
            //{
            //    int x = _strcmp(filename, param);
            //    filename += _strlen30(filename) + 1;
            //    if (x == 0) return filename;
            //    filename += _strlen30(filename) + 1;
            //}
            return null;
        }

        /// <summary>
        /// Returns a boolean URI parameter, or <paramref name="dflt"/> when absent.
        /// </summary>
        public static bool UriBoolean(string filename, string param, bool dflt)
        {
            string z = UriParameter(filename, param);
            return (z != null ? ConvertEx.GetBoolean(z, (byte)(dflt ? 1 : 0)) : dflt);
        }

        /// <summary>
        /// Returns a 64-bit integer URI parameter, or <paramref name="dflt"/> when
        /// absent or unparseable.
        /// </summary>
        public static long UriInt64(string filename, string param, long dflt)
        {
            string z = UriParameter(filename, param);
            long v;
            return (z != null && ConvertEx.Atoi64(z, out v, z.Length, TEXTENCODE.UTF8) == 0 ? v : dflt);
        }

        #endregion
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;

#pragma warning disable 618

namespace Apache.Geode.Client.UnitTests
{
  using NUnit.Framework;
  using Apache.Geode.DUnitFramework;
  using Apache.Geode.Client.Tests;
  using Apache.Geode.Client;
  using DeltaEx = Apache.Geode.Client.Tests.DeltaEx;

  /// <summary>
  /// CQ listener that counts events whose delta payload and whose full object each
  /// carry intVar == 5 (the value the delta tests put), letting tests verify that
  /// delta propagation reached the CQ channel.
  /// </summary>
  public class CqDeltaListener<TKey, TResult> : ICqListener<TKey, TResult>
  {
    public CqDeltaListener()
    {
      m_deltaCount = 0;
      m_valueCount = 0;
    }

    public void OnEvent(CqEvent<TKey, TResult> aCqEvent)
    {
      // Rehydrate the raw delta bytes into a DeltaTestImpl to inspect them.
      byte[] deltaValue = aCqEvent.getDeltaValue();
      DeltaTestImpl newValue = new DeltaTestImpl();
      DataInput input = CacheHelper.DCache.CreateDataInput(deltaValue);
      newValue.FromDelta(input);
      if (newValue.GetIntVar() == 5)
      {
        m_deltaCount++;
      }
      DeltaTestImpl fullObject = (DeltaTestImpl)(object)aCqEvent.getNewValue();
      if (fullObject.GetIntVar() == 5)
      {
        m_valueCount++;
      }
    }

    public void OnError(CqEvent<TKey, TResult> aCqEvent)
    {
    }

    public void Close()
    {
    }

    public int GetDeltaCount()
    {
      return m_deltaCount;
    }

    public int GetValueCount()
    {
      return m_valueCount;
    }

    private int m_deltaCount;
    private int m_valueCount;
  }

  /// <summary>
  /// Serializable test type that alternates reporting a delta (see HasDelta) so the
  /// AppDomain delta tests can observe both full and delta transmissions.
  /// </summary>
  public class DeltaTestAD : IGeodeDelta, IGeodeSerializable
  {
    private int _deltaUpdate;
private string _staticData;

    public static DeltaTestAD Create()
    {
      return new DeltaTestAD();
    }

    public DeltaTestAD()
    {
      _deltaUpdate = 1;
      _staticData = "Data which don't get updated";
    }

    #region IGeodeDelta Members

    public void FromDelta(DataInput input)
    {
      _deltaUpdate = input.ReadInt32();
    }

    // NOTE(review): HasDelta is not a pure query -- it increments _deltaUpdate on every
    // call and reports a delta on every second call (odd counts). The tests rely on
    // this alternation.
    public bool HasDelta()
    {
      _deltaUpdate++;
      bool isDelta = (_deltaUpdate % 2) == 1;
      Util.Log("In DeltaTestAD.HasDelta _deltaUpdate:" + _deltaUpdate + " : isDelta:" + isDelta);
      return isDelta;
    }

    public void ToDelta(DataOutput output)
    {
      output.WriteInt32(_deltaUpdate);
    }

    #endregion

    #region IGeodeSerializable Members

    public uint ClassId
    {
      get { return 151; }
    }

    public void FromData(DataInput input)
    {
      _deltaUpdate = input.ReadInt32();
      _staticData = input.ReadUTF();
    }

    public uint ObjectSize
    {
      // 4 bytes for the int plus the static string payload.
      get { return (uint)(4 + _staticData.Length); }
    }

    public void ToData(DataOutput output)
    {
      output.WriteInt32(_deltaUpdate);
      output.WriteUTF(_staticData);
    }

    public int DeltaUpdate
    {
      get { return _deltaUpdate; }
      set { _deltaUpdate = value; }
    }

    #endregion
  }

  /// <summary>
  /// Two-client test fixture exercising delta propagation (puts, notifications,
  /// CQs, LRU overflow, expiration) against Java cache servers.
  /// </summary>
  [TestFixture]
  [Category("group1")]
  [Category("unicast_only")]
  [Category("generics")]
  public class ThinClientDeltaTest : ThinClientRegionSteps
  {
    #region Private members

    private UnitProcess m_client1, m_client2;
    private CqDeltaListener<object, DeltaTestImpl> myCqListener;

    #endregion

    protected override ClientBase[] GetClients()
    {
      m_client1 = new UnitProcess();
      m_client2 = new UnitProcess();
      return new ClientBase[] { m_client1, m_client2 };
    }

    [TestFixtureTearDown]
    public override void EndTests()
    {
      CacheHelper.StopJavaServers();
      base.EndTests();
    }

    [TearDown]
    public override void EndTest()
    {
      try
      {
        CacheHelper.ClearEndpoints();
        CacheHelper.ClearLocators();
      }
      finally
      {
        // Always stop the Java processes, even if clearing state failed.
        CacheHelper.StopJavaServers();
        CacheHelper.StopJavaLocators();
      }
      base.EndTest();
    }

    // Creates an LRU region (entry limit 3) attached to the given pool.
    public void createLRURegionAndAttachPool(string regionName, string poolName)
    {
      CacheHelper.CreateLRUTCRegion_Pool<object, object>(regionName, true, true, null, null, poolName, false, 3);
    }

    public void
createRegionAndAttachPool(string regionName, string poolName)
    {
      createRegionAndAttachPool(regionName, poolName, false);
    }

    // Creates a caching region attached to the given pool, optionally with cloning.
    public void createRegionAndAttachPool(string regionName, string poolName, bool cloningEnabled)
    {
      CacheHelper.CreateTCRegion_Pool<object, object>(regionName, true, true, null, null, poolName, false, false, cloningEnabled);
    }

    //public void createPooledRegion(string regionName, string poolName, string endpoints, string locators)
    //{
    //  CacheHelper.CreateTCRegion_Pool(regionName, true, true, null, endpoints, locators, poolName, false);
    //}

    public void createPool(string name, string locators, string serverGroup, int redundancy, bool subscription)
    {
      CacheHelper.CreatePool<object, object>(name, locators, serverGroup, redundancy, subscription);
    }

    // Region whose entries are locally invalidated 5 seconds after creation.
    public void createExpirationRegion(string name, string poolName)
    {
      IRegion<object, object> region = CacheHelper.CreateExpirationRegion<object, object>(name, poolName, ExpirationAction.LocalInvalidate, TimeSpan.FromSeconds(5));
    }

    public void createExpirationRegion(string name)
    {
      createExpirationRegion(name, null);
    }

    public void CreateRegion(string name)
    {
      CreateRegion(name, false);
    }

    public void CreateRegion(string name, bool enableNotification)
    {
      CreateRegion(name, enableNotification, false);
    }

    public void CreateRegion(string name, bool enableNotification, bool cloningEnabled)
    {
      Apache.Geode.Client.RegionAttributes<object, object> attrs;
      AttributesFactory<object, object> attrFac = new AttributesFactory<object, object>();
      attrFac.SetCacheListener(new SimpleCacheListener<object, object>());
      attrFac.SetCloningEnabled(cloningEnabled);
      attrs = attrFac.CreateRegionAttributes();
      CacheHelper.CreateRegion<object, object>(name, attrs);
    }

    //public void CreateOverflowRegion(string name, uint entriesLimit)
    //{
    //  AttributesFactory af = new AttributesFactory();
    //  af.SetScope(ScopeType.DistributedAck);
    //  af.SetCachingEnabled(true);
    //  af.SetClientNotificationEnabled(true);
    //  af.SetLruEntriesLimit(entriesLimit);// LRU Entry
// limit set to 3
    //  af.SetDiskPolicy(DiskPolicyType.Overflows);
    //  Properties bdbProperties = Properties.Create();
    //  bdbProperties.Insert("CacheSizeGb", "0");
    //  bdbProperties.Insert("CacheSizeMb", "512");
    //  bdbProperties.Insert("PageSize", "65536");
    //  bdbProperties.Insert("MaxFileSize", "512000000");
    //  String wdPath = Directory.GetCurrentDirectory();
    //  String absPersistenceDir = wdPath + "/absBDB";
    //  String absEnvDir = wdPath + "/absBDBEnv";
    //  bdbProperties.Insert("PersistenceDirectory", absPersistenceDir);
    //  bdbProperties.Insert("EnvironmentDirectory", absEnvDir);
    //  af.SetPersistenceManager("BDBImpl", "createBDBInstance", bdbProperties);
    //  CacheHelper.CreateRegion(name, af.CreateRegionAttributes());
    //}

    // Puts DeltaEx values and checks the to-delta/to-data serialization counters:
    // a put after SetDelta(true) should ship only the delta.
    void DoPutWithDelta()
    {
      try
      {
        Serializable.RegisterTypeGeneric(DeltaEx.create, CacheHelper.DCache);
      }
      catch (IllegalStateException)
      {
        // do nothing -- the type was already registered by an earlier step
      }

      string cKey = m_keys[0];
      DeltaEx val = new DeltaEx();
      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");
      reg[cKey] = (object)val;
      val.SetDelta(true);
      reg[cKey] = (object)val;

      DeltaEx val1 = new DeltaEx(0); // In this case JAVA side will throw invalid DeltaException
      reg[cKey] = (object)val1;
      val1.SetDelta(true);
      reg[cKey] = (object)val1;
      if (DeltaEx.ToDeltaCount != 2)
      {
        Util.Log("DeltaEx.ToDataCount = " + DeltaEx.ToDataCount);
        Assert.Fail(" Delta count should have been 2, is " + DeltaEx.ToDeltaCount);
      }
      if (DeltaEx.ToDataCount != 3)
        Assert.Fail("Data count should have been 3, is " + DeltaEx.ToDataCount);
      // Reset the static counters for the next step.
      DeltaEx.ToDeltaCount = 0;
      DeltaEx.ToDataCount = 0;
      DeltaEx.FromDataCount = 0;
      DeltaEx.FromDeltaCount = 0;
    }

    // Same put sequence as DoPutWithDelta, then verifies Contains/Remove behavior
    // for an entry written via delta.
    void Do_Put_Contains_Remove_WithDelta()
    {
      try
      {
        Serializable.RegisterTypeGeneric(DeltaEx.create, CacheHelper.DCache);
      }
      catch (IllegalStateException)
      {
        // do nothing -- the type was already registered by an earlier step
      }

      string cKey = m_keys[0];
      DeltaEx val = new DeltaEx();
      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");
      reg[cKey] = (object)val;
      val.SetDelta(true);
      reg[cKey] =
(object)val;

      DeltaEx val1 = new DeltaEx(0); // In this case JAVA side will throw invalid DeltaException
      reg[cKey] = (object)val1;
      val1.SetDelta(true);
      reg[cKey] = (object)val1;
      if (DeltaEx.ToDeltaCount != 2)
      {
        Util.Log("DeltaEx.ToDataCount = " + DeltaEx.ToDataCount);
        Assert.Fail(" Delta count should have been 2, is " + DeltaEx.ToDeltaCount);
      }
      if (DeltaEx.ToDataCount != 3)
        Assert.Fail("Data count should have been 3, is " + DeltaEx.ToDataCount);
      // Reset the static counters for the next step.
      DeltaEx.ToDeltaCount = 0;
      DeltaEx.ToDataCount = 0;
      DeltaEx.FromDataCount = 0;
      DeltaEx.FromDeltaCount = 0;

      // Try Contains with key & value that are present. Result should be true.
      KeyValuePair<object, object> myentry = new KeyValuePair<object, object>(cKey, val1);
      bool containsOpflag = reg.Contains(myentry);
      Assert.IsTrue(containsOpflag, "Result should be true as key & value are present");

      // Try Remove with key & value that are present. Result should be true.
      bool removeOpflag = reg.Remove(cKey);
      Assert.IsTrue(removeOpflag, "Result should be true as key & value are present");

      // Check Contains with removed entry. Result should be false.
      bool updatedcontainsOpflag = reg.Contains(myentry);
      Assert.IsFalse(updatedcontainsOpflag, "Result should be false as key & value are removed");
    }

    // Client-1 side of the notification test: two keys, each updated once via delta.
    void DoNotificationWithDelta()
    {
      try
      {
        Serializable.RegisterTypeGeneric(DeltaEx.create, CacheHelper.DCache);
      }
      catch (IllegalStateException)
      {
        // do nothing -- the type was already registered by an earlier step
}

      string cKey = m_keys[0];
      DeltaEx val = new DeltaEx();
      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");
      reg[cKey] = val;
      val.SetDelta(true);
      reg[cKey] = val;

      string cKey1 = m_keys[1];
      DeltaEx val1 = new DeltaEx();
      reg[cKey1] = val1;
      val1.SetDelta(true);
      reg[cKey1] = val1;
      DeltaEx.ToDeltaCount = 0;
      DeltaEx.ToDataCount = 0;
    }

    // Client-1 side of the default-cloning test: one DeltaTestImpl update plus a
    // burst of PdxDelta puts for the receiving client to verify.
    void DoNotificationWithDefaultCloning()
    {
      string cKey = m_keys[0];
      DeltaTestImpl val = new DeltaTestImpl();
      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");
      reg[cKey] = val;
      val.SetIntVar(2);
      val.SetDelta(true);
      reg[cKey] = val;

      javaobject.PdxDelta pd = new javaobject.PdxDelta(1001);
      for (int i = 0; i < 10; i++)
      {
        reg["pdxdelta"] = pd;
      }
    }

    // Fills an LRU region past its entry limit (6 puts, limit 3) so some entries
    // overflow, then sends one delta update.
    void DoNotificationWithDeltaLRU()
    {
      try
      {
        Serializable.RegisterTypeGeneric(DeltaEx.create, CacheHelper.DCache);
      }
      catch (IllegalStateException)
      {
        // do nothing -- the type was already registered by an earlier step
      }

      string cKey1 = "key1";
      string cKey2 = "key2";
      string cKey3 = "key3";
      string cKey4 = "key4";
      string cKey5 = "key5";
      string cKey6 = "key6";
      DeltaEx val1 = new DeltaEx();
      DeltaEx val2 = new DeltaEx();
      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");
      reg[cKey1] = val1;
      reg[cKey2] = val1;
      reg[cKey3] = val1;
      reg[cKey4] = val1;
      reg[cKey5] = val1;
      reg[cKey6] = val1;
      val2.SetDelta(true);
      reg[cKey1] = val2;
      DeltaEx.ToDeltaCount = 0;
      DeltaEx.ToDataCount = 0;
    }

    // Puts an entry, waits past the receiver's 5s expiration, then sends a delta;
    // the receiver should need a full value, not a delta.
    void DoExpirationWithDelta()
    {
      try
      {
        Serializable.RegisterTypeGeneric(DeltaEx.create, CacheHelper.DCache);
      }
      catch (IllegalStateException)
      {
        // do nothing -- the type was already registered by an earlier step
}

      DeltaEx val1 = new DeltaEx();
      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");
      reg[1] = val1;
      // Sleep 10 seconds to allow expiration of entry in client 2
      Thread.Sleep(10000);
      val1.SetDelta(true);
      reg[1] = val1;
      DeltaEx.ToDeltaCount = 0;
      DeltaEx.ToDataCount = 0;
    }

    // Puts a DeltaTestImpl then updates it with intVar=5 via delta so the CQ
    // listener (which filters on intVar == 5) fires.
    void DoCqWithDelta()
    {
      string cKey1 = "key1";
      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");
      DeltaTestImpl value = new DeltaTestImpl();
      reg[cKey1] = value;
      value.SetIntVar(5);
      value.SetDelta(true);
      reg[cKey1] = value;
    }

    void initializeDeltaClientAD()
    {
      try
      {
        Serializable.RegisterTypeGeneric(DeltaTestAD.Create, CacheHelper.DCache);
      }
      catch (IllegalStateException)
      {
        // do nothing -- the type was already registered by an earlier step
      }
    }

    // AppDomain delta test, client 1 step 1: register interest and do the first put.
    void DoDeltaAD_C1_1()
    {
      DeltaTestAD val = new DeltaTestAD();
      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");
      reg.GetSubscriptionService().RegisterAllKeys();
      Util.Log("clientAD1 put");
      reg[1] = val;
      Util.Log("clientAD1 put done");
    }

    // AppDomain delta test, client 2: fetch (expects DeltaUpdate == 2 after the
    // server-side HasDelta increments), put back, then send PdxDelta updates.
    void DoDeltaAD_C2_1()
    {
      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");

      Util.Log("clientAD2 get");
      DeltaTestAD val = (DeltaTestAD)reg[1];
      Assert.AreEqual(2, val.DeltaUpdate);
      Util.Log("clientAD2 get done");
      reg[1] = val;
      Util.Log("clientAD2 put done");

      javaobject.PdxDelta pd = new javaobject.PdxDelta(1001);
      for (int i = 0; i < 10; i++)
      {
        reg["pdxdelta"] = pd;
      }
    }

    // AppDomain delta test, client 1 step 2: after client 2's put, the locally
    // cached entry should have been delta-updated to 3.
    void DoDeltaAD_C1_afterC2Put()
    {
      Thread.Sleep(15000);
      DeltaTestAD val = null;
      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");
      Util.Log("client fetching entry from local cache");
      val = (DeltaTestAD)reg.GetEntry(1).Value;
      Assert.IsNotNull(val);
      Assert.AreEqual(3, val.DeltaUpdate);
      Util.Log("done");

      System.Threading.Thread.Sleep(5000);
      //Assert.Greater(javaobject.PdxDelta.GotDelta, 7, "this should have recieve delta");
      javaobject.PdxDelta pd = (javaobject.PdxDelta)(reg.GetLocalView()["pdxdelta"]);
      Assert.Greater(pd.Delta, 7, "this should have recieve
delta");
    }

    // Orchestrates the two-client AppDomain delta scenario against one server.
    void runDeltaWithAppdomian(bool cloningenable)
    {
      CacheHelper.SetupJavaServers(true, "cacheserver_with_deltaAD.xml");
      CacheHelper.StartJavaLocator(1, "GFELOC1");
      CacheHelper.StartJavaServerWithLocators(1, "GFECS5", 1);
      string regionName = "DistRegionAck";
      //  if (usePools)
      {
        //CacheHelper.CreateTCRegion_Pool_AD("DistRegionAck", false, false, null, null, CacheHelper.Locators, "__TEST_POOL1__", false, false, false);
        m_client1.Call(CacheHelper.CreateTCRegion_Pool_AD1, regionName, false, true, CacheHelper.Locators, (string)"__TEST_POOL1__", true, cloningenable);
        m_client2.Call(CacheHelper.CreateTCRegion_Pool_AD1, regionName, false, true, CacheHelper.Locators, (string)"__TEST_POOL1__", false, cloningenable);

        //  m_client1.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, false);
        //  m_client1.Call(createRegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__");
      }

      m_client1.Call(initializeDeltaClientAD);
      m_client2.Call(initializeDeltaClientAD);

      m_client1.Call(DoDeltaAD_C1_1);
      m_client2.Call(DoDeltaAD_C2_1);
      m_client1.Call(DoDeltaAD_C1_afterC2Put);
      m_client1.Call(Close);
      m_client2.Call(Close);
      CacheHelper.StopJavaServer(1);
      CacheHelper.StopJavaLocator(1);
      CacheHelper.ClearEndpoints();
      CacheHelper.ClearLocators();
    }

    // Single-client put/delta scenario.
    void runPutWithDelta()
    {
      CacheHelper.SetupJavaServers(true, "cacheserver_with_delta.xml");
      CacheHelper.StartJavaLocator(1, "GFELOC1");
      CacheHelper.StartJavaServerWithLocators(1, "GFECS5", 1);

      m_client1.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, false);
      m_client1.Call(createRegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__");
      m_client1.Call(DoPutWithDelta);
      m_client1.Call(Close);

      CacheHelper.StopJavaServer(1);
      CacheHelper.StopJavaLocator(1);
      CacheHelper.ClearEndpoints();
      CacheHelper.ClearLocators();
    }

    // Single-client put/contains/remove delta scenario.
    void runPut_Contains_Remove_WithDelta()
    {
      CacheHelper.SetupJavaServers(true, "cacheserver_with_delta.xml");
      CacheHelper.StartJavaLocator(1, "GFELOC1");
      CacheHelper.StartJavaServerWithLocators(1,
"GFECS5", 1);

      m_client1.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, false);
      m_client1.Call(createRegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__");
      m_client1.Call(Do_Put_Contains_Remove_WithDelta);
      m_client1.Call(Close);

      CacheHelper.StopJavaServer(1);
      CacheHelper.StopJavaLocator(1);
      CacheHelper.ClearEndpoints();
      CacheHelper.ClearLocators();
    }

    // Receiving-client setup: register DeltaEx, subscribe to all keys, and attach a
    // cache listener so notification delivery can be verified.
    void registerClassCl2()
    {
      try
      {
        Serializable.RegisterTypeGeneric(DeltaEx.create, CacheHelper.DCache);
      }
      catch (IllegalStateException)
      {
        // do nothing -- the type was already registered by an earlier step
      }

      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");

      reg.GetSubscriptionService().RegisterRegex(".*");
      AttributesMutator<object, object> attrMutator = reg.AttributesMutator;
      attrMutator.SetCacheListener(new SimpleCacheListener<object, object>());
    }

    // Receiving-client setup for DeltaTestImpl-based scenarios.
    void registerClassDeltaTestImpl()
    {
      try
      {
        Serializable.RegisterTypeGeneric(DeltaTestImpl.CreateDeserializable, CacheHelper.DCache);
      }
      catch (IllegalStateException)
      {
        // ARB: ignore exception caused by type reregistration.
      }
      DeltaTestImpl.ResetDataCount();
      Thread.Sleep(2000);

      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");
      try
      {
        reg.GetSubscriptionService().RegisterRegex(".*");
      }
      catch (Exception)
      {
        // ARB: ignore regex exception for missing notification channel.
}
    }

    // Registers the delta-counting CQ (select intVar > 4) on the test pool.
    void registerCq()
    {
      Pool thePool = CacheHelper.DCache.GetPoolManager().Find("__TEST_POOL1__");
      QueryService cqService = null;
      cqService = thePool.GetQueryService();
      CqAttributesFactory<object, DeltaTestImpl> attrFac = new CqAttributesFactory<object, DeltaTestImpl>();
      myCqListener = new CqDeltaListener<object, DeltaTestImpl>();
      attrFac.AddCqListener(myCqListener);
      CqAttributes<object, DeltaTestImpl> cqAttr = attrFac.Create();
      CqQuery<object, DeltaTestImpl> theQuery = cqService.NewCq("select * from /DistRegionAck d where d.intVar > 4", cqAttr, false);
      theQuery.Execute();
    }

    // Receiving-client assertions for runNotificationWithDelta.
    void VerifyDeltaCount()
    {
      Thread.Sleep(1000);
      Util.Log("Total Data count" + DeltaEx.FromDataCount);
      Util.Log("Total Data count" + DeltaEx.FromDeltaCount);
      if (DeltaEx.FromDataCount != 3)
        Assert.Fail("Count of fromData called should be 3 ");
      if (DeltaEx.FromDeltaCount != 2)
        Assert.Fail("Count of fromDelta called should be 2 ");
      if (SimpleCacheListener<object, object>.isSuccess == false)
        Assert.Fail("Listener failure");
      SimpleCacheListener<object, object>.isSuccess = false;
      if (DeltaEx.CloneCount != 2)
        Assert.Fail("Clone count should be 2, is " + DeltaEx.CloneCount);

      DeltaEx.FromDataCount = 0;
      DeltaEx.FromDeltaCount = 0;
      DeltaEx.CloneCount = 0;
    }

    // Receiving-client assertions for runNotificationWithDefaultCloning.
    void VerifyCloning()
    {
      Thread.Sleep(1000);
      string cKey = m_keys[0];
      IRegion<object, object> reg = CacheHelper.GetRegion<object, object>("DistRegionAck");

      DeltaTestImpl val = reg[cKey] as DeltaTestImpl;
      if (val.GetIntVar() != 2)
        Assert.Fail("Int value after cloning should be 2, is " + val.GetIntVar());
      if (DeltaTestImpl.GetFromDataCount() != 2)
        Assert.Fail("After cloning, fromDataCount should have been 2, is " + DeltaTestImpl.GetFromDataCount());
      if (DeltaTestImpl.GetToDataCount() != 1)
        Assert.Fail("After cloning, toDataCount should have been 1, is " + DeltaTestImpl.GetToDataCount());
      System.Threading.Thread.Sleep(5000);
      //Assert.Greater(javaobject.PdxDelta.GotDelta, 7, "this should have recieve delta");
      javaobject.PdxDelta pd =
(javaobject.PdxDelta)(reg.GetLocalView()["pdxdelta"]); Assert.Greater(pd.Delta, 7, "this should have recieve delta"); } void VerifyDeltaCountLRU() { Thread.Sleep(1000); if (DeltaEx.FromDataCount != 8) { Util.Log("DeltaEx.FromDataCount = " + DeltaEx.FromDataCount); Util.Log("DeltaEx.FromDeltaCount = " + DeltaEx.FromDeltaCount); Assert.Fail("Count should have been 8. 6 for common put and two when pulled from database and deserialized"); } if (DeltaEx.FromDeltaCount != 1) { Util.Log("DeltaEx.FromDeltaCount = " + DeltaEx.FromDeltaCount); Assert.Fail("Count should have been 1"); } DeltaEx.FromDataCount = 0; DeltaEx.FromDeltaCount = 0; } void VerifyCqDeltaCount() { // Wait for Cq event processing in listener Thread.Sleep(1000); if (myCqListener.GetDeltaCount() != 1) { Assert.Fail("Delta from CQ event does not have expected value"); } if (myCqListener.GetValueCount() != 1) { Assert.Fail("Value from CQ event is incorrect"); } } void VerifyExpirationDeltaCount() { Thread.Sleep(1000); if (DeltaEx.FromDataCount != 2) Assert.Fail("Count should have been 2."); if (DeltaEx.FromDeltaCount != 0) Assert.Fail("Count should have been 0."); DeltaEx.FromDataCount = 0; DeltaEx.FromDeltaCount = 0; } void runNotificationWithDelta() { CacheHelper.SetupJavaServers(true, "cacheserver_with_delta.xml"); CacheHelper.StartJavaLocator(1, "GFELOC1"); CacheHelper.StartJavaServerWithLocators(1, "GFECS5", 1); m_client1.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, true); m_client1.Call(createRegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__", true); m_client2.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, true); m_client2.Call(createRegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__", true); m_client2.Call(registerClassCl2); m_client1.Call(DoNotificationWithDelta); m_client2.Call(VerifyDeltaCount); m_client1.Call(Close); m_client2.Call(Close); CacheHelper.StopJavaServer(1); CacheHelper.StopJavaLocator(1); CacheHelper.ClearEndpoints(); 
CacheHelper.ClearLocators(); } void runNotificationWithDefaultCloning() { CacheHelper.SetupJavaServers(true, "cacheserver_with_delta_test_impl.xml"); CacheHelper.StartJavaLocator(1, "GFELOC1"); CacheHelper.StartJavaServerWithLocators(1, "GFECS5", 1); m_client1.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, true); m_client1.Call(createRegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__", true); m_client2.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, true); m_client2.Call(createRegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__", true); m_client1.Call(registerClassDeltaTestImpl); m_client2.Call(registerClassDeltaTestImpl); m_client1.Call(DoNotificationWithDefaultCloning); m_client2.Call(VerifyCloning); m_client1.Call(Close); m_client2.Call(Close); CacheHelper.StopJavaServer(1); CacheHelper.StopJavaLocator(1); CacheHelper.ClearEndpoints(); CacheHelper.ClearLocators(); } void runNotificationWithDeltaWithOverFlow() { CacheHelper.SetupJavaServers(true, "cacheserver_with_delta.xml"); CacheHelper.StartJavaLocator(1, "GFELOC1"); CacheHelper.StartJavaServerWithLocators(1, "GFECS1", 1); m_client1.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, true); m_client1.Call(createLRURegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__"); m_client2.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, true); m_client2.Call(createLRURegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__"); m_client2.Call(registerClassCl2); m_client1.Call(DoNotificationWithDeltaLRU); m_client2.Call(VerifyDeltaCountLRU); m_client1.Call(Close); m_client2.Call(Close); CacheHelper.StopJavaServer(1); CacheHelper.StopJavaLocator(1); CacheHelper.ClearEndpoints(); CacheHelper.ClearLocators(); } void runCqWithDelta() { CacheHelper.SetupJavaServers(true, "cacheserver_with_delta_test_impl.xml"); CacheHelper.StartJavaLocator(1, "GFELOC1"); CacheHelper.StartJavaServerWithLocators(1, "GFECS5", 1); 
m_client1.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, true); m_client1.Call(createRegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__"); m_client2.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, true); m_client2.Call(createRegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__"); m_client1.Call(registerClassDeltaTestImpl); m_client2.Call(registerClassDeltaTestImpl); m_client2.Call(registerCq); m_client1.Call(DoCqWithDelta); m_client2.Call(VerifyCqDeltaCount); m_client1.Call(Close); m_client2.Call(Close); CacheHelper.StopJavaServer(1); CacheHelper.StopJavaLocator(1); CacheHelper.ClearEndpoints(); CacheHelper.ClearLocators(); } void runExpirationWithDelta() { CacheHelper.SetupJavaServers(true, "cacheserver_with_delta.xml"); CacheHelper.StartJavaLocator(1, "GFELOC1"); CacheHelper.StartJavaServerWithLocators(1, "GFECS5", 1); m_client1.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, true); m_client1.Call(createRegionAndAttachPool, "DistRegionAck", "__TEST_POOL1__"); m_client2.Call(createPool, "__TEST_POOL1__", CacheHelper.Locators, (string)null, 0, true); m_client2.Call(createExpirationRegion, "DistRegionAck", "__TEST_POOL1__"); m_client2.Call(registerClassCl2); m_client1.Call(DoExpirationWithDelta); m_client2.Call(VerifyExpirationDeltaCount); m_client1.Call(Close); m_client2.Call(Close); CacheHelper.StopJavaServer(1); CacheHelper.StopJavaLocator(1); CacheHelper.ClearEndpoints(); CacheHelper.ClearLocators(); } //#region Tests [Test] public void PutWithDeltaAD() { runDeltaWithAppdomian(false); runDeltaWithAppdomian(true);//cloning enable } [Test] public void PutWithDelta() { runPutWithDelta(); } [Test] public void Put_Contains_Remove_WithDelta() { runPut_Contains_Remove_WithDelta(); } [Test] public void NotificationWithDelta() { runNotificationWithDelta(); } [Test] public void NotificationWithDefaultCloning() { runNotificationWithDefaultCloning(); } [Test] public void 
NotificationWithDeltaWithOverFlow() { runNotificationWithDeltaWithOverFlow(); } [Test] public void CqWithDelta() { runCqWithDelta(); } [Test] public void ExpirationWithDelta() { runExpirationWithDelta(); } //#endregion } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Text;

namespace System.Linq.Expressions
{
    /// <summary>
    /// Represents indexing a property or array.
    /// </summary>
    [DebuggerTypeProxy(typeof(Expression.IndexExpressionProxy))]
    public sealed class IndexExpression : Expression, IArgumentProvider
    {
        private readonly Expression _instance;
        private readonly PropertyInfo _indexer;
        private IList<Expression> _arguments;

        internal IndexExpression(
            Expression instance,
            PropertyInfo indexer,
            IList<Expression> arguments)
        {
            // A null indexer means this node is an array access; the factories
            // guarantee the instance is an array whose rank equals the number
            // of index arguments.
            if (indexer == null)
            {
                Debug.Assert(instance != null && instance.Type.IsArray);
                Debug.Assert(instance.Type.GetArrayRank() == arguments.Count);
            }

            _instance = instance;
            _indexer = indexer;
            _arguments = arguments;
        }

        /// <summary>
        /// Returns the node type of this <see cref="Expression" />. (Inherited from <see cref="Expression" />.)
        /// </summary>
        /// <returns>The <see cref="ExpressionType"/> that represents this expression.</returns>
        public sealed override ExpressionType NodeType
        {
            get { return ExpressionType.Index; }
        }

        /// <summary>
        /// Gets the static type of the expression that this <see cref="Expression" /> represents. (Inherited from <see cref="Expression"/>.)
        /// </summary>
        /// <returns>The <see cref="Type"/> that represents the static type of the expression.</returns>
        public sealed override Type Type
        {
            get
            {
                // Indexed property: the property's type; array access: the
                // array's element type.
                if (_indexer != null)
                {
                    return _indexer.PropertyType;
                }
                return _instance.Type.GetElementType();
            }
        }

        /// <summary>
        /// An object to index.
        /// </summary>
        public Expression Object
        {
            get { return _instance; }
        }

        /// <summary>
        /// Gets the <see cref="PropertyInfo"/> for the property if the expression represents an indexed property, returns null otherwise.
        /// </summary>
        public PropertyInfo Indexer
        {
            get { return _indexer; }
        }

        /// <summary>
        /// Gets the arguments to be used to index the property or array.
        /// </summary>
        public ReadOnlyCollection<Expression> Arguments
        {
            get { return ReturnReadOnly(ref _arguments); }
        }

        /// <summary>
        /// Creates a new expression that is like this one, but using the
        /// supplied children. If all of the children are the same, it will
        /// return this expression.
        /// </summary>
        /// <param name="object">The <see cref="Object" /> property of the result.</param>
        /// <param name="arguments">The <see cref="Arguments" /> property of the result.</param>
        /// <returns>This expression if no children changed, or an expression with the updated children.</returns>
        public IndexExpression Update(Expression @object, IEnumerable<Expression> arguments)
        {
            // NOTE: arguments is compared by reference against the cached
            // read-only collection; any freshly built sequence forces a rebuild.
            if (@object == Object && arguments == Arguments)
            {
                return this;
            }
            return Expression.MakeIndex(@object, Indexer, arguments);
        }

        /// <summary>
        /// Gets the index argument at the given position (IArgumentProvider).
        /// </summary>
        public Expression GetArgument(int index)
        {
            return _arguments[index];
        }

        /// <summary>
        /// Gets the number of index arguments (IArgumentProvider).
        /// </summary>
        public int ArgumentCount
        {
            get { return _arguments.Count; }
        }

        /// <summary>
        /// Dispatches to the specific visit method for this node type.
        /// </summary>
        protected internal override Expression Accept(ExpressionVisitor visitor)
        {
            return visitor.VisitIndex(this);
        }

        /// <summary>
        /// Rebuilds this node with a new instance and (optionally) new
        /// arguments; used by the interpreter/compiler rewriting passes.
        /// </summary>
        internal Expression Rewrite(Expression instance, Expression[] arguments)
        {
            Debug.Assert(instance != null);
            Debug.Assert(arguments == null || arguments.Length == _arguments.Count);

            return Expression.MakeIndex(instance, _indexer, arguments ?? _arguments);
        }
    }

    public partial class Expression
    {
        /// <summary>
        /// Creates an <see cref="IndexExpression"/> that represents accessing an indexed property in an object.
        /// </summary>
        /// <param name="instance">The object to which the property belongs. Should be null if the property is static(shared).</param>
        /// <param name="indexer">An <see cref="Expression"/> representing the property to index.</param>
        /// <param name="arguments">An IEnumerable{Expression} containing the arguments to be used to index the property.</param>
        /// <returns>The created <see cref="IndexExpression"/>.</returns>
        public static IndexExpression MakeIndex(Expression instance, PropertyInfo indexer, IEnumerable<Expression> arguments)
        {
            if (indexer != null)
            {
                return Property(instance, indexer, arguments);
            }
            else
            {
                return ArrayAccess(instance, arguments);
            }
        }

        #region ArrayAccess

        /// <summary>
        /// Creates an <see cref="IndexExpression"></see> to access an array.
        /// </summary>
        /// <param name="array">An expression representing the array to index.</param>
        /// <param name="indexes">An array containing expressions used to index the array.</param>
        /// <remarks>The expression representing the array can be obtained by using the MakeMemberAccess method,
        /// or through NewArrayBounds or NewArrayInit.</remarks>
        /// <returns>The created <see cref="IndexExpression"/>.</returns>
        public static IndexExpression ArrayAccess(Expression array, params Expression[] indexes)
        {
            return ArrayAccess(array, (IEnumerable<Expression>)indexes);
        }

        /// <summary>
        /// Creates an <see cref="IndexExpression"></see> to access an array.
        /// </summary>
        /// <param name="array">An expression representing the array to index.</param>
        /// <param name="indexes">An <see cref="IEnumerable{Expression}"/> containing expressions used to index the array.</param>
        /// <remarks>The expression representing the array can be obtained by using the MakeMemberAccess method,
        /// or through NewArrayBounds or NewArrayInit.</remarks>
        /// <returns>The created <see cref="IndexExpression"/>.</returns>
        public static IndexExpression ArrayAccess(Expression array, IEnumerable<Expression> indexes)
        {
            RequiresCanRead(array, nameof(array));

            Type arrayType = array.Type;
            if (!arrayType.IsArray)
            {
                throw Error.ArgumentMustBeArray(nameof(array));
            }

            var indexList = indexes.ToReadOnly();
            if (arrayType.GetArrayRank() != indexList.Count)
            {
                throw Error.IncorrectNumberOfIndexes();
            }

            // Every index of a multi-dimensional array access must be an int.
            foreach (Expression e in indexList)
            {
                RequiresCanRead(e, nameof(indexes));
                if (e.Type != typeof(int))
                {
                    throw Error.ArgumentMustBeArrayIndexType(nameof(indexes));
                }
            }

            return new IndexExpression(array, null, indexList);
        }

        #endregion

        #region Property

        /// <summary>
        /// Creates an <see cref="IndexExpression"/> representing the access to an indexed property.
        /// </summary>
        /// <param name="instance">The object to which the property belongs. If the property is static/shared, it must be null.</param>
        /// <param name="propertyName">The name of the indexer.</param>
        /// <param name="arguments">An array of <see cref="Expression"/> objects that are used to index the property.</param>
        /// <returns>The created <see cref="IndexExpression"/>.</returns>
        public static IndexExpression Property(Expression instance, string propertyName, params Expression[] arguments)
        {
            RequiresCanRead(instance, nameof(instance));
            ContractUtils.RequiresNotNull(propertyName, nameof(propertyName));
            PropertyInfo pi = FindInstanceProperty(instance.Type, propertyName, arguments);
            return Property(instance, pi, arguments);
        }

        #region methods for finding a PropertyInfo by its name

        /// <summary>
        /// The method finds the instance property with the specified name in a type. The property's type signature needs to be compatible with
        /// the arguments if it is a indexer. If the arguments is null or empty, we get a normal property.
        /// </summary>
        private static PropertyInfo FindInstanceProperty(Type type, string propertyName, Expression[] arguments)
        {
            // bind to public names first
            BindingFlags flags = BindingFlags.Instance | BindingFlags.Public | BindingFlags.IgnoreCase | BindingFlags.FlattenHierarchy;
            PropertyInfo pi = FindProperty(type, propertyName, arguments, flags);
            if (pi == null)
            {
                flags = BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.IgnoreCase | BindingFlags.FlattenHierarchy;
                pi = FindProperty(type, propertyName, arguments, flags);
            }
            if (pi == null)
            {
                if (arguments == null || arguments.Length == 0)
                {
                    throw Error.InstancePropertyWithoutParameterNotDefinedForType(propertyName, type);
                }
                else
                {
                    throw Error.InstancePropertyWithSpecifiedParametersNotDefinedForType(propertyName, GetArgTypesString(arguments), type);
                }
            }
            return pi;
        }

        /// <summary>
        /// Formats the argument types as "(T1, T2, ...)" for error messages.
        /// </summary>
        private static string GetArgTypesString(Expression[] arguments)
        {
            StringBuilder argTypesStr = new StringBuilder();
            argTypesStr.Append('(');
            for (int i = 0; i < arguments.Length; i++)
            {
                if (i != 0)
                {
                    argTypesStr.Append(", ");
                }
                argTypesStr.Append(arguments[i].Type.Name);
            }
            argTypesStr.Append(')');
            return argTypesStr.ToString();
        }

        /// <summary>
        /// Finds the single property on <paramref name="type"/> whose name
        /// matches case-insensitively and whose accessor signature is
        /// compatible with <paramref name="arguments"/>; null when none match,
        /// throws when the match is ambiguous.
        /// </summary>
        private static PropertyInfo FindProperty(Type type, string propertyName, Expression[] arguments, BindingFlags flags)
        {
            // Preserves the original culture-sensitive, case-insensitive name
            // comparison (mirrors the BindingFlags.IgnoreCase lookup).
            PropertyInfo[] members = type.GetProperties(flags)
                .Where(x => x.Name.Equals(propertyName, StringComparison.CurrentCultureIgnoreCase))
                .ToArray();
            if (members.Length == 0)
                return null;

            PropertyInfo pi;
            int count = FindBestProperty(members, arguments, out pi);

            if (count == 0)
                return null;
            if (count > 1)
                throw Error.PropertyWithMoreThanOneMatch(propertyName, type);
            return pi;
        }

        /// <summary>
        /// Counts properties compatible with the arguments; outputs the first
        /// match so a unique winner can be returned by the caller.
        /// </summary>
        private static int FindBestProperty(IEnumerable<PropertyInfo> properties, Expression[] args, out PropertyInfo property)
        {
            int count = 0;
            property = null;
            foreach (PropertyInfo pi in properties)
            {
                if (pi != null && IsCompatible(pi, args))
                {
                    if (property == null)
                    {
                        property = pi;
                        count = 1;
                    }
                    else
                    {
                        count++;
                    }
                }
            }
            return count;
        }

        /// <summary>
        /// Checks whether the property's accessor parameter list can accept
        /// the given argument expressions (reference-assignable, same arity).
        /// </summary>
        private static bool IsCompatible(PropertyInfo pi, Expression[] args)
        {
            MethodInfo mi = pi.GetGetMethod(true);
            ParameterInfo[] parms;
            if (mi != null)
            {
                parms = mi.GetParametersCached();
            }
            else
            {
                mi = pi.GetSetMethod(true);
                // Fix: guard before dereferencing — a property with neither
                // accessor would otherwise NRE here (the original checked
                // mi == null only after calling GetParametersCached()).
                if (mi == null)
                {
                    return false;
                }
                // The setter has an additional parameter for the value to set,
                // need to remove the last type to match the arguments.
                parms = mi.GetParametersCached().RemoveLast();
            }

            if (args == null)
            {
                return parms.Length == 0;
            }
            if (parms.Length != args.Length)
                return false;
            for (int i = 0; i < args.Length; i++)
            {
                if (args[i] == null)
                    return false;
                if (!TypeUtils.AreReferenceAssignable(parms[i].ParameterType, args[i].Type))
                {
                    return false;
                }
            }
            return true;
        }

        #endregion

        /// <summary>
        /// Creates an <see cref="IndexExpression"/> representing the access to an indexed property.
        /// </summary>
        /// <param name="instance">The object to which the property belongs. If the property is static/shared, it must be null.</param>
        /// <param name="indexer">The <see cref="PropertyInfo"/> that represents the property to index.</param>
        /// <param name="arguments">An array of <see cref="Expression"/> objects that are used to index the property.</param>
        /// <returns>The created <see cref="IndexExpression"/>.</returns>
        public static IndexExpression Property(Expression instance, PropertyInfo indexer, params Expression[] arguments)
        {
            return Property(instance, indexer, (IEnumerable<Expression>)arguments);
        }

        /// <summary>
        /// Creates an <see cref="IndexExpression"/> representing the access to an indexed property.
        /// </summary>
        /// <param name="instance">The object to which the property belongs. If the property is static/shared, it must be null.</param>
        /// <param name="indexer">The <see cref="PropertyInfo"/> that represents the property to index.</param>
        /// <param name="arguments">An <see cref="IEnumerable{T}"/> of <see cref="Expression"/> objects that are used to index the property.</param>
        /// <returns>The created <see cref="IndexExpression"/>.</returns>
        public static IndexExpression Property(Expression instance, PropertyInfo indexer, IEnumerable<Expression> arguments)
        {
            var argList = arguments.ToReadOnly();
            ValidateIndexedProperty(instance, indexer, ref argList);
            return new IndexExpression(instance, indexer, argList);
        }

        // CTS places no restrictions on properties (see ECMA-335 8.11.3),
        // so we validate that the property conforms to CLS rules here.
        //
        // Does reflection help us out at all? Expression.Property skips all of
        // these checks, so either it needs more checks or we need less here.
        private static void ValidateIndexedProperty(Expression instance, PropertyInfo property, ref ReadOnlyCollection<Expression> argList)
        {
            // If both getter and setter specified, all their parameter types
            // should match, with exception of the last setter parameter which
            // should match the type returned by the get method.
            // Accessor parameters cannot be ByRef.

            ContractUtils.RequiresNotNull(property, nameof(property));
            if (property.PropertyType.IsByRef)
                throw Error.PropertyCannotHaveRefType(nameof(property));
            if (property.PropertyType == typeof(void))
                throw Error.PropertyTypeCannotBeVoid(nameof(property));

            ParameterInfo[] getParameters = null;
            MethodInfo getter = property.GetGetMethod(true);
            if (getter != null)
            {
                getParameters = getter.GetParametersCached();
                ValidateAccessor(instance, getter, getParameters, ref argList);
            }

            MethodInfo setter = property.GetSetMethod(true);
            if (setter != null)
            {
                ParameterInfo[] setParameters = setter.GetParametersCached();
                if (setParameters.Length == 0)
                    throw Error.SetterHasNoParams(nameof(property));

                // valueType is the type of the value passed to the setter (last parameter)
                Type valueType = setParameters[setParameters.Length - 1].ParameterType;
                if (valueType.IsByRef)
                    throw Error.PropertyCannotHaveRefType(nameof(property));
                if (setter.ReturnType != typeof(void))
                    throw Error.SetterMustBeVoid(nameof(property));
                if (property.PropertyType != valueType)
                    throw Error.PropertyTypeMustMatchSetter(nameof(property));

                if (getter != null)
                {
                    if (getter.IsStatic ^ setter.IsStatic)
                        throw Error.BothAccessorsMustBeStatic(nameof(property));
                    if (getParameters.Length != setParameters.Length - 1)
                        throw Error.IndexesOfSetGetMustMatch(nameof(property));

                    for (int i = 0; i < getParameters.Length; i++)
                    {
                        if (getParameters[i].ParameterType != setParameters[i].ParameterType)
                            throw Error.IndexesOfSetGetMustMatch(nameof(property));
                    }
                }
                else
                {
                    ValidateAccessor(instance, setter, setParameters.RemoveLast(), ref argList);
                }
            }

            if (getter == null && setter == null)
            {
                throw Error.PropertyDoesNotHaveAccessor(property, nameof(property));
            }
        }

        /// <summary>
        /// Validates a single accessor against the instance and index
        /// arguments (static/instance agreement, no varargs, argument types).
        /// </summary>
        private static void ValidateAccessor(Expression instance, MethodInfo method, ParameterInfo[] indexes, ref ReadOnlyCollection<Expression> arguments)
        {
            ContractUtils.RequiresNotNull(arguments, nameof(arguments));

            ValidateMethodInfo(method, nameof(method));
            if ((method.CallingConvention & CallingConventions.VarArgs) != 0)
                throw Error.AccessorsCannotHaveVarArgs(nameof(method));
            if (method.IsStatic)
            {
                if (instance != null)
                    throw Error.OnlyStaticMethodsHaveNullInstance();
            }
            else
            {
                if (instance == null)
                    throw Error.OnlyStaticMethodsHaveNullInstance();
                RequiresCanRead(instance, nameof(instance));
                ValidateCallInstanceType(instance.Type, method);
            }

            ValidateAccessorArgumentTypes(method, indexes, ref arguments);
        }

        /// <summary>
        /// Checks arity and assignability of each index argument; arguments
        /// that need quoting are replaced, with the collection rebuilt lazily
        /// (copy-on-write) only when something changed.
        /// </summary>
        private static void ValidateAccessorArgumentTypes(MethodInfo method, ParameterInfo[] indexes, ref ReadOnlyCollection<Expression> arguments)
        {
            if (indexes.Length > 0)
            {
                if (indexes.Length != arguments.Count)
                {
                    throw Error.IncorrectNumberOfMethodCallArguments(method);
                }
                Expression[] newArgs = null;
                for (int i = 0, n = indexes.Length; i < n; i++)
                {
                    Expression arg = arguments[i];
                    ParameterInfo pi = indexes[i];
                    RequiresCanRead(arg, nameof(arguments));

                    Type pType = pi.ParameterType;
                    if (pType.IsByRef)
                        throw Error.AccessorsCannotHaveByRefArgs($"{nameof(indexes)}[{i}]");
                    TypeUtils.ValidateType(pType, $"{nameof(indexes)}[{i}]");

                    if (!TypeUtils.AreReferenceAssignable(pType, arg.Type))
                    {
                        if (!TryQuote(pType, ref arg))
                        {
                            throw Error.ExpressionTypeDoesNotMatchMethodParameter(arg.Type, pType, method);
                        }
                    }
                    if (newArgs == null && arg != arguments[i])
                    {
                        newArgs = new Expression[arguments.Count];
                        for (int j = 0; j < i; j++)
                        {
                            newArgs[j] = arguments[j];
                        }
                    }
                    if (newArgs != null)
                    {
                        newArgs[i] = arg;
                    }
                }
                if (newArgs != null)
                {
                    arguments = new TrueReadOnlyCollection<Expression>(newArgs);
                }
            }
            else if (arguments.Count > 0)
            {
                throw Error.IncorrectNumberOfMethodCallArguments(method);
            }
        }

        #endregion
    }
}
/***************************************************************************
 *  Feed.cs
 *
 *  Copyright (C) 2007 Michael C. Urbanski
 *  Written by Mike Urbanski <michael.c.urbanski@gmail.com>
 ****************************************************************************/

/*  THIS FILE IS LICENSED UNDER THE MIT LICENSE AS OUTLINED IMMEDIATELY BELOW:
 *
 *  Permission is hereby granted, free of charge, to any person obtaining a
 *  copy of this software and associated documentation files (the "Software"),
 *  to deal in the Software without restriction, including without limitation
 *  the rights to use, copy, modify, merge, publish, distribute, sublicense,
 *  and/or sell copies of the Software, and to permit persons to whom the
 *  Software is furnished to do so, subject to the following conditions:
 *
 *  The above copyright notice and this permission notice shall be included in
 *  all copies or substantial portions of the Software.
 *
 *  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 *  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 *  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 *  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 *  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 *  FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 *  DEALINGS IN THE SOFTWARE.
 */

using System;
using System.IO;
using System.Net;
using System.Threading;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;

using Mono.Unix;

using Hyena;
using Hyena.Data.Sqlite;

using Migo.Net;
using Migo.TaskCore;
using Migo.DownloadCore;

namespace Migo.Syndication
{
    // Auto-download policy for a feed's enclosures.
    public enum FeedAutoDownload : int
    {
        All = 0,
        One = 1,
        None = 2
    }

    // TODO remove this, way too redundant with DownloadStatus
    public enum PodcastFeedActivity : int
    {
        Updating = 0,
        UpdatePending = 1,
        UpdateFailed = 2,
        ItemsDownloading = 4,
        ItemsQueued = 5,
        None = 6
    }

    // Sqlite-backed provider for Feed rows; owns schema creation/migration
    // for the PodcastSyndications table.
    public class FeedProvider : MigoModelProvider<Feed>
    {
        public FeedProvider (HyenaSqliteConnection connection) : base (connection, "PodcastSyndications")
        {
        }

        protected override void CreateTable ()
        {
            base.CreateTable ();
            CreateIndex ("PodcastSyndicationsIndex", "IsSubscribed, Title");
        }

        // Bump this when MigrateTable gains a new step.
        protected override int ModelVersion {
            get { return 4; }
        }

        // Incremental, idempotent migrations from older schema versions.
        protected override void MigrateTable (int old_version)
        {
            CheckTable ();
            if (old_version < 2) {
                Connection.Execute (String.Format ("UPDATE {0} SET IsSubscribed=1", TableName));
            }
            if (old_version < 3) {
                CreateIndex ("PodcastSyndicationsIndex", "IsSubscribed, Title");
            }
            if (old_version < 4) {
                Connection.Execute (String.Format ("UPDATE {0} SET MaxItemCount=0 WHERE MaxItemCount=200", TableName));
            }
        }
    }

    // A podcast feed persisted via FeedProvider. Columns are mapped with
    // [DatabaseColumn]; items are fetched lazily through FeedItem.Provider.
    public class Feed : MigoItem<Feed>
    {
        private static FeedProvider provider;
        public static FeedProvider Provider {
            get { return provider; }
        }

        // Must be called once (after FeedsManager is up) before using Provider.
        public static void Init () {
            provider = new FeedProvider (FeedsManager.Instance.Connection);
        }

        public static bool Exists (string url)
        {
            return Provider.Connection.Query<int> (String.Format ("select count(*) from {0} where url = ?", Provider.TableName), url) != 0;
        }

        //private bool canceled;
        //private bool deleted;
        //private bool updating;
        //private ManualResetEvent updatingHandle = new ManualResetEvent (true);

        // Guards Delete/MarkAllItemsRead; other mutators are unsynchronized.
        private readonly object sync = new object ();

        private string copyright;
        private string description;
        private string image_url;
        private int update_period_minutes = 24 * 60;
        private string language;
        private DateTime last_build_date = DateTime.MinValue;
        private FeedDownloadError lastDownloadError;
        private DateTime last_download_time = DateTime.MinValue;
        private string link;
        //private string local_enclosure_path;
        private long dbid = -1;
        private long maxItemCount = 0;
        private DateTime pubDate;
        private FeedSyncSetting syncSetting;
        private string title;
        private string url;
        private string keywords, category;

        #region Database-bound Properties

        [DatabaseColumn ("FeedID", Constraints = DatabaseColumnConstraints.PrimaryKey)]
        public override long DbId {
            get { return dbid; }
            protected set { dbid = value; }
        }

        public static string UnknownPodcastTitle = Catalog.GetString ("Unknown Podcast");

        // Falls back to a localized placeholder when the feed had no title.
        [DatabaseColumn]
        public string Title {
            get { return title ?? UnknownPodcastTitle; }
            set { title = value; }
        }

        [DatabaseColumn]
        public string Description {
            get { return description; }
            set { description = value; }
        }

        [DatabaseColumn]
        public string Url {
            get { return url; }
            set { url = value; }
        }

        [DatabaseColumn]
        public string Keywords {
            get { return keywords; }
            set { keywords = value; }
        }

        [DatabaseColumn]
        public string Category {
            get { return category; }
            set { category = value; }
        }

        [DatabaseColumn]
        public string Copyright {
            get { return copyright; }
            set { copyright = value; }
        }

        [DatabaseColumn]
        public string ImageUrl {
            get { return image_url; }
            set { image_url = value; }
        }

        [DatabaseColumn]
        public int UpdatePeriodMinutes {
            get { return update_period_minutes; }
            set { update_period_minutes = value; }
        }

        [DatabaseColumn]
        public string Language {
            get { return language; }
            set { language = value; }
        }

        [DatabaseColumn]
        public FeedDownloadError LastDownloadError {
            get { return lastDownloadError; }
            set { lastDownloadError = value; }
        }

        [DatabaseColumn]
        public DateTime LastDownloadTime {
            get { return last_download_time; }
            set { last_download_time = value; }
        }

        [DatabaseColumn]
        public string Link {
            get { return link; }
            set { link = value; }
        }

        // Derived from the (filename-escaped) title; intentionally no longer
        // a database column.
        //[DatabaseColumn]
        public string LocalEnclosurePath {
            get {
                string escaped = Hyena.StringUtil.EscapeFilename (Title);
                return Path.Combine (FeedsManager.Instance.PodcastStorageDirectory, escaped);
            }
            //set { local_enclosure_path = value; }
        }

        // 0 means "unlimited"; see CheckForItemsToArchive.
        [DatabaseColumn]
        public long MaxItemCount {
            get { return maxItemCount; }
            set { maxItemCount = value; }
        }

        [DatabaseColumn]
        public DateTime PubDate {
            get { return pubDate; }
            set { pubDate = value; }
        }

        [DatabaseColumn]
        public DateTime LastBuildDate {
            get { return last_build_date; }
            set { last_build_date = value; }
        }

        /*private DateTime last_downloaded;
        [DatabaseColumn]
        public DateTime LastDownloaded {
            get { return last_downloaded; }
            set { last_downloaded = value; }
        }*/

        [DatabaseColumn]
        public FeedSyncSetting SyncSetting {
            get { return syncSetting; }
            set { syncSetting = value; }
        }

        // NOTE(review): the backing fields below carry the [DatabaseColumn]
        // attribute directly (not the public property), so the ORM maps the
        // field while the property is the API surface.
        [DatabaseColumn]
        protected DateTime last_auto_download = DateTime.MinValue;
        public DateTime LastAutoDownload {
            get { return last_auto_download; }
            set { last_auto_download = value; }
        }

        [DatabaseColumn("AutoDownload")]
        protected FeedAutoDownload auto_download = FeedAutoDownload.None;
        public FeedAutoDownload AutoDownload {
            get { return auto_download; }
            set {
                if (value == auto_download)
                    return;
                auto_download = value;
                // Policy change may make existing items eligible immediately.
                CheckForItemsToDownload ();
            }
        }

        [DatabaseColumn("DownloadStatus")]
        private FeedDownloadStatus download_status;
        public FeedDownloadStatus DownloadStatus {
            get { return download_status; }
            set { download_status = value; }
        }

        [DatabaseColumn("IsSubscribed")]
        private bool is_subscribed;
        public bool IsSubscribed {
            get { return is_subscribed; }
            set { is_subscribed = value; }
        }

        #endregion

        #region Other Properties

        // TODO remove this, way too redundant with DownloadStatus
        /*public PodcastFeedActivity Activity {
            get { return activity; }
            PodcastFeedActivity ret = PodcastFeedActivity.None;
            if (this == All) {
                return ret;
            }
            switch (DownloadStatus) {
                case FeedDownloadStatus.Pending:
                    ret = PodcastFeedActivity.UpdatePending;
                    break;
                case FeedDownloadStatus.Downloading:
                    ret = PodcastFeedActivity.Updating;
                    break;
                case FeedDownloadStatus.DownloadFailed:
                    ret = PodcastFeedActivity.UpdateFailed;
                    break;
            }
            if (ret != PodcastFeedActivity.Updating) {
                if (ActiveDownloadCount > 0) {
                    ret = PodcastFeedActivity.ItemsDownloading;
                } else if (QueuedDownloadCount > 0) {
                    ret = PodcastFeedActivity.ItemsQueued;
                }
            }
            return ret;
        }}*/

        // Lazily streams this feed's items from the database, newest first;
        // empty when the feed has not been saved yet (DbId <= 0).
        public IEnumerable<FeedItem> Items {
            get {
                if (DbId > 0) {
                    foreach (FeedItem item in FeedItem.Provider.FetchAllMatching (String.Format ("{0}.FeedID = {1} ORDER BY {0}.PubDate DESC", FeedItem.Provider.TableName, DbId))) {
                        yield return item;
                    }
                }
            }
        }

        #endregion

        private static FeedManager Manager {
            get { return FeedsManager.Instance.FeedManager; }
        }

        #region Constructors

        public Feed (string url, FeedAutoDownload auto_download) : this ()
        {
            Url = url;
            this.auto_download = auto_download;
        }

        public Feed ()
        {
        }

        #endregion

        #region Internal Methods

        // Removing a FeedItem means removing the downloaded file.
        /*public void Remove (FeedItem item)
        {
            if (item == null) {
                throw new ArgumentNullException ("item");
            }
            if (items.Remove (item)) {
                inactive_items.Add (item);
                OnFeedItemRemoved (item);
            }
        } }*/

        /*public void Remove (IEnumerable<FeedItem> itms)
        {
            if (removedItems.Count > 0) {
                OnItemsChanged ();
            }
        } }*/

        #endregion

        #region Private Methods

        // Persists any items not already in the database, then (if anything
        // was new) applies archiving, notifies listeners and auto-downloads.
        public void SetItems (IEnumerable<FeedItem> items)
        {
            bool added_any = false;
            foreach (FeedItem item in items) {
                added_any |= AddItem (item);
            }
            if (added_any) {
                CheckForItemsToArchive ();
                Manager.OnFeedsChanged ();
                CheckForItemsToDownload ();
            }
        }

        // Saves one item keyed on (FeedID, Guid); returns true only when the
        // item was genuinely new. Failures are logged, not propagated.
        private bool AddItem (FeedItem item)
        {
            try {
                if (!FeedItem.Exists (this.DbId, item.Guid)) {
                    item.Feed = this;
                    item.Save ();
                    return true;
                }
            } catch (Exception e) {
                Hyena.Log.Exception (e);
            }
            return false;
        }

        /*private void UpdateItems (IEnumerable<FeedItem> new_items)
        {
            ICollection<FeedItem> tmpNew = null;
            List<FeedItem> zombies = new List<FeedItem> ();
            if (items.Count == 0 && inactive_items.Count == 0) {
                tmpNew = new List<FeedItem> (new_items);
            } else {
                // Get remote items that aren't in the items list
                tmpNew = Diff (items, new_items);
                // Of those, remove the ones that are in our inactive list
                tmpNew = Diff (inactive_items, tmpNew);
                // Get a list of inactive items that aren't in the remote list any longer
                ICollection<FeedItem> doubleKilledZombies = Diff (
                    new_items, inactive_items
                );
                foreach (FeedItem zombie in doubleKilledZombies) {
                    inactive_items.Remove (zombie);
                }
                zombies.AddRange (doubleKilledZombies);
                foreach (FeedItem fi in Diff (new_items, items)) {
                    if (fi.Enclosure != null && !String.IsNullOrEmpty (fi.Enclosure.LocalPath)) {
                        // A hack for the podcast plugin, keeps downloaded items
                        // from being deleted when they are no longer in the feed.
                        continue;
                    }
                    zombies.Add (fi);
                }
            }
            if (tmpNew.Count > 0) {
                Add (tmpNew);
            }
            // TODO merge...should we really be deleting these items?
            if (zombies.Count > 0) {
                foreach (FeedItem item in zombies) {
                    if (item.Active) {
                        zombie.Delete ();
                    }
                }
                // TODO merge
                //ItemsTableManager.Delete (zombies);
            }
        }

        // Written before LINQ, will update.
        private ICollection<FeedItem> Diff (IEnumerable<FeedItem> baseSet, IEnumerable<FeedItem> overlay)
        {
            bool found;
            List<FeedItem> diff = new List<FeedItem> ();
            foreach (FeedItem opi in overlay) {
                found = false;
                foreach (FeedItem bpi in baseSet) {
                    if (opi.Title == bpi.Title && opi.Description == bpi.Description) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    diff.Add (opi);
                }
            }
            return diff;
        }*/

        #endregion

        #region Public Methods

        // Queues an asynchronous refresh with the FeedManager.
        public void Update ()
        {
            Manager.QueueUpdate (this);
        }

        public void Delete ()
        {
            Delete (true);
            Manager.OnFeedsChanged ();
        }

        // Deletes all items (optionally their downloaded enclosures) and the
        // feed row itself, under the instance lock.
        public void Delete (bool deleteEnclosures)
        {
            lock (sync) {
                //if (deleted)
                //    return;
                //if (updating) {
                //    Manager.CancelUpdate (this);
                //}
                foreach (FeedItem item in Items) {
                    item.Delete (deleteEnclosures);
                }
                Provider.Delete (this);
            }
            //updatingHandle.WaitOne ();
            Manager.OnFeedsChanged ();
        }

        public void MarkAllItemsRead ()
        {
            lock (sync) {
                foreach (FeedItem i in Items) {
                    i.IsRead = true;
                }
            }
        }

        public override string ToString ()
        {
            return String.Format ("Title: {0} - Url: {1}", Title, Url);
        }

        public void Save ()
        {
            Save (true);
        }

        // Persists the row, archives overflow items, and triggers
        // auto-download only when the feed changed since the last one.
        public void Save (bool notify)
        {
            Provider.Save (this);
            CheckForItemsToArchive ();
            if (LastBuildDate > LastAutoDownload) {
                CheckForItemsToDownload ();
            }
            if (notify) {
                Manager.OnFeedsChanged ();
            }
        }

        // "Archives" by marking as read every unread item beyond MaxItemCount
        // (Items is newest-first, so the oldest extras are marked).
        private void CheckForItemsToArchive ()
        {
            if (MaxItemCount == 0)
                return;
            int i = 0;
            foreach (var item in Items) {
                if (!item.IsRead) {
                    if (i++ >= MaxItemCount) {
                        item.IsRead = true;
                        item.Save (false);
                    }
                }
            }
        }

        // Starts enclosure downloads for items newer than the last auto
        // download, honoring the All/One/None policy; skipped entirely while
        // the feed is in a download-error state.
        private void CheckForItemsToDownload ()
        {
            if (LastDownloadError != FeedDownloadError.None || AutoDownload == FeedAutoDownload.None)
                return;
            bool only_first = (AutoDownload == FeedAutoDownload.One);
            bool any = false;
            foreach (FeedItem item in Items) {
                if (item.Enclosure != null && item.Active && item.Enclosure.DownloadStatus != FeedDownloadStatus.Downloaded && item.PubDate > LastAutoDownload) {
                    item.Enclosure.AsyncDownload ();
                    any = true;
                    if (only_first)
                        break;
                }
            }
            if (any) {
                LastAutoDownload = DateTime.Now;
                Save ();
            }
        }

        /*private bool SetCanceled ()
        {
            bool ret = false;
            if (!canceled && updating) {
                ret = canceled = true;
            }
            return ret;
        }*/

        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.Text;

namespace DSPUtil
{
    /*
    % from iso226.m:
    % Generates an Equal Loudness Contour as described in ISO 226
    %
    % Usage:  [SPL FREQ] = ISO226(PHON);
    %
    %         PHON is the phon value in dB SPL that you want the equal
    %           loudness curve to represent. (1phon = 1dB @ 1kHz)
    %         SPL is the Sound Pressure Level amplitude returned for
    %           each of the 29 frequencies evaluated by ISO226.
    %         FREQ is the returned vector of frequencies that ISO226
    %           evaluates to generate the contour.
    %
    % Desc:   This function will return the equal loudness contour for
    %         your desired phon level.  The frequencies evaulated in this
    %         function only span from 20Hz - 12.5kHz, and only 29 selective
    %         frequencies are covered.  This is the limitation of the ISO
    %         standard.
    %
    %         In addition the valid phon range should be 0 - 90 dB SPL.
    %         Values outside this range do not have experimental values
    %         and their contours should be treated as inaccurate.
    %
    %         If more samples are required you should be able to easily
    %         interpolate these values using spline().
    %
    % Author: Jeff Tackett 03/01/05
    */
    public class Loudness
    {
        // ISO 226 tabulated values for the 29 measured frequencies (20 Hz - 12.5 kHz):
        // f = frequency, af = exponent of loudness perception,
        // Lu = magnitude of the linear transfer function (dB),
        // Tf = threshold of hearing (dB SPL).
        static double[] f = { 20, 25, 31.5, 40.0, 50.0, 63.0, 80.0, 100.0, 125.0,
                160.0, 200.0, 250.0, 315.0, 400.0, 500.0, 630.0, 800.0, 1000,
                1250, 1600, 2000, 2500, 3150, 4000, 5000, 6300, 8000, 10000, 12500};

        static double[] af = {0.532, 0.506, 0.480, 0.455, 0.432, 0.409, 0.387,
                0.367, 0.349, 0.330, 0.315, 0.301, 0.288, 0.276, 0.267, 0.259,
                0.253, 0.250, 0.246, 0.244, 0.243, 0.243, 0.243, 0.242, 0.242,
                0.245, 0.254, 0.271, 0.301};

        static double[] Lu = {-31.6, -27.2, -23.0, -19.1, -15.9, -13.0, -10.3,
                -8.1, -6.2, -4.5, -3.1, -2.0, -1.1, -0.4, 0.0, 0.3, 0.5, 0.0,
                -2.7, -4.1, -1.0, 1.7, 2.5, 1.2, -2.1, -7.1, -11.2, -10.7, -3.1};

        static double[] Tf = { 78.5, 68.7, 59.5, 51.1, 44.0, 37.5, 31.5, 26.5,
                22.1, 17.9, 14.4, 11.4, 8.6, 6.2, 4.4, 3.0, 2.2, 2.4, 3.5, 1.7,
                -1.3, -4.2, -6.0, -5.4, -1.5, 6.0, 12.6, 13.9, 12.3 };

        /// <summary>
        /// Create a list of dB SPL equal-loudness values for a given 'phon' loudness
        /// (from zero, threshold, to 90)
        /// </summary>
        /// <param name="phon">Loudness level in phon; accepted range here is 0-120,
        /// though ISO 226 only has experimental data up to ~90.</param>
        /// <returns>list of {frequency Hz, dB SPL}</returns>
        /// <exception cref="ArgumentException">phon outside [0, 120]</exception>
        public static FilterProfile SPL(double phon)
        {
            FilterProfile lfg = new FilterProfile();

            // Short-circuit '||' (was a non-short-circuit '|' on booleans).
            if ((phon < 0) || (phon > 120))
            {
                throw new ArgumentException("Phon value out of bounds!");
            }

            // Setup user-defined values for equation
            double Ln = phon;

            for (int j = 0; j < f.Length; j++)
            {
                // Deriving sound pressure level from loudness level (iso226 sect 4.1)
                double Af = 4.47E-3 * Math.Pow(10, (0.025 * Ln) - 1.15)
                    + Math.Pow(0.4 * Math.Pow(10, (((Tf[j] + Lu[j]) / 10) - 9)), af[j]);
                double Lp = ((10 / af[j]) * Math.Log10(Af)) - Lu[j] + 94;

                // Return user data
                FreqGain fg = new FreqGain(f[j], Lp);
                lfg.Add(fg);
            }
            return lfg;
        }

        /// <summary>Difference between two equal-loudness contours, unscaled.</summary>
        public static FilterProfile DifferentialSPL(double phon0, double phon1)
        {
            return DifferentialSPL(phon0, phon1, 1.0);
        }

        /// <summary>
        /// Difference between the phon0 and phon1 equal-loudness contours,
        /// multiplied by <paramref name="scale"/>, per measured frequency.
        /// </summary>
        public static FilterProfile DifferentialSPL(double phon0, double phon1, double scale)
        {
            FilterProfile spl = new FilterProfile();
            FilterProfile spl0 = Loudness.SPL(phon0);
            FilterProfile spl1 = Loudness.SPL(phon1);
            for (int j = 0; j < spl1.Count; j++)
            {
                // spl1 is local to this call, so mutating its entries in place is safe.
                FreqGain fg = spl1[j];
                fg.Gain = scale * (spl0[j].Gain - fg.Gain);
                spl.Add(fg);
            }
            return spl;
        }

        /// <summary>
        /// Calculate the weighted volume of a *single channel* sound source.
        /// NB: this consumes lots of memory for long sources.
        /// </summary>
        /// <param name="src">single-channel source</param>
        /// <param name="dbSPL">phon level for the weighting contour</param>
        /// <param name="dbSPLBase">if nonzero, weight by the differential contour dbSPL-dbSPLBase</param>
        /// <returns>Volume (units, not dB), normalized against a Dirac pulse of equal length</returns>
        /// <exception cref="ArgumentException">src has more than one channel</exception>
        public static double WeightedVolume1(ISoundObj src, double dbSPL, double dbSPLBase)
        {
            if (src.NumChannels != 1)
            {
                throw new ArgumentException("Requires single-channel");
            }

            // Read channel into a buffer
            SoundBuffer buff = new SoundBuffer(src);
            buff.ReadAll();

            // And then double in length to prevent wraparound
            buff.PadTo(buff.Count * 2);
            // Pad to next higher power of two (for the FFT)
            buff.PadToPowerOfTwo();
            int n = buff.Count;

            double wvImpulse = WeightedVolume2(buff, dbSPL, dbSPLBase);

            // compare with a Dirac pulse the same length
            CallbackSource dirac = new CallbackSource(1, src.SampleRate, delegate(long j)
            {
                if (j >= n)
                {
                    return null;
                }
                double v = 0;
                if (j == n / 2)
                {
                    v = 1;
                }
                return new Sample(v);
            });

            buff = new SoundBuffer(dirac);
            buff.ReadAll();

            double wvDirac = WeightedVolume2(buff, dbSPL, dbSPLBase);

            // Drop the reference and let the GC reclaim the large buffers on its own
            // schedule; an explicit GC.Collect() here only stalled the caller.
            buff = null;

            return wvImpulse / wvDirac;
        }

        // Core weighting: FFT the (single-channel) buffer, then accumulate the RMS of
        // each ISO-226 frequency band, de-weighted by the (interpolated, dB-linear)
        // equal-loudness gain at each bin.  Only the ISO-measured range contributes.
        private static double WeightedVolume2(SoundBuffer src, double dbSPL, double dbSPLBase)
        {
            double v = 0;
            uint sr = src.SampleRate;

            // Read buffer into array of complex
            Complex[][] data = src.ToComplexArray();

            // We only have a single channel
            Complex[] cdata = data[0];

            // FFT in place
            Fourier.FFT(cdata.Length, cdata);

            // Calculate magnitude, weighted by 80-phon loudness, for each loudness band.
            // These are the ISO measured points:
            FilterProfile lfg;
            if (dbSPLBase == 0)
            {
                lfg = SPL(dbSPL);
            }
            else
            {
                lfg = DifferentialSPL(dbSPL, dbSPLBase);
            }

            // Cover the ISO measured range (only...)
            int nStart = (int)(lfg[0].Freq * (long)cdata.Length / sr);
            int nEnd = (int)(lfg[lfg.Count - 1].Freq * (long)cdata.Length / sr);

            // Just use linear interpolation (on a dB scale; linear freq scale)
            // of gain between each measured point
            int nfg = 0;
            int startp = nStart;
            int endp = (int)(lfg[nfg + 1].Freq * (long)cdata.Length / sr); // endpoint of this band
            double dB1 = lfg[nfg].Gain;     // SPL of the ISO223 curve at this freq
            double dB2 = lfg[nfg + 1].Gain; // ...and the next point

            double vThisBand = 0;
            int nThisBand = 0;
            for (int j = nStart; j < nEnd; j++)
            {
                if (j > endp)
                {
                    if (nThisBand > 0)
                        v += Math.Sqrt(vThisBand / nThisBand); // RMS
                    while (j >= endp)
                    {
                        // Advance to the band containing bin j (bands can be narrower
                        // than one bin at low frequencies, hence the loop).
                        nfg++;
                        startp = j;
                        endp = (int)(lfg[nfg + 1].Freq * (long)cdata.Length / sr);
                        dB1 = lfg[nfg].Gain;
                        dB2 = lfg[nfg + 1].Gain;
                    }
                    vThisBand = 0;
                    nThisBand = 0;
                }
                Complex c = cdata[j];
                double dbHere = dB1 + ((dB2 - dB1) * (double)(j - startp) / (double)(endp - startp));
                vThisBand += (c.Re * c.Re) / MathUtil.gain(dbHere);
                nThisBand++;
            }
            if (nThisBand > 0)
                v += Math.Sqrt(vThisBand / nThisBand);

            return v;
        }

        /// <summary>
        /// Calculate the weighted volume of a sound source.
        /// NB: this consumes lots of memory for long sources.
        /// </summary>
        /// <param name="src">source (any channel count; channels are averaged)</param>
        /// <param name="dbSPL">phon level for the weighting contour</param>
        /// <param name="dbSPLBase">if nonzero, weight by the differential contour</param>
        /// <returns>Volume (units, not dB)</returns>
        public static double WeightedVolume(ISoundObj src, double dbSPL, double dbSPLBase)
        {
            double wv = 0;
            for (ushort c = 0; c < src.NumChannels; c++)
            {
                SingleChannel channel = src.Channel(c);
                wv += Loudness.WeightedVolume1(channel, dbSPL, dbSPLBase);
            }
            src.Reset();
            wv = wv / src.NumChannels;
            return wv;
        }

        /// <summary>
        /// Calculate the weighted volume of a sound source at the default 40-phon contour.
        /// NB: this consumes lots of memory for long sources.
        /// </summary>
        /// <param name="src"></param>
        /// <returns>Volume (units, not dB)</returns>
        public static double WeightedVolume(ISoundObj src)
        {
            return WeightedVolume(src, 40, 0);
        }
    }
}
// groupem_scrollmaster.cs // // Copyright (c) 2013 Brent Knowles (http://www.brentknowles.com) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
//
// Review documentation at http://www.yourothermind.com for updated implementation notes, license updates
// or other general information/
//
// Author information available at http://www.brentknowles.com or http://www.amazon.com/Brent-Knowles/e/B0035WW7OW
// Full source code: https://github.com/BrentKnowles/YourOtherMind
//###
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.Runtime.InteropServices;
using Microsoft.Win32;
using System.Reflection;
using CoreUtilities.Links;

namespace Storyboards
{
    /// <summary>
    /// A ListView that surfaces its vertical scrolling as an <see cref="onScroll"/>
    /// event and exposes a settable <see cref="ScrollPosition"/>, so a second list
    /// can be kept in lock-step ("scroll master").
    /// </summary>
    public class groupem_scrollmaster : ListView
    {
        // Windows messages
        private const int WM_PAINT = 0x000F;
        private const int WM_HSCROLL = 0x0114;
        private const int WM_VSCROLL = 0x0115;
        private const int WM_MOUSEWHEEL = 0x020A;
        private const int WM_KEYDOWN = 0x0100;
        private const int WM_LBUTTONUP = 0x0202;

        // ScrollBar types
        private const int SB_HORZ = 0;
        private const int SB_VERT = 1;

        // ScrollBar interfaces (SCROLLINFO fMask flags)
        private const int SIF_TRACKPOS = 0x10;
        private const int SIF_RANGE = 0x01;
        private const int SIF_POS = 0x04;
        private const int SIF_PAGE = 0x02;
        private const int SIF_ALL = SIF_RANGE | SIF_PAGE | SIF_POS | SIF_TRACKPOS;

        // ListView messages
        private const uint LVM_SCROLL = 0x1014;
        private const int LVM_FIRST = 0x1000;
        private const int LVM_SETGROUPINFO = (LVM_FIRST + 147);

        public enum ScrollBarCommands : int
        {
            SB_LINEUP = 0,
            SB_LINELEFT = 0,
            SB_LINEDOWN = 1,
            SB_LINERIGHT = 1,
            SB_PAGEUP = 2,
            SB_PAGELEFT = 2,
            SB_PAGEDOWN = 3,
            SB_PAGERIGHT = 3,
            SB_THUMBPOSITION = 4,
            SB_THUMBTRACK = 5,
            SB_TOP = 6,
            SB_LEFT = 6,
            SB_BOTTOM = 7,
            SB_RIGHT = 7,
            SB_ENDSCROLL = 8
        }

        /// <summary>
        /// Intercepts scroll-affecting messages after default processing and
        /// re-publishes them through <see cref="onScroll"/> with the current
        /// vertical scroll position.
        /// </summary>
        protected override void WndProc(ref Message m)
        {
            base.WndProc(ref m);

            // Nothing to report to if nobody subscribed.
            if (null == onScroll)
                return;

            switch (m.Msg)
            {
                case WM_VSCROLL:
                    ScrollEventArgs sargs = new ScrollEventArgs(ScrollEventType.EndScroll,
                        GetScrollPos(this.Handle, SB_VERT));
                    onScroll(this, sargs);
                    break;

                case WM_MOUSEWHEEL:
                    ScrollEventArgs sarg = new ScrollEventArgs(ScrollEventType.EndScroll,
                        GetScrollPos(this.Handle, SB_VERT));
                    onScroll(this, sarg);
                    break;

                case WM_KEYDOWN:
                    // BUGFIX: the original raised the opposite direction for Down/Up
                    // (SmallDecrement on Down, SmallIncrement on Up) and used Small*
                    // for the Page keys.  Down/PageDown increase the scroll position
                    // (Increment); Up/PageUp decrease it (Decrement); Page keys are
                    // "large" scrolls per ScrollEventType semantics.
                    switch (m.WParam.ToInt32())
                    {
                        case (int)Keys.Down:
                            onScroll(this, new ScrollEventArgs(ScrollEventType.SmallIncrement,
                                GetScrollPos(this.Handle, SB_VERT)));
                            break;
                        case (int)Keys.Up:
                            onScroll(this, new ScrollEventArgs(ScrollEventType.SmallDecrement,
                                GetScrollPos(this.Handle, SB_VERT)));
                            break;
                        case (int)Keys.PageDown:
                            onScroll(this, new ScrollEventArgs(ScrollEventType.LargeIncrement,
                                GetScrollPos(this.Handle, SB_VERT)));
                            break;
                        case (int)Keys.PageUp:
                            onScroll(this, new ScrollEventArgs(ScrollEventType.LargeDecrement,
                                GetScrollPos(this.Handle, SB_VERT)));
                            break;
                        case (int)Keys.Home:
                            onScroll(this, new ScrollEventArgs(ScrollEventType.First,
                                GetScrollPos(this.Handle, SB_VERT)));
                            break;
                        case (int)Keys.End:
                            onScroll(this, new ScrollEventArgs(ScrollEventType.Last,
                                GetScrollPos(this.Handle, SB_VERT)));
                            break;
                    }
                    break;
            }
        }

        /// <summary>
        /// Current vertical scroll position.  Setting it scrolls the list by the
        /// delta from the current position via LVM_SCROLL.
        /// NOTE(review): when ShowGroups is false the delta is left at 0, so the
        /// setter is effectively a no-op in that mode — confirm whether that is
        /// intentional for non-grouped lists.
        /// </summary>
        public int ScrollPosition
        {
            get
            {
                return GetScrollPos(this.Handle, SB_VERT);
            }
            set
            {
                int prevPos;
                int scrollVal = 0;
                if (ShowGroups == true)
                {
                    prevPos = GetScrollPos(this.Handle, SB_VERT);
                    scrollVal = -(prevPos - value);
                }
                else
                {
                }
                SendMessage(this.Handle, LVM_SCROLL, (IntPtr)0, (IntPtr)scrollVal);
            }
        }

        /// <summary>Raised whenever the list scrolls (scrollbar, wheel, or keys).</summary>
        public event ScrollEventHandler onScroll;

        [DllImport("user32.dll")]
        [return: MarshalAs(UnmanagedType.Bool)]
        private static extern bool GetScrollInfo(IntPtr hwnd, int fnBar, ref SCROLLINFO lpsi);

        // NOTE(review): this overload's int handle / long parameters have the wrong
        // widths for 64-bit P/Invoke (handles and message params should be IntPtr).
        // It appears unused here; kept only to avoid breaking external callers.
        [DllImport("user32.dll")]
        public static extern int SendMessage(
            int hWnd,       // handle to destination window
            uint Msg,       // message
            long wParam,    // first message parameter
            long lParam     // second message parameter
            );

        [DllImport("user32.dll")]
        static extern int SendMessage(IntPtr hWnd, int wMsg, int wParam, int lParam);

        [DllImport("user32.dll")]
        static extern int SendMessage(IntPtr hWnd, uint wMsg, IntPtr wParam, IntPtr lParam);

        [DllImport("user32.dll")]
        static extern int SetScrollPos(IntPtr hWnd, int nBar, int nPos, bool bRedraw);

        [DllImport("user32.dll", CharSet = CharSet.Auto)]
        static extern int GetScrollPos(IntPtr hWnd, int nBar);

        // Native SCROLLINFO for GetScrollInfo.
        [StructLayout(LayoutKind.Sequential)]
        struct SCROLLINFO
        {
            public uint cbSize;
            public uint fMask;
            public int nMin;
            public int nMax;
            public uint nPage;
            public int nPos;
            public int nTrackPos;
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using Microsoft.CodeAnalysis.CSharp.Test.Utilities; using Microsoft.CodeAnalysis.Test.Utilities; using Roslyn.Test.Utilities; using Xunit; namespace Microsoft.CodeAnalysis.CSharp.UnitTests.PDB { public class PDBAsyncTests : CSharpTestBase { [Fact] [WorkItem(1137300, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1137300")] [WorkItem(631350, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/631350")] [WorkItem(643501, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/643501")] [WorkItem(689616, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/689616")] public void TestAsyncDebug() { var text = @" using System; using System.Threading; using System.Threading.Tasks; class DynamicMembers { public Func<Task<int>> Prop { get; set; } } class TestCase { public static int Count = 0; public async void Run() { DynamicMembers dc2 = new DynamicMembers(); dc2.Prop = async () => { await Task.Delay(10000); return 3; }; var rez2 = await dc2.Prop(); if (rez2 == 3) Count++; Driver.Result = TestCase.Count - 1; //When test complete, set the flag. 
Driver.CompletedSignal.Set(); } } class Driver { public static int Result = -1; public static AutoResetEvent CompletedSignal = new AutoResetEvent(false); static int Main() { var t = new TestCase(); t.Run(); CompletedSignal.WaitOne(); return Driver.Result; } }"; var compilation = CreateCompilationWithMscorlib45(text, options: TestOptions.DebugDll).VerifyDiagnostics(); var v = CompileAndVerify(compilation); v.VerifyIL("TestCase.<Run>d__1.System.Runtime.CompilerServices.IAsyncStateMachine.MoveNext", @" { // Code size 301 (0x12d) .maxstack 3 .locals init (int V_0, System.Runtime.CompilerServices.TaskAwaiter<int> V_1, int V_2, TestCase.<Run>d__1 V_3, bool V_4, System.Exception V_5) ~IL_0000: ldarg.0 IL_0001: ldfld ""int TestCase.<Run>d__1.<>1__state"" IL_0006: stloc.0 .try { ~IL_0007: ldloc.0 IL_0008: brfalse.s IL_000c IL_000a: br.s IL_000e IL_000c: br.s IL_008b -IL_000e: nop -IL_000f: ldarg.0 IL_0010: newobj ""DynamicMembers..ctor()"" IL_0015: stfld ""DynamicMembers TestCase.<Run>d__1.<dc2>5__1"" -IL_001a: ldarg.0 IL_001b: ldfld ""DynamicMembers TestCase.<Run>d__1.<dc2>5__1"" IL_0020: ldsfld ""System.Func<System.Threading.Tasks.Task<int>> TestCase.<>c.<>9__1_0"" IL_0025: dup IL_0026: brtrue.s IL_003f IL_0028: pop IL_0029: ldsfld ""TestCase.<>c TestCase.<>c.<>9"" IL_002e: ldftn ""System.Threading.Tasks.Task<int> TestCase.<>c.<Run>b__1_0()"" IL_0034: newobj ""System.Func<System.Threading.Tasks.Task<int>>..ctor(object, System.IntPtr)"" IL_0039: dup IL_003a: stsfld ""System.Func<System.Threading.Tasks.Task<int>> TestCase.<>c.<>9__1_0"" IL_003f: callvirt ""void DynamicMembers.Prop.set"" IL_0044: nop -IL_0045: ldarg.0 IL_0046: ldfld ""DynamicMembers TestCase.<Run>d__1.<dc2>5__1"" IL_004b: callvirt ""System.Func<System.Threading.Tasks.Task<int>> DynamicMembers.Prop.get"" IL_0050: callvirt ""System.Threading.Tasks.Task<int> System.Func<System.Threading.Tasks.Task<int>>.Invoke()"" IL_0055: callvirt ""System.Runtime.CompilerServices.TaskAwaiter<int> 
System.Threading.Tasks.Task<int>.GetAwaiter()"" IL_005a: stloc.1 ~IL_005b: ldloca.s V_1 IL_005d: call ""bool System.Runtime.CompilerServices.TaskAwaiter<int>.IsCompleted.get"" IL_0062: brtrue.s IL_00a7 IL_0064: ldarg.0 IL_0065: ldc.i4.0 IL_0066: dup IL_0067: stloc.0 IL_0068: stfld ""int TestCase.<Run>d__1.<>1__state"" <IL_006d: ldarg.0 IL_006e: ldloc.1 IL_006f: stfld ""System.Runtime.CompilerServices.TaskAwaiter<int> TestCase.<Run>d__1.<>u__1"" IL_0074: ldarg.0 IL_0075: stloc.3 IL_0076: ldarg.0 IL_0077: ldflda ""System.Runtime.CompilerServices.AsyncVoidMethodBuilder TestCase.<Run>d__1.<>t__builder"" IL_007c: ldloca.s V_1 IL_007e: ldloca.s V_3 IL_0080: call ""void System.Runtime.CompilerServices.AsyncVoidMethodBuilder.AwaitUnsafeOnCompleted<System.Runtime.CompilerServices.TaskAwaiter<int>, TestCase.<Run>d__1>(ref System.Runtime.CompilerServices.TaskAwaiter<int>, ref TestCase.<Run>d__1)"" IL_0085: nop IL_0086: leave IL_012c >IL_008b: ldarg.0 IL_008c: ldfld ""System.Runtime.CompilerServices.TaskAwaiter<int> TestCase.<Run>d__1.<>u__1"" IL_0091: stloc.1 IL_0092: ldarg.0 IL_0093: ldflda ""System.Runtime.CompilerServices.TaskAwaiter<int> TestCase.<Run>d__1.<>u__1"" IL_0098: initobj ""System.Runtime.CompilerServices.TaskAwaiter<int>"" IL_009e: ldarg.0 IL_009f: ldc.i4.m1 IL_00a0: dup IL_00a1: stloc.0 IL_00a2: stfld ""int TestCase.<Run>d__1.<>1__state"" IL_00a7: ldloca.s V_1 IL_00a9: call ""int System.Runtime.CompilerServices.TaskAwaiter<int>.GetResult()"" IL_00ae: stloc.2 IL_00af: ldloca.s V_1 IL_00b1: initobj ""System.Runtime.CompilerServices.TaskAwaiter<int>"" IL_00b7: ldarg.0 IL_00b8: ldloc.2 IL_00b9: stfld ""int TestCase.<Run>d__1.<>s__3"" IL_00be: ldarg.0 IL_00bf: ldarg.0 IL_00c0: ldfld ""int TestCase.<Run>d__1.<>s__3"" IL_00c5: stfld ""int TestCase.<Run>d__1.<rez2>5__2"" -IL_00ca: ldarg.0 IL_00cb: ldfld ""int TestCase.<Run>d__1.<rez2>5__2"" IL_00d0: ldc.i4.3 IL_00d1: ceq IL_00d3: stloc.s V_4 ~IL_00d5: ldloc.s V_4 IL_00d7: brfalse.s IL_00e5 -IL_00d9: ldsfld ""int 
TestCase.Count"" IL_00de: ldc.i4.1 IL_00df: add IL_00e0: stsfld ""int TestCase.Count"" -IL_00e5: ldsfld ""int TestCase.Count"" IL_00ea: ldc.i4.1 IL_00eb: sub IL_00ec: stsfld ""int Driver.Result"" -IL_00f1: ldsfld ""System.Threading.AutoResetEvent Driver.CompletedSignal"" IL_00f6: callvirt ""bool System.Threading.EventWaitHandle.Set()"" IL_00fb: pop ~IL_00fc: leave.s IL_0118 } catch System.Exception { ~$IL_00fe: stloc.s V_5 IL_0100: ldarg.0 IL_0101: ldc.i4.s -2 IL_0103: stfld ""int TestCase.<Run>d__1.<>1__state"" IL_0108: ldarg.0 IL_0109: ldflda ""System.Runtime.CompilerServices.AsyncVoidMethodBuilder TestCase.<Run>d__1.<>t__builder"" IL_010e: ldloc.s V_5 IL_0110: call ""void System.Runtime.CompilerServices.AsyncVoidMethodBuilder.SetException(System.Exception)"" IL_0115: nop IL_0116: leave.s IL_012c } -IL_0118: ldarg.0 IL_0119: ldc.i4.s -2 IL_011b: stfld ""int TestCase.<Run>d__1.<>1__state"" ~IL_0120: ldarg.0 IL_0121: ldflda ""System.Runtime.CompilerServices.AsyncVoidMethodBuilder TestCase.<Run>d__1.<>t__builder"" IL_0126: call ""void System.Runtime.CompilerServices.AsyncVoidMethodBuilder.SetResult()"" IL_012b: nop IL_012c: ret }", sequencePoints: "TestCase+<Run>d__1.MoveNext"); v.VerifyPdb(@"<symbols> <methods> <method containingType=""DynamicMembers"" name=""get_Prop""> <sequencePoints> <entry offset=""0x0"" startLine=""8"" startColumn=""35"" endLine=""8"" endColumn=""39"" /> </sequencePoints> </method> <method containingType=""DynamicMembers"" name=""set_Prop"" parameterNames=""value""> <sequencePoints> <entry offset=""0x0"" startLine=""8"" startColumn=""40"" endLine=""8"" endColumn=""44"" /> </sequencePoints> </method> <method containingType=""TestCase"" name=""Run""> <customDebugInfo> <forwardIterator name=""&lt;Run&gt;d__1"" /> <encLocalSlotMap> <slot kind=""0"" offset=""26"" /> <slot kind=""0"" offset=""139"" /> <slot kind=""28"" offset=""146"" /> </encLocalSlotMap> <encLambdaMap> <methodOrdinal>1</methodOrdinal> <lambda offset=""86"" /> </encLambdaMap> 
</customDebugInfo> </method> <method containingType=""TestCase"" name="".cctor""> <customDebugInfo> <using> <namespace usingCount=""3"" /> </using> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" startLine=""12"" startColumn=""5"" endLine=""12"" endColumn=""33"" /> </sequencePoints> <scope startOffset=""0x0"" endOffset=""0x7""> <namespace name=""System"" /> <namespace name=""System.Threading"" /> <namespace name=""System.Threading.Tasks"" /> </scope> </method> <method containingType=""Driver"" name=""Main""> <customDebugInfo> <forward declaringType=""TestCase"" methodName="".cctor"" /> <encLocalSlotMap> <slot kind=""0"" offset=""15"" /> <slot kind=""21"" offset=""0"" /> </encLocalSlotMap> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" startLine=""30"" startColumn=""5"" endLine=""30"" endColumn=""6"" /> <entry offset=""0x1"" startLine=""31"" startColumn=""9"" endLine=""31"" endColumn=""32"" /> <entry offset=""0x7"" startLine=""32"" startColumn=""9"" endLine=""32"" endColumn=""17"" /> <entry offset=""0xe"" startLine=""34"" startColumn=""9"" endLine=""34"" endColumn=""35"" /> <entry offset=""0x19"" startLine=""35"" startColumn=""9"" endLine=""35"" endColumn=""30"" /> <entry offset=""0x21"" startLine=""36"" startColumn=""5"" endLine=""36"" endColumn=""6"" /> </sequencePoints> <scope startOffset=""0x0"" endOffset=""0x23""> <local name=""t"" il_index=""0"" il_start=""0x0"" il_end=""0x23"" attributes=""0"" /> </scope> </method> <method containingType=""Driver"" name="".cctor""> <customDebugInfo> <forward declaringType=""TestCase"" methodName="".cctor"" /> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" startLine=""27"" startColumn=""5"" endLine=""27"" endColumn=""35"" /> <entry offset=""0x6"" startLine=""28"" startColumn=""5"" endLine=""28"" endColumn=""78"" /> </sequencePoints> </method> <method containingType=""TestCase+&lt;&gt;c"" name=""&lt;Run&gt;b__1_0""> <customDebugInfo> <forwardIterator name=""&lt;&lt;Run&gt;b__1_0&gt;d"" /> 
</customDebugInfo> </method> <method containingType=""TestCase+&lt;Run&gt;d__1"" name=""MoveNext""> <customDebugInfo> <forward declaringType=""TestCase"" methodName="".cctor"" /> <hoistedLocalScopes> <slot startOffset=""0x0"" endOffset=""0x12d"" /> <slot startOffset=""0x0"" endOffset=""0x12d"" /> </hoistedLocalScopes> <encLocalSlotMap> <slot kind=""27"" offset=""0"" /> <slot kind=""33"" offset=""146"" /> <slot kind=""temp"" /> <slot kind=""temp"" /> <slot kind=""1"" offset=""173"" /> <slot kind=""temp"" /> </encLocalSlotMap> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0xe"" startLine=""14"" startColumn=""5"" endLine=""14"" endColumn=""6"" /> <entry offset=""0xf"" startLine=""15"" startColumn=""9"" endLine=""15"" endColumn=""51"" /> <entry offset=""0x1a"" startLine=""16"" startColumn=""9"" endLine=""16"" endColumn=""71"" /> <entry offset=""0x45"" startLine=""17"" startColumn=""9"" endLine=""17"" endColumn=""37"" /> <entry offset=""0x5b"" hidden=""true"" /> <entry offset=""0xca"" startLine=""18"" startColumn=""9"" endLine=""18"" endColumn=""23"" /> <entry offset=""0xd5"" hidden=""true"" /> <entry offset=""0xd9"" startLine=""18"" startColumn=""24"" endLine=""18"" endColumn=""32"" /> <entry offset=""0xe5"" startLine=""20"" startColumn=""9"" endLine=""20"" endColumn=""44"" /> <entry offset=""0xf1"" startLine=""22"" startColumn=""9"" endLine=""22"" endColumn=""38"" /> <entry offset=""0xfc"" hidden=""true"" /> <entry offset=""0xfe"" hidden=""true"" /> <entry offset=""0x118"" startLine=""23"" startColumn=""5"" endLine=""23"" endColumn=""6"" /> <entry offset=""0x120"" hidden=""true"" /> </sequencePoints> <asyncInfo> <catchHandler offset=""0xfe"" /> <kickoffMethod declaringType=""TestCase"" methodName=""Run"" /> <await yield=""0x6d"" resume=""0x8b"" declaringType=""TestCase+&lt;Run&gt;d__1"" methodName=""MoveNext"" /> </asyncInfo> </method> <method 
containingType=""TestCase+&lt;&gt;c+&lt;&lt;Run&gt;b__1_0&gt;d"" name=""MoveNext""> <customDebugInfo> <forward declaringType=""TestCase"" methodName="".cctor"" /> <encLocalSlotMap> <slot kind=""27"" offset=""86"" /> <slot kind=""20"" offset=""86"" /> <slot kind=""33"" offset=""88"" /> <slot kind=""temp"" /> <slot kind=""temp"" /> </encLocalSlotMap> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0xe"" startLine=""16"" startColumn=""32"" endLine=""16"" endColumn=""33"" /> <entry offset=""0xf"" startLine=""16"" startColumn=""34"" endLine=""16"" endColumn=""58"" /> <entry offset=""0x1f"" hidden=""true"" /> <entry offset=""0x78"" startLine=""16"" startColumn=""59"" endLine=""16"" endColumn=""68"" /> <entry offset=""0x7c"" hidden=""true"" /> <entry offset=""0x96"" startLine=""16"" startColumn=""69"" endLine=""16"" endColumn=""70"" /> <entry offset=""0x9e"" hidden=""true"" /> </sequencePoints> <asyncInfo> <kickoffMethod declaringType=""TestCase+&lt;&gt;c"" methodName=""&lt;Run&gt;b__1_0"" /> <await yield=""0x31"" resume=""0x4c"" declaringType=""TestCase+&lt;&gt;c+&lt;&lt;Run&gt;b__1_0&gt;d"" methodName=""MoveNext"" /> </asyncInfo> </method> </methods> </symbols>"); } [Fact] [WorkItem(734596, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/734596")] public void TestAsyncDebug2() { var text = @" using System; using System.Collections.Generic; using System.Threading.Tasks; namespace ConsoleApplication1 { class Program { private static Random random = new Random(); static void Main(string[] args) { new Program().QBar(); } async void QBar() { await ZBar(); } async Task<List<int>> ZBar() { var addedInts = new List<int>(); foreach (var z in new[] {1, 2, 3}) { var newInt = await GetNextInt(random); addedInts.Add(newInt); } return addedInts; } private Task<int> GetNextInt(Random random) { return Task.FromResult(random.Next()); } } }"; var compilation = 
CreateCompilationWithMscorlib45(text, options: TestOptions.DebugDll).VerifyDiagnostics(); compilation.VerifyPdb(@" <symbols> <methods> <method containingType=""ConsoleApplication1.Program"" name=""Main"" parameterNames=""args""> <customDebugInfo> <using> <namespace usingCount=""0"" /> <namespace usingCount=""3"" /> </using> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" startLine=""12"" startColumn=""9"" endLine=""12"" endColumn=""10"" /> <entry offset=""0x1"" startLine=""13"" startColumn=""13"" endLine=""13"" endColumn=""34"" /> <entry offset=""0xc"" startLine=""14"" startColumn=""9"" endLine=""14"" endColumn=""10"" /> </sequencePoints> <scope startOffset=""0x0"" endOffset=""0xd""> <namespace name=""System"" /> <namespace name=""System.Collections.Generic"" /> <namespace name=""System.Threading.Tasks"" /> </scope> </method> <method containingType=""ConsoleApplication1.Program"" name=""QBar""> <customDebugInfo> <forwardIterator name=""&lt;QBar&gt;d__2"" /> </customDebugInfo> </method> <method containingType=""ConsoleApplication1.Program"" name=""ZBar""> <customDebugInfo> <forwardIterator name=""&lt;ZBar&gt;d__3"" /> <encLocalSlotMap> <slot kind=""0"" offset=""19"" /> <slot kind=""6"" offset=""61"" /> <slot kind=""8"" offset=""61"" /> <slot kind=""0"" offset=""61"" /> <slot kind=""0"" offset=""132"" /> <slot kind=""28"" offset=""141"" /> </encLocalSlotMap> </customDebugInfo> </method> <method containingType=""ConsoleApplication1.Program"" name=""GetNextInt"" parameterNames=""random""> <customDebugInfo> <forward declaringType=""ConsoleApplication1.Program"" methodName=""Main"" parameterNames=""args"" /> <encLocalSlotMap> <slot kind=""21"" offset=""0"" /> </encLocalSlotMap> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" startLine=""30"" startColumn=""9"" endLine=""30"" endColumn=""10"" /> <entry offset=""0x1"" startLine=""31"" startColumn=""13"" endLine=""31"" endColumn=""51"" /> <entry offset=""0xf"" startLine=""32"" startColumn=""9"" 
endLine=""32"" endColumn=""10"" /> </sequencePoints> </method> <method containingType=""ConsoleApplication1.Program"" name="".cctor""> <customDebugInfo> <forward declaringType=""ConsoleApplication1.Program"" methodName=""Main"" parameterNames=""args"" /> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" startLine=""10"" startColumn=""9"" endLine=""10"" endColumn=""53"" /> </sequencePoints> </method> <method containingType=""ConsoleApplication1.Program+&lt;QBar&gt;d__2"" name=""MoveNext""> <customDebugInfo> <forward declaringType=""ConsoleApplication1.Program"" methodName=""Main"" parameterNames=""args"" /> <encLocalSlotMap> <slot kind=""27"" offset=""0"" /> <slot kind=""33"" offset=""15"" /> <slot kind=""temp"" /> <slot kind=""temp"" /> </encLocalSlotMap> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0xe"" startLine=""16"" startColumn=""9"" endLine=""16"" endColumn=""10"" /> <entry offset=""0xf"" startLine=""17"" startColumn=""13"" endLine=""17"" endColumn=""26"" /> <entry offset=""0x20"" hidden=""true"" /> <entry offset=""0x79"" hidden=""true"" /> <entry offset=""0x7b"" hidden=""true"" /> <entry offset=""0x93"" startLine=""18"" startColumn=""9"" endLine=""18"" endColumn=""10"" /> <entry offset=""0x9b"" hidden=""true"" /> </sequencePoints> <asyncInfo> <catchHandler offset=""0x7b"" /> <kickoffMethod declaringType=""ConsoleApplication1.Program"" methodName=""QBar"" /> <await yield=""0x32"" resume=""0x4d"" declaringType=""ConsoleApplication1.Program+&lt;QBar&gt;d__2"" methodName=""MoveNext"" /> </asyncInfo> </method> <method containingType=""ConsoleApplication1.Program+&lt;ZBar&gt;d__3"" name=""MoveNext""> <customDebugInfo> <forward declaringType=""ConsoleApplication1.Program"" methodName=""Main"" parameterNames=""args"" /> <hoistedLocalScopes> <slot startOffset=""0x0"" endOffset=""0x14f"" /> <slot /> <slot /> <slot startOffset=""0x41"" endOffset=""0xee"" /> <slot 
startOffset=""0x54"" endOffset=""0xee"" /> </hoistedLocalScopes> <encLocalSlotMap> <slot kind=""27"" offset=""0"" /> <slot kind=""20"" offset=""0"" /> <slot kind=""33"" offset=""141"" /> <slot kind=""temp"" /> <slot kind=""temp"" /> <slot kind=""temp"" /> </encLocalSlotMap> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0x11"" startLine=""20"" startColumn=""9"" endLine=""20"" endColumn=""10"" /> <entry offset=""0x12"" startLine=""21"" startColumn=""13"" endLine=""21"" endColumn=""45"" /> <entry offset=""0x1d"" startLine=""22"" startColumn=""13"" endLine=""22"" endColumn=""20"" /> <entry offset=""0x1e"" startLine=""22"" startColumn=""31"" endLine=""22"" endColumn=""46"" /> <entry offset=""0x3c"" hidden=""true"" /> <entry offset=""0x41"" startLine=""22"" startColumn=""22"" endLine=""22"" endColumn=""27"" /> <entry offset=""0x54"" startLine=""23"" startColumn=""13"" endLine=""23"" endColumn=""14"" /> <entry offset=""0x55"" startLine=""24"" startColumn=""17"" endLine=""24"" endColumn=""55"" /> <entry offset=""0x6b"" hidden=""true"" /> <entry offset=""0xdb"" startLine=""25"" startColumn=""17"" endLine=""25"" endColumn=""39"" /> <entry offset=""0xed"" startLine=""26"" startColumn=""13"" endLine=""26"" endColumn=""14"" /> <entry offset=""0xee"" hidden=""true"" /> <entry offset=""0xfc"" startLine=""22"" startColumn=""28"" endLine=""22"" endColumn=""30"" /> <entry offset=""0x116"" startLine=""27"" startColumn=""13"" endLine=""27"" endColumn=""30"" /> <entry offset=""0x11f"" hidden=""true"" /> <entry offset=""0x139"" startLine=""28"" startColumn=""9"" endLine=""28"" endColumn=""10"" /> <entry offset=""0x141"" hidden=""true"" /> </sequencePoints> <asyncInfo> <kickoffMethod declaringType=""ConsoleApplication1.Program"" methodName=""ZBar"" /> <await yield=""0x7d"" resume=""0x9c"" declaringType=""ConsoleApplication1.Program+&lt;ZBar&gt;d__3"" methodName=""MoveNext"" /> </asyncInfo> </method> 
</methods>
</symbols>");
        }

        // Verifies PDB output for an "async void" method awaiting a dynamic value:
        // the kickoff method forwards its debug info to the state machine
        // (&lt;Await&gt;d__0), whose MoveNext carries the hoisted-local scopes,
        // EnC local-slot map, sequence points, and async yield/resume offsets
        // pinned in the baseline below.
        [Fact]
        [WorkItem(1137300, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1137300")]
        [WorkItem(690180, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/690180")]
        public void TestAsyncDebug3()
        {
            var text = @"
class TestCase
{
    static async void Await(dynamic d)
    {
        int rez = await d;
    }
}";
            var compilation = CreateCompilationWithMscorlib45(
                    text,
                    options: TestOptions.DebugDll,
                    references: new[] { SystemRef_v4_0_30319_17929, SystemCoreRef_v4_0_30319_17929, CSharpRef })
                .VerifyDiagnostics();

            compilation.VerifyPdb(@"
<symbols>
  <methods>
    <method containingType=""TestCase"" name=""Await"" parameterNames=""d"">
      <customDebugInfo>
        <forwardIterator name=""&lt;Await&gt;d__0"" />
        <encLocalSlotMap>
          <slot kind=""0"" offset=""15"" />
          <slot kind=""28"" offset=""21"" />
          <slot kind=""28"" offset=""21"" ordinal=""1"" />
          <slot kind=""28"" offset=""21"" ordinal=""2"" />
        </encLocalSlotMap>
      </customDebugInfo>
    </method>
    <method containingType=""TestCase+&lt;Await&gt;d__0"" name=""MoveNext"">
      <customDebugInfo>
        <using>
          <namespace usingCount=""0"" />
        </using>
        <hoistedLocalScopes>
          <slot startOffset=""0x0"" endOffset=""0x262"" />
        </hoistedLocalScopes>
        <encLocalSlotMap>
          <slot kind=""27"" offset=""0"" />
          <slot kind=""33"" offset=""21"" />
          <slot kind=""temp"" />
          <slot kind=""temp"" />
          <slot kind=""temp"" />
          <slot kind=""temp"" />
          <slot kind=""temp"" />
        </encLocalSlotMap>
      </customDebugInfo>
      <sequencePoints>
        <entry offset=""0x0"" hidden=""true"" />
        <entry offset=""0x7"" hidden=""true"" />
        <entry offset=""0x11"" startLine=""5"" startColumn=""5"" endLine=""5"" endColumn=""6"" />
        <entry offset=""0x12"" startLine=""6"" startColumn=""9"" endLine=""6"" endColumn=""27"" />
        <entry offset=""0xae"" hidden=""true"" />
        <entry offset=""0x231"" hidden=""true"" />
        <entry offset=""0x233"" hidden=""true"" />
        <entry offset=""0x24d"" startLine=""7"" startColumn=""5"" endLine=""7"" endColumn=""6"" />
        <entry offset=""0x255"" hidden=""true"" />
      </sequencePoints>
      <asyncInfo>
        <catchHandler offset=""0x233"" />
        <kickoffMethod declaringType=""TestCase"" methodName=""Await"" parameterNames=""d"" />
        <await yield=""0x148"" resume=""0x190"" declaringType=""TestCase+&lt;Await&gt;d__0"" methodName=""MoveNext"" />
      </asyncInfo>
    </method>
  </methods>
</symbols>");
        }

        // Verifies the debug-build IL shape of a simple async method: the kickoff
        // method C.F creates the state machine, initializes its builder and state
        // field to -1, starts it, and returns builder.Task; MoveNext (baseline
        // continues past this chunk) contains the state dispatch, the await
        // suspend/resume path, and the catch-all routing exceptions to the builder.
        [Fact]
        public void TestAsyncDebug4()
        {
            var text = @"
using System;
using System.Threading.Tasks;

class C
{
    static async Task<int> F()
    {
        await Task.Delay(1);
        return 1;
    }
}";
            var v = CompileAndVerify(CreateCompilationWithMscorlib45(text, options: TestOptions.DebugDll));

            v.VerifyIL("C.F", @"
{
  // Code size       52 (0x34)
  .maxstack  2
  .locals init (C.<F>d__0 V_0,
                System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int> V_1)
  IL_0000:  newobj     ""C.<F>d__0..ctor()""
  IL_0005:  stloc.0
  IL_0006:  ldloc.0
  IL_0007:  call       ""System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int> System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int>.Create()""
  IL_000c:  stfld      ""System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int> C.<F>d__0.<>t__builder""
  IL_0011:  ldloc.0
  IL_0012:  ldc.i4.m1
  IL_0013:  stfld      ""int C.<F>d__0.<>1__state""
  IL_0018:  ldloc.0
  IL_0019:  ldfld      ""System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int> C.<F>d__0.<>t__builder""
  IL_001e:  stloc.1
  IL_001f:  ldloca.s   V_1
  IL_0021:  ldloca.s   V_0
  IL_0023:  call       ""void System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int>.Start<C.<F>d__0>(ref C.<F>d__0)""
  IL_0028:  ldloc.0
  IL_0029:  ldflda     ""System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int> C.<F>d__0.<>t__builder""
  IL_002e:  call       ""System.Threading.Tasks.Task<int> System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int>.Task.get""
  IL_0033:  ret
}", sequencePoints: "C.F");

            v.VerifyIL("C.<F>d__0.System.Runtime.CompilerServices.IAsyncStateMachine.MoveNext", @"
{
  // Code size      168 (0xa8)
  .maxstack  3
  .locals init (int V_0,
                int V_1,
                System.Runtime.CompilerServices.TaskAwaiter V_2,
                C.<F>d__0 V_3,
                System.Exception V_4)
 ~IL_0000:  ldarg.0
  IL_0001:  ldfld      ""int C.<F>d__0.<>1__state""
  IL_0006:
stloc.0 .try { ~IL_0007: ldloc.0 IL_0008: brfalse.s IL_000c IL_000a: br.s IL_000e IL_000c: br.s IL_0048 -IL_000e: nop -IL_000f: ldc.i4.1 IL_0010: call ""System.Threading.Tasks.Task System.Threading.Tasks.Task.Delay(int)"" IL_0015: callvirt ""System.Runtime.CompilerServices.TaskAwaiter System.Threading.Tasks.Task.GetAwaiter()"" IL_001a: stloc.2 ~IL_001b: ldloca.s V_2 IL_001d: call ""bool System.Runtime.CompilerServices.TaskAwaiter.IsCompleted.get"" IL_0022: brtrue.s IL_0064 IL_0024: ldarg.0 IL_0025: ldc.i4.0 IL_0026: dup IL_0027: stloc.0 IL_0028: stfld ""int C.<F>d__0.<>1__state"" <IL_002d: ldarg.0 IL_002e: ldloc.2 IL_002f: stfld ""System.Runtime.CompilerServices.TaskAwaiter C.<F>d__0.<>u__1"" IL_0034: ldarg.0 IL_0035: stloc.3 IL_0036: ldarg.0 IL_0037: ldflda ""System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int> C.<F>d__0.<>t__builder"" IL_003c: ldloca.s V_2 IL_003e: ldloca.s V_3 IL_0040: call ""void System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int>.AwaitUnsafeOnCompleted<System.Runtime.CompilerServices.TaskAwaiter, C.<F>d__0>(ref System.Runtime.CompilerServices.TaskAwaiter, ref C.<F>d__0)"" IL_0045: nop IL_0046: leave.s IL_00a7 >IL_0048: ldarg.0 IL_0049: ldfld ""System.Runtime.CompilerServices.TaskAwaiter C.<F>d__0.<>u__1"" IL_004e: stloc.2 IL_004f: ldarg.0 IL_0050: ldflda ""System.Runtime.CompilerServices.TaskAwaiter C.<F>d__0.<>u__1"" IL_0055: initobj ""System.Runtime.CompilerServices.TaskAwaiter"" IL_005b: ldarg.0 IL_005c: ldc.i4.m1 IL_005d: dup IL_005e: stloc.0 IL_005f: stfld ""int C.<F>d__0.<>1__state"" IL_0064: ldloca.s V_2 IL_0066: call ""void System.Runtime.CompilerServices.TaskAwaiter.GetResult()"" IL_006b: nop IL_006c: ldloca.s V_2 IL_006e: initobj ""System.Runtime.CompilerServices.TaskAwaiter"" -IL_0074: ldc.i4.1 IL_0075: stloc.1 IL_0076: leave.s IL_0092 } catch System.Exception { ~IL_0078: stloc.s V_4 IL_007a: ldarg.0 IL_007b: ldc.i4.s -2 IL_007d: stfld ""int C.<F>d__0.<>1__state"" IL_0082: ldarg.0 IL_0083: ldflda 
""System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int> C.<F>d__0.<>t__builder"" IL_0088: ldloc.s V_4 IL_008a: call ""void System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int>.SetException(System.Exception)"" IL_008f: nop IL_0090: leave.s IL_00a7 } -IL_0092: ldarg.0 IL_0093: ldc.i4.s -2 IL_0095: stfld ""int C.<F>d__0.<>1__state"" ~IL_009a: ldarg.0 IL_009b: ldflda ""System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int> C.<F>d__0.<>t__builder"" IL_00a0: ldloc.1 IL_00a1: call ""void System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int>.SetResult(int)"" IL_00a6: nop IL_00a7: ret } ", sequencePoints: "C+<F>d__0.MoveNext"); } [WorkItem(836491, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/836491")] [WorkItem(827337, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/827337")] [Fact] public void DisplayClass_InBetweenSuspensionPoints_Release() { string source = @" using System; using System.Threading.Tasks; class C { static async Task M(int b) { byte x1 = 1; byte x2 = 1; byte x3 = 1; ((Action)(() => { x1 = x2 = x3; }))(); await M(x1 + x2 + x3); } } "; // TODO: Currently we don't have means necessary to pass information about the display // class being pushed on evaluation stack, so that EE could find the locals. // Thus the locals are not available in EE. 
var v = CompileAndVerify(CreateCompilationWithMscorlib45(source, options: TestOptions.ReleaseDll.WithMetadataImportOptions(MetadataImportOptions.All)), symbolValidator: module => { Assert.Equal(new[] { "<>1__state", "<>t__builder", "<>u__1", // awaiter }, module.GetFieldNames("C.<M>d__0")); }); v.VerifyPdb("C+<M>d__0.MoveNext", @" <symbols> <methods> <method containingType=""C+&lt;M&gt;d__0"" name=""MoveNext""> <customDebugInfo> <forward declaringType=""C+&lt;&gt;c__DisplayClass0_0"" methodName=""&lt;M&gt;b__0"" /> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0xa"" hidden=""true"" /> <entry offset=""0x10"" startLine=""9"" startColumn=""9"" endLine=""9"" endColumn=""21"" /> <entry offset=""0x17"" startLine=""10"" startColumn=""9"" endLine=""10"" endColumn=""21"" /> <entry offset=""0x1e"" startLine=""11"" startColumn=""9"" endLine=""11"" endColumn=""21"" /> <entry offset=""0x25"" startLine=""13"" startColumn=""9"" endLine=""13"" endColumn=""47"" /> <entry offset=""0x36"" startLine=""15"" startColumn=""9"" endLine=""15"" endColumn=""31"" /> <entry offset=""0x55"" hidden=""true"" /> <entry offset=""0xa9"" hidden=""true"" /> <entry offset=""0xab"" hidden=""true"" /> <entry offset=""0xc2"" startLine=""16"" startColumn=""5"" endLine=""16"" endColumn=""6"" /> <entry offset=""0xca"" hidden=""true"" /> </sequencePoints> <scope startOffset=""0x0"" endOffset=""0xd6""> <scope startOffset=""0xa"" endOffset=""0xab""> <local name=""CS$&lt;&gt;8__locals0"" il_index=""1"" il_start=""0xa"" il_end=""0xab"" attributes=""0"" /> </scope> </scope> <asyncInfo> <kickoffMethod declaringType=""C"" methodName=""M"" parameterNames=""b"" /> <await yield=""0x67"" resume=""0x7e"" declaringType=""C+&lt;M&gt;d__0"" methodName=""MoveNext"" /> </asyncInfo> </method> </methods> </symbols>"); v.VerifyPdb("C.M", @" <symbols> <methods> <method containingType=""C"" name=""M"" parameterNames=""b""> <customDebugInfo> 
<forwardIterator name=""&lt;M&gt;d__0"" /> </customDebugInfo> </method> </methods> </symbols>"); } [WorkItem(836491, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/836491")] [WorkItem(827337, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/827337")] [Fact] public void DisplayClass_InBetweenSuspensionPoints_Debug() { string source = @" using System; using System.Threading.Tasks; class C { static async Task M(int b) { byte x1 = 1; byte x2 = 1; byte x3 = 1; ((Action)(() => { x1 = x2 = x3; }))(); await M(x1 + x2 + x3); // possible EnC edit: // Console.WriteLine(x1); } } "; var v = CompileAndVerify(CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll.WithMetadataImportOptions(MetadataImportOptions.All)), symbolValidator: module => { Assert.Equal(new[] { "<>1__state", "<>t__builder", "b", "<>8__1", // display class "<>u__1", // awaiter }, module.GetFieldNames("C.<M>d__0")); }); v.VerifyPdb("C+<M>d__0.MoveNext", @" <symbols> <methods> <method containingType=""C+&lt;M&gt;d__0"" name=""MoveNext""> <customDebugInfo> <forward declaringType=""C+&lt;&gt;c__DisplayClass0_0"" methodName=""&lt;M&gt;b__0"" /> <hoistedLocalScopes> <slot startOffset=""0x0"" endOffset=""0x10e"" /> </hoistedLocalScopes> <encLocalSlotMap> <slot kind=""27"" offset=""0"" /> <slot kind=""33"" offset=""129"" /> <slot kind=""temp"" /> <slot kind=""temp"" /> </encLocalSlotMap> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0x11"" hidden=""true"" /> <entry offset=""0x1c"" startLine=""8"" startColumn=""5"" endLine=""8"" endColumn=""6"" /> <entry offset=""0x1d"" startLine=""9"" startColumn=""9"" endLine=""9"" endColumn=""21"" /> <entry offset=""0x29"" startLine=""10"" startColumn=""9"" endLine=""10"" endColumn=""21"" /> <entry offset=""0x35"" startLine=""11"" startColumn=""9"" endLine=""11"" endColumn=""21"" /> <entry offset=""0x41"" startLine=""13"" startColumn=""9"" endLine=""13"" 
endColumn=""47"" /> <entry offset=""0x58"" startLine=""15"" startColumn=""9"" endLine=""15"" endColumn=""31"" /> <entry offset=""0x86"" hidden=""true"" /> <entry offset=""0xdf"" hidden=""true"" /> <entry offset=""0xe1"" hidden=""true"" /> <entry offset=""0xf9"" startLine=""19"" startColumn=""5"" endLine=""19"" endColumn=""6"" /> <entry offset=""0x101"" hidden=""true"" /> </sequencePoints> <asyncInfo> <kickoffMethod declaringType=""C"" methodName=""M"" parameterNames=""b"" /> <await yield=""0x98"" resume=""0xb3"" declaringType=""C+&lt;M&gt;d__0"" methodName=""MoveNext"" /> </asyncInfo> </method> </methods> </symbols>"); v.VerifyPdb("C.M", @" <symbols> <methods> <method containingType=""C"" name=""M"" parameterNames=""b""> <customDebugInfo> <forwardIterator name=""&lt;M&gt;d__0"" /> <encLocalSlotMap> <slot kind=""30"" offset=""0"" /> </encLocalSlotMap> <encLambdaMap> <methodOrdinal>0</methodOrdinal> <closure offset=""0"" /> <lambda offset=""95"" closure=""0"" /> </encLambdaMap> </customDebugInfo> </method> </methods> </symbols>"); } [WorkItem(836491, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/836491")] [WorkItem(827337, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/827337")] [Fact] public void DisplayClass_AcrossSuspensionPoints_Release() { string source = @" using System; using System.Threading.Tasks; class C { static async Task M(int b) { byte x1 = 1; byte x2 = 1; byte x3 = 1; ((Action)(() => { x1 = x2 = x3; }))(); await Task.Delay(0); Console.WriteLine(x1); } } "; var v = CompileAndVerify(CreateCompilationWithMscorlib45(source, options: TestOptions.ReleaseDll.WithMetadataImportOptions(MetadataImportOptions.All)), symbolValidator: module => { Assert.Equal(new[] { "<>1__state", "<>t__builder", "<>8__1", // display class "<>u__1", // awaiter }, module.GetFieldNames("C.<M>d__0")); }); v.VerifyPdb("C+<M>d__0.MoveNext", @" <symbols> <methods> <method containingType=""C+&lt;M&gt;d__0"" name=""MoveNext""> <customDebugInfo> <forward 
declaringType=""C+&lt;&gt;c__DisplayClass0_0"" methodName=""&lt;M&gt;b__0"" /> <hoistedLocalScopes> <slot startOffset=""0x0"" endOffset=""0xec"" /> </hoistedLocalScopes> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0xa"" hidden=""true"" /> <entry offset=""0x15"" startLine=""9"" startColumn=""9"" endLine=""9"" endColumn=""21"" /> <entry offset=""0x21"" startLine=""10"" startColumn=""9"" endLine=""10"" endColumn=""21"" /> <entry offset=""0x2d"" startLine=""11"" startColumn=""9"" endLine=""11"" endColumn=""21"" /> <entry offset=""0x39"" startLine=""13"" startColumn=""9"" endLine=""13"" endColumn=""47"" /> <entry offset=""0x4f"" startLine=""15"" startColumn=""9"" endLine=""15"" endColumn=""29"" /> <entry offset=""0x5b"" hidden=""true"" /> <entry offset=""0xaf"" startLine=""17"" startColumn=""9"" endLine=""17"" endColumn=""31"" /> <entry offset=""0xbf"" hidden=""true"" /> <entry offset=""0xc1"" hidden=""true"" /> <entry offset=""0xd8"" startLine=""18"" startColumn=""5"" endLine=""18"" endColumn=""6"" /> <entry offset=""0xe0"" hidden=""true"" /> </sequencePoints> <asyncInfo> <kickoffMethod declaringType=""C"" methodName=""M"" parameterNames=""b"" /> <await yield=""0x6d"" resume=""0x84"" declaringType=""C+&lt;M&gt;d__0"" methodName=""MoveNext"" /> </asyncInfo> </method> </methods> </symbols>"); v.VerifyPdb("C.M", @" <symbols> <methods> <method containingType=""C"" name=""M"" parameterNames=""b""> <customDebugInfo> <forwardIterator name=""&lt;M&gt;d__0"" /> </customDebugInfo> </method> </methods> </symbols>"); } [WorkItem(836491, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/836491")] [WorkItem(827337, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/827337")] [Fact] public void DisplayClass_AcrossSuspensionPoints_Debug() { string source = @" using System; using System.Threading.Tasks; class C { static async Task M(int b) { byte x1 = 1; byte x2 = 1; byte x3 = 1; 
((Action)(() => { x1 = x2 = x3; }))(); await Task.Delay(0); Console.WriteLine(x1); } } "; var v = CompileAndVerify(CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll.WithMetadataImportOptions(MetadataImportOptions.All)), symbolValidator: module => { Assert.Equal(new[] { "<>1__state", "<>t__builder", "b", "<>8__1", // display class "<>u__1", // awaiter }, module.GetFieldNames("C.<M>d__0")); }); v.VerifyPdb("C+<M>d__0.MoveNext", @" <symbols> <methods> <method containingType=""C+&lt;M&gt;d__0"" name=""MoveNext""> <customDebugInfo> <forward declaringType=""C+&lt;&gt;c__DisplayClass0_0"" methodName=""&lt;M&gt;b__0"" /> <hoistedLocalScopes> <slot startOffset=""0x0"" endOffset=""0xfd"" /> </hoistedLocalScopes> <encLocalSlotMap> <slot kind=""27"" offset=""0"" /> <slot kind=""33"" offset=""129"" /> <slot kind=""temp"" /> <slot kind=""temp"" /> </encLocalSlotMap> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0x11"" hidden=""true"" /> <entry offset=""0x1c"" startLine=""8"" startColumn=""5"" endLine=""8"" endColumn=""6"" /> <entry offset=""0x1d"" startLine=""9"" startColumn=""9"" endLine=""9"" endColumn=""21"" /> <entry offset=""0x29"" startLine=""10"" startColumn=""9"" endLine=""10"" endColumn=""21"" /> <entry offset=""0x35"" startLine=""11"" startColumn=""9"" endLine=""11"" endColumn=""21"" /> <entry offset=""0x41"" startLine=""13"" startColumn=""9"" endLine=""13"" endColumn=""47"" /> <entry offset=""0x58"" startLine=""15"" startColumn=""9"" endLine=""15"" endColumn=""29"" /> <entry offset=""0x64"" hidden=""true"" /> <entry offset=""0xbd"" startLine=""17"" startColumn=""9"" endLine=""17"" endColumn=""31"" /> <entry offset=""0xce"" hidden=""true"" /> <entry offset=""0xd0"" hidden=""true"" /> <entry offset=""0xe8"" startLine=""18"" startColumn=""5"" endLine=""18"" endColumn=""6"" /> <entry offset=""0xf0"" hidden=""true"" /> </sequencePoints> <asyncInfo> <kickoffMethod 
declaringType=""C"" methodName=""M"" parameterNames=""b"" /> <await yield=""0x76"" resume=""0x91"" declaringType=""C+&lt;M&gt;d__0"" methodName=""MoveNext"" /> </asyncInfo> </method> </methods> </symbols>"); v.VerifyPdb("C.M", @" <symbols> <methods> <method containingType=""C"" name=""M"" parameterNames=""b""> <customDebugInfo> <forwardIterator name=""&lt;M&gt;d__0"" /> <encLocalSlotMap> <slot kind=""30"" offset=""0"" /> </encLocalSlotMap> <encLambdaMap> <methodOrdinal>0</methodOrdinal> <closure offset=""0"" /> <lambda offset=""95"" closure=""0"" /> </encLambdaMap> </customDebugInfo> </method> </methods> </symbols>"); } [WorkItem(836491, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/836491")] [WorkItem(827337, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/827337")] [Fact] public void DynamicLocal_AcrossSuspensionPoints_Debug() { string source = @" using System.Threading.Tasks; class C { static async Task M() { dynamic d = 1; await Task.Delay(0); d.ToString(); } } "; var v = CompileAndVerify(CreateCompilationWithMscorlib45(source, new[] { SystemCoreRef, CSharpRef }, options: TestOptions.DebugDll.WithMetadataImportOptions(MetadataImportOptions.All)), symbolValidator: module => { Assert.Equal(new[] { "<>1__state", "<>t__builder", "<d>5__1", "<>u__1", // awaiter }, module.GetFieldNames("C.<M>d__0")); }); // CHANGE: Dev12 emits a <dynamiclocal> entry for "d", but gives it slot "-1", preventing it from matching // any locals when consumed by the EE (i.e. it has no effect). See FUNCBRECEE::IsLocalDynamic. 
v.VerifyPdb("C+<M>d__0.MoveNext", @" <symbols> <methods> <method containingType=""C+&lt;M&gt;d__0"" name=""MoveNext""> <customDebugInfo> <using> <namespace usingCount=""1"" /> </using> <hoistedLocalScopes> <slot startOffset=""0x0"" endOffset=""0x10a"" /> </hoistedLocalScopes> <encLocalSlotMap> <slot kind=""27"" offset=""0"" /> <slot kind=""33"" offset=""35"" /> <slot kind=""temp"" /> <slot kind=""temp"" /> </encLocalSlotMap> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0xe"" startLine=""7"" startColumn=""5"" endLine=""7"" endColumn=""6"" /> <entry offset=""0xf"" startLine=""8"" startColumn=""9"" endLine=""8"" endColumn=""23"" /> <entry offset=""0x1b"" startLine=""9"" startColumn=""9"" endLine=""9"" endColumn=""29"" /> <entry offset=""0x27"" hidden=""true"" /> <entry offset=""0x83"" startLine=""10"" startColumn=""9"" endLine=""10"" endColumn=""22"" /> <entry offset=""0xdb"" hidden=""true"" /> <entry offset=""0xdd"" hidden=""true"" /> <entry offset=""0xf5"" startLine=""11"" startColumn=""5"" endLine=""11"" endColumn=""6"" /> <entry offset=""0xfd"" hidden=""true"" /> </sequencePoints> <scope startOffset=""0x0"" endOffset=""0x10a""> <namespace name=""System.Threading.Tasks"" /> </scope> <asyncInfo> <kickoffMethod declaringType=""C"" methodName=""M"" /> <await yield=""0x39"" resume=""0x57"" declaringType=""C+&lt;M&gt;d__0"" methodName=""MoveNext"" /> </asyncInfo> </method> </methods> </symbols>"); v.VerifyPdb("C.M", @" <symbols> <methods> <method containingType=""C"" name=""M""> <customDebugInfo> <forwardIterator name=""&lt;M&gt;d__0"" /> <encLocalSlotMap> <slot kind=""0"" offset=""19"" /> </encLocalSlotMap> </customDebugInfo> </method> </methods> </symbols> "); } [WorkItem(836491, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/836491")] [WorkItem(827337, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/827337")] [WorkItem(1070519, 
"http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1070519")] [Fact] public void DynamicLocal_InBetweenSuspensionPoints_Release() { string source = @" using System.Threading.Tasks; class C { static async Task M() { dynamic d = 1; d.ToString(); await Task.Delay(0); } } "; var v = CompileAndVerify(CreateCompilationWithMscorlib45(source, new[] { SystemCoreRef, CSharpRef }, options: TestOptions.ReleaseDll.WithMetadataImportOptions(MetadataImportOptions.All)), symbolValidator: module => { Assert.Equal(new[] { "<>1__state", "<>t__builder", "<>u__1", // awaiter }, module.GetFieldNames("C.<M>d__0")); }); v.VerifyPdb("C+<M>d__0.MoveNext", @" <symbols> <methods> <method containingType=""C+&lt;M&gt;d__0"" name=""MoveNext""> <customDebugInfo> <using> <namespace usingCount=""1"" /> </using> <dynamicLocals> <bucket flagCount=""1"" flags=""1"" slotId=""1"" localName=""d"" /> </dynamicLocals> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0xd"" startLine=""8"" startColumn=""9"" endLine=""8"" endColumn=""23"" /> <entry offset=""0x14"" startLine=""9"" startColumn=""9"" endLine=""9"" endColumn=""22"" /> <entry offset=""0x64"" startLine=""10"" startColumn=""9"" endLine=""10"" endColumn=""29"" /> <entry offset=""0x70"" hidden=""true"" /> <entry offset=""0xc4"" hidden=""true"" /> <entry offset=""0xc6"" hidden=""true"" /> <entry offset=""0xdd"" startLine=""11"" startColumn=""5"" endLine=""11"" endColumn=""6"" /> <entry offset=""0xe5"" hidden=""true"" /> </sequencePoints> <scope startOffset=""0x0"" endOffset=""0xf1""> <namespace name=""System.Threading.Tasks"" /> <scope startOffset=""0xd"" endOffset=""0xc6""> <local name=""d"" il_index=""1"" il_start=""0xd"" il_end=""0xc6"" attributes=""0"" /> </scope> </scope> <asyncInfo> <kickoffMethod declaringType=""C"" methodName=""M"" /> <await yield=""0x82"" resume=""0x99"" declaringType=""C+&lt;M&gt;d__0"" methodName=""MoveNext"" /> </asyncInfo> </method> 
</methods> </symbols>"); v.VerifyPdb("C.M", @" <symbols> <methods> <method containingType=""C"" name=""M""> <customDebugInfo> <forwardIterator name=""&lt;M&gt;d__0"" /> </customDebugInfo> </method> </methods> </symbols> "); } [WorkItem(1070519, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1070519")] [Fact] public void DynamicLocal_InBetweenSuspensionPoints_Debug() { string source = @" using System.Threading.Tasks; class C { static async Task M() { dynamic d = 1; d.ToString(); await Task.Delay(0); // Possible EnC edit: // System.Console.WriteLine(d); } } "; var v = CompileAndVerify(CreateCompilationWithMscorlib45(source, new[] { SystemCoreRef, CSharpRef }, options: TestOptions.DebugDll.WithMetadataImportOptions(MetadataImportOptions.All)), symbolValidator: module => { Assert.Equal(new[] { "<>1__state", "<>t__builder", "<d>5__1", "<>u__1", // awaiter }, module.GetFieldNames("C.<M>d__0")); }); v.VerifyPdb("C+<M>d__0.MoveNext", @" <symbols> <methods> <method containingType=""C+&lt;M&gt;d__0"" name=""MoveNext""> <customDebugInfo> <using> <namespace usingCount=""1"" /> </using> <hoistedLocalScopes> <slot startOffset=""0x0"" endOffset=""0x10a"" /> </hoistedLocalScopes> <encLocalSlotMap> <slot kind=""27"" offset=""0"" /> <slot kind=""33"" offset=""58"" /> <slot kind=""temp"" /> <slot kind=""temp"" /> </encLocalSlotMap> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0x11"" startLine=""7"" startColumn=""5"" endLine=""7"" endColumn=""6"" /> <entry offset=""0x12"" startLine=""8"" startColumn=""9"" endLine=""8"" endColumn=""23"" /> <entry offset=""0x1e"" startLine=""9"" startColumn=""9"" endLine=""9"" endColumn=""22"" /> <entry offset=""0x76"" startLine=""10"" startColumn=""9"" endLine=""10"" endColumn=""29"" /> <entry offset=""0x82"" hidden=""true"" /> <entry offset=""0xdb"" hidden=""true"" /> <entry offset=""0xdd"" hidden=""true"" /> <entry offset=""0xf5"" startLine=""14"" 
startColumn=""5"" endLine=""14"" endColumn=""6"" /> <entry offset=""0xfd"" hidden=""true"" /> </sequencePoints> <scope startOffset=""0x0"" endOffset=""0x10a""> <namespace name=""System.Threading.Tasks"" /> </scope> <asyncInfo> <kickoffMethod declaringType=""C"" methodName=""M"" /> <await yield=""0x94"" resume=""0xaf"" declaringType=""C+&lt;M&gt;d__0"" methodName=""MoveNext"" /> </asyncInfo> </method> </methods> </symbols>"); v.VerifyPdb("C.M", @" <symbols> <methods> <method containingType=""C"" name=""M""> <customDebugInfo> <forwardIterator name=""&lt;M&gt;d__0"" /> <encLocalSlotMap> <slot kind=""0"" offset=""19"" /> </encLocalSlotMap> </customDebugInfo> </method> </methods> </symbols> "); } [Fact] public void VariableScopeNotContainingSuspensionPoint() { string source = @" using System; using System.Threading.Tasks; class C { static async Task M() { { int x = 1; Console.WriteLine(x); } { await Task.Delay(0); } } } "; // We need to hoist x even though its scope doesn't contain await. // The scopes may be merged by an EnC edit: // // { // int x = 1; // Console.WriteLine(x); // await Task.Delay(0); // Console.WriteLine(x); // } var v = CompileAndVerify(CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll.WithMetadataImportOptions(MetadataImportOptions.All)), symbolValidator: module => { Assert.Equal(new[] { "<>1__state", "<>t__builder", "<x>5__1", "<>u__1", // awaiter }, module.GetFieldNames("C.<M>d__0")); }); } [Fact] public void AwaitInFinally() { string source = @" using System; using System.Threading.Tasks; class C { static async Task<int> G() { int x = 42; try { } finally { x = await G(); } return x; } }"; var v = CompileAndVerify(CreateCompilationWithMscorlib45(source, new[] { SystemCoreRef, CSharpRef }, options: TestOptions.DebugDll.WithMetadataImportOptions(MetadataImportOptions.All)), symbolValidator: module => { Assert.Equal(new[] { "<>1__state", "<>t__builder", "<x>5__1", "<>s__2", "<>s__3", "<>s__4", "<>u__1", // awaiter }, 
module.GetFieldNames("C.<G>d__0")); }); v.VerifyPdb("C.G", @" <symbols> <methods> <method containingType=""C"" name=""G""> <customDebugInfo> <forwardIterator name=""&lt;G&gt;d__0"" /> <encLocalSlotMap> <slot kind=""0"" offset=""15"" /> <slot kind=""22"" offset=""34"" /> <slot kind=""23"" offset=""34"" /> <slot kind=""28"" offset=""105"" /> </encLocalSlotMap> </customDebugInfo> </method> </methods> </symbols>"); v.VerifyIL("C.<G>d__0.System.Runtime.CompilerServices.IAsyncStateMachine.MoveNext()", @" { // Code size 286 (0x11e) .maxstack 3 .locals init (int V_0, int V_1, object V_2, System.Runtime.CompilerServices.TaskAwaiter<int> V_3, int V_4, C.<G>d__0 V_5, System.Exception V_6) ~IL_0000: ldarg.0 IL_0001: ldfld ""int C.<G>d__0.<>1__state"" IL_0006: stloc.0 .try { ~IL_0007: ldloc.0 IL_0008: brfalse.s IL_000c IL_000a: br.s IL_000e IL_000c: br.s IL_0070 -IL_000e: nop -IL_000f: ldarg.0 IL_0010: ldc.i4.s 42 IL_0012: stfld ""int C.<G>d__0.<x>5__1"" ~IL_0017: ldarg.0 IL_0018: ldnull IL_0019: stfld ""object C.<G>d__0.<>s__2"" IL_001e: ldarg.0 IL_001f: ldc.i4.0 IL_0020: stfld ""int C.<G>d__0.<>s__3"" .try { -IL_0025: nop -IL_0026: nop ~IL_0027: leave.s IL_0033 } catch object { ~IL_0029: stloc.2 IL_002a: ldarg.0 IL_002b: ldloc.2 IL_002c: stfld ""object C.<G>d__0.<>s__2"" IL_0031: leave.s IL_0033 } -IL_0033: nop -IL_0034: call ""System.Threading.Tasks.Task<int> C.G()"" IL_0039: callvirt ""System.Runtime.CompilerServices.TaskAwaiter<int> System.Threading.Tasks.Task<int>.GetAwaiter()"" IL_003e: stloc.3 ~IL_003f: ldloca.s V_3 IL_0041: call ""bool System.Runtime.CompilerServices.TaskAwaiter<int>.IsCompleted.get"" IL_0046: brtrue.s IL_008c IL_0048: ldarg.0 IL_0049: ldc.i4.0 IL_004a: dup IL_004b: stloc.0 IL_004c: stfld ""int C.<G>d__0.<>1__state"" <IL_0051: ldarg.0 IL_0052: ldloc.3 IL_0053: stfld ""System.Runtime.CompilerServices.TaskAwaiter<int> C.<G>d__0.<>u__1"" IL_0058: ldarg.0 IL_0059: stloc.s V_5 IL_005b: ldarg.0 IL_005c: ldflda 
""System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int> C.<G>d__0.<>t__builder"" IL_0061: ldloca.s V_3 IL_0063: ldloca.s V_5 IL_0065: call ""void System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int>.AwaitUnsafeOnCompleted<System.Runtime.CompilerServices.TaskAwaiter<int>, C.<G>d__0>(ref System.Runtime.CompilerServices.TaskAwaiter<int>, ref C.<G>d__0)"" IL_006a: nop IL_006b: leave IL_011d >IL_0070: ldarg.0 IL_0071: ldfld ""System.Runtime.CompilerServices.TaskAwaiter<int> C.<G>d__0.<>u__1"" IL_0076: stloc.3 IL_0077: ldarg.0 IL_0078: ldflda ""System.Runtime.CompilerServices.TaskAwaiter<int> C.<G>d__0.<>u__1"" IL_007d: initobj ""System.Runtime.CompilerServices.TaskAwaiter<int>"" IL_0083: ldarg.0 IL_0084: ldc.i4.m1 IL_0085: dup IL_0086: stloc.0 IL_0087: stfld ""int C.<G>d__0.<>1__state"" IL_008c: ldloca.s V_3 IL_008e: call ""int System.Runtime.CompilerServices.TaskAwaiter<int>.GetResult()"" IL_0093: stloc.s V_4 IL_0095: ldloca.s V_3 IL_0097: initobj ""System.Runtime.CompilerServices.TaskAwaiter<int>"" IL_009d: ldarg.0 IL_009e: ldloc.s V_4 IL_00a0: stfld ""int C.<G>d__0.<>s__4"" IL_00a5: ldarg.0 IL_00a6: ldarg.0 IL_00a7: ldfld ""int C.<G>d__0.<>s__4"" IL_00ac: stfld ""int C.<G>d__0.<x>5__1"" -IL_00b1: nop ~IL_00b2: ldarg.0 IL_00b3: ldfld ""object C.<G>d__0.<>s__2"" IL_00b8: stloc.2 IL_00b9: ldloc.2 IL_00ba: brfalse.s IL_00d7 IL_00bc: ldloc.2 IL_00bd: isinst ""System.Exception"" IL_00c2: stloc.s V_6 IL_00c4: ldloc.s V_6 IL_00c6: brtrue.s IL_00ca IL_00c8: ldloc.2 IL_00c9: throw IL_00ca: ldloc.s V_6 IL_00cc: call ""System.Runtime.ExceptionServices.ExceptionDispatchInfo System.Runtime.ExceptionServices.ExceptionDispatchInfo.Capture(System.Exception)"" IL_00d1: callvirt ""void System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()"" IL_00d6: nop IL_00d7: ldarg.0 IL_00d8: ldfld ""int C.<G>d__0.<>s__3"" IL_00dd: pop IL_00de: ldarg.0 IL_00df: ldnull IL_00e0: stfld ""object C.<G>d__0.<>s__2"" -IL_00e5: ldarg.0 IL_00e6: ldfld ""int C.<G>d__0.<x>5__1"" IL_00eb: 
stloc.1 IL_00ec: leave.s IL_0108 } catch System.Exception { ~IL_00ee: stloc.s V_6 IL_00f0: ldarg.0 IL_00f1: ldc.i4.s -2 IL_00f3: stfld ""int C.<G>d__0.<>1__state"" IL_00f8: ldarg.0 IL_00f9: ldflda ""System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int> C.<G>d__0.<>t__builder"" IL_00fe: ldloc.s V_6 IL_0100: call ""void System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int>.SetException(System.Exception)"" IL_0105: nop IL_0106: leave.s IL_011d } -IL_0108: ldarg.0 IL_0109: ldc.i4.s -2 IL_010b: stfld ""int C.<G>d__0.<>1__state"" ~IL_0110: ldarg.0 IL_0111: ldflda ""System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int> C.<G>d__0.<>t__builder"" IL_0116: ldloc.1 IL_0117: call ""void System.Runtime.CompilerServices.AsyncTaskMethodBuilder<int>.SetResult(int)"" IL_011c: nop IL_011d: ret }", sequencePoints: "C+<G>d__0.MoveNext"); v.VerifyPdb("C+<G>d__0.MoveNext", @" <symbols> <methods> <method containingType=""C+&lt;G&gt;d__0"" name=""MoveNext""> <customDebugInfo> <using> <namespace usingCount=""2"" /> </using> <hoistedLocalScopes> <slot startOffset=""0x0"" endOffset=""0x11e"" /> <slot startOffset=""0x29"" endOffset=""0x33"" /> </hoistedLocalScopes> <encLocalSlotMap> <slot kind=""27"" offset=""0"" /> <slot kind=""20"" offset=""0"" /> <slot kind=""temp"" /> <slot kind=""33"" offset=""105"" /> <slot kind=""temp"" /> <slot kind=""temp"" /> <slot kind=""temp"" /> </encLocalSlotMap> </customDebugInfo> <sequencePoints> <entry offset=""0x0"" hidden=""true"" /> <entry offset=""0x7"" hidden=""true"" /> <entry offset=""0xe"" startLine=""8"" startColumn=""5"" endLine=""8"" endColumn=""6"" /> <entry offset=""0xf"" startLine=""9"" startColumn=""9"" endLine=""9"" endColumn=""20"" /> <entry offset=""0x17"" hidden=""true"" /> <entry offset=""0x25"" startLine=""12"" startColumn=""9"" endLine=""12"" endColumn=""10"" /> <entry offset=""0x26"" startLine=""13"" startColumn=""9"" endLine=""13"" endColumn=""10"" /> <entry offset=""0x27"" hidden=""true"" /> <entry offset=""0x29"" 
hidden=""true"" /> <entry offset=""0x33"" startLine=""15"" startColumn=""9"" endLine=""15"" endColumn=""10"" /> <entry offset=""0x34"" startLine=""16"" startColumn=""13"" endLine=""16"" endColumn=""27"" /> <entry offset=""0x3f"" hidden=""true"" /> <entry offset=""0xb1"" startLine=""17"" startColumn=""9"" endLine=""17"" endColumn=""10"" /> <entry offset=""0xb2"" hidden=""true"" /> <entry offset=""0xe5"" startLine=""19"" startColumn=""9"" endLine=""19"" endColumn=""18"" /> <entry offset=""0xee"" hidden=""true"" /> <entry offset=""0x108"" startLine=""20"" startColumn=""5"" endLine=""20"" endColumn=""6"" /> <entry offset=""0x110"" hidden=""true"" /> </sequencePoints> <scope startOffset=""0x0"" endOffset=""0x11e""> <namespace name=""System"" /> <namespace name=""System.Threading.Tasks"" /> </scope> <asyncInfo> <kickoffMethod declaringType=""C"" methodName=""G"" /> <await yield=""0x51"" resume=""0x70"" declaringType=""C+&lt;G&gt;d__0"" methodName=""MoveNext"" /> </asyncInfo> </method> </methods> </symbols>"); } [Fact] public void HoistedSpilledVariables() { string source = @" using System; using System.Threading.Tasks; class C { int[] a = new int[] { 1, 2 }; static async Task<int> G() { int z0 = H(ref new C().a[F(1)], F(2), ref new C().a[F(3)], await G()); int z1 = H(ref new C().a[F(1)], F(2), ref new C().a[F(3)], await G()); return z0 + z1; } static int H(ref int a, int b, ref int c, int d) => 1; static int F(int a) => a; }"; var v = CompileAndVerify(CreateCompilationWithMscorlib45(source, new[] { SystemCoreRef, CSharpRef }, options: TestOptions.DebugDll.WithMetadataImportOptions(MetadataImportOptions.All)), symbolValidator: module => { Assert.Equal(new[] { "<>1__state", "<>t__builder", "<z0>5__1", "<z1>5__2", "<>s__3", "<>s__4", "<>s__5", "<>s__6", "<>s__7", "<>s__8", "<>s__9", "<>s__10", "<>u__1", // awaiter "<>s__11", // ref-spills "<>s__12", "<>s__13", "<>s__14", }, module.GetFieldNames("C.<G>d__1")); }); v.VerifyPdb("C.G", @" <symbols> <methods> <method 
containingType=""C"" name=""G"">
      <customDebugInfo>
        <forwardIterator name=""&lt;G&gt;d__1"" />
        <encLocalSlotMap>
          <slot kind=""0"" offset=""15"" />
          <slot kind=""0"" offset=""95"" />
          <slot kind=""28"" offset=""70"" />
          <slot kind=""28"" offset=""70"" ordinal=""1"" />
          <slot kind=""28"" offset=""150"" />
          <slot kind=""28"" offset=""150"" ordinal=""1"" />
          <slot kind=""29"" offset=""70"" />
          <slot kind=""29"" offset=""70"" ordinal=""1"" />
          <slot kind=""29"" offset=""70"" ordinal=""2"" />
          <slot kind=""29"" offset=""70"" ordinal=""3"" />
          <slot kind=""29"" offset=""150"" />
          <slot kind=""29"" offset=""150"" ordinal=""1"" />
          <slot kind=""29"" offset=""150"" ordinal=""2"" />
          <slot kind=""29"" offset=""150"" ordinal=""3"" />
        </encLocalSlotMap>
      </customDebugInfo>
    </method>
  </methods>
</symbols>");
        } // end HoistedSpilledVariables
    } // end test class
} // end namespace
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Management.Automation;
using System.Management.Automation.Remoting;
using System.Management.Automation.Runspaces;

namespace Microsoft.PowerShell.Commands
{
    /// <summary>
    /// This class implements New-PSSessionOption cmdlet.
    /// Spec: TBD.
    /// </summary>
    [Cmdlet(VerbsCommon.New, "PSSessionOption", HelpUri = "https://go.microsoft.com/fwlink/?LinkID=144305", RemotingCapability = RemotingCapability.None)]
    [OutputType(typeof(PSSessionOption))]
    public sealed class NewPSSessionOptionCommand : PSCmdlet
    {
        #region Parameters (specific to PSSessionOption)

#if !UNIX
        /// <summary>
        /// The MaximumRedirection parameter enables the implicit redirection functionality
        /// -1 = no limit
        /// 0 = no redirection.
        /// </summary>
        [Parameter]
        public int MaximumRedirection
        {
            get { return _maximumRedirection.Value; }

            set { _maximumRedirection = value; }
        }

        // Nullable backing field so BeginProcessing can tell whether the
        // parameter was actually bound (the value is only copied to the result
        // when HasValue is true; the getter is unsafe to call otherwise).
        private int? _maximumRedirection;

        /// <summary>
        /// If false, underlying WSMan infrastructure will compress data sent on the network.
        /// If true, data will not be compressed. Compression improves performance by
        /// reducing the amount of data sent on the network. Compression my require extra
        /// memory consumption and CPU usage. In cases where available memory / CPU is less,
        /// set this property to "true".
        /// By default the value of this property is "false".
        /// </summary>
        [Parameter]
        public SwitchParameter NoCompression { get; set; }

        /// <summary>
        /// If <c>true</c> then Operating System won't load the user profile (i.e. registry keys under HKCU) on the remote server
        /// which can result in a faster session creation time.  This option won't have any effect if the remote machine has
        /// already loaded the profile (i.e. in another session).
        /// </summary>
        [Parameter]
        public SwitchParameter NoMachineProfile { get; set; }

        /// <summary>
        /// Culture that the remote session should use.
        /// </summary>
        [Parameter]
        [ValidateNotNull]
        public CultureInfo Culture { get; set; }

        /// <summary>
        /// UI culture that the remote session should use.
        /// </summary>
        [Parameter]
        [ValidateNotNull]
        public CultureInfo UICulture { get; set; }

        /// <summary>
        /// Total data (in bytes) that can be received from a remote machine
        /// targeted towards a command. If null, then the size is unlimited.
        /// Default is unlimited data.
        /// </summary>
        [Parameter]
        public int MaximumReceivedDataSizePerCommand
        {
            get { return _maxRecvdDataSizePerCommand.Value; }

            set { _maxRecvdDataSizePerCommand = value; }
        }

        // Null means "unlimited" and is passed through to the result as-is.
        private int? _maxRecvdDataSizePerCommand;

        /// <summary>
        /// Maximum size (in bytes) of a deserialized object received from a remote machine.
        /// If null, then the size is unlimited. Default is unlimited object size.
        /// </summary>
        [Parameter]
        public int MaximumReceivedObjectSize
        {
            get { return _maxRecvdObjectSize.Value; }

            set { _maxRecvdObjectSize = value; }
        }

        // Null means "unlimited" and is passed through to the result as-is.
        private int? _maxRecvdObjectSize;

        /// <summary>
        /// Specifies the output mode on the server when it is in Disconnected mode
        /// and its output data cache becomes full.
        /// </summary>
        [Parameter]
        public OutputBufferingMode OutputBufferingMode { get; set; }

        /// <summary>
        /// Maximum number of times a connection will be re-attempted when a connection fails due to network
        /// issues.
        /// </summary>
        [Parameter]
        [ValidateRange(0, Int32.MaxValue)]
        public int MaxConnectionRetryCount { get; set; }

        /// <summary>
        /// Application arguments the server can see in <see cref="System.Management.Automation.Remoting.PSSenderInfo.ApplicationArguments"/>
        /// </summary>
        [Parameter]
        [ValidateNotNull]
        [SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
        public PSPrimitiveDictionary ApplicationArguments { get; set; }

        /// <summary>
        /// The duration for which PowerShell remoting waits (in milliseconds) before timing
        /// out on a connection to a remote machine. Simply put, the timeout for a remote
        /// runspace creation.
        ///
        /// The user would like to tweak this timeout depending on whether
        /// he/she is connecting to a machine in the data center or across a slow WAN.
        /// </summary>
        [Parameter]
        [Alias("OpenTimeoutMSec")]
        [ValidateRange(0, Int32.MaxValue)]
        public int OpenTimeout
        {
            // Falls back to the infrastructure default when the parameter was not bound.
            get
            {
                return _openTimeout.HasValue ? _openTimeout.Value : RunspaceConnectionInfo.DefaultOpenTimeout;
            }

            set { _openTimeout = value; }
        }

        private int? _openTimeout;

        /// <summary>
        /// The duration for which PowerShell should wait (in milliseconds) before it
        /// times out on cancel operations (close runspace or stop powershell). For
        /// instance, when the user hits ctrl-C, New-PSSession cmdlet tries to call a
        /// stop on all remote runspaces which are in the Opening state. The user
        /// wouldn't mind waiting for 15 seconds, but this should be time bound and of a
        /// shorter duration. A high timeout here like 3 minutes will give the user
        /// a feeling that the PowerShell client is not responding.
        /// </summary>
        [Parameter]
        [Alias("CancelTimeoutMSec")]
        [ValidateRange(0, Int32.MaxValue)]
        public int CancelTimeout
        {
            // Falls back to the transport-layer default when the parameter was not bound.
            get
            {
                return _cancelTimeout.HasValue ? _cancelTimeout.Value : BaseTransportManager.ClientCloseTimeoutMs;
            }

            set { _cancelTimeout = value; }
        }

        private int? _cancelTimeout;

        /// <summary>
        /// The duration for which a Runspace on server needs to wait (in milliseconds) before it
        /// declares the client dead and closes itself down.
        /// This is especially important as these values may have to be configured differently
        /// for enterprise administration scenarios.
        /// </summary>
        [Parameter]
        [ValidateRange(-1, Int32.MaxValue)]
        [Alias("IdleTimeoutMSec")]
        public int IdleTimeout
        {
            // Falls back to the infrastructure default when the parameter was not bound.
            get
            {
                return _idleTimeout.HasValue ? _idleTimeout.Value : RunspaceConnectionInfo.DefaultIdleTimeout;
            }

            set { _idleTimeout = value; }
        }

        private int? _idleTimeout;
#endif

        #endregion Parameters

        #region Parameters copied from New-WSManSessionOption

#if !UNIX
        /// <summary>
        /// By default, ProxyAccessType is None, that means Proxy information (ProxyAccessType,
        /// ProxyAuthenticationMechanism and ProxyCredential)is not passed to WSMan at all.
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        public ProxyAccessType ProxyAccessType { get; set; } = ProxyAccessType.None;

        /// <summary>
        /// The following is the definition of the input parameter "ProxyAuthentication".
        /// This parameter takes a set of authentication methods the user can select
        /// from. The available options should be as follows:
        /// - Negotiate: Use the default authentication (as defined by the underlying
        /// protocol) for establishing a remote connection.
        /// - Basic: Use basic authentication for establishing a remote connection
        /// - Digest: Use Digest authentication for establishing a remote connection.
        /// </summary>
        [Parameter]
        public AuthenticationMechanism ProxyAuthentication { get; set; } = AuthenticationMechanism.Negotiate;

        /// <summary>
        /// The following is the definition of the input parameter "ProxyCredential".
        /// </summary>
        [Parameter]
        [ValidateNotNullOrEmpty]
        [Credential]
        public PSCredential ProxyCredential { get; set; }
#endif

        /// <summary>
        /// The following is the definition of the input parameter "SkipCACheck".
        /// When connecting over HTTPS, the client does not validate that the server
        /// certificate is signed by a trusted certificate authority (CA). Use only when
        /// the remote computer is trusted by other means, for example, if the remote
        /// computer is part of a network that is physically secure and isolated or the
        /// remote computer is listed as a trusted host in WinRM configuration.
        /// </summary>
        [Parameter]
        public SwitchParameter SkipCACheck
        {
            get { return _skipcacheck; }

            set { _skipcacheck = value; }
        }

        private bool _skipcacheck;

        /// <summary>
        /// The following is the definition of the input parameter "SkipCNCheck".
        /// Indicates that certificate common name (CN) of the server need not match the
        /// hostname of the server. Used only in remote operations using https. This
        /// option should only be used for trusted machines.
        /// </summary>
        [Parameter]
        public SwitchParameter SkipCNCheck
        {
            get { return _skipcncheck; }

            set { _skipcncheck = value; }
        }

        private bool _skipcncheck;

#if !UNIX
        /// <summary>
        /// The following is the definition of the input parameter "SkipRevocation".
        /// Indicates that the revocation status of the server certificate is not
        /// checked. Used only in remote operations using https. This
        /// option should only be used for trusted machines.
        /// </summary>
        [Parameter]
        public SwitchParameter SkipRevocationCheck
        {
            get { return _skiprevocationcheck; }

            set { _skiprevocationcheck = value; }
        }

        private bool _skiprevocationcheck;

        /// <summary>
        /// The following is the definition of the input parameter "Timeout".
        /// Defines the timeout in milliseconds for the wsman operation.
        /// </summary>
        [Parameter]
        [Alias("OperationTimeoutMSec")]
        [ValidateRange(0, Int32.MaxValue)]
        public int OperationTimeout
        {
            // Falls back to the transport-layer default when the parameter was not bound.
            get
            {
                return (_operationtimeout.HasValue ? _operationtimeout.Value : BaseTransportManager.ClientDefaultOperationTimeoutMs);
            }

            set { _operationtimeout = value; }
        }

        private int? _operationtimeout;

        /// <summary>
        /// The following is the definition of the input parameter "UnEncrypted".
        /// Specifies that no encryption will be used when doing remote operations over
        /// http. Unencrypted traffic is not allowed by default and must be enabled in
        /// the local configuration.
        /// </summary>
        [Parameter]
        public SwitchParameter NoEncryption
        {
            get { return _noencryption; }

            set { _noencryption = value; }
        }

        private bool _noencryption;

        /// <summary>
        /// The following is the definition of the input parameter "UTF16".
        /// Indicates the request is encoded in UTF16 format rather than UTF8 format;
        /// UTF8 is the default.
        /// </summary>
        [Parameter]
        [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "UTF")]
        public SwitchParameter UseUTF16
        {
            get { return _useutf16; }

            set { _useutf16 = value; }
        }

        private bool _useutf16;

        /// <summary>
        /// Uses Service Principal Name (SPN) along with the Port number during authentication.
        /// </summary>
        [Parameter]
        [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "SPN")]
        public SwitchParameter IncludePortInSPN
        {
            get { return _includePortInSPN; }

            set { _includePortInSPN = value; }
        }

        private bool _includePortInSPN;
#endif

        #endregion

        #region Implementation

        /// <summary>
        /// Performs initialization of cmdlet execution.
        /// Builds a <see cref="PSSessionOption"/> from the bound parameters and writes
        /// it to the pipeline; nullable-backed parameters are only copied when they
        /// were explicitly bound (HasValue).
        /// </summary>
        protected override void BeginProcessing()
        {
            PSSessionOption result = new PSSessionOption();

            // Begin: WSMan specific options
#if !UNIX
            result.ProxyAccessType = this.ProxyAccessType;
            result.ProxyAuthentication = this.ProxyAuthentication;
            result.ProxyCredential = this.ProxyCredential;
#endif
            result.SkipCACheck = this.SkipCACheck;
            result.SkipCNCheck = this.SkipCNCheck;
#if !UNIX
            result.SkipRevocationCheck = this.SkipRevocationCheck;

            if (_operationtimeout.HasValue)
            {
                result.OperationTimeout = TimeSpan.FromMilliseconds(_operationtimeout.Value);
            }

            result.NoEncryption = this.NoEncryption;
            result.UseUTF16 = this.UseUTF16;
            result.IncludePortInSPN = this.IncludePortInSPN;
            // End: WSMan specific options

            if (_maximumRedirection.HasValue)
            {
                result.MaximumConnectionRedirectionCount = this.MaximumRedirection;
            }

            result.NoCompression = this.NoCompression.IsPresent;
            result.NoMachineProfile = this.NoMachineProfile.IsPresent;

            // Null (parameter not bound) means "unlimited" for these two.
            result.MaximumReceivedDataSizePerCommand = _maxRecvdDataSizePerCommand;
            result.MaximumReceivedObjectSize = _maxRecvdObjectSize;

            if (this.Culture != null)
            {
                result.Culture = this.Culture;
            }

            if (this.UICulture != null)
            {
                result.UICulture = this.UICulture;
            }

            if (_openTimeout.HasValue)
            {
                result.OpenTimeout = TimeSpan.FromMilliseconds(_openTimeout.Value);
            }

            if (_cancelTimeout.HasValue)
            {
                result.CancelTimeout = TimeSpan.FromMilliseconds(_cancelTimeout.Value);
            }

            if (_idleTimeout.HasValue)
            {
                result.IdleTimeout = TimeSpan.FromMilliseconds(_idleTimeout.Value);
            }

            result.OutputBufferingMode = OutputBufferingMode;

            result.MaxConnectionRetryCount = MaxConnectionRetryCount;

            if (this.ApplicationArguments != null)
            {
                result.ApplicationArguments = this.ApplicationArguments;
            }
#endif

            this.WriteObject(result);
        }

        #endregion Methods
    }
}
//
// Copyright (c) .NET Foundation and Contributors
// Portions Copyright (c) Microsoft Corporation.  All rights reserved.
// See LICENSE file in the project root for full license information.
//

using CorDebugInterop;
using System;
using System.Diagnostics;
using BreakpointDef = nanoFramework.Tools.Debugger.WireProtocol.Commands.Debugging_Execution_BreakpointDef;

namespace nanoFramework.Tools.VisualStudio.Extension
{
    /// <summary>
    /// Implements ICorDebugStepper on top of the nanoCLR breakpoint machinery.
    /// A step is realized as a special breakpoint definition (c_STEP_* flags)
    /// attached to the frame/thread captured at construction time.
    /// </summary>
    public class CorDebugStepper : CorDebugBreakpointBase, ICorDebugStepper, ICorDebugStepper2
    {
        // If a stepper steps into/out of a frame, m_frame must be updated to track it.
        CorDebugFrame m_frame;
        CorDebugThread m_thread;
        COR_DEBUG_STEP_RANGE[] m_ranges;
        CorDebugStepReason m_reasonStopped;
        CorDebugIntercept m_interceptMask;

        public CorDebugStepper(CorDebugFrame frame) : base(frame.AppDomain)
        {
            Initialize(frame);
        }

        // Re-targets the stepper at a (new) frame and refreshes the breakpoint definition.
        private void Initialize(CorDebugFrame frame)
        {
            m_frame = frame;
            m_thread = frame.Thread;

            InitializeBreakpointDef();
        }

        // Shadows the base Kind: a STEP_IN implicitly requires STEP_OVER, and every
        // step also has to stop on STEP_OUT, caught exceptions and thread termination.
        private new ushort Kind
        {
            [DebuggerHidden]
            get { return base.Kind; }

            set
            {
                if ((value & BreakpointDef.c_STEP_IN) != 0)
                    value |= BreakpointDef.c_STEP_OVER;

                value |= BreakpointDef.c_STEP_OUT | BreakpointDef.c_EXCEPTION_CAUGHT | BreakpointDef.c_THREAD_TERMINATED;

                base.Kind = value;
            }
        }

        // Fills the wire-protocol breakpoint definition (depth, thread, IP range) from
        // the current frame and step ranges, and marks it dirty for re-send.
        private void InitializeBreakpointDef()
        {
            m_breakpointDef.m_depth = m_frame.DepthnanoCLR;
            m_breakpointDef.m_pid = m_thread.ID;

            if (m_ranges != null && m_ranges.Length > 0)
            {
                m_breakpointDef.m_IPStart = m_ranges[0].startOffset;
                m_breakpointDef.m_IPEnd = m_ranges[0].endOffset;
            }
            else
            {
                m_breakpointDef.m_IPStart = 0;
                m_breakpointDef.m_IPEnd = 0;
            }

            Dirty();
        }

        private void Activate(ushort kind)
        {
            InitializeBreakpointDef();

            Debug.Assert(!Active);

            // currently, we don't support ignoring filters in a step. cpde always seems to set this flag though.
            // So it may not be very important to support ignoring filters.
            Debug.Assert((m_interceptMask & CorDebugIntercept.INTERCEPT_EXCEPTION_FILTER) != 0);

            Kind = kind;
            Active = true;
        }

        /// <summary>
        /// Decides whether a step-breakpoint hit reported by the device should stop
        /// execution, and records the reason for the eventual step-complete callback.
        /// </summary>
        public override bool ShouldBreak(BreakpointDef breakpointDef)
        {
            bool fStop = true;
            CorDebugStepReason reason;

            // optimize, optimize, optimize No reason to get list of threads, and get thread stack for each step!!!
            ushort flags = breakpointDef.m_flags;
            int depthOld = (int)m_frame.DepthnanoCLR;
            int depthNew = (int)breakpointDef.m_depth;
            int dDepth = depthNew - depthOld;

            if ((flags & BreakpointDef.c_STEP) != 0)
            {
                if ((flags & BreakpointDef.c_STEP_IN) != 0)
                {
                    if (Process.Engine.Capabilities.ExceptionFilters &&
                        breakpointDef.m_depthExceptionHandler == BreakpointDef.c_DEPTH_STEP_INTERCEPT)
                    {
                        reason = CorDebugStepReason.STEP_INTERCEPT;
                    }
                    else
                    {
                        reason = CorDebugStepReason.STEP_CALL;
                    }
                }
                else if ((flags & BreakpointDef.c_STEP_OVER) != 0)
                {
                    reason = CorDebugStepReason.STEP_NORMAL;
                }
                else
                {
                    // FIX: use short-circuit '&&' for consistency with the STEP_IN
                    // branch above (the non-short-circuit '&' produced the same value
                    // but was an inconsistency).
                    if (Process.Engine.Capabilities.ExceptionFilters &&
                        breakpointDef.m_depthExceptionHandler == BreakpointDef.c_DEPTH_STEP_EXCEPTION_HANDLER)
                    {
                        reason = CorDebugStepReason.STEP_EXCEPTION_HANDLER;
                    }
                    else
                    {
                        reason = CorDebugStepReason.STEP_RETURN;
                    }
                }
            }
            else if ((flags & BreakpointDef.c_EXCEPTION_CAUGHT) != 0)
            {
                reason = CorDebugStepReason.STEP_EXCEPTION_HANDLER;

                // Stop only when the handler is at or above the frame we started in;
                // at equal depth, stop only for a step-over.
                if (dDepth > 0)
                    fStop = false;
                else if (dDepth == 0)
                    fStop = (Debugging_Execution_BreakpointDef.m_flags & BreakpointDef.c_STEP_OVER) != 0;
                else
                    fStop = true;
            }
            else if ((flags & BreakpointDef.c_THREAD_TERMINATED) != 0)
            {
                reason = CorDebugStepReason.STEP_EXIT;

                Active = false;
                fStop = false;
            }
            else
            {
                Debug.Assert(false);
                throw new ApplicationException("Invalid stepper hit received");
            }

            // Inside a StepRange, hits within any of the requested IL ranges (at the
            // same depth) do not stop.
            if (m_ranges != null && reason == CorDebugStepReason.STEP_NORMAL && breakpointDef.m_depth == Debugging_Execution_BreakpointDef.m_depth)
            {
                foreach (COR_DEBUG_STEP_RANGE range in m_ranges)
                {
                    if (Utility.InRange(breakpointDef.m_IP, range.startOffset, range.endOffset - 1))
                    {
                        fStop = false;
                        break;
                    }
                }

                Debug.Assert(Utility.FImplies(m_ranges != null && m_ranges.Length == 1, fStop));
            }

            if (fStop && reason != CorDebugStepReason.STEP_EXIT)
            {
                uint depth = breakpointDef.m_depth;
                CorDebugFrame frame = m_thread.Chain.GetFrameFromDepthnanoCLR(depth);

                m_ranges = null;
                Initialize(frame);

                // Will callback with wrong reason if stepping through internal calls?????
                // If we don't stop at an internal call, we need to reset/remember the range somehow?
                // This might be broken if a StepRange is called that causes us to enter an internal function
                fStop = !m_frame.Function.IsInternal;
            }

            m_reasonStopped = reason;
            return fStop;
        }

        /// <summary>
        /// Called when the step completes; deactivates the stepper and queues the
        /// step-complete managed callback with the recorded reason.
        /// </summary>
        public override void Hit(BreakpointDef breakpointDef)
        {
            m_ranges = null;
            Active = false;

            Process.EnqueueEvent(new ManagedCallbacks.ManagedCallbackStepComplete(m_frame.Thread, this, m_reasonStopped));
        }

        #region ICorDebugStepper Members

        int ICorDebugStepper.IsActive(out int pbActive)
        {
            pbActive = Boolean.BoolToInt(Active);

            return COM_HResults.S_OK;
        }

        int ICorDebugStepper.Deactivate()
        {
            Active = false;

            return COM_HResults.S_OK;
        }

        int ICorDebugStepper.StepRange(int bStepIn, COR_DEBUG_STEP_RANGE[] ranges, uint cRangeCount)
        {
            // This isn't a correct method signature. However, since we don't support this (yet), it doesn't really matter
            // Add CorDebugStepper.StepRange is not implemented
            m_ranges = ranges;

            Debug.Assert(cRangeCount == 1);

            // Translate the CLR IL offsets of each range into nanoCLR IL offsets.
            for (int iRange = 0; iRange < m_ranges.Length; iRange++)
            {
                COR_DEBUG_STEP_RANGE range = m_ranges[iRange];

                m_ranges[iRange].startOffset = m_frame.Function.GetILnanoCLRFromILCLR(range.startOffset);
                m_ranges[iRange].endOffset = m_frame.Function.GetILnanoCLRFromILCLR(range.endOffset);
            }

            Activate(Boolean.IntToBool(bStepIn) ? BreakpointDef.c_STEP_IN : BreakpointDef.c_STEP_OVER);

            return COM_HResults.S_OK;
        }

        int ICorDebugStepper.SetUnmappedStopMask(CorDebugUnmappedStop mask)
        {
            return COM_HResults.S_OK;
        }

        int ICorDebugStepper.SetInterceptMask(CorDebugIntercept mask)
        {
            m_interceptMask = mask;

            return COM_HResults.S_OK;
        }

        int ICorDebugStepper.Step(int bStepIn)
        {
            m_ranges = null;

            Activate(Boolean.IntToBool(bStepIn) ? BreakpointDef.c_STEP_IN : BreakpointDef.c_STEP_OVER);

            return COM_HResults.S_OK;
        }

        int ICorDebugStepper.SetRangeIL(int bIL)
        {
            return COM_HResults.E_NOTIMPL;
        }

        int ICorDebugStepper.StepOut()
        {
            m_ranges = null;

            Activate(BreakpointDef.c_STEP_OUT);

            return COM_HResults.S_OK;
        }

        #endregion

        #region ICorDebugStepper2 Members

        int ICorDebugStepper2.SetJMC(int fIsJMCStepper)
        {
            // CorDebugStepper.SetJMC is not implemented
            bool fJMC = Boolean.IntToBool(fIsJMCStepper);
            bool fJMCOld = (Debugging_Execution_BreakpointDef.m_flags & BreakpointDef.c_STEP_JMC) != 0;

            if (fJMC != fJMCOld)
            {
                if (fJMC)
                    Debugging_Execution_BreakpointDef.m_flags |= BreakpointDef.c_STEP_JMC;
                else
                    unchecked { Debugging_Execution_BreakpointDef.m_flags &= (ushort)(~BreakpointDef.c_STEP_JMC); }

                Dirty();
            }

            return COM_HResults.S_OK;
        }

        #endregion
    }
}
using System;
using System.Linq;
using static FoxTrader.Constants;

namespace FoxTrader.UI.Control.Layout
{
    /// <summary>Base class for multi-column tables</summary>
    internal class Table : GameControl
    {
        private readonly int[] m_columnWidth;
        private int m_columnCount;
        private int m_maxWidth; // for autosizing, if nonzero - fills last cell up to this size

        // only children of this control should be TableRow.
        private bool m_sizeToContents;

        /// <summary>Initializes a new instance of the <see cref="Table" /> class</summary>
        /// <param name="c_parentControl">Parent control</param>
        public Table(GameControl c_parentControl) : base(c_parentControl)
        {
            m_columnCount = 1;
            DefaultRowHeight = 22;

            // Pre-allocate widths for the maximum column count; unused slots keep
            // the default width of 20 until SetColumnWidth changes them.
            m_columnWidth = new int[kMaxTableRowColumns];

            for (var a_i = 0; a_i < kMaxTableRowColumns; a_i++)
            {
                m_columnWidth[a_i] = 20;
            }

            m_sizeToContents = false;
        }

        /// <summary>Column count (default 1)</summary>
        public int ColumnCount
        {
            get
            {
                return m_columnCount;
            }
            set
            {
                SetColumnCount(value);
                Invalidate();
            }
        }

        /// <summary>Row count</summary>
        public int RowCount => Children.Count;

        /// <summary>Gets or sets default height for new table rows</summary>
        public int DefaultRowHeight { get; set; }

        /// <summary>Returns specific row of the table</summary>
        /// <param name="c_index">Row index</param>
        /// <returns>Row at the specified index</returns>
        public TableRow this[int c_index] => Children[c_index] as TableRow;

        /// <summary>Sets the number of columns</summary>
        /// <param name="c_count">Number of columns</param>
        public void SetColumnCount(int c_count)
        {
            if (m_columnCount == c_count)
            {
                return;
            }

            // Propagate the new count to every existing row before storing it.
            foreach (var a_row in Children.OfType<TableRow>())
            {
                a_row.ColumnCount = c_count;
            }

            m_columnCount = c_count;
        }

        /// <summary>Sets the column width (in pixels)</summary>
        /// <param name="c_column">Column index</param>
        /// <param name="c_width">Column width</param>
        public void SetColumnWidth(int c_column, int c_width)
        {
            if (m_columnWidth[c_column] == c_width)
            {
                return;
            }

            m_columnWidth[c_column] = c_width;
            Invalidate();
        }

        /// <summary>Gets the column width (in pixels)</summary>
        /// <param name="c_column">Column index</param>
        /// <returns>Column width</returns>
        public int GetColumnWidth(int c_column)
        {
            return m_columnWidth[c_column];
        }

        /// <summary>Adds a new empty row</summary>
        /// <returns>Newly created row</returns>
        public TableRow AddRow()
        {
            var a_row = new TableRow(this);
            a_row.ColumnCount = m_columnCount;
            a_row.Height = DefaultRowHeight;
            a_row.Dock = Pos.Top;

            return a_row;
        }

        /// <summary>Adds a new row</summary>
        /// <param name="c_row">Row to add</param>
        public void AddRow(TableRow c_row)
        {
            c_row.Parent = this;
            c_row.ColumnCount = m_columnCount;
            c_row.Height = DefaultRowHeight;
            c_row.Dock = Pos.Top;
        }

        /// <summary>Adds a new row with specified text in first column</summary>
        /// <param name="c_text">Text to add</param>
        /// <returns>New row</returns>
        public TableRow AddRow(string c_text)
        {
            var a_row = AddRow();
            a_row.SetCellText(0, c_text);

            return a_row;
        }

        /// <summary>Removes a row by reference</summary>
        /// <param name="c_row">Row to remove</param>
        public void RemoveRow(TableRow c_row)
        {
            RemoveChild(c_row, true);
        }

        /// <summary>Removes a row by index</summary>
        /// <param name="c_idx">Row index</param>
        public void RemoveRow(int c_idx)
        {
            var a_row = Children[c_idx];
            RemoveRow(a_row as TableRow);
        }

        /// <summary>Removes all rows</summary>
        public void RemoveAll()
        {
            // Removing index 0 repeatedly until Children is empty.
            while (RowCount > 0)
            {
                RemoveRow(0);
            }
        }

        /// <summary>Gets the index of a specified row</summary>
        /// <param name="c_row">Row to search for</param>
        /// <returns>Row index if found, -1 otherwise</returns>
        public int GetRowIndex(TableRow c_row)
        {
            return Children.IndexOf(c_row);
        }

        /// <summary>Lays out the control's interior according to alignment, padding, dock etc</summary>
        /// <param name="c_skin">Skin to use</param>
        protected override void OnLayout(Skin c_skin)
        {
            base.OnLayout(c_skin);

            // Alternate the EvenRow flag down the child list (used for striping)
            // and push the stored column widths into every row.
            var a_even = false;

            foreach (TableRow a_row in Children)
            {
                a_row.EvenRow = a_even;
                a_even = !a_even;

                for (var a_i = 0; a_i < m_columnCount; a_i++)
                {
                    a_row.SetColumnWidth(a_i, m_columnWidth[a_i]);
                }
            }
        }

        // Runs the deferred SizeToContents pass once after layout, then clears the flag.
        protected override void PostLayout(Skin c_skin)
        {
            base.PostLayout(c_skin);

            if (m_sizeToContents)
            {
                DoSizeToContents();
                m_sizeToContents = false;
            }
        }

        /// <summary>Sizes to fit contents</summary>
        public void SizeToContents(int c_maxWidth)
        {
            // Only records the request; the actual sizing happens in PostLayout.
            m_maxWidth = c_maxWidth;
            m_sizeToContents = true;
            Invalidate();
        }

        // Grows each column to the widest cell seen in any row, then resizes the
        // table to the total column width and summed row heights.
        protected void DoSizeToContents()
        {
            var a_height = 0;
            var a_width = 0;

            foreach (TableRow a_row in Children)
            {
                a_row.SizeToContents(); // now all columns fit but only in this particular row

                for (var a_i = 0; a_i < ColumnCount; a_i++)
                {
                    GameControl a_cell = a_row.GetColumn(a_i);

                    if (null != a_cell)
                    {
                        if (a_i < ColumnCount - 1 || m_maxWidth == 0)
                        {
                            m_columnWidth[a_i] = Math.Max(m_columnWidth[a_i], a_cell.Width + a_cell.Margin.m_left + a_cell.Margin.m_right);
                        }
                        else
                        {
                            // last cell - fill
                            // NOTE(review): a_width is only summed after this loop, so
                            // it is still 0 here and the last column gets the full
                            // m_maxWidth rather than the remaining width — confirm intended.
                            m_columnWidth[a_i] = m_maxWidth - a_width;
                        }
                    }
                }

                a_height += a_row.Height;
            }

            // sum all column widths
            for (var a_i = 0; a_i < ColumnCount; a_i++)
            {
                a_width += m_columnWidth[a_i];
            }

            SetSize(a_width, a_height);
            //InvalidateParent();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Data.Entity;
using Microsoft.Data.Entity.Metadata;
using Microsoft.Data.Entity.Relational.Migrations.Infrastructure;
using GroundJobs.MVC.Models;

namespace GroundJobs.MVC.Migrations
{
    // NOTE(review): this is an Entity Framework model snapshot produced by the
    // migrations tooling; changes should come from re-scaffolding migrations,
    // not from hand edits.
    [ContextType(typeof(ApplicationDbContext))]
    partial class ApplicationDbContextModelSnapshot : ModelSnapshot
    {
        // Declares the ASP.NET Identity entity model (roles, users, claims,
        // logins, user-roles) and their relationships as captured at the time
        // this snapshot was generated.
        public override void BuildModel(ModelBuilder builder)
        {
            builder
                .Annotation("SqlServer:ValueGeneration", "Identity");

            builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRole", b =>
                {
                    b.Property<string>("Id")
                        .GenerateValueOnAdd()
                        .Annotation("OriginalValueIndex", 0);

                    b.Property<string>("ConcurrencyStamp")
                        .ConcurrencyToken()
                        .Annotation("OriginalValueIndex", 1);

                    b.Property<string>("Name")
                        .Annotation("OriginalValueIndex", 2);

                    b.Property<string>("NormalizedName")
                        .Annotation("OriginalValueIndex", 3);

                    b.Key("Id");

                    b.Annotation("Relational:TableName", "AspNetRoles");
                });

            builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRoleClaim<string>", b =>
                {
                    b.Property<int>("Id")
                        .GenerateValueOnAdd()
                        .StoreGeneratedPattern(StoreGeneratedPattern.Identity)
                        .Annotation("OriginalValueIndex", 0);

                    b.Property<string>("ClaimType")
                        .Annotation("OriginalValueIndex", 1);

                    b.Property<string>("ClaimValue")
                        .Annotation("OriginalValueIndex", 2);

                    b.Property<string>("RoleId")
                        .Annotation("OriginalValueIndex", 3);

                    b.Key("Id");

                    b.Annotation("Relational:TableName", "AspNetRoleClaims");
                });

            builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserClaim<string>", b =>
                {
                    b.Property<int>("Id")
                        .GenerateValueOnAdd()
                        .StoreGeneratedPattern(StoreGeneratedPattern.Identity)
                        .Annotation("OriginalValueIndex", 0);

                    b.Property<string>("ClaimType")
                        .Annotation("OriginalValueIndex", 1);

                    b.Property<string>("ClaimValue")
                        .Annotation("OriginalValueIndex", 2);

                    b.Property<string>("UserId")
                        .Annotation("OriginalValueIndex", 3);

                    b.Key("Id");

                    b.Annotation("Relational:TableName", "AspNetUserClaims");
                });

            builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserLogin<string>", b =>
                {
                    b.Property<string>("LoginProvider")
                        .GenerateValueOnAdd()
                        .Annotation("OriginalValueIndex", 0);

                    b.Property<string>("ProviderKey")
                        .GenerateValueOnAdd()
                        .Annotation("OriginalValueIndex", 1);

                    b.Property<string>("ProviderDisplayName")
                        .Annotation("OriginalValueIndex", 2);

                    b.Property<string>("UserId")
                        .Annotation("OriginalValueIndex", 3);

                    // Composite primary key.
                    b.Key("LoginProvider", "ProviderKey");

                    b.Annotation("Relational:TableName", "AspNetUserLogins");
                });

            builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserRole<string>", b =>
                {
                    b.Property<string>("UserId")
                        .Annotation("OriginalValueIndex", 0);

                    b.Property<string>("RoleId")
                        .Annotation("OriginalValueIndex", 1);

                    // Composite primary key (join table).
                    b.Key("UserId", "RoleId");

                    b.Annotation("Relational:TableName", "AspNetUserRoles");
                });

            builder.Entity("GroundJobs.MVC.Models.ApplicationUser", b =>
                {
                    b.Property<string>("Id")
                        .GenerateValueOnAdd()
                        .Annotation("OriginalValueIndex", 0);

                    b.Property<int>("AccessFailedCount")
                        .Annotation("OriginalValueIndex", 1);

                    b.Property<string>("ConcurrencyStamp")
                        .ConcurrencyToken()
                        .Annotation("OriginalValueIndex", 2);

                    b.Property<string>("Email")
                        .Annotation("OriginalValueIndex", 3);

                    b.Property<bool>("EmailConfirmed")
                        .Annotation("OriginalValueIndex", 4);

                    b.Property<bool>("LockoutEnabled")
                        .Annotation("OriginalValueIndex", 5);

                    b.Property<DateTimeOffset?>("LockoutEnd")
                        .Annotation("OriginalValueIndex", 6);

                    b.Property<string>("NormalizedEmail")
                        .Annotation("OriginalValueIndex", 7);

                    b.Property<string>("NormalizedUserName")
                        .Annotation("OriginalValueIndex", 8);

                    b.Property<string>("PasswordHash")
                        .Annotation("OriginalValueIndex", 9);

                    b.Property<string>("PhoneNumber")
                        .Annotation("OriginalValueIndex", 10);

                    b.Property<bool>("PhoneNumberConfirmed")
                        .Annotation("OriginalValueIndex", 11);

                    b.Property<string>("SecurityStamp")
                        .Annotation("OriginalValueIndex", 12);

                    b.Property<bool>("TwoFactorEnabled")
                        .Annotation("OriginalValueIndex", 13);

                    b.Property<string>("UserName")
                        .Annotation("OriginalValueIndex", 14);

                    b.Key("Id");

                    b.Annotation("Relational:TableName", "AspNetUsers");
                });

            // Relationships (foreign keys) between the Identity entities.
            builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRoleClaim<string>", b =>
                {
                    b.Reference("Microsoft.AspNet.Identity.EntityFramework.IdentityRole")
                        .InverseCollection()
                        .ForeignKey("RoleId");
                });

            builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserClaim<string>", b =>
                {
                    b.Reference("GroundJobs.MVC.Models.ApplicationUser")
                        .InverseCollection()
                        .ForeignKey("UserId");
                });

            builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserLogin<string>", b =>
                {
                    b.Reference("GroundJobs.MVC.Models.ApplicationUser")
                        .InverseCollection()
                        .ForeignKey("UserId");
                });

            builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserRole<string>", b =>
                {
                    b.Reference("Microsoft.AspNet.Identity.EntityFramework.IdentityRole")
                        .InverseCollection()
                        .ForeignKey("RoleId");

                    b.Reference("GroundJobs.MVC.Models.ApplicationUser")
                        .InverseCollection()
                        .ForeignKey("UserId");
                });
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Buffers;
using System.Diagnostics;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Security.Cryptography.Asn1;

namespace System.Security.Cryptography
{
    // Import/export helpers for EC keys in the standard interchange formats:
    // SubjectPublicKeyInfo (X.509/RFC 5280), ECPrivateKey (RFC 5915) and
    // PKCS#8 (RFC 5208/5958), limited to named curves and uncompressed points.
    internal static class EccKeyFormatHelper
    {
        // The only algorithm identifier accepted in SPKI/PKCS#8 envelopes for EC keys.
        private static readonly string[] s_validOids =
        {
            Oids.EcPublicKey,
        };

        // Parses an X.509 SubjectPublicKeyInfo containing an EC public key.
        internal static void ReadSubjectPublicKeyInfo(
            ReadOnlySpan<byte> source,
            out int bytesRead,
            out ECParameters key)
        {
            KeyFormatHelper.ReadSubjectPublicKeyInfo<ECParameters>(
                s_validOids,
                source,
                FromECPublicKey,
                out bytesRead,
                out key);
        }

        // Validates the SPKI envelope and returns the raw subjectPublicKey payload
        // without decoding it into ECParameters.
        internal static ReadOnlyMemory<byte> ReadSubjectPublicKeyInfo(
            ReadOnlyMemory<byte> source,
            out int bytesRead)
        {
            return KeyFormatHelper.ReadSubjectPublicKeyInfo(
                s_validOids,
                source,
                out bytesRead);
        }

        // Decrypts and parses an encrypted PKCS#8 EC private key using a char-based password.
        internal static void ReadEncryptedPkcs8(
            ReadOnlySpan<byte> source,
            ReadOnlySpan<char> password,
            out int bytesRead,
            out ECParameters key)
        {
            KeyFormatHelper.ReadEncryptedPkcs8<ECParameters>(
                s_validOids,
                source,
                password,
                FromECPrivateKey,
                out bytesRead,
                out key);
        }

        // Decrypts and parses an encrypted PKCS#8 EC private key using raw password bytes.
        internal static void ReadEncryptedPkcs8(
            ReadOnlySpan<byte> source,
            ReadOnlySpan<byte> passwordBytes,
            out int bytesRead,
            out ECParameters key)
        {
            KeyFormatHelper.ReadEncryptedPkcs8<ECParameters>(
                s_validOids,
                source,
                passwordBytes,
                FromECPrivateKey,
                out bytesRead,
                out key);
        }

        // Parses a bare RFC 5915 ECPrivateKey (no PKCS#8 envelope, so no algorithm
        // identifier is available; a default AlgorithmIdentifierAsn is passed through,
        // which means the key's own embedded parameters field must be present).
        internal static unsafe ECParameters FromECPrivateKey(ReadOnlySpan<byte> key, out int bytesRead)
        {
            // Pin the span so it can be exposed as ReadOnlyMemory<byte> for the AsnReader.
            fixed (byte* ptr = &MemoryMarshal.GetReference(key))
            {
                using (MemoryManager<byte> manager = new PointerMemoryManager<byte>(ptr, key.Length))
                {
                    AsnReader reader = new AsnReader(manager.Memory, AsnEncodingRules.BER);
                    AlgorithmIdentifierAsn algId = default;
                    // Peek (rather than read) so trailing data after the first value is tolerated
                    // and bytesRead reflects only the consumed encoding.
                    ReadOnlyMemory<byte> firstValue = reader.PeekEncodedValue();
                    FromECPrivateKey(firstValue, algId, out ECParameters ret);
                    bytesRead = firstValue.Length;
                    return ret;
                }
            }
        }

        // Decodes an RFC 5915 ECPrivateKey structure into ECParameters, cross-checking
        // the embedded curve parameters against the (possibly default) outer algId.
        internal static void FromECPrivateKey(
            ReadOnlyMemory<byte> keyData,
            in AlgorithmIdentifierAsn algId,
            out ECParameters ret)
        {
            ECPrivateKey key = ECPrivateKey.Decode(keyData, AsnEncodingRules.BER);
            ValidateParameters(key.Parameters, algId);

            // RFC 5915 only defines version 1.
            if (key.Version != 1)
            {
                throw new CryptographicException(SR.Cryptography_Der_Invalid_Encoding);
            }

            // Implementation limitation
            if (key.PublicKey == null)
            {
                throw new CryptographicException(SR.Cryptography_NotValidPublicOrPrivateKey);
            }

            ReadOnlySpan<byte> publicKeyBytes = key.PublicKey.Value.Span;

            if (publicKeyBytes.Length == 0)
            {
                throw new CryptographicException(SR.Cryptography_Der_Invalid_Encoding);
            }

            // Implementation limitation
            // 04 (Uncompressed ECPoint) is almost always used.
            if (publicKeyBytes[0] != 0x04)
            {
                throw new CryptographicException(SR.Cryptography_NotValidPublicOrPrivateKey);
            }

            // https://www.secg.org/sec1-v2.pdf, 2.3.4, #3 (M has length 2 * CEIL(log2(q)/8) + 1)
            if (publicKeyBytes.Length != 2 * key.PrivateKey.Length + 1)
            {
                throw new CryptographicException(SR.Cryptography_Der_Invalid_Encoding);
            }

            ECDomainParameters domainParameters;

            // Prefer the parameters embedded in the key; otherwise fall back to the
            // algorithm identifier's parameters (ValidateParameters guaranteed at least one).
            if (key.Parameters != null)
            {
                domainParameters = key.Parameters.Value;
            }
            else
            {
                domainParameters = ECDomainParameters.Decode(algId.Parameters.Value, AsnEncodingRules.DER);
            }

            ret = new ECParameters
            {
                Curve = GetCurve(domainParameters),
                Q =
                {
                    // Uncompressed point: 0x04 || X || Y, each field-width bytes.
                    X = publicKeyBytes.Slice(1, key.PrivateKey.Length).ToArray(),
                    Y = publicKeyBytes.Slice(1 + key.PrivateKey.Length).ToArray(),
                },
                D = key.PrivateKey.ToArray(),
            };

            ret.Validate();
        }

        // Decodes the subjectPublicKey payload of an EC SPKI into ECParameters.
        // The curve is taken from algId.Parameters (required for id-ecPublicKey).
        internal static void FromECPublicKey(
            ReadOnlyMemory<byte> key,
            in AlgorithmIdentifierAsn algId,
            out ECParameters ret)
        {
            if (algId.Parameters == null)
            {
                throw new CryptographicException(SR.Cryptography_Der_Invalid_Encoding);
            }

            ReadOnlySpan<byte> publicKeyBytes = key.Span;

            if (publicKeyBytes.Length == 0)
            {
                throw new CryptographicException(SR.Cryptography_Der_Invalid_Encoding);
            }

            // Implementation limitation.
            // 04 (Uncompressed ECPoint) is almost always used.
            if (publicKeyBytes[0] != 0x04)
            {
                throw new CryptographicException(SR.Cryptography_NotValidPublicOrPrivateKey);
            }

            // https://www.secg.org/sec1-v2.pdf, 2.3.4, #3 (M has length 2 * CEIL(log2(q)/8) + 1)
            // Length must be odd: prefix byte plus two equal-width coordinates.
            if ((publicKeyBytes.Length & 0x01) != 1)
            {
                throw new CryptographicException(SR.Cryptography_Der_Invalid_Encoding);
            }

            int fieldWidth = publicKeyBytes.Length / 2;

            ECDomainParameters domainParameters = ECDomainParameters.Decode(
                algId.Parameters.Value,
                AsnEncodingRules.DER);

            ret = new ECParameters
            {
                Curve = GetCurve(domainParameters),
                Q =
                {
                    X = publicKeyBytes.Slice(1, fieldWidth).ToArray(),
                    Y = publicKeyBytes.Slice(1 + fieldWidth).ToArray(),
                },
            };

            ret.Validate();
        }

        // Ensures curve parameters are present and, when specified in both the key and
        // the algorithm identifier, that the two encodings are byte-identical.
        private static void ValidateParameters(ECDomainParameters? keyParameters, in AlgorithmIdentifierAsn algId)
        {
            // At least one is required
            if (keyParameters == null && algId.Parameters == null)
            {
                throw new CryptographicException(SR.Cryptography_Der_Invalid_Encoding);
            }

            // If they are both specified they must match.
            if (keyParameters != null && algId.Parameters != null)
            {
                ReadOnlySpan<byte> algIdParameters = algId.Parameters.Value.Span;
                byte[] verify = ArrayPool<byte>.Shared.Rent(algIdParameters.Length);

                try
                {
                    // X.509 SubjectPublicKeyInfo specifies DER encoding.
                    // RFC 5915 specifies DER encoding for EC Private Keys.
                    // So we can compare as DER.
                    using (AsnWriter writer = new AsnWriter(AsnEncodingRules.DER))
                    {
                        keyParameters.Value.Encode(writer);

                        // TryEncode failing means the rented buffer was too small, which
                        // already implies the encodings differ in length.
                        if (!writer.TryEncode(verify, out int written) ||
                            written != algIdParameters.Length ||
                            !algIdParameters.SequenceEqual(new ReadOnlySpan<byte>(verify, 0, written)))
                        {
                            throw new CryptographicException(SR.Cryptography_Der_Invalid_Encoding);
                        }
                    }
                }
                finally
                {
                    // verify contains public information and does not need to be cleared.
                    ArrayPool<byte>.Shared.Return(verify);
                }
            }
        }

        // Maps the (named-curve-only) domain parameters to an ECCurve, attaching
        // Windows-friendly names for the NIST curves so FriendlyName round-trips.
        private static ECCurve GetCurve(in ECDomainParameters domainParameters)
        {
            if (domainParameters.Named == null)
            {
                throw new CryptographicException(SR.Cryptography_ECC_NamedCurvesOnly);
            }

            Oid curveOid = domainParameters.Named;

            switch (curveOid.Value)
            {
                case Oids.secp256r1:
                    curveOid = new Oid(Oids.secp256r1, nameof(ECCurve.NamedCurves.nistP256));
                    break;
                case Oids.secp384r1:
                    curveOid = new Oid(Oids.secp384r1, nameof(ECCurve.NamedCurves.nistP384));
                    break;
                case Oids.secp521r1:
                    curveOid = new Oid(Oids.secp521r1, nameof(ECCurve.NamedCurves.nistP521));
                    break;
            }

            return ECCurve.CreateFromOid(curveOid);
        }

        // Writes ecParameters' public portion as an X.509 SubjectPublicKeyInfo.
        internal static AsnWriter WriteSubjectPublicKeyInfo(in ECParameters ecParameters)
        {
            ecParameters.Validate();

            if (!ecParameters.Curve.IsNamed)
            {
                throw new CryptographicException(SR.Cryptography_ECC_NamedCurvesOnly);
            }

            // Since the public key format for EC keys is not ASN.1,
            // write the SPKI structure manually.

            AsnWriter writer = new AsnWriter(AsnEncodingRules.DER);

            // SubjectPublicKeyInfo
            writer.PushSequence();

            // algorithm
            WriteAlgorithmIdentifier(ecParameters, writer);

            // subjectPublicKey
            WriteUncompressedPublicKey(ecParameters, writer);

            writer.PopSequence();
            return writer;
        }

        // Writes just the AlgorithmIdentifier into a fresh writer (used by PKCS#8 export).
        private static AsnWriter WriteAlgorithmIdentifier(in ECParameters ecParameters)
        {
            AsnWriter writer = new AsnWriter(AsnEncodingRules.DER);
            WriteAlgorithmIdentifier(ecParameters, writer);
            return writer;
        }

        // AlgorithmIdentifier ::= SEQUENCE { id-ecPublicKey, ECParameters (named curve) }
        private static void WriteAlgorithmIdentifier(in ECParameters ecParameters, AsnWriter writer)
        {
            writer.PushSequence();

            writer.WriteObjectIdentifier(Oids.EcPublicKey);

            WriteEcParameters(ecParameters, writer);

            writer.PopSequence();
        }

        // Writes the full private key as an (unencrypted) PKCS#8 PrivateKeyInfo.
        internal static AsnWriter WritePkcs8PrivateKey(in ECParameters ecParameters)
        {
            ecParameters.Validate();

            if (ecParameters.D == null)
            {
                throw new CryptographicException(SR.Cryptography_CSP_NoPrivateKey);
            }

            if (!ecParameters.Curve.IsNamed)
            {
                throw new CryptographicException(SR.Cryptography_ECC_NamedCurvesOnly);
            }

            // Don't need the domain parameters because they're contained in the algId.
            using (AsnWriter ecPrivateKey = WriteEcPrivateKey(ecParameters, includeDomainParameters: false))
            using (AsnWriter algorithmIdentifier = WriteAlgorithmIdentifier(ecParameters))
            {
                return KeyFormatHelper.WritePkcs8(algorithmIdentifier, ecPrivateKey);
            }
        }

        // Writes the ECParameters CHOICE as a namedCurve OBJECT IDENTIFIER.
        private static void WriteEcParameters(in ECParameters ecParameters, AsnWriter writer)
        {
            if (ecParameters.Curve.IsNamed)
            {
                Oid oid = ecParameters.Curve.Oid;

                // On Windows the FriendlyName is populated in places where the Value mightn't be.
                if (string.IsNullOrEmpty(oid.Value))
                {
                    oid = Oid.FromFriendlyName(oid.FriendlyName, OidGroup.All);
                }

                writer.WriteObjectIdentifier(oid.Value);
            }
            else
            {
                throw new CryptographicException(SR.Cryptography_ECC_NamedCurvesOnly);
            }
        }

        // Writes the public point as a BIT STRING containing 0x04 || X || Y.
        private static void WriteUncompressedPublicKey(in ECParameters ecParameters, AsnWriter writer)
        {
            int publicKeyLength = ecParameters.Q.X.Length * 2 + 1;
            Span<byte> publicKeyBytes = stackalloc byte[0];
            byte[] publicKeyRented = null;

            // Small keys go on the stack; anything larger borrows from the pool.
            if (publicKeyLength < 256)
            {
                publicKeyBytes = stackalloc byte[publicKeyLength];
            }
            else
            {
                publicKeyRented = ArrayPool<byte>.Shared.Rent(publicKeyLength);
                publicKeyBytes = publicKeyRented.AsSpan(0, publicKeyLength);
            }

            try
            {
                publicKeyBytes[0] = 0x04;
                ecParameters.Q.X.AsSpan().CopyTo(publicKeyBytes.Slice(1));
                ecParameters.Q.Y.AsSpan().CopyTo(publicKeyBytes.Slice(1 + ecParameters.Q.X.Length));

                writer.WriteBitString(publicKeyBytes);
            }
            finally
            {
                CryptographicOperations.ZeroMemory(publicKeyBytes);

                if (publicKeyRented != null)
                {
                    ArrayPool<byte>.Shared.Return(publicKeyRented);
                }
            }
        }

        // Writes an RFC 5915 ECPrivateKey including the optional domain parameters.
        internal static AsnWriter WriteECPrivateKey(in ECParameters ecParameters)
        {
            return WriteEcPrivateKey(ecParameters, includeDomainParameters: true);
        }

        // ECPrivateKey ::= SEQUENCE { version 1, privateKey, [0] parameters OPTIONAL, [1] publicKey }
        // The writer is disposed if anything throws before a successful return.
        private static AsnWriter WriteEcPrivateKey(in ECParameters ecParameters, bool includeDomainParameters)
        {
            bool returning = false;
            AsnWriter writer = new AsnWriter(AsnEncodingRules.DER);

            try
            {
                // ECPrivateKey
                writer.PushSequence();

                // version 1
                writer.WriteInteger(1);

                // privateKey
                writer.WriteOctetString(ecParameters.D);

                // domainParameters
                if (includeDomainParameters)
                {
                    Asn1Tag explicit0 = new Asn1Tag(TagClass.ContextSpecific, 0, isConstructed: true);
                    writer.PushSequence(explicit0);

                    WriteEcParameters(ecParameters, writer);

                    writer.PopSequence(explicit0);
                }

                // publicKey
                {
                    Asn1Tag explicit1 = new Asn1Tag(TagClass.ContextSpecific, 1, isConstructed: true);
                    writer.PushSequence(explicit1);

                    WriteUncompressedPublicKey(ecParameters, writer);

                    writer.PopSequence(explicit1);
                }

                writer.PopSequence();

                returning = true;
                return writer;
            }
            finally
            {
                if (!returning)
                {
                    writer.Dispose();
                }
            }
        }
    }
}
using NetApp.Tests.Helpers;
using Microsoft.Azure.Management.NetApp.Models;
using Microsoft.Azure.Management.NetApp;
using Microsoft.Azure.Management.Resources;
using Microsoft.Azure.Test.HttpRecorder;
using Microsoft.Rest.ClientRuntime.Azure.TestFramework;
using System.IO;
using System.Linq;
using System.Net;
using System.Reflection;
using Xunit;
using System;
using System.Collections.Generic;
using System.Threading;
using System.ComponentModel;
using Microsoft.Rest.Azure;

namespace NetApp.Tests.ResourceTests
{
    /// <summary>
    /// Scenario tests for Azure NetApp Files volumes: CRUD, listing, availability
    /// checks, pool moves and cross-region replication (data protection).
    /// Runs against recorded HTTP sessions (see <see cref="GetSessionsDirectoryPath"/>).
    /// </summary>
    public class VolumeTests : TestBase
    {
        // Milliseconds to wait in Record mode for ARM caches to settle.
        private const int delay = 10000;

        public static ExportPolicyRule exportPolicyRule = new ExportPolicyRule()
        {
            RuleIndex = 1,
            UnixReadOnly = false,
            UnixReadWrite = true,
            Cifs = false,
            Nfsv3 = true,
            Nfsv41 = false,
            AllowedClients = "1.2.3.0/24"
        };

        public static IList<ExportPolicyRule> exportPolicyRuleList = new List<ExportPolicyRule>()
        {
            exportPolicyRule
        };

        public static VolumePropertiesExportPolicy exportPolicy = new VolumePropertiesExportPolicy()
        {
            Rules = exportPolicyRuleList
        };

        public static VolumePatchPropertiesExportPolicy exportPatchPolicy = new VolumePatchPropertiesExportPolicy()
        {
            Rules = exportPolicyRuleList
        };

        /// <summary>Create a volume, verify defaults, delete it and verify the list is empty.</summary>
        [Fact]
        public void CreateDeleteVolume()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // create a volume, get all and check
                var resource = ResourceUtils.CreateVolume(netAppMgmtClient);
                Assert.Equal(ResourceUtils.defaultExportPolicy.ToString(), resource.ExportPolicy.ToString());
                Assert.Null(resource.Tags);
                // check DP properties exist but unassigned because
                // dataprotection volume was not created
                Assert.Null(resource.VolumeType);
                Assert.Null(resource.DataProtection);

                var volumesBefore = netAppMgmtClient.Volumes.List(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1);
                Assert.Single(volumesBefore);

                // delete the volume and check again
                netAppMgmtClient.Volumes.Delete(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1, ResourceUtils.volumeName1);
                var volumesAfter = netAppMgmtClient.Volumes.List(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1);
                Assert.Empty(volumesAfter);

                // cleanup
                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>Create a volume with explicit tags, protocol types and export policy; verify they round-trip.</summary>
        [Fact]
        public void CreateVolumeWithProperties()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // create a volume with tags and export policy
                var dict = new Dictionary<string, string>();
                dict.Add("Tag2", "Value2");
                var protocolTypes = new List<string>() { "NFSv3" };

                var resource = ResourceUtils.CreateVolume(netAppMgmtClient, protocolTypes: protocolTypes, tags: dict, exportPolicy: exportPolicy);
                Assert.Equal(exportPolicy.ToString(), resource.ExportPolicy.ToString());
                Assert.Equal(protocolTypes, resource.ProtocolTypes);
                Assert.True(resource.Tags.ContainsKey("Tag2"));
                Assert.Equal("Value2", resource.Tags["Tag2"]);

                var volumesBefore = netAppMgmtClient.Volumes.List(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1);
                Assert.Single(volumesBefore);

                // delete the volume and check again
                netAppMgmtClient.Volumes.Delete(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1, ResourceUtils.volumeName1);
                var volumesAfter = netAppMgmtClient.Volumes.List(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1);
                Assert.Empty(volumesAfter);

                // cleanup
                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>Create two volumes in one pool and verify both appear in the list.</summary>
        [Fact]
        public void ListVolumes()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // create two volumes under same pool
                ResourceUtils.CreateVolume(netAppMgmtClient);
                ResourceUtils.CreateVolume(netAppMgmtClient, ResourceUtils.volumeName2, volumeOnly: true);

                // get the account list and check; ordering is not guaranteed so match by name
                var volumes = netAppMgmtClient.Volumes.List(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1);
                Assert.Contains(volumes, item => item.Name == $"{ResourceUtils.accountName1}/{ResourceUtils.poolName1}/{ResourceUtils.volumeName1}");
                Assert.Contains(volumes, item => item.Name == $"{ResourceUtils.accountName1}/{ResourceUtils.poolName1}/{ResourceUtils.volumeName2}");
                Assert.Equal(2, volumes.Count());

                // clean up - delete the two volumes, the pool and the account
                ResourceUtils.DeleteVolume(netAppMgmtClient);
                ResourceUtils.DeleteVolume(netAppMgmtClient, ResourceUtils.volumeName2);
                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>Create a volume and retrieve it by name.</summary>
        [Fact]
        public void GetVolumeByName()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // create the volume
                ResourceUtils.CreateVolume(netAppMgmtClient);

                // retrieve it
                var volume = netAppMgmtClient.Volumes.Get(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1, ResourceUtils.volumeName1);
                Assert.Equal(volume.Name, ResourceUtils.accountName1 + '/' + ResourceUtils.poolName1 + '/' + ResourceUtils.volumeName1);

                // clean up - delete the volume, pool and account
                ResourceUtils.DeleteVolume(netAppMgmtClient);
                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>Getting a nonexistent volume from an existing pool raises a not-found error.</summary>
        [Fact]
        public void GetVolumeByNameNotFound()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // create volume
                ResourceUtils.CreatePool(netAppMgmtClient);

                // try and get a volume in the pool - none have been created yet
                try
                {
                    var volume = netAppMgmtClient.Volumes.Get(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1, ResourceUtils.volumeName1);
                    Assert.True(false); // expecting exception
                }
                catch (Exception ex)
                {
                    Assert.Contains("was not found", ex.Message);
                }

                // cleanup
                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>Getting a volume from a nonexistent pool raises a not-found error.</summary>
        [Fact]
        public void GetVolumeByNamePoolNotFound()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                ResourceUtils.CreateAccount(netAppMgmtClient);

                // try and create a volume before the pool exist
                try
                {
                    var volume = netAppMgmtClient.Volumes.Get(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1, ResourceUtils.volumeName1);
                    Assert.True(false); // expecting exception
                }
                catch (Exception ex)
                {
                    Assert.Contains("not found", ex.Message);
                }

                // cleanup - remove the account
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>Creating a volume before its pool exists raises a not-found error.</summary>
        [Fact]
        public void CreateVolumePoolNotFound()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                ResourceUtils.CreateAccount(netAppMgmtClient);

                // try and create a volume before the pool exist
                try
                {
                    ResourceUtils.CreateVolume(netAppMgmtClient, volumeOnly: true);
                    Assert.True(false); // expecting exception
                }
                catch (Exception ex)
                {
                    Assert.Contains("not found", ex.Message);
                }

                // cleanup - remove the account
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>Deleting a pool that still contains a volume is rejected by the service.</summary>
        [Fact]
        public void DeletePoolWithVolumePresent()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // create the account and pool
                ResourceUtils.CreateVolume(netAppMgmtClient);
                var poolsBefore = netAppMgmtClient.Pools.List(ResourceUtils.resourceGroup, ResourceUtils.accountName1);
                Assert.Single(poolsBefore);

                // try and delete the pool
                try
                {
                    netAppMgmtClient.Pools.Delete(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1);
                    Assert.True(false); // expecting exception
                }
                catch (Exception ex)
                {
                    Assert.Contains("Can not delete resource before nested resources are deleted", ex.Message);
                }

                // clean up
                ResourceUtils.DeleteVolume(netAppMgmtClient);
                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>Name/file-path availability flips from available to unavailable once the volume exists.</summary>
        [Fact]
        public void CheckAvailability()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // check account resource name - should be available
                var response = netAppMgmtClient.NetAppResource.CheckNameAvailability(ResourceUtils.location, ResourceUtils.accountName1, CheckNameResourceTypes.MicrosoftNetAppNetAppAccounts, ResourceUtils.resourceGroup);
                Assert.True(response.IsAvailable);

                // now check file path availability
                response = netAppMgmtClient.NetAppResource.CheckFilePathAvailability(ResourceUtils.location, ResourceUtils.volumeName1, ResourceUtils.subnetId);
                Assert.True(response.IsAvailable);

                // create the volume
                var volume = ResourceUtils.CreateVolume(netAppMgmtClient);

                // check volume resource name - should be unavailable after its creation
                var resourceName = ResourceUtils.accountName1 + '/' + ResourceUtils.poolName1 + '/' + ResourceUtils.volumeName1;
                response = netAppMgmtClient.NetAppResource.CheckNameAvailability(ResourceUtils.location, resourceName, CheckNameResourceTypes.MicrosoftNetAppNetAppAccountsCapacityPoolsVolumes, ResourceUtils.resourceGroup);
                Assert.False(response.IsAvailable);

                // now check file path availability again
                response = netAppMgmtClient.NetAppResource.CheckFilePathAvailability(ResourceUtils.location, ResourceUtils.volumeName1, ResourceUtils.subnetId);
                Assert.False(response.IsAvailable);

                // clean up
                ResourceUtils.DeleteVolume(netAppMgmtClient);
                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>Same as <see cref="CheckAvailability"/> but exercising the pre-2021-04 file-path overload.</summary>
        [Fact]
        public void CheckAvailabilityPre2021_04()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // check account resource name - should be available
                var response = netAppMgmtClient.NetAppResource.CheckNameAvailability(ResourceUtils.location, ResourceUtils.accountName1, CheckNameResourceTypes.MicrosoftNetAppNetAppAccounts, ResourceUtils.resourceGroup);
                Assert.True(response.IsAvailable);

                // now check file path availability
                response = netAppMgmtClient.NetAppResource.CheckFilePathAvailability(ResourceUtils.location, ResourceUtils.volumeName1, CheckNameResourceTypes.MicrosoftNetAppNetAppAccountsCapacityPoolsVolumes, ResourceUtils.resourceGroup);
                Assert.True(response.IsAvailable);

                // create the volume
                var volume = ResourceUtils.CreateVolume(netAppMgmtClient);

                // check volume resource name - should be unavailable after its creation
                var resourceName = ResourceUtils.accountName1 + '/' + ResourceUtils.poolName1 + '/' + ResourceUtils.volumeName1;
                response = netAppMgmtClient.NetAppResource.CheckNameAvailability(ResourceUtils.location, resourceName, CheckNameResourceTypes.MicrosoftNetAppNetAppAccountsCapacityPoolsVolumes, ResourceUtils.resourceGroup);
                Assert.False(response.IsAvailable);

                // now check file path availability again
                response = netAppMgmtClient.NetAppResource.CheckFilePathAvailability(ResourceUtils.location, ResourceUtils.volumeName1, CheckNameResourceTypes.MicrosoftNetAppNetAppAccountsCapacityPoolsVolumes, ResourceUtils.resourceGroup);
                Assert.False(response.IsAvailable);

                // clean up
                ResourceUtils.DeleteVolume(netAppMgmtClient);
                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>PUT (CreateOrUpdate) a volume with a doubled quota and verify the change sticks.</summary>
        [Fact]
        public void UpdateVolume()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // create the volume
                var oldVolume = ResourceUtils.CreateVolume(netAppMgmtClient);
                Assert.Equal("Premium", oldVolume.ServiceLevel);
                Assert.Equal(100 * ResourceUtils.gibibyte, oldVolume.UsageThreshold);

                // The returned volume contains some items which cnanot be part of the payload,
                // such as baremetaltenant, therefore create a new object selectively from the old one
                var volume = new Volume
                {
                    Location = oldVolume.Location,
                    ServiceLevel = oldVolume.ServiceLevel,
                    CreationToken = oldVolume.CreationToken,
                    SubnetId = oldVolume.SubnetId,
                };

                // update
                volume.UsageThreshold = 2 * oldVolume.UsageThreshold;

                var updatedVolume = netAppMgmtClient.Volumes.CreateOrUpdate(volume, ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1, ResourceUtils.volumeName1);
                Assert.Equal("Premium", updatedVolume.ServiceLevel); // didn't attempt to change - it would be rejected
                Assert.Equal(100 * ResourceUtils.gibibyte * 2, updatedVolume.UsageThreshold);

                // cleanup
                ResourceUtils.DeleteVolume(netAppMgmtClient);
                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>PATCH (Update) a volume's quota, tags and export policy and verify the changes.</summary>
        [Fact]
        public void PatchVolume()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // create the volume
                var volume = ResourceUtils.CreateVolume(netAppMgmtClient);
                Assert.Equal("Premium", volume.ServiceLevel);
                Assert.Equal(100 * ResourceUtils.gibibyte, volume.UsageThreshold);
                Assert.Equal(ResourceUtils.defaultExportPolicy.ToString(), volume.ExportPolicy.ToString());
                Assert.Null(volume.Tags);

                // create a volume with tags and export policy
                var dict = new Dictionary<string, string>();
                dict.Add("Tag2", "Value2");

                // Now try and modify it
                var volumePatch = new VolumePatch()
                {
                    UsageThreshold = 2 * volume.UsageThreshold,
                    Tags = dict,
                    ExportPolicy = exportPatchPolicy
                };

                // patch
                var updatedVolume = netAppMgmtClient.Volumes.Update(volumePatch, ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1, ResourceUtils.volumeName1);
                Assert.Equal("Premium", updatedVolume.ServiceLevel); // didn't attempt to change - it would be rejected
                Assert.Equal(200 * ResourceUtils.gibibyte, updatedVolume.UsageThreshold);
                Assert.Equal(exportPolicy.ToString(), updatedVolume.ExportPolicy.ToString());
                Assert.True(updatedVolume.Tags.ContainsKey("Tag2"));
                Assert.Equal("Value2", updatedVolume.Tags["Tag2"]);

                // cleanup
                ResourceUtils.DeleteVolume(netAppMgmtClient);
                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>
        /// Polls the destination volume's replication status until MirrorState reaches
        /// <paramref name="targetState"/>, then waits (up to 10 attempts) for the
        /// relationship to report Healthy, asserting at the end.
        /// </summary>
        private void WaitForReplicationStatus(AzureNetAppFilesManagementClient netAppMgmtClient, string targetState)
        {
            ReplicationStatus replicationStatus = new ReplicationStatus { Healthy = false, MirrorState = "Uninitialized" };
            int attempts = 0;

            do
            {
                try
                {
                    replicationStatus = netAppMgmtClient.Volumes.ReplicationStatusMethod(ResourceUtils.remoteResourceGroup, ResourceUtils.remoteAccountName1, ResourceUtils.remotePoolName1, ResourceUtils.volumeName1ReplDest);
                }
                catch (CloudException ex)
                {
                    // The replication object may not exist yet; tolerate only that case.
                    if (!ex.Message.Contains("the volume replication is: 'Creating'"))
                    {
                        throw;
                    }
                }

                Thread.Sleep(1);
            } while (replicationStatus.MirrorState != targetState);

            // sometimes they dont sync up right away
            if (!replicationStatus.Healthy.Value)
            {
                do
                {
                    replicationStatus = netAppMgmtClient.Volumes.ReplicationStatusMethod(ResourceUtils.remoteResourceGroup, ResourceUtils.remoteAccountName1, ResourceUtils.remotePoolName1, ResourceUtils.volumeName1ReplDest);
                    attempts++;
                    if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
                    {
                        Thread.Sleep(1000);
                    }
                    // BUGFIX: the original condition (Healthy.Value || attempts == 10) was
                    // inverted - it stopped retrying while still unhealthy and kept looping
                    // once healthy. Retry while unhealthy, up to 10 attempts.
                } while (!replicationStatus.Healthy.Value && attempts < 10);
            }

            Assert.True(replicationStatus.Healthy);
        }

        /// <summary>Polls source and destination volumes until both report ProvisioningState "Succeeded".</summary>
        private void WaitForSucceeded(AzureNetAppFilesManagementClient netAppMgmtClient, string accountName = ResourceUtils.accountName1, string poolName = ResourceUtils.poolName1, string volumeName = ResourceUtils.volumeName1)
        {
            Volume sourceVolume;
            Volume dpVolume;

            do
            {
                sourceVolume = netAppMgmtClient.Volumes.Get(ResourceUtils.repResourceGroup, accountName, poolName, volumeName);
                dpVolume = netAppMgmtClient.Volumes.Get(ResourceUtils.remoteResourceGroup, ResourceUtils.remoteAccountName1, ResourceUtils.remotePoolName1, ResourceUtils.volumeName1ReplDest);
                Thread.Sleep(1);
            } while ((sourceVolume.ProvisioningState != "Succeeded") || (dpVolume.ProvisioningState != "Succeeded"));
        }

        /// <summary>
        /// Full data-protection lifecycle: create source and destination volumes,
        /// authorize replication, break/resync/break, delete the replication and
        /// finally tear everything down.
        /// </summary>
        [Fact]
        public void CreateDpVolume()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // create the source volume
                var sourceVolume = ResourceUtils.CreateVolume(netAppMgmtClient, resourceGroup: ResourceUtils.repResourceGroup, vnet: ResourceUtils.repVnet, volumeName: ResourceUtils.volumeName1ReplSource, accountName: ResourceUtils.accountName1Repl, poolName: ResourceUtils.poolName1Repl);
                sourceVolume = netAppMgmtClient.Volumes.Get(ResourceUtils.repResourceGroup, ResourceUtils.accountName1Repl, ResourceUtils.poolName1Repl, ResourceUtils.volumeName1ReplSource);

                if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
                {
                    Thread.Sleep(delay); // some robustness against ARM caching
                }

                // create the data protection volume from the source
                var dpVolume = ResourceUtils.CreateDpVolume(netAppMgmtClient, sourceVolume);
                Assert.Equal(ResourceUtils.volumeName1ReplDest, dpVolume.Name.Substring(dpVolume.Name.LastIndexOf('/') + 1));
                Assert.NotNull(dpVolume.DataProtection);

                if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
                {
                    Thread.Sleep(30000);
                }

                var getDPVolume = netAppMgmtClient.Volumes.Get(ResourceUtils.remoteResourceGroup, ResourceUtils.remoteAccountName1, ResourceUtils.remotePoolName1, ResourceUtils.volumeName1ReplDest);
                var authorizeRequest = new AuthorizeRequest
                {
                    RemoteVolumeResourceId = dpVolume.Id
                };

                netAppMgmtClient.Volumes.AuthorizeReplication(ResourceUtils.repResourceGroup, ResourceUtils.accountName1Repl, ResourceUtils.poolName1Repl, ResourceUtils.volumeName1ReplSource, authorizeRequest);

                if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
                {
                    Thread.Sleep(30000);
                }

                WaitForSucceeded(netAppMgmtClient, accountName: ResourceUtils.accountName1Repl, poolName: ResourceUtils.poolName1Repl, volumeName: ResourceUtils.volumeName1ReplSource);
                WaitForReplicationStatus(netAppMgmtClient, "Mirrored");

                netAppMgmtClient.Volumes.BreakReplication(ResourceUtils.remoteResourceGroup, ResourceUtils.remoteAccountName1, ResourceUtils.remotePoolName1, ResourceUtils.volumeName1ReplDest);
                WaitForReplicationStatus(netAppMgmtClient, "Broken");

                if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
                {
                    Thread.Sleep(30000);
                }

                // sync to the test
                WaitForSucceeded(netAppMgmtClient, accountName: ResourceUtils.accountName1Repl, poolName: ResourceUtils.poolName1Repl, volumeName: ResourceUtils.volumeName1ReplSource);

                // resync
                netAppMgmtClient.Volumes.ResyncReplication(ResourceUtils.remoteResourceGroup, ResourceUtils.remoteAccountName1, ResourceUtils.remotePoolName1, ResourceUtils.volumeName1ReplDest);
                WaitForReplicationStatus(netAppMgmtClient, "Mirrored");

                if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
                {
                    Thread.Sleep(30000);
                }

                // break again
                netAppMgmtClient.Volumes.BreakReplication(ResourceUtils.remoteResourceGroup, ResourceUtils.remoteAccountName1, ResourceUtils.remotePoolName1, ResourceUtils.volumeName1ReplDest);
                WaitForReplicationStatus(netAppMgmtClient, "Broken");

                if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
                {
                    Thread.Sleep(30000);
                }

                // delete the data protection object
                //  - initiate delete replication on destination, this then releases on source, both resulting in object deletion
                netAppMgmtClient.Volumes.DeleteReplication(ResourceUtils.remoteResourceGroup, ResourceUtils.remoteAccountName1, ResourceUtils.remotePoolName1, ResourceUtils.volumeName1ReplDest);

                var replicationFound = true; // because it was previously present
                while (replicationFound)
                {
                    try
                    {
                        var replicationStatus = netAppMgmtClient.Volumes.ReplicationStatusMethod(ResourceUtils.remoteResourceGroup, ResourceUtils.remoteAccountName1, ResourceUtils.remotePoolName1, ResourceUtils.volumeName1ReplDest);
                    }
                    catch
                    {
                        // an exception means the replication was not found
                        //   i.e. it has been deleted
                        // ok without checking it could have been for another reason
                        // but then the delete below will fail
                        replicationFound = false;
                    }

                    Thread.Sleep(1);
                }

                // seems the volumes are not always in a terminal state here so check again
                // and ensure the replication objects are removed
                do
                {
                    sourceVolume = netAppMgmtClient.Volumes.Get(ResourceUtils.repResourceGroup, ResourceUtils.accountName1Repl, ResourceUtils.poolName1Repl, ResourceUtils.volumeName1ReplSource);
                    dpVolume = netAppMgmtClient.Volumes.Get(ResourceUtils.remoteResourceGroup, ResourceUtils.remoteAccountName1, ResourceUtils.remotePoolName1, ResourceUtils.volumeName1ReplDest);
                    Thread.Sleep(1);
                } while ((sourceVolume.ProvisioningState != "Succeeded") || (dpVolume.ProvisioningState != "Succeeded") || (sourceVolume.DataProtection.Replication != null) || (dpVolume.DataProtection.Replication != null));

                // now proceed with the delete of the volumes
                netAppMgmtClient.Volumes.Delete(ResourceUtils.remoteResourceGroup, ResourceUtils.remoteAccountName1, ResourceUtils.remotePoolName1, ResourceUtils.volumeName1ReplDest);
                netAppMgmtClient.Volumes.Delete(ResourceUtils.repResourceGroup, ResourceUtils.accountName1Repl, ResourceUtils.poolName1Repl, ResourceUtils.volumeName1ReplSource);

                // cleanup pool and account
                ResourceUtils.DeletePool(netAppMgmtClient, resourceGroup: ResourceUtils.repResourceGroup, accountName: ResourceUtils.accountName1Repl, poolName: ResourceUtils.poolName1Repl);
                ResourceUtils.DeletePool(netAppMgmtClient, ResourceUtils.remotePoolName1, ResourceUtils.remoteAccountName1, ResourceUtils.remoteResourceGroup);

                if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
                {
                    Thread.Sleep(30000);
                }

                ResourceUtils.DeleteAccount(netAppMgmtClient, accountName: ResourceUtils.accountName1Repl, resourceGroup: ResourceUtils.repResourceGroup);
                ResourceUtils.DeleteAccount(netAppMgmtClient, ResourceUtils.remoteAccountName1, ResourceUtils.remoteResourceGroup);
            }
        }

        /// <summary>Move a volume from one capacity pool to another and verify it appears under the new pool.</summary>
        [Fact]
        public void ChangePoolForVolume()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // create the volume
                var volume = ResourceUtils.CreateVolume(netAppMgmtClient);

                // create the other pool
                var secondPool = ResourceUtils.CreatePool(netAppMgmtClient, ResourceUtils.poolName2, accountName: ResourceUtils.accountName1, resourceGroup: ResourceUtils.resourceGroup, location: ResourceUtils.location, poolOnly: true, serviceLevel: ServiceLevel.Standard);
                Assert.Equal("Premium", volume.ServiceLevel);
                Assert.Equal(100 * ResourceUtils.gibibyte, volume.UsageThreshold);

                if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
                {
                    Thread.Sleep(30000);
                }

                var poolChangeRequest = new PoolChangeRequest()
                {
                    NewPoolResourceId = secondPool.Id
                };

                // Change pools
                netAppMgmtClient.Volumes.PoolChange(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1, ResourceUtils.volumeName1, poolChangeRequest);

                if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
                {
                    Thread.Sleep(30000);
                }

                // retrieve the volume and check
                var volume2 = netAppMgmtClient.Volumes.Get(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName2, ResourceUtils.volumeName1);
                Assert.Equal(volume2.Name, ResourceUtils.accountName1 + '/' + ResourceUtils.poolName2 + '/' + ResourceUtils.volumeName1);

                // cleanup
                ResourceUtils.DeleteVolume(netAppMgmtClient, volumeName: ResourceUtils.volumeName1, accountName: ResourceUtils.accountName1, poolName: ResourceUtils.poolName2);
                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeletePool(netAppMgmtClient, poolName: ResourceUtils.poolName2);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        /// <summary>Create 103 volumes to force paging and verify the paged list returns them all.</summary>
        [Fact]
        public void LongListVolumes()
        {
            HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

                // Create 100+ volumes; track names so cleanup works even on partial failure.
                var createdVolumes = new List<string>();
                int length = 103;

                try
                {
                    // large pool so all volumes fit
                    long setPoolSize = 4398046511104;
                    ResourceUtils.CreateVolume(netAppMgmtClient, poolSize: setPoolSize);
                    createdVolumes.Add(ResourceUtils.volumeName1);
                    for (int i = 0; i < length - 1; i++)
                    {
                        ResourceUtils.CreateVolume(netAppMgmtClient, $"{ResourceUtils.volumeName1}-{i}", volumeOnly: true);
                        createdVolumes.Add($"{ResourceUtils.volumeName1}-{i}");
                    }

                    if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
                    {
                        Thread.Sleep(30000);
                    }

                    // get list of volumnes
                    var volumesPage = netAppMgmtClient.Volumes.List(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1);

                    // Get all resources by polling on next page link
                    var volumeResponseList = ListNextLink<Volume>.GetAllResourcesByPollingNextLink(volumesPage, netAppMgmtClient.Volumes.ListNext);
                    Assert.Equal(length, volumeResponseList.Count);

                    // cleanup
                    foreach (var volumeName in createdVolumes)
                    {
                        ResourceUtils.DeleteVolume(netAppMgmtClient, volumeName: volumeName);
                    }
                }
                catch (CloudException)
                {
                    // best-effort cleanup of whatever was created before the failure
                    foreach (var volumeName in createdVolumes)
                    {
                        try
                        {
                            ResourceUtils.DeleteVolume(netAppMgmtClient, volumeName: volumeName);
                        }
                        catch
                        {
                        }
                    }
                }

                ResourceUtils.DeletePool(netAppMgmtClient);
                ResourceUtils.DeleteAccount(netAppMgmtClient);
            }
        }

        // Manual maintenance helper, kept for cleaning up stray volumes from failed runs.
        //[Fact]
        //public void CleanLongListVolumes()
        //{
        //    HttpMockServer.RecordsDirectory = GetSessionsDirectoryPath();
        //    using (MockContext context = MockContext.Start(this.GetType()))
        //    {
        //        var netAppMgmtClient = NetAppTestUtilities.GetNetAppManagementClient(context, new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK });

        //        //get list of volumes
        //        var volumesPage = netAppMgmtClient.Volumes.List(ResourceUtils.resourceGroup, ResourceUtils.accountName1, ResourceUtils.poolName1);
        //        // Get all resources by polling on next page link
        //        var volumeResponseList = ListNextLink<Volume>.GetAllResourcesByPollingNextLink(volumesPage, netAppMgmtClient.Volumes.ListNext);
        //        foreach (var volume in volumeResponseList)
        //        {
        //            var volumeName = volume.Name.Split('/').Last();
        //            ResourceUtils.DeleteVolume(netAppMgmtClient, resourceGroup: ResourceUtils.resourceGroup, accountName: ResourceUtils.accountName1, poolName: ResourceUtils.poolName1, volumeName: volumeName);
        //        }
        //        ResourceUtils.DeletePool(netAppMgmtClient);
        //        ResourceUtils.DeleteAccount(netAppMgmtClient);
        //    }
        //}

        /// <summary>Returns the directory holding recorded HTTP sessions, next to the test assembly.</summary>
        private static string GetSessionsDirectoryPath()
        {
            string executingAssemblyPath = typeof(NetApp.Tests.ResourceTests.VolumeTests).GetTypeInfo().Assembly.Location;
            return Path.Combine(Path.GetDirectoryName(executingAssemblyPath), "SessionRecords");
        }
    }
}
// ***********************************************************************
// Copyright (c) 2007-2014 Charlie Poole, Rob Prouse
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************

using System.Text;

namespace NUnit.Framework.Interfaces
{
    /// <summary>
    /// The ResultState class represents the outcome of running a test.
    /// It contains two pieces of information. The Status of the test
    /// is an enum indicating whether the test passed, failed, was
    /// skipped or was inconclusive. The Label provides a more
    /// detailed breakdown for use by client runners.
    /// </summary>
    public class ResultState
    {
        #region Constructors

        /// <summary>
        /// Initializes a new instance of the <see cref="ResultState"/> class.
        /// </summary>
        /// <param name="status">The TestStatus.</param>
        public ResultState(TestStatus status) : this(status, string.Empty, FailureSite.Test)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ResultState"/> class.
        /// </summary>
        /// <param name="status">The TestStatus.</param>
        /// <param name="label">The label.</param>
        public ResultState(TestStatus status, string label) : this(status, label, FailureSite.Test)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ResultState"/> class.
        /// </summary>
        /// <param name="status">The TestStatus.</param>
        /// <param name="site">The stage at which the result was produced</param>
        public ResultState(TestStatus status, FailureSite site) : this(status, string.Empty, site)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ResultState"/> class.
        /// </summary>
        /// <param name="status">The TestStatus.</param>
        /// <param name="label">The label.</param>
        /// <param name="site">The stage at which the result was produced</param>
        public ResultState(TestStatus status, string label, FailureSite site)
        {
            Status = status;
            // A null label is normalized to the empty string so that Label
            // can always be dereferenced (Equals, GetHashCode, ToString).
            Label = label == null ? string.Empty : label;
            Site = site;
        }

        #endregion

        #region Predefined ResultStates

        /// <summary>
        /// The result is inconclusive
        /// </summary>
        public static readonly ResultState Inconclusive = new ResultState(TestStatus.Inconclusive);

        /// <summary>
        /// The test has been skipped.
        /// </summary>
        public static readonly ResultState Skipped = new ResultState(TestStatus.Skipped);

        /// <summary>
        /// The test has been ignored.
        /// </summary>
        public static readonly ResultState Ignored = new ResultState(TestStatus.Skipped, "Ignored");

        /// <summary>
        /// The test was skipped because it is explicit
        /// </summary>
        public static readonly ResultState Explicit = new ResultState(TestStatus.Skipped, "Explicit");

        /// <summary>
        /// The test succeeded
        /// </summary>
        public static readonly ResultState Success = new ResultState(TestStatus.Passed);

        /// <summary>
        /// The test issued a warning
        /// </summary>
        public static readonly ResultState Warning = new ResultState(TestStatus.Warning);

        /// <summary>
        /// The test failed
        /// </summary>
        public static readonly ResultState Failure = new ResultState(TestStatus.Failed);

        /// <summary>
        /// The test encountered an unexpected exception
        /// </summary>
        public static readonly ResultState Error = new ResultState(TestStatus.Failed, "Error");

        /// <summary>
        /// The test was cancelled by the user
        /// </summary>
        public static readonly ResultState Cancelled = new ResultState(TestStatus.Failed, "Cancelled");

        /// <summary>
        /// The test was not runnable.
        /// </summary>
        public static readonly ResultState NotRunnable = new ResultState(TestStatus.Failed, "Invalid");

        /// <summary>
        /// A suite failed because one or more child tests failed or had errors
        /// </summary>
        public static readonly ResultState ChildFailure = ResultState.Failure.WithSite(FailureSite.Child);

        /// <summary>
        /// A suite failed in its OneTimeSetUp
        /// </summary>
        public static readonly ResultState SetUpFailure = ResultState.Failure.WithSite(FailureSite.SetUp);

        /// <summary>
        /// A suite had an unexpected exception in its OneTimeSetUp
        /// </summary>
        public static readonly ResultState SetUpError = ResultState.Error.WithSite(FailureSite.SetUp);

        /// <summary>
        /// A suite had an unexpected exception in its OneTimeTearDown
        /// </summary>
        public static readonly ResultState TearDownError = ResultState.Error.WithSite(FailureSite.TearDown);

        #endregion

        #region Properties

        /// <summary>
        /// Gets the TestStatus for the test.
        /// </summary>
        /// <value>The status.</value>
        public TestStatus Status { get; private set; }

        /// <summary>
        /// Gets the label under which this test result is
        /// categorized, if any.
        /// </summary>
        public string Label { get; private set; }

        /// <summary>
        /// Gets the stage of test execution in which
        /// the failure or other result took place.
        /// </summary>
        public FailureSite Site { get; private set; }

        /// <summary>
        /// Get a new ResultState, which is the same as the current
        /// one but with the FailureSite set to the specified value.
        /// </summary>
        /// <param name="site">The FailureSite to use</param>
        /// <returns>A new ResultState</returns>
        public ResultState WithSite(FailureSite site)
        {
            return new ResultState(this.Status, this.Label, site);
        }

        /// <summary>
        /// Test whether this ResultState has the same Status and Label
        /// as another one. In other words, the whether two are equal
        /// ignoring the Site.
        /// </summary>
        /// <param name="other"></param>
        /// <returns></returns>
        public bool Matches(ResultState other)
        {
            return Status == other.Status && Label == other.Label;
        }

        #endregion

        #region Equals Override

        /// <summary>
        /// Determines whether the specified <see cref="System.Object" />, is equal to this instance.
        /// </summary>
        /// <param name="obj">The <see cref="System.Object" /> to compare with this instance.</param>
        /// <returns>
        ///   <c>true</c> if the specified <see cref="System.Object" /> is equal to this instance; otherwise, <c>false</c>.
        /// </returns>
        public override bool Equals(object obj)
        {
            var other = obj as ResultState;
            if (object.ReferenceEquals(other, null))
                return false;

            return Status.Equals(other.Status) && Label.Equals(other.Label) && Site.Equals(other.Site);
        }

        /// <summary>
        /// Returns a hash code for this instance.
        /// </summary>
        /// <returns>
        /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table.
        /// </returns>
        public override int GetHashCode()
        {
            // FIX: the previous expression `(int)Status << 8 + (int)Site ^ Label.GetHashCode()`
            // parsed as `(int)Status << ((8 + (int)Site) ^ Label.GetHashCode())` because the
            // additive and XOR operators bind tighter than intended relative to the shift.
            // Parenthesize explicitly so all three components contribute as designed.
            return (((int)Status << 8) + (int)Site) ^ Label.GetHashCode();
        }

        #endregion

        #region Operator Overloads

        /// <summary>
        /// Overload == operator for ResultStates
        /// </summary>
        public static bool operator ==(ResultState left, ResultState right)
        {
            // ReferenceEquals avoids recursing into this operator for null checks.
            if (object.ReferenceEquals(left, null))
                return object.ReferenceEquals(right, null);

            return left.Equals(right);
        }

        /// <summary>
        /// Overload != operator for ResultStates
        /// </summary>
        public static bool operator !=(ResultState left, ResultState right)
        {
            return !(left == right);
        }

        #endregion

        #region ToString Override

        /// <summary>
        /// Returns a <see cref="System.String"/> that represents this instance.
        /// </summary>
        /// <returns>
        /// A <see cref="System.String"/> that represents this instance.
        /// </returns>
        public override string ToString()
        {
            var sb = new StringBuilder(Status.ToString());

            if (Label != null && Label.Length > 0)
                sb.AppendFormat(":{0}", Label);
            if (Site != FailureSite.Test)
                sb.AppendFormat("({0})", Site.ToString());

            return sb.ToString();
        }

        #endregion
    }

    /// <summary>
    /// The FailureSite enum indicates the stage of a test
    /// in which an error or failure occurred.
    /// </summary>
    public enum FailureSite
    {
        /// <summary>
        /// Failure in the test itself
        /// </summary>
        Test,

        /// <summary>
        /// Failure in the SetUp method
        /// </summary>
        SetUp,

        /// <summary>
        /// Failure in the TearDown method
        /// </summary>
        TearDown,

        /// <summary>
        /// Failure of a parent test
        /// </summary>
        Parent,

        /// <summary>
        /// Failure of a child test
        /// </summary>
        Child
    }
}
using System;
using System.Globalization;
using System.IO;
using System.Reflection;

namespace Eto.Drawing
{
	/// <summary>
	/// Format of bytes used in a <see cref="Bitmap"/>
	/// </summary>
	/// <remarks>
	/// The format is important when modifying the bytes directly via <see cref="Bitmap.Lock"/>.
	/// </remarks>
	/// <copyright>(c) 2012-2014 by Curtis Wensley</copyright>
	/// <license type="BSD-3">See LICENSE for full terms</license>
	public enum PixelFormat
	{
		/// <summary>
		/// 32-bits (4 bytes) per pixel, ordered by an Empty byte in the highest order, followed by Red, Green, and Blue.
		/// </summary>
		Format32bppRgb,

		/// <summary>
		/// 24-bits (3 bytes) per pixel, ordered by Red in the highest order, followed by Green, and Blue.
		/// </summary>
		Format24bppRgb,

		/// <summary>
		/// 32-bits (4 bytes) per pixel, ordered by an Alpha byte in the highest order, followed by Red, Green, and Blue.
		/// </summary>
		Format32bppRgba
	}

	/// <summary>
	/// Format of the image to use when saving, loading, etc.
	/// </summary>
	/// <remarks>
	/// The format is typically used only when saving via <see cref="M:Eto.Drawing.Bitmap.Save"/>
	/// </remarks>
	/// <copyright>(c) 2012-2014 by Curtis Wensley</copyright>
	/// <license type="BSD-3">See LICENSE for full terms</license>
	public enum ImageFormat
	{
		/// <summary>
		/// Jpeg format
		/// </summary>
		Jpeg,

		/// <summary>
		/// Windows BMP format
		/// </summary>
		Bitmap,

		/// <summary>
		/// Tiff format
		/// </summary>
		Tiff,

		/// <summary>
		/// Portable Network Graphics format
		/// </summary>
		Png,

		/// <summary>
		/// Graphics Interchange Format
		/// </summary>
		Gif
	}

	/// <summary>
	/// Represents an image
	/// </summary>
	/// <copyright>(c) 2012-2014 by Curtis Wensley</copyright>
	/// <license type="BSD-3">See LICENSE for full terms</license>
	[Handler(typeof(Bitmap.IHandler))]
	public class Bitmap : Image
	{
		new IHandler Handler { get { return (IHandler)base.Handler; } }

		/// <summary>
		/// Loads a bitmap from the resource in the specified or caller's assembly
		/// </summary>
		/// <param name="resourceName">Name of the resource in the caller's assembly to load</param>
		/// <param name="assembly">Assembly to load the resource from, or null to use the caller's assembly</param>
		/// <returns>A new instance of a Bitmap loaded from the specified resource</returns>
		public static Bitmap FromResource(string resourceName, Assembly assembly = null)
		{
			if (assembly == null)
			{
#if PCL
				// On PCL, Assembly.GetCallingAssembly() is only available through
				// reflection; fail with a clear message if the platform lacks it.
				if (TypeHelper.GetCallingAssembly == null)
					throw new ArgumentNullException("assembly", string.Format(CultureInfo.CurrentCulture, "This platform doesn't support Assembly.GetCallingAssembly(), so you must pass the assembly directly"));
				assembly = (Assembly)TypeHelper.GetCallingAssembly.Invoke(null, new object[0]);
#else
				assembly = Assembly.GetCallingAssembly();
#endif
			}

			using (var stream = assembly.GetManifestResourceStream(resourceName))
			{
				if (stream == null)
					throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, "Resource '{0}' not found in assembly '{1}'", resourceName, assembly.FullName));
				return new Bitmap(stream);
			}
		}

		/// <summary>
		/// Loads a bitmap from a resource in the same assembly as the specified <paramref name="type"/>
		/// </summary>
		/// <returns>The bitmap instance.</returns>
		/// <param name="resourceName">Full name of the resource in the type's assembly.</param>
		/// <param name="type">Type of the assembly to get the resource.</param>
		public static Bitmap FromResource(string resourceName, Type type)
		{
			if (type == null)
				throw new ArgumentNullException("type");
#if PCL
			return FromResource(resourceName, type.GetTypeInfo().Assembly);
#else
			return FromResource(resourceName, type.Assembly);
#endif
		}

		/// <summary>
		/// Initializes a new instance of a Bitmap from a file
		/// </summary>
		/// <param name="fileName">File to load as a bitmap</param>
		public Bitmap(string fileName)
		{
			Handler.Create(fileName);
			Initialize();
		}

		/// <summary>
		/// Initializes a new instance of a Bitmap from a stream
		/// </summary>
		/// <param name="stream">Stream to load from the bitmap</param>
		public Bitmap(Stream stream)
		{
			Handler.Create(stream);
			Initialize();
		}

		/// <summary>
		/// Initializes a new instance of a Bitmap with the specified size and format
		/// </summary>
		/// <param name="size">Size of the bitmap to create</param>
		/// <param name="pixelFormat">Format of each pixel</param>
		public Bitmap(Size size, PixelFormat pixelFormat)
			: this(size.Width, size.Height, pixelFormat)
		{
		}

		/// <summary>
		/// Initializes a new instance of a Bitmap with the specified size and format
		/// </summary>
		/// <param name="width">Width of the new bitmap</param>
		/// <param name="height">Height of the new bitmap</param>
		/// <param name="pixelFormat">Format of each pixel</param>
		public Bitmap(int width, int height, PixelFormat pixelFormat)
		{
			if (width <= 0)
				throw new ArgumentOutOfRangeException("width", "width must be greater than zero");
			if (height <= 0)
				throw new ArgumentOutOfRangeException("height", "height must be greater than zero");
			Handler.Create(width, height, pixelFormat);
			Initialize();
		}

		/// <summary>
		/// Creates a new bitmap optimized for drawing on the specified <paramref name="graphics"/>
		/// </summary>
		/// <param name="width">Width of the bitmap</param>
		/// <param name="height">Height of the bitmap</param>
		/// <param name="graphics">Graphics context the bitmap is intended to be drawn on</param>
		public Bitmap(int width, int height, Graphics graphics)
		{
			if (width <= 0)
				throw new ArgumentOutOfRangeException("width", "width must be greater than zero");
			if (height <= 0)
				throw new ArgumentOutOfRangeException("height", "height must be greater than zero");
			if (graphics == null)
				throw new ArgumentNullException("graphics");
			Handler.Create(width, height, graphics);
			Initialize();
		}

		/// <summary>
		/// Create a new scaled bitmap with the specified <paramref name="width"/> and <paramref name="height"/>
		/// </summary>
		/// <param name="image">Image to scale</param>
		/// <param name="width">Width to scale the source image to</param>
		/// <param name="height">Height to scale the source image to</param>
		/// <param name="interpolation">Interpolation quality</param>
		public Bitmap(Image image, int? width = null, int? height = null, ImageInterpolation interpolation = ImageInterpolation.Default)
		{
			if (image == null)
				throw new ArgumentNullException("image");
			if (width != null && width <= 0)
				throw new ArgumentOutOfRangeException("width", "width must be greater than zero");
			if (height != null && height <= 0)
				throw new ArgumentOutOfRangeException("height", "height must be greater than zero");
			Handler.Create(image, width ?? image.Size.Width, height ?? image.Size.Height, interpolation);
			Initialize();
		}

		/// <summary>
		/// Initializes a new instance of a Bitmap from a <paramref name="bytes"/> array
		/// </summary>
		/// <param name="bytes">Array of bytes containing the image data in one of the supported <see cref="ImageFormat"/> types</param>
		public Bitmap(byte[] bytes)
		{
			if (bytes == null)
				throw new ArgumentNullException("bytes");
			Handler.Create(new MemoryStream(bytes, false));
			// FIX: this constructor previously omitted Initialize(). The handler
			// interface is [AutoInitialize(false)], so every creating constructor
			// must call Initialize() explicitly after Create() — as all of the
			// other constructors in this class do.
			Initialize();
		}

		/// <summary>
		/// Initializes a new instance of a Bitmap with the specified handler
		/// </summary>
		/// <remarks>
		/// This is intended to be used by platform specific code to return bitmap instances with a particular handler
		/// </remarks>
		/// <param name="handler">Platform handler to use for this instance</param>
		public Bitmap(IHandler handler)
			: base(handler)
		{
		}

		/// <summary>
		/// Locks the data of the image to directly access the bytes of the image
		/// </summary>
		/// <remarks>
		/// This locks the data to read and write to directly using unsafe pointers. After reading or updating
		/// the data, you must call <see cref="BitmapData.Dispose()"/> to unlock the data before using the bitmap.
		/// e.g.:
		///
		/// <code>
		/// using (var bd = bitmap.Lock ()) {
		///	byte* pdata = bd.Data;
		///	// access data
		/// }
		/// </code>
		/// </remarks>
		/// <returns>A BitmapData object that carries a pointer and functions for manipulating the data directly</returns>
		public BitmapData Lock()
		{
			return Handler.Lock();
		}

		/// <summary>
		/// Saves the bitmap to a file in the specified format
		/// </summary>
		/// <param name="fileName">File to save the bitmap to</param>
		/// <param name="format">Format to save as</param>
		public void Save(string fileName, ImageFormat format)
		{
			Handler.Save(fileName, format);
		}

		/// <summary>
		/// Saves the bitmap to a stream in the specified format
		/// </summary>
		/// <param name="stream">Stream to save the bitmap to</param>
		/// <param name="format">Format to save as</param>
		public void Save(Stream stream, ImageFormat format)
		{
			Handler.Save(stream, format);
		}

		/// <summary>
		/// Saves the bitmap to an image of the specified <paramref name="imageFormat"/> into a byte array
		/// </summary>
		/// <remarks>
		/// This is merely a helper to save to a byte array instead of a stream.
		/// </remarks>
		/// <param name="imageFormat">Format to encode the image as</param>
		/// <returns>The encoded image bytes</returns>
		public byte[] ToByteArray(ImageFormat imageFormat)
		{
			using (var memoryStream = new MemoryStream())
			{
				Save(memoryStream, imageFormat);
				return memoryStream.ToArray();
			}
		}

		/// <summary>
		/// Creates a clone of the bitmap
		/// </summary>
		/// <param name="rectangle">If specified, the region of the bitmap to clone</param>
		public Bitmap Clone(Rectangle? rectangle = null)
		{
			return Handler.Clone(rectangle);
		}

		/// <summary>
		/// Gets the color of the pixel at the specified <paramref name="position"/>
		/// </summary>
		/// <remarks>
		/// Note that this method can be extremely slow to go through each pixel of a bitmap.
		/// If you need better performance, use <see cref="Lock"/> to get access to the bitmap's pixel buffer directly,
		/// then optionally use <see cref="BitmapData.GetPixel(Point)"/> to get each pixel value.
		/// </remarks>
		/// <returns>The color of the pixel.</returns>
		/// <param name="position">Position to get the color of the pixel.</param>
		public Color GetPixel(Point position)
		{
			return GetPixel(position.X, position.Y);
		}

		/// <summary>
		/// Gets the color of the pixel at the specified coordinates.
		/// </summary>
		/// <remarks>
		/// Note that this method can be extremely slow to go through each pixel of a bitmap.
		/// If you need better performance, use <see cref="Lock"/> to get access to the bitmap's pixel buffer directly,
		/// then optionally use <see cref="BitmapData.GetPixel(int,int)"/> to get each pixel value.
		/// </remarks>
		/// <returns>The color of the pixel at the specified coordinates</returns>
		/// <param name="x">The x coordinate</param>
		/// <param name="y">The y coordinate</param>
		public Color GetPixel(int x, int y)
		{
			return Handler.GetPixel(x, y);
		}

		/// <summary>
		/// Sets the pixel color at the specified <paramref name="position"/>.
		/// </summary>
		/// <remarks>
		/// Note that this method can be extremely slow to set each pixel of a bitmap.
		/// If you need better performance, use <see cref="Lock"/> to get access to the bitmap's pixel buffer directly,
		/// then optionally use <see cref="BitmapData.SetPixel(Point,Color)"/> to set each pixel value.
		/// </remarks>
		/// <param name="position">Position to set the pixel color.</param>
		/// <param name="color">Color to set.</param>
		public void SetPixel(Point position, Color color)
		{
			SetPixel(position.X, position.Y, color);
		}

		/// <summary>
		/// Sets the color of the pixel at the specified coordinates.
		/// </summary>
		/// <remarks>
		/// Note that this method can be extremely slow to set each pixel of a bitmap.
		/// If you need better performance, use <see cref="Lock"/> to get access to the bitmap's pixel buffer directly,
		/// then optionally use <see cref="BitmapData.SetPixel(int,int,Color)"/> to set each pixel value.
		/// </remarks>
		/// <param name="x">The x coordinate of the pixel to set.</param>
		/// <param name="y">The y coordinate of the pixel to set.</param>
		/// <param name="color">Color to set the pixel to.</param>
		public void SetPixel(int x, int y, Color color)
		{
			// Locks the buffer for a single pixel write; see remarks above for
			// the recommended approach when setting many pixels.
			using (var bd = Lock())
			{
				bd.SetPixel(x, y, color);
			}
		}

		#region Handler

		/// <summary>
		/// Handler interface for the <see cref="Bitmap"/> class
		/// </summary>
		/// <copyright>(c) 2012-2014 by Curtis Wensley</copyright>
		/// <license type="BSD-3">See LICENSE for full terms</license>
		[AutoInitialize(false)]
		public new interface IHandler : Image.IHandler, ILockableImage
		{
			/// <summary>
			/// Create a bitmap from a file
			/// </summary>
			/// <param name="fileName">File to load as a bitmap</param>
			void Create(string fileName);

			/// <summary>
			/// Create a bitmap from a specified stream
			/// </summary>
			/// <param name="stream">Stream to load from the bitmap</param>
			void Create(Stream stream);

			/// <summary>
			/// Creates a new bitmap in-memory with the specified format
			/// </summary>
			/// <param name="width">Initial width of the bitmap</param>
			/// <param name="height">Initial height of the bitmap</param>
			/// <param name="pixelFormat">Format of each of the pixels in the bitmap</param>
			void Create(int width, int height, PixelFormat pixelFormat);

			/// <summary>
			/// Creates a new bitmap optimized for drawing on the specified <paramref name="graphics"/>
			/// </summary>
			/// <param name="width">Width of the bitmap</param>
			/// <param name="height">Height of the bitmap</param>
			/// <param name="graphics">Graphics context the bitmap is intended to be drawn on</param>
			void Create(int width, int height, Graphics graphics);

			/// <summary>
			/// Create a new scaled bitmap with the specified <paramref name="width"/> and <paramref name="height"/>
			/// </summary>
			/// <param name="image">Image to scale</param>
			/// <param name="width">Width to scale the source image to</param>
			/// <param name="height">Height to scale the source image to</param>
			/// <param name="interpolation">Interpolation quality</param>
			void Create(Image image, int width, int height, ImageInterpolation interpolation);

			/// <summary>
			/// Saves the bitmap to a stream in the specified format
			/// </summary>
			/// <param name="stream">Stream to save the bitmap to</param>
			/// <param name="format">Format to save as</param>
			void Save(Stream stream, ImageFormat format);

			/// <summary>
			/// Saves the bitmap to a file in the specified format
			/// </summary>
			/// <param name="fileName">File to save the bitmap to</param>
			/// <param name="format">Format to save as</param>
			void Save(string fileName, ImageFormat format);

			/// <summary>
			/// Creates a clone of the bitmap
			/// </summary>
			/// <param name="rectangle">If specified, the region of the bitmap to clone</param>
			/// <returns>A new bitmap with a copy of the (region of the) image data</returns>
			Bitmap Clone(Rectangle? rectangle = null);

			/// <summary>
			/// Gets the color of the pixel at the specified coordinates
			/// </summary>
			/// <returns>The color of the pixel at the specified coordinates</returns>
			/// <param name="x">The x coordinate</param>
			/// <param name="y">The y coordinate</param>
			Color GetPixel(int x, int y);
		}

		#endregion
	}
}
using System.Runtime.CompilerServices;
using System.Threading.Channels;
using Meziantou.Framework.DependencyScanning.Internals;
using Meziantou.Framework.Globbing;

namespace Meziantou.Framework.DependencyScanning;

public abstract class DependencyScanner
{
    /// <summary>
    /// Scans <paramref name="path"/> recursively and streams the discovered dependencies.
    /// Dispatches to a single-threaded or parallel implementation based on
    /// <see cref="ScannerOptions.DegreeOfParallelism"/>, picking the smallest
    /// scanner-flag array that can hold <see cref="ScannerOptions.Scanners"/>.
    /// </summary>
    public static IAsyncEnumerable<Dependency> ScanDirectoryAsync(string path, ScannerOptions? options, CancellationToken cancellationToken = default)
    {
        if (!Directory.Exists(path))
            throw new DirectoryNotFoundException(path);

        options ??= ScannerOptions.Default;
        if (options.Scanners.Count == 0)
            return EmptyAsyncEnumerable<Dependency>.Instance;

        if (options.DegreeOfParallelism == 1)
            return ScanDirectorySingleThreadedAsync(path, options, cancellationToken);

        if (options.Scanners.Count <= EnabledScannersArray32.MaxValues)
            return ScanDirectoryParallelAsync<EnabledScannersArray32>(path, options, cancellationToken);

        if (options.Scanners.Count <= EnabledScannersArray64.MaxValues)
            return ScanDirectoryParallelAsync<EnabledScannersArray64>(path, options, cancellationToken);

        return ScanDirectoryParallelAsync<EnabledScannersArray>(path, options, cancellationToken);
    }

    /// <summary>
    /// Scans <paramref name="path"/> recursively and invokes
    /// <paramref name="onDependencyFound"/> for each discovered dependency.
    /// </summary>
    public static Task ScanDirectoryAsync(string path, ScannerOptions? options, DependencyFound onDependencyFound, CancellationToken cancellationToken = default)
    {
        if (!Directory.Exists(path))
            throw new DirectoryNotFoundException(path);

        options ??= ScannerOptions.Default;
        if (options.Scanners.Count == 0)
            return Task.CompletedTask;

        if (options.Scanners.Count <= EnabledScannersArray32.MaxValues)
        {
            return options.DegreeOfParallelism == 1
                ? ScanDirectoryAsync<EnabledScannersArray32>(path, options, onDependencyFound, cancellationToken)
                : ScanDirectoryParallelAsync<EnabledScannersArray32>(path, options, onDependencyFound, cancellationToken);
        }

        if (options.Scanners.Count <= EnabledScannersArray64.MaxValues)
        {
            return options.DegreeOfParallelism == 1
                ? ScanDirectoryAsync<EnabledScannersArray64>(path, options, onDependencyFound, cancellationToken)
                : ScanDirectoryParallelAsync<EnabledScannersArray64>(path, options, onDependencyFound, cancellationToken);
        }

        return options.DegreeOfParallelism == 1
            ? ScanDirectoryAsync<EnabledScannersArray>(path, options, onDependencyFound, cancellationToken)
            : ScanDirectoryParallelAsync<EnabledScannersArray>(path, options, onDependencyFound, cancellationToken);
    }

    /// <summary>
    /// Runs every enabled scanner against a single enumerated file, always
    /// disposing the scan context. Shared by the sequential and parallel paths.
    /// </summary>
    private static async ValueTask ScanSingleFileAsync<T>(FileToScan<T> entry, ScannerOptions options, DependencyFound onDependencyFound, CancellationToken cancellationToken)
        where T : struct, IEnabledScannersArray
    {
        var scanFileContext = new ScanFileContext(entry.FullPath, onDependencyFound, options.FileSystem, cancellationToken);
        try
        {
            for (var i = 0; i < options.Scanners.Count; i++)
            {
                if (!entry.Scanners.Get(i))
                    continue;

                var scanner = options.Scanners[i];
                // Each scanner reads the file from the start.
                scanFileContext.ResetStream();
                await scanner.ScanAsync(scanFileContext).ConfigureAwait(false);
            }
        }
        finally
        {
            await scanFileContext.DisposeAsync().ConfigureAwait(false);
        }
    }

    /// <summary>Sequential scan: enumerate and scan files one at a time.</summary>
    private static async Task ScanDirectoryAsync<T>(string path, ScannerOptions options, DependencyFound onDependencyFound, CancellationToken cancellationToken = default)
        where T : struct, IEnabledScannersArray
    {
        using var enumerator = new ScannerFileEnumerator<T>(path, options);
        while (enumerator.MoveNext())
        {
            await ScanSingleFileAsync(enumerator.Current, options, onDependencyFound, cancellationToken).ConfigureAwait(false);
        }
    }

    /// <summary>
    /// Parallel scan: one producer task enumerates files into a bounded channel
    /// while <see cref="ScannerOptions.DegreeOfParallelism"/> worker tasks consume it.
    /// </summary>
    private static async Task ScanDirectoryParallelAsync<T>(string path, ScannerOptions options, DependencyFound onDependencyFound, CancellationToken cancellationToken = default)
        where T : struct, IEnabledScannersArray
    {
        var filesToScanChannel = Channel.CreateBounded<FileToScan<T>>(new BoundedChannelOptions(10000)
        {
            AllowSynchronousContinuations = true,
            SingleWriter = true,
            SingleReader = false,
            FullMode = BoundedChannelFullMode.Wait,
        });

        // Start enumerating; the channel is always completed so readers terminate.
        var enumeratorTask = Task.Run(async () =>
        {
            try
            {
                using var enumerator = new ScannerFileEnumerator<T>(path, options);
                while (enumerator.MoveNext())
                {
                    await filesToScanChannel.Writer.WriteAsync(enumerator.Current, cancellationToken).ConfigureAwait(false);
                }
            }
            finally
            {
                filesToScanChannel.Writer.Complete();
            }
        }, cancellationToken);

        // FIX: the previous code used Array.Fill(tasks, Task.Run(...)), which
        // evaluates Task.Run once and stores the SAME task in every slot, so only
        // one worker ever ran regardless of DegreeOfParallelism. Create one
        // distinct worker task per slot instead.
        var tasks = new Task[options.DegreeOfParallelism + 1];
        tasks[0] = enumeratorTask;
        for (var taskIndex = 1; taskIndex < tasks.Length; taskIndex++)
        {
            tasks[taskIndex] = Task.Run(async () =>
            {
                var reader = filesToScanChannel.Reader;
                while (await reader.WaitToReadAsync(cancellationToken).ConfigureAwait(false))
                {
                    while (reader.TryRead(out var entry))
                    {
                        await ScanSingleFileAsync(entry, options, onDependencyFound, cancellationToken).ConfigureAwait(false);
                    }
                }
            }, cancellationToken);
        }

        await Task.WhenAll(tasks).ConfigureAwait(false);
    }

    /// <summary>
    /// Single-threaded streaming scan. The callback-based overload is used to
    /// collect all dependencies before yielding them.
    /// </summary>
    private static async IAsyncEnumerable<Dependency> ScanDirectorySingleThreadedAsync(string path, ScannerOptions options, [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        var result = new List<Dependency>();
        await ScanDirectoryAsync(path, options, dep =>
        {
            result.Add(dep);
            return default;
        }, cancellationToken).ConfigureAwait(false);

        foreach (var item in result)
            yield return item;
    }

    /// <summary>
    /// Parallel streaming scan: workers publish dependencies to an unbounded
    /// channel which this method drains as an async stream.
    /// </summary>
    private static async IAsyncEnumerable<Dependency> ScanDirectoryParallelAsync<T>(string path, ScannerOptions options, [EnumeratorCancellation] CancellationToken cancellationToken = default)
        where T : struct, IEnabledScannersArray
    {
        var filesToScanChannel = Channel.CreateBounded<FileToScan<T>>(new BoundedChannelOptions(1000)
        {
            AllowSynchronousContinuations = true,
            SingleWriter = true,
            SingleReader = false,
            FullMode = BoundedChannelFullMode.Wait,
        });

        var dependenciesChannel = Channel.CreateUnbounded<Dependency>(new UnboundedChannelOptions
        {
            AllowSynchronousContinuations = true,
            SingleWriter = false,
            SingleReader = true,
        });

        // Start enumerating; the channel is always completed so readers terminate.
        var enumeratorTask = Task.Run(async () =>
        {
            try
            {
                using var enumerator = new ScannerFileEnumerator<T>(path, options);
                while (enumerator.MoveNext())
                {
                    await filesToScanChannel.Writer.WriteAsync(enumerator.Current, cancellationToken).ConfigureAwait(false);
                }
            }
            finally
            {
                filesToScanChannel.Writer.Complete();
            }
        }, cancellationToken);

        // Found dependencies are forwarded to the output channel.
        DependencyFound onDependencyFound = d => dependenciesChannel.Writer.WriteAsync(d, cancellationToken);

        // FIX: same Array.Fill issue as the Task-based overload — create one
        // distinct worker task per slot so DegreeOfParallelism is honored.
        var tasks = new Task[options.DegreeOfParallelism];
        for (var taskIndex = 0; taskIndex < tasks.Length; taskIndex++)
        {
            tasks[taskIndex] = Task.Run(async () =>
            {
                var reader = filesToScanChannel.Reader;
                while (await reader.WaitToReadAsync(cancellationToken).ConfigureAwait(false))
                {
                    while (reader.TryRead(out var entry))
                    {
                        await ScanSingleFileAsync(entry, options, onDependencyFound, cancellationToken).ConfigureAwait(false);
                    }
                }
            }, cancellationToken);
        }

        var whenAllTasks = Task.WhenAll(tasks);

        // Complete the output channel once all workers are done so ReadAllAsync ends.
        var writerCompleteTask = whenAllTasks.ContinueWith(_ => dependenciesChannel.Writer.Complete(), cancellationToken, TaskContinuationOptions.RunContinuationsAsynchronously, TaskScheduler.Default);

        await foreach (var value in dependenciesChannel.Reader.ReadAllAsync(cancellationToken).ConfigureAwait(false))
        {
            yield return value;
        }

        // Observe any exceptions from the producer/workers after draining.
        await Task.WhenAll(enumeratorTask, writerCompleteTask, whenAllTasks).ConfigureAwait(false);
    }

    /// <summary>Optional glob patterns overriding <see cref="ShouldScanFileCore"/>.</summary>
    public GlobCollection? FilePatterns { get; set; }

    public bool ShouldScanFile(ReadOnlySpan<char> fullPath)
    {
        // NOTE(review): unlike the CandidateFileContext overload below, this
        // overload does not consult FilePatterns — confirm whether that is
        // intentional before relying on it.
        return ShouldScanFileCore(new CandidateFileContext(Path.GetDirectoryName(fullPath), Path.GetFileName(fullPath)));
    }

    public bool ShouldScanFile(CandidateFileContext context)
    {
        // Explicit glob patterns take precedence over the scanner's own filter.
        if (FilePatterns != null)
            return FilePatterns.IsMatch(context.Directory, context.FileName);

        return ShouldScanFileCore(context);
    }

    /// <summary>Scanner-specific file filter, used when no <see cref="FilePatterns"/> are set.</summary>
    protected abstract bool ShouldScanFileCore(CandidateFileContext context);

    /// <summary>Scans the file described by <paramref name="context"/> and reports dependencies.</summary>
    public abstract ValueTask ScanAsync(ScanFileContext context);

    /// <summary>
    /// Updates every updatable dependency in <paramref name="dependencies"/> to
    /// <paramref name="newVersion"/>, sequentially.
    /// </summary>
    public static async Task UpdateAllAsync(IEnumerable<Dependency> dependencies, string newVersion, CancellationToken cancellationToken)
    {
        foreach (var dependency in dependencies.Where(d => d.Location.IsUpdatable))
        {
            await dependency.UpdateAsync(newVersion, cancellationToken).ConfigureAwait(false);
        }
    }
}
#region Imported Types

using DeviceSQL.SQLTypes.ROC.Data;
using Microsoft.SqlServer.Server;
using System;
using System.Data.SqlTypes;
using System.IO;
using System.Linq;

#endregion

namespace DeviceSQL.SQLTypes.ROC
{
    /// <summary>
    /// SQL CLR user-defined type exposing a 24-byte ROC audit log record.
    /// The raw bytes are decoded on demand via <see cref="Data.AuditLogRecord"/>;
    /// every typed property constructs a fresh decoder over <see cref="Data"/>.
    /// Serialized layout: 1 byte IsNull + 4 bytes Index + 24 data bytes = 29 (MaxByteSize).
    /// </summary>
    [Serializable()]
    [SqlUserDefinedType(Format.UserDefined, IsByteOrdered = false, IsFixedLength = false, MaxByteSize = 29)]
    public struct AuditLogRecord : INullable, IBinarySerialize
    {
        #region Fields

        // Backing store for the raw 24-byte record payload.
        private byte[] data;

        #endregion

        #region Properties

        // True when this instance represents SQL NULL.
        public bool IsNull { get; internal set; }

        // The SQL NULL instance required by INullable.
        public static AuditLogRecord Null
        {
            get
            {
                return new AuditLogRecord() { IsNull = true };
            }
        }

        // Raw record bytes; lazily initialized to a zeroed 24-byte buffer so that a
        // default-constructed struct is always safe to decode.
        public byte[] Data
        {
            get
            {
                if (data == null)
                {
                    data = new byte[24];
                }
                return data;
            }
            set
            {
                data = value;
            }
        }

        // Decoded view over the raw bytes. A new decoder is created per access,
        // matching the original behavior (no caching of parsed state).
        private Data.AuditLogRecord Record
        {
            get
            {
                return new Data.AuditLogRecord(Convert.ToUInt16(Index), Data);
            }
        }

        public SqlDateTime DateTimeStamp
        {
            get
            {
                var dateTimeStamp = Record.DateTimeStamp;
                return dateTimeStamp.HasValue ? dateTimeStamp.Value : SqlDateTime.Null;
            }
        }

        // Ordinal of this record within the device's audit log (fed to the decoder as UInt16).
        public int Index { get; set; }

        public SqlByte FstNumber
        {
            get
            {
                var fstNumber = Record.FstNumber;
                return fstNumber.HasValue ? fstNumber.Value : SqlByte.Null;
            }
        }

        public SqlByte PointType
        {
            get
            {
                var pointType = Record.PointType;
                return pointType.HasValue ? pointType.Value : SqlByte.Null;
            }
        }

        public SqlByte LogicalNumber
        {
            get
            {
                var logicalNumber = Record.LogicalNumber;
                return logicalNumber.HasValue ? logicalNumber.Value : SqlByte.Null;
            }
        }

        public SqlByte ParameterNumber
        {
            get
            {
                var parameterNumber = Record.ParameterNumber;
                return parameterNumber.HasValue ? parameterNumber.Value : SqlByte.Null;
            }
        }

        public SqlInt32 Tag
        {
            get
            {
                var tag = Record.Tag;
                return tag.HasValue ? tag.Value : SqlInt32.Null;
            }
        }

        public SqlDateTime PowerRemovedDateTime
        {
            get
            {
                var powerRemovedDateTime = Record.PowerRemovedDateTime;
                return powerRemovedDateTime.HasValue ? powerRemovedDateTime.Value : SqlDateTime.Null;
            }
        }

        public SqlString CalibrationPointType
        {
            get
            {
                var calibrationPointType = Record.CalibrationPointType;
                return calibrationPointType.HasValue ? calibrationPointType.Value.ToString() : SqlString.Null;
            }
        }

        public SqlString CalibrationMultivariableSensorInput
        {
            get
            {
                var calibrationMultivariableSensorInput = Record.CalibrationMultivariableSensorInput;
                return calibrationMultivariableSensorInput.HasValue ? calibrationMultivariableSensorInput.Value.ToString() : SqlString.Null;
            }
        }

        public SqlString CalibrationType
        {
            get
            {
                var calibrationType = Record.CalibrationType;
                return calibrationType.HasValue ? calibrationType.Value.ToString() : SqlString.Null;
            }
        }

        public SqlString EventCode
        {
            get
            {
                return Record.EventCode.ToString();
            }
        }

        public SqlString OperatorId
        {
            get
            {
                return Record.OperatorId;
            }
        }

        public SqlString EventText
        {
            get
            {
                return Record.EventText;
            }
        }

        public SqlBinary OldValue
        {
            get
            {
                return Record.OldValue;
            }
        }

        public SqlSingle FstFloatValue
        {
            get
            {
                var fstFloatValue = Record.FstFloatValue;
                return fstFloatValue.HasValue ? fstFloatValue.Value : SqlSingle.Null;
            }
        }

        public SqlBinary NewValue
        {
            get
            {
                return Record.NewValue;
            }
        }

        // Previous parameter value, typed according to the matching parameter definition.
        // Returns Parameter.Null when point/parameter are NULL or no definition is found.
        public Parameter OldParameterValue
        {
            get
            {
                if (PointType.IsNull || ParameterNumber.IsNull)
                {
                    return Parameter.Null;
                }

                var pointType = PointType.Value;
                var parameterNumber = ParameterNumber.Value;
                var parameterDefinition = ParameterDatabase.ParameterDefinitions.FirstOrDefault(pd => pd.PointType == pointType && pd.Parameter == parameterNumber);

                // BUG FIX: the original dereferenced the lookup result unconditionally and
                // threw NullReferenceException for unknown point/parameter combinations.
                if (parameterDefinition == null)
                {
                    return Parameter.Null;
                }

                if (parameterDefinition.DataType == "AC")
                {
                    // Old values only ever carry the 3-character ASCII form.
                    return parameterDefinition.Length == 3
                        ? new Parameter() { RawType = ParameterType.AC3, RawValue = OldValue.Value.Take(3).ToArray() }
                        : Parameter.Null;
                }

                return CreateParameter(parameterDefinition.DataType, OldValue.Value);
            }
        }

        // New parameter value, typed according to the matching parameter definition.
        // Long AC types are reconstructed from old + new bytes plus the tag (see helper).
        public Parameter NewParameterValue
        {
            get
            {
                if (PointType.IsNull || ParameterNumber.IsNull)
                {
                    return Parameter.Null;
                }

                var pointType = PointType.Value;
                var parameterNumber = ParameterNumber.Value;
                var parameterDefinition = ParameterDatabase.ParameterDefinitions.FirstOrDefault(pd => pd.PointType == pointType && pd.Parameter == parameterNumber);

                // BUG FIX: guard against a missing definition (see OldParameterValue).
                if (parameterDefinition == null)
                {
                    return Parameter.Null;
                }

                if (parameterDefinition.DataType == "AC")
                {
                    switch (parameterDefinition.Length)
                    {
                        case 3: return new Parameter() { RawType = ParameterType.AC3, RawValue = NewValue.Value.Take(3).ToArray() };
                        case 7: return new Parameter() { RawType = ParameterType.AC7, RawValue = NewValue.Value.Union(new byte[3]).ToArray() };
                        case 10: return CreateCompositeAcParameter(ParameterType.AC10, 0);
                        case 12: return CreateCompositeAcParameter(ParameterType.AC12, 2);
                        case 20: return CreateCompositeAcParameter(ParameterType.AC20, 10);
                        case 30: return CreateCompositeAcParameter(ParameterType.AC30, 20);
                        case 40: return CreateCompositeAcParameter(ParameterType.AC40, 30);
                        default: return Parameter.Null;
                    }
                }

                return CreateParameter(parameterDefinition.DataType, NewValue.Value);
            }
        }

        public SqlInt32 SequenceNumber
        {
            get
            {
                return Record.SequenceNumber;
            }
        }

        public SqlBoolean EventNotSaved
        {
            get
            {
                return Record.EventNotSaved;
            }
        }

        #endregion

        #region Helper Methods

        // Maps a simple (non-AC) data type name onto a typed Parameter wrapper,
        // trimming the raw value to the type's width where required.
        private static Parameter CreateParameter(string dataType, byte[] value)
        {
            switch (dataType)
            {
                case "BIN": return new Parameter() { RawType = ParameterType.BIN, RawValue = value.Take(1).ToArray() };
                case "FL": return new Parameter() { RawType = ParameterType.FL, RawValue = value };
                case "INT8": return new Parameter() { RawType = ParameterType.INT8, RawValue = value.Take(1).ToArray() };
                case "INT16": return new Parameter() { RawType = ParameterType.INT16, RawValue = value.Take(2).ToArray() };
                case "INT32": return new Parameter() { RawType = ParameterType.INT32, RawValue = value };
                case "TLP": return new Parameter() { RawType = ParameterType.TLP, RawValue = value.Take(3).ToArray() };
                case "UINT8": return new Parameter() { RawType = ParameterType.UINT8, RawValue = value.Take(1).ToArray() };
                case "UINT16": return new Parameter() { RawType = ParameterType.UINT16, RawValue = value.Take(2).ToArray() };
                case "UINT32": return new Parameter() { RawType = ParameterType.UINT32, RawValue = value };
                case "TIME": return new Parameter() { RawType = ParameterType.TIME, RawValue = value };
                default: return Parameter.Null;
            }
        }

        // Builds the composite ASCII parameter used for new-value AC types longer than
        // 7 characters: old bytes + new bytes + tag (+ zero padding to the nominal length).
        // NOTE(review): Union() removes duplicate bytes, so the reconstructed value can
        // lose characters; Concat() may have been intended. Preserved as-is because
        // existing consumers may depend on the current behavior - TODO confirm.
        private Parameter CreateCompositeAcParameter(ParameterType rawType, int paddingLength)
        {
            var rawValue = OldValue.Value
                .Union(NewValue.Value)
                .Union(BitConverter.GetBytes(Convert.ToUInt16(Tag.Value)))
                .Union(new byte[paddingLength])
                .ToArray();
            return new Parameter() { RawType = rawType, RawValue = rawValue };
        }

        /// <summary>
        /// Parses the "{Index},{Base64Data}" form produced by <see cref="ToString"/>.
        /// </summary>
        /// <exception cref="ArgumentException">The payload does not decode to exactly 24 bytes.</exception>
        public static AuditLogRecord Parse(SqlString stringToParse)
        {
            var parsedAuditLogRecord = stringToParse.Value.Split(",".ToCharArray());
            var base64Bytes = Convert.FromBase64String(parsedAuditLogRecord[1]);

            if (base64Bytes.Length != 24)
            {
                throw new ArgumentException("Input must be exactly 24 bytes");
            }

            return new AuditLogRecord() { Index = ushort.Parse(parsedAuditLogRecord[0]), Data = base64Bytes };
        }

        // Round-trippable "{Index},{Base64Data}" representation (see Parse).
        public override string ToString()
        {
            return string.Format("{0},{1}", Index, Convert.ToBase64String(Data));
        }

        #endregion

        #region Serialization Methods

        // IBinarySerialize: 1 byte IsNull + 4 byte Index, then 24 data bytes unless null.
        public void Read(BinaryReader binaryReader)
        {
            IsNull = binaryReader.ReadBoolean();
            Index = binaryReader.ReadInt32();
            if (!IsNull)
            {
                Data = binaryReader.ReadBytes(24);
            }
        }

        public void Write(BinaryWriter binaryWriter)
        {
            binaryWriter.Write(IsNull);
            binaryWriter.Write(Index);
            if (!IsNull)
            {
                binaryWriter.Write(Data, 0, 24);
            }
        }

        #endregion
    }
}
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using GraphQLParser.AST;
using GraphQLParser.Exceptions;

namespace GraphQLParser;

// WARNING: mutable struct, pass it by reference to those methods that will change it
internal partial struct ParserContext
{
    // Keyword tables consumed by ExpectOneOf in the ParseXXX methods (other partial parts).

    // Names that may legally start a top-level definition.
    private static string[] TopLevelKeywordOneOf { get; set; } = new[]
    {
        "query", "mutation", "subscription", "fragment", "schema", "scalar", "type", "interface", "union", "enum", "input", "extend", "directive",
    };

    // Names that may follow the "extend" keyword.
    private static string[] TypeExtensionOneOf { get; set; } = new[]
    {
        "schema", "scalar", "type", "interface", "union", "enum", "input",
    };

    // The three GraphQL operation types.
    private static string[] OperationTypeOneOf { get; set; } = new[]
    {
        "query", "mutation", "subscription",
    };

    // Valid directive locations; split per the two spec sections below.
    private static string[] DirectiveLocationOneOf { get; set; } = new[]
    {
        // http://spec.graphql.org/June2018/#ExecutableDirectiveLocation
        "QUERY", "MUTATION", "SUBSCRIPTION", "FIELD", "FRAGMENT_DEFINITION", "FRAGMENT_SPREAD", "INLINE_FRAGMENT", "VARIABLE_DEFINITION",
        // http://spec.graphql.org/June2018/#TypeSystemDirectiveLocation
        "SCHEMA", "SCALAR", "OBJECT", "FIELD_DEFINITION", "ARGUMENT_DEFINITION", "INTERFACE", "UNION", "ENUM", "ENUM_VALUE", "INPUT_OBJECT", "INPUT_FIELD_DEFINITION",
    };

    // Callback signature used by ZeroOrMore/OneOrMore; takes the context by ref
    // because this is a mutable struct (see WARNING above).
    private delegate TResult ParseCallback<out TResult>(ref ParserContext context);

    private readonly ROM _source;                              // raw document text being parsed
    private readonly IgnoreOptions _ignoreOptions;             // which trivia (comments/locations) to discard
    private List<GraphQLComment>? _currentComments;            // comments collected since the last node
    private List<List<GraphQLComment>>? _unattachedComments;   // comment runs that never attached to a node
    private Token _currentToken;                               // lookahead token
    private Token _prevToken;                                  // last consumed token; its End closes node locations
    private readonly GraphQLDocument _document;                // document under construction
    private int _currentDepth;                                 // current recursion depth of ParseXXX calls
    private readonly int _maxDepth;                            // recursion limit guarding against stack overflow

    public ParserContext(ROM source, ParserOptions options)
    {
        _currentComments = null;
        _unattachedComments = null;
        _source = source;
        _ignoreOptions = options.Ignore;
        _currentDepth = 1;
        _maxDepth = options.MaxDepth ?? 128;
        // GraphQLDocument created here
        // should create document beforehand to use RentedMemoryTracker while parsing comments
        _document = NodeHelper.CreateGraphQLDocument(options.Ignore);
        _document.Source = source;
        // Seed both tokens with a sentinel before lexing the first real token.
        _currentToken = _prevToken = new Token
        (
            TokenKind.UNKNOWN,
            default,
            0,
            0
        );

        Advance();
    }

    // This method should be called at the beginning of each ParseXXX method.
    private void IncreaseDepth()
    {
        // Encourage compiler inlining of this method by moving exception to a separate method
        if (++_currentDepth > _maxDepth)
            ThrowMaxDepthException();
    }

    // This method should be called at the end of each ParseXXX method.
    private void DecreaseDepth() => --_currentDepth;

    [DoesNotReturn]
    private void ThrowMaxDepthException()
    {
        throw new GraphQLMaxDepthExceededException(_source, _currentToken.Start);
    }

    // Location spanning from 'start' to the end of the most recently consumed token.
    private readonly GraphQLLocation GetLocation(int start) => new(start, _prevToken.End);

    // Parses "open (item)* close"; returns null (not an empty list) when no items occur.
    private List<T>? ZeroOrMore<T>(TokenKind open, ParseCallback<T> next, TokenKind close)
        where T : ASTNode
    {
        Expect(open);

        List<T>? nodes = null;
        while (!Skip(close))
            (nodes ??= new List<T>()).Add(next(ref this));

        return nodes;
    }

    // Parses "open item (item)* close"; at least one item is required.
    private List<T> OneOrMore<T>(TokenKind open, ParseCallback<T> next, TokenKind close)
        where T : ASTNode
    {
        Expect(open);

        var nodes = new List<T> { next(ref this) };
        while (!Skip(close))
            nodes.Add(next(ref this));

        return nodes;
    }

    // Non-consuming lookahead test.
    private readonly bool Peek(TokenKind kind) => _currentToken.Kind == kind;

    // Consumes the current token iff it matches 'kind'; returns whether it did.
    private bool Skip(TokenKind kind)
    {
        // & instead of && here to get full code coverage, see https://github.com/graphql-dotnet/parser/pull/242
        Debug.Assert(kind != TokenKind.COMMENT & kind != TokenKind.UNKNOWN, "Skip should be used only with 'normal' tokens");

        bool isCurrentTokenMatching = _currentToken.Kind == kind;

        if (isCurrentTokenMatching)
        {
            Advance();
        }

        return isCurrentTokenMatching;
    }

    // Lexes the next token; 'fromParseComment' breaks recursion when called from ParseComments.
    private void Advance(bool fromParseComment = false)
    {
        // We should not advance further if we have already reached the EOF.
        if (_currentToken.Kind != TokenKind.EOF)
        {
            _prevToken = _currentToken;
            _currentToken = Lexer.Lex(_source, _currentToken.End);

            // Comments may appear everywhere
            if (!fromParseComment)
                ParseComments();
        }
    }

    // Consumes a token of the given kind or throws a syntax error.
    private void Expect(TokenKind kind, string? description = null)
    {
        if (_currentToken.Kind == kind)
        {
            Advance();
        }
        else
        {
            Throw_From_Expect(kind, description);
        }
    }

    [DoesNotReturn]
    private void Throw_From_Expect(TokenKind kind, string? description = null)
    {
        throw new GraphQLSyntaxErrorException($"Expected {Token.GetTokenKindDescription(kind)}, found {_currentToken}{description}", _source, _currentToken.Start);
    }

    // Consumes a NAME token with exactly the given value or throws a syntax error.
    private void ExpectKeyword(string keyword)
    {
        if (_currentToken.Kind == TokenKind.NAME && _currentToken.Value == keyword)
            Advance();
        else
            Throw_From_ExpectKeyword(keyword);
    }

    [DoesNotReturn]
    private void Throw_From_ExpectKeyword(string keyword)
    {
        throw new GraphQLSyntaxErrorException($"Expected \"{keyword}\", found {_currentToken}", _source, _currentToken.Start);
    }

    // Returns which of 'oneOf' the current NAME token matches, optionally consuming it;
    // throws a syntax error listing all allowed keywords when none match.
    private string ExpectOneOf(string[] oneOf, bool advance = true)
    {
        if (_currentToken.Kind == TokenKind.NAME)
        {
            string? found = IsAny(_currentToken, oneOf);
            if (found != null)
            {
                if (advance)
                    Advance();
                return found;
            }
        }

        return Throw_From_ExpectOneOf(oneOf);

        // Linear scan is fine: keyword tables are small.
        static string? IsAny(Token token, string[] oneOf)
        {
            foreach (string item in oneOf)
                if (token.Value == item)
                    return item;

            return null;
        }
    }

    [DoesNotReturn]
    private string Throw_From_ExpectOneOf(string[] oneOf)
    {
        throw new GraphQLSyntaxErrorException($"Expected \"{string.Join("/", oneOf)}\", found {_currentToken}", _source, _currentToken.Start);
    }

    [DoesNotReturn]
    private ASTNode Throw_Unexpected_Token(string? description = null)
    {
        throw new GraphQLSyntaxErrorException($"Unexpected {_currentToken}{description}", _source, _currentToken.Start);
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Internal.Cryptography.Pal.Native;
using System;
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Security.Cryptography;

using NTSTATUS = Interop.BCrypt.NTSTATUS;
using SafeBCryptKeyHandle = Microsoft.Win32.SafeHandles.SafeBCryptKeyHandle;

using static Interop.Crypt32;

namespace Internal.Cryptography.Pal
{
    /// <summary>
    /// A singleton class that encapsulates the native implementation of various X509 services. (Implementing this as a singleton makes it
    /// easier to split the class into abstract and implementation classes if desired.)
    /// </summary>
    internal sealed partial class X509Pal : IX509Pal
    {
        // BCrypt key property names used to distinguish named-curve vs. explicit-parameter EC keys.
        const string BCRYPT_ECC_CURVE_NAME_PROPERTY = "ECCCurveName";
        const string BCRYPT_ECC_PARAMETERS_PROPERTY = "ECCParameters";

        // Decodes a certificate's SubjectPublicKeyInfo into a usable AsymmetricAlgorithm
        // (ECDsa, RSA, or DSA), dispatching on the key algorithm OID.
        public AsymmetricAlgorithm DecodePublicKey(Oid oid, byte[] encodedKeyValue, byte[] encodedParameters, ICertificatePal certificatePal)
        {
            // EC keys are imported straight from the cert context (parameters live in the cert).
            if (oid.Value == Oids.Ecc && certificatePal != null)
            {
                return DecodeECDsaPublicKey((CertificatePal)certificatePal);
            }

            int algId = Interop.Crypt32.FindOidInfo(CryptOidInfoKeyType.CRYPT_OID_INFO_OID_KEY, oid.Value, OidGroup.PublicKeyAlgorithm, fallBackToAllGroups: true).AlgId;
            switch (algId)
            {
                case AlgId.CALG_RSA_KEYX:
                case AlgId.CALG_RSA_SIGN:
                    {
                        byte[] keyBlob = DecodeKeyBlob(CryptDecodeObjectStructType.CNG_RSA_PUBLIC_KEY_BLOB, encodedKeyValue);
                        CngKey cngKey = CngKey.Import(keyBlob, CngKeyBlobFormat.GenericPublicBlob);
                        return new RSACng(cngKey);
                    }
#if !uap
                case AlgId.CALG_DSS_SIGN:
                    {
                        // DSA goes through a hand-built CAPI CSP blob (see ConstructDSSPublicKeyCspBlob).
                        byte[] keyBlob = ConstructDSSPublicKeyCspBlob(encodedKeyValue, encodedParameters);
                        DSACryptoServiceProvider dsa = new DSACryptoServiceProvider();
                        dsa.ImportCspBlob(keyBlob);
                        return dsa;
                    }
#endif
                default:
                    throw new NotSupportedException(SR.NotSupported_KeyAlgorithm);
            }
        }

        // Builds an ECDsa over the certificate's public key. Named curves re-import via
        // ECParameters so the friendly curve name is preserved; explicit-parameter keys
        // use the full blob format.
        private static ECDsa DecodeECDsaPublicKey(CertificatePal certificatePal)
        {
            ECDsa ecdsa;
            using (SafeBCryptKeyHandle bCryptKeyHandle = ImportPublicKeyInfo(certificatePal.CertContext))
            {
                CngKeyBlobFormat blobFormat;
                byte[] keyBlob;
#if uap
                blobFormat = CngKeyBlobFormat.EccPublicBlob;
                keyBlob = ExportKeyBlob(bCryptKeyHandle, blobFormat);
                using (CngKey cngKey = CngKey.Import(keyBlob, blobFormat))
                {
                    ecdsa = new ECDsaCng(cngKey);
                }
#else
                string curveName = GetCurveName(bCryptKeyHandle);
                if (curveName == null)
                {
                    // No curve name: either explicit parameters or an unnamed key.
                    if (HasExplicitParameters(bCryptKeyHandle))
                    {
                        blobFormat = CngKeyBlobFormat.EccFullPublicBlob;
                    }
                    else
                    {
                        blobFormat = CngKeyBlobFormat.EccPublicBlob;
                    }
                    keyBlob = ExportKeyBlob(bCryptKeyHandle, blobFormat);
                    using (CngKey cngKey = CngKey.Import(keyBlob, blobFormat))
                    {
                        ecdsa = new ECDsaCng(cngKey);
                    }
                }
                else
                {
                    // Named curve: rebuild ECParameters and attach the curve by friendly name.
                    blobFormat = CngKeyBlobFormat.EccPublicBlob;
                    keyBlob = ExportKeyBlob(bCryptKeyHandle, blobFormat);
                    ECParameters ecparams = new ECParameters();
                    ExportNamedCurveParameters(ref ecparams, keyBlob, false);
                    ecparams.Curve = ECCurve.CreateFromFriendlyName(curveName);
                    ecdsa = new ECDsaCng();
                    ecdsa.ImportParameters(ecparams);
                }
#endif
            }
            return ecdsa;
        }

        // Imports the cert's SubjectPublicKeyInfo into a BCrypt key handle. Pins the
        // cert context via DangerousAddRef while the native call reads through its pointers.
        private static SafeBCryptKeyHandle ImportPublicKeyInfo(SafeCertContextHandle certContext)
        {
#if uap
            // CryptImportPublicKeyInfoEx2() not in the UWP api list.
            throw new PlatformNotSupportedException();
#else
            unsafe
            {
                SafeBCryptKeyHandle bCryptKeyHandle;
                bool mustRelease = false;
                certContext.DangerousAddRef(ref mustRelease);
                try
                {
                    unsafe
                    {
                        bool success = Interop.crypt32.CryptImportPublicKeyInfoEx2(CertEncodingType.X509_ASN_ENCODING, &(certContext.CertContext->pCertInfo->SubjectPublicKeyInfo), 0, null, out bCryptKeyHandle);
                        if (!success)
                            throw Marshal.GetHRForLastWin32Error().ToCryptographicException();
                        return bCryptKeyHandle;
                    }
                }
                finally
                {
                    if (mustRelease)
                        certContext.DangerousRelease();
                }
            }
#endif // uap
        }

        // Exports the key as a blob using the standard BCrypt two-call pattern
        // (first call sizes the buffer, second fills it).
        private static byte[] ExportKeyBlob(SafeBCryptKeyHandle bCryptKeyHandle, CngKeyBlobFormat blobFormat)
        {
#if uap
            // BCryptExportKey() not in the UWP api list.
            throw new PlatformNotSupportedException();
#else
            string blobFormatString = blobFormat.Format;

            int numBytesNeeded = 0;
            NTSTATUS ntStatus = Interop.BCrypt.BCryptExportKey(bCryptKeyHandle, IntPtr.Zero, blobFormatString, null, 0, out numBytesNeeded, 0);
            if (ntStatus != NTSTATUS.STATUS_SUCCESS)
                throw new CryptographicException(Interop.Kernel32.GetMessage((int)ntStatus));

            byte[] keyBlob = new byte[numBytesNeeded];
            ntStatus = Interop.BCrypt.BCryptExportKey(bCryptKeyHandle, IntPtr.Zero, blobFormatString, keyBlob, keyBlob.Length, out numBytesNeeded, 0);
            if (ntStatus != NTSTATUS.STATUS_SUCCESS)
                throw new CryptographicException(Interop.Kernel32.GetMessage((int)ntStatus));

            // Second call may report fewer bytes than allocated; trim to the actual size.
            Array.Resize(ref keyBlob, numBytesNeeded);
            return keyBlob;
#endif // uap
        }

#if !uap
        // Extracts Q (and optionally D) from an ECC key blob into ecParams.
        private static void ExportNamedCurveParameters(ref ECParameters ecParams, byte[] ecBlob, bool includePrivateParameters)
        {
            // We now have a buffer laid out as follows:
            //     BCRYPT_ECCKEY_BLOB   header
            //     byte[cbKey]          Q.X
            //     byte[cbKey]          Q.Y
            //     -- Private only --
            //     byte[cbKey]          D
            unsafe
            {
                Debug.Assert(ecBlob.Length >= sizeof(Interop.BCrypt.BCRYPT_ECCKEY_BLOB));

                fixed (byte* pEcBlob = &ecBlob[0])
                {
                    Interop.BCrypt.BCRYPT_ECCKEY_BLOB* pBcryptBlob = (Interop.BCrypt.BCRYPT_ECCKEY_BLOB*)pEcBlob;

                    int offset = sizeof(Interop.BCrypt.BCRYPT_ECCKEY_BLOB);

                    ecParams.Q = new ECPoint
                    {
                        X = Interop.BCrypt.Consume(ecBlob, ref offset, pBcryptBlob->cbKey),
                        Y = Interop.BCrypt.Consume(ecBlob, ref offset, pBcryptBlob->cbKey)
                    };

                    if (includePrivateParameters)
                    {
                        ecParams.D = Interop.BCrypt.Consume(ecBlob, ref offset, pBcryptBlob->cbKey);
                    }
                }
            }
        }
#endif

        // ASN.1-decodes a key structure using the standard two-call size/fill pattern.
        private static byte[] DecodeKeyBlob(CryptDecodeObjectStructType lpszStructType, byte[] encodedKeyValue)
        {
            int cbDecoded = 0;
            if (!Interop.crypt32.CryptDecodeObject(CertEncodingType.All, lpszStructType, encodedKeyValue, encodedKeyValue.Length, CryptDecodeObjectFlags.None, null, ref cbDecoded))
                throw Marshal.GetLastWin32Error().ToCryptographicException();

            byte[] keyBlob = new byte[cbDecoded];
            if (!Interop.crypt32.CryptDecodeObject(CertEncodingType.All, lpszStructType, encodedKeyValue, encodedKeyValue.Length, CryptDecodeObjectFlags.None, keyBlob, ref cbDecoded))
                throw Marshal.GetLastWin32Error().ToCryptographicException();

            return keyBlob;
        }

        // Hand-assembles a CAPI DSS1 public key CSP blob (PUBLICKEYSTRUC + DSSPUBKEY +
        // P, Q, G, Y, DSSSEED) from the decoded key value and parameters. Byte layout
        // and zero padding must match the native structures exactly.
        private static byte[] ConstructDSSPublicKeyCspBlob(byte[] encodedKeyValue, byte[] encodedParameters)
        {
            byte[] decodedKeyValue = DecodeDssKeyValue(encodedKeyValue);

            byte[] p, q, g;
            DecodeDssParameters(encodedParameters, out p, out q, out g);

            const byte PUBLICKEYBLOB = 0x6;
            const byte CUR_BLOB_VERSION = 2;

            int cbKey = p.Length;
            if (cbKey == 0)
                throw ErrorCode.NTE_BAD_PUBLIC_KEY.ToCryptographicException();

            const int DSS_Q_LEN = 20;
            int capacity = 8 /* sizeof(CAPI.BLOBHEADER) */ + 8 /* sizeof(CAPI.DSSPUBKEY) */ +
                        cbKey + DSS_Q_LEN + cbKey + cbKey + 24 /* sizeof(CAPI.DSSSEED) */;

            MemoryStream keyBlob = new MemoryStream(capacity);
            BinaryWriter bw = new BinaryWriter(keyBlob);

            // PUBLICKEYSTRUC
            bw.Write((byte)PUBLICKEYBLOB); // pPubKeyStruc->bType = PUBLICKEYBLOB
            bw.Write((byte)CUR_BLOB_VERSION); // pPubKeyStruc->bVersion = CUR_BLOB_VERSION
            bw.Write((short)0); // pPubKeyStruc->reserved = 0;
            bw.Write((uint)AlgId.CALG_DSS_SIGN); // pPubKeyStruc->aiKeyAlg = CALG_DSS_SIGN;

            // DSSPUBKEY
            bw.Write((int)(PubKeyMagic.DSS_MAGIC)); // pCspPubKey->magic = DSS_MAGIC; We are constructing a DSS1 Csp blob.
            bw.Write((int)(cbKey * 8)); // pCspPubKey->bitlen = cbKey * 8;

            // rgbP[cbKey]
            bw.Write(p);

            // rgbQ[20] (zero-padded up to DSS_Q_LEN)
            int cb = q.Length;
            if (cb == 0 || cb > DSS_Q_LEN)
                throw ErrorCode.NTE_BAD_PUBLIC_KEY.ToCryptographicException();
            bw.Write(q);
            if (DSS_Q_LEN > cb)
                bw.Write(new byte[DSS_Q_LEN - cb]);

            // rgbG[cbKey] (zero-padded up to cbKey)
            cb = g.Length;
            if (cb == 0 || cb > cbKey)
                throw ErrorCode.NTE_BAD_PUBLIC_KEY.ToCryptographicException();
            bw.Write(g);
            if (cbKey > cb)
                bw.Write(new byte[cbKey - cb]);

            // rgbY[cbKey] (zero-padded up to cbKey)
            cb = decodedKeyValue.Length;
            if (cb == 0 || cb > cbKey)
                throw ErrorCode.NTE_BAD_PUBLIC_KEY.ToCryptographicException();
            bw.Write(decodedKeyValue);
            if (cbKey > cb)
                bw.Write(new byte[cbKey - cb]);

            // DSSSEED: set counter to 0xFFFFFFFF to indicate not available
            bw.Write((uint)0xFFFFFFFF);
            bw.Write(new byte[20]);

            return keyBlob.ToArray();
        }

        // ASN.1-decodes the DSS public key value (Y) into raw bytes.
        private static byte[] DecodeDssKeyValue(byte[] encodedKeyValue)
        {
            unsafe
            {
                byte[] decodedKeyValue = null;

                encodedKeyValue.DecodeObject(
                    CryptDecodeObjectStructType.X509_DSS_PUBLICKEY,
                    delegate (void* pvDecoded)
                    {
                        CRYPTOAPI_BLOB* pBlob = (CRYPTOAPI_BLOB*)pvDecoded;
                        decodedKeyValue = pBlob->ToByteArray();
                    }
                );

                return decodedKeyValue;
            }
        }

        // ASN.1-decodes the DSS domain parameters into P, Q and G.
        private static void DecodeDssParameters(byte[] encodedParameters, out byte[] p, out byte[] q, out byte[] g)
        {
            // Locals are needed because out parameters cannot be captured by the delegate.
            byte[] pLocal = null;
            byte[] qLocal = null;
            byte[] gLocal = null;

            unsafe
            {
                encodedParameters.DecodeObject(
                    CryptDecodeObjectStructType.X509_DSS_PARAMETERS,
                    delegate (void* pvDecoded)
                    {
                        CERT_DSS_PARAMETERS* pCertDssParameters = (CERT_DSS_PARAMETERS*)pvDecoded;
                        pLocal = pCertDssParameters->p.ToByteArray();
                        qLocal = pCertDssParameters->q.ToByteArray();
                        gLocal = pCertDssParameters->g.ToByteArray();
                    }
                );
            }

            p = pLocal;
            q = qLocal;
            g = gLocal;
        }

        // True when the key carries explicit ECC domain parameters (vs. a named curve).
        private static bool HasExplicitParameters(SafeBCryptKeyHandle bcryptHandle)
        {
            byte[] explicitParams = GetProperty(bcryptHandle, BCRYPT_ECC_PARAMETERS_PROPERTY);
            return (explicitParams != null && explicitParams.Length > 0);
        }

        // Friendly curve name, or null when the key is not on a named curve.
        private static string GetCurveName(SafeBCryptKeyHandle bcryptHandle)
        {
            return GetPropertyAsString(bcryptHandle, BCRYPT_ECC_CURVE_NAME_PROPERTY);
        }

        // Reads a BCrypt property and interprets it as a null-terminated UTF-16 string.
        private static string GetPropertyAsString(SafeBCryptKeyHandle cryptHandle, string propertyName)
        {
            Debug.Assert(!cryptHandle.IsInvalid);
            byte[] value = GetProperty(cryptHandle, propertyName);
            if (value == null || value.Length == 0)
                return null;

            unsafe
            {
                fixed (byte* pValue = &value[0])
                {
                    string valueAsString = Marshal.PtrToStringUni((IntPtr)pValue);
                    return valueAsString;
                }
            }
        }

        // Reads a raw BCrypt property using the two-call size/fill pattern;
        // returns null on any failure rather than throwing.
        private static byte[] GetProperty(SafeBCryptKeyHandle cryptHandle, string propertyName)
        {
            Debug.Assert(!cryptHandle.IsInvalid);
            unsafe
            {
                int numBytesNeeded;
                NTSTATUS errorCode = Interop.BCrypt.BCryptGetProperty(cryptHandle, propertyName, null, 0, out numBytesNeeded, 0);
                if (errorCode != NTSTATUS.STATUS_SUCCESS)
                    return null;

                byte[] propertyValue = new byte[numBytesNeeded];
                fixed (byte* pPropertyValue = propertyValue)
                {
                    errorCode = Interop.BCrypt.BCryptGetProperty(cryptHandle, propertyName, pPropertyValue, propertyValue.Length, out numBytesNeeded, 0);
                }
                if (errorCode != NTSTATUS.STATUS_SUCCESS)
                    return null;

                Array.Resize(ref propertyValue, numBytesNeeded);
                return propertyValue;
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Collections; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Diagnostics.Contracts; using System.Text; namespace System.Net.Http.Headers { // This type is used to store a collection of headers in 'headerStore': // - A header can have multiple values. // - A header can have an associated parser which is able to parse the raw string value into a strongly typed object. // - If a header has an associated parser and the provided raw value can't be parsed, the value is considered // invalid. Invalid values are stored if added using TryAddWithoutValidation(). If the value was added using Add(), // Add() will throw FormatException. // - Since parsing header values is expensive and users usually only care about a few headers, header values are // lazily initialized. // // Given the properties above, a header value can have three states: // - 'raw': The header value was added using TryAddWithoutValidation() and it wasn't parsed yet. // - 'parsed': The header value was successfully parsed. It was either added using Add() where the value was parsed // immediately, or if added using TryAddWithoutValidation() a user already accessed a property/method triggering the // value to be parsed. // - 'invalid': The header value was parsed, but parsing failed because the value is invalid. Storing invalid values // allows users to still retrieve the value (by calling GetValues()), but it will not be exposed as strongly typed // object. E.g. 
the client receives a response with the following header: 'Via: 1.1 proxy, invalid' // - HttpHeaders.GetValues() will return "1.1 proxy", "invalid" // - HttpResponseHeaders.Via collection will only contain one ViaHeaderValue object with value "1.1 proxy" [SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix", Justification = "This is not a collection")] public abstract class HttpHeaders : IEnumerable<KeyValuePair<string, IEnumerable<string>>> { private Dictionary<string, HeaderStoreItemInfo> _headerStore; private Dictionary<string, HttpHeaderParser> _parserStore; private HashSet<string> _invalidHeaders; private enum StoreLocation { Raw, Invalid, Parsed } protected HttpHeaders() { } public void Add(string name, string value) { CheckHeaderName(name); // We don't use GetOrCreateHeaderInfo() here, since this would create a new header in the store. If parsing // the value then throws, we would have to remove the header from the store again. So just get a // HeaderStoreItemInfo object and try to parse the value. If it works, we'll add the header. HeaderStoreItemInfo info; bool addToStore; PrepareHeaderInfoForAdd(name, out info, out addToStore); ParseAndAddValue(name, info, value); // If we get here, then the value could be parsed correctly. If we created a new HeaderStoreItemInfo, add // it to the store if we added at least one value. if (addToStore && (info.ParsedValue != null)) { AddHeaderToStore(name, info); } } public void Add(string name, IEnumerable<string> values) { if (values == null) { throw new ArgumentNullException("values"); } CheckHeaderName(name); HeaderStoreItemInfo info; bool addToStore; PrepareHeaderInfoForAdd(name, out info, out addToStore); try { // Note that if the first couple of values are valid followed by an invalid value, the valid values // will be added to the store before the exception for the invalid value is thrown. 
foreach (string value in values) { ParseAndAddValue(name, info, value); } } finally { // Even if one of the values was invalid, make sure we add the header for the valid ones. We need to be // consistent here: If values get added to an _existing_ header, then all values until the invalid one // get added. Same here: If multiple values get added to a _new_ header, make sure the header gets added // with the valid values. // However, if all values for a _new_ header were invalid, then don't add the header. if (addToStore && (info.ParsedValue != null)) { AddHeaderToStore(name, info); } } } public bool TryAddWithoutValidation(string name, string value) { if (!TryCheckHeaderName(name)) { return false; } if (value == null) { // We allow empty header values. (e.g. "My-Header: "). If the user adds multiple null/empty // values, we'll just add them to the collection. This will result in delimiter-only values: // E.g. adding two null-strings (or empty, or whitespace-only) results in "My-Header: ,". value = string.Empty; } HeaderStoreItemInfo info = GetOrCreateHeaderInfo(name, false); AddValue(info, value, StoreLocation.Raw); return true; } public bool TryAddWithoutValidation(string name, IEnumerable<string> values) { if (values == null) { throw new ArgumentNullException("values"); } if (!TryCheckHeaderName(name)) { return false; } HeaderStoreItemInfo info = GetOrCreateHeaderInfo(name, false); foreach (string value in values) { // We allow empty header values. (e.g. "My-Header: "). If the user adds multiple null/empty // values, we'll just add them to the collection. This will result in delimiter-only values: // E.g. adding two null-strings (or empty, or whitespace-only) results in "My-Header: ,". AddValue(info, value ?? 
string.Empty, StoreLocation.Raw); } return true; } public void Clear() { if (_headerStore != null) { _headerStore.Clear(); } } public bool Remove(string name) { CheckHeaderName(name); if (_headerStore == null) { return false; } return _headerStore.Remove(name); } public IEnumerable<string> GetValues(string name) { CheckHeaderName(name); IEnumerable<string> values; if (!TryGetValues(name, out values)) { throw new InvalidOperationException(SR.net_http_headers_not_found); } return values; } public bool TryGetValues(string name, out IEnumerable<string> values) { if (!TryCheckHeaderName(name)) { values = null; return false; } if (_headerStore == null) { values = null; return false; } HeaderStoreItemInfo info = null; if (TryGetAndParseHeaderInfo(name, out info)) { values = GetValuesAsStrings(info); return true; } values = null; return false; } public bool Contains(string name) { CheckHeaderName(name); if (_headerStore == null) { return false; } // We can't just call headerStore.ContainsKey() since after parsing the value the header may not exist // anymore (if the value contains invalid newline chars, we remove the header). So try to parse the // header value. HeaderStoreItemInfo info = null; return TryGetAndParseHeaderInfo(name, out info); } public override string ToString() { // Return all headers as string similar to: // HeaderName1: Value1, Value2 // HeaderName2: Value1 // ... 
StringBuilder sb = new StringBuilder();
foreach (var header in this)
{
    sb.Append(header.Key);
    sb.Append(": ");
    sb.Append(this.GetHeaderString(header.Key));
    sb.Append("\r\n");
}
return sb.ToString();
}

// Yields one (name, joined-value-string) pair per stored header.
internal IEnumerable<KeyValuePair<string, string>> GetHeaderStrings()
{
    if (_headerStore == null)
    {
        yield break;
    }
    foreach (var header in _headerStore)
    {
        HeaderStoreItemInfo info = header.Value;
        string stringValue = GetHeaderString(info);
        yield return new KeyValuePair<string, string>(header.Key, stringValue);
    }
}

// Returns the named header's values joined into one string; empty string if absent.
internal string GetHeaderString(string headerName)
{
    return GetHeaderString(headerName, null);
}

// As above, but omits the value equal to 'exclude' (compared via the parser's comparer).
internal string GetHeaderString(string headerName, object exclude)
{
    HeaderStoreItemInfo info;
    if (!TryGetHeaderInfo(headerName, out info))
    {
        return string.Empty;
    }
    return GetHeaderString(info, exclude);
}

private string GetHeaderString(HeaderStoreItemInfo info)
{
    return GetHeaderString(info, null);
}

// Joins all of a header's values using the parser's separator (comma by default).
private string GetHeaderString(HeaderStoreItemInfo info, object exclude)
{
    string stringValue = string.Empty; // returned if values.Length == 0
    string[] values = GetValuesAsStrings(info, exclude);
    if (values.Length == 1)
    {
        stringValue = values[0];
    }
    else
    {
        // Note that if we get multiple values for a header that doesn't support multiple values, we'll
        // just separate the values using a comma (default separator).
        string separator = HttpHeaderParser.DefaultSeparator;
        if ((info.Parser != null) && (info.Parser.SupportsMultipleValues))
        {
            separator = info.Parser.Separator;
        }
        stringValue = string.Join(separator, values);
    }
    return stringValue;
}

#region IEnumerable<KeyValuePair<string, IEnumerable<string>>> Members

// Enumerates headers, forcing raw values to be parsed first. Headers whose values all
// turn out invalid are skipped and removed from the store after enumeration completes.
public IEnumerator<KeyValuePair<string, IEnumerable<string>>> GetEnumerator()
{
    if (_headerStore == null)
    {
        yield break;
    }
    List<string> invalidHeaders = null;
    foreach (var header in _headerStore)
    {
        HeaderStoreItemInfo info = header.Value;
        // Make sure we parse all raw values before returning the result. Note that this has to be
        // done before we calculate the array length (next line): A raw value may contain a list of
        // values.
        if (!ParseRawHeaderValues(header.Key, info, false))
        {
            // We have an invalid header value (contains invalid newline chars). Mark it as "to-be-deleted"
            // and skip this header.
            if (invalidHeaders == null)
            {
                invalidHeaders = new List<string>();
            }
            invalidHeaders.Add(header.Key);
        }
        else
        {
            string[] values = GetValuesAsStrings(info);
            yield return new KeyValuePair<string, IEnumerable<string>>(header.Key, values);
        }
    }
    // While we were enumerating headers, we also parsed header values. If during parsing it turned out that
    // the header value was invalid (contains invalid newline chars), remove the header from the store after
    // completing the enumeration.
    if (invalidHeaders != null)
    {
        Debug.Assert(_headerStore != null);
        foreach (string invalidHeader in invalidHeaders)
        {
            _headerStore.Remove(invalidHeader);
        }
    }
}

#endregion

#region IEnumerable Members

Collections.IEnumerator Collections.IEnumerable.GetEnumerator()
{
    return GetEnumerator();
}

#endregion

// One-time injection of the known-header parser table and the set of disallowed names.
internal void SetConfiguration(Dictionary<string, HttpHeaderParser> parserStore, HashSet<string> invalidHeaders)
{
    Debug.Assert(_parserStore == null, "Parser store was already set.");
    _parserStore = parserStore;
    _invalidHeaders = invalidHeaders;
}

// Appends an already-parsed (strongly typed) value to a known header.
internal void AddParsedValue(string name, object value)
{
    Contract.Requires((name != null) && (name.Length > 0));
    Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length);
    Contract.Requires(value != null);
    HeaderStoreItemInfo info = GetOrCreateHeaderInfo(name, true);
    Debug.Assert(info.Parser != null, "Can't add parsed value if there is no parser available.");
    // If the current header has only one value, we can't add another value. The strongly typed property
    // must not call AddParsedValue(), but SetParsedValue(). E.g. for headers like 'Date', 'Host'.
    Debug.Assert(info.CanAddValue, "Header '" + name + "' doesn't support multiple values");
    AddValue(info, value, StoreLocation.Parsed);
}

// Replaces all existing values of a known header with the single parsed 'value'.
internal void SetParsedValue(string name, object value)
{
    Contract.Requires((name != null) && (name.Length > 0));
    Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length);
    Contract.Requires(value != null);
    // This method will first clear all values. This is used e.g. when setting the 'Date' or 'Host' header.
    // I.e. headers not supporting collections.
    HeaderStoreItemInfo info = GetOrCreateHeaderInfo(name, true);
    Debug.Assert(info.Parser != null, "Can't add parsed value if there is no parser available.");
    info.InvalidValue = null;
    info.ParsedValue = null;
    info.RawValue = null;
    AddValue(info, value, StoreLocation.Parsed);
}

// Convenience: null removes the header, non-null replaces its value.
internal void SetOrRemoveParsedValue(string name, object value)
{
    if (value == null)
    {
        Remove(name);
    }
    else
    {
        SetParsedValue(name, value);
    }
}

// Removes one parsed value from a multi-value header; removes the whole header if that
// leaves it empty. Returns whether a matching value was found and removed.
internal bool RemoveParsedValue(string name, object value)
{
    Contract.Requires((name != null) && (name.Length > 0));
    Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length);
    Contract.Requires(value != null);
    if (_headerStore == null)
    {
        return false;
    }
    // If we have a value for this header, then verify if we have a single value. If so, compare that
    // value with 'item'. If we have a list of values, then remove 'item' from the list.
    HeaderStoreItemInfo info = null;
    if (TryGetAndParseHeaderInfo(name, out info))
    {
        Debug.Assert(info.Parser != null, "Can't add parsed value if there is no parser available.");
        Debug.Assert(info.Parser.SupportsMultipleValues,
            "This method should not be used for single-value headers. Use Remove(string) instead.");
        bool result = false;
        // If there is no entry, just return.
        if (info.ParsedValue == null)
        {
            return false;
        }
        IEqualityComparer comparer = info.Parser.Comparer;
        List<object> parsedValues = info.ParsedValue as List<object>;
        if (parsedValues == null)
        {
            Debug.Assert(info.ParsedValue.GetType() == value.GetType(),
                "Stored value does not have the same type as 'value'.");
            if (AreEqual(value, info.ParsedValue, comparer))
            {
                info.ParsedValue = null;
                result = true;
            }
        }
        else
        {
            foreach (object item in parsedValues)
            {
                Debug.Assert(item.GetType() == value.GetType(),
                    "One of the stored values does not have the same type as 'value'.");
                if (AreEqual(value, item, comparer))
                {
                    // Remove 'item' rather than 'value', since the 'comparer' may consider two values
                    // equal even though the default obj.Equals() may not (e.g. if 'comparer' does
                    // case-insensitive comparison for strings, but string.Equals() is case-sensitive).
                    result = parsedValues.Remove(item);
                    break;
                }
            }
            // If we removed the last item in a list, remove the list.
            if (parsedValues.Count == 0)
            {
                info.ParsedValue = null;
            }
        }
        // If there is no value for the header left, remove the header.
        if (info.IsEmpty)
        {
            bool headerRemoved = Remove(name);
            Debug.Assert(headerRemoved, "Existing header '" + name + "' couldn't be removed.");
        }
        return result;
    }
    return false;
}

// True if the header contains the given parsed value (compared via the parser's comparer).
internal bool ContainsParsedValue(string name, object value)
{
    Contract.Requires((name != null) && (name.Length > 0));
    Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length);
    Contract.Requires(value != null);
    if (_headerStore == null)
    {
        return false;
    }
    // If we have a value for this header, then verify if we have a single value. If so, compare that
    // value with 'item'. If we have a list of values, then compare each item in the list with 'item'.
    HeaderStoreItemInfo info = null;
    if (TryGetAndParseHeaderInfo(name, out info))
    {
        Debug.Assert(info.Parser != null, "Can't add parsed value if there is no parser available.");
        Debug.Assert(info.Parser.SupportsMultipleValues,
            "This method should not be used for single-value headers. Use equality comparer instead.");
        // If there is no entry, just return.
        if (info.ParsedValue == null)
        {
            return false;
        }
        List<object> parsedValues = info.ParsedValue as List<object>;
        IEqualityComparer comparer = info.Parser.Comparer;
        if (parsedValues == null)
        {
            Debug.Assert(info.ParsedValue.GetType() == value.GetType(),
                "Stored value does not have the same type as 'value'.");
            return AreEqual(value, info.ParsedValue, comparer);
        }
        else
        {
            foreach (object item in parsedValues)
            {
                Debug.Assert(item.GetType() == value.GetType(),
                    "One of the stored values does not have the same type as 'value'.");
                if (AreEqual(value, item, comparer))
                {
                    return true;
                }
            }
            return false;
        }
    }
    return false;
}

// Copies headers from 'sourceHeaders' (e.g. DefaultRequestHeaders) into this instance.
// Headers already present here are left untouched (no merging of value collections).
internal virtual void AddHeaders(HttpHeaders sourceHeaders)
{
    Contract.Requires(sourceHeaders != null);
    Debug.Assert(_parserStore == sourceHeaders._parserStore,
        "Can only copy headers from an instance with the same header parsers.");
    if (sourceHeaders._headerStore == null)
    {
        return;
    }
    List<string> invalidHeaders = null;
    foreach (var header in sourceHeaders._headerStore)
    {
        // Only add header values if they're not already set on the message. Note that we don't merge
        // collections: If both the default headers and the message have set some values for a certain
        // header, then we don't try to merge the values.
        if ((_headerStore == null) || (!_headerStore.ContainsKey(header.Key)))
        {
            HeaderStoreItemInfo sourceInfo = header.Value;
            // If DefaultRequestHeaders values are copied to multiple messages, it is useful to parse these
            // default header values only once. This is what we're doing here: By parsing raw headers in
            // 'sourceHeaders' before copying values to our header store.
            if (!sourceHeaders.ParseRawHeaderValues(header.Key, sourceInfo, false))
            {
                // If after trying to parse source header values no value is left (i.e. all values contain
                // invalid newline chars), mark this header as 'to-be-deleted' and skip to the next header.
                if (invalidHeaders == null)
                {
                    invalidHeaders = new List<string>();
                }
                invalidHeaders.Add(header.Key);
            }
            else
            {
                AddHeaderInfo(header.Key, sourceInfo);
            }
        }
    }
    // Headers found to be fully invalid are removed from the SOURCE store, not this one.
    if (invalidHeaders != null)
    {
        Debug.Assert(sourceHeaders._headerStore != null);
        foreach (string invalidHeader in invalidHeaders)
        {
            sourceHeaders._headerStore.Remove(invalidHeader);
        }
    }
}

// Deep-copies one header entry from a source store into this store.
private void AddHeaderInfo(string headerName, HeaderStoreItemInfo sourceInfo)
{
    HeaderStoreItemInfo destinationInfo = CreateAndAddHeaderToStore(headerName);
    Debug.Assert(sourceInfo.Parser == destinationInfo.Parser,
        "Expected same parser on both source and destination header store for header '" + headerName + "'.");
    // We have custom header values. The parsed values are strings.
    if (destinationInfo.Parser == null)
    {
        Debug.Assert((sourceInfo.RawValue == null) && (sourceInfo.InvalidValue == null),
            "No raw or invalid values expected for custom headers.");
        // Custom header values are always stored as string or list of strings.
        destinationInfo.ParsedValue = CloneStringHeaderInfoValues(sourceInfo.ParsedValue);
    }
    else
    {
        // We have a parser, so we have to copy invalid values and clone parsed values.
        // Invalid values are always strings. Strings are immutable. So we only have to clone the
        // collection (if there is one).
        destinationInfo.InvalidValue = CloneStringHeaderInfoValues(sourceInfo.InvalidValue);
        // Now clone and add parsed values (if any).
        if (sourceInfo.ParsedValue != null)
        {
            List<object> sourceValues = sourceInfo.ParsedValue as List<object>;
            if (sourceValues == null)
            {
                CloneAndAddValue(destinationInfo, sourceInfo.ParsedValue);
            }
            else
            {
                foreach (object item in sourceValues)
                {
                    CloneAndAddValue(destinationInfo, item);
                }
            }
        }
    }
}

// Adds 'source' to the destination's parsed store, cloning it when it is ICloneable.
private static void CloneAndAddValue(HeaderStoreItemInfo destinationInfo, object source)
{
    // We only have one value. Clone it and assign it to the store.
    ICloneable cloneableValue = source as ICloneable;
    if (cloneableValue != null)
    {
        AddValue(destinationInfo, cloneableValue.Clone(), StoreLocation.Parsed);
    }
    else
    {
        // If it doesn't implement ICloneable, it's a value type or an immutable type like String/Uri.
        AddValue(destinationInfo, source, StoreLocation.Parsed);
    }
}

// Copies a string-or-List<object> store value; strings are shared, lists are copied.
private static object CloneStringHeaderInfoValues(object source)
{
    if (source == null)
    {
        return null;
    }
    List<object> sourceValues = source as List<object>;
    if (sourceValues == null)
    {
        // If we just have one value, return the reference to the string (strings are immutable so it's OK
        // to use the reference).
        return source;
    }
    else
    {
        // If we have a list of strings, create a new list and copy all strings to the new list.
        return new List<object>(sourceValues);
    }
}

// Looks up the header's info record, creating it on demand. When 'parseRawValues' is
// set, raw values are parsed before the record is returned.
private HeaderStoreItemInfo GetOrCreateHeaderInfo(string name, bool parseRawValues)
{
    Contract.Requires((name != null) && (name.Length > 0));
    Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length);
    Contract.Ensures(Contract.Result<HeaderStoreItemInfo>() != null);
    HeaderStoreItemInfo result = null;
    bool found = false;
    if (parseRawValues)
    {
        found = TryGetAndParseHeaderInfo(name, out result);
    }
    else
    {
        found = TryGetHeaderInfo(name, out result);
    }
    if (!found)
    {
        result = CreateAndAddHeaderToStore(name);
    }
    return result;
}

private HeaderStoreItemInfo CreateAndAddHeaderToStore(string name)
{
    // If we don't have the header in the store yet, add it now.
    HeaderStoreItemInfo result = new HeaderStoreItemInfo(GetParser(name));
    AddHeaderToStore(name, result);
    return result;
}

// Adds the entry, lazily allocating the (case-insensitive) store dictionary.
private void AddHeaderToStore(string name, HeaderStoreItemInfo info)
{
    if (_headerStore == null)
    {
        _headerStore = new Dictionary<string, HeaderStoreItemInfo>(StringComparer.OrdinalIgnoreCase);
    }
    _headerStore.Add(name, info);
}

private bool TryGetHeaderInfo(string name, out HeaderStoreItemInfo info)
{
    if (_headerStore == null)
    {
        info = null;
        return false;
    }
    return _headerStore.TryGetValue(name, out info);
}

private bool TryGetAndParseHeaderInfo(string name, out HeaderStoreItemInfo info)
{
    if (TryGetHeaderInfo(name, out info))
    {
        return ParseRawHeaderValues(name, info, true);
    }
    return false;
}

// Parses all pending raw values of one header. Returns false when the header ends up
// with no value at all (every value contained invalid newline chars).
private bool ParseRawHeaderValues(string name, HeaderStoreItemInfo info, bool removeEmptyHeader)
{
    // Prevent multiple threads from parsing the raw value at the same time, or else we would get
    // false duplicates or false nulls.
    lock (info)
    {
        // Unlike TryGetHeaderInfo() this method tries to parse all non-validated header values (if any)
        // before returning to the caller.
        if (info.RawValue != null)
        {
            List<string> rawValues = info.RawValue as List<string>;
            if (rawValues == null)
            {
                ParseSingleRawHeaderValue(name, info);
            }
            else
            {
                ParseMultipleRawHeaderValues(name, info, rawValues);
            }
            // At this point all values are either in info.ParsedValue, info.InvalidValue, or were removed since they
            // contain invalid newline chars. Reset RawValue.
            info.RawValue = null;
            // During parsing, we removed the value since it contains invalid newline chars. Return false to indicate that
            // this is an empty header. If the caller specified to remove empty headers, we'll remove the header before
            // returning.
            if ((info.InvalidValue == null) && (info.ParsedValue == null))
            {
                if (removeEmptyHeader)
                {
                    // After parsing the raw value, no value is left because all values contain invalid newline
                    // chars.
                    Debug.Assert(_headerStore != null);
                    _headerStore.Remove(name);
                }
                return false;
            }
        }
    }
    return true;
}

// Parses each raw value of a multi-value header; custom headers only get newline checks.
private static void ParseMultipleRawHeaderValues(string name, HeaderStoreItemInfo info, List<string> rawValues)
{
    if (info.Parser == null)
    {
        foreach (string rawValue in rawValues)
        {
            if (!ContainsInvalidNewLine(rawValue, name))
            {
                AddValue(info, rawValue, StoreLocation.Parsed);
            }
        }
    }
    else
    {
        foreach (string rawValue in rawValues)
        {
            if (!TryParseAndAddRawHeaderValue(name, info, rawValue, true))
            {
                // Value could not be parsed: it was stored as 'invalid' (or dropped); just log.
                if (Logging.On) Logging.PrintWarning(Logging.Http, string.Format(System.Globalization.CultureInfo.InvariantCulture, SR.net_http_log_headers_invalid_value, name, rawValue));
            }
        }
    }
}

// Single-raw-value counterpart of ParseMultipleRawHeaderValues().
private static void ParseSingleRawHeaderValue(string name, HeaderStoreItemInfo info)
{
    string rawValue = info.RawValue as string;
    Debug.Assert(rawValue != null, "RawValue must either be List<string> or string.");
    if (info.Parser == null)
    {
        if (!ContainsInvalidNewLine(rawValue, name))
        {
            AddValue(info, rawValue, StoreLocation.Parsed);
        }
    }
    else
    {
        if (!TryParseAndAddRawHeaderValue(name, info, rawValue, true))
        {
            if (Logging.On) Logging.PrintWarning(Logging.Http, string.Format(System.Globalization.CultureInfo.InvariantCulture, SR.net_http_log_headers_invalid_value, name, rawValue));
        }
    }
}

// See Add(name, string)
internal bool TryParseAndAddValue(string name, string value)
{
    // We don't use GetOrCreateHeaderInfo() here, since this would create a new header in the store. If parsing
    // the value then throws, we would have to remove the header from the store again. So just get a
    // HeaderStoreItemInfo object and try to parse the value. If it works, we'll add the header.
    HeaderStoreItemInfo info;
    bool addToStore;
    PrepareHeaderInfoForAdd(name, out info, out addToStore);
    bool result = TryParseAndAddRawHeaderValue(name, info, value, false);
    if (result && addToStore && (info.ParsedValue != null))
    {
        // If we get here, then the value could be parsed correctly. If we created a new HeaderStoreItemInfo,
        // add it to the store if we added at least one value.
        AddHeaderToStore(name, info);
    }
    return result;
}

// See ParseAndAddValue
private static bool TryParseAndAddRawHeaderValue(string name, HeaderStoreItemInfo info, string value, bool addWhenInvalid)
{
    Contract.Requires(info != null);
    Contract.Requires(info.Parser != null);
    // Values are added as 'invalid' if we either can't parse the value OR if we already have a value
    // and the current header doesn't support multiple values: e.g. trying to add a date/time value
    // to the 'Date' header if we already have a date/time value will result in the second value being
    // added to the 'invalid' header values.
    if (!info.CanAddValue)
    {
        if (addWhenInvalid)
        {
            AddValue(info, value ?? string.Empty, StoreLocation.Invalid);
        }
        return false;
    }
    int index = 0;
    object parsedValue = null;
    if (info.Parser.TryParseValue(value, info.ParsedValue, ref index, out parsedValue))
    {
        // The raw string only represented one value (which was successfully parsed). Add the value and return.
        if ((value == null) || (index == value.Length))
        {
            if (parsedValue != null)
            {
                AddValue(info, parsedValue, StoreLocation.Parsed);
            }
            return true;
        }
        Debug.Assert(index < value.Length, "Parser must return an index value within the string length.");
        // If we successfully parsed a value, but there are more left to read, store the results in a temp
        // list. Only when all values are parsed successfully write the list to the store.
        List<object> parsedValues = new List<object>();
        if (parsedValue != null)
        {
            parsedValues.Add(parsedValue);
        }
        while (index < value.Length)
        {
            if (info.Parser.TryParseValue(value, info.ParsedValue, ref index, out parsedValue))
            {
                if (parsedValue != null)
                {
                    parsedValues.Add(parsedValue);
                }
            }
            else
            {
                if (!ContainsInvalidNewLine(value, name) && addWhenInvalid)
                {
                    AddValue(info, value, StoreLocation.Invalid);
                }
                return false;
            }
        }
        // All values were parsed correctly. Copy results to the store.
        foreach (object item in parsedValues)
        {
            AddValue(info, item, StoreLocation.Parsed);
        }
        return true;
    }
    // First segment failed to parse: store the whole raw string as 'invalid' if allowed.
    if (!ContainsInvalidNewLine(value, name) && addWhenInvalid)
    {
        AddValue(info, value ?? string.Empty, StoreLocation.Invalid);
    }
    return false;
}

// Appends 'value' to one of the three per-header stores (raw / invalid / parsed).
private static void AddValue(HeaderStoreItemInfo info, object value, StoreLocation location)
{
    // Since we have the same pattern for all three store locations (raw, invalid, parsed), we use
    // this helper method to deal with adding values:
    // - if 'null' just set the store property to 'value'
    // - if 'List<T>' append 'value' to the end of the list
    // - if 'T', i.e. we have already a value stored (but no list), create a list, add the stored value
    //   to the list and append 'value' at the end of the newly created list.
    Debug.Assert((info.Parser != null) || ((info.Parser == null) && (value.GetType() == typeof(string))),
        "If no parser is defined, then the value must be string.");
    object currentStoreValue = null;
    switch (location)
    {
        case StoreLocation.Raw:
            currentStoreValue = info.RawValue;
            AddValueToStoreValue<string>(info, value, ref currentStoreValue);
            info.RawValue = currentStoreValue;
            break;
        case StoreLocation.Invalid:
            currentStoreValue = info.InvalidValue;
            AddValueToStoreValue<string>(info, value, ref currentStoreValue);
            info.InvalidValue = currentStoreValue;
            break;
        case StoreLocation.Parsed:
            Debug.Assert((value == null) || (!(value is List<object>)),
                "Header value types must not derive from List<object> since this type is used internally to store " +
                "lists of values. So we would not be able to distinguish between a single value and a list of values.");
            currentStoreValue = info.ParsedValue;
            AddValueToStoreValue<object>(info, value, ref currentStoreValue);
            info.ParsedValue = currentStoreValue;
            break;
        default:
            Debug.Assert(false, "Unknown StoreLocation value: " + location.ToString());
            break;
    }
}

// Core single-value-or-list upgrade logic shared by all three store locations.
private static void AddValueToStoreValue<T>(HeaderStoreItemInfo info, object value, ref object currentStoreValue) where T : class
{
    // If there is no value set yet, then add current item as value (we don't create a list
    // if not required). If 'info.Value' is already assigned then make sure 'info.Value' is a
    // List<T> and append 'item' to the list.
    if (currentStoreValue == null)
    {
        currentStoreValue = value;
    }
    else
    {
        List<T> storeValues = currentStoreValue as List<T>;
        if (storeValues == null)
        {
            storeValues = new List<T>(2);
            Debug.Assert(value is T);
            storeValues.Add(currentStoreValue as T);
            currentStoreValue = storeValues;
        }
        Debug.Assert(value is T);
        storeValues.Add(value as T);
    }
}

// Since most of the time we just have 1 value, we don't create a List<object> for one value, but we change
// the return type to 'object'. The caller has to deal with the return type (object vs. List<object>). This
// is to optimize the most common scenario where a header has only one value.
// Returns the header's parsed value(s): null, a single object, or a List<object>.
internal object GetParsedValues(string name)
{
    Contract.Requires((name != null) && (name.Length > 0));
    Contract.Requires(HttpRuleParser.GetTokenLength(name, 0) == name.Length);
    HeaderStoreItemInfo info = null;
    if (!TryGetAndParseHeaderInfo(name, out info))
    {
        return null;
    }
    return info.ParsedValue;
}

// Gets the existing info record, or a detached new one ('addToStore' tells the caller
// it must add the record to the store itself once a value was successfully added).
private void PrepareHeaderInfoForAdd(string name, out HeaderStoreItemInfo info, out bool addToStore)
{
    info = null;
    addToStore = false;
    if (!TryGetAndParseHeaderInfo(name, out info))
    {
        info = new HeaderStoreItemInfo(GetParser(name));
        addToStore = true;
    }
}

// Strict (throwing) variant of TryParseAndAddRawHeaderValue: used by Add().
private void ParseAndAddValue(string name, HeaderStoreItemInfo info, string value)
{
    Contract.Requires(info != null);
    if (info.Parser == null)
    {
        // If we don't have a parser for the header, we consider the value valid if it doesn't contain
        // invalid newline characters. We add the values as "parsed value". Note that we allow empty values.
        CheckInvalidNewLine(value);
        AddValue(info, value ?? string.Empty, StoreLocation.Parsed);
        return;
    }
    // If the header only supports 1 value, we can add the current value only if there is no
    // value already set.
    if (!info.CanAddValue)
    {
        throw new FormatException(string.Format(System.Globalization.CultureInfo.InvariantCulture, SR.net_http_headers_single_value_header, name));
    }
    int index = 0;
    object parsedValue = info.Parser.ParseValue(value, info.ParsedValue, ref index);
    // The raw string only represented one value (which was successfully parsed). Add the value and return.
    // If value is null we still have to first call ParseValue() to allow the parser to decide whether null is
    // a valid value. If it is (i.e. no exception thrown), we set the parsed value (if any) and return.
    if ((value == null) || (index == value.Length))
    {
        // If the returned value is null, then it means the header accepts empty values. I.e. we don't throw
        // but we don't add 'null' to the store either.
        if (parsedValue != null)
        {
            AddValue(info, parsedValue, StoreLocation.Parsed);
        }
        return;
    }
    Debug.Assert(index < value.Length, "Parser must return an index value within the string length.");
    // If we successfully parsed a value, but there are more left to read, store the results in a temp
    // list. Only when all values are parsed successfully write the list to the store.
    List<object> parsedValues = new List<object>();
    if (parsedValue != null)
    {
        parsedValues.Add(parsedValue);
    }
    while (index < value.Length)
    {
        parsedValue = info.Parser.ParseValue(value, info.ParsedValue, ref index);
        if (parsedValue != null)
        {
            parsedValues.Add(parsedValue);
        }
    }
    // All values were parsed correctly. Copy results to the store.
    foreach (object item in parsedValues)
    {
        AddValue(info, item, StoreLocation.Parsed);
    }
}

// Looks up the parser registered for a known header; null for custom headers.
private HttpHeaderParser GetParser(string name)
{
    if (_parserStore == null)
    {
        return null;
    }
    HttpHeaderParser parser = null;
    if (_parserStore.TryGetValue(name, out parser))
    {
        return parser;
    }
    return null;
}

// Throwing header-name validation (empty, non-token chars, disallowed names).
private void CheckHeaderName(string name)
{
    if (string.IsNullOrEmpty(name))
    {
        throw new ArgumentException(SR.net_http_argument_empty_string, "name");
    }
    if (HttpRuleParser.GetTokenLength(name, 0) != name.Length)
    {
        throw new FormatException(SR.net_http_headers_invalid_header_name);
    }
    if ((_invalidHeaders != null) && (_invalidHeaders.Contains(name)))
    {
        throw new InvalidOperationException(SR.net_http_headers_not_allowed_header_name);
    }
}

// Non-throwing counterpart of CheckHeaderName().
private bool TryCheckHeaderName(string name)
{
    if (string.IsNullOrEmpty(name))
    {
        return false;
    }
    if (HttpRuleParser.GetTokenLength(name, 0) != name.Length)
    {
        return false;
    }
    if ((_invalidHeaders != null) && (_invalidHeaders.Contains(name)))
    {
        return false;
    }
    return true;
}

// Throws FormatException if the value contains invalid newline characters.
private static void CheckInvalidNewLine(string value)
{
    if (value == null)
    {
        return;
    }
    if (HttpRuleParser.ContainsInvalidNewLine(value))
    {
        throw new FormatException(SR.net_http_headers_no_newlines);
    }
}

// Logging variant: returns true (and logs) if the value contains invalid newlines.
private static bool ContainsInvalidNewLine(string value, string name)
{
    if (HttpRuleParser.ContainsInvalidNewLine(value))
    {
        if (Logging.On) Logging.PrintError(Logging.Http, string.Format(System.Globalization.CultureInfo.InvariantCulture, SR.net_http_log_headers_no_newlines, name, value));
        return true;
    }
    return false;
}

private static string[] GetValuesAsStrings(HeaderStoreItemInfo info)
{
    return GetValuesAsStrings(info, null);
}

// When doing exclusion comparison, assume raw values have been parsed.
private static string[] GetValuesAsStrings(HeaderStoreItemInfo info, object exclude)
{
    Contract.Ensures(Contract.Result<string[]>() != null);
    int length = GetValueCount(info);
    string[] values = new string[length];
    if (length > 0)
    {
        int currentIndex = 0;
        ReadStoreValues<string>(values, info.RawValue, null, null, ref currentIndex);
        ReadStoreValues<object>(values, info.ParsedValue, info.Parser, exclude, ref currentIndex);
        // Set parser parameter to 'null' for invalid values: The invalid values is always a string so we
        // don't need the parser to "serialize" the value to a string.
        ReadStoreValues<string>(values, info.InvalidValue, null, null, ref currentIndex);
        // The values array may not be full because some values were excluded
        if (currentIndex < length)
        {
            string[] trimmedValues = new string[currentIndex];
            Array.Copy(values, trimmedValues, currentIndex);
            values = trimmedValues;
        }
    }
    return values;
}

// Total number of values across the raw, invalid and parsed stores.
private static int GetValueCount(HeaderStoreItemInfo info)
{
    Contract.Requires(info != null);
    int valueCount = 0;
    UpdateValueCount<string>(info.RawValue, ref valueCount);
    UpdateValueCount<string>(info.InvalidValue, ref valueCount);
    UpdateValueCount<object>(info.ParsedValue, ref valueCount);
    return valueCount;
}

// Adds the item count of one store slot (single value or List<T>) to 'valueCount'.
private static void UpdateValueCount<T>(object valueStore, ref int valueCount)
{
    if (valueStore == null)
    {
        return;
    }
    List<T> values = valueStore as List<T>;
    if (values != null)
    {
        valueCount += values.Count;
    }
    else
    {
        valueCount++;
    }
}

// Serializes one store slot into 'values' starting at 'currentIndex', optionally
// skipping the entry equal to 'exclude' (see ShouldAdd).
private static void ReadStoreValues<T>(string[] values, object storeValue, HttpHeaderParser parser, T exclude, ref int currentIndex)
{
    Contract.Requires(values != null);
    if (storeValue != null)
    {
        List<T> storeValues = storeValue as List<T>;
        if (storeValues == null)
        {
            if (ShouldAdd<T>(storeValue, parser, exclude))
            {
                values[currentIndex] = parser == null ? storeValue.ToString() : parser.ToString(storeValue);
                currentIndex++;
            }
        }
        else
        {
            foreach (object item in storeValues)
            {
                if (ShouldAdd<T>(item, parser, exclude))
                {
                    values[currentIndex] = parser == null ? item.ToString() : parser.ToString(item);
                    currentIndex++;
                }
            }
        }
    }
}

// False only when 'storeValue' equals 'exclude' (parser comparer preferred).
private static bool ShouldAdd<T>(object storeValue, HttpHeaderParser parser, T exclude)
{
    bool add = true;
    if (parser != null && exclude != null)
    {
        if (parser.Comparer != null)
        {
            add = !parser.Comparer.Equals(exclude, storeValue);
        }
        else
        {
            add = !exclude.Equals(storeValue);
        }
    }
    return add;
}

// Equality helper that prefers the header parser's comparer over obj.Equals().
private bool AreEqual(object value, object storeValue, IEqualityComparer comparer)
{
    Contract.Requires(value != null);
    if (comparer != null)
    {
        return comparer.Equals(value, storeValue);
    }
    // We don't have a comparer, so use the Equals() method.
    return value.Equals(storeValue);
}

#region Private Classes

// Per-header record holding the raw (unparsed), invalid, and parsed value slots.
// Each slot is null, a single value, or a List<T> (see AddValueToStoreValue).
private class HeaderStoreItemInfo
{
    private object _rawValue;        // values added without validation, parsed lazily
    private object _invalidValue;    // values that failed parsing but are kept as strings
    private object _parsedValue;     // successfully parsed, strongly typed values
    private HttpHeaderParser _parser; // null for custom (unknown) headers

    internal object RawValue
    {
        get { return _rawValue; }
        set { _rawValue = value; }
    }

    internal object InvalidValue
    {
        get { return _invalidValue; }
        set { _invalidValue = value; }
    }

    internal object ParsedValue
    {
        get { return _parsedValue; }
        set { _parsedValue = value; }
    }

    internal HttpHeaderParser Parser
    {
        get { return _parser; }
    }

    internal bool CanAddValue
    {
        get
        {
            Debug.Assert(_parser != null,
                "There should be no reason to call CanAddValue if there is no parser for the current header.");
            // If the header only supports one value, and we have already a value set, then we can't add
            // another value. E.g. the 'Date' header only supports one value. We can't add multiple timestamps
            // to 'Date'.
            // So if this is a known header, ask the parser if it supports multiple values and check whether
            // we already have a (valid or invalid) value.
            // Note that we ignore the rawValue by purpose: E.g. we are parsing 2 raw values for a header only
            // supporting 1 value. When the first value gets parsed, CanAddValue returns true and we add the
            // parsed value to ParsedValue. When the second value is parsed, CanAddValue returns false, because
            // we have already a parsed value.
return ((_parser.SupportsMultipleValues) || ((_invalidValue == null) && (_parsedValue == null))); } } internal bool IsEmpty { get { return ((_rawValue == null) && (_invalidValue == null) && (_parsedValue == null)); } } internal HeaderStoreItemInfo(HttpHeaderParser parser) { // Can be null. _parser = parser; } } #endregion } }
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.Editor.CSharp.TextStructureNavigation;
using Microsoft.CodeAnalysis.Editor.Host;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Operations;
using Microsoft.VisualStudio.Utilities;
using Roslyn.Test.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.TextStructureNavigation
{
    // Tests for the C# ITextStructureNavigator implementation:
    //  - GetExtentOfWord: the "word" extent (span + significance) at a caret position,
    //  - GetSpanOfEnclosing / GetSpanOfFirstChild / GetSpanOfNextSibling /
    //    GetSpanOfPreviousSibling: syntax-tree-based span navigation.
    // Each assertion is run under both regular and script parse options (see the
    // AssertExtentAsync / TestNavigatorAsync overloads at the bottom).
    public class TextStructureNavigatorTests
    {
        [Fact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task Empty()
        {
            // An empty buffer yields an empty, insignificant extent at position 0.
            await AssertExtentAsync(
                string.Empty, pos: 0, isSignificant: false, start: 0, length: 0);
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task Whitespace()
        {
            // A run of whitespace is a single insignificant extent, regardless of
            // where inside (or at the end of) the run the caret sits.
            // NOTE(review): the literal below is three spaces — reconstructed from the
            // asserted length of 3; confirm against the original file.
            await AssertExtentAsync(
                "   ", pos: 0, isSignificant: false, start: 0, length: 3);
            await AssertExtentAsync(
                "   ", pos: 1, isSignificant: false, start: 0, length: 3);
            await AssertExtentAsync(
                "   ", pos: 3, isSignificant: false, start: 0, length: 3);
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task EndOfFile()
        {
            // At end-of-buffer the extent is the token immediately before the caret
            // (here the ';' at position 12).
            await AssertExtentAsync(
                "using System;", pos: 13, isSignificant: true, start: 12, length: 1);
        }

        [Fact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task NewLine()
        {
            // Each "\r\n" pair is treated as one insignificant extent; a caret between
            // '\r' and '\n' still maps to that pair's start.
            await AssertExtentAsync(
                "class Class1 {\r\n\r\n}", pos: 14, isSignificant: false, start: 14, length: 2);
            await AssertExtentAsync(
                "class Class1 {\r\n\r\n}", pos: 15, isSignificant: false, start: 14, length: 2);
            await AssertExtentAsync(
                "class Class1 {\r\n\r\n}", pos: 16, isSignificant: false, start: 16, length: 2);
            await AssertExtentAsync(
                "class Class1 {\r\n\r\n}", pos: 17, isSignificant: false, start: 16, length: 2);
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task SingleLineComment()
        {
            // NOTE(review): the literal below is "// Comment" followed by two trailing
            // spaces — reconstructed from the asserted length of 12; confirm against
            // the original file.
            await AssertExtentAsync(
                "// Comment  ", pos: 0, isSignificant: true, start: 0, length: 12);

            // It is important that this returns just the comment banner. Returning the
            // whole comment means Ctrl+Right before the slash will cause it to jump
            // across the entire comment
            await AssertExtentAsync(
                "// Comment  ", pos: 1, isSignificant: true, start: 0, length: 2);

            await AssertExtentAsync(
                "// Comment  ", pos: 5, isSignificant: true, start: 3, length: 7);

            await AssertExtentAsync(
                "// () test", pos: 4, isSignificant: true, start: 3, length: 2);
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task MultiLineComment()
        {
            await AssertExtentAsync(
                "/* Comment */", pos: 0, isSignificant: true, start: 0, length: 13);

            // It is important that this returns just the comment banner. Returning the
            // whole comment means Ctrl+Right before the slash will cause it to jump
            // across the entire comment
            await AssertExtentAsync(
                "/* Comment */", pos: 1, isSignificant: true, start: 0, length: 2);

            await AssertExtentAsync(
                "/* Comment */", pos: 5, isSignificant: true, start: 3, length: 7);

            await AssertExtentAsync(
                "/* () test */", pos: 4, isSignificant: true, start: 3, length: 2);

            await AssertExtentAsync(
                "/* () test */", pos: 11, isSignificant: true, start: 11, length: 2);

            // It is important that this returns just the comment banner. Returning the
            // whole comment means Ctrl+Left after the slash will cause it to jump
            // across the entire comment
            await AssertExtentAsync(
                "/* () test */", pos: 12, isSignificant: true, start: 11, length: 2);

            await AssertExtentAsync(
                "/* () test */", pos: 13, isSignificant: true, start: 11, length: 2);
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task Keyword()
        {
            // Every caret position inside (or adjacent to) the keyword "class" selects
            // the whole keyword.
            for (int i = 7; i <= 7 + 4; i++)
            {
                await AssertExtentAsync(
                    "public class Class1", pos: i, isSignificant: true, start: 7, length: 5);
            }
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task Identifier()
        {
            for (int i = 13; i <= 13 + 8; i++)
            {
                await AssertExtentAsync(
                    "public class SomeClass : IDisposable", pos: i, isSignificant: true, start: 13, length: 9);
            }
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task EscapedIdentifier()
        {
            // The '@' escape is part of the identifier extent ("@interface" = 10 chars).
            for (int i = 12; i <= 12 + 9; i++)
            {
                await AssertExtentAsync(
                    "public enum @interface : int", pos: i, isSignificant: true, start: 12, length: 10);
            }
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task Number()
        {
            // A numeric literal, including sign and exponent, is one extent.
            for (int i = 37; i <= 37 + 10; i++)
            {
                await AssertExtentAsync(
                    "class Test { private double num = -1.234678e10; }", pos: i, isSignificant: true, start: 37, length: 11);
            }
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task String()
        {
            // NOTE(review): the string literal contains two spaces before the closing
            // quote — reconstructed from the length-2 whitespace assertion below;
            // confirm against the original file.
            const string TestString = "class Test { private string s1 = \" () test  \"; }";
            int startOfString = TestString.IndexOf('"');
            int lengthOfStringIncludingQuotes = TestString.LastIndexOf('"') - startOfString + 1;

            await AssertExtentAsync(
                TestString, pos: startOfString, isSignificant: true, start: startOfString, length: 1);

            // Selects whitespace
            await AssertExtentAsync(
                TestString, pos: startOfString + 1, isSignificant: false, start: startOfString + 1, length: 1);

            await AssertExtentAsync(
                TestString, pos: startOfString + 2, isSignificant: true, start: startOfString + 2, length: 2);

            await AssertExtentAsync(
                TestString,
                pos: TestString.IndexOf(" \"", StringComparison.Ordinal),
                isSignificant: false,
                start: TestString.IndexOf(" \"", StringComparison.Ordinal),
                length: 2);

            await AssertExtentAsync(
                TestString,
                pos: TestString.LastIndexOf('"'),
                isSignificant: true,
                start: startOfString + lengthOfStringIncludingQuotes - 1,
                length: 1);

            await AssertExtentAsync(
                TestString,
                pos: TestString.LastIndexOf('"') + 1,
                isSignificant: true,
                start: TestString.LastIndexOf('"') + 1,
                length: 1);
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task InterpolatedString1()
        {
            const string TestString = "class Test { string x = \"hello\"; string s = $\" { x } hello\"; }";
            int startOfFirstString = TestString.IndexOf('"');
            int endOfFirstString = TestString.IndexOf('"', startOfFirstString + 1);
            int startOfString = TestString.IndexOf("$\"", endOfFirstString + 1, StringComparison.Ordinal);
            int lengthOfStringIncludingQuotes = TestString.LastIndexOf('"') - startOfString + 1;

            // Selects interpolated string start token
            await AssertExtentAsync(
                TestString, pos: startOfString, isSignificant: true, start: startOfString, length: 2);

            // Selects whitespace
            await AssertExtentAsync(
                TestString, pos: startOfString + 2, isSignificant: false, start: startOfString + 2, length: 1);

            // Selects the opening curly brace
            await AssertExtentAsync(
                TestString, pos: startOfString + 3, isSignificant: true, start: startOfString + 3, length: 1);

            // Selects whitespace
            await AssertExtentAsync(
                TestString, pos: startOfString + 4, isSignificant: false, start: startOfString + 4, length: 1);

            // Selects identifier
            await AssertExtentAsync(
                TestString, pos: startOfString + 5, isSignificant: true, start: startOfString + 5, length: 1);

            // Selects whitespace
            await AssertExtentAsync(
                TestString, pos: startOfString + 6, isSignificant: false, start: startOfString + 6, length: 1);

            // Selects the closing curly brace
            await AssertExtentAsync(
                TestString, pos: startOfString + 7, isSignificant: true, start: startOfString + 7, length: 1);

            // Selects whitespace
            await AssertExtentAsync(
                TestString, pos: startOfString + 8, isSignificant: false, start: startOfString + 8, length: 1);

            // Selects hello
            await AssertExtentAsync(
                TestString, pos: startOfString + 9, isSignificant: true, start: startOfString + 9, length: 5);

            // Selects closing quote
            await AssertExtentAsync(
                TestString, pos: startOfString + 14, isSignificant: true, start: startOfString + 14, length: 1);
        }

        // Runs the extent assertion under both regular and script parse options.
        private static async Task AssertExtentAsync(string code, int pos, bool isSignificant, int start, int length)
        {
            await AssertExtentAsync(code, pos, isSignificant, start, length, null);
            await AssertExtentAsync(code, pos, isSignificant, start, length, Options.Script);
        }

        // Creates a workspace for 'code', asks the navigator for the word extent at
        // 'pos', and asserts both its significance and its exact span.
        private static async Task AssertExtentAsync(string code, int pos, bool isSignificant, int start, int length, CSharpParseOptions options)
        {
            using (var workspace = await TestWorkspace.CreateCSharpAsync(code, options))
            {
                var buffer = workspace.Documents.First().GetTextBuffer();

                var provider = new TextStructureNavigatorProvider(
                    workspace.GetService<ITextStructureNavigatorSelectorService>(),
                    workspace.GetService<IContentTypeRegistryService>(),
                    workspace.GetService<IWaitIndicator>());

                var navigator = provider.CreateTextStructureNavigator(buffer);

                var extent = navigator.GetExtentOfWord(new SnapshotPoint(buffer.CurrentSnapshot, pos));
                Assert.Equal(isSignificant, extent.IsSignificant);

                var expectedSpan = new SnapshotSpan(buffer.CurrentSnapshot, start, length);
                Assert.Equal(expectedSpan, extent.Span);
            }
        }

        // Runs a navigation-function assertion under both regular and script parse options.
        private static async Task TestNavigatorAsync(
            string code,
            Func<ITextStructureNavigator, SnapshotSpan, SnapshotSpan> func,
            int startPosition,
            int startLength,
            int endPosition,
            int endLength)
        {
            await TestNavigatorAsync(code, func, startPosition, startLength, endPosition, endLength, null);
            await TestNavigatorAsync(code, func, startPosition, startLength, endPosition, endLength, Options.Script);
        }

        // Applies 'func' (a navigator operation) to the span [startPosition, +startLength)
        // and asserts the resulting span equals [endPosition, +endLength).
        private static async Task TestNavigatorAsync(
            string code,
            Func<ITextStructureNavigator, SnapshotSpan, SnapshotSpan> func,
            int startPosition,
            int startLength,
            int endPosition,
            int endLength,
            CSharpParseOptions options)
        {
            using (var workspace = await TestWorkspace.CreateCSharpAsync(code, options))
            {
                var buffer = workspace.Documents.First().GetTextBuffer();

                var provider = new TextStructureNavigatorProvider(
                    workspace.GetService<ITextStructureNavigatorSelectorService>(),
                    workspace.GetService<IContentTypeRegistryService>(),
                    workspace.GetService<IWaitIndicator>());

                var navigator = provider.CreateTextStructureNavigator(buffer);

                var actualSpan = func(navigator, new SnapshotSpan(buffer.CurrentSnapshot, startPosition, startLength));
                var expectedSpan = new SnapshotSpan(buffer.CurrentSnapshot, endPosition, endLength);
                Assert.Equal(expectedSpan, actualSpan.Span);
            }
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task GetSpanOfEnclosingTest()
        {
            // First operation returns span of 'Class1'
            await TestNavigatorAsync(
                @"class Class1 { }", (n, s) => n.GetSpanOfEnclosing(s),
                10, 0,
                6, 6);

            // Second operation returns span of 'class Class1 { }'
            await TestNavigatorAsync(
                @"class Class1 { }", (n, s) => n.GetSpanOfEnclosing(s),
                6, 6,
                0, 16);

            // Last operation does nothing
            await TestNavigatorAsync(
                @"class Class1 { }", (n, s) => n.GetSpanOfEnclosing(s),
                0, 16,
                0, 16);
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task GetSpanOfFirstChildTest()
        {
            // Go from 'class Class1 { }' to 'class'
            await TestNavigatorAsync(
                @"class Class1 { }", (n, s) => n.GetSpanOfFirstChild(s),
                0, 16,
                0, 5);

            // Next operation should do nothing as we're at the bottom
            await TestNavigatorAsync(
                @"class Class1 { }", (n, s) => n.GetSpanOfFirstChild(s),
                0, 5,
                0, 5);
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task GetSpanOfNextSiblingTest()
        {
            // Go from 'class' to 'Class1'
            await TestNavigatorAsync(
                @"class Class1 { }", (n, s) => n.GetSpanOfNextSibling(s),
                0, 5,
                6, 6);
        }

        [WpfFact, Trait(Traits.Feature, Traits.Features.TextStructureNavigator)]
        public async Task GetSpanOfPreviousSiblingTest()
        {
            // Go from '{' to 'Class1'
            await TestNavigatorAsync(
                @"class Class1 { }", (n, s) => n.GetSpanOfPreviousSibling(s),
                13, 1,
                6, 6);
        }
    }
}
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */
using System.Collections.Generic;
using System.Text;
using erl.Oracle.TnsNames.Antlr4.Runtime;
using erl.Oracle.TnsNames.Antlr4.Runtime.Atn;
using erl.Oracle.TnsNames.Antlr4.Runtime.Misc;
using erl.Oracle.TnsNames.Antlr4.Runtime.Sharpen;
using erl.Oracle.TnsNames.Antlr4.Runtime.Tree;

namespace erl.Oracle.TnsNames.Antlr4.Runtime
{
    /// <summary>A rule context is a record of a single rule invocation.</summary>
    /// <remarks>
    /// A rule context is a record of a single rule invocation. It knows
    /// which context invoked it, if any. If there is no parent context, then
    /// naturally the invoking state is not valid. The parent link
    /// provides a chain upwards from the current rule invocation to the root
    /// of the invocation tree, forming a stack. We actually carry no
    /// information about the rule associated with this context (except
    /// when parsing). We keep only the state number of the invoking state from
    /// the ATN submachine that invoked this. Contrast this with the s
    /// pointer inside ParserRuleContext that tracks the current state
    /// being "executed" for the current rule.
    /// The parent contexts are useful for computing lookahead sets and
    /// getting error information.
    /// These objects are used during parsing and prediction.
    /// For the special case of parsers, we use the subclass
    /// ParserRuleContext.
    /// </remarks>
    /// <seealso cref="ParserRuleContext"/>
    public class RuleContext : IRuleNode
    {
        /// <summary>What context invoked this rule?</summary>
        private erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext _parent;

        /// <summary>
        /// What state invoked the rule associated with this context?
        /// The "return address" is the followState of invokingState
        /// If parent is null, this should be -1.
        /// </summary>
        /// <remarks>
        /// What state invoked the rule associated with this context?
        /// The "return address" is the followState of invokingState
        /// If parent is null, this should be -1.
        /// </remarks>
        public int invokingState = -1;

        public RuleContext()
        {
        }

        public RuleContext(erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext parent, int invokingState)
        {
            this._parent = parent;
            //if ( parent!=null ) System.out.println("invoke "+stateNumber+" from "+parent);
            this.invokingState = invokingState;
        }

        public static erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext GetChildContext(erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext parent, int invokingState)
        {
            return new erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext(parent, invokingState);
        }

        // Number of contexts on the parent chain, counting this one
        // (a root context therefore has depth 1).
        public virtual int Depth()
        {
            int n = 0;
            erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext p = this;
            while (p != null)
            {
                p = p._parent;
                n++;
            }
            return n;
        }

        /// <summary>
        /// A context is empty if there is no invoking state; meaning nobody call
        /// current context.
        /// </summary>
        /// <remarks>
        /// A context is empty if there is no invoking state; meaning nobody call
        /// current context.
        /// </remarks>
        public virtual bool IsEmpty
        {
            get
            {
                return invokingState == -1;
            }
        }

        public virtual Interval SourceInterval
        {
            get
            {
                // satisfy the ParseTree / SyntaxTree interface
                return Interval.Invalid;
            }
        }

        RuleContext IRuleNode.RuleContext
        {
            get
            {
                return this;
            }
        }

        public virtual erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext Parent
        {
            get
            {
                return _parent;
            }
            set
            {
                _parent = value;
            }
        }

        // Explicit interface implementations below all forward to Parent so the
        // various tree interfaces (IRuleNode / IParseTree / ITree) agree.
        IRuleNode IRuleNode.Parent
        {
            get
            {
                return Parent;
            }
        }

        IParseTree IParseTree.Parent
        {
            get
            {
                return Parent;
            }
        }

        ITree ITree.Parent
        {
            get
            {
                return Parent;
            }
        }

        public virtual erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext Payload
        {
            get
            {
                return this;
            }
        }

        object ITree.Payload
        {
            get
            {
                return Payload;
            }
        }

        /// <summary>Return the combined text of all child nodes.</summary>
        /// <remarks>
        /// Return the combined text of all child nodes. This method only considers
        /// tokens which have been added to the parse tree.
        /// <p/>
        /// Since tokens on hidden channels (e.g. whitespace or comments) are not
        /// added to the parse trees, they will not appear in the output of this
        /// method.
        /// </remarks>
        public virtual string GetText()
        {
            if (ChildCount == 0)
            {
                return string.Empty;
            }
            StringBuilder builder = new StringBuilder();
            for (int i = 0; i < ChildCount; i++)
            {
                builder.Append(GetChild(i).GetText());
            }
            return builder.ToString();
        }

        // Base class has no rule association; subclasses generated by the tool override this.
        public virtual int RuleIndex
        {
            get
            {
                return -1;
            }
        }

        /* For rule associated with this parse tree internal node, return
         * the outer alternative number used to match the input. Default
         * implementation does not compute nor store this alt num. Create
         * a subclass of ParserRuleContext with backing field and set
         * option contextSuperClass.
         * to set it.
         */
        public virtual int getAltNumber()
        {
            return Atn.ATN.INVALID_ALT_NUMBER;
        }

        /* Set the outer alternative number for this context node. Default
         * implementation does nothing to avoid backing field overhead for
         * trees that don't need it. Create
         * a subclass of ParserRuleContext with backing field and set
         * option contextSuperClass.
         */
        public virtual void setAltNumber(int altNumber)
        {
        }

        // The base context is a leaf from the tree API's point of view:
        // no children (see ChildCount below), so GetChild always returns null here.
        public virtual IParseTree GetChild(int i)
        {
            return null;
        }

        ITree ITree.GetChild(int i)
        {
            return GetChild(i);
        }

        public virtual int ChildCount
        {
            get
            {
                return 0;
            }
        }

        public virtual T Accept<T>(IParseTreeVisitor<T> visitor)
        {
            return visitor.VisitChildren(this);
        }

        /// <summary>
        /// Print out a whole tree, not just a node, in LISP format
        /// (root child1 ..
        /// </summary>
        /// <remarks>
        /// Print out a whole tree, not just a node, in LISP format
        /// (root child1 .. childN). Print just a node if this is a leaf.
        /// We have to know the recognizer so we can get rule names.
        /// </remarks>
        public virtual string ToStringTree(Parser recog)
        {
            return Trees.ToStringTree(this, recog);
        }

        /// <summary>
        /// Print out a whole tree, not just a node, in LISP format
        /// (root child1 ..
        /// </summary>
        /// <remarks>
        /// Print out a whole tree, not just a node, in LISP format
        /// (root child1 .. childN). Print just a node if this is a leaf.
        /// </remarks>
        public virtual string ToStringTree(IList<string> ruleNames)
        {
            return Trees.ToStringTree(this, ruleNames);
        }

        public virtual string ToStringTree()
        {
            return ToStringTree((IList<string>)null);
        }

        public override string ToString()
        {
            return ToString((IList<string>)null, (erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext)null);
        }

        public string ToString(IRecognizer recog)
        {
            return ToString(recog, ParserRuleContext.EmptyContext);
        }

        public string ToString(IList<string> ruleNames)
        {
            return ToString(ruleNames, null);
        }

        // recog null unless ParserRuleContext, in which case we use subclass toString(...)
        public virtual string ToString(IRecognizer recog, erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext stop)
        {
            string[] ruleNames = recog != null ? recog.RuleNames : null;
            IList<string> ruleNamesList = ruleNames != null ? Arrays.AsList(ruleNames) : null;
            return ToString(ruleNamesList, stop);
        }

        // Renders the invocation stack from this context up to (but not including)
        // 'stop', as "[a b c]". With rule names available, each frame prints its
        // rule name; otherwise the invoking state number is printed.
        public virtual string ToString(IList<string> ruleNames, erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext stop)
        {
            StringBuilder buf = new StringBuilder();
            erl.Oracle.TnsNames.Antlr4.Runtime.RuleContext p = this;
            buf.Append("[");
            while (p != null && p != stop)
            {
                if (ruleNames == null)
                {
                    if (!p.IsEmpty)
                    {
                        buf.Append(p.invokingState);
                    }
                }
                else
                {
                    int ruleIndex = p.RuleIndex;
                    string ruleName = ruleIndex >= 0 && ruleIndex < ruleNames.Count ? ruleNames[ruleIndex] : ruleIndex.ToString();
                    buf.Append(ruleName);
                }
                if (p.Parent != null && (ruleNames != null || !p.Parent.IsEmpty))
                {
                    buf.Append(" ");
                }
                p = p.Parent;
            }
            buf.Append("]");
            return buf.ToString();
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections; using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Text.Json.Serialization; using System.Text.Json.Serialization.Converters; namespace System.Text.Json { [DebuggerDisplay("ClassType.{ClassType}, {Type.Name}")] internal sealed partial class JsonClassInfo { // The length of the property name embedded in the key (in bytes). // The key is a ulong (8 bytes) containing the first 7 bytes of the property name // followed by a byte representing the length. private const int PropertyNameKeyLength = 7; // The limit to how many property names from the JSON are cached in _propertyRefsSorted before using PropertyCache. private const int PropertyNameCountCacheThreshold = 64; // All of the serializable properties on a POCO (except the optional extension property) keyed on property name. public volatile Dictionary<string, JsonPropertyInfo> PropertyCache; // Serializable runtime/polymorphic properties, keyed on property and runtime type. public ConcurrentDictionary<(JsonPropertyInfo, Type), JsonPropertyInfo> RuntimePropertyCache; // All of the serializable properties on a POCO including the optional extension property. // Used for performance during serialization instead of 'PropertyCache' above. public volatile JsonPropertyInfo[] PropertyCacheArray; // Fast cache of properties by first JSON ordering; may not contain all properties. Accessed before PropertyCache. // Use an array (instead of List<T>) for highest performance. 
private volatile PropertyRef[] _propertyRefsSorted; public delegate object ConstructorDelegate(); public ConstructorDelegate CreateObject { get; private set; } public ClassType ClassType { get; private set; } public JsonPropertyInfo DataExtensionProperty { get; private set; } // If enumerable, the JsonClassInfo for the element type. private JsonClassInfo _elementClassInfo; /// <summary> /// Return the JsonClassInfo for the element type, or null if the type is not an enumerable or dictionary. /// </summary> /// <remarks> /// This should not be called during warm-up (initial creation of JsonClassInfos) to avoid recursive behavior /// which could result in a StackOverflowException. /// </remarks> public JsonClassInfo ElementClassInfo { get { if (_elementClassInfo == null && ElementType != null) { Debug.Assert(ClassType == ClassType.Enumerable || ClassType == ClassType.Dictionary); _elementClassInfo = Options.GetOrAddClass(ElementType); } return _elementClassInfo; } } public Type ElementType { get; set; } public JsonSerializerOptions Options { get; private set; } public Type Type { get; private set; } public void UpdateSortedPropertyCache(ref ReadStackFrame frame) { Debug.Assert(frame.PropertyRefCache != null); // frame.PropertyRefCache is only read\written by a single thread -- the thread performing // the deserialization for a given object instance. List<PropertyRef> listToAppend = frame.PropertyRefCache; // _propertyRefsSorted can be accessed by multiple threads, so replace the reference when // appending to it. No lock() is necessary. if (_propertyRefsSorted != null) { List<PropertyRef> replacementList = new List<PropertyRef>(_propertyRefsSorted); Debug.Assert(replacementList.Count <= PropertyNameCountCacheThreshold); // Verify replacementList will not become too large. 
while (replacementList.Count + listToAppend.Count > PropertyNameCountCacheThreshold) { // This code path is rare; keep it simple by using RemoveAt() instead of RemoveRange() which requires calculating index\count. listToAppend.RemoveAt(listToAppend.Count - 1); } // Add the new items; duplicates are possible but that is tolerated during property lookup. replacementList.AddRange(listToAppend); _propertyRefsSorted = replacementList.ToArray(); } else { _propertyRefsSorted = listToAppend.ToArray(); } frame.PropertyRefCache = null; } public JsonClassInfo(Type type, JsonSerializerOptions options) { Type = type; Options = options; ClassType = GetClassType( type, parentClassType: type, propertyInfo: null, out Type runtimeType, out Type elementType, out Type nullableUnderlyingType, out MethodInfo addMethod, out JsonConverter converter, checkForAddMethod: true, options); // Ignore properties on enumerable. switch (ClassType) { case ClassType.Object: { CreateObject = options.MemberAccessorStrategy.CreateConstructor(type); PropertyInfo[] properties = type.GetProperties(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); Dictionary<string, JsonPropertyInfo> cache = CreatePropertyCache(properties.Length); foreach (PropertyInfo propertyInfo in properties) { // Ignore indexers if (propertyInfo.GetIndexParameters().Length > 0) { continue; } // For now we only support public getters\setters if (propertyInfo.GetMethod?.IsPublic == true || propertyInfo.SetMethod?.IsPublic == true) { JsonPropertyInfo jsonPropertyInfo = AddProperty(propertyInfo.PropertyType, propertyInfo, type, options); Debug.Assert(jsonPropertyInfo != null); // If the JsonPropertyNameAttribute or naming policy results in collisions, throw an exception. 
if (!JsonHelpers.TryAdd(cache, jsonPropertyInfo.NameAsString, jsonPropertyInfo)) { JsonPropertyInfo other = cache[jsonPropertyInfo.NameAsString]; if (other.ShouldDeserialize == false && other.ShouldSerialize == false) { // Overwrite the one just added since it has [JsonIgnore]. cache[jsonPropertyInfo.NameAsString] = jsonPropertyInfo; } else if (jsonPropertyInfo.ShouldDeserialize == true || jsonPropertyInfo.ShouldSerialize == true) { ThrowHelper.ThrowInvalidOperationException_SerializerPropertyNameConflict(this, jsonPropertyInfo); } // else ignore jsonPropertyInfo since it has [JsonIgnore]. } } } JsonPropertyInfo[] cacheArray; if (DetermineExtensionDataProperty(cache)) { // Remove from cache since it is handled independently. cache.Remove(DataExtensionProperty.NameAsString); cacheArray = new JsonPropertyInfo[cache.Count + 1]; // Set the last element to the extension property. cacheArray[cache.Count] = DataExtensionProperty; } else { cacheArray = new JsonPropertyInfo[cache.Count]; } // Set fields when finished to avoid concurrency issues. 
PropertyCache = cache; cache.Values.CopyTo(cacheArray, 0); PropertyCacheArray = cacheArray; } break; case ClassType.Enumerable: case ClassType.Dictionary: { ElementType = elementType; AddItemToObject = addMethod; PolicyProperty = CreatePolicyProperty(type, runtimeType, elementType, nullableUnderlyingType, converter: null, ClassType, options); CreateObject = options.MemberAccessorStrategy.CreateConstructor(PolicyProperty.RuntimePropertyType); } break; case ClassType.Value: { CreateObject = options.MemberAccessorStrategy.CreateConstructor(type); PolicyProperty = CreatePolicyProperty(type, runtimeType, elementType: null, nullableUnderlyingType, converter, ClassType, options); } break; case ClassType.Unknown: { CreateObject = options.MemberAccessorStrategy.CreateConstructor(type); PolicyProperty = CreatePolicyProperty(type, runtimeType, elementType: null, nullableUnderlyingType, converter, ClassType, options); PropertyCache = new Dictionary<string, JsonPropertyInfo>(); PropertyCacheArray = Array.Empty<JsonPropertyInfo>(); } break; default: Debug.Fail($"Unexpected class type: {ClassType}"); break; } } private bool DetermineExtensionDataProperty(Dictionary<string, JsonPropertyInfo> cache) { JsonPropertyInfo jsonPropertyInfo = GetPropertyWithUniqueAttribute(typeof(JsonExtensionDataAttribute), cache); if (jsonPropertyInfo != null) { Type declaredPropertyType = jsonPropertyInfo.DeclaredPropertyType; if (!typeof(IDictionary<string, JsonElement>).IsAssignableFrom(declaredPropertyType) && !typeof(IDictionary<string, object>).IsAssignableFrom(declaredPropertyType)) { ThrowHelper.ThrowInvalidOperationException_SerializationDataExtensionPropertyInvalid(this, jsonPropertyInfo); } DataExtensionProperty = jsonPropertyInfo; return true; } return false; } private JsonPropertyInfo GetPropertyWithUniqueAttribute(Type attributeType, Dictionary<string, JsonPropertyInfo> cache) { JsonPropertyInfo property = null; foreach (JsonPropertyInfo jsonPropertyInfo in cache.Values) { Attribute 
attribute = jsonPropertyInfo.PropertyInfo.GetCustomAttribute(attributeType); if (attribute != null) { if (property != null) { ThrowHelper.ThrowInvalidOperationException_SerializationDuplicateTypeAttribute(Type, attributeType); } property = jsonPropertyInfo; } } return property; } // AggressiveInlining used although a large method it is only called from one location and is on a hot path. [MethodImpl(MethodImplOptions.AggressiveInlining)] public JsonPropertyInfo GetProperty(ReadOnlySpan<byte> propertyName, ref ReadStackFrame frame) { JsonPropertyInfo info = null; // Keep a local copy of the cache in case it changes by another thread. PropertyRef[] localPropertyRefsSorted = _propertyRefsSorted; ulong key = GetKey(propertyName); // If there is an existing cache, then use it. if (localPropertyRefsSorted != null) { // Start with the current property index, and then go forwards\backwards. int propertyIndex = frame.PropertyIndex; int count = localPropertyRefsSorted.Length; int iForward = Math.Min(propertyIndex, count); int iBackward = iForward - 1; while (true) { if (iForward < count) { PropertyRef propertyRef = localPropertyRefsSorted[iForward]; if (TryIsPropertyRefEqual(propertyRef, propertyName, key, ref info)) { return info; } ++iForward; if (iBackward >= 0) { propertyRef = localPropertyRefsSorted[iBackward]; if (TryIsPropertyRefEqual(propertyRef, propertyName, key, ref info)) { return info; } --iBackward; } } else if (iBackward >= 0) { PropertyRef propertyRef = localPropertyRefsSorted[iBackward]; if (TryIsPropertyRefEqual(propertyRef, propertyName, key, ref info)) { return info; } --iBackward; } else { // Property was not found. break; } } } // No cached item was found. Try the main list which has all of the properties. 
// Fall back to a full dictionary lookup (exact or case-insensitive, depending on options).
string stringPropertyName = JsonHelpers.Utf8GetString(propertyName);
Debug.Assert(PropertyCache != null);
if (!PropertyCache.TryGetValue(stringPropertyName, out info))
{
    // Sentinel meaning "no such property"; callers check for it explicitly.
    info = JsonPropertyInfo.s_missingProperty;
}
Debug.Assert(info != null);

// Three code paths to get here:
// 1) info == s_missingProperty. Property not found.
// 2) key == info.PropertyNameKey. Exact match found.
// 3) key != info.PropertyNameKey. Match found due to case insensitivity.
Debug.Assert(info == JsonPropertyInfo.s_missingProperty || key == info.PropertyNameKey || Options.PropertyNameCaseInsensitive);

// Check if we should add this to the cache.
// Only cache up to a threshold length and then just use the dictionary when an item is not found in the cache.
int cacheCount = 0;
if (localPropertyRefsSorted != null)
{
    cacheCount = localPropertyRefsSorted.Length;
}

// Do a quick check for the stable (after warm-up) case.
if (cacheCount < PropertyNameCountCacheThreshold)
{
    // Do a slower check for the warm-up case.
    if (frame.PropertyRefCache != null)
    {
        cacheCount += frame.PropertyRefCache.Count;
    }

    // Check again to append the cache up to the threshold.
    if (cacheCount < PropertyNameCountCacheThreshold)
    {
        if (frame.PropertyRefCache == null)
        {
            frame.PropertyRefCache = new List<PropertyRef>();
        }

        PropertyRef propertyRef = new PropertyRef(key, info);
        frame.PropertyRefCache.Add(propertyRef);
    }
}

return info;
}

// Builds the name->property dictionary, honoring the configured case sensitivity.
private Dictionary<string, JsonPropertyInfo> CreatePropertyCache(int capacity)
{
    StringComparer comparer;
    if (Options.PropertyNameCaseInsensitive)
    {
        comparer = StringComparer.OrdinalIgnoreCase;
    }
    else
    {
        comparer = StringComparer.Ordinal;
    }

    return new Dictionary<string, JsonPropertyInfo>(capacity, comparer);
}

public JsonPropertyInfo PolicyProperty { get; private set; }

public MethodInfo AddItemToObject { get; private set; }

// Fast-path comparison: the 7-byte/length key filters most mismatches; only
// names longer than the key length need a full byte comparison.
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static bool TryIsPropertyRefEqual(in PropertyRef propertyRef, ReadOnlySpan<byte> propertyName, ulong key, ref JsonPropertyInfo info)
{
    if (key == propertyRef.Key)
    {
        // We compare the whole name, although we could skip the first 7 bytes (but it's not any faster)
        if (propertyName.Length <= PropertyNameKeyLength ||
            propertyName.SequenceEqual(propertyRef.Info.Name))
        {
            info = propertyRef.Info;
            return true;
        }
    }

    return false;
}

/// <summary>
/// Get a key from the property name.
/// The key consists of the first 7 bytes of the property name and then the length.
/// </summary>
public static ulong GetKey(ReadOnlySpan<byte> propertyName)
{
    const int BitsInByte = 8;
    ulong key;
    int length = propertyName.Length;

    if (length > 7)
    {
        key = MemoryMarshal.Read<ulong>(propertyName);

        // Max out the length byte.
        // This will cause the comparison logic to always test for equality against the full contents
        // when the first 7 bytes are the same.
        key |= 0xFF00000000000000;

        // It is also possible to include the length up to 0xFF in order to prevent false positives
        // when the first 7 bytes match but a different length (up to 0xFF). However the extra logic
        // slows key generation in the majority of cases:
        // key &= 0x00FFFFFFFFFFFFFF;
        // key |= (ulong) 7 << Math.Max(length, 0xFF);
    }
    else if (length > 3)
    {
        key = MemoryMarshal.Read<uint>(propertyName);

        if (length == 7)
        {
            key |= (ulong)propertyName[6] << (6 * BitsInByte)
                | (ulong)propertyName[5] << (5 * BitsInByte)
                | (ulong)propertyName[4] << (4 * BitsInByte)
                | (ulong)7 << (7 * BitsInByte);
        }
        else if (length == 6)
        {
            key |= (ulong)propertyName[5] << (5 * BitsInByte)
                | (ulong)propertyName[4] << (4 * BitsInByte)
                | (ulong)6 << (7 * BitsInByte);
        }
        else if (length == 5)
        {
            key |= (ulong)propertyName[4] << (4 * BitsInByte)
                | (ulong)5 << (7 * BitsInByte);
        }
        else
        {
            key |= (ulong)4 << (7 * BitsInByte);
        }
    }
    else if (length > 1)
    {
        key = MemoryMarshal.Read<ushort>(propertyName);

        if (length == 3)
        {
            key |= (ulong)propertyName[2] << (2 * BitsInByte)
                | (ulong)3 << (7 * BitsInByte);
        }
        else
        {
            key |= (ulong)2 << (7 * BitsInByte);
        }
    }
    else if (length == 1)
    {
        key = propertyName[0] | (ulong)1 << (7 * BitsInByte);
    }
    else
    {
        // An empty name is valid.
        key = 0;
    }

    // Verify key contains the embedded bytes as expected.
    Debug.Assert(
        (length < 1 || propertyName[0] == ((key & ((ulong)0xFF << 8 * 0)) >> 8 * 0)) &&
        (length < 2 || propertyName[1] == ((key & ((ulong)0xFF << 8 * 1)) >> 8 * 1)) &&
        (length < 3 || propertyName[2] == ((key & ((ulong)0xFF << 8 * 2)) >> 8 * 2)) &&
        (length < 4 || propertyName[3] == ((key & ((ulong)0xFF << 8 * 3)) >> 8 * 3)) &&
        (length < 5 || propertyName[4] == ((key & ((ulong)0xFF << 8 * 4)) >> 8 * 4)) &&
        (length < 6 || propertyName[5] == ((key & ((ulong)0xFF << 8 * 5)) >> 8 * 5)) &&
        (length < 7 || propertyName[6] == ((key & ((ulong)0xFF << 8 * 6)) >> 8 * 6)));

    return key;
}
// This method gets the runtime information for a given type or property.
// The runtime information consists of the following:
// - class type,
// - runtime type,
// - element type (if the type is a collection),
// - the underlying type (if the type is a nullable type e.g. int?),
// - the "add" method (if the type is a non-dictionary collection which doesn't implement IList
//   e.g. typeof(Stack<int>), where we retrieve the void Push(string) method), and
// - the converter (either native or custom), if one exists.
public static ClassType GetClassType(
    Type type,
    Type parentClassType,
    PropertyInfo propertyInfo,
    out Type runtimeType,
    out Type elementType,
    out Type nullableUnderlyingType,
    out MethodInfo addMethod,
    out JsonConverter converter,
    bool checkForAddMethod,
    JsonSerializerOptions options)
{
    Debug.Assert(type != null);

    runtimeType = type;

    nullableUnderlyingType = Nullable.GetUnderlyingType(type);

    // Type is nullable e.g. typeof(int?).
    if (nullableUnderlyingType != null)
    {
        // Check if there's a converter for this nullable type, e.g. do we have a converter that implements
        // JsonConverter<int?> if the type is typeof(int?)?
        converter = options.DetermineConverterForProperty(parentClassType, type, propertyInfo);

        if (converter == null)
        {
            // No converter. We'll check below if there's a converter for the non-nullable type e.g.
            // one that implements JsonConverter<int>, given the type is typeof(int?).
            type = nullableUnderlyingType;
        }
        else
        {
            elementType = default;
            addMethod = default;
            // Don't treat the type as a Nullable when creating the property info later on, since we have a converter for it.
            nullableUnderlyingType = default;
            return ClassType.Value;
        }
    }

    converter = options.DetermineConverterForProperty(parentClassType, type, propertyInfo);
    if (converter != null)
    {
        elementType = default;
        addMethod = default;
        return type == typeof(object) ? ClassType.Unknown : ClassType.Value;
    }

    runtimeType = type;

    // Anything that is not enumerable is treated as a plain object.
    if (!(typeof(IEnumerable)).IsAssignableFrom(type))
    {
        elementType = null;
        addMethod = default;
        return ClassType.Object;
    }

    if (type.IsArray)
    {
        elementType = type.GetElementType();
        addMethod = default;
        return ClassType.Enumerable;
    }

    // Exact interface types get a concrete runtime type assigned up front.
    if (type.FullName.StartsWith("System.Collections.Generic.IEnumerable`1"))
    {
        elementType = type.GetGenericArguments()[0];
        runtimeType = typeof(List<>).MakeGenericType(elementType);
        addMethod = default;
        return ClassType.Enumerable;
    }
    else if (type.FullName.StartsWith("System.Collections.Generic.IDictionary`2") ||
        type.FullName.StartsWith("System.Collections.Generic.IReadOnlyDictionary`2"))
    {
        Type[] genericTypes = type.GetGenericArguments();

        elementType = genericTypes[1];
        runtimeType = typeof(Dictionary<,>).MakeGenericType(genericTypes[0], elementType);
        addMethod = default;
        return ClassType.Dictionary;
    }

    // Types that implement a generic dictionary interface (rather than being one).
    {
        Type genericIDictionaryType = type.GetInterface("System.Collections.Generic.IDictionary`2") ?? type.GetInterface("System.Collections.Generic.IReadOnlyDictionary`2");
        if (genericIDictionaryType != null)
        {
            Type[] genericTypes = genericIDictionaryType.GetGenericArguments();
            elementType = genericTypes[1];
            addMethod = default;
            if (type.IsInterface)
            {
                Type concreteDictionaryType = typeof(Dictionary<,>).MakeGenericType(genericTypes[0], genericTypes[1]);
                if (type.IsAssignableFrom(concreteDictionaryType))
                {
                    runtimeType = concreteDictionaryType;
                }
            }

            return ClassType.Dictionary;
        }
    }

    // Non-generic dictionaries; element type is unknown so use object.
    if (typeof(IDictionary).IsAssignableFrom(type))
    {
        elementType = typeof(object);
        addMethod = default;
        if (type.IsInterface)
        {
            Type concreteDictionaryType = typeof(Dictionary<string, object>);
            if (type.IsAssignableFrom(concreteDictionaryType))
            {
                runtimeType = concreteDictionaryType;
            }
        }

        return ClassType.Dictionary;
    }

    // Determine the element type for the remaining (enumerable) cases.
    {
        Type genericIEnumerableType = type.GetInterface("System.Collections.Generic.IEnumerable`1");
        if (genericIEnumerableType != null)
        {
            elementType = genericIEnumerableType.GetGenericArguments()[0];
        }
        else
        {
            elementType = typeof(object);
        }
    }

    if (typeof(IList).IsAssignableFrom(type))
    {
        addMethod = default;
        if (type.IsInterface)
        {
            Type concreteListType = typeof(List<>).MakeGenericType(elementType);
            if (type.IsAssignableFrom(concreteListType))
            {
                runtimeType = concreteListType;
            }
        }
    }
    else if (type.IsInterface)
    {
        addMethod = default;

        // Try List<T> first, then HashSet<T>, as a concrete backing type.
        Type concreteType = typeof(List<>).MakeGenericType(elementType);
        if (type.IsAssignableFrom(concreteType))
        {
            runtimeType = concreteType;
        }
        else
        {
            concreteType = typeof(HashSet<>).MakeGenericType(elementType);
            if (type.IsAssignableFrom(concreteType))
            {
                runtimeType = concreteType;
            }
        }
    }
    else
    {
        addMethod = default;
        if (checkForAddMethod)
        {
            Type genericICollectionType = type.GetInterface("System.Collections.Generic.ICollection`1");
            if (genericICollectionType != null)
            {
                addMethod = genericICollectionType.GetMethod("Add");
            }
            else
            {
                // Non-immutable stack or queue.
                MethodInfo methodInfo = type.GetMethod("Push") ?? type.GetMethod("Enqueue");
                if (methodInfo?.ReturnType == typeof(void))
                {
                    addMethod = methodInfo;
                }
            }
        }
    }

    return ClassType.Enumerable;
}
}
}
// Copyright (c) 2017 Andrew Vardeman. Published under the MIT license.
// See license.txt in the FileSharper distribution or repository for the
// full text of the license.

using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Windows;
using System.Windows.Input;
using FileSharperCore;
using Microsoft.Win32;

namespace FileSharper
{
    /// <summary>
    /// View model for the main window: owns the open search documents, the
    /// template list, the recent-documents list, and the commands bound to
    /// the application's menus and toolbars.
    /// </summary>
    public class MainViewModel : INotifyPropertyChanged
    {
        public event PropertyChangedEventHandler PropertyChanged;

        private int m_SelectedScreenIndex = 0;
        private int m_SelectedIndex = 0;
        private bool m_AnyOpenFiles = false;
        private bool m_AnyRecentDocuments = false;
        private bool m_AnyTemplates = false;
        private bool m_ShowingSaveTemplateUI = false;
        private string m_SaveTemplateDisplayName = "";

        public FileSharperSettings Settings { get; }

        public ObservableCollection<SearchDocument> SearchDocuments { get; } =
            new ObservableCollection<SearchDocument>();

        public int SelectedScreenIndex
        {
            get => m_SelectedScreenIndex;
            set => SetField(ref m_SelectedScreenIndex, value);
        }

        public int SelectedIndex
        {
            get => m_SelectedIndex;
            set => SetField(ref m_SelectedIndex, value);
        }

        public bool AnyOpenFiles
        {
            get => m_AnyOpenFiles;
            private set => SetField(ref m_AnyOpenFiles, value);
        }

        public bool AnyRecentDocuments
        {
            get => m_AnyRecentDocuments;
            private set => SetField(ref m_AnyRecentDocuments, value);
        }

        public bool AnyTemplates
        {
            get => m_AnyTemplates;
            private set => SetField(ref m_AnyTemplates, value);
        }

        public bool ShowingSaveTemplateUI
        {
            get => m_ShowingSaveTemplateUI;
            set
            {
                if (m_ShowingSaveTemplateUI != value)
                {
                    m_ShowingSaveTemplateUI = value;
                    // Clear the name field whenever the save-template UI toggles.
                    SaveTemplateDisplayName = string.Empty;
                    OnPropertyChanged();
                    OnPropertyChanged(nameof(SearchDocumentsEnabled));
                }
            }
        }

        public string SaveTemplateDisplayName
        {
            get => m_SaveTemplateDisplayName;
            set => SetField(ref m_SaveTemplateDisplayName, value);
        }

        // The document area is disabled while the save-template overlay is shown.
        public bool SearchDocumentsEnabled => !ShowingSaveTemplateUI;

        public ICommand AcceptEulaCommand { get; private set; }
        public ICommand NewSearchCommand { get; private set; }
        public ICommand NewSearchFromTemplateCommand { get; private set; }
        public ICommand OpenSearchCommand { get; private set; }
        public ICommand OpenRecentCommand { get; private set; }
        public ICommand CloseSearchCommand { get; private set; }
        public ICommand SaveSearchCommand { get; private set; }
        public ICommand SaveTemplateCommand { get; private set; }
        public ICommand SaveDefaultTemplateCommand { get; private set; }
        public ICommand ResetDefaultTemplateCommand { get; private set; }
        public ICommand DeleteTemplatesCommand { get; private set; }
        public ICommand MoveTemplatesUpCommand { get; private set; }
        public ICommand MoveTemplatesDownCommand { get; private set; }
        public ICommand ExitCommand { get; private set; }
        public ICommand SetHorizontalCommand { get; private set; }
        public ICommand SetSelectedScreenIndexCommand { get; private set; }
        public ICommand NavigateCommand { get; private set; }
        public ICommand ShowSaveTemplateCommand { get; private set; }
        public ICommand HideSaveTemplateCommand { get; private set; }

        public MainViewModel()
        {
            Settings = FileSharperSettings.Load();
            Settings.PropertyChanged += Settings_PropertyChanged;
            Settings.RecentDocuments.CollectionChanged += RecentDocuments_CollectionChanged;
            AnyRecentDocuments = Settings.RecentDocuments.Count > 0;
            Settings.Templates.CollectionChanged += Templates_CollectionChanged;
            AnyTemplates = Settings.Templates.Count(t => !t.Hidden) > 0;
            SearchDocuments.CollectionChanged += SearchDocuments_CollectionChanged;

            // Open any files passed on the command line; otherwise start with
            // a fresh search document.
            FileInfo[] startupFiles = App.StartupFiles;
            if (startupFiles != null && startupFiles.Length > 0)
            {
                foreach (FileInfo file in startupFiles)
                {
                    if (file.Exists)
                    {
                        try
                        {
                            OpenFile(file.FullName);
                        }
                        catch
                        {
                            // Best-effort: a single unreadable startup file
                            // should not prevent the app from launching.
                        }
                    }
                }
            }
            else
            {
                AddNewSearch();
            }

            AcceptEulaCommand = new MainViewModelCommand(this, p => { Settings.EulaAccepted = true; }, false, p => true);
            NewSearchCommand = new MainViewModelCommand(this, p => { AddNewSearch(); });
            NewSearchFromTemplateCommand = new MainViewModelCommand(this, NewSearchFromTemplate);
            OpenSearchCommand = new MainViewModelCommand(this, OpenSearch);
            OpenRecentCommand = new MainViewModelCommand(this, OpenRecentSearch);
            CloseSearchCommand = new MainViewModelCommand(this, CloseSearch, true);
            SaveSearchCommand = new MainViewModelCommand(this, SaveSearch, true);
            ShowSaveTemplateCommand = new MainViewModelCommand(this, SetShowSaveTemplates, true);
            SaveTemplateCommand = new MainViewModelCommand(this, SaveSearchTemplate, true);
            SaveDefaultTemplateCommand = new MainViewModelCommand(this, SaveDefaultSearchTemplate, true);
            ResetDefaultTemplateCommand = new MainViewModelCommand(this, ResetDefaultSearchTemplate);
            DeleteTemplatesCommand = new MainViewModelCommand(this, DeleteTemplates);
            MoveTemplatesUpCommand = new MainViewModelCommand(this, MoveTemplatesUp);
            MoveTemplatesDownCommand = new MainViewModelCommand(this, MoveTemplatesDown);
            ExitCommand = new MainViewModelCommand(this, p => { Application.Current.Shutdown(); });
            SetHorizontalCommand = new MainViewModelCommand(this, SetHorizontal);
            SetSelectedScreenIndexCommand = new MainViewModelCommand(this, SetSelectedScreenIndex, false, p => true);
            NavigateCommand = new MainViewModelCommand(this, Navigate);
        }

        private void Settings_PropertyChanged(object sender, PropertyChangedEventArgs e)
        {
            // Propagate the layout orientation to every open document.
            if (e.PropertyName == nameof(Settings.Horizontal))
            {
                foreach (SearchDocument document in SearchDocuments)
                {
                    document.Horizontal = Settings.Horizontal;
                }
            }
        }

        private void Templates_CollectionChanged(object sender,
            System.Collections.Specialized.NotifyCollectionChangedEventArgs e)
        {
            AnyTemplates = Settings.Templates.Count(t => !t.Hidden) > 0;
        }

        private void RecentDocuments_CollectionChanged(object sender,
            System.Collections.Specialized.NotifyCollectionChangedEventArgs e)
        {
            AnyRecentDocuments = Settings.RecentDocuments.Count > 0;
        }

        private void SearchDocuments_CollectionChanged(object sender,
            System.Collections.Specialized.NotifyCollectionChangedEventArgs e)
        {
            AnyOpenFiles = SearchDocuments.Count > 0;
        }

        /// <summary>
        /// Adds a new search document, based on the default template if one
        /// exists, otherwise an empty search.
        /// </summary>
        public void AddNewSearch()
        {
            if (!AddNewSearchFromTemplate(FileSharperSettings.DefaultTemplatePath))
            {
                SearchDocument doc = new SearchDocument(true);
                doc.Horizontal = Settings.Horizontal;
                SearchDocuments.Add(doc);
                SelectedIndex = SearchDocuments.Count - 1;
            }
        }

        public void NewSearchFromTemplate(object parameter)
        {
            string templatePath = parameter as string;
            if (templatePath != null)
            {
                AddNewSearchFromTemplate(templatePath);
            }
        }

        public void SetSelectedScreenIndex(object parameter)
        {
            if (parameter is int)
            {
                int index = (int)parameter;
                SelectedScreenIndex = index;
            }
        }

        public void OpenSearch(object parameter)
        {
            OpenFileDialog openFileDialog = new OpenFileDialog();
            openFileDialog.Multiselect = true;
            openFileDialog.Filter = "FileSharper files (*.fsh)|*.fsh";
            bool? success = openFileDialog.ShowDialog();
            if (success.HasValue && success.Value)
            {
                foreach (string filename in openFileDialog.FileNames)
                {
                    OpenFile(filename);
                }
            }
        }

        public void OpenRecentSearch(object parameter)
        {
            string filename = parameter as string;
            if (filename != null)
            {
                if (File.Exists(filename))
                {
                    OpenFile(filename);
                }
                else
                {
                    // File disappeared since it was recorded; prune the entry.
                    Settings.RemoveRecentDocument(filename);
                }
            }
        }

        /// <summary>
        /// Closes the document passed as <paramref name="parameter"/>, or the
        /// currently selected document when no parameter is given.
        /// </summary>
        public void CloseSearch(object parameter)
        {
            int idx;
            if (parameter is SearchDocument doc)
            {
                idx = SearchDocuments.IndexOf(doc);
            }
            else
            {
                idx = SelectedIndex;
            }
            // Guard both paths: IndexOf returns -1 for a document that is not
            // (or no longer) in the collection, which previously caused
            // RemoveAt(-1) to throw.
            if (idx >= 0 && idx < SearchDocuments.Count)
            {
                SearchDocuments.RemoveAt(idx);
            }
        }

        /// <summary>
        /// Saves the selected document; prompts for a path when saving for the
        /// first time or when <paramref name="parameter"/> (Save As) is true.
        /// </summary>
        public void SaveSearch(object parameter)
        {
            bool saveAs = parameter != null && (bool)parameter;
            int idx = SelectedIndex;
            bool save = false;
            if (idx >= 0 && idx < SearchDocuments.Count)
            {
                SearchDocument doc = SearchDocuments[idx];
                string path = doc.FileName;
                if (saveAs || string.IsNullOrEmpty(path))
                {
                    SaveFileDialog sfd = new SaveFileDialog();
                    sfd.Filter = "FileSharper files (*.fsh)|*.fsh";
                    bool? result = sfd.ShowDialog();
                    if (result.HasValue && result.Value)
                    {
                        path = sfd.FileName;
                        save = true;
                    }
                }
                else
                {
                    save = true;
                }
                if (save)
                {
                    doc.Save(path);
                    doc.FileName = path;
                    Settings.AddRecentDocument(path);
                }
            }
        }

        /// <summary>
        /// Saves the selected document as a named template and hides the
        /// save-template UI.
        /// </summary>
        public void SaveSearchTemplate(object parameter)
        {
            int idx = SelectedIndex;
            if (idx >= 0 && idx < SearchDocuments.Count)
            {
                string templateName = parameter as string;
                if (!string.IsNullOrEmpty(templateName))
                {
                    SearchDocument doc = SearchDocuments[idx];
                    if (doc != null)
                    {
                        Settings.AddTemplate(doc, templateName);
                    }
                }
            }
            ShowingSaveTemplateUI = false;
        }

        /// <summary>
        /// Opens a new search based on the template at
        /// <paramref name="templatePath"/>. Returns false when the template
        /// does not exist or cannot be parsed.
        /// </summary>
        public bool AddNewSearchFromTemplate(string templatePath)
        {
            SearchDocument doc = null;
            if (File.Exists(templatePath))
            {
                try
                {
                    doc = SearchDocument.FromFile(templatePath, true);
                }
                catch (Exception)
                {
                    // Fall through and report failure via the return value.
                }
            }
            if (doc != null)
            {
                doc.Horizontal = Settings.Horizontal;
                SearchDocuments.Add(doc);
                SelectedIndex = SearchDocuments.Count - 1;
                return true;
            }
            return false;
        }

        public void OpenFile(string filename)
        {
            SearchDocuments.Add(SearchDocument.FromFile(filename));
            SelectedIndex = SearchDocuments.Count - 1;
            Settings.AddRecentDocument(filename);
        }

        /// <summary>
        /// Saves the selected document as the default template for new
        /// searches, prompting for confirmation when a parent window is
        /// available (mirrors <see cref="ResetDefaultSearchTemplate"/>, which
        /// previously this method did not — a non-Window parameter silently
        /// did nothing).
        /// </summary>
        public void SaveDefaultSearchTemplate(object parameter)
        {
            int idx = SelectedIndex;
            if (idx < 0 || idx >= SearchDocuments.Count)
            {
                return;
            }
            MessageBoxResult result = MessageBoxResult.OK;
            if (parameter is Window window)
            {
                result = MessageBox.Show(window,
                    "Save the current search as the template for new searches?",
                    "Save Template?", MessageBoxButton.OKCancel);
            }
            if (result == MessageBoxResult.OK)
            {
                SearchDocument doc = SearchDocuments[idx];
                if (doc != null)
                {
                    Settings.SetDefaultTemplate(doc);
                }
            }
        }

        public void ResetDefaultSearchTemplate(object parameter)
        {
            MessageBoxResult result = MessageBoxResult.OK;
            if (parameter is Window)
            {
                result = MessageBox.Show(parameter as Window,
                    "Reset the template for new searches to an empty search?",
                    "Reset Template?", MessageBoxButton.OKCancel);
            }
            if (result == MessageBoxResult.OK)
            {
                Settings.ResetDefaultTemplate();
            }
        }

        public void Navigate(object parameter)
        {
            string url = parameter as string;
            if (url != null)
            {
                try
                {
                    System.Diagnostics.Process.Start(url);
                }
                catch (Exception ex)
                {
                    MessageBox.Show($"Could not open hyperlink {url}: {ex}");
                }
            }
        }

        public void SetShowSaveTemplates(object parameter)
        {
            // parameter == true means "hide"; anything else shows the UI.
            bool? hide = parameter as bool?;
            if (hide.HasValue && hide.Value)
            {
                ShowingSaveTemplateUI = false;
            }
            else
            {
                ShowingSaveTemplateUI = true;
            }
        }

        /// <summary>
        /// Deletes the selected (non-stock) templates and their backing files.
        /// </summary>
        public void DeleteTemplates(object parameter)
        {
            IList list = parameter as IList;
            if (list != null)
            {
                List<SearchTemplateInfo> templatesToRemove = list.OfType<SearchTemplateInfo>().ToList();
                foreach (SearchTemplateInfo template in templatesToRemove)
                {
                    if (!template.Stock)
                    {
                        Settings.Templates.Remove(template);
                        try
                        {
                            File.Delete(template.FileFullName);
                        }
                        catch (Exception)
                        {
                            // Best-effort: the template is already removed from
                            // the list even if its file cannot be deleted.
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Moves the selected templates up one position as a contiguous group.
        /// </summary>
        public void MoveTemplatesUp(object parameter)
        {
            IList list = parameter as IList;
            if (list != null)
            {
                ObservableCollection<SearchTemplateInfo> templates = Settings.Templates;
                List<SearchTemplateInfo> templatesToMove = list.OfType<SearchTemplateInfo>()
                    .OrderBy(t => templates.IndexOf(t)).ToList();
                int lowestIndex = templates.Count - 1;
                foreach (SearchTemplateInfo template in templatesToMove)
                {
                    int index = templates.IndexOf(template);
                    if (index > -1 && index < lowestIndex)
                    {
                        lowestIndex = index;
                    }
                }
                int insertionIndex = lowestIndex == 0 ? 0 : lowestIndex - 1;
                for (int i = templatesToMove.Count - 1; i >= 0; i--)
                {
                    templates.Move(templates.IndexOf(templatesToMove[i]), insertionIndex);
                }
            }
        }

        /// <summary>
        /// Moves the selected templates down one position as a contiguous group.
        /// </summary>
        public void MoveTemplatesDown(object parameter)
        {
            IList list = parameter as IList;
            if (list != null)
            {
                ObservableCollection<SearchTemplateInfo> templates = Settings.Templates;
                List<SearchTemplateInfo> templatesToMove = list.OfType<SearchTemplateInfo>()
                    .OrderBy(t => templates.IndexOf(t)).ToList();
                int highestIndex = -1;
                foreach (SearchTemplateInfo template in templatesToMove)
                {
                    int index = templates.IndexOf(template);
                    if (index > highestIndex)
                    {
                        highestIndex = index;
                    }
                }
                int insertionIndex = highestIndex + 1;
                if (insertionIndex >= templates.Count)
                {
                    insertionIndex = templates.Count - 1;
                }
                foreach (SearchTemplateInfo template in templatesToMove)
                {
                    templates.Move(templates.IndexOf(template), insertionIndex);
                }
            }
        }

        public void SetHorizontal(object parameter)
        {
            if (parameter == null)
            {
                Settings.Horizontal = false;
            }
            else if (parameter is bool horizontal)
            {
                Settings.Horizontal = horizontal;
            }
        }

        protected void OnPropertyChanged([CallerMemberName] string propertyName = null)
        {
            PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
        }

        protected bool SetField<T>(ref T field, T value, [CallerMemberName] string propertyName = null)
        {
            if (EqualityComparer<T>.Default.Equals(field, value)) return false;
            field = value;
            OnPropertyChanged(propertyName);
            return true;
        }

        /// <summary>
        /// ICommand implementation that re-queries CanExecute whenever the
        /// view model or its settings change. By default a command is enabled
        /// only once the EULA is accepted (and, optionally, when at least one
        /// document is open); a custom tester bypasses that check entirely.
        /// </summary>
        public class MainViewModelCommand : ICommand
        {
            public delegate bool CanExecuteTester(object parameter);
            public delegate void CommandExecutor(object parameter);

            public event EventHandler CanExecuteChanged;

            public MainViewModel ViewModel { get; private set; }

            private CanExecuteTester m_CanExecuteTester;
            private CommandExecutor m_CommandExecutor;
            private bool m_RequiresOpenFile;

            public MainViewModelCommand(MainViewModel viewModel, CommandExecutor commandExecutor,
                bool requiresOpenFile = false, CanExecuteTester canExecuteTester = null)
            {
                ViewModel = viewModel;
                ViewModel.PropertyChanged += ViewModel_PropertyChanged;
                ViewModel.Settings.PropertyChanged += ViewModel_Settings_PropertyChanged;
                m_CommandExecutor = commandExecutor;
                m_CanExecuteTester = canExecuteTester;
                m_RequiresOpenFile = requiresOpenFile;
            }

            private void ViewModel_Settings_PropertyChanged(object sender, PropertyChangedEventArgs e)
            {
                CanExecuteChanged?.Invoke(this, EventArgs.Empty);
            }

            private void ViewModel_PropertyChanged(object sender, PropertyChangedEventArgs e)
            {
                CanExecuteChanged?.Invoke(this, EventArgs.Empty);
            }

            public bool CanExecute(object parameter)
            {
                if (m_CanExecuteTester != null)
                {
                    return m_CanExecuteTester(parameter);
                }
                return ViewModel.Settings.EulaAccepted &&
                    (!m_RequiresOpenFile || ViewModel.AnyOpenFiles);
            }

            public void Execute(object parameter)
            {
                m_CommandExecutor?.Invoke(parameter);
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ICSharpCode.SharpZipLib.BZip2;
using ICSharpCode.SharpZipLib.Checksum;
using ICSharpCode.SharpZipLib.GZip;
using Org.BouncyCastle.Crypto.Digests;
using Serilog;
using SevenZip.Compression.LZMA;
using Villermen.RuneScapeCacheTools.Exception;
using Villermen.RuneScapeCacheTools.File;
using Villermen.RuneScapeCacheTools.Model;

namespace Villermen.RuneScapeCacheTools.Utility
{
    /// <summary>
    /// Encodes and decodes cache files in the RuneTek 5 container format:
    /// a compression-type byte, big-endian size fields, an optionally
    /// compressed payload, and (for multi-entry files) delta-encoded entry
    /// sizes appended at the end.
    /// </summary>
    public class RuneTek5CacheFileDecoder : ICacheFileDecoder
    {
        /// <summary>
        /// Decodes a raw cache file. When the file info declares entries, the
        /// decompressed payload is further split into those entries.
        /// </summary>
        public virtual CacheFile DecodeFile(byte[] encodedData, CacheFileInfo? info)
        {
            info ??= new CacheFileInfo();

            var data = this.DecodeData(encodedData, info);

            if (!info.HasEntries)
            {
                return new CacheFile(data, info);
            }

            var entries = this.DecodeEntries(data, info.Entries.Keys.ToArray());
            return new CacheFile(entries, info);
        }

        /// <summary>
        /// Decompresses the payload and verifies it against the sizes, CRC and
        /// whirlpool digest in <paramref name="info"/> (when set), then writes
        /// the computed values back into <paramref name="info"/>.
        /// </summary>
        protected byte[] DecodeData(byte[] encodedData, CacheFileInfo info)
        {
            using var dataStream = new MemoryStream(encodedData);
            using var dataReader = new BinaryReader(dataStream);

            // Decrypt the data if a key is given
            if (info.EncryptionKey != null)
            {
                throw new DecodeException(
                    "XTEA encryption not supported. If you encounter this please inform me about the index and file that triggered this message."
                );

                // var totalLength = dataStream.Position + dataLength;
                //
                // var xtea = new XteaEngine();
                // xtea.Init(false, new KeyParameter(info.EncryptionKey));
                // var decrypted = new byte[totalLength];
                // xtea.ProcessBlock(dataReader.ReadBytes(totalLength), 5, decrypted, 0);
                //
                // dataReader = new BinaryReader(new MemoryStream(decrypted));
            }

            var compressionType = (CompressionType)dataReader.ReadByte();
            var compressedSize = dataReader.ReadInt32BigEndian();

            // The uncompressed-size field is only present for compressed payloads.
            var uncompressedSize = compressedSize;
            if (compressionType != CompressionType.None)
            {
                uncompressedSize = dataReader.ReadInt32BigEndian();
            }

            var data = this.Decompress(
                compressionType,
                dataReader.ReadBytesExactly(compressedSize),
                uncompressedSize
            );

            // Compressed size includes meta bytes for info.
            compressedSize = (int)dataStream.Position;

            // Verify compressed size. Info's compressed size includes meta bytes.
            if (info.CompressedSize != null && compressedSize != info.CompressedSize)
            {
                throw new DecodeException(
                    $"Compressed size ({compressedSize}) does not equal expected ({info.CompressedSize})."
                );
            }

            // Verify uncompressed size.
            if (info.UncompressedSize != null && uncompressedSize != info.UncompressedSize)
            {
                // Some uncompressed files _do_ seem to include meta bytes into the uncompressed size. Allow for now.
                // TODO: Figure out when uncompressed size includes the meta bytes. Is this only true for audio files?
                var message = $"Uncompressed size ({uncompressedSize}) does not equal expected ({info.UncompressedSize}).";
                if (compressionType == CompressionType.None && uncompressedSize + 5 == info.UncompressedSize)
                {
                    Log.Debug(message + " (allowed)");
                }
                else
                {
                    throw new DecodeException(message);
                }
            }

            // Calculate and verify CRC.
            var crcHasher = new Crc32();
            crcHasher.Update(encodedData);
            // Note that there is no way to distinguish between an unset CRC and one that is zero.
            var crc = (int)crcHasher.Value;

            if (info.Crc != null && crc != info.Crc)
            {
                throw new DecodeException($"Calculated checksum ({crc}) did not match expected ({info.Crc}).");
            }

            // Calculate and verify whirlpool digest (over the meta + compressed bytes only).
            var whirlpoolHasher = new WhirlpoolDigest();
            whirlpoolHasher.BlockUpdate(encodedData, 0, compressedSize);

            var whirlpoolDigest = new byte[whirlpoolHasher.GetDigestSize()];
            whirlpoolHasher.DoFinal(whirlpoolDigest, 0);

            if (info.WhirlpoolDigest != null && !whirlpoolDigest.SequenceEqual(info.WhirlpoolDigest))
            {
                throw new DecodeException("Calculated whirlpool digest did not match expected.");
            }

            if (dataStream.Position < dataStream.Length)
            {
                throw new DecodeException(
                    $"Input data not fully consumed while decoding RuneTek5CacheFile. {dataStream.Length - dataStream.Position} bytes remain."
                );
            }

            // Update info with obtained details.
            info.CompressionType = compressionType;
            info.CompressedSize = compressedSize;
            info.UncompressedSize = uncompressedSize;
            info.Crc = crc;
            info.WhirlpoolDigest = whirlpoolDigest;

            return data;
        }

        /// <summary>
        /// Splits decompressed payload data into its individual entries.
        /// </summary>
        public Dictionary<int, byte[]> DecodeEntries(byte[] data, int[] entryIds)
        {
            /*
             * Format visualization (e = entry, c = chunk):
             * Chunk data: [e1c1][e2c1][e3c1] [e1c2][e2c2][e3c1]
             * Delta-encoded chunk sizes: [e1c1][e2c1][e3c1] [e1c2][e2c2][e3c2]
             * [amountOfChunks]
             *
             * I have no idea why it works back to front either =S
             */
            using var dataStream = new MemoryStream(data, false);
            using var dataReader = new BinaryReader(dataStream);

            var amountOfEntries = entryIds.Length;

            // Read the amount of chunks (stored in the final byte).
            dataStream.Position = dataStream.Length - 1;
            var amountOfChunks = dataReader.ReadByte();

            if (amountOfChunks == 0)
            {
                throw new DecodeException("Entry file contains no chunks = no entries.");
            }

            // Read the delta-encoded chunk sizes.
            var sizesStartPosition = dataStream.Length - 1 - 4 * amountOfChunks * amountOfEntries;
            dataStream.Position = sizesStartPosition;

            var entryChunkSizes = new int[amountOfEntries, amountOfChunks];

            for (var chunkIndex = 0; chunkIndex < amountOfChunks; chunkIndex++)
            {
                var chunkSize = 0;
                for (var entryIndex = 0; entryIndex < amountOfEntries; entryIndex++)
                {
                    // Each delta is relative to the previous entry's size within the same chunk.
                    var delta = dataReader.ReadInt32BigEndian();
                    chunkSize += delta;
                    entryChunkSizes[entryIndex, chunkIndex] = chunkSize;
                }
            }

            // Read the entry data.
            var entryData = new byte[amountOfEntries][];
            dataStream.Position = 0;
            for (var chunkIndex = 0; chunkIndex < amountOfChunks; chunkIndex++)
            {
                for (var entryIndex = 0; entryIndex < amountOfEntries; entryIndex++)
                {
                    // Read the chunk data.
                    var entrySize = entryChunkSizes[entryIndex, chunkIndex];
                    var chunkData = dataReader.ReadBytesExactly(entrySize);

                    // Add the chunk data to the entry data.
                    entryData[entryIndex] = chunkIndex == 0
                        ? chunkData
                        : entryData[entryIndex].Concat(chunkData).ToArray();
                }
            }

            if (dataStream.Position != sizesStartPosition)
            {
                throw new DecodeException(
                    $"Not all or too much data was consumed while decoding entries. {sizesStartPosition - dataStream.Position} bytes remain."
                );
            }

            // Combine entry keys and values.
            var entries = new Dictionary<int, byte[]>();
            for (var entryIndex = 0; entryIndex < amountOfEntries; entryIndex++)
            {
                entries.Add(entryIds[entryIndex], entryData[entryIndex]);
            }

            return entries;
        }

        // Decompresses the payload according to the container's compression type.
        private byte[] Decompress(CompressionType compressionType, byte[] compressedData, int uncompressedSize)
        {
            if (compressionType == CompressionType.None)
            {
                return compressedData;
            }

            using var compressedDataStream = new MemoryStream(compressedData);

            if (compressionType == CompressionType.Bzip2)
            {
                // Add the required bzip2 magic number as it is missing from the cache for whatever reason.
                using var bzip2InputStream = new MemoryStream(4 + compressedData.Length);
                bzip2InputStream.WriteByte((byte)'B');
                bzip2InputStream.WriteByte((byte)'Z');
                bzip2InputStream.WriteByte((byte)'h');
                bzip2InputStream.WriteByte((byte)'1');
                compressedDataStream.CopyTo(bzip2InputStream);
                bzip2InputStream.Position = 0;

                using var outputStream = new MemoryStream();
                BZip2.Decompress(bzip2InputStream, outputStream, true);
                return outputStream.ToArray();
            }

            if (compressionType == CompressionType.Gzip)
            {
                using var outputStream = new MemoryStream();
                GZip.Decompress(compressedDataStream, outputStream, true);
                return outputStream.ToArray();
            }

            if (compressionType == CompressionType.Lzma)
            {
                using var compressedDataReader = new BinaryReader(compressedDataStream);
                using var outputStream = new MemoryStream(uncompressedSize);
                var lzmaDecoder = new Decoder();
                // The first 5 bytes are the LZMA decoder properties.
                lzmaDecoder.SetDecoderProperties(compressedDataReader.ReadBytesExactly(5));
                lzmaDecoder.Code(
                    compressedDataStream,
                    outputStream,
                    compressedDataStream.Length - compressedDataStream.Position,
                    uncompressedSize,
                    null
                );
                return outputStream.ToArray();
            }

            throw new DecodeException($"Unknown compression type {compressionType}.");
        }

        /// <summary>
        /// Encodes a cache file back into the container format, serializing
        /// entries first when the file has them.
        /// </summary>
        public virtual byte[] EncodeFile(CacheFile file, CacheFileInfo? info)
        {
            var data = file.HasEntries ? this.EncodeEntries(file.Entries, info) : file.Data;
            return this.EncodeData(data, info);
        }

        /// <summary>
        /// Compresses the payload and writes the container header; updates
        /// <paramref name="info"/> with the resulting sizes, CRC and digest.
        /// </summary>
        protected byte[] EncodeData(byte[] data, CacheFileInfo? info)
        {
            // Encrypt data.
            if (info?.EncryptionKey != null)
            {
                throw new EncodeException(
                    "XTEA encryption not supported. If you encounter this please inform me about the index and file that triggered this message."
                );
            }

            // Compression.
            var compressionType = info?.CompressionType ?? CompressionType.Bzip2;
            var uncompressedSize = data.Length;
            var compressedData = this.CompressData(compressionType, data);

            using var dataStream = new MemoryStream();
            using var dataWriter = new BinaryWriter(dataStream);

            dataWriter.Write((byte)compressionType);
            dataWriter.WriteInt32BigEndian(compressedData.Length);

            // Add uncompressed size if compression is used.
            if (compressionType != CompressionType.None)
            {
                dataWriter.WriteInt32BigEndian(uncompressedSize);
            }

            dataWriter.Write(compressedData);

            if (compressionType == CompressionType.None)
            {
                // Uncompressed size includes meta bytes for info when not using compression.
                uncompressedSize = (int)dataStream.Position;
            }

            // Compressed size includes meta bytes for info.
            var compressedSize = (int)dataStream.Position;

            var result = dataStream.ToArray();

            // Calculate new CRC.
            var crcHasher = new Crc32();
            crcHasher.Update(result);
            var crc = (int)crcHasher.Value;

            // Calculate new whirlpool digest.
            var whirlpoolHasher = new WhirlpoolDigest();
            whirlpoolHasher.BlockUpdate(result, 0, compressedSize);

            var whirlpoolDigest = new byte[whirlpoolHasher.GetDigestSize()];
            whirlpoolHasher.DoFinal(whirlpoolDigest, 0);

            // Update file info.
            if (info != null)
            {
                info.CompressionType = compressionType;
                info.CompressedSize = compressedSize;
                info.UncompressedSize = uncompressedSize;
                info.Crc = crc;
                info.WhirlpoolDigest = whirlpoolDigest;
            }

            return result;
        }

        // Compresses the payload with the requested algorithm.
        private byte[] CompressData(CompressionType compressionType, byte[] data)
        {
            if (compressionType == CompressionType.None)
            {
                return data;
            }

            if (compressionType == CompressionType.Bzip2)
            {
                using var outputStream = new MemoryStream();
                BZip2.Compress(new MemoryStream(data), outputStream, true, 1);

                // Remove BZh1 (note that 1 is the block size/compression level).
                return outputStream.ToArray().Skip(4).ToArray();
            }

            if (compressionType == CompressionType.Gzip)
            {
                using var outputStream = new MemoryStream();
                GZip.Compress(new MemoryStream(data), outputStream, true, 512, 9);
                return outputStream.ToArray();
            }

            if (compressionType == CompressionType.Lzma)
            {
                throw new NotImplementedException("LZMA compression is currently not implemented.");
            }

            throw new EncodeException($"Unknown compression type {compressionType}.");
        }

        /// <summary>
        /// Serializes entries into a single payload: concatenated entry data
        /// followed by delta-encoded sizes and a one-byte chunk count.
        /// </summary>
        public byte[] EncodeEntries(Dictionary<int, byte[]> entries, CacheFileInfo? info)
        {
            // Sort entries (encodes more efficiently).
            entries = entries.OrderBy(entryPair => entryPair.Key).ToDictionary(
                entryPair => entryPair.Key,
                entryPair => entryPair.Value
            );

            using var dataStream = new MemoryStream();
            using var dataWriter = new BinaryWriter(dataStream);

            // I don't know why splitting into chunks is necessary/desired so I just use one. This also happens to
            // greatly simplify this logic.
            foreach (var entryData in entries.Values)
            {
                dataWriter.Write(entryData);
            }

            // Write delta encoded entry sizes.
            var previousEntrySize = 0;
            foreach (var entryData in entries.Values)
            {
                var entrySize = entryData.Length;
                var delta = entrySize - previousEntrySize;

                dataWriter.WriteInt32BigEndian(delta);

                previousEntrySize = entrySize;
            }

            // Write amount of chunks.
            dataWriter.Write((byte)1);

            // Update info.
            if (info != null)
            {
                info.Entries = entries.Keys.ToDictionary(
                    entryId => entryId,
                    entryId => new CacheFileEntryInfo()
                );
            }

            return dataStream.ToArray();
        }
    }
}
// // (C) Copyright 2003-2011 by Autodesk, Inc. // // Permission to use, copy, modify, and distribute this software in // object code form for any purpose and without fee is hereby granted, // provided that the above copyright notice appears in all copies and // that both that copyright notice and the limited warranty and // restricted rights notice below appear in all supporting // documentation. // // AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS. // AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF // MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC. // DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE // UNINTERRUPTED OR ERROR FREE. // // Use, duplication, or disclosure by the U.S. Government is subject to // restrictions set forth in FAR 52.227-19 (Commercial Computer // Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii) // (Rights in Technical Data and Computer Software), as applicable. // using System; using System.Drawing; using System.Collections; using System.ComponentModel; using System.Windows.Forms; namespace Revit.SDK.Samples.SlabProperties.CS { /// <summary> /// Show some properties of a slab in Revit Structure 5, including Level, Type name, Span direction, /// Material name, Thickness, and Young Modulus for each layer of the slab's material. /// </summary> public class SlabPropertiesForm : System.Windows.Forms.Form { private System.Windows.Forms.GroupBox layerGroupBox; private System.Windows.Forms.RichTextBox layerRichTextBox; private System.Windows.Forms.Label levelLabel; private System.Windows.Forms.Label typeNameLabel; private System.Windows.Forms.Label spanDirectionLabel; private System.Windows.Forms.TextBox levelTextBox; private System.Windows.Forms.TextBox typeNameTextBox; private System.Windows.Forms.TextBox spanDirectionTextBox; private System.Windows.Forms.Button closeButton; /// <summary> /// Required designer variable. 
/// </summary> private System.ComponentModel.Container components = null; private Label degreeLabel; // To store the data private Command m_dataBuffer; private SlabPropertiesForm() { // // Required for Windows Form Designer support // InitializeComponent(); } /// <summary> /// overload the constructor /// </summary> /// <param name="dataBuffer">To store the data of a slab</param> public SlabPropertiesForm(Command dataBuffer) { InitializeComponent(); // get all the data m_dataBuffer = dataBuffer; } /// <summary> /// Clean up any resources being used. /// </summary> protected override void Dispose(bool disposing) { if (disposing) { if (null != components) { components.Dispose(); } } base.Dispose(disposing); } #region Windows Form Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { this.layerGroupBox = new System.Windows.Forms.GroupBox(); this.layerRichTextBox = new System.Windows.Forms.RichTextBox(); this.levelLabel = new System.Windows.Forms.Label(); this.levelTextBox = new System.Windows.Forms.TextBox(); this.typeNameTextBox = new System.Windows.Forms.TextBox(); this.spanDirectionTextBox = new System.Windows.Forms.TextBox(); this.typeNameLabel = new System.Windows.Forms.Label(); this.spanDirectionLabel = new System.Windows.Forms.Label(); this.closeButton = new System.Windows.Forms.Button(); this.degreeLabel = new System.Windows.Forms.Label(); this.layerGroupBox.SuspendLayout(); this.SuspendLayout(); // // layerGroupBox // this.layerGroupBox.Controls.Add(this.layerRichTextBox); this.layerGroupBox.Location = new System.Drawing.Point(22, 86); this.layerGroupBox.Name = "layerGroupBox"; this.layerGroupBox.Size = new System.Drawing.Size(375, 265); this.layerGroupBox.TabIndex = 29; this.layerGroupBox.TabStop = false; this.layerGroupBox.Text = "Layers:"; // // layerRichTextBox // this.layerRichTextBox.Location = new 
System.Drawing.Point(6, 19); this.layerRichTextBox.Name = "layerRichTextBox"; this.layerRichTextBox.ReadOnly = true; this.layerRichTextBox.Size = new System.Drawing.Size(359, 232); this.layerRichTextBox.TabIndex = 2; this.layerRichTextBox.Text = ""; // // levelLabel // this.levelLabel.Location = new System.Drawing.Point(13, 7); this.levelLabel.Name = "levelLabel"; this.levelLabel.Size = new System.Drawing.Size(98, 23); this.levelLabel.TabIndex = 27; this.levelLabel.Text = "Level:"; this.levelLabel.TextAlign = System.Drawing.ContentAlignment.MiddleRight; // // levelTextBox // this.levelTextBox.Location = new System.Drawing.Point(117, 8); this.levelTextBox.Name = "levelTextBox"; this.levelTextBox.ReadOnly = true; this.levelTextBox.Size = new System.Drawing.Size(280, 20); this.levelTextBox.TabIndex = 24; // // typeNameTextBox // this.typeNameTextBox.Location = new System.Drawing.Point(117, 34); this.typeNameTextBox.Name = "typeNameTextBox"; this.typeNameTextBox.ReadOnly = true; this.typeNameTextBox.Size = new System.Drawing.Size(280, 20); this.typeNameTextBox.TabIndex = 22; // // spanDirectionTextBox // this.spanDirectionTextBox.Location = new System.Drawing.Point(117, 60); this.spanDirectionTextBox.Name = "spanDirectionTextBox"; this.spanDirectionTextBox.ReadOnly = true; this.spanDirectionTextBox.Size = new System.Drawing.Size(224, 20); this.spanDirectionTextBox.TabIndex = 23; // // typeNameLabel // this.typeNameLabel.Location = new System.Drawing.Point(13, 34); this.typeNameLabel.Name = "typeNameLabel"; this.typeNameLabel.Size = new System.Drawing.Size(98, 23); this.typeNameLabel.TabIndex = 25; this.typeNameLabel.Text = "Type Name:"; this.typeNameLabel.TextAlign = System.Drawing.ContentAlignment.MiddleRight; // // spanDirectionLabel // this.spanDirectionLabel.Location = new System.Drawing.Point(13, 60); this.spanDirectionLabel.Name = "spanDirectionLabel"; this.spanDirectionLabel.Size = new System.Drawing.Size(98, 23); this.spanDirectionLabel.TabIndex = 26; 
this.spanDirectionLabel.Text = "Span Direction:"; this.spanDirectionLabel.TextAlign = System.Drawing.ContentAlignment.MiddleRight; // // closeButton // this.closeButton.DialogResult = System.Windows.Forms.DialogResult.Cancel; this.closeButton.Location = new System.Drawing.Point(322, 367); this.closeButton.Name = "closeButton"; this.closeButton.Size = new System.Drawing.Size(75, 23); this.closeButton.TabIndex = 0; this.closeButton.Text = "Close"; this.closeButton.Click += new System.EventHandler(this.closeButton_Click); // // degreeLabel // this.degreeLabel.Location = new System.Drawing.Point(347, 59); this.degreeLabel.Name = "degreeLabel"; this.degreeLabel.Size = new System.Drawing.Size(50, 23); this.degreeLabel.TabIndex = 26; this.degreeLabel.Text = "Degree"; this.degreeLabel.TextAlign = System.Drawing.ContentAlignment.MiddleLeft; // // SlabPropertiesForm // this.AcceptButton = this.closeButton; this.AutoScaleBaseSize = new System.Drawing.Size(5, 13); this.CancelButton = this.closeButton; this.ClientSize = new System.Drawing.Size(411, 402); this.Controls.Add(this.layerGroupBox); this.Controls.Add(this.levelLabel); this.Controls.Add(this.levelTextBox); this.Controls.Add(this.typeNameTextBox); this.Controls.Add(this.spanDirectionTextBox); this.Controls.Add(this.typeNameLabel); this.Controls.Add(this.degreeLabel); this.Controls.Add(this.spanDirectionLabel); this.Controls.Add(this.closeButton); this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog; this.MaximizeBox = false; this.MinimizeBox = false; this.Name = "SlabPropertiesForm"; this.ShowInTaskbar = false; this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen; this.Text = "Slab Properties"; this.Load += new System.EventHandler(this.SlabPropertiesForm_Load); this.layerGroupBox.ResumeLayout(false); this.ResumeLayout(false); this.PerformLayout(); } #endregion /// <summary> /// Close the Form /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void 
closeButton_Click(object sender, System.EventArgs e)
{
    Close();
}

/// <summary>
/// Display the properties on the form when the form loads: level, type name,
/// span direction, plus one text section per material layer of the slab.
/// </summary>
/// <param name="sender">Event source (the form).</param>
/// <param name="e">Event arguments (unused).</param>
private void SlabPropertiesForm_Load(object sender, System.EventArgs e)
{
    // Scalar slab properties come straight from the data buffer.
    this.levelTextBox.Text = m_dataBuffer.Level;
    this.typeNameTextBox.Text = m_dataBuffer.TypeName;
    this.spanDirectionTextBox.Text = m_dataBuffer.SpanDirection;

    int numberOfLayers = m_dataBuffer.NumberOfLayers;
    this.layerRichTextBox.Text = "";
    for (int i = 0; i < numberOfLayers; i++)
    {
        // Get each layer's Material name and Young Modulus properties.
        // SetLayer switches the buffer's "current layer" that the Layer* getters read.
        m_dataBuffer.SetLayer(i);
        this.layerRichTextBox.Text += "Layer " + (i + 1).ToString() + "\r\n";
        this.layerRichTextBox.Text += "Material name: " + m_dataBuffer.LayerMaterialName + "\r\n";
        this.layerRichTextBox.Text += "Thickness: " + m_dataBuffer.LayerThickness + "\r\n";
        this.layerRichTextBox.Text += "YoungModulus X: " + m_dataBuffer.LayerYoungModulusX + "\r\n";
        this.layerRichTextBox.Text += "YoungModulus Y: " + m_dataBuffer.LayerYoungModulusY + "\r\n";
        this.layerRichTextBox.Text += "YoungModulus Z: " + m_dataBuffer.LayerYoungModulusZ + "\r\n";
        this.layerRichTextBox.Text += "-----------------------------------------------------------" + "\r\n";
    }
}
}
}
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for Additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ namespace NPOI.XWPF.UserModel { using System; using NPOI.OpenXmlFormats.Wordprocessing; using System.Collections.Generic; using System.Text; using System.Xml; using System.IO; using NPOI.Util; using NPOI.OpenXmlFormats.Dml; using System.Xml.Serialization; using NPOI.OpenXmlFormats.Dml.WordProcessing; using NPOI.WP.UserModel; /** * @see <a href="http://msdn.microsoft.com/en-us/library/ff533743(v=office.12).aspx">[MS-OI29500] Run Fonts</a> */ public enum FontCharRange { None, Ascii /* char 0-127 */, CS /* complex symbol */, EastAsia /* east asia */, HAnsi /* high ansi */ }; /** * XWPFrun.object defines a region of text with a common Set of properties * * @author Yegor Kozlov * @author Gregg Morris (gregg dot morris at gmail dot com) - added getColor(), setColor() */ public class XWPFRun : ISDTContents, IRunElement, ICharacterRun { private CT_R run; private String pictureText; //private XWPFParagraph paragraph; private IRunBody parent; private List<XWPFPicture> pictures; /** * @param r the CT_R bean which holds the run.attributes * @param p the parent paragraph */ public XWPFRun(CT_R r, 
IRunBody p) { this.run = r; this.parent = p; /** * reserve already occupied Drawing ids, so reserving new ids later will * not corrupt the document */ IList<CT_Drawing> drawingList = r.GetDrawingList(); foreach (CT_Drawing ctDrawing in drawingList) { List<CT_Anchor> anchorList = ctDrawing.GetAnchorList(); foreach (CT_Anchor anchor in anchorList) { if (anchor.docPr != null) { this.Document.DrawingIdManager.Reserve(anchor.docPr.id); } } List<CT_Inline> inlineList = ctDrawing.GetInlineList(); foreach (CT_Inline inline in inlineList) { if (inline.docPr != null) { this.Document.DrawingIdManager.Reserve(inline.docPr.id); } } } //// Look for any text in any of our pictures or Drawings StringBuilder text = new StringBuilder(); List<object> pictTextObjs = new List<object>(); foreach (CT_Picture pic in r.GetPictList()) pictTextObjs.Add(pic); foreach (CT_Drawing draw in drawingList) pictTextObjs.Add(draw); //foreach (object o in pictTextObjs) //{ //todo:: imlement this //XmlObject[] t = o.SelectPath("declare namespace w='http://schemas.openxmlformats.org/wordprocessingml/2006/main' .//w:t"); //for (int m = 0; m < t.Length; m++) //{ // NodeList kids = t[m].DomNode.ChildNodes; // for (int n = 0; n < kids.Length; n++) // { // if (kids.Item(n) is Text) // { // if (text.Length > 0) // text.Append("\n"); // text.Append(kids.Item(n).NodeValue); // } // } //} //} pictureText = text.ToString(); // Do we have any embedded pictures? 
// (They're a different CT_Picture, under the Drawingml namespace) pictures = new List<XWPFPicture>(); foreach (object o in pictTextObjs) { foreach (OpenXmlFormats.Dml.Picture.CT_Picture pict in GetCTPictures(o)) { XWPFPicture picture = new XWPFPicture(pict, this); pictures.Add(picture); } } } /** * @deprecated Use {@link XWPFRun#XWPFRun(CTR, IRunBody)} */ [Obsolete("Use XWPFRun(CTR, IRunBody)")] public XWPFRun(CT_R r, XWPFParagraph p) : this(r, (IRunBody)p) { } private List<NPOI.OpenXmlFormats.Dml.Picture.CT_Picture> GetCTPictures(object o) { List<NPOI.OpenXmlFormats.Dml.Picture.CT_Picture> pictures = new List<NPOI.OpenXmlFormats.Dml.Picture.CT_Picture>(); //XmlObject[] picts = o.SelectPath("declare namespace pic='"+CT_Picture.type.Name.NamespaceURI+"' .//pic:pic"); //XmlElement[] picts = o.Any; //foreach (XmlElement pict in picts) //{ //if(pict is XmlAnyTypeImpl) { // // Pesky XmlBeans bug - see Bugzilla #49934 // try { // pict = CT_Picture.Factory.Parse( pict.ToString() ); // } catch(XmlException e) { // throw new POIXMLException(e); // } //} //if (pict is NPOI.OpenXmlFormats.Dml.CT_Picture) //{ // pictures.Add((NPOI.OpenXmlFormats.Dml.CT_Picture)pict); //} //} if (o is CT_Drawing) { CT_Drawing drawing = o as CT_Drawing; if (drawing.inline != null) { foreach (CT_Inline inline in drawing.inline) { GetPictures(inline.graphic.graphicData, pictures); } } } else if (o is CT_GraphicalObjectData) { GetPictures(o as CT_GraphicalObjectData, pictures); } return pictures; } private void GetPictures(CT_GraphicalObjectData god, List<NPOI.OpenXmlFormats.Dml.Picture.CT_Picture> pictures) { XmlSerializer xmlse = new XmlSerializer(typeof(NPOI.OpenXmlFormats.Dml.Picture.CT_Picture)); foreach (string el in god.Any) { System.IO.StringReader stringReader = new System.IO.StringReader(el); NPOI.OpenXmlFormats.Dml.Picture.CT_Picture pict = xmlse.Deserialize(System.Xml.XmlReader.Create(stringReader)) as NPOI.OpenXmlFormats.Dml.Picture.CT_Picture; pictures.Add(pict); } } /** * Get the 
currently used CT_R object * @return CT_R object */ public CT_R GetCTR() { return run; } /** * Get the currently referenced paragraph/SDT object * @return current parent */ public IRunBody Parent { get { return parent; } } /** * Get the currently referenced paragraph, or null if a SDT object * @deprecated use {@link XWPFRun#getParent()} instead */ public XWPFParagraph Paragraph { get { if (parent is XWPFParagraph) return (XWPFParagraph)parent; return null; } } /** * @return The {@link XWPFDocument} instance, this run.belongs to, or * <code>null</code> if parent structure (paragraph > document) is not properly Set. */ public XWPFDocument Document { get { if (parent != null) { return parent.Document; } return null; } } /** * For isBold, isItalic etc */ private bool IsCTOnOff(CT_OnOff onoff) { if (!onoff.IsSetVal()) return true; return onoff.val; } /** * Whether the bold property shall be applied to all non-complex script * characters in the contents of this run.when displayed in a document. * <p> * This formatting property is a toggle property, which specifies that its * behavior differs between its use within a style defInition and its use as * direct formatting. When used as part of a style defInition, Setting this * property shall toggle the current state of that property as specified up * to this point in the hierarchy (i.e. applied to not applied, and vice * versa). Setting it to <code>false</code> (or an equivalent) shall * result in the current Setting remaining unChanged. However, when used as * direct formatting, Setting this property to true or false shall Set the * absolute state of the resulting property. * </p> * <p> * If this element is not present, the default value is to leave the * formatting applied at previous level in the style hierarchy. If this * element is never applied in the style hierarchy, then bold shall not be * applied to non-complex script characters. 
* </p>
*
* @param value <code>true</code> if the bold property is applied to
*        this run
*/
public bool IsBold
{
    get
    {
        CT_RPr pr = run.rPr;
        // No run properties or no <w:b> element means bold is not applied.
        if (pr == null || !pr.IsSetB())
        {
            return false;
        }
        return IsCTOnOff(pr.b);
    }
    set
    {
        // Create the rPr / b elements on demand so setting works on a bare run.
        CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr();
        CT_OnOff bold = pr.IsSetB() ? pr.b : pr.AddNewB();
        bold.val = value;
    }
}

/**
 * Get text color. The returned value is a string in the hex form "RRGGBB",
 * or <code>null</code> if no color is set on the run.
 */
public String GetColor()
{
    String color = null;
    if (run.IsSetRPr())
    {
        CT_RPr pr = run.rPr;
        if (pr.IsSetColor())
        {
            NPOI.OpenXmlFormats.Wordprocessing.CT_Color clr = pr.color;
            color = clr.val; //clr.xgetVal().getStringValue();
        }
    }
    return color;
}

/**
 * Set text color.
 * @param rgbStr - the desired color, in the hex form "RRGGBB".
 */
public void SetColor(String rgbStr)
{
    CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr();
    NPOI.OpenXmlFormats.Wordprocessing.CT_Color color = pr.IsSetColor() ? pr.color : pr.AddNewColor();
    color.val = (rgbStr);
}

/**
 * Return the string content of this text run.
 *
 * @param pos index into the run's text (w:t) array, 0 based
 * @return the text of this text run, or <code>null</code> if not set
 */
public String GetText(int pos)
{
    return run.SizeOfTArray() == 0 ?
        null : run.GetTArray(pos).Value;
}

/**
 * Returns text embedded in pictures
 */
public String PictureText
{
    get
    {
        return pictureText;
    }
}

// Replaces oldText with newText across the run's combined text, then writes
// the whole result back into the first text element (via SetText(value, 0)).
public void ReplaceText(string oldText, string newText)
{
    string text = this.Text.Replace(oldText, newText);
    this.SetText(text);
}

/// <summary>
/// Sets the text of this text run
/// </summary>
/// <param name="value">the literal text which shall be displayed in the document</param>
public void SetText(String value)
{
    SetText(value, 0);
}

// Appends the given text as a new w:t element at the end of the run.
public void AppendText(String value)
{
    SetText(value, run.GetTList().Count);
}

/**
 * Sets the text of this text run at the given position in the text array.
 *
 * @param value the literal text which shall be displayed in the document
 * @param pos - position in the text array (NB: 0 based); pos == length appends a new element
 */
public void SetText(String value, int pos)
{
    int length = run.SizeOfTArray();
    if (pos > length)
        throw new IndexOutOfRangeException("Value too large for the parameter position");
    // In-range index replaces an existing w:t; pos == length (or a negative pos)
    // adds a fresh element instead.
    CT_Text t = (pos < length && pos >= 0) ? run.GetTArray(pos): run.AddNewT();
    t.Value = (value);
    // Preserve leading/trailing whitespace (xml:space="preserve").
    preserveSpaces(t);
}

/**
 * Whether the italic property should be applied to all non-complex script
 * characters in the contents of this run when displayed in a document.
 *
 * @return <code>true</code> if the italic property is applied
 */
public bool IsItalic
{
    get
    {
        CT_RPr pr = run.rPr;
        if (pr == null || !pr.IsSetI())
            return false;
        return IsCTOnOff(pr.i);
    }
    set
    {
        CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr();
        CT_OnOff italic = pr.IsSetI() ? pr.i : pr.AddNewI();
        italic.val = value;
    }
}

/**
 * Specifies that the contents of this run should be displayed along with an
 * underline appearing directly below the character height.
 *
 * @return the Underline pattern applied to this run
 * @see UnderlinePatterns
 */
public UnderlinePatterns Underline
{
    get
    {
        CT_RPr pr = run.rPr;
        return (pr != null && pr.IsSetU() && pr.u.val != null) ?
EnumConverter.ValueOf<UnderlinePatterns, ST_Underline>(pr.u.val) : UnderlinePatterns.None; } } internal void InsertText(CT_Text text, int textIndex) { run.GetTList().Insert(textIndex, text); } /// <summary> /// insert text at start index in the run /// </summary> /// <param name="text">insert text</param> /// <param name="startIndex">start index of the insertion in the run text</param> public void InsertText(string text, int startIndex) { List<CT_Text> texts = run.GetTList(); int endPos = 0; int startPos = 0; for (int i = 0; i < texts.Count; i++) { startPos = endPos; endPos += texts[i].Value.Length; if (endPos > startIndex) { texts[i].Value = texts[i].Value.Insert(startIndex - startPos, text); break; } } } public string Text { get { StringBuilder text = new StringBuilder(); for (int i = 0; i < run.Items.Count; i++) { object o = run.Items[i]; if (o is CT_Text) { if (!(run.ItemsElementName[i] == RunItemsChoiceType.instrText)) { text.Append(((CT_Text)o).Value); } } // Complex type evaluation (currently only for extraction of check boxes) if (o is CT_FldChar) { CT_FldChar ctfldChar = ((CT_FldChar)o); if (ctfldChar.fldCharType == ST_FldCharType.begin) { if (ctfldChar.ffData != null) { foreach (CT_FFCheckBox checkBox in ctfldChar.ffData.GetCheckBoxList()) { if (checkBox.@default.val == true) { text.Append("|X|"); } else { text.Append("|_|"); } } } } } if (o is CT_PTab) { text.Append("\t"); } if (o is CT_Br) { text.Append("\n"); } if (o is CT_Empty) { // Some inline text elements Get returned not as // themselves, but as CTEmpty, owing to some odd // defInitions around line 5642 of the XSDs // This bit works around it, and replicates the above // rules for that case if (run.ItemsElementName[i] == RunItemsChoiceType.tab) { text.Append("\t"); } if (run.ItemsElementName[i] == RunItemsChoiceType.br) { text.Append("\n"); } if (run.ItemsElementName[i] == RunItemsChoiceType.cr) { text.Append("\n"); } } if (o is CT_FtnEdnRef) { CT_FtnEdnRef ftn = (CT_FtnEdnRef)o; String 
footnoteRef = ftn.DomNode.LocalName.Equals("footnoteReference") ? "[footnoteRef:" + ftn.id + "]" : "[endnoteRef:" + ftn.id + "]"; text.Append(footnoteRef); } } // Any picture text? if (pictureText != null && pictureText.Length > 0) { text.Append("\n").Append(pictureText); } return text.ToString(); } } /** * Specifies that the contents of this run.should be displayed along with an * underline appearing directly below the character heigh * If this element is not present, the default value is to leave the * formatting applied at previous level in the style hierarchy. If this * element is never applied in the style hierarchy, then an underline shall * not be applied to the contents of this run. * * @param value - * underline type * @see UnderlinePatterns : all possible patterns that could be applied */ public void SetUnderline(UnderlinePatterns value) { CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr(); CT_Underline underline = (pr.u == null) ? pr.AddNewU() : pr.u; underline.val = EnumConverter.ValueOf<ST_Underline, UnderlinePatterns>(value); } /** * Specifies that the contents of this run.shall be displayed with a single * horizontal line through the center of the line. * * @return <code>true</code> if the strike property is applied */ public bool IsStrikeThrough { get { CT_RPr pr = run.rPr; if (pr == null || !pr.IsSetStrike()) return false; return IsCTOnOff(pr.strike); } set { CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr(); CT_OnOff strike = pr.IsSetStrike() ? pr.strike : pr.AddNewStrike(); strike.val = value;//(value ? ST_OnOff.True : ST_OnOff.False); } } /** * Specifies that the contents of this run.shall be displayed with a single * horizontal line through the center of the line. * <p/> * This formatting property is a toggle property, which specifies that its * behavior differs between its use within a style defInition and its use as * direct formatting. 
When used as part of a style defInition, Setting this * property shall toggle the current state of that property as specified up * to this point in the hierarchy (i.e. applied to not applied, and vice * versa). Setting it to false (or an equivalent) shall result in the * current Setting remaining unChanged. However, when used as direct * formatting, Setting this property to true or false shall Set the absolute * state of the resulting property. * </p> * <p/> * If this element is not present, the default value is to leave the * formatting applied at previous level in the style hierarchy. If this * element is never applied in the style hierarchy, then strikethrough shall * not be applied to the contents of this run. * </p> * * @param value <code>true</code> if the strike property is applied to * this run */ [Obsolete] public bool IsStrike { get { return IsStrikeThrough; } set { IsStrikeThrough = value; } } /** * Specifies that the contents of this run shall be displayed with a double * horizontal line through the center of the line. * * @return <code>true</code> if the double strike property is applied */ public bool IsDoubleStrikeThrough { get { CT_RPr pr = run.rPr; if (pr == null || !pr.IsSetDstrike()) return false; return IsCTOnOff(pr.dstrike); } set { CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr(); CT_OnOff dstrike = pr.IsSetDstrike() ? pr.dstrike : pr.AddNewDstrike(); dstrike.val = value;//(value ? STOnOff.TRUE : STOnOff.FALSE); } } public bool IsSmallCaps { get { CT_RPr pr = run.rPr; if (pr == null || !pr.IsSetSmallCaps()) return false; return IsCTOnOff(pr.smallCaps); } set { CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr(); CT_OnOff caps = pr.IsSetSmallCaps() ? pr.smallCaps : pr.AddNewSmallCaps(); caps.val = value;//(value ? ST_OnOff.True : ST_OnOff.False); } } public bool IsCapitalized { get { CT_RPr pr = run.rPr; if (pr == null || !pr.IsSetCaps()) return false; return IsCTOnOff(pr.caps); } set { CT_RPr pr = run.IsSetRPr() ? 
run.rPr : run.AddNewRPr(); CT_OnOff caps = pr.IsSetCaps() ? pr.caps : pr.AddNewCaps(); caps.val = value;//(value ? ST_OnOff.True : ST_OnOff.False); } } public bool IsShadowed { get { CT_RPr pr = run.rPr; if (pr == null || !pr.IsSetShadow()) return false; return IsCTOnOff(pr.shadow); } set { CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr(); CT_OnOff shadow = pr.IsSetShadow() ? pr.shadow : pr.AddNewShadow(); shadow.val = value;//(value ? ST_OnOff.True : ST_OnOff.False); } } public bool IsImprinted { get { CT_RPr pr = run.rPr; if (pr == null || !pr.IsSetImprint()) return false; return IsCTOnOff(pr.imprint); } set { CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr(); CT_OnOff imprinted = pr.IsSetImprint() ? pr.imprint : pr.AddNewImprint(); imprinted.val = value;//(value ? ST_OnOff.True : ST_OnOff.False); } } public bool IsEmbossed { get { CT_RPr pr = run.rPr; if (pr == null || !pr.IsSetEmboss()) return false; return IsCTOnOff(pr.emboss); } set { CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr(); CT_OnOff emboss = pr.IsSetEmboss() ? pr.emboss : pr.AddNewEmboss(); emboss.val = value;//(value ? ST_OnOff.True : ST_OnOff.False); } } [Obsolete] public void SetStrike(bool value) { CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr(); CT_OnOff strike = pr.IsSetStrike() ? pr.strike : pr.AddNewStrike(); strike.val = value; } /** * Specifies the alignment which shall be applied to the contents of this * run.in relation to the default appearance of the run.s text. * This allows the text to be repositioned as subscript or superscript without * altering the font size of the run.properties. * * @return VerticalAlign * @see VerticalAlign all possible value that could be Applyed to this run */ public VerticalAlign Subscript { get { CT_RPr pr = run.rPr; return (pr != null && pr.IsSetVertAlign()) ? EnumConverter.ValueOf<VerticalAlign, ST_VerticalAlignRun>(pr.vertAlign.val) : VerticalAlign.BASELINE; } set { CT_RPr pr = run.IsSetRPr() ? 
run.rPr : run.AddNewRPr(); CT_VerticalAlignRun ctValign = pr.IsSetVertAlign() ? pr.vertAlign : pr.AddNewVertAlign(); ctValign.val = EnumConverter.ValueOf<ST_VerticalAlignRun, VerticalAlign>(value); } } public int Kerning { get { CT_RPr pr = run.rPr; if (pr == null || !pr.IsSetKern()) return 0; return (int)pr.kern.val; } set { CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr(); CT_HpsMeasure kernmes = pr.IsSetKern() ? pr.kern : pr.AddNewKern(); kernmes.val = (ulong)value; } } public int CharacterSpacing { get { CT_RPr pr = run.rPr; if (pr == null || !pr.IsSetSpacing()) return 0; return int.Parse(pr.spacing.val); } set { CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr(); CT_SignedTwipsMeasure spc = pr.IsSetSpacing() ? pr.spacing : pr.AddNewSpacing(); spc.val = value.ToString(); } } /** * Specifies the fonts which shall be used to display the text contents of * this run. Specifies a font which shall be used to format all characters * in the ASCII range (0 - 127) within the parent run * * @return a string representing the font family */ public String FontFamily { get { return GetFontFamily(FontCharRange.None); } set { SetFontFamily(value, FontCharRange.None); } } public string FontName { get { return FontFamily; } } /** * Gets the font family for the specified font char range. * If fcr is null, the font char range "ascii" is used * * @param fcr the font char range, defaults to "ansi" * @return a string representing the font famil */ public String GetFontFamily(FontCharRange fcr) { CT_RPr pr = run.rPr; if (pr == null || !pr.IsSetRFonts()) return null; CT_Fonts fonts = pr.rFonts; switch (fcr == FontCharRange.None ? FontCharRange.Ascii : fcr) { default: case FontCharRange.Ascii: return fonts.ascii; case FontCharRange.CS: return fonts.cs; case FontCharRange.EastAsia: return fonts.eastAsia; case FontCharRange.HAnsi: return fonts.hAnsi; } } /** * Specifies the fonts which shall be used to display the text contents of * this run. 
The default handling for fcr == null is to overwrite the
         * ascii font char range with the given font family and also set all not
         * specified font ranges
         *
         * @param fontFamily the font family to apply
         * @param fcr FontCharRange or null for default handling
         */
        public void SetFontFamily(String fontFamily, FontCharRange fcr)
        {
            CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr();
            CT_Fonts fonts = pr.IsSetRFonts() ? pr.rFonts : pr.AddNewRFonts();

            if (fcr == FontCharRange.None)
            {
                // Default handling: always overwrite the ascii range, and fill in
                // any of the other ranges that are not already explicitly set.
                fonts.ascii = (fontFamily);
                if (!fonts.IsSetHAnsi())
                {
                    fonts.hAnsi = (fontFamily);
                }
                if (!fonts.IsSetCs())
                {
                    fonts.cs = (fontFamily);
                }
                if (!fonts.IsSetEastAsia())
                {
                    fonts.eastAsia = (fontFamily);
                }
            }
            else
            {
                // An explicit char range was requested: set only that one range.
                switch (fcr)
                {
                    case FontCharRange.Ascii:
                        fonts.ascii = (fontFamily);
                        break;
                    case FontCharRange.CS:
                        fonts.cs = (fontFamily);
                        break;
                    case FontCharRange.EastAsia:
                        fonts.eastAsia = (fontFamily);
                        break;
                    case FontCharRange.HAnsi:
                        fonts.hAnsi = (fontFamily);
                        break;
                }
            }
        }

        /**
         * Specifies the font size which shall be applied to all non complex script
         * characters in the contents of this run when displayed.
         *
         * @return value representing the font size, or -1 when no size is set
         */
        public int FontSize
        {
            get
            {
                CT_RPr pr = run.rPr;
                // The stored sz value is twice the exposed size (see the *2 in the
                // setter), hence the division by 2 here.
                return (pr != null && pr.IsSetSz()) ? (int)pr.sz.val / 2 : -1;
            }
            set
            {
                CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr();
                CT_HpsMeasure ctSize = pr.IsSetSz() ? pr.sz : pr.AddNewSz();
                // Store double the exposed value (symmetric with the getter's /2).
                ctSize.val = (ulong)value * 2;
            }
        }

        /**
         * This element specifies the amount by which text shall be raised or
         * lowered for this run in relation to the default baseline of the
         * surrounding non-positioned text. This allows the text to be repositioned
         * without altering the font size of the contents.
         *
         * @return the stored position value parsed as an int, or -1 when not set
         */
        public int GetTextPosition()
        {
            CT_RPr pr = run.rPr;
            // NOTE(review): int.Parse will throw on a malformed stored value —
            // presumably the value is always a well-formed integer; confirm.
            return (pr != null && pr.IsSetPosition()) ?
                int.Parse(pr.position.val) : -1;
        }

        /**
         * This element specifies the amount by which text shall be raised or
         * lowered for this run in relation to the default baseline of the
         * surrounding non-positioned text. This allows the text to be repositioned
         * without altering the font size of the contents.
         *
         * If the val attribute is positive, then the parent run shall be raised
         * above the baseline of the surrounding text by the specified number of
         * half-points. If the val attribute is negative, then the parent run shall
         * be lowered below the baseline of the surrounding text by the specified
         * number of half-points.
         *
         * If this element is not present, the default value is to leave the
         * formatting applied at previous level in the style hierarchy. If this
         * element is never applied in the style hierarchy, then the text shall not
         * be raised or lowered relative to the default baseline location for the
         * contents of this run.
         */
        public void SetTextPosition(int val)
        {
            CT_RPr pr = run.IsSetRPr() ? run.rPr : run.AddNewRPr();
            CT_SignedHpsMeasure position = pr.IsSetPosition() ? pr.position : pr.AddNewPosition();
            // The value is stored as its string representation.
            position.val = (val.ToString());
        }

        /**
         * Not yet implemented.
         */
        public void RemoveBreak()
        {
            // TODO
        }

        /**
         * Specifies that a break shall be placed at the current location in the run
         * content.
         * A break is a special character which is used to override the
         * normal line breaking that would be performed based on the normal layout
         * of the document's contents.
         * @see #AddCarriageReturn()
         */
        public void AddBreak()
        {
            run.AddNewBr();
        }

        /**
         * Specifies that a break shall be placed at the current location in the run
         * content.
         * A break is a special character which is used to override the
         * normal line breaking that would be performed based on the normal layout
         * of the document's contents.
         * <p>
         * The behavior of this break character (the
         * location where text shall be restarted After this break) shall be
         * determined by its type values.
         * </p>
         * @see BreakType
         */
        public void AddBreak(BreakType type)
        {
            CT_Br br = run.AddNewBr();
            br.type = EnumConverter.ValueOf<ST_BrType, BreakType>(type);
        }

        /**
         * Specifies that a break shall be placed at the current location in the run
         * content. A break is a special character which is used to override the
         * normal line breaking that would be performed based on the normal layout
         * of the document's contents.
         * <p>
         * The behavior of this break character (the
         * location where text shall be restarted After this break) shall be
         * determined by its type (in this case is BreakType.TEXT_WRAPPING as default) and clear attribute values.
         * </p>
         * @see BreakClear
         */
        public void AddBreak(BreakClear Clear)
        {
            CT_Br br = run.AddNewBr();
            // The break type is fixed to text-wrapping; only the clear value varies.
            br.type = EnumConverter.ValueOf<ST_BrType, BreakType>(BreakType.TEXTWRAPPING);
            br.clear = EnumConverter.ValueOf<ST_BrClear, BreakClear>(Clear);
        }

        /**
         * Specifies that a tab shall be placed at the current location in
         * the run content.
         */
        public void AddTab()
        {
            run.AddNewTab();
        }

        /**
         * Not yet implemented.
         */
        public void RemoveTab()
        {
            //TODO
        }

        /**
         * Specifies that a carriage return shall be placed at the
         * current location in the run content.
         * A carriage return is used to end the current line of text in
         * WordProcess.
         * The behavior of a carriage return in run content shall be
         * identical to a break character with null type and clear attributes, which
         * shall end the current line and find the next available line on which to
         * continue.
         * The carriage return character forced the following text to be
         * restarted on the next available line in the document.
         */
        public void AddCarriageReturn()
        {
            run.AddNewCr();
        }

        /**
         * Not yet implemented; always throws.
         */
        public void RemoveCarriageReturn(int i)
        {
            throw new NotImplementedException();
        }

        /**
         * Adds a picture to the run. This method handles
         * attaching the picture data to the overall file.
         *
         * @see NPOI.XWPF.UserModel.Document#PICTURE_TYPE_EMF
         * @see NPOI.XWPF.UserModel.Document#PICTURE_TYPE_WMF
         * @see NPOI.XWPF.UserModel.Document#PICTURE_TYPE_PICT
         * @see NPOI.XWPF.UserModel.Document#PICTURE_TYPE_JPEG
         * @see NPOI.XWPF.UserModel.Document#PICTURE_TYPE_PNG
         * @see NPOI.XWPF.UserModel.Document#PICTURE_TYPE_DIB
         *
         * @param pictureData The raw picture data
         * @param pictureType The type of the picture, eg {@link Document#PICTURE_TYPE_JPEG}
         * @param filename name recorded in the drawing's descr attributes
         * @param width width in EMUs. To convert to / from points use {@link org.apache.poi.util.Units}
         * @param height height in EMUs. To convert to / from points use {@link org.apache.poi.util.Units}
         * @throws NPOI.Openxml4j.exceptions.InvalidFormatException
         * @throws IOException
         */
        public XWPFPicture AddPicture(Stream pictureData, int pictureType, String filename, int width, int height)
        {
            XWPFDocument doc = parent.Document;

            // Add the picture + relationship
            String relationId = doc.AddPictureData(pictureData, pictureType);
            XWPFPictureData picData = (XWPFPictureData)doc.GetRelationById(relationId);

            // Create the Drawing entry for it
            CT_Drawing Drawing = run.AddNewDrawing();
            CT_Inline inline = Drawing.AddNewInline();

            // Do the fiddly namespace bits on the inline
            // (We need full control of what goes where and as what)
            //CT_GraphicalObject tmp = new CT_GraphicalObject();
            //String xml =
            //    "<a:graphic xmlns:a=\"" + "http://schemas.openxmlformats.org/drawingml/2006/main" + "\">" +
            //    "<a:graphicData uri=\"" + "http://schemas.openxmlformats.org/drawingml/2006/picture" + "\">" +
            //    "<pic:pic xmlns:pic=\"" + "http://schemas.openxmlformats.org/drawingml/2006/picture" + "\" />" +
            //    "</a:graphicData>" +
            //    "</a:graphic>";
            //inline.Set((xml));
            XmlDocument xmlDoc = new XmlDocument();
            //XmlElement el = xmlDoc.CreateElement("pic", "pic", "http://schemas.openxmlformats.org/drawingml/2006/picture");
            inline.graphic = new CT_GraphicalObject();
            inline.graphic.graphicData = new CT_GraphicalObjectData();
            inline.graphic.graphicData.uri = "http://schemas.openxmlformats.org/drawingml/2006/picture";

            // Setup the inline
            inline.distT = (0);
            inline.distR = (0);
            inline.distB = (0);
            inline.distL = (0);

            NPOI.OpenXmlFormats.Dml.WordProcessing.CT_NonVisualDrawingProps docPr = inline.AddNewDocPr();
            long id = parent.Document.DrawingIdManager.ReserveNew();
            docPr.id = (uint)(id);
            /* This name is not visible in Word 2010 anywhere. */
            docPr.name = ("Drawing " + id);
            docPr.descr = (filename);

            NPOI.OpenXmlFormats.Dml.WordProcessing.CT_PositiveSize2D extent = inline.AddNewExtent();
            extent.cx = (width);
            extent.cy = (height);

            // Grab the picture object
            NPOI.OpenXmlFormats.Dml.Picture.CT_Picture pic = new OpenXmlFormats.Dml.Picture.CT_Picture();

            // Set it up
            NPOI.OpenXmlFormats.Dml.Picture.CT_PictureNonVisual nvPicPr = pic.AddNewNvPicPr();

            NPOI.OpenXmlFormats.Dml.CT_NonVisualDrawingProps cNvPr = nvPicPr.AddNewCNvPr();
            /* use "0" for the id. See ECM-576, 20.2.2.3 */
            cNvPr.id = (0);
            /* This name is not visible in Word 2010 anywhere */
            cNvPr.name = ("Picture " + id);
            cNvPr.descr = (filename);

            CT_NonVisualPictureProperties cNvPicPr = nvPicPr.AddNewCNvPicPr();
            cNvPicPr.AddNewPicLocks().noChangeAspect = true;

            CT_BlipFillProperties blipFill = pic.AddNewBlipFill();
            CT_Blip blip = blipFill.AddNewBlip();
            // Point the blip at the picture-part relationship created above.
            blip.embed = (picData.GetPackageRelationship().Id);
            blipFill.AddNewStretch().AddNewFillRect();

            CT_ShapeProperties spPr = pic.AddNewSpPr();
            CT_Transform2D xfrm = spPr.AddNewXfrm();

            CT_Point2D off = xfrm.AddNewOff();
            off.x = (0);
            off.y = (0);

            NPOI.OpenXmlFormats.Dml.CT_PositiveSize2D ext = xfrm.AddNewExt();
            ext.cx = (width);
            ext.cy = (height);

            CT_PresetGeometry2D prstGeom = spPr.AddNewPrstGeom();
            prstGeom.prst = (ST_ShapeType.rect);
            prstGeom.AddNewAvLst();

            // Serialize the picture element to XML text and splice it into the
            // graphicData container.
            using (var ms = new MemoryStream())
            {
                StreamWriter sw = new StreamWriter(ms);
                pic.Write(sw, "pic:pic");
                sw.Flush();
                ms.Position = 0;
                var sr = new StreamReader(ms);
                var picXml = sr.ReadToEnd();
                inline.graphic.graphicData.AddPicElement(picXml);
            }

            // Finish up
            XWPFPicture xwpfPicture = new XWPFPicture(pic, this);
            pictures.Add(xwpfPicture);
            return xwpfPicture;
        }

        /**
         * Returns the embedded pictures of the run. These
         * are pictures which reference an external,
         * embedded picture image such as a .png or .jpg
         */
        public List<XWPFPicture> GetEmbeddedPictures()
        {
            return pictures;
        }

        /**
         * Add the xml:spaces="preserve" attribute if the string has leading or trailing white spaces
         *
         * @param xs the string to check
         */
        static void preserveSpaces(CT_Text xs)
        {
            String text = xs.Value;
            if (text != null && (text.StartsWith(" ") || text.EndsWith(" ")))
            {
                // XmlCursor c = xs.NewCursor();
                // c.ToNextToken();
                // c.InsertAttributeWithValue(new QName("http://www.w3.org/XML/1998/namespace", "space"), "preserve");
                // c.Dispose();
                xs.space = "preserve";
            }
        }

        /**
         * Returns the string version of the text, with tabs and
         * carriage returns in place of their xml equivalents.
         */
        public override String ToString()
        {
            return Text;
        }
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

using gaxgrpc = Google.Api.Gax.Grpc;
using lro = Google.LongRunning;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;

namespace Google.Cloud.Compute.V1.Tests
{
    /// <summary>Generated unit tests.</summary>
    public sealed class GeneratedRegionHealthCheckServicesClientTest
    {
        /// <summary>Verifies the request-object overload of Get against a strict mock.</summary>
        [xunit::FactAttribute]
        public void GetRequestObject()
        {
            // Arrange: strict mock returns a canned response only for this exact request.
            moq::Mock<RegionHealthCheckServices.RegionHealthCheckServicesClient> mockGrpcClient = new moq::Mock<RegionHealthCheckServices.RegionHealthCheckServicesClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetRegionHealthCheckServiceRequest request = new GetRegionHealthCheckServiceRequest
            {
                Region = "regionedb20d96",
                Project = "projectaa6ff846",
                HealthCheckService = "health_check_servicecdc3a03b",
            };
            HealthCheckService expectedResponse = new HealthCheckService
            {
                Id = 11672635353343658936UL,
                Kind = "kindf7aa39d9",
                Name = "name1c9368b0",
                NetworkEndpointGroups = { "network_endpoint_groupsb3b03e25", },
                CreationTimestamp = "creation_timestamp235e59a1",
                Region = "regionedb20d96",
                Fingerprint = "fingerprint009e6052",
                HealthStatusAggregationPolicy = HealthCheckService.Types.HealthStatusAggregationPolicy.UndefinedHealthStatusAggregationPolicy,
                NotificationEndpoints = { "notification_endpoints507ebc27", },
                Description = "description2cf9da67",
                HealthChecks = { "health_checksedb1f3f8", },
                SelfLink = "self_link7e87f12d",
            };
            mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            RegionHealthCheckServicesClient client = new RegionHealthCheckServicesClientImpl(mockGrpcClient.Object, null);
            // Act
            HealthCheckService response = client.Get(request);
            // Assert: the client must pass the mock's response through unchanged.
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        /// <summary>Verifies the request-object overload of GetAsync (both CallSettings and CancellationToken forms).</summary>
        [xunit::FactAttribute]
        public async stt::Task GetRequestObjectAsync()
        {
            // Arrange
            moq::Mock<RegionHealthCheckServices.RegionHealthCheckServicesClient> mockGrpcClient = new moq::Mock<RegionHealthCheckServices.RegionHealthCheckServicesClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetRegionHealthCheckServiceRequest request = new GetRegionHealthCheckServiceRequest
            {
                Region = "regionedb20d96",
                Project = "projectaa6ff846",
                HealthCheckService = "health_check_servicecdc3a03b",
            };
            HealthCheckService expectedResponse = new HealthCheckService
            {
                Id = 11672635353343658936UL,
                Kind = "kindf7aa39d9",
                Name = "name1c9368b0",
                NetworkEndpointGroups = { "network_endpoint_groupsb3b03e25", },
                CreationTimestamp = "creation_timestamp235e59a1",
                Region = "regionedb20d96",
                Fingerprint = "fingerprint009e6052",
                HealthStatusAggregationPolicy = HealthCheckService.Types.HealthStatusAggregationPolicy.UndefinedHealthStatusAggregationPolicy,
                NotificationEndpoints = { "notification_endpoints507ebc27", },
                Description = "description2cf9da67",
                HealthChecks = { "health_checksedb1f3f8", },
                SelfLink = "self_link7e87f12d",
            };
            mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<HealthCheckService>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            RegionHealthCheckServicesClient client = new RegionHealthCheckServicesClientImpl(mockGrpcClient.Object, null);
            // Act + Assert: exercise both async overload shapes.
            HealthCheckService responseCallSettings = await client.GetAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            HealthCheckService responseCancellationToken = await client.GetAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        /// <summary>Verifies the flattened (project, region, healthCheckService) overload of Get.</summary>
        [xunit::FactAttribute]
        public void Get()
        {
            // Arrange
            moq::Mock<RegionHealthCheckServices.RegionHealthCheckServicesClient> mockGrpcClient = new moq::Mock<RegionHealthCheckServices.RegionHealthCheckServicesClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetRegionHealthCheckServiceRequest request = new GetRegionHealthCheckServiceRequest
            {
                Region = "regionedb20d96",
                Project = "projectaa6ff846",
                HealthCheckService = "health_check_servicecdc3a03b",
            };
            HealthCheckService expectedResponse = new HealthCheckService
            {
                Id = 11672635353343658936UL,
                Kind = "kindf7aa39d9",
                Name = "name1c9368b0",
                NetworkEndpointGroups = { "network_endpoint_groupsb3b03e25", },
                CreationTimestamp = "creation_timestamp235e59a1",
                Region = "regionedb20d96",
                Fingerprint = "fingerprint009e6052",
                HealthStatusAggregationPolicy = HealthCheckService.Types.HealthStatusAggregationPolicy.UndefinedHealthStatusAggregationPolicy,
                NotificationEndpoints = { "notification_endpoints507ebc27", },
                Description = "description2cf9da67",
                HealthChecks = { "health_checksedb1f3f8", },
                SelfLink = "self_link7e87f12d",
            };
            mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            RegionHealthCheckServicesClient client = new RegionHealthCheckServicesClientImpl(mockGrpcClient.Object, null);
            // Act: the flattened args must be assembled into an equivalent request.
            HealthCheckService response = client.Get(request.Project, request.Region, request.HealthCheckService);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        /// <summary>Verifies the flattened overload of GetAsync (both CallSettings and CancellationToken forms).</summary>
        [xunit::FactAttribute]
        public async stt::Task GetAsync()
        {
            // Arrange
            moq::Mock<RegionHealthCheckServices.RegionHealthCheckServicesClient> mockGrpcClient = new moq::Mock<RegionHealthCheckServices.RegionHealthCheckServicesClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetRegionHealthCheckServiceRequest request = new GetRegionHealthCheckServiceRequest
            {
                Region = "regionedb20d96",
                Project = "projectaa6ff846",
                HealthCheckService = "health_check_servicecdc3a03b",
            };
            HealthCheckService expectedResponse = new HealthCheckService
            {
                Id = 11672635353343658936UL,
                Kind = "kindf7aa39d9",
                Name = "name1c9368b0",
                NetworkEndpointGroups = { "network_endpoint_groupsb3b03e25", },
                CreationTimestamp = "creation_timestamp235e59a1",
                Region = "regionedb20d96",
                Fingerprint = "fingerprint009e6052",
                HealthStatusAggregationPolicy = HealthCheckService.Types.HealthStatusAggregationPolicy.UndefinedHealthStatusAggregationPolicy,
                NotificationEndpoints = { "notification_endpoints507ebc27", },
                Description = "description2cf9da67",
                HealthChecks = { "health_checksedb1f3f8", },
                SelfLink = "self_link7e87f12d",
            };
            mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<HealthCheckService>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            RegionHealthCheckServicesClient client = new RegionHealthCheckServicesClientImpl(mockGrpcClient.Object, null);
            // Act + Assert: exercise both async overload shapes.
            HealthCheckService responseCallSettings = await client.GetAsync(request.Project, request.Region, request.HealthCheckService, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            HealthCheckService responseCancellationToken = await client.GetAsync(request.Project, request.Region, request.HealthCheckService, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }
    }
}
//////////////////////////////////////////////////////////////////////////////////////
// Author               : Shukri Adams                                              //
// Contact              : shukri.adams@gmail.com                                    //
// Compiler requirement : .Net 4.0                                                  //
//                                                                                  //
// vcFramework : A reuseable library of utility classes                             //
// Copyright (C)                                                                    //
//                                                                                  //
// This program is free software; you can redistribute it and/or modify it under    //
// the terms of the GNU General Public License as published by the Free Software    //
// Foundation; either version 2 of the License, or (at your option) any later       //
// version.                                                                         //
//                                                                                  //
// This program is distributed in the hope that it will be useful, but WITHOUT ANY  //
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A  //
// PARTICULAR PURPOSE. See the GNU General Public License for more details.         //
//                                                                                  //
// You should have received a copy of the GNU General Public License along with     //
// this program; if not, write to the Free Software Foundation, Inc., 59 Temple     //
// Place, Suite 330, Boston, MA 02111-1307 USA                                      //
//////////////////////////////////////////////////////////////////////////////////////
using System;
using System.Collections;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.IO;
using System.Reflection;
using System.Windows.Forms;
using System.Xml;
using vcFramework;
using vcFramework.IO;
using vcFramework.Windows.Forms;
using vcFramework.Assemblies;
using vcFramework.DataItems;
using vcFramework.Delegates;

namespace vcFramework.UserControls
{
    /// <summary>
    /// Visual-studio style console: a list view of timestamped messages with
    /// optional file logging and clipboard/save/clear utility buttons.
    /// </summary>
    public class MessageConsole : UserControl, IStateManageable
    {
        #region Component Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.lvMessages = new vcFramework.Windows.Forms.ListViewSP();
            this.btnSave = new System.Windows.Forms.Button();
            this.btnCopy = new System.Windows.Forms.Button();
            this.btnLog = new System.Windows.Forms.Button();
            this.btnClearConsole = new System.Windows.Forms.Button();
            this.cbLogging = new System.Windows.Forms.CheckBox();
            this.pnlFunctionButtonHolder = new System.Windows.Forms.Panel();
            this.pnlFunctionButtonHolder.SuspendLayout();
            this.SuspendLayout();
            //
            // lvMessages
            //
            this.lvMessages.AllowKeyboardDeleteKeyDeletion = false;
            this.lvMessages.Dock = System.Windows.Forms.DockStyle.Fill;
            this.lvMessages.FocusNewItemOnInsert = true;
            this.lvMessages.FullRowSelect = true;
            this.lvMessages.HideSelection = false;
            this.lvMessages.InsertPosition = vcFramework.Windows.Forms.ListViewSP.InsertPositions.Top;
            this.lvMessages.Location = new System.Drawing.Point(0, 0);
            this.lvMessages.LockWidthOnZeroWidthColumns = false;
            this.lvMessages.Name = "lvMessages";
            this.lvMessages.Size = new System.Drawing.Size(432, 368);
            this.lvMessages.TabIndex = 0;
            this.lvMessages.View = System.Windows.Forms.View.Details;
            //
            // btnSave
            //
            this.btnSave.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
            this.btnSave.Location = new System.Drawing.Point(2, 2);
            this.btnSave.Name = "btnSave";
            this.btnSave.Size = new System.Drawing.Size(20, 20);
            this.btnSave.TabIndex = 1;
            this.btnSave.Text = "s";
            this.btnSave.Click += new System.EventHandler(this.btnSave_Click);
            //
            // btnCopy
            //
            this.btnCopy.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
            this.btnCopy.Location = new System.Drawing.Point(32, 2);
            this.btnCopy.Name = "btnCopy";
            this.btnCopy.Size = new System.Drawing.Size(20, 20);
            this.btnCopy.TabIndex = 2;
            this.btnCopy.Text = "c";
            this.btnCopy.Click += new System.EventHandler(this.btnCopy_Click);
            //
            // btnLog
            //
            this.btnLog.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
            this.btnLog.Location = new System.Drawing.Point(64, 2);
            this.btnLog.Name = "btnLog";
            this.btnLog.Size = new System.Drawing.Size(20, 20);
            this.btnLog.TabIndex = 3;
            this.btnLog.Text = "l";
            this.btnLog.Click += new System.EventHandler(this.btnLog_Click);
            //
            // btnClearConsole
            //
            this.btnClearConsole.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
            this.btnClearConsole.Location = new System.Drawing.Point(408, 2);
            this.btnClearConsole.Name = "btnClearConsole";
            this.btnClearConsole.Size = new System.Drawing.Size(20, 20);
            this.btnClearConsole.TabIndex = 4;
            this.btnClearConsole.Text = "e";
            this.btnClearConsole.Click += new System.EventHandler(this.ClearConsole);
            //
            // cbLogging
            //
            this.cbLogging.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right)));
            this.cbLogging.Location = new System.Drawing.Point(96, 6);
            this.cbLogging.Name = "cbLogging";
            this.cbLogging.Size = new System.Drawing.Size(304, 16);
            this.cbLogging.TabIndex = 5;
            this.cbLogging.Text = "Not logging";
            this.cbLogging.CheckedChanged += new System.EventHandler(this.cbLogging_CheckedChanged);
            //
            // pnlFunctionButtonHolder
            //
            this.pnlFunctionButtonHolder.Controls.Add(this.btnCopy);
            this.pnlFunctionButtonHolder.Controls.Add(this.btnLog);
            this.pnlFunctionButtonHolder.Controls.Add(this.btnClearConsole);
            this.pnlFunctionButtonHolder.Controls.Add(this.cbLogging);
            this.pnlFunctionButtonHolder.Controls.Add(this.btnSave);
            this.pnlFunctionButtonHolder.Dock = System.Windows.Forms.DockStyle.Bottom;
            this.pnlFunctionButtonHolder.Location = new System.Drawing.Point(0, 344);
            this.pnlFunctionButtonHolder.Name = "pnlFunctionButtonHolder";
            this.pnlFunctionButtonHolder.Size = new System.Drawing.Size(432, 24);
            this.pnlFunctionButtonHolder.TabIndex = 6;
            //
            // MessageConsole
            //
            this.Controls.Add(this.pnlFunctionButtonHolder);
            this.Controls.Add(this.lvMessages);
            this.Name = "MessageConsole";
            this.Size = new System.Drawing.Size(432, 368);
            this.Load += new System.EventHandler(this.Console_Load);
            this.pnlFunctionButtonHolder.ResumeLayout(false);
            this.ResumeLayout(false);
        }

        #endregion

        #region FIELDS

        private System.ComponentModel.Container components = null;
        private vcFramework.Windows.Forms.ListViewSP lvMessages;
        private System.Windows.Forms.Button btnSave;
        private System.Windows.Forms.Button btnCopy;
        private System.Windows.Forms.Button btnLog;
        private System.Windows.Forms.Button btnClearConsole;
        private System.Windows.Forms.CheckBox cbLogging;
        private System.Windows.Forms.Panel pnlFunctionButtonHolder;
        private System.Windows.Forms.ContextMenu mnuConsoleMenu;
        private System.Windows.Forms.MenuItem mnuClearConsole;
        private System.Windows.Forms.MenuItem mnuCopyConsole;

        /// <summary>
        /// Holds icons for listview
        /// </summary>
        private ImageList m_objImageList;

        /// <summary>
        /// Path of file to log to
        /// </summary>
        private string m_strLoggingFilePath;

        /// <summary>
        /// Logger
        /// </summary>
        private Logger m_objLogger;

        /// <summary>
        /// contains console state in xml form
        /// </summary>
        private XmlDocument m_dXmlConsoleState;

        /// <summary>
        /// Categories of console message.
        /// </summary>
        public enum MessageTypes : int
        {
            Information,
            Exception,
            Warning
        }

        /// <summary>
        /// Priorities a console message can carry.
        /// </summary>
        public enum MessagePriorities : int
        {
            Urgent,
            Important,
            Verbose
        }

        /// <summary>
        /// NOTE(review): appears unused — ShowButtons now drives panel visibility
        /// directly (see the commented-out code in that property); confirm before removing.
        /// </summary>
        public bool m_blnShowConsoleFunctionButtons;

        #endregion

        #region PROPERTIES

        /// <summary> Gets or sets if buttons at bottom of
        /// console will be displayed. These buttons are
        /// intended for advanced users and debugging,
        /// and by default are disabled.</summary>
        public bool ShowButtons
        {
            set
            {
                //m_blnShowConsoleFunctionButtons = value;
                //for (int i = 0 ; i < pnlFunctionButtonHolder.Controls.Count ; i ++)
                //    pnlFunctionButtonHolder.Controls[i].Visible = m_blnShowConsoleFunctionButtons;
                pnlFunctionButtonHolder.Visible = value;
            }
            get
            {
                //return m_blnShowConsoleFunctionButtons;
                return pnlFunctionButtonHolder.Visible;
            }
        }

        #endregion

        #region CONSTRUCTORS

        /// <summary>
        /// Builds the control and seeds an empty XML state document.
        /// </summary>
        public MessageConsole()
        {
            InitializeComponent();
            this.ShowButtons = false;

            // sets up empty Xml state document - actual
            // state data may be passed in sometime during
            // the lifetime of this instance of the console,
            // but if not, at least an empty state doc is
            // needed for the console to function normally.
            m_dXmlConsoleState = new XmlDocument();
            m_dXmlConsoleState.InnerXml =
                "<state>" +
                "<logging/>" +
                "<loggingPath/>" +
                "</state>";

            // set log path to blank. Log path can be set
            // in 1 of two ways - from state data passed in
            // as xml, or by a human setting the save path
            // from the log file path dialogue on the console.
            m_strLoggingFilePath = "";
        }

        #endregion

        #region DESTRUCTORS

        /// <summary>
        /// Disposes contained components along with the base control.
        /// </summary>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (components != null)
                {
                    components.Dispose();
                }
            }
            base.Dispose(disposing);
        }

        #endregion

        #region METHODS

        /// <summary>
        /// Adds a message to console
        /// </summary>
        /// <param name="strMessage"></param>
        public void Add(string strMessage)
        {
            StringItem[] arrNewItem = new StringItem[3];
            arrNewItem[0] = new StringItem();
            arrNewItem[1] = new StringItem();
            arrNewItem[2] = new StringItem();
            arrNewItem[0].Name = "icon";
            arrNewItem[0].Value = "";
            arrNewItem[1].Name = "time";
            arrNewItem[1].Value = System.DateTime.Now.ToShortTimeString();
            arrNewItem[2].Name = "message";
            arrNewItem[2].Value = strMessage;

            lvMessages.InsertRow(arrNewItem);
            lvMessages.SetRowIcon(0, 0);

            // use checkbox as indicator of if should log
            if (cbLogging.Checked)
                m_objLogger.WriteToLog(strMessage);
        }

        // Carries the message across the parameterless Invoke delegate below.
        string _threadsafemessageholder;

        /// <summary>
        /// Invoke target: forwards the held message to Add on the UI thread.
        /// </summary>
        private void AddThreadSafeInternal()
        {
            Add(_threadsafemessageholder);
        }

        /// <summary>
        /// Adds a message from a non-UI thread by marshalling onto the
        /// listview's thread via Invoke.
        /// NOTE(review): the single shared holder field means two threads calling
        /// this concurrently can overwrite each other's message before the Invoke
        /// runs — confirm callers serialize access, or pass the string through
        /// the Invoke call instead.
        /// </summary>
        public void AddThreadSafe(string strMessage)
        {
            _threadsafemessageholder = strMessage;
            lvMessages.Invoke(new WinFormActionDelegate(AddThreadSafeInternal));
        }

        /// <summary>
        /// Works as logging switch
        /// </summary>
        private void SetLoggingIndicator()
        {
            if (cbLogging.Checked)
            {
                // checks if logging is possible
                if (m_strLoggingFilePath.Length > 0)
                {
                    try
                    {
                        // create a new instance of logger if one has not already been created
                        if (m_objLogger == null)
                        {
                            m_objLogger = new Logger(
                                m_strLoggingFilePath,
                                10000000,   // approx 10 meg log file
                                false,      // do not create path if path doesn't exist
                                true        // resume logging on existing file if it exists
                                );
                        }

                        cbLogging.Text = "Logging to file";
                    }
                    catch
                    {
                        // if reach here, logging not possible
                        cbLogging.Text = "Logging failed";
                        cbLogging.Checked = false;
                    }
                }
                else
                {
                    // if reach here, no log file path has been specified
                    cbLogging.Text = "No log path ...";
                    cbLogging.Checked = false;
                }
            }
            else
            {
                if (m_objLogger != null)
                {
                    cbLogging.Text = "Logging stopped";
                    m_objLogger.Dispose();
                    m_objLogger = null;
                }
            }
        }

        /// <summary>
        /// Builds the right-click context menu (copy / clear) for the listview.
        /// </summary>
        private void BuildMenu()
        {
            mnuConsoleMenu = new ContextMenu();
            mnuClearConsole = new MenuItem();
            mnuCopyConsole = new MenuItem();

            mnuConsoleMenu.Popup += new EventHandler(ConsoleMenu_Popup);

            mnuCopyConsole.Text = "Copy console contents";
            mnuCopyConsole.Index = 0;
            mnuCopyConsole.Click += new EventHandler(this.CopyConsole);

            mnuClearConsole.Text = "Clear console";
            mnuClearConsole.Index = 1;
            mnuClearConsole.Click += new EventHandler(this.ClearConsole);

            mnuConsoleMenu.MenuItems.AddRange(
                new MenuItem[]{
                    mnuCopyConsole,
                    mnuClearConsole
                });

            lvMessages.ContextMenu = mnuConsoleMenu;
        }

        /// <summary>
        /// Gets state of console
        /// in Xml node.
        /// </summary>
        /// <returns></returns>
        public XmlNode GetState()
        {
            m_dXmlConsoleState.SelectSingleNode("//logging").InnerText = cbLogging.Checked.ToString();

            if (m_strLoggingFilePath != null)
                m_dXmlConsoleState.SelectSingleNode("//loggingPath").InnerText = m_strLoggingFilePath;

            return m_dXmlConsoleState.DocumentElement;
        }

        /// <summary>
        /// Sets state of console from
        /// Xml node.
        /// </summary>
        /// <param name="nXmlState"></param>
        public void SetState(XmlNode nXmlState)
        {
            m_dXmlConsoleState.InnerXml = nXmlState.OuterXml;

            // always set path _before_ changing the checkbox checked status
            if (m_dXmlConsoleState.SelectSingleNode("//loggingPath").InnerText.Length > 0)
                m_strLoggingFilePath = m_dXmlConsoleState.SelectSingleNode("//loggingPath").InnerText;

            // must be done LAST - changing check state triggers event handler
            // which in turn uses the path above to start logging
            if (m_dXmlConsoleState.SelectSingleNode("//logging").InnerText == "True"){cbLogging.Checked = true;}
        }

        #endregion

        #region EVENTS

        /// <summary>
        /// Most start logic for this control
        /// goes here.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void Console_Load(object sender, System.EventArgs e)
        {
            ToolTip objToolTip = null;
            AssemblyAccessor objAssemblyAccessor = null;

            // sets up column structure
            XmlDocument dXmlColumnStructure = new XmlDocument();
            dXmlColumnStructure.InnerXml =
                "<listview>" +
                "<column internalName='icon' width='20'/>" +
                "<column internalName='time' width='50'>Time</column>" +
                "<column internalName='message' width='100' widthBehaviour='Autofit'>Message</column>" +
                "</listview>";

            lvMessages.ColumnsSet(
                dXmlColumnStructure.DocumentElement.ChildNodes);

            // ########################################
            // populates imagelist
            // ----------------------------------------
            objAssemblyAccessor = new AssemblyAccessor(
                Assembly.GetAssembly(typeof(MessageConsole)));

            m_objImageList = new ImageList();
            m_objImageList.Images.Add(objAssemblyAccessor.GetBitmap(objAssemblyAccessor.RootName + ".UserControls.info.png"));
            m_objImageList.Images.Add(objAssemblyAccessor.GetBitmap(objAssemblyAccessor.RootName + ".UserControls.close.png"));
            lvMessages.SmallImageList = m_objImageList;

            // ########################################
            // sets listview properties
            // ----------------------------------------
            lvMessages.MultiSelect = false;
            lvMessages.FocusNewItemOnInsert = true;
            lvMessages.InsertPosition = ListViewSP.InsertPositions.Top;

            // ########################################
            // set button images
            // ----------------------------------------
            btnSave.Image = new Bitmap(objAssemblyAccessor.GetBitmap(objAssemblyAccessor.RootName + ".UserControls.filesave.png"), 12,12);
            btnCopy.Image = new Bitmap(objAssemblyAccessor.GetBitmap(objAssemblyAccessor.RootName + ".UserControls.editcopy.png"), 12,12);
            btnLog.Image = new Bitmap(objAssemblyAccessor.GetBitmap(objAssemblyAccessor.RootName + ".UserControls.filesaveas.png"), 12,12);
            btnClearConsole.Image = new Bitmap(objAssemblyAccessor.GetBitmap(objAssemblyAccessor.RootName + ".UserControls.trash.png"), 12,12);

            // creates tooltips for buttons
            objToolTip = new ToolTip();
            objToolTip.SetToolTip(btnSave, "Save current console contents to text file");

            objToolTip = new ToolTip();
            objToolTip.SetToolTip(btnCopy, "Copy current console contents to memory clipboard");

            objToolTip = new ToolTip();
            objToolTip.SetToolTip(btnLog, "Select a log file to write console message to");

            objToolTip = new ToolTip();
            objToolTip.SetToolTip(btnClearConsole, "Clear current console contents");

            // SetLoggingIndicator();

            // builds context menu
            BuildMenu();
        }

        /// <summary> Invoked when "save console
        /// contents to file" button is clicked
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void btnSave_Click(object sender, System.EventArgs e)
        {
            /*
            string strSavePath = "";
            string strListViewContents = "";

            strSavePath = FileSystemLib.GetFilePathFromSaveDialogue();
            strListViewContents = lvMessages.GetAllRowsAsText();

            if (Directory.Exists(FileSystemLib.GetFolderPath(strSavePath)))
                FileSystemLib.WriteToFile(strSavePath, strListViewContents, true);
            */
        }

        /// <summary> Copies all console contents
        /// to memory clipboard </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void btnCopy_Click(object sender, System.EventArgs e)
        {
            string strListViewContents = "";
            strListViewContents = lvMessages.GetAllRowsAsText();
            Clipboard.SetDataObject(strListViewContents, true);
        }

        /// <summary>
        /// sets log path after button press.
note that log path can also be set /// "behind the scenes" via console state resetting (done through a /// property) /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void btnLog_Click( object sender, System.EventArgs e ) { /* string strLogPath = ""; strLogPath = FileSystemLib.GetFilePathFromSaveDialogue(); if (Directory.Exists(FileSystemLib.GetFolderPath(strLogPath))) { m_strLoggingFilePath = strLogPath; } */ } /// <summary> /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void ConsoleMenu_Popup( object sender, EventArgs e ) { mnuCopyConsole.Enabled = true; mnuClearConsole.Enabled = true; if (lvMessages.Items.Count == 0) { mnuCopyConsole.Enabled = false; mnuClearConsole.Enabled = false; } } /// <summary> /// Clears console contents /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void ClearConsole( object sender, System.EventArgs e ) { if (lvMessages.Items.Count > 0 && PromptLib.DialoguePrompt("", "Remove all console messages?")) lvMessages.Items.Clear(); } /// <summary> /// Copies console contents to memory /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void CopyConsole( object sender, System.EventArgs e ) { string strListViewContents = ""; strListViewContents = lvMessages.GetAllRowsAsText(); Clipboard.SetDataObject( strListViewContents, true); } /// <summary> /// Invoked by checkbox change - note, can be triggered by someone clicking the /// checkbox, but also from "behind the scenes" by console state resetting (Done /// via a property) /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void cbLogging_CheckedChanged( object sender, System.EventArgs e ) { SetLoggingIndicator(); } #endregion } }
/*
 * MindTouch Core - open source enterprise collaborative networking
 * Copyright (c) 2006-2010 MindTouch Inc.
 * www.mindtouch.com oss@mindtouch.com
 *
 * For community documentation and downloads visit www.opengarden.org;
 * please review the licensing section.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 * http://www.gnu.org/copyleft/gpl.html
 */
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
using System.Security.Cryptography;

using MindTouch.Deki.Data;
using MindTouch.Dream;
using MindTouch.Web;
using MindTouch.Xml;

namespace MindTouch.Deki.Logic {

    /// <summary>
    /// Authentication business logic: resolves a request (credentials, auth token
    /// or anonymous) to a UserBE, and creates/validates auth tokens.
    /// </summary>
    public static class AuthBL {

        //--- Constants ---

        // Token shape: "<userid>_<18-digit-UTC-ticks>_<hash>" (see CreateAuthTokenForUser).
        private const string authTokenPattern = @"^(?<id>([\d])+)_(?<ts>([\d]){18})_(?<hash>.+)$";
        private static readonly Regex authTokenRegex = new Regex(authTokenPattern, RegexOptions.Singleline | RegexOptions.Compiled | RegexOptions.CultureInvariant);

        //--- Class Methods ---

        /// <summary>
        /// Resolves the current request to a user. Handles three cases (see inline
        /// comments): explicit credentials against a local or external auth service,
        /// a valid auth token, or the anonymous user when <paramref name="allowAnon"/>.
        /// Throws DreamAbortException (via LoginAccessDenied or directly) on any
        /// authentication failure — it never returns null for a failed login.
        /// </summary>
        /// <param name="context">Current Dream request context.</param>
        /// <param name="request">Incoming request message (headers used for credentials/origin).</param>
        /// <param name="serviceId">Auth service id to authenticate against; 0 to infer from the user.</param>
        /// <param name="autoCreateExternalUser">Allow creating a local record for a new external user.</param>
        /// <param name="allowAnon">Permit token- or anonymous-based resolution.</param>
        /// <param name="altPassword">Set when the user authenticated with the temporary ("new") password.</param>
        /// <returns>The resolved, active user.</returns>
        public static UserBE Authenticate(DreamContext context, DreamMessage request, uint serviceId, bool autoCreateExternalUser, bool allowAnon, out bool altPassword) {
            UserBE user = null;
            altPassword = false;

            // Case 1: username/fullname, password, provider (login window)
            //      1. Validate & retrieve fullname using credentials
            //          Failed -> return null
            //      2. Populate user object
            //          A. Populates user info
            //          B. Populates group info in user object
            //      3. Does fullname exist?
            //          Yes -> Update user (email, fullname, ...)
            //          No -> Create user
            //      4. Update Group information
            //      5. return user object
            //
            // Case 2: fullname, password (http, api, ...)
            //      1. Lookup full name, exist?
            //          Yes -> return user
            //          No -> return null
            //
            // Case 3: auth-token (header auth)
            //      0. Valid auth token?
            //          No -> return null
            //      1. Lookup user by name
            //          Found -> return user
            //          Else -> return null

            string userName = null;
            string password = null;
            UserBE userFromToken = null;
            ServiceBE authService = null;

            // get username and password from request only if not allowing anonymous requests -- (GET/POST: users/authenticate only)
            if(allowAnon) {

                // TODO (steveb): token should NOT take preference over explicit authentication information
                userFromToken = UserFromAuthTokenInRequest(context);
            }
            if(userFromToken == null) {
                HttpUtil.GetAuthentication(context.Uri.ToUri(), request.Headers, out userName, out password);
            }

            // check if we need to retrieve authentication service information
            if(serviceId > 0) {
                authService = ServiceBL.GetServiceById(serviceId);
                if(authService == null) {
                    throw new DreamAbortException(DreamMessage.BadRequest(string.Format(DekiResources.INVALID_SERVICE_ID, serviceId)));
                }
                if(authService.Type != ServiceType.AUTH) {
                    throw new DreamAbortException(DreamMessage.BadRequest(string.Format(DekiResources.NOT_AUTH_SERVICE, serviceId)));
                }
            }

            // check if a username was provided
            if(!string.IsNullOrEmpty(userName)) {

                //Case 2: Given username + password
                if(authService == null) {

                    //Assuming local user or existing external account
                    user = DbUtils.CurrentSession.Users_GetByName(userName);
                    if(user != null) {
                        serviceId = user.ServiceId;
                        authService = ServiceBL.GetServiceById(serviceId);
                    } else {
                        // throws: unknown user and no auth service to create one against
                        LoginAccessDenied(context, request, userName, null, password);
                    }
                }
                if(authService == null) {
                    throw new DreamAbortException(DreamMessage.BadRequest(string.Format(DekiResources.INVALID_SERVICE_ID, serviceId)));
                }
                if(authService.Type != ServiceType.AUTH) {
                    throw new DreamAbortException(DreamMessage.BadRequest(string.Format(DekiResources.NOT_AUTH_SERVICE, serviceId)));
                }
                if(user == null) {

                    //Performing auth on local account
                    if(ServiceBL.IsLocalAuthService(authService)) {
                        user = DbUtils.CurrentSession.Users_GetByName(userName);
                    } else {

                        //Performing external auth. Lookup by external user name
                        user = DbUtils.CurrentSession.Users_GetByExternalName(userName, authService.Id);
                    }

                    // user exists but belongs to a different auth service -> conflict
                    if(user != null && user.ServiceId != authService.Id) {
                        ServiceBE currentUsersAuthService = ServiceBL.GetServiceById(user.ServiceId);
                        if(currentUsersAuthService != null) {
                            throw new DreamAbortException(DreamMessage.Conflict(string.Format(DekiResources.LOGINEXTERNALUSERCONFLICT, currentUsersAuthService.Description)));
                        } else {
                            throw new DreamAbortException(DreamMessage.Conflict(DekiResources.LOGINEXTERNALUSERCONFLICTUNKNOWN));
                        }
                    }
                }

                //Local account in the db.
                if(user != null && ServiceBL.IsLocalAuthService(authService)) {

                    //Validate password for local account or validate the apikey
                    if(!IsValidAuthenticationForLocalUser(user, password, out altPassword)) {

                        // try impersonation using the ApiKey
                        if(string.IsNullOrEmpty(password) && PermissionsBL.ValidateRequestApiKey()) {
                            DekiContext.Current.Instance.Log.InfoFormat("user '{0}' authenticated via apikey impersonation", userName);
                        } else {
                            // throws
                            LoginAccessDenied(context, request, userName, user.ID, password);
                        }
                    }
                }

                // User was not found in the db and not being asked to create it.
                if(user == null && !autoCreateExternalUser) {
                    // throws
                    LoginAccessDenied(context, request, userName, null, password);
                }

                // Creating local account if apikey checks out and our authservice is local
                if(user == null && string.IsNullOrEmpty(password) && PermissionsBL.ValidateRequestApiKey() && ServiceBL.IsLocalAuthService(authService)) {
                    XDoc newUserDoc = new XDoc("user")
                        .Elem("username", userName);
                    DreamMessage newUserResponse = DekiContext.Current.ApiPlug.At("users")
                        .With("apikey", DreamContext.Current.GetParam("apikey", string.Empty))
                        .Post(newUserDoc);
                    user = UserBL.GetUserById(newUserResponse.ToDocument()["/user/@id"].AsUInt ?? 0);

                    // NOTE(review): this branch is only entered when password is empty
                    // (see the guard above), so the !IsNullOrEmpty(password) condition
                    // below can never be true — SetPassword appears unreachable here.
                    // Confirm intent before changing.
                    if(user != null && !string.IsNullOrEmpty(password)) {
                        user = UserBL.SetPassword(user, password, false);
                    }
                }

                // Got an external account
                // Passing in the user object from db if it was found.
                List<GroupBE> externalGroups = null;
                if(!ServiceBL.IsLocalAuthService(authService)) {
                    bool bypassAuthentication = false;
                    string externalName;
                    if(user == null || string.IsNullOrEmpty(user.ExternalName))
                        externalName = userName;
                    else
                        externalName = user.ExternalName;

                    // If apikey is valid, try to bypass auth with the external provider
                    // and only lookup user/group details.
                    if(string.IsNullOrEmpty(password) && PermissionsBL.ValidateRequestApiKey()) {
                        DekiContext.Current.Instance.Log.InfoFormat("user '{0}' authenticating being bypassed via apikey impersonation", userName);
                        bypassAuthentication = true;
                    }
                    user = ExternalServiceSA.BuildUserFromAuthService(authService, user, userName, bypassAuthentication, externalName, password, out externalGroups);
                }

                // User was not found or did not authenticate with external provider
                if(user == null) {
                    // throws
                    LoginAccessDenied(context, request, userName, null, password);
                } else {

                    //New user creation from external provider
                    if(user.ID == 0) {
                        if(!autoCreateExternalUser) {
                            // throws
                            LoginAccessDenied(context, request, userName, null, password);
                        }
                    } else {
                        //user exists
                    }
                    if(user.UserActive) {
                        user = UserBL.CreateOrUpdateUser(user);
                        if(externalGroups != null) {
                            UserBL.UpdateUsersGroups(user, externalGroups.ToArray());
                        }
                    }
                }
            } else if(userFromToken != null) {

                // valid token exists that resolved to a user
                user = userFromToken;
            } else if(allowAnon) {

                // Anonymous user
                user = DbUtils.CurrentSession.Users_GetByName(DekiWikiService.ANON_USERNAME);
            }

            if(user == null) {
                //No credentials. Or token not provided or is invalid.
                // throws
                LoginAccessDenied(context, request, null, null, password);
            } else if(!user.UserActive && !PermissionsBL.ValidateRequestApiKey()) {
                //If a valid api key is provided, override the disabled account flag
                throw new DreamAbortException(DreamMessage.Forbidden(string.Format(DekiResources.USER_DISABLED, user.Name)));
            }
            return user;
        }

        /// <summary>
        /// Produces the stored password hash: MD5 of (userId + "-" + MD5(password)).
        /// NOTE(review): plain MD5 is a weak password hash; kept as-is because stored
        /// hashes in the database depend on this exact scheme.
        /// </summary>
        /// <param name="user">User whose ID salts the outer hash.</param>
        /// <param name="pwd">Plaintext password.</param>
        /// <returns>Lowercase hex digest.</returns>
        public static string EncryptPassword(UserBE user, string pwd) {
            string md5Pwd = GetMD5(pwd);
            return GetMD5(user.ID + "-" + md5Pwd);
        }

        /// <summary>
        /// MD5 of the input as lowercase hex.
        /// NOTE(review): uses Encoding.Default, which varies with the machine's ANSI
        /// code page — hashes of non-ASCII input are not portable across machines.
        /// Changing it would invalidate existing hashes; confirm before touching.
        /// </summary>
        private static string GetMD5(string pass) {
            MD5 MD5 = MD5CryptoServiceProvider.Create();
            StringBuilder sb = new StringBuilder();
            foreach(byte ch in MD5.ComputeHash(Encoding.Default.GetBytes(pass))) {
                sb.AppendFormat("{0:x2}", ch);
            }
            return sb.ToString();
        }

        /// <summary>
        /// Get a user out of an authtoken from a request if it's valid.
        /// Checks, in order: the authtoken query parameter, the auth cookie, then the
        /// auth header. A non-admin user is reverted to anonymous (null) when the
        /// license is expired/invalid/inactive. A valid apikey may additionally
        /// impersonate another user via the impersonation query parameter.
        /// </summary>
        /// <returns>The token's user, an impersonated user, or null.</returns>
        private static UserBE UserFromAuthTokenInRequest(DreamContext context) {
            DreamMessage request = context.Request;
            string authToken = context.Uri.GetParam(DekiWikiService.AUTHTOKEN_URIPARAM, null);
            UserBE user = null;

            // Check if auth token is in a cookie
            if((authToken == null) && request.HasCookies) {
                DreamCookie authCookie = DreamCookie.GetCookie(request.Cookies, DekiWikiService.AUTHTOKEN_COOKIENAME);
                if((authCookie != null) && (!authCookie.Expired)) {
                    authToken = authCookie.Value;
                }
            }

            // Check if auth token is in a header or passed in as query parameter
            authToken = authToken ?? request.Headers[DekiWikiService.AUTHTOKEN_HEADERNAME];

            // Extract user name from auth token if it's valid
            if(authToken != null) {
                user = ValidateAuthToken(authToken);

                // check whether licensestate prevents user from being authenticated
                LicenseStateType licensestate = LicenseBL.LicenseState;
                if((licensestate == LicenseStateType.EXPIRED || licensestate == LicenseStateType.INVALID || licensestate == LicenseStateType.INACTIVE)
                    && !PermissionsBL.IsUserAllowed(user, Permissions.ADMIN) ) {
                    if(DekiContext.Current.Instance.Log.IsWarnEnabled) {
                        switch(licensestate) {
                        case LicenseStateType.EXPIRED:
                            DekiContext.Current.Instance.Log.WarnFormat("UserFromAuthTokenInRequest: Expired license {0}, reverting non-admin user to anonymous", LicenseBL.LicenseExpiration);
                            break;
                        case LicenseStateType.INVALID:
                            DekiContext.Current.Instance.Log.WarnFormat("UserFromAuthTokenInRequest: Invalid license, reverting non-admin user to anonymous");
                            break;
                        case LicenseStateType.INACTIVE:
                            DekiContext.Current.Instance.Log.WarnFormat("UserFromAuthTokenInRequest: Inactive license, reverting non-admin user to anonymous");
                            break;
                        }
                    }
                    user = null;
                } else {
                    DekiContext.Current.AuthToken = authToken;
                }
            }

            // apikey-based impersonation: replaces whatever user was resolved above
            if(PermissionsBL.ValidateRequestApiKey()) {
                uint userIdOverride = 0;
                if(uint.TryParse(context.GetParam(DekiWikiService.IMPERSONATE_USER_QUERYNAME, null), out userIdOverride)) {
                    UserBE userOverride = UserBL.GetUserById(userIdOverride);
                    if(userOverride != null) {
                        user = userOverride;
                        DekiContext.Current.Instance.Log.InfoFormat("APIKEY provided. Impersonating user id '{0}': {1}", user.ID, user.Name);
                    }
                }
            }
            return user;
        }

        /// <summary>
        /// Creates an auth token for the user stamped with the current time.
        /// </summary>
        public static string CreateAuthTokenForUser(UserBE user) {
            return CreateAuthTokenForUser(user, DateTime.Now);
        }

        /// <summary>
        /// Builds "&lt;id&gt;_&lt;utcTicks&gt;_&lt;hash&gt;" where the hash is the MD5 (as a
        /// Guid "N" string) of "id_ticks.password.salt".
        /// NOTE(review): Encoding.Default here has the same machine-dependence caveat
        /// as GetMD5 — tokens minted on one machine may not validate on another with
        /// a different ANSI code page if the salt/password contain non-ASCII chars.
        /// </summary>
        /// <param name="user">User the token identifies.</param>
        /// <param name="timestamp">Token issue time (converted to UTC ticks).</param>
        private static string CreateAuthTokenForUser(UserBE user, DateTime timestamp) {
            string ret = string.Empty;
            string tokenContent = string.Format("{0}_{1}", user.ID.ToString(), timestamp.ToUniversalTime().Ticks);

            //Include the users current password as part of validation to invalidate token upon pw change.
            string contentToValidate = string.Format("{0}.{1}.{2}", tokenContent, user.Password ?? string.Empty, DekiContext.Current.Instance.AuthTokenSalt);
            System.Security.Cryptography.MD5 md5 = System.Security.Cryptography.MD5.Create();
            string hash = new Guid(md5.ComputeHash(Encoding.Default.GetBytes(contentToValidate))).ToString("N");
            ret = tokenContent + "_" + hash;
            return ret;
        }

        /// <summary>
        /// Validates an auth token's structure, user, expiry and hash; returns the
        /// token's user or null. The hash is verified by re-deriving the token from
        /// the stored user/timestamp and comparing strings.
        /// NOTE(review): the string comparison is not constant-time; low risk here
        /// but worth noting for a credential check.
        /// </summary>
        private static UserBE ValidateAuthToken(string authToken) {
            UserBE user = null;

            // check if a structurally valid auth token was provided
            if(string.IsNullOrEmpty(authToken)) {
                return null;
            }
            Match m = authTokenRegex.Match(authToken);
            if(!m.Success) {
                return null;
            }

            // retrieve associated user object
            uint userid = 0;
            if(!uint.TryParse(m.Groups["id"].Value, out userid)) {
                return null;
            } else {
                user = UserBL.GetUserById(userid);
                if(user == null) {
                    return null;
                }
            }

            // parse timestamp
            DekiInstance instance = DekiContext.Current.Instance;
            long tsValue;
            if(long.TryParse(m.Groups["ts"].Value, out tsValue)) {
                DateTime ts = new DateTime(tsValue, DateTimeKind.Utc);

                // expired? (a non-positive AuthCookieExpirationTime disables expiry)
                if(ts < DateTime.UtcNow.Subtract(instance.AuthCookieExpirationTime) && instance.AuthCookieExpirationTime.TotalSeconds > 0) {
                    return null;
                }
                if(authToken == CreateAuthTokenForUser(user, ts)) {
                    return user;
                } else {

                    // TODO Max: Consider logging this as an intrusion attempt.
                    return null;
                }
            }
            return null;
        }

        /// <summary>
        /// Convenience overload that discards the alt-password flag.
        /// </summary>
        public static bool IsValidAuthenticationForLocalUser(UserBE user, string password) {
            bool altPassword;
            return IsValidAuthenticationForLocalUser(user, password, out altPassword);
        }

        /// <summary>
        /// Checks the password against the stored hash, falling back to the temporary
        /// ("new") password. Side effect: logging in with the original password clears
        /// any pending temporary password in the database.
        /// </summary>
        /// <param name="user">User whose stored hashes are checked.</param>
        /// <param name="password">Plaintext password to verify.</param>
        /// <param name="altPassword">Set when the temporary password matched.</param>
        public static bool IsValidAuthenticationForLocalUser(UserBE user, string password, out bool altPassword) {
            bool isValid = false;
            altPassword = false;
            string encrypted = AuthBL.EncryptPassword(user, password);
            if(string.CompareOrdinal(encrypted, user.Password) == 0) {

                //On login if a user has a temp password but logs in with original password, clear out the temp password.
                if(!string.IsNullOrEmpty(user.NewPassword)) {
                    user.NewPassword = string.Empty;
                    DbUtils.CurrentSession.Users_Update(user);
                }
                isValid = true;
            } else if(!string.IsNullOrEmpty(user.NewPassword) && string.CompareOrdinal(encrypted, user.NewPassword) == 0) {
                isValid = true;
                altPassword = true;
            }
            return isValid;
        }

        /// <summary>
        /// Logs the failed attempt (when any credential material was supplied) and
        /// ALWAYS throws DreamAbortException(AccessDenied) — it never returns.
        /// </summary>
        private static void LoginAccessDenied(DreamContext context, DreamMessage request, string username, uint? userid, string password) {
            string apiKey = context.GetParam("apikey", null);
            if(!string.IsNullOrEmpty(username) || !string.IsNullOrEmpty(password) || !string.IsNullOrEmpty(apiKey)) {
                DekiContext.Current.Instance.Log.WarnMethodCall("Authenticate: user password not correct or invalid auth token", "username:" + username, "userid:" + userid, "usingpassword?:" + !string.IsNullOrEmpty(password), "usingapikey?:" + !string.IsNullOrEmpty(apiKey), "origin:" + request.Headers[DreamHeaders.DREAM_CLIENTIP]);
            }
            throw new DreamAbortException(DreamMessage.AccessDenied(DekiWikiService.AUTHREALM, DekiResources.AUTHENTICATION_FAILED));
        }
    }
}
/*
  Copyright (c) Microsoft Corporation. All rights reserved.
  Licensed under the MIT License. See License.txt in the project root for license information.
*/

namespace Site.Areas.Portal.Controllers
{
    using System;
    using System.Collections.Generic;
    using System.Diagnostics.CodeAnalysis;
    using System.Linq;
    using System.Net;
    using System.Web;
    using System.Web.Mvc;
    using Microsoft.Xrm.Client;
    using Microsoft.Xrm.Portal.Configuration;
    using Microsoft.Xrm.Sdk;
    using Microsoft.Xrm.Sdk.Metadata;
    using Adxstudio.Xrm;
    using Adxstudio.Xrm.Activity;
    using Adxstudio.Xrm.Cms;
    using Adxstudio.Xrm.Core.Flighting;
    using Adxstudio.Xrm.Metadata;
    using Adxstudio.Xrm.Notes;
    using Adxstudio.Xrm.Resources;
    using Adxstudio.Xrm.Security;
    using Adxstudio.Xrm.Text;
    using Adxstudio.Xrm.Web;
    using Adxstudio.Xrm.Web.Mvc;
    using Order = Adxstudio.Xrm.Services.Query.Order;

    /// <summary>
    /// Controller that responds to ActivityPointer requests for information.
    /// </summary>
    public sealed class EntityActivityController : Controller
    {
        /// <summary>
        /// Grid-row view model for a single activity: flattens the activity's
        /// attributes into ViewFields and exposes display metadata.
        /// </summary>
        public sealed class ActivityRecord : EntityRecord
        {
            // Activity type codes that have dedicated display templates; anything
            // else is flagged via IsCustomActivity.
            private readonly string[] predefinedTemplates = new string[] { "email", "phonecall", "appointment", "adx_portalcomment" };

            // Attribute name -> display value (option sets unwrapped to their int,
            // description HTML-formatted, "allparties" flattened to from/to/cc/requiredattendees).
            public IDictionary<string, object> ViewFields { get; set; }

            public DateTime CreatedOn { get; private set; }

            // CreatedOn rendered with DateTimeClientFormat (inherited formatting).
            public string CreatedOnDisplay { get; private set; }

            public string PostedByName { get; private set; }

            // Always false for records built here (see SetPropertyValues).
            public bool DisplayToolbar { get; private set; }

            // True when the activity type code is not one of predefinedTemplates.
            public bool IsCustomActivity { get; private set; }

            /// <summary>
            /// Builds the record from an activity entity.
            /// </summary>
            /// <param name="activity">Source activity; must not be null.</param>
            /// <param name="dataAdapterDependencies">Portal data adapter dependencies.</param>
            /// <param name="provider">Entity permission provider.</param>
            /// <param name="entityMetadata">Optional metadata for the regarding entity.</param>
            /// <param name="readGranted">Whether read was already granted for the regarding record.</param>
            /// <param name="crmLcid">Optional CRM locale id used for formatting.</param>
            public ActivityRecord(IActivity activity, DataAdapterDependencies dataAdapterDependencies, CrmEntityPermissionProvider provider, EntityMetadata entityMetadata = null, bool readGranted = false, int? crmLcid = null)
                : base(
                    activity.Entity, dataAdapterDependencies.GetServiceContext(), provider, entityMetadata, readGranted,
                    activity.Regarding,
                    crmLcid: crmLcid)
            {
                // NOTE(review): activity is dereferenced in the base-constructor call
                // above, so this null check can never be reached with a null activity.
                if (activity == null) throw new ArgumentNullException("activity");
                SetPropertyValues(activity, dataAdapterDependencies);
            }

            /// <summary>
            /// Populates ViewFields and the display properties from the activity's
            /// attribute collection.
            /// </summary>
            private void SetPropertyValues(IActivity activity, DataAdapterDependencies dataAdapterDependencies)
            {
                var attributes = activity.Entity.Attributes;
                IsCustomActivity = false;

                // NOTE: the projection lambda below has a deliberate side effect —
                // it sets IsCustomActivity while building the dictionary.
                ViewFields = attributes.SelectMany(FlattenAllPartiesAttribute).ToDictionary(attribute => attribute.Key, attribute =>
                {
                    var optionSetValue = attribute.Value as OptionSetValue;
                    if (optionSetValue != null)
                    {
                        return optionSetValue.Value;
                    }

                    if (attribute.Key == "activitytypecode" && !predefinedTemplates.Contains((string)attribute.Value))
                    {
                        IsCustomActivity = true;
                    }

                    if (attribute.Key == "description")
                    {
                        string formattedValue = FormatViewFieldsValue(attribute.Value);
                        if (!string.IsNullOrWhiteSpace(formattedValue))
                        {
                            return formattedValue;
                        }
                    }

                    return attribute.Value;
                });

                CreatedOn = activity.Entity.GetAttributeValue<DateTime>("createdon");
                CreatedOnDisplay = CreatedOn.ToString(DateTimeClientFormat);

                // Prefer the "from" party; otherwise fall back to the record creator.
                // NOTE(review): if both "from" and "createdby" are absent this throws
                // a NullReferenceException on .Name — confirm callers guarantee one.
                var noteContact = activity.Entity.GetAttributeValue<EntityReference>("from");
                PostedByName = noteContact == null ? activity.Entity.GetAttributeValue<EntityReference>("createdby").Name : noteContact.Name;
                DisplayToolbar = false;
            }

            /// <summary>
            /// If valueObj can be converted to string - formats it with SimpleHtmlFormatter.
            /// </summary>
            /// <param name="valueObj">attribute.Value</param>
            /// <returns>Formatted string if success. Otherwise null</returns>
            private string FormatViewFieldsValue(object valueObj)
            {
                string valueText = valueObj as string;
                if (string.IsNullOrWhiteSpace(valueText))
                {
                    return null;
                }
                try
                {
                    string formattedText = (new SimpleHtmlFormatter().Format(valueText)).ToString();
                    return formattedText;
                }
                catch (Exception)
                {
                    // formatting is best-effort; fall back to the raw value
                    return null;
                }
            }
        }

        /// <summary>
        /// Expands an "allparties" attribute into separate "from"/"to"/"cc"/
        /// "requiredattendees" entries keyed by participation type mask; any other
        /// attribute is passed through unchanged as a single-element sequence.
        /// </summary>
        private static IEnumerable<KeyValuePair<string, object>> FlattenAllPartiesAttribute(KeyValuePair<string, object> attribute)
        {
            var attributeCollection = new List<KeyValuePair<string, object>> { };
            var toRecipients = new List<EntityReference>();
            var ccRecipients = new List<EntityReference>();
            var requiredAttendees = new List<EntityReference>();

            if (attribute.Key.Equals("allparties"))
            {
                // Iterate through each entity in allparties and assign to Sender, To, or CC
                foreach (var entity in ((EntityCollection)attribute.Value).Entities.Where(entity => entity.Attributes.ContainsKey("participationtypemask") && entity.Attributes.ContainsKey("partyid")))
                {
                    switch (entity.GetAttributeValue<OptionSetValue>("participationtypemask").Value)
                    {
                        // Sender or Organizer should be represented as "from"
                        case (int)Activity.ParticipationTypeMaskOptionSetValue.Sender:
                        case (int)Activity.ParticipationTypeMaskOptionSetValue.Organizer:
                            attributeCollection.Add(new KeyValuePair<string, object>("from", entity.GetAttributeValue<EntityReference>("partyid")));
                            break;
                        case (int)Activity.ParticipationTypeMaskOptionSetValue.ToRecipient:
                            toRecipients.Add(entity.GetAttributeValue<EntityReference>("partyid"));
                            break;
                        case (int)Activity.ParticipationTypeMaskOptionSetValue.CcRecipient:
                            ccRecipients.Add(entity.GetAttributeValue<EntityReference>("partyid"));
                            break;
                        case (int)Activity.ParticipationTypeMaskOptionSetValue.RequiredAttendee:
                            requiredAttendees.Add(entity.GetAttributeValue<EntityReference>("partyid"));
                            break;
                    }
                }

                // flatten lists for to and cc recipient
                if (toRecipients.Any())
                {
                    attributeCollection.Add(new KeyValuePair<string, object>("to", toRecipients));
                }
                if (ccRecipients.Any())
                {
                    attributeCollection.Add(new KeyValuePair<string, object>("cc", ccRecipients));
                }
                if (requiredAttendees.Any())
                {
                    attributeCollection.Add(new KeyValuePair<string, object>("requiredattendees", requiredAttendees));
                }
            }
            else
            {
                attributeCollection.Add(attribute);
            }

            return attributeCollection;
        }

        private const int DefaultPageSize = 10;

        /// <summary>
        /// Retrieves Json representation of Activity Pointers filtered by regarding filter.
        /// </summary>
        /// <param name="regarding">Record whose activities are listed.</param>
        /// <param name="orders">Sort orders applied to the query.</param>
        /// <param name="page">1-based page number.</param>
        /// <param name="pageSize">Page size; defaults to <see cref="DefaultPageSize"/>.</param>
        /// <returns>JSON paginated grid data of <see cref="ActivityRecord"/>s.</returns>
        [AcceptVerbs(HttpVerbs.Post)]
        [AjaxValidateAntiForgeryToken, SuppressMessage("ASP.NET.MVC.Security", "CA5332:MarkVerbHandlersWithValidateAntiforgeryToken", Justification = "Handled with the custom attribute AjaxValidateAntiForgeryToken")]
        public ActionResult GetActivities(EntityReference regarding, List<Order> orders, int page, int pageSize = DefaultPageSize)
        {
            var portalContext = PortalCrmConfigurationManager.CreatePortalContext();
            var dataAdapterDependencies = new PortalConfigurationDataAdapterDependencies(requestContext: Request.RequestContext);
            var dataAdapter = new ActivityDataAdapter(dataAdapterDependencies);
            var entityMetadata = portalContext.ServiceContext.GetEntityMetadata(regarding.LogicalName, EntityFilters.All);
            var result = dataAdapter.GetActivities(regarding, orders, page, pageSize, entityMetadata);
            var entityPermissionProvider = new CrmEntityPermissionProvider();
            var crmLcid = HttpContext.GetCrmLcid();

            // readGranted: true — the adapter query already enforced read access
            var records = result.Select(r => new ActivityRecord(r, dataAdapterDependencies, entityPermissionProvider, entityMetadata, true, crmLcid)).ToArray();
            var data = new PaginatedGridData(records, result.TotalCount, page, pageSize);

            return new JsonResult { Data = data, MaxJsonLength = int.MaxValue };
        }

        /// <summary>
        /// Method for creating a PortalComment entity. Will auto-populate From field with regarding entity's owner,
        /// To field with portal user, DirectionCode with Incoming, State code with Completed, and Status code
        /// with Received.
        /// </summary>
        /// <param name="regardingEntityLogicalName">Logical name of the record commented on.</param>
        /// <param name="regardingEntityId">Id (GUID string) of the record commented on.</param>
        /// <param name="text">Comment body; required, HTML is stripped for the emptiness check.</param>
        /// <param name="file">Optional single attachment.</param>
        /// <param name="attachmentSettings">Serialized annotation settings.</param>
        /// <returns>201 Created on success; 417/403 with a resource message otherwise.</returns>
        [HttpPost]
        [AjaxFormStatusResponse]
        [ValidateAntiForgeryToken]
        public ActionResult AddPortalComment(string regardingEntityLogicalName, string regardingEntityId, string text, HttpPostedFileBase file = null, string attachmentSettings = null)
        {
            if (string.IsNullOrWhiteSpace(text) || string.IsNullOrWhiteSpace(StringHelper.StripHtml(text)))
            {
                return new HttpStatusCodeResult(HttpStatusCode.ExpectationFailed, ResourceManager.GetString("Required_Field_Error").FormatWith(ResourceManager.GetString("Comment_DefaultText")));
            }

            Guid regardingId;
            // NOTE(review): the parse result is ignored — an unparsable id silently
            // yields Guid.Empty as the regarding id. Confirm whether that should 400.
            Guid.TryParse(regardingEntityId, out regardingId);
            var regarding = new EntityReference(regardingEntityLogicalName, regardingId);

            var dataAdapterDependencies = new PortalConfigurationDataAdapterDependencies(requestContext: Request.RequestContext);
            var serviceContext = dataAdapterDependencies.GetServiceContext();
            var dataAdapter = new ActivityDataAdapter(dataAdapterDependencies);
            var settings = EntityNotesController.GetAnnotationSettings(serviceContext, attachmentSettings);
            var crmUser = dataAdapter.GetCRMUserActivityParty(regarding, "ownerid");
            var portalUser = new Entity("activityparty");
            portalUser["partyid"] = dataAdapterDependencies.GetPortalUser();

            var portalComment = new PortalComment
            {
                Description = text,
                From = portalUser,
                To = crmUser,
                Regarding = regarding,
                AttachmentSettings = settings,
                StateCode = StateCode.Completed,
                StatusCode = StatusCode.Received,
                DirectionCode = PortalCommentDirectionCode.Incoming
            };

            if (file != null && file.ContentLength > 0)
            {
                // Soon we will change the UI/controller to accept multiple attachments during the create dialog, so the data adapter takes in a list of attachments
                portalComment.FileAttachments = new IAnnotationFile[] { AnnotationDataAdapter.CreateFileAttachment(file, settings.StorageLocation) };
            }

            var result = dataAdapter.CreatePortalComment(portalComment);

            // permission checks are reported back individually so the UI can show
            // the most specific denial reason
            if (!result.PermissionsExist)
            {
                return new HttpStatusCodeResult(HttpStatusCode.Forbidden, ResourceManager.GetString("Entity_Permissions_Have_Not_Been_Defined_Message"));
            }
            if (!result.CanCreate)
            {
                return new HttpStatusCodeResult(HttpStatusCode.Forbidden, ResourceManager.GetString("No_Permissions_To_Create_Notes"));
            }
            if (!result.CanAppendTo)
            {
                return new HttpStatusCodeResult(HttpStatusCode.Forbidden, ResourceManager.GetString("No_Permissions_To_Append_Record"));
            }
            if (!result.CanAppend)
            {
                return new HttpStatusCodeResult(HttpStatusCode.Forbidden, ResourceManager.GetString("No_Permissions_To_Append_Notes"));
            }

            if (FeatureCheckHelper.IsFeatureEnabled(FeatureNames.TelemetryFeatureUsage))
            {
                PortalFeatureTrace.TraceInstance.LogFeatureUsage(FeatureTraceCategory.Comments, this.HttpContext, "create_comment_" + regardingEntityLogicalName, 1, regarding, "create");
            }

            return new HttpStatusCodeResult(HttpStatusCode.Created);
        }

        /// <summary>
        /// Retrieves Json representation of Attachments filtered by regarding filter.
        /// </summary>
        /// <param name="regarding">Record whose attachments are listed.</param>
        /// <returns>JSON array of attachments, or an empty result when there are none.</returns>
        [AcceptVerbs(HttpVerbs.Post)]
        [AjaxValidateAntiForgeryToken, SuppressMessage("ASP.NET.MVC.Security", "CA5332:MarkVerbHandlersWithValidateAntiforgeryToken", Justification = "Handled with the custom attribute AjaxValidateAntiForgeryToken")]
        public ActionResult GetAttachments(EntityReference regarding)
        {
            var dataAdapterDependencies = new PortalConfigurationDataAdapterDependencies(requestContext: Request.RequestContext);
            var dataAdapter = new ActivityDataAdapter(dataAdapterDependencies);
            var attachments = dataAdapter.GetAttachments(regarding).ToArray();
            if (attachments.Any())
            {
                return new JsonResult { Data = attachments, MaxJsonLength = int.MaxValue };
            }
            return new EmptyResult();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;

namespace MovieApp.Api.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// Samples are resolved in precedence order: explicitly registered action
    /// samples first (<see cref="ActionSamples"/>), then samples serialized by
    /// the supported formatters from <see cref="SampleObjects"/> or an
    /// <see cref="ObjectGenerator"/>-created default object.
    /// </summary>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="api"/> is null.</exception>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        // Directly-registered action samples (added above) win over
                        // formatter-generated samples for the same media type.
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters, or null when none is registered.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try get sample provided for a specific mediaType, controllerName, actionName and parameterNames.
            // If not found, try get the sample provided for a specific mediaType, controllerName and actionName regardless of the parameterNames
            // If still not found, try get the sample provided for a specific type and mediaType
            if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample))
            {
                return sample;
            }

            return null;
        }

        /// <summary>
        /// Gets the sample object that will be serialized by the formatters.
        /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create one using <see cref="ObjectGenerator"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>The sample object.</returns>
        public virtual object GetSampleObject(Type type)
        {
            object sampleObject;

            if (!SampleObjects.TryGetValue(type, out sampleObject))
            {
                // Try create a default sample object
                ObjectGenerator objectGenerator = new ObjectGenerator();
                sampleObject = objectGenerator.GenerateObject(type);
            }

            return sampleObject;
        }

        /// <summary>
        /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
        /// <param name="formatters">The formatters.</param>
        /// <returns>The resolved CLR type, or null when no body type applies.</returns>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
        public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
        {
            if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
            {
                throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
            }
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            Type type;
            if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
                ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
            {
                // Re-compute the supported formatters based on type
                Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
                foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
                {
                    if (IsFormatSupported(sampleDirection, formatter, type))
                    {
                        newFormatters.Add(formatter);
                    }
                }
                formatters = newFormatters;
            }
            else
            {
                switch (sampleDirection)
                {
                    case SampleDirection.Request:
                        ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                        type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                        formatters = api.SupportedRequestBodyFormatters;
                        break;
                    case SampleDirection.Response:
                    default:
                        type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                        formatters = api.SupportedResponseFormatters;
                        break;
                }
            }
            return type;
        }

        /// <summary>
        /// Writes the sample object using formatter.
        /// </summary>
        /// <param name="formatter">The formatter.</param>
        /// <param name="value">The value.</param>
        /// <param name="type">The type.</param>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns>A <c>TextSample</c> on success, or an <c>InvalidSample</c> describing the failure.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
        public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
        {
            if (formatter == null)
            {
                throw new ArgumentNullException("formatter");
            }
            if (mediaType == null)
            {
                throw new ArgumentNullException("mediaType");
            }

            object sample = String.Empty;
            MemoryStream ms = null;
            HttpContent content = null;
            try
            {
                if (formatter.CanWriteType(type))
                {
                    ms = new MemoryStream();
                    content = new ObjectContent(type, value, formatter, mediaType);
                    formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                    ms.Position = 0;
                    StreamReader reader = new StreamReader(ms);
                    string serializedSampleString = reader.ReadToEnd();
                    if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                    {
                        serializedSampleString = TryFormatXml(serializedSampleString);
                    }
                    else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                    {
                        serializedSampleString = TryFormatJson(serializedSampleString);
                    }

                    sample = new TextSample(serializedSampleString);
                }
                else
                {
                    sample = new InvalidSample(String.Format(
                        CultureInfo.CurrentCulture,
                        "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                        mediaType,
                        formatter.GetType().Name,
                        type.Name));
                }
            }
            catch (Exception e)
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                    formatter.GetType().Name,
                    mediaType.MediaType,
                    e.Message));
            }
            finally
            {
                if (ms != null)
                {
                    ms.Dispose();
                }
                if (content != null)
                {
                    content.Dispose();
                }
            }

            return sample;
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatJson(string str)
        {
            try
            {
                object parsedJson = JsonConvert.DeserializeObject(str);
                return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
            }
            catch
            {
                // can't parse JSON, return the original string
                return str;
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatXml(string str)
        {
            try
            {
                XDocument xml = XDocument.Parse(str);
                return xml.ToString();
            }
            catch
            {
                // can't parse XML, return the original string
                return str;
            }
        }

        // A formatter "supports" a type only in the direction it will be used:
        // deserialization (read) for requests, serialization (write) for responses.
        private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    return formatter.CanReadType(type);
                case SampleDirection.Response:
                    return formatter.CanWriteType(type);
            }
            return false;
        }

        // Yields every registered action sample whose key matches the given
        // controller/action/direction; a key parameter list of "*" matches any
        // parameter names. Comparisons are case-insensitive.
        private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
        {
            HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
            foreach (var sample in ActionSamples)
            {
                HelpPageSampleKey sampleKey = sample.Key;
                if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                    String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                    (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                    sampleDirection == sampleKey.SampleDirection)
                {
                    yield return sample;
                }
            }
        }

        // Raw strings are wrapped as TextSample so the help page renders them
        // as preformatted text; all other sample objects pass through unchanged.
        private static object WrapSampleIfString(object sample)
        {
            string stringSample = sample as string;
            if (stringSample != null)
            {
                return new TextSample(stringSample);
            }

            return sample;
        }
    }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) Under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for Additional information regarding copyright ownership.
   The ASF licenses this file to You Under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed Under the License is distributed on an "AS Is" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations Under the License.
==================================================================== */

namespace NPOI.HSSF.UserModel
{
    using System;
    using System.Collections;
    using System.IO;
    using NPOI.Util;
    using System.Text;
    using NPOI.Util.Collections;
    using System.Globalization;

    /// <summary>
    /// Stores width and height details about a font.
    /// @author Glen Stampoultzis (glens at apache.org)
    /// </summary>
    internal class FontDetails
    {
        private String fontName;
        private int height;
        // Maps char -> boxed int width; chars without an entry fall back to 'W'.
        private Hashtable charWidths = new Hashtable();

        /// <summary>
        /// Construct the font details with the given name and height.
        /// </summary>
        /// <param name="fontName">The font name.</param>
        /// <param name="height">The height of the font.</param>
        public FontDetails(String fontName, int height)
        {
            this.fontName = fontName;
            this.height = height;
        }

        /// <summary>
        /// Gets the name of the font.
        /// </summary>
        /// <returns>The font name.</returns>
        public String GetFontName()
        {
            return fontName;
        }

        /// <summary>
        /// Gets the height.
        /// </summary>
        /// <returns>The font height.</returns>
        public int GetHeight()
        {
            return height;
        }

        /// <summary>
        /// Adds the char.
        /// </summary>
        /// <param name="c">The c.</param>
        /// <param name="width">The width.</param>
        public void AddChar(char c, int width)
        {
            charWidths[c] = width;
        }

        /// <summary>
        /// Retrieves the width of the specified Char. If the metrics for
        /// a particular Char are not available it defaults to returning the
        /// width for the 'W' Char. If even 'W' has no recorded width, 0 is
        /// returned instead of failing.
        /// </summary>
        /// <param name="c">The character.</param>
        /// <returns>The width of the character, or the 'W' fallback width, or 0.</returns>
        public int GetCharWidth(char c)
        {
            object widthInteger = charWidths[c];
            if (widthInteger != null)
                return (int)widthInteger;
            // Unknown char: fall back to the width of 'W'.
            if (c != 'W')
                return GetCharWidth('W');
            // BUG FIX: the original code cast the null entry to int here,
            // throwing NullReferenceException when 'W' itself had no metrics.
            return 0;
        }

        /// <summary>
        /// Adds the chars.
        /// </summary>
        /// <param name="Chars">The chars.</param>
        /// <param name="widths">The widths.</param>
        public void AddChars(char[] Chars, int[] widths)
        {
            for (int i = 0; i < Chars.Length; i++)
            {
                // Space is deliberately skipped; its width is never recorded.
                if (Chars[i] != ' ')
                {
                    charWidths[Chars[i]] = widths[i];
                }
            }
        }

        /// <summary>
        /// Builds the font height property.
        /// </summary>
        /// <param name="fontName">Name of the font.</param>
        /// <returns>The property key, e.g. "font.Arial.height".</returns>
        public static String BuildFontHeightProperty(String fontName)
        {
            return "font." + fontName + ".height";
        }

        /// <summary>
        /// Builds the font widths property.
        /// </summary>
        /// <param name="fontName">Name of the font.</param>
        /// <returns>The property key, e.g. "font.Arial.widths".</returns>
        public static String BuildFontWidthsProperty(String fontName)
        {
            return "font." + fontName + ".widths";
        }

        /// <summary>
        /// Builds the font chars property.
        /// </summary>
        /// <param name="fontName">Name of the font.</param>
        /// <returns>The property key, e.g. "font.Arial.characters".</returns>
        public static String BuildFontCharsProperty(String fontName)
        {
            return "font." + fontName + ".characters";
        }

        /// <summary>
        /// Create an instance of <c>FontDetails</c> by loading them from the
        /// provided property object.
        /// </summary>
        /// <param name="fontName">the font name.</param>
        /// <param name="fontMetricsProps">the property object holding the details of this
        /// particular font.</param>
        /// <returns>a new FontDetails instance.</returns>
        /// <exception cref="ArgumentException">Thrown when the metrics do not cover the font.</exception>
        /// <exception cref="Exception">Thrown when the char and width lists differ in length.</exception>
        public static FontDetails Create(String fontName, Properties fontMetricsProps)
        {
            String heightStr = fontMetricsProps[BuildFontHeightProperty(fontName)];
            String widthsStr = fontMetricsProps[BuildFontWidthsProperty(fontName)];
            String CharsStr = fontMetricsProps[BuildFontCharsProperty(fontName)];

            // Ensure that this Is a font we know about
            if (heightStr == null || widthsStr == null || CharsStr == null)
            {
                // We don't know all we need to about this font
                // Since we don't know its sizes, we can't work with it
                throw new ArgumentException("The supplied FontMetrics doesn't know about the font '" + fontName + "', so we can't use it. Please Add it to your font metrics file (see StaticFontMetrics.GetFontDetails");
            }

            int height = int.Parse(heightStr, CultureInfo.InvariantCulture);
            FontDetails d = new FontDetails(fontName, height);
            String[] CharsStrArray = Split(CharsStr, ",", -1);
            String[] widthsStrArray = Split(widthsStr, ",", -1);
            if (CharsStrArray.Length != widthsStrArray.Length)
                // Fixed message: original read "does not number of widths".
                throw new Exception("Number of Chars does not match number of widths for font " + fontName);

            for (int i = 0; i < widthsStrArray.Length; i++)
            {
                if (CharsStrArray[i].Trim().Length != 0)
                    d.AddChar(CharsStrArray[i].Trim()[0], int.Parse(widthsStrArray[i], CultureInfo.InvariantCulture));
            }
            return d;
        }

        /// <summary>
        /// Gets the width of all Chars in a string.
        /// </summary>
        /// <param name="str">The string to measure.</param>
        /// <returns>The width of the string for a 10 point font.</returns>
        public int GetStringWidth(String str)
        {
            int width = 0;
            for (int i = 0; i < str.Length; i++)
            {
                width += GetCharWidth(str[i]);
            }
            return width;
        }

        /// <summary>
        /// Split the given string into an array of strings using the given
        /// delimiter.
        /// </summary>
        /// <param name="text">The text.</param>
        /// <param name="separator">The separator.</param>
        /// <param name="max">The max. NOTE: currently ignored; kept for
        /// signature compatibility with the Java original.</param>
        /// <returns>The split parts.</returns>
        private static String[] Split(String text, String separator, int max)
        {
            String[] list = text.Split(separator.ToCharArray());
            return list;
        }
    }
}
using UnityEngine;
using System.Collections;

[AddComponentMenu("2D Toolkit/Sprite/tk2dTiledSprite")]
[RequireComponent(typeof(MeshRenderer))]
[RequireComponent(typeof(MeshFilter))]
[ExecuteInEditMode]
/// <summary>
/// Sprite implementation that tiles a sprite to fill given dimensions.
/// </summary>
public class tk2dTiledSprite : tk2dBaseSprite
{
    // Procedurally built mesh and its backing buffers; all rebuilt by Build().
    Mesh mesh;
    Vector2[] meshUvs;
    Vector3[] meshVertices;
    Color32[] meshColors;
    Vector3[] meshNormals = null;
    Vector4[] meshTangents = null;
    int[] meshIndices;

    [SerializeField]
    Vector2 _dimensions = new Vector2(50.0f, 50.0f);
    [SerializeField]
    Anchor _anchor = Anchor.LowerLeft;

    /// <summary>
    /// Gets or sets the dimensions.
    /// </summary>
    /// <value>
    /// Use this to change the dimensions of the tiled sprite in pixel units
    /// </value>
    public Vector2 dimensions
    {
        get { return _dimensions; }
        set
        {
            if (value != _dimensions)
            {
                _dimensions = value;
                UpdateVertices();
#if UNITY_EDITOR
                EditMode__CreateCollider();
#endif
                UpdateCollider();
            }
        }
    }

    /// <summary>
    /// The anchor position for this tiled sprite
    /// </summary>
    public Anchor anchor
    {
        get { return _anchor; }
        set
        {
            if (value != _anchor)
            {
                _anchor = value;
                UpdateVertices();
#if UNITY_EDITOR
                EditMode__CreateCollider();
#endif
                UpdateCollider();
            }
        }
    }

    [SerializeField]
    protected bool _createBoxCollider = false;

    /// <summary>
    /// Create a trimmed box collider for this sprite
    /// </summary>
    public bool CreateBoxCollider
    {
        get { return _createBoxCollider; }
        set
        {
            if (_createBoxCollider != value)
            {
                _createBoxCollider = value;
                UpdateCollider();
            }
        }
    }

    // Hides tk2dBaseSprite.Awake; base.Awake() is invoked explicitly first.
    new void Awake()
    {
        base.Awake();

        // Create mesh, independently to everything else
        mesh = new Mesh();
#if !UNITY_3_5
        mesh.MarkDynamic();
#endif
        mesh.hideFlags = HideFlags.DontSave;
        GetComponent<MeshFilter>().mesh = mesh;

        // This will not be set when instantiating in code
        // In that case, Build will need to be called
        if (Collection)
        {
            // reset spriteId if outside bounds
            // this is when the sprite collection data is corrupt
            if (_spriteId < 0 || _spriteId >= Collection.Count)
                _spriteId = 0;

            Build();

            if (boxCollider == null)
                boxCollider = GetComponent<BoxCollider>();
#if !(UNITY_3_5 || UNITY_4_0 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2)
            if (boxCollider2D == null)
            {
                boxCollider2D = GetComponent<BoxCollider2D>();
            }
#endif
        }
    }

    // Destroys the runtime-created mesh so it does not leak; DestroyImmediate
    // is required in the editor because Destroy is deferred.
    protected void OnDestroy()
    {
        if (mesh)
        {
#if UNITY_EDITOR
            DestroyImmediate(mesh);
#else
            Destroy(mesh);
#endif
        }
    }

    // Fills dest with the current sprite color for every generated vertex.
    new protected void SetColors(Color32[] dest)
    {
        int numVertices;
        int numIndices;
        tk2dSpriteGeomGen.GetTiledSpriteGeomDesc(out numVertices, out numIndices, CurrentSprite, dimensions);
        tk2dSpriteGeomGen.SetSpriteColors (dest, 0, numVertices, _color, collectionInst.premultipliedAlpha);
    }

    // Calculated center and extents
    Vector3 boundsCenter = Vector3.zero, boundsExtents = Vector3.zero;

    // Rebuilds all mesh buffers from the current sprite definition and
    // dimensions, then pushes them to the Mesh and refreshes collider/material.
    public override void Build()
    {
        var spriteDef = CurrentSprite;

        int numVertices;
        int numIndices;
        tk2dSpriteGeomGen.GetTiledSpriteGeomDesc(out numVertices, out numIndices, spriteDef, dimensions);

        // Reallocate buffers only when the required vertex/index count changed.
        if (meshUvs == null || meshUvs.Length != numVertices)
        {
            meshUvs = new Vector2[numVertices];
            meshVertices = new Vector3[numVertices];
            meshColors = new Color32[numVertices];
        }
        if (meshIndices == null || meshIndices.Length != numIndices)
        {
            meshIndices = new int[numIndices];
        }

        // Normals/tangents are only generated when the sprite definition has them.
        meshNormals = new Vector3[0];
        meshTangents = new Vector4[0];
        if (spriteDef.normals != null && spriteDef.normals.Length > 0)
        {
            meshNormals = new Vector3[numVertices];
        }
        if (spriteDef.tangents != null && spriteDef.tangents.Length > 0)
        {
            meshTangents = new Vector4[numVertices];
        }

        float colliderOffsetZ = ( boxCollider != null ) ? ( boxCollider.center.z ) : 0.0f;
        float colliderExtentZ = ( boxCollider != null ) ? ( boxCollider.size.z * 0.5f ) : 0.5f;
        tk2dSpriteGeomGen.SetTiledSpriteGeom(meshVertices, meshUvs, 0, out boundsCenter, out boundsExtents, spriteDef, _scale, dimensions, anchor, colliderOffsetZ, colliderExtentZ);
        tk2dSpriteGeomGen.SetTiledSpriteIndices(meshIndices, 0, 0, spriteDef, dimensions);

        if (meshNormals.Length > 0 || meshTangents.Length > 0)
        {
            Vector3 meshVertexMin = new Vector3(spriteDef.positions[0].x * dimensions.x * spriteDef.texelSize.x * scale.x, spriteDef.positions[0].y * dimensions.y * spriteDef.texelSize.y * scale.y);
            Vector3 meshVertexMax = new Vector3(spriteDef.positions[3].x * dimensions.x * spriteDef.texelSize.x * scale.x, spriteDef.positions[3].y * dimensions.y * spriteDef.texelSize.y * scale.y);
            tk2dSpriteGeomGen.SetSpriteVertexNormals(meshVertices, meshVertexMin, meshVertexMax, spriteDef.normals, spriteDef.tangents, meshNormals, meshTangents);
        }

        SetColors(meshColors);

        // The mesh can be null if Build() runs before Awake (e.g. scripted
        // instantiation); create it lazily here.
        if (mesh == null)
        {
            mesh = new Mesh();
#if !UNITY_3_5
            mesh.MarkDynamic();
#endif
            mesh.hideFlags = HideFlags.DontSave;
        }
        else
        {
            mesh.Clear();
        }
        mesh.vertices = meshVertices;
        mesh.colors32 = meshColors;
        mesh.uv = meshUvs;
        mesh.normals = meshNormals;
        mesh.tangents = meshTangents;
        mesh.triangles = meshIndices;
        mesh.RecalculateBounds();
        mesh.bounds = AdjustedMeshBounds( mesh.bounds, renderLayer );
        GetComponent<MeshFilter>().mesh = mesh;

        UpdateCollider();
        UpdateMaterial();
    }

    protected override void UpdateGeometry() { UpdateGeometryImpl(); }
    protected override void UpdateColors() { UpdateColorsImpl(); }
    // A tiled sprite's vertex layout depends on dimensions/anchor, so vertex
    // updates require a full geometry rebuild.
    protected override void UpdateVertices() { UpdateGeometryImpl(); }

    protected void UpdateColorsImpl()
    {
#if UNITY_EDITOR
        // This can happen with prefabs in the inspector
        if (meshColors == null || meshColors.Length == 0)
            return;
#endif
        if (meshColors == null || meshColors.Length == 0)
        {
            Build();
        }
        else
        {
            SetColors(meshColors);
            mesh.colors32 = meshColors;
        }
    }

    protected void UpdateGeometryImpl()
    {
#if UNITY_EDITOR
        // This can happen with prefabs in the inspector
        if (mesh == null)
            return;
#endif
        Build();
    }

    #region Collider
    protected override void UpdateCollider()
    {
        if (CreateBoxCollider)
        {
            if (CurrentSprite.physicsEngine == tk2dSpriteDefinition.PhysicsEngine.Physics3D)
            {
                if (boxCollider != null)
                {
                    boxCollider.size = 2 * boundsExtents;
                    boxCollider.center = boundsCenter;
                }
            }
            else if (CurrentSprite.physicsEngine == tk2dSpriteDefinition.PhysicsEngine.Physics2D)
            {
#if !(UNITY_3_5 || UNITY_4_0 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2)
                if (boxCollider2D != null)
                {
                    boxCollider2D.size = 2 * boundsExtents;
                    // BoxCollider2D.center was renamed to .offset in Unity 5.
#if (UNITY_3_5 || UNITY_4_0 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3 || UNITY_4_4 || UNITY_4_5 || UNITY_4_6 || UNITY_4_7 || UNITY_4_8 || UNITY_4_9)
                    boxCollider2D.center = boundsCenter;
#else
                    boxCollider2D.offset = boundsCenter;
#endif
                }
#endif
            }
        }
    }

#if UNITY_EDITOR
    // Invisible cube gizmo so the sprite can be picked in the scene view.
    void OnDrawGizmos()
    {
        if (mesh != null)
        {
            Bounds b = mesh.bounds;
            Gizmos.color = Color.clear;
            Gizmos.matrix = transform.localToWorldMatrix;
            Gizmos.DrawCube(b.center, b.extents * 2);
            Gizmos.matrix = Matrix4x4.identity;
            Gizmos.color = Color.white;
        }
    }
#endif

    protected override void CreateCollider()
    {
        UpdateCollider();
    }

#if UNITY_EDITOR
    public override void EditMode__CreateCollider()
    {
        if (CreateBoxCollider)
        {
            base.CreateSimpleBoxCollider();
        }

        UpdateCollider();
    }
#endif
    #endregion

    protected override void UpdateMaterial()
    {
        Renderer renderer = GetComponent<Renderer>();
        if (renderer.sharedMaterial != collectionInst.spriteDefinitions[spriteId].materialInst)
            renderer.material = collectionInst.spriteDefinitions[spriteId].materialInst;
    }

    protected override int GetCurrentVertexCount()
    {
#if UNITY_EDITOR
        if (meshVertices == null)
            return 0;
#endif
        // NOTE(review): returns a fixed 16 regardless of the actual tiled
        // vertex count (mirrors tk2dSlicedSprite) — confirm this is intended.
        return 16;
    }

    public override void ReshapeBounds(Vector3 dMin, Vector3 dMax)
    {
        // Identical to tk2dSlicedSprite.ReshapeBounds
        float minSizeClampTexelScale = 0.1f; // Can't shrink sprite smaller than this many texels
        // Irrespective of transform
        var sprite = CurrentSprite;
        Vector2 boundsSize = new Vector2(_dimensions.x * sprite.texelSize.x, _dimensions.y * sprite.texelSize.y);
        Vector3 oldSize = new Vector3(boundsSize.x * _scale.x, boundsSize.y * _scale.y);
        Vector3 oldMin = Vector3.zero;
        switch (_anchor)
        {
            case Anchor.LowerLeft: oldMin.Set(0,0,0); break;
            case Anchor.LowerCenter: oldMin.Set(0.5f,0,0); break;
            case Anchor.LowerRight: oldMin.Set(1,0,0); break;
            case Anchor.MiddleLeft: oldMin.Set(0,0.5f,0); break;
            case Anchor.MiddleCenter: oldMin.Set(0.5f,0.5f,0); break;
            case Anchor.MiddleRight: oldMin.Set(1,0.5f,0); break;
            case Anchor.UpperLeft: oldMin.Set(0,1,0); break;
            case Anchor.UpperCenter: oldMin.Set(0.5f,1,0); break;
            case Anchor.UpperRight: oldMin.Set(1,1,0); break;
        }
        oldMin = Vector3.Scale(oldMin, oldSize) * -1;
        Vector3 newScale = oldSize + dMax - dMin;
        newScale.x /= boundsSize.x;
        newScale.y /= boundsSize.y;
        // Clamp the minimum size to avoid having the pivot move when we scale from near-zero
        if (Mathf.Abs(boundsSize.x * newScale.x) < sprite.texelSize.x * minSizeClampTexelScale && Mathf.Abs(newScale.x) < Mathf.Abs(_scale.x))
        {
            dMin.x = 0;
            newScale.x = _scale.x;
        }
        if (Mathf.Abs(boundsSize.y * newScale.y) < sprite.texelSize.y * minSizeClampTexelScale && Mathf.Abs(newScale.y) < Mathf.Abs(_scale.y))
        {
            dMin.y = 0;
            newScale.y = _scale.y;
        }
        // Add our wanted local dMin offset, while negating the positional offset caused by scaling
        // (Vector3 -> Vector2 assignment relies on Unity's implicit conversion.)
        Vector2 scaleFactor = new Vector3(Mathf.Approximately(_scale.x, 0) ? 0 : (newScale.x / _scale.x),
            Mathf.Approximately(_scale.y, 0) ? 0 : (newScale.y / _scale.y));
        Vector3 scaledMin = new Vector3(oldMin.x * scaleFactor.x, oldMin.y * scaleFactor.y);
        Vector3 offset = dMin + oldMin - scaledMin;
        offset.z = 0;
        transform.position = transform.TransformPoint(offset);
        dimensions = new Vector2(_dimensions.x * scaleFactor.x, _dimensions.y * scaleFactor.y);
    }
}
#region File Description
//-----------------------------------------------------------------------------
// RewardsScreen.cs
//
// Microsoft XNA Community Game Platform
// Copyright (C) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
#endregion

#region Using Statements
using System;
using System.Collections.Generic;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Content;
using RolePlayingGameData;
#endregion

namespace RolePlaying
{
    /// <summary>
    /// Displays the rewards earned by the party, from a quest or combat.
    /// The rewards themselves are only granted when the screen is dismissed.
    /// </summary>
    class RewardsScreen : GameScreen
    {
        public enum RewardScreenMode
        {
            Quest,
            Combat,
        };

        /// <summary>
        /// The mode of this screen.
        /// </summary>
        private RewardScreenMode mode;

        #region Rewards

        // Rewards to grant on exit: experience, gold, and a list of gear items.
        private int experienceReward;
        private int goldReward;
        private List<Gear> gearReward;

        #endregion

        #region Graphics content

        private Texture2D backTexture;
        private Texture2D selectIconTexture;
        private Texture2D lineTexture;
        private Texture2D scrollUpTexture;
        private Texture2D scrollDownTexture;
        private Texture2D fadeTexture;

        private Vector2 backgroundPosition;
        private Vector2 textPosition;
        private Vector2 iconPosition;
        private Vector2 linePosition;
        private Vector2 selectPosition;
        private Vector2 selectIconPosition;
        private Vector2 screenSize;
        private Vector2 titlePosition;
        private Vector2 scrollUpPosition;
        private Vector2 scrollDownPosition;
        private Vector2 xpAwardPosition;
        private Vector2 goldAwardPosition;
        private Vector2 itemAwardPosition;
        // NOTE(review): fadeDest is assigned in LoadContent but never read;
        // Draw positions the fade texture at (viewport.Width, viewport.Height)
        // instead — confirm whether the fade was meant to use this rectangle.
        private Rectangle fadeDest;

        #endregion

        #region Dialog Text

        private string titleText;
        private readonly string selectString = "Continue";

        #endregion

        #region Scrollable List Data

        /// <summary>
        /// Starting index of the list to be displayed
        /// </summary>
        private int startIndex;

        /// <summary>
        /// Ending index of the list to be displayed
        /// </summary>
        private int endIndex;

        /// <summary>
        /// Maximum number of lines to draw in the screen
        /// </summary>
        private int maxLines;

        /// <summary>
        /// Vertical spacing between each line
        /// </summary>
        private float lineSpacing;

        #endregion

        #region Initialization

        /// <summary>
        /// Creates a new RewardsScreen object.
        /// </summary>
        public RewardsScreen(RewardScreenMode mode,
            int experienceReward, int goldReward, List<Gear> gearReward)
            : base()
        {
            this.IsPopup = true;
            this.mode = mode;
            this.experienceReward = experienceReward;
            this.goldReward = goldReward;
            this.gearReward = gearReward;

            maxLines = 3;
            lineSpacing = 74 * ScaledVector2.ScaleFactor;

            // Initial scroll window: [startIndex, endIndex) clamped to list size.
            startIndex = 0;
            endIndex = maxLines;
            if (endIndex > gearReward.Count)
            {
                endIndex = gearReward.Count;
            }

            // play the appropriate music
            switch (mode)
            {
                case RewardScreenMode.Combat:
                    // play the combat-victory music
                    AudioManager.PushMusic("WinTheme",false);
                    break;

                case RewardScreenMode.Quest:
                    // play the quest-complete music
                    AudioManager.PushMusic("QuestComplete",false);
                    break;
            }
            this.Exiting += new EventHandler(RewardsScreen_Exiting);
        }

        // Restore the previous music track when the screen goes away.
        void RewardsScreen_Exiting(object sender, EventArgs e)
        {
            AudioManager.PopMusic();
        }

        /// <summary>
        /// Load the graphics content from the content manager.
        /// </summary>
        public override void LoadContent()
        {
            ContentManager content = ScreenManager.Game.Content;

            backTexture = content.Load<Texture2D>(@"Textures\GameScreens\PopupScreen");
            selectIconTexture = content.Load<Texture2D>(@"Textures\Buttons\rpgbtn");
            scrollUpTexture = content.Load<Texture2D>(@"Textures\GameScreens\ScrollUp");
            scrollDownTexture = content.Load<Texture2D>(@"Textures\GameScreens\ScrollDown");
            lineTexture = content.Load<Texture2D>(@"Textures\GameScreens\SeparationLine");
            fadeTexture = content.Load<Texture2D>(@"Textures\GameScreens\FadeScreen");

            Viewport viewport = ScreenManager.GraphicsDevice.Viewport;
            fadeDest = new Rectangle(viewport.X, viewport.Y, viewport.Width, viewport.Height);

            // Center the popup background in the viewport.
            backgroundPosition.X = (viewport.Width - backTexture.Width * ScaledVector2.DrawFactor) / 2;
            backgroundPosition.Y = (viewport.Height - backTexture.Height * ScaledVector2.DrawFactor) / 2;

            screenSize = new Vector2(viewport.Width, viewport.Height);

            selectIconPosition.X = screenSize.X / 2 + 120 * ScaledVector2.ScaleFactor;
            selectIconPosition.Y = backgroundPosition.Y + 520f * ScaledVector2.ScaleFactor;
            selectPosition.X = selectIconPosition.X -
                Fonts.ButtonNamesFont.MeasureString(selectString).X - 10f * ScaledVector2.ScaleFactor;
            selectPosition.Y = backgroundPosition.Y + 530f * ScaledVector2.ScaleFactor;

            // All remaining positions are fixed offsets from the popup background.
            textPosition = backgroundPosition + ScaledVector2.GetScaledVector(335f, 320f);
            iconPosition = backgroundPosition + ScaledVector2.GetScaledVector(155f, 303f);
            linePosition = backgroundPosition + ScaledVector2.GetScaledVector(142f, 285f);

            scrollUpPosition = backgroundPosition + ScaledVector2.GetScaledVector(810f, 300f);
            scrollDownPosition = backgroundPosition + ScaledVector2.GetScaledVector(810f, 480f);

            xpAwardPosition = backgroundPosition + ScaledVector2.GetScaledVector(160f, 180f);
            goldAwardPosition = backgroundPosition + ScaledVector2.GetScaledVector(160f, 210f);
            itemAwardPosition = backgroundPosition + ScaledVector2.GetScaledVector(160f, 240f);
        }

        #endregion

        #region Updating

        /// <summary>
        /// Handles user input.
        /// </summary>
        public override void HandleInput()
        {
            bool backClicked = false;

            if (InputManager.IsButtonClicked(new Rectangle(
                (int)selectIconPosition.X, (int)selectIconPosition.Y,
                selectIconTexture.Width, selectIconTexture.Height)))
            {
                backClicked = true;
            }

            // exit the screen
            if (backClicked || InputManager.IsActionTriggered(InputManager.Action.Back))
            {
                ExitScreen();
                // give the rewards to the party (only granted here, on dismissal)
                Session.Party.PartyGold += goldReward;
                foreach (Gear gear in gearReward)
                {
                    Session.Party.AddToInventory(gear, 1);
                }
                Session.Party.GiveExperience(experienceReward);
            }
            // Scroll up
            else if (InputManager.IsActionTriggered(InputManager.Action.CursorUp))
            {
                if (startIndex > 0)
                {
                    startIndex--;
                    endIndex--;
                }
            }
            // Scroll down
            else if (InputManager.IsActionTriggered(InputManager.Action.CursorDown))
            {
                if (startIndex < gearReward.Count - maxLines)
                {
                    endIndex++;
                    startIndex++;
                }
            }
        }

        #endregion

        #region Drawing

        /// <summary>
        /// Draw the screen.
        /// </summary>
        public override void Draw(GameTime gameTime)
        {
            Vector2 currentIconPosition = iconPosition;
            Vector2 currentTextPosition = textPosition;
            Vector2 currentlinePosition = linePosition;

            switch (mode)
            {
                case RewardScreenMode.Quest:
                    titleText = "Quest Complete";
                    break;

                case RewardScreenMode.Combat:
                    titleText = "Combat Won";
                    break;
            }

            titlePosition.X = (screenSize.X -
                Fonts.HeaderFont.MeasureString(titleText).X) / 2;
            titlePosition.Y = backgroundPosition.Y + lineSpacing;

            SpriteBatch spriteBatch = ScreenManager.SpriteBatch;
            spriteBatch.Begin();

            // Draw the fading screen
            // NOTE(review): the position is (viewport.Width, viewport.Height),
            // i.e. the bottom-right corner with a zero origin — the fade would
            // render off-screen; fadeDest (computed in LoadContent) is unused.
            // Confirm the intended destination.
            spriteBatch.Draw(fadeTexture, new Vector2(ScreenManager.GraphicsDevice.Viewport.Width,
                ScreenManager.GraphicsDevice.Viewport.Height), null, Color.White, 0f,
                Vector2.Zero,ScaledVector2.DrawFactor,SpriteEffects.None,0f);

            // Draw the popup background
            spriteBatch.Draw(backTexture, backgroundPosition,null, Color.White,0f,
                Vector2.Zero, ScaledVector2.DrawFactor, SpriteEffects.None, 0f);

            // Draw the title
            spriteBatch.DrawString(Fonts.HeaderFont, titleText, titlePosition,
                Fonts.TitleColor);

            // Draw the experience points awarded
            spriteBatch.DrawString(Fonts.GearInfoFont,
                "XP Awarded : " + experienceReward,
                xpAwardPosition, Fonts.CountColor);

            // Draw the gold points awarded
            spriteBatch.DrawString(Fonts.GearInfoFont,
                "Gold Awarded : " + Fonts.GetGoldString(goldReward),
                goldAwardPosition, Fonts.CountColor);

            // Draw the items awarded
            spriteBatch.DrawString(Fonts.GearInfoFont, "Items Awarded :",
                itemAwardPosition, Fonts.CountColor);

            // Draw horizontal divider lines
            for (int i = 0; i <= maxLines; i++)
            {
                spriteBatch.Draw(lineTexture, currentlinePosition, null,Color.White,0f,
                    Vector2.Zero, ScaledVector2.DrawFactor, SpriteEffects.None, 0f);
                currentlinePosition.Y += lineSpacing;
            }

            // Draw the item details for the current scroll window only.
            for (int i = startIndex; i < endIndex; i++)
            {
                // Draw the item icon
                gearReward[i].DrawIcon(ScreenManager.SpriteBatch, currentIconPosition);

                // Draw the item name
                spriteBatch.DrawString(Fonts.GearInfoFont, gearReward[i].Name,
                    currentTextPosition, Fonts.CountColor);

                // Increment the position to the next line
                currentTextPosition.Y += lineSpacing;
                currentIconPosition.Y += lineSpacing;
            }

            // Draw the scroll buttons
            spriteBatch.Draw(scrollUpTexture, scrollUpPosition,null, Color.White,0f,
                Vector2.Zero, ScaledVector2.DrawFactor, SpriteEffects.None, 0f);
            spriteBatch.Draw(scrollDownTexture, scrollDownPosition,null, Color.White,0f,
                Vector2.Zero, ScaledVector2.DrawFactor, SpriteEffects.None, 0f);

            // Draw the select button and its corresponding text
            spriteBatch.Draw(selectIconTexture, selectIconPosition,null, Color.White,0f,
                Vector2.Zero, ScaledVector2.DrawFactor, SpriteEffects.None, 0f);
            Vector2 selectFontPosition = Fonts.GetCenterPositionInButton(Fonts.ButtonNamesFont, selectString,
                new Rectangle((int)selectIconPosition.X, (int)selectIconPosition.Y,
                    selectIconTexture.Width, selectIconTexture.Height));
            spriteBatch.DrawString(Fonts.ButtonNamesFont, selectString, selectFontPosition,
                Color.White);

            spriteBatch.End();
        }

        #endregion
    }
}
/* ****************************************************************************
 *
 * Copyright (c) Microsoft Corporation.
 *
 * This source code is subject to terms and conditions of the Apache License, Version 2.0. A
 * copy of the license can be found in the License.html file at the root of this distribution. If
 * you cannot locate the Apache License, Version 2.0, please send an email to
 * dlr@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
 * by the terms of the Apache License, Version 2.0.
 *
 * You must not remove this notice, or any other, from this software.
 *
 *
 * ***************************************************************************/

#if FEATURE_FULL_CONSOLE

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using System.Threading;
using IronPython.Compiler;
using IronPython.Modules;
using IronPython.Runtime;
using IronPython.Runtime.Exceptions;
using IronPython.Runtime.Operations;
using Microsoft.Scripting;
using Microsoft.Scripting.Hosting.Shell;
using Microsoft.Scripting.Runtime;
using Microsoft.Scripting.Utils;

namespace IronPython.Hosting {

    /// <summary>
    /// A simple Python command-line should mimic the standard python.exe
    /// </summary>
    public sealed class PythonCommandLine : CommandLine {

        // Strongly-typed view of the base class's Language property.
        private PythonContext PythonContext {
            get { return (PythonContext)Language; }
        }

        // Strongly-typed view of the base class's Options property.
        private new PythonConsoleOptions Options {
            get { return (PythonConsoleOptions)base.Options; }
        }

        public PythonCommandLine() {
        }

        /// <summary>
        /// Banner printed when the interactive console starts.
        /// </summary>
        protected override string/*!*/ Logo {
            get {
                return GetLogoDisplay();
            }
        }

        /// <summary>
        /// Returns the display look for IronPython.
        ///
        /// The returned string uses \n instead of Environment.NewLine for its line separator
        /// because it is intended to be outputted through the Python I/O system.
        /// </summary>
        public static string GetLogoDisplay() {
            return PythonContext.GetVersionString() +
                "\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\n";
        }

        /// <summary>
        /// Extracts the process exit code from a SystemExitException; if the exit
        /// argument was not an integer, echoes it to the error stream first.
        /// </summary>
        private int GetEffectiveExitCode(SystemExitException/*!*/ e) {
            object nonIntegerCode;
            int exitCode = e.GetExitCode(out nonIntegerCode);
            if (nonIntegerCode != null) {
                Console.WriteLine(nonIntegerCode.ToString(), Style.Error);
            }
            return exitCode;
        }

        /// <summary>
        /// Shuts the language down, reporting (but not propagating) any error raised
        /// by sys.exitfunc handlers.
        /// </summary>
        protected override void Shutdown() {
            try {
                Language.Shutdown();
            } catch (Exception e) {
                Console.WriteLine("", Style.Error);
                Console.WriteLine("Error in sys.exitfunc:", Style.Error);
                Console.Write(Language.FormatException(e), Style.Error);
            }
        }

        /// <summary>
        /// Main entry: handles -m (run a module via runpy) itself, otherwise defers to
        /// the base implementation, then honors IRONPYTHONINSPECT set during execution.
        /// </summary>
        protected override int Run() {
            if (Options.ModuleToRun != null) {
                // PEP 338 support - http://www.python.org/dev/peps/pep-0338
                // This requires the presence of the Python standard library or
                // an equivalent runpy.py which defines a run_module method.

                // import the runpy module
                object runpy, runMod;
                try {
                    runpy = Importer.Import(
                        PythonContext.SharedContext,
                        "runpy",
                        PythonTuple.EMPTY,
                        0
                    );
                } catch (Exception) {
                    Console.WriteLine("Could not import runpy module", Style.Error);
                    return -1;
                }

                // get the run_module method
                try {
                    runMod = PythonOps.GetBoundAttr(PythonContext.SharedContext, runpy, "run_module");
                } catch (Exception) {
                    Console.WriteLine("Could not access runpy.run_module", Style.Error);
                    return -1;
                }

                // call it with the name of the module to run
                try {
                    PythonOps.CallWithKeywordArgs(
                        PythonContext.SharedContext,
                        runMod,
                        new object[] { Options.ModuleToRun, "__main__", ScriptingRuntimeHelpers.True },
                        new string[] { "run_name", "alter_sys" }
                    );
                } catch (SystemExitException e) {
                    object dummy;
                    return e.GetExitCode(out dummy);
                }

                return 0;
            }

            int result = base.Run();

            // Check if IRONPYTHONINSPECT was set during execution
            string inspectLine = Environment.GetEnvironmentVariable("IRONPYTHONINSPECT");
            if (inspectLine != null && !Options.Introspection)
                result = RunInteractiveLoop();

            return result;
        }

        #region Initialization

        /// <summary>
        /// Sets up I/O redirection, sys.path, host variables, extension DLLs and the
        /// site module, and records the main thread.
        /// </summary>
        protected override void Initialize() {
            Debug.Assert(Language != null);

            base.Initialize();

            Console.Output = new OutputWriter(PythonContext, false);
            Console.ErrorOutput = new OutputWriter(PythonContext, true);

            // TODO: must precede path initialization! (??? - test test_importpkg.py)
            int pathIndex = PythonContext.PythonOptions.SearchPaths.Count;

            Language.DomainManager.LoadAssembly(typeof(string).Assembly);
            Language.DomainManager.LoadAssembly(typeof(System.Diagnostics.Debug).Assembly);

            InitializePath(ref pathIndex);
            InitializeModules();
            InitializeExtensionDLLs();

            ImportSite();

            // Equivalent to -i command line option
            // Check if IRONPYTHONINSPECT was set before execution
            string inspectLine = Environment.GetEnvironmentVariable("IRONPYTHONINSPECT");
            if (inspectLine != null)
                Options.Introspection = true;

            // If running in console mode (including with -c), the current working directory should be
            // the first entry in sys.path. If running a script file, however, the CWD should not be added;
            // instead, the script's containing folder should be added.
            string fullPath = "."; // this is a valid path resolving to current working dir. Pinky-swear.
            if (Options.Command == null && Options.FileName != null) {
                if (Options.FileName == "-") {
                    Options.FileName = "<stdin>";
                } else {
#if !SILVERLIGHT
                    // A directory argument means "run its __main__.py" (like CPython).
                    if (Directory.Exists(Options.FileName)) {
                        Options.FileName = Path.Combine(Options.FileName, "__main__.py");
                    }
                    if (!File.Exists(Options.FileName)) {
                        Console.WriteLine(
                            String.Format(
                                System.Globalization.CultureInfo.InvariantCulture,
                                "File {0} does not exist.",
                                Options.FileName),
                            Style.Error);
                        Environment.Exit(1);
                    }
#endif
                    fullPath = Path.GetDirectoryName(
                        Language.DomainManager.Platform.GetFullPath(Options.FileName)
                    );
                }
            }

            PythonContext.InsertIntoPath(0, fullPath);
            PythonContext.MainThread = Thread.CurrentThread;
        }

        /// <summary>
        /// Creates and publishes the __main__ module scope, honoring the configured
        /// division semantics (-Qnew).
        /// </summary>
        protected override Scope/*!*/ CreateScope() {
            ModuleOptions trueDiv = (PythonContext.PythonOptions.DivisionOptions == PythonDivisionOptions.New) ?
                ModuleOptions.TrueDivision : ModuleOptions.None;
            var modCtx = new ModuleContext(new PythonDictionary(), PythonContext);
            modCtx.Features = trueDiv;
            modCtx.InitializeBuiltins(true);

            PythonContext.PublishModule("__main__", modCtx.Module);
            modCtx.Globals["__doc__"] = null;
            modCtx.Globals["__name__"] = "__main__";

            return modCtx.GlobalScope;
        }

        /// <summary>
        /// Appends the entries from IRONPYTHONPATH (if honored) to sys.path,
        /// starting at the given index.
        /// </summary>
        private void InitializePath(ref int pathIndex) {
#if !SILVERLIGHT // paths, environment vars
            if (!Options.IgnoreEnvironmentVariables) {
                string path = Environment.GetEnvironmentVariable("IRONPYTHONPATH");
                if (path != null && path.Length > 0) {
                    string[] paths = path.Split(Path.PathSeparator);
                    foreach (string p in paths) {
                        PythonContext.InsertIntoPath(pathIndex++, p);
                    }
                }
            }
#endif
        }

        /// <summary>
        /// Determines sys.executable and sys.prefix from the entry assembly location
        /// and publishes them to the runtime.
        /// </summary>
        private void InitializeModules() {
            string executable = "";
            string prefix = "";
#if !SILVERLIGHT // paths
            Assembly entryAssembly = Assembly.GetEntryAssembly();
            //Can be null if called from unmanaged code (VS integration scenario)
            if (entryAssembly != null) {
                executable = entryAssembly.Location;
                prefix = Path.GetDirectoryName(executable);
            }

            // Make sure there is an IronPython Lib directory, and if not keep looking up
            while (prefix != null && !File.Exists(Path.Combine(prefix, "Lib/os.py"))) {
                prefix = Path.GetDirectoryName(prefix);
            }
#endif
            PythonContext.SetHostVariables(prefix ?? "", executable, null);
        }

        /// <summary>
        /// Loads any extension DLLs present in sys.prefix\DLLs directory and adds references to them.
        ///
        /// This provides an easy drop-in location for .NET assemblies which should be automatically referenced
        /// (exposed via import), COM libraries, and pre-compiled Python code.
        /// </summary>
        private void InitializeExtensionDLLs() {
            string dir = Path.Combine(PythonContext.InitialPrefix, "DLLs");
            if (Directory.Exists(dir)) {
                foreach (string file in Directory.GetFiles(dir)) {
                    if (file.ToLower().EndsWith(".dll")) {
                        try {
                            ClrModule.AddReference(PythonContext.SharedContext, new FileInfo(file).Name);
                        } catch {
                            // best-effort: a DLL that fails to load is silently skipped
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Imports the standard "site" module unless suppressed (-S); import failures
        /// are reported but not fatal.
        /// </summary>
        private void ImportSite() {
            if (Options.SkipImportSite)
                return;

            try {
                Importer.ImportModule(PythonContext.SharedContext, null, "site", false, -1);
            } catch (Exception e) {
                Console.Write(Language.FormatException(e), Style.Error);
            }
        }

        #endregion

        #region Interactive

        /// <summary>
        /// Runs the startup file, installs default ps1/ps2 prompts, then enters the
        /// interactive loop. A SystemExit from the startup file ends the session.
        /// </summary>
        protected override int RunInteractive() {
            PrintLogo();
            if (Scope == null) {
                Scope = CreateScope();
            }

            int result = 1;
            try {
                RunStartup();
                result = 0;
            } catch (SystemExitException pythonSystemExit) {
                return GetEffectiveExitCode(pythonSystemExit);
            } catch (Exception) {
                // other startup errors are ignored; the loop still starts
            }

            var sys = Engine.GetSysModule();
            sys.SetVariable("ps1", ">>> ");
            sys.SetVariable("ps2", "... ");

            result = RunInteractiveLoop();

            return (int)result;
        }

        /// <summary>
        /// Primary prompt: sys.ps1 if set, otherwise ">>> ".
        /// </summary>
        protected override string Prompt {
            get {
                object value;
                if (Engine.GetSysModule().TryGetVariable("ps1", out value)) {
                    var context = ((PythonScopeExtension)Scope.GetExtension(Language.ContextId)).ModuleContext.GlobalContext;
                    return PythonOps.ToString(context, value);
                }
                return ">>> ";
            }
        }

        /// <summary>
        /// Continuation prompt: sys.ps2 if set, otherwise "... ".
        /// </summary>
        public override string PromptContinuation {
            get {
                object value;
                if (Engine.GetSysModule().TryGetVariable("ps2", out value)) {
                    var context = ((PythonScopeExtension)Scope.GetExtension(Language.ContextId)).ModuleContext.GlobalContext;
                    return PythonOps.ToString(context, value);
                }
                return "... ";
            }
        }

        /// <summary>
        /// Executes the IRONPYTHONSTARTUP file (if configured and honored), optionally
        /// swallowing non-SystemExit exceptions when exception handling is enabled.
        /// </summary>
        private void RunStartup() {
            if (Options.IgnoreEnvironmentVariables)
                return;

#if !SILVERLIGHT // Environment.GetEnvironmentVariable
            string startup = Environment.GetEnvironmentVariable("IRONPYTHONSTARTUP");
            if (startup != null && startup.Length > 0) {
                if (Options.HandleExceptions) {
                    try {
                        ExecuteCommand(Engine.CreateScriptSourceFromFile(startup));
                    } catch (Exception e) {
                        if (e is SystemExitException) throw;
                        Console.Write(Language.FormatException(e), Style.Error);
                    }
                } else {
                    ExecuteCommand(Engine.CreateScriptSourceFromFile(startup));
                }
            }
#endif
        }

        /// <summary>
        /// Runs one interaction, always clearing the current Python exception state
        /// afterwards, and converts SystemExit into an exit code.
        /// </summary>
        protected override int? TryInteractiveAction() {
            try {
                try {
                    return TryInteractiveActionWorker();
                } finally {
                    // sys.exc_info() is normally cleared after functions exit. But interactive console enters statements
                    // directly instead of using functions. So clear explicitly.
                    PythonOps.ClearCurrentException();
                }
            } catch (SystemExitException se) {
                return GetEffectiveExitCode(se);
            }
        }

        /// <summary>
        /// Attempts to run a single interaction and handle any language-specific
        /// exceptions. Base classes can override this and call the base implementation
        /// surrounded with their own exception handling.
        ///
        /// Returns null if successful and execution should continue, or an exit code.
        /// </summary>
        private int? TryInteractiveActionWorker() {
            int? result = null;

            try {
                result = RunOneInteraction();
#if !FEATURE_EXCEPTION_STATE
            } catch (ThreadAbortException) {
#else
            } catch (ThreadAbortException tae) {
                // A KeyboardInterrupt delivered via Thread.Abort: report it and keep the REPL alive.
                KeyboardInterruptException pki = tae.ExceptionState as KeyboardInterruptException;
                if (pki != null) {
                    Console.WriteLine(Language.FormatException(tae), Style.Error);
                    Thread.ResetAbort();
                }
#endif
            }

            return result;
        }

        /// <summary>
        /// Parses a single interactive command and executes it.
        ///
        /// Returns null if successful and execution should continue, or the appropriate exit code.
        /// </summary>
        private int? RunOneInteraction() {
            bool continueInteraction;
            string s = ReadStatement(out continueInteraction);

            if (continueInteraction == false) {
                PythonContext.DispatchCommand(null); // Notify dispatcher that we're done
                return 0;
            }

            if (String.IsNullOrEmpty(s)) {
                // Is it an empty line?
                Console.Write(String.Empty, Style.Out);
                return null;
            }

            SourceUnit su = Language.CreateSnippet(s, "<stdin>", SourceCodeKind.InteractiveCode);
            PythonCompilerOptions pco = (PythonCompilerOptions)Language.GetCompilerOptions(Scope);
            pco.Module |= ModuleOptions.ExecOrEvalCode;

            Action action = delegate() {
                try {
                    su.Compile(pco, ErrorSink).Run(Scope);
                } catch (Exception e) {
                    if (e is SystemExitException) {
                        throw;
                    }
                    // Need to handle exceptions in the delegate so that they're not wrapped
                    // in a TargetInvocationException
                    UnhandledException(e);
                }
            };

            try {
                PythonContext.DispatchCommand(action);
            } catch (SystemExitException sx) {
                object dummy;
                return sx.GetExitCode(out dummy);
            }

            return null;
        }

        // Interactive compilation errors are raised as exceptions rather than collected.
        protected override ErrorSink/*!*/ ErrorSink {
            get { return ThrowingErrorSink.Default; }
        }

        protected override int GetNextAutoIndentSize(string text) {
            return Parser.GetNextAutoIndentSize(text, Options.AutoIndentSize);
        }

        #endregion

        #region Command

        /// <summary>
        /// Runs a -c command string, optionally formatting any thrown exception
        /// instead of letting it propagate.
        /// </summary>
        protected override int RunCommand(string command) {
            if (Options.HandleExceptions) {
                try {
                    return RunCommandWorker(command);
                } catch (Exception e) {
                    Console.Write(Language.FormatException(e), Style.Error);
                    return 1;
                }
            }

            return RunCommandWorker(command);
        }

        private int RunCommandWorker(string command) {
            ScriptCode compiledCode;
            ModuleOptions trueDiv = (PythonContext.PythonOptions.DivisionOptions == PythonDivisionOptions.New) ?
                ModuleOptions.TrueDivision : ModuleOptions.None;
            ModuleOptions modOpt = ModuleOptions.Optimized | ModuleOptions.ModuleBuiltins | trueDiv; ;
            if (Options.SkipFirstSourceLine) {
                modOpt |= ModuleOptions.SkipFirstLine;
            }
            PythonModule module = PythonContext.CompileModule(
                "", // there is no file, it will be set to <module>
                "__main__",
                PythonContext.CreateSnippet(command, SourceCodeKind.File),
                modOpt,
                out compiledCode);
            PythonContext.PublishModule("__main__", module);
            Scope = module.Scope;
            try {
                compiledCode.Run(Scope);
            } catch (SystemExitException pythonSystemExit) {
                // disable introspection when exited:
                Options.Introspection = false;
                return GetEffectiveExitCode(pythonSystemExit);
            }
            return 0;
        }

        #endregion

        #region File

        /// <summary>
        /// Runs a script file, optionally formatting any thrown exception
        /// instead of letting it propagate.
        /// </summary>
        protected override int RunFile(string/*!*/ fileName) {
            int result = 1;
            if (Options.HandleExceptions) {
                try {
                    result = RunFileWorker(fileName);
                } catch (Exception e) {
                    Console.Write(Language.FormatException(e), Style.Error);
                }
            } else {
                result = RunFileWorker(fileName);
            }

            return result;
        }

        private int RunFileWorker(string/*!*/ fileName) {
            try {
                // There is no PEP for this case, only http://bugs.python.org/issue1739468
                object importer;
                if (Importer.TryImportMainFromZip(DefaultContext.Default, fileName, out importer)) {
                    return 0;
                }
                if (importer != null && importer.GetType() != typeof(PythonImport.NullImporter)) {
                    Console.WriteLine(String.Format("can't find '__main__' module in '{0}'", fileName), Style.Error);
                    return 0;
                }
            } catch (SystemExitException pythonSystemExit) {
                // disable introspection when exited:
                Options.Introspection = false;
                return GetEffectiveExitCode(pythonSystemExit);
            }

            // classic file
            ScriptCode compiledCode;
            ModuleOptions modOpt = ModuleOptions.Optimized | ModuleOptions.ModuleBuiltins;
            if (Options.SkipFirstSourceLine) {
                modOpt |= ModuleOptions.SkipFirstLine;
            }
            PythonModule module = PythonContext.CompileModule(
                fileName,
                "__main__",
                PythonContext.CreateFileUnit(String.IsNullOrEmpty(fileName) ? null : fileName, PythonContext.DefaultEncoding),
                modOpt,
                out compiledCode);
            PythonContext.PublishModule("__main__", module);
            Scope = module.Scope;
            try {
                compiledCode.Run(Scope);
            } catch (SystemExitException pythonSystemExit) {
                // disable introspection when exited:
                Options.Introspection = false;
                return GetEffectiveExitCode(pythonSystemExit);
            }
            return 0;
        }

        #endregion

        /// <summary>
        /// Tab-completion candidates: the base class's globals plus any builtin names
        /// starting with the given prefix.
        /// </summary>
        public override IList<string> GetGlobals(string name) {
            IList<string> res = base.GetGlobals(name);
            foreach (object builtinName in PythonContext.BuiltinModuleInstance.__dict__.Keys) {
                string strName = builtinName as string;
                if (strName != null && strName.StartsWith(name)) {
                    res.Add(strName);
                }
            }

            return res;
        }

        protected override void UnhandledException(Exception e) {
            PythonOps.PrintException(PythonContext.SharedContext, e, Console);
        }

        // Strongly-typed shadow of the base class's Language property.
        private new PythonContext Language {
            get { return (PythonContext)base.Language; }
        }
    }
}
#endif
using HTTPlease;
using HTTPlease.Formatters;
using HTTPlease.Formatters.Json;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using Raven.Client.Documents;
using Raven.Client.Documents.Session;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;

using StatusCodes = Microsoft.AspNetCore.Http.StatusCodes;

namespace DaaSDemo.Api.Controllers
{
    using Data;
    using Data.Indexes;
    using Models.Api;
    using Models.Data;
    using Raven.Client.Documents.Linq;

    /// <summary>
    ///     Controller for the tenants API.
    /// </summary>
    [Route("api/v1/tenants")]
    public class TenantsController
        : Controller
    {
        /// <summary>
        ///     Create a new tenants API controller.
        /// </summary>
        /// <param name="documentSession">
        ///     The RavenDB document session for the current request.
        /// </param>
        /// <param name="logger">
        ///     The controller's log facility.
        /// </param>
        public TenantsController(IDocumentSession documentSession, ILogger<TenantsController> logger)
        {
            if (documentSession == null)
                throw new ArgumentNullException(nameof(documentSession));

            if (logger == null)
                throw new ArgumentNullException(nameof(logger));

            DocumentSession = documentSession;
            Log = logger;
        }

        /// <summary>
        ///     The RavenDB document session for the current request.
        /// </summary>
        IDocumentSession DocumentSession { get; }

        /// <summary>
        ///     The controller's log facility.
        /// </summary>
        ILogger Log { get; }

        /// <summary>
        ///     Get a tenant by Id.
        /// </summary>
        /// <param name="tenantId">
        ///     The tenant Id.
        /// </param>
        /// <returns>
        ///     The tenant as JSON, or 404 if no tenant has the specified Id.
        /// </returns>
        [HttpGet("{tenantId}")]
        public IActionResult GetById(string tenantId)
        {
            Tenant tenant = DocumentSession.Load<Tenant>(tenantId);
            if (tenant != null)
                return Json(tenant);

            return NotFound(new
            {
                Id = tenantId,
                EntityType = "Tenant",
                Message = $"No tenant found with Id {tenantId}"
            });
        }

        /// <summary>
        ///     Get all tenants.
        /// </summary>
        [HttpGet]
        public IActionResult List()
        {
            return Json(
                DocumentSession.Query<Tenant>()
            );
        }

        /// <summary>
        ///     Create a tenant.
        /// </summary>
        /// <param name="newTenant">
        ///     The request body as a <see cref="NewTenant"/>.
        /// </param>
        /// <returns>
        ///     The newly-persisted tenant as JSON, or 400 if the request body is missing or invalid.
        /// </returns>
        [HttpPost]
        public IActionResult Create([FromBody] NewTenant newTenant)
        {
            // Guard against a missing / unbindable request body (model binding yields null).
            if (newTenant == null)
                return BadRequest(ModelState);

            if (!ModelState.IsValid)
                return BadRequest(ModelState);

            var tenant = new Tenant
            {
                Name = newTenant.Name
            };
            DocumentSession.Store(tenant);
            DocumentSession.SaveChanges();

            return Json(tenant);
        }

        /// <summary>
        ///     Get all servers owned by a tenant.
        /// </summary>
        /// <param name="tenantId">
        ///     The tenant Id.
        /// </param>
        /// <param name="ensureUpToDate">
        ///     Ensure that the results are as up-to-date as possible?
        /// </param>
        [HttpGet("{tenantId}/servers")]
        public IActionResult GetServers(string tenantId, bool ensureUpToDate = false)
        {
            Tenant tenant = DocumentSession.Load<Tenant>(tenantId);
            if (tenant == null)
            {
                return NotFound(new
                {
                    Id = tenantId,
                    EntityType = "Tenant",
                    // Fixed: message previously read "'{tenantId}." (unbalanced quote).
                    Message = $"Tenant not found with Id '{tenantId}'."
                });
            }

            IRavenQueryable<DatabaseServer> query = DocumentSession.Query<DatabaseServer, DatabaseServerDetails>();
            if (ensureUpToDate)
            {
                // Block (up to 5s) until the index has caught up with recent writes.
                query = query.Customize(
                    queryConfig => queryConfig.WaitForNonStaleResults(
                        waitTimeout: TimeSpan.FromSeconds(5)
                    )
                );
            }

            return Json(
                query.Where(
                    server => server.TenantId == tenantId
                )
                .OrderBy(server => server.Name)
                .ProjectFromIndexFieldsInto<DatabaseServerDetail>()
            );
        }

        /// <summary>
        ///     Get all databases owned by a tenant.
        /// </summary>
        /// <param name="tenantId">
        ///     The tenant Id.
        /// </param>
        /// <param name="ensureUpToDate">
        ///     Ensure that the results are as up-to-date as possible?
        /// </param>
        [HttpGet("{tenantId}/databases")]
        public IActionResult GetDatabases(string tenantId, bool ensureUpToDate = false)
        {
            Tenant tenant = DocumentSession.Load<Tenant>(tenantId);
            if (tenant == null)
            {
                return NotFound(new
                {
                    Id = tenantId,
                    EntityType = "Tenant",
                    // Fixed: message previously read "'{tenantId}." (unbalanced quote).
                    Message = $"Tenant not found with Id '{tenantId}'."
                });
            }

            IRavenQueryable<DatabaseInstance> query = DocumentSession.Query<DatabaseInstance, DatabaseInstanceDetails>();
            if (ensureUpToDate)
            {
                // Block (up to 5s) until the index has caught up with recent writes.
                query = query.Customize(
                    queryConfig => queryConfig.WaitForNonStaleResults(
                        waitTimeout: TimeSpan.FromSeconds(5)
                    )
                );
            }

            return Json(
                query.Where(
                    database => database.TenantId == tenantId
                )
                .ProjectFromIndexFieldsInto<DatabaseInstanceDetail>()
            );
        }
    }
}
/* Copyright (C) 2013-2015 MetaMorph Software, Inc

Permission is hereby granted, free of charge, to any person obtaining a
copy of this data, including any software or models in source or binary
form, as well as any drawings, specifications, and documentation
(collectively "the Data"), to deal in the Data without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Data, and to permit
persons to whom the Data is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Data.

THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
DATA OR THE USE OR OTHER DEALINGS IN THE DATA.

=======================
This version of the META tools is a fork of an original version produced
by Vanderbilt University's Institute for Software Integrated Systems (ISIS).
Their license statement:

Copyright (C) 2011-2014 Vanderbilt University

Developed with the sponsorship of the Defense Advanced Research Projects
Agency (DARPA) and delivered to the U.S. Government with Unlimited Rights
as defined in DFARS 252.227-7013.

Permission is hereby granted, free of charge, to any person obtaining a
copy of this data, including any software or models in source or binary
form, as well as any drawings, specifications, and documentation
(collectively "the Data"), to deal in the Data without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Data, and to permit
persons to whom the Data is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Data.

THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
DATA OR THE USE OR OTHER DEALINGS IN THE DATA.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Xunit;
using System.IO;
using GME.MGA;

namespace DynamicsTeamTest.Projects
{
    /// <summary>
    /// xUnit fixture that imports the MSD_PET XME model once and exposes the
    /// resulting .mga file to every test in the class.
    /// </summary>
    public class MSD_PETFixture : XmeImportFixture
    {
        protected override string xmeFilename
        {
            get
            {
                return Path.Combine("MSD_PET", "MSD_PET.xme");
            }
        }
    }

    /// <summary>
    /// Regression tests for the MSD_PET model: project open, CyPhy2Modelica
    /// test-bench generation, and the CyPhyPET parametric-exploration examples
    /// (surrogate modeling, DOE, and optimization drivers).
    /// </summary>
    public partial class MSD_PET : IUseFixture<MSD_PETFixture>
    {
        private MSD_PETFixture fixture { get; set; }

        // Path to the .mga produced by the shared import fixture.
        internal string mgaFile
        {
            get
            {
                return this.fixture.mgaFile;
            }
        }

        public void SetFixture(MSD_PETFixture data)
        {
            this.fixture = data;
        }

        //[Fact]
        //[Trait("Model", "MSD_PET")]
        //[Trait("ProjectImport/Open", "MSD_PET")]
        //public void ProjectXmeImport()
        //{
        //    Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");
        //}

        /// <summary>
        /// Shared driver for the CyPhyPET example tests: verifies the import
        /// fixture produced an mga, then runs the PET experiment and checks success.
        /// </summary>
        private void RunPetExperiment(string outputDir, string petExperimentPath)
        {
            Assert.True(File.Exists(this.mgaFile), "Failed to generate the mga.");

            bool succeeded = CyPhyPETRunner.Run(outputDir, this.mgaFile, petExperimentPath);

            Assert.True(succeeded, "CyPhyPET failed.");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("ProjectImport/Open", "MSD_PET")]
        public void ProjectMgaOpen()
        {
            string connectionString = "MGA=" + this.mgaFile;
            string mgaPath = connectionString.Substring("MGA=".Length);

            MgaProject project = new MgaProject();
            project.OpenEx(connectionString, "CyPhyML", null);
            project.Close(true);

            Assert.True(File.Exists(mgaPath));
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("CyPhy2Modelica", "MSD_PET")]
        public void TestBenches_MassSpringDamperTest()
        {
            Assert.True(File.Exists(this.mgaFile), "Failed to generate the mga.");

            bool succeeded = CyPhy2ModelicaRunner.Run(
                "Test Benches_MassSpringDamperTest",
                this.mgaFile,
                "/@Test Benches|kind=Testing|relpos=0/@MassSpringDamperTest|kind=TestBench|relpos=0");

            Assert.True(succeeded, "CyPhyPET failed.");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("PET", "MSD_PET")]
        public void ResponseSurfaceExample()
        {
            RunPetExperiment(
                "ResponseSurfaceExample",
                "/@Examples|kind=Testing|relpos=0/@SurrogateModeling|kind=ParametricExplorationFolder|relpos=0/@ResponseSurfaceExample|kind=ParametricExploration|relpos=0");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("PET", "MSD_PET")]
        public void LogisticRegressionExample()
        {
            RunPetExperiment(
                "LogisticRegressionExample",
                "/@Examples|kind=Testing|relpos=0/@SurrogateModeling|kind=ParametricExplorationFolder|relpos=0/@LogisticRegressionExample|kind=ParametricExploration|relpos=0");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("PET", "MSD_PET")]
        public void NeuralNetExample()
        {
            RunPetExperiment(
                "NeuralNetExample",
                "/@Examples|kind=Testing|relpos=0/@SurrogateModeling|kind=ParametricExplorationFolder|relpos=0/@NeuralNetExample|kind=ParametricExploration|relpos=0");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("PET", "MSD_PET")]
        public void KrigingExample()
        {
            RunPetExperiment(
                "KrigingExample",
                "/@Examples|kind=Testing|relpos=0/@SurrogateModeling|kind=ParametricExplorationFolder|relpos=0/@KrigingExample|kind=ParametricExploration|relpos=0");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("PET", "MSD_PET")]
        public void CentralCompositeExample()
        {
            RunPetExperiment(
                "CentralCompositeExample",
                "/@Examples|kind=Testing|relpos=0/@DOE|kind=ParametricExplorationFolder|relpos=0/@CentralCompositeExample|kind=ParametricExploration|relpos=0");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("PET", "MSD_PET")]
        public void FullFactorialExample()
        {
            RunPetExperiment(
                "FullFactorialExample",
                "/@Examples|kind=Testing|relpos=0/@DOE|kind=ParametricExplorationFolder|relpos=0/@FullFactorialExample|kind=ParametricExploration|relpos=0");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("PET", "MSD_PET")]
        public void LatinHypercubeExample()
        {
            RunPetExperiment(
                "LatinHypercubeExample",
                "/@Examples|kind=Testing|relpos=0/@DOE|kind=ParametricExplorationFolder|relpos=0/@LatinHypercubeExample|kind=ParametricExploration|relpos=0");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("PET", "MSD_PET")]
        public void UniformExample()
        {
            RunPetExperiment(
                "UniformExample",
                "/@Examples|kind=Testing|relpos=0/@DOE|kind=ParametricExplorationFolder|relpos=0/@UniformExample|kind=ParametricExploration|relpos=0");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("PET", "MSD_PET")]
        public void CONMINExample()
        {
            RunPetExperiment(
                "CONMINExample",
                "/@Examples|kind=Testing|relpos=0/@Optimization|kind=ParametricExplorationFolder|relpos=0/@CONMINExample|kind=ParametricExploration|relpos=0");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("PET", "MSD_PET")]
        public void COBYLAExample()
        {
            RunPetExperiment(
                "COBYLAExample",
                "/@Examples|kind=Testing|relpos=0/@Optimization|kind=ParametricExplorationFolder|relpos=0/@COBYLAExample|kind=ParametricExploration|relpos=0");
        }

        [Fact]
        [Trait("Model", "MSD_PET")]
        [Trait("PET", "MSD_PET")]
        public void NEWSUMTExample()
        {
            RunPetExperiment(
                "NEWSUMTExample",
                "/@Examples|kind=Testing|relpos=0/@Optimization|kind=ParametricExplorationFolder|relpos=0/@NEWSUMTExample|kind=ParametricExploration|relpos=0");
        }
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Linq.Expressions;
using System.Runtime.Versioning;
using NuGet.Resources;

namespace NuGet
{
    /// <summary>
    /// Extension methods over <see cref="IPackageRepository"/>: package lookup,
    /// dependency resolution and update queries. Many methods probe for richer
    /// optional interfaces (IPackageLookup, IServiceBasedRepository, ...) and use
    /// them as fast paths before falling back to enumerating GetPackages().
    /// </summary>
    public static class PackageRepositoryExtensions
    {
        /// <summary>
        /// Starts a named operation on repositories that are operation-aware;
        /// returns a no-op disposable for everything else.
        /// </summary>
        public static IDisposable StartOperation(this IPackageRepository self, string operation, string mainPackageId, string mainPackageVersion)
        {
            IOperationAwareRepository repo = self as IOperationAwareRepository;
            if (repo != null)
            {
                return repo.StartOperation(operation, mainPackageId, mainPackageVersion);
            }
            return DisposableAction.NoOp;
        }

        /// <summary>Checks whether the given package (id + version) exists in the repository.</summary>
        public static bool Exists(this IPackageRepository repository, IPackageName package)
        {
            return repository.Exists(package.Id, package.Version);
        }

        /// <summary>Checks whether any version of <paramref name="packageId"/> exists.</summary>
        public static bool Exists(this IPackageRepository repository, string packageId)
        {
            return Exists(repository, packageId, version: null);
        }

        /// <summary>
        /// Existence check. Uses the repository's <see cref="IPackageLookup"/> fast path
        /// when an id and an exact version are both supplied; otherwise falls back to
        /// "FindPackage returned non-null".
        /// </summary>
        public static bool Exists(this IPackageRepository repository, string packageId, SemanticVersion version)
        {
            IPackageLookup packageLookup = repository as IPackageLookup;
            if ((packageLookup != null) && !String.IsNullOrEmpty(packageId) && (version != null))
            {
                return packageLookup.Exists(packageId, version);
            }
            return repository.FindPackage(packageId, version) != null;
        }

        /// <summary>Try-pattern wrapper; <paramref name="package"/> is null when not found.</summary>
        public static bool TryFindPackage(this IPackageRepository repository, string packageId, SemanticVersion version, out IPackage package)
        {
            package = repository.FindPackage(packageId, version);
            return package != null;
        }

        /// <summary>Finds the best match for <paramref name="packageId"/> with no version pin.</summary>
        public static IPackage FindPackage(this IPackageRepository repository, string packageId)
        {
            return repository.FindPackage(packageId, version: null);
        }

        public static IPackage FindPackage(this IPackageRepository repository, string packageId, SemanticVersion version)
        {
            // Default allowPrereleaseVersions to true here because the caller typically
            // wants to find all packages in this scenario, e.g. when checking if a
            // package is already installed in the local repository.
            // The same applies to allowUnlisted.
            return FindPackage(repository, packageId, version, NullConstraintProvider.Instance, allowPrereleaseVersions: true, allowUnlisted: true);
        }

        public static IPackage FindPackage(this IPackageRepository repository, string packageId, SemanticVersion version, bool allowPrereleaseVersions, bool allowUnlisted)
        {
            return FindPackage(repository, packageId, version, NullConstraintProvider.Instance, allowPrereleaseVersions, allowUnlisted);
        }

        /// <summary>
        /// Core single-package lookup: optionally pinned to an exact version, honoring
        /// constraint-provider, prerelease and listing filters. Returns null when
        /// nothing matches.
        /// </summary>
        public static IPackage FindPackage(
            this IPackageRepository repository,
            string packageId,
            SemanticVersion version,
            IPackageConstraintProvider constraintProvider,
            bool allowPrereleaseVersions,
            bool allowUnlisted)
        {
            if (repository == null)
            {
                throw new ArgumentNullException("repository");
            }

            if (packageId == null)
            {
                throw new ArgumentNullException("packageId");
            }

            // if an explicit version is specified, disregard the 'allowUnlisted' argument
            // and always allow unlisted packages.
            if (version != null)
            {
                allowUnlisted = true;
            }
            else if (!allowUnlisted && (constraintProvider == null || constraintProvider == NullConstraintProvider.Instance))
            {
                // Fast path: no version pin and no real constraints, so the repository can
                // answer "latest by id" directly if it supports that.
                var packageLatestLookup = repository as ILatestPackageLookup;
                if (packageLatestLookup != null)
                {
                    IPackage package;
                    if (packageLatestLookup.TryFindLatestPackageById(packageId, allowPrereleaseVersions, out package))
                    {
                        return package;
                    }
                }
            }

            // If the repository implements its own lookup then use that instead.
            // This is an optimization that we use so we don't have to enumerate packages for
            // sources that don't need to.
            var packageLookup = repository as IPackageLookup;
            if (packageLookup != null && version != null)
            {
                return packageLookup.FindPackage(packageId, version);
            }

            IEnumerable<IPackage> packages = repository.FindPackagesById(packageId);

            // Materialize once, then order newest-first so FirstOrDefault picks the highest match.
            packages = packages.ToList()
                .OrderByDescending(p => p.Version);

            if (!allowUnlisted)
            {
                packages = packages.Where(PackageExtensions.IsListed);
            }

            if (version != null)
            {
                packages = packages.Where(p => p.Version == version);
            }
            else if (constraintProvider != null)
            {
                packages = FilterPackagesByConstraints(constraintProvider, packages, packageId, allowPrereleaseVersions);
            }

            return packages.FirstOrDefault();
        }

        /// <summary>
        /// Finds the best package matching a version range (rather than an exact version),
        /// additionally filtered by the optional constraint provider.
        /// </summary>
        public static IPackage FindPackage(this IPackageRepository repository, string packageId, IVersionSpec versionSpec,
            IPackageConstraintProvider constraintProvider, bool allowPrereleaseVersions, bool allowUnlisted)
        {
            var packages = repository.FindPackages(packageId, versionSpec, allowPrereleaseVersions, allowUnlisted);

            if (constraintProvider != null)
            {
                packages = FilterPackagesByConstraints(constraintProvider, packages, packageId, allowPrereleaseVersions);
            }

            return packages.FirstOrDefault();
        }

        /// <summary>Finds all packages whose id is in <paramref name="packageIds"/> (batched query, see FindPackages&lt;T&gt;).</summary>
        public static IEnumerable<IPackage> FindPackages(this IPackageRepository repository, IEnumerable<string> packageIds)
        {
            if (packageIds == null)
            {
                throw new ArgumentNullException("packageIds");
            }

            return FindPackages(repository, packageIds, GetFilterExpression);
        }

        /// <summary>Finds every package with the given id, preferring the repository's own id lookup when available.</summary>
        public static IEnumerable<IPackage> FindPackagesById(this IPackageRepository repository, string packageId)
        {
            var serviceBasedRepository = repository as IPackageLookup;
            if (serviceBasedRepository != null)
            {
                return serviceBasedRepository.FindPackagesById(packageId).ToList();
            }
            else
            {
                return FindPackagesByIdCore(repository, packageId);
            }
        }

        /// <summary>Fallback id lookup that scans GetPackages() with a case-insensitive id comparison.</summary>
        internal static IEnumerable<IPackage> FindPackagesByIdCore(IPackageRepository repository, string packageId)
        {
            // Lower-case the probe id using the repository's own culture when it declares one.
            var cultureRepository = repository as ICultureAwareRepository;
            if (cultureRepository != null)
            {
                packageId = packageId.ToLower(cultureRepository.Culture);
            }
            else
            {
                packageId = packageId.ToLower(CultureInfo.CurrentCulture);
            }

            // NOTE(review): p.Id.ToLower() is deliberately culture-less here, presumably so
            // remote LINQ providers can translate the call -- confirm before "fixing" (cf. the
            // CA1304 suppression on GetFilterExpression below).
            return (from p in repository.GetPackages()
                    where p.Id.ToLower() == packageId
                    orderby p.Id
                    select p).ToList();
        }

        /// <summary>
        /// Since Odata dies when our query for updates is too big. We query for updates 10 packages at a time
        /// and return the full list of packages.
        /// </summary>
        private static IEnumerable<IPackage> FindPackages<T>(
            this IPackageRepository repository,
            IEnumerable<T> items,
            Func<IEnumerable<T>, Expression<Func<IPackage, bool>>> filterSelector)
        {
            const int batchSize = 10;

            while (items.Any())
            {
                IEnumerable<T> currentItems = items.Take(batchSize);
                // Build a single OR-ed filter expression for the current batch.
                Expression<Func<IPackage, bool>> filterExpression = filterSelector(currentItems);

                var query = repository.GetPackages()
                    .Where(filterExpression)
                    .OrderBy(p => p.Id);

                foreach (var package in query)
                {
                    yield return package;
                }

                items = items.Skip(batchSize);
            }
        }

        /// <summary>
        /// Finds every version of <paramref name="packageId"/> matching the optional version
        /// spec, newest first, honoring listing and prerelease filters. Deferred: the
        /// returned sequence re-queries on each enumeration.
        /// </summary>
        public static IEnumerable<IPackage> FindPackages(
            this IPackageRepository repository,
            string packageId,
            IVersionSpec versionSpec,
            bool allowPrereleaseVersions,
            bool allowUnlisted)
        {
            if (repository == null)
            {
                throw new ArgumentNullException("repository");
            }

            if (packageId == null)
            {
                throw new ArgumentNullException("packageId");
            }

            IEnumerable<IPackage> packages = repository.FindPackagesById(packageId)
                .OrderByDescending(p => p.Version);

            if (!allowUnlisted)
            {
                packages = packages.Where(PackageExtensions.IsListed);
            }

            if (versionSpec != null)
            {
                packages = packages.FindByVersion(versionSpec);
            }

            packages = FilterPackagesByConstraints(NullConstraintProvider.Instance, packages, packageId, allowPrereleaseVersions);

            return packages;
        }

        /// <summary>Best (highest-version) match for an id + version range, or null.</summary>
        public static IPackage FindPackage(
            this IPackageRepository repository,
            string packageId,
            IVersionSpec versionSpec,
            bool allowPrereleaseVersions,
            bool allowUnlisted)
        {
            return repository.FindPackages(packageId, versionSpec, allowPrereleaseVersions, allowUnlisted).FirstOrDefault();
        }

        /// <summary>
        /// From the candidate ids, selects packages that declare a dependency on
        /// <paramref name="package"/> whose version spec (and any external constraint)
        /// is satisfied by the package's version.
        /// </summary>
        public static IEnumerable<IPackage> FindCompatiblePackages(this IPackageRepository repository,
                                                                   IPackageConstraintProvider constraintProvider,
                                                                   IEnumerable<string> packageIds,
                                                                   IPackage package,
                                                                   FrameworkName targetFramework,
                                                                   bool allowPrereleaseVersions)
        {
            // NOTE: 'otherConstaint' (sic) is kept as-is; renaming is out of scope for a doc pass.
            return (from p in repository.FindPackages(packageIds)
                    where allowPrereleaseVersions || p.IsReleaseVersion()
                    let dependency = p.FindDependency(package.Id, targetFramework)
                    let otherConstaint = constraintProvider.GetConstraint(p.Id)
                    where dependency != null &&
                          dependency.VersionSpec.Satisfies(package.Version) &&
                          (otherConstaint == null || otherConstaint.Satisfies(package.Version))
                    select p);
        }

        /// <summary>Finds this package's dependency on <paramref name="packageId"/> (case-insensitive) compatible with the target framework.</summary>
        public static PackageDependency FindDependency(this IPackageMetadata package, string packageId, FrameworkName targetFramework)
        {
            return (from dependency in package.GetCompatiblePackageDependencies(targetFramework)
                    where dependency.Id.Equals(packageId, StringComparison.OrdinalIgnoreCase)
                    select dependency).FirstOrDefault();
        }

        public static IQueryable<IPackage> Search(this IPackageRepository repository, string searchTerm, bool allowPrereleaseVersions)
        {
            return Search(repository, searchTerm, targetFrameworks: Enumerable.Empty<string>(), allowPrereleaseVersions: allowPrereleaseVersions);
        }

        /// <summary>
        /// Searches the repository, delegating to the service's own search when it
        /// implements <see cref="IServiceBasedRepository"/>.
        /// </summary>
        public static IQueryable<IPackage> Search(this IPackageRepository repository, string searchTerm, IEnumerable<string> targetFrameworks, bool allowPrereleaseVersions)
        {
            if (targetFrameworks == null)
            {
                throw new ArgumentNullException("targetFrameworks");
            }

            var serviceBasedRepository = repository as IServiceBasedRepository;
            if (serviceBasedRepository != null)
            {
                return serviceBasedRepository.Search(searchTerm, targetFrameworks, allowPrereleaseVersions);
            }

            // Ignore the target framework if the repository doesn't support searching
            return repository.GetPackages().Find(searchTerm)
                .FilterByPrerelease(allowPrereleaseVersions)
                .AsQueryable();
        }

        public static IPackage ResolveDependency(this IPackageRepository repository, PackageDependency dependency, bool allowPrereleaseVersions, bool preferListedPackages)
        {
            return ResolveDependency(repository, dependency, constraintProvider: null, allowPrereleaseVersions: allowPrereleaseVersions, preferListedPackages: preferListedPackages, dependencyVersion: DependencyVersion.Lowest);
        }

        public static IPackage ResolveDependency(this IPackageRepository repository, PackageDependency dependency, IPackageConstraintProvider constraintProvider, bool allowPrereleaseVersions, bool preferListedPackages)
        {
            return ResolveDependency(repository, dependency, constraintProvider, allowPrereleaseVersions, preferListedPackages, dependencyVersion: DependencyVersion.Lowest);
        }

        /// <summary>
        /// Resolves a dependency to a concrete package, delegating to the repository's
        /// own resolver when it implements <see cref="IDependencyResolver"/>.
        /// </summary>
        public static IPackage ResolveDependency(this IPackageRepository repository, PackageDependency dependency, IPackageConstraintProvider constraintProvider, bool allowPrereleaseVersions, bool preferListedPackages, DependencyVersion dependencyVersion)
        {
            IDependencyResolver dependencyResolver = repository as IDependencyResolver;
            if (dependencyResolver != null)
            {
                return dependencyResolver.ResolveDependency(dependency, constraintProvider, allowPrereleaseVersions, preferListedPackages, dependencyVersion);
            }
            return ResolveDependencyCore(repository, dependency, constraintProvider, allowPrereleaseVersions, preferListedPackages, dependencyVersion);
        }

        /// <summary>
        /// Default dependency resolution: gathers candidates by id, filters by
        /// constraints/prerelease, optionally prefers listed packages, then picks
        /// per <paramref name="dependencyVersion"/>.
        /// </summary>
        internal static IPackage ResolveDependencyCore(
            this IPackageRepository repository,
            PackageDependency dependency,
            IPackageConstraintProvider constraintProvider,
            bool allowPrereleaseVersions,
            bool preferListedPackages,
            DependencyVersion dependencyVersion)
        {
            if (repository == null)
            {
                throw new ArgumentNullException("repository");
            }

            if (dependency == null)
            {
                throw new ArgumentNullException("dependency");
            }

            IEnumerable<IPackage> packages = repository.FindPackagesById(dependency.Id).ToList();

            // Always filter by constraints when looking for dependencies
            packages = FilterPackagesByConstraints(constraintProvider, packages, dependency.Id, allowPrereleaseVersions);

            IList<IPackage> candidates = packages.ToList();

            if (preferListedPackages)
            {
                // pick among Listed packages first; fall through to all candidates if none match.
                IPackage listedSelectedPackage = ResolveDependencyCore(candidates.Where(PackageExtensions.IsListed),
                    dependency,
                    dependencyVersion);
                if (listedSelectedPackage != null)
                {
                    return listedSelectedPackage;
                }
            }

            return ResolveDependencyCore(candidates, dependency, dependencyVersion);
        }

        /// <summary>
        /// From the list of packages <paramref name="packages"/>, selects the package that best
        /// matches the <paramref name="dependency"/>.
        /// </summary>
        /// <param name="packages">The list of packages.</param>
        /// <param name="dependency">The dependency used to select package from the list.</param>
        /// <param name="dependencyVersion">Indicates the method used to select dependency.
        /// Applicable only when dependency.VersionSpec is not null.</param>
        /// <returns>The selected package.</returns>
        private static IPackage ResolveDependencyCore(
            IEnumerable<IPackage> packages,
            PackageDependency dependency,
            DependencyVersion dependencyVersion)
        {
            // If version info was specified then use it
            if (dependency.VersionSpec != null)
            {
                // Ascending order is the documented precondition of SelectDependency.
                packages = packages.FindByVersion(dependency.VersionSpec).OrderBy(p => p.Version);
                return packages.SelectDependency(dependencyVersion);
            }
            else
            {
                // BUG 840: If no version info was specified then pick the latest
                return packages.OrderByDescending(p => p.Version)
                    .FirstOrDefault();
            }
        }

        /// <summary>
        /// Returns updates for packages from the repository
        /// </summary>
        /// <param name="repository">The repository to search for updates</param>
        /// <param name="packages">Packages to look for updates</param>
        /// <param name="includePrerelease">Indicates whether to consider prerelease updates.</param>
        /// <param name="includeAllVersions">Indicates whether to include all versions of an update as opposed to only including the latest version.</param>
        public static IEnumerable<IPackage> GetUpdates(
            this IPackageRepository repository,
            IEnumerable<IPackageName> packages,
            bool includePrerelease,
            bool includeAllVersions,
            IEnumerable<FrameworkName> targetFrameworks = null,
            IEnumerable<IVersionSpec> versionConstraints = null)
        {
            if (packages.IsEmpty())
            {
                return Enumerable.Empty<IPackage>();
            }

            // Service-based repositories can answer the update query server-side.
            var serviceBasedRepository = repository as IServiceBasedRepository;
            return serviceBasedRepository != null ?
                serviceBasedRepository.GetUpdates(packages, includePrerelease, includeAllVersions, targetFrameworks, versionConstraints) :
                repository.GetUpdatesCore(packages, includePrerelease, includeAllVersions, targetFrameworks, versionConstraints);
        }

        /// <summary>
        /// Client-side update computation: for each input package, returns candidates with
        /// a strictly higher version that support the target frameworks and satisfy the
        /// positionally-matching version constraint.
        /// </summary>
        public static IEnumerable<IPackage> GetUpdatesCore(
            this IPackageRepository repository,
            IEnumerable<IPackageName> packages,
            bool includePrerelease,
            bool includeAllVersions,
            IEnumerable<FrameworkName> targetFramework,
            IEnumerable<IVersionSpec> versionConstraints)
        {
            List<IPackageName> packageList = packages.ToList();

            if (!packageList.Any())
            {
                return Enumerable.Empty<IPackage>();
            }

            // When no constraints are supplied, use an array of nulls positionally
            // aligned with packageList (null == unconstrained).
            IList<IVersionSpec> versionConstraintList;
            if (versionConstraints == null)
            {
                versionConstraintList = new IVersionSpec[packageList.Count];
            }
            else
            {
                versionConstraintList = versionConstraints.ToList();
            }

            if (packageList.Count != versionConstraintList.Count)
            {
                throw new ArgumentException(NuGetResources.GetUpdatesParameterMismatch);
            }

            // These are the packages that we need to look at for potential updates.
            ILookup<string, IPackage> sourcePackages = GetUpdateCandidates(repository, packageList, includePrerelease)
                .ToList()
                .ToLookup(package => package.Id, StringComparer.OrdinalIgnoreCase);

            var results = new List<IPackage>();
            for (int i = 0; i < packageList.Count; i++)
            {
                var package = packageList[i];
                var constraint = versionConstraintList[i];

                var updates = from candidate in sourcePackages[package.Id]
                              where (candidate.Version > package.Version) &&
                                    SupportsTargetFrameworks(targetFramework, candidate) &&
                                    (constraint == null || constraint.Satisfies(candidate.Version))
                              select candidate;

                results.AddRange(updates);
            }

            if (!includeAllVersions)
            {
                // Collapse to the latest version per package id.
                return results.CollapseById();
            }
            return results;
        }

        /// <summary>An empty framework list means "no framework filter".</summary>
        private static bool SupportsTargetFrameworks(IEnumerable<FrameworkName> targetFramework, IPackage package)
        {
            return targetFramework.IsEmpty() || targetFramework.Any(t => VersionUtility.IsCompatible(t, package.GetSupportedFrameworks()));
        }

        /// <summary>Clones the repository when it supports cloning; otherwise returns it unchanged.</summary>
        public static IPackageRepository Clone(this IPackageRepository repository)
        {
            var cloneableRepository = repository as ICloneableRepository;
            if (cloneableRepository != null)
            {
                return cloneableRepository.Clone();
            }
            return repository;
        }

        /// <summary>
        /// Since odata dies when our query for updates is too big. We query for updates 10 packages at a time
        /// and return the full list of candidates for updates.
        /// </summary>
        private static IEnumerable<IPackage> GetUpdateCandidates(
            IPackageRepository repository,
            IEnumerable<IPackageName> packages,
            bool includePrerelease)
        {
            var query = FindPackages(repository, packages, GetFilterExpression);

            if (!includePrerelease)
            {
                query = query.Where(p => p.IsReleaseVersion());
            }

            // for updates, we never consider unlisted packages
            query = query.Where(PackageExtensions.IsListed);

            return query;
        }

        /// <summary>
        /// For the list of input packages generate an expression like:
        /// p => p.Id == 'package1id' or p.Id == 'package2id' or p.Id == 'package3id'... up to package n
        /// </summary>
        private static Expression<Func<IPackage, bool>> GetFilterExpression(IEnumerable<IPackageName> packages)
        {
            return GetFilterExpression(packages.Select(p => p.Id));
        }

        [SuppressMessage("Microsoft.Globalization", "CA1304:SpecifyCultureInfo", MessageId = "System.String.ToLower", Justification = "This is for a linq query")]
        private static Expression<Func<IPackage, bool>> GetFilterExpression(IEnumerable<string> ids)
        {
            // Builds: p => p.Id.ToLower() == id1 || p.Id.ToLower() == id2 || ...
            // The parameter is typed IPackageName but bound into a Func<IPackage, bool>
            // lambda; this compiles because the types are reference-assignable.
            ParameterExpression parameterExpression = Expression.Parameter(typeof(IPackageName));
            Expression expressionBody = ids.Select(id => GetCompareExpression(parameterExpression, id.ToLower()))
                .Aggregate(Expression.OrElse);

            return Expression.Lambda<Func<IPackage, bool>>(expressionBody, parameterExpression);
        }

        /// <summary>
        /// Builds the expression: package.Id.ToLower() == "somepackageid"
        /// </summary>
        private static Expression GetCompareExpression(Expression parameterExpression, object value)
        {
            // package.Id
            Expression propertyExpression = Expression.Property(parameterExpression, "Id");

            // .ToLower()
            Expression toLowerExpression = Expression.Call(propertyExpression, typeof(string).GetMethod("ToLower", Type.EmptyTypes));

            // == localPackage.Id
            return Expression.Equal(toLowerExpression, Expression.Constant(value));
        }

        /// <summary>
        /// Applies the constraint-provider's version spec for <paramref name="packageId"/>
        /// and, optionally, a release-only filter. Deferred; does not enumerate.
        /// </summary>
        private static IEnumerable<IPackage> FilterPackagesByConstraints(
            IPackageConstraintProvider constraintProvider,
            IEnumerable<IPackage> packages,
            string packageId,
            bool allowPrereleaseVersions)
        {
            constraintProvider = constraintProvider ?? NullConstraintProvider.Instance;

            // Filter packages by this constraint
            IVersionSpec constraint = constraintProvider.GetConstraint(packageId);
            if (constraint != null)
            {
                packages = packages.FindByVersion(constraint);
            }

            if (!allowPrereleaseVersions)
            {
                packages = packages.Where(p => p.IsReleaseVersion());
            }

            return packages;
        }

        /// <summary>
        /// Selects the dependency package from the list of candidate packages
        /// according to <paramref name="dependencyVersion"/>.
        /// </summary>
        /// <param name="packages">The list of candidate packages.</param>
        /// <param name="dependencyVersion">The rule used to select the package from
        /// <paramref name="packages"/> </param>
        /// <returns>The selected package.</returns>
        /// <remarks>Precondition: <paramref name="packages"/> are ordered by ascending version.</remarks>
        internal static IPackage SelectDependency(this IEnumerable<IPackage> packages, DependencyVersion dependencyVersion)
        {
            if (packages == null || !packages.Any())
            {
                return null;
            }

            if (dependencyVersion == DependencyVersion.Lowest)
            {
                return packages.FirstOrDefault();
            }
            else if (dependencyVersion == DependencyVersion.Highest)
            {
                return packages.LastOrDefault();
            }
            else if (dependencyVersion == DependencyVersion.HighestPatch)
            {
                // Lowest major.minor group (input is ascending), then the highest patch within it.
                var groups = from p in packages
                             group p by new { p.Version.Version.Major, p.Version.Version.Minor } into g
                             orderby g.Key.Major, g.Key.Minor
                             select g;
                return (from p in groups.First()
                        orderby p.Version descending
                        select p).FirstOrDefault();
            }
            else if (dependencyVersion == DependencyVersion.HighestMinor)
            {
                // Lowest major group, then the highest version within it.
                var groups = from p in packages
                             group p by new { p.Version.Version.Major } into g
                             orderby g.Key.Major
                             select g;
                return (from p in groups.First()
                        orderby p.Version descending
                        select p).FirstOrDefault();
            }

            throw new ArgumentOutOfRangeException("dependencyVersion");
        }
    }
}