context
stringlengths
2.52k
185k
gt
stringclasses
1 value
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace Loon.Net
{
    /// <summary>
    /// Base64 (RFC 4648) encoder/decoder plus a simple uppercase bin-hex decoder.
    /// All operations are static; the lookup tables are built lazily by Checking().
    /// </summary>
    public class Base64Coder
    {
        // A byte can hold 256 distinct values, so the reverse table needs 256
        // slots. (Was 255, which threw IndexOutOfRangeException when a buffer
        // contained the byte value 0xFF.)
        private const int BASELENGTH = 256;
        private const int LOOKUPLENGTH = 64;
        private const int TWENTYFOURBITGROUP = 24;
        private const int EIGHTBIT = 8;
        private const int SIXTEENBIT = 16;
        private const int FOURBYTE = 4;
        private const byte PAD = (byte)'=';

        // Reverse table: byte value -> 6-bit Base64 value, or -1 for bytes that
        // are not in the Base64 alphabet. A signed table is required so that
        // 'A' (which maps to 0) can be told apart from an invalid character;
        // the previous byte[] table used 0 as the sentinel, which made
        // IsBase64('A') return false and made DecodeBase64 treat '=' padding
        // and whitespace as the value 0, corrupting its output.
        private static sbyte[] BASE64_ALPHABET;
        // Forward table: 6-bit value -> Base64 ASCII character.
        private static byte[] LOOKUP_BASE64_ALPHABET;

        // Static utility class; not instantiable.
        private Base64Coder()
        {
        }

        /// <summary>
        /// Decodes an uppercase hexadecimal string (e.g. "0AFF") into bytes.
        /// NOTE: digits must be '0'-'9' or 'A'-'F'; lowercase hex is not supported.
        /// </summary>
        public static byte[] FromBinHexString(string s)
        {
            char[] chars = s.ToCharArray();
            byte[] bytes = new byte[chars.Length / 2 + chars.Length % 2];
            FromBinHexString(chars, 0, chars.Length, bytes);
            return bytes;
        }

        /// <summary>
        /// Decodes hexadecimal characters into <paramref name="buffer"/>, two
        /// chars per byte; an odd trailing digit fills the high nibble.
        /// Returns the number of bytes written starting at <paramref name="offset"/>.
        /// </summary>
        public static int FromBinHexString(char[] chars, int offset, int charLength, byte[] buffer)
        {
            int bufIndex = offset;
            for (int i = 0; i < charLength - 1; i += 2)
            {
                // High nibble, then low nibble ('A'-'F' sort above '9').
                buffer[bufIndex] = (chars[i] > '9' ? (byte)(chars[i] - 'A' + 10) : (byte)(chars[i] - '0'));
                buffer[bufIndex] <<= 4;
                buffer[bufIndex] += chars[i + 1] > '9' ? (byte)(chars[i + 1] - 'A' + 10) : (byte)(chars[i + 1] - '0');
                bufIndex++;
            }
            if (charLength % 2 != 0)
            {
                // Odd digit count: last digit becomes the high nibble.
                buffer[bufIndex++] = (byte)((chars[charLength - 1] > '9' ? (byte)(chars[charLength - 1] - 'A' + 10) : (byte)(chars[charLength - 1] - '0')) << 4);
            }
            return bufIndex - offset;
        }

        // Lazily builds both lookup tables (idempotent).
        private static void Checking()
        {
            if (BASE64_ALPHABET == null)
            {
                BASE64_ALPHABET = new sbyte[BASELENGTH];
                for (int i = 0; i < BASELENGTH; i++)
                {
                    BASE64_ALPHABET[i] = -1; // invalid by default
                }
                for (int i = 'Z'; i >= 'A'; i--)
                {
                    BASE64_ALPHABET[i] = (sbyte)(i - 'A');
                }
                for (int i = 'z'; i >= 'a'; i--)
                {
                    BASE64_ALPHABET[i] = (sbyte)(i - 'a' + 26);
                }
                for (int i = '9'; i >= '0'; i--)
                {
                    BASE64_ALPHABET[i] = (sbyte)(i - '0' + 52);
                }
                BASE64_ALPHABET['+'] = 62;
                BASE64_ALPHABET['/'] = 63;
            }
            if (LOOKUP_BASE64_ALPHABET == null)
            {
                LOOKUP_BASE64_ALPHABET = new byte[LOOKUPLENGTH];
                for (int i = 0; i <= 25; i++)
                {
                    LOOKUP_BASE64_ALPHABET[i] = (byte)('A' + i);
                }
                for (int i = 26, j = 0; i <= 51; i++, j++)
                {
                    LOOKUP_BASE64_ALPHABET[i] = (byte)('a' + j);
                }
                for (int i = 52, j = 0; i <= 61; i++, j++)
                {
                    LOOKUP_BASE64_ALPHABET[i] = (byte)('0' + j);
                }
                LOOKUP_BASE64_ALPHABET[62] = (byte)'+';
                LOOKUP_BASE64_ALPHABET[63] = (byte)'/';
            }
        }

        /// <summary>
        /// Returns true when every character of <paramref name="v"/> is a
        /// Base64 alphabet character or '=' padding.
        /// </summary>
        public static bool IsBase64(string v)
        {
            // Encoding.ASCII replaces the project StringUtils.GetAsciiBytes
            // helper; non-ASCII characters become '?', which is (correctly)
            // rejected by the alphabet check below.
            return IsArrayByteBase64(Encoding.ASCII.GetBytes(v));
        }

        /// <summary>
        /// Returns true when every byte is a Base64 alphabet byte or padding.
        /// The empty array is considered valid.
        /// </summary>
        public static bool IsArrayByteBase64(byte[] bytes)
        {
            Checking();
            int length = bytes.Length;
            if (length == 0)
            {
                return true;
            }
            for (int i = 0; i < length; i++)
            {
                if (!Base64Coder.IsBase64(bytes[i]))
                {
                    return false;
                }
            }
            return true;
        }

        // -1 marks bytes outside the alphabet; 'A' maps to 0 and is valid
        // (the old "!= 0" test wrongly rejected 'A').
        private static bool IsBase64(byte octect)
        {
            return (octect == PAD || BASE64_ALPHABET[octect] != -1);
        }

        /// <summary>
        /// Encodes raw bytes into their Base64 representation (returned as
        /// ASCII bytes), padding the final quadruple with '=' as needed.
        /// </summary>
        public static byte[] Encode(byte[] binaryData)
        {
            Checking();
            int lengthDataBits = binaryData.Length * EIGHTBIT;
            int fewerThan24bits = lengthDataBits % TWENTYFOURBITGROUP;
            int numberTriplets = lengthDataBits / TWENTYFOURBITGROUP;
            // One extra output quadruple when a 1- or 2-byte remainder exists.
            byte[] encodedData;
            if (fewerThan24bits != 0)
            {
                encodedData = new byte[(numberTriplets + 1) * 4];
            }
            else
            {
                encodedData = new byte[numberTriplets * 4];
            }
            byte k = 0;
            byte l = 0;
            byte b1 = 0;
            byte b2 = 0;
            byte b3 = 0;
            int encodedIndex = 0;
            int dataIndex = 0;
            int i = 0;
            for (i = 0; i < numberTriplets; i++)
            {
                dataIndex = i * 3;
                b1 = binaryData[dataIndex];
                b2 = binaryData[dataIndex + 1];
                b3 = binaryData[dataIndex + 2];
                l = (byte)(b2 & 0x0f);   // low nibble of b2, feeds 3rd char
                k = (byte)(b1 & 0x03);   // low 2 bits of b1, feeds 2nd char
                encodedIndex = i * 4;
                // C# byte is unsigned, so >> already zero-fills. The old code
                // kept the Java port's sign-extension fix-up ("^ 0xc0" etc.),
                // which in C# corrupted the value for bytes >= 0x80 and then
                // indexed past the 64-entry lookup table.
                byte val1 = (byte)(b1 >> 2);
                byte val2 = (byte)(b2 >> 4);
                byte val3 = (byte)(b3 >> 6);
                encodedData[encodedIndex] = LOOKUP_BASE64_ALPHABET[val1];
                encodedData[encodedIndex + 1] = LOOKUP_BASE64_ALPHABET[val2 | (k << 4)];
                encodedData[encodedIndex + 2] = LOOKUP_BASE64_ALPHABET[(l << 2) | val3];
                encodedData[encodedIndex + 3] = LOOKUP_BASE64_ALPHABET[b3 & 0x3f];
            }
            // Handle the 1- or 2-byte remainder with '=' padding.
            dataIndex = i * 3;
            encodedIndex = i * 4;
            if (fewerThan24bits == EIGHTBIT)
            {
                b1 = binaryData[dataIndex];
                k = (byte)(b1 & 0x03);
                byte val1 = (byte)(b1 >> 2);
                encodedData[encodedIndex] = LOOKUP_BASE64_ALPHABET[val1];
                encodedData[encodedIndex + 1] = LOOKUP_BASE64_ALPHABET[k << 4];
                encodedData[encodedIndex + 2] = PAD;
                encodedData[encodedIndex + 3] = PAD;
            }
            else if (fewerThan24bits == SIXTEENBIT)
            {
                b1 = binaryData[dataIndex];
                b2 = binaryData[dataIndex + 1];
                l = (byte)(b2 & 0x0f);
                k = (byte)(b1 & 0x03);
                byte val1 = (byte)(b1 >> 2);
                byte val2 = (byte)(b2 >> 4);
                encodedData[encodedIndex] = LOOKUP_BASE64_ALPHABET[val1];
                encodedData[encodedIndex + 1] = LOOKUP_BASE64_ALPHABET[val2 | (k << 4)];
                encodedData[encodedIndex + 2] = LOOKUP_BASE64_ALPHABET[l << 2];
                encodedData[encodedIndex + 3] = PAD;
            }
            return encodedData;
        }

        /// <summary>
        /// Decodes Base64 ASCII bytes (with optional '=' padding) into raw bytes.
        /// </summary>
        public static byte[] Decode(byte[] base64Data)
        {
            Checking();
            if (base64Data.Length == 0)
            {
                return new byte[0];
            }
            int numberQuadruple = base64Data.Length / FOURBYTE;
            byte[] decodedData = null;
            byte b1 = 0, b2 = 0, b3 = 0, b4 = 0, marker0 = 0, marker1 = 0;
            int encodedIndex = 0;
            int dataIndex = 0;
            {
                // Strip trailing padding to size the output buffer.
                int lastData = base64Data.Length;
                while (base64Data[lastData - 1] == PAD)
                {
                    if (--lastData == 0)
                    {
                        return new byte[0];
                    }
                }
                decodedData = new byte[lastData - numberQuadruple];
            }
            for (int i = 0; i < numberQuadruple; i++)
            {
                dataIndex = i * 4;
                marker0 = base64Data[dataIndex + 2];
                marker1 = base64Data[dataIndex + 3];
                b1 = (byte)BASE64_ALPHABET[base64Data[dataIndex]];
                b2 = (byte)BASE64_ALPHABET[base64Data[dataIndex + 1]];
                if (marker0 != PAD && marker1 != PAD)
                {
                    // No padding: this quadruple decodes to three bytes.
                    b3 = (byte)BASE64_ALPHABET[marker0];
                    b4 = (byte)BASE64_ALPHABET[marker1];
                    decodedData[encodedIndex] = (byte)(b1 << 2 | b2 >> 4);
                    decodedData[encodedIndex + 1] = (byte)(((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
                    decodedData[encodedIndex + 2] = (byte)(b3 << 6 | b4);
                }
                else if (marker0 == PAD)
                {
                    // "xx==": one byte.
                    decodedData[encodedIndex] = (byte)(b1 << 2 | b2 >> 4);
                }
                else if (marker1 == PAD)
                {
                    // "xxx=": two bytes.
                    b3 = (byte)BASE64_ALPHABET[marker0];
                    decodedData[encodedIndex] = (byte)(b1 << 2 | b2 >> 4);
                    decodedData[encodedIndex + 1] = (byte)(((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
                }
                encodedIndex += 3;
            }
            return decodedData;
        }

        /// <summary>
        /// Decodes Base64 characters, silently skipping any character that is
        /// not part of the alphabet (padding, whitespace, line breaks).
        /// </summary>
        public static byte[] DecodeBase64(char[] data)
        {
            Checking();
            int size = data.Length;
            int temp = size;
            // Count only real alphabet characters; the -1 sentinel in the
            // fixed table makes this check actually fire for '=' and friends.
            for (int ix = 0; ix < data.Length; ix++)
            {
                if ((data[ix] > 255) || BASE64_ALPHABET[data[ix]] < 0)
                {
                    --temp;
                }
            }
            int len = (temp / 4) * 3;
            if ((temp % 4) == 3)
            {
                len += 2;
            }
            if ((temp % 4) == 2)
            {
                len += 1;
            }
            byte[] xout = new byte[len];
            int shift = 0;
            int accum = 0;
            int index = 0;
            for (int ix = 0; ix < size; ix++)
            {
                // -1 for out-of-alphabet characters so they are skipped; the
                // old code mapped chars > 255 and invalid bytes to 0 ('A'),
                // corrupting the output and tripping the length check below.
                int value = (data[ix] > 255) ? -1 : (int)(BASE64_ALPHABET[data[ix]]);
                if (value >= 0)
                {
                    accum <<= 6;
                    shift += 6;
                    accum |= value;
                    if (shift >= 8)
                    {
                        shift -= 8;
                        xout[index++] = (byte)((accum >> shift) & 0xff);
                    }
                }
            }
            if (index != xout.Length)
            {
                throw new Exception("index != " + xout.Length);
            }
            return xout;
        }
    }
}
// // TreeStore.cs // // Author: // Lluis Sanchez <lluis@xamarin.com> // // Copyright (c) 2011 Xamarin Inc // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;
using System.Linq;
using System.Collections.Generic;
using Xwt.Backends;
using System.ComponentModel;

namespace Xwt
{
    /// <summary>
    /// Hierarchical data store for tree widgets. Columns are declared through
    /// the IDataField array passed to the constructor; rows are addressed with
    /// TreePosition/TreeNavigator objects supplied by the backend.
    /// </summary>
    [BackendType (typeof(ITreeStoreBackend))]
    public class TreeStore: XwtComponent, ITreeDataSource
    {
        // Column definitions; each field's Index is set to its column number
        // in the constructor.
        IDataField[] fields;

        // Creates the toolkit backend, falling back to the in-memory
        // DefaultTreeStoreBackend when the toolkit provides none.
        class TreeStoreBackendHost: BackendHost<TreeStore,ITreeStoreBackend>
        {
            protected override IBackend OnCreateBackend ()
            {
                var b = base.OnCreateBackend ();
                if (b == null)
                    b = new DefaultTreeStoreBackend ();
                // Tell the backend the column types before first use.
                ((ITreeStoreBackend)b).Initialize (Parent.fields.Select (f => f.FieldType).ToArray ());
                return b;
            }
        }

        protected override Xwt.Backends.BackendHost CreateBackendHost ()
        {
            return new TreeStoreBackendHost ();
        }

        /// <summary>
        /// Creates a tree store with the given column fields. Each field may
        /// belong to only one store; its Index is assigned here.
        /// </summary>
        public TreeStore (params IDataField[] fields)
        {
            for (int n=0; n<fields.Length; n++) {
                // Index != -1 means the field was already claimed by a store.
                if (fields[n].Index != -1)
                    throw new InvalidOperationException ("DataField object already belongs to another data store");
                ((IDataFieldInternal)fields[n]).SetIndex (n);
            }
            this.fields = fields;
        }

        ITreeStoreBackend Backend {
            get { return (ITreeStoreBackend)BackendHost.Backend; }
        }

        /// <summary>Returns a navigator at the first root node (position may be null when empty).</summary>
        public TreeNavigator GetFirstNode ()
        {
            var p = Backend.GetChild (null, 0);
            return new TreeNavigator (Backend, p);
        }

        /// <summary>Returns a navigator wrapping an existing position.</summary>
        public TreeNavigator GetNavigatorAt (TreePosition pos)
        {
            return new TreeNavigator (Backend, pos);
        }

        /// <summary>Appends a new root node and returns a navigator at it.</summary>
        public TreeNavigator AddNode ()
        {
            var pos = Backend.AddChild (null);
            return new TreeNavigator (Backend, pos);
        }

        /// <summary>Appends a new child under <paramref name="position"/>.</summary>
        public TreeNavigator AddNode (TreePosition position)
        {
            var pos = Backend.AddChild (position);
            return new TreeNavigator (Backend, pos);
        }

        /// <summary>Inserts a new sibling immediately after the given position.</summary>
        public TreeNavigator InsertNodeAfter (TreePosition positon)
        {
            var pos = Backend.InsertAfter (positon);
            return new TreeNavigator (Backend, pos);
        }

        /// <summary>Inserts a new sibling immediately before the given position.</summary>
        public TreeNavigator InsertNodeBefore (TreePosition positon)
        {
            var pos = Backend.InsertBefore (positon);
            return new TreeNavigator (Backend, pos);
        }

        /// <summary>
        /// Lazily enumerates navigators at every node whose value for
        /// <paramref name="field"/> equals <paramref name="fieldValue"/>
        /// (depth-first over the whole tree). A null value yields nothing.
        /// </summary>
        public IEnumerable<TreeNavigator> FindNavigators<T> (T fieldValue, IDataField<T> field)
        {
            if (fieldValue == null) {
                return Enumerable.Empty<TreeNavigator> ();
            }
            TreeNavigator navigator = GetFirstNode ();
            if (navigator.CurrentPosition == null) {
                return Enumerable.Empty<TreeNavigator> ();
            }
            return FindNavigators (fieldValue, field, navigator);
        }

        // Walks the sibling chain starting at `navigator`, yielding clones for
        // matches and recursing into children. The shared navigator is mutated
        // while iterating, hence the Clone() calls on every yield.
        static IEnumerable<TreeNavigator> FindNavigators<T> (T fieldValue, IDataField<T> field, TreeNavigator navigator)
        {
            do {
                if (IsNavigator (navigator, fieldValue, field)) {
                    yield return navigator.Clone ();
                }
                foreach (TreeNavigator foundChild in FindChildNavigators (navigator, fieldValue, field)) {
                    yield return foundChild.Clone ();
                }
            } while (navigator.MoveNext());
        }

        // Descends into the children of the current node (if any), restoring
        // the navigator to the parent before returning.
        static IEnumerable<TreeNavigator> FindChildNavigators<T> (TreeNavigator navigator, T fieldValue, IDataField<T> field)
        {
            if (!navigator.MoveToChild ()) {
                yield break;
            }
            foreach (var treeNavigator in FindNavigators (fieldValue, field, navigator)) {
                yield return treeNavigator;
            }
            navigator.MoveToParent ();
        }

        // True when the node under `navigator` holds fieldValue in `field`.
        static bool IsNavigator<T> (TreeNavigator navigator, T fieldValue, IDataField<T> field)
        {
            T value = navigator.GetValue (field);
            return fieldValue.Equals (value);
        }

        /// <summary>Removes every node from the store.</summary>
        public void Clear ()
        {
            Backend.Clear ();
        }

        // ITreeDataSource events are forwarded straight to the backend.
        event EventHandler<TreeNodeEventArgs> ITreeDataSource.NodeInserted {
            add { Backend.NodeInserted += value; }
            remove { Backend.NodeInserted -= value; }
        }
        event EventHandler<TreeNodeChildEventArgs> ITreeDataSource.NodeDeleted {
            add { Backend.NodeDeleted += value; }
            remove { Backend.NodeDeleted -= value; }
        }
        event EventHandler<TreeNodeEventArgs> ITreeDataSource.NodeChanged {
            add { Backend.NodeChanged += value; }
            remove { Backend.NodeChanged -= value; }
        }
        event EventHandler<TreeNodeOrderEventArgs> ITreeDataSource.NodesReordered {
            add { Backend.NodesReordered += value; }
            remove { Backend.NodesReordered -= value; }
        }
        event EventHandler ITreeDataSource.Cleared {
            add { Backend.Cleared += value; }
            remove { Backend.Cleared -= value; }
        }

        TreePosition ITreeDataSource.GetChild (TreePosition pos, int index)
        {
            return Backend.GetChild (pos, index);
        }

        TreePosition ITreeDataSource.GetParent (TreePosition pos)
        {
            return Backend.GetParent (pos);
        }

        int ITreeDataSource.GetChildrenCount (TreePosition pos)
        {
            return Backend.GetChildrenCount (pos);
        }

        object ITreeDataSource.GetValue (TreePosition pos, int column)
        {
            return Backend.GetValue (pos, column);
        }

        void ITreeDataSource.SetValue (TreePosition pos, int column, object val)
        {
            Backend.SetValue (pos, column, val);
        }

        Type[] ITreeDataSource.ColumnTypes {
            get { return fields.Select (f => f.FieldType).ToArray (); }
        }
    }

    /// <summary>
    /// In-memory tree store backend used when the toolkit does not supply one.
    /// Positions are validated against a store-wide version counter and
    /// re-resolved by node id when the store changed since they were created.
    /// </summary>
    class DefaultTreeStoreBackend: ITreeStoreBackend
    {
        // A tree node: its column values, its children (null until needed)
        // and a store-unique id used to re-resolve stale positions.
        struct Node {
            public object[] Data;
            public NodeList Children;
            public int NodeId;
        }

        class NodePosition: TreePosition {
            public NodeList ParentList;   // list this node lives in
            public int NodeIndex;         // index within ParentList (may be stale)
            public int NodeId;            // stable identity
            public int StoreVersion;      // store version when NodeIndex was valid

            public override bool Equals (object obj)
            {
                // NOTE(review): a non-NodePosition argument makes this cast
                // throw InvalidCastException rather than return false — looks
                // unintended for an Equals override; confirm callers only ever
                // compare NodePosition instances.
                NodePosition other = (NodePosition) obj;
                if (other == null)
                    return false;
                // Identity is (list, node id); NodeIndex may be stale.
                return ParentList == other.ParentList && NodeId == other.NodeId;
            }

            public override int GetHashCode ()
            {
                return ParentList.GetHashCode () ^ NodeId;
            }
        }

        // Child list that knows the position of its owning parent node.
        class NodeList: List<Node> {
            public NodePosition Parent;
        }

        Type[] columnTypes;
        NodeList rootNodes = new NodeList ();
        // Bumped on every structural change; stale NodePositions are
        // re-resolved lazily in GetPosition().
        int version;
        int nextNodeId;

        public event EventHandler<TreeNodeEventArgs> NodeInserted;
        public event EventHandler<TreeNodeChildEventArgs> NodeDeleted;
        public event EventHandler<TreeNodeEventArgs> NodeChanged;
        public event EventHandler<TreeNodeOrderEventArgs> NodesReordered;
        public event EventHandler Cleared;

        public void InitializeBackend (object frontend, ApplicationContext context)
        {
        }

        public void Initialize (Type[] columnTypes)
        {
            this.columnTypes = columnTypes;
        }

        public void Clear ()
        {
            rootNodes.Clear ();
            if (Cleared != null)
                Cleared (this, EventArgs.Empty);
        }

        // Validates a position: when the store changed since the position was
        // created, re-locates the node in its parent list by NodeId and
        // refreshes NodeIndex/StoreVersion. Throws if the node is gone.
        NodePosition GetPosition (TreePosition pos)
        {
            if (pos == null)
                return null;
            NodePosition np = (NodePosition)pos;
            if (np.StoreVersion != version) {
                np.NodeIndex = -1;
                for (int i=0; i<np.ParentList.Count; i++) {
                    if (np.ParentList [i].NodeId == np.NodeId) {
                        np.NodeIndex = i;
                        break;
                    }
                }
                if (np.NodeIndex == -1)
                    throw new InvalidOperationException ("Invalid node position");
                np.StoreVersion = version;
            }
            return np;
        }

        public void SetValue (TreePosition pos, int column, object value)
        {
            NodePosition n = GetPosition (pos);
            Node node = n.ParentList [n.NodeIndex];
            if (node.Data == null) {
                // Allocate the row lazily; Node is a struct, so write it back.
                node.Data = new object [columnTypes.Length];
                n.ParentList [n.NodeIndex] = node;
            }
            node.Data [column] = value;
            if (NodeChanged != null)
                NodeChanged (this, new TreeNodeEventArgs (pos, n.NodeIndex));
        }

        public object GetValue (TreePosition pos, int column)
        {
            NodePosition np = GetPosition (pos);
            Node n = np.ParentList[np.NodeIndex];
            if (n.Data == null)
                return null;
            return n.Data [column];
        }

        // Returns the index-th child of pos (root level when pos is null),
        // or null when out of range.
        public TreePosition GetChild (TreePosition pos, int index)
        {
            if (pos == null) {
                if (rootNodes.Count == 0)
                    return null;
                Node n = rootNodes[index];
                return new NodePosition () { ParentList = rootNodes, NodeId = n.NodeId, NodeIndex = index, StoreVersion = version };
            } else {
                NodePosition np = GetPosition (pos);
                Node n = np.ParentList[np.NodeIndex];
                if (n.Children == null || index >= n.Children.Count)
                    return null;
                return new NodePosition () { ParentList = n.Children, NodeId = n.Children[index].NodeId, NodeIndex = index, StoreVersion = version };
            }
        }

        // Next sibling, or null at the end of the list.
        public TreePosition GetNext (TreePosition pos)
        {
            NodePosition np = GetPosition (pos);
            if (np.NodeIndex >= np.ParentList.Count - 1)
                return null;
            Node n = np.ParentList[np.NodeIndex + 1];
            return new NodePosition () { ParentList = np.ParentList, NodeId = n.NodeId, NodeIndex = np.NodeIndex + 1, StoreVersion = version };
        }

        // Previous sibling, or null at the start of the list.
        public TreePosition GetPrevious (TreePosition pos)
        {
            NodePosition np = GetPosition (pos);
            if (np.NodeIndex <= 0)
                return null;
            Node n = np.ParentList[np.NodeIndex - 1];
            return new NodePosition () { ParentList = np.ParentList, NodeId = n.NodeId, NodeIndex = np.NodeIndex - 1, StoreVersion = version };
        }

        public int GetChildrenCount (TreePosition pos)
        {
            if (pos == null)
                return rootNodes.Count;
            NodePosition np = GetPosition (pos);
            Node n = np.ParentList[np.NodeIndex];
            return n.Children != null ? n.Children.Count : 0;
        }

        public TreePosition InsertBefore (TreePosition pos)
        {
            NodePosition np = GetPosition (pos);
            Node nn = new Node ();
            nn.NodeId = nextNodeId++;
            np.ParentList.Insert (np.NodeIndex, nn);
            version++;
            // The insert shifted the caller's node one slot to the right;
            // fix its index so the (mutated) position stays valid.
            np.NodeIndex++;
            np.StoreVersion = version;
            var node = new NodePosition () { ParentList = np.ParentList, NodeId = nn.NodeId, NodeIndex = np.NodeIndex - 1, StoreVersion = version };
            if (NodeInserted != null)
                NodeInserted (this, new TreeNodeEventArgs (node, node.NodeIndex));
            return node;
        }

        public TreePosition InsertAfter (TreePosition pos)
        {
            NodePosition np = GetPosition (pos);
            Node nn = new Node ();
            nn.NodeId = nextNodeId++;
            np.ParentList.Insert (np.NodeIndex + 1, nn);
            version++;
            // Inserting after does not move the caller's node, so only the
            // version stamp of the provided position needs refreshing.
            np.StoreVersion = version;
            var node = new NodePosition () { ParentList = np.ParentList, NodeId = nn.NodeId, NodeIndex = np.NodeIndex + 1, StoreVersion = version };
            if (NodeInserted != null)
                NodeInserted (this, new TreeNodeEventArgs (node, node.NodeIndex));
            return node;
        }

        // Appends a child under pos (root level when pos is null), creating
        // the child list on first use.
        public TreePosition AddChild (TreePosition pos)
        {
            NodePosition np = GetPosition (pos);
            Node nn = new Node ();
            nn.NodeId = nextNodeId++;
            NodeList list;
            if (pos == null) {
                list = rootNodes;
            } else {
                Node n = np.ParentList [np.NodeIndex];
                if (n.Children == null) {
                    n.Children = new NodeList ();
                    n.Children.Parent = new NodePosition () { ParentList = np.ParentList, NodeId = n.NodeId, NodeIndex = np.NodeIndex, StoreVersion = version };
                    // Node is a struct: write the updated copy back.
                    np.ParentList [np.NodeIndex] = n;
                }
                list = n.Children;
            }
            list.Add (nn);
            version++;
            // The provided position is unaffected by this change; keep it valid.
            if (np != null)
                np.StoreVersion = version;
            var node = new NodePosition () { ParentList = list, NodeId = nn.NodeId, NodeIndex = list.Count - 1, StoreVersion = version };
            if (NodeInserted != null)
                NodeInserted (this, new TreeNodeEventArgs (node, node.NodeIndex));
            return node;
        }

        public void Remove (TreePosition pos)
        {
            NodePosition np = GetPosition (pos);
            np.ParentList.RemoveAt (np.NodeIndex);
            var parent = np.ParentList.Parent;
            var index = np.NodeIndex;
            version++;
            if (NodeDeleted != null)
                NodeDeleted (this, new TreeNodeChildEventArgs (parent, index, pos));
        }

        public TreePosition GetParent (TreePosition pos)
        {
            NodePosition np = GetPosition (pos);
            if (np.ParentList == rootNodes)
                return null;
            var parent = np.ParentList.Parent;
            return new NodePosition () { ParentList = parent.ParentList, NodeId = parent.NodeId, NodeIndex = parent.NodeIndex, StoreVersion = version };
        }

        public Type[] ColumnTypes {
            get { return columnTypes; }
        }

        public void EnableEvent (object eventId)
        {
        }

        public void DisableEvent (object eventId)
        {
        }

        // Reordering is not implemented by this backend; log when someone
        // actually subscribed and would have expected the event.
        protected virtual void OnNodesReordered(ListRowOrderEventArgs e)
        {
            if (NodesReordered != null)
                System.Diagnostics.Debug.WriteLine($"No support for {nameof(NodesReordered)} events from {nameof(DefaultTreeStoreBackend)}, sorry.");
        }
    }
}
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using Caps=OpenSim.Framework.Capabilities.Caps;

namespace OpenSim.Region.CoreModules.Agent.Capabilities
{
    /// <summary>
    /// Region module that creates and tracks the per-agent capabilities (CAPS)
    /// HTTP handlers, plus the seed-capability paths handed to neighbouring
    /// (child) regions.
    /// </summary>
    public class CapabilitiesModule : INonSharedRegionModule, ICapabilitiesModule
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        protected Scene m_scene;

        /// <summary>
        /// Each agent has its own capabilities handler.
        /// Guard all access with lock (m_capsHandlers); the handler map is
        /// touched from multiple threads.
        /// </summary>
        protected Dictionary<UUID, Caps> m_capsHandlers = new Dictionary<UUID, Caps>();

        // agent id -> caps path for this region.
        protected Dictionary<UUID, string> capsPaths = new Dictionary<UUID, string>();
        // agent id -> (neighbour region handle -> seed cap) for child agents.
        protected Dictionary<UUID, Dictionary<ulong, string>> childrenSeeds
            = new Dictionary<UUID, Dictionary<ulong, string>>();

        public void Initialise(IConfigSource source)
        {
        }

        public void AddRegion(Scene scene)
        {
            m_scene = scene;
            m_scene.RegisterModuleInterface<ICapabilitiesModule>(this);
        }

        public void RegionLoaded(Scene scene)
        {
        }

        public void RemoveRegion(Scene scene)
        {
            m_scene.UnregisterModuleInterface<ICapabilitiesModule>(this);
        }

        public void PostInitialise()
        {
        }

        public void Close()
        {
        }

        public string Name
        {
            get { return "Capabilities Module"; }
        }

        public Type ReplaceableInterface
        {
            get { return null; }
        }

        /// <summary>
        /// Creates and registers a CAPS handler for the given agent, replacing
        /// any existing one. Banned agents are ignored.
        /// </summary>
        public void AddCapsHandler(UUID agentId)
        {
            if (m_scene.RegionInfo.EstateSettings.IsBanned(agentId))
                return;

            String capsObjectPath = GetCapsPath(agentId);

            // Lock added for consistency: every other accessor of
            // m_capsHandlers takes this lock, and this method used to
            // read/write the dictionary unguarded (race with RemoveCapsHandler).
            lock (m_capsHandlers)
            {
                if (m_capsHandlers.ContainsKey(agentId))
                {
                    Caps oldCaps = m_capsHandlers[agentId];
                    m_log.DebugFormat(
                        "[CAPS]: Reregistering caps for agent {0}. Old caps path {1}, new caps path {2}. ",
                        agentId, oldCaps.CapsObjectPath, capsObjectPath);
                    // This should not happen. The caller code is confused. We need to fix that.
                    // CAPs can never be reregistered, or the client will be confused.
                    // Hence this return here.
                    //return;
                }
            }

            Caps caps = new Caps(m_scene, m_scene.AssetService, MainServer.Instance,
                m_scene.RegionInfo.ExternalHostName, MainServer.Instance.Port,
                capsObjectPath, agentId, m_scene.DumpAssetsToFile, m_scene.RegionInfo.RegionName);

            caps.RegisterHandlers();

            m_scene.EventManager.TriggerOnRegisterCaps(agentId, caps);

            // Wire the scene callbacks the handler uses to service requests.
            caps.AddNewInventoryItem = m_scene.AddUploadedInventoryItem;
            caps.ItemUpdatedCall = m_scene.CapsUpdateInventoryItemAsset;
            caps.TaskScriptUpdatedCall = m_scene.CapsUpdateTaskInventoryScriptAsset;
            caps.CAPSFetchInventoryDescendents = m_scene.HandleFetchInventoryDescendentsCAPS;
            caps.GetClient = m_scene.SceneContents.GetControllingClient;

            lock (m_capsHandlers)
            {
                m_capsHandlers[agentId] = caps;
            }
        }

        /// <summary>
        /// Deregisters and removes the agent's CAPS handler and forgets its
        /// children seeds. Logs a warning when no handler exists.
        /// </summary>
        public void RemoveCapsHandler(UUID agentId)
        {
            if (childrenSeeds.ContainsKey(agentId))
            {
                childrenSeeds.Remove(agentId);
            }

            lock (m_capsHandlers)
            {
                if (m_capsHandlers.ContainsKey(agentId))
                {
                    m_capsHandlers[agentId].DeregisterHandlers();
                    m_scene.EventManager.TriggerOnDeregisterCaps(agentId, m_capsHandlers[agentId]);
                    m_capsHandlers.Remove(agentId);
                }
                else
                {
                    m_log.WarnFormat(
                        "[CAPS]: Received request to remove CAPS handler for root agent {0} in {1}, but no such CAPS handler found!",
                        agentId, m_scene.RegionInfo.RegionName);
                }
            }
        }

        /// <summary>Returns the agent's CAPS handler, or null when none is registered.</summary>
        public Caps GetCapsHandlerForUser(UUID agentId)
        {
            lock (m_capsHandlers)
            {
                if (m_capsHandlers.ContainsKey(agentId))
                {
                    return m_capsHandlers[agentId];
                }
            }
            return null;
        }

        /// <summary>Records the caps path and children seeds for a newly connecting agent.</summary>
        public void NewUserConnection(AgentCircuitData agent)
        {
            capsPaths[agent.AgentID] = agent.CapsPath;
            childrenSeeds[agent.AgentID]
                = ((agent.ChildrenCapSeeds == null) ? new Dictionary<ulong, string>() : agent.ChildrenCapSeeds);
        }

        /// <summary>Returns the agent's caps path, or null when unknown.</summary>
        public string GetCapsPath(UUID agentId)
        {
            if (capsPaths.ContainsKey(agentId))
            {
                return capsPaths[agentId];
            }
            return null;
        }

        /// <summary>Returns the agent's children seeds, or an empty map when unknown.</summary>
        public Dictionary<ulong, string> GetChildrenSeeds(UUID agentID)
        {
            Dictionary<ulong, string> seeds = null;
            if (childrenSeeds.TryGetValue(agentID, out seeds))
                return seeds;
            return new Dictionary<ulong, string>();
        }

        /// <summary>Drops the seed recorded for a single neighbour region handle.</summary>
        public void DropChildSeed(UUID agentID, ulong handle)
        {
            Dictionary<ulong, string> seeds;
            if (childrenSeeds.TryGetValue(agentID, out seeds))
            {
                seeds.Remove(handle);
            }
        }

        /// <summary>Returns the seed for the given neighbour region handle, or null.</summary>
        public string GetChildSeed(UUID agentID, ulong handle)
        {
            Dictionary<ulong, string> seeds;
            string returnval;
            if (childrenSeeds.TryGetValue(agentID, out seeds))
            {
                if (seeds.TryGetValue(handle, out returnval))
                    return returnval;
            }
            return null;
        }

        /// <summary>Replaces the agent's children-seed map wholesale.</summary>
        public void SetChildrenSeed(UUID agentID, Dictionary<ulong, string> seeds)
        {
            //m_log.DebugFormat(" !!! Setting child seeds in {0} to {1}", m_scene.RegionInfo.RegionName, seeds.Count);
            childrenSeeds[agentID] = seeds;
        }

        /// <summary>Logs the agent's children seeds as region coordinates (debug aid).</summary>
        public void DumpChildrenSeeds(UUID agentID)
        {
            m_log.Info("================ ChildrenSeed "+m_scene.RegionInfo.RegionName+" ================");
            foreach (KeyValuePair<ulong, string> kvp in childrenSeeds[agentID])
            {
                uint x, y;
                Utils.LongToUInts(kvp.Key, out x, out y);
                x = x / Constants.RegionSize;
                y = y / Constants.RegionSize;
                m_log.Info(" >> "+x+", "+y+": "+kvp.Value);
            }
        }
    }
}
// Created by Paul Gonzalez Becerra
using System;
using System.Runtime.InteropServices;
using Saserdote.Mathematics;

namespace Saserdote.Graphics
{
    /// <summary>
    /// An RGBA color whose float components are clamped to [0, 1] by every
    /// constructor (and therefore by every arithmetic helper, since they all
    /// build their result through a constructor).
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct Color
    {
        #region --- Field Variables ---
        // Variables (each kept in [0, 1] by the constructors and setters)
        public float red;
        public float green;
        public float blue;
        public float alpha;
        #endregion // Field Variables

        #region --- Constructors ---
        // Primary constructor; clamps every component into [0, 1].
        public Color(float r, float g, float b, float a)
        {
            red= Mathx.clamp(r, 0f, 1f);
            green= Mathx.clamp(g, 0f, 1f);
            blue= Mathx.clamp(b, 0f, 1f);
            alpha= Mathx.clamp(a, 0f, 1f);
        }
        public Color(float r, float g, float b):this(r, g, b, 1f) {}
        public Color(float all, float a):this(all, all, all, a) {}
        public Color(float all):this(all, all, all, 1f) {}
        // Integer overloads interpret components on the 0-255 scale.
        public Color(int r, int g, int b, int a):this((float)r/255f, (float)g/255f, (float)b/255f, (float)a/255f) {}
        public Color(int r, int g, int b):this(r, g, b, 255) {}
        public Color(int all, int a):this(all, all, all, a) {}
        public Color(int all):this(all, all, all, 255) {}
        // Byte overloads, also on the 0-255 scale.
        public Color(byte r, byte g, byte b, byte a):this((float)r/255f, (float)g/255f, (float)b/255f, (float)a/255f) {}
        public Color(byte r, byte g, byte b):this(r, g, b, (byte)255) {}
        public Color(byte all, byte a):this(all, all, all, a) {}
        public Color(byte all):this(all, all, all, (byte)255) {}
        #endregion // Constructors

        #region --- Properties ---
        // Gets and sets the red of the color in integer form (0-255)
        public int redi
        {
            get { return (int)(red*255); }
            set { red= Mathx.clamp((float)value/255f, 0f, 1f); }
        }
        // Gets and sets the green of the color in integer form (0-255)
        public int greeni
        {
            get { return (int)(green*255); }
            set { green= Mathx.clamp((float)value/255f, 0f, 1f); }
        }
        // Gets and sets the blue of the color in integer form (0-255)
        public int bluei
        {
            get { return (int)(blue*255); }
            set { blue= Mathx.clamp((float)value/255f, 0f, 1f); }
        }
        // Gets and sets the alpha of the color in integer form (0-255)
        public int alphai
        {
            get { return (int)(alpha*255); }
            set { alpha= Mathx.clamp((float)value/255f, 0f, 1f); }
        }
        // Gets and sets the red of the color in byte form
        public byte redb
        {
            get { return (byte)(red*255); }
            set { red= Mathx.clamp((float)value/255f, 0f, 1f); }
        }
        // Gets and sets the green of the color in byte form
        public byte greenb
        {
            get { return (byte)(green*255); }
            set { green= Mathx.clamp((float)value/255f, 0f, 1f); }
        }
        // Gets and sets the blue of the color in byte form
        public byte blueb
        {
            get { return (byte)(blue*255); }
            set { blue= Mathx.clamp((float)value/255f, 0f, 1f); }
        }
        // Gets and sets the alpha of the color in byte form
        public byte alphab
        {
            get { return (byte)(alpha*255); }
            set { alpha= Mathx.clamp((float)value/255f, 0f, 1f); }
        }
        #endregion // Properties

        #region --- Static Methods ---
        // Gets a color from a packed long version (ARGB byte layout:
        // alpha in bits 24-31, red 16-23, green 8-15, blue 0-7).
        public static Color fromPackedLong(long packedLong)
        {
            // Variables
            Color temp= new Color(0f, 0f, 0f, 0f);

            temp.redb= (byte)((packedLong>> 0x10)&0xffL);
            temp.greenb= (byte)((packedLong>> 8)&0xffL);
            temp.blueb= (byte)(packedLong&0xffL);
            temp.alphab= (byte)((packedLong>> 0x18)&0xffL);

            return temp;
        }

        // Gets a color from a packed int version; the mask keeps the value
        // unsigned after the implicit widening of a negative int.
        public static Color fromPackedInt(int packedInt)
        {
            return fromPackedLong((packedInt&((long)0xffffffffL)));
        }
        #endregion // Static Methods

        #region --- Methods ---
        // Gets a version of the color represented in a long value (ARGB layout,
        // inverse of fromPackedLong)
        public long toPackedLong()
        {
            // Variables
            byte r= redb;
            byte g= greenb;
            byte b= blueb;
            byte a= alphab;

            return (long)(((ulong)((r<< 0x10)|(g<< 8)|b|a<<0x18))&0xffffffffL);
        }

        // Returns an array of floats of RGBA
        public float[] toFloatArray()
        {
            return new float[] {red, green, blue, alpha};
        }

        // Adds the two colors together (component-wise; result is clamped)
        public Color add(Color c)
        {
            return new Color(red+c.red, green+c.green, blue+c.blue, alpha+c.alpha);
        }

        // Subtracts the two colors together (component-wise; result is clamped)
        public Color subtract(Color c)
        {
            return new Color(red-c.red, green-c.green, blue-c.blue, alpha-c.alpha);
        }

        // Multiplies the two colors together (component-wise)
        public Color multiply(Color c)
        {
            return new Color(red*c.red, green*c.green, blue*c.blue, alpha*c.alpha);
        }

        // Gets the difference of the two colors and keeps it absolute so no high percentage of black
        public Color difference(Color c)
        {
            return new Color(Math.Abs(red-c.red), Math.Abs(green-c.green), Math.Abs(blue-c.blue), Math.Abs(alpha-c.alpha));
        }

        // Gets the average of the two colors
        public Color average(Color c)
        {
            return new Color((red+c.red)/2f, (green+c.green)/2f, (blue+c.blue)/2f, (alpha+c.alpha)/2f);
        }

        // Fades the two colors together; the two weights must add to 1
        // (rightFade is recomputed from leftFade when they don't).
        public Color crossFade(Color c, float leftFade, float rightFade)
        {
            leftFade= Mathx.clamp(leftFade, 0f, 1f);
            rightFade= Mathx.clamp(rightFade, 0f, 1f);
            if(leftFade+rightFade!= 1f)
                rightFade= 1f-leftFade;

            return new Color
            (
                red*leftFade+c.red*rightFade,
                green*leftFade+c.green*rightFade,
                blue*leftFade+c.blue*rightFade,
                alpha*leftFade+c.alpha*rightFade
            );
        }

        // Fades with the default 75/25 weighting
        public Color crossFade(Color c)
        {
            return crossFade(c, 0.75f, 0.25f);
        }

        // Multiplies the color to make it seem brighter
        public Color brighten(float scale)
        {
            return new Color(red*scale, green*scale, blue*scale, alpha*scale);
        }

        // Divides the color to make it seem darker.
        // Throws ArgumentException for a zero scale (the old code threw the
        // bare Exception base type, which callers cannot catch specifically).
        public Color darken(float scale)
        {
            if(scale== 0)
                throw new ArgumentException("Dividing by Zero!", "scale");
            return brighten(1/scale);
        }

        // Gets the opposite coloration of the colors present (alpha unchanged)
        public Color negate()
        {
            return new Color(1f-red, 1f-green, 1f-blue, alpha);
        }

        // Finds out if the two colors are equal (exact float comparison)
        public bool equals(Color c)
        {
            return (red== c.red && green== c.green && blue== c.blue && alpha== c.alpha);
        }
        #endregion // Methods

        #region --- Inherited Methods ---
        // Finds if the two colors are equal
        public override bool Equals(object obj)
        {
            if(obj== null)
                return false;
            if(obj is Color)
                return equals((Color)obj);
            return false;
        }

        // Gets the hash code. NOTE: components are clamped to [0, 1], so the
        // casts yield only 0 or 1 — a weak hash, but it is consistent with
        // equals (equal colors hash equally), which is what the contract requires.
        public override int GetHashCode()
        {
            return ((int)red^(int)green^(int)blue^(int)alpha);
        }

        // Prints out the contents of the color
        public override string ToString()
        {
            return "Red:"+red+",Green:"+green+",Blue:"+blue+",Alpha:"+alpha;
        }
        #endregion // Inherited Methods

        #region --- Operators ---
        // Equality operators
        public static bool operator ==(Color left, Color right)
        {
            return left.equals(right);
        }

        // Inequality operators
        public static bool operator !=(Color left, Color right)
        {
            return !left.equals(right);
        }

        // Uniary operators
        public static Color operator -(Color self)
        {
            return self.negate();
        }
        #endregion // Operators
    }
}
// End of File
using System;
using System.Threading;
using System.Globalization;

/// <summary>
/// Parse(System.String,System.Globalization.NumberStyles,System.IFormatProvider)
/// Exercises Double.Parse with every NumberStyles flag against the en-US
/// NumberFormatInfo. The repeated try/catch/log boilerplate of the original is
/// factored into two private helpers; every public test method, scenario string,
/// log ID and error message is preserved byte-for-byte.
/// </summary>
public class DoubleParse3
{
    // NumberFormatInfo for en-US, fixed so the tests are machine-culture independent.
    private static NumberFormatInfo nfi;
    // Currency symbol taken from nfi ("$" for en-US); used to build currency inputs.
    private static string currencySymbol;

    /// <summary>Initializes the en-US format provider used by every test case.</summary>
    public static void InitializeIFormatProvider()
    {
        nfi = new CultureInfo("en-US").NumberFormat;
        currencySymbol = nfi.CurrencySymbol;
    }

    /// <summary>Test entry point: 100 on success, 0 on failure (harness convention).</summary>
    public static int Main()
    {
        InitializeIFormatProvider();
        DoubleParse3 test = new DoubleParse3();
        TestLibrary.TestFramework.BeginTestCase("DoubleParse3");
        if (test.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }

    /// <summary>Runs every scenario; keeps going after a failure so all results are logged.</summary>
    public bool RunTests()
    {
        bool retVal = true;
        TestLibrary.TestFramework.LogInformation("[Positive]");
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;
        retVal = PosTest3() && retVal;
        retVal = PosTest4() && retVal;
        retVal = PosTest5() && retVal;
        retVal = PosTest6() && retVal;
        retVal = PosTest7() && retVal;
        retVal = PosTest8() && retVal;
        retVal = PosTest9() && retVal;
        retVal = PosTest10() && retVal;
        retVal = PosTest11() && retVal;
        retVal = PosTest12() && retVal;
        retVal = PosTest13() && retVal;
        retVal = PosTest14() && retVal;
        retVal = PosTest15() && retVal;
        retVal = PosTest16() && retVal;
        TestLibrary.TestFramework.LogInformation("[Negative]");
        retVal = NegTest1() && retVal;
        retVal = NegTest2() && retVal;
        retVal = NegTest3() && retVal;
        retVal = NegTest4() && retVal;
        retVal = NegTest5() && retVal;
        retVal = NegTest6() && retVal;
        return retVal;
    }

    /// <summary>
    /// Shared body for the positive scenarios: parses <paramref name="s"/> with the
    /// given styles against the en-US provider and checks the exact result via
    /// Double.CompareTo (which, unlike ==, also distinguishes NaN and signed zero).
    /// Logs idPrefix + ".1" on a wrong value, idPrefix + ".2" on an unexpected exception.
    /// </summary>
    private bool RunPositiveCase(string idPrefix, string scenario, string s, NumberStyles styles, double expected, string errorMessage)
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario(scenario);
        try
        {
            double d = Double.Parse(s, styles, nfi);
            if (d.CompareTo(expected) != 0)
            {
                TestLibrary.TestFramework.LogError(idPrefix + ".1", errorMessage);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError(idPrefix + ".2", "Unexpected exception: " + e);
            TestLibrary.TestFramework.LogInformation(e.StackTrace);
            retVal = false;
        }
        return retVal;
    }

    /// <summary>
    /// Shared body for scenarios that must throw: the parse must raise an exception of
    /// type TExpected (or a subclass); anything else — including success — is a failure.
    /// Mirrors the original catch ordering: the expected type is swallowed, any other
    /// exception is logged as idPrefix + ".2".
    /// </summary>
    private bool RunThrowingCase<TExpected>(string idPrefix, string scenario, string s, NumberStyles styles, string errorMessage)
        where TExpected : Exception
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario(scenario);
        try
        {
            Double.Parse(s, styles, nfi);
            TestLibrary.TestFramework.LogError(idPrefix + ".1", errorMessage);
            retVal = false;
        }
        catch (TExpected)
        {
            // Expected path: the documented exception type was thrown.
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError(idPrefix + ".2", "Unexpected exception: " + e);
            TestLibrary.TestFramework.LogInformation(e.StackTrace);
            retVal = false;
        }
        return retVal;
    }

    public bool PosTest1()
    {
        return RunPositiveCase("P01",
            "PosTest1: Ensure the result is correct when NumberStyles is set to AllowCurrencySymbol.",
            "123" + currencySymbol, NumberStyles.AllowCurrencySymbol, 123.0,
            "The result is not correct when NumberStyles is set to AllowCurrencySymbol!");
    }

    public bool PosTest2()
    {
        return RunPositiveCase("P02",
            "PosTest2: Ensure the result is correct when NumberStyles is set to AllowDecimalPoint.",
            "123.1", NumberStyles.AllowDecimalPoint, 123.1,
            "The result is not correct when NumberStyles is set to AllowDecimalPoint!");
    }

    public bool PosTest3()
    {
        return RunPositiveCase("P03",
            "PosTest3: Ensure the result is correct when NumberStyles is set to AllowExponent.",
            "123E2", NumberStyles.AllowExponent, 12300.0,
            "The result is not correct when NumberStyles is set to AllowExponent!");
    }

    public bool PosTest4()
    {
        return RunPositiveCase("P04",
            "PosTest4: Ensure the result is correct when NumberStyles is set to AllowLeadingSign.",
            "-12345", NumberStyles.AllowLeadingSign, -12345.0,
            "The result is not correct when NumberStyles is set to AllowLeadingSign!");
    }

    public bool PosTest5()
    {
        return RunPositiveCase("P05",
            "PosTest5: Ensure the result is correct when NumberStyles is set to AllowLeadingWhite.",
            " 1234", NumberStyles.AllowLeadingWhite, 1234.0,
            "The result is not correct when NumberStyles is set to AllowLeadingWhite!");
    }

    public bool PosTest6()
    {
        // Parentheses denote a negative number in this style.
        return RunPositiveCase("P06",
            "PosTest6: Ensure the result is correct when NumberStyles is set to AllowParentheses.",
            "(456)", NumberStyles.AllowParentheses, -456.0,
            "The result is not correct when NumberStyles is set to AllowParentheses!");
    }

    public bool PosTest7()
    {
        return RunPositiveCase("P07",
            "PosTest7: Ensure the result is correct when NumberStyles is set to AllowThousands.",
            "123,456", NumberStyles.AllowThousands, 123456.0,
            "The result is not correct when NumberStyles is set to AllowThousands!");
    }

    public bool PosTest8()
    {
        return RunPositiveCase("P08",
            "PosTest8: Ensure the result is correct when NumberStyles is set to AllowTrailingSign.",
            "123-", NumberStyles.AllowTrailingSign, -123.0,
            "The result is not correct when NumberStyles is set to AllowTrailingSign!");
    }

    public bool PosTest9()
    {
        return RunPositiveCase("P09",
            "PosTest9: Ensure the result is correct when NumberStyles is set to AllowTrailingWhite.",
            "123 ", NumberStyles.AllowTrailingWhite, 123.0,
            "The result is not correct when NumberStyles is set to AllowTrailingWhite!");
    }

    public bool PosTest10()
    {
        return RunPositiveCase("P10",
            "PosTest10: Ensure the result is correct when NumberStyles is set to Any.",
            "-123E2 " + currencySymbol, NumberStyles.Any, -12300.0,
            "The result is not correct when NumberStyles is set to Any!");
    }

    public bool PosTest11()
    {
        return RunPositiveCase("P11",
            "PosTest11: Ensure the result is correct when NumberStyles is set to Currency.",
            "-123 " + currencySymbol, NumberStyles.Currency, -123.0,
            "The result is not correct when NumberStyles is set to Currency!");
    }

    public bool PosTest12()
    {
        return RunPositiveCase("P12",
            "PosTest12: Ensure the result is correct when NumberStyles is set to Float.",
            " -123.4 ", NumberStyles.Float, -123.4,
            "The result is not correct when NumberStyles is set to Float!");
    }

    public bool PosTest13()
    {
        return RunPositiveCase("P13",
            "PosTest13: Ensure the result is correct when NumberStyles is set to Integer.",
            " -123 ", NumberStyles.Integer, -123.0,
            "The result is not correct when NumberStyles is set to Integer!");
    }

    public bool PosTest14()
    {
        return RunPositiveCase("P14",
            "PosTest14: Ensure the result is correct when NumberStyles is set to None.",
            "123", NumberStyles.None, 123.0,
            "The result is not correct when NumberStyles is set to None!");
    }

    public bool PosTest15()
    {
        return RunPositiveCase("P15",
            "PosTest15: Ensure the result is correct when NumberStyles is set to Number.",
            " 4,123.1- ", NumberStyles.Number, -4123.1,
            "The result is not correct when NumberStyles is set to Number!");
    }

    public bool PosTest16()
    {
        // Negative-style check kept among the positive tests, as in the original:
        // a literal "$" is rejected when no currency style is allowed.
        return RunThrowingCase<FormatException>("P16",
            "PosTest16: Ensure the result is correct when NumberStyles is set to None but System.String has currency symbol.",
            "123$", NumberStyles.None,
            "The result is not correct when NumberStyles is set to None but System.String has currency symbol!");
    }

    public bool NegTest1()
    {
        return RunThrowingCase<ArgumentNullException>("N01",
            "NegTest1: ArgumentNullException should be thrown when System.String is a null reference.",
            null, NumberStyles.Any,
            "ArgumentNullException is not thrown when System.String is a null reference!");
    }

    public bool NegTest2()
    {
        return RunThrowingCase<FormatException>("N02",
            "NegTest2: FormatException should be thrown when System.String is not a number in a valid format.",
            "123,456.5.66", NumberStyles.None,
            "FormatException is not thrown when System.String is not a number in a valid format!");
    }

    public bool NegTest3()
    {
        return RunThrowingCase<OverflowException>("N03",
            "NegTest3: OverflowException should be thrown when System.String represents a number greater than MaxValue.",
            "1.79769313486233e308", NumberStyles.Float,
            "OverflowException is not thrown when System.String represents a number greater than MaxValue!");
    }

    public bool NegTest4()
    {
        return RunThrowingCase<ArgumentException>("N04",
            "NegTest4: ArgumentException should be thrown when NumberStyles contains AllowHexSpecifier value.",
            "108", NumberStyles.HexNumber,
            "ArgumentException is not thrown when NumberStyles contains AllowHexSpecifier value!");
    }

    public bool NegTest5()
    {
        return RunThrowingCase<ArgumentException>("N05",
            "NegTest5: ArgumentException should be thrown when NumberStyles is the AllowHexSpecifier value.",
            "108", NumberStyles.AllowHexSpecifier,
            "ArgumentException is not thrown when NumberStyles is the AllowHexSpecifier value!");
    }

    public bool NegTest6()
    {
        return RunThrowingCase<ArgumentException>("N06",
            "NegTest6: ArgumentException should be thrown when NumberStyles is not a NumberStyles value.",
            "123", (NumberStyles)5000,
            "ArgumentException is not thrown when NumberStyles is not a NumberStyles value!");
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tenancy_config/occupancy_factor_config.proto
//
// NOTE(review): this file is compiler-generated; behavioural changes belong in the
// .proto followed by regeneration. In particular, field 6 is spelled
// "ocuupancy_range_min" (typo for "occupancy") in the descriptor — fixing the name
// requires a .proto change plus regeneration; the rename is wire-compatible
// (field number unchanged) but alters the JSON field name — verify before fixing.
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace HOLMS.Types.TenancyConfig {

  /// <summary>Holder for reflection information generated from tenancy_config/occupancy_factor_config.proto</summary>
  public static partial class OccupancyFactorConfigReflection {

    #region Descriptor
    /// <summary>File descriptor for tenancy_config/occupancy_factor_config.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    static OccupancyFactorConfigReflection() {
      // Base64-encoded serialized FileDescriptorProto for this .proto file.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "Cix0ZW5hbmN5X2NvbmZpZy9vY2N1cGFuY3lfZmFjdG9yX2NvbmZpZy5wcm90",
            "bxIaaG9sbXMudHlwZXMudGVuYW5jeV9jb25maWcaQXRlbmFuY3lfY29uZmln",
            "L2luZGljYXRvcnMvb2NjdXBhbmN5X2ZhY3Rvcl9jb25maWdfaW5kaWNhdG9y",
            "LnByb3RvIpYCChVPY2N1cGFuY3lGYWN0b3JDb25maWcSWAoJZW50aXR5X2lk",
            "GAEgASgLMkUuaG9sbXMudHlwZXMudGVuYW5jeV9jb25maWcuaW5kaWNhdG9y",
            "cy5PY2N1cGFuY3lGYWN0b3JDb25maWdJbmRpY2F0b3ISHAoUb2NjdXBhbmN5",
            "X3JhbmdlX25hbWUYAiABKAkSFgoOcHJpY2luZ19mYWN0b3IYAyABKAESDwoH",
            "aG9yaXpvbhgEIAEoBRITCgtmYWN0b3JfcmF0ZRgFIAEoARIbChNvY3V1cGFu",
            "Y3lfcmFuZ2VfbWluGAYgASgFEhsKE29jY3VwYW5jeV9yYW5nZV9tYXgYByAB",
            "KAUSDQoFc2xvcGUYCCABKAFCK1oNdGVuYW5jeWNvbmZpZ6oCGUhPTE1TLlR5",
            "cGVzLlRlbmFuY3lDb25maWdiBnByb3RvMw=="));
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { global::HOLMS.Types.TenancyConfig.Indicators.OccupancyFactorConfigIndicatorReflection.Descriptor, },
          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::HOLMS.Types.TenancyConfig.OccupancyFactorConfig), global::HOLMS.Types.TenancyConfig.OccupancyFactorConfig.Parser, new[]{ "EntityId", "OccupancyRangeName", "PricingFactor", "Horizon", "FactorRate", "OcuupancyRangeMin", "OccupancyRangeMax", "Slope" }, null, null, null)
          }));
    }
    #endregion

  }
  #region Messages
  public sealed partial class OccupancyFactorConfig : pb::IMessage<OccupancyFactorConfig> {
    private static readonly pb::MessageParser<OccupancyFactorConfig> _parser = new pb::MessageParser<OccupancyFactorConfig>(() => new OccupancyFactorConfig());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<OccupancyFactorConfig> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::HOLMS.Types.TenancyConfig.OccupancyFactorConfigReflection.Descriptor.MessageTypes[0]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public OccupancyFactorConfig() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public OccupancyFactorConfig(OccupancyFactorConfig other) : this() {
      // Copy constructor: the message field is deep-cloned, scalars copied by value.
      EntityId = other.entityId_ != null ? other.EntityId.Clone() : null;
      occupancyRangeName_ = other.occupancyRangeName_;
      pricingFactor_ = other.pricingFactor_;
      horizon_ = other.horizon_;
      factorRate_ = other.factorRate_;
      ocuupancyRangeMin_ = other.ocuupancyRangeMin_;
      occupancyRangeMax_ = other.occupancyRangeMax_;
      slope_ = other.slope_;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public OccupancyFactorConfig Clone() {
      return new OccupancyFactorConfig(this);
    }

    /// <summary>Field number for the "entity_id" field.</summary>
    public const int EntityIdFieldNumber = 1;
    private global::HOLMS.Types.TenancyConfig.Indicators.OccupancyFactorConfigIndicator entityId_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::HOLMS.Types.TenancyConfig.Indicators.OccupancyFactorConfigIndicator EntityId {
      get { return entityId_; }
      set {
        entityId_ = value;
      }
    }

    /// <summary>Field number for the "occupancy_range_name" field.</summary>
    public const int OccupancyRangeNameFieldNumber = 2;
    private string occupancyRangeName_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string OccupancyRangeName {
      get { return occupancyRangeName_; }
      set {
        occupancyRangeName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "pricing_factor" field.</summary>
    public const int PricingFactorFieldNumber = 3;
    private double pricingFactor_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public double PricingFactor {
      get { return pricingFactor_; }
      set {
        pricingFactor_ = value;
      }
    }

    /// <summary>Field number for the "horizon" field.</summary>
    public const int HorizonFieldNumber = 4;
    private int horizon_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int Horizon {
      get { return horizon_; }
      set {
        horizon_ = value;
      }
    }

    /// <summary>Field number for the "factor_rate" field.</summary>
    public const int FactorRateFieldNumber = 5;
    private double factorRate_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public double FactorRate {
      get { return factorRate_; }
      set {
        factorRate_ = value;
      }
    }

    /// <summary>Field number for the "ocuupancy_range_min" field. (Name carries a typo from the .proto — see file header note.)</summary>
    public const int OcuupancyRangeMinFieldNumber = 6;
    private int ocuupancyRangeMin_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int OcuupancyRangeMin {
      get { return ocuupancyRangeMin_; }
      set {
        ocuupancyRangeMin_ = value;
      }
    }

    /// <summary>Field number for the "occupancy_range_max" field.</summary>
    public const int OccupancyRangeMaxFieldNumber = 7;
    private int occupancyRangeMax_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int OccupancyRangeMax {
      get { return occupancyRangeMax_; }
      set {
        occupancyRangeMax_ = value;
      }
    }

    /// <summary>Field number for the "slope" field.</summary>
    public const int SlopeFieldNumber = 8;
    private double slope_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public double Slope {
      get { return slope_; }
      set {
        slope_ = value;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as OccupancyFactorConfig);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(OccupancyFactorConfig other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      // Field-by-field value equality; doubles compared with ==, as generated.
      if (!object.Equals(EntityId, other.EntityId)) return false;
      if (OccupancyRangeName != other.OccupancyRangeName) return false;
      if (PricingFactor != other.PricingFactor) return false;
      if (Horizon != other.Horizon) return false;
      if (FactorRate != other.FactorRate) return false;
      if (OcuupancyRangeMin != other.OcuupancyRangeMin) return false;
      if (OccupancyRangeMax != other.OccupancyRangeMax) return false;
      if (Slope != other.Slope) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (entityId_ != null) hash ^= EntityId.GetHashCode();
      if (OccupancyRangeName.Length != 0) hash ^= OccupancyRangeName.GetHashCode();
      if (PricingFactor != 0D) hash ^= PricingFactor.GetHashCode();
      if (Horizon != 0) hash ^= Horizon.GetHashCode();
      if (FactorRate != 0D) hash ^= FactorRate.GetHashCode();
      if (OcuupancyRangeMin != 0) hash ^= OcuupancyRangeMin.GetHashCode();
      if (OccupancyRangeMax != 0) hash ^= OccupancyRangeMax.GetHashCode();
      if (Slope != 0D) hash ^= Slope.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      // Tags are (field_number << 3) | wire_type; proto3 skips default-valued fields.
      if (entityId_ != null) {
        output.WriteRawTag(10);
        output.WriteMessage(EntityId);
      }
      if (OccupancyRangeName.Length != 0) {
        output.WriteRawTag(18);
        output.WriteString(OccupancyRangeName);
      }
      if (PricingFactor != 0D) {
        output.WriteRawTag(25);
        output.WriteDouble(PricingFactor);
      }
      if (Horizon != 0) {
        output.WriteRawTag(32);
        output.WriteInt32(Horizon);
      }
      if (FactorRate != 0D) {
        output.WriteRawTag(41);
        output.WriteDouble(FactorRate);
      }
      if (OcuupancyRangeMin != 0) {
        output.WriteRawTag(48);
        output.WriteInt32(OcuupancyRangeMin);
      }
      if (OccupancyRangeMax != 0) {
        output.WriteRawTag(56);
        output.WriteInt32(OccupancyRangeMax);
      }
      if (Slope != 0D) {
        output.WriteRawTag(65);
        output.WriteDouble(Slope);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      // Mirrors WriteTo: 1 byte of tag per present field plus the payload size
      // (doubles are fixed 8 bytes; int32/string/message sizes are computed).
      int size = 0;
      if (entityId_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(EntityId);
      }
      if (OccupancyRangeName.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(OccupancyRangeName);
      }
      if (PricingFactor != 0D) {
        size += 1 + 8;
      }
      if (Horizon != 0) {
        size += 1 + pb::CodedOutputStream.ComputeInt32Size(Horizon);
      }
      if (FactorRate != 0D) {
        size += 1 + 8;
      }
      if (OcuupancyRangeMin != 0) {
        size += 1 + pb::CodedOutputStream.ComputeInt32Size(OcuupancyRangeMin);
      }
      if (OccupancyRangeMax != 0) {
        size += 1 + pb::CodedOutputStream.ComputeInt32Size(OccupancyRangeMax);
      }
      if (Slope != 0D) {
        size += 1 + 8;
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(OccupancyFactorConfig other) {
      if (other == null) {
        return;
      }
      // Proto3 merge semantics: non-default scalars overwrite, message fields merge.
      if (other.entityId_ != null) {
        if (entityId_ == null) {
          entityId_ = new global::HOLMS.Types.TenancyConfig.Indicators.OccupancyFactorConfigIndicator();
        }
        EntityId.MergeFrom(other.EntityId);
      }
      if (other.OccupancyRangeName.Length != 0) {
        OccupancyRangeName = other.OccupancyRangeName;
      }
      if (other.PricingFactor != 0D) {
        PricingFactor = other.PricingFactor;
      }
      if (other.Horizon != 0) {
        Horizon = other.Horizon;
      }
      if (other.FactorRate != 0D) {
        FactorRate = other.FactorRate;
      }
      if (other.OcuupancyRangeMin != 0) {
        OcuupancyRangeMin = other.OcuupancyRangeMin;
      }
      if (other.OccupancyRangeMax != 0) {
        OccupancyRangeMax = other.OccupancyRangeMax;
      }
      if (other.Slope != 0D) {
        Slope = other.Slope;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            // Unknown field: skip (this generator version drops unknown fields).
            input.SkipLastField();
            break;
          case 10: {
            if (entityId_ == null) {
              entityId_ = new global::HOLMS.Types.TenancyConfig.Indicators.OccupancyFactorConfigIndicator();
            }
            input.ReadMessage(entityId_);
            break;
          }
          case 18: {
            OccupancyRangeName = input.ReadString();
            break;
          }
          case 25: {
            PricingFactor = input.ReadDouble();
            break;
          }
          case 32: {
            Horizon = input.ReadInt32();
            break;
          }
          case 41: {
            FactorRate = input.ReadDouble();
            break;
          }
          case 48: {
            OcuupancyRangeMin = input.ReadInt32();
            break;
          }
          case 56: {
            OccupancyRangeMax = input.ReadInt32();
            break;
          }
          case 65: {
            Slope = input.ReadDouble();
            break;
          }
        }
      }
    }

  }
  #endregion

}
#endregion Designer generated code
using System; using NDatabase; using NDatabase.Api; using NDatabase.Api.Query; using NDatabase.Core.Query; using NDatabase.Exceptions; using NDatabase.Tool.Wrappers; using NUnit.Framework; namespace Test.NDatabase.Odb.Test.Index { [TestFixture] public class TestIndex : ODBTest { [Test] public void SimpleUniqueIndex() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); var clazz = odb.IndexManagerFor<VO.Login.Function>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index", indexFields); odb.Close(); odb = Open(baseName); // inserting 3 objects with 3 different index keys odb.Store(new VO.Login.Function("function1")); odb.Store(new VO.Login.Function("function2")); odb.Store(new VO.Login.Function("function3")); odb.Close(); odb = Open(baseName); try { // Tries to store another function with name function1 => send an // exception because of duplicated keys odb.Store(new VO.Login.Function("function1")); Fail("Should have thrown Exception"); } catch (DuplicatedKeyException) { Assert.Pass(); odb.Close(); DeleteBase(baseName); } } [Test] public void TestIndexExist1() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); var clazz = odb.IndexManagerFor<VO.Login.Function>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("my-index", indexFields); odb.Store(new VO.Login.Function("test")); odb.Close(); odb = Open(baseName); AssertTrue(odb.IndexManagerFor<VO.Login.Function>().ExistIndex("my-index")); AssertFalse(odb.IndexManagerFor<VO.Login.Function>().ExistIndex("my-indexdhfdjkfhdjkhj")); odb.Close(); } [Test] public void TestIndexExist2() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); var clazz = odb.IndexManagerFor<VO.Login.Function>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("my-index", indexFields); odb.Close(); odb = Open(baseName); AssertTrue(odb.IndexManagerFor<VO.Login.Function>().ExistIndex("my-index")); 
AssertFalse(odb.IndexManagerFor<VO.Login.Function>().ExistIndex("my-indexdhfdjkfhdjkhj")); odb.Close(); } [Test] public void TestIndexWithOneFieldAndQueryWithTwoFields() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index1", indexFields); odb.Close(); odb = Open(baseName); var io1 = new IndexedObject("olivier", 15, new DateTime()); odb.Store(io1); odb.Close(); odb = Open(baseName); IQuery q = odb.Query<IndexedObject>(); var constraintOnName = (IConstraint) q.Descend("name").Constrain((object) "olivier").Equal(); var constraintOnDuration = (IConstraint) q.Descend("duration").Constrain((object) 15).Equal(); constraintOnName.And(constraintOnDuration); var objects = q.Execute<IndexedObject>(true); odb.Close(); Println(((IInternalQuery)q).GetExecutionPlan().ToString()); AssertEquals(false, ((IInternalQuery)q).GetExecutionPlan().UseIndex()); AssertEquals(1, objects.Count); DeleteBase(baseName); } [Test] public void TestInsertAndDeleteWithIndex() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index1", indexFields); odb.Close(); odb = Open(baseName); var size = 1000; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), 15 + i, new DateTime()); odb.Store(io1); if (i % 10 == 0) Println(i); } var tt0 = OdbTime.GetCurrentTimeInMs(); odb.Close(); var tt1 = OdbTime.GetCurrentTimeInMs(); var end0 = OdbTime.GetCurrentTimeInMs(); Println("inserting time with index=" + (end0 - start0)); Println("commit time=" + (tt1 - tt0)); odb = Open(baseName); long totalTime = 0; long maxTime = 0; long minTime = 100000; for (var i = 0; i < size; i++) { IQuery query = odb.Query<IndexedObject>(); 
query.Descend("name").Constrain("olivier" + (i + 1)).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = query.Execute<IndexedObject>(true); var end = OdbTime.GetCurrentTimeInMs(); AssertEquals(1, objects.Count); var io2 = objects.GetFirst(); AssertEquals("olivier" + (i + 1), io2.GetName()); AssertEquals(15 + i, io2.GetDuration()); var d = end - start; totalTime += d; if (d > maxTime) maxTime = d; if (d < minTime) minTime = d; odb.Delete(io2); } odb.Close(); odb = Open(baseName); IQuery q = odb.Query<IndexedObject>(); var oos = q.Execute<IndexedObject>(true); for (var i = 0; i < size; i++) { q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + (i + 1))).Equal(); oos = q.Execute<IndexedObject>(true); AssertEquals(0, oos.Count); } odb.Close(); DeleteBase(baseName); Println("total duration=" + totalTime + " / " + (double) totalTime / size); Println("duration max=" + maxTime + " / min=" + minTime); if (testPerformance) { AssertTrue(totalTime / size < 0.9); AssertTrue(maxTime < 20); AssertTrue(minTime == 0); } } [Test] public void TestInsertAndDeleteWithIndex1() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index1", indexFields); odb.Close(); odb = Open(baseName); var size = 1400; for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), 15 + i, new DateTime()); odb.Store(io1); } odb.Close(); Console.Out.WriteLine("----ola"); odb = Open(baseName); IQuery q = odb.Query<IndexedObject>(); var objects = q.Execute<IndexedObject>(); while (objects.HasNext()) { var io = objects.Next(); Println(io); odb.Delete(io); } odb.Close(); } [Test] public void TestInsertAndDeleteWithIndexWith1000() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = 
odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index1", indexFields); odb.Close(); odb = Open(baseName); var size = 1000; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), 15 + i, new DateTime()); odb.Store(io1); if (i % 100 == 0) Println(i); } var tt0 = OdbTime.GetCurrentTimeInMs(); odb.Close(); var tt1 = OdbTime.GetCurrentTimeInMs(); var end0 = OdbTime.GetCurrentTimeInMs(); Println("inserting time with index=" + (end0 - start0)); Println("commit time=" + (tt1 - tt0)); odb = Open(baseName); /* accumulators for per-operation timing statistics (select vs delete) */ long totalSelectTime = 0; long maxTime = 0; long minTime = 100000; var t0 = OdbTime.GetCurrentTimeInMs(); long t1 = 0; long ta1 = 0; long ta2 = 0; long totalTimeDelete = 0; long totalTimeSelect = 0; for (var j = 0; j < size; j++) { IQuery q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + (j + 1))).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = q.Execute<IndexedObject>(true); var end = OdbTime.GetCurrentTimeInMs(); AssertEquals(1, objects.Count); var io2 = objects.GetFirst(); AssertEquals("olivier" + (j + 1), io2.GetName()); AssertEquals(15 + j, io2.GetDuration()); var d = end - start; totalSelectTime += d; if (d > maxTime) maxTime = d; if (d < minTime) minTime = d; ta1 = OdbTime.GetCurrentTimeInMs(); odb.Delete(io2); ta2 = OdbTime.GetCurrentTimeInMs(); totalTimeDelete += (ta2 - ta1); totalTimeSelect += (end - start); if (j % 100 == 0 && j > 0) { t1 = OdbTime.GetCurrentTimeInMs(); Println(j + " - t= " + (t1 - t0) + " - delete=" + (totalTimeDelete / j) + " / select=" + (totalTimeSelect / j)); t0 = t1; } } odb.Close(); Println("total select=" + totalSelectTime + " / " + (double) totalSelectTime / size); Println("total delete=" + totalTimeDelete + " / " + (double) totalTimeDelete / size); Println("duration max=" + maxTime + " / min=" + minTime); odb = Open(baseName); for (var i = 0; i < size; i++) { IQuery q = 
odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + (i + 1))).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = q.Execute<IndexedObject>(true); var end = OdbTime.GetCurrentTimeInMs(); AssertEquals(0, objects.Count); if (i % 100 == 0) Println(i); } odb.Close(); DeleteBase(baseName); var timePerObject = totalSelectTime / (float) size; Println("Time per object = " + timePerObject); if (timePerObject > 1) Println("Time per object = " + timePerObject); AssertTrue(timePerObject < 1); // TODO Try to get maxTime < 10! AssertTrue(maxTime < 250); AssertTrue(minTime < 1); } /* Shrinks the index B-tree degree to 3 so node splits/merges occur even with only 6 objects; inserts, deletes via indexed lookups, verifies emptiness, then restores the default degree. */ [Test] public void TestInsertAndDeleteWithIndexWith40Elements() { var baseName = GetBaseName(); OdbConfiguration.SetIndexBTreeDegree(3); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index1", indexFields); odb.Close(); odb = Open(baseName); var size = 6; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), 15 + i, new DateTime()); odb.Store(io1); if (i % 1000 == 0) Println(i); } var tt0 = OdbTime.GetCurrentTimeInMs(); odb.Close(); var tt1 = OdbTime.GetCurrentTimeInMs(); var end0 = OdbTime.GetCurrentTimeInMs(); Println("inserting time with index=" + (end0 - start0)); Println("commit time=" + (tt1 - tt0)); odb = Open(baseName); long totalTime = 0; long maxTime = 0; long minTime = 100000; var t0 = OdbTime.GetCurrentTimeInMs(); long t1 = 0; for (var i = 0; i < size; i++) { IQuery q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + (i + 1))).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = q.Execute<IndexedObject>(true); var end = OdbTime.GetCurrentTimeInMs(); AssertEquals(1, objects.Count); var io2 = objects.GetFirst(); AssertEquals("olivier" + (i + 1), io2.GetName()); AssertEquals(15 + i, io2.GetDuration()); 
var d = end - start; totalTime += d; if (d > maxTime) maxTime = d; if (d < minTime) minTime = d; odb.Delete(io2); if (i % 100 == 0) { t1 = OdbTime.GetCurrentTimeInMs(); Println(i + " - t= " + (t1 - t0)); t0 = t1; } } // println(new BTreeDisplay().build(cii.getBTree(), true)); odb.Close(); odb = Open(baseName); for (var i = 0; i < size; i++) { IQuery q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + (i + 1))).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = q.Execute<IndexedObject>(true); var end = OdbTime.GetCurrentTimeInMs(); AssertEquals(0, objects.Count); if (i % 100 == 0) Println(i); } var unitTime = (double) totalTime / size; Println("total duration=" + totalTime + " / " + unitTime); Println("duration max=" + maxTime + " / min=" + minTime); odb.Close(); DeleteBase(baseName); AssertTrue(unitTime < 10); // TODO Try to get maxTime < 10! if (testPerformance) { AssertTrue(maxTime < 250); AssertTrue(minTime <= 1); } OdbConfiguration.SetIndexBTreeDegree(20); } /* Minimal 4-object variant of the indexed insert/lookup/delete cycle using the default B-tree degree. */ [Test] public void TestInsertAndDeleteWithIndexWith4Elements() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index1", indexFields); odb.Close(); odb = Open(baseName); var size = 4; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), 15 + i, new DateTime()); odb.Store(io1); if (i % 1000 == 0) Println(i); } var tt0 = OdbTime.GetCurrentTimeInMs(); odb.Close(); var tt1 = OdbTime.GetCurrentTimeInMs(); var end0 = OdbTime.GetCurrentTimeInMs(); Println("inserting time with index=" + (end0 - start0)); Println("commit time=" + (tt1 - tt0)); odb = Open(baseName); long totalTime = 0; long maxTime = 0; long minTime = 100000; var t0 = OdbTime.GetCurrentTimeInMs(); long t1 = 0; for (var i = 0; i < size; i++) { IQuery q = 
odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + (i + 1))).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = q.Execute<IndexedObject>(true); var end = OdbTime.GetCurrentTimeInMs(); AssertEquals(1, objects.Count); var io2 = objects.GetFirst(); AssertEquals("olivier" + (i + 1), io2.GetName()); AssertEquals(15 + i, io2.GetDuration()); var d = end - start; totalTime += d; if (d > maxTime) maxTime = d; if (d < minTime) minTime = d; odb.Delete(io2); if (i % 100 == 0) { t1 = OdbTime.GetCurrentTimeInMs(); Println(i + " - t= " + (t1 - t0)); t0 = t1; } } odb.Close(); odb = Open(baseName); for (var i = 0; i < size; i++) { IQuery q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + (i + 1))).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = q.Execute<IndexedObject>(true); var end = OdbTime.GetCurrentTimeInMs(); AssertEquals(0, objects.Count); if (i % 100 == 0) Println(i); } odb.Close(); DeleteBase(baseName); var unitTime = (double) totalTime / size; Println("total duration=" + totalTime + " / " + (double) totalTime / size); Println("duration max=" + maxTime + " / min=" + minTime); AssertTrue(unitTime < 10); if (testPerformance) { AssertTrue(maxTime < 250); AssertTrue(minTime <= 1); } } /// <summary> /// Test with two key index /// </summary> /* Declares three unique indexes (name; name+creation; duration+creation); a query constraining duration AND creation must be answered through an index (UseIndex() == true). */ [Test] public void TestInsertWith3Indexes() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); // Configuration.setUseLazyCache(true); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields3 = new[] {"name"}; clazz.AddUniqueIndexOn("index3", indexFields3); var indexFields2 = new[] {"name", "creation"}; clazz.AddUniqueIndexOn("index2", indexFields2); var indexField4 = new[] {"duration", "creation"}; clazz.AddUniqueIndexOn("inde3", indexField4); odb.Close(); odb = Open(baseName); var size = 1000; var start0 = OdbTime.GetCurrentTimeInMs(); var dates = new DateTime[size]; for (var i 
= 0; i < size; i++) { // println(i); dates[i] = new DateTime(); var io1 = new IndexedObject("olivier" + (i + 1), i, dates[i]); odb.Store(io1); if (i % 100 == 0) Println(i); } odb.Close(); var end0 = OdbTime.GetCurrentTimeInMs(); Println("inserting time with index=" + (end0 - start0)); odb = Open(baseName); var start = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { IQuery q = odb.Query<IndexedObject>(); var constraintOnDuration = (IConstraint) q.Descend("duration").Constrain((object) i).Equal(); var constraintOnCreation = (IConstraint) q.Descend("creation").Constrain((object) dates[i]).Equal(); constraintOnDuration.And(constraintOnCreation); var objects = q.Execute<IndexedObject>(true); AssertEquals(1, objects.Count); AssertTrue(((IInternalQuery)q).GetExecutionPlan().UseIndex()); } var end = OdbTime.GetCurrentTimeInMs(); double duration = (end - start); duration = duration / size; Println("duration=" + duration); odb.Close(); DeleteBase(baseName); Println(duration); var d = 2; if (duration > d) Fail("Time of search in index is greater than " + d + " ms : " + duration); } /// <summary> /// Test with 3 indexes /// </summary> /* Three single-field (non-unique) indexes on duration, creation and name, committing every 10 of 130 inserts; every object must then be retrievable by its duration value. */ [Test] public void TestInsertWith3IndexesCheckAll() { var baseName = GetBaseName(); // LogUtil.logOn(LazyOdbBtreePersister.LOG_ID, true); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"duration"}; clazz.AddIndexOn("index1", indexFields); var indexFields2 = new[] {"creation"}; clazz.AddIndexOn("index2", indexFields2); var indexFields3 = new[] {"name"}; clazz.AddIndexOn("index3", indexFields3); odb.Close(); odb = Open(baseName); var size = 130; var commitInterval = 10; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), i, new DateTime()); odb.Store(io1); if (i % commitInterval == 0) { odb.Commit(); Println(i + " : commit / " + size); } } odb.Close(); 
var end0 = OdbTime.GetCurrentTimeInMs(); // ObjectWriter.getNbNormalUpdates()); // println("inserting time with index=" + (end0 - start0)); odb = Open(baseName); var start = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { IQuery q = odb.Query<IndexedObject>(); q.Descend("duration").Constrain((object) i).Equal(); var objects = q.Execute<IndexedObject>(false); // println("olivier" + (i+1)); AssertEquals(1, objects.Count); } var end = OdbTime.GetCurrentTimeInMs(); try { var duration = (end - start) / (float) size; Println(duration); var d = 0.144; if (testPerformance && duration > d) Fail("Time of search in index is greater than " + d + " ms : " + duration); } finally { odb.Close(); DeleteBase(baseName); } } /// <summary> /// Test index with 3 keys . /// </summary> /// <remarks> /// Test index with 3 keys . /// Select using only one field to verify that query does not use index, then /// execute a query with the 3 fields and checks than index is used /// </remarks> /* Composite 3-field unique index (name, duration, creation): a name-only query must NOT use it, while a query constraining all three fields must use index "index". */ [Test] public void TestInsertWith3Keys() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name", "duration", "creation"}; clazz.AddUniqueIndexOn("index", indexFields); odb.Close(); odb = Open(baseName); var size = 500; var commitInterval = 10000; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { var io2 = new IndexedObject("olivier" + (i + 1), i + 15 + size, new DateTime()); odb.Store(io2); if (i % commitInterval == 0) { var t0 = OdbTime.GetCurrentTimeInMs(); odb.Commit(); var t1 = OdbTime.GetCurrentTimeInMs(); Println(i + " : commit - ctime " + (t1 - t0) + " -ttime="); } } var theDate = new DateTime(); var theName = "name indexed"; var theDuration = 45; var io1 = new IndexedObject(theName, theDuration, theDate); odb.Store(io1); odb.Close(); odb = Open(baseName); // first search without index IQuery q = 
odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) theName).Equal(); var objects = q.Execute<IndexedObject>(true); AssertFalse(((IInternalQuery)q).GetExecutionPlan().UseIndex()); Println(((IInternalQuery)q).GetExecutionPlan().GetDetails()); AssertEquals(1, objects.Count); var io3 = objects.GetFirst(); AssertEquals(theName, io3.GetName()); AssertEquals(theDuration, io3.GetDuration()); AssertEquals(theDate, io3.GetCreation()); odb.Close(); odb = Open(baseName); // Then search using index q = odb.Query<IndexedObject>(); var constraintOnName = (IConstraint) q.Descend("name").Constrain((object) theName).Equal(); var constraintOnCreation = (IConstraint) q.Descend("creation").Constrain((object) theDate).Equal(); var constraintOnDuration = (IConstraint) q.Descend("duration").Constrain((object) theDuration).Equal(); constraintOnName.And(constraintOnCreation).And(constraintOnDuration); objects = q.Execute<IndexedObject>(true); AssertTrue(((IInternalQuery)q).GetExecutionPlan().UseIndex()); AssertEquals("index", ((IInternalQuery)q).GetExecutionPlan().GetIndex().Name); Println(((IInternalQuery)q).GetExecutionPlan().GetDetails()); AssertEquals(1, objects.Count); io3 = objects.GetFirst(); AssertEquals(theName, io3.GetName()); AssertEquals(theDuration, io3.GetDuration()); AssertEquals(theDate, io3.GetCreation()); odb.Close(); } /// <summary> /// Test with two key index /// </summary> /* Declares four unique indexes and commits every 10 of 100 inserts; afterwards every object must remain retrievable by its duration. */ [Test] public void TestInsertWith4IndexesAndCommits() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); // Configuration.setUseLazyCache(true); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexField1 = new[] {"duration"}; clazz.AddUniqueIndexOn("inde1", indexField1); var indexFields3 = new[] {"name"}; clazz.AddUniqueIndexOn("index3", indexFields3); var indexFields2 = new[] {"name", "creation"}; clazz.AddUniqueIndexOn("index2", indexFields2); var indexField4 = new[] {"duration", "creation"}; 
clazz.AddUniqueIndexOn("inde4", indexField4); odb.Close(); odb = Open(baseName); var size = 100; var commitInterval = 10; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { // println(i); var io1 = new IndexedObject("olivier" + (i + 1), i, new DateTime()); odb.Store(io1); if (i % 10 == 0) Println(i); if (i % commitInterval == 0) odb.Commit(); } odb.Close(); var end0 = OdbTime.GetCurrentTimeInMs(); Println("inserting time with index=" + (end0 - start0)); odb = Open(baseName); var start = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { IQuery q = odb.Query<IndexedObject>(); q.Descend("duration").Constrain((object) i).Equal(); var objects = q.Execute<IndexedObject>(false); // println("olivier" + (i+1)); AssertEquals(1, objects.Count); } var end = OdbTime.GetCurrentTimeInMs(); var duration = end - start; Println("duration=" + duration); odb.Close(); DeleteBase(baseName); if (testPerformance && duration > 111) Fail("Time of search in index : " + duration + ", should be less than 111"); } /// <summary> /// Test with one key index /// </summary> /* Unique index on the DateTime "creation" field; each of the 1300 distinct timestamps stored must be found again through the index. */ [Test] public void TestInsertWithDateIndex3CheckAll() { var baseName = GetBaseName(); // LogUtil.logOn(LazyOdbBtreePersister.LOG_ID, true); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"creation"}; clazz.AddUniqueIndexOn("index1", indexFields); odb.Close(); odb = Open(baseName); var size = 1300; var commitInterval = 1000; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), i, new DateTime(start0 + i)); odb.Store(io1); if (i % commitInterval == 0) odb.Commit(); } // println(i+" : commit / " + size); odb.Close(); var end0 = OdbTime.GetCurrentTimeInMs(); // ObjectWriter.getNbNormalUpdates()); // println("inserting time with index=" + (end0 - start0)); odb = Open(baseName); var start = 
OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { IQuery q = odb.Query<IndexedObject>(); q.Descend("creation").Constrain((object) new DateTime(start0 + i)).Equal(); var objects = q.Execute<IndexedObject>(false); // println("olivier" + (i+1)); AssertEquals(1, objects.Count); } var end = OdbTime.GetCurrentTimeInMs(); try { var duration = (end - start) / (float) size; Println(duration); var d = 0.144; if (testPerformance && duration > d) Fail("Time of search in index is greater than " + d + " ms : " + duration); } finally { odb.Close(); DeleteBase(baseName); } } /* Composite index on (name, duration); an "is not null" constraint on name finds the object but the execution plan details must not mention index1. */ [Test] public void TestInsertWithIndex() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name", "duration"}; clazz.AddUniqueIndexOn("index1", indexFields); odb.Close(); odb = Open(baseName); var io1 = new IndexedObject("olivier", 15, new DateTime()); odb.Store(io1); odb.Close(); odb = Open(baseName); IQuery q = odb.Query<IndexedObject>(); q.Descend("name").Constrain(null).Equal().Not(); var objects = q.Execute<IndexedObject>(true); odb.Close(); AssertEquals(1, objects.Count); var io2 = objects.GetFirst(); AssertEquals("olivier", io2.GetName()); AssertEquals(15, io2.GetDuration()); AssertFalse(((IInternalQuery)q).GetExecutionPlan().GetDetails().IndexOf("index1") != -1); } /* 1000 indexed inserts followed by a per-object lookup by name with timing assertions (performance-gated). */ [Test] public void TestInsertWithIndex1() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index1", indexFields); odb.Close(); odb = Open(baseName); var size = 1000; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), 15 + i, new DateTime()); odb.Store(io1); if (i % 100 == 0) Println(i); } var tt0 = OdbTime.GetCurrentTimeInMs(); odb.Close(); var tt1 = OdbTime.GetCurrentTimeInMs(); 
var end0 = OdbTime.GetCurrentTimeInMs(); Println("inserting time with index=" + (end0 - start0)); Println("commit time=" + (tt1 - tt0)); odb = Open(baseName); /* per-lookup timing accumulators */ long totalTime = 0; long maxTime = 0; long minTime = 100000; for (var i = 0; i < size; i++) { IQuery q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + (i + 1))).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = q.Execute<IndexedObject>(true); var end = OdbTime.GetCurrentTimeInMs(); AssertEquals(1, objects.Count); var io2 = objects.GetFirst(); AssertEquals("olivier" + (i + 1), io2.GetName()); AssertEquals(15 + i, io2.GetDuration()); var d = end - start; totalTime += d; if (d > maxTime) maxTime = d; if (d < minTime) minTime = d; } odb.Close(); DeleteBase(baseName); Println("total duration=" + totalTime + " / " + (double) totalTime / size); Println("duration max=" + maxTime + " / min=" + minTime); if (testPerformance && totalTime / size > 2) Fail("Total/size is > than 2 : " + totalTime); if (testPerformance) { // TODO Try to get maxTime < 10! 
AssertTrue(maxTime < 100); AssertTrue(minTime < 1); } } /* Like TestInsertWithIndex1, but inserts without closing/reopening after index creation, and enforces an average lookup time under 1 ms. */ [Test] public void TestInsertWithIndex2() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index1", indexFields); var size = 1000; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), 15 + i, new DateTime()); odb.Store(io1); if (i % 100 == 0) Println(i); } var tt0 = OdbTime.GetCurrentTimeInMs(); odb.Close(); var tt1 = OdbTime.GetCurrentTimeInMs(); var end0 = OdbTime.GetCurrentTimeInMs(); Println("inserting time with index=" + (end0 - start0)); Println("commit time=" + (tt1 - tt0)); odb = Open(baseName); long totalTime = 0; long maxTime = 0; long minTime = 100000; var t0 = OdbTime.GetCurrentTimeInMs(); long t1 = 0; for (var i = 0; i < size; i++) { IQuery q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + (i + 1))).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = q.Execute<IndexedObject>(true); var end = OdbTime.GetCurrentTimeInMs(); AssertEquals(1, objects.Count); var io2 = objects.GetFirst(); AssertEquals("olivier" + (i + 1), io2.GetName()); AssertEquals(15 + i, io2.GetDuration()); var d = end - start; totalTime += d; if (d > maxTime) maxTime = d; if (d < minTime) minTime = d; if (i % 100 == 0) { t1 = OdbTime.GetCurrentTimeInMs(); Println("i=" + i + " - time=" + (t1 - t0)); t0 = t1; } } // / odb.Close(); DeleteBase(baseName); // println("total duration=" + totalTime + " / " + (double) totalTime / // size); // println("duration max=" + maxTime + " / min=" + minTime); if (totalTime / size > 1) Fail("Total/size is > than 1 : " + (totalTime / (float) size)); Println("Max time=" + maxTime); Println("Min time=" + minTime); // TODO Try to get maxTime < 10! 
AssertTrue(maxTime < 250); AssertTrue(minTime < 1); } /// <summary> /// Test with one key index /// </summary> /* Closes and reopens the database after every commit interval, forcing the index to be reloaded from disk between batches of inserts; the last object must still be found via the index. */ [Test] public void TestInsertWithIndex3() { var baseName = GetBaseName(); // LogUtil.logOn(LazyOdbBtreePersister.LOG_ID, true); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index1", indexFields); odb.Close(); odb = Open(baseName); var size = 1300; var commitInterval = 10; var start0 = OdbTime.GetCurrentTimeInMs(); var engine = ((global::NDatabase.Odb)odb).GetStorageEngine(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), 15 + size, new DateTime()); odb.Store(io1); if (i % commitInterval == 0) { odb.Commit(); odb.Close(); odb = Open(baseName); engine = ((global::NDatabase.Odb)odb).GetStorageEngine(); } if (io1.GetName().Equals("olivier" + size)) Println("Ola chico"); } engine = ((global::NDatabase.Odb)odb).GetStorageEngine(); // println(new // BTreeDisplay().build(engine.getSession(true).getMetaModel().getClassInfo(IndexedObject.class.Name, // true).getIndex(0).getBTree(), true)); odb.Close(); var end0 = OdbTime.GetCurrentTimeInMs(); // ObjectWriter.getNbNormalUpdates()); Console.WriteLine("inserting time with index=" + (end0 - start0)); odb = Open(baseName); IQuery q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + size)).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = q.Execute<IndexedObject>(false); var end = OdbTime.GetCurrentTimeInMs(); try { AssertEquals(1, objects.Count); var io2 = objects.GetFirst(); AssertEquals("olivier" + size, io2.GetName()); AssertEquals(15 + size, io2.GetDuration()); var duration = end - start; Println("duration=" + duration); if (testPerformance) { if (duration > 2) Fail("Time of search in index is greater than 2ms : " + duration); } } finally { odb.Close(); DeleteBase(baseName); } } /* Part 1/2: populates the shared file "index.ndb" with 1300 indexed objects, reopening on every commit; Part2 reads it back. */ [Test] public 
void TestInsertWithIndex3Part1() { var baseName = "index.ndb"; // LogUtil.logOn(LazyOdbBtreePersister.LOG_ID, true); DeleteBase(baseName); var @base = Open(baseName); // base.store(new IndexedObject()); var clazz = @base.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index1", indexFields); @base.Close(); @base = Open(baseName); var size = 1300; var commitInterval = 10; var start0 = OdbTime.GetCurrentTimeInMs(); var engine = ((global::NDatabase.Odb)@base).GetStorageEngine(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), 15 + size, new DateTime()); @base.Store(io1); if (i % commitInterval == 0) { @base.Commit(); @base.Close(); @base = Open(baseName); engine = ((global::NDatabase.Odb)@base).GetStorageEngine(); } if (io1.GetName().Equals("olivier" + size)) Println("Ola chico"); } engine = ((global::NDatabase.Odb)@base).GetStorageEngine(); // println(new // BTreeDisplay().build(engine.getSession(true).getMetaModel().getClassInfo(IndexedObject.class.Name, // true).getIndex(0).getBTree(), true)); @base.Close(); var end0 = OdbTime.GetCurrentTimeInMs(); } /* Part 2/2: reads the "index.ndb" database produced by Part1 and verifies the indexed lookup of the last inserted object. */ [Test] public void TestInsertWithIndex3Part2() { var baseName = "index.ndb"; var size = 1300; var odb = Open(baseName); IQuery q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + size)).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = q.Execute<IndexedObject>(false); var end = OdbTime.GetCurrentTimeInMs(); try { AssertEquals(1, objects.Count); var io2 = objects.GetFirst(); AssertEquals("olivier" + size, io2.GetName()); AssertEquals(15 + size, io2.GetDuration()); var duration = end - start; Println("duration=" + duration); if (testPerformance) { if (duration > 2) Fail("Time of search in index is greater than 2ms : " + duration); } } finally { odb.Close(); } } /// <summary> /// Test with two key index /// </summary> /* Three indexes declared; a name-only query must be answered by the single-field index "index3". */ [Test] public void TestInsertWithIndex4() { var baseName = GetBaseName(); 
DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields3 = new[] {"name"}; clazz.AddUniqueIndexOn("index3", indexFields3); var indexFields2 = new[] {"name", "creation"}; clazz.AddUniqueIndexOn("index2", indexFields2); var indexField4 = new[] {"duration", "creation"}; clazz.AddUniqueIndexOn("inde3", indexField4); odb.Close(); odb = Open(baseName); var size = 500; var commitInterval = 1000; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { // println(i); var ioio = new IndexedObject("olivier" + (i + 1), i + 15 + size, new DateTime()); odb.Store(ioio); if (i % commitInterval == 0) { var t0 = OdbTime.GetCurrentTimeInMs(); odb.Commit(); var t1 = OdbTime.GetCurrentTimeInMs(); Println(i + " : commit - ctime " + (t1 - t0) + " -ttime="); } } var theDate = new DateTime(); var theName = "name indexed"; var io1 = new IndexedObject(theName, 45, theDate); odb.Store(io1); odb.Close(); var end0 = OdbTime.GetCurrentTimeInMs(); Println("inserting time with index=" + (end0 - start0)); odb = Open(baseName); IQuery q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) theName).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = q.Execute<IndexedObject>(true); var end = OdbTime.GetCurrentTimeInMs(); AssertEquals("index3", ((IInternalQuery)q).GetExecutionPlan().GetIndex().Name); AssertEquals(1, objects.Count); var io2 = objects.GetFirst(); AssertEquals(theName, io2.GetName()); AssertEquals(45, io2.GetDuration()); AssertEquals(theDate, io2.GetCreation()); var duration = end - start; Println("duration=" + duration); odb.Close(); DeleteBase(baseName); if (testPerformance && duration > 1) Fail("Time of search in index > 1 : " + duration); } // deleteBase(baseName); /// <summary> /// Test with one key index /// </summary> /* Unique int index on "duration"; every value 0..1299 must be retrievable through it. */ [Test] public void TestInsertWithIntIndex3CheckAll() { var baseName = GetBaseName(); // 
LogUtil.logOn(LazyOdbBtreePersister.LOG_ID, true); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"duration"}; clazz.AddUniqueIndexOn("index1", indexFields); odb.Close(); odb = Open(baseName); var size = 1300; var commitInterval = 10; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), i, new DateTime()); odb.Store(io1); if (i % commitInterval == 0) odb.Commit(); } // println(i+" : commit / " + size); odb.Close(); var end0 = OdbTime.GetCurrentTimeInMs(); Console.WriteLine("inserting time with index=" + (end0 - start0)); odb = Open(baseName); var start = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { IQuery q = odb.Query<IndexedObject>(); q.Descend("duration").Constrain((object) i).Equal(); var objects = q.Execute<IndexedObject>(false); AssertEquals(1, objects.Count); } var end = OdbTime.GetCurrentTimeInMs(); try { var duration = (end - start) / (float) size; if (testPerformance && duration > 2) Fail("Time of search in index is greater than 2ms : " + duration); } finally { odb.Close(); DeleteBase(baseName); } } /// <summary> /// Test with one key index /// </summary> /* Baseline without any index: a sequential-scan lookup of the last of 3000 objects must still finish under 500 ms. */ [Test] public void TestInsertWithoutIndex3() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); var size = 3000; var commitInterval = 1000; var start0 = OdbTime.GetCurrentTimeInMs(); for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), 15 + size, new DateTime()); odb.Store(io1); if (i % commitInterval == 0) odb.Commit(); } // println(i+" : commit"); odb.Close(); var end0 = OdbTime.GetCurrentTimeInMs(); Println("inserting time with index=" + (end0 - start0)); odb = Open(baseName); IQuery q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) ("olivier" + size)).Equal(); var start = OdbTime.GetCurrentTimeInMs(); var objects = 
q.Execute<IndexedObject>(false); var end = OdbTime.GetCurrentTimeInMs(); AssertEquals(1, objects.Count); var io2 = objects.GetFirst(); AssertEquals("olivier" + size, io2.GetName()); AssertEquals(15 + size, io2.GetDuration()); var duration = end - start; Println("duration=" + duration); odb.Close(); DeleteBase(baseName); Println(duration); double d = 500; if (duration > d) Fail("Time of search in index is greater than " + d + " ms : " + duration); } /* Smoke test: declaring three composite unique indexes must succeed and the base must close cleanly. */ [Test] public void TestSaveIndex() { var baseName = GetBaseName(); DeleteBase(baseName); var @base = Open(baseName); var clazz = @base.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name", "duration"}; clazz.AddUniqueIndexOn("index1", indexFields); var indexFields2 = new[] {"name", "creation"}; clazz.AddUniqueIndexOn("index2", indexFields2); var indexFields3 = new[] {"duration", "creation"}; clazz.AddUniqueIndexOn("index3", indexFields3); @base.Close(); DeleteBase(baseName); } /* Stores 4 objects under a unique "name" index; smoke test for a very small index B-tree. */ [Test] public void TestSizeBTree() { var baseName = GetBaseName(); DeleteBase(baseName); var @base = Open(baseName); // base.store(new IndexedObject()); var clazz = @base.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index1", indexFields); @base.Close(); @base = Open(baseName); var size = 4; for (var i = 0; i < size; i++) { var io1 = new IndexedObject("olivier" + (i + 1), 15 + i, new DateTime()); @base.Store(io1); if (i % 1000 == 0) Println(i); } @base.Close(); DeleteBase(baseName); } /// <summary> /// Test index. /// </summary> /// <remarks> /// Test index. Creates 1000 objects. Take 10 objects to update 10000 times. 
/// Then check if all objects are ok /// </remarks> [Test] public void TestXUpdatesWithIndex() { var baseName = GetBaseName(); DeleteBase(baseName); var odb = Open(baseName); // base.store(new IndexedObject()); var clazz = odb.IndexManagerFor<IndexedObject>(); var indexFields = new[] {"name"}; clazz.AddUniqueIndexOn("index", indexFields); odb.Close(); odb = Open(baseName); var start = OdbTime.GetCurrentTimeInMs(); var size = 100; var nbObjects = 10; var nbUpdates = 10; for (var i = 0; i < size; i++) { var io1 = new IndexedObject("IO-" + i + "-0", i + 15 + size, new DateTime()); odb.Store(io1); } odb.Close(); Println("Time of insert " + size + " objects = " + size); var indexes = new[] { "IO-0-0", "IO-10-0", "IO-20-0", "IO-30-0", "IO-40-0", "IO-50-0", "IO-60-0", "IO-70-0", "IO-80-0", "IO-90-0" }; long t1 = 0; long t2 = 0; long t3 = 0; long t4 = 0; long t5 = 0; long t6 = 0; for (var i = 0; i < nbUpdates; i++) { start = OdbTime.GetCurrentTimeInMs(); for (var j = 0; j < nbObjects; j++) { t1 = OdbTime.GetCurrentTimeInMs(); odb = Open(baseName); t2 = OdbTime.GetCurrentTimeInMs(); var q = odb.Query<IndexedObject>(); q.Descend("name").Constrain((object) indexes[j]).Equal(); var os = q.Execute<IndexedObject>(); t3 = OdbTime.GetCurrentTimeInMs(); AssertTrue(((IInternalQuery)q).GetExecutionPlan().UseIndex()); AssertEquals(1, os.Count); // check if index has been used AssertTrue(((IInternalQuery)q).GetExecutionPlan().UseIndex()); var io = os.GetFirst(); if (i > 0) AssertTrue(io.GetName().EndsWith(("-" + (i - 1)))); io.SetName(io.GetName() + "-updated-" + i); odb.Store(io); t4 = OdbTime.GetCurrentTimeInMs(); indexes[j] = io.GetName(); AssertEquals(new Decimal(size), odb.Query<IndexedObject>().Count()); t5 = OdbTime.GetCurrentTimeInMs(); odb.Commit(); odb.Close(); t6 = OdbTime.GetCurrentTimeInMs(); } var end = OdbTime.GetCurrentTimeInMs(); Console.Out.WriteLine("Nb Updates of " + nbObjects + " =" + i + " - " + (end - start) + "ms -- open=" + (t2 - t1) + " - getObjects=" + (t3 - 
t2) + " - update=" + (t4 - t3) + " - count=" + (t5 - t4) + " - close=" + (t6 - t5)); } } } }
using System;
using System.Globalization;
using Hydra.Framework;
using Hydra.Framework.Helpers;

namespace Hydra.Framework.Globalization.Formatters
{
	/// <summary>
	/// Implementation of <see cref="IFormatter"/> that can be used to
	/// format and parse numbers expressed as percentages.
	/// </summary>
	/// <remarks>
	/// <para>
	/// <c>PercentFormatter</c> uses percent-related properties of the
	/// <see cref="NumberFormatInfo"/> to format and parse percentages.
	/// </para>
	/// <para>
	/// If you use one of the constructors that accept culture as a parameter
	/// to create an instance of <c>PercentFormatter</c>, default <c>NumberFormatInfo</c>
	/// for the specified culture will be used.
	/// </para>
	/// <para>
	/// You can also use properties exposed by the <c>PercentFormatter</c> in order
	/// to override some of the default number formatting parameters.
	/// </para>
	/// </remarks>
	public class PercentFormatter : IFormatter
	{
		#region Member Variables

		// .NET has no dedicated percentage parser, so Parse() reuses the
		// currency parser. These tables translate the NumberFormatInfo
		// percent patterns into the currency patterns that have the same
		// symbol/sign placement.
		//
		// PercentPositivePattern (valid 0..3) -> CurrencyPositivePattern:
		//   0 "n %" -> 3 "n $",  1 "n%" -> 1 "n$",
		//   2 "%n"  -> 0 "$n",   3 "% n" -> 2 "$ n"
		// Note: the previous table had only three entries, so pattern 3
		// threw IndexOutOfRangeException in Parse().
		private static readonly int[] positivePatterns = new int[] { 3, 1, 0, 2 };

		// PercentNegativePattern (valid 0..11) -> CurrencyNegativePattern:
		//   0 "-n %" -> 8  "-n $",   1 "-n%"  -> 5  "-n$",
		//   2 "-%n"  -> 1  "-$n",    3 "%-n"  -> 2  "$-n",
		//   4 "%n-"  -> 3  "$n-",    5 "n-%"  -> 6  "n-$",
		//   6 "n%-"  -> 7  "n$-",    7 "-% n" -> 9  "-$ n",
		//   8 "n %-" -> 10 "n $-",   9 "% n-" -> 11 "$ n-",
		//  10 "% -n" -> 12 "$ -n",  11 "n- %" -> 13 "n- $"
		// Note: the previous table covered only patterns 0..2 and threw
		// IndexOutOfRangeException for every other valid value.
		private static readonly int[] negativePatterns = new int[] { 8, 5, 1, 2, 3, 6, 7, 9, 10, 11, 12, 13 };

		// Formatting parameters used by both Format and Parse; never null
		// after construction.
		private NumberFormatInfo formatInfo;

		#endregion

		#region Constructors

		/// <summary>
		/// Initializes a new instance of the <see cref="PercentFormatter"/> class
		/// using default <see cref="NumberFormatInfo"/> for the current thread's culture.
		/// </summary>
		public PercentFormatter() : this(CultureInfo.CurrentCulture)
		{
		}

		/// <summary>
		/// Initializes a new instance of the <see cref="PercentFormatter"/> class
		/// using default <see cref="NumberFormatInfo"/> for the specified culture.
		/// </summary>
		/// <param name="cultureName">The culture name.</param>
		public PercentFormatter(string cultureName) : this(CultureInfo.CreateSpecificCulture(cultureName))
		{
		}

		/// <summary>
		/// Initializes a new instance of the <see cref="PercentFormatter"/> class
		/// using default <see cref="NumberFormatInfo"/> for the specified culture.
		/// </summary>
		/// <param name="culture">The culture.</param>
		public PercentFormatter(CultureInfo culture)
		{
			formatInfo = culture.NumberFormat;
		}

		/// <summary>
		/// Initializes a new instance of the <see cref="PercentFormatter"/> class
		/// using specified <see cref="NumberFormatInfo"/>.
		/// </summary>
		/// <param name="formatInfo">
		/// The <see cref="NumberFormatInfo"/> instance that defines how
		/// numbers are formatted and parsed.
		/// </param>
		public PercentFormatter(NumberFormatInfo formatInfo)
		{
			this.formatInfo = formatInfo;
		}

		#endregion

		#region Properties

		/// <summary>
		/// Gets or sets the number of decimal digits.
		/// </summary>
		/// <value>The number of decimal digits.</value>
		/// <seealso cref="NumberFormatInfo.PercentDecimalDigits"/>
		public int DecimalDigits
		{
			get { return formatInfo.PercentDecimalDigits; }
			set { formatInfo.PercentDecimalDigits = value; }
		}

		/// <summary>
		/// Gets or sets the decimal separator.
		/// </summary>
		/// <value>The decimal separator.</value>
		/// <seealso cref="NumberFormatInfo.PercentDecimalSeparator"/>
		public string DecimalSeparator
		{
			get { return formatInfo.PercentDecimalSeparator; }
			set { formatInfo.PercentDecimalSeparator = value; }
		}

		/// <summary>
		/// Gets or sets the percent group sizes.
		/// </summary>
		/// <value>The percent group sizes.</value>
		/// <seealso cref="NumberFormatInfo.PercentGroupSizes"/>
		public int[] GroupSizes
		{
			get { return formatInfo.PercentGroupSizes; }
			set { formatInfo.PercentGroupSizes = value; }
		}

		/// <summary>
		/// Gets or sets the percent group separator.
		/// </summary>
		/// <value>The percent group separator.</value>
		/// <seealso cref="NumberFormatInfo.PercentGroupSeparator"/>
		public string GroupSeparator
		{
			get { return formatInfo.PercentGroupSeparator; }
			set { formatInfo.PercentGroupSeparator = value; }
		}

		/// <summary>
		/// Gets or sets the negative pattern.
		/// </summary>
		/// <value>The percent negative pattern.</value>
		/// <seealso cref="NumberFormatInfo.PercentNegativePattern"/>
		public int NegativePattern
		{
			get { return formatInfo.PercentNegativePattern; }
			set { formatInfo.PercentNegativePattern = value; }
		}

		/// <summary>
		/// Gets or sets the positive pattern.
		/// </summary>
		/// <value>The percent positive pattern.</value>
		/// <seealso cref="NumberFormatInfo.PercentPositivePattern"/>
		public int PositivePattern
		{
			get { return formatInfo.PercentPositivePattern; }
			set { formatInfo.PercentPositivePattern = value; }
		}

		/// <summary>
		/// Gets or sets the percent symbol.
		/// </summary>
		/// <value>The percent symbol.</value>
		/// <seealso cref="NumberFormatInfo.PercentSymbol"/>
		public string PercentSymbol
		{
			get { return formatInfo.PercentSymbol; }
			set { formatInfo.PercentSymbol = value; }
		}

		/// <summary>
		/// Gets or sets the per mille symbol.
		/// </summary>
		/// <value>The per mille symbol.</value>
		/// <seealso cref="NumberFormatInfo.PerMilleSymbol"/>
		public string PerMilleSymbol
		{
			get { return formatInfo.PerMilleSymbol; }
			set { formatInfo.PerMilleSymbol = value; }
		}

		#endregion

		#region IFormatter Implementation

		/// <summary>
		/// Formats the specified percentage value.
		/// </summary>
		/// <param name="value">The value to format.</param>
		/// <returns>Formatted percentage.</returns>
		/// <exception cref="ArgumentNullException">If <paramref name="value"/> is <c>null</c>.</exception>
		/// <exception cref="ArgumentException">If <paramref name="value"/> is not a number.</exception>
		public string Format(object value)
		{
			Check.That(value, Is.Not.Null);

			if (!NumberUtils.IsNumber(value))
				throw new ArgumentException("PercentFormatter can only be used to format numbers.");

			// The "P" standard format multiplies by 100 and appends the
			// percent symbol according to formatInfo's percent settings.
			return String.Format(formatInfo, "{0:P}", value);
		}

		/// <summary>
		/// Parses the specified percentage value.
		/// </summary>
		/// <param name="value">The percentage value to parse.</param>
		/// <returns>
		/// Parsed percentage value as a <see cref="Double"/>, divided by 100
		/// (so "50 %" parses to 0.5). Returns <c>0d</c> for a null or empty string.
		/// </returns>
		public object Parse(string value)
		{
			if (string.IsNullOrEmpty(value))
				return 0d;

			// There is no percentage parser in .NET, so we configure a
			// currency parser to mimic the percent layout: copy the percent
			// settings into the currency slots and map the percent patterns
			// to their equivalent currency patterns.
			NumberFormatInfo fi = (NumberFormatInfo)formatInfo.Clone();
			fi.CurrencyDecimalDigits = formatInfo.PercentDecimalDigits;
			fi.CurrencyDecimalSeparator = formatInfo.PercentDecimalSeparator;
			fi.CurrencyGroupSeparator = formatInfo.PercentGroupSeparator;
			fi.CurrencyGroupSizes = formatInfo.PercentGroupSizes;
			fi.CurrencyNegativePattern = negativePatterns[formatInfo.PercentNegativePattern];
			fi.CurrencyPositivePattern = positivePatterns[formatInfo.PercentPositivePattern];
			fi.CurrencySymbol = formatInfo.PercentSymbol;

			// Divide by 100 to invert the multiplication Format() performs.
			return Double.Parse(value, NumberStyles.Currency, fi) / 100;
		}

		#endregion
	}
}
//------------------------------------------------------------------------------ // <copyright file="OdbcParameter.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> // <owner current="true" primary="true">[....]</owner> // <owner current="true" primary="false">[....]</owner> //------------------------------------------------------------------------------ using System; using System.ComponentModel; using System.Data; using System.Data.Common; using System.Data.ProviderBase; using System.Data.SqlTypes; using System.Diagnostics; using System.Globalization; using System.Runtime.InteropServices; using System.Text; using System.Threading; namespace System.Data.Odbc { [ TypeConverterAttribute(typeof(System.Data.Odbc.OdbcParameter.OdbcParameterConverter)) ] public sealed partial class OdbcParameter : DbParameter, ICloneable, IDbDataParameter { private bool _hasChanged; private bool _userSpecifiedType; // _typemap User explicit set type or default parameter type // _infertpe _typemap if the user explicitly sets type // otherwise it is infered from the value // _bindtype The actual type used for binding. E.g. 
string substitutes numeric // // set_DbType: _bindtype = _infertype = _typemap = TypeMap.FromDbType(value) // set_OdbcType: _bindtype = _infertype = _typemap = TypeMap.FromOdbcType(value) // // GetParameterType: If _typemap != _infertype AND value != 0 // _bindtype = _infertype = TypeMap.FromSystemType(value.GetType()); // otherwise // _bindtype = _infertype // // Bind: Bind may change _bindtype if the type is not supported through the driver // private TypeMap _typemap; private TypeMap _bindtype; private string _parameterName; private byte _precision; private byte _scale; private bool _hasScale; private ODBC32.SQL_C _boundSqlCType; private ODBC32.SQL_TYPE _boundParameterType; // if we bound already that is the type we used private int _boundSize; private int _boundScale; private IntPtr _boundBuffer; private IntPtr _boundIntbuffer; private TypeMap _originalbindtype; // the original type in case we had to change the bindtype // (e.g. decimal to string) private byte _internalPrecision; private bool _internalShouldSerializeSize; private int _internalSize; private ParameterDirection _internalDirection; private byte _internalScale; private int _internalOffset; internal bool _internalUserSpecifiedType; private object _internalValue; private int _preparedOffset; private int _preparedSize; private int _preparedBufferSize; private object _preparedValue; private int _preparedIntOffset; private int _preparedValueOffset; private ODBC32.SQL_C _prepared_Sql_C_Type; public OdbcParameter() : base() { // uses System.Threading! 
} public OdbcParameter(string name, object value) : this() { ParameterName = name; Value = value; } public OdbcParameter(string name, OdbcType type) : this() { ParameterName = name; OdbcType = type; } public OdbcParameter(string name, OdbcType type, int size) : this() { ParameterName = name; OdbcType = type; Size = size; } public OdbcParameter(string name, OdbcType type, int size, string sourcecolumn) : this() { ParameterName = name; OdbcType = type; Size = size; SourceColumn = sourcecolumn; } [ EditorBrowsableAttribute(EditorBrowsableState.Advanced) ] // MDAC 69508 public OdbcParameter(string parameterName, OdbcType odbcType, int size, ParameterDirection parameterDirection, Boolean isNullable, Byte precision, Byte scale, string srcColumn, DataRowVersion srcVersion, object value ) : this() { // V1.0 everything this.ParameterName = parameterName; this.OdbcType = odbcType; this.Size = size; this.Direction = parameterDirection; this.IsNullable = isNullable; PrecisionInternal = precision; ScaleInternal = scale; this.SourceColumn = srcColumn; this.SourceVersion = srcVersion; this.Value = value; } [ EditorBrowsableAttribute(EditorBrowsableState.Advanced) ] // MDAC 69508 public OdbcParameter(string parameterName, OdbcType odbcType, int size, ParameterDirection parameterDirection, Byte precision, Byte scale, string sourceColumn, DataRowVersion sourceVersion, bool sourceColumnNullMapping, object value) : this() { // V2.0 everything - round trip all browsable properties + precision/scale this.ParameterName = parameterName; this.OdbcType = odbcType; this.Size = size; this.Direction = parameterDirection; this.PrecisionInternal = precision; this.ScaleInternal = scale; this.SourceColumn = sourceColumn; this.SourceVersion = sourceVersion; this.SourceColumnNullMapping = sourceColumnNullMapping; this.Value = value; } override public System.Data.DbType DbType { get { if (_userSpecifiedType) { return _typemap._dbType; } return TypeMap._NVarChar._dbType; // default type } set { if 
((null == _typemap) || (_typemap._dbType != value)) { PropertyTypeChanging(); _typemap = TypeMap.FromDbType(value); _userSpecifiedType = true; } } } public override void ResetDbType() { ResetOdbcType(); } [ DefaultValue(OdbcType.NChar), RefreshProperties(RefreshProperties.All), ResCategoryAttribute(Res.DataCategory_Data), ResDescriptionAttribute(Res.OdbcParameter_OdbcType), System.Data.Common.DbProviderSpecificTypePropertyAttribute(true), ] public OdbcType OdbcType { get { if (_userSpecifiedType) { return _typemap._odbcType; } return TypeMap._NVarChar._odbcType; // default type } set { if ((null == _typemap) || (_typemap._odbcType != value)) { PropertyTypeChanging(); _typemap = TypeMap.FromOdbcType(value); _userSpecifiedType = true; } } } public void ResetOdbcType() { PropertyTypeChanging(); _typemap = null; _userSpecifiedType = false; } internal bool HasChanged { set { _hasChanged = value; } } internal bool UserSpecifiedType { get { return _userSpecifiedType; } } [ ResCategoryAttribute(Res.DataCategory_Data), ResDescriptionAttribute(Res.DbParameter_ParameterName), ] override public string ParameterName { // V1.2.3300, XXXParameter V1.0.3300 get { string parameterName = _parameterName; return ((null != parameterName) ? 
parameterName : ADP.StrEmpty); } set { if (_parameterName != value) { PropertyChanging(); _parameterName = value; } } } [DefaultValue((Byte)0)] // MDAC 65862 [ResCategoryAttribute(Res.DataCategory_Data)] [ResDescriptionAttribute(Res.DbDataParameter_Precision)] public new Byte Precision { get { return PrecisionInternal; } set { PrecisionInternal = value; } } internal byte PrecisionInternal { get { byte precision = _precision; if (0 == precision) { precision = ValuePrecision(Value); } return precision; } set { if (_precision != value) { PropertyChanging(); _precision = value; } } } private bool ShouldSerializePrecision() { return (0 != _precision); } [DefaultValue((Byte)0)] // MDAC 65862 [ResCategoryAttribute(Res.DataCategory_Data)] [ResDescriptionAttribute(Res.DbDataParameter_Scale)] public new Byte Scale { get { return ScaleInternal; } set { ScaleInternal = value; } } internal byte ScaleInternal { get { byte scale = _scale; if (!ShouldSerializeScale(scale)) { // WebData 94688 scale = ValueScale(Value); } return scale; } set { if (_scale != value || !_hasScale) { PropertyChanging(); _scale = value; _hasScale = true; } } } private bool ShouldSerializeScale() { return ShouldSerializeScale(_scale); } private bool ShouldSerializeScale(byte scale) { return _hasScale && ((0 != scale) || ShouldSerializePrecision()); } // returns the count of bytes for the data (ColumnSize argument to SqlBindParameter) private int GetColumnSize(object value, int offset, int ordinal) { if ((ODBC32.SQL_C.NUMERIC == _bindtype._sql_c) && (0 != _internalPrecision)){ return Math.Min((int)_internalPrecision,ADP.DecimalMaxPrecision); } int cch = _bindtype._columnSize; if (0 >= cch) { if (ODBC32.SQL_C.NUMERIC == _typemap._sql_c) { cch = 62; // (DecimalMaxPrecision+sign+terminator)*BytesPerUnicodeCharater } else { cch = _internalSize; if (!_internalShouldSerializeSize || 0x3fffffff<=cch || cch<0) { Debug.Assert((ODBC32.SQL_C.WCHAR == _bindtype._sql_c) || (ODBC32.SQL_C.BINARY == _bindtype._sql_c), 
"not wchar or binary"); if (!_internalShouldSerializeSize && (0 != (ParameterDirection.Output & _internalDirection))) { throw ADP.UninitializedParameterSize(ordinal, _bindtype._type); } if ((null == value) || Convert.IsDBNull(value)) { cch = 0; } else if (value is String) { cch = ((String)value).Length - offset; if ((0 != (ParameterDirection.Output & _internalDirection)) && (0x3fffffff <= _internalSize)) { // restrict output parameters when user set Size to Int32.MaxValue // to the greater of intput size or 8K cch = Math.Max(cch, 4 * 1024); // MDAC 69224 } // the following code causes failure against SQL 6.5 // ERROR [HY104] [Microsoft][ODBC SQL Server Driver]Invalid precision value // // the code causes failure if it is NOT there (remark added by [....]) // it causes failure with jet if it is there // // MDAC 76227: Code is required for japanese client/server tests. // If this causes regressions with Jet please doc here including bug#. ([....]) // if ((ODBC32.SQL_TYPE.CHAR == _bindtype._sql_type) || (ODBC32.SQL_TYPE.VARCHAR == _bindtype._sql_type) || (ODBC32.SQL_TYPE.LONGVARCHAR == _bindtype._sql_type)) { cch = System.Text.Encoding.Default.GetMaxByteCount(cch); } } else if (value is char[]) { cch = ((char[])value).Length - offset; if ((0 != (ParameterDirection.Output & _internalDirection)) && (0x3fffffff <= _internalSize)) { cch = Math.Max(cch, 4 * 1024); // MDAC 69224 } if ((ODBC32.SQL_TYPE.CHAR == _bindtype._sql_type) || (ODBC32.SQL_TYPE.VARCHAR == _bindtype._sql_type) || (ODBC32.SQL_TYPE.LONGVARCHAR == _bindtype._sql_type)) { cch = System.Text.Encoding.Default.GetMaxByteCount(cch); } } else if (value is byte[]) { cch = ((byte[])value).Length - offset; if ((0 != (ParameterDirection.Output & _internalDirection)) && (0x3fffffff <= _internalSize)) { // restrict output parameters when user set Size to Int32.MaxValue // to the greater of intput size or 8K cch = Math.Max(cch, 8 * 1024); // MDAC 69224 } } #if DEBUG else { Debug.Assert(false, "not expecting this"); } 
#endif // Note: ColumnSize should never be 0, // this represents the size of the column on the backend. // // without the following code causes failure //ERROR [HY104] [Microsoft][ODBC Microsoft Access Driver]Invalid precision value cch = Math.Max(2, cch); } } } Debug.Assert((0 <= cch) && (cch < 0x3fffffff), String.Format((IFormatProvider)null, "GetColumnSize: cch = {0} out of range, _internalShouldSerializeSize = {1}, _internalSize = {2}",cch, _internalShouldSerializeSize, _internalSize)); return cch; } // Return the count of bytes for the data (size in bytes for the native buffer) // private int GetValueSize(object value, int offset) { if ((ODBC32.SQL_C.NUMERIC == _bindtype._sql_c) && (0 != _internalPrecision)){ return Math.Min((int)_internalPrecision,ADP.DecimalMaxPrecision); } int cch = _bindtype._columnSize; if (0 >= cch) { bool twobytesperunit = false; if (value is String) { cch = ((string)value).Length - offset; twobytesperunit = true; } else if (value is char[]) { cch = ((char[])value).Length - offset; twobytesperunit = true; } else if (value is byte[]) { cch = ((byte[])value).Length - offset; } else { cch = 0; } if (_internalShouldSerializeSize && (_internalSize>=0) && (_internalSize<cch) && (_bindtype==_originalbindtype)) { cch = _internalSize; } if (twobytesperunit) { cch *= 2; } } Debug.Assert((0 <= cch) && (cch < 0x3fffffff), String.Format((IFormatProvider)null, "GetValueSize: cch = {0} out of range, _internalShouldSerializeSize = {1}, _internalSize = {2}",cch, _internalShouldSerializeSize, _internalSize)); return cch; } // return the count of bytes for the data, used for SQLBindParameter // private int GetParameterSize(object value, int offset, int ordinal) { int ccb = _bindtype._bufferSize; if (0 >= ccb) { if (ODBC32.SQL_C.NUMERIC == _typemap._sql_c) { ccb = 518; // _bindtype would be VarChar ([0-9]?{255} + '-' + '.') * 2 } else { ccb = _internalSize; if (!_internalShouldSerializeSize || (0x3fffffff <= ccb)||(ccb < 0)) { 
Debug.Assert((ODBC32.SQL_C.WCHAR == _bindtype._sql_c) || (ODBC32.SQL_C.BINARY == _bindtype._sql_c), "not wchar or binary"); if ((ccb <= 0) && (0 != (ParameterDirection.Output & _internalDirection))) { throw ADP.UninitializedParameterSize(ordinal, _bindtype._type); } if ((null == value) || Convert.IsDBNull(value)) { if (_bindtype._sql_c == ODBC32.SQL_C.WCHAR) { ccb = 2; // allow for null termination } else { ccb = 0; } } else if (value is String) { ccb = (((String)value).Length - offset ) * 2 + 2; } else if (value is char[]) { ccb = (((char[])value).Length - offset ) * 2 + 2; } else if (value is byte[]) { ccb = ((byte[])value).Length - offset; } #if DEBUG else { Debug.Assert(false, "not expecting this"); } #endif if ((0 != (ParameterDirection.Output & _internalDirection)) && (0x3fffffff <= _internalSize)) { // restrict output parameters when user set Size to Int32.MaxValue // to the greater of intput size or 8K ccb = Math.Max(ccb, 8 * 1024); // MDAC 69224 } } else if (ODBC32.SQL_C.WCHAR == _bindtype._sql_c) { if ((value is String) && (ccb < ((String)value).Length) && (_bindtype == _originalbindtype)) { // silently truncate ... MDAC 84408 ... do not truncate upgraded values ... MDAC 84706 ccb = ((String)value).Length; } ccb = (ccb * 2) + 2; // allow for null termination } else if ((value is byte[]) && (ccb < ((byte[])value).Length) && (_bindtype == _originalbindtype)) { // silently truncate ... MDAC 84408 ... do not truncate upgraded values ... MDAC 84706 ccb = ((byte[])value).Length; } } } Debug.Assert((0 <= ccb) && (ccb < 0x3fffffff), "GetParameterSize: out of range " + ccb); return ccb; } private byte GetParameterPrecision(object value) { if (0 != _internalPrecision && value is decimal) { // from qfe 762 if (_internalPrecision<29) { // from SqlClient ... if (_internalPrecision != 0) { // devnote: If the userspecified precision (_internalPrecision) is less than the actual values precision // we silently adjust the userspecified precision to the values precision. 
byte precision = ((SqlDecimal)(decimal)value).Precision; _internalPrecision = Math.Max(_internalPrecision, precision); // silently adjust the precision } return _internalPrecision; } return ADP.DecimalMaxPrecision; } if ((null == value) || (value is Decimal) || Convert.IsDBNull(value)) { // MDAC 60882 return ADP.DecimalMaxPrecision28; } return 0; } private byte GetParameterScale(object value) { // For any value that is not decimal simply return the Scale // if (!(value is decimal)) { return _internalScale; } // Determin the values scale // If the user specified a lower scale we return the user specified scale, // otherwise the values scale // byte s = (byte)((Decimal.GetBits((Decimal)value)[3] & 0x00ff0000) >> 0x10); if ((_internalScale > 0) && (_internalScale < s)){ return _internalScale; } return s; } //This is required for OdbcCommand.Clone to deep copy the parameters collection object ICloneable.Clone() { return new OdbcParameter(this); } private void CopyParameterInternal () { _internalValue = Value; // we should coerce the parameter value at this time. _internalPrecision = ShouldSerializePrecision() ? PrecisionInternal : ValuePrecision(_internalValue); _internalShouldSerializeSize = ShouldSerializeSize(); _internalSize = _internalShouldSerializeSize ? Size : ValueSize(_internalValue); _internalDirection = Direction; _internalScale = ShouldSerializeScale() ? 
ScaleInternal : ValueScale(_internalValue); _internalOffset = Offset; _internalUserSpecifiedType = UserSpecifiedType; } private void CloneHelper(OdbcParameter destination) { CloneHelperCore(destination); destination._userSpecifiedType = _userSpecifiedType; destination._typemap = _typemap; destination._parameterName = _parameterName; destination._precision = _precision; destination._scale = _scale; destination._hasScale = _hasScale; } internal void ClearBinding() { if (!_userSpecifiedType) { _typemap = null; } _bindtype = null; } internal void PrepareForBind(OdbcCommand command, short ordinal, ref int parameterBufferSize) { // make a snapshot of the current properties. Properties may change while we work on them // CopyParameterInternal(); object value = ProcessAndGetParameterValue(); int offset = _internalOffset; int size = _internalSize; ODBC32.SQL_C sql_c_type; // offset validation based on the values type // if (offset > 0) { if (value is string) { if (offset > ((string)value).Length) { throw ADP.OffsetOutOfRangeException(); } } else if (value is char[]) { if (offset > ((char[])value).Length) { throw ADP.OffsetOutOfRangeException(); } } else if (value is byte[]) { if (offset > ((byte[])value).Length) { throw ADP.OffsetOutOfRangeException(); } } else { // for all other types offset has no meaning // this is important since we might upgrade some types to strings offset = 0; } } // type support verification for certain data types // switch(_bindtype._sql_type) { case ODBC32.SQL_TYPE.DECIMAL: case ODBC32.SQL_TYPE.NUMERIC: if ( !command.Connection.IsV3Driver // for non V3 driver we always do the conversion || !command.Connection.TestTypeSupport(ODBC32.SQL_TYPE.NUMERIC) // otherwise we convert if the driver does not support numeric || command.Connection.TestRestrictedSqlBindType(_bindtype._sql_type)// or the type is not supported ){ // No support for NUMERIC // Change the type _bindtype = TypeMap._VarChar; if ((null != value) && !Convert.IsDBNull(value)) { value = 
// NOTE(review): this range begins mid-way through the parameter-preparation method
// (its start is outside this view); the expression below completes a "value = ..."
// assignment whose left-hand side is above. The switch rewrites _bindtype for ODBC
// types the connected driver cannot bind natively.
((Decimal)value).ToString(CultureInfo.CurrentCulture);
                        size = ((string)value).Length;
                        offset = 0;
                    }
                }
                break;
            case ODBC32.SQL_TYPE.BIGINT:
                // Pre-3.x drivers have no BIGINT support: rebind the value as VARCHAR text.
                if (!command.Connection.IsV3Driver){
                    _bindtype = TypeMap._VarChar;
                    if ((null != value) && !Convert.IsDBNull(value)) {
                        value = ((Int64)value).ToString(CultureInfo.CurrentCulture);
                        size = ((string)value).Length;
                        offset = 0;
                    }
                }
                break;
            case ODBC32.SQL_TYPE.WCHAR: // MDAC 68993
            case ODBC32.SQL_TYPE.WVARCHAR:
            case ODBC32.SQL_TYPE.WLONGVARCHAR:
                // A single Char is bound as a one-character string.
                if (value is Char) {
                    value = value.ToString();
                    size = ((string)value).Length;
                    offset = 0;
                }
                // Driver lacks wide-character support: fall back to the ANSI equivalents.
                if (!command.Connection.TestTypeSupport (_bindtype._sql_type)) {
                    if (ODBC32.SQL_TYPE.WCHAR == _bindtype._sql_type) {
                        _bindtype = TypeMap._Char;
                    }
                    else if (ODBC32.SQL_TYPE.WVARCHAR == _bindtype._sql_type) {
                        _bindtype = TypeMap._VarChar;
                    }
                    else if (ODBC32.SQL_TYPE.WLONGVARCHAR == _bindtype._sql_type) {
                        _bindtype = TypeMap._Text;
                    }
                }
                break;
        } // end switch

        // Conversion from WCHAR to CHAR/VARCHAR/LONGVARCHAR (AnsiString) differs between
        // providers; on pre-V3 drivers we convert the value to ANSI bytes below and bind
        // with sql_c_type = CHAR.
        sql_c_type = _bindtype._sql_c;

        if (!command.Connection.IsV3Driver) {
            if (sql_c_type == ODBC32.SQL_C.WCHAR) {
                sql_c_type = ODBC32.SQL_C.CHAR;

                if (null != value){
                    if (!Convert.IsDBNull(value) && value is string) {
                        // Re-encode the string using the current culture's ANSI code page.
                        int lcid = System.Globalization.CultureInfo.CurrentCulture.LCID;
                        CultureInfo culInfo = new CultureInfo(lcid);
                        Encoding cpe = System.Text.Encoding.GetEncoding(culInfo.TextInfo.ANSICodePage);
                        value = cpe.GetBytes(value.ToString());
                        size = ((byte[])value).Length;
                    }
                }
            }
        };

        int cbParameterSize = GetParameterSize(value, offset, ordinal); // count of bytes for the data, for SQLBindParameter

        // Upgrade the bound type when the given value is bigger than the type's column size.
        switch(_bindtype._sql_type) {
            case ODBC32.SQL_TYPE.VARBINARY: // MDAC 74372
                // VARBINARY supports at most 8000 bytes; larger values bind as LONGVARBINARY.
                if ((cbParameterSize > 8000)) {
                    _bindtype = TypeMap._Image;
                }
                break;
            case ODBC32.SQL_TYPE.VARCHAR: // MDAC 74372
                // VARCHAR supports at most 8000 bytes; larger values bind as LONGVARCHAR.
                if ((cbParameterSize > 8000)) {
                    _bindtype = TypeMap._Text;
                }
                break;
            case ODBC32.SQL_TYPE.WVARCHAR : // MDAC 75099
                // WVARCHAR supports at most 4000 characters; larger values bind as WLONGVARCHAR.
                if ((cbParameterSize > 4000)) {
                    _bindtype = TypeMap._NText;
                }
                break;
        }

        // Cache everything Bind() needs so it does not have to recompute it,
        // and reserve this parameter's slice of the shared native buffer
        // (length indicator followed by the value).
        _prepared_Sql_C_Type = sql_c_type;
        _preparedOffset = offset;
        _preparedSize = size;
        _preparedValue = value;
        _preparedBufferSize = cbParameterSize;
        _preparedIntOffset = parameterBufferSize;
        _preparedValueOffset = _preparedIntOffset + IntPtr.Size;
        parameterBufferSize += (cbParameterSize + IntPtr.Size);
    }

    /// <summary>
    /// Binds this parameter to the statement via SQLBindParameter using the previously
    /// prepared buffer layout. Re-uses the existing binding when nothing relevant changed.
    /// If the driver rejects the type combination (SQLSTATE "07006") the bind is retried
    /// once with a restricted type; <paramref name="allowReentrance"/> guards the retry.
    /// </summary>
    internal void Bind(OdbcStatementHandle hstmt, OdbcCommand command, short ordinal, CNativeBuffer parameterBuffer, bool allowReentrance) {
        ODBC32.RetCode retcode;
        ODBC32.SQL_C sql_c_type = _prepared_Sql_C_Type;
        ODBC32.SQL_PARAM sqldirection = SqlDirectionFromParameterDirection();

        int offset = _preparedOffset;
        int size = _preparedSize;
        object value = _preparedValue;
        int cbValueSize = GetValueSize(value, offset);           // count of bytes for the data
        int cchSize = GetColumnSize(value, offset, ordinal);     // count of bytes for the data, used to allocate the buffer length
        byte precision = GetParameterPrecision(value);
        byte scale = GetParameterScale(value);
        int cbActual;

        HandleRef valueBuffer = parameterBuffer.PtrOffset(_preparedValueOffset, _preparedBufferSize);
        HandleRef intBuffer = parameterBuffer.PtrOffset(_preparedIntOffset, IntPtr.Size);

        // for the numeric datatype we need to do some special case handling ...
        if (ODBC32.SQL_C.NUMERIC == sql_c_type) {
            // for input/output parameters we need to adjust the scale of the input value since
            // the convert function in sqlsrv32 takes this scale for the output parameter
            // (possible bug in sqlsrv32?)
            if ((ODBC32.SQL_PARAM.INPUT_OUTPUT == sqldirection) && (value is Decimal)) {
                if (scale < _internalScale) {
                    while (scale < _internalScale) {
                        value = ((decimal)value ) * 10;
                        scale++;
                    }
                }
            }
            SetInputValue(value, sql_c_type, cbValueSize, precision, 0, parameterBuffer);

            // for output parameters we need to write precision and scale to the buffer since
            // the convert function in sqlsrv32 expects these values there
            // (possible bug in sqlsrv32?)
            if (ODBC32.SQL_PARAM.INPUT != sqldirection) {
                parameterBuffer.WriteInt16(_preparedValueOffset, (short)(((ushort)scale << 8) | (ushort)precision));
            }
        }
        else {
            SetInputValue(value, sql_c_type, cbValueSize, size, offset, parameterBuffer);
        }

        // Try to reuse existing bindings if
        //  the binding is valid (means we already went through binding all parameters)
        //  the parametercollection is bound already
        //  the bindtype ParameterType did not change (forced upgrade)
        if (!_hasChanged
            && (_boundSqlCType == sql_c_type)
            && (_boundParameterType == _bindtype._sql_type)
            && (_boundSize == cchSize)
            && (_boundScale == scale)
            && (_boundBuffer == valueBuffer.Handle)
            && (_boundIntbuffer == intBuffer.Handle)
        ) {
            return;
        }

        //SQLBindParameter
        retcode = hstmt.BindParameter(
            ordinal,                        // Parameter Number
            (short)sqldirection,            // InputOutputType
            sql_c_type,                     // ValueType
            _bindtype._sql_type,            // ParameterType
            (IntPtr)cchSize,                // ColumnSize
            (IntPtr)scale,                  // DecimalDigits
            valueBuffer,                    // ParameterValuePtr
            (IntPtr)_preparedBufferSize,
            intBuffer);                     // StrLen_or_IndPtr

        if (ODBC32.RetCode.SUCCESS != retcode) {
            // "07006" = restricted data type attribute violation: flag the type on the
            // connection and retry the bind exactly once with the restricted type.
            if ("07006" == command.GetDiagSqlState()) {
                Bid.Trace("<odbc.OdbcParameter.Bind|ERR> Call to BindParameter returned errorcode [07006]\n");
                command.Connection.FlagRestrictedSqlBindType(_bindtype._sql_type);
                if (allowReentrance) {
                    this.Bind(hstmt, command, ordinal, parameterBuffer, false);
                    return;
                }
            }
            command.Connection.HandleError(hstmt, retcode);
        }

        // Remember what was bound so an unchanged parameter can skip rebinding next time.
        _hasChanged = false;
        _boundSqlCType = sql_c_type;
        _boundParameterType = _bindtype._sql_type;
        _boundSize = cchSize;
        _boundScale = scale;
        _boundBuffer = valueBuffer.Handle;
        _boundIntbuffer = intBuffer.Handle;

        if (ODBC32.SQL_C.NUMERIC == sql_c_type) {
            // descriptor handle is cached on command wrapper, don't release it
            OdbcDescriptorHandle hdesc = command.GetDescriptorHandle(ODBC32.SQL_ATTR.APP_PARAM_DESC);

            // Set descriptor Type:
            // SQLSetDescField(hdesc, i+1, SQL_DESC_TYPE, (void *)SQL_C_NUMERIC, 0);
            retcode = hdesc.SetDescriptionField1(ordinal, ODBC32.SQL_DESC.TYPE, (IntPtr)ODBC32.SQL_C.NUMERIC);
            if (ODBC32.RetCode.SUCCESS != retcode) {
                command.Connection.HandleError(hstmt, retcode);
            }

            // Set precision:
            // SQLSetDescField(hdesc, i+1, SQL_DESC_PRECISION, (void *)precision, 0);
            cbActual= (int)precision;
            retcode = hdesc.SetDescriptionField1(ordinal, ODBC32.SQL_DESC.PRECISION, (IntPtr)cbActual);
            if (ODBC32.RetCode.SUCCESS != retcode) {
                command.Connection.HandleError(hstmt, retcode);
            }

            // Set scale:
            // SQLSetDescField(hdesc, i+1, SQL_DESC_SCALE, (void *)llen, 0);
            cbActual= (int)scale;
            retcode = hdesc.SetDescriptionField1(ordinal, ODBC32.SQL_DESC.SCALE, (IntPtr)cbActual);
            if (ODBC32.RetCode.SUCCESS != retcode) {
                command.Connection.HandleError(hstmt, retcode);
            }

            // Set data pointer:
            // SQLSetDescField(hdesc, i+1, SQL_DESC_DATA_PTR, (void *)&numeric, 0);
            retcode = hdesc.SetDescriptionField2(ordinal, ODBC32.SQL_DESC.DATA_PTR, valueBuffer);
            if (ODBC32.RetCode.SUCCESS != retcode) {
                command.Connection.HandleError(hstmt, retcode);
            }
        }
    }

    /// <summary>
    /// Reads an output/return parameter value back from the native buffer after execution
    /// and stores it in <see cref="Value"/>.
    /// </summary>
    internal void GetOutputValue(CNativeBuffer parameterBuffer) {
        //Handle any output params

        // No value is available if the user fiddles with the parameters properties
        if (_hasChanged) return;

        if ((null != _bindtype) && (_internalDirection != ParameterDirection.Input)) {
            TypeMap typemap = _bindtype;
            _bindtype = null;

            int cbActual = (int)parameterBuffer.ReadIntPtr(_preparedIntOffset);
            if (ODBC32.SQL_NULL_DATA == cbActual) {
                Value = DBNull.Value;
            }
            else if ((0 <= cbActual) || (cbActual == ODBC32.SQL_NTS)){ // safeguard
                Value = parameterBuffer.MarshalToManaged(_preparedValueOffset, _boundSqlCType, cbActual);

                // CHAR output was bound as ANSI bytes (pre-V3 driver path): decode it back
                // to a string using the current culture's ANSI code page.
                if (_boundSqlCType== ODBC32.SQL_C.CHAR) {
                    if ((null != Value) && !Convert.IsDBNull(Value)) {
                        int lcid = System.Globalization.CultureInfo.CurrentCulture.LCID;
                        CultureInfo culInfo = new CultureInfo(lcid);
                        Encoding cpe = System.Text.Encoding.GetEncoding(culInfo.TextInfo.ANSICodePage);
                        Value = cpe.GetString((Byte[])Value);
                    }
                }

                // If the bind type was downgraded from the declared type (numeric bound as
                // text on a limited driver), convert the text back to Decimal.
                if ((typemap != _typemap) && (null != Value) && !Convert.IsDBNull(Value) && (Value.GetType() != _typemap._type)) {
                    Debug.Assert(ODBC32.SQL_C.NUMERIC == _typemap._sql_c, "unexpected");
                    Value = Decimal.Parse((string)Value, System.Globalization.CultureInfo.CurrentCulture);
                }
            }
        }
    }

    /// <summary>
    /// Coerces the user-supplied value to the parameter's type map when a type was
    /// explicitly specified, or infers the type map from the value otherwise.
    /// Also (re)establishes <c>_originalbindtype</c>/<c>_bindtype</c>.
    /// </summary>
    private object ProcessAndGetParameterValue() {
        object value = _internalValue;
        if (_internalUserSpecifiedType) {
            if ((null != value) && !Convert.IsDBNull(value)) {
                Type valueType = value.GetType();
                if (!valueType.IsArray) {
                    if (valueType != _typemap._type) {
                        try {
                            value = Convert.ChangeType (value, _typemap._type, (System.IFormatProvider)null);
                        }
                        catch(Exception e) {
                            // Don't know which exception to expect from ChangeType, so we
                            // filter out only the serious (non-catchable) ones.
                            if (!ADP.IsCatchableExceptionType(e)) {
                                throw;
                            }
                            throw ADP.ParameterConversionFailed(value, _typemap._type, e); // WebData 75433
                        }
                    }
                }
                else if (valueType == typeof(char[])) {
                    value = new String((char[])value);
                }
            }
        }
        else if (null == _typemap) {
            if ((null == value) || Convert.IsDBNull (value)) {
                _typemap = TypeMap._NVarChar; // default type
            }
            else {
                Type type = value.GetType ();
                _typemap = TypeMap.FromSystemType (type);
            }
        }
        Debug.Assert(null != _typemap, "GetParameterValue: null _typemap");
        _originalbindtype = _bindtype = _typemap;
        return value;
    }

    // Marks the parameter dirty so cached bindings are not reused.
    private void PropertyChanging() {
        _hasChanged = true;
    }

    // A type-affecting property changed; currently identical to PropertyChanging().
    private void PropertyTypeChanging() {
        PropertyChanging();
        //CoercedValue = null;
    }

    /// <summary>
    /// Writes an input value and its length indicator into the native parameter buffer.
    /// For output-only/return-value parameters it only initializes the indicator.
    /// </summary>
    internal void SetInputValue(object value, ODBC32.SQL_C sql_c_type, int cbsize, int sizeorprecision, int offset, CNativeBuffer parameterBuffer) {
        //Handle any input params
        if((ParameterDirection.Input == _internalDirection) || (ParameterDirection.InputOutput == _internalDirection)) {
            //Note: (lang) "null" means to use the servers default (not DBNull).
            //We probably should just not have bound this parameter, period, but that
            //would mess up the users question marks, etc...
            if((null == value)) {
                parameterBuffer.WriteIntPtr(_preparedIntOffset, (IntPtr)ODBC32.SQL_DEFAULT_PARAM);
            }
            else if(Convert.IsDBNull(value)) {
                parameterBuffer.WriteIntPtr(_preparedIntOffset, (IntPtr)ODBC32.SQL_NULL_DATA);
            }
            else {
                switch(sql_c_type) {
                    case ODBC32.SQL_C.CHAR:
                    case ODBC32.SQL_C.WCHAR:
                    case ODBC32.SQL_C.BINARY:
                        //StrLen_or_IndPtr is ignored except for Character or Binary or data.
                        parameterBuffer.WriteIntPtr(_preparedIntOffset, (IntPtr)cbsize);
                        break;
                    default:
                        parameterBuffer.WriteIntPtr(_preparedIntOffset, IntPtr.Zero);
                        break;
                }

                //Place the input param value into the native buffer
                parameterBuffer.MarshalToNative(_preparedValueOffset, value, sql_c_type, sizeorprecision, offset);
            }
        }
        else {
            // always set ouput only and return value parameter values to null when executing
            _internalValue = null;

            //Always initialize the intbuffer (for output params). Since we need to know
            //if/when the parameters are available for output. (ie: when is the buffer valid...)
            //if (_sqldirection != ODBC32.SQL_PARAM.INPUT)
            parameterBuffer.WriteIntPtr(_preparedIntOffset, (IntPtr)ODBC32.SQL_NULL_DATA);
        }
    }

    // Maps the ADO.NET ParameterDirection onto the corresponding ODBC SQL_PARAM constant.
    private ODBC32.SQL_PARAM SqlDirectionFromParameterDirection () {
        switch(_internalDirection) {
            case ParameterDirection.Input:
                return ODBC32.SQL_PARAM.INPUT;
            case ParameterDirection.Output:
            case ParameterDirection.ReturnValue:
                //ODBC doesn't seem to distinguish between output and return value
                //as SQL_PARAM_RETURN_VALUE fails with "Invalid parameter type"
                return ODBC32.SQL_PARAM.OUTPUT;
            case ParameterDirection.InputOutput:
                return ODBC32.SQL_PARAM.INPUT_OUTPUT;
            default:
                Debug.Assert (false, "Unexpected Direction Property on Parameter");
                return ODBC32.SQL_PARAM.INPUT;
        }
    }

    /// <summary>Gets or sets the parameter value; setting clears the cached coerced value.</summary>
    [
    RefreshProperties(RefreshProperties.All),
    ResCategoryAttribute(Res.DataCategory_Data),
    ResDescriptionAttribute(Res.DbParameter_Value),
    TypeConverterAttribute(typeof(StringConverter)),
    ]
    override public object Value { // V1.2.3300, XXXParameter V1.0.3300
        get {
            return _value;
        }
        set {
            _coercedValue = null;
            _value = value;
        }
    }

    // Thin wrappers over the *Core value-metric helpers.
    private byte ValuePrecision(object value) {
        return ValuePrecisionCore(value);
    }

    private byte ValueScale(object value) {
        return ValueScaleCore(value);
    }

    private int ValueSize(object value) {
        return ValueSizeCore(value);
    }

    // implemented as nested class to take advantage of the private/protected ShouldSerializeXXX methods
    sealed internal class OdbcParameterConverter : ExpandableObjectConverter {

        // converter classes should have public ctor
        public OdbcParameterConverter() {
        }

        public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType) {
            if (destinationType == typeof(System.ComponentModel.Design.Serialization.InstanceDescriptor)) {
                return true;
            }
            return base.CanConvertTo(context, destinationType);
        }

        /// <summary>
        /// Serializes an OdbcParameter to the smallest constructor call that round-trips
        /// its non-default properties (designer code generation, MDAC 67321).
        /// </summary>
        public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType) {
            if (destinationType == null) {
                throw ADP.ArgumentNull("destinationType");
            }
            if (destinationType == typeof(System.ComponentModel.Design.Serialization.InstanceDescriptor) && value is OdbcParameter) {
                OdbcParameter p = (OdbcParameter)value; // MDAC 67321 - reducing parameter generated code

                // Build a bit mask describing which properties deviate from their defaults.
                int flags = 0; // if part of the collection - the parametername can't be empty

                if (OdbcType.NChar != p.OdbcType) {
                    flags |= 1;
                }
                if (p.ShouldSerializeSize()) {
                    flags |= 2;
                }
                if (!ADP.IsEmpty(p.SourceColumn)) {
                    flags |= 4;
                }
                if (null != p.Value) {
                    flags |= 8;
                }
                if ((ParameterDirection.Input != p.Direction)
                    || p.IsNullable
                    || p.ShouldSerializePrecision()
                    || p.ShouldSerializeScale()
                    || (DataRowVersion.Current != p.SourceVersion)) {
                    flags |= 16; // V1.0 everything
                }
                if (p.SourceColumnNullMapping) {
                    flags |= 32; // v2.0 everything
                }

                Type[] ctorParams;
                object[] ctorValues;
                switch(flags) {
                    case 0: // ParameterName
                    case 1: // SqlDbType
                        ctorParams = new Type[] { typeof(string), typeof(OdbcType) };
                        ctorValues = new object[] { p.ParameterName, p.OdbcType };
                        break;
                    case 2: // Size
                    case 3: // Size, SqlDbType
                        ctorParams = new Type[] { typeof(string), typeof(OdbcType), typeof(int) };
                        ctorValues = new object[] { p.ParameterName, p.OdbcType, p.Size };
                        break;
                    case 4: // SourceColumn
                    case 5: // SourceColumn, SqlDbType
                    case 6: // SourceColumn, Size
                    case 7: // SourceColumn, Size, SqlDbType
                        ctorParams = new Type[] { typeof(string), typeof(OdbcType), typeof(int), typeof(string) };
                        ctorValues = new object[] { p.ParameterName, p.OdbcType, p.Size, p.SourceColumn };
                        break;
                    case 8: // Value
                        ctorParams = new Type[] { typeof(string), typeof(object) };
                        ctorValues = new object[] { p.ParameterName, p.Value };
                        break;
                    default:
                        if (0 == (32 & flags)) { // V1.0 everything
                            ctorParams = new Type[] {
                                typeof(string), typeof(OdbcType), typeof(int), typeof(ParameterDirection),
                                typeof(bool), typeof(byte), typeof(byte),
                                typeof(string), typeof(DataRowVersion), typeof(object) };
                            ctorValues = new object[] {
                                p.ParameterName, p.OdbcType, p.Size, p.Direction,
                                p.IsNullable, p.PrecisionInternal, p.ScaleInternal,
                                p.SourceColumn, p.SourceVersion, p.Value };
                        }
                        else { // v2.0 everything - round trip all browsable properties + precision/scale
                            ctorParams = new Type[] {
                                typeof(string), typeof(OdbcType), typeof(int), typeof(ParameterDirection),
                                typeof(byte), typeof(byte),
                                typeof(string), typeof(DataRowVersion), typeof(bool), typeof(object) };
                            ctorValues = new object[] {
                                p.ParameterName, p.OdbcType, p.Size, p.Direction,
                                p.PrecisionInternal, p.ScaleInternal,
                                p.SourceColumn, p.SourceVersion, p.SourceColumnNullMapping, p.Value };
                        }
                        break;
                }
                System.Reflection.ConstructorInfo ctor = typeof(OdbcParameter).GetConstructor(ctorParams);
                if (null != ctor) {
                    return new System.ComponentModel.Design.Serialization.InstanceDescriptor(ctor, ctorValues);
                }
            }
            return base.ConvertTo(context, culture, value, destinationType);
        }
    }
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using Agent.Sdk;
using Microsoft.TeamFoundation.Build.WebApi;
using Microsoft.TeamFoundation.DistributedTask.WebApi;
using Microsoft.VisualStudio.Services.Agent.Util;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using System.Linq;
using System.IO;
using Microsoft.VisualStudio.Services.WebApi;
using Pipelines = Microsoft.TeamFoundation.DistributedTask.Pipelines;

namespace Microsoft.VisualStudio.Services.Agent.Worker.Handlers
{
    /// <summary>
    /// Contract for task-execution handlers: carries the endpoints, inputs,
    /// variables and file references a task step needs, and runs the step.
    /// </summary>
    public interface IHandler : IAgentService
    {
        List<ServiceEndpoint> Endpoints { get; set; }
        Dictionary<string, string> Environment { get; set; }
        IExecutionContext ExecutionContext { get; set; }
        Variables RuntimeVariables { get; set; }
        IStepHost StepHost { get; set; }
        Dictionary<string, string> Inputs { get; set; }
        List<SecureFile> SecureFiles { get; set; }
        string TaskDirectory { get; set; }
        Pipelines.TaskStepDefinitionReference Task { get; set; }
        Task RunAsync();
    }

    /// <summary>
    /// Base class for handlers. Provides helpers that project endpoints, secure
    /// files, inputs and variables into the task process environment block.
    /// </summary>
    public abstract class Handler : AgentService
    {
        // On Windows, the maximum supported size of a environment variable value is 32k.
        // You can set environment variables greater then 32K, but Node won't be able to read them.
        private const int _windowsEnvironmentVariableMaximumSize = 32766;

        protected IWorkerCommandManager CommandManager { get; private set; }

        public List<ServiceEndpoint> Endpoints { get; set; }
        public Dictionary<string, string> Environment { get; set; }
        public Variables RuntimeVariables { get; set; }
        public IExecutionContext ExecutionContext { get; set; }
        public IStepHost StepHost { get; set; }
        public Dictionary<string, string> Inputs { get; set; }
        public List<SecureFile> SecureFiles { get; set; }
        public string TaskDirectory { get; set; }
        public Pipelines.TaskStepDefinitionReference Task { get; set; }

        public override void Initialize(IHostContext hostContext)
        {
            ArgUtil.NotNull(hostContext, nameof(hostContext));
            base.Initialize(hostContext);
            CommandManager = hostContext.GetService<IWorkerCommandManager>();
        }

        /// <summary>
        /// Exposes every service endpoint as ENDPOINT_URL_* / ENDPOINT_AUTH_* (and,
        /// for endpoints with a real id, ENDPOINT_DATA_*) environment variables.
        /// </summary>
        protected void AddEndpointsToEnvironment()
        {
            Trace.Entering();
            ArgUtil.NotNull(Endpoints, nameof(Endpoints));
            ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
            ArgUtil.NotNull(ExecutionContext.Endpoints, nameof(ExecutionContext.Endpoints));
            List<ServiceEndpoint> endpoints = Endpoints;

            // Add the endpoints to the environment variable dictionary.
            foreach (ServiceEndpoint endpoint in endpoints)
            {
                ArgUtil.NotNull(endpoint, nameof(endpoint));

                // Key selection: endpoint id, then the well-known system VSS connection
                // name, then the repository id carried in the endpoint data.
                string partialKey = null;
                if (endpoint.Id != Guid.Empty)
                {
                    partialKey = endpoint.Id.ToString();
                }
                else if (string.Equals(endpoint.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase))
                {
                    partialKey = WellKnownServiceEndpointNames.SystemVssConnection.ToUpperInvariant();
                }
                else if (endpoint.Data == null ||
                    !endpoint.Data.TryGetValue(EndpointData.RepositoryId, out partialKey) ||
                    string.IsNullOrEmpty(partialKey))
                {
                    continue; // This should never happen.
                }

                AddEnvironmentVariable(
                    key: $"ENDPOINT_URL_{partialKey}",
                    value: endpoint.Url?.ToString());

                AddEnvironmentVariable(
                    key: $"ENDPOINT_AUTH_{partialKey}",
                    // Note, JsonUtility.ToString will not null ref if the auth object is null.
                    value: JsonUtility.ToString(endpoint.Authorization));

                if (endpoint.Authorization != null && endpoint.Authorization.Scheme != null)
                {
                    AddEnvironmentVariable(
                        key: $"ENDPOINT_AUTH_SCHEME_{partialKey}",
                        value: endpoint.Authorization.Scheme);

                    foreach (KeyValuePair<string, string> pair in endpoint.Authorization.Parameters)
                    {
                        AddEnvironmentVariable(
                            key: $"ENDPOINT_AUTH_PARAMETER_{partialKey}_{pair.Key?.Replace(' ', '_').ToUpperInvariant()}",
                            value: pair.Value);
                    }
                }

                if (endpoint.Id != Guid.Empty)
                {
                    AddEnvironmentVariable(
                        key: $"ENDPOINT_DATA_{partialKey}",
                        // Note, JsonUtility.ToString will not null ref if the data object is null.
                        value: JsonUtility.ToString(endpoint.Data));

                    if (endpoint.Data != null)
                    {
                        foreach (KeyValuePair<string, string> pair in endpoint.Data)
                        {
                            AddEnvironmentVariable(
                                key: $"ENDPOINT_DATA_{partialKey}_{pair.Key?.Replace(' ', '_').ToUpperInvariant()}",
                                value: pair.Value);
                        }
                    }
                }
            }
        }

        /// <summary>Exposes each secure file's name and download ticket as environment variables.</summary>
        protected void AddSecureFilesToEnvironment()
        {
            Trace.Entering();
            ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
            ArgUtil.NotNull(SecureFiles, nameof(SecureFiles));
            List<SecureFile> secureFiles = SecureFiles;

            // Add the secure files to the environment variable dictionary.
            foreach (SecureFile secureFile in secureFiles)
            {
                if (secureFile != null && secureFile.Id != Guid.Empty)
                {
                    string partialKey = secureFile.Id.ToString();
                    AddEnvironmentVariable(
                        key: $"SECUREFILE_NAME_{partialKey}",
                        value: secureFile.Name);
                    AddEnvironmentVariable(
                        key: $"SECUREFILE_TICKET_{partialKey}",
                        value: secureFile.Ticket);
                }
            }
        }

        /// <summary>Exposes each task input as an INPUT_* environment variable.</summary>
        protected void AddInputsToEnvironment()
        {
            // Validate args.
            Trace.Entering();
            ArgUtil.NotNull(Inputs, nameof(Inputs));

            // Add the inputs to the environment variable dictionary.
            foreach (KeyValuePair<string, string> pair in Inputs)
            {
                AddEnvironmentVariable(
                    key: $"INPUT_{pair.Key?.Replace(' ', '_').ToUpperInvariant()}",
                    value: pair.Value);
            }
        }

        /// <summary>
        /// Exposes public variables directly and secret variables as SECRET_*;
        /// optionally also publishes the JSON name lists the task SDK consumes.
        /// </summary>
        protected void AddVariablesToEnvironment(bool excludeNames = false, bool excludeSecrets = false)
        {
            // Validate args.
            Trace.Entering();
            ArgUtil.NotNull(Environment, nameof(Environment));
            ArgUtil.NotNull(RuntimeVariables, nameof(RuntimeVariables));

            // Add the public variables.
            var names = new List<string>();
            foreach (KeyValuePair<string, string> pair in RuntimeVariables.Public)
            {
                // Add "agent.jobstatus" using the unformatted name and formatted name.
                if (string.Equals(pair.Key, Constants.Variables.Agent.JobStatus, StringComparison.OrdinalIgnoreCase))
                {
                    AddEnvironmentVariable(pair.Key, pair.Value);
                }

                // Add the variable using the formatted name.
                string formattedKey = (pair.Key ?? string.Empty).Replace('.', '_').Replace(' ', '_').ToUpperInvariant();
                AddEnvironmentVariable(formattedKey, pair.Value);

                // Store the name.
                names.Add(pair.Key ?? string.Empty);
            }

            // Add the public variable names.
            if (!excludeNames)
            {
                AddEnvironmentVariable("VSTS_PUBLIC_VARIABLES", JsonUtility.ToString(names));
            }

            if (!excludeSecrets)
            {
                // Add the secret variables.
                var secretNames = new List<string>();
                foreach (KeyValuePair<string, string> pair in RuntimeVariables.Private)
                {
                    // Add the variable using the formatted name.
                    string formattedKey = (pair.Key ?? string.Empty).Replace('.', '_').Replace(' ', '_').ToUpperInvariant();
                    AddEnvironmentVariable($"SECRET_{formattedKey}", pair.Value);

                    // Store the name.
                    secretNames.Add(pair.Key ?? string.Empty);
                }

                // Add the secret variable names.
                if (!excludeNames)
                {
                    AddEnvironmentVariable("VSTS_SECRET_VARIABLES", JsonUtility.ToString(secretNames));
                }
            }
        }

        /// <summary>
        /// Stores a value in the task environment (null becomes empty) and warns when
        /// it exceeds the Windows limit that Node cannot read past.
        /// </summary>
        protected void AddEnvironmentVariable(string key, string value)
        {
            ArgUtil.NotNullOrEmpty(key, nameof(key));
            Trace.Verbose($"Setting env '{key}' to '{value}'.");
            Environment[key] = value ?? string.Empty;

            // A null value stores an empty string (length 0), so value is non-null here.
            if (PlatformUtil.RunningOnWindows && Environment[key].Length > _windowsEnvironmentVariableMaximumSize)
            {
                ExecutionContext.Warning(StringUtil.Loc("EnvironmentVariableExceedsMaximumLength", key, value.Length, _windowsEnvironmentVariableMaximumSize));
            }
        }

        /// <summary>Exposes public and private task variables as VSTS_TASKVARIABLE_* environment variables.</summary>
        protected void AddTaskVariablesToEnvironment()
        {
            // Validate args.
            Trace.Entering();
            ArgUtil.NotNull(ExecutionContext.TaskVariables, nameof(ExecutionContext.TaskVariables));

            foreach (KeyValuePair<string, string> pair in ExecutionContext.TaskVariables.Public)
            {
                // Add the variable using the formatted name.
                string formattedKey = (pair.Key ?? string.Empty).Replace('.', '_').Replace(' ', '_').ToUpperInvariant();
                AddEnvironmentVariable($"VSTS_TASKVARIABLE_{formattedKey}", pair.Value);
            }

            foreach (KeyValuePair<string, string> pair in ExecutionContext.TaskVariables.Private)
            {
                // Add the variable using the formatted name.
                string formattedKey = (pair.Key ?? string.Empty).Replace('.', '_').Replace(' ', '_').ToUpperInvariant();
                AddEnvironmentVariable($"VSTS_TASKVARIABLE_{formattedKey}", pair.Value);
            }
        }

        /// <summary>
        /// Prepends the context's PrependPath entries to PATH, or to the container
        /// step host's PrependPath when running inside a container.
        /// </summary>
        protected void AddPrependPathToEnvironment()
        {
            // Validate args.
            Trace.Entering();
            ArgUtil.NotNull(ExecutionContext.PrependPath, nameof(ExecutionContext.PrependPath));
            if (ExecutionContext.PrependPath.Count == 0)
            {
                return;
            }

            // Prepend path.
            var containerStepHost = StepHost as ContainerStepHost;
            if (containerStepHost != null)
            {
                List<string> prepend = new List<string>();
                foreach (var path in ExecutionContext.PrependPath)
                {
                    prepend.Add(ExecutionContext.TranslatePathForStepTarget(path));
                }
                // Reversed so the entry prepended last ends up first on the PATH.
                containerStepHost.PrependPath = string.Join(Path.PathSeparator.ToString(), prepend.Reverse<string>());
            }
            else
            {
                string prepend = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
                string taskEnvPATH;
                Environment.TryGetValue(Constants.PathVariable, out taskEnvPATH);
                string originalPath = RuntimeVariables.Get(Constants.PathVariable) ?? // Prefer a job variable.
                    taskEnvPATH ?? // Then a task-environment variable.
                    System.Environment.GetEnvironmentVariable(Constants.PathVariable) ?? // Then an environment variable.
                    string.Empty;
                string newPath = PathUtil.PrependPath(prepend, originalPath);
                AddEnvironmentVariable(Constants.PathVariable, newPath);
            }
        }
    }
}
using System;
using System.CodeDom.Compiler;
using System.ComponentModel;
using System.Runtime.Serialization;
using Microsoft.Xrm.Sdk;

namespace PowerShellLibrary.Crm.CmdletProviders
{
    /// <summary>
    /// Early-bound wrapper for the Dynamics CRM "plugintracelog" entity.
    /// Generated by CrmSvcUtil; every attribute property is a read-only view over
    /// the entity's attribute collection. Avoid hand-editing generated members —
    /// regenerate instead.
    /// </summary>
    [DataContract]
    [GeneratedCode("CrmSvcUtil", "7.1.0001.3108")]
    [Microsoft.Xrm.Sdk.Client.EntityLogicalName("plugintracelog")]
    public class PluginTraceLog : Entity, INotifyPropertyChanging, INotifyPropertyChanged
    {
        public const string EntityLogicalName = "plugintracelog";
        public const int EntityTypeCode = 4619;

        [AttributeLogicalName("configuration")]
        public string Configuration
        {
            get { return this.GetAttributeValue<string>("configuration"); }
        }

        [AttributeLogicalName("correlationid")]
        public Guid? CorrelationId
        {
            get { return this.GetAttributeValue<Guid?>("correlationid"); }
        }

        [AttributeLogicalName("createdby")]
        public EntityReference CreatedBy
        {
            get { return this.GetAttributeValue<EntityReference>("createdby"); }
        }

        [AttributeLogicalName("createdon")]
        public DateTime? CreatedOn
        {
            get { return this.GetAttributeValue<DateTime?>("createdon"); }
        }

        [AttributeLogicalName("createdonbehalfby")]
        public EntityReference CreatedOnBehalfBy
        {
            get { return this.GetAttributeValue<EntityReference>("createdonbehalfby"); }
        }

        [AttributeLogicalName("depth")]
        public int? Depth
        {
            get { return this.GetAttributeValue<int?>("depth"); }
        }

        [AttributeLogicalName("exceptiondetails")]
        public string ExceptionDetails
        {
            get { return this.GetAttributeValue<string>("exceptiondetails"); }
        }

        [AttributeLogicalName("issystemcreated")]
        public bool? IsSystemCreated
        {
            get { return this.GetAttributeValue<bool?>("issystemcreated"); }
        }

        [AttributeLogicalName("messageblock")]
        public string MessageBlock
        {
            get { return this.GetAttributeValue<string>("messageblock"); }
        }

        [AttributeLogicalName("messagename")]
        public string MessageName
        {
            get { return this.GetAttributeValue<string>("messagename"); }
        }

        [AttributeLogicalName("mode")]
        public OptionSetValue Mode
        {
            get { return this.GetAttributeValue<OptionSetValue>("mode"); }
        }

        [AttributeLogicalName("operationtype")]
        public OptionSetValue OperationType
        {
            get { return this.GetAttributeValue<OptionSetValue>("operationtype"); }
        }

        [AttributeLogicalName("organizationid")]
        public Guid? OrganizationId
        {
            get { return this.GetAttributeValue<Guid?>("organizationid"); }
        }

        // Performance counters recorded by the plugin pipeline.
        [AttributeLogicalName("performanceconstructorduration")]
        public int? PerformanceConstructorDuration
        {
            get { return this.GetAttributeValue<int?>("performanceconstructorduration"); }
        }

        [AttributeLogicalName("performanceconstructorstarttime")]
        public DateTime? PerformanceConstructorStartTime
        {
            get { return this.GetAttributeValue<DateTime?>("performanceconstructorstarttime"); }
        }

        [AttributeLogicalName("performanceexecutionduration")]
        public int? PerformanceExecutionDuration
        {
            get { return this.GetAttributeValue<int?>("performanceexecutionduration"); }
        }

        [AttributeLogicalName("performanceexecutionstarttime")]
        public DateTime? PerformanceExecutionStartTime
        {
            get { return this.GetAttributeValue<DateTime?>("performanceexecutionstarttime"); }
        }

        [AttributeLogicalName("persistencekey")]
        public Guid? PersistenceKey
        {
            get { return this.GetAttributeValue<Guid?>("persistencekey"); }
        }

        [AttributeLogicalName("pluginstepid")]
        public Guid? PluginStepId
        {
            get { return this.GetAttributeValue<Guid?>("pluginstepid"); }
        }

        [AttributeLogicalName("plugintracelogid")]
        public Guid? PluginTraceLogId
        {
            get { return this.GetAttributeValue<Guid?>("plugintracelogid"); }
        }

        // Primary key maps onto the base Entity.Id.
        [AttributeLogicalName("plugintracelogid")]
        public override Guid Id
        {
            get { return base.Id; }
            set { base.Id = value; }
        }

        [AttributeLogicalName("primaryentity")]
        public string PrimaryEntity
        {
            get { return this.GetAttributeValue<string>("primaryentity"); }
        }

        [AttributeLogicalName("profile")]
        public string Profile
        {
            get { return this.GetAttributeValue<string>("profile"); }
        }

        [AttributeLogicalName("requestid")]
        public Guid? RequestId
        {
            get { return this.GetAttributeValue<Guid?>("requestid"); }
        }

        [AttributeLogicalName("secureconfiguration")]
        public string SecureConfiguration
        {
            get { return this.GetAttributeValue<string>("secureconfiguration"); }
        }

        [AttributeLogicalName("typename")]
        public string TypeName
        {
            get { return this.GetAttributeValue<string>("typename"); }
        }

        // N:1 relationships to the creating system users.
        [RelationshipSchemaName("createdby_plugintracelog")]
        [AttributeLogicalName("createdby")]
        public SystemUser createdby_plugintracelog
        {
            get { return this.GetRelatedEntity<SystemUser>("createdby_plugintracelog", new EntityRole?()); }
        }

        [RelationshipSchemaName("lk_plugintracelogbase_createdonbehalfby")]
        [AttributeLogicalName("createdonbehalfby")]
        public SystemUser lk_plugintracelogbase_createdonbehalfby
        {
            get { return this.GetRelatedEntity<SystemUser>("lk_plugintracelogbase_createdonbehalfby", new EntityRole?()); }
        }

        public event PropertyChangedEventHandler PropertyChanged;

        public event PropertyChangingEventHandler PropertyChanging;

        public PluginTraceLog()
            : base("plugintracelog")
        {
        }

        private void OnPropertyChanged(string propertyName)
        {
            if (this.PropertyChanged == null)
                return;
            this.PropertyChanged((object) this, new PropertyChangedEventArgs(propertyName));
        }

        private void OnPropertyChanging(string propertyName)
        {
            if (this.PropertyChanging == null)
                return;
            this.PropertyChanging((object) this, new PropertyChangingEventArgs(propertyName));
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using System.Collections;
using System.IO;
using System.Resources;
using System.Reflection;
using System.Text.RegularExpressions;
using NUnit.Framework;

namespace DDay.iCal.Test
{
    /// <summary>
    ///   NUnit tests for VTODO (to-do) handling: active/completed state at given
    ///   points in time, and recurrence evaluation. Each test loads a fixture
    ///   calendar from Calendars/Todo/ and checks the first VTODO it contains.
    /// </summary>
    [TestFixture]
    public class TodoTest
    {
        // Time zone ID applied to every probe date before querying the todo.
        private string tzid;

        // Loads an iCalendar fixture from the Android asset manager
        // (Android.MainActivity.am) rather than the plain file system.
        IICalendarCollection iCalendar_LoadFromFile(string path)
        {
            using (StreamReader sr = new StreamReader(Android.MainActivity.am.Open(path)))
            {
                return iCalendar.LoadFromStream(sr);
            }
        }

        [TestFixtureSetUp]
        public void InitAll()
        {
            tzid = "US-Eastern";
        }

        /// <summary>
        ///   Drives IsActive() checks: <paramref name="items"/> is a flat list of
        ///   alternating (iCalDateTime probe, bool expectedActive) pairs. If
        ///   <paramref name="numPeriods"/> is supplied, its first element is also
        ///   asserted against the evaluator's period count after evaluation.
        /// </summary>
        public void TestTodoActive(string calendar, ArrayList items, params int[] numPeriods)
        {
            IICalendar iCal = iCalendar_LoadFromFile(@"Calendars/Todo/" + calendar)[0];
            ProgramTest.TestCal(iCal);
            ITodo todo = iCal.Todos[0];

            // items holds pairs: [date, expected-active, date, expected-active, ...]
            for (int i = 0; i < items.Count; i += 2)
            {
                iCalDateTime dt = (iCalDateTime)items[i];
                dt.TZID = tzid;

                bool tf = (bool)items[i + 1];
                if (tf)
                    Assert.IsTrue(todo.IsActive(dt), "Todo should be active at " + dt);
                else
                    Assert.IsFalse(todo.IsActive(dt), "Todo should not be active at " + dt);
            }

            if (numPeriods != null && numPeriods.Length > 0)
            {
                IEvaluator evaluator = todo.GetService(typeof(IEvaluator)) as IEvaluator;
                Assert.IsNotNull(evaluator);
                Assert.AreEqual(
                    numPeriods[0],
                    evaluator.Periods.Count,
                    "Todo should have " + numPeriods[0] +
                    " occurrences after evaluation; it had " + evaluator.Periods.Count);
            }
        }

        /// <summary>
        ///   Drives IsCompleted() checks; same alternating pair layout as
        ///   TestTodoActive, but the expected value is the completed state.
        /// </summary>
        public void TestTodoCompleted(string calendar, ArrayList items)
        {
            IICalendar iCal = iCalendar_LoadFromFile(@"Calendars/Todo/" + calendar)[0];
            ProgramTest.TestCal(iCal);
            ITodo todo = iCal.Todos[0];

            for (int i = 0; i < items.Count; i += 2)
            {
                IDateTime dt = (IDateTime)items[i];
                dt.TZID = tzid;

                bool tf = (bool)items[i + 1];
                if (tf)
                    Assert.IsTrue(todo.IsCompleted(dt), "Todo should be completed at " + dt);
                else
                    Assert.IsFalse(todo.IsCompleted(dt), "Todo should not be completed at " + dt);
            }
        }

        // A todo with no restricting properties stays active indefinitely.
        [Test, Category("Todo")]
        public void Todo1()
        {
            ArrayList items = new ArrayList();
            items.Add(new iCalDateTime(2200, 12, 31, 0, 0, 0)); items.Add(true);

            TestTodoActive("Todo1.ics", items);
        }

        // Active only from its start time (2006-07-28 09:00) onward.
        [Test, Category("Todo")]
        public void Todo2()
        {
            ArrayList items = new ArrayList();
            items.Add(new iCalDateTime(2006, 7, 28, 8, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 28, 8, 59, 59)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 28, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2200, 12, 31, 0, 0, 0)); items.Add(true);

            TestTodoActive("Todo2.ics", items);
        }

        // This fixture's todo is never active at the probed times.
        [Test, Category("Todo")]
        public void Todo3()
        {
            ArrayList items = new ArrayList();
            items.Add(new iCalDateTime(2006, 7, 28, 8, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2200, 12, 31, 0, 0, 0)); items.Add(false);

            TestTodoActive("Todo3.ics", items);
        }

        // Completed-state checks (uses TestTodoCompleted, not TestTodoActive).
        [Test, Category("Todo")]
        public void Todo4()
        {
            ArrayList items = new ArrayList();
            items.Add(new iCalDateTime(2006, 07, 28, 8, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 07, 28, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 8, 1, 0, 0, 0)); items.Add(true);

            TestTodoCompleted("Todo4.ics", items);
        }

        // Becomes active at 2006-08-04 09:00 and stays active afterwards.
        [Test, Category("Todo")]
        public void Todo5()
        {
            ArrayList items = new ArrayList();
            items.Add(new iCalDateTime(2006, 7, 28, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 29, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 30, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 31, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 1, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 2, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 3, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 4, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 8, 5, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 8, 6, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 8, 7, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 8, 8, 9, 0, 0)); items.Add(true);

            TestTodoActive("Todo5.ics", items);
        }

        // Same expectations as Todo5, against the Todo6.ics fixture
        // (the two fixtures differ; the observable activity does not).
        [Test, Category("Todo")]
        public void Todo6()
        {
            ArrayList items = new ArrayList();
            items.Add(new iCalDateTime(2006, 7, 28, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 29, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 30, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 31, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 1, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 2, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 3, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 4, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 8, 5, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 8, 6, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 8, 7, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 8, 8, 9, 0, 0)); items.Add(true);

            TestTodoActive("Todo6.ics", items);
        }

        // Inactive through August; becomes active at 2006-09-01 09:00.
        [Test, Category("Todo")]
        public void Todo7()
        {
            ArrayList items = new ArrayList();
            items.Add(new iCalDateTime(2006, 7, 28, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 29, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 30, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 31, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 1, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 2, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 3, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 4, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 5, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 6, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 30, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 31, 9, 0, 0)); items.Add(false);
            // NOTE(review): 8/31 is probed twice in the original data; kept as-is.
            items.Add(new iCalDateTime(2006, 8, 31, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 9, 1, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 9, 2, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 9, 3, 9, 0, 0)); items.Add(true);

            TestTodoActive("Todo7.ics", items);
        }

        // Checks both the occurrence count and each start time returned by
        // GetOccurrences() for the Todo7 fixture over one year.
        [Test, Category("Todo")]
        public void Todo7_1()
        {
            IICalendar iCal = iCalendar_LoadFromFile(@"Calendars/Todo/Todo7.ics")[0];
            ITodo todo = iCal.Todos[0];

            // Expected occurrence start times, in order.
            ArrayList items = new ArrayList();
            items.Add(new iCalDateTime(2006, 7, 28, 9, 0, 0, tzid));
            items.Add(new iCalDateTime(2006, 8, 4, 9, 0, 0, tzid));
            items.Add(new iCalDateTime(2006, 9, 1, 9, 0, 0, tzid));
            items.Add(new iCalDateTime(2006, 10, 6, 9, 0, 0, tzid));
            items.Add(new iCalDateTime(2006, 11, 3, 9, 0, 0, tzid));
            items.Add(new iCalDateTime(2006, 12, 1, 9, 0, 0, tzid));
            items.Add(new iCalDateTime(2007, 1, 5, 9, 0, 0, tzid));
            items.Add(new iCalDateTime(2007, 2, 2, 9, 0, 0, tzid));
            items.Add(new iCalDateTime(2007, 3, 2, 9, 0, 0, tzid));
            items.Add(new iCalDateTime(2007, 4, 6, 9, 0, 0, tzid));

            IList<Occurrence> occurrences = todo.GetOccurrences(
                new iCalDateTime(2006, 7, 1, 9, 0, 0),
                new iCalDateTime(2007, 7, 1, 9, 0, 0));

            // FIXME: Count is not properly restricting recurrences to 10.
            // What's going wrong here?
            Assert.AreEqual(
                items.Count,
                occurrences.Count,
                "TODO should have " + items.Count + " occurrences; it has " + occurrences.Count);

            for (int i = 0; i < items.Count; i++)
                Assert.AreEqual(items[i], occurrences[i].Period.StartTime,
                    "TODO should occur at " + items[i] + ", but does not.");
        }

        // Inactive until 2007-02-02 09:00 (probed down to one second before).
        [Test, Category("Todo")]
        public void Todo8()
        {
            ArrayList items = new ArrayList();
            items.Add(new iCalDateTime(2006, 7, 28, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 29, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 30, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 31, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 1, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 2, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 3, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 4, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 5, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 6, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 30, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 31, 9, 0, 0)); items.Add(false);
            // NOTE(review): 8/31 is probed twice in the original data; kept as-is.
            items.Add(new iCalDateTime(2006, 8, 31, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 9, 1, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 9, 2, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 9, 3, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 10, 10, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 11, 15, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 12, 5, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2007, 1, 3, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2007, 1, 4, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2007, 1, 5, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2007, 1, 6, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2007, 1, 7, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2007, 2, 1, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2007, 2, 2, 8, 59, 59)); items.Add(false);
            items.Add(new iCalDateTime(2007, 2, 2, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2007, 2, 3, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2007, 2, 4, 9, 0, 0)); items.Add(true);

            TestTodoActive("Todo8.ics", items);
        }

        // Becomes active at 2006-09-08 09:00; also asserts the evaluator
        // produced exactly 3 periods (trailing argument to TestTodoActive).
        [Test, Category("Todo")]
        public void Todo9()
        {
            ArrayList items = new ArrayList();
            items.Add(new iCalDateTime(2006, 7, 28, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 29, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 7, 30, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 17, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 18, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 8, 19, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 9, 7, 9, 0, 0)); items.Add(false);
            items.Add(new iCalDateTime(2006, 9, 8, 8, 59, 59)); items.Add(false);
            items.Add(new iCalDateTime(2006, 9, 8, 9, 0, 0)); items.Add(true);
            items.Add(new iCalDateTime(2006, 9, 9, 9, 0, 0)); items.Add(true);

            TestTodoActive("Todo9.ics", items, 3);
        }

        // FIXME: re-implement TODO10 (build a VTODO with a relative-trigger
        // VALARM, assert its serialized form, and poll the alarm occurrence)
        // and TODO11 (same, with an absolute DATE-TIME trigger). The original
        // implementations were commented out pending API changes and have been
        // removed; recover them from version control when re-implementing.
    }
}
/* * Copyright (c) Contributors, http://aurora-sim.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Aurora-Sim Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Aurora.DataManager;
using Aurora.Framework;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Services.Interfaces;
using ChatSessionMember = Aurora.Framework.ChatSessionMember;

namespace Aurora.Modules.Groups
{
    /// <summary>
    ///   Region module that forwards group operations to the Aurora data-layer
    ///   IGroupsServiceConnector and keeps the in-memory group chat sessions.
    /// </summary>
    public class AuroraDataGroupsServicesConnectorModule : ISharedRegionModule, IGroupsServicesConnector
    {
        // In-memory conference/chat sessions, keyed by session ID.
        private readonly Dictionary<UUID, ChatSession> ChatSessions = new Dictionary<UUID, ChatSession>();
        private IGroupsServiceConnector GroupsConnector;
        private IUserAccountService m_accountService;
        private bool m_connectorEnabled;

        #region IGroupsServicesConnector Members

        /// <summary>
        ///   Create a Group, including Everyone and Owners Role, place FounderID in both groups, select Owner as selected role, and newly created group as agent's active role.
        /// </summary>
        public UUID CreateGroup(UUID requestingAgentID, string name, string charter, bool showInList,
                                UUID insigniaID, int membershipFee, bool openEnrollment,
                                bool allowPublish, bool maturePublish, UUID founderID)
        {
            UUID GroupID = UUID.Random();
            UUID OwnerRoleID = UUID.Random();
            // FIX: the membershipFee parameter was previously dropped (a literal 0
            // was passed to the connector), so every group was created fee-less.
            GroupsConnector.CreateGroup(GroupID, name, charter, showInList, insigniaID, membershipFee,
                                        openEnrollment, allowPublish, maturePublish, founderID, OwnerRoleID);
            return GroupID;
        }

        /// <summary>Update a group's profile; bool flags are stored as 0/1 ints.</summary>
        public void UpdateGroup(UUID requestingAgentID, UUID groupID, string charter, bool showInList,
                                UUID insigniaID, int membershipFee, bool openEnrollment, bool allowPublish,
                                bool maturePublish)
        {
            GroupsConnector.UpdateGroup(requestingAgentID, groupID, charter, showInList ? 1 : 0,
                                        insigniaID, membershipFee, openEnrollment ? 1 : 0,
                                        allowPublish ? 1 : 0, maturePublish ? 1 : 0);
        }

        /// <summary>Add a role to a group.</summary>
        public void AddGroupRole(UUID requestingAgentID, UUID groupID, UUID roleID, string name,
                                 string description, string title, ulong powers)
        {
            GroupsConnector.AddRoleToGroup(requestingAgentID, groupID, roleID, name, description, title, powers);
        }

        /// <summary>Remove a role from a group.</summary>
        public void RemoveGroupRole(UUID requestingAgentID, UUID groupID, UUID roleID)
        {
            // NOTE(review): the connector takes (roleID, groupID) in this order —
            // reversed relative to this method's own parameters; preserved as-is.
            GroupsConnector.RemoveRoleFromGroup(requestingAgentID, roleID, groupID);
        }

        /// <summary>Update an existing role's name/description/title/powers.</summary>
        public void UpdateGroupRole(UUID requestingAgentID, UUID groupID, UUID roleID, string name,
                                    string description, string title, ulong powers)
        {
            GroupsConnector.UpdateRole(requestingAgentID, groupID, roleID, name, description, title, powers);
        }

        public GroupRecord GetGroupRecord(UUID requestingAgentID, UUID GroupID, string GroupName)
        {
            return GroupsConnector.GetGroupRecord(requestingAgentID, GroupID, GroupName);
        }

        public string SetAgentActiveGroup(UUID requestingAgentID, UUID AgentID, UUID GroupID)
        {
            // requestingAgentID is not forwarded by the underlying connector call.
            return GroupsConnector.SetAgentActiveGroup(AgentID, GroupID);
        }

        public string SetAgentActiveGroupRole(UUID requestingAgentID, UUID AgentID, UUID GroupID, UUID RoleID)
        {
            return GroupsConnector.SetAgentGroupSelectedRole(AgentID, GroupID, RoleID);
        }

        public void SetAgentGroupInfo(UUID requestingAgentID, UUID AgentID, UUID GroupID,
                                      bool AcceptNotices, bool ListInProfile)
        {
            GroupsConnector.SetAgentGroupInfo(requestingAgentID, AgentID, GroupID,
                                              AcceptNotices ? 1 : 0, ListInProfile ? 1 : 0);
        }

        public void AddAgentToGroupInvite(UUID requestingAgentID, UUID inviteID, UUID groupID,
                                          UUID roleID, UUID agentID, string FromAgentName)
        {
            GroupsConnector.AddAgentGroupInvite(requestingAgentID, inviteID, groupID, roleID,
                                                agentID, FromAgentName);
        }

        public GroupInviteInfo GetAgentToGroupInvite(UUID requestingAgentID, UUID inviteID)
        {
            return GroupsConnector.GetAgentToGroupInvite(requestingAgentID, inviteID);
        }

        public void RemoveAgentToGroupInvite(UUID requestingAgentID, UUID inviteID)
        {
            GroupsConnector.RemoveAgentInvite(requestingAgentID, inviteID);
        }

        public void AddAgentToGroup(UUID requestingAgentID, UUID AgentID, UUID GroupID, UUID RoleID)
        {
            GroupsConnector.AddAgentToGroup(requestingAgentID, AgentID, GroupID, RoleID);
        }

        public bool RemoveAgentFromGroup(UUID requestingAgentID, UUID AgentID, UUID GroupID)
        {
            return GroupsConnector.RemoveAgentFromGroup(requestingAgentID, AgentID, GroupID);
        }

        public void AddAgentToGroupRole(UUID requestingAgentID, UUID AgentID, UUID GroupID, UUID RoleID)
        {
            GroupsConnector.AddAgentToRole(requestingAgentID, AgentID, GroupID, RoleID);
        }

        public void RemoveAgentFromGroupRole(UUID requestingAgentID, UUID AgentID, UUID GroupID, UUID RoleID)
        {
            GroupsConnector.RemoveAgentFromRole(requestingAgentID, AgentID, GroupID, RoleID);
        }

        public List<DirGroupsReplyData> FindGroups(UUID requestingAgentID, string search, uint? start,
                                                   uint? count, uint queryflags)
        {
            //TODO: Fix this.. should be in the search module
            return GroupsConnector.FindGroups(requestingAgentID, search, start, count, queryflags);
        }

        public GroupProfileData GetGroupProfile(UUID requestingAgentID, UUID GroupID)
        {
            return GroupsConnector.GetGroupProfile(requestingAgentID, GroupID);
        }

        public GroupMembershipData GetAgentGroupMembership(UUID requestingAgentID, UUID AgentID, UUID GroupID)
        {
            return GroupsConnector.GetGroupMembershipData(requestingAgentID, GroupID, AgentID);
        }

        public GroupMembershipData GetAgentActiveMembership(UUID requestingAgentID, UUID AgentID)
        {
            // UUID.Zero group ID asks the connector for the agent's active group.
            return GroupsConnector.GetGroupMembershipData(requestingAgentID, UUID.Zero, AgentID);
        }

        public List<GroupMembershipData> GetAgentGroupMemberships(UUID requestingAgentID, UUID AgentID)
        {
            return GroupsConnector.GetAgentGroupMemberships(requestingAgentID, AgentID);
        }

        public List<GroupRolesData> GetAgentGroupRoles(UUID requestingAgentID, UUID AgentID, UUID GroupID)
        {
            return GroupsConnector.GetAgentGroupRoles(requestingAgentID, AgentID, GroupID);
        }

        public List<GroupRolesData> GetGroupRoles(UUID requestingAgentID, UUID GroupID)
        {
            return GroupsConnector.GetGroupRoles(requestingAgentID, GroupID);
        }

        public List<GroupMembersData> GetGroupMembers(UUID requestingAgentID, UUID GroupID)
        {
            return GroupsConnector.GetGroupMembers(requestingAgentID, GroupID);
        }

        public List<GroupRoleMembersData> GetGroupRoleMembers(UUID requestingAgentID, UUID GroupID)
        {
            return GroupsConnector.GetGroupRoleMembers(requestingAgentID, GroupID);
        }

        public List<GroupNoticeData> GetGroupNotices(UUID requestingAgentID, UUID GroupID)
        {
            // Start/count of 0 delegate paging decisions to the connector.
            return GetGroupNotices(requestingAgentID, 0, 0, GroupID);
        }

        public List<GroupNoticeData> GetGroupNotices(UUID requestingAgentID, uint start, uint count, UUID GroupID)
        {
            return GroupsConnector.GetGroupNotices(requestingAgentID, start, count, GroupID);
        }

        public List<GroupTitlesData> GetGroupTitles(UUID requestingAgentID, UUID GroupID)
        {
            return GroupsConnector.GetGroupTitles(requestingAgentID, GroupID);
        }

        public uint GetNumberOfGroupNotices(UUID requestingAgentID, UUID GroupID)
        {
            return GroupsConnector.GetNumberOfGroupNotices(requestingAgentID, GroupID);
        }

        public uint GetNumberOfGroupNotices(UUID requestingAgentID, List<UUID> GroupIDs)
        {
            return GroupsConnector.GetNumberOfGroupNotices(requestingAgentID, GroupIDs);
        }

        public GroupNoticeData GetGroupNoticeData(UUID requestingAgentID, UUID noticeID)
        {
            return GroupsConnector.GetGroupNoticeData(requestingAgentID, noticeID);
        }

        public GroupNoticeInfo GetGroupNotice(UUID requestingAgentID, UUID noticeID)
        {
            return GroupsConnector.GetGroupNotice(requestingAgentID, noticeID);
        }

        public void AddGroupNotice(UUID requestingAgentID, UUID groupID, UUID noticeID, string fromName,
                                   string subject, string message, UUID ItemID, int AssetType, string ItemName)
        {
            GroupsConnector.AddGroupNotice(requestingAgentID, groupID, noticeID, fromName, subject,
                                           message, ItemID, AssetType, ItemName);
        }

        public void AddGroupProposal(UUID agentID, GroupProposalInfo info)
        {
            GroupsConnector.AddGroupProposal(agentID, info);
        }

        public void VoteOnActiveProposals(UUID agentID, UUID groupID, UUID proposalID, string vote)
        {
            GroupsConnector.VoteOnActiveProposals(agentID, groupID, proposalID, vote);
        }

        public List<GroupInviteInfo> GetGroupInvites(UUID requestingAgentID)
        {
            return GroupsConnector.GetGroupInvites(requestingAgentID);
        }

        public List<GroupProposalInfo> GetActiveProposals(UUID agentID, UUID groupID)
        {
            return GroupsConnector.GetActiveProposals(agentID, groupID);
        }

        public List<GroupProposalInfo> GetInactiveProposals(UUID agentID, UUID groupID)
        {
            return GroupsConnector.GetInactiveProposals(agentID, groupID);
        }

        /// <summary>
        ///   Add this member to the friend conference
        /// </summary>
        /// <param name = "member"></param>
        /// <param name = "SessionID"></param>
        public void AddMemberToGroup(ChatSessionMember member, UUID SessionID)
        {
            ChatSession session;
            // FIX: guard against an unknown SessionID — the old code ignored the
            // result of TryGetValue and dereferenced a null session.
            if (!ChatSessions.TryGetValue(SessionID, out session))
                return;

            ChatSessionMember oldMember = FindMember(SessionID, member.AvatarKey);
            if ((oldMember == null) || (oldMember.AvatarKey == UUID.Zero))
                session.Members.Add(member);
            else
                oldMember.HasBeenAdded = true; //Reset this
        }

        /// <summary>
        ///   Create a new friend conference session
        /// </summary>
        /// <param name = "session"></param>
        public bool CreateSession(ChatSession session)
        {
            ChatSession oldSession = null;
            if (ChatSessions.TryGetValue(session.SessionID, out oldSession))
            {
                // FIX: the old code counted members of the *incoming* session
                // (empty at this point), so an existing, populated session was
                // always discarded. Inspect the stored session instead.
                if (GetMemeberCount(oldSession) == 0)
                    RemoveSession(session.SessionID);
                else
                    return false; //Already have one
            }
            ChatSessions.Add(session.SessionID, session);
            return true;
        }

        /// <summary>Drop a session from the in-memory table.</summary>
        public void RemoveSession(UUID sessionid)
        {
            ChatSessions.Remove(sessionid);
        }

        /// <summary>
        ///   Get a session by a user's sessionID
        /// </summary>
        /// <param name = "SessionID"></param>
        /// <returns>The session, or null when unknown.</returns>
        public ChatSession GetSession(UUID SessionID)
        {
            ChatSession session;
            ChatSessions.TryGetValue(SessionID, out session);
            return session;
        }

        /// <summary>
        ///   Find the member from X sessionID
        /// </summary>
        /// <param name = "sessionid"></param>
        /// <param name = "Agent"></param>
        /// <returns>
        ///   null when the session is unknown; otherwise the matching member, or a
        ///   placeholder member with AvatarKey == UUID.Zero when the agent is absent.
        /// </returns>
        public ChatSessionMember FindMember(UUID sessionid, UUID Agent)
        {
            ChatSession session;
            ChatSessions.TryGetValue(sessionid, out session);
            if (session == null)
                return null;

            ChatSessionMember thismember = new ChatSessionMember {AvatarKey = UUID.Zero};
#if (!ISWIN)
            foreach (ChatSessionMember testmember in session.Members)
            {
                if (testmember.AvatarKey == Agent)
                {
                    thismember = testmember;
                }
            }
#else
            foreach (ChatSessionMember testmember in session.Members.Where(testmember => testmember.AvatarKey == Agent))
            {
                thismember = testmember;
            }
#endif
            return thismember;
        }

        #endregion

        #region ISharedRegionModule Members

        public string Name
        {
            get { return "AuroraDataGroupsServicesConnectorModule"; }
        }

        // this module is not intended to be replaced, but there should only be 1 of them.
public Type ReplaceableInterface { get { return null; } } public void Initialise(IConfigSource config) { IConfig groupsConfig = config.Configs["Groups"]; if (groupsConfig == null) { // Do not run this module by default. return; } else { // if groups aren't enabled, we're not needed. // if we're not specified as the connector to use, then we're not wanted if ((groupsConfig.GetBoolean("Enabled", false) == false) || (groupsConfig.GetString("ServicesConnectorModule", "Default") != Name)) { m_connectorEnabled = false; return; } //MainConsole.Instance.InfoFormat("[AURORA-GROUPS-CONNECTOR]: Initializing {0}", this.Name); m_connectorEnabled = true; } } public void Close() { MainConsole.Instance.InfoFormat("[AURORA-GROUPS-CONNECTOR]: Closing {0}", this.Name); } public void AddRegion(IScene scene) { GroupsConnector = Aurora.DataManager.DataManager.RequestPlugin<IGroupsServiceConnector>(); if (GroupsConnector == null) { MainConsole.Instance.Warn("[AURORA-GROUPS-CONNECTOR]: GroupsConnector is null"); m_connectorEnabled = false; } if (m_connectorEnabled) { if (m_accountService == null) { m_accountService = scene.UserAccountService; } scene.RegisterModuleInterface<IGroupsServicesConnector>(this); } } public void RemoveRegion(IScene scene) { if (scene.RequestModuleInterface<IGroupsServicesConnector>() == this) { scene.UnregisterModuleInterface<IGroupsServicesConnector>(this); } } public void RegionLoaded(IScene scene) { } public void PostInitialise() { // NoOp } #endregion public GroupProfileData GetMemberGroupProfile(UUID requestingAgentID, UUID GroupID, UUID AgentID) { GroupMembershipData MemberInfo = GroupsConnector.GetGroupMembershipData(requestingAgentID, GroupID, AgentID); GroupProfileData MemberGroupProfile = GroupsConnector.GetMemberGroupProfile(requestingAgentID, GroupID, AgentID); MemberGroupProfile.MemberTitle = MemberInfo.GroupTitle; MemberGroupProfile.PowersMask = MemberInfo.GroupPowers; return MemberGroupProfile; } private int GetMemeberCount(ChatSession session) { 
#if (!ISWIN) int count = 0; foreach (ChatSessionMember member in session.Members) { if (member.HasBeenAdded) count++; } return count; #else return session.Members.Count(member => member.HasBeenAdded); #endif } /// <summary> /// Add the agent to the in-memory session lists and give them the default permissions /// </summary> /// <param name = "AgentID"></param> /// <param name = "SessionID"></param> private void AddDefaultPermsMemberToSession(UUID AgentID, UUID SessionID) { ChatSession session; ChatSessions.TryGetValue(SessionID, out session); ChatSessionMember member = new ChatSessionMember { AvatarKey = AgentID, CanVoiceChat = true, IsModerator = false, MuteText = false, MuteVoice = false, HasBeenAdded = false }; session.Members.Add(member); } } }
using UnityEngine;
using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Text;
#if UNITY_EDITOR
using UnityEditor;
#endif

/// <summary>
///   Singleton MonoBehaviour that bridges Unity to the native Effekseer plugin:
///   loads/releases .efk effect files from StreamingAssets/Effekseer, ticks the
///   native runtime, and feeds camera/projection matrices to the render plugin.
/// </summary>
public class EffekseerSystem : MonoBehaviour
{
	// Root folder of the .efk assets (StreamingAssets/Effekseer).
	public static string resourcePath
	{
		get { return Path.Combine(Application.streamingAssetsPath, "Effekseer"); }
	}

	/// <summary>Play the named effect at a world position; the returned handle
	/// has enable == false when the effect could not be loaded.</summary>
	public static EffekseerHandle PlayEffect(string name, Vector3 location)
	{
		IntPtr effect = Instance._GetEffect(name);
		if (effect != IntPtr.Zero)
		{
			int handle = Plugin.EffekseerPlayEffect(effect, location.x, location.y, location.z);
			return new EffekseerHandle(handle);
		}
		return new EffekseerHandle(-1);
	}

	public static void StopAllEffects()
	{
		Plugin.EffekseerStopAllEffects();
	}

	/// <summary>Pre-load an effect so the first PlayEffect does no file I/O.</summary>
	public static void LoadEffect(string name)
	{
		Instance._GetEffect(name);
	}

	/// <summary>Release a previously loaded effect's native resources.</summary>
	public static void ReleaseEffect(string name)
	{
		Instance._ReleaseEffect(name);
	}

	#region Implementation

	private static EffekseerSystem _Instance = null;

	// Lazily finds or creates a GameObject named "Effekseer" carrying this component.
	public static EffekseerSystem Instance
	{
		get
		{
			if (_Instance == null)
			{
				var go = GameObject.Find("Effekseer");
				if (go && go.GetComponent<EffekseerSystem>())
				{
					_Instance = go.GetComponent<EffekseerSystem>();
				}
				else
				{
					go = new GameObject("Effekseer");
					_Instance = go.AddComponent<EffekseerSystem>();
				}
			}
			return _Instance;
		}
	}

	public bool drawInSceneView = true;
	public int maxInstances = 800;
	public int maxSquares = 1200;

	// Base event id passed to GL.IssuePluginEvent; +1 is used for scene-view rendering.
	private const int renderEventId = 0x2040;

	// Loaded native effects, keyed by file name without extension.
	private Dictionary<string, IntPtr> effects = new Dictionary<string, IntPtr>();

#if UNITY_EDITOR
	// Editor-only: survives domain reloads so native effect pointers can be
	// restored in OnEnable without reloading from disk.
	[Serializable]
	private struct EffectKeyValue
	{
		// FIX: Unity does not serialize private fields without [SerializeField],
		// so this reload cache was never actually persisted.
		[SerializeField] private string key;
		[SerializeField] private string value;

		public EffectKeyValue(string key, IntPtr value)
		{
			this.key = key;
			this.value = value.ToString();
		}

		public string GetKey() { return key; }
		public IntPtr GetValue() { return (IntPtr)ulong.Parse(value); }
	}

	private List<EffectKeyValue> effectKeyValues = new List<EffectKeyValue>();
#endif

	// Load (or return cached) native effect for the given name; the ".efk"
	// extension and any directory are stripped to form the cache key.
	private IntPtr _GetEffect(string name)
	{
		name = Path.GetFileNameWithoutExtension(name);
		if (effects.ContainsKey(name) == false)
		{
			string fullPath = Path.Combine(EffekseerSystem.resourcePath, Path.ChangeExtension(name, "efk"));
			// The native side expects a UTF-16 path, hence Encoding.Unicode.
			byte[] bytes = Encoding.Unicode.GetBytes(fullPath);
			GCHandle ghc = GCHandle.Alloc(bytes, GCHandleType.Pinned);
			IntPtr effect = Plugin.EffekseerLoadEffect(ghc.AddrOfPinnedObject());
			ghc.Free();
			if (effect == IntPtr.Zero)
			{
				Debug.LogError("[Effekseer] Error loading " + fullPath);
				return IntPtr.Zero;
			}
			effects.Add(name, effect);
#if UNITY_EDITOR
			effectKeyValues.Add(new EffectKeyValue(name, effect));
#endif
			return effect;
		}
		else
		{
			return effects[name];
		}
	}

	// Release a loaded effect's native memory and drop it from the cache.
	private void _ReleaseEffect(string name)
	{
		// FIX: normalize the key the same way _GetEffect does, so
		// ReleaseEffect("foo.efk") matches LoadEffect("foo.efk").
		name = Path.GetFileNameWithoutExtension(name);

		// FIX: the original tested ContainsKey(name) == false and then indexed
		// the dictionary — loaded effects were never released, and unknown
		// names threw KeyNotFoundException.
		IntPtr effect;
		if (effects.TryGetValue(name, out effect))
		{
			Plugin.EffekseerReleaseEffect(effect);
			effects.Remove(name);
#if UNITY_EDITOR
			// Keep the editor reload cache in sync so OnEnable does not
			// resurrect a freed native pointer.
			effectKeyValues.RemoveAll(kv => kv.GetKey() == name);
#endif
		}
	}

	void Awake()
	{
		Plugin.EffekseerInit(maxInstances, maxSquares);
	}

	void OnDestroy()
	{
		foreach (var pair in effects)
		{
			Plugin.EffekseerReleaseEffect(pair.Value);
		}
		effects = null;
		Plugin.EffekseerTerm();
	}

	void OnEnable()
	{
#if UNITY_EDITOR
		// Restore native pointers recorded before an editor domain reload.
		if (effects.Count == 0)
		{
			for (int i = 0; i < effectKeyValues.Count; i++)
			{
				effects.Add(effectKeyValues[i].GetKey(), effectKeyValues[i].GetValue());
			}
		}
#endif
	}

	void FixedUpdate()
	{
		// Advance the native simulation by one frame per fixed tick.
		Plugin.EffekseerUpdate(1);
	}

	void OnRenderObject()
	{
		int eventId = renderEventId;
#if UNITY_EDITOR
		if (SceneView.currentDrawingSceneView != null &&
		    Camera.current == SceneView.currentDrawingSceneView.camera)
		{
			// Scene-view rendering uses its own event id (+1) and can be disabled.
			if (this.drawInSceneView == false)
			{
				return;
			}
			eventId = renderEventId + 1;
		}
#endif
		{
			float[] projectionMatrixArray = Matrix2Array(Camera.current.projectionMatrix);
			if (RenderTexture.active)
			{
				// Render-to-texture flips Y; negate m11 to compensate.
				projectionMatrixArray[5] = -projectionMatrixArray[5];
			}
			GCHandle ghc = GCHandle.Alloc(projectionMatrixArray, GCHandleType.Pinned);
			Plugin.EffekseerSetProjectionMatrix(eventId, ghc.AddrOfPinnedObject());
			ghc.Free();
		}
		{
			float[] cameraMatrixArray = Matrix2Array(Camera.current.worldToCameraMatrix);
			GCHandle ghc = GCHandle.Alloc(cameraMatrixArray, GCHandleType.Pinned);
			Plugin.EffekseerSetCameraMatrix(eventId, ghc.AddrOfPinnedObject());
			ghc.Free();
		}
		GL.IssuePluginEvent(eventId);
	}

	// Flatten a Matrix4x4 row-by-row into the float[16] layout the plugin expects.
	private float[] Matrix2Array(Matrix4x4 mat)
	{
		float[] res = new float[16];
		res[ 0] = mat.m00; res[ 1] = mat.m01; res[ 2] = mat.m02; res[ 3] = mat.m03;
		res[ 4] = mat.m10; res[ 5] = mat.m11; res[ 6] = mat.m12; res[ 7] = mat.m13;
		res[ 8] = mat.m20; res[ 9] = mat.m21; res[10] = mat.m22; res[11] = mat.m23;
		res[12] = mat.m30; res[13] = mat.m31; res[14] = mat.m32; res[15] = mat.m33;
		return res;
	}

	// P/Invoke surface of the native Effekseer plugin.
	public static class Plugin
	{
#if UNITY_IPHONE
		public const string pluginName = "__Internal";
#else
		public const string pluginName = "EffekseerUnity";
#endif
		[DllImport(pluginName)]
		public static extern void EffekseerInit(int maxInstances, int maxSquares);
		[DllImport(pluginName)]
		public static extern void EffekseerTerm();
		[DllImport(pluginName)]
		public static extern void EffekseerUpdate(float deltaTime);
		[DllImport(pluginName)]
		public static extern void EffekseerSetProjectionMatrix(int renderId, IntPtr matrix);
		[DllImport(pluginName)]
		public static extern void EffekseerSetCameraMatrix(int renderId, IntPtr matrix);
		[DllImport(pluginName)]
		public static extern IntPtr EffekseerLoadEffect(IntPtr path);
		[DllImport(pluginName)]
		public static extern void EffekseerReleaseEffect(IntPtr effect);
		[DllImport(pluginName)]
		public static extern int EffekseerPlayEffect(IntPtr effect, float x, float y, float z);
		[DllImport(pluginName)]
		public static extern void EffekseerStopEffect(int handle);
		[DllImport(pluginName)]
		public static extern void EffekseerStopAllEffects();
		[DllImport(pluginName)]
		public static extern void EffekseerSetShown(int handle, bool shown);
		[DllImport(pluginName)]
		public static extern void EffekseerSetPaused(int handle, bool paused);
		[DllImport(pluginName)]
		public static extern bool EffekseerExists(int handle);
		[DllImport(pluginName)]
		public static extern void EffekseerSetLocation(int handle, float x, float y, float z);
		[DllImport(pluginName)]
		public static extern void EffekseerSetRotation(int handle, float x, float y, float z, float angle);
		[DllImport(pluginName)]
		public static extern void EffekseerSetScale(int handle, float x, float y, float z);
	}

	#endregion
}

/// <summary>
///   Lightweight handle to a playing effect instance; wraps the native int
///   handle (negative when invalid — see <see cref="enable"/>).
/// </summary>
[Serializable]
public struct EffekseerHandle
{
	int handle;

	public EffekseerHandle(int _handle)
	{
		handle = _handle;
	}

	public void Stop()
	{
		EffekseerSystem.Plugin.EffekseerStopEffect(handle);
	}

	public void SetLocation(Vector3 location)
	{
		EffekseerSystem.Plugin.EffekseerSetLocation(handle, location.x, location.y, location.z);
	}

	public void SetRotation(Quaternion rotation)
	{
		// Native API takes axis + angle in radians.
		Vector3 axis;
		float angle;
		rotation.ToAngleAxis(out angle, out axis);
		EffekseerSystem.Plugin.EffekseerSetRotation(handle, axis.x, axis.y, axis.z, angle * Mathf.Deg2Rad);
	}

	public void SetScale(Vector3 scale)
	{
		EffekseerSystem.Plugin.EffekseerSetScale(handle, scale.x, scale.y, scale.z);
	}

	// True when the handle refers to a successfully started effect.
	public bool enable
	{
		get { return handle >= 0; }
	}

	// True while the native side still tracks this instance.
	public bool exists
	{
		get { return EffekseerSystem.Plugin.EffekseerExists(handle); }
	}
}
using Microsoft.IdentityModel.S2S.Protocols.OAuth2;
using Microsoft.IdentityModel.Tokens;
using Microsoft.SharePoint.Client;
using System;
using System.Net;
using System.Security.Principal;
using System.Web;
using System.Web.Configuration;

namespace Provision.Cloud.Async.WebJob.Console.Create
{
    /// <summary>
    /// Encapsulates all the information from SharePoint.
    /// </summary>
    public abstract class SharePointContext
    {
        // Query-string keys SharePoint fills in via the {StandardTokens} placeholder.
        public const string SPHostUrlKey = "SPHostUrl";
        public const string SPAppWebUrlKey = "SPAppWebUrl";
        public const string SPLanguageKey = "SPLanguage";
        public const string SPClientTagKey = "SPClientTag";
        public const string SPProductNumberKey = "SPProductNumber";

        // Tokens are treated as expired this long before their real expiry so
        // that SharePoint calls already in flight have time to complete.
        protected static readonly TimeSpan AccessTokenLifetimeTolerance = TimeSpan.FromMinutes(5.0);

        private readonly Uri spHostUrl;
        private readonly Uri spAppWebUrl;
        private readonly string spLanguage;
        private readonly string spClientTag;
        private readonly string spProductNumber;

        // Cached tokens as <AccessTokenString, UtcExpiresOn> pairs.
        protected Tuple<string, DateTime> userAccessTokenForSPHost;
        protected Tuple<string, DateTime> userAccessTokenForSPAppWeb;
        protected Tuple<string, DateTime> appOnlyAccessTokenForSPHost;
        protected Tuple<string, DateTime> appOnlyAccessTokenForSPAppWeb;

        /// <summary>
        /// Gets the SharePoint host url from QueryString of the specified HTTP request.
        /// </summary>
        /// <param name="httpRequest">The specified HTTP request.</param>
        /// <returns>The SharePoint host url. Returns <c>null</c> if the HTTP request doesn't contain the SharePoint host url.</returns>
        public static Uri GetSPHostUrl(HttpRequestBase httpRequest)
        {
            if (httpRequest == null)
            {
                throw new ArgumentNullException("httpRequest");
            }

            string spHostUrlString = TokenHelper.EnsureTrailingSlash(httpRequest.QueryString[SPHostUrlKey]);
            Uri spHostUrl;
            // Only absolute http/https urls are accepted.
            if (Uri.TryCreate(spHostUrlString, UriKind.Absolute, out spHostUrl) &&
                (spHostUrl.Scheme == Uri.UriSchemeHttp || spHostUrl.Scheme == Uri.UriSchemeHttps))
            {
                return spHostUrl;
            }

            return null;
        }

        /// <summary>
        /// Gets the SharePoint host url from QueryString of the specified HTTP request.
        /// </summary>
        /// <param name="httpRequest">The specified HTTP request.</param>
        /// <returns>The SharePoint host url. Returns <c>null</c> if the HTTP request doesn't contain the SharePoint host url.</returns>
        public static Uri GetSPHostUrl(HttpRequest httpRequest)
        {
            return GetSPHostUrl(new HttpRequestWrapper(httpRequest));
        }

        /// <summary>
        /// The SharePoint host url.
        /// </summary>
        public Uri SPHostUrl
        {
            get { return this.spHostUrl; }
        }

        /// <summary>
        /// The SharePoint app web url.
        /// </summary>
        public Uri SPAppWebUrl
        {
            get { return this.spAppWebUrl; }
        }

        /// <summary>
        /// The SharePoint language.
        /// </summary>
        public string SPLanguage
        {
            get { return this.spLanguage; }
        }

        /// <summary>
        /// The SharePoint client tag.
        /// </summary>
        public string SPClientTag
        {
            get { return this.spClientTag; }
        }

        /// <summary>
        /// The SharePoint product number.
        /// </summary>
        public string SPProductNumber
        {
            get { return this.spProductNumber; }
        }

        /// <summary>
        /// The user access token for the SharePoint host.
        /// </summary>
        public abstract string UserAccessTokenForSPHost { get; }

        /// <summary>
        /// The user access token for the SharePoint app web.
        /// </summary>
        public abstract string UserAccessTokenForSPAppWeb { get; }

        /// <summary>
        /// The app only access token for the SharePoint host.
        /// </summary>
        public abstract string AppOnlyAccessTokenForSPHost { get; }

        /// <summary>
        /// The app only access token for the SharePoint app web.
        /// </summary>
        public abstract string AppOnlyAccessTokenForSPAppWeb { get; }

        /// <summary>
        /// Constructor. Note that spAppWebUrl may legitimately be null (app has no app web).
        /// </summary>
        /// <param name="spHostUrl">The SharePoint host url.</param>
        /// <param name="spAppWebUrl">The SharePoint app web url.</param>
        /// <param name="spLanguage">The SharePoint language.</param>
        /// <param name="spClientTag">The SharePoint client tag.</param>
        /// <param name="spProductNumber">The SharePoint product number.</param>
        protected SharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber)
        {
            if (spHostUrl == null)
            {
                throw new ArgumentNullException("spHostUrl");
            }

            if (string.IsNullOrEmpty(spLanguage))
            {
                throw new ArgumentNullException("spLanguage");
            }

            if (string.IsNullOrEmpty(spClientTag))
            {
                throw new ArgumentNullException("spClientTag");
            }

            if (string.IsNullOrEmpty(spProductNumber))
            {
                throw new ArgumentNullException("spProductNumber");
            }

            this.spHostUrl = spHostUrl;
            this.spAppWebUrl = spAppWebUrl;
            this.spLanguage = spLanguage;
            this.spClientTag = spClientTag;
            this.spProductNumber = spProductNumber;
        }

        /// <summary>
        /// Creates a user ClientContext for the SharePoint host.
        /// </summary>
        /// <returns>A ClientContext instance.</returns>
        public ClientContext CreateUserClientContextForSPHost()
        {
            return CreateClientContext(this.SPHostUrl, this.UserAccessTokenForSPHost);
        }

        /// <summary>
        /// Creates a user ClientContext for the SharePoint app web.
        /// </summary>
        /// <returns>A ClientContext instance.</returns>
        public ClientContext CreateUserClientContextForSPAppWeb()
        {
            return CreateClientContext(this.SPAppWebUrl, this.UserAccessTokenForSPAppWeb);
        }

        /// <summary>
        /// Creates app only ClientContext for the SharePoint host.
        /// </summary>
        /// <returns>A ClientContext instance.</returns>
        public ClientContext CreateAppOnlyClientContextForSPHost()
        {
            return CreateClientContext(this.SPHostUrl, this.AppOnlyAccessTokenForSPHost);
        }

        /// <summary>
        /// Creates an app only ClientContext for the SharePoint app web.
        /// </summary>
        /// <returns>A ClientContext instance.</returns>
        public ClientContext CreateAppOnlyClientContextForSPAppWeb()
        {
            return CreateClientContext(this.SPAppWebUrl, this.AppOnlyAccessTokenForSPAppWeb);
        }

        /// <summary>
        /// Gets the database connection string from SharePoint for autohosted app.
        /// </summary>
        /// <returns>The database connection string. Returns <c>null</c> if the app is not autohosted or there is no database.</returns>
        public string GetDatabaseConnectionString()
        {
            string dbConnectionString = null;

            using (ClientContext clientContext = CreateAppOnlyClientContextForSPHost())
            {
                if (clientContext != null)
                {
                    var result = AppInstance.RetrieveAppDatabaseConnectionString(clientContext);
                    clientContext.ExecuteQuery();
                    dbConnectionString = result.Value;
                }
            }

            if (dbConnectionString == null)
            {
                // Fall back to a local debugging connection string from web.config.
                const string LocalDBInstanceForDebuggingKey = "LocalDBInstanceForDebugging";
                var dbConnectionStringSettings = WebConfigurationManager.ConnectionStrings[LocalDBInstanceForDebuggingKey];
                dbConnectionString = dbConnectionStringSettings != null ? dbConnectionStringSettings.ConnectionString : null;
            }

            return dbConnectionString;
        }

        /// <summary>
        /// Determines if the specified access token is valid.
        /// It considers an access token as not valid if it is null, or it has expired.
        /// </summary>
        /// <param name="accessToken">The access token to verify.</param>
        /// <returns>True if the access token is valid.</returns>
        protected static bool IsAccessTokenValid(Tuple<string, DateTime> accessToken)
        {
            return accessToken != null &&
                   !string.IsNullOrEmpty(accessToken.Item1) &&
                   accessToken.Item2 > DateTime.UtcNow;
        }

        /// <summary>
        /// Creates a ClientContext with the specified SharePoint site url and the access token.
        /// </summary>
        /// <param name="spSiteUrl">The site url.</param>
        /// <param name="accessToken">The access token.</param>
        /// <returns>A ClientContext instance.</returns>
        private static ClientContext CreateClientContext(Uri spSiteUrl, string accessToken)
        {
            if (spSiteUrl != null && !string.IsNullOrEmpty(accessToken))
            {
                return TokenHelper.GetClientContextWithAccessToken(spSiteUrl.AbsoluteUri, accessToken);
            }

            return null;
        }
    }

    /// <summary>
    /// Redirection status.
    /// </summary>
    public enum RedirectionStatus
    {
        Ok,
        ShouldRedirect,
        CanNotRedirect
    }

    /// <summary>
    /// Provides SharePointContext instances.
    /// </summary>
    public abstract class SharePointContextProvider
    {
        private static SharePointContextProvider current;

        /// <summary>
        /// The current SharePointContextProvider instance.
        /// </summary>
        public static SharePointContextProvider Current
        {
            get { return SharePointContextProvider.current; }
        }

        /// <summary>
        /// Initializes the default SharePointContextProvider instance.
        /// </summary>
        static SharePointContextProvider()
        {
            // Trust model decides the default provider: ACS (low trust) vs S2S (high trust).
            if (!TokenHelper.IsHighTrustApp())
            {
                SharePointContextProvider.current = new SharePointAcsContextProvider();
            }
            else
            {
                SharePointContextProvider.current = new SharePointHighTrustContextProvider();
            }
        }

        /// <summary>
        /// Registers the specified SharePointContextProvider instance as current.
        /// It should be called by Application_Start() in Global.asax.
        /// </summary>
        /// <param name="provider">The SharePointContextProvider to be set as current.</param>
        public static void Register(SharePointContextProvider provider)
        {
            if (provider == null)
            {
                throw new ArgumentNullException("provider");
            }

            SharePointContextProvider.current = provider;
        }

        /// <summary>
        /// Checks if it is necessary to redirect to SharePoint for user to authenticate.
        /// </summary>
        /// <param name="httpContext">The HTTP context.</param>
        /// <param name="redirectUrl">The redirect url to SharePoint if the status is ShouldRedirect. <c>Null</c> if the status is Ok or CanNotRedirect.</param>
        /// <returns>Redirection status.</returns>
        public static RedirectionStatus CheckRedirectionStatus(HttpContextBase httpContext, out Uri redirectUrl)
        {
            if (httpContext == null)
            {
                throw new ArgumentNullException("httpContext");
            }

            redirectUrl = null;

            // A resolvable context means the user is already authenticated.
            if (SharePointContextProvider.Current.GetSharePointContext(httpContext) != null)
            {
                return RedirectionStatus.Ok;
            }

            const string SPHasRedirectedToSharePointKey = "SPHasRedirectedToSharePoint";

            // Guard against a redirect loop: we already bounced once.
            if (!string.IsNullOrEmpty(httpContext.Request.QueryString[SPHasRedirectedToSharePointKey]))
            {
                return RedirectionStatus.CanNotRedirect;
            }

            Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);

            if (spHostUrl == null)
            {
                return RedirectionStatus.CanNotRedirect;
            }

            // POST bodies would be lost across the redirect, so bail out.
            if (StringComparer.OrdinalIgnoreCase.Equals(httpContext.Request.HttpMethod, "POST"))
            {
                return RedirectionStatus.CanNotRedirect;
            }

            Uri requestUrl = httpContext.Request.Url;

            var queryNameValueCollection = HttpUtility.ParseQueryString(requestUrl.Query);

            // Removes the values that are included in {StandardTokens}, as {StandardTokens}
            // will be inserted at the beginning of the query string.
            queryNameValueCollection.Remove(SharePointContext.SPHostUrlKey);
            queryNameValueCollection.Remove(SharePointContext.SPAppWebUrlKey);
            queryNameValueCollection.Remove(SharePointContext.SPLanguageKey);
            queryNameValueCollection.Remove(SharePointContext.SPClientTagKey);
            queryNameValueCollection.Remove(SharePointContext.SPProductNumberKey);

            // Adds SPHasRedirectedToSharePoint=1.
            queryNameValueCollection.Add(SPHasRedirectedToSharePointKey, "1");

            UriBuilder returnUrlBuilder = new UriBuilder(requestUrl);
            returnUrlBuilder.Query = queryNameValueCollection.ToString();

            // Inserts StandardTokens.
            const string StandardTokens = "{StandardTokens}";
            string returnUrlString = returnUrlBuilder.Uri.AbsoluteUri;
            returnUrlString = returnUrlString.Insert(returnUrlString.IndexOf("?") + 1, StandardTokens + "&");

            // Constructs redirect url.
            string redirectUrlString = TokenHelper.GetAppContextTokenRequestUrl(spHostUrl.AbsoluteUri, Uri.EscapeDataString(returnUrlString));

            redirectUrl = new Uri(redirectUrlString, UriKind.Absolute);

            return RedirectionStatus.ShouldRedirect;
        }

        /// <summary>
        /// Checks if it is necessary to redirect to SharePoint for user to authenticate.
        /// </summary>
        /// <param name="httpContext">The HTTP context.</param>
        /// <param name="redirectUrl">The redirect url to SharePoint if the status is ShouldRedirect. <c>Null</c> if the status is Ok or CanNotRedirect.</param>
        /// <returns>Redirection status.</returns>
        public static RedirectionStatus CheckRedirectionStatus(HttpContext httpContext, out Uri redirectUrl)
        {
            return CheckRedirectionStatus(new HttpContextWrapper(httpContext), out redirectUrl);
        }

        /// <summary>
        /// Creates a SharePointContext instance with the specified HTTP request.
        /// </summary>
        /// <param name="httpRequest">The HTTP request.</param>
        /// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
        public SharePointContext CreateSharePointContext(HttpRequestBase httpRequest)
        {
            if (httpRequest == null)
            {
                throw new ArgumentNullException("httpRequest");
            }

            // SPHostUrl
            Uri spHostUrl = SharePointContext.GetSPHostUrl(httpRequest);
            if (spHostUrl == null)
            {
                return null;
            }

            // SPAppWebUrl: optional — an invalid or missing value degrades to null.
            string spAppWebUrlString = TokenHelper.EnsureTrailingSlash(httpRequest.QueryString[SharePointContext.SPAppWebUrlKey]);
            Uri spAppWebUrl;
            if (!Uri.TryCreate(spAppWebUrlString, UriKind.Absolute, out spAppWebUrl) ||
                !(spAppWebUrl.Scheme == Uri.UriSchemeHttp || spAppWebUrl.Scheme == Uri.UriSchemeHttps))
            {
                spAppWebUrl = null;
            }

            // SPLanguage
            string spLanguage = httpRequest.QueryString[SharePointContext.SPLanguageKey];
            if (string.IsNullOrEmpty(spLanguage))
            {
                return null;
            }

            // SPClientTag
            string spClientTag = httpRequest.QueryString[SharePointContext.SPClientTagKey];
            if (string.IsNullOrEmpty(spClientTag))
            {
                return null;
            }

            // SPProductNumber
            string spProductNumber = httpRequest.QueryString[SharePointContext.SPProductNumberKey];
            if (string.IsNullOrEmpty(spProductNumber))
            {
                return null;
            }

            return CreateSharePointContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, httpRequest);
        }

        /// <summary>
        /// Creates a SharePointContext instance with the specified HTTP request.
        /// </summary>
        /// <param name="httpRequest">The HTTP request.</param>
        /// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
        public SharePointContext CreateSharePointContext(HttpRequest httpRequest)
        {
            return CreateSharePointContext(new HttpRequestWrapper(httpRequest));
        }

        /// <summary>
        /// Gets a SharePointContext instance associated with the specified HTTP context.
        /// </summary>
        /// <param name="httpContext">The HTTP context.</param>
        /// <returns>The SharePointContext instance. Returns <c>null</c> if not found and a new instance can't be created.</returns>
        public SharePointContext GetSharePointContext(HttpContextBase httpContext)
        {
            if (httpContext == null)
            {
                throw new ArgumentNullException("httpContext");
            }

            Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
            if (spHostUrl == null)
            {
                return null;
            }

            // Reuse the saved context when it is still valid for this request;
            // otherwise build and persist a fresh one.
            SharePointContext spContext = LoadSharePointContext(httpContext);

            if (spContext == null || !ValidateSharePointContext(spContext, httpContext))
            {
                spContext = CreateSharePointContext(httpContext.Request);

                if (spContext != null)
                {
                    SaveSharePointContext(spContext, httpContext);
                }
            }

            return spContext;
        }

        /// <summary>
        /// Gets a SharePointContext instance associated with the specified HTTP context.
        /// </summary>
        /// <param name="httpContext">The HTTP context.</param>
        /// <returns>The SharePointContext instance. Returns <c>null</c> if not found and a new instance can't be created.</returns>
        public SharePointContext GetSharePointContext(HttpContext httpContext)
        {
            return GetSharePointContext(new HttpContextWrapper(httpContext));
        }

        /// <summary>
        /// Creates a SharePointContext instance.
        /// </summary>
        /// <param name="spHostUrl">The SharePoint host url.</param>
        /// <param name="spAppWebUrl">The SharePoint app web url.</param>
        /// <param name="spLanguage">The SharePoint language.</param>
        /// <param name="spClientTag">The SharePoint client tag.</param>
        /// <param name="spProductNumber">The SharePoint product number.</param>
        /// <param name="httpRequest">The HTTP request.</param>
        /// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
        protected abstract SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest);

        /// <summary>
        /// Validates if the given SharePointContext can be used with the specified HTTP context.
        /// </summary>
        /// <param name="spContext">The SharePointContext.</param>
        /// <param name="httpContext">The HTTP context.</param>
        /// <returns>True if the given SharePointContext can be used with the specified HTTP context.</returns>
        protected abstract bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext);

        /// <summary>
        /// Loads the SharePointContext instance associated with the specified HTTP context.
        /// </summary>
        /// <param name="httpContext">The HTTP context.</param>
        /// <returns>The SharePointContext instance. Returns <c>null</c> if not found.</returns>
        protected abstract SharePointContext LoadSharePointContext(HttpContextBase httpContext);

        /// <summary>
        /// Saves the specified SharePointContext instance associated with the specified HTTP context.
        /// <c>null</c> is accepted for clearing the SharePointContext instance associated with the HTTP context.
        /// </summary>
        /// <param name="spContext">The SharePointContext instance to be saved, or <c>null</c>.</param>
        /// <param name="httpContext">The HTTP context.</param>
        protected abstract void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext);
    }

    #region ACS

    /// <summary>
    /// Encapsulates all the information from SharePoint in ACS mode.
    /// </summary>
    public class SharePointAcsContext : SharePointContext
    {
        private readonly string contextToken;
        private readonly SharePointContextToken contextTokenObj;

        /// <summary>
        /// The context token. Null once the token has expired.
        /// </summary>
        public string ContextToken
        {
            get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextToken : null; }
        }

        /// <summary>
        /// The context token's "CacheKey" claim. Null once the token has expired.
        /// </summary>
        public string CacheKey
        {
            get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextTokenObj.CacheKey : null; }
        }

        /// <summary>
        /// The context token's "refreshtoken" claim. Null once the token has expired.
        /// </summary>
        public string RefreshToken
        {
            get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextTokenObj.RefreshToken : null; }
        }

        public override string UserAccessTokenForSPHost
        {
            get
            {
                return GetAccessTokenString(ref this.userAccessTokenForSPHost,
                                            () => TokenHelper.GetAccessToken(this.contextTokenObj, this.SPHostUrl.Authority));
            }
        }

        public override string UserAccessTokenForSPAppWeb
        {
            get
            {
                if (this.SPAppWebUrl == null)
                {
                    return null;
                }

                return GetAccessTokenString(ref this.userAccessTokenForSPAppWeb,
                                            () => TokenHelper.GetAccessToken(this.contextTokenObj, this.SPAppWebUrl.Authority));
            }
        }

        public override string AppOnlyAccessTokenForSPHost
        {
            get
            {
                return GetAccessTokenString(ref this.appOnlyAccessTokenForSPHost,
                                            () => TokenHelper.GetAppOnlyAccessToken(TokenHelper.SharePointPrincipal, this.SPHostUrl.Authority, TokenHelper.GetRealmFromTargetUrl(this.SPHostUrl)));
            }
        }

        public override string AppOnlyAccessTokenForSPAppWeb
        {
            get
            {
                if (this.SPAppWebUrl == null)
                {
                    return null;
                }

                return GetAccessTokenString(ref this.appOnlyAccessTokenForSPAppWeb,
                                            () => TokenHelper.GetAppOnlyAccessToken(TokenHelper.SharePointPrincipal, this.SPAppWebUrl.Authority, TokenHelper.GetRealmFromTargetUrl(this.SPAppWebUrl)));
            }
        }

        public SharePointAcsContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, string contextToken, SharePointContextToken contextTokenObj)
            : base(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber)
        {
            if (string.IsNullOrEmpty(contextToken))
            {
                throw new ArgumentNullException("contextToken");
            }

            if (contextTokenObj == null)
            {
                throw new ArgumentNullException("contextTokenObj");
            }

            this.contextToken = contextToken;
            this.contextTokenObj = contextTokenObj;
        }

        /// <summary>
        /// Ensures the access token is valid and returns it.
        /// </summary>
        /// <param name="accessToken">The access token to verify.</param>
        /// <param name="tokenRenewalHandler">The token renewal handler.</param>
        /// <returns>The access token string.</returns>
        private static string GetAccessTokenString(ref Tuple<string, DateTime> accessToken, Func<OAuth2AccessTokenResponse> tokenRenewalHandler)
        {
            RenewAccessTokenIfNeeded(ref accessToken, tokenRenewalHandler);

            return IsAccessTokenValid(accessToken) ? accessToken.Item1 : null;
        }

        /// <summary>
        /// Renews the access token if it is not valid.
        /// </summary>
        /// <param name="accessToken">The access token to renew.</param>
        /// <param name="tokenRenewalHandler">The token renewal handler.</param>
        private static void RenewAccessTokenIfNeeded(ref Tuple<string, DateTime> accessToken, Func<OAuth2AccessTokenResponse> tokenRenewalHandler)
        {
            if (IsAccessTokenValid(accessToken))
            {
                return;
            }

            try
            {
                OAuth2AccessTokenResponse oAuth2AccessTokenResponse = tokenRenewalHandler();

                DateTime expiresOn = oAuth2AccessTokenResponse.ExpiresOn;

                if ((expiresOn - oAuth2AccessTokenResponse.NotBefore) > AccessTokenLifetimeTolerance)
                {
                    // Make the access token get renewed a bit earlier than the time when it expires
                    // so that the calls to SharePoint with it will have enough time to complete successfully.
                    expiresOn -= AccessTokenLifetimeTolerance;
                }

                accessToken = Tuple.Create(oAuth2AccessTokenResponse.AccessToken, expiresOn);
            }
            catch (WebException)
            {
                // NOTE(review): deliberate best-effort — a failed renewal leaves the
                // cached token invalid and the caller receives null.
            }
        }
    }

    /// <summary>
    /// Default provider for SharePointAcsContext.
    /// </summary>
    public class SharePointAcsContextProvider : SharePointContextProvider
    {
        private const string SPContextKey = "SPContext";
        private const string SPCacheKeyKey = "SPCacheKey";

        protected override SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest)
        {
            string contextTokenString = TokenHelper.GetContextTokenFromRequest(httpRequest);
            if (string.IsNullOrEmpty(contextTokenString))
            {
                return null;
            }

            SharePointContextToken contextToken = null;
            try
            {
                contextToken = TokenHelper.ReadAndValidateContextToken(contextTokenString, httpRequest.Url.Authority);
            }
            catch (WebException)
            {
                return null;
            }
            catch (AudienceUriValidationFailedException)
            {
                return null;
            }

            return new SharePointAcsContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, contextTokenString, contextToken);
        }

        protected override bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
        {
            SharePointAcsContext spAcsContext = spContext as SharePointAcsContext;

            if (spAcsContext != null)
            {
                Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
                string contextToken = TokenHelper.GetContextTokenFromRequest(httpContext.Request);
                HttpCookie spCacheKeyCookie = httpContext.Request.Cookies[SPCacheKeyKey];
                string spCacheKey = spCacheKeyCookie != null ? spCacheKeyCookie.Value : null;

                // The saved context is valid only when it matches the requesting
                // host, the cache-key cookie, and (when present) the new context token.
                return spHostUrl == spAcsContext.SPHostUrl &&
                       !string.IsNullOrEmpty(spAcsContext.CacheKey) &&
                       spCacheKey == spAcsContext.CacheKey &&
                       !string.IsNullOrEmpty(spAcsContext.ContextToken) &&
                       (string.IsNullOrEmpty(contextToken) || contextToken == spAcsContext.ContextToken);
            }

            return false;
        }

        protected override SharePointContext LoadSharePointContext(HttpContextBase httpContext)
        {
            return httpContext.Session[SPContextKey] as SharePointAcsContext;
        }

        protected override void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
        {
            SharePointAcsContext spAcsContext = spContext as SharePointAcsContext;

            if (spAcsContext != null)
            {
                // Persist the cache key in a secure, http-only cookie so later
                // requests can be validated against the saved session context.
                HttpCookie spCacheKeyCookie = new HttpCookie(SPCacheKeyKey)
                {
                    Value = spAcsContext.CacheKey,
                    Secure = true,
                    HttpOnly = true
                };

                httpContext.Response.AppendCookie(spCacheKeyCookie);
            }

            httpContext.Session[SPContextKey] = spAcsContext;
        }
    }

    #endregion ACS

    #region HighTrust

    /// <summary>
    /// Encapsulates all the information from SharePoint in HighTrust mode.
    /// </summary>
    public class SharePointHighTrustContext : SharePointContext
    {
        private readonly WindowsIdentity logonUserIdentity;

        /// <summary>
        /// The Windows identity for the current user.
        /// </summary>
        public WindowsIdentity LogonUserIdentity
        {
            get { return this.logonUserIdentity; }
        }

        public override string UserAccessTokenForSPHost
        {
            get
            {
                return GetAccessTokenString(ref this.userAccessTokenForSPHost,
                                            () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPHostUrl, this.LogonUserIdentity));
            }
        }

        public override string UserAccessTokenForSPAppWeb
        {
            get
            {
                if (this.SPAppWebUrl == null)
                {
                    return null;
                }

                return GetAccessTokenString(ref this.userAccessTokenForSPAppWeb,
                                            () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPAppWebUrl, this.LogonUserIdentity));
            }
        }

        public override string AppOnlyAccessTokenForSPHost
        {
            get
            {
                // A null identity yields an app-only (no user) S2S token.
                return GetAccessTokenString(ref this.appOnlyAccessTokenForSPHost,
                                            () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPHostUrl, null));
            }
        }

        public override string AppOnlyAccessTokenForSPAppWeb
        {
            get
            {
                if (this.SPAppWebUrl == null)
                {
                    return null;
                }

                return GetAccessTokenString(ref this.appOnlyAccessTokenForSPAppWeb,
                                            () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPAppWebUrl, null));
            }
        }

        public SharePointHighTrustContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, WindowsIdentity logonUserIdentity)
            : base(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber)
        {
            if (logonUserIdentity == null)
            {
                throw new ArgumentNullException("logonUserIdentity");
            }

            this.logonUserIdentity = logonUserIdentity;
        }

        /// <summary>
        /// Ensures the access token is valid and returns it.
        /// </summary>
        /// <param name="accessToken">The access token to verify.</param>
        /// <param name="tokenRenewalHandler">The token renewal handler.</param>
        /// <returns>The access token string.</returns>
        private static string GetAccessTokenString(ref Tuple<string, DateTime> accessToken, Func<string> tokenRenewalHandler)
        {
            RenewAccessTokenIfNeeded(ref accessToken, tokenRenewalHandler);

            return IsAccessTokenValid(accessToken) ? accessToken.Item1 : null;
        }

        /// <summary>
        /// Renews the access token if it is not valid.
        /// </summary>
        /// <param name="accessToken">The access token to renew.</param>
        /// <param name="tokenRenewalHandler">The token renewal handler.</param>
        private static void RenewAccessTokenIfNeeded(ref Tuple<string, DateTime> accessToken, Func<string> tokenRenewalHandler)
        {
            if (IsAccessTokenValid(accessToken))
            {
                return;
            }

            DateTime expiresOn = DateTime.UtcNow.Add(TokenHelper.HighTrustAccessTokenLifetime);

            if (TokenHelper.HighTrustAccessTokenLifetime > AccessTokenLifetimeTolerance)
            {
                // Make the access token get renewed a bit earlier than the time when it expires
                // so that the calls to SharePoint with it will have enough time to complete successfully.
                expiresOn -= AccessTokenLifetimeTolerance;
            }

            accessToken = Tuple.Create(tokenRenewalHandler(), expiresOn);
        }
    }

    /// <summary>
    /// Default provider for SharePointHighTrustContext.
    /// </summary>
    public class SharePointHighTrustContextProvider : SharePointContextProvider
    {
        private const string SPContextKey = "SPContext";

        protected override SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest)
        {
            // Require a real, authenticated, non-guest Windows user.
            WindowsIdentity logonUserIdentity = httpRequest.LogonUserIdentity;
            if (logonUserIdentity == null || !logonUserIdentity.IsAuthenticated || logonUserIdentity.IsGuest || logonUserIdentity.User == null)
            {
                return null;
            }

            return new SharePointHighTrustContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, logonUserIdentity);
        }

        protected override bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
        {
            SharePointHighTrustContext spHighTrustContext = spContext as SharePointHighTrustContext;

            if (spHighTrustContext != null)
            {
                Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
                WindowsIdentity logonUserIdentity = httpContext.Request.LogonUserIdentity;

                // The saved context must match both the requesting host and the
                // SID of the currently logged-on Windows user.
                return spHostUrl == spHighTrustContext.SPHostUrl &&
                       logonUserIdentity != null &&
                       logonUserIdentity.IsAuthenticated &&
                       !logonUserIdentity.IsGuest &&
                       logonUserIdentity.User == spHighTrustContext.LogonUserIdentity.User;
            }

            return false;
        }

        protected override SharePointContext LoadSharePointContext(HttpContextBase httpContext)
        {
            return httpContext.Session[SPContextKey] as SharePointHighTrustContext;
        }

        protected override void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
        {
            httpContext.Session[SPContextKey] = spContext as SharePointHighTrustContext;
        }
    }

    #endregion HighTrust
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using osu.Framework.Allocation;
using osu.Framework.Bindables;
using osu.Framework.Extensions.Color4Extensions;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Colour;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Cursor;
using osu.Framework.Graphics.Shapes;
using osu.Game.Beatmaps;
using osu.Game.Graphics;
using osu.Game.Graphics.Containers;
using osu.Game.Graphics.Sprites;
using osu.Game.Online.API;
using osu.Game.Online.API.Requests;
using osu.Game.Online.Multiplayer;
using osu.Game.Screens.Multi.Components;
using osu.Game.Users;
using osu.Game.Users.Drawables;
using osuTK;
using osuTK.Graphics;

namespace osu.Game.Screens.Multi.Lounge.Components
{
    /// <summary>
    /// Side panel showing details (name, beatmap, status, participants) of the
    /// currently selected multiplayer room in the lounge.
    /// </summary>
    public class RoomInspector : MultiplayerComposite
    {
        private const float transition_duration = 100;

        private readonly MarginPadding contentPadding = new MarginPadding { Horizontal = 20, Vertical = 10 };

        private ParticipantCountDisplay participantCount;
        private OsuSpriteText name;
        private BeatmapTypeInfo beatmapTypeInfo;
        private ParticipantInfo participantInfo;

        [Resolved]
        private BeatmapManager beatmaps { get; set; }

        // Local status bindable; defaults to a "no room selected" placeholder
        // and is re-cached for child consumers (see CreateChildDependencies).
        private readonly Bindable<RoomStatus> status = new Bindable<RoomStatus>(new RoomStatusNoneSelected());

        [BackgroundDependencyLoader]
        private void load(OsuColour colours)
        {
            InternalChildren = new Drawable[]
            {
                new Box
                {
                    RelativeSizeAxes = Axes.Both,
                    Colour = OsuColour.FromHex(@"343138"),
                },
                new GridContainer
                {
                    RelativeSizeAxes = Axes.Both,
                    RowDimensions = new[]
                    {
                        new Dimension(GridSizeMode.AutoSize),
                        new Dimension(GridSizeMode.Distributed),
                    },
                    Content = new[]
                    {
                        new Drawable[]
                        {
                            // Header: cover image, room name, participant count,
                            // status strip and beatmap info.
                            new FillFlowContainer
                            {
                                RelativeSizeAxes = Axes.X,
                                AutoSizeAxes = Axes.Y,
                                Direction = FillDirection.Vertical,
                                Children = new Drawable[]
                                {
                                    new Container
                                    {
                                        RelativeSizeAxes = Axes.X,
                                        Height = 200,
                                        Masking = true,
                                        Children = new Drawable[]
                                        {
                                            new MultiplayerBackgroundSprite { RelativeSizeAxes = Axes.Both },
                                            new Box
                                            {
                                                RelativeSizeAxes = Axes.Both,
                                                Colour = ColourInfo.GradientVertical(Color4.Black.Opacity(0.5f), Color4.Black.Opacity(0)),
                                            },
                                            new Container
                                            {
                                                RelativeSizeAxes = Axes.Both,
                                                Padding = new MarginPadding(20),
                                                Children = new Drawable[]
                                                {
                                                    participantCount = new ParticipantCountDisplay
                                                    {
                                                        Anchor = Anchor.TopRight,
                                                        Origin = Anchor.TopRight,
                                                    },
                                                    name = new OsuSpriteText
                                                    {
                                                        Anchor = Anchor.BottomLeft,
                                                        Origin = Anchor.BottomLeft,
                                                        Font = OsuFont.GetFont(size: 30),
                                                        Current = RoomName
                                                    },
                                                },
                                            },
                                        },
                                    },
                                    // Thin strip tinted by room status.
                                    new StatusColouredContainer(transition_duration)
                                    {
                                        RelativeSizeAxes = Axes.X,
                                        Height = 5,
                                        Child = new Box { RelativeSizeAxes = Axes.Both }
                                    },
                                    new Container
                                    {
                                        RelativeSizeAxes = Axes.X,
                                        AutoSizeAxes = Axes.Y,
                                        Children = new Drawable[]
                                        {
                                            new Box
                                            {
                                                RelativeSizeAxes = Axes.Both,
                                                Colour = OsuColour.FromHex(@"28242d"),
                                            },
                                            new FillFlowContainer
                                            {
                                                RelativeSizeAxes = Axes.X,
                                                AutoSizeAxes = Axes.Y,
                                                Direction = FillDirection.Vertical,
                                                LayoutDuration = transition_duration,
                                                Padding = contentPadding,
                                                Spacing = new Vector2(0f, 5f),
                                                Children = new Drawable[]
                                                {
                                                    new StatusColouredContainer(transition_duration)
                                                    {
                                                        AutoSizeAxes = Axes.Both,
                                                        Child = new StatusText
                                                        {
                                                            Font = OsuFont.GetFont(weight: FontWeight.Bold, size: 14),
                                                        }
                                                    },
                                                    beatmapTypeInfo = new BeatmapTypeInfo(),
                                                },
                                            },
                                        },
                                    },
                                    new Container
                                    {
                                        RelativeSizeAxes = Axes.X,
                                        AutoSizeAxes = Axes.Y,
                                        Padding = contentPadding,
                                        Children = new Drawable[]
                                        {
                                            participantInfo = new ParticipantInfo(),
                                        },
                                    },
                                },
                            },
                        },
                        new Drawable[]
                        {
                            // Remaining space: scrolling list of participants.
                            new MatchParticipants
                            {
                                RelativeSizeAxes = Axes.Both,
                            }
                        }
                    }
                }
            };

            // Refresh the inspector whenever the room status or selection changes.
            Status.BindValueChanged(_ => updateStatus(), true);
            RoomID.BindValueChanged(_ => updateStatus(), true);
        }

        protected override IReadOnlyDependencyContainer CreateChildDependencies(IReadOnlyDependencyContainer parent)
        {
            // Re-cache the local status bindable under Room.Status so nested
            // consumers (e.g. StatusText) resolve the inspector's own value.
            var dependencies = new DependencyContainer(base.CreateChildDependencies(parent));
            dependencies.CacheAs(status, new CacheInfo(nameof(Room.Status), typeof(Room)));
            return dependencies;
        }

        private
void updateStatus() { if (RoomID.Value == null) { status.Value = new RoomStatusNoneSelected(); participantCount.FadeOut(transition_duration); beatmapTypeInfo.FadeOut(transition_duration); name.FadeOut(transition_duration); participantInfo.FadeOut(transition_duration); } else { status.Value = Status.Value; participantCount.FadeIn(transition_duration); beatmapTypeInfo.FadeIn(transition_duration); name.FadeIn(transition_duration); participantInfo.FadeIn(transition_duration); } } private class RoomStatusNoneSelected : RoomStatus { public override string Message => @"No Room Selected"; public override Color4 GetAppropriateColour(OsuColour colours) => colours.Gray8; } private class StatusText : OsuSpriteText { [Resolved(typeof(Room), nameof(Room.Status))] private Bindable<RoomStatus> status { get; set; } [BackgroundDependencyLoader] private void load() { status.BindValueChanged(s => Text = s.NewValue.Message, true); } } private class MatchParticipants : MultiplayerComposite { private readonly FillFlowContainer fill; public MatchParticipants() { Padding = new MarginPadding { Horizontal = 10 }; InternalChild = new OsuScrollContainer { RelativeSizeAxes = Axes.Both, Child = fill = new FillFlowContainer { Spacing = new Vector2(10), RelativeSizeAxes = Axes.X, AutoSizeAxes = Axes.Y, Direction = FillDirection.Full, } }; } [BackgroundDependencyLoader] private void load() { RoomID.BindValueChanged(_ => updateParticipants(), true); } [Resolved] private IAPIProvider api { get; set; } private GetRoomScoresRequest request; private void updateParticipants() { var roomId = RoomID.Value ?? 
0; request?.Cancel(); // nice little progressive fade int time = 500; foreach (var c in fill.Children) { c.Delay(500 - time).FadeOut(time, Easing.Out); time = Math.Max(20, time - 20); c.Expire(); } if (roomId == 0) return; request = new GetRoomScoresRequest(roomId); request.Success += scores => { if (roomId != RoomID.Value) return; fill.Clear(); foreach (var s in scores) fill.Add(new UserTile(s.User)); fill.FadeInFromZero(1000, Easing.OutQuint); }; api.Queue(request); } protected override void Dispose(bool isDisposing) { request?.Cancel(); base.Dispose(isDisposing); } private class UserTile : CompositeDrawable, IHasTooltip { private readonly User user; public string TooltipText => user.Username; public UserTile(User user) { this.user = user; Size = new Vector2(70f); CornerRadius = 5f; Masking = true; InternalChildren = new Drawable[] { new Box { RelativeSizeAxes = Axes.Both, Colour = OsuColour.FromHex(@"27252d"), }, new UpdateableAvatar { RelativeSizeAxes = Axes.Both, User = user, }, }; } } } } }
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Diagnostics;

namespace System.Collections.Generic
{
    // The SortedDictionary class implements a generic sorted list of keys
    // and values. Entries in a sorted list are sorted by their keys and
    // are accessible both by key and by index. The keys of a sorted dictionary
    // can be ordered either according to a specific IComparer
    // implementation given when the sorted dictionary is instantiated, or
    // according to the IComparable implementation provided by the keys
    // themselves. In either case, a sorted dictionary does not allow entries
    // with duplicate or null keys.
    //
    // A sorted list internally maintains two arrays that store the keys and
    // values of the entries. The capacity of a sorted list is the allocated
    // length of these internal arrays. As elements are added to a sorted list, the
    // capacity of the sorted list is automatically increased as required by
    // reallocating the internal arrays. The capacity is never automatically
    // decreased, but users can call either TrimExcess or
    // Capacity explicitly.
    //
    // The GetKeyList and GetValueList methods of a sorted list
    // provides access to the keys and values of the sorted list in the form of
    // List implementations. The List objects returned by these
    // methods are aliases for the underlying sorted list, so modifications
    // made to those lists are directly reflected in the sorted list, and vice
    // versa.
    //
    // The SortedList class provides a convenient way to create a sorted
    // copy of another dictionary, such as a Hashtable. For example:
    //
    // Hashtable h = new Hashtable();
    // h.Add(...);
    // h.Add(...);
    // ...
    // SortedList s = new SortedList(h);
    //
    // The last line above creates a sorted list that contains a copy of the keys
    // and values stored in the hashtable. In this particular example, the keys
    // will be ordered according to the IComparable interface, which they
    // all must implement. To impose a different ordering, SortedList also
    // has a constructor that allows a specific IComparer implementation to
    // be specified.
    //
    [DebuggerTypeProxy(typeof(IDictionaryDebugView<,>))]
    [DebuggerDisplay("Count = {Count}")]
    public class SortedList<TKey, TValue> : IDictionary<TKey, TValue>, System.Collections.IDictionary, IReadOnlyDictionary<TKey, TValue>
    {
        // Parallel arrays: _keys is kept sorted per _comparer; _values[i] pairs with _keys[i].
        private TKey[] _keys;
        private TValue[] _values;
        private int _size;
        // Bumped on every mutation; enumerators snapshot it to detect concurrent modification.
        private int _version;
        private IComparer<TKey> _comparer;
        // Lazily-created aliasing views over the key/value arrays.
        private KeyList _keyList;
        private ValueList _valueList;
        private Object _syncRoot;

        private const int DefaultCapacity = 4;

        // Constructs a new sorted list. The sorted list is initially empty and has
        // a capacity of zero. Upon adding the first element to the sorted list the
        // capacity is increased to DefaultCapacity, and then increased in multiples of two as
        // required. The elements of the sorted list are ordered according to the
        // IComparable interface, which must be implemented by the keys of
        // all entries added to the sorted list.
        public SortedList()
        {
            _keys = Array.Empty<TKey>();
            _values = Array.Empty<TValue>();
            _size = 0;
            _comparer = Comparer<TKey>.Default;
        }

        // Constructs a new sorted list. The sorted list is initially empty and has
        // a capacity of zero. Upon adding the first element to the sorted list the
        // capacity is increased to 16, and then increased in multiples of two as
        // required. The elements of the sorted list are ordered according to the
        // IComparable interface, which must be implemented by the keys of
        // all entries added to the sorted list.
        //
        public SortedList(int capacity)
        {
            if (capacity < 0)
                throw new ArgumentOutOfRangeException("capacity", SR.ArgumentOutOfRange_NeedNonNegNumRequired);
            _keys = new TKey[capacity];
            _values = new TValue[capacity];
            _comparer = Comparer<TKey>.Default;
        }

        // Constructs a new sorted list with a given IComparer
        // implementation. The sorted list is initially empty and has a capacity of
        // zero. Upon adding the first element to the sorted list the capacity is
        // increased to 16, and then increased in multiples of two as required. The
        // elements of the sorted list are ordered according to the given
        // IComparer implementation. If comparer is null, the
        // elements are compared to each other using the IComparable
        // interface, which in that case must be implemented by the keys of all
        // entries added to the sorted list.
        //
        public SortedList(IComparer<TKey> comparer)
            : this()
        {
            if (comparer != null)
            {
                _comparer = comparer;
            }
        }

        // Constructs a new sorted dictionary with a given IComparer
        // implementation and a given initial capacity. The sorted list is
        // initially empty, but will have room for the given number of elements
        // before any reallocations are required. The elements of the sorted list
        // are ordered according to the given IComparer implementation. If
        // comparer is null, the elements are compared to each other using
        // the IComparable interface, which in that case must be implemented
        // by the keys of all entries added to the sorted list.
        //
        public SortedList(int capacity, IComparer<TKey> comparer)
            : this(comparer)
        {
            Capacity = capacity;
        }

        // Constructs a new sorted list containing a copy of the entries in the
        // given dictionary. The elements of the sorted list are ordered according
        // to the IComparable interface, which must be implemented by the
        // keys of all entries in the the given dictionary as well as keys
        // subsequently added to the sorted list.
        //
        public SortedList(IDictionary<TKey, TValue> dictionary)
            : this(dictionary, null)
        {
        }

        // Constructs a new sorted list containing a copy of the entries in the
        // given dictionary. The elements of the sorted list are ordered according
        // to the given IComparer implementation. If comparer is
        // null, the elements are compared to each other using the
        // IComparable interface, which in that case must be implemented
        // by the keys of all entries in the the given dictionary as well as keys
        // subsequently added to the sorted list.
        //
        public SortedList(IDictionary<TKey, TValue> dictionary, IComparer<TKey> comparer)
            : this((dictionary != null ? dictionary.Count : 0), comparer)
        {
            if (dictionary == null)
                throw new ArgumentNullException("dictionary");

            dictionary.Keys.CopyTo(_keys, 0);
            dictionary.Values.CopyTo(_values, 0);
            // Sort both arrays together, keyed on _keys.
            Array.Sort<TKey, TValue>(_keys, _values, comparer);
            _size = dictionary.Count;
        }

        // Adds an entry with the given key and value to this sorted list. An
        // ArgumentException is thrown if the key is already present in the sorted list.
        //
        public void Add(TKey key, TValue value)
        {
            if (key == null) throw new ArgumentNullException("key");
            int i = Array.BinarySearch<TKey>(_keys, 0, _size, key, _comparer);
            if (i >= 0)
                throw new ArgumentException(SR.Format(SR.Argument_AddingDuplicate, key));
            // ~i is the insertion point that keeps the array sorted.
            Insert(~i, key, value);
        }

        void ICollection<KeyValuePair<TKey, TValue>>.Add(KeyValuePair<TKey, TValue> keyValuePair)
        {
            Add(keyValuePair.Key, keyValuePair.Value);
        }

        bool ICollection<KeyValuePair<TKey, TValue>>.Contains(KeyValuePair<TKey, TValue> keyValuePair)
        {
            int index = IndexOfKey(keyValuePair.Key);
            if (index >= 0 && EqualityComparer<TValue>.Default.Equals(_values[index], keyValuePair.Value))
            {
                return true;
            }
            return false;
        }

        bool ICollection<KeyValuePair<TKey, TValue>>.Remove(KeyValuePair<TKey, TValue> keyValuePair)
        {
            int index = IndexOfKey(keyValuePair.Key);
            // Only remove when both key and value match the supplied pair.
            if (index >= 0 && EqualityComparer<TValue>.Default.Equals(_values[index], keyValuePair.Value))
            {
                RemoveAt(index);
                return true;
            }
            return false;
        }

        // Returns the capacity of this sorted list. The capacity of a sorted list
        // represents the allocated length of the internal arrays used to store the
        // keys and values of the list, and thus also indicates the maximum number
        // of entries the list can contain before a reallocation of the internal
        // arrays is required.
        //
        public int Capacity
        {
            get
            {
                return _keys.Length;
            }
            set
            {
                if (value != _keys.Length)
                {
                    if (value < _size)
                    {
                        throw new ArgumentOutOfRangeException("value", SR.ArgumentOutOfRange_SmallCapacity);
                    }

                    if (value > 0)
                    {
                        TKey[] newKeys = new TKey[value];
                        TValue[] newValues = new TValue[value];
                        if (_size > 0)
                        {
                            Array.Copy(_keys, 0, newKeys, 0, _size);
                            Array.Copy(_values, 0, newValues, 0, _size);
                        }
                        _keys = newKeys;
                        _values = newValues;
                    }
                    else
                    {
                        _keys = Array.Empty<TKey>();
                        _values = Array.Empty<TValue>();
                    }
                }
            }
        }

        public IComparer<TKey> Comparer
        {
            get
            {
                return _comparer;
            }
        }

        // Non-generic IDictionary entry point: validates runtime types before
        // forwarding to the strongly-typed Add.
        void System.Collections.IDictionary.Add(Object key, Object value)
        {
            if (key == null)
            {
                throw new ArgumentNullException("key");
            }

            // null value is only legal when TValue is a nullable type.
            if (value == null && !(default(TValue) == null))
                throw new ArgumentNullException("value");

            try
            {
                TKey tempKey = (TKey)key;

                try
                {
                    Add(tempKey, (TValue)value);
                }
                catch (InvalidCastException)
                {
                    throw new ArgumentException(SR.Format(SR.Arg_WrongType, value, typeof(TValue)), "value");
                }
            }
            catch (InvalidCastException)
            {
                throw new ArgumentException(SR.Format(SR.Arg_WrongType, key, typeof(TKey)), "key");
            }
        }

        // Returns the number of entries in this sorted list.
        //
        public int Count
        {
            get
            {
                return _size;
            }
        }

        // Returns a collection representing the keys of this sorted list. This
        // method returns the same object as GetKeyList, but typed as an
        // ICollection instead of an IList.
        //
        public IList<TKey> Keys
        {
            get
            {
                return GetKeyListHelper();
            }
        }

        ICollection<TKey> IDictionary<TKey, TValue>.Keys
        {
            get
            {
                return GetKeyListHelper();
            }
        }

        System.Collections.ICollection System.Collections.IDictionary.Keys
        {
            get
            {
                return GetKeyListHelper();
            }
        }

        IEnumerable<TKey> IReadOnlyDictionary<TKey, TValue>.Keys
        {
            get
            {
                return GetKeyListHelper();
            }
        }

        // Returns a collection representing the values of this sorted list. This
        // method returns the same object as GetValueList, but typed as an
        // ICollection instead of an IList.
        //
        public IList<TValue> Values
        {
            get
            {
                return GetValueListHelper();
            }
        }

        ICollection<TValue> IDictionary<TKey, TValue>.Values
        {
            get
            {
                return GetValueListHelper();
            }
        }

        System.Collections.ICollection System.Collections.IDictionary.Values
        {
            get
            {
                return GetValueListHelper();
            }
        }

        IEnumerable<TValue> IReadOnlyDictionary<TKey, TValue>.Values
        {
            get
            {
                return GetValueListHelper();
            }
        }

        // Lazily creates (and caches) the aliasing key view.
        private KeyList GetKeyListHelper()
        {
            if (_keyList == null)
                _keyList = new KeyList(this);
            return _keyList;
        }

        // Lazily creates (and caches) the aliasing value view.
        private ValueList GetValueListHelper()
        {
            if (_valueList == null)
                _valueList = new ValueList(this);
            return _valueList;
        }

        bool ICollection<KeyValuePair<TKey, TValue>>.IsReadOnly
        {
            get
            {
                return false;
            }
        }

        bool System.Collections.IDictionary.IsReadOnly
        {
            get
            {
                return false;
            }
        }

        bool System.Collections.IDictionary.IsFixedSize
        {
            get
            {
                return false;
            }
        }

        bool System.Collections.ICollection.IsSynchronized
        {
            get
            {
                return false;
            }
        }

        // Synchronization root for this object.
        Object System.Collections.ICollection.SyncRoot
        {
            get
            {
                if (_syncRoot == null)
                {
                    // Lazily allocate; CompareExchange guarantees a single instance
                    // even under concurrent first access.
                    System.Threading.Interlocked.CompareExchange(ref _syncRoot, new Object(), null);
                }
                return _syncRoot;
            }
        }

        // Removes all entries from this sorted list.
        public void Clear()
        {
            // clear does not change the capacity
            _version++;
            // Don't need to doc this but we clear the elements so that the gc can reclaim the references.
            Array.Clear(_keys, 0, _size);
            Array.Clear(_values, 0, _size);
            _size = 0;
        }

        bool System.Collections.IDictionary.Contains(Object key)
        {
            if (IsCompatibleKey(key))
            {
                return ContainsKey((TKey)key);
            }
            return false;
        }

        // Checks if this sorted list contains an entry with the given key.
        //
        public bool ContainsKey(TKey key)
        {
            return IndexOfKey(key) >= 0;
        }

        // Checks if this sorted list contains an entry with the given value. The
        // values of the entries of the sorted list are compared to the given value
        // using the Object.Equals method. This method performs a linear
        // search and is substantially slower than the Contains
        // method.
        //
        public bool ContainsValue(TValue value)
        {
            return IndexOfValue(value) >= 0;
        }

        // Copies the values in this SortedList to an array.
        void ICollection<KeyValuePair<TKey, TValue>>.CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex)
        {
            if (array == null)
            {
                throw new ArgumentNullException("array");
            }

            if (arrayIndex < 0 || arrayIndex > array.Length)
            {
                throw new ArgumentOutOfRangeException("arrayIndex", SR.ArgumentOutOfRange_NeedNonNegNum);
            }

            if (array.Length - arrayIndex < Count)
            {
                throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
            }

            for (int i = 0; i < Count; i++)
            {
                KeyValuePair<TKey, TValue> entry = new KeyValuePair<TKey, TValue>(_keys[i], _values[i]);
                array[arrayIndex + i] = entry;
            }
        }

        // Non-generic copy: supports KeyValuePair[] directly, or falls back to object[].
        void System.Collections.ICollection.CopyTo(Array array, int arrayIndex)
        {
            if (array == null)
            {
                throw new ArgumentNullException("array");
            }

            if (array.Rank != 1)
            {
                throw new ArgumentException(SR.Arg_RankMultiDimNotSupported);
            }

            if (array.GetLowerBound(0) != 0)
            {
                throw new ArgumentException(SR.Arg_NonZeroLowerBound);
            }

            if (arrayIndex < 0 || arrayIndex > array.Length)
            {
                throw new ArgumentOutOfRangeException("arrayIndex", SR.ArgumentOutOfRange_NeedNonNegNum);
            }

            if (array.Length - arrayIndex < Count)
            {
                throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall);
            }

            KeyValuePair<TKey, TValue>[] keyValuePairArray = array as KeyValuePair<TKey, TValue>[];
            if (keyValuePairArray != null)
            {
                for (int i = 0; i < Count; i++)
                {
                    keyValuePairArray[i + arrayIndex] = new KeyValuePair<TKey, TValue>(_keys[i], _values[i]);
                }
            }
            else
            {
                object[] objects = array as object[];
                if (objects == null)
                {
                    throw new ArgumentException(SR.Argument_InvalidArrayType);
                }

                try
                {
                    for (int i = 0; i < Count; i++)
                    {
                        objects[i + arrayIndex] = new KeyValuePair<TKey, TValue>(_keys[i], _values[i]);
                    }
                }
                catch (ArrayTypeMismatchException)
                {
                    throw new ArgumentException(SR.Argument_InvalidArrayType);
                }
            }
        }

        private const int MaxArrayLength = 0X7FEFFFFF;

        // Ensures that the capacity of this sorted list is at least the given
        // minimum value. If the currect capacity of the list is less than
        // min, the capacity is increased to twice the current capacity or
        // to min, whichever is larger.
        private void EnsureCapacity(int min)
        {
            int newCapacity = _keys.Length == 0 ? DefaultCapacity : _keys.Length * 2;
            // Allow the list to grow to maximum possible capacity (~2G elements) before encountering overflow.
            // Note that this check works even when _items.Length overflowed thanks to the (uint) cast
            if ((uint)newCapacity > MaxArrayLength) newCapacity = MaxArrayLength;
            if (newCapacity < min) newCapacity = min;
            Capacity = newCapacity;
        }

        // Returns the value of the entry at the given index.
        //
        private TValue GetByIndex(int index)
        {
            if (index < 0 || index >= _size)
                throw new ArgumentOutOfRangeException("index", SR.ArgumentOutOfRange_Index);
            return _values[index];
        }

        public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
        {
            return new Enumerator(this, Enumerator.KeyValuePair);
        }

        IEnumerator<KeyValuePair<TKey, TValue>> IEnumerable<KeyValuePair<TKey, TValue>>.GetEnumerator()
        {
            return new Enumerator(this, Enumerator.KeyValuePair);
        }

        System.Collections.IDictionaryEnumerator System.Collections.IDictionary.GetEnumerator()
        {
            return new Enumerator(this, Enumerator.DictEntry);
        }

        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
        {
            return new Enumerator(this, Enumerator.KeyValuePair);
        }

        // Returns the key of the entry at the given index.
        //
        private TKey GetKey(int index)
        {
            if (index < 0 || index >= _size)
                throw new ArgumentOutOfRangeException("index", SR.ArgumentOutOfRange_Index);
            return _keys[index];
        }

        // Returns the value associated with the given key. If an entry with the
        // given key is not found, the returned value is null.
        //
        public TValue this[TKey key]
        {
            get
            {
                int i = IndexOfKey(key);
                if (i >= 0)
                    return _values[i];

                throw new KeyNotFoundException();
                // return default(TValue);
            }
            set
            {
                if (((Object)key) == null) throw new ArgumentNullException("key");
                int i = Array.BinarySearch<TKey>(_keys, 0, _size, key, _comparer);
                if (i >= 0)
                {
                    // Key exists: overwrite in place.
                    _values[i] = value;
                    _version++;
                    return;
                }
                // Key absent: insert at the sorted position.
                Insert(~i, key, value);
            }
        }

        Object System.Collections.IDictionary.this[Object key]
        {
            get
            {
                if (IsCompatibleKey(key))
                {
                    int i = IndexOfKey((TKey)key);
                    if (i >= 0)
                    {
                        return _values[i];
                    }
                }

                return null;
            }
            set
            {
                if (!IsCompatibleKey(key))
                {
                    throw new ArgumentNullException("key");
                }

                if (value == null && !(default(TValue) == null))
                    throw new ArgumentNullException("value");

                try
                {
                    TKey tempKey = (TKey)key;
                    try
                    {
                        this[tempKey] = (TValue)value;
                    }
                    catch (InvalidCastException)
                    {
                        throw new ArgumentException(SR.Format(SR.Arg_WrongType, value, typeof(TValue)), "value");
                    }
                }
                catch (InvalidCastException)
                {
                    throw new ArgumentException(SR.Format(SR.Arg_WrongType, key, typeof(TKey)), "key");
                }
            }
        }

        // Returns the index of the entry with a given key in this sorted list. The
        // key is located through a binary search, and thus the average execution
        // time of this method is proportional to Log2(size), where
        // size is the size of this sorted list. The returned value is -1 if
        // the given key does not occur in this sorted list. Null is an invalid
        // key value.
        //
        public int IndexOfKey(TKey key)
        {
            if (key == null)
                throw new ArgumentNullException("key");

            int ret = Array.BinarySearch<TKey>(_keys, 0, _size, key, _comparer);
            return ret >= 0 ? ret : -1;
        }

        // Returns the index of the first occurrence of an entry with a given value
        // in this sorted list. The entry is located through a linear search, and
        // thus the average execution time of this method is proportional to the
        // size of this sorted list. The elements of the list are compared to the
        // given value using the Object.Equals method.
        //
        public int IndexOfValue(TValue value)
        {
            return Array.IndexOf(_values, value, 0, _size);
        }

        // Inserts an entry with a given key and value at a given index.
        private void Insert(int index, TKey key, TValue value)
        {
            if (_size == _keys.Length) EnsureCapacity(_size + 1);
            if (index < _size)
            {
                // Shift the tail right by one to open a slot at index.
                Array.Copy(_keys, index, _keys, index + 1, _size - index);
                Array.Copy(_values, index, _values, index + 1, _size - index);
            }
            _keys[index] = key;
            _values[index] = value;
            _size++;
            _version++;
        }

        public bool TryGetValue(TKey key, out TValue value)
        {
            int i = IndexOfKey(key);
            if (i >= 0)
            {
                value = _values[i];
                return true;
            }

            value = default(TValue);
            return false;
        }

        // Removes the entry at the given index. The size of the sorted list is
        // decreased by one.
        //
        public void RemoveAt(int index)
        {
            if (index < 0 || index >= _size)
                throw new ArgumentOutOfRangeException("index", SR.ArgumentOutOfRange_Index);
            _size--;
            if (index < _size)
            {
                Array.Copy(_keys, index + 1, _keys, index, _size - index);
                Array.Copy(_values, index + 1, _values, index, _size - index);
            }
            // Clear the vacated slot so the GC can reclaim the references.
            _keys[_size] = default(TKey);
            _values[_size] = default(TValue);
            _version++;
        }

        // Removes an entry from this sorted list. If an entry with the specified
        // key exists in the sorted list, it is removed. An ArgumentException is
        // thrown if the key is null.
        //
        public bool Remove(TKey key)
        {
            int i = IndexOfKey(key);
            if (i >= 0)
                RemoveAt(i);
            return i >= 0;
        }

        void System.Collections.IDictionary.Remove(Object key)
        {
            if (IsCompatibleKey(key))
            {
                Remove((TKey)key);
            }
        }

        // Sets the capacity of this sorted list to the size of the sorted list.
        // This method can be used to minimize a sorted list's memory overhead once
        // it is known that no new elements will be added to the sorted list. To
        // completely clear a sorted list and release all memory referenced by the
        // sorted list, execute the following statements:
        //
        // SortedList.Clear();
        // SortedList.TrimExcess();
        //
        public void TrimExcess()
        {
            // Only shrink when usage has dropped below 90% of capacity,
            // to avoid churn for marginal savings.
            int threshold = (int)(((double)_keys.Length) * 0.9);
            if (_size < threshold)
            {
                Capacity = _size;
            }
        }

        // Throws on null; returns whether the boxed key is actually a TKey.
        private static bool IsCompatibleKey(object key)
        {
            if (key == null)
            {
                throw new ArgumentNullException("key");
            }

            return (key is TKey);
        }

        /// <include file='doc\SortedList.uex' path='docs/doc[@for="SortedListEnumerator"]/*' />
        private struct Enumerator : IEnumerator<KeyValuePair<TKey, TValue>>, System.Collections.IDictionaryEnumerator
        {
            private SortedList<TKey, TValue> _sortedList;
            private TKey _key;
            private TValue _value;
            // 0 = before first element; Count + 1 = after last element.
            private int _index;
            private int _version;
            private int _getEnumeratorRetType;  // What should Enumerator.Current return?

            internal const int KeyValuePair = 1;
            internal const int DictEntry = 2;

            internal Enumerator(SortedList<TKey, TValue> sortedList, int getEnumeratorRetType)
            {
                _sortedList = sortedList;
                _index = 0;
                _version = _sortedList._version;
                _getEnumeratorRetType = getEnumeratorRetType;
                _key = default(TKey);
                _value = default(TValue);
            }

            public void Dispose()
            {
                _index = 0;
                _key = default(TKey);
                _value = default(TValue);
            }

            Object System.Collections.IDictionaryEnumerator.Key
            {
                get
                {
                    if (_index == 0 || (_index == _sortedList.Count + 1))
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                    }

                    return _key;
                }
            }

            public bool MoveNext()
            {
                // Fail fast if the list was mutated since this enumerator was created.
                if (_version != _sortedList._version) throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);

                if ((uint)_index < (uint)_sortedList.Count)
                {
                    _key = _sortedList._keys[_index];
                    _value = _sortedList._values[_index];
                    _index++;
                    return true;
                }

                _index = _sortedList.Count + 1;
                _key = default(TKey);
                _value = default(TValue);
                return false;
            }

            DictionaryEntry System.Collections.IDictionaryEnumerator.Entry
            {
                get
                {
                    if (_index == 0 || (_index == _sortedList.Count + 1))
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                    }

                    return new DictionaryEntry(_key, _value);
                }
            }

            public KeyValuePair<TKey, TValue> Current
            {
                get
                {
                    return new KeyValuePair<TKey, TValue>(_key, _value);
                }
            }

            Object System.Collections.IEnumerator.Current
            {
                get
                {
                    if (_index == 0 || (_index == _sortedList.Count + 1))
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                    }

                    if (_getEnumeratorRetType == DictEntry)
                    {
                        return new System.Collections.DictionaryEntry(_key, _value);
                    }
                    else
                    {
                        return new KeyValuePair<TKey, TValue>(_key, _value);
                    }
                }
            }

            Object System.Collections.IDictionaryEnumerator.Value
            {
                get
                {
                    if (_index == 0 || (_index == _sortedList.Count + 1))
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                    }

                    return _value;
                }
            }

            void System.Collections.IEnumerator.Reset()
            {
                if (_version != _sortedList._version)
                {
                    throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
                }

                _index = 0;
                _key = default(TKey);
                _value = default(TValue);
            }
        }

        // Enumerates only the keys, in sorted order; version-checked like Enumerator.
        private sealed class SortedListKeyEnumerator : IEnumerator<TKey>, System.Collections.IEnumerator
        {
            private SortedList<TKey, TValue> _sortedList;
            private int _index;
            private int _version;
            private TKey _currentKey;

            internal SortedListKeyEnumerator(SortedList<TKey, TValue> sortedList)
            {
                _sortedList = sortedList;
                _version = sortedList._version;
            }

            public void Dispose()
            {
                _index = 0;
                _currentKey = default(TKey);
            }

            public bool MoveNext()
            {
                if (_version != _sortedList._version)
                {
                    throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
                }

                if ((uint)_index < (uint)_sortedList.Count)
                {
                    _currentKey = _sortedList._keys[_index];
                    _index++;
                    return true;
                }

                _index = _sortedList.Count + 1;
                _currentKey = default(TKey);
                return false;
            }

            public TKey Current
            {
                get
                {
                    return _currentKey;
                }
            }

            Object System.Collections.IEnumerator.Current
            {
                get
                {
                    if (_index == 0 || (_index == _sortedList.Count + 1))
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                    }

                    return _currentKey;
                }
            }

            void System.Collections.IEnumerator.Reset()
            {
                if (_version != _sortedList._version)
                {
                    throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
                }
                _index = 0;
                _currentKey = default(TKey);
            }
        }

        // Enumerates only the values, in key-sorted order; version-checked like Enumerator.
        private sealed class SortedListValueEnumerator : IEnumerator<TValue>, System.Collections.IEnumerator
        {
            private SortedList<TKey, TValue> _sortedList;
            private int _index;
            private int _version;
            private TValue _currentValue;

            internal SortedListValueEnumerator(SortedList<TKey, TValue> sortedList)
            {
                _sortedList = sortedList;
                _version = sortedList._version;
            }

            public void Dispose()
            {
                _index = 0;
                _currentValue = default(TValue);
            }

            public bool MoveNext()
            {
                if (_version != _sortedList._version)
                {
                    throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
                }

                if ((uint)_index < (uint)_sortedList.Count)
                {
                    _currentValue = _sortedList._values[_index];
                    _index++;
                    return true;
                }

                _index = _sortedList.Count + 1;
                _currentValue = default(TValue);
                return false;
            }

            public TValue Current
            {
                get
                {
                    return _currentValue;
                }
            }

            Object System.Collections.IEnumerator.Current
            {
                get
                {
                    if (_index == 0 || (_index == _sortedList.Count + 1))
                    {
                        throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
                    }

                    return _currentValue;
                }
            }

            void System.Collections.IEnumerator.Reset()
            {
                if (_version != _sortedList._version)
                {
                    throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
                }
                _index = 0;
                _currentValue = default(TValue);
            }
        }

        // Read-only IList<TKey> view aliasing the owning list's key array.
        // Mutating members throw NotSupportedException.
        [DebuggerTypeProxy(typeof(DictionaryKeyCollectionDebugView<,>))]
        [DebuggerDisplay("Count = {Count}")]
        private sealed class KeyList : IList<TKey>, System.Collections.ICollection
        {
            private SortedList<TKey, TValue> _dict;

            internal KeyList(SortedList<TKey, TValue> dictionary)
            {
                _dict = dictionary;
            }

            public int Count
            {
                get
                {
                    return _dict._size;
                }
            }

            public bool IsReadOnly
            {
                get
                {
                    return true;
                }
            }

            bool System.Collections.ICollection.IsSynchronized
            {
                get
                {
                    return false;
                }
            }

            Object System.Collections.ICollection.SyncRoot
            {
                get
                {
                    // Share the owning list's sync root.
                    return ((ICollection)_dict).SyncRoot;
                }
            }

            public void Add(TKey key)
            {
                throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
            }

            public void Clear()
            {
                throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
            }

            public bool Contains(TKey key)
            {
                return _dict.ContainsKey(key);
            }

            public void CopyTo(TKey[] array, int arrayIndex)
            {
                // defer error checking to Array.Copy
                Array.Copy(_dict._keys, 0, array, arrayIndex, _dict.Count);
            }

            void System.Collections.ICollection.CopyTo(Array array, int arrayIndex)
            {
                if (array != null && array.Rank != 1)
                    throw new ArgumentException(SR.Arg_RankMultiDimNotSupported);

                try
                {
                    // defer error checking to Array.Copy
                    Array.Copy(_dict._keys, 0, array, arrayIndex, _dict.Count);
                }
                catch (ArrayTypeMismatchException)
                {
                    throw new ArgumentException(SR.Argument_InvalidArrayType);
                }
            }

            public void Insert(int index, TKey value)
            {
                throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
            }

            public TKey this[int index]
            {
                get
                {
                    return _dict.GetKey(index);
                }
                set
                {
                    throw new NotSupportedException(SR.NotSupported_KeyCollectionSet);
                }
            }

            public IEnumerator<TKey> GetEnumerator()
            {
                return new SortedListKeyEnumerator(_dict);
            }

            System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
            {
                return new SortedListKeyEnumerator(_dict);
            }

            public int IndexOf(TKey key)
            {
                if (((Object)key) == null)
                    throw new ArgumentNullException("key");

                // Keys are sorted, so a binary search suffices.
                int i = Array.BinarySearch<TKey>(_dict._keys, 0, _dict.Count, key, _dict._comparer);
                if (i >= 0) return i;
                return -1;
            }

            public bool Remove(TKey key)
            {
                throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
                // return false;
            }

            public void RemoveAt(int index)
            {
                throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
            }
        }

        // Read-only IList<TValue> view aliasing the owning list's value array.
        // Mutating members throw NotSupportedException.
        [DebuggerTypeProxy(typeof(DictionaryValueCollectionDebugView<,>))]
        [DebuggerDisplay("Count = {Count}")]
        private sealed class ValueList : IList<TValue>, System.Collections.ICollection
        {
            private SortedList<TKey, TValue> _dict;

            internal ValueList(SortedList<TKey, TValue> dictionary)
            {
                _dict = dictionary;
            }

            public int Count
            {
                get
                {
                    return _dict._size;
                }
            }

            public bool IsReadOnly
            {
                get
                {
                    return true;
                }
            }

            bool System.Collections.ICollection.IsSynchronized
            {
                get
                {
                    return false;
                }
            }

            Object System.Collections.ICollection.SyncRoot
            {
                get
                {
                    // Share the owning list's sync root.
                    return ((ICollection)_dict).SyncRoot;
                }
            }

            public void Add(TValue key)
            {
                throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
            }

            public void Clear()
            {
                throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
            }

            public bool Contains(TValue value)
            {
                return _dict.ContainsValue(value);
            }

            public void CopyTo(TValue[] array, int arrayIndex)
            {
                // defer error checking to Array.Copy
                Array.Copy(_dict._values, 0, array, arrayIndex, _dict.Count);
            }

            void System.Collections.ICollection.CopyTo(Array array, int arrayIndex)
            {
                if (array != null && array.Rank != 1)
                    throw new ArgumentException(SR.Arg_RankMultiDimNotSupported);

                try
                {
                    // defer error checking to Array.Copy
                    Array.Copy(_dict._values, 0, array, arrayIndex, _dict.Count);
                }
                catch (ArrayTypeMismatchException)
                {
                    throw new ArgumentException(SR.Argument_InvalidArrayType);
                }
            }

            public void Insert(int index, TValue value)
            {
                throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
            }

            public TValue this[int index]
            {
                get
                {
                    return _dict.GetByIndex(index);
                }
                set
                {
                    throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
                }
            }

            public IEnumerator<TValue> GetEnumerator()
            {
                return new SortedListValueEnumerator(_dict);
            }

            System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
            {
                return new SortedListValueEnumerator(_dict);
            }

            public int IndexOf(TValue value)
            {
                // Values are not sorted; linear search is required.
                return Array.IndexOf(_dict._values, value, 0, _dict.Count);
            }

            public bool Remove(TValue value)
            {
                throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
                // return false;
            }

            public void RemoveAt(int index)
            {
                throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite);
            }
        }
    }
}
using Microsoft.Xna.Framework;

namespace MenuBuddy
{
	/// <summary>
	/// A layout holding a flat list of screen items at absolute positions.
	/// The caller is responsible for placing each item; the layout only shifts
	/// its children when its own rectangle (position, size, or alignment) changes.
	/// </summary>
	public class AbsoluteLayout : Layout, IScalable
	{
		#region Properties

		/// <summary>Backing store for <see cref="Size"/>.</summary>
		protected Vector2 _size;

		/// <summary>
		/// Snapshot of the layout rectangle taken just before a change,
		/// used to compute the delta applied to each child item.
		/// </summary>
		private Rectangle PreviousRect { get; set; }

		public virtual Vector2 Size
		{
			get => _size;
			set
			{
				//remember the old rect, apply the new size, then shift the children
				SetPrevRect();
				_size = value;
				UpdateItems();
			}
		}

		public override Point Position
		{
			get => base.Position;
			set
			{
				SetPrevRect();
				base.Position = value;
				UpdateItems();
			}
		}

		public override HorizontalAlignment Horizontal
		{
			get => base.Horizontal;
			set
			{
				//no-op when the alignment is unchanged
				if (base.Horizontal == value)
				{
					return;
				}
				SetPrevRect();
				base.Horizontal = value;
				UpdateItems();
			}
		}

		public override VerticalAlignment Vertical
		{
			get => base.Vertical;
			set
			{
				//no-op when the alignment is unchanged
				if (base.Vertical == value)
				{
					return;
				}
				SetPrevRect();
				base.Vertical = value;
				UpdateItems();
			}
		}

		public override float Scale
		{
			get => base.Scale;
			set => base.Scale = value;
		}

		public override Rectangle Rect => CalculateRect();

		#endregion //Properties

		#region Init

		public AbsoluteLayout()
		{
		}

		/// <summary>Copy constructor: clones the size and the cached previous rect.</summary>
		public AbsoluteLayout(AbsoluteLayout inst) : base(inst)
		{
			_size = new Vector2(inst._size.X, inst._size.Y);
			PreviousRect = inst.PreviousRect;
		}

		public override IScreenItem DeepCopy()
		{
			return new AbsoluteLayout(this);
		}

		#endregion //Init

		#region Methods

		/// <summary>
		/// Offset the item by this layout's rectangle and store it.
		/// </summary>
		/// <param name="item">the item to add to this layout</param>
		public override void AddItem(IScreenItem item)
		{
			SetItemPosition(item, CalculateRect());

			//store the new item and keep the collection sorted
			Items.Add(item);
			Sort();
		}

		/// <summary>Remember the current rectangle before it is mutated.</summary>
		private void SetPrevRect()
		{
			PreviousRect = CalculateRect();
		}

		/// <summary>
		/// Compute the layout rectangle from Position, Size, and the alignment flags.
		/// </summary>
		protected virtual Rectangle CalculateRect()
		{
			var topLeft = Position;

			//shift left for center/right alignment (left alignment leaves X as-is)
			if (HorizontalAlignment.Center == Horizontal)
			{
				topLeft.X -= (int)(Size.X / 2f);
			}
			else if (HorizontalAlignment.Right == Horizontal)
			{
				topLeft.X -= (int)Size.X;
			}

			//shift up for center/bottom alignment (top alignment leaves Y as-is)
			if (VerticalAlignment.Center == Vertical)
			{
				topLeft.Y -= (int)(Size.Y / 2f);
			}
			else if (VerticalAlignment.Bottom == Vertical)
			{
				topLeft.Y -= (int)Size.Y;
			}

			return new Rectangle(topLeft.X, topLeft.Y, (int)Size.X, (int)Size.Y);
		}

		/// <summary>
		/// Re-position every child after the layout rectangle has changed.
		/// </summary>
		protected virtual void UpdateItems()
		{
			//grab the rect for this layout once, then shift each child by the same delta
			var currentRect = CalculateRect();
			foreach (var screenItem in Items)
			{
				UpdateItemPosition(screenItem, currentRect);
			}
		}

		/// <summary>
		/// Shift one child by the difference between the previous and current rectangles.
		/// </summary>
		/// <param name="item">the child item to move</param>
		/// <param name="rect">the layout's current rectangle</param>
		protected virtual void UpdateItemPosition(IScreenItem item, Rectangle rect)
		{
			//delta between the old and new top-left corners
			var delta = PreviousRect.Location - rect.Location;
			item.Position -= delta;
		}

		/// <summary>
		/// Make the item's position relative to this layout by adding the rect origin.
		/// </summary>
		/// <param name="item">the item being added</param>
		/// <param name="rect">the layout's current rectangle</param>
		protected virtual void SetItemPosition(IScreenItem item, Rectangle rect)
		{
			item.Position += rect.Location;
		}

		#endregion //Methods
	}
}
using Xunit;

namespace Common.Utilities;

// CPU architectures the docker images are built for.
public enum Architecture
{
    Arm64,
    Amd64
}

/// <summary>
/// Cake build-script extension methods for building, tagging, pushing,
/// and smoke-testing the project's docker images and manifests.
/// NOTE(review): relies on Cake.Docker addin types (DockerImageBuildSettings,
/// GenericDockerRunner, etc.) declared elsewhere in the build project.
/// </summary>
public static class DockerContextExtensions
{
    /// <summary>
    /// True when this arm64 image should be skipped: only arm64 images whose
    /// distro appears in Constants.DistrosToSkip are skipped; a log line is
    /// emitted for each skip.
    /// </summary>
    public static bool SkipArm64Image(this ICakeContext context, DockerImage dockerImage)
    {
        if (dockerImage.Architecture != Architecture.Arm64) return false;
        if (!Constants.DistrosToSkip.Contains(dockerImage.Distro)) return false;
        context.Information($"Skipping Target: {dockerImage.TargetFramework}, Distro: {dockerImage.Distro}, Arch: {dockerImage.Architecture}");
        return true;
    }

    /// <summary>
    /// Run "docker buildx build" for one image, tagging it with every tag from
    /// GetDockerTags. No-op when no version has been computed yet.
    /// </summary>
    public static void DockerBuildImage(this BuildContextBase context, DockerImage dockerImage)
    {
        if (context.Version == null) return;
        // DockerImage supports positional deconstruction; the last component is unused here.
        var (distro, targetFramework, arch, registry, _) = dockerImage;
        context.Information($"Building image: {dockerImage}");
        var workDir = Paths.Src.Combine("Docker");
        var tags = context.GetDockerTags(dockerImage, arch);
        var suffix = arch.ToSuffix();
        var platforms = new List<string> { $"linux/{suffix}" };
        var buildSettings = new DockerImageBuildSettings
        {
            Rm = true,
            Tag = tags.ToArray(),
            File = workDir.CombineWithFilePath("Dockerfile").FullPath,
            // forwarded to the Dockerfile as --build-arg values
            BuildArg = new[]
            {
                $"contentFolder=/content",
                $"REGISTRY={registry}",
                $"DOTNET_VERSION={targetFramework}",
                $"DISTRO={distro}",
                $"VERSION={context.Version.NugetVersion}"
            },
            Pull = true,
            Platform = string.Join(",", platforms),
        };
        // "--output type=docker" loads the buildx result into the local docker daemon
        context.DockerBuild(buildSettings, workDir.ToString(), "--output type=docker");
    }

    /// <summary>Push every architecture-suffixed tag of this image.</summary>
    public static void DockerPushImage(this BuildContextBase context, DockerImage dockerImage)
    {
        var tags = context.GetDockerTags(dockerImage, dockerImage.Architecture);
        foreach (var tag in tags)
        {
            context.DockerPush(tag);
        }
    }

    /// <summary>
    /// Create a multi-arch manifest per tag; includes the arm64 image unless
    /// skipArm64Image is set (see SkipArm64Image).
    /// </summary>
    public static void DockerCreateManifest(this BuildContextBase context, DockerImage dockerImage, bool skipArm64Image)
    {
        var manifestTags = context.GetDockerTags(dockerImage);
        foreach (var tag in manifestTags)
        {
            var manifestCreateSettings = new DockerManifestCreateSettings { Amend = true };
            var amd64Tag = $"{tag}-{Architecture.Amd64.ToSuffix()}";
            if (skipArm64Image)
            {
                context.DockerManifestCreate(manifestCreateSettings, tag, amd64Tag);
            }
            else
            {
                var arm64Tag = $"{tag}-{Architecture.Arm64.ToSuffix()}";
                context.DockerManifestCreate(manifestCreateSettings, tag, amd64Tag, arm64Tag);
            }
        }
    }

    /// <summary>Push each manifest; Purge removes the local manifest after pushing.</summary>
    public static void DockerPushManifest(this BuildContextBase context, DockerImage dockerImage)
    {
        var manifestTags = context.GetDockerTags(dockerImage);
        foreach (var tag in manifestTags)
        {
            context.DockerManifestPush(new DockerManifestPushSettings { Purge = true }, tag);
        }
    }

    /// <summary>Pull the "{distro}-sdk-{targetFramework}" tag for the image's platform.</summary>
    public static void DockerPullImage(this ICakeContext context, DockerImage dockerImage)
    {
        var tag = $"{dockerImage.DockerImageName()}:{dockerImage.Distro}-sdk-{dockerImage.TargetFramework}";
        var platform = $"linux/{dockerImage.Architecture.ToString().ToLower()}";
        context.DockerPull(new DockerImagePullSettings { Platform = platform }, tag);
    }

    /// <summary>
    /// Smoke-test every tag of the image by running it against /repo and
    /// asking for the FullSemver variable (checked inside DockerTestRun).
    /// </summary>
    public static void DockerTestImage(this BuildContextBase context, DockerImage dockerImage)
    {
        var tags = context.GetDockerTags(dockerImage, dockerImage.Architecture);
        foreach (var tag in tags)
        {
            context.DockerTestRun(tag, dockerImage.Architecture, "/repo", "/showvariable", "FullSemver");
        }
    }

    /// <summary>Run an arbitrary shell command (cmd) inside the sdk image to test an artifact.</summary>
    public static void DockerTestArtifact(this BuildContextBase context, DockerImage dockerImage, string cmd)
    {
        var tag = $"{dockerImage.DockerImageName()}:{dockerImage.Distro}-sdk-{dockerImage.TargetFramework}";
        context.DockerTestRun(tag, dockerImage.Architecture, "sh", cmd);
    }

    /// <summary>
    /// Low-level wrapper over "docker buildx build": quotes the build-context
    /// path when it is non-empty and not already quoted, then appends it after
    /// any extra args.
    /// </summary>
    private static void DockerBuild(
        this ICakeContext context, DockerImageBuildSettings settings, string path, params string[] args)
    {
        GenericDockerRunner<DockerImageBuildSettings> genericDockerRunner =
            new(context.FileSystem, context.Environment, context.ProcessRunner, context.Tools);
        string str;
        switch (string.IsNullOrEmpty(path))
        {
            case false:
            {
                string str2 = path.Trim();
                // add surrounding quotes unless the trimmed path is already quoted
                str = str2.Length <= 1 || !str2.StartsWith("\"") || !str2.EndsWith("\"") ? "\"" + path + "\"" : path;
                break;
            }
            default:
                str = path;
                break;
        }
        var additional = args.Concat(new[] { str }).ToArray();
        genericDockerRunner.Run("buildx build", settings, additional);
    }

    /// <summary>
    /// Run the image with the standard volume mounts and assert that its output
    /// contains the computed FullSemVer (xUnit Assert used directly in the build).
    /// </summary>
    private static void DockerTestRun(this BuildContextBase context, string image, Architecture arch, string command, params string[] args)
    {
        var settings = GetDockerRunSettings(context, arch);
        context.Information($"Testing image: {image}");
        var output = context.DockerRunImage(settings, image, command, args);
        context.Information("Output : " + output);
        Assert.Contains(context.Version?.GitVersion.FullSemVer, output);
    }

    /// <summary>
    /// Compute all tags for an image: always version/semversion + distro +
    /// framework; extra aliases (including "latest") only for the designated
    /// latest distro on the 5.0 framework; optionally arch-suffixed.
    /// Returns an empty sequence when no version is available.
    /// </summary>
    private static IEnumerable<string> GetDockerTags(this BuildContextBase context, DockerImage dockerImage, Architecture? arch = null)
    {
        var name = dockerImage.DockerImageName();
        var distro = dockerImage.Distro;
        var targetFramework = dockerImage.TargetFramework;
        if (context.Version == null) return Enumerable.Empty<string>();
        var tags = new List<string>
        {
            $"{name}:{context.Version.Version}-{distro}-{targetFramework}",
            $"{name}:{context.Version.SemVersion}-{distro}-{targetFramework}",
        };
        if (distro == Constants.DockerDistroLatest && targetFramework == Constants.Version50)
        {
            tags.AddRange(new[]
            {
                $"{name}:{context.Version.Version}",
                $"{name}:{context.Version.SemVersion}",
                $"{name}:{context.Version.Version}-{distro}",
                $"{name}:{context.Version.SemVersion}-{distro}"
            });
            // "latest" aliases are only published for stable releases
            if (context.IsStableRelease)
            {
                tags.AddRange(new[]
                {
                    $"{name}:latest",
                    $"{name}:latest-{targetFramework}",
                    $"{name}:latest-{distro}",
                    $"{name}:latest-{distro}-{targetFramework}",
                });
            }
        }
        if (!arch.HasValue) return tags.Distinct();
        var suffix = arch.Value.ToSuffix();
        return tags.Select(x => $"{x}-{suffix}").Distinct();
    }

    // "{registry}/{image name}", choosing the base-image name when requested.
    private static string DockerImageName(this DockerImage image) =>
        $"{image.Registry}/{(image.UseBaseImage ? Constants.DockerBaseImageName : Constants.DockerImageName)}";

    /// <summary>
    /// Build the "docker run" settings: mount the repo, test scripts, and
    /// package artifacts into the container, pin the platform, and forward
    /// CI environment variables when running under Azure Pipelines or
    /// GitHub Actions. NOTE(review): root is string.Empty, so mount targets
    /// resolve to "/repo", "/scripts", etc. — presumably intentional; confirm.
    /// </summary>
    private static DockerContainerRunSettings GetDockerRunSettings(this BuildContextBase context, Architecture arch)
    {
        var currentDir = context.MakeAbsolute(context.Directory("."));
        var root = string.Empty;
        var settings = new DockerContainerRunSettings
        {
            Rm = true,
            Volume = new[]
            {
                $"{currentDir}:{root}/repo",
                $"{currentDir}/tests/scripts:{root}/scripts",
                $"{currentDir}/artifacts/packages/nuget:{root}/nuget",
                $"{currentDir}/artifacts/packages/native:{root}/native",
            },
            Platform = $"linux/{arch.ToString().ToLower()}"
        };
        if (context.IsAzurePipelineBuild)
        {
            settings.Env = new[]
            {
                "TF_BUILD=true",
                $"BUILD_SOURCEBRANCH={context.EnvironmentVariable("BUILD_SOURCEBRANCH")}"
            };
        }
        if (context.IsGitHubActionsBuild)
        {
            settings.Env = new[]
            {
                "GITHUB_ACTIONS=true",
                $"GITHUB_REF={context.EnvironmentVariable("GITHUB_REF")}"
            };
        }
        return settings;
    }

    /// <summary>
    /// Run an image with an optional command and args, returning the captured
    /// output lines joined with newlines. Throws when image is null/empty.
    /// </summary>
    private static string DockerRunImage(this ICakeContext context, DockerContainerRunSettings settings, string image, string command, params string[] args)
    {
        if (string.IsNullOrEmpty(image))
        {
            throw new ArgumentNullException(nameof(image));
        }
        var runner = new GenericDockerRunner<DockerContainerRunSettings>(context.FileSystem, context.Environment, context.ProcessRunner, context.Tools);
        List<string> arguments = new() { image };
        if (!string.IsNullOrEmpty(command))
        {
            arguments.Add(command);
            if (args.Length > 0)
            {
                arguments.AddRange(args);
            }
        }
        var result = runner.RunWithResult("run", settings, r => r.ToArray(), arguments.ToArray());
        return string.Join("\n", result);
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Immutable;
using System.Threading;
using Microsoft.CodeAnalysis.CodeStyle;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.QualifyMemberAccess;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.Diagnostics.SimplifyTypeNames
{
    /// <summary>
    /// Language-agnostic base for the "simplify type names" family of analyzers.
    /// Derived (per-language) analyzers supply the syntax kinds of interest, the
    /// language name, and the core "can this node be simplified?" check; this base
    /// maps the resulting diagnostic id to the right descriptor and severity.
    /// </summary>
    internal abstract class SimplifyTypeNamesDiagnosticAnalyzerBase<TLanguageKindEnum> : DiagnosticAnalyzer, IBuiltInAnalyzer where TLanguageKindEnum : struct
    {
        // All five descriptors share the same "Name can be simplified" message.
        private static readonly LocalizableString s_localizableMessage = new LocalizableResourceString(nameof(WorkspacesResources.Name_can_be_simplified), WorkspacesResources.ResourceManager, typeof(WorkspacesResources));

        private static readonly LocalizableString s_localizableTitleSimplifyNames = new LocalizableResourceString(nameof(FeaturesResources.Simplify_Names), FeaturesResources.ResourceManager, typeof(FeaturesResources));
        // Hidden by default; reported spans are faded as "unnecessary" code in the IDE.
        private static readonly DiagnosticDescriptor s_descriptorSimplifyNames = new DiagnosticDescriptor(IDEDiagnosticIds.SimplifyNamesDiagnosticId,
                                                                    s_localizableTitleSimplifyNames,
                                                                    s_localizableMessage,
                                                                    DiagnosticCategory.Style,
                                                                    DiagnosticSeverity.Hidden,
                                                                    isEnabledByDefault: true,
                                                                    customTags: DiagnosticCustomTags.Unnecessary);

        private static readonly LocalizableString s_localizableTitleSimplifyMemberAccess = new LocalizableResourceString(nameof(FeaturesResources.Simplify_Member_Access), FeaturesResources.ResourceManager, typeof(FeaturesResources));
        private static readonly DiagnosticDescriptor s_descriptorSimplifyMemberAccess = new DiagnosticDescriptor(IDEDiagnosticIds.SimplifyMemberAccessDiagnosticId,
                                                                    s_localizableTitleSimplifyMemberAccess,
                                                                    s_localizableMessage,
                                                                    DiagnosticCategory.Style,
                                                                    DiagnosticSeverity.Hidden,
                                                                    isEnabledByDefault: true,
                                                                    customTags: DiagnosticCustomTags.Unnecessary);

        private static readonly LocalizableString s_localizableTitleRemoveThisOrMe = new LocalizableResourceString(nameof(FeaturesResources.Remove_qualification), FeaturesResources.ResourceManager, typeof(FeaturesResources));
        private static readonly DiagnosticDescriptor s_descriptorRemoveThisOrMe = new DiagnosticDescriptor(IDEDiagnosticIds.RemoveQualificationDiagnosticId,
                                                                    s_localizableTitleRemoveThisOrMe,
                                                                    s_localizableMessage,
                                                                    DiagnosticCategory.Style,
                                                                    DiagnosticSeverity.Hidden,
                                                                    isEnabledByDefault: true,
                                                                    customTags: DiagnosticCustomTags.Unnecessary);

        private static readonly DiagnosticDescriptor s_descriptorPreferIntrinsicTypeInDeclarations = new DiagnosticDescriptor(IDEDiagnosticIds.PreferIntrinsicPredefinedTypeInDeclarationsDiagnosticId,
                                                                    s_localizableTitleSimplifyNames,
                                                                    s_localizableMessage,
                                                                    DiagnosticCategory.Style,
                                                                    DiagnosticSeverity.Hidden,
                                                                    isEnabledByDefault: true,
                                                                    customTags: DiagnosticCustomTags.Unnecessary);

        private static readonly DiagnosticDescriptor s_descriptorPreferIntrinsicTypeInMemberAccess = new DiagnosticDescriptor(IDEDiagnosticIds.PreferIntrinsicPredefinedTypeInMemberAccessDiagnosticId,
                                                                    s_localizableTitleSimplifyNames,
                                                                    s_localizableMessage,
                                                                    DiagnosticCategory.Style,
                                                                    DiagnosticSeverity.Hidden,
                                                                    isEnabledByDefault: true,
                                                                    customTags: DiagnosticCustomTags.Unnecessary);

        public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics { get; } = ImmutableArray.Create(
            s_descriptorSimplifyNames,
            s_descriptorSimplifyMemberAccess,
            s_descriptorRemoveThisOrMe,
            s_descriptorPreferIntrinsicTypeInDeclarations,
            s_descriptorPreferIntrinsicTypeInMemberAccess);

        // Syntax kinds registered for analysis; supplied by the per-language subclass.
        private readonly ImmutableArray<TLanguageKindEnum> _kindsOfInterest;

        protected SimplifyTypeNamesDiagnosticAnalyzerBase(ImmutableArray<TLanguageKindEnum> kindsOfInterest)
        {
            _kindsOfInterest = kindsOfInterest;
        }

        /// <summary>
        /// Analyze only open files unless either predefined-type-keyword option is
        /// configured at Warning or Error severity (then all files must be analyzed).
        /// </summary>
        public bool OpenFileOnly(Workspace workspace)
        {
            var preferTypeKeywordInDeclarationOption = workspace.Options.GetOption(
                CodeStyleOptions.PreferIntrinsicPredefinedTypeKeywordInDeclaration, GetLanguageName()).Notification;
            var preferTypeKeywordInMemberAccessOption = workspace.Options.GetOption(
                CodeStyleOptions.PreferIntrinsicPredefinedTypeKeywordInMemberAccess, GetLanguageName()).Notification;
            return !(preferTypeKeywordInDeclarationOption == NotificationOption.Warning || preferTypeKeywordInDeclarationOption == NotificationOption.Error ||
                     preferTypeKeywordInMemberAccessOption == NotificationOption.Warning || preferTypeKeywordInMemberAccessOption == NotificationOption.Error);
        }

        public sealed override void Initialize(AnalysisContext context)
        {
            // generated code is never flagged for simplification
            context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.None);
            context.RegisterSyntaxNodeAction(AnalyzeNode, _kindsOfInterest);
        }

        // Per-language hook: invoked for each registered syntax kind.
        protected abstract void AnalyzeNode(SyntaxNodeAnalysisContext context);

        // Per-language hook: decides whether the node can be simplified and which diagnostic id applies.
        protected abstract bool CanSimplifyTypeNameExpressionCore(SemanticModel model, SyntaxNode node, OptionSet optionSet, out TextSpan issueSpan, out string diagnosticId, CancellationToken cancellationToken);

        // Per-language hook: language name used to look up per-language options.
        protected abstract string GetLanguageName();

        /// <summary>
        /// Attempt to produce a simplification diagnostic for <paramref name="node"/>.
        /// Returns false when options are unavailable, the node cannot be simplified,
        /// the span overlaps hidden (#line-hidden) code, or the applicable option is
        /// configured so that no descriptor applies.
        /// </summary>
        protected bool TrySimplifyTypeNameExpression(SemanticModel model, SyntaxNode node, AnalyzerOptions analyzerOptions, out Diagnostic diagnostic, CancellationToken cancellationToken)
        {
            diagnostic = default(Diagnostic);

            var syntaxTree = node.SyntaxTree;
            var optionSet = analyzerOptions.GetDocumentOptionSetAsync(syntaxTree, cancellationToken).GetAwaiter().GetResult();
            if (optionSet == null)
            {
                return false;
            }

            if (!CanSimplifyTypeNameExpressionCore(model, node, optionSet, out var issueSpan, out string diagnosticId, cancellationToken))
            {
                return false;
            }

            // don't report in spans hidden from the debugger/editor
            if (model.SyntaxTree.OverlapsHiddenPosition(issueSpan, cancellationToken))
            {
                return false;
            }

            // map the diagnostic id returned by the language-specific check to a descriptor,
            // consulting user options where the severity is configurable
            PerLanguageOption<CodeStyleOption<bool>> option;
            DiagnosticDescriptor descriptor;
            switch (diagnosticId)
            {
                case IDEDiagnosticIds.SimplifyNamesDiagnosticId:
                    descriptor = s_descriptorSimplifyNames;
                    break;

                case IDEDiagnosticIds.SimplifyMemberAccessDiagnosticId:
                    descriptor = s_descriptorSimplifyMemberAccess;
                    break;

                case IDEDiagnosticIds.RemoveQualificationDiagnosticId:
                    descriptor = GetRemoveQualificationDiagnosticDescriptor(model, node, optionSet, cancellationToken);
                    break;

                case IDEDiagnosticIds.PreferIntrinsicPredefinedTypeInDeclarationsDiagnosticId:
                    option = CodeStyleOptions.PreferIntrinsicPredefinedTypeKeywordInDeclaration;
                    descriptor = GetApplicablePredefinedTypeDiagnosticDescriptor(
                        IDEDiagnosticIds.PreferIntrinsicPredefinedTypeInDeclarationsDiagnosticId, option, optionSet);
                    break;

                case IDEDiagnosticIds.PreferIntrinsicPredefinedTypeInMemberAccessDiagnosticId:
                    option = CodeStyleOptions.PreferIntrinsicPredefinedTypeKeywordInMemberAccess;
                    descriptor = GetApplicablePredefinedTypeDiagnosticDescriptor(
                        IDEDiagnosticIds.PreferIntrinsicPredefinedTypeInMemberAccessDiagnosticId, option, optionSet);
                    break;

                default:
                    throw ExceptionUtilities.Unreachable;
            }

            if (descriptor == null)
            {
                return false;
            }

            var tree = model.SyntaxTree;
            var builder = ImmutableDictionary.CreateBuilder<string, string>();
            builder["OptionName"] = nameof(CodeStyleOptions.PreferIntrinsicPredefinedTypeKeywordInMemberAccess); // TODO: need the actual one
            builder["OptionLanguage"] = model.Language;
            diagnostic = Diagnostic.Create(descriptor, tree.GetLocation(issueSpan), builder.ToImmutable());
            return true;
        }

        /// <summary>
        /// Build a descriptor whose severity follows the user's option value, or
        /// null when the option is configured as Hidden (diagnostic suppressed).
        /// </summary>
        private DiagnosticDescriptor GetApplicablePredefinedTypeDiagnosticDescriptor<T>(string id, PerLanguageOption<T> option, OptionSet optionSet) where T : CodeStyleOption<bool>
        {
            var optionValue = optionSet.GetOption(option, GetLanguageName());

            DiagnosticDescriptor descriptor = null;
            if (optionValue.Notification.Value != DiagnosticSeverity.Hidden)
            {
                descriptor = new DiagnosticDescriptor(id,
                        s_localizableTitleSimplifyNames,
                        s_localizableMessage,
                        DiagnosticCategory.Style,
                        optionValue.Notification.Value,
                        isEnabledByDefault: true,
                        customTags: DiagnosticCustomTags.Unnecessary);
            }

            return descriptor;
        }

        /// <summary>
        /// Build the "remove this/Me qualification" descriptor with severity taken
        /// from the qualify-member-access option matching the symbol's kind; null
        /// when the symbol cannot be resolved.
        /// </summary>
        private DiagnosticDescriptor GetRemoveQualificationDiagnosticDescriptor(SemanticModel model, SyntaxNode node, OptionSet optionSet, CancellationToken cancellationToken)
        {
            var symbolInfo = model.GetSymbolInfo(node, cancellationToken);
            if (symbolInfo.Symbol == null)
            {
                return null;
            }

            var applicableOption = AbstractQualifyMemberAccessDiagnosticAnalyzer<TLanguageKindEnum>.GetApplicableOptionFromSymbolKind(symbolInfo.Symbol.Kind);
            var optionValue = optionSet.GetOption(applicableOption, GetLanguageName());
            var severity = optionValue.Notification.Value;
            return new DiagnosticDescriptor(
                IDEDiagnosticIds.RemoveQualificationDiagnosticId,
                s_localizableTitleRemoveThisOrMe,
                s_localizableMessage,
                DiagnosticCategory.Style,
                severity,
                isEnabledByDefault: true,
                customTags: DiagnosticCustomTags.Unnecessary);
        }

        // Span-level semantic analysis: incremental re-analysis per edited span.
        public DiagnosticAnalyzerCategory GetAnalyzerCategory()
            => DiagnosticAnalyzerCategory.SemanticSpanAnalysis;
    }
}
using System;
using System.Collections;

using NBitcoin.BouncyCastle.Asn1;
using NBitcoin.BouncyCastle.Asn1.CryptoPro;
using NBitcoin.BouncyCastle.Asn1.Kisa;
using NBitcoin.BouncyCastle.Asn1.Misc;
using NBitcoin.BouncyCastle.Asn1.Nist;
using NBitcoin.BouncyCastle.Asn1.Ntt;
using NBitcoin.BouncyCastle.Asn1.Oiw;
using NBitcoin.BouncyCastle.Asn1.Pkcs;
using NBitcoin.BouncyCastle.Crypto;
using NBitcoin.BouncyCastle.Crypto.Parameters;
using NBitcoin.BouncyCastle.Utilities;

namespace NBitcoin.BouncyCastle.Security
{
    /// <summary>
    /// Static factory for cipher key/parameter objects. Maintains two tables:
    /// alias/OID -> canonical algorithm name, and canonical name -> basic IV size
    /// for algorithms whose ASN.1 parameters are a plain OCTET STRING IV.
    /// </summary>
    public sealed class ParameterUtilities
    {
        // static-only class; no instances
        private ParameterUtilities()
        {
        }

        // alias or OID string -> canonical algorithm name
        private static readonly IDictionary algorithms = Platform.CreateHashtable();
        // canonical algorithm name -> IV length in bytes (only for plain-IV algorithms)
        private static readonly IDictionary basicIVSizes = Platform.CreateHashtable();

        static ParameterUtilities()
        {
            AddAlgorithm("AES", "AESWRAP");
            AddAlgorithm("AES128",
                "2.16.840.1.101.3.4.2",
                NistObjectIdentifiers.IdAes128Cbc,
                NistObjectIdentifiers.IdAes128Cfb,
                NistObjectIdentifiers.IdAes128Ecb,
                NistObjectIdentifiers.IdAes128Ofb,
                NistObjectIdentifiers.IdAes128Wrap);
            AddAlgorithm("AES192",
                "2.16.840.1.101.3.4.22",
                NistObjectIdentifiers.IdAes192Cbc,
                NistObjectIdentifiers.IdAes192Cfb,
                NistObjectIdentifiers.IdAes192Ecb,
                NistObjectIdentifiers.IdAes192Ofb,
                NistObjectIdentifiers.IdAes192Wrap);
            AddAlgorithm("AES256",
                "2.16.840.1.101.3.4.42",
                NistObjectIdentifiers.IdAes256Cbc,
                NistObjectIdentifiers.IdAes256Cfb,
                NistObjectIdentifiers.IdAes256Ecb,
                NistObjectIdentifiers.IdAes256Ofb,
                NistObjectIdentifiers.IdAes256Wrap);
            AddAlgorithm("BLOWFISH", "1.3.6.1.4.1.3029.1.2");
            AddAlgorithm("CAMELLIA", "CAMELLIAWRAP");
            AddAlgorithm("CAMELLIA128",
                NttObjectIdentifiers.IdCamellia128Cbc,
                NttObjectIdentifiers.IdCamellia128Wrap);
            AddAlgorithm("CAMELLIA192",
                NttObjectIdentifiers.IdCamellia192Cbc,
                NttObjectIdentifiers.IdCamellia192Wrap);
            AddAlgorithm("CAMELLIA256",
                NttObjectIdentifiers.IdCamellia256Cbc,
                NttObjectIdentifiers.IdCamellia256Wrap);
            AddAlgorithm("CAST5", "1.2.840.113533.7.66.10");
            AddAlgorithm("CAST6");
            AddAlgorithm("DES",
                OiwObjectIdentifiers.DesCbc,
                OiwObjectIdentifiers.DesCfb,
                OiwObjectIdentifiers.DesEcb,
                OiwObjectIdentifiers.DesOfb);
            AddAlgorithm("DESEDE",
                "DESEDEWRAP",
                "TDEA",
                OiwObjectIdentifiers.DesEde,
                PkcsObjectIdentifiers.IdAlgCms3DesWrap);
            AddAlgorithm("DESEDE3",
                PkcsObjectIdentifiers.DesEde3Cbc);
            AddAlgorithm("GOST28147",
                "GOST",
                "GOST-28147",
                CryptoProObjectIdentifiers.GostR28147Cbc);
            AddAlgorithm("HC128");
            AddAlgorithm("HC256");
            AddAlgorithm("IDEA", "1.3.6.1.4.1.188.7.1.1.2");
            AddAlgorithm("NOEKEON");
            AddAlgorithm("RC2",
                PkcsObjectIdentifiers.RC2Cbc,
                PkcsObjectIdentifiers.IdAlgCmsRC2Wrap);
            AddAlgorithm("RC4", "ARC4", "1.2.840.113549.3.4");
            AddAlgorithm("RC5", "RC5-32");
            AddAlgorithm("RC5-64");
            AddAlgorithm("RC6");
            AddAlgorithm("RIJNDAEL");
            AddAlgorithm("SALSA20");
            AddAlgorithm("SEED",
                KisaObjectIdentifiers.IdNpkiAppCmsSeedWrap,
                KisaObjectIdentifiers.IdSeedCbc);
            AddAlgorithm("SERPENT");
            AddAlgorithm("SKIPJACK");
            AddAlgorithm("TEA");
            AddAlgorithm("TWOFISH");
            AddAlgorithm("VMPC");
            AddAlgorithm("VMPC-KSA3");
            AddAlgorithm("XTEA");

            // 64-bit-block ciphers use an 8-byte IV; 128-bit-block ciphers a 16-byte IV
            AddBasicIVSizeEntries(8, "BLOWFISH", "DES", "DESEDE", "DESEDE3");
            AddBasicIVSizeEntries(16, "AES", "AES128", "AES192", "AES256",
                "CAMELLIA", "CAMELLIA128", "CAMELLIA192", "CAMELLIA256", "NOEKEON", "SEED");

            // TODO These algorithms support an IV
            // but JCE doesn't seem to provide an AlgorithmParametersGenerator for them
            // "RIJNDAEL", "SKIPJACK", "TWOFISH"
        }

        // Register a canonical name; every alias (string or OID, via ToString) maps to it.
        // The canonical name also maps to itself so lookups of the canonical form succeed.
        private static void AddAlgorithm(
            string canonicalName,
            params object[] aliases)
        {
            algorithms[canonicalName] = canonicalName;

            foreach (object alias in aliases)
            {
                algorithms[alias.ToString()] = canonicalName;
            }
        }

        // Record the plain-IV length (bytes) for each listed canonical algorithm name.
        private static void AddBasicIVSizeEntries(int size, params string[] algorithms)
        {
            foreach (string algorithm in algorithms)
            {
                basicIVSizes.Add(algorithm, size);
            }
        }

        /// <summary>
        /// Resolve an alias/OID to its canonical algorithm name (case-insensitive
        /// via upper-casing); null when unknown.
        /// </summary>
        public static string GetCanonicalAlgorithmName(
            string algorithm)
        {
            return (string) algorithms[Platform.ToUpperInvariant(algorithm)];
        }

        public static KeyParameter CreateKeyParameter(
            DerObjectIdentifier algOid,
            byte[] keyBytes)
        {
            return CreateKeyParameter(algOid.Id, keyBytes, 0, keyBytes.Length);
        }

        public static KeyParameter CreateKeyParameter(
            string algorithm,
            byte[] keyBytes)
        {
            return CreateKeyParameter(algorithm, keyBytes, 0, keyBytes.Length);
        }

        public static KeyParameter CreateKeyParameter(
            DerObjectIdentifier algOid,
            byte[] keyBytes,
            int offset,
            int length)
        {
            return CreateKeyParameter(algOid.Id, keyBytes, offset, length);
        }

        /// <summary>
        /// Create a KeyParameter for the named algorithm, using the specialised
        /// subclass (Des/DesEde/RC2) where the algorithm requires one.
        /// </summary>
        /// <exception cref="ArgumentNullException">algorithm is null</exception>
        /// <exception cref="SecurityUtilityException">algorithm is not recognised</exception>
        public static KeyParameter CreateKeyParameter(
            string algorithm,
            byte[] keyBytes,
            int offset,
            int length)
        {
            if (algorithm == null)
                throw new ArgumentNullException("algorithm");

            string canonical = GetCanonicalAlgorithmName(algorithm);

            if (canonical == null)
                throw new SecurityUtilityException("Algorithm " + algorithm + " not recognised.");

            if (canonical == "DES")
                return new DesParameters(keyBytes, offset, length);

            if (canonical == "DESEDE" || canonical =="DESEDE3")
                return new DesEdeParameters(keyBytes, offset, length);

            if (canonical == "RC2")
                return new RC2Parameters(keyBytes, offset, length);

            return new KeyParameter(keyBytes, offset, length);
        }

        public static ICipherParameters GetCipherParameters(
            DerObjectIdentifier algOid,
            ICipherParameters key,
            Asn1Object asn1Params)
        {
            return GetCipherParameters(algOid.Id, key, asn1Params);
        }

        /// <summary>
        /// Decode ASN.1 cipher parameters (currently only IVs) and combine them
        /// with the key into a ParametersWithIV.
        /// NOTE(review): when the algorithm is recognised but no IV can be
        /// extracted, the "not recognised" message thrown at the end is
        /// misleading — consider a clearer message upstream.
        /// </summary>
        public static ICipherParameters GetCipherParameters(
            string algorithm,
            ICipherParameters key,
            Asn1Object asn1Params)
        {
            if (algorithm == null)
                throw new ArgumentNullException("algorithm");

            string canonical = GetCanonicalAlgorithmName(algorithm);

            if (canonical == null)
                throw new SecurityUtilityException("Algorithm " + algorithm + " not recognised.");

            byte[] iv = null;

            try
            {
                // TODO These algorithms support an IV
                // but JCE doesn't seem to provide an AlgorithmParametersGenerator for them
                // "RIJNDAEL", "SKIPJACK", "TWOFISH"

                int basicIVKeySize = FindBasicIVSize(canonical);
                if (basicIVKeySize != -1
                    || canonical == "RIJNDAEL" || canonical == "SKIPJACK" || canonical == "TWOFISH")
                {
                    // plain-IV algorithms: parameters are a bare OCTET STRING
                    iv = ((Asn1OctetString) asn1Params).GetOctets();
                }
                else if (canonical == "CAST5")
                {
                    iv = Cast5CbcParameters.GetInstance(asn1Params).GetIV();
                }
                else if (canonical == "IDEA")
                {
                    iv = IdeaCbcPar.GetInstance(asn1Params).GetIV();
                }
                else if (canonical == "RC2")
                {
                    iv = RC2CbcParameter.GetInstance(asn1Params).GetIV();
                }
            }
            catch (Exception e)
            {
                throw new ArgumentException("Could not process ASN.1 parameters", e);
            }

            if (iv != null)
            {
                return new ParametersWithIV(key, iv);
            }

            throw new SecurityUtilityException("Algorithm " + algorithm + " not recognised.");
        }

        public static Asn1Encodable GenerateParameters(
            DerObjectIdentifier algID,
            SecureRandom random)
        {
            return GenerateParameters(algID.Id, random);
        }

        /// <summary>
        /// Generate random ASN.1 cipher parameters (an IV) for the named
        /// algorithm: a DER OCTET STRING for plain-IV algorithms, or the
        /// algorithm-specific structure for CAST5/IDEA/RC2.
        /// </summary>
        public static Asn1Encodable GenerateParameters(
            string algorithm,
            SecureRandom random)
        {
            if (algorithm == null)
                throw new ArgumentNullException("algorithm");

            string canonical = GetCanonicalAlgorithmName(algorithm);

            if (canonical == null)
                throw new SecurityUtilityException("Algorithm " + algorithm + " not recognised.");

            // TODO These algorithms support an IV
            // but JCE doesn't seem to provide an AlgorithmParametersGenerator for them
            // "RIJNDAEL", "SKIPJACK", "TWOFISH"

            int basicIVKeySize = FindBasicIVSize(canonical);
            if (basicIVKeySize != -1)
                return CreateIVOctetString(random, basicIVKeySize);

            if (canonical == "CAST5")
                return new Cast5CbcParameters(CreateIV(random, 8), 128);

            if (canonical == "IDEA")
                return new IdeaCbcPar(CreateIV(random, 8));

            if (canonical == "RC2")
                return new RC2CbcParameter(CreateIV(random, 8));

            throw new SecurityUtilityException("Algorithm " + algorithm + " not recognised.");
        }

        private static Asn1OctetString CreateIVOctetString(
            SecureRandom random,
            int ivLength)
        {
            return new DerOctetString(CreateIV(random, ivLength));
        }

        // Fill a fresh buffer of ivLength bytes from the supplied SecureRandom.
        private static byte[] CreateIV(
            SecureRandom random,
            int ivLength)
        {
            byte[] iv = new byte[ivLength];
            random.NextBytes(iv);
            return iv;
        }

        // IV length in bytes for plain-IV algorithms, or -1 when not in the table.
        private static int FindBasicIVSize(
            string canonicalName)
        {
            if (!basicIVSizes.Contains(canonicalName))
                return -1;

            return (int)basicIVSizes[canonicalName];
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ namespace ParquetSharp.Encoding.BitPacking { using System; using System.Globalization; using System.IO; using System.Text; /** * * This class generates bit packers that pack the most significant bit first. * The result of the generation is checked in. To regenerate the code run this class and check in the result. 
/*
 *
 * TODO: remove the unnecessary masks for perf
 *
 * @author Julien Le Dem
 *
 */

/// <summary>
/// Code generator that writes the ByteBitPackingBE / ByteBitPackingLE source files:
/// one BytePacker subclass per bit width (0..32), each with fully unrolled
/// pack/unpack methods. Run with a single argument: the base path under which
/// the generated files are written.
/// </summary>
public class ByteBasedBitPackingGenerator
{
    private const string CLASS_NAME_PREFIX = "ByteBitPacking";

    // Highest supported bit width; Packer0..Packer32 inclusive are generated.
    private const int PACKER_COUNT = 32;

    /// <summary>
    /// Entry point. <paramref name="args"/>[0] is the base path of the solution.
    /// Generates both byte orders.
    /// </summary>
    public static void Main(string[] args)
    {
        string basePath = args[0];
        generateScheme(CLASS_NAME_PREFIX + "BE", true, basePath);  // most-significant-bit-first
        generateScheme(CLASS_NAME_PREFIX + "LE", false, basePath); // least-significant-bit-first
    }

    /// <summary>
    /// Writes one complete generated source file (header comment, namespace,
    /// abstract holder class, static packer registry, factory, and all Packer0..32
    /// nested classes).
    /// </summary>
    private static void generateScheme(string className, bool msbFirst, string basePath)
    {
        string file = Path.Combine(basePath, "ParquetEncoding\\src\\Column\\Values\\BitPacking\\" + className + ".cs");
        using (StreamWriter fw = File.CreateText(file))
        {
            fw.WriteLine("/**");
            if (msbFirst)
            {
                fw.WriteLine(" * Packs from the Most Significant Bit first");
            }
            else
            {
                fw.WriteLine(" * Packs from the Least Significant Bit first");
            }
            fw.WriteLine(" * ");
            fw.WriteLine(" * @author automatically generated");
            fw.WriteLine(" * @see ByteBasedBitPackingGenerator");
            fw.WriteLine(" *");
            fw.WriteLine(" */");
            fw.WriteLine();
            fw.WriteLine("namespace ParquetSharp.Column.Values.BitPacking");
            fw.WriteLine("{");
            fw.WriteLine(" using System;");
            fw.WriteLine(" using ParquetSharp.External;");
            fw.WriteLine();
            fw.WriteLine(" internal abstract class {0}", className);
            fw.WriteLine(" {");
            fw.WriteLine(" private static readonly BytePacker[] packers = new BytePacker[33];");
            fw.WriteLine();
            fw.WriteLine(" static {0}()", className);
            fw.WriteLine(" {");
            for (int i = 0; i <= PACKER_COUNT; i++)
            {
                fw.WriteLine(" packers[" + i + "] = new Packer" + i + "();");
            }
            fw.WriteLine(" }");
            fw.WriteLine();
            fw.WriteLine(" public static readonly BytePackerFactory factory = new Factory();");
            fw.WriteLine();
            fw.WriteLine(" class Factory : BytePackerFactory");
            fw.WriteLine(" {");
            fw.WriteLine(" public BytePacker newBytePacker(int bitWidth)");
            fw.WriteLine(" {");
            fw.WriteLine(" return packers[bitWidth];");
            fw.WriteLine(" }");
            fw.WriteLine(" }");
            fw.WriteLine();
            for (int i = 0; i <= PACKER_COUNT; i++)
            {
                generateClass(fw, i, msbFirst);
                fw.WriteLine();
            }
            fw.WriteLine(" }");
            fw.WriteLine("}");
        }
    }

    /// <summary>
    /// Writes one Packer{bitWidth} nested class: constructor plus unrolled
    /// pack8/pack32 and unpack8/unpack32 methods (byte[] and ByteBuffer variants).
    /// </summary>
    private static void generateClass(StreamWriter fw, int bitWidth, bool msbFirst)
    {
        fw.WriteLine(" sealed private class Packer{0} : BytePacker", bitWidth);
        fw.WriteLine(" {");
        fw.WriteLine(" internal Packer{0}() : base({0}) {{ }}", bitWidth);
        fw.WriteLine();
        // Packing
        generatePack(fw, bitWidth, 1, msbFirst);
        generatePack(fw, bitWidth, 4, msbFirst);
        // Unpacking
        generateUnpack(fw, bitWidth, 1, msbFirst, true);
        generateUnpack(fw, bitWidth, 1, msbFirst, false);
        generateUnpack(fw, bitWidth, 4, msbFirst, true);
        generateUnpack(fw, bitWidth, 4, msbFirst, false);
        fw.WriteLine(" }");
    }

    /// <summary>
    /// Computes how far the slice of value <paramref name="valueIndex"/> that falls
    /// into byte <paramref name="byteIndex"/> must be shifted (positive = right shift,
    /// negative = left shift), and emits the alignment ASCII art as a side effect.
    /// </summary>
    private static int getShift(StreamWriter fw, int bitWidth, bool msbFirst, int byteIndex, int valueIndex)
    {
        // relative positions of the start and end of the value to the start and end of the byte
        int valueStartBitIndex = (valueIndex * bitWidth) - (8 * (byteIndex));
        int valueEndBitIndex = ((valueIndex + 1) * bitWidth) - (8 * (byteIndex + 1));

        // boundaries of the current value that we want
        int valueStartBitWanted;
        int valueEndBitWanted;
        // boundaries of the current byte that will receive them
        int byteStartBitWanted;
        int byteEndBitWanted;
        int shift;

        if (msbFirst)
        {
            valueStartBitWanted = valueStartBitIndex < 0 ? bitWidth - 1 + valueStartBitIndex : bitWidth - 1;
            valueEndBitWanted = valueEndBitIndex > 0 ? valueEndBitIndex : 0;
            byteStartBitWanted = valueStartBitIndex < 0 ? 8 : 7 - valueStartBitIndex;
            byteEndBitWanted = valueEndBitIndex > 0 ? 0 : -valueEndBitIndex;
            shift = valueEndBitWanted - byteEndBitWanted;
        }
        else
        {
            valueStartBitWanted = bitWidth - 1 - (valueEndBitIndex > 0 ? valueEndBitIndex : 0);
            valueEndBitWanted = bitWidth - 1 - (valueStartBitIndex < 0 ? bitWidth - 1 + valueStartBitIndex : bitWidth - 1);
            byteStartBitWanted = 7 - (valueEndBitIndex > 0 ? 0 : -valueEndBitIndex);
            byteEndBitWanted = 7 - (valueStartBitIndex < 0 ? 8 : 7 - valueStartBitIndex);
            shift = valueStartBitWanted - byteStartBitWanted;
        }

        visualizeAlignment(
            fw, bitWidth, valueEndBitIndex,
            valueStartBitWanted, valueEndBitWanted,
            byteStartBitWanted, byteEndBitWanted,
            shift);
        return shift;
    }

    /// <summary>
    /// Emits an ASCII-art comment into the generated source showing which bits of
    /// the byte and of the value are selected by the current term.
    /// </summary>
    private static void visualizeAlignment(StreamWriter fw, int bitWidth, int valueEndBitIndex,
        int valueStartBitWanted, int valueEndBitWanted,
        int byteStartBitWanted, int byteEndBitWanted, int shift)
    {
        // ASCII art to visualize what is happening
        fw.Write("//");
        int buf = 2 + Math.Max(0, bitWidth + 8);
        for (int i = 0; i < buf; i++)
        {
            fw.Write(" ");
        }
        fw.Write("[");
        for (int i = 7; i >= 0; i--)
        {
            if (i <= byteStartBitWanted && i >= byteEndBitWanted)
            {
                fw.Write(i.ToString(CultureInfo.InvariantCulture));
            }
            else
            {
                fw.Write("_");
            }
        }
        fw.WriteLine("]");
        fw.Write(" //");
        for (int i = 0; i < buf + (8 - bitWidth + shift); i++)
        {
            fw.Write(" ");
        }
        fw.Write("[");
        for (int i = bitWidth - 1; i >= 0; i--)
        {
            if (i <= valueStartBitWanted && i >= valueEndBitWanted)
            {
                fw.Write((i % 10).ToString(CultureInfo.InvariantCulture));
            }
            else
            {
                fw.Write("_");
            }
        }
        fw.WriteLine("]");
        fw.Write(" ");
    }

    /// <summary>
    /// Writes an unrolled pack{8*batch}Values method: each output byte is the OR of
    /// the masked, shifted input values that overlap it.
    /// </summary>
    private static void generatePack(StreamWriter fw, int bitWidth, int batch, bool msbFirst)
    {
        int mask = genMask(bitWidth);
        fw.WriteLine(" public override void pack" + (batch * 8) + "Values(int[] @in, int inPos, byte[] @out, int outPos)");
        fw.WriteLine(" {");
        for (int byteIndex = 0; byteIndex < bitWidth * batch; ++byteIndex)
        {
            fw.WriteLine(" @out[" + align(byteIndex, 2) + " + outPos] = (byte)((");
            int startIndex = (byteIndex * 8) / bitWidth;
            int endIndex = ((byteIndex + 1) * 8 + bitWidth - 1) / bitWidth;
            for (int valueIndex = startIndex; valueIndex < endIndex; valueIndex++)
            {
                if (valueIndex == startIndex)
                {
                    fw.Write(" ");
                }
                else
                {
                    fw.WriteLine();
                    fw.Write(" | ");
                }
                int shift = getShift(fw, bitWidth, msbFirst, byteIndex, valueIndex);
                string shiftString = string.Empty; // used when shift == 0
                if (shift > 0)
                {
                    shiftString = " >> " + shift;
                }
                else if (shift < 0)
                {
                    shiftString = " << " + (-shift);
                }
                fw.Write("(int)((uint)(@in[" + align(valueIndex, 2) + " + inPos] & " + mask + ")" + shiftString + ")");
            }
            fw.WriteLine(") & 255);");
        }
        fw.WriteLine(" }");
    }

    /// <summary>
    /// Writes an unrolled unpack{8*batch}Values method: each output value is the OR
    /// of the shifted, masked input bytes that overlap it. <paramref name="useByteArray"/>
    /// selects the (obsolete) byte[] variant vs. the ByteBuffer variant.
    /// </summary>
    private static void generateUnpack(StreamWriter fw, int bitWidth, int batch, bool msbFirst, bool useByteArray)
    {
        string bufferDataType;
        if (useByteArray)
        {
            bufferDataType = "byte[]";
            fw.WriteLine(" [Obsolete]");
        }
        else
        {
            bufferDataType = "ByteBuffer";
        }
        fw.WriteLine(" public override void unpack" + (batch * 8) + "Values(" + bufferDataType + " @in, int inPos, int[] @out, int outPos)");
        fw.WriteLine(" {");
        if (bitWidth > 0)
        {
            int mask = genMask(bitWidth);
            for (int valueIndex = 0; valueIndex < (batch * 8); ++valueIndex)
            {
                fw.WriteLine(" @out[" + align(valueIndex, 2) + " + outPos] =");
                int startIndex = valueIndex * bitWidth / 8;
                int endIndex = paddedByteCountFromBits((valueIndex + 1) * bitWidth);
                for (int byteIndex = startIndex; byteIndex < endIndex; byteIndex++)
                {
                    if (byteIndex == startIndex)
                    {
                        fw.Write(" ");
                    }
                    else
                    {
                        fw.WriteLine();
                        // BUGFIX: was fw.Write(" | ", Environment.NewLine) — the extra
                        // argument made the literal a format string whose argument was
                        // silently ignored (no placeholders). The pack path writes the
                        // plain literal; do the same here.
                        fw.Write(" | ");
                    }
                    int shift = getShift(fw, bitWidth, msbFirst, byteIndex, valueIndex);
                    string shiftString = string.Empty; // when shift == 0
                    if (shift < 0)
                    {
                        shiftString = ">> " + (-shift);
                    }
                    else if (shift > 0)
                    {
                        shiftString = "<< " + shift;
                    }
                    string byteAccess;
                    if (useByteArray)
                    {
                        byteAccess = "@in[" + align(byteIndex, 2) + " + inPos]";
                    }
                    else
                    {
                        // use ByteBuffer#get(index) method
                        byteAccess = "@in.get(" + align(byteIndex, 2) + " + inPos)";
                    }
                    fw.Write(" (int)(((((uint)" + byteAccess + ") & 255) " + shiftString + ") & " + mask + ")");
                }
                fw.WriteLine(";");
            }
        }
        fw.WriteLine(" }");
    }

    /// <summary>
    /// Returns a mask with the low <paramref name="bitWidth"/> bits set.
    /// </summary>
    private static int genMask(int bitWidth)
    {
        int mask = 0;
        for (int i = 0; i < bitWidth; i++)
        {
            mask <<= 1;
            mask |= 1;
        }
        return mask;
    }

    /// <summary>
    /// Right-aligns <paramref name="value"/> in a field of <paramref name="digits"/>
    /// characters (space padded) so the generated code lines up visually.
    /// </summary>
    private static string align(int value, int digits)
    {
        string valueString = value.ToString(CultureInfo.InvariantCulture);
        StringBuilder result = new StringBuilder();
        for (int i = valueString.Length; i < digits; i++)
        {
            result.Append(" ");
        }
        result.Append(valueString);
        return result.ToString();
    }

    // duplicated from BytesUtils to avoid a circular dependency between parquet-common and parquet-generator
    private static int paddedByteCountFromBits(int bitLength)
    {
        return (bitLength + 7) / 8;
    }
}
}
using System;
using System.Collections.Generic;
using System.Linq;
using Nop.Core;
using Nop.Core.Caching;
using Nop.Core.Data;
using Nop.Core.Domain.Localization;
using Nop.Core.Domain.Seo;

namespace Nop.Services.Seo
{
    /// <summary>
    /// Provides information about URL records
    /// </summary>
    public partial class UrlRecordService : IUrlRecordService
    {
        #region Constants

        /// <summary>
        /// Key for caching
        /// </summary>
        /// <remarks>
        /// {0} : entity ID
        /// {1} : entity name
        /// {2} : language ID
        /// </remarks>
        private const string URLRECORD_ACTIVE_BY_ID_NAME_LANGUAGE_KEY = "Nop.urlrecord.active.id-name-language-{0}-{1}-{2}";

        /// <summary>
        /// Key for caching (all URL records; no format placeholders)
        /// </summary>
        private const string URLRECORD_ALL_KEY = "Nop.urlrecord.all";

        /// <summary>
        /// Key for caching
        /// </summary>
        /// <remarks>
        /// {0} : slug
        /// </remarks>
        private const string URLRECORD_BY_SLUG_KEY = "Nop.urlrecord.active.slug-{0}";

        /// <summary>
        /// Key pattern to clear cache
        /// </summary>
        private const string URLRECORD_PATTERN_KEY = "Nop.urlrecord.";

        #endregion

        #region Fields

        private readonly IRepository<UrlRecord> _urlRecordRepository;
        private readonly ICacheManager _cacheManager;
        private readonly LocalizationSettings _localizationSettings;

        #endregion

        #region Ctor

        /// <summary>
        /// Ctor
        /// </summary>
        /// <param name="cacheManager">Cache manager</param>
        /// <param name="urlRecordRepository">URL record repository</param>
        /// <param name="localizationSettings">Localization settings</param>
        public UrlRecordService(ICacheManager cacheManager,
            IRepository<UrlRecord> urlRecordRepository,
            LocalizationSettings localizationSettings)
        {
            this._cacheManager = cacheManager;
            this._urlRecordRepository = urlRecordRepository;
            this._localizationSettings = localizationSettings;
        }

        #endregion

        #region Utilities

        /// <summary>
        /// Copies an <see cref="UrlRecord"/> into a detached, serializable object
        /// that is safe to keep in the cache.
        /// </summary>
        /// <param name="record">URL record to copy</param>
        /// <returns>Cacheable copy of the URL record</returns>
        /// <remarks>
        /// FIX: made virtual for consistency — every other extensibility point of
        /// this service is virtual, and subclasses may need to extend the mapping.
        /// </remarks>
        protected virtual UrlRecordForCaching Map(UrlRecord record)
        {
            if (record == null)
                throw new ArgumentNullException("record");

            var urlRecordForCaching = new UrlRecordForCaching()
            {
                Id = record.Id,
                EntityId = record.EntityId,
                EntityName = record.EntityName,
                Slug = record.Slug,
                IsActive = record.IsActive,
                LanguageId = record.LanguageId
            };
            return urlRecordForCaching;
        }

        /// <summary>
        /// Gets all cached URL records
        /// </summary>
        /// <returns>cached URL records</returns>
        protected virtual IList<UrlRecordForCaching> GetAllUrlRecordsCached()
        {
            //cache
            //FIX: URLRECORD_ALL_KEY has no format placeholders, so the previous
            //string.Format(URLRECORD_ALL_KEY) was a no-op; use the constant directly.
            return _cacheManager.Get(URLRECORD_ALL_KEY, () =>
            {
                var query = from ur in _urlRecordRepository.Table
                            select ur;
                var urlRecords = query.ToList();
                var list = new List<UrlRecordForCaching>();
                foreach (var ur in urlRecords)
                {
                    var urlRecordForCaching = Map(ur);
                    list.Add(urlRecordForCaching);
                }
                return list;
            });
        }

        #endregion

        #region Nested classes

        /// <summary>
        /// Lightweight, serializable copy of a URL record used for caching.
        /// </summary>
        [Serializable]
        public class UrlRecordForCaching
        {
            public int Id { get; set; }
            public int EntityId { get; set; }
            public string EntityName { get; set; }
            public string Slug { get; set; }
            public bool IsActive { get; set; }
            public int LanguageId { get; set; }
        }

        #endregion

        #region Methods

        /// <summary>
        /// Deletes an URL record
        /// </summary>
        /// <param name="urlRecord">URL record</param>
        public virtual void DeleteUrlRecord(UrlRecord urlRecord)
        {
            if (urlRecord == null)
                throw new ArgumentNullException("urlRecord");

            _urlRecordRepository.Delete(urlRecord);

            //cache
            _cacheManager.RemoveByPattern(URLRECORD_PATTERN_KEY);
        }

        /// <summary>
        /// Gets an URL record
        /// </summary>
        /// <param name="urlRecordId">URL record identifier</param>
        /// <returns>URL record</returns>
        public virtual UrlRecord GetUrlRecordById(int urlRecordId)
        {
            if (urlRecordId == 0)
                return null;

            return _urlRecordRepository.GetById(urlRecordId);
        }

        /// <summary>
        /// Inserts an URL record
        /// </summary>
        /// <param name="urlRecord">URL record</param>
        public virtual void InsertUrlRecord(UrlRecord urlRecord)
        {
            if (urlRecord == null)
                throw new ArgumentNullException("urlRecord");

            _urlRecordRepository.Insert(urlRecord);

            //cache
            _cacheManager.RemoveByPattern(URLRECORD_PATTERN_KEY);
        }

        /// <summary>
        /// Updates the URL record
        /// </summary>
        /// <param name="urlRecord">URL record</param>
        public virtual void UpdateUrlRecord(UrlRecord urlRecord)
        {
            if (urlRecord == null)
                throw new ArgumentNullException("urlRecord");

            _urlRecordRepository.Update(urlRecord);

            //cache
            _cacheManager.RemoveByPattern(URLRECORD_PATTERN_KEY);
        }

        /// <summary>
        /// Find URL record
        /// </summary>
        /// <param name="slug">Slug</param>
        /// <returns>Found URL record</returns>
        public virtual UrlRecord GetBySlug(string slug)
        {
            if (String.IsNullOrEmpty(slug))
                return null;

            var query = from ur in _urlRecordRepository.Table
                        where ur.Slug == slug
                        select ur;
            var urlRecord = query.FirstOrDefault();
            return urlRecord;
        }

        /// <summary>
        /// Find URL record (cached version).
        /// This method works absolutely the same way as "GetBySlug" one but caches the results.
        /// Hence, it's used only for performance optimization in public store
        /// </summary>
        /// <param name="slug">Slug</param>
        /// <returns>Found URL record</returns>
        public virtual UrlRecordForCaching GetBySlugCached(string slug)
        {
            if (String.IsNullOrEmpty(slug))
                return null;

            if (_localizationSettings.LoadAllUrlRecordsOnStartup)
            {
                //load all records (we know they are cached)
                var source = GetAllUrlRecordsCached();
                var query = from ur in source
                            where ur.Slug.Equals(slug, StringComparison.InvariantCultureIgnoreCase)
                            select ur;
                var urlRecordForCaching = query.FirstOrDefault();
                return urlRecordForCaching;
            }
            else
            {
                //gradual loading
                string key = string.Format(URLRECORD_BY_SLUG_KEY, slug);
                return _cacheManager.Get(key, () =>
                {
                    var urlRecord = GetBySlug(slug);
                    if (urlRecord == null)
                        return null; //null results are intentionally not cached; they will be re-queried
                    var urlRecordForCaching = Map(urlRecord);
                    return urlRecordForCaching;
                });
            }
        }

        /// <summary>
        /// Gets all URL records
        /// </summary>
        /// <param name="slug">Slug</param>
        /// <param name="pageIndex">Page index</param>
        /// <param name="pageSize">Page size</param>
        /// <returns>Customer collection</returns>
        public virtual IPagedList<UrlRecord> GetAllUrlRecords(string slug, int pageIndex, int pageSize)
        {
            var query = _urlRecordRepository.Table;
            if (!String.IsNullOrWhiteSpace(slug))
                query = query.Where(ur => ur.Slug.Contains(slug));
            query = query.OrderBy(ur => ur.Slug);

            var urlRecords = new PagedList<UrlRecord>(query, pageIndex, pageSize);
            return urlRecords;
        }

        /// <summary>
        /// Find slug
        /// </summary>
        /// <param name="entityId">Entity identifier</param>
        /// <param name="entityName">Entity name</param>
        /// <param name="languageId">Language identifier</param>
        /// <returns>Found slug</returns>
        public virtual string GetActiveSlug(int entityId, string entityName, int languageId)
        {
            //the same cache key is used by both loading strategies (hoisted from the branches)
            string key = string.Format(URLRECORD_ACTIVE_BY_ID_NAME_LANGUAGE_KEY, entityId, entityName, languageId);
            if (_localizationSettings.LoadAllUrlRecordsOnStartup)
            {
                return _cacheManager.Get(key, () =>
                {
                    //load all records (we know they are cached)
                    var source = GetAllUrlRecordsCached();
                    var query = from ur in source
                                where ur.EntityId == entityId &&
                                ur.EntityName == entityName &&
                                ur.LanguageId == languageId &&
                                ur.IsActive
                                orderby ur.Id descending
                                select ur.Slug;
                    var slug = query.FirstOrDefault();
                    //little hack here. nulls aren't cacheable so set it to ""
                    if (slug == null)
                        slug = "";
                    return slug;
                });
            }
            else
            {
                //gradual loading
                return _cacheManager.Get(key, () =>
                {
                    var source = _urlRecordRepository.Table;
                    var query = from ur in source
                                where ur.EntityId == entityId &&
                                ur.EntityName == entityName &&
                                ur.LanguageId == languageId &&
                                ur.IsActive
                                orderby ur.Id descending
                                select ur.Slug;
                    var slug = query.FirstOrDefault();
                    //little hack here. nulls aren't cacheable so set it to ""
                    if (slug == null)
                        slug = "";
                    return slug;
                });
            }
        }

        /// <summary>
        /// Save slug
        /// </summary>
        /// <typeparam name="T">Type</typeparam>
        /// <param name="entity">Entity</param>
        /// <param name="slug">Slug</param>
        /// <param name="languageId">Language ID</param>
        public virtual void SaveSlug<T>(T entity, string slug, int languageId) where T : BaseEntity, ISlugSupported
        {
            if (entity == null)
                throw new ArgumentNullException("entity");

            int entityId = entity.Id;
            string entityName = typeof(T).Name;

            var query = from ur in _urlRecordRepository.Table
                        where ur.EntityId == entityId &&
                        ur.EntityName == entityName &&
                        ur.LanguageId == languageId
                        orderby ur.Id descending
                        select ur;
            var allUrlRecords = query.ToList();
            var activeUrlRecord = allUrlRecords.FirstOrDefault(x => x.IsActive);

            if (activeUrlRecord == null && !string.IsNullOrWhiteSpace(slug))
            {
                //find in non-active records with the specified slug
                var nonActiveRecordWithSpecifiedSlug = allUrlRecords
                    .FirstOrDefault(x => x.Slug.Equals(slug, StringComparison.InvariantCultureIgnoreCase) && !x.IsActive);
                if (nonActiveRecordWithSpecifiedSlug != null)
                {
                    //mark non-active record as active
                    nonActiveRecordWithSpecifiedSlug.IsActive = true;
                    UpdateUrlRecord(nonActiveRecordWithSpecifiedSlug);
                }
                else
                {
                    //new record
                    var urlRecord = new UrlRecord()
                    {
                        EntityId = entity.Id,
                        EntityName = entityName,
                        Slug = slug,
                        LanguageId = languageId,
                        IsActive = true,
                    };
                    InsertUrlRecord(urlRecord);
                }
            }

            if (activeUrlRecord != null && string.IsNullOrWhiteSpace(slug))
            {
                //disable the previous active URL record
                activeUrlRecord.IsActive = false;
                UpdateUrlRecord(activeUrlRecord);
            }

            if (activeUrlRecord != null && !string.IsNullOrWhiteSpace(slug))
            {
                //is it the same slug as in active URL record?
                if (activeUrlRecord.Slug.Equals(slug, StringComparison.InvariantCultureIgnoreCase))
                {
                    //yes. do nothing
                    //P.S. wrote this way for more source code readability
                }
                else
                {
                    //find in non-active records with the specified slug
                    var nonActiveRecordWithSpecifiedSlug = allUrlRecords
                        .FirstOrDefault(x => x.Slug.Equals(slug, StringComparison.InvariantCultureIgnoreCase) && !x.IsActive);
                    if (nonActiveRecordWithSpecifiedSlug != null)
                    {
                        //mark non-active record as active
                        nonActiveRecordWithSpecifiedSlug.IsActive = true;
                        UpdateUrlRecord(nonActiveRecordWithSpecifiedSlug);

                        //disable the previous active URL record
                        activeUrlRecord.IsActive = false;
                        UpdateUrlRecord(activeUrlRecord);
                    }
                    else
                    {
                        //insert new record
                        //we do not update the existing record because we should track all previously entered slugs
                        //to ensure that URLs will work fine
                        var urlRecord = new UrlRecord()
                        {
                            EntityId = entity.Id,
                            EntityName = entityName,
                            Slug = slug,
                            LanguageId = languageId,
                            IsActive = true,
                        };
                        InsertUrlRecord(urlRecord);

                        //disable the previous active URL record
                        activeUrlRecord.IsActive = false;
                        UpdateUrlRecord(activeUrlRecord);
                    }
                }
            }

        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;

namespace System.Globalization
{
#if INSIDE_CLR
    using Debug = BCLDebug;
#endif

    /*=================================JapaneseCalendar==========================
    **
    ** JapaneseCalendar is based on Gregorian calendar. The month and day values are the same as
    ** Gregorian calendar. However, the year value is an offset to the Gregorian
    ** year based on the era.
    **
    ** This system is adopted by Emperor Meiji in 1868. The year value is counted based on the reign of an emperor,
    ** and the era begins on the day an emperor ascends the throne and continues until his death.
    ** The era changes at 12:00AM.
    **
    ** For example, the current era is Heisei. It started on 1989/1/8 A.D. Therefore, Gregorian year 1989 is also Heisei 1st.
    ** 1989/1/8 A.D. is also Heisei 1st 1/8.
    **
    ** Any date in the year during which era is changed can be reckoned in either era. For example,
    ** 1989/1/1 can be 1/1 Heisei 1st year or 1/1 Showa 64th year.
    **
    ** Note:
    **  The DateTime can be represented by the JapaneseCalendar are limited to two factors:
    **      1. The min value and max value of DateTime class.
    **      2. The available era information.
    **
    **  Calendar support range:
    **      Calendar    Minimum     Maximum
    **      ==========  ==========  ==========
    **      Gregorian   1868/09/08  9999/12/31
    **      Japanese    Meiji 01/01 Heisei 8011/12/31
    ============================================================================*/

    [Serializable]
    [System.Runtime.InteropServices.ComVisible(true)]
    public partial class JapaneseCalendar : Calendar
    {
        // Earliest supported date: the start of the Meiji era (1868/09/08 Gregorian).
        internal static readonly DateTime calendarMinValue = new DateTime(1868, 9, 8);

        [System.Runtime.InteropServices.ComVisible(false)]
        public override DateTime MinSupportedDateTime
        {
            get
            {
                return (calendarMinValue);
            }
        }

        [System.Runtime.InteropServices.ComVisible(false)]
        public override DateTime MaxSupportedDateTime
        {
            get
            {
                return (DateTime.MaxValue);
            }
        }

        [System.Runtime.InteropServices.ComVisible(false)]
        public override CalendarAlgorithmType AlgorithmType
        {
            get
            {
                return CalendarAlgorithmType.SolarCalendar;
            }
        }

        //
        // Using a field initializer rather than a static constructor so that the whole class can be lazy
        // init.
        internal static volatile EraInfo[] japaneseEraInfo;

        //
        // Read our era info
        //
        // m_EraInfo must be listed in reverse chronological order. The most recent era
        // should be the first element.
        // That is, m_EraInfo[0] contains the most recent era.
        //
        // We know about 4 built-in eras, however users may add additional era(s) from the
        // registry, by adding values to HKLM\SYSTEM\CurrentControlSet\Control\Nls\Calendars\Japanese\Eras
        // we don't read the registry and instead we call WinRT to get the needed informatio
        //
        // Registry values look like:
        //      yyyy.mm.dd=era_abbrev_english_englishabbrev
        //
        // Where yyyy.mm.dd is the registry value name, and also the date of the era start.
        // yyyy, mm, and dd are the year, month & day the era begins (4, 2 & 2 digits long)
        // era is the Japanese Era name
        // abbrev is the Abbreviated Japanese Era Name
        // english is the English name for the Era (unused)
        // englishabbrev is the Abbreviated English name for the era.
        // . is a delimiter, but the value of . doesn't matter.
        // '_' marks the space between the japanese era name, japanese abbreviated era name
        //     english name, and abbreviated english names.
        //
        internal static EraInfo[] GetEraInfo()
        {
            // See if we need to build it
            if (japaneseEraInfo == null)
            {
                japaneseEraInfo = GetJapaneseEras();
                // See if we have to use the built-in eras
                if (japaneseEraInfo == null)
                {
                    // We know about some built-in ranges
                    EraInfo[] defaultEraRanges = new EraInfo[4];
                    defaultEraRanges[0] = new EraInfo(4, 1989, 1, 8, 1988, 1, GregorianCalendar.MaxYear - 1988,
                                                       "\x5e73\x6210", "\x5e73", "H");    // era #4 start year/month/day, yearOffset, minEraYear
                    defaultEraRanges[1] = new EraInfo(3, 1926, 12, 25, 1925, 1, 1989 - 1925,
                                                       "\x662d\x548c", "\x662d", "S");    // era #3,start year/month/day, yearOffset, minEraYear
                    defaultEraRanges[2] = new EraInfo(2, 1912, 7, 30, 1911, 1, 1926 - 1911,
                                                       "\x5927\x6b63", "\x5927", "T");    // era #2,start year/month/day, yearOffset, minEraYear
                    defaultEraRanges[3] = new EraInfo(1, 1868, 1, 1, 1867, 1, 1912 - 1867,
                                                       "\x660e\x6cbb", "\x660e", "M");    // era #1,start year/month/day, yearOffset, minEraYear

                    // Remember the ranges we built
                    japaneseEraInfo = defaultEraRanges;
                }
            }

            // return the era we found/made
            return japaneseEraInfo;
        }

        internal static volatile Calendar s_defaultInstance;
        internal GregorianCalendarHelper helper;

        /*=================================GetDefaultInstance==========================
        **Action: Internal method to provide a default intance of JapaneseCalendar.  Used by NLS+ implementation
        **       and other calendars.
        **Returns:
        **Arguments:
        **Exceptions:
        ============================================================================*/

        internal static Calendar GetDefaultInstance()
        {
            if (s_defaultInstance == null)
            {
                s_defaultInstance = new JapaneseCalendar();
            }
            return (s_defaultInstance);
        }

        public JapaneseCalendar()
        {
            try
            {
                // Probe for the ja-JP culture; if the platform cannot provide it,
                // the calendar cannot operate.
                new CultureInfo("ja-JP");
            }
            catch (ArgumentException e)
            {
                throw new TypeInitializationException(this.GetType().ToString(), e);
            }
            helper = new GregorianCalendarHelper(this, GetEraInfo());
        }

        internal override CalendarId ID
        {
            get
            {
                return CalendarId.JAPAN;
            }
        }

        public override DateTime AddMonths(DateTime time, int months)
        {
            return (helper.AddMonths(time, months));
        }

        public override DateTime AddYears(DateTime time, int years)
        {
            return (helper.AddYears(time, years));
        }

        /*=================================GetDaysInMonth==========================
        **Action: Returns the number of days in the month given by the year and month arguments.
        **Returns: The number of days in the given month.
        **Arguments:
        **      year The year in Japanese calendar.
        **      month The month
        **      era     The Japanese era value.
        **Exceptions
        **  ArgumentException  If month is less than 1 or greater * than 12.
        ============================================================================*/

        public override int GetDaysInMonth(int year, int month, int era)
        {
            return (helper.GetDaysInMonth(year, month, era));
        }

        public override int GetDaysInYear(int year, int era)
        {
            return (helper.GetDaysInYear(year, era));
        }

        public override int GetDayOfMonth(DateTime time)
        {
            return (helper.GetDayOfMonth(time));
        }

        public override DayOfWeek GetDayOfWeek(DateTime time)
        {
            return (helper.GetDayOfWeek(time));
        }

        public override int GetDayOfYear(DateTime time)
        {
            return (helper.GetDayOfYear(time));
        }

        public override int GetMonthsInYear(int year, int era)
        {
            return (helper.GetMonthsInYear(year, era));
        }

        [SuppressMessage("Microsoft.Contracts", "CC1055")]  // Skip extra error checking to avoid *potential* AppCompat problems.
        [System.Runtime.InteropServices.ComVisible(false)]
        public override int GetWeekOfYear(DateTime time, CalendarWeekRule rule, DayOfWeek firstDayOfWeek)
        {
            return (helper.GetWeekOfYear(time, rule, firstDayOfWeek));
        }

        /*=================================GetEra==========================
        **Action: Get the era value of the specified time.
        **Returns: The era value for the specified time.
        **Arguments:
        **      time the specified date time.
        **Exceptions: ArgumentOutOfRangeException if time is out of the valid era ranges.
        ============================================================================*/

        public override int GetEra(DateTime time)
        {
            return (helper.GetEra(time));
        }

        public override int GetMonth(DateTime time)
        {
            return (helper.GetMonth(time));
        }

        public override int GetYear(DateTime time)
        {
            return (helper.GetYear(time));
        }

        public override bool IsLeapDay(int year, int month, int day, int era)
        {
            return (helper.IsLeapDay(year, month, day, era));
        }

        public override bool IsLeapYear(int year, int era)
        {
            return (helper.IsLeapYear(year, era));
        }

        // Returns  the leap month in a calendar year of the specified era. This method returns 0
        // if this calendar does not have leap month, or this year is not a leap year.
        //
        [System.Runtime.InteropServices.ComVisible(false)]
        public override int GetLeapMonth(int year, int era)
        {
            return (helper.GetLeapMonth(year, era));
        }

        public override bool IsLeapMonth(int year, int month, int era)
        {
            return (helper.IsLeapMonth(year, month, era));
        }

        public override DateTime ToDateTime(int year, int month, int day, int hour, int minute, int second, int millisecond, int era)
        {
            return (helper.ToDateTime(year, month, day, hour, minute, second, millisecond, era));
        }

        // For Japanese calendar, four digit year is not used. Few emperors will live for more than one hundred years.
        // Therefore, for any two digit number, we just return the original number.

        public override int ToFourDigitYear(int year)
        {
            if (year <= 0)
            {
                throw new ArgumentOutOfRangeException("year",
                    SR.ArgumentOutOfRange_NeedPosNum);
            }
            Contract.EndContractBlock();

            if (year > helper.MaxYear)
            {
                throw new ArgumentOutOfRangeException(
                            "year",
                            String.Format(
                                CultureInfo.CurrentCulture,
                                SR.ArgumentOutOfRange_Range,
                                1,
                                helper.MaxYear));
            }
            return (year);
        }

        public override int[] Eras
        {
            get
            {
                return (helper.Eras);
            }
        }

        //
        // Return the various era strings
        // Note: The arrays are backwards of the eras
        //
        internal static String[] EraNames()
        {
            EraInfo[] eras = GetEraInfo();
            String[] eraNames = new String[eras.Length];

            for (int i = 0; i < eras.Length; i++)
            {
                // Strings are in chronological order, eras are backwards order.
                eraNames[i] = eras[eras.Length - i - 1].eraName;
            }

            return eraNames;
        }

        internal static String[] AbbrevEraNames()
        {
            EraInfo[] eras = GetEraInfo();
            String[] erasAbbrev = new String[eras.Length];

            for (int i = 0; i < eras.Length; i++)
            {
                // Strings are in chronological order, eras are backwards order.
                erasAbbrev[i] = eras[eras.Length - i - 1].abbrevEraName;
            }

            return erasAbbrev;
        }

        internal static String[] EnglishEraNames()
        {
            EraInfo[] eras = GetEraInfo();
            String[] erasEnglish = new String[eras.Length];

            for (int i = 0; i < eras.Length; i++)
            {
                // Strings are in chronological order, eras are backwards order.
                erasEnglish[i] = eras[eras.Length - i - 1].englishEraName;
            }

            return erasEnglish;
        }

        private const int DEFAULT_TWO_DIGIT_YEAR_MAX = 99;

        internal override bool IsValidYear(int year, int era)
        {
            return helper.IsValidYear(year, era);
        }

        public override int TwoDigitYearMax
        {
            get
            {
                if (twoDigitYearMax == -1)
                {
                    twoDigitYearMax = GetSystemTwoDigitYearSetting(ID, DEFAULT_TWO_DIGIT_YEAR_MAX);
                }
                return (twoDigitYearMax);
            }

            set
            {
                VerifyWritable();
                if (value < 99 || value > helper.MaxYear)
                {
                    // FIX: the parameter being validated here is the property setter's
                    // implicit 'value', not 'year' (CA2208); the old code reported
                    // the wrong parameter name in the exception.
                    throw new ArgumentOutOfRangeException(
                                "value",
                                String.Format(
                                    CultureInfo.CurrentCulture,
                                    SR.ArgumentOutOfRange_Range,
                                    99,
                                    helper.MaxYear));
                }
                twoDigitYearMax = value;
            }
        }
    }
}
namespace Microsoft.Protocols.TestSuites.MS_OXCMAPIHTTP
{
    /// <summary>
    /// The request type of MS-OXCMAPIHTTP
    /// </summary>
    public enum RequestType
    {
        /// <summary>
        /// The connect request type
        /// </summary>
        Connect,

        /// <summary>
        /// The Execute request type
        /// </summary>
        Execute,

        /// <summary>
        /// The Disconnect type
        /// </summary>
        Disconnect,

        /// <summary>
        /// The NotificationWait request type
        /// </summary>
        NotificationWait,

        /// <summary>
        /// The PING request type
        /// </summary>
        PING,

        /// <summary>
        /// The Bind request type
        /// </summary>
        Bind,

        /// <summary>
        /// The Unbind request type
        /// </summary>
        Unbind,

        /// <summary>
        /// The CompareMIds request type
        /// </summary>
        CompareMIds,

        /// <summary>
        /// The DNToMId request type
        /// </summary>
        DNToMId,

        /// <summary>
        /// The GetMatches request type
        /// </summary>
        GetMatches,

        /// <summary>
        /// The GetPropList request type
        /// </summary>
        GetPropList,

        /// <summary>
        /// The GetProps request type
        /// </summary>
        GetProps,

        /// <summary>
        /// The GetSpecialTable request type
        /// </summary>
        GetSpecialTable,

        /// <summary>
        /// The GetTemplateInfo request type
        /// </summary>
        GetTemplateInfo,

        /// <summary>
        /// The ModLinkAtt request type
        /// </summary>
        ModLinkAtt,

        /// <summary>
        /// The ModProps request type
        /// </summary>
        ModProps,

        /// <summary>
        /// The QueryColumns request type
        /// </summary>
        QueryColumns,

        /// <summary>
        /// The QueryRows request type
        /// </summary>
        QueryRows,

        /// <summary>
        /// The ResolveNames request type
        /// </summary>
        ResolveNames,

        /// <summary>
        /// The ResortRestriction request type
        /// </summary>
        ResortRestriction,

        /// <summary>
        /// The SeekEntries request type
        /// </summary>
        SeekEntries,

        /// <summary>
        /// The UpdateStat request type
        /// </summary>
        UpdateStat,

        /// <summary>
        /// The GetMailboxUrl request type
        /// </summary>
        GetMailboxUrl,

        /// <summary>
        /// The GetAddressBookUrl request type
        /// </summary>
        GetAddressBookUrl
    }

    /// <summary>
    /// The server endpoint type
    /// </summary>
    public enum ServerEndpoint
    {
        /// <summary>
        /// The endpoint used for Mailbox server
        /// </summary>
        MailboxServerEndpoint,

        /// <summary>
        /// The endpoint used for address book server
        /// </summary>
        AddressBookServerEndpoint
    }

    #region Address Book

    /// <summary>
    /// NspiGetSpecialTable flag values are used as bit flags in the NspiGetSpecialTable method to specify optional behavior to a server.
    /// </summary>
    public enum NspiGetSpecialTableFlags
    {
        /// <summary>
        /// Specify none to 0.
        /// </summary>
        None = 0x00000000,

        /// <summary>
        /// Specify that the server MUST return the table of the available address creation templates.
        /// Specify that this flag causes the server to ignore the NspiUnicodeStrings flag.
        /// </summary>
        NspiAddressCreationTemplates = 0x00000002,

        /// <summary>
        /// Specifies that the server MUST return all strings as Unicode representations
        /// rather than as multibyte strings in the client's code page.
        /// </summary>
        NspiUnicodeStrings = 0x00000004,
    }

    /// <summary>
    /// The NspiGetTemplateInfo flag values are used as bit flags in the NspiGetTemplateInfo method to specify optional behavior to a server.
    /// </summary>
    public enum NspiGetTemplateInfoFlags
    {
        /// <summary>
        /// Specifies that the server is to return the value that represents a template.
        /// </summary>
        TI_TEMPLATE = 0x00000001,

        /// <summary>
        /// Specifies that the server is to return the value of the script that is associated with a template.
        /// </summary>
        TI_SCRIPT = 0x00000004,

        /// <summary>
        /// Specifies that the server is to return the e-mail type that is associated with a template.
        /// </summary>
        TI_EMT = 0x00000010,

        /// <summary>
        /// Specifies that the server is to return the name of the help file that is associated with a template.
        /// </summary>
        TI_HELPFILE_NAME = 0x00000020,

        /// <summary>
        /// Specifies that the server is to return the contents of the help file that is associated with a template.
        /// </summary>
        TI_HELPFILE_CONTENTS = 0x00000040,
    }

    /// <summary>
    /// The values are used to specify a specific sort orders for tables.
    /// </summary>
    public enum TableSortOrders
    {
        /// <summary>
        /// The table is sorted ascending on the PidTagDisplayName property, as specified in [MS-OXCFOLD] section 2.2.2.2.2.3.
        /// All Exchange NSPI servers MUST support this sort order for at least one LCID.
        /// </summary>
        SortTypeDisplayName = 0x00000000,

        /// <summary>
        /// The table is sorted ascending on the PidTagAddressBookPhoneticDisplayName property, as specified in [MS-OXOABK] section 2.2.3.9.
        /// Exchange NSPI servers SHOULD support this sort order. Exchange NSPI servers MAY support this only for some LCIDs.
        /// </summary>
        SortTypePhoneticDisplayName = 0x00000003,

        /// <summary>
        /// The table is sorted ascending on the PidTagDisplayName property.
        /// The client MUST set this value only when using the NspiGetMatches method to open a non-writable table on an object-valued property.
        /// </summary>
        SortTypeDisplayName_RO = 0x000003E8,

        /// <summary>
        /// The table is sorted ascending on the PidTagDisplayName property.
        /// The client MUST set this value only when using the NspiGetMatches method to open a writable table on an object-valued property.
        /// </summary>
        SortTypeDisplayName_W = 0x000003E9,
    }

    /// <summary>
    /// The values are used to specify display types.
    /// </summary>
    public enum DisplayTypeValues : uint
    {
        /// <summary>
        /// A typical messaging user.
        /// </summary>
        DT_MAILUSER = 0x00000000,

        /// <summary>
        /// A distribution list.
        /// </summary>
        DT_DISTLIST = 0x00000001,

        /// <summary>
        /// A forum, such as a bulletin board service or a public or shared folder.
        /// </summary>
        DT_FORUM = 0x00000002,

        /// <summary>
        /// An automated agent, such as Quote-Of-The-Day or a weather chart display.
        /// </summary>
        DT_AGENT = 0x00000003,

        /// <summary>
        /// An Address Book object defined for a large group, such as helpdesk, accounting, coordinator,
        /// or department. Department objects usually have this display type.
        /// </summary>
        DT_ORGANIZATION = 0x00000004,

        /// <summary>
        /// A private, personally administered distribution list.
        /// </summary>
        DT_PRIVATE_DISTLIST = 0x00000005,

        /// <summary>
        /// An Address Book object known to be from a foreign or remote messaging system.
        /// </summary>
        DT_REMOTE_MAILUSER = 0x00000006,

        /// <summary>
        /// An address book hierarchy table container.
        /// An Exchange NSPI server MUST NOT return this display type except as part of an EntryID of an object in the address book hierarchy table.
        /// </summary>
        DT_CONTAINER = 0x00000100,

        /// <summary>
        /// A display template object. An Exchange NSPI server MUST NOT return this display type.
        /// </summary>
        DT_TEMPLATE = 0x00000101,

        /// <summary>
        /// An address creation template.
        /// An Exchange NSPI server MUST NOT return this display type except as part of an EntryID of an object in the Address Creation Table.
        /// </summary>
        DT_ADDRESS_TEMPLATE = 0x00000102,

        /// <summary>
        /// A search template. An Exchange NSPI server MUST NOT return this display type.
        /// </summary>
        DT_SEARCH = 0x00000200
    }

    /// <summary>
    /// The language code identifier (LCID) specified in this section is associated with the minimal required sort order for Unicode strings.
    /// </summary>
    public enum DefaultLCID
    {
        /// <summary>
        /// Represents the default LCID that is used for comparison of Unicode string representations.
        /// </summary>
        NSPI_DEFAULT_LOCALE = 0x00000409,
    }

    /// <summary>
    /// The required code pages listed in this section are associated with the string handling in the Exchange Server NSPI Protocol,
    /// and they appear in input parameters to methods in the Exchange Server NSPI Protocol.
    /// </summary>
    public enum RequiredCodePages : uint
    {
        /// <summary>
        /// Represents the Teletex code page.
        /// </summary>
        CP_TELETEX = 0x00004F25,

        /// <summary>
        /// Represents the Unicode code page.
        /// </summary>
        CP_WINUNICODE = 0x000004B0,
    }

    /// <summary>
    /// The positioning Minimal Entry IDs are used to specify objects in the address book as a function of their positions in tables.
    /// </summary>
    public enum MinimalEntryIDs
    {
        /// <summary>
        /// Specifies the position before the first row in the current address book container.
        /// </summary>
        MID_BEGINNING_OF_TABLE = 0x00000000,

        /// <summary>
        /// Specifies the position after the last row in the current address book container.
        /// </summary>
        MID_END_OF_TABLE = 0x00000002,

        /// <summary>
        /// Specifies the current position in a table. This Minimal Entry ID is only valid in the NspiUpdateStat method.
        /// In all other cases, it is an invalid Minimal Entry ID, guaranteed to not specify any object in the address book.
        /// </summary>
        MID_CURRENT = 0x00000001,
    }

    /// <summary>
    /// The property type values are used to specify property types.
    /// </summary>
    public enum PropertyTypeValues : uint
    {
        /// <summary>
        /// 2 bytes, a 16-bit integer.
        /// </summary>
        PtypInteger16 = 0x00000002,

        /// <summary>
        /// 4 bytes, a 32-bit integer.
        /// </summary>
        PtypInteger32 = 0x00000003,

        /// <summary>
        /// 1 byte, restricted to 1 or 0.
        /// </summary>
        PtypBoolean = 0x0000000B,

        /// <summary>
        /// Variable size, a string of multi-byte characters in externally specified encoding with terminating null character (single 0 byte).
        /// </summary>
        PtypString8 = 0x0000001E,

        /// <summary>
        /// Variable size, a COUNT followed by that many bytes.
        /// </summary>
        PtypBinary = 0x00000102,

        /// <summary>
        /// Variable size, a string of Unicode characters in UTF-16LE encoding with terminating null character (2 bytes of zero).
        /// </summary>
        PtypString = 0x0000001F,

        /// <summary>
        /// 16 bytes, a GUID with Data1, Data2, and Data3 fields in little-endian format.
        /// </summary>
        PtypGuid = 0x00000048,

        /// <summary>
        /// 8 bytes, a 64-bit integer representing the number of 100-nanosecond intervals since January 1, 1601.
        /// </summary>
        PtypTime = 0x00000040,

        /// <summary>
        /// 4 bytes, a 32-bit integer encoding error information.
        /// </summary>
        PtypErrorCode = 0x0000000A,

        /// <summary>
        /// Variable size, a COUNT followed by that many PtypInteger16 values.
        /// </summary>
        PtypMultipleInteger16 = 0x00001002,

        /// <summary>
        /// Variable size, a COUNT followed by that many PtypInteger32 values.
        /// </summary>
        PtypMultipleInteger32 = 0x00001003,

        /// <summary>
        /// Variable size, a COUNT followed by that many PtypString8 values.
        /// </summary>
        PtypMultipleString8 = 0x0000101E,

        /// <summary>
        /// Variable size, a COUNT followed by that many PtypBinary values.
        /// </summary>
        PtypMultipleBinary = 0x00001102,

        /// <summary>
        /// Variable size, a COUNT followed by that many PtypString values.
        /// </summary>
        PtypMultipleString = 0x0000101F,

        /// <summary>
        /// Variable size, a COUNT followed by that many PtypGuid values.
        /// </summary>
        PtypMultipleGuid = 0x00001048,

        /// <summary>
        /// Variable size, a COUNT followed by that many PtypTime values.
        /// </summary>
        PtypMultipleTime = 0x00001040,

        /// <summary>
        /// Single 32-bit value, referencing an address list.
        /// </summary>
        PtypEmbeddedTable = 0x0000000D,

        /// <summary>
        /// Clients MUST NOT specify this property type in any method's input parameters.
        /// The server MUST specify this property type in any method's output parameters to indicate that a property has a value that cannot be expressed in the Exchange Server NSPI Protocol.
        /// </summary>
        PtypNull = 0x00000001,

        /// <summary>
        /// Clients specify this property type in a method's input parameter to indicate that the client will accept any property type the server chooses when returning propvalues.
        /// Servers MUST NOT specify this property type in any method's output parameters except the method NspiGetIDsFromNames.
        /// </summary>
        PtypUnspecified = 0x00000000
    }

    /// <summary>
    /// Ambiguous name resolution (ANR) Minimal Entry IDs are used to specify the outcome of the ANR process.
/// </summary> public enum ANRMinEntryIDs { /// <summary> /// The ANR process is unable to map a string to any objects in the address book. /// </summary> MID_UNRESOLVED = 0x00000000, /// <summary> /// The ANR process maps a string to multiple objects in the address book. /// </summary> MID_AMBIGUOUS = 0x0000001, /// <summary> /// The ANR process maps a string to a single object in the address book. /// </summary> MID_RESOLVED = 0x0000002, } /// <summary> /// The property flag values that are used as bit flags in GetPropList, GetProps, and QueryRows methods to specify optional behavior to a server. /// </summary> public enum RetrievePropertyFlags { /// <summary> /// Client requires that the server MUST NOT include proptags with the PtypEmbeddedTable property type /// in any lists of proptags that the server creates on behalf of the client. /// </summary> fSkipObjects = 0x00000001, /// <summary> /// Client requires that the server MUST return Entry ID values in Ephemeral Entry ID form. /// </summary> fEphID = 0x00000002, } /// <summary> /// The QueryColumns flag value is used as a bit flag in the QueryColumns method to specify optional behavior to a server. /// </summary> public enum NspiQueryColumnsFlag : uint { /// <summary> /// Specifies that the server MUST return all proptags that specify values with string /// representations as having the PtypString property type. /// </summary> NspiUnicodeProptypes = 0x80000000, } /// <summary> /// The property ID values are used to specify property ID. /// </summary> public enum PropertyID : uint { /// <summary> /// The property ID of PidTagDisplayName. /// </summary> PidTagDisplayName = 0x3001, /// <summary> /// The property ID of PidTagAddressBookX509Certificate. /// </summary> PidTagAddressBookX509Certificate = 0x8c6a, /// <summary> /// The property ID of PidTagUserX509Certificate. /// </summary> PidTagUserX509Certificate = 0x3A70, /// <summary> /// The property ID of PidTagEntryId. 
/// </summary> PidTagEntryId = 0x0FFF, /// <summary> /// The property ID of PidTagDisplayType. /// </summary> PidTagDisplayType = 0x3900, /// <summary> /// The property ID of PidTagAddressBookMember. /// </summary> PidTagAddressBookMember = 0x8009, /// <summary> /// The property ID of PidTagAddressBookPublicDelegates. /// </summary> PidTagAddressBookPublicDelegates = 0x8015, } #endregion }
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using System.Collections.Generic;
using System.Linq;
using osu.Framework.Allocation;
using osu.Framework.Extensions.EnumExtensions;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Primitives;
using osu.Framework.Graphics.UserInterface;
using osu.Framework.Utils;
using osu.Game.Extensions;
using osu.Game.Graphics.UserInterface;
using osu.Game.Rulesets.Edit;
using osu.Game.Screens.Edit.Compose.Components;
using osuTK;

namespace osu.Game.Skinning.Editor
{
    /// <summary>
    /// Handles rotation, scaling, flipping, movement and anchor/origin adjustment
    /// for the current selection of skin components in the skin editor.
    /// </summary>
    public class SkinSelectionHandler : SelectionHandler<ISkinnableDrawable>
    {
        [Resolved]
        private SkinEditor skinEditor { get; set; }

        /// <summary>
        /// Rotates the selection by <paramref name="angle"/> degrees. A single item rotates
        /// around its own origin; multiple items rotate around the selection quad's centre.
        /// </summary>
        public override bool HandleRotation(float angle)
        {
            if (SelectedBlueprints.Count == 1)
            {
                // for single items, rotate around the origin rather than the selection centre.
                ((Drawable)SelectedBlueprints.First().Item).Rotation += angle;
            }
            else
            {
                var selectionQuad = getSelectionQuad();

                foreach (var b in SelectedBlueprints)
                {
                    var drawableItem = (Drawable)b.Item;

                    var rotatedPosition = RotatePointAroundOrigin(b.ScreenSpaceSelectionPoint, selectionQuad.Centre, angle);
                    updateDrawablePosition(drawableItem, rotatedPosition);

                    drawableItem.Rotation += angle;
                }
            }

            // this isn't always the case but let's be lenient for now.
            return true;
        }

        /// <summary>
        /// Scales the selection by a drag amount originating from the given selection-box
        /// <paramref name="anchor"/>, repositioning and rescaling each item so relative
        /// layout within the selection quad is preserved.
        /// </summary>
        public override bool HandleScale(Vector2 scale, Anchor anchor)
        {
            // convert scale to screen space
            scale = ToScreenSpace(scale) - ToScreenSpace(Vector2.Zero);

            adjustScaleFromAnchor(ref scale, anchor);

            // the selection quad is always upright, so use an AABB rect to make mutating the values easier.
            var selectionRect = getSelectionQuad().AABBFloat;

            // If the selection has no area we cannot scale it
            if (selectionRect.Area == 0)
                return false;

            // copy to mutate, as we will need to compare to the original later on.
            var adjustedRect = selectionRect;

            // first, remove any scale axis we are not interested in.
            // NOTE(review): adjustScaleFromAnchor already zeroes these axes; this repeat looks redundant but is kept as-is — confirm before removing.
            if (anchor.HasFlagFast(Anchor.x1)) scale.X = 0;
            if (anchor.HasFlagFast(Anchor.y1)) scale.Y = 0;

            // for now aspect lock scale adjustments that occur at corners..
            if (!anchor.HasFlagFast(Anchor.x1) && !anchor.HasFlagFast(Anchor.y1))
            {
                // project scale vector along diagonal
                Vector2 diag = (selectionRect.TopLeft - selectionRect.BottomRight).Normalized();
                scale = Vector2.Dot(scale, diag) * diag;
            }
            // ..or if any of the selection have been rotated.
            // this is to avoid requiring skew logic (which would likely not be the user's expected transform anyway).
            else if (SelectedBlueprints.Any(b => !Precision.AlmostEquals(((Drawable)b.Item).Rotation, 0)))
            {
                if (anchor.HasFlagFast(Anchor.x1))
                    // if dragging from the horizontal centre, only a vertical component is available.
                    scale.X = scale.Y / selectionRect.Height * selectionRect.Width;
                else
                    // in all other cases (arbitrarily) use the horizontal component for aspect lock.
                    scale.Y = scale.X / selectionRect.Width * selectionRect.Height;
            }

            // dragging from the top or left edge moves the rect's origin as well as its size.
            if (anchor.HasFlagFast(Anchor.x0)) adjustedRect.X -= scale.X;
            if (anchor.HasFlagFast(Anchor.y0)) adjustedRect.Y -= scale.Y;

            adjustedRect.Width += scale.X;
            adjustedRect.Height += scale.Y;

            // scale adjust applied to each individual item should match that of the quad itself.
            var scaledDelta = new Vector2(
                MathF.Max(adjustedRect.Width / selectionRect.Width, 0),
                MathF.Max(adjustedRect.Height / selectionRect.Height, 0)
            );

            foreach (var b in SelectedBlueprints)
            {
                var drawableItem = (Drawable)b.Item;

                // each drawable's relative position should be maintained in the scaled quad.
                var screenPosition = b.ScreenSpaceSelectionPoint;

                var relativePositionInOriginal = new Vector2(
                    (screenPosition.X - selectionRect.TopLeft.X) / selectionRect.Width,
                    (screenPosition.Y - selectionRect.TopLeft.Y) / selectionRect.Height
                );

                var newPositionInAdjusted = new Vector2(
                    adjustedRect.TopLeft.X + adjustedRect.Width * relativePositionInOriginal.X,
                    adjustedRect.TopLeft.Y + adjustedRect.Height * relativePositionInOriginal.Y
                );

                updateDrawablePosition(drawableItem, newPositionInAdjusted);
                drawableItem.Scale *= scaledDelta;
            }

            return true;
        }

        /// <summary>
        /// Mirrors the selection across the selection quad's horizontal or vertical axis,
        /// negating each item's scale on that axis and compensating its rotation.
        /// </summary>
        public override bool HandleFlip(Direction direction)
        {
            var selectionQuad = getSelectionQuad();

            Vector2 scaleFactor = direction == Direction.Horizontal ? new Vector2(-1, 1) : new Vector2(1, -1);

            foreach (var b in SelectedBlueprints)
            {
                var drawableItem = (Drawable)b.Item;

                var flippedPosition = GetFlippedPosition(direction, selectionQuad, b.ScreenSpaceSelectionPoint);

                updateDrawablePosition(drawableItem, flippedPosition);

                drawableItem.Scale *= scaleFactor;
                // negate rotation modulo 180 so the item's visual tilt mirrors correctly after the scale flip.
                drawableItem.Rotation -= drawableItem.Rotation % 180 * 2;
            }

            return true;
        }

        /// <summary>
        /// Translates the selection by the drag delta, re-snapping each non-fixed-anchor
        /// item to its closest anchor afterwards.
        /// </summary>
        public override bool HandleMovement(MoveSelectionEvent<ISkinnableDrawable> moveEvent)
        {
            foreach (var c in SelectedBlueprints)
            {
                var item = c.Item;
                Drawable drawable = (Drawable)item;

                drawable.Position += drawable.ScreenSpaceDeltaToParentSpace(moveEvent.ScreenSpaceDelta);

                if (item.UsesFixedAnchor) continue;

                applyClosestAnchor(drawable);
            }

            return true;
        }

        private static void applyClosestAnchor(Drawable drawable) => applyAnchor(drawable, getClosestAnchor(drawable));

        protected override void OnSelectionChanged()
        {
            base.OnSelectionChanged();

            // skin components support every transform except reversing.
            SelectionBox.CanRotate = true;
            SelectionBox.CanScaleX = true;
            SelectionBox.CanScaleY = true;
            SelectionBox.CanFlipX = true;
            SelectionBox.CanFlipY = true;
            SelectionBox.CanReverse = false;
        }

        protected override void DeleteItems(IEnumerable<ISkinnableDrawable> items) =>
            skinEditor.DeleteItems(items.ToArray());

        /// <summary>
        /// Builds the "Anchor" and "Origin" context menu entries (plus inherited items),
        /// each reflecting the current ternary state of the selection.
        /// </summary>
        protected override IEnumerable<MenuItem> GetContextMenuItemsForSelection(IEnumerable<SelectionBlueprint<ISkinnableDrawable>> selection)
        {
            var closestItem = new TernaryStateRadioMenuItem("Closest", MenuItemType.Standard, _ => applyClosestAnchors())
            {
                State = { Value = GetStateFromSelection(selection, c => !c.Item.UsesFixedAnchor) }
            };

            yield return new OsuMenuItem("Anchor")
            {
                Items = createAnchorItems((d, a) => d.UsesFixedAnchor && ((Drawable)d).Anchor == a, applyFixedAnchors)
                        .Prepend(closestItem)
                        .ToArray()
            };

            yield return new OsuMenuItem("Origin")
            {
                Items = createAnchorItems((d, o) => ((Drawable)d).Origin == o, applyOrigins).ToArray()
            };

            foreach (var item in base.GetContextMenuItemsForSelection(selection))
                yield return item;

            // creates one ternary radio item per displayable anchor, checked when the whole selection matches.
            IEnumerable<TernaryStateMenuItem> createAnchorItems(Func<ISkinnableDrawable, Anchor, bool> checkFunction, Action<Anchor> applyFunction)
            {
                var displayableAnchors = new[]
                {
                    Anchor.TopLeft,
                    Anchor.TopCentre,
                    Anchor.TopRight,
                    Anchor.CentreLeft,
                    Anchor.Centre,
                    Anchor.CentreRight,
                    Anchor.BottomLeft,
                    Anchor.BottomCentre,
                    Anchor.BottomRight,
                };

                return displayableAnchors.Select(a =>
                {
                    return new TernaryStateRadioMenuItem(a.ToString(), MenuItemType.Standard, _ => applyFunction(a))
                    {
                        State = { Value = GetStateFromSelection(selection, c => checkFunction(c.Item, a)) }
                    };
                });
            }
        }

        // moves a drawable so its anchor point lands on the given screen-space position.
        private static void updateDrawablePosition(Drawable drawable, Vector2 screenSpacePosition)
        {
            drawable.Position =
                drawable.Parent.ToLocalSpace(screenSpacePosition) - drawable.AnchorPosition;
        }

        /// <summary>
        /// Applies a new origin to each selected item, adjusting position so the item does
        /// not visually move, then re-snaps non-fixed anchors.
        /// </summary>
        private void applyOrigins(Anchor origin)
        {
            foreach (var item in SelectedItems)
            {
                var drawable = (Drawable)item;

                if (origin == drawable.Origin) continue;

                var previousOrigin = drawable.OriginPosition;
                drawable.Origin = origin;
                drawable.Position += drawable.OriginPosition - previousOrigin;

                if (item.UsesFixedAnchor) continue;

                applyClosestAnchor(drawable);
            }
        }

        /// <summary>
        /// A screen-space quad surrounding all selected drawables, accounting for their full displayed size.
        /// </summary>
        /// <returns></returns>
        private Quad getSelectionQuad() =>
            GetSurroundingQuad(SelectedBlueprints.SelectMany(b => b.Item.ScreenSpaceDrawQuad.GetVertices().ToArray()));

        // pins every selected item to one explicit anchor.
        private void applyFixedAnchors(Anchor anchor)
        {
            foreach (var item in SelectedItems)
            {
                var drawable = (Drawable)item;

                item.UsesFixedAnchor = true;
                applyAnchor(drawable, anchor);
            }
        }

        // reverts every selected item to automatic (closest) anchoring.
        private void applyClosestAnchors()
        {
            foreach (var item in SelectedItems)
            {
                item.UsesFixedAnchor = false;
                applyClosestAnchor((Drawable)item);
            }
        }

        /// <summary>
        /// Determines which of the nine anchors is nearest to the drawable's origin point,
        /// by splitting the parent's area into a 3x3 grid of thirds.
        /// </summary>
        private static Anchor getClosestAnchor(Drawable drawable)
        {
            var parent = drawable.Parent;

            if (parent == null)
                return drawable.Anchor;

            var screenPosition = getScreenPosition();

            var absolutePosition = parent.ToLocalSpace(screenPosition);
            var factor = parent.RelativeToAbsoluteFactor;

            var result = default(Anchor);

            static Anchor getAnchorFromPosition(float xOrY, Anchor anchor0, Anchor anchor1, Anchor anchor2)
            {
                if (xOrY >= 2 / 3f)
                    return anchor2;

                if (xOrY >= 1 / 3f)
                    return anchor1;

                return anchor0;
            }

            result |= getAnchorFromPosition(absolutePosition.X / factor.X, Anchor.x0, Anchor.x1, Anchor.x2);
            result |= getAnchorFromPosition(absolutePosition.Y / factor.Y, Anchor.y0, Anchor.y1, Anchor.y2);

            return result;

            // the screen-space point of the drawable's origin within its draw quad.
            Vector2 getScreenPosition()
            {
                var quad = drawable.ScreenSpaceDrawQuad;
                var origin = drawable.Origin;

                var pos = quad.TopLeft;

                if (origin.HasFlagFast(Anchor.x2))
                    pos.X += quad.Width;
                else if (origin.HasFlagFast(Anchor.x1))
                    pos.X += quad.Width / 2f;

                if (origin.HasFlagFast(Anchor.y2))
                    pos.Y += quad.Height;
                else if (origin.HasFlagFast(Anchor.y1))
                    pos.Y += quad.Height / 2f;

                return pos;
            }
        }

        // changes a drawable's anchor, compensating position so it does not visually move.
        private static void applyAnchor(Drawable drawable, Anchor anchor)
        {
            if (anchor == drawable.Anchor) return;

            var previousAnchor = drawable.AnchorPosition;
            drawable.Anchor = anchor;
            drawable.Position -= drawable.AnchorPosition - previousAnchor;
        }

        private static void adjustScaleFromAnchor(ref Vector2 scale, Anchor reference)
        {
            // cancel out scale in axes we don't care about (based on which drag handle was used).
            if ((reference & Anchor.x1) > 0) scale.X = 0;
            if ((reference & Anchor.y1) > 0) scale.Y = 0;

            // reverse the scale direction if dragging from top or left.
            if ((reference & Anchor.x0) > 0) scale.X = -scale.X;
            if ((reference & Anchor.y0) > 0) scale.Y = -scale.Y;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using System.Collections.Generic;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc.Abstractions;
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.AspNetCore.Routing;

namespace Microsoft.AspNetCore.Mvc.Diagnostics
{
    /// <summary>
    /// An <see cref="EventData"/> that occurs before an action.
    /// </summary>
    public sealed class BeforeActionEventData : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "BeforeAction";

        /// <summary>
        /// Initializes a new instance of <see cref="BeforeActionEventData"/>.
        /// </summary>
        /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
        /// <param name="httpContext">The <see cref="HttpContext"/>.</param>
        /// <param name="routeData">The <see cref="RouteData"/>.</param>
        public BeforeActionEventData(ActionDescriptor actionDescriptor, HttpContext httpContext, RouteData routeData)
        {
            ActionDescriptor = actionDescriptor;
            HttpContext = httpContext;
            RouteData = routeData;
        }

        /// <summary>
        /// The action.
        /// </summary>
        public ActionDescriptor ActionDescriptor { get; }

        /// <summary>
        /// The context.
        /// </summary>
        public HttpContext HttpContext { get; }

        /// <summary>
        /// The route data.
        /// </summary>
        public RouteData RouteData { get; }

        /// <inheritdoc/>
        protected override int Count => 3;

        /// <inheritdoc/>
        protected override KeyValuePair<string, object> this[int index] => index switch
        {
            0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
            1 => new KeyValuePair<string, object>(nameof(HttpContext), HttpContext),
            2 => new KeyValuePair<string, object>(nameof(RouteData), RouteData),
            _ => throw new IndexOutOfRangeException(nameof(index))
        };
    }

    /// <summary>
    /// An <see cref="EventData"/> that occurs after an action.
    /// </summary>
    public sealed class AfterActionEventData : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "AfterAction";

        /// <summary>
        /// Initializes a new instance of <see cref="AfterActionEventData"/>.
        /// </summary>
        /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
        /// <param name="httpContext">The <see cref="HttpContext"/>.</param>
        /// <param name="routeData">The <see cref="RouteData"/>.</param>
        public AfterActionEventData(ActionDescriptor actionDescriptor, HttpContext httpContext, RouteData routeData)
        {
            ActionDescriptor = actionDescriptor;
            HttpContext = httpContext;
            RouteData = routeData;
        }

        /// <summary>
        /// The action.
        /// </summary>
        public ActionDescriptor ActionDescriptor { get; }

        /// <summary>
        /// The context.
        /// </summary>
        public HttpContext HttpContext { get; }

        /// <summary>
        /// The route data.
        /// </summary>
        public RouteData RouteData { get; }

        /// <inheritdoc/>
        protected override int Count => 3;

        /// <inheritdoc/>
        protected override KeyValuePair<string, object> this[int index] => index switch
        {
            0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
            1 => new KeyValuePair<string, object>(nameof(HttpContext), HttpContext),
            2 => new KeyValuePair<string, object>(nameof(RouteData), RouteData),
            _ => throw new IndexOutOfRangeException(nameof(index))
        };
    }

    /// <summary>
    /// An <see cref="EventData"/> that occurs before <see cref="IAuthorizationFilter.OnAuthorization(AuthorizationFilterContext)"/>.
    /// </summary>
    public sealed class BeforeAuthorizationFilterOnAuthorizationEventData : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "BeforeOnAuthorization";

        /// <summary>
        /// Initializes a new instance of <see cref="BeforeAuthorizationFilterOnAuthorizationEventData"/>.
        /// </summary>
        /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
        /// <param name="authorizationContext">The <see cref="AuthorizationFilterContext"/>.</param>
        /// <param name="filter">The <see cref="IFilterMetadata"/>.</param>
        public BeforeAuthorizationFilterOnAuthorizationEventData(ActionDescriptor actionDescriptor, AuthorizationFilterContext authorizationContext, IFilterMetadata filter)
        {
            ActionDescriptor = actionDescriptor;
            AuthorizationContext = authorizationContext;
            Filter = filter;
        }

        /// <summary>
        /// The action.
        /// </summary>
        public ActionDescriptor ActionDescriptor { get; }

        /// <summary>
        /// The authorization context.
        /// </summary>
        public AuthorizationFilterContext AuthorizationContext { get; }

        /// <summary>
        /// The authorization filter.
        /// </summary>
        public IFilterMetadata Filter { get; }

        /// <inheritdoc/>
        protected override int Count => 3;

        /// <inheritdoc/>
        protected override KeyValuePair<string, object> this[int index] => index switch
        {
            0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
            1 => new KeyValuePair<string, object>(nameof(AuthorizationContext), AuthorizationContext),
            2 => new KeyValuePair<string, object>(nameof(Filter), Filter),
            _ => throw new IndexOutOfRangeException(nameof(index))
        };
    }

    /// <summary>
    /// An <see cref="EventData"/> that occurs after <see cref="IAuthorizationFilter.OnAuthorization(AuthorizationFilterContext)"/>.
    /// </summary>
    public sealed class AfterAuthorizationFilterOnAuthorizationEventData : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "AfterOnAuthorization";

        /// <summary>
        /// Initializes a new instance of <see cref="AfterAuthorizationFilterOnAuthorizationEventData"/>.
        /// </summary>
        /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
        /// <param name="authorizationContext">The <see cref="AuthorizationFilterContext"/>.</param>
        /// <param name="filter">The <see cref="IFilterMetadata"/>.</param>
        public AfterAuthorizationFilterOnAuthorizationEventData(ActionDescriptor actionDescriptor, AuthorizationFilterContext authorizationContext, IFilterMetadata filter)
        {
            ActionDescriptor = actionDescriptor;
            AuthorizationContext = authorizationContext;
            Filter = filter;
        }

        /// <summary>
        /// The action.
        /// </summary>
        public ActionDescriptor ActionDescriptor { get; }

        /// <summary>
        /// The authorization context.
        /// </summary>
        public AuthorizationFilterContext AuthorizationContext { get; }

        /// <summary>
        /// The authorization filter.
        /// </summary>
        public IFilterMetadata Filter { get; }

        /// <inheritdoc/>
        protected override int Count => 3;

        /// <inheritdoc/>
        protected override KeyValuePair<string, object> this[int index] => index switch
        {
            0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
            1 => new KeyValuePair<string, object>(nameof(AuthorizationContext), AuthorizationContext),
            2 => new KeyValuePair<string, object>(nameof(Filter), Filter),
            _ => throw new IndexOutOfRangeException(nameof(index))
        };
    }

    /// <summary>
    /// An <see cref="EventData"/> that occurs before <see cref="IResourceFilter"/> execution.
    /// </summary>
    public sealed class BeforeResourceFilterOnResourceExecutionEventData : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "BeforeOnResourceExecution";

        /// <summary>
        /// Initializes a new instance of <see cref="BeforeResourceFilterOnResourceExecutionEventData"/>.
        /// </summary>
        /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
        /// <param name="resourceExecutingContext">The <see cref="ResourceExecutingContext"/>.</param>
        /// <param name="filter">The <see cref="IFilterMetadata"/>.</param>
        public BeforeResourceFilterOnResourceExecutionEventData(ActionDescriptor actionDescriptor, ResourceExecutingContext resourceExecutingContext, IFilterMetadata filter)
        {
            ActionDescriptor = actionDescriptor;
            ResourceExecutingContext = resourceExecutingContext;
            Filter = filter;
        }

        /// <summary>
        /// The action.
        /// </summary>
        public ActionDescriptor ActionDescriptor { get; }

        /// <summary>
        /// The context.
        /// </summary>
        public ResourceExecutingContext ResourceExecutingContext { get; }

        /// <summary>
        /// The resource filter that will run.
        /// </summary>
        public IFilterMetadata Filter { get; }

        /// <inheritdoc/>
        protected override int Count => 3;

        /// <inheritdoc/>
        protected override KeyValuePair<string, object> this[int index] => index switch
        {
            0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
            1 => new KeyValuePair<string, object>(nameof(ResourceExecutingContext), ResourceExecutingContext),
            2 => new KeyValuePair<string, object>(nameof(Filter), Filter),
            _ => throw new IndexOutOfRangeException(nameof(index))
        };
    }

    /// <summary>
    /// An <see cref="EventData"/> that occurs after <see cref="IResourceFilter"/> execution.
    /// </summary>
    public sealed class AfterResourceFilterOnResourceExecutionEventData : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "AfterOnResourceExecution";

        /// <summary>
        /// Initializes a new instance of <see cref="AfterResourceFilterOnResourceExecutionEventData"/>.
        /// </summary>
        /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
        /// <param name="resourceExecutedContext">The <see cref="ResourceExecutedContext"/>.</param>
        /// <param name="filter">The <see cref="IFilterMetadata"/>.</param>
        public AfterResourceFilterOnResourceExecutionEventData(ActionDescriptor actionDescriptor, ResourceExecutedContext resourceExecutedContext, IFilterMetadata filter)
        {
            ActionDescriptor = actionDescriptor;
            ResourceExecutedContext = resourceExecutedContext;
            Filter = filter;
        }

        /// <summary>
        /// The action.
        /// </summary>
        public ActionDescriptor ActionDescriptor { get; }

        /// <summary>
        /// The context.
        /// </summary>
        public ResourceExecutedContext ResourceExecutedContext { get; }

        /// <summary>
        /// The resource filter that ran.
        /// </summary>
        public IFilterMetadata Filter { get; }

        /// <inheritdoc/>
        protected override int Count => 3;

        /// <inheritdoc/>
        protected override KeyValuePair<string, object> this[int index] => index switch
        {
            0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
            1 => new KeyValuePair<string, object>(nameof(ResourceExecutedContext), ResourceExecutedContext),
            2 => new KeyValuePair<string, object>(nameof(Filter), Filter),
            _ => throw new IndexOutOfRangeException(nameof(index))
        };
    }

    /// <summary>
    /// An <see cref="EventData"/> that occurs before <see cref="IResourceFilter.OnResourceExecuting(ResourceExecutingContext)"/>.
    /// </summary>
    public sealed class BeforeResourceFilterOnResourceExecutingEventData : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "BeforeOnResourceExecuting";

        /// <summary>
        /// Initializes a new instance of <see cref="BeforeResourceFilterOnResourceExecutingEventData"/>.
        /// </summary>
        /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
        /// <param name="resourceExecutingContext">The <see cref="ResourceExecutingContext"/>.</param>
        /// <param name="filter">The <see cref="IFilterMetadata"/>.</param>
        public BeforeResourceFilterOnResourceExecutingEventData(ActionDescriptor actionDescriptor, ResourceExecutingContext resourceExecutingContext, IFilterMetadata filter)
        {
            ActionDescriptor = actionDescriptor;
            ResourceExecutingContext = resourceExecutingContext;
            Filter = filter;
        }

        /// <summary>
        /// The action.
        /// </summary>
        public ActionDescriptor ActionDescriptor { get; }

        /// <summary>
        /// The context.
        /// </summary>
        public ResourceExecutingContext ResourceExecutingContext { get; }

        /// <summary>
        /// The resource filter that will run.
        /// </summary>
        public IFilterMetadata Filter { get; }

        /// <inheritdoc/>
        protected override int Count => 3;

        /// <inheritdoc/>
        protected override KeyValuePair<string, object> this[int index] => index switch
        {
            0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
            1 => new KeyValuePair<string, object>(nameof(ResourceExecutingContext), ResourceExecutingContext),
            2 => new KeyValuePair<string, object>(nameof(Filter), Filter),
            _ => throw new IndexOutOfRangeException(nameof(index))
        };
    }

    /// <summary>
    /// An <see cref="EventData"/> that occurs after <see cref="IResourceFilter.OnResourceExecuting(ResourceExecutingContext)"/>.
    /// </summary>
    public sealed class AfterResourceFilterOnResourceExecutingEventData : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "AfterOnResourceExecuting";

        /// <summary>
        /// Initializes a new instance of <see cref="AfterResourceFilterOnResourceExecutingEventData"/>.
        /// </summary>
        /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
        /// <param name="resourceExecutingContext">The <see cref="ResourceExecutingContext"/>.</param>
        /// <param name="filter">The <see cref="IFilterMetadata"/>.</param>
        public AfterResourceFilterOnResourceExecutingEventData(ActionDescriptor actionDescriptor, ResourceExecutingContext resourceExecutingContext, IFilterMetadata filter)
        {
            ActionDescriptor = actionDescriptor;
            ResourceExecutingContext = resourceExecutingContext;
            Filter = filter;
        }

        /// <summary>
        /// The action.
        /// </summary>
        public ActionDescriptor ActionDescriptor { get; }

        /// <summary>
        /// The context.
        /// </summary>
        public ResourceExecutingContext ResourceExecutingContext { get; }

        /// <summary>
        /// The resource filter that ran.
        /// </summary>
        public IFilterMetadata Filter { get; }

        /// <inheritdoc/>
        protected override int Count => 3;

        /// <inheritdoc/>
        protected override KeyValuePair<string, object> this[int index] => index switch
        {
            0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
            1 => new KeyValuePair<string, object>(nameof(ResourceExecutingContext), ResourceExecutingContext),
            2 => new KeyValuePair<string, object>(nameof(Filter), Filter),
            _ => throw new IndexOutOfRangeException(nameof(index))
        };
    }

    /// <summary>
    /// An <see cref="EventData"/> that occurs before <see cref="IResourceFilter.OnResourceExecuted(ResourceExecutedContext)"/>.
    /// </summary>
    public sealed class BeforeResourceFilterOnResourceExecutedEventData : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "BeforeOnResourceExecuted";

        /// <summary>
        /// Initializes a new instance of <see cref="BeforeResourceFilterOnResourceExecutedEventData"/>.
        /// </summary>
        /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
        /// <param name="resourceExecutedContext">The <see cref="ResourceExecutedContext"/>.</param>
        /// <param name="filter">The <see cref="IFilterMetadata"/>.</param>
        public BeforeResourceFilterOnResourceExecutedEventData(ActionDescriptor actionDescriptor, ResourceExecutedContext resourceExecutedContext, IFilterMetadata filter)
        {
            ActionDescriptor = actionDescriptor;
            ResourceExecutedContext = resourceExecutedContext;
            Filter = filter;
        }

        /// <summary>
        /// The action.
        /// </summary>
        public ActionDescriptor ActionDescriptor { get; }

        /// <summary>
        /// The context.
        /// </summary>
        public ResourceExecutedContext ResourceExecutedContext { get; }

        /// <summary>
        /// The resource filter that will run.
        /// </summary>
        public IFilterMetadata Filter { get; }

        /// <inheritdoc/>
        protected override int Count => 3;

        /// <inheritdoc/>
        protected override KeyValuePair<string, object> this[int index] => index switch
        {
            0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
            1 => new KeyValuePair<string, object>(nameof(ResourceExecutedContext), ResourceExecutedContext),
            2 => new KeyValuePair<string, object>(nameof(Filter), Filter),
            _ => throw new IndexOutOfRangeException(nameof(index))
        };
    }

    /// <summary>
    /// An <see cref="EventData"/> that occurs after <see cref="IResourceFilter.OnResourceExecuted(ResourceExecutedContext)"/>.
    /// </summary>
    public sealed class AfterResourceFilterOnResourceExecutedEventData : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "AfterOnResourceExecuted";

        /// <summary>
        /// Initializes a new instance of <see cref="AfterResourceFilterOnResourceExecutedEventData"/>.
        /// </summary>
        /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
        /// <param name="resourceExecutedContext">The <see cref="ResourceExecutedContext"/>.</param>
        /// <param name="filter">The <see cref="IFilterMetadata"/>.</param>
        public AfterResourceFilterOnResourceExecutedEventData(ActionDescriptor actionDescriptor, ResourceExecutedContext resourceExecutedContext, IFilterMetadata filter)
        {
            ActionDescriptor = actionDescriptor;
            ResourceExecutedContext = resourceExecutedContext;
            Filter = filter;
        }

        /// <summary>
        /// The action.
        /// </summary>
        public ActionDescriptor ActionDescriptor { get; }

        /// <summary>
        /// The resource context.
        /// </summary>
        public ResourceExecutedContext ResourceExecutedContext { get; }

        /// <summary>
        /// The resource filter that ran.
        /// </summary>
        public IFilterMetadata Filter { get; }

        /// <inheritdoc/>
        protected override int Count => 3;

        /// <inheritdoc/>
        protected override KeyValuePair<string, object> this[int index] => index switch
        {
            0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
            1 => new KeyValuePair<string, object>(nameof(ResourceExecutedContext), ResourceExecutedContext),
            2 => new KeyValuePair<string, object>(nameof(Filter), Filter),
            _ => throw new IndexOutOfRangeException(nameof(index))
        };
    }

    /// <summary>
    /// An <see cref="EventData"/> that occurs before <see cref="IExceptionFilter.OnException(ExceptionContext)"/>.
    /// </summary>
    /// <remarks>
    /// NOTE(review): unlike its siblings, this type name lacks the "EventData" suffix;
    /// it is public API, so it is left unchanged here.
    /// </remarks>
    public sealed class BeforeExceptionFilterOnException : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "BeforeOnException";

        /// <summary>
        /// Initializes a new instance of <see cref="BeforeExceptionFilterOnException"/>.
        /// </summary>
        /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
        /// <param name="exceptionContext">The <see cref="ExceptionContext"/>.</param>
        /// <param name="filter">The <see cref="IFilterMetadata"/>.</param>
        public BeforeExceptionFilterOnException(ActionDescriptor actionDescriptor, ExceptionContext exceptionContext, IFilterMetadata filter)
        {
            ActionDescriptor = actionDescriptor;
            ExceptionContext = exceptionContext;
            Filter = filter;
        }

        /// <summary>
        /// The action.
        /// </summary>
        public ActionDescriptor ActionDescriptor { get; }

        /// <summary>
        /// The context.
        /// </summary>
        public ExceptionContext ExceptionContext { get; }

        /// <summary>
        /// The exception filter that will run.
        /// </summary>
        public IFilterMetadata Filter { get; }

        /// <inheritdoc/>
        protected override int Count => 3;

        /// <inheritdoc/>
        protected override KeyValuePair<string, object> this[int index] => index switch
        {
            0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
            1 => new KeyValuePair<string, object>(nameof(ExceptionContext), ExceptionContext),
            2 => new KeyValuePair<string, object>(nameof(Filter), Filter),
            _ => throw new IndexOutOfRangeException(nameof(index))
        };
    }

    /// <summary>
    /// An <see cref="EventData"/> that occurs after <see cref="IExceptionFilter.OnException(ExceptionContext)"/>.
    /// </summary>
    public sealed class AfterExceptionFilterOnExceptionEventData : EventData
    {
        /// <summary>
        /// The name of the event.
        /// </summary>
        public const string EventName = EventNamespace + "AfterOnException";

        /// <summary>
        /// Initializes a new instance of <see cref="AfterExceptionFilterOnExceptionEventData"/>.
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="exceptionContext">The <see cref="ExceptionContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public AfterExceptionFilterOnExceptionEventData(ActionDescriptor actionDescriptor, ExceptionContext exceptionContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ExceptionContext = exceptionContext; Filter = filter; } /// <summary> /// The action. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The exception context. /// </summary> public ExceptionContext ExceptionContext { get; } /// <summary> /// The exception filter that ran. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ExceptionContext), ExceptionContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs before <see cref="IActionFilter"/> execution. /// </summary> public sealed class BeforeActionFilterOnActionExecutionEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "BeforeOnActionExecution"; /// <summary> /// Initializes a new instance of <see cref="BeforeActionFilterOnActionExecutionEventData"/>. 
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="actionExecutingContext">The <see cref="ActionExecutingContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public BeforeActionFilterOnActionExecutionEventData(ActionDescriptor actionDescriptor, ActionExecutingContext actionExecutingContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ActionExecutingContext = actionExecutingContext; Filter = filter; } /// <summary> /// The action that will run.. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The action context. /// </summary> public ActionExecutingContext ActionExecutingContext { get; } /// <summary> /// The action filter that will run. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ActionExecutingContext), ActionExecutingContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs after <see cref="IActionFilter"/> execution. /// </summary> public sealed class AfterActionFilterOnActionExecutionEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "AfterOnActionExecution"; /// <summary> /// Initializes a new instance of <see cref="AfterActionFilterOnActionExecutionEventData"/>. 
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="actionExecutedContext">The <see cref="ActionExecutedContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public AfterActionFilterOnActionExecutionEventData(ActionDescriptor actionDescriptor, ActionExecutedContext actionExecutedContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ActionExecutedContext = actionExecutedContext; Filter = filter; } /// <summary> /// The action that ran. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The action executed context. /// </summary> public ActionExecutedContext ActionExecutedContext { get; } /// <summary> /// The action filter that ran. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ActionExecutedContext), ActionExecutedContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs before <see cref="IActionFilter.OnActionExecuting(ActionExecutingContext)"/>. /// </summary> public sealed class BeforeActionFilterOnActionExecutingEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "BeforeOnActionExecuting"; /// <summary> /// Initializes a new instance of <see cref="BeforeActionFilterOnActionExecutingEventData"/>. 
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="actionExecutingContext">The <see cref="ActionExecutingContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public BeforeActionFilterOnActionExecutingEventData(ActionDescriptor actionDescriptor, ActionExecutingContext actionExecutingContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ActionExecutingContext = actionExecutingContext; Filter = filter; } /// <summary> /// The action. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The action context. /// </summary> public ActionExecutingContext ActionExecutingContext { get; } /// <summary> /// The action filter that will run. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ActionExecutingContext), ActionExecutingContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs after <see cref="IActionFilter.OnActionExecuting(ActionExecutingContext)"/>. /// </summary> public sealed class AfterActionFilterOnActionExecutingEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "AfterOnActionExecuting"; /// <summary> /// Initializes a new instance of <see cref="AfterActionFilterOnActionExecutingEventData"/>. 
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="actionExecutingContext">The <see cref="ActionExecutingContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public AfterActionFilterOnActionExecutingEventData(ActionDescriptor actionDescriptor, ActionExecutingContext actionExecutingContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ActionExecutingContext = actionExecutingContext; Filter = filter; } /// <summary> /// The action. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The context. /// </summary> public ActionExecutingContext ActionExecutingContext { get; } /// <summary> /// The action filter. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ActionExecutingContext), ActionExecutingContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs before <see cref="IActionFilter.OnActionExecuted(ActionExecutedContext)"/>. /// </summary> public sealed class BeforeActionFilterOnActionExecutedEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "BeforeOnActionExecuted"; /// <summary> /// Initializes a new instance of <see cref="BeforeActionFilterOnActionExecutedEventData"/>. 
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="actionExecutedContext">The <see cref="ActionExecutedContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public BeforeActionFilterOnActionExecutedEventData(ActionDescriptor actionDescriptor, ActionExecutedContext actionExecutedContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ActionExecutedContext = actionExecutedContext; Filter = filter; } /// <summary> /// The action. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The context. /// </summary> public ActionExecutedContext ActionExecutedContext { get; } /// <summary> /// The action filter. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ActionExecutedContext), ActionExecutedContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs after <see cref="IActionFilter.OnActionExecuted(ActionExecutedContext)"/>. /// </summary> public sealed class AfterActionFilterOnActionExecutedEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "AfterOnActionExecuted"; /// <summary> /// Initializes a new instance of <see cref="AfterActionFilterOnActionExecutedEventData"/>. 
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="actionExecutedContext">The <see cref="ActionExecutedContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public AfterActionFilterOnActionExecutedEventData(ActionDescriptor actionDescriptor, ActionExecutedContext actionExecutedContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ActionExecutedContext = actionExecutedContext; Filter = filter; } /// <summary> /// The action. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The context. /// </summary> public ActionExecutedContext ActionExecutedContext { get; } /// <summary> /// The action filter. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ActionExecutedContext), ActionExecutedContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs before an controller action method. /// </summary> public sealed class BeforeControllerActionMethodEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "BeforeControllerActionMethod"; /// <summary> /// Initializes a new instance of <see cref="BeforeControllerActionMethodEventData"/>. 
/// </summary> /// <param name="actionContext">The <see cref="ActionContext"/>.</param> /// <param name="actionArguments">The arguments to the action.</param> /// <param name="controller">The controller.</param> public BeforeControllerActionMethodEventData(ActionContext actionContext, IReadOnlyDictionary<string, object> actionArguments, object controller) { ActionContext = actionContext; ActionArguments = actionArguments; Controller = controller; } /// <summary> /// The action context. /// </summary> public ActionContext ActionContext { get; } /// <summary> /// The action arguments. /// </summary> public IReadOnlyDictionary<string, object> ActionArguments { get; } /// <summary> /// The controller. /// </summary> public object Controller { get; } /// <inheritdoc/> protected sealed override int Count => 3; /// <inheritdoc/> protected sealed override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionContext), ActionContext), 1 => new KeyValuePair<string, object>(nameof(ActionArguments), ActionArguments), 2 => new KeyValuePair<string, object>(nameof(Controller), Controller), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs after an controller action method. /// </summary> public sealed class AfterControllerActionMethodEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "AfterControllerActionMethod"; /// <summary> /// Initializes a new instance of <see cref="AfterControllerActionMethodEventData"/>. 
/// </summary>
/// <param name="actionContext">The <see cref="ActionContext"/>.</param>
/// <param name="arguments">The arguments to the action.</param>
/// <param name="controller">The controller.</param>
/// <param name="result">The <see cref="IActionResult"/>.</param>
public AfterControllerActionMethodEventData(ActionContext actionContext, IReadOnlyDictionary<string, object> arguments, object controller, IActionResult result)
{
    ActionContext = actionContext;
    Arguments = arguments;
    Controller = controller;
    Result = result;
}

/// <summary>
/// The context.
/// </summary>
public ActionContext ActionContext { get; }

/// <summary>
/// The arguments.
/// </summary>
public IReadOnlyDictionary<string, object> Arguments { get; }

/// <summary>
/// The controller.
/// </summary>
public object Controller { get; }

/// <summary>
/// The result.
/// </summary>
public IActionResult Result { get; }

/// <inheritdoc/>
protected override int Count => 4;

/// <inheritdoc/>
protected override KeyValuePair<string, object> this[int index] => index switch
{
    0 => new KeyValuePair<string, object>(nameof(ActionContext), ActionContext),
    // Fix: index 1 previously returned (nameof(Controller), Controller) — a copy/paste
    // duplicate of index 2 — which made Arguments unreachable through the indexer even
    // though Count reports 4 entries. DiagnosticListener consumers enumerating the
    // payload would have seen Controller twice and never seen Arguments.
    1 => new KeyValuePair<string, object>(nameof(Arguments), Arguments),
    2 => new KeyValuePair<string, object>(nameof(Controller), Controller),
    3 => new KeyValuePair<string, object>(nameof(Result), Result),
    _ => throw new IndexOutOfRangeException(nameof(index))
};
}

/// <summary>
/// An <see cref="EventData"/> that occurs before a ResultFilter's OnResultExecution
/// </summary>
public sealed class BeforeResultFilterOnResultExecutionEventData : EventData
{
    /// <summary>
    /// The name of the event.
    /// </summary>
    public const string EventName = EventNamespace + "BeforeOnResultExecution";

    /// <summary>
    /// Initializes a new instance of <see cref="BeforeResultFilterOnResultExecutionEventData"/>.
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="resultExecutingContext">The <see cref="ResultExecutingContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public BeforeResultFilterOnResultExecutionEventData(ActionDescriptor actionDescriptor, ResultExecutingContext resultExecutingContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ResultExecutingContext = resultExecutingContext; Filter = filter; } /// <summary> /// The action. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The context. /// </summary> public ResultExecutingContext ResultExecutingContext { get; } /// <summary> /// The result filter. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ResultExecutingContext), ResultExecutingContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs after a ResultFilter's OnResultExecution /// </summary> public sealed class AfterResultFilterOnResultExecutionEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "AfterOnResultExecution"; /// <summary> /// Initializes a new instance of <see cref="AfterResultFilterOnResultExecutionEventData"/>. 
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="resultExecutedContext">The <see cref="ResultExecutedContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public AfterResultFilterOnResultExecutionEventData(ActionDescriptor actionDescriptor, ResultExecutedContext resultExecutedContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ResultExecutedContext = resultExecutedContext; Filter = filter; } /// <summary> /// The action. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The context. /// </summary> public ResultExecutedContext ResultExecutedContext { get; } /// <summary> /// The result filter. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ResultExecutedContext), ResultExecutedContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs before <see cref="IResultFilter.OnResultExecuting(ResultExecutingContext)"/>. /// </summary> public sealed class BeforeResultFilterOnResultExecutingEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "BeforeOnResultExecuting"; /// <summary> /// Initializes a new instance of <see cref="BeforeResultFilterOnResultExecutingEventData"/>. 
/// </summary>
/// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param>
/// <param name="resultExecutingContext">The <see cref="ResultExecutingContext"/>.</param>
/// <param name="filter">The <see cref="IFilterMetadata"/>.</param>
public BeforeResultFilterOnResultExecutingEventData(ActionDescriptor actionDescriptor, ResultExecutingContext resultExecutingContext, IFilterMetadata filter)
{
    ActionDescriptor = actionDescriptor;
    ResultExecutingContext = resultExecutingContext;
    Filter = filter;
}

/// <summary>
/// The action.
/// </summary>
public ActionDescriptor ActionDescriptor { get; }

/// <summary>
/// The context.
/// </summary>
public ResultExecutingContext ResultExecutingContext { get; }

/// <summary>
/// The result filter.
/// </summary>
public IFilterMetadata Filter { get; }

/// <inheritdoc/>
protected override int Count => 3;

/// <inheritdoc/>
protected override KeyValuePair<string, object> this[int index] => index switch
{
    0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor),
    1 => new KeyValuePair<string, object>(nameof(ResultExecutingContext), ResultExecutingContext),
    2 => new KeyValuePair<string, object>(nameof(Filter), Filter),
    _ => throw new IndexOutOfRangeException(nameof(index))
};
}

/// <summary>
/// An <see cref="EventData"/> that occurs after <see cref="IResultFilter.OnResultExecuting(ResultExecutingContext)"/>.
/// </summary>
public sealed class AfterResultFilterOnResultExecutingEventData : EventData
{
    /// <summary>
    /// The name of the event.
    /// </summary>
    public const string EventName = EventNamespace + "AfterOnResultExecuting";

    /// <summary>
    /// Initializes a new instance of <see cref="AfterResultFilterOnResultExecutingEventData"/>.
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="resultExecutingContext">The <see cref="ResultExecutingContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public AfterResultFilterOnResultExecutingEventData(ActionDescriptor actionDescriptor, ResultExecutingContext resultExecutingContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ResultExecutingContext = resultExecutingContext; Filter = filter; } /// <summary> /// The action. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The context. /// </summary> public ResultExecutingContext ResultExecutingContext { get; } /// <summary> /// The filter. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ResultExecutingContext), ResultExecutingContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs before <see cref="IResultFilter.OnResultExecuted(ResultExecutedContext)"/>. /// </summary> public sealed class BeforeResultFilterOnResultExecutedEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "BeforeOnResultExecuted"; /// <summary> /// Initializes a new instance of <see cref="BeforeResultFilterOnResultExecutedEventData"/>. 
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="resultExecutedContext">The <see cref="ResultExecutedContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public BeforeResultFilterOnResultExecutedEventData(ActionDescriptor actionDescriptor, ResultExecutedContext resultExecutedContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ResultExecutedContext = resultExecutedContext; Filter = filter; } /// <summary> /// The action. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The context. /// </summary> public ResultExecutedContext ResultExecutedContext { get; } /// <summary> /// The result filter. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ResultExecutedContext), ResultExecutedContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs after <see cref="IResultFilter.OnResultExecuted(ResultExecutedContext)"/>. /// </summary> public sealed class AfterResultFilterOnResultExecutedEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "AfterOnResultExecuted"; /// <summary> /// Initializes a new instance of <see cref="AfterResultFilterOnResultExecutedEventData"/>. 
/// </summary> /// <param name="actionDescriptor">The <see cref="ActionDescriptor"/>.</param> /// <param name="resultExecutedContext">The <see cref="ResultExecutedContext"/>.</param> /// <param name="filter">The <see cref="IFilterMetadata"/>.</param> public AfterResultFilterOnResultExecutedEventData(ActionDescriptor actionDescriptor, ResultExecutedContext resultExecutedContext, IFilterMetadata filter) { ActionDescriptor = actionDescriptor; ResultExecutedContext = resultExecutedContext; Filter = filter; } /// <summary> /// The action. /// </summary> public ActionDescriptor ActionDescriptor { get; } /// <summary> /// The result executed context. /// </summary> public ResultExecutedContext ResultExecutedContext { get; } /// <summary> /// The filter that ran. /// </summary> public IFilterMetadata Filter { get; } /// <inheritdoc/> protected override int Count => 3; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionDescriptor), ActionDescriptor), 1 => new KeyValuePair<string, object>(nameof(ResultExecutedContext), ResultExecutedContext), 2 => new KeyValuePair<string, object>(nameof(Filter), Filter), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs before an action result is invoked. /// </summary> public sealed class BeforeActionResultEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "BeforeActionResult"; /// <summary> /// Initializes a new instance of <see cref="BeforeActionResultEventData"/>. /// </summary> /// <param name="actionContext">The <see cref="ActionContext"/>.</param> /// <param name="result">The <see cref="IActionResult"/>.</param> public BeforeActionResultEventData(ActionContext actionContext, IActionResult result) { ActionContext = actionContext; Result = result; } /// <summary> /// The action context. 
/// </summary> public ActionContext ActionContext { get; } /// <summary> /// The action result. /// </summary> public IActionResult Result { get; } /// <inheritdoc/> protected override int Count => 2; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionContext), ActionContext), 1 => new KeyValuePair<string, object>(nameof(Result), Result), _ => throw new IndexOutOfRangeException(nameof(index)) }; } /// <summary> /// An <see cref="EventData"/> that occurs after an action result is invoked. /// </summary> public sealed class AfterActionResultEventData : EventData { /// <summary> /// The name of the event. /// </summary> public const string EventName = EventNamespace + "AfterActionResult"; /// <summary> /// Initializes a new instance of <see cref="AfterActionResultEventData"/>. /// </summary> /// <param name="actionContext">The <see cref="ActionContext"/>.</param> /// <param name="result">The <see cref="IActionResult"/>.</param> public AfterActionResultEventData(ActionContext actionContext, IActionResult result) { ActionContext = actionContext; Result = result; } /// <summary> /// The action context. /// </summary> public ActionContext ActionContext { get; } /// <summary> /// The result. /// </summary> public IActionResult Result { get; } /// <inheritdoc/> protected override int Count => 2; /// <inheritdoc/> protected override KeyValuePair<string, object> this[int index] => index switch { 0 => new KeyValuePair<string, object>(nameof(ActionContext), ActionContext), 1 => new KeyValuePair<string, object>(nameof(Result), Result), _ => throw new IndexOutOfRangeException(nameof(index)) }; } }
using System.Collections.Generic;
using System.Globalization;
using System.Reflection;
using System.Text;
using FileHelpers.Helpers;

namespace FileHelpers
{
    /// <summary>
    /// Define a field that is delimited, eg CSV and may be quoted
    /// </summary>
    public sealed class DelimitedField
        : FieldBase
    {
        #region " Constructor "

        // Shared comparer used for ordinal substring searches when writing fields
        // (separator / newline detection inside a value).
        private static readonly CompareInfo mCompare = StringHelper.CreateComparer();

        /// <summary>
        /// Create an empty delimited field structure
        /// </summary>
        private DelimitedField() {}

        /// <summary>
        /// Create a delimited field with defined separator
        /// </summary>
        /// <param name="fi">field info structure</param>
        /// <param name="sep">field separator</param>
        /// <param name="defaultCultureName">Default culture name used for each properties if no converter is specified otherwise. If null, the default decimal separator (".") will be used.</param>
        internal DelimitedField(FieldInfo fi, string sep, string defaultCultureName=null)
            : base(fi,defaultCultureName)
        {
            // '\0' means "no quoting" until a FieldQuoted attribute sets a real char.
            QuoteChar = '\0';
            QuoteMultiline = MultilineMode.AllowForBoth;
            Separator = sep; // string.Intern(sep);
        }

        #endregion

        #region " Properties "

        /// <summary>
        /// Set the separator string
        /// </summary>
        /// <remarks>Also sets the discard count</remarks>
        internal string Separator { get; set; }

        // Characters consumed after the extracted value: the separator itself,
        // except after the last non-array field of the record (no trailing separator).
        internal override int CharsToDiscard
        {
            get
            {
                if (IsLast && IsArray == false)
                    return 0;
                else
                    return Separator.Length;
            }
        }

        /// <summary>
        /// allow a quoted multiline format
        /// </summary>
        public MultilineMode QuoteMultiline { get; set; }

        /// <summary>
        /// whether quotes are optional for read and / or write
        /// </summary>
        public QuoteMode QuoteMode { get; set; }

        /// <summary>
        /// quote character around field (and repeated within it)
        /// </summary>
        public char QuoteChar { get; set; }

        #endregion

        #region " Overrides String Handling "

        /// <summary>
        /// Extract the field from the delimited file, removing separators and quotes
        /// and any duplicate quotes within the record
        /// </summary>
        /// <param name="line">line containing record input</param>
        /// <returns>Extract information</returns>
        internal override ExtractedInfo ExtractFieldString(LineInfo line)
        {
            if (IsOptional && line.IsEOL())
                return ExtractedInfo.Empty;

            // Fast path: field was never configured as quoted.
            if (QuoteChar == '\0')
                return BasicExtractString(line);
            else {
                // NOTE: TrimStart mutates the LineInfo position — order matters here.
                if (TrimMode == TrimMode.Both || TrimMode == TrimMode.Left)
                    line.TrimStart(TrimChars);

                string quotedStr = QuoteChar.ToString();
                if (line.StartsWith(quotedStr)) {
                    var res = StringHelper.ExtractQuotedString(line,
                        QuoteChar,
                        QuoteMultiline == MultilineMode.AllowForBoth || QuoteMultiline == MultilineMode.AllowForRead);

                    // Right-trim is implemented by advancing past whitespace that
                    // follows the closing quote (before the separator).
                    if (TrimMode == TrimMode.Both || TrimMode == TrimMode.Right)
                        line.TrimStart(TrimChars);

                    // After a quoted value, only the separator or end-of-line may follow.
                    if (!IsLast && !line.StartsWith(Separator) && !line.IsEOL()) {
                        //?QuotedCharBeforeSeparator"The field {0} is quoted but the quoted char: {1} not is just before the separator (You can use [FieldTrim] to avoid this error)"
                        throw new BadUsageException(line, "FileHelperMsg_QuotedCharBeforeSeparator", new List<string>() { this.FieldInfo.Name, quotedStr });
                    }
                    return res;
                }
                else {
                    // Value is not quoted: allowed only when quotes are optional for reading.
                    if (QuoteMode == QuoteMode.OptionalForBoth || QuoteMode == QuoteMode.OptionalForRead)
                        return BasicExtractString(line);
                    else if (line.StartsWithTrim(quotedStr)) {
                        //?SpaceBeforeQuotedChar"The field '{0}' has spaces before the QuotedChar at line {1}. Use the TrimAttribute to by pass this error. Field String: {2}"
                        throw new BadUsageException("FileHelperMsg_SpaceBeforeQuotedChar", new List<string>() { FieldInfo.Name, line.mReader.LineNumber.ToString(), line.CurrentString });
                    }
                    else {
                        //?FieldNotStartsWithQuotedChar"The field '{0}' does not begin with the QuotedChar at line {1}. You can use FieldQuoted(QuoteMode.OptionalForRead) to allow optional quoted field. Field String: {2}"
                        throw new BadUsageException("FileHelperMsg_FieldNotStartsWithQuotedChar", new List<string>() { FieldInfo.Name, line.mReader.LineNumber.ToString(), line.CurrentString });
                    }
                }
            }
        }

        // Extracts an unquoted value: everything up to the next separator, or to the
        // end of the line for the last / optional-next field.
        private ExtractedInfo BasicExtractString(LineInfo line)
        {
            //we intentionally ignore delimiter after the last expected field as well as all data after it
            /*if (IsLast && !IsArray) {
                var sepPos = line.IndexOf(Separator);

                if (sepPos == -1)
                    return new ExtractedInfo(line);

                // Now check for one extra separator
                var msg =
                    string.Format(
                        "Delimiter '{0}' found after the last field '{1}' (the file is wrong or you need to add a field to the record class)",
                        Separator,
                        FieldInfo.Name,
                        line.mReader.LineNumber);

                throw new BadUsageException(line.mReader.LineNumber, line.mCurrentPos, msg);
            }
            else */{
                int sepPos = line.IndexOf(Separator);

                if (sepPos == -1) {
                    if (IsLast /*&& IsArray*/)
                        return new ExtractedInfo(line);
                    if ( NextIsOptional == false) {
                        List<string> msg;
                        string msgCode;

                        if (IsFirst && line.EmptyFromPos()) {
                            //!"The line {0} is empty. Maybe you need to use the attribute [IgnoreEmptyLines] in your record class."
                            msg = new List<string>() { line.mReader.LineNumber.ToString() };
                            msgCode = "FileHelperMsg_LineIsEmpty";
                        }
                        else {
                            //!"Delimiter '{0}' not found after field '{1}' (the record has less fields, the delimiter is wrong or the next field must be marked as optional)."
                            msg = new List<string>() { Separator, this.FieldInfo.Name, line.mReader.LineNumber.ToString() };
                            msgCode = "FileHelperMsg_DelimiterNotFoundAfterField";
                        }
                        throw new FileHelpersException(line.mReader.LineNumber, line.mCurrentPos, msgCode, msg);
                    }
                    else
                        // Missing separator is tolerated when the next field is optional:
                        // consume the rest of the line as this field's value.
                        sepPos = line.mLineStr.Length;
                }

                return new ExtractedInfo(line, sepPos);
            }
        }

        /// <summary>
        /// Output the field string adding delimiters and any required quotes
        /// and any duplicate quotes within the value
        /// </summary>
        /// <param name="sb">buffer to add field to</param>
        /// <param name="fieldValue">value object to add</param>
        /// <param name="isLast">Indicates if we are processing last field</param>
        internal override void CreateFieldString(StringBuilder sb, object fieldValue, bool isLast)
        {
            string field = base.CreateFieldString(fieldValue);

            bool hasNewLine = mCompare.IndexOf(field, StringHelper.NewLine, CompareOptions.Ordinal) >= 0;

            // If have a new line and this is not allowed. We throw an exception
            if (hasNewLine &&
                (QuoteMultiline == MultilineMode.AllowForRead ||
                 QuoteMultiline == MultilineMode.NotAllow)) {
                //?NewLineInsideValue"One value for the field {0} has a new line inside. To allow write this value you must add a FieldQuoted attribute with the multiline option in true."
                throw new BadUsageException("FileHelperMsg_NewLineInsideValue", new List<string>() { this.FieldInfo.Name });
            }

            // Add Quotes If:
            //     - optional == false
            //     - is optional and contains the separator
            //     - is optional and contains a new line
            if ((QuoteChar != '\0') &&
                (QuoteMode == QuoteMode.AlwaysQuoted ||
                 QuoteMode == QuoteMode.OptionalForRead ||
                 ((QuoteMode == QuoteMode.OptionalForWrite || QuoteMode == QuoteMode.OptionalForBoth)
                  && mCompare.IndexOf(field, Separator, CompareOptions.Ordinal) >= 0) ||
                 hasNewLine))
                StringHelper.CreateQuotedString(sb, field, QuoteChar);
            else
                sb.Append(field);

            // No trailing separator after the record's last field.
            if (isLast == false)
                sb.Append(Separator);
        }

        /// <summary>
        /// create a field base class and populate the delimited values
        /// base class will add its own values
        /// </summary>
        /// <returns>fieldbase ready to be populated with extra info</returns>
        protected override FieldBase CreateClone()
        {
            var res = new DelimitedField {
                Separator = Separator,
                QuoteChar = QuoteChar,
                QuoteMode = QuoteMode,
                QuoteMultiline = QuoteMultiline
            };
            return res;
        }

        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using Cosmos.Build.Common;
using Cosmos.IL2CPU;

namespace Cosmos.TestRunner.Core
{
    partial class Engine
    {
        /// <summary>
        /// Walks up from the application base directory looking for a directory that
        /// contains a "source" subdirectory (taken to be the Cosmos repository root).
        /// </summary>
        /// <returns>The root path, or <see cref="string.Empty"/> when none is found.</returns>
        private string FindCosmosRoot()
        {
            var xCurrentDirectory = AppContext.BaseDirectory;
            var xCurrentInfo = new DirectoryInfo(xCurrentDirectory);

            while (xCurrentInfo.Parent != null)
            {
                if (xCurrentInfo.GetDirectories("source").Any())
                {
                    return xCurrentDirectory;
                }

                xCurrentInfo = xCurrentInfo.Parent;
                xCurrentDirectory = xCurrentInfo.FullName;
            }

            return string.Empty;
        }

        /// <summary>
        /// Runs "dotnet publish" for the given project into the given output path
        /// for the given runtime identifier.
        /// </summary>
        /// <param name="aProjectPath">Path of the project to publish; also used as working directory.</param>
        /// <param name="aOutputPath">Publish output directory.</param>
        /// <param name="aRuntimeTarget">Runtime identifier passed to -r.</param>
        private void RunDotnetPublish(string aProjectPath, string aOutputPath, string aRuntimeTarget)
        {
            var xArgsString = $"publish \"{aProjectPath}\" -o \"{aOutputPath}\" -r {aRuntimeTarget}";
            RunProcess("dotnet", aProjectPath, xArgsString);
        }

        /// <summary>
        /// Runs a process with a list of arguments; each argument is individually quoted.
        /// </summary>
        /// <param name="aProcess">Executable to run.</param>
        /// <param name="aWorkingDirectory">Working directory for the process.</param>
        /// <param name="aArguments">Arguments; each is wrapped in double quotes.</param>
        /// <param name="aAttachDebugger">When true, appends the AttachVsDebugger switch.</param>
        /// <exception cref="ArgumentNullException">When <paramref name="aProcess"/> is null or whitespace.</exception>
        private void RunProcess(string aProcess, string aWorkingDirectory, List<string> aArguments, bool aAttachDebugger = false)
        {
            if (string.IsNullOrWhiteSpace(aProcess))
            {
                // BUGFIX: previously passed the (possibly null) value as the parameter name.
                throw new ArgumentNullException(nameof(aProcess));
            }

            var xArgsString = aArguments.Aggregate("", (aArgs, aArg) => $"{aArgs} \"{aArg}\"");

            RunProcess(aProcess, aWorkingDirectory, xArgsString, aAttachDebugger);
        }

        /// <summary>
        /// Runs a process, piping stdout/stderr to the output handler.
        /// </summary>
        /// <param name="aProcess">Executable to run.</param>
        /// <param name="aWorkingDirectory">Working directory for the process.</param>
        /// <param name="aArguments">Raw argument string.</param>
        /// <param name="aAttachDebugger">When true, appends the AttachVsDebugger switch.</param>
        /// <exception cref="ArgumentNullException">When <paramref name="aProcess"/> is null or whitespace.</exception>
        /// <exception cref="Exception">When the process times out or exits with a non-zero code.</exception>
        private void RunProcess(string aProcess, string aWorkingDirectory, string aArguments, bool aAttachDebugger = false)
        {
            if (string.IsNullOrWhiteSpace(aProcess))
            {
                // BUGFIX: previously passed the (possibly null) value as the parameter name.
                throw new ArgumentNullException(nameof(aProcess));
            }

            if (aAttachDebugger)
            {
                aArguments += " \"AttachVsDebugger:True\"";
            }

            Action<string> xErrorReceived = OutputHandler.LogError;
            Action<string> xOutputReceived = OutputHandler.LogMessage;

            bool xResult = false;

            var xProcessStartInfo = new ProcessStartInfo
            {
                WorkingDirectory = aWorkingDirectory,
                FileName = aProcess,
                Arguments = aArguments,
                UseShellExecute = false,
                RedirectStandardOutput = true,
                RedirectStandardError = true,
                CreateNoWindow = true
            };

            xOutputReceived($"Executing command line '{aProcess} {aArguments}'");
            xOutputReceived($"Working directory = '{aWorkingDirectory}'");

            using (var xProcess = new Process())
            {
                xProcess.StartInfo = xProcessStartInfo;

                xProcess.ErrorDataReceived += delegate (object aSender, DataReceivedEventArgs e)
                {
                    if (e.Data != null)
                    {
                        xErrorReceived(e.Data);
                    }
                };

                xProcess.OutputDataReceived += delegate (object aSender, DataReceivedEventArgs e)
                {
                    if (e.Data != null)
                    {
                        xOutputReceived(e.Data);
                    }
                };

                xProcess.Start();
                xProcess.BeginErrorReadLine();
                xProcess.BeginOutputReadLine();

                // Bound the run by the per-kernel timeout configured on the engine.
                xProcess.WaitForExit(AllowedSecondsInKernel * 1000);

                if (!xProcess.HasExited)
                {
                    xProcess.Kill();
                    xErrorReceived($"'{aProcess}' timed out.");
                }
                else
                {
                    if (xProcess.ExitCode == 0)
                    {
                        xResult = true;
                    }
                    else
                    {
                        xErrorReceived($"Error invoking '{aProcess}'.");
                    }
                }
            }

            if (!xResult)
            {
                throw new Exception("Error running process!");
            }
        }

        /// <summary>
        /// Extracts the symbol table of an ELF file by running cygwin objdump through a
        /// temporary batch file, redirecting its output into a .map file next to the input.
        /// </summary>
        /// <param name="cosmosBuildDir">Cosmos build directory containing tools\cygwin\objdump.exe.</param>
        /// <param name="workingDir">Directory where the batch file and map file are written.</param>
        /// <param name="inputFile">ELF file to dump.</param>
        /// <param name="errorReceived">NOTE(review): currently unused; kept for signature compatibility.</param>
        /// <param name="outputReceived">NOTE(review): currently unused; kept for signature compatibility.</param>
        /// <returns>Path of the generated map file.</returns>
        public static string RunObjDump(string cosmosBuildDir, string workingDir, string inputFile, Action<string> errorReceived, Action<string> outputReceived)
        {
            var xMapFile = Path.ChangeExtension(inputFile, "map");
            File.Delete(xMapFile);
            if (File.Exists(xMapFile))
            {
                throw new Exception("Could not delete " + xMapFile);
            }

            var xTempBatFile = Path.Combine(workingDir, "ExtractElfMap.bat");
            File.WriteAllText(xTempBatFile,
                "@ECHO OFF\r\n\"" + Path.Combine(cosmosBuildDir, @"tools\cygwin\objdump.exe")
                + "\" --wide --syms \"" + inputFile + "\" > \"" + Path.GetFileName(xMapFile) + "\"");

            var xProcessStartInfo = new ProcessStartInfo
            {
                WorkingDirectory = workingDir,
                FileName = xTempBatFile,
                Arguments = "",
                UseShellExecute = false,
                RedirectStandardOutput = true,
                RedirectStandardError = true,
                CreateNoWindow = true
            };

            // FIX: dispose the process handle once objdump has finished (or the timeout elapsed).
            using (var xProcess = Process.Start(xProcessStartInfo))
            {
                xProcess.WaitForExit(20000);
            }

            File.Delete(xTempBatFile);

            return xMapFile;
        }

        /// <summary>
        /// Convenience wrapper: extracts the symbol map of the given kernel file.
        /// </summary>
        private void RunExtractMapFromElfFile(string workingDir, string kernelFileName)
        {
            RunObjDump(CosmosPaths.Build, workingDir, kernelFileName, OutputHandler.LogError, OutputHandler.LogMessage);
        }

        /// <summary>
        /// Runs TheRingMaster on the kernel assembly, either from the source tree
        /// (via "dotnet run") or from the installed user kit.
        /// </summary>
        private void RunTheRingMaster(string kernelFileName)
        {
            var xArgs = new List<string>() { kernelFileName };

            bool xUsingUserKit = false;
            string xTheRingMasterPath = Path.Combine(FindCosmosRoot(), "source", "TheRingMaster");

            if (!Directory.Exists(xTheRingMasterPath))
            {
                xUsingUserKit = true;
                xTheRingMasterPath = Path.Combine(GetCosmosUserkitFolder(), "Build", "TheRingMaster");
            }

            if (xUsingUserKit)
            {
                RunProcess("TheRingMaster.exe", xTheRingMasterPath, xArgs);
            }
            else
            {
                xArgs.Insert(0, "run");
                xArgs.Insert(1, "--no-build");
                RunProcess("dotnet", xTheRingMasterPath, xArgs);
            }
        }

        /// <summary>
        /// Compiles the kernel to native assembly with IL2CPU, either in-process
        /// (when debugging IL2CPU) or via "dotnet run" / the user-kit executable.
        /// </summary>
        /// <param name="kernelFileName">Kernel assembly to compile.</param>
        /// <param name="outputFile">Target .asm output file.</param>
        private void RunIL2CPU(string kernelFileName, string outputFile)
        {
            // Reference set depends on the kernel package flavor.
            References = new List<string>() { kernelFileName };

            if (KernelPkg == "X86")
            {
                References.Add(Assembly.Load(new AssemblyName("Cosmos.CPU_Plugs")).Location);
                References.Add(Assembly.Load(new AssemblyName("Cosmos.CPU_Asm")).Location);
                References.Add(Assembly.Load(new AssemblyName("Cosmos.Plugs.TapRoot")).Location);
            }
            else
            {
                References.Add(Assembly.Load(new AssemblyName("Cosmos.Core_Plugs")).Location);
                References.Add(Assembly.Load(new AssemblyName("Cosmos.Core_Asm")).Location);
                References.Add(Assembly.Load(new AssemblyName("Cosmos.System2_Plugs")).Location);
                References.Add(Assembly.Load(new AssemblyName("Cosmos.Debug.Kernel.Plugs.Asm")).Location);
            }

            var xArgs = new List<string>
            {
                "KernelPkg:" + KernelPkg,
                "DebugEnabled:True",
                "StackCorruptionDetectionEnabled:" + EnableStackCorruptionChecks,
                "StackCorruptionDetectionLevel:" + StackCorruptionChecksLevel,
                "DebugMode:Source",
                "TraceAssemblies:" + TraceAssembliesLevel,
                "DebugCom:1",
                "OutputFilename:" + outputFile,
                "EnableLogging:True",
                "EmitDebugSymbols:True",
                "IgnoreDebugStubAttribute:False"
            };

            xArgs.AddRange(References.Select(aReference => "References:" + aReference));

            bool xUsingUserkit = false;
            string xIL2CPUPath = Path.Combine(FindCosmosRoot(), "..", "IL2CPU", "source", "IL2CPU");

            if (!Directory.Exists(xIL2CPUPath))
            {
                xUsingUserkit = true;
                xIL2CPUPath = Path.Combine(GetCosmosUserkitFolder(), "Build", "IL2CPU");
            }

            if (xUsingUserkit)
            {
                RunProcess("IL2CPU.exe", xIL2CPUPath, xArgs, DebugIL2CPU);
            }
            else
            {
                if (DebugIL2CPU)
                {
                    if (KernelsToRun.Count > 1)
                    {
                        throw new Exception("Cannot run multiple kernels with in-process compilation!");
                    }

                    // ensure we're using the referenced (= solution) version
                    Cosmos.IL2CPU.CosmosAssembler.ReadDebugStubFromDisk = false;

                    Program.Run(xArgs.ToArray(), OutputHandler.LogMessage, OutputHandler.LogError);
                }
                else
                {
                    xArgs.Insert(0, "run");
                    xArgs.Insert(1, "--no-build");
                    xArgs.Insert(2, " -- ");
                    RunProcess("dotnet", xIL2CPUPath, xArgs);
                }
            }
        }

        /// <summary>
        /// Assembles the IL2CPU output with NASM, via the Cosmos NASM wrapper tool.
        /// </summary>
        /// <param name="inputFile">.asm input file.</param>
        /// <param name="outputFile">Object file to produce.</param>
        /// <param name="isElf">Whether to emit ELF output.</param>
        /// <exception cref="DirectoryNotFoundException">When the NASM tool directory cannot be located.</exception>
        private void RunNasm(string inputFile, string outputFile, bool isElf)
        {
            bool xUsingUserkit = false;
            string xNasmPath = Path.Combine(FindCosmosRoot(), "Tools", "NASM");

            if (!Directory.Exists(xNasmPath))
            {
                xUsingUserkit = true;
                xNasmPath = Path.Combine(GetCosmosUserkitFolder(), "Build", "NASM");
            }

            if (!Directory.Exists(xNasmPath))
            {
                throw new DirectoryNotFoundException("NASM path not found.");
            }

            var xArgs = new List<string>
            {
                $"ExePath:{Path.Combine(xUsingUserkit ? GetCosmosUserkitFolder() : FindCosmosRoot(), "Build", "Tools", "NAsm", "nasm.exe")}",
                $"InputFile:{inputFile}",
                $"OutputFile:{outputFile}",
                $"IsELF:{isElf}"
            };

            if (xUsingUserkit)
            {
                RunProcess("NASM.exe", xNasmPath, xArgs);
            }
            else
            {
                xArgs.Insert(0, "run");
                xArgs.Insert(1, " -- ");
                RunProcess("dotnet", xNasmPath, xArgs);
            }
        }

        /// <summary>
        /// Links the assembled object file into the kernel binary with cygwin ld,
        /// placing text/data at the fixed Cosmos load addresses.
        /// </summary>
        private void RunLd(string inputFile, string outputFile)
        {
            string[] arguments = new[]
            {
                "-Ttext", "0x2000000",
                "-Tdata", " 0x1000000",
                "-e", "Kernel_Start",
                "-o", outputFile.Replace('\\', '/'),
                inputFile.Replace('\\', '/')
            };

            var xArgsString = arguments.Aggregate("", (a, b) => a + " \"" + b + "\"");

            // FIX: dispose the process handle once ld has finished (or the 10s timeout elapsed).
            using (var xProcess = Process.Start(
                Path.Combine(GetCosmosUserkitFolder(), "build", "tools", "cygwin", "ld.exe"),
                xArgsString))
            {
                xProcess.WaitForExit(10000);
            }
        }

        /// <summary>
        /// Resolves the installed Cosmos user kit folder.
        /// </summary>
        private static string GetCosmosUserkitFolder()
        {
            CosmosPaths.Initialize();
            return CosmosPaths.UserKit;
        }

        /// <summary>
        /// Builds a bootable ISO from the linked kernel binary.
        /// </summary>
        /// <exception cref="Exception">When the ISO was not produced.</exception>
        private void MakeIso(string objectFile, string isoFile)
        {
            IsoMaker.Generate(objectFile, isoFile);

            if (!File.Exists(isoFile))
            {
                throw new Exception("Error building iso");
            }
        }
    }
}
//------------------------------------------------------------------------------ // <copyright file="VisualStyleTypesAndProperties.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> //------------------------------------------------------------------------------ // This file contains the enums defining various ThemeData Types and Properties. [assembly: System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.MSInternal", "CA905:SystemAndMicrosoftNamespacesRequireApproval", Scope="namespace", Target="System.Windows.Forms.VisualStyles")] namespace System.Windows.Forms.VisualStyles { using System.Diagnostics.CodeAnalysis; /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="BackgroundType"]/*' /> public enum BackgroundType { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="BackgroundType.ImageFile"]/*' /> ImageFile = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="BackgroundType.BorderFill"]/*' /> BorderFill = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="BackgroundType.None"]/*' /> None = 2, // TM_ENUM(0, BT, IMAGEFILE) // TM_ENUM(1, BT, BORDERFILL) // TM_ENUM(2, BT, NONE) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="BorderType"]/*' /> public enum BorderType { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="BorderType.Rectangle"]/*' /> Rectangle = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="BorderType.RoundedRectangle"]/*' /> RoundedRectangle = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="BorderType.Ellipse"]/*' /> Ellipse = 2, // TM_ENUM(0, BT, RECT) // TM_ENUM(1, BT, ROUNDRECT) // TM_ENUM(2, BT, ELLIPSE) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ImageOrientation"]/*' /> public enum ImageOrientation { /// 
<include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ImageOrientation.Vertical"]/*' /> Vertical = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ImageOrientation.Horizontal"]/*' /> Horizontal = 1, // TM_ENUM(0, IL, VERTICAL) // TM_ENUM(1, IL, HORIZONTAL) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="SizingType"]/*' /> public enum SizingType { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="SizingType.FixedSize"]/*' /> FixedSize = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="SizingType.Stretch"]/*' /> Stretch = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="SizingType.Tile"]/*' /> Tile = 2, // TM_ENUM(0, ST, TRUESIZE) // TM_ENUM(1, ST, STRETCH) // TM_ENUM(2, ST, TILE) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="FillType"]/*' /> public enum FillType { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="FillType.Solid"]/*' /> Solid = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="FillType.VerticalGradient"]/*' /> VerticalGradient = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="FillType.HorizontalGradient"]/*' /> HorizontalGradient = 2, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="FillType.RadialGradient"]/*' /> RadialGradient = 3, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="FillType.TileImage"]/*' /> TileImage = 4, // TM_ENUM(0, FT, SOLID) // TM_ENUM(1, FT, VERTGRADIENT) // TM_ENUM(2, FT, HORZGRADIENT) // TM_ENUM(3, FT, RADIALGRADIENT) // TM_ENUM(4, FT, TILEIMAGE) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="HorizontalAlignment"]/*' /> public enum HorizontalAlign { /// <include file='doc\VisualStyleTypesAndProperties.uex' 
path='docs/doc[@for="HorizontalAlignment.Left"]/*' /> Left = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="HorizontalAlignment.Center"]/*' /> Center = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="HorizontalAlignment.Right"]/*' /> Right = 2, // TM_ENUM(0, HA, LEFT) // TM_ENUM(1, HA, CENTER) // TM_ENUM(2, HA, RIGHT) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ContentAlignment"]/*' /> public enum ContentAlignment { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ContentAlignment.Left"]/*' /> Left = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ContentAlignment.Center"]/*' /> Center = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ContentAlignment.Right"]/*' /> Right = 2, // TM_ENUM(0, CA, LEFT) // TM_ENUM(1, CA, CENTER) // TM_ENUM(2, CA, RIGHT) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="VerticalAlignment"]/*' /> public enum VerticalAlignment { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="VerticalAlignment.Top"]/*' /> Top = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="VerticalAlignment.Center"]/*' /> Center = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="VerticalAlignment.Bottom"]/*' /> Bottom = 2, // TM_ENUM(0, VA, TOP) // TM_ENUM(1, VA, CENTER) // TM_ENUM(2, VA, BOTTOM) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType"]/*' /> public enum OffsetType { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.TopLeft"]/*' /> TopLeft = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.TopRight"]/*' /> TopRight = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' 
path='docs/doc[@for="OffsetType.TopMiddle"]/*' /> TopMiddle = 2, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.BottomLeft"]/*' /> BottomLeft = 3, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.BottomRight"]/*' /> BottomRight = 4, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.BottomMiddle"]/*' /> BottomMiddle = 5, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.MiddleLeft"]/*' /> MiddleLeft = 6, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.MiddleRight"]/*' /> MiddleRight = 7, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.LeftOfCaption"]/*' /> LeftOfCaption = 8, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.RightOfCaption"]/*' /> RightOfCaption = 9, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.LeftOfLastButton"]/*' /> LeftOfLastButton = 10, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.RightOfLastButton"]/*' /> RightOfLastButton = 11, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.AboveLastButton"]/*' /> AboveLastButton = 12, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="OffsetType.BelowLastButton"]/*' /> BelowLastButton = 13, // TM_ENUM(0, OT, TOPLEFT) // TM_ENUM(1, OT, TOPRIGHT) // TM_ENUM(2, OT, TOPMIDDLE) // TM_ENUM(3, OT, BOTTOMLEFT) // TM_ENUM(4, OT, BOTTOMRIGHT) // TM_ENUM(5, OT, BOTTOMMIDDLE) // TM_ENUM(6, OT, MIDDLELEFT) // TM_ENUM(7, OT, MIDDLERIGHT) // TM_ENUM(8, OT, LEFTOFCAPTION) // TM_ENUM(9, OT, RIGHTOFCAPTION) // TM_ENUM(10, OT, LEFTOFLASTBUTTON) // TM_ENUM(11, OT, RIGHTOFLASTBUTTON) // TM_ENUM(12, OT, ABOVELASTBUTTON) // TM_ENUM(13, OT, BELOWLASTBUTTON) } /// <include 
file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="IconEffect"]/*' /> public enum IconEffect { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="IconEffect.None"]/*' /> None = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="IconEffect.Glow"]/*' /> Glow = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="IconEffect.Shadow"]/*' /> Shadow = 2, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="IconEffect.Pulse"]/*' /> Pulse = 3, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="IconEffect.Alpha"]/*' /> Alpha = 4, // TM_ENUM(0, ICE, NONE) // TM_ENUM(1, ICE, GLOW) // TM_ENUM(2, ICE, SHADOW) // TM_ENUM(3, ICE, PULSE) // TM_ENUM(4, ICE, ALPHA) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="TextShadowType"]/*' /> public enum TextShadowType { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="TextShadowType.None"]/*' /> None = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="TextShadowType.Single"]/*' /> Single = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="TextShadowType.Continuous"]/*' /> Continuous = 2, // TM_ENUM(0, TST, NONE) // TM_ENUM(1, TST, SINGLE) // TM_ENUM(2, TST, CONTINUOUS) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="GlyphType"]/*' /> public enum GlyphType { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="GlyphType.None"]/*' /> None = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="GlyphType.ImageGlyph"]/*' /> ImageGlyph = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="GlyphType.FontGlyph"]/*' /> FontGlyph = 2, // TM_ENUM(0, GT, NONE) // TM_ENUM(1, GT, IMAGEGLYPH) // TM_ENUM(2, GT, FONTGLYPH) } /// <include 
file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ImageSelectType"]/*' /> public enum ImageSelectType { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ImageSelectType.None"]/*' /> None = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ImageSelectType.Size"]/*' /> Size = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ImageSelectType.Dpi"]/*' /> Dpi = 2, // TM_ENUM(0, IST, NONE) // TM_ENUM(1, IST, SIZE) // TM_ENUM(2, IST, DPI) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="TrueSizeScalingType"]/*' /> public enum TrueSizeScalingType { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="TrueSizeScalingType.None"]/*' /> None = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="TrueSizeScalingType.Size"]/*' /> Size = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="TrueSizeScalingType.Dpi"]/*' /> Dpi = 2, // TM_ENUM(0, TSST, NONE) // TM_ENUM(1, TSST, SIZE) // TM_ENUM(2, TSST, DPI) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="GlyphFontSizingType"]/*' /> public enum GlyphFontSizingType { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="GlyphFontSizingType.None"]/*' /> None = 0, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="GlyphFontSizingType.Size"]/*' /> Size = 1, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="GlyphFontSizingType.Dpi"]/*' /> Dpi = 2, // TM_ENUM(0, GFST, NONE) // TM_ENUM(1, GFST, SIZE) // TM_ENUM(2, GFST, DPI) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty"]/*' /> [ SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // Maps to native enum. 
] public enum ColorProperty { /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.BorderColor"]/*' /> BorderColor = 3801, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.FillColor"]/*' /> FillColor = 3802, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.TextColor"]/*' /> TextColor = 3803, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.EdgeLightColor"]/*' /> EdgeLightColor = 3804, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.EdgeHighlightColor"]/*' /> EdgeHighlightColor = 3805, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.EdgeShadowColor"]/*' /> EdgeShadowColor = 3806, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.EdgeDarkShadowColor"]/*' /> EdgeDarkShadowColor = 3807, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.EdgeFillColor"]/*' /> EdgeFillColor = 3808, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.TransparentColor"]/*' /> TransparentColor = 3809, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.GradientColor1"]/*' /> GradientColor1 = 3810, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.GradientColor2"]/*' /> GradientColor2 = 3811, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.GradientColor3"]/*' /> GradientColor3 = 3812, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.GradientColor4"]/*' /> GradientColor4 = 3813, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.GradientColor5"]/*' /> GradientColor5 = 3814, /// <include 
file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.ShadowColor"]/*' /> ShadowColor = 3815, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.GlowColor"]/*' /> GlowColor = 3816, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.TextBorderColor"]/*' /> TextBorderColor = 3817, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.TextShadowColor"]/*' /> TextShadowColor = 3818, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.GlyphTextColor"]/*' /> GlyphTextColor = 3819, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.GlyphTransparentColor"]/*' /> GlyphTransparentColor = 3820, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.FillColorHint"]/*' /> FillColorHint = 3821, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.BorderColorHint"]/*' /> BorderColorHint = 3822, /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="ColorProperty.AccentColorHint"]/*' /> AccentColorHint = 3823 // TM_PROP(3801, TMT, BORDERCOLOR, COLOR) // color of borders for BorderFill // TM_PROP(3802, TMT, FILLCOLOR, COLOR) // color of bg fill // TM_PROP(3803, TMT, TEXTCOLOR, COLOR) // color text is drawn in // TM_PROP(3804, TMT, EDGELIGHTCOLOR, COLOR) // edge color // TM_PROP(3805, TMT, EDGEHIGHLIGHTCOLOR, COLOR) // edge color // TM_PROP(3806, TMT, EDGESHADOWCOLOR, COLOR) // edge color // TM_PROP(3807, TMT, EDGEDKSHADOWCOLOR, COLOR) // edge color // TM_PROP(3808, TMT, EDGEFILLCOLOR, COLOR) // edge color // TM_PROP(3809, TMT, TRANSPARENTCOLOR, COLOR) // color of pixels that are treated as transparent (not drawn) // TM_PROP(3810, TMT, GRADIENTCOLOR1, COLOR) // first color in gradient // TM_PROP(3811, TMT, GRADIENTCOLOR2, COLOR) // second color in 
gradient // TM_PROP(3812, TMT, GRADIENTCOLOR3, COLOR) // third color in gradient // TM_PROP(3813, TMT, GRADIENTCOLOR4, COLOR) // forth color in gradient // TM_PROP(3814, TMT, GRADIENTCOLOR5, COLOR) // fifth color in gradient // TM_PROP(3815, TMT, SHADOWCOLOR, COLOR) // color of text shadow // TM_PROP(3816, TMT, GLOWCOLOR, COLOR) // color of glow produced by DrawThemeIcon // TM_PROP(3817, TMT, TEXTBORDERCOLOR, COLOR) // color of text border // TM_PROP(3818, TMT, TEXTSHADOWCOLOR, COLOR) // color of text shadow // TM_PROP(3819, TMT, GLYPHTEXTCOLOR, COLOR) // color that font-based glyph is drawn with // TM_PROP(3820, TMT, GLYPHTRANSPARENTCOLOR, COLOR) // color of transparent pixels in GlyphImageFile // TM_PROP(3821, TMT, FILLCOLORHINT, COLOR) // hint about fill color used (for custom controls) // TM_PROP(3822, TMT, BORDERCOLORHINT, COLOR) // hint about border color used (for custom controls) // TM_PROP(3823, TMT, ACCENTCOLORHINT, COLOR) // hint about accent color used (for custom controls) } /// <include file='doc\VisualStyleTypesAndProperties.uex' path='docs/doc[@for="EnumProperty"]/*' /> [ SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // EnumProperty maps to native enum. 
]
/// <summary>
/// Identifies the enum-typed properties of a visual style element.
/// Maps to the native TMT_* ENUM theme properties (values 4001-4015).
/// </summary>
public enum EnumProperty
{
    BackgroundType = 4001,      // TMT_BGTYPE: basic drawing type for each part
    BorderType = 4002,          // TMT_BORDERTYPE: type of border for BorderFill parts
    FillType = 4003,            // TMT_FILLTYPE: fill shape for BorderFill parts
    SizingType = 4004,          // TMT_SIZINGTYPE: how to size ImageFile parts
    HorizontalAlignment = 4005, // TMT_HALIGN: horizontal alignment for TRUESIZE parts & glyphs
    ContentAlignment = 4006,    // TMT_CONTENTALIGNMENT: custom window prop: how caption text is aligned
    VerticalAlignment = 4007,   // TMT_VALIGN: vertical alignment for TRUESIZE parts & glyphs
    OffsetType = 4008,          // TMT_OFFSETTYPE: how a window part should be placed
    IconEffect = 4009,          // TMT_ICONEFFECT: type of effect to use with DrawThemeIcon
    TextShadowType = 4010,      // TMT_TEXTSHADOWTYPE: type of shadow to draw with text
    ImageLayout = 4011,         // TMT_IMAGELAYOUT: how multiple images are arranged (horz. or vert.)
    GlyphType = 4012,           // TMT_GLYPHTYPE: controls type of glyph in imagefile objects
    ImageSelectType = 4013,     // TMT_IMAGESELECTTYPE: when to select from IMAGEFILE1..IMAGEFILE5
    GlyphFontSizingType = 4014, // TMT_GLYPHFONTSIZINGTYPE: when to select a bigger/smaller glyph font size
    TrueSizeScalingType = 4015  // TMT_TRUESIZESCALINGTYPE: how a TrueSize image is scaled
}

/// <summary>
/// Identifies the image-filename properties of a visual style element.
/// </summary>
[
SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // Maps to native enum.
]
/// <summary>
/// Identifies the filename properties of a visual style element.
/// Maps to the native TMT_* FILENAME theme properties (values 3001-3008).
/// </summary>
public enum FilenameProperty
{
    ImageFile = 3001,      // TMT_IMAGEFILE: filename of the image (or basename, for multiple images)
    ImageFile1 = 3002,     // TMT_IMAGEFILE1: multiresolution image file
    ImageFile2 = 3003,     // TMT_IMAGEFILE2: multiresolution image file
    ImageFile3 = 3004,     // TMT_IMAGEFILE3: multiresolution image file
    ImageFile4 = 3005,     // TMT_IMAGEFILE4: multiresolution image file
    ImageFile5 = 3006,     // TMT_IMAGEFILE5: multiresolution image file
    StockImageFile = 3007, // TMT_STOCKIMAGEFILE: the only images GetThemeBitmap can be called on
    GlyphImageFile = 3008  // TMT_GLYPHIMAGEFILE: the filename for the glyph image
}

/// <summary>
/// Identifies the font properties of a visual style element.
/// </summary>
[
SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // Maps to native enum.
]
/// <summary>
/// Identifies the font properties of a visual style element.
/// </summary>
public enum FontProperty
{
    GlyphFont = 2601 // TMT_GLYPHFONT: the font that the glyph is drawn with
}

/// <summary>
/// Identifies the integer properties of a visual style element.
/// Maps to the native TMT_* INT/SIZE theme properties (values 2401-2424).
/// </summary>
[
SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // Maps to native enum.
]
public enum IntegerProperty
{
    ImageCount = 2401,          // number of state images in an imagefile
    AlphaLevel = 2402,          // (0-255) alpha value for an icon (DrawThemeIcon part)
    BorderSize = 2403,          // size of the border line for bgtype=BorderFill
    RoundCornerWidth = 2404,    // (0-100) % of roundness for rounded rects
    RoundCornerHeight = 2405,   // (0-100) % of roundness for rounded rects
    GradientRatio1 = 2406,      // (0-255) amount of gradient color 1 to use (all five must total 255)
    GradientRatio2 = 2407,      // (0-255) amount of gradient color 2 to use
    GradientRatio3 = 2408,      // (0-255) amount of gradient color 3 to use
    GradientRatio4 = 2409,      // (0-255) amount of gradient color 4 to use
    GradientRatio5 = 2410,      // (0-255) amount of gradient color 5 to use
    ProgressChunkSize = 2411,   // size of progress control chunks
    ProgressSpaceSize = 2412,   // size of progress control spaces
    Saturation = 2413,          // (0-255) amount of saturation for DrawThemeIcon() part
    TextBorderSize = 2414,      // size of border around text chars
    AlphaThreshold = 2415,      // (0-255) the minimum alpha value of a pixel that is solid
    Width = 2416,               // custom window prop: size of part (min. window)
    Height = 2417,              // custom window prop: size of part (min. window)
    GlyphIndex = 2418,          // for font-based glyphs, the char index into the font
    TrueSizeStretchMark = 2419, // stretch TrueSize image when target exceeds source by this percent
    MinDpi1 = 2420,             // min DPI ImageFile1 was designed for
    MinDpi2 = 2421,             // min DPI ImageFile2 was designed for
    MinDpi3 = 2422,             // min DPI ImageFile3 was designed for
    MinDpi4 = 2423,             // min DPI ImageFile4 was designed for
    MinDpi5 = 2424              // min DPI ImageFile5 was designed for
}

/// <summary>
/// Identifies the point (POSITION) properties of a visual style element.
/// </summary>
[
SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // Maps to native enum.
]
public enum PointProperty
{
    Offset = 3401,           // TMT_OFFSET: for window part layout
    TextShadowOffset = 3402, // TMT_TEXTSHADOWOFFSET: where char shadows are drawn, relative to the original chars
    MinSize = 3403,          // min dest rect that ImageFile was designed for
    MinSize1 = 3404,         // min dest rect that ImageFile1 was designed for
    MinSize2 = 3405,         // min dest rect that ImageFile2 was designed for
    MinSize3 = 3406,         // min dest rect that ImageFile3 was designed for
    MinSize4 = 3407,         // min dest rect that ImageFile4 was designed for
    MinSize5 = 3408          // min dest rect that ImageFile5 was designed for
}

/// <summary>
/// Identifies the margin properties of a visual style element.
/// </summary>
[
SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // Maps to native enum.
]
public enum MarginProperty
{
    SizingMargins = 3601,  // TMT_SIZINGMARGINS: margins used for 9-grid sizing
    ContentMargins = 3602, // TMT_CONTENTMARGINS: margins that define where content can be placed
    CaptionMargins = 3603  // TMT_CAPTIONMARGINS: margins that define where caption text can be placed
}

/// <summary>
/// Identifies the string properties of a visual style element.
/// </summary>
[
SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // Maps to native enum.
]
/// <summary>
/// Identifies the string properties of a visual style element.
/// </summary>
public enum StringProperty
{
    Text = 3201 // TMT_TEXT
}

/// <summary>
/// Identifies the Boolean properties of a visual style element.
/// Maps to the native TMT_* BOOL theme properties (values 2201-2213).
/// </summary>
[
SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // Maps to native enum.
]
public enum BooleanProperty
{
    Transparent = 2201,         // image has transparent areas (see TransparentColor)
    AutoSize = 2202,            // if true, nonclient caption width varies with text extent
    BorderOnly = 2203,          // only draw the border area of the image
    Composited = 2204,          // control will handle the composite drawing
    BackgroundFill = 2205,      // TMT_BGFILL: if true, TRUESIZE images should be drawn on bg fill
    GlyphTransparent = 2206,    // glyph has transparent areas (see GlyphTransparentColor)
    GlyphOnly = 2207,           // only draw the glyph (not the background)
    AlwaysShowSizingBar = 2208, // TMT_ALWAYSSHOWSIZINGBAR
    MirrorImage = 2209,         // default=true: image gets mirrored in RTL (mirror) windows
    UniformSizing = 2210,       // if true, height and width must be uniformly sized
    IntegralSizing = 2211,      // for TRUESIZE and border sizing: if true, factor must be an integer
    SourceGrow = 2212,          // if true, will scale up the source image when needed
    SourceShrink = 2213         // if true, will scale down the source image when needed
}

// Some other misc enums

/// <summary>
/// Specifies which edges of a rectangle to draw. Maps to the native BF_* flags.
/// </summary>
[Flags]
[
SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // Maps to native enum.
]
public enum Edges
{
    Left = 0x0001,     // BF_LEFT
    Top = 0x0002,      // BF_TOP
    Right = 0x0004,    // BF_RIGHT
    Bottom = 0x0008,   // BF_BOTTOM
    Diagonal = 0x0010, // BF_DIAGONAL: for diagonal lines, the Left/Top/Right/Bottom flags
                       // specify the end point of the vector bounded by the rectangle parameter
}

/// <summary>
/// Specifies the visual style of the edges drawn. Maps to the native
/// BDR_*/EDGE_* combinations (EDGE_RAISED, EDGE_SUNKEN, EDGE_ETCHED, EDGE_BUMP).
/// </summary>
[
SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // Maps to native enum.
]
public enum EdgeStyle
{
    Raised = 0x0001 | 0x0004, // EDGE_RAISED = BDR_RAISEDOUTER | BDR_RAISEDINNER
    Sunken = 0x0002 | 0x0008, // EDGE_SUNKEN = BDR_SUNKENOUTER | BDR_SUNKENINNER
    Etched = 0x0002 | 0x0004, // EDGE_ETCHED = BDR_SUNKENOUTER | BDR_RAISEDINNER
    Bump = 0x0001 | 0x0008    // EDGE_BUMP   = BDR_RAISEDOUTER | BDR_SUNKENINNER
}

/// <summary>
/// Specifies optional effects applied when drawing an edge.
/// NOTE(review): the managed values map Flat=0x1000 and Soft=0x4000, while the
/// native winuser.h defines BF_SOFT=0x1000 and BF_FLAT=0x4000 — the names appear
/// swapped relative to native; values are a shipped public contract, do not change.
/// </summary>
[Flags]
public enum EdgeEffects
{
    None = 0,
    FillInterior = 0x0800, // BF_MIDDLE: fill the interior
    Flat = 0x1000,
    Soft = 0x4000,
    Mono = 0x8000,
}

/// <summary>
/// Holds text-metric values for a visual-style font (managed counterpart of
/// the dimensions reported for a font: heights, widths, char range, style flags).
/// </summary>
public struct TextMetrics
{
    private int height;
    private int ascent;
    private int descent;
    private int internalLeading;
    private int externalLeading;
    private int aveCharWidth;
    private int maxCharWidth;
    private int weight;
    private int overhang;
    private int digitizedAspectX;
    private int digitizedAspectY;
    private char firstChar;
    private char lastChar;
    private char defaultChar;
    private char breakChar;
    private bool italic;
    private bool underlined;
    private bool struckOut;
    private TextMetricsPitchAndFamilyValues pitchAndFamily;
    private TextMetricsCharacterSet charSet;

    /// <summary>Height (ascent + descent) of characters.</summary>
    public int Height { get { return height; } set { height = value; } }
    /// <summary>Ascent (units above the base line) of characters.</summary>
    public int Ascent { get { return ascent; } set { ascent = value; } }
    /// <summary>Descent (units below the base line) of characters.</summary>
    public int Descent { get { return descent; } set { descent = value; } }
    public int InternalLeading { get { return internalLeading; } set { internalLeading = value; } }
    public int ExternalLeading { get { return externalLeading; } set { externalLeading = value; } }
    public int AverageCharWidth { get { return aveCharWidth; } set { aveCharWidth = value; } }
    public int MaxCharWidth { get { return maxCharWidth; } set { maxCharWidth = value; } }
    public int Weight { get { return weight; } set { weight = value; } }
    public int Overhang { get { return overhang; } set { overhang = value; } }
    public int DigitizedAspectX { get { return digitizedAspectX; } set { digitizedAspectX = value; } }
    public int DigitizedAspectY { get { return digitizedAspectY; } set { digitizedAspectY = value; } }
    public char FirstChar { get { return firstChar; } set { firstChar = value; } }
    public char LastChar { get { return lastChar; } set { lastChar = value; } }
    public char DefaultChar { get { return defaultChar; } set { defaultChar = value; } }
    public char BreakChar { get { return breakChar; } set { breakChar = value; } }
    public bool Italic { get { return italic; } set { italic = value; } }
    public bool Underlined { get { return underlined; } set { underlined = value; } }
    public bool StruckOut { get { return struckOut; } set { struckOut = value; } }
    public TextMetricsPitchAndFamilyValues PitchAndFamily { get { return pitchAndFamily; } set { pitchAndFamily = value; } }
    public TextMetricsCharacterSet CharSet { get { return charSet; } set { charSet = value; } }
}

/// <summary>
/// Pitch and family flags of a font. Maps to the native TMPF_* flags.
/// </summary>
[Flags]
public enum TextMetricsPitchAndFamilyValues
{
    FixedPitch = 0x01, // TMPF_FIXED_PITCH
    Vector = 0x02,     // TMPF_VECTOR
    TrueType = 0x04,   // TMPF_TRUETYPE
    Device = 0x08      // TMPF_DEVICE
}

/// <summary>
/// Character set of a font. Maps to the native *_CHARSET constants.
/// </summary>
public enum TextMetricsCharacterSet
{
    Ansi = 0,
    Baltic = 186,
    ChineseBig5 = 136,
    Default = 1,
    EastEurope = 238,
    Gb2312 = 134,
    Greek = 161,
    Hangul = 129,
    Mac = 77,
    Oem = 255,
    Russian = 204,
    ShiftJis = 128,
    Symbol = 2,
    Turkish = 162,
    Vietnamese = 163,
    Johab = 130,  // Korean
    Arabic = 178, // Middle East
    Hebrew = 177, // Middle East
    Thai = 222,
}

/// <summary>
/// Options for theme-background hit testing. Maps to the native HTTB_* flags.
/// </summary>
[Flags]
[
SuppressMessage("Microsoft.Design", "CA1008:EnumsShouldHaveZeroValue") // Maps to native enum.
]
public enum HitTestOptions
{
    // Default: hit test against background segments; returns HTCLIENT for the
    // middle segment, or HTTOP/HTLEFT/etc. for the respective segment.
    BackgroundSegment = 0x0000,
    // Fixed border: returns either HTCLIENT (middle) or HTBORDER (any other segment).
    FixedBorder = 0x0002,
    // Caption: returns HTCAPTION for top / top-left / top-right segments.
    Caption = 0x0004,
    ResizingBorderLeft = 0x0010,   // hit test left resizing border
    ResizingBorderTop = 0x0020,    // hit test top resizing border
    ResizingBorderRight = 0x0040,  // hit test right resizing border
    ResizingBorderBottom = 0x0080, // hit test bottom resizing border
    ResizingBorder = ResizingBorderLeft | ResizingBorderTop | ResizingBorderRight | ResizingBorderBottom,
    // Resizing border is specified as a template, not just window edges.
    // Mutually exclusive with SystemSizingMargins; SizingTemplate takes precedence.
    SizingTemplate = 0x0100,
    // Use the system resizing-border width rather than theme content margins.
    SystemSizingMargins = 0x0200
}

/// <summary>
/// Return codes from visual-style hit testing. Maps to the native HT* codes.
/// </summary>
public enum HitTestCode
{
    Nowhere = 0,
    Client = 1,
    Left = 10,
    Right = 11,
    Top = 12,
    Bottom = 15,
    TopLeft = 13,
    TopRight = 14,
    BottomLeft = 16,
    BottomRight = 17
}

/// <summary>
/// Specifies which size value of a visual style part to retrieve.
/// </summary>
public enum ThemeSizeType
{
    Minimum = 0, // minimum size
    True = 1,    // size without stretching
    Draw = 2     // size the current theme will use to draw the part
}

// Internal enums for VisualStyleInformation

// Names of the documentation properties stored in a visual style file.
internal struct VisualStyleDocProperty
{
    internal static string DisplayName = "DisplayName";
    internal static string Company = "Company";
    internal static string Author = "Author";
    internal static string Copyright = "Copyright";
    internal static string Url = "Url";
    internal static string Version = "Version";
    internal static string Description = "Description";
}

// IDs of system-wide theme properties queried via the theme API.
internal struct VisualStyleSystemProperty
{
    internal static int SupportsFlatMenus = 1001;
    internal static int MinimumColorDepth = 1301;
}
}
/********************************************************************++
Copyright (c) Microsoft Corporation.  All rights reserved.
--********************************************************************/

using System;
using System.Globalization;
using System.Management.Automation;
using System.Management;
using System.Text;
using System.Collections;
using System.Threading;

namespace Microsoft.PowerShell.Commands
{
    /// <summary>
    /// A command to get WMI Objects (Get-WmiObject).
    /// </summary>
    [Cmdlet(VerbsCommon.Get, "WmiObject", DefaultParameterSetName = "query", HelpUri = "http://go.microsoft.com/fwlink/?LinkID=113337", RemotingCapability = RemotingCapability.OwnedByCommand)]
    public class GetWmiObjectCommand : WmiBaseCmdlet
    {
        #region Parameters

        /// <summary>
        /// The WMI class to query.
        /// </summary>
        [Alias("ClassName")]
        [Parameter(Position = 0, Mandatory = true, ParameterSetName = "query")]
        [Parameter(Position = 1, ParameterSetName = "list")]
        public string Class { get; set; }

        /// <summary>
        /// To specify whether to get the results recursively.
        /// </summary>
        [Parameter(ParameterSetName = "list")]
        public SwitchParameter Recurse { get; set; } = false;

        /// <summary>
        /// The WMI properties to retrieve. Defaults to "*" (all properties);
        /// the getter returns a defensive copy of the backing array.
        /// </summary>
        [Parameter(Position = 1, ParameterSetName = "query")]
        public string[] Property
        {
            get { return (string[])_property.Clone(); }
            set { _property = value; }
        }

        /// <summary>
        /// The filter to be used in the search (WQL "where" clause body).
        /// </summary>
        [Parameter(ParameterSetName = "query")]
        public string Filter { get; set; }

        /// <summary>
        /// Whether to use amended qualifiers.
        /// </summary>
        [Parameter]
        public SwitchParameter Amended { get; set; }

        /// <summary>
        /// Whether to read objects directly from the provider, bypassing
        /// derived-class enumeration. Ignored when 'list' is specified.
        /// </summary>
        [Parameter(ParameterSetName = "WQLQuery")]
        [Parameter(ParameterSetName = "query")]
        public SwitchParameter DirectRead { get; set; }

        /// <summary>
        /// List the available classes instead of querying instances.
        /// </summary>
        [Parameter(ParameterSetName = "list")]
        public SwitchParameter List { get; set; } = false;

        /// <summary>
        /// The raw WQL query string to search for objects.
        /// </summary>
        [Parameter(Mandatory = true, ParameterSetName = "WQLQuery")]
        public string Query { get; set; }

        #endregion Parameters

        #region parameter data

        // Backing store for Property; "*" selects every property.
        private string[] _property = new string[] { "*" };

        #endregion parameter data

        #region Command code

        /// <summary>
        /// Uses this.Filter, this.Class and this._property to compose the WQL query.
        /// </summary>
        internal string GetQueryString()
        {
            StringBuilder returnValue = new StringBuilder("select ");
            returnValue.Append(String.Join(", ", _property));
            returnValue.Append(" from ");
            returnValue.Append(Class);
            if (!String.IsNullOrEmpty(Filter))
            {
                returnValue.Append(" where ");
                returnValue.Append(Filter);
            }

            return returnValue.ToString();
        }

        /// <summary>
        /// Uses filter table to convert the class into WMI understandable language.
        /// Character  Description                                                  Example  Match                                     Comment
        /// *          Matches zero or more characters starting at the position     A*       A,ag,Apple                                Supported by PowerShell.
        /// ?          Matches any character at the specified position              ?n       An,in,on (does not match ran)             Supported by PowerShell.
        /// _          Matches any character at the specified position              _n       An,in,on (does not match ran)             Supported by WMI.
        /// %          Matches zero or more characters starting at the position     A%       A,ag,Apple                                Supported by WMI.
        /// []         Matches a range of characters                                [a-l]ook Book,cook,look (does not match took)      Supported by WMI and PowerShell.
        /// []         Matches specified characters                                 [bc]ook  Book,cook (does not match look)           Supported by WMI and PowerShell.
        /// ^          Does not match specified characters                          [^bc]ook Look,took (does not match book, cook)     Supported by WMI.
        /// </summary>
        internal string GetFilterClassName()
        {
            if (string.IsNullOrEmpty(this.Class))
                return string.Empty;

            // BUGFIX(cleanup): removed obsolete string.Copy — strings are
            // immutable and Replace already returns a new instance.
            // Translate the PowerShell wildcards to their WQL 'like' equivalents.
            return this.Class.Replace('*', '%').Replace('?', '_');
        }

        /// <summary>
        /// True when the namespace is a localized (ms_xxx) namespace, which the
        /// recursive listing skips.
        /// </summary>
        internal bool IsLocalizedNamespace(string sNamespace)
        {
            return sNamespace.StartsWith("ms_", StringComparison.OrdinalIgnoreCase);
        }

        /// <summary>
        /// Validates that Class contains only characters legal in a WQL class
        /// pattern, escaping '_' (a WQL wildcard) so it matches literally.
        /// Mutates this.Class with the escaped form and returns true on success.
        /// </summary>
        internal bool ValidateClassFormat()
        {
            string filterClass = this.Class;
            if (string.IsNullOrEmpty(filterClass))
                return true;

            StringBuilder newClassName = new StringBuilder();
            for (int i = 0; i < filterClass.Length; i++)
            {
                if (Char.IsLetterOrDigit(filterClass[i])
                    || filterClass[i].Equals('[') || filterClass[i].Equals(']')
                    || filterClass[i].Equals('*') || filterClass[i].Equals('?')
                    || filterClass[i].Equals('-'))
                {
                    newClassName.Append(filterClass[i]);
                    continue;
                }
                else if (filterClass[i].Equals('_'))
                {
                    // '_' is a single-character wildcard in WQL; bracket it so
                    // the user's literal underscore matches literally.
                    newClassName.Append('[');
                    newClassName.Append(filterClass[i]);
                    newClassName.Append(']');
                    continue;
                }

                return false;
            }

            this.Class = newClassName.ToString();
            return true;
        }

        /// <summary>
        /// Gets a ManagementObjectSearcher enumerating the classes of the given
        /// scope (optionally restricted by the Class pattern).
        /// </summary>
        internal ManagementObjectSearcher GetObjectList(ManagementScope scope)
        {
            StringBuilder queryStringBuilder = new StringBuilder();
            if (string.IsNullOrEmpty(this.Class))
            {
                queryStringBuilder.Append("select * from meta_class");
            }
            else
            {
                // GetFilterClassName never returns null here (Class is non-empty),
                // so the original 'filterClass == null' guard was dead code.
                string filterClass = GetFilterClassName();
                queryStringBuilder.Append("select * from meta_class where __class like '");
                queryStringBuilder.Append(filterClass);
                queryStringBuilder.Append("'");
            }

            ObjectQuery classQuery = new ObjectQuery(queryStringBuilder.ToString());
            EnumerationOptions enumOptions = new EnumerationOptions();
            enumOptions.EnumerateDeep = true;
            enumOptions.UseAmendedQualifiers = this.Amended;
            var searcher = new ManagementObjectSearcher(scope, classQuery, enumOptions);
            return searcher;
        }

        /// <summary>
        /// Connects the given scope, writing an INVALID_NAMESPACE_IDENTIFIER
        /// error record on failure. Returns true when the connection succeeded.
        /// (Extracted: this try/catch triple was duplicated verbatim twice.)
        /// </summary>
        private bool TryConnectScope(ManagementScope scope, string namespacePath)
        {
            try
            {
                scope.Connect();
                return true;
            }
            catch (ManagementException e)
            {
                WriteNamespaceConnectError(e, namespacePath);
            }
            catch (System.Runtime.InteropServices.COMException e)
            {
                WriteNamespaceConnectError(e, namespacePath);
            }
            catch (System.UnauthorizedAccessException e)
            {
                WriteNamespaceConnectError(e, namespacePath);
            }

            return false;
        }

        /// <summary>
        /// Writes the standard error record for a failed namespace connection.
        /// </summary>
        private void WriteNamespaceConnectError(Exception e, string namespacePath)
        {
            ErrorRecord errorRecord = new ErrorRecord(e, "INVALID_NAMESPACE_IDENTIFIER", ErrorCategory.ObjectNotFound, null);
            errorRecord.ErrorDetails = new ErrorDetails(this, "WmiResources", "WmiNamespaceConnect", namespacePath, e.Message);
            WriteError(errorRecord);
        }

        /// <summary>
        /// Enumerates the classes of a connected scope and writes each to the
        /// pipeline. (Extracted: this loop was duplicated in both list branches.)
        /// </summary>
        private void WriteClassList(ManagementScope scope)
        {
            ManagementObjectSearcher searcher = this.GetObjectList(scope);
            if (searcher == null)
                return;
            foreach (ManagementBaseObject obj in searcher.Get())
            {
                WriteObject(obj);
            }
        }

        /// <summary>
        /// Maps a ManagementException from a query to the error record the
        /// cmdlet reports, specializing the message for the common error codes.
        /// </summary>
        private ErrorRecord BuildQueryErrorRecord(ManagementException e, string queryString)
        {
            if (e.ErrorCode.Equals(ManagementStatus.InvalidClass))
            {
                string className = GetClassNameFromQuery(queryString);
                string errorMsg = String.Format(CultureInfo.InvariantCulture, WmiResources.WmiQueryFailure, e.Message, className);
                return new ErrorRecord(new ManagementException(errorMsg), "GetWMIManagementException", ErrorCategory.InvalidType, null);
            }

            if (e.ErrorCode.Equals(ManagementStatus.InvalidQuery))
            {
                string errorMsg = String.Format(CultureInfo.InvariantCulture, WmiResources.WmiQueryFailure, e.Message, queryString);
                return new ErrorRecord(new ManagementException(errorMsg), "GetWMIManagementException", ErrorCategory.InvalidArgument, null);
            }

            if (e.ErrorCode.Equals(ManagementStatus.InvalidNamespace))
            {
                string errorMsg = String.Format(CultureInfo.InvariantCulture, WmiResources.WmiQueryFailure, e.Message, this.Namespace);
                return new ErrorRecord(new ManagementException(errorMsg), "GetWMIManagementException", ErrorCategory.InvalidArgument, null);
            }

            return new ErrorRecord(e, "GetWMIManagementException", ErrorCategory.InvalidOperation, null);
        }

        /// <summary>
        /// Gets the WMI objects (or, with -List, the WMI classes) matching the
        /// cmdlet parameters and writes them to the pipeline.
        /// </summary>
        protected override void BeginProcessing()
        {
            ConnectionOptions options = GetConnectionOption();

            if (this.AsJob)
            {
                RunAsJob("Get-WMIObject");
                return;
            }

            if (List.IsPresent)
            {
                if (!this.ValidateClassFormat())
                {
                    // BUGFIX: the original used String.Format(culture, "Class", this.Class);
                    // the format string had no placeholder, so the exception message was
                    // always the literal "Class". Include the offending class name.
                    ErrorRecord errorRecord = new ErrorRecord(
                        new ArgumentException(
                            String.Format(
                                CultureInfo.CurrentCulture,
                                "Invalid WMI class name: {0}", this.Class)),
                        "INVALID_QUERY_IDENTIFIER",
                        ErrorCategory.InvalidArgument,
                        null);
                    errorRecord.ErrorDetails = new ErrorDetails(this, "WmiResources", "WmiFilterInvalidClass", this.Class);
                    WriteError(errorRecord);
                    return;
                }

                foreach (string name in ComputerName)
                {
                    if (this.Recurse.IsPresent)
                    {
                        // Breadth-first walk of the namespace tree rooted at this.Namespace.
                        Queue namespaceElement = new Queue();
                        namespaceElement.Enqueue(this.Namespace);

                        while (namespaceElement.Count > 0)
                        {
                            string connectNamespace = (string)namespaceElement.Dequeue();
                            ManagementScope scope = new ManagementScope(WMIHelper.GetScopeString(name, connectNamespace), options);
                            if (!TryConnectScope(scope, connectNamespace))
                                continue;

                            // Queue every child namespace, skipping localized ms_* namespaces.
                            ManagementClass namespaceClass = new ManagementClass(scope, new ManagementPath("__Namespace"), new ObjectGetOptions());
                            foreach (ManagementBaseObject obj in namespaceClass.GetInstances())
                            {
                                if (!IsLocalizedNamespace((string)obj["Name"]))
                                {
                                    namespaceElement.Enqueue(connectNamespace + "\\" + obj["Name"]);
                                }
                            }

                            WriteClassList(scope);
                        }
                    }
                    else
                    {
                        ManagementScope scope = new ManagementScope(WMIHelper.GetScopeString(name, this.Namespace), options);
                        if (!TryConnectScope(scope, this.Namespace))
                            continue;

                        WriteClassList(scope);
                    }
                }

                return;
            }

            // When -List is not specified and -Recurse is specified, we need the
            // -Class parameter to compose the right query string.
            if (this.Recurse.IsPresent && string.IsNullOrEmpty(Class))
            {
                string errorMsg = string.Format(CultureInfo.InvariantCulture, WmiResources.WmiParameterMissing, "-Class");
                ErrorRecord er = new ErrorRecord(new InvalidOperationException(errorMsg), "InvalidOperationException", ErrorCategory.InvalidOperation, null);
                WriteError(er);
                return;
            }

            string queryString = string.IsNullOrEmpty(this.Query) ? GetQueryString() : this.Query;
            // BUGFIX(cleanup): removed redundant .ToString() — queryString is already a string.
            ObjectQuery query = new ObjectQuery(queryString);

            foreach (string name in ComputerName)
            {
                try
                {
                    ManagementScope scope = new ManagementScope(WMIHelper.GetScopeString(name, this.Namespace), options);
                    EnumerationOptions enumOptions = new EnumerationOptions();
                    enumOptions.UseAmendedQualifiers = Amended;
                    enumOptions.DirectRead = DirectRead;
                    ManagementObjectSearcher searcher = new ManagementObjectSearcher(scope, query, enumOptions);
                    foreach (ManagementBaseObject obj in searcher.Get())
                    {
                        WriteObject(obj);
                    }
                }
                catch (ManagementException e)
                {
                    WriteError(BuildQueryErrorRecord(e, queryString));
                    continue;
                }
                catch (System.Runtime.InteropServices.COMException e)
                {
                    ErrorRecord errorRecord = new ErrorRecord(e, "GetWMICOMException", ErrorCategory.InvalidOperation, null);
                    WriteError(errorRecord);
                    continue;
                }
            } // foreach computerName
        } // BeginProcessing

        /// <summary>
        /// Get the class name from a query string.
        /// </summary>
        /// <param name="query">A WQL query known to contain a "from" clause.</param>
        /// <returns>The class name following the "from" keyword, or Class when set.</returns>
        private string GetClassNameFromQuery(string query)
        {
            System.Management.Automation.Diagnostics.Assert(query.Contains("from"), "Only get called when ErrorCode is InvalidClass, which means the query string contains 'from' and the class name");

            if (Class != null)
            {
                return Class;
            }

            int fromIndex = query.IndexOf(" from ", StringComparison.OrdinalIgnoreCase);
            string subQuery = query.Substring(fromIndex + " from ".Length);
            string className = subQuery.Split(' ')[0];
            return className;
        }

        #endregion Command code
    } // GetWmiObjectCommand
} // namespace Microsoft.PowerShell.Commands
// RPA driver: computes the bare (X0), spin (Xs) and charge (Xc) susceptibilities
// of a tight-binding model over meshes of q-points, temperatures, chemical
// potentials and frequencies, distributing the X0 work over worker threads.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using ERY.EMath;

namespace TightBindingSuite
{
	public class RPA
	{
		// Number of worker threads for the X0 calculation (see SetCpus).
		int threads;

		static void Main(string[] args)
		{
			using (BootStrap b = new BootStrap())
			{
				string inputfile = b.GetInputFile("Random Phase Approximation code", "rpa", args);

				RPA inst = new RPA();
				inst.Run(inputfile);
			}
		}

		// Inverse temperature 1/T; updated by SetTemperature.
		double Beta;

		// Loads and runs the tight-binding model, then runs the RPA on the
		// q-plane and/or q-mesh, whichever the input defines.
		void Run(string inputfile)
		{
			TightBinding tb = new TightBinding();
			tb.LoadTB(inputfile);
			tb.RunTB();

			bool ranRPA = false;

			SetCpus();

			if (tb.UseQPlane && tb.QPlane != null && tb.QPlane.Kpts.Count > 0)
			{
				RunRpa(tb, tb.QPlane, true);
				ranRPA = true;
			}
			if (tb.QMesh != null)
			{
				RunRpa(tb, tb.QMesh, false);
				ranRPA = true;
			}

			if (!ranRPA)
				Output.WriteLine("No q-points defined, so we will not run the RPA.");
		}

		// Reads the thread count from the "rpa_threads" file when present;
		// otherwise defaults to processor count minus one (at least 1) and
		// writes that default back to the file.
		private void SetCpus()
		{
			const string threadsFile = "rpa_threads";

			if (File.Exists(threadsFile))
			{
				string text = File.ReadAllText(threadsFile);
				if (int.TryParse(text, out threads))
					return;
			}

			threads = Environment.ProcessorCount - 1;
			if (threads < 1)
				threads = 1;

			using (var w = new StreamWriter(threadsFile))
			{
				w.WriteLine(threads.ToString());
			}
		}

		// Computes all susceptibilities for the given q-point list and saves
		// them: matrix files per q-plane when plane is true, otherwise an
		// eigenvalue band file for the bare susceptibility.
		public void RunRpa(TightBinding tb, KptList qpts, bool plane)
		{
			List<KPoint> QMesh = qpts.Kpts;
			List<RpaParams> rpa = CreateRpaParameterList(tb, QMesh);

			Output.WriteLine("Calculating susceptibility for {0} q-points.", QMesh.Count);

			CalcSusceptibility(tb, qpts, rpa);

			if (plane)
			{
				SaveMatricesQPlane(tb, QMesh, rpa, x => x.X0, "chi_0");
				SaveMatricesQPlane(tb, QMesh, rpa, x => x.Xs, "chi_s");
				SaveMatricesQPlane(tb, QMesh, rpa, x => x.Xc, "chi_c");
			}
			else
			{
				// NOTE(review): the lambda parameter name "CalcX0" shadows the
				// CalcX0 method; it is just a parameter here.
				OutputBands(tb, qpts, rpa, CalcX0 => CalcX0.X0, "chi_0");
			}
		}

		// Writes the eigenvalues of the selected susceptibility matrix for each
		// q-point to "eigenvalues.<name>.q" in a gnuplot-friendly layout.
		void OutputBands(TightBinding tb, KptList ks, List<RpaParams> rpa, MatrixGetter g, string name)
		{
			using (StreamWriter w = new StreamWriter("eigenvalues." + name + ".q"))
			{
				w.WriteLine("# Grid");
				w.WriteLine("{0} {1} {2} {3} {4} {5}",
					ks.Mesh[0], ks.Mesh[1], ks.Mesh[2], ks.Shift[0], ks.Shift[1], ks.Shift[2]);
				w.WriteLine("# Eigenvalues");

				foreach(var rpa_i in rpa)
				{
					var qpt = rpa_i.QptValue;

					w.Write("{0} {1} {2} ", qpt.X, qpt.Y, qpt.Z);

					Matrix chi = g(rpa_i);
					Matrix evalues = chi.EigenValues();

					for (int j = 0; j < evalues.Rows; j++)
					{
						w.Write("{0} ", evalues[j, 0].RealPart);
					}

					w.WriteLine();
				}
			}
		}

		// Builds the full Cartesian product of (temperature, mu, q-point,
		// frequency) parameter sets, in that nesting order.
		public List<RpaParams> CreateRpaParameterList(TightBinding tb, List<KPoint> QMesh)
		{
			double[] FrequencyMesh = tb.FrequencyMesh;
			double[] TemperatureMesh = tb.TemperatureMesh;

			List<RpaParams> rpa = new List<RpaParams>();

			for (int tempIndex = 0; tempIndex < TemperatureMesh.Length; tempIndex++)
			{
				for (int muIndex = 0; muIndex < tb.MuMesh.Length; muIndex++)
				{
					for (int qIndex = 0; qIndex < QMesh.Count; qIndex++)
					{
						for (int freqIndex = 0; freqIndex < FrequencyMesh.Length; freqIndex++)
						{
							rpa.Add(new RpaParams(
								qIndex,
								QMesh[qIndex].Value,
								TemperatureMesh[tempIndex],
								FrequencyMesh[freqIndex],
								tb.MuMesh[muIndex]));
						}
					}
				}
			}

			return rpa;
		}

		// Convenience accessor for a wavefunction on the k-mesh.
		public Wavefunction Bands(TightBinding tb, int kpt, int band)
		{
			return tb.KMesh.AllKpts[kpt].Wavefunctions[band];
		}

		// Updates Beta and pushes the (T, mu) pair into the k-mesh occupations.
		void SetTemperature(TightBinding tb, double temperature, double mu)
		{
			//currentTemperature = value;
			Beta = 1 / temperature;

			tb.KMesh.SetTemperature(temperature, mu);
		}

		// Computes X0 on worker threads, rescales the interaction matrices when
		// requested, then forms the dressed susceptibilities
		//   Xs = X0 (1 - S X0)^-1  and  Xc = X0 (1 + C X0)^-1
		// and reports the largest matrix element found.
		private void CalcSusceptibility(TightBinding tb, KptList qpts, List<RpaParams> rpa)
		{
			Matrix ident = Matrix.Identity(tb.Orbitals.Count * tb.Orbitals.Count);

			Matrix[] S, C;
			CalcSpinChargeMatrices(tb, rpa, out S, out C);

			Output.WriteLine("Calculating X0...");

			RpaThreadInfo[] threadInfos = CreateThreadInfos(tb, rpa, qpts);

			Output.WriteLine("Using {0} threads.", threads);

			for (int i = 0; i < threadInfos.Length; i++)
			{
				RunRpaThread(threadInfos[i]);

				// Give the primary thread a head start so its time estimate prints first.
				if (i == 0)
					Thread.Sleep(20);
			}

			bool threadsRunning;

			// Poll until every worker thread has finished.
			do
			{
				threadsRunning = false;

				for (int i = 0; i < threadInfos.Length; i++)
				{
					if (threadInfos[i].Thread.ThreadState == ThreadState.Running)
						threadsRunning = true;
				}

				Thread.Sleep(10);
			} while (threadsRunning);

			Output.WriteLine();
			Output.WriteLine("Bare susceptibility calculation completed.");
			Output.WriteLine();

			double factor = InteractionAdjustment(rpa, S, C, tb);

			if (tb.Interactions.AdjustInteractions)
			{
				Output.WriteLine("Multiplying interactions by {0}.", factor);

				for (int i = 0; i < rpa.Count; i++)
				{
					S[i] *= factor;
					C[i] *= factor;
				}
			}
			else if (factor < 1)
			{
				Output.WriteLine("WARNING: There will be divergent geometric series.");
				Output.WriteLine(" Interpret results with care!");
			}

			Output.WriteLine();
			Output.WriteLine("Calculating dressed susceptibilities.");
			Output.WriteLine();

			RpaParams largestParams = null;
			double largest = 0;
			string indices = "";
			bool charge = false;

			for (int i = 0; i < rpa.Count; i++)
			{
				Matrix s_denom = (ident - S[i] * rpa[i].X0);
				Matrix c_denom = (ident + C[i] * rpa[i].X0);
				Matrix s_inv = s_denom.Invert();
				Matrix c_inv = c_denom.Invert();

				System.Diagnostics.Debug.Assert((s_denom * s_inv).IsIdentity);

				rpa[i].Xs = rpa[i].X0 * s_inv;
				rpa[i].Xc = rpa[i].X0 * c_inv;

				// Track the largest |element| over both dressed matrices.
				for (int l1 = 0; l1 < tb.Orbitals.Count; l1++)
				{
					for (int l2 = 0; l2 < tb.Orbitals.Count; l2++)
					{
						for (int l3 = 0; l3 < tb.Orbitals.Count; l3++)
						{
							for (int l4 = 0; l4 < tb.Orbitals.Count; l4++)
							{
								int a = GetIndex(tb, l1, l2);
								int b = GetIndex(tb, l3, l4);
								bool found = false;

								if (rpa[i].Xs[a, b].MagnitudeSquared > largest)
								{
									largest = rpa[i].Xs[a, b].MagnitudeSquared;
									charge = false;
									found = true;
								}
								if (rpa[i].Xc[a, b].MagnitudeSquared > largest)
								{
									largest = rpa[i].Xc[a, b].MagnitudeSquared;
									charge = true;
									found = true;
								}

								if (found == false)
									continue;

								indices = string.Format("{0}{1}{2}{3}", l1, l2, l3, l4);
								largestParams = rpa[i];
							}
						}
					}
				}
			}

			Output.WriteLine("Largest susceptibility found at:");
			Output.WriteLine(" {0} susceptibility: {1}", charge ? "Charge" : "Spin", Math.Sqrt(largest));
			Output.WriteLine(" Indices: {0}", indices);
			Output.WriteLine(" Temperature: {0}", largestParams.Temperature);
			Output.WriteLine(" Frequency: {0}", largestParams.Frequency);
			Output.WriteLine(" Chemical Potential: {0}", largestParams.ChemicalPotential);
			Output.WriteLine(" Q: {0}", largestParams.QptValue);
		}

		// Debug check that S is invariant under swapping orbitals a and b in
		// every index position; exits the process on failure.
		private void VerifySymmetry(TightBinding tb, Matrix S, int a, int b)
		{
			for (int m1 = 0; m1 < 2; m1++)
			{
				for (int m2 = 0; m2 < 2; m2++)
				{
					for (int m3 = 0; m3 < 2; m3++)
					{
						for (int m4 = 0; m4 < 2; m4++)
						{
							int l1 = Select(m1, a, b);
							int l2 = Select(m2, a, b);
							int l3 = Select(m3, a, b);
							int l4 = Select(m4, a, b);
							int a1 = Select(m1, b, a);
							int a2 = Select(m2, b, a);
							int a3 = Select(m3, b, a);
							int a4 = Select(m4, b, a);

							int i = GetIndex(tb, l1, l2);
							int j = GetIndex(tb, l3, l4);
							int ii = GetIndex(tb, a1, a2);
							int jj = GetIndex(tb, a3, a4);

							var diff = S[i, j] - S[ii, jj];

							if (diff.Magnitude > 1e-8)
							{
								Output.WriteLine("ERROR: Failed to verify symmetry.");
								Output.WriteLine(" a = {0} b = {1}", a, b);
								Output.WriteLine(" L = {0}{1}{2}{3}", l1, l2, l3, l4);
								Output.WriteLine(" A = {0}{1}{2}{3}", a1, a2, a3, a4);
								Output.WriteLine(" ij = {0},{1} {2},{3}", i, j, ii, jj);
								Output.WriteLine(" M[i,j] = {0} {1}", S[i, j], S[ii, jj]);
								Output.WriteLine(" diff = {0}", diff.Magnitude);

								// Thrown and caught solely to capture a stack trace for the log.
								try
								{
									throw new Exception("blah");
								}
								catch(Exception e)
								{
									Output.WriteLine(e.StackTrace);
								}

								Environment.Exit(4);
							}
						}
					}
				}
			}
		}

		// Returns a when m1 == 0, otherwise b.
		private int Select(int m1, int a, int b)
		{
			if (m1 == 0)
				return a;
			else
				return b;
		}

		// Starts one worker thread running RpaChi0Thread over the given info.
		private void RunRpaThread(RpaThreadInfo info)
		{
			Thread t = new Thread(RpaChi0Thread);
			info.Thread = t;
			info.Thread.Start(info);
		}

		// Worker body: computes X0 for each parameter set assigned to this
		// thread, logging a running trace of Tr(X0).
		private void RpaChi0Thread(object obj)
		{
			RpaThreadInfo info = (RpaThreadInfo)obj;

			System.Diagnostics.Stopwatch watch = new System.Diagnostics.Stopwatch();
			watch.Start();

			TightBinding tb = info.tb;
			List<RpaParams> rpa = info.RpaParams;
			KptList qpts = info.qpts;

			for (int i = 0; i < rpa.Count; i++)
			{
				SetTemperature(tb, rpa[i].Temperature,
rpa[i].ChemicalPotential);

				rpa[i].X0 = CalcX0(tb, rpa[i].Frequency, qpts.Kpts[rpa[i].Qindex]);

				// After the first item, extrapolate the total runtime (primary thread only).
				if (i == 0 && info.PrimaryThread)
				{
					long time = watch.ElapsedTicks * rpa.Count;
					TimeSpan s = new TimeSpan(time);

					Output.WriteLine("Estimated total time {0:+hh.mm}", s);
				}

				Complex val = rpa[i].X0.Trace();

				Output.Write("q = {0}, T = {1:0.000}, mu = {2:0.000}, omega = {3:0.0000}",
					rpa[i].Qindex + 1, rpa[i].Temperature, rpa[i].ChemicalPotential, rpa[i].Frequency);
				Output.WriteLine(", Tr(X_0) = {0}", val.ToString("0.0000"));
			}
		}

		// Clones the model per thread and deals the parameter sets round-robin
		// across the workers; index 0 is marked as the primary (reporting) thread.
		private RpaThreadInfo[] CreateThreadInfos(TightBinding tb, List<RpaParams> rpa, KptList qpts)
		{
			RpaThreadInfo[] infos = new RpaThreadInfo[threads];

			for (int i = 0; i < infos.Length; i++)
			{
				infos[i] = new RpaThreadInfo();
				infos[i].tb = tb.Clone();
				infos[i].qpts = qpts;
			}

			infos[0].PrimaryThread = true;

			for (int i = 0; i < rpa.Count; i++)
			{
				infos[i % threads].RpaParams.Add(rpa[i]);
			}

			return infos;
		}

		// Finds the largest eigenvalue of the RPA denominators over all
		// parameter sets and returns the scale factor 1/largest (after dividing
		// by the configured maximum eigenvalue) that would keep the geometric
		// series convergent.
		double InteractionAdjustment(List<RpaParams> rpa, Matrix[] S, Matrix[] C, TightBinding tb)
		{
			double largest = double.MinValue;
			RpaParams largestParams = null;
			bool Cdiv = false;

			for (int i = 0; i < rpa.Count; i++)
			{
				RpaParams p = rpa[i];
				Matrix x0 = p.X0;

				double lv = LargestPositiveEigenvalue(x0 * S[i]);

				if (lv > largest)
				{
					largest = lv;
					largestParams = p;
					Cdiv = false;
				}

				lv = LargestPositiveEigenvalue(-x0 * C[i]);

				if (lv > largest)
				{
					largest = lv;
					largestParams = p;
					Cdiv = true;
				}
			}

			if (largest >= 1)
			{
				// NOTE(review): the 'largest' argument is unused — the format
				// string has no placeholder.
				Output.WriteLine("Interaction should be reduced to avoid divergence.", largest);
			}

			Output.WriteLine("Largest eigenvalue of denominator found at:");
			Output.WriteLine(" Eigenvalue: {0}", largest);
			Output.WriteLine(" {0} susceptibility", Cdiv ? "Charge" : "Spin");
			Output.WriteLine(" q = {0}", largestParams.QptValue);
			Output.WriteLine(" Temperature = {0}", largestParams.Temperature);
			Output.WriteLine(" Chemical Potential = {0}", largestParams.ChemicalPotential);
			Output.WriteLine(" Frequency = {0}", largestParams.Frequency);

			largest /= tb.Interactions.MaxEigenvalue;

			Output.WriteLine();

			return 1 / largest;
		}

		// Largest (most positive) eigenvalue of x. Uses a direct
		// diagonalization when possible; otherwise iterates a shifted
		// singular-value estimate until it converges.
		private static double LargestPositiveEigenvalue(Matrix x)
		{
			Matrix eigenvals, eigenvecs;
			double shift = 5;
			double lastValue = 0;
			double thisValue = 0;
			int iter = 0;

			if (x.IsHermitian)
			{
				x.EigenValsVecs(out eigenvals, out eigenvecs);
				return eigenvals[eigenvals.Rows - 1, 0].Magnitude;
			}
			else if (Matrix.CanDiagonalizeNonHermitian)
			{
				x.EigenValsVecs(out eigenvals, out eigenvecs);

				double largest = double.MinValue;

				for (int i = 0; i < eigenvals.Rows; i++)
				{
					if (largest < eigenvals[i, 0].RealPart)
						largest = eigenvals[i, 0].RealPart;
				}

				return largest;
			}

			do
			{
				lastValue = thisValue;

				Matrix x1 = x + Matrix.Identity(x.Rows) * shift;
				Matrix As = x1 * x1.HermitianConjugate();

				As.EigenValsVecs(out eigenvals, out eigenvecs);

				thisValue = Math.Sqrt(eigenvals[eigenvals.Rows - 1, 0].RealPart);
				thisValue -= shift;

				shift += (thisValue - lastValue);
				iter++;
			} while (Math.Abs(thisValue - lastValue) > 1e-8 || iter < 2);

			return thisValue;
		}

		// Selects which susceptibility matrix (X0/Xs/Xc) of a parameter set to save.
		delegate Matrix MatrixGetter(RpaParams p);

		// Writes, per orbital index quadruple, a file of Re/Im susceptibility
		// versus temperature, grouped by (frequency, mu, q) sections.
		private void SaveByTemperature(TightBinding tb, List<KPoint> QMesh, List<RpaParams> rpa, MatrixGetter g, string name)
		{
			rpa.Sort(RpaParams.TemperatureComparison);

			Complex[] chisum = new Complex[rpa.Count];
			double[] chimag = new double[rpa.Count];
			double[] chimagsqr = new double[rpa.Count];

			for (int l1 = 0; l1 < tb.Orbitals.Count; l1++)
			{
				for (int l2 = 0; l2 < tb.Orbitals.Count; l2++)
				{
					for (int l3 = 0; l3 < tb.Orbitals.Count; l3++)
					{
						for (int l4 = 0; l4 < tb.Orbitals.Count; l4++)
						{
							int i = GetIndex(tb, l1, l2);
							int j = GetIndex(tb, l3, l4);

							// organize by temperature
							string filename = string.Format(
								"temperature/{0}.{1}{2}{3}{4}.T", name, l1, l2, l3, l4);

							double lastFreq = double.MinValue;
							double lastMu = double.MinValue;
							// NOTE(review): a double initialized from int.MinValue;
							// compared against the integer Qindex via ChangeValue.
							double lastq = int.MinValue;

							using (StreamWriter w = new StreamWriter(filename))
							{
								for (int index = 0; index < rpa.Count; index++)
								{
									bool newline = false;

									newline |= ChangeValue(ref lastFreq, rpa[index].Frequency);
									newline |= ChangeValue(ref lastMu, rpa[index].ChemicalPotential);
									newline |= ChangeValue(ref lastq, rpa[index].Qindex);

									if (newline)
									{
										w.WriteLine();
										w.WriteLine("# Frequency: {0}", rpa[index].Frequency);
										w.WriteLine("# Chemical Potential: {0}", rpa[index].ChemicalPotential);
										w.WriteLine("# Q: {0}", QMesh[rpa[index].Qindex]);
										w.WriteLine("#");
										w.WriteLine("# Temperature\tRe(Chi)\tIm(Chi)");
									}

									Complex val = g(rpa[index])[i, j];

									chisum[index] += val;
									chimag[index] += val.Magnitude;
									chimagsqr[index] += val.MagnitudeSquared;

									w.WriteLine("\t{0:0.000000}\t{1:0.0000000}\t{2:0.0000000}",
										rpa[index].Temperature, val.RealPart, val.ImagPart);
								}
							}
						}
					}
				}
			}
		}

		// Writes, per orbital quadruple and per (frequency, temperature, mu)
		// combination, the susceptibility over the q-plane (real, imaginary and
		// magnitude files) plus companion gnuplot scripts.
		private void SaveByQPlane(TightBinding tb, List<KPoint> QMesh, List<RpaParams> rpa, MatrixGetter g, string name)
		{
			rpa.Sort(RpaParams.QIndexComparison);

			Complex[] chisum = new Complex[rpa.Count];
			double[] chimag = new double[rpa.Count];
			double[] chimagsqr = new double[rpa.Count];

			for (int l1 = 0; l1 < tb.Orbitals.Count; l1++)
			{
				for (int l2 = 0; l2 < tb.Orbitals.Count; l2++)
				{
					for (int l3 = 0; l3 < tb.Orbitals.Count; l3++)
					{
						for (int l4 = 0; l4 < tb.Orbitals.Count; l4++)
						{
							double lastFreq = double.MinValue;
							double lastMu = double.MinValue;
							double lastq = int.MinValue;

							int baseIndex = 0;

							for (int ti = 0; ti < tb.TemperatureMesh.Length; ti++)
							{
								for (int ui = 0; ui < tb.MuMesh.Length; ui++)
								{
									for (int wi = 0; wi < tb.FrequencyMesh.Length; wi++)
									{
										string filename_re = string.Format("{0}.re.{1}{2}{3}{4}.w{5}.T{6}.u{7}.qm", name, l1, l2, l3, l4, wi, ti, ui);
										string filename_im = string.Format("{0}.im.{1}{2}{3}{4}.w{5}.T{6}.u{7}.qm", name, l1, l2, l3, l4, wi, ti, ui);
										string filename_mag = string.Format("{0}.mag.{1}{2}{3}{4}.w{5}.T{6}.u{7}.qm",
name, l1, l2, l3, l4, wi, ti, ui);

										Complex maxvalue = new Complex(double.MinValue, double.MinValue);
										Complex minvalue = new Complex(double.MaxValue, double.MaxValue);

										using (StreamWriter w_re = new StreamWriter(filename_re))
										using (StreamWriter w_im = new StreamWriter(filename_im))
										using (StreamWriter w_mag = new StreamWriter(filename_mag))
										{
											double last_t;
											double last_s;

											tb.QPlane.GetPlaneST(tb.QPlane.AllKpts[0], out last_s, out last_t);

											for (int qi = 0; qi < tb.QPlane.AllKpts.Count; qi++)
											{
												Vector3 qpt = tb.QPlane.AllKpts[qi];
												List<int> orbitalMap;

												double s, t;
												tb.QPlane.GetPlaneST(tb.QPlane.AllKpts[qi], out s, out t);

												// Blank line between plane rows so gnuplot draws a surface.
												if (Math.Abs(t - last_t) > 1e-6)
												{
													w_re.WriteLine();
													w_im.WriteLine();
													w_mag.WriteLine();
												}

												// Map the q-point to its irreducible representative and
												// transform the orbital indices by the same symmetry.
												int kindex = tb.QPlane.IrreducibleIndex(qpt, tb.Lattice, tb.Symmetries, out orbitalMap);
												int index = GetRpaIndex(rpa, kindex, tb.TemperatureMesh[ti], tb.FrequencyMesh[wi], tb.MuMesh[ui]);

												int newL1 = tb.Symmetries.TransformOrbital(orbitalMap, l1);
												int newL2 = tb.Symmetries.TransformOrbital(orbitalMap, l2);
												int newL3 = tb.Symmetries.TransformOrbital(orbitalMap, l3);
												int newL4 = tb.Symmetries.TransformOrbital(orbitalMap, l4);

												int newii = GetIndex(tb, newL1, newL2);
												int newjj = GetIndex(tb, newL3, newL4);

												Complex val = g(rpa[index])[newii, newjj];

												w_re.WriteLine(" {0} {1} {2:0.0000000}", s, t, val.RealPart);
												w_im.WriteLine(" {0} {1} {2:0.0000000}", s, t, val.ImagPart);
												w_mag.WriteLine(" {0} {1} {2:0.0000000}", s, t, val.Magnitude);

												if (val.RealPart > maxvalue.RealPart) maxvalue.RealPart = val.RealPart;
												if (val.ImagPart > maxvalue.ImagPart) maxvalue.ImagPart = val.ImagPart;
												if (val.RealPart < minvalue.RealPart) minvalue.RealPart = val.RealPart;
												if (val.ImagPart < minvalue.ImagPart) minvalue.ImagPart = val.ImagPart;

												last_t = t;
												last_s = s;
											}
										}

										// Emit one gnuplot driver script per data file.
										for (int i = 0; i < 3; i++)
										{
											string filename;

											switch (i)
											{
												case 0: filename = filename_re; break;
												case 1: filename = filename_im; break;
												case 2: filename = filename_mag; break;
												default: continue;
											}

											string gpfilename = "gnuplot." + filename;

											//minvalue.RealPart = Math.Floor(minvalue.RealPart);
											//maxvalue.RealPart = Math.Ceiling(maxvalue.RealPart);

											using (StreamWriter w = new StreamWriter(gpfilename))
											{
												w.WriteLine("#!/usr/bin/gnuplot");
												//w.WriteLine("set pm3d at bs flush center ftriangles scansbackward interpolate 1,1");
												w.WriteLine("set pm3d map flush center ftriangles scansbackward interpolate 5,5");
												w.WriteLine("set palette rgbformula 23,9,-36");
												//w.WriteLine("set border 895");
												w.WriteLine("set key off");
												//w.WriteLine("set zrange [{0}:{1}]", minvalue.RealPart, maxvalue.RealPart);
												// label z = minvalue - 0.5 * (maxvalue - minvalue)
												// set label 1 "G" at 0,0,1 font "Symbol" center front
												w.WriteLine("splot '{0}' with pm3d", filename);
											}
										}
									}

									baseIndex += QMesh.Count;
								}
							}
						}
					}
				}
			}
		}

		// Finds the parameter-set index matching (qindex, T, omega, mu) exactly
		// (within 1e-10 for the floating-point fields); throws when absent.
		private int GetRpaIndex(List<RpaParams> rpa, int qindex, double temperature, double freq, double mu)
		{
			for (int i = 0; i < rpa.Count; i++)
			{
				if (rpa[i].Qindex != qindex) continue;
				if (Math.Abs(rpa[i].Temperature - temperature) > 1e-10) continue;
				if (Math.Abs(rpa[i].Frequency - freq) > 1e-10) continue;
				if (Math.Abs(rpa[i].ChemicalPotential - mu) > 1e-10) continue;

				return i;
			}

			throw new Exception("Could not find rpa index!");
		}

		// Updates value to newValue; returns true when it actually changed.
		private static bool ChangeValue(ref double value, double newValue)
		{
			if (value != newValue)
			{
				value = newValue;
				return true;
			}
			else
				return false;
		}

		// Saves the selected susceptibility both by temperature (when there is
		// more than one temperature) and by q-plane.
		private void SaveMatricesQPlane(TightBinding tb, List<KPoint> QMesh, List<RpaParams> chi, MatrixGetter g, string name)
		{
			if (tb.TemperatureMesh.Length > 1)
			{
				Directory.CreateDirectory("temperature");
				SaveByTemperature(tb, QMesh, chi, g, name);
			}

			SaveByQPlane(tb, QMesh, chi, g, name);
		}

		// Logs the eigenvalue spectrum of S with multiplicities (exact
		// Complex equality groups repeated eigenvalues).
		private void Analyze(string name, Matrix S)
		{
			Output.WriteLine("Analysis of matrix {0}", name);

			Matrix evals, evecs;

			S.EigenValsVecs(out evals, out evecs);

			Complex lastEigenvalue = evals[0, 0];
			int multiplicity = 1;

			Output.WriteLine("Eigenvalues:");

			for (int i = 1; i < evals.Rows; i++)
			{
				Complex c = evals[i, 0];

				if (c == lastEigenvalue)
				{
					multiplicity++;
					continue;
				}

				Output.WriteLine("{0} multiplicity: {1}", lastEigenvalue, multiplicity);

				lastEigenvalue = c;
				multiplicity = 1;
			}

			Output.WriteLine("{0} multiplicity: {1}", lastEigenvalue, multiplicity);
		}

		// Builds the spin (S) and charge (C) interaction matrices for every
		// parameter set, summing on-site and q-dependent off-site contributions.
		private void CalcSpinChargeMatrices(TightBinding tb, List<RpaParams> rpa, out Matrix[] S, out Matrix[] C)
		{
			S = new Matrix[rpa.Count];
			C = new Matrix[rpa.Count];

			for (int rpa_index = 0; rpa_index < rpa.Count; rpa_index++)
			{
				Vector3 q = rpa[rpa_index].QptValue;

				int size = tb.Orbitals.Count * tb.Orbitals.Count;

				Matrix _S = new Matrix(size, size);
				Matrix _C = new Matrix(size, size);

				foreach (var interaction in tb.Interactions)
				{
					double structureFactor = interaction.StructureFactor(q);

					if (interaction.OnSite)
						CalcOnSiteInteraction(tb, _S, _C, interaction);
					else
						CalcOffSiteInteraction(tb, _S, _C, interaction, structureFactor);
				}

				System.Diagnostics.Debug.Assert(_S.IsSymmetric);
				System.Diagnostics.Debug.Assert(_C.IsSymmetric);

				S[rpa_index] = _S;
				C[rpa_index] = _C;
			}
		}

		// On-site (Kanamori) contributions: Hubbard U, interorbital U',
		// Hund exchange J and pair hopping J', routed by the orbital-index pattern.
		private void CalcOnSiteInteraction(TightBinding tb, Matrix _S, Matrix _C, InteractionPair interaction)
		{
			foreach (int l1 in interaction.OrbitalsLeft)
			{
				foreach (int l2 in interaction.OrbitalsLeft)
				{
					foreach (int l3 in interaction.OrbitalsRight)
					{
						foreach (int l4 in interaction.OrbitalsRight)
						{
							int i = GetIndex(tb, l1, l2);
							int j = GetIndex(tb, l3, l4);

							if (l1 == l2 && l2 == l3 && l3 == l4)
							{
								_S[i, j] += interaction.HubbardU;
								_C[i, j] += interaction.HubbardU;
							}
							else if (l1 == l4 && l4 != l2 && l2 == l3)
							{
								_S[i, j] += interaction.InterorbitalU;
								_C[i, j] += (-interaction.InterorbitalU + interaction.Exchange);
							}
							else if (l1 == l2 && l2 != l3 && l3 == l4)
							{
								_S[i, j] += interaction.Exchange;
								_C[i, j] += 2 * interaction.InterorbitalU - interaction.Exchange;
							}
							else if (l1 == l3 && l3 != l2 && l2 == l4)
							{
								_S[i, j] += interaction.PairHopping;
								_C[i, j] += interaction.PairHopping;
							}
						}
					}
				}
			}
		}

		// Off-site contributions scaled by the q-dependent structure factor;
		// added symmetrically at (i,j) and (j,i).
		private void CalcOffSiteInteraction(TightBinding tb, Matrix _S, Matrix _C, InteractionPair interaction, double structureFactor)
		{
			foreach (int l1 in interaction.OrbitalsLeft)
			{
				foreach (int l2 in interaction.OrbitalsLeft)
				{
					foreach (int l3 in interaction.OrbitalsRight)
					{
						foreach (int l4 in interaction.OrbitalsRight)
						{
							int i = GetIndex(tb, l1, l2);
							int j = GetIndex(tb, l3, l4);

							double Sval = 0, Cval = 0;

							if (l1 == l2 && l2 == l3 && l3 == l4)
							{
								Sval = -0.5 * interaction.Exchange * structureFactor;
								Cval = (2 * interaction.InterorbitalU - 0.5 * interaction.Exchange) * structureFactor;
							}
							else if (l1 == l2 && l2 != l3 && l3 == l4)
							{
								Sval = -0.25 * interaction.Exchange * structureFactor;
								Cval = (2 * interaction.InterorbitalU - 0.5 * interaction.Exchange) * structureFactor;
							}

							_S[i, j] += Sval;
							_S[j, i] += Sval;
							_C[i, j] += Cval;
							_C[j, i] += Cval;
						}
					}
				}
			}
		}

		// Bare susceptibility X0 for one (frequency, q) pair; uses the model's
		// point-group symmetries to avoid recomputing equivalent index quadruples.
		// NOTE: this method continues beyond the end of this file chunk.
		Matrix CalcX0(TightBinding tb, double freq, Vector3 q)
		{
			int orbitalCount = tb.Orbitals.Count;
			int size = orbitalCount * orbitalCount;
			Matrix x = new Matrix(size, size);

			// Small imaginary part regularizing the energy denominator.
			Complex denom_factor = new Complex(0, 1e-4);

			//StreamWriter w = new StreamWriter(string.Format("qcont.{0}", q.ToString("0.000")));
			//bool writeThis = false;

			for (int l1 = 0; l1 < orbitalCount; l1++)
			{
				for (int l4 = 0; l4 < orbitalCount; l4++)
				{
					for (int l3 = l1; l3 < orbitalCount; l3++)
					{
						for (int l2 = l4; l2 < orbitalCount; l2++)
						{
							int i = GetIndex(tb, l1, l2);
							int j = GetIndex(tb, l3, l4);
							bool foundSymmetry = false;

							//if (l1 == 0 && l2 == 0 && l3 == 0 && l4 == 0)
							//    writeThis = true;
							//else
							//writeThis = false;

							//if (writeThis)
							//    w.WriteLine("{0}{1}{2}{3}", l1, l2, l3, l4);

							for (int s = 0; s < tb.Symmetries.Count; s++)
							{
								Symmetry sym = tb.Symmetries[s];

								if (sym.OrbitalTransform == null || sym.OrbitalTransform.Count == 0)
									continue;

								int newL1 = sym.OrbitalTransform[l1];
								int newL2 = sym.OrbitalTransform[l2];
								int newL3 = sym.OrbitalTransform[l3];
								int newL4 = sym.OrbitalTransform[l4];

								int newI = GetIndex(tb, newL1, newL2);
								int newJ = GetIndex(tb, newL3, newL4);

								if (newI == i && newJ == j)
									continue;

								foundSymmetry = true;

								if (newL1 > l1) foundSymmetry = false;
								if
(newL2 > l2) foundSymmetry = false; if (newL3 > l3) foundSymmetry = false; if (newL4 > l4) foundSymmetry = false; if (foundSymmetry) { x[i, j] = x[newI, newJ]; x[j, i] = x[i, j].Conjugate(); break; } } if (foundSymmetry) continue; Complex total = 0; for (int allkindex = 0; allkindex < tb.KMesh.AllKpts.Count; allkindex++) { Complex val = 0; Vector3 k = tb.KMesh.AllKpts[allkindex]; Vector3 kq = k + q; //List<int> kOrbitalMap; //List<int> kqOrbitalMap; //int kindex = tb.KMesh.IrreducibleIndex(k, tb.Lattice, tb.Symmetries, out kOrbitalMap); //int kqindex = tb.KMesh.IrreducibleIndex(kq, tb.Lattice, tb.Symmetries, out kqOrbitalMap); int kindex = tb.KMesh.AllKindex(k, tb.Lattice); int kqindex = tb.KMesh.AllKindex(kq, tb.Lattice); System.Diagnostics.Debug.Assert(kindex == allkindex); //int newL1 = TransformOrbital(kqOrbitalMap, l1); //int newL2 = TransformOrbital(kOrbitalMap, l2); //int newL3 = TransformOrbital(kqOrbitalMap, l3); //int newL4 = TransformOrbital(kOrbitalMap, l4); for (int n1 = 0; n1 < orbitalCount; n1++) { Wavefunction wfk = Bands(tb, kindex, n1); double e1 = wfk.Energy; double f1 = wfk.FermiFunction; for (int n2 = 0; n2 < orbitalCount; n2++) { Wavefunction wfq = Bands(tb, kqindex, n2); double e2 = wfq.Energy; double f2 = wfq.FermiFunction; Complex coeff = wfq.Coeffs[l1] * wfq.Coeffs[l4].Conjugate() * wfk.Coeffs[l3] * wfk.Coeffs[l2].Conjugate(); if (coeff == 0) continue; if (f1 < 1e-15 && f2 < 1e-15) continue; Complex denom_p = (e2 - e1 + freq + denom_factor); //Complex denom_n = (e2 - e1 - freq - denom_factor); //Complex lindhard = (f1 - f2) * (1.0 / denom_p + 1.0 / denom_n); Complex lindhard = (f1 - f2) * (1.0 / denom_p); Complex contrib = coeff * lindhard; if (Math.Abs(f1 - f2) < 1e-11 && freq == 0.0) { contrib = coeff * f1 * (1 - f1) * Beta; } //w.Write("{0} {1} {2} {3} ", kindex, kqindex, n1, n2); //w.WriteLine("{0} {1} {2} {3} {4}", coeff, e1, e2, f1, f2); if (double.IsNaN(contrib.RealPart) || double.IsNaN(contrib.ImagPart)) { throw new 
Exception("Found NaN when evaluating X0"); } val += contrib; } } //w.WriteLine("{0} {1} total {2} + {3}i", kindex, kqindex, // Math.Round(val.RealPart, 4), Math.Round(val.ImagPart, 4)); //Output.WriteLine(tb.KMesh.AllKpts[kindex].Weight.ToString()); val *= tb.KMesh.AllKpts[kindex].Weight; total += val; //if (writeThis) // w.WriteLine("{0} {1} {2}", allkindex, total, val); } x[i, j] = total; x[j, i] = total.Conjugate(); //if (writeThis) //{ // w.WriteLine("total for {0}{1}{2}{3}: {4}", l1, l2, l3, l4, total); // w.WriteLine("---------------------"); //} } } } } //w.Close(); return x; } int GetIndex(TightBinding tb, int l1, int l2) { // if this changes, be sure to correct the way // x[i,j] and x[j,i] are set in CalcX0. return l1 * tb.Orbitals.Count + l2; } } }
//-----------------------------------------------------------------------
// <copyright file="FanOut.cs" company="Akka.NET Project">
//     Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
//     Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------

using System;
using System.Collections.Immutable;
using System.Linq;
using Akka.Actor;
using Akka.Event;
using Akka.Pattern;
using Reactive.Streams;

namespace Akka.Streams.Implementation
{
    /// <summary>
    /// Manages the set of downstream output ports of a fan-out stage: tracks
    /// per-output demand, cancellation, completion and error state, and routes
    /// the substream protocol messages to the individual outputs.
    /// </summary>
    /// <typeparam name="T">Type of the elements flowing through the outputs.</typeparam>
    public class OutputBunch<T>
    {
        #region internal classes

        private sealed class FanoutOutputs : SimpleOutputs
        {
            private readonly int _id;

            public FanoutOutputs(int id, IActorRef actor, IPump pump) : base(actor, pump)
            {
                _id = id;
            }

            public new ISubscription CreateSubscription() => new FanOut.SubstreamSubscription(Actor, _id);
        }

        #endregion

        private readonly int _outputCount;
        private bool _bunchCancelled;
        private readonly FanoutOutputs[] _outputs;
        // Per-output flags; the _markedXxx counters cache aggregate counts over
        // the currently marked subset so the transfer states below are O(1).
        private readonly bool[] _marked;
        private int _markedCount;
        private readonly bool[] _pending;
        private int _markedPending;
        private readonly bool[] _cancelled;
        private int _markedCanceled;
        private readonly bool[] _completed;
        private readonly bool[] _errored;
        private bool _unmarkCancelled = true;
        private int _preferredId;

        /// <summary>
        /// Creates the bunch with <paramref name="outputCount"/> outputs, all
        /// initially unmarked, and wires up the substream message handling.
        /// </summary>
        /// <param name="outputCount">Number of downstream outputs.</param>
        /// <param name="impl">The actor implementing the fan-out stage.</param>
        /// <param name="pump">The pump driving the stage.</param>
        public OutputBunch(int outputCount, IActorRef impl, IPump pump)
        {
            _outputCount = outputCount;
            _outputs = new FanoutOutputs[outputCount];
            for (var i = 0; i < outputCount; i++)
                _outputs[i] = new FanoutOutputs(i, impl, pump);

            _marked = new bool[outputCount];
            _pending = new bool[outputCount];
            _cancelled = new bool[outputCount];
            _completed = new bool[outputCount];
            _errored = new bool[outputCount];

            AllOfMarkedOutputs = new LambdaTransferState(
                isCompleted: () => _markedCanceled > 0 || _markedCount == 0,
                isReady: () => _markedPending == _markedCount);

            AnyOfMarkedOutputs = new LambdaTransferState(
                isCompleted: () => _markedCanceled == _markedCount,
                isReady: () => _markedPending > 0);

            // FIXME: Eliminate re-wraps
            SubReceive = new SubReceive(message => message.Match()
                .With<FanOut.ExposedPublishers<T>>(exposed =>
                {
                    var publishers = exposed.Publishers.GetEnumerator();
                    var outputs = _outputs.AsEnumerable().GetEnumerator();

                    while (publishers.MoveNext() && outputs.MoveNext())
                        outputs.Current.SubReceive.CurrentReceive(new ExposedPublisher(publishers.Current));
                })
                .With<FanOut.SubstreamRequestMore>(more =>
                {
                    if (more.Demand < 1)
                        // According to Reactive Streams Spec 3.9, with non-positive demand must yield onError
                        Error(more.Id, ReactiveStreamsCompliance.NumberOfElementsInRequestMustBePositiveException);
                    else
                    {
                        if (_marked[more.Id] && !_pending[more.Id])
                            _markedPending += 1;
                        _pending[more.Id] = true;
                        _outputs[more.Id].SubReceive.CurrentReceive(new RequestMore(null, more.Demand));
                    }
                })
                .With<FanOut.SubstreamCancel>(cancel =>
                {
                    if (_unmarkCancelled)
                        UnmarkOutput(cancel.Id);

                    if (_marked[cancel.Id] && !_cancelled[cancel.Id])
                        _markedCanceled += 1;

                    _cancelled[cancel.Id] = true;
                    OnCancel(cancel.Id);
                    _outputs[cancel.Id].SubReceive.CurrentReceive(new Cancel(null));
                })
                .With<FanOut.SubstreamSubscribePending>(pending =>
                    _outputs[pending.Id].SubReceive.CurrentReceive(SubscribePending.Instance))
                .WasHandled);
        }

        /// <summary>
        /// Will only transfer an element when all marked outputs
        /// have demand, and will complete as soon as any of the marked
        /// outputs have canceled.
        /// </summary>
        public readonly TransferState AllOfMarkedOutputs;

        /// <summary>
        /// Will transfer an element when any of the marked outputs
        /// have demand, and will complete when all of the marked
        /// outputs have canceled.
        /// </summary>
        public readonly TransferState AnyOfMarkedOutputs;

        /// <summary>
        /// Receive handler for the substream protocol messages of this bunch.
        /// </summary>
        public readonly SubReceive SubReceive;

        /// <summary>Whether the given output currently has pending demand.</summary>
        /// <param name="output">Index of the output.</param>
        /// <returns>True when the output has demand.</returns>
        public bool IsPending(int output) => _pending[output];

        /// <summary>Whether the given output has been completed.</summary>
        /// <param name="output">Index of the output.</param>
        /// <returns>True when the output was completed.</returns>
        public bool IsCompleted(int output) => _completed[output];

        /// <summary>Whether the given output has been cancelled by its subscriber.</summary>
        /// <param name="output">Index of the output.</param>
        /// <returns>True when the output was cancelled.</returns>
        public bool IsCancelled(int output) => _cancelled[output];

        /// <summary>Whether the given output has been failed.</summary>
        /// <param name="output">Index of the output.</param>
        /// <returns>True when the output was errored.</returns>
        public bool IsErrored(int output) => _errored[output];

        /// <summary>
        /// Completes all outputs (once; subsequent calls are no-ops).
        /// </summary>
        public void Complete()
        {
            if (!_bunchCancelled)
            {
                _bunchCancelled = true;
                for (var i = 0; i < _outputs.Length; i++)
                    Complete(i);
            }
        }

        /// <summary>
        /// Completes a single output unless it is already completed, errored
        /// or cancelled, and removes it from the marked set.
        /// </summary>
        /// <param name="output">Index of the output.</param>
        public void Complete(int output)
        {
            if (!_completed[output] && !_errored[output] && !_cancelled[output])
            {
                _outputs[output].Complete();
                _completed[output] = true;
                UnmarkOutput(output);
            }
        }

        /// <summary>
        /// Fails all outputs with <paramref name="e"/> (once; subsequent calls
        /// are no-ops).
        /// </summary>
        /// <param name="e">The failure cause.</param>
        public void Cancel(Exception e)
        {
            if (!_bunchCancelled)
            {
                _bunchCancelled = true;
                for (var i = 0; i < _outputs.Length; i++)
                    Error(i, e);
            }
        }

        /// <summary>
        /// Fails a single output unless it is already completed, errored or
        /// cancelled, and removes it from the marked set.
        /// </summary>
        /// <param name="output">Index of the output.</param>
        /// <param name="e">The failure cause.</param>
        public void Error(int output, Exception e)
        {
            if (!_errored[output] && !_cancelled[output] && !_completed[output])
            {
                _outputs[output].Error(e);
                _errored[output] = true;
                UnmarkOutput(output);
            }
        }

        /// <summary>
        /// Adds an output to the marked set, keeping the aggregate counters in sync.
        /// </summary>
        /// <param name="output">Index of the output.</param>
        public void MarkOutput(int output)
        {
            if (!_marked[output])
            {
                if (_cancelled[output])
                    _markedCanceled += 1;
                if (_pending[output])
                    _markedPending += 1;

                _marked[output] = true;
                _markedCount += 1;
            }
        }

        /// <summary>
        /// Removes an output from the marked set, keeping the aggregate counters in sync.
        /// </summary>
        /// <param name="output">Index of the output.</param>
        public void UnmarkOutput(int output)
        {
            if (_marked[output])
            {
                if (_cancelled[output])
                    _markedCanceled -= 1;
                if (_pending[output])
                    _markedPending -= 1;

                _marked[output] = false;
                _markedCount -= 1;
            }
        }

        /// <summary>
        /// Marks every output.
        /// </summary>
        public void MarkAllOutputs()
        {
            for (var i = 0; i < _outputCount; i++)
                MarkOutput(i);
        }

        /// <summary>
        /// Unmarks every output.
        /// </summary>
        public void UnmarkAllOutputs()
        {
            for (var i = 0; i < _outputCount; i++)
                UnmarkOutput(i);
        }

        /// <summary>
        /// Controls whether a cancelled output is automatically removed from
        /// the marked set when its cancellation arrives.
        /// </summary>
        /// <param name="enabled">True to unmark cancelled outputs automatically.</param>
        public void UnmarkCancelledOutputs(bool enabled) => _unmarkCancelled = enabled;

        /// <summary>
        /// Finds the next output to enqueue to, scanning round-robin from the
        /// preferred output for one that is both marked and has pending demand.
        /// </summary>
        /// <exception cref="ArgumentException">
        /// Thrown when the scan wraps around without finding any marked output
        /// with demand (i.e. enqueueing was attempted without waiting for demand).
        /// </exception>
        /// <returns>The id of the selected output.</returns>
        public int IdToEnqueue()
        {
            var id = _preferredId;

            while (!(_marked[id] && _pending[id]))
            {
                id += 1;
                if (id == _outputCount)
                    id = 0;
                // Only give up once the scan has wrapped all the way back to the
                // starting output — every output has then been probed once.
                // (A throw on `id != _preferredId` would abort on the very first
                // probe and make the round-robin scan unreachable.)
                if (id == _preferredId)
                    throw new ArgumentException("Tried to equeue without waiting for any demand");
            }

            return id;
        }

        /// <summary>
        /// Enqueues an element to the given output and clears its pending flag
        /// when its demand is exhausted.
        /// </summary>
        /// <param name="id">Index of the output.</param>
        /// <param name="element">The element to enqueue.</param>
        public void Enqueue(int id, T element)
        {
            var output = _outputs[id];
            output.EnqueueOutputElement(element);

            if (!output.IsDemandAvailable)
            {
                if (_marked[id])
                    _markedPending -= 1;
                _pending[id] = false;
            }
        }

        /// <summary>
        /// Enqueues an element to every marked output.
        /// </summary>
        /// <param name="element">The element to enqueue.</param>
        public void EnqueueMarked(T element)
        {
            for (var id = 0; id < _outputCount; id++)
                if (_marked[id])
                    Enqueue(id, element);
        }

        /// <summary>
        /// Selects the next output with demand and advances the round-robin
        /// preference past it.
        /// </summary>
        /// <returns>The id of the selected output.</returns>
        public int IdToEnqueueAndYield()
        {
            var id = IdToEnqueue();
            _preferredId = id + 1;
            if (_preferredId == _outputCount)
                _preferredId = 0;
            return id;
        }

        /// <summary>
        /// Enqueues to the next output with demand, rotating the preference.
        /// </summary>
        /// <param name="element">The element to enqueue.</param>
        public void EnqueueAndYield(T element) => Enqueue(IdToEnqueueAndYield(), element);

        /// <summary>
        /// Enqueues to the next output with demand and then sets an explicit
        /// preferred output for the following enqueue.
        /// </summary>
        /// <param name="element">The element to enqueue.</param>
        /// <param name="preferred">Output to prefer on the next enqueue.</param>
        public void EnqueueAndPrefer(T element, int preferred)
        {
            var id = IdToEnqueue();
            _preferredId = preferred;
            Enqueue(id, element);
        }

        /// <summary>
        /// Hook invoked when an output is cancelled; no-op by default.
        /// </summary>
        /// <param name="output">Index of the cancelled output.</param>
        public void OnCancel(int output) { }

        /// <summary>
        /// Transfer state that is ready while the given output has demand and
        /// completed once it is cancelled, completed or errored.
        /// </summary>
        /// <param name="id">Index of the output.</param>
        /// <returns>The transfer state.</returns>
        public TransferState DemandAvailableFor(int id) => new LambdaTransferState(
            isReady: () => _pending[id],
            isCompleted: () => _cancelled[id] || _completed[id] || _errored[id]);

        /// <summary>
        /// Transfer state that is ready while the given output has demand or
        /// has been cancelled; never completes.
        /// </summary>
        /// <param name="id">Index of the output.</param>
        /// <returns>The transfer state.</returns>
        public TransferState DemandOrCancelAvailableFor(int id)
            => new LambdaTransferState(isReady: () => _pending[id] || _cancelled[id], isCompleted: () => false);
    }

    /// <summary>
    /// INTERNAL API
    ///
    /// Message types of the fan-out substream protocol.
    /// </summary>
    public static class FanOut
    {
        /// <summary>
        /// Demand signal from the subscriber of one substream output.
        /// </summary>
        [Serializable]
        public struct SubstreamRequestMore : INoSerializationVerificationNeeded, IDeadLetterSuppression
        {
            /// <summary>Id of the requesting output.</summary>
            public readonly int Id;
            /// <summary>Number of elements requested.</summary>
            public readonly long Demand;

            /// <summary>
            /// Creates the demand message.
            /// </summary>
            /// <param name="id">Id of the requesting output.</param>
            /// <param name="demand">Number of elements requested.</param>
            public SubstreamRequestMore(int id, long demand)
            {
                Id = id;
                Demand = demand;
            }
        }

        /// <summary>
        /// Cancellation signal from the subscriber of one substream output.
        /// </summary>
        [Serializable]
        public struct SubstreamCancel : INoSerializationVerificationNeeded, IDeadLetterSuppression
        {
            /// <summary>Id of the cancelling output.</summary>
            public readonly int Id;

            /// <summary>
            /// Creates the cancellation message.
            /// </summary>
            /// <param name="id">Id of the cancelling output.</param>
            public SubstreamCancel(int id)
            {
                Id = id;
            }
        }

        /// <summary>
        /// Notification that subscriptions are pending on one substream output.
        /// </summary>
        [Serializable]
        public struct SubstreamSubscribePending : INoSerializationVerificationNeeded, IDeadLetterSuppression
        {
            /// <summary>Id of the output with pending subscriptions.</summary>
            public readonly int Id;

            /// <summary>
            /// Creates the notification message.
            /// </summary>
            /// <param name="id">Id of the output with pending subscriptions.</param>
            public SubstreamSubscribePending(int id)
            {
                Id = id;
            }
        }

        /// <summary>
        /// <see cref="ISubscription"/> implementation that forwards the
        /// request/cancel calls of one substream output to the fan-out actor.
        /// </summary>
        public class SubstreamSubscription : ISubscription
        {
            private readonly IActorRef _parent;
            private readonly int _id;

            /// <summary>
            /// Creates the subscription.
            /// </summary>
            /// <param name="parent">The fan-out actor.</param>
            /// <param name="id">Id of the output this subscription belongs to.</param>
            public SubstreamSubscription(IActorRef parent, int id)
            {
                _parent = parent;
                _id = id;
            }

            /// <summary>
            /// Forwards demand to the fan-out actor.
            /// </summary>
            /// <param name="elements">Number of elements requested.</param>
            public void Request(long elements) => _parent.Tell(new SubstreamRequestMore(_id, elements));

            /// <summary>
            /// Forwards cancellation to the fan-out actor.
            /// </summary>
            public void Cancel() => _parent.Tell(new SubstreamCancel(_id));

            /// <summary>
            /// Returns a short identifying string for this subscription.
            /// </summary>
            /// <returns>The identifying string.</returns>
            public override string ToString() => "SubstreamSubscription" + GetHashCode();
        }

        /// <summary>
        /// Carries the publishers exposed for the substream outputs, in output order.
        /// </summary>
        /// <typeparam name="T">Type of the published elements.</typeparam>
        [Serializable]
        public struct ExposedPublishers<T> : INoSerializationVerificationNeeded, IDeadLetterSuppression
        {
            /// <summary>The exposed publishers, one per output.</summary>
            public readonly ImmutableList<ActorPublisher<T>> Publishers;

            /// <summary>
            /// Creates the message.
            /// </summary>
            /// <param name="publishers">The exposed publishers, one per output.</param>
            public ExposedPublishers(ImmutableList<ActorPublisher<T>> publishers)
            {
                Publishers = publishers;
            }
        }
    }

    /// <summary>
    /// INTERNAL API
    ///
    /// Base actor for fan-out stages: one primary input, multiple outputs
    /// managed through an <see cref="OutputBunch{T}"/>, driven as a pump.
    /// </summary>
    /// <typeparam name="T">Type of the elements flowing through the stage.</typeparam>
    public abstract class FanOut<T> : ActorBase, IPump
    {
        #region internal classes

        private sealed class AnonymousBatchingInputBuffer : BatchingInputBuffer
        {
            private readonly FanOut<T> _pump;

            public AnonymousBatchingInputBuffer(int count, FanOut<T> pump) : base(count, pump)
            {
                _pump = pump;
            }

            protected override void OnError(Exception e) => _pump.Fail(e);
        }

        #endregion

        private readonly ActorMaterializerSettings _settings;

        /// <summary>
        /// The bunch of downstream outputs of this stage.
        /// </summary>
        protected readonly OutputBunch<T> OutputBunch;

        /// <summary>
        /// The buffered primary input of this stage.
        /// </summary>
        protected readonly BatchingInputBuffer PrimaryInputs;

        /// <summary>
        /// Creates the fan-out actor with the given number of outputs.
        /// </summary>
        /// <param name="settings">Materializer settings (buffer size, logging).</param>
        /// <param name="outputCount">Number of downstream outputs.</param>
        protected FanOut(ActorMaterializerSettings settings, int outputCount)
        {
            _log = Context.GetLogger();
            _settings = settings;
            OutputBunch = new OutputBunch<T>(outputCount, Self, this);
            PrimaryInputs = new AnonymousBatchingInputBuffer(settings.MaxInputBufferSize, this);
            this.Init();
        }

        #region Actor implementation

        private ILoggingAdapter _log;

        /// <summary>
        /// Lazily initialized logging adapter for this actor.
        /// </summary>
        protected ILoggingAdapter Log => _log ?? (_log = Context.GetLogger());

        /// <summary>
        /// Cancels the input and fails the outputs when the actor stops.
        /// </summary>
        protected override void PostStop()
        {
            PrimaryInputs.Cancel();
            OutputBunch.Cancel(new AbruptTerminationException(Self));
        }

        /// <summary>
        /// Fan-out actors must not be restarted; always throws.
        /// </summary>
        /// <param name="reason">The restart cause.</param>
        /// <exception cref="IllegalStateException">Always thrown.</exception>
        protected override void PostRestart(Exception reason)
        {
            base.PostRestart(reason);
            throw new IllegalStateException("This actor cannot be restarted");
        }

        /// <summary>
        /// Fails the stage: cancels the input, errors all outputs and pumps
        /// once more to settle the state.
        /// </summary>
        /// <param name="e">The failure cause.</param>
        protected void Fail(Exception e)
        {
            if (_settings.IsDebugLogging)
                Log.Debug($"fail due to: {e.Message}");

            PrimaryInputs.Cancel();
            OutputBunch.Cancel(e);
            Pump();
        }

        /// <summary>
        /// Dispatches messages first to the input side, then to the outputs.
        /// </summary>
        /// <param name="message">The received message.</param>
        /// <returns>True when the message was handled.</returns>
        protected override bool Receive(object message)
        {
            return PrimaryInputs.SubReceive.CurrentReceive(message) ||
                   OutputBunch.SubReceive.CurrentReceive(message);
        }

        #endregion

        #region Pump implementation

        /// <summary>
        /// Current transfer state of the pump.
        /// </summary>
        public TransferState TransferState { get; set; }

        /// <summary>
        /// Action executed when the current transfer state is ready.
        /// </summary>
        public Action CurrentAction { get; set; }

        /// <summary>
        /// Whether the pump has finished.
        /// </summary>
        public bool IsPumpFinished => this.IsPumpFinished();

        /// <summary>
        /// Enters the initial phase after the given number of upstream subscriptions.
        /// </summary>
        /// <param name="waitForUpstream">Number of upstream subscriptions to await.</param>
        /// <param name="andThen">The phase to enter afterwards.</param>
        public void InitialPhase(int waitForUpstream, TransferPhase andThen)
            => Pumps.InitialPhase(this, waitForUpstream, andThen);

        /// <summary>
        /// Waits for the given number of upstream subscriptions.
        /// </summary>
        /// <param name="waitForUpstream">Number of upstream subscriptions to await.</param>
        public void WaitForUpstream(int waitForUpstream) => Pumps.WaitForUpstream(this, waitForUpstream);

        /// <summary>
        /// Signals that one awaited upstream subscription has arrived.
        /// </summary>
        public void GotUpstreamSubscription() => Pumps.GotUpstreamSubscription(this);

        /// <summary>
        /// Switches the pump to the given transfer phase.
        /// </summary>
        /// <param name="phase">The next phase.</param>
        public void NextPhase(TransferPhase phase) => Pumps.NextPhase(this, phase);

        /// <summary>
        /// Runs the pump loop while the current transfer state is ready.
        /// </summary>
        public void Pump() => Pumps.Pump(this);

        /// <summary>
        /// Invoked when pumping fails; fails the stage.
        /// </summary>
        /// <param name="e">The failure cause.</param>
        public void PumpFailed(Exception e) => Fail(e);

        /// <summary>
        /// Invoked when pumping completes; cancels the input, completes the
        /// outputs and stops the actor.
        /// </summary>
        public void PumpFinished()
        {
            PrimaryInputs.Cancel();
            OutputBunch.Complete();
            Context.Stop(Self);
        }

        #endregion
    }

    /// <summary>
    /// INTERNAL API
    /// </summary>
    internal static class Unzip
    {
        /// <summary>
        /// Creates the <see cref="Props"/> for an <see cref="Unzip{T}"/> actor
        /// with two outputs.
        /// </summary>
        /// <typeparam name="T">Type of both tuple components.</typeparam>
        /// <param name="settings">Materializer settings.</param>
        /// <returns>The actor props.</returns>
        public static Props Props<T>(ActorMaterializerSettings settings)
            => Actor.Props.Create(() => new Unzip<T>(settings, 2)).WithDeploy(Deploy.Local);
    }

    /// <summary>
    /// INTERNAL API
    /// TODO Find out where this class will be used and check if the type parameter fit
    /// since we need to cast messages into a tuple and therefore maybe need aditional type parameters
    ///
    /// Fan-out stage that splits incoming <see cref="Tuple{T,T}"/> elements,
    /// sending Item1 to output 0 and Item2 to output 1.
    /// </summary>
    /// <typeparam name="T">Type of both tuple components.</typeparam>
    internal sealed class Unzip<T> : FanOut<T>
    {
        /// <summary>
        /// Creates the unzip actor; transfers only when the input has an
        /// element and all (marked) outputs have demand.
        /// </summary>
        /// <param name="settings">Materializer settings.</param>
        /// <param name="outputCount">Number of outputs (default 2).</param>
        /// <exception cref="ArgumentException">
        /// Thrown at runtime when an incoming element is not a <see cref="Tuple{T,T}"/>.
        /// </exception>
        public Unzip(ActorMaterializerSettings settings, int outputCount = 2)
            : base(settings, outputCount)
        {
            OutputBunch.MarkAllOutputs();

            InitialPhase(1, new TransferPhase(PrimaryInputs.NeedsInput.And(OutputBunch.AllOfMarkedOutputs), () =>
            {
                var message = PrimaryInputs.DequeueInputElement();
                var tuple = message as Tuple<T, T>;

                if (tuple == null)
                    throw new ArgumentException($"Unable to unzip elements of type {message.GetType().Name}");

                OutputBunch.Enqueue(0, tuple.Item1);
                OutputBunch.Enqueue(1, tuple.Item2);
            }));
        }
    }
}
// Copyright (c) 2014-2019 The Khronos Group Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and/or associated documentation files (the "Materials"), // to deal in the Materials without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Materials, and to permit persons to whom the // Materials are furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Materials. // // MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS // STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND // HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ // // THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS // IN THE MATERIALS. // This header is automatically generated by the same tool that creates // the Binary Section of the SPIR-V specification. 
// Enumeration tokens for SPIR-V, in various styles: // C, C++, C++11, JSON, Lua, Python, C#, D // // - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL // - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL // - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL // - Lua will use tables, e.g.: spv.SourceLanguage.GLSL // - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL'] // - C# will use enum classes in the Specification class located in the "Spv" namespace, // e.g.: Spv.Specification.SourceLanguage.GLSL // - D will have tokens under the "spv" module, e.g: spv.SourceLanguage.GLSL // // Some tokens act like mask values, which can be OR'd together, // while others are mutually exclusive. The mask-like ones have // "Mask" in their name, and a parallel enum that has the shift // amount (1 << x) for each corresponding enumerant. namespace Spv { public static class Specification { public const uint MagicNumber = 0x07230203; public const uint Version = 0x00010400; public const uint Revision = 1; public const uint OpCodeMask = 0xffff; public const uint WordCountShift = 16; public enum SourceLanguage { Unknown = 0, ESSL = 1, GLSL = 2, OpenCL_C = 3, OpenCL_CPP = 4, HLSL = 5, } public enum ExecutionModel { Vertex = 0, TessellationControl = 1, TessellationEvaluation = 2, Geometry = 3, Fragment = 4, GLCompute = 5, Kernel = 6, TaskNV = 5267, MeshNV = 5268, RayGenerationNV = 5313, IntersectionNV = 5314, AnyHitNV = 5315, ClosestHitNV = 5316, MissNV = 5317, CallableNV = 5318, } public enum AddressingModel { Logical = 0, Physical32 = 1, Physical64 = 2, PhysicalStorageBuffer64EXT = 5348, } public enum MemoryModel { Simple = 0, GLSL450 = 1, OpenCL = 2, VulkanKHR = 3, } public enum ExecutionMode { Invocations = 0, SpacingEqual = 1, SpacingFractionalEven = 2, SpacingFractionalOdd = 3, VertexOrderCw = 4, VertexOrderCcw = 5, PixelCenterInteger = 6, OriginUpperLeft = 7, OriginLowerLeft = 8, 
EarlyFragmentTests = 9, PointMode = 10, Xfb = 11, DepthReplacing = 12, DepthGreater = 14, DepthLess = 15, DepthUnchanged = 16, LocalSize = 17, LocalSizeHint = 18, InputPoints = 19, InputLines = 20, InputLinesAdjacency = 21, Triangles = 22, InputTrianglesAdjacency = 23, Quads = 24, Isolines = 25, OutputVertices = 26, OutputPoints = 27, OutputLineStrip = 28, OutputTriangleStrip = 29, VecTypeHint = 30, ContractionOff = 31, Initializer = 33, Finalizer = 34, SubgroupSize = 35, SubgroupsPerWorkgroup = 36, SubgroupsPerWorkgroupId = 37, LocalSizeId = 38, LocalSizeHintId = 39, PostDepthCoverage = 4446, DenormPreserve = 4459, DenormFlushToZero = 4460, SignedZeroInfNanPreserve = 4461, RoundingModeRTE = 4462, RoundingModeRTZ = 4463, StencilRefReplacingEXT = 5027, OutputLinesNV = 5269, OutputPrimitivesNV = 5270, DerivativeGroupQuadsNV = 5289, DerivativeGroupLinearNV = 5290, OutputTrianglesNV = 5298, PixelInterlockOrderedEXT = 5366, PixelInterlockUnorderedEXT = 5367, SampleInterlockOrderedEXT = 5368, SampleInterlockUnorderedEXT = 5369, ShadingRateInterlockOrderedEXT = 5370, ShadingRateInterlockUnorderedEXT = 5371, } public enum StorageClass { UniformConstant = 0, Input = 1, Uniform = 2, Output = 3, Workgroup = 4, CrossWorkgroup = 5, Private = 6, Function = 7, Generic = 8, PushConstant = 9, AtomicCounter = 10, Image = 11, StorageBuffer = 12, CallableDataNV = 5328, IncomingCallableDataNV = 5329, RayPayloadNV = 5338, HitAttributeNV = 5339, IncomingRayPayloadNV = 5342, ShaderRecordBufferNV = 5343, PhysicalStorageBufferEXT = 5349, } public enum Dim { Dim1D = 0, Dim2D = 1, Dim3D = 2, Cube = 3, Rect = 4, Buffer = 5, SubpassData = 6, } public enum SamplerAddressingMode { None = 0, ClampToEdge = 1, Clamp = 2, Repeat = 3, RepeatMirrored = 4, } public enum SamplerFilterMode { Nearest = 0, Linear = 1, } public enum ImageFormat { Unknown = 0, Rgba32f = 1, Rgba16f = 2, R32f = 3, Rgba8 = 4, Rgba8Snorm = 5, Rg32f = 6, Rg16f = 7, R11fG11fB10f = 8, R16f = 9, Rgba16 = 10, Rgb10A2 = 11, Rg16 = 12, 
// NOTE(review): auto-generated SPIR-V enumerant definitions (spirv.hpp-style header).
// Values are dictated by the Khronos SPIR-V specification — do not edit by hand;
// regenerate from the spec instead. TODO confirm which generator/spec revision produced this.
Rg8 = 13, R16 = 14, R8 = 15, Rgba16Snorm = 16, Rg16Snorm = 17, Rg8Snorm = 18, R16Snorm = 19, R8Snorm = 20, Rgba32i = 21, Rgba16i = 22, Rgba8i = 23, R32i = 24, Rg32i = 25, Rg16i = 26, Rg8i = 27, R16i = 28, R8i = 29, Rgba32ui = 30, Rgba16ui = 31, Rgba8ui = 32, R32ui = 33, Rgb10a2ui = 34, Rg32ui = 35, Rg16ui = 36, Rg8ui = 37, R16ui = 38, R8ui = 39, } public enum ImageChannelOrder { R = 0, A = 1, RG = 2, RA = 3, RGB = 4, RGBA = 5, BGRA = 6, ARGB = 7, Intensity = 8, Luminance = 9, Rx = 10, RGx = 11, RGBx = 12, Depth = 13, DepthStencil = 14, sRGB = 15, sRGBx = 16, sRGBA = 17, sBGRA = 18, ABGR = 19, } public enum ImageChannelDataType { SnormInt8 = 0, SnormInt16 = 1, UnormInt8 = 2, UnormInt16 = 3, UnormShort565 = 4, UnormShort555 = 5, UnormInt101010 = 6, SignedInt8 = 7, SignedInt16 = 8, SignedInt32 = 9, UnsignedInt8 = 10, UnsignedInt16 = 11, UnsignedInt32 = 12, HalfFloat = 13, Float = 14, UnormInt24 = 15, UnormInt101010_2 = 16, } public enum ImageOperandsShift { Bias = 0, Lod = 1, Grad = 2, ConstOffset = 3, Offset = 4, ConstOffsets = 5, Sample = 6, MinLod = 7, MakeTexelAvailableKHR = 8, MakeTexelVisibleKHR = 9, NonPrivateTexelKHR = 10, VolatileTexelKHR = 11, SignExtend = 12, ZeroExtend = 13, } public enum ImageOperandsMask { MaskNone = 0, Bias = 0x00000001, Lod = 0x00000002, Grad = 0x00000004, ConstOffset = 0x00000008, Offset = 0x00000010, ConstOffsets = 0x00000020, Sample = 0x00000040, MinLod = 0x00000080, MakeTexelAvailableKHR = 0x00000100, MakeTexelVisibleKHR = 0x00000200, NonPrivateTexelKHR = 0x00000400, VolatileTexelKHR = 0x00000800, SignExtend = 0x00001000, ZeroExtend = 0x00002000, } public enum FPFastMathModeShift { NotNaN = 0, NotInf = 1, NSZ = 2, AllowRecip = 3, Fast = 4, } public enum FPFastMathModeMask { MaskNone = 0, NotNaN = 0x00000001, NotInf = 0x00000002, NSZ = 0x00000004, AllowRecip = 0x00000008, Fast = 0x00000010, } public enum FPRoundingMode { RTE = 0, RTZ = 1, RTP = 2, RTN = 3, } public enum LinkageType { Export = 0, Import = 1, } public enum 
AccessQualifier { ReadOnly = 0, WriteOnly = 1, ReadWrite = 2, } public enum FunctionParameterAttribute { Zext = 0, Sext = 1, ByVal = 2, Sret = 3, NoAlias = 4, NoCapture = 5, NoWrite = 6, NoReadWrite = 7, } public enum Decoration { RelaxedPrecision = 0, SpecId = 1, Block = 2, BufferBlock = 3, RowMajor = 4, ColMajor = 5, ArrayStride = 6, MatrixStride = 7, GLSLShared = 8, GLSLPacked = 9, CPacked = 10, BuiltIn = 11, NoPerspective = 13, Flat = 14, Patch = 15, Centroid = 16, Sample = 17, Invariant = 18, Restrict = 19, Aliased = 20, Volatile = 21, Constant = 22, Coherent = 23, NonWritable = 24, NonReadable = 25, Uniform = 26, UniformId = 27, SaturatedConversion = 28, Stream = 29, Location = 30, Component = 31, Index = 32, Binding = 33, DescriptorSet = 34, Offset = 35, XfbBuffer = 36, XfbStride = 37, FuncParamAttr = 38, FPRoundingMode = 39, FPFastMathMode = 40, LinkageAttributes = 41, NoContraction = 42, InputAttachmentIndex = 43, Alignment = 44, MaxByteOffset = 45, AlignmentId = 46, MaxByteOffsetId = 47, NoSignedWrap = 4469, NoUnsignedWrap = 4470, ExplicitInterpAMD = 4999, OverrideCoverageNV = 5248, PassthroughNV = 5250, ViewportRelativeNV = 5252, SecondaryViewportRelativeNV = 5256, PerPrimitiveNV = 5271, PerViewNV = 5272, PerTaskNV = 5273, PerVertexNV = 5285, NonUniformEXT = 5300, RestrictPointerEXT = 5355, AliasedPointerEXT = 5356, CounterBuffer = 5634, HlslCounterBufferGOOGLE = 5634, HlslSemanticGOOGLE = 5635, UserSemantic = 5635, UserTypeGOOGLE = 5636, } public enum BuiltIn { Position = 0, PointSize = 1, ClipDistance = 3, CullDistance = 4, VertexId = 5, InstanceId = 6, PrimitiveId = 7, InvocationId = 8, Layer = 9, ViewportIndex = 10, TessLevelOuter = 11, TessLevelInner = 12, TessCoord = 13, PatchVertices = 14, FragCoord = 15, PointCoord = 16, FrontFacing = 17, SampleId = 18, SamplePosition = 19, SampleMask = 20, FragDepth = 22, HelperInvocation = 23, NumWorkgroups = 24, WorkgroupSize = 25, WorkgroupId = 26, LocalInvocationId = 27, GlobalInvocationId = 28, 
LocalInvocationIndex = 29, WorkDim = 30, GlobalSize = 31, EnqueuedWorkgroupSize = 32, GlobalOffset = 33, GlobalLinearId = 34, SubgroupSize = 36, SubgroupMaxSize = 37, NumSubgroups = 38, NumEnqueuedSubgroups = 39, SubgroupId = 40, SubgroupLocalInvocationId = 41, VertexIndex = 42, InstanceIndex = 43, SubgroupEqMask = 4416, SubgroupEqMaskKHR = 4416, SubgroupGeMask = 4417, SubgroupGeMaskKHR = 4417, SubgroupGtMask = 4418, SubgroupGtMaskKHR = 4418, SubgroupLeMask = 4419, SubgroupLeMaskKHR = 4419, SubgroupLtMask = 4420, SubgroupLtMaskKHR = 4420, BaseVertex = 4424, BaseInstance = 4425, DrawIndex = 4426, DeviceIndex = 4438, ViewIndex = 4440, BaryCoordNoPerspAMD = 4992, BaryCoordNoPerspCentroidAMD = 4993, BaryCoordNoPerspSampleAMD = 4994, BaryCoordSmoothAMD = 4995, BaryCoordSmoothCentroidAMD = 4996, BaryCoordSmoothSampleAMD = 4997, BaryCoordPullModelAMD = 4998, FragStencilRefEXT = 5014, ViewportMaskNV = 5253, SecondaryPositionNV = 5257, SecondaryViewportMaskNV = 5258, PositionPerViewNV = 5261, ViewportMaskPerViewNV = 5262, FullyCoveredEXT = 5264, TaskCountNV = 5274, PrimitiveCountNV = 5275, PrimitiveIndicesNV = 5276, ClipDistancePerViewNV = 5277, CullDistancePerViewNV = 5278, LayerPerViewNV = 5279, MeshViewCountNV = 5280, MeshViewIndicesNV = 5281, BaryCoordNV = 5286, BaryCoordNoPerspNV = 5287, FragSizeEXT = 5292, FragmentSizeNV = 5292, FragInvocationCountEXT = 5293, InvocationsPerPixelNV = 5293, LaunchIdNV = 5319, LaunchSizeNV = 5320, WorldRayOriginNV = 5321, WorldRayDirectionNV = 5322, ObjectRayOriginNV = 5323, ObjectRayDirectionNV = 5324, RayTminNV = 5325, RayTmaxNV = 5326, InstanceCustomIndexNV = 5327, ObjectToWorldNV = 5330, WorldToObjectNV = 5331, HitTNV = 5332, HitKindNV = 5333, IncomingRayFlagsNV = 5351, WarpsPerSMNV = 5374, SMCountNV = 5375, WarpIDNV = 5376, SMIDNV = 5377, } public enum SelectionControlShift { Flatten = 0, DontFlatten = 1, } public enum SelectionControlMask { MaskNone = 0, Flatten = 0x00000001, DontFlatten = 0x00000002, } public enum LoopControlShift 
{ Unroll = 0, DontUnroll = 1, DependencyInfinite = 2, DependencyLength = 3, MinIterations = 4, MaxIterations = 5, IterationMultiple = 6, PeelCount = 7, PartialCount = 8, } public enum LoopControlMask { MaskNone = 0, Unroll = 0x00000001, DontUnroll = 0x00000002, DependencyInfinite = 0x00000004, DependencyLength = 0x00000008, MinIterations = 0x00000010, MaxIterations = 0x00000020, IterationMultiple = 0x00000040, PeelCount = 0x00000080, PartialCount = 0x00000100, } public enum FunctionControlShift { Inline = 0, DontInline = 1, Pure = 2, Const = 3, } public enum FunctionControlMask { MaskNone = 0, Inline = 0x00000001, DontInline = 0x00000002, Pure = 0x00000004, Const = 0x00000008, } public enum MemorySemanticsShift { Acquire = 1, Release = 2, AcquireRelease = 3, SequentiallyConsistent = 4, UniformMemory = 6, SubgroupMemory = 7, WorkgroupMemory = 8, CrossWorkgroupMemory = 9, AtomicCounterMemory = 10, ImageMemory = 11, OutputMemoryKHR = 12, MakeAvailableKHR = 13, MakeVisibleKHR = 14, Volatile = 15, } public enum MemorySemanticsMask { MaskNone = 0, Acquire = 0x00000002, Release = 0x00000004, AcquireRelease = 0x00000008, SequentiallyConsistent = 0x00000010, UniformMemory = 0x00000040, SubgroupMemory = 0x00000080, WorkgroupMemory = 0x00000100, CrossWorkgroupMemory = 0x00000200, AtomicCounterMemory = 0x00000400, ImageMemory = 0x00000800, OutputMemoryKHR = 0x00001000, MakeAvailableKHR = 0x00002000, MakeVisibleKHR = 0x00004000, Volatile = 0x00008000, } public enum MemoryAccessShift { Volatile = 0, Aligned = 1, Nontemporal = 2, MakePointerAvailableKHR = 3, MakePointerVisibleKHR = 4, NonPrivatePointerKHR = 5, } public enum MemoryAccessMask { MaskNone = 0, Volatile = 0x00000001, Aligned = 0x00000002, Nontemporal = 0x00000004, MakePointerAvailableKHR = 0x00000008, MakePointerVisibleKHR = 0x00000010, NonPrivatePointerKHR = 0x00000020, } public enum Scope { CrossDevice = 0, Device = 1, Workgroup = 2, Subgroup = 3, Invocation = 4, QueueFamilyKHR = 5, } public enum GroupOperation { 
Reduce = 0, InclusiveScan = 1, ExclusiveScan = 2, ClusteredReduce = 3, PartitionedReduceNV = 6, PartitionedInclusiveScanNV = 7, PartitionedExclusiveScanNV = 8, } public enum KernelEnqueueFlags { NoWait = 0, WaitKernel = 1, WaitWorkGroup = 2, } public enum KernelProfilingInfoShift { CmdExecTime = 0, } public enum KernelProfilingInfoMask { MaskNone = 0, CmdExecTime = 0x00000001, } public enum Capability { Matrix = 0, Shader = 1, Geometry = 2, Tessellation = 3, Addresses = 4, Linkage = 5, Kernel = 6, Vector16 = 7, Float16Buffer = 8, Float16 = 9, Float64 = 10, Int64 = 11, Int64Atomics = 12, ImageBasic = 13, ImageReadWrite = 14, ImageMipmap = 15, Pipes = 17, Groups = 18, DeviceEnqueue = 19, LiteralSampler = 20, AtomicStorage = 21, Int16 = 22, TessellationPointSize = 23, GeometryPointSize = 24, ImageGatherExtended = 25, StorageImageMultisample = 27, UniformBufferArrayDynamicIndexing = 28, SampledImageArrayDynamicIndexing = 29, StorageBufferArrayDynamicIndexing = 30, StorageImageArrayDynamicIndexing = 31, ClipDistance = 32, CullDistance = 33, ImageCubeArray = 34, SampleRateShading = 35, ImageRect = 36, SampledRect = 37, GenericPointer = 38, Int8 = 39, InputAttachment = 40, SparseResidency = 41, MinLod = 42, Sampled1D = 43, Image1D = 44, SampledCubeArray = 45, SampledBuffer = 46, ImageBuffer = 47, ImageMSArray = 48, StorageImageExtendedFormats = 49, ImageQuery = 50, DerivativeControl = 51, InterpolationFunction = 52, TransformFeedback = 53, GeometryStreams = 54, StorageImageReadWithoutFormat = 55, StorageImageWriteWithoutFormat = 56, MultiViewport = 57, SubgroupDispatch = 58, NamedBarrier = 59, PipeStorage = 60, GroupNonUniform = 61, GroupNonUniformVote = 62, GroupNonUniformArithmetic = 63, GroupNonUniformBallot = 64, GroupNonUniformShuffle = 65, GroupNonUniformShuffleRelative = 66, GroupNonUniformClustered = 67, GroupNonUniformQuad = 68, SubgroupBallotKHR = 4423, DrawParameters = 4427, SubgroupVoteKHR = 4431, StorageBuffer16BitAccess = 4433, StorageUniformBufferBlock16 = 
4433, StorageUniform16 = 4434, UniformAndStorageBuffer16BitAccess = 4434, StoragePushConstant16 = 4435, StorageInputOutput16 = 4436, DeviceGroup = 4437, MultiView = 4439, VariablePointersStorageBuffer = 4441, VariablePointers = 4442, AtomicStorageOps = 4445, SampleMaskPostDepthCoverage = 4447, StorageBuffer8BitAccess = 4448, UniformAndStorageBuffer8BitAccess = 4449, StoragePushConstant8 = 4450, DenormPreserve = 4464, DenormFlushToZero = 4465, SignedZeroInfNanPreserve = 4466, RoundingModeRTE = 4467, RoundingModeRTZ = 4468, Float16ImageAMD = 5008, ImageGatherBiasLodAMD = 5009, FragmentMaskAMD = 5010, StencilExportEXT = 5013, ImageReadWriteLodAMD = 5015, SampleMaskOverrideCoverageNV = 5249, GeometryShaderPassthroughNV = 5251, ShaderViewportIndexLayerEXT = 5254, ShaderViewportIndexLayerNV = 5254, ShaderViewportMaskNV = 5255, ShaderStereoViewNV = 5259, PerViewAttributesNV = 5260, FragmentFullyCoveredEXT = 5265, MeshShadingNV = 5266, ImageFootprintNV = 5282, FragmentBarycentricNV = 5284, ComputeDerivativeGroupQuadsNV = 5288, FragmentDensityEXT = 5291, ShadingRateNV = 5291, GroupNonUniformPartitionedNV = 5297, ShaderNonUniformEXT = 5301, RuntimeDescriptorArrayEXT = 5302, InputAttachmentArrayDynamicIndexingEXT = 5303, UniformTexelBufferArrayDynamicIndexingEXT = 5304, StorageTexelBufferArrayDynamicIndexingEXT = 5305, UniformBufferArrayNonUniformIndexingEXT = 5306, SampledImageArrayNonUniformIndexingEXT = 5307, StorageBufferArrayNonUniformIndexingEXT = 5308, StorageImageArrayNonUniformIndexingEXT = 5309, InputAttachmentArrayNonUniformIndexingEXT = 5310, UniformTexelBufferArrayNonUniformIndexingEXT = 5311, StorageTexelBufferArrayNonUniformIndexingEXT = 5312, RayTracingNV = 5340, VulkanMemoryModelKHR = 5345, VulkanMemoryModelDeviceScopeKHR = 5346, PhysicalStorageBufferAddressesEXT = 5347, ComputeDerivativeGroupLinearNV = 5350, CooperativeMatrixNV = 5357, FragmentShaderSampleInterlockEXT = 5363, FragmentShaderShadingRateInterlockEXT = 5372, ShaderSMBuiltinsNV = 5373, 
FragmentShaderPixelInterlockEXT = 5378, DemoteToHelperInvocationEXT = 5379, SubgroupShuffleINTEL = 5568, SubgroupBufferBlockIOINTEL = 5569, SubgroupImageBlockIOINTEL = 5570, SubgroupImageMediaBlockIOINTEL = 5579, IntegerFunctions2INTEL = 5584, SubgroupAvcMotionEstimationINTEL = 5696, SubgroupAvcMotionEstimationIntraINTEL = 5697, SubgroupAvcMotionEstimationChromaINTEL = 5698, } public enum Op { OpNop = 0, OpUndef = 1, OpSourceContinued = 2, OpSource = 3, OpSourceExtension = 4, OpName = 5, OpMemberName = 6, OpString = 7, OpLine = 8, OpExtension = 10, OpExtInstImport = 11, OpExtInst = 12, OpMemoryModel = 14, OpEntryPoint = 15, OpExecutionMode = 16, OpCapability = 17, OpTypeVoid = 19, OpTypeBool = 20, OpTypeInt = 21, OpTypeFloat = 22, OpTypeVector = 23, OpTypeMatrix = 24, OpTypeImage = 25, OpTypeSampler = 26, OpTypeSampledImage = 27, OpTypeArray = 28, OpTypeRuntimeArray = 29, OpTypeStruct = 30, OpTypeOpaque = 31, OpTypePointer = 32, OpTypeFunction = 33, OpTypeEvent = 34, OpTypeDeviceEvent = 35, OpTypeReserveId = 36, OpTypeQueue = 37, OpTypePipe = 38, OpTypeForwardPointer = 39, OpConstantTrue = 41, OpConstantFalse = 42, OpConstant = 43, OpConstantComposite = 44, OpConstantSampler = 45, OpConstantNull = 46, OpSpecConstantTrue = 48, OpSpecConstantFalse = 49, OpSpecConstant = 50, OpSpecConstantComposite = 51, OpSpecConstantOp = 52, OpFunction = 54, OpFunctionParameter = 55, OpFunctionEnd = 56, OpFunctionCall = 57, OpVariable = 59, OpImageTexelPointer = 60, OpLoad = 61, OpStore = 62, OpCopyMemory = 63, OpCopyMemorySized = 64, OpAccessChain = 65, OpInBoundsAccessChain = 66, OpPtrAccessChain = 67, OpArrayLength = 68, OpGenericPtrMemSemantics = 69, OpInBoundsPtrAccessChain = 70, OpDecorate = 71, OpMemberDecorate = 72, OpDecorationGroup = 73, OpGroupDecorate = 74, OpGroupMemberDecorate = 75, OpVectorExtractDynamic = 77, OpVectorInsertDynamic = 78, OpVectorShuffle = 79, OpCompositeConstruct = 80, OpCompositeExtract = 81, OpCompositeInsert = 82, OpCopyObject = 83, OpTranspose = 
84, OpSampledImage = 86, OpImageSampleImplicitLod = 87, OpImageSampleExplicitLod = 88, OpImageSampleDrefImplicitLod = 89, OpImageSampleDrefExplicitLod = 90, OpImageSampleProjImplicitLod = 91, OpImageSampleProjExplicitLod = 92, OpImageSampleProjDrefImplicitLod = 93, OpImageSampleProjDrefExplicitLod = 94, OpImageFetch = 95, OpImageGather = 96, OpImageDrefGather = 97, OpImageRead = 98, OpImageWrite = 99, OpImage = 100, OpImageQueryFormat = 101, OpImageQueryOrder = 102, OpImageQuerySizeLod = 103, OpImageQuerySize = 104, OpImageQueryLod = 105, OpImageQueryLevels = 106, OpImageQuerySamples = 107, OpConvertFToU = 109, OpConvertFToS = 110, OpConvertSToF = 111, OpConvertUToF = 112, OpUConvert = 113, OpSConvert = 114, OpFConvert = 115, OpQuantizeToF16 = 116, OpConvertPtrToU = 117, OpSatConvertSToU = 118, OpSatConvertUToS = 119, OpConvertUToPtr = 120, OpPtrCastToGeneric = 121, OpGenericCastToPtr = 122, OpGenericCastToPtrExplicit = 123, OpBitcast = 124, OpSNegate = 126, OpFNegate = 127, OpIAdd = 128, OpFAdd = 129, OpISub = 130, OpFSub = 131, OpIMul = 132, OpFMul = 133, OpUDiv = 134, OpSDiv = 135, OpFDiv = 136, OpUMod = 137, OpSRem = 138, OpSMod = 139, OpFRem = 140, OpFMod = 141, OpVectorTimesScalar = 142, OpMatrixTimesScalar = 143, OpVectorTimesMatrix = 144, OpMatrixTimesVector = 145, OpMatrixTimesMatrix = 146, OpOuterProduct = 147, OpDot = 148, OpIAddCarry = 149, OpISubBorrow = 150, OpUMulExtended = 151, OpSMulExtended = 152, OpAny = 154, OpAll = 155, OpIsNan = 156, OpIsInf = 157, OpIsFinite = 158, OpIsNormal = 159, OpSignBitSet = 160, OpLessOrGreater = 161, OpOrdered = 162, OpUnordered = 163, OpLogicalEqual = 164, OpLogicalNotEqual = 165, OpLogicalOr = 166, OpLogicalAnd = 167, OpLogicalNot = 168, OpSelect = 169, OpIEqual = 170, OpINotEqual = 171, OpUGreaterThan = 172, OpSGreaterThan = 173, OpUGreaterThanEqual = 174, OpSGreaterThanEqual = 175, OpULessThan = 176, OpSLessThan = 177, OpULessThanEqual = 178, OpSLessThanEqual = 179, OpFOrdEqual = 180, OpFUnordEqual = 181, 
OpFOrdNotEqual = 182, OpFUnordNotEqual = 183, OpFOrdLessThan = 184, OpFUnordLessThan = 185, OpFOrdGreaterThan = 186, OpFUnordGreaterThan = 187, OpFOrdLessThanEqual = 188, OpFUnordLessThanEqual = 189, OpFOrdGreaterThanEqual = 190, OpFUnordGreaterThanEqual = 191, OpShiftRightLogical = 194, OpShiftRightArithmetic = 195, OpShiftLeftLogical = 196, OpBitwiseOr = 197, OpBitwiseXor = 198, OpBitwiseAnd = 199, OpNot = 200, OpBitFieldInsert = 201, OpBitFieldSExtract = 202, OpBitFieldUExtract = 203, OpBitReverse = 204, OpBitCount = 205, OpDPdx = 207, OpDPdy = 208, OpFwidth = 209, OpDPdxFine = 210, OpDPdyFine = 211, OpFwidthFine = 212, OpDPdxCoarse = 213, OpDPdyCoarse = 214, OpFwidthCoarse = 215, OpEmitVertex = 218, OpEndPrimitive = 219, OpEmitStreamVertex = 220, OpEndStreamPrimitive = 221, OpControlBarrier = 224, OpMemoryBarrier = 225, OpAtomicLoad = 227, OpAtomicStore = 228, OpAtomicExchange = 229, OpAtomicCompareExchange = 230, OpAtomicCompareExchangeWeak = 231, OpAtomicIIncrement = 232, OpAtomicIDecrement = 233, OpAtomicIAdd = 234, OpAtomicISub = 235, OpAtomicSMin = 236, OpAtomicUMin = 237, OpAtomicSMax = 238, OpAtomicUMax = 239, OpAtomicAnd = 240, OpAtomicOr = 241, OpAtomicXor = 242, OpPhi = 245, OpLoopMerge = 246, OpSelectionMerge = 247, OpLabel = 248, OpBranch = 249, OpBranchConditional = 250, OpSwitch = 251, OpKill = 252, OpReturn = 253, OpReturnValue = 254, OpUnreachable = 255, OpLifetimeStart = 256, OpLifetimeStop = 257, OpGroupAsyncCopy = 259, OpGroupWaitEvents = 260, OpGroupAll = 261, OpGroupAny = 262, OpGroupBroadcast = 263, OpGroupIAdd = 264, OpGroupFAdd = 265, OpGroupFMin = 266, OpGroupUMin = 267, OpGroupSMin = 268, OpGroupFMax = 269, OpGroupUMax = 270, OpGroupSMax = 271, OpReadPipe = 274, OpWritePipe = 275, OpReservedReadPipe = 276, OpReservedWritePipe = 277, OpReserveReadPipePackets = 278, OpReserveWritePipePackets = 279, OpCommitReadPipe = 280, OpCommitWritePipe = 281, OpIsValidReserveId = 282, OpGetNumPipePackets = 283, OpGetMaxPipePackets = 284, 
OpGroupReserveReadPipePackets = 285, OpGroupReserveWritePipePackets = 286, OpGroupCommitReadPipe = 287, OpGroupCommitWritePipe = 288, OpEnqueueMarker = 291, OpEnqueueKernel = 292, OpGetKernelNDrangeSubGroupCount = 293, OpGetKernelNDrangeMaxSubGroupSize = 294, OpGetKernelWorkGroupSize = 295, OpGetKernelPreferredWorkGroupSizeMultiple = 296, OpRetainEvent = 297, OpReleaseEvent = 298, OpCreateUserEvent = 299, OpIsValidEvent = 300, OpSetUserEventStatus = 301, OpCaptureEventProfilingInfo = 302, OpGetDefaultQueue = 303, OpBuildNDRange = 304, OpImageSparseSampleImplicitLod = 305, OpImageSparseSampleExplicitLod = 306, OpImageSparseSampleDrefImplicitLod = 307, OpImageSparseSampleDrefExplicitLod = 308, OpImageSparseSampleProjImplicitLod = 309, OpImageSparseSampleProjExplicitLod = 310, OpImageSparseSampleProjDrefImplicitLod = 311, OpImageSparseSampleProjDrefExplicitLod = 312, OpImageSparseFetch = 313, OpImageSparseGather = 314, OpImageSparseDrefGather = 315, OpImageSparseTexelsResident = 316, OpNoLine = 317, OpAtomicFlagTestAndSet = 318, OpAtomicFlagClear = 319, OpImageSparseRead = 320, OpSizeOf = 321, OpTypePipeStorage = 322, OpConstantPipeStorage = 323, OpCreatePipeFromPipeStorage = 324, OpGetKernelLocalSizeForSubgroupCount = 325, OpGetKernelMaxNumSubgroups = 326, OpTypeNamedBarrier = 327, OpNamedBarrierInitialize = 328, OpMemoryNamedBarrier = 329, OpModuleProcessed = 330, OpExecutionModeId = 331, OpDecorateId = 332, OpGroupNonUniformElect = 333, OpGroupNonUniformAll = 334, OpGroupNonUniformAny = 335, OpGroupNonUniformAllEqual = 336, OpGroupNonUniformBroadcast = 337, OpGroupNonUniformBroadcastFirst = 338, OpGroupNonUniformBallot = 339, OpGroupNonUniformInverseBallot = 340, OpGroupNonUniformBallotBitExtract = 341, OpGroupNonUniformBallotBitCount = 342, OpGroupNonUniformBallotFindLSB = 343, OpGroupNonUniformBallotFindMSB = 344, OpGroupNonUniformShuffle = 345, OpGroupNonUniformShuffleXor = 346, OpGroupNonUniformShuffleUp = 347, OpGroupNonUniformShuffleDown = 348, 
OpGroupNonUniformIAdd = 349, OpGroupNonUniformFAdd = 350, OpGroupNonUniformIMul = 351, OpGroupNonUniformFMul = 352, OpGroupNonUniformSMin = 353, OpGroupNonUniformUMin = 354, OpGroupNonUniformFMin = 355, OpGroupNonUniformSMax = 356, OpGroupNonUniformUMax = 357, OpGroupNonUniformFMax = 358, OpGroupNonUniformBitwiseAnd = 359, OpGroupNonUniformBitwiseOr = 360, OpGroupNonUniformBitwiseXor = 361, OpGroupNonUniformLogicalAnd = 362, OpGroupNonUniformLogicalOr = 363, OpGroupNonUniformLogicalXor = 364, OpGroupNonUniformQuadBroadcast = 365, OpGroupNonUniformQuadSwap = 366, OpCopyLogical = 400, OpPtrEqual = 401, OpPtrNotEqual = 402, OpPtrDiff = 403, OpSubgroupBallotKHR = 4421, OpSubgroupFirstInvocationKHR = 4422, OpSubgroupAllKHR = 4428, OpSubgroupAnyKHR = 4429, OpSubgroupAllEqualKHR = 4430, OpSubgroupReadInvocationKHR = 4432, OpGroupIAddNonUniformAMD = 5000, OpGroupFAddNonUniformAMD = 5001, OpGroupFMinNonUniformAMD = 5002, OpGroupUMinNonUniformAMD = 5003, OpGroupSMinNonUniformAMD = 5004, OpGroupFMaxNonUniformAMD = 5005, OpGroupUMaxNonUniformAMD = 5006, OpGroupSMaxNonUniformAMD = 5007, OpFragmentMaskFetchAMD = 5011, OpFragmentFetchAMD = 5012, OpImageSampleFootprintNV = 5283, OpGroupNonUniformPartitionNV = 5296, OpWritePackedPrimitiveIndices4x8NV = 5299, OpReportIntersectionNV = 5334, OpIgnoreIntersectionNV = 5335, OpTerminateRayNV = 5336, OpTraceNV = 5337, OpTypeAccelerationStructureNV = 5341, OpExecuteCallableNV = 5344, OpTypeCooperativeMatrixNV = 5358, OpCooperativeMatrixLoadNV = 5359, OpCooperativeMatrixStoreNV = 5360, OpCooperativeMatrixMulAddNV = 5361, OpCooperativeMatrixLengthNV = 5362, OpBeginInvocationInterlockEXT = 5364, OpEndInvocationInterlockEXT = 5365, OpDemoteToHelperInvocationEXT = 5380, OpIsHelperInvocationEXT = 5381, OpSubgroupShuffleINTEL = 5571, OpSubgroupShuffleDownINTEL = 5572, OpSubgroupShuffleUpINTEL = 5573, OpSubgroupShuffleXorINTEL = 5574, OpSubgroupBlockReadINTEL = 5575, OpSubgroupBlockWriteINTEL = 5576, OpSubgroupImageBlockReadINTEL = 5577, 
OpSubgroupImageBlockWriteINTEL = 5578, OpSubgroupImageMediaBlockReadINTEL = 5580, OpSubgroupImageMediaBlockWriteINTEL = 5581, OpUCountLeadingZerosINTEL = 5585, OpUCountTrailingZerosINTEL = 5586, OpAbsISubINTEL = 5587, OpAbsUSubINTEL = 5588, OpIAddSatINTEL = 5589, OpUAddSatINTEL = 5590, OpIAverageINTEL = 5591, OpUAverageINTEL = 5592, OpIAverageRoundedINTEL = 5593, OpUAverageRoundedINTEL = 5594, OpISubSatINTEL = 5595, OpUSubSatINTEL = 5596, OpIMul32x16INTEL = 5597, OpUMul32x16INTEL = 5598, OpDecorateString = 5632, OpDecorateStringGOOGLE = 5632, OpMemberDecorateString = 5633, OpMemberDecorateStringGOOGLE = 5633, OpVmeImageINTEL = 5699, OpTypeVmeImageINTEL = 5700, OpTypeAvcImePayloadINTEL = 5701, OpTypeAvcRefPayloadINTEL = 5702, OpTypeAvcSicPayloadINTEL = 5703, OpTypeAvcMcePayloadINTEL = 5704, OpTypeAvcMceResultINTEL = 5705, OpTypeAvcImeResultINTEL = 5706, OpTypeAvcImeResultSingleReferenceStreamoutINTEL = 5707, OpTypeAvcImeResultDualReferenceStreamoutINTEL = 5708, OpTypeAvcImeSingleReferenceStreaminINTEL = 5709, OpTypeAvcImeDualReferenceStreaminINTEL = 5710, OpTypeAvcRefResultINTEL = 5711, OpTypeAvcSicResultINTEL = 5712, OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL = 5713, OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL = 5714, OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL = 5715, OpSubgroupAvcMceSetInterShapePenaltyINTEL = 5716, OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL = 5717, OpSubgroupAvcMceSetInterDirectionPenaltyINTEL = 5718, OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL = 5719, OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL = 5720, OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL = 5721, OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL = 5722, OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL = 5723, OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL = 5724, OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL = 5725, OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL = 5726, 
OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL = 5727, OpSubgroupAvcMceSetAcOnlyHaarINTEL = 5728, OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL = 5729, OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL = 5730, OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL = 5731, OpSubgroupAvcMceConvertToImePayloadINTEL = 5732, OpSubgroupAvcMceConvertToImeResultINTEL = 5733, OpSubgroupAvcMceConvertToRefPayloadINTEL = 5734, OpSubgroupAvcMceConvertToRefResultINTEL = 5735, OpSubgroupAvcMceConvertToSicPayloadINTEL = 5736, OpSubgroupAvcMceConvertToSicResultINTEL = 5737, OpSubgroupAvcMceGetMotionVectorsINTEL = 5738, OpSubgroupAvcMceGetInterDistortionsINTEL = 5739, OpSubgroupAvcMceGetBestInterDistortionsINTEL = 5740, OpSubgroupAvcMceGetInterMajorShapeINTEL = 5741, OpSubgroupAvcMceGetInterMinorShapeINTEL = 5742, OpSubgroupAvcMceGetInterDirectionsINTEL = 5743, OpSubgroupAvcMceGetInterMotionVectorCountINTEL = 5744, OpSubgroupAvcMceGetInterReferenceIdsINTEL = 5745, OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL = 5746, OpSubgroupAvcImeInitializeINTEL = 5747, OpSubgroupAvcImeSetSingleReferenceINTEL = 5748, OpSubgroupAvcImeSetDualReferenceINTEL = 5749, OpSubgroupAvcImeRefWindowSizeINTEL = 5750, OpSubgroupAvcImeAdjustRefOffsetINTEL = 5751, OpSubgroupAvcImeConvertToMcePayloadINTEL = 5752, OpSubgroupAvcImeSetMaxMotionVectorCountINTEL = 5753, OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL = 5754, OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL = 5755, OpSubgroupAvcImeSetWeightedSadINTEL = 5756, OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL = 5757, OpSubgroupAvcImeEvaluateWithDualReferenceINTEL = 5758, OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL = 5759, OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL = 5760, OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL = 5761, OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL = 5762, OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL = 5763, 
OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL = 5764, OpSubgroupAvcImeConvertToMceResultINTEL = 5765, OpSubgroupAvcImeGetSingleReferenceStreaminINTEL = 5766, OpSubgroupAvcImeGetDualReferenceStreaminINTEL = 5767, OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL = 5768, OpSubgroupAvcImeStripDualReferenceStreamoutINTEL = 5769, OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL = 5770, OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL = 5771, OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL = 5772, OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL = 5773, OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL = 5774, OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL = 5775, OpSubgroupAvcImeGetBorderReachedINTEL = 5776, OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL = 5777, OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL = 5778, OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL = 5779, OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL = 5780, OpSubgroupAvcFmeInitializeINTEL = 5781, OpSubgroupAvcBmeInitializeINTEL = 5782, OpSubgroupAvcRefConvertToMcePayloadINTEL = 5783, OpSubgroupAvcRefSetBidirectionalMixDisableINTEL = 5784, OpSubgroupAvcRefSetBilinearFilterEnableINTEL = 5785, OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL = 5786, OpSubgroupAvcRefEvaluateWithDualReferenceINTEL = 5787, OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL = 5788, OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL = 5789, OpSubgroupAvcRefConvertToMceResultINTEL = 5790, OpSubgroupAvcSicInitializeINTEL = 5791, OpSubgroupAvcSicConfigureSkcINTEL = 5792, OpSubgroupAvcSicConfigureIpeLumaINTEL = 5793, OpSubgroupAvcSicConfigureIpeLumaChromaINTEL = 5794, OpSubgroupAvcSicGetMotionVectorMaskINTEL = 5795, OpSubgroupAvcSicConvertToMcePayloadINTEL = 5796, OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL = 5797, 
OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL = 5798, OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL = 5799, OpSubgroupAvcSicSetBilinearFilterEnableINTEL = 5800, OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL = 5801, OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL = 5802, OpSubgroupAvcSicEvaluateIpeINTEL = 5803, OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL = 5804, OpSubgroupAvcSicEvaluateWithDualReferenceINTEL = 5805, OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL = 5806, OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL = 5807, OpSubgroupAvcSicConvertToMceResultINTEL = 5808, OpSubgroupAvcSicGetIpeLumaShapeINTEL = 5809, OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL = 5810, OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL = 5811, OpSubgroupAvcSicGetPackedIpeLumaModesINTEL = 5812, OpSubgroupAvcSicGetIpeChromaModeINTEL = 5813, OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL = 5814, OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL = 5815, OpSubgroupAvcSicGetInterRawSadsINTEL = 5816, } } }
/* Copyright (c) Citrix Systems Inc. * All rights reserved. * * Redistribution and use in source and binary forms, * with or without modification, are permitted provided * that the following conditions are met: * * * Redistributions of source code must retain the above * copyright notice, this list of conditions and the * following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the * following disclaimer in the documentation and/or other * materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. 
*/
// NOTE(review): unit tests for XenServerPatchAlert. Each test builds a XenServerPatch,
// wraps it in an alert, attaches strict Moq mocks for connections/hosts, and checks the
// alert's getters via VerifyGetters plus how often each mock's Name getter was read.
// The last XenServerPatch constructor argument appears to be the priority string
// ("", "1", "0", "5") driving the expected Priority value — TODO confirm against XenServerPatch.
using System; using System.Collections.Generic; using Moq; using NUnit.Framework; using XenAdmin.Alerts; using XenAdmin.Core; using XenAdmin.Network; using XenAdminTests.UnitTests.UnitTestHelper; using XenAPI; namespace XenAdminTests.UnitTests.AlertTests { [TestFixture, Category(TestCategories.Unit), Category(TestCategories.SmokeTest)] public class XenServerPatchAlertTests { private Mock<IXenConnection> connA; private Mock<IXenConnection> connB; private Mock<Host> hostA; private Mock<Host> hostB; [Test] public void TestAlertWithConnectionAndHosts() { XenServerPatch p = new XenServerPatch("uuid", "name", "My description", "guidance", "6.0.1", "http://url", "http://patchUrl", new DateTime(2011, 4, 1).ToString(), ""); XenServerPatchAlert alert = new XenServerPatchAlert(p); alert.IncludeConnection(connA.Object); alert.IncludeConnection(connB.Object); alert.IncludeHosts(new List<Host> { hostA.Object, hostB.Object }); IUnitTestVerifier validator = new VerifyGetters(alert); validator.Verify(new AlertClassUnitTestData { AppliesTo = "HostAName, HostBName, ConnAName, ConnBName", FixLinkText = "Go to Web Page", HelpID = "XenServerPatchAlert", Description = "My description", HelpLinkText = "Help", Title = "New Update Available - name", Priority = "Priority2" }); Assert.IsFalse(alert.CanIgnore); VerifyConnExpectations(Times.Once); VerifyHostsExpectations(Times.Once); } [Test] public void TestAlertWithHostsAndNoConnection() { XenServerPatch p = new XenServerPatch("uuid", "name", "My description", "guidance", "6.0.1", "http://url", "http://patchUrl", new DateTime(2011, 4, 1).ToString(), "1"); XenServerPatchAlert alert = new XenServerPatchAlert(p); alert.IncludeHosts(new List<Host>() { hostA.Object, hostB.Object }); IUnitTestVerifier validator = new VerifyGetters(alert); validator.Verify(new AlertClassUnitTestData { AppliesTo = "HostAName, HostBName", FixLinkText = "Go to Web Page", HelpID = "XenServerPatchAlert", Description = "My description", HelpLinkText = "Help", Title = 
// (expected getter values continue: title and Priority1 for the hosts-only alert)
"New Update Available - name", Priority = "Priority1" }); Assert.IsFalse(alert.CanIgnore); VerifyConnExpectations(Times.Never); VerifyHostsExpectations(Times.Once); } [Test] public void TestAlertWithConnectionAndNoHosts() { XenServerPatch p = new XenServerPatch("uuid", "name", "My description", "guidance", "6.0.1", "http://url", "http://patchUrl", new DateTime(2011, 4, 1).ToString(), "0"); XenServerPatchAlert alert = new XenServerPatchAlert(p); alert.IncludeConnection(connA.Object); alert.IncludeConnection(connB.Object); IUnitTestVerifier validator = new VerifyGetters(alert); validator.Verify(new AlertClassUnitTestData { AppliesTo = "ConnAName, ConnBName", FixLinkText = "Go to Web Page", HelpID = "XenServerPatchAlert", Description = "My description", HelpLinkText = "Help", Title = "New Update Available - name", Priority = "Unknown" }); Assert.IsFalse(alert.CanIgnore); VerifyConnExpectations(Times.Once); VerifyHostsExpectations(Times.Never); } [Test] public void TestAlertWithNoConnectionAndNoHosts() { XenServerPatch p = new XenServerPatch("uuid", "name", "My description", "guidance", "6.0.1", "http://url", "http://patchUrl", new DateTime(2011, 4, 1).ToString(), "5"); XenServerPatchAlert alert = new XenServerPatchAlert(p); IUnitTestVerifier validator = new VerifyGetters(alert); validator.Verify(new AlertClassUnitTestData { AppliesTo = string.Empty, FixLinkText = "Go to Web Page", HelpID = "XenServerPatchAlert", Description = "My description", HelpLinkText = "Help", Title = "New Update Available - name", Priority = "Priority5" }); Assert.IsTrue(alert.CanIgnore); VerifyConnExpectations(Times.Never); VerifyHostsExpectations(Times.Never); } [Test, ExpectedException(typeof(NullReferenceException))] public void TestAlertWithNullPatch() { XenServerPatchAlert alert = new XenServerPatchAlert(null); } private void VerifyConnExpectations(Func<Times> times) { connA.VerifyGet(n => n.Name, times()); connB.VerifyGet(n => n.Name, times()); } private void 
// Verifies each host mock's Name getter was read the expected number of times.
VerifyHostsExpectations(Func<Times> times) { hostA.VerifyGet(n => n.Name, times()); hostB.VerifyGet(n => n.Name, times()); } [SetUp] public void TestSetUp() { connA = new Mock<IXenConnection>(MockBehavior.Strict); connA.Setup(n => n.Name).Returns("ConnAName"); connB = new Mock<IXenConnection>(MockBehavior.Strict); connB.Setup(n => n.Name).Returns("ConnBName"); hostA = new Mock<Host>(MockBehavior.Strict); hostA.Setup(n => n.Name).Returns("HostAName"); hostA.Setup(n => n.Equals(It.IsAny<object>())).Returns((object o) => ReferenceEquals(o, hostA.Object)); hostB = new Mock<Host>(MockBehavior.Strict); hostB.Setup(n => n.Name).Returns("HostBName"); hostB.Setup(n => n.Equals(It.IsAny<object>())).Returns((object o) => ReferenceEquals(o, hostB.Object)); } [TearDown] public void TestTearDown() { connA = null; connB = null; hostA = null; hostB = null; } } }
// // PkzipClassic encryption // // Copyright 2004 John Reilly // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // Linking this library statically or dynamically with other modules is // making a combined work based on this library. Thus, the terms and // conditions of the GNU General Public License cover the whole // combination. // // As a special exception, the copyright holders of this library give you // permission to link this library with independent modules to produce an // executable, regardless of the license terms of these independent // modules, and to copy and distribute the resulting executable under // terms of your choice, provided that you also meet, for each linked // independent module, the terms and conditions of the license of that // module. An independent module is a module which is not derived from // or based on this library. If you modify this library, you may extend // this exception to your version of the library, but you are not // obligated to do so. If you do not wish to do so, delete this // exception statement from your version. 
// *************************************************************************
//
// Name:     PkzipClassic.cs
//
// Created:  19-02-2008 SharedCache.com, rschuetz
// Modified: 19-02-2008 SharedCache.com, rschuetz : Creation
// *************************************************************************

using System;
using System.Security.Cryptography;

using SharedCache.WinServiceCommon.SharpZipLib.Checksum;

namespace SharedCache.WinServiceCommon.SharpZipLib.Encryption
{
	/// <summary>
	/// PkzipClassic embodies the classic or original encryption facilities used in Pkzip archives.
	/// While it has been superseded by more recent and more powerful algorithms, it is still in use and
	/// is viable for preventing casual snooping.
	/// </summary>
	public abstract class PkzipClassic : SymmetricAlgorithm
	{
		/// <summary>
		/// Generates new encryption keys based on a given seed, using the
		/// "traditional" PKZIP key-initialisation algorithm (PKWARE APPNOTE):
		/// three 32-bit keys are seeded with fixed constants and updated per
		/// seed byte via CRC32 and a linear congruential step.
		/// </summary>
		/// <param name="seed">The seed value to initialise keys with.</param>
		/// <returns>A new 12 byte key value (the three keys, little-endian).</returns>
		/// <exception cref="ArgumentNullException">Thrown when <paramref name="seed"/> is null.</exception>
		/// <exception cref="ArgumentException">Thrown when <paramref name="seed"/> is empty.</exception>
		static public byte[] GenerateKeys(byte[] seed)
		{
			if (seed == null)
			{
				throw new ArgumentNullException("seed");
			}
			if (seed.Length == 0)
			{
				throw new ArgumentException("Length is zero", "seed");
			}

			// Fixed initialisation constants mandated by the PKZIP specification.
			uint[] newKeys = new uint[] { 0x12345678, 0x23456789, 0x34567890 };

			for (int i = 0; i < seed.Length; ++i)
			{
				newKeys[0] = Crc32.ComputeCrc32(newKeys[0], seed[i]);
				newKeys[1] = newKeys[1] + (byte)newKeys[0];
				// Linear congruential update (constant from the spec); relies on
				// uint wrap-around, which is well-defined in C#.
				newKeys[1] = newKeys[1] * 134775813 + 1;
				newKeys[2] = Crc32.ComputeCrc32(newKeys[2], (byte)(newKeys[1] >> 24));
			}

			// Serialise the three 32-bit keys little-endian into a 12 byte block.
			byte[] result = new byte[12];
			for (int k = 0; k < 3; ++k)
			{
				result[k * 4]     = (byte)(newKeys[k] & 0xff);
				result[k * 4 + 1] = (byte)((newKeys[k] >> 8) & 0xff);
				result[k * 4 + 2] = (byte)((newKeys[k] >> 16) & 0xff);
				result[k * 4 + 3] = (byte)((newKeys[k] >> 24) & 0xff);
			}
			return result;
		}
	}

	/// <summary>
	/// PkzipClassicCryptoBase provides the low level facilities for encryption
	/// and decryption using the PkzipClassic algorithm. It holds the three
	/// 32-bit rolling keys and implements the per-byte keystream update.
	/// </summary>
	class PkzipClassicCryptoBase
	{
		/// <summary>
		/// Produce the next keystream byte derived from the current key state.
		/// </summary>
		/// <returns>
		/// The transformed value.
		/// </returns>
		protected byte TransformByte()
		{
			// Only the low 16 bits of key 2 feed the output; the "| 2" and the
			// multiply/shift are the decryption-byte formula from the spec.
			uint temp = ((keys[2] & 0xFFFF) | 2);
			return (byte)((temp * (temp ^ 1)) >> 8);
		}

		/// <summary>
		/// Set the key schedule for encryption/decryption.
		/// </summary>
		/// <param name="keyData">The data used to set the keys from; must be exactly 12 bytes
		/// (three little-endian 32-bit keys, as produced by <see cref="PkzipClassic.GenerateKeys"/>).</param>
		/// <exception cref="ArgumentNullException">Thrown when <paramref name="keyData"/> is null.</exception>
		/// <exception cref="InvalidOperationException">Thrown when the key length is not 12.</exception>
		protected void SetKeys(byte[] keyData)
		{
			if (keyData == null)
			{
				throw new ArgumentNullException("keyData");
			}
			if (keyData.Length != 12)
			{
				throw new InvalidOperationException("Key length is not valid");
			}

			keys = new uint[3];
			keys[0] = (uint)((keyData[3] << 24) | (keyData[2] << 16) | (keyData[1] << 8) | keyData[0]);
			keys[1] = (uint)((keyData[7] << 24) | (keyData[6] << 16) | (keyData[5] << 8) | keyData[4]);
			keys[2] = (uint)((keyData[11] << 24) | (keyData[10] << 16) | (keyData[9] << 8) | keyData[8]);
		}

		/// <summary>
		/// Update the rolling encryption keys with one plaintext byte.
		/// Mirrors the per-byte step in <see cref="PkzipClassic.GenerateKeys"/>.
		/// </summary>
		/// <param name="ch">The plaintext byte to fold into the key state.</param>
		protected void UpdateKeys(byte ch)
		{
			keys[0] = Crc32.ComputeCrc32(keys[0], ch);
			keys[1] = keys[1] + (byte)keys[0];
			keys[1] = keys[1] * 134775813 + 1;
			keys[2] = Crc32.ComputeCrc32(keys[2], (byte)(keys[1] >> 24));
		}

		/// <summary>
		/// Reset the internal state, zeroing the key material.
		/// </summary>
		protected void Reset()
		{
			keys[0] = 0;
			keys[1] = 0;
			keys[2] = 0;
		}

		#region Instance Fields
		// The three rolling 32-bit keys; allocated by SetKeys.
		uint[] keys;
		#endregion
	}

	/// <summary>
	/// PkzipClassic CryptoTransform for encryption.
	/// Stream cipher with a 1-byte block size; each output byte is the input
	/// XORed with the keystream, and the keys are updated with the plaintext.
	/// </summary>
	class PkzipClassicEncryptCryptoTransform : PkzipClassicCryptoBase, ICryptoTransform
	{
		/// <summary>
		/// Initialise a new instance of <see cref="PkzipClassicEncryptCryptoTransform"></see>
		/// </summary>
		/// <param name="keyBlock">The 12 byte key block to use.</param>
		internal PkzipClassicEncryptCryptoTransform(byte[] keyBlock)
		{
			SetKeys(keyBlock);
		}

		#region ICryptoTransform Members

		/// <summary>
		/// Transforms the specified region of the specified byte array.
		/// </summary>
		/// <param name="inputBuffer">The input for which to compute the transform.</param>
		/// <param name="inputOffset">The offset into the byte array from which to begin using data.</param>
		/// <param name="inputCount">The number of bytes in the byte array to use as data.</param>
		/// <returns>The computed transform.</returns>
		public byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount)
		{
			byte[] result = new byte[inputCount];
			TransformBlock(inputBuffer, inputOffset, inputCount, result, 0);
			return result;
		}

		/// <summary>
		/// Transforms the specified region of the input byte array and copies
		/// the resulting transform to the specified region of the output byte array.
		/// </summary>
		/// <param name="inputBuffer">The input for which to compute the transform.</param>
		/// <param name="inputOffset">The offset into the input byte array from which to begin using data.</param>
		/// <param name="inputCount">The number of bytes in the input byte array to use as data.</param>
		/// <param name="outputBuffer">The output to which to write the transform.</param>
		/// <param name="outputOffset">The offset into the output byte array from which to begin writing data.</param>
		/// <returns>The number of bytes written.</returns>
		public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset)
		{
			for (int i = inputOffset; i < inputOffset + inputCount; ++i)
			{
				// Save the plaintext byte first: the key update must use the
				// plaintext even when input and output buffers alias.
				byte oldbyte = inputBuffer[i];
				outputBuffer[outputOffset++] = (byte)(inputBuffer[i] ^ TransformByte());
				UpdateKeys(oldbyte);
			}
			return inputCount;
		}

		/// <summary>
		/// Gets a value indicating whether the current transform can be reused.
		/// </summary>
		public bool CanReuseTransform
		{
			get { return true; }
		}

		/// <summary>
		/// Gets the size of the input data blocks in bytes.
		/// </summary>
		public int InputBlockSize
		{
			get { return 1; }
		}

		/// <summary>
		/// Gets the size of the output data blocks in bytes.
		/// </summary>
		public int OutputBlockSize
		{
			get { return 1; }
		}

		/// <summary>
		/// Gets a value indicating whether multiple blocks can be transformed.
		/// </summary>
		public bool CanTransformMultipleBlocks
		{
			get { return true; }
		}

		#endregion

		#region IDisposable Members

		/// <summary>
		/// Cleanup internal state, zeroing the key material.
		/// </summary>
		public void Dispose()
		{
			Reset();
		}

		#endregion
	}

	/// <summary>
	/// PkzipClassic CryptoTransform for decryption.
	/// Inverse of <see cref="PkzipClassicEncryptCryptoTransform"/>: the keys
	/// are updated with the recovered plaintext byte.
	/// </summary>
	class PkzipClassicDecryptCryptoTransform : PkzipClassicCryptoBase, ICryptoTransform
	{
		/// <summary>
		/// Initialise a new instance of <see cref="PkzipClassicDecryptCryptoTransform"></see>.
		/// </summary>
		/// <param name="keyBlock">The 12 byte key block to decrypt with.</param>
		internal PkzipClassicDecryptCryptoTransform(byte[] keyBlock)
		{
			SetKeys(keyBlock);
		}

		#region ICryptoTransform Members

		/// <summary>
		/// Transforms the specified region of the specified byte array.
		/// </summary>
		/// <param name="inputBuffer">The input for which to compute the transform.</param>
		/// <param name="inputOffset">The offset into the byte array from which to begin using data.</param>
		/// <param name="inputCount">The number of bytes in the byte array to use as data.</param>
		/// <returns>The computed transform.</returns>
		public byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount)
		{
			byte[] result = new byte[inputCount];
			TransformBlock(inputBuffer, inputOffset, inputCount, result, 0);
			return result;
		}

		/// <summary>
		/// Transforms the specified region of the input byte array and copies
		/// the resulting transform to the specified region of the output byte array.
		/// </summary>
		/// <param name="inputBuffer">The input for which to compute the transform.</param>
		/// <param name="inputOffset">The offset into the input byte array from which to begin using data.</param>
		/// <param name="inputCount">The number of bytes in the input byte array to use as data.</param>
		/// <param name="outputBuffer">The output to which to write the transform.</param>
		/// <param name="outputOffset">The offset into the output byte array from which to begin writing data.</param>
		/// <returns>The number of bytes written.</returns>
		public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset)
		{
			for (int i = inputOffset; i < inputOffset + inputCount; ++i)
			{
				// Decrypt first, then feed the recovered plaintext into the key update.
				byte newByte = (byte)(inputBuffer[i] ^ TransformByte());
				outputBuffer[outputOffset++] = newByte;
				UpdateKeys(newByte);
			}
			return inputCount;
		}

		/// <summary>
		/// Gets a value indicating whether the current transform can be reused.
		/// </summary>
		public bool CanReuseTransform
		{
			get { return true; }
		}

		/// <summary>
		/// Gets the size of the input data blocks in bytes.
		/// </summary>
		public int InputBlockSize
		{
			get { return 1; }
		}

		/// <summary>
		/// Gets the size of the output data blocks in bytes.
		/// </summary>
		public int OutputBlockSize
		{
			get { return 1; }
		}

		/// <summary>
		/// Gets a value indicating whether multiple blocks can be transformed.
		/// </summary>
		public bool CanTransformMultipleBlocks
		{
			get { return true; }
		}

		#endregion

		#region IDisposable Members

		/// <summary>
		/// Cleanup internal state, zeroing the key material.
		/// </summary>
		public void Dispose()
		{
			Reset();
		}

		#endregion
	}

	/// <summary>
	/// Defines a wrapper object to access the Pkzip algorithm.
	/// This class cannot be inherited.
	/// </summary>
	public sealed class PkzipClassicManaged : PkzipClassic
	{
		/// <summary>
		/// Get / set the applicable block size in bits.
		/// </summary>
		/// <remarks>The only valid block size is 8.</remarks>
		/// <exception cref="CryptographicException">Thrown when a value other than 8 is set.</exception>
		public override int BlockSize
		{
			get { return 8; }
			set
			{
				if (value != 8)
				{
					throw new CryptographicException("Block size is invalid");
				}
			}
		}

		/// <summary>
		/// Get an array of legal <see cref="KeySizes">key sizes.</see>
		/// The only legal key size is 96 bits (12 bytes).
		/// </summary>
		public override KeySizes[] LegalKeySizes
		{
			get
			{
				KeySizes[] keySizes = new KeySizes[1];
				keySizes[0] = new KeySizes(12 * 8, 12 * 8, 0);
				return keySizes;
			}
		}

		/// <summary>
		/// Generate an initial vector.
		/// </summary>
		/// <remarks>PkzipClassic uses no IV, so this is intentionally a no-op.</remarks>
		public override void GenerateIV()
		{
			// Do nothing.
		}

		/// <summary>
		/// Get an array of legal <see cref="KeySizes">block sizes</see>.
		/// The only legal block size is 8 bits.
		/// </summary>
		public override KeySizes[] LegalBlockSizes
		{
			get
			{
				KeySizes[] keySizes = new KeySizes[1];
				keySizes[0] = new KeySizes(1 * 8, 1 * 8, 0);
				return keySizes;
			}
		}

		/// <summary>
		/// Get / set the key value applicable.
		/// Getting with no key set generates a new random key first;
		/// both get and set copy the array, so callers cannot alias internal state.
		/// </summary>
		/// <exception cref="ArgumentNullException">Thrown when a null value is set.</exception>
		/// <exception cref="CryptographicException">Thrown when the value set is not 12 bytes long.</exception>
		public override byte[] Key
		{
			get
			{
				if (key_ == null)
				{
					GenerateKey();
				}
				return (byte[])key_.Clone();
			}
			set
			{
				if (value == null)
				{
					throw new ArgumentNullException("value");
				}
				if (value.Length != 12)
				{
					throw new CryptographicException("Key size is illegal");
				}
				key_ = (byte[])value.Clone();
			}
		}

		/// <summary>
		/// Generate a new random key.
		/// </summary>
		public override void GenerateKey()
		{
			key_ = new byte[12];
			// Use a cryptographically strong RNG for key material.
			// System.Random is a predictable PRNG and must not be used here.
			using (RandomNumberGenerator rng = RandomNumberGenerator.Create())
			{
				rng.GetBytes(key_);
			}
		}

		/// <summary>
		/// Create an encryptor.
		/// </summary>
		/// <param name="rgbKey">The key to use for this encryptor.</param>
		/// <param name="rgbIV">Initialisation vector for the new encryptor. Ignored; PkzipClassic uses no IV.</param>
		/// <returns>Returns a new PkzipClassic encryptor</returns>
		public override ICryptoTransform CreateEncryptor(
			byte[] rgbKey,
			byte[] rgbIV)
		{
			// NOTE: a null rgbKey deliberately causes a fresh random key to be
			// generated by the Key getter below.
			key_ = rgbKey;
			return new PkzipClassicEncryptCryptoTransform(Key);
		}

		/// <summary>
		/// Create a decryptor.
		/// </summary>
		/// <param name="rgbKey">Keys to use for this new decryptor.</param>
		/// <param name="rgbIV">Initialisation vector for the new decryptor. Ignored; PkzipClassic uses no IV.</param>
		/// <returns>Returns a new decryptor.</returns>
		public override ICryptoTransform CreateDecryptor(
			byte[] rgbKey,
			byte[] rgbIV)
		{
			key_ = rgbKey;
			return new PkzipClassicDecryptCryptoTransform(Key);
		}

		#region Instance Fields
		// Current 12-byte key; null until set or generated.
		byte[] key_;
		#endregion
	}
}
using System;
using System.Collections.Generic;
using System.Linq;
using UnityEditor;
using UnityEngine;

namespace UnityTest
{
	// Editor-side renderer for the integration test runner window: draws the
	// head panel, test list, options and filter UI, and tracks the current
	// test selection. Persisted options are stored in EditorPrefs under
	// "ITR-*" keys.
	[Serializable]
	public class IntegrationTestRunnerRenderer
	{
		// GUI styles shared by all instances; created lazily by the static
		// constructor (must run on the GUI thread, during OnGUI).
		private static class Styles
		{
			public static readonly GUIStyle selectedTestStyle;
			public static readonly GUIStyle testStyle;
			public static readonly GUIStyle iconStyle;
			public static GUIStyle buttonLeft;
			public static GUIStyle buttonMid;
			public static GUIStyle buttonRight;

			static Styles ()
			{
				testStyle = new GUIStyle (EditorStyles.label);
				// Selected rows render with a blue tint instead of a different background.
				selectedTestStyle = new GUIStyle (EditorStyles.label);
				selectedTestStyle.active.textColor = selectedTestStyle.normal.textColor = selectedTestStyle.onActive.textColor = new Color (0.3f, 0.5f, 0.85f);
				iconStyle = new GUIStyle(EditorStyles.label);
				iconStyle.fixedWidth = 24;
				// Built-in segmented-button styles ("buttonleft" etc.) from the editor skin.
				buttonLeft = GUI.skin.FindStyle (GUI.skin.button.name + "left");
				buttonMid = GUI.skin.FindStyle (GUI.skin.button.name + "mid");
				buttonRight = GUI.skin.FindStyle (GUI.skin.button.name + "right");
			}
		}

		// Callback supplied by the owning window; starts a run for a set of test GameObjects.
		private Action<IList<GameObject>> RunTest;
		private TestManager testManager;
		// Whether the details pane is currently visible (driven by selection state).
		private bool showDetails;

		#region runner options vars
		[SerializeField] private bool showOptions;
		[SerializeField] private bool showTestRunnerObjectOnScene = true;
		[SerializeField] private bool addNewGameObjectUnderSelectedTest = false;
		[SerializeField] private bool focusOnSelection = true;
		[SerializeField] private bool hideTestsInHierarchy;
		[SerializeField] private bool keepTestComponentObjectsOnTop = true;
		[SerializeField] internal bool blockUIWhenRunning = true;
		#endregion

		#region filter vars
		[SerializeField] private bool showAdvancedFilter;
		[SerializeField] private string filterString = "";
		[SerializeField] private bool showSucceededTest = true;
		[SerializeField] internal bool showFailedTest = true;
		[SerializeField] private bool showNotRunnedTest = true;
		[SerializeField] private bool showIgnoredTest = true;
		#endregion

		#region runner steering vars
		[SerializeField] private Vector2 testListScroll;
		// Set when the owning window should repaint outside the normal event flow.
		[SerializeField] public bool forceRepaint;
		private List<TestResult> selectedTests = new List<TestResult>();
		#endregion

		#region GUI Contents
		private readonly GUIContent guiOptionsHideLabel = new GUIContent ("Hide", Icons.gearImg);
		private readonly GUIContent guiOptionsShowLabel = new GUIContent ("Options", Icons.gearImg);
		private readonly GUIContent guiCreateNewTest = new GUIContent (Icons.plusImg, "Create new test");
		private readonly GUIContent guiRunSelectedTests = new GUIContent (Icons.runImg, "Run selected test(s)");
		private readonly GUIContent guiRunAllTests = new GUIContent (Icons.runAllImg, "Run all tests");
		private readonly GUIContent guiAdvancedFilterShow = new GUIContent ("Advanced");
		private readonly GUIContent guiAdvancedFilterHide = new GUIContent ("Hide");
		private readonly GUIContent guiTimeoutIcon = new GUIContent (Icons.stopwatchImg, "Timeout");
		private readonly GUIContent guiRunSelected = new GUIContent ("Run selected");
		private readonly GUIContent guiRun = new GUIContent ("Run");
		private readonly GUIContent guiRunAll = new GUIContent ("Run All");
		private readonly GUIContent guiRunAllIncludingIgnored = new GUIContent ("Run All (include ignored)");
		private readonly GUIContent guiDelete = new GUIContent ("Delete");
		private readonly GUIContent guiAddGOUderTest = new GUIContent ("Add GOs under test", "Add new GameObject under selected test");
		private readonly GUIContent guiBlockUI = new GUIContent ("Block UI when running", "Block UI when running tests");
		private readonly GUIContent guiHideTestInHierarchy = new GUIContent ("Hide tests in hierarchy", "Hide tests in hierarchy");
		private readonly GUIContent guiHideTestRunner = new GUIContent ("Hide Test Runner", "Hides Test Runner object in hierarchy");
		#endregion

		// Creates the renderer and restores persisted options. The first
		// EditorPrefs key acts as a sentinel for all of them: either all
		// options were saved previously, or the compiled defaults are kept.
		public IntegrationTestRunnerRenderer (Action<IList<GameObject>> RunTest)
		{
			testManager = new TestManager ();
			this.RunTest = RunTest;

			if (EditorPrefs.HasKey ("ITR-addNewGameObjectUnderSelectedTest"))
			{
				addNewGameObjectUnderSelectedTest = EditorPrefs.GetBool ("ITR-addNewGameObjectUnderSelectedTest");
				focusOnSelection = EditorPrefs.GetBool ("ITR-focusOnSelection");
				hideTestsInHierarchy = EditorPrefs.GetBool ("ITR-hideTestsInHierarchy");
				keepTestComponentObjectsOnTop = EditorPrefs.GetBool ("ITR-keepTestComponentObjectsOnTop");
				showOptions = EditorPrefs.GetBool ("ITR-showOptions");
				showTestRunnerObjectOnScene = EditorPrefs.GetBool ("ITR-showTestRunnerObjectOnScene");
				blockUIWhenRunning = EditorPrefs.GetBool ("ITR-blockUIWhenRunning");
				showAdvancedFilter = EditorPrefs.GetBool ("ITR-showAdvancedFilter");
				filterString = EditorPrefs.GetString ("ITR-filterString");
				showSucceededTest = EditorPrefs.GetBool ("ITR-showSucceededTest");
				showFailedTest = EditorPrefs.GetBool ("ITR-showFailedTest");
				showIgnoredTest = EditorPrefs.GetBool ("ITR-showIgnoredTest");
				showNotRunnedTest = EditorPrefs.GetBool ("ITR-showNotRunnedTest");
			}
		}

		// Persists all options and filter settings to EditorPrefs.
		private void SaveSettings()
		{
			EditorPrefs.SetBool("ITR-addNewGameObjectUnderSelectedTest", addNewGameObjectUnderSelectedTest);
			EditorPrefs.SetBool("ITR-focusOnSelection", focusOnSelection);
			EditorPrefs.SetBool("ITR-hideTestsInHierarchy", hideTestsInHierarchy);
			EditorPrefs.SetBool("ITR-keepTestComponentObjectsOnTop", keepTestComponentObjectsOnTop);
			EditorPrefs.SetBool("ITR-showOptions", showOptions);
			EditorPrefs.SetBool("ITR-showTestRunnerObjectOnScene", showTestRunnerObjectOnScene);
			EditorPrefs.SetBool("ITR-blockUIWhenRunning", blockUIWhenRunning);
			EditorPrefs.SetBool ("ITR-showAdvancedFilter", showAdvancedFilter);
			EditorPrefs.SetString ("ITR-filterString", filterString);
			EditorPrefs.SetBool ("ITR-showSucceededTest", showSucceededTest);
			EditorPrefs.SetBool ("ITR-showFailedTest", showFailedTest);
			EditorPrefs.SetBool ("ITR-showIgnoredTest", showIgnoredTest);
			EditorPrefs.SetBool ("ITR-showNotRunnedTest", showNotRunnedTest);
		}

		// Draws a single test row: result icon + name, left-click selection,
		// right-click context menu, and a pulsing alpha while the test runs.
		private void DrawTest (TestResult testInfo)
		{
			EditorGUIUtility.SetIconSize (new Vector2 (16, 16));
			Color tempColor = GUI.color;
			if (testInfo.isRunning)
			{
				// Pulse the row alpha between 0.4 and 1.0 while running.
				var frame = Mathf.Abs(Mathf.Cos (Time.realtimeSinceStartup*4)) * 0.6f + 0.4f;
				GUI.color = new Color (1, 1, 1, frame);
			}

			var label = new GUIContent (testInfo.name, GetIconBasedOnResultType (testInfo).image);
			var labelRect = GUILayoutUtility.GetRect (label, EditorStyles.label, GUILayout.ExpandWidth (true));
			if (labelRect.Contains (Event.current.mousePosition)
				&& Event.current.type == EventType.MouseDown && Event.current.button == 0)
			{
				SelectTest (testInfo);
			}
			else if (labelRect.Contains (Event.current.mousePosition)
				&& Event.current.type == EventType.ContextClick)
			{
				// Consume the event so the default context menu does not also appear.
				Event.current.Use ();
				DrawContextTestMenu (testInfo);
			}
			EditorGUI.LabelField (labelRect, label, selectedTests.Contains (testInfo) ? Styles.selectedTestStyle : Styles.testStyle);

			if (testInfo.isRunning) GUI.color = tempColor;
			EditorGUIUtility.SetIconSize (Vector2.zero);
		}

		// Updates the selection set honouring ctrl (toggle) and shift (range)
		// modifiers, then mirrors the selection into the Unity Selection.
		private void SelectTest (TestResult testToSelect)
		{
			if (!Event.current.control && !Event.current.shift)
				selectedTests.Clear();
			if (Event.current.control && selectedTests.Contains (testToSelect))
				selectedTests.Remove (testToSelect);
			else if (Event.current.shift && selectedTests.Any ())
			{
				// Range-select between the existing selection and the clicked test.
				var tests = testManager.GetTestsToSelect(selectedTests, testToSelect);
				selectedTests.Clear ();
				selectedTests.AddRange (tests);
			}
			else
				selectedTests.Add (testToSelect);

			if (!EditorApplication.isPlayingOrWillChangePlaymode && selectedTests.Count == 1)
			{
				var selectedTest = selectedTests.Single ();
				TestManager.SelectInHierarchy(selectedTest.go, hideTestsInHierarchy);
				EditorApplication.RepaintHierarchyWindow ();
			}
			Selection.objects = selectedTests.Select(result => result.go).ToArray();
			forceRepaint = true;
			// Drop keyboard focus so a focused text field does not swallow input.
			GUI.FocusControl("");
		}

		// Maps a test result (or lack of one) to the icon shown in the list
		// and in the hierarchy overlay. Running or unknown tests get the
		// "unknown" icon; ignored-but-not-run tests get the "ignored" icon.
		private GUIContent GetIconBasedOnResultType (TestResult result)
		{
			if (result == null) return Icons.guiUnknownImg;
			if (result.isRunning) return Icons.guiUnknownImg;
			if (result.resultType == TestResult.ResultType.NotRun && result.TestComponent.ignored)
				return Icons.guiIgnoreImg;
			switch (result.resultType)
			{
				case TestResult.ResultType.Success:
					return Icons.guiSuccessImg;
				case TestResult.ResultType.Timeout:
				case TestResult.ResultType.Failed:
				case TestResult.ResultType.FailedException:
					return Icons.guiFailImg;
				case TestResult.ResultType.Ignored:
					return Icons.guiIgnoreImg;
				case TestResult.ResultType.NotRun:
				default:
					return Icons.guiUnknownImg;
			}
		}

		// Draws the top panel: title with current scene name, run/run-selected/
		// create buttons, options toggle, and the filter row.
		public void PrintHeadPanel (bool isRunning)
		{
			// Strip path and extension from the current scene for the title.
			var sceneName = "";
			if (!string.IsNullOrEmpty (EditorApplication.currentScene))
			{
				sceneName = EditorApplication.currentScene.Substring (EditorApplication.currentScene.LastIndexOf ('/') + 1);
				sceneName = sceneName.Substring (0, sceneName.LastIndexOf ('.'));
			}
			GUILayout.Label ("Integration Tests (" + sceneName + ")", EditorStyles.boldLabel);

			EditorGUILayout.BeginHorizontal ();
			var layoutOptions = new [] {
				GUILayout.Height(24),
				GUILayout.Width(32),
			};
			// Buttons still render while a run is in progress but are inert (&& !isRunning).
			if (GUILayout.Button (guiRunAllTests, Styles.buttonLeft, layoutOptions ) && !isRunning)
			{
				RunTest (GetVisibleNotIgnoredTests ());
			}
			if (GUILayout.Button(guiRunSelectedTests, Styles.buttonMid, layoutOptions ) && !isRunning)
			{
				RunTest(selectedTests.Select (t=>t.go).ToList ());
			}
			if (GUILayout.Button (guiCreateNewTest, Styles.buttonRight, layoutOptions ) && !isRunning)
			{
				SelectTest (testManager.AddTest ());
			}
			GUILayout.FlexibleSpace ();
			if (GUILayout.Button (showOptions ? guiOptionsHideLabel : guiOptionsShowLabel, GUILayout.Height (24), GUILayout.Width (80)))
			{
				showOptions = !showOptions;
				SaveSettings ();
			}
			EditorGUILayout.EndHorizontal ();

			if(showOptions) PrintOptions();

			EditorGUILayout.BeginHorizontal ();
			EditorGUILayout.LabelField ("Filter:", GUILayout.Width (35));
			EditorGUI.BeginChangeCheck ();
			filterString = EditorGUILayout.TextField (filterString);
			if(EditorGUI.EndChangeCheck ()) SaveSettings ();
			if (GUILayout.Button (showAdvancedFilter ? guiAdvancedFilterHide : guiAdvancedFilterShow, GUILayout.Width (80)))
			{
				showAdvancedFilter = !showAdvancedFilter;
				SaveSettings ();
			}
			EditorGUILayout.EndHorizontal ();

			if (showAdvancedFilter) PrintAdvancedFilter ();

			GUILayout.Space (5);
		}

		// Draws the two-column advanced filter toggles; persists on any change.
		private void PrintAdvancedFilter ()
		{
			EditorGUILayout.BeginHorizontal ();
			EditorGUILayout.BeginVertical ();
			EditorGUI.BeginChangeCheck ();
			showSucceededTest = EditorGUILayout.Toggle ("Show succeeded", showSucceededTest);
			showFailedTest = EditorGUILayout.Toggle ("Show failed", showFailedTest);
			EditorGUILayout.EndVertical ();
			EditorGUILayout.BeginVertical ();
			showIgnoredTest = EditorGUILayout.Toggle ("Show ignored", showIgnoredTest);
			showNotRunnedTest = EditorGUILayout.Toggle ("Show not runed", showNotRunnedTest);
			if(EditorGUI.EndChangeCheck ()) SaveSettings ();
			EditorGUILayout.EndVertical ();
			EditorGUILayout.EndHorizontal ();
		}

		// Draws the scrollable list of filtered test rows; timed-out tests get
		// an extra stopwatch icon after the row.
		public void PrintTestList ()
		{
			GUILayout.Box ("", new[] {GUILayout.ExpandWidth (true), GUILayout.Height (1)});
			GUILayout.Space (5);
			testListScroll = EditorGUILayout.BeginScrollView (testListScroll, new[] {GUILayout.ExpandHeight (true)});
			foreach (var testInfo in GetFilteredTestsResults ())
			{
				EditorGUILayout.BeginHorizontal();
				DrawTest (testInfo);
				if (testInfo.resultType == TestResult.ResultType.Timeout)
				{
					GUILayout.Label(guiTimeoutIcon, GUILayout.Width(24) );
					GUILayout.FlexibleSpace();
				}
				EditorGUILayout.EndHorizontal ();
			}
			EditorGUILayout.EndScrollView ();
		}

		// All test results that pass the current name and result-type filters.
		private List<TestResult> GetFilteredTestsResults ()
		{
			return testManager.GetAllTestsResults ().Where (IsNotFiltered).ToList ();
		}

		// Draws the details pane (messages + stack trace) for a single
		// selected test. Visibility is only toggled during the Layout event so
		// the GUI structure stays consistent within one frame.
		public void PrintSelectedTestDetails ()
		{
			if (Event.current.type == EventType.Layout)
			{
				if (showDetails != selectedTests.Any ())
					showDetails = !showDetails;
			}
			if (!showDetails) return;

			GUILayout.Box ("", new[] { GUILayout.ExpandWidth (true), GUILayout.Height (1) });
			EditorGUILayout.LabelField ("Test details");
			string messages = "", stacktrace = "";
			if (selectedTests.Count == 1)
			{
				var test = selectedTests.Single();
				if (test != null)
				{
					messages = test.messages;
					stacktrace = test.stacktrace;
				}
			}
			EditorGUILayout.SelectableLabel (messages, EditorStyles.miniLabel, GUILayout.MaxHeight(50));
			EditorGUILayout.SelectableLabel(stacktrace, EditorStyles.miniLabel, GUILayout.MaxHeight(50));
		}

		// Shows the per-test context menu (run/run all/delete). Disabled while
		// entering or in play mode.
		private void DrawContextTestMenu (TestResult test)
		{
			if (EditorApplication.isPlayingOrWillChangePlaymode) return;

			var m = new GenericMenu ();
			// Capture the test in a local so the menu callbacks close over the
			// right instance.
			var localTest = test;
			if(selectedTests.Count > 1)
				m.AddItem(guiRunSelected, false, data => RunTest(selectedTests.Select (t=>t.go).ToList ()), "");
			m.AddItem (guiRun, false, data => RunTest(new List<GameObject> { localTest.go}), "");
			m.AddItem (guiRunAll, false, data => RunTest (GetVisibleNotIgnoredTests ()), "");
			m.AddItem (guiRunAllIncludingIgnored, false, data => RunTest (GetVisibleTestsIncludingIgnored ()), "");
			m.AddSeparator ("");
			m.AddItem (guiDelete, false, data => RemoveTest (localTest), "");
			m.ShowAsContext ();
		}

		// Deletes the given test — or the whole multi-selection if more than
		// one test is selected — after a confirmation dialog, with undo support.
		private void RemoveTest (TestResult test)
		{
			var testsToDelete = new List<TestResult> { test };
			if (selectedTests.Count > 1) testsToDelete = selectedTests;
			if (EditorUtility.DisplayDialog ("",
				"Are you sure you want to delete " + ((testsToDelete.Count > 1) ? (testsToDelete.Count + " tests?"):(testsToDelete.Single().name + "?")),
				"Delete",
				"Cancel"))
			{
				foreach (var t in testsToDelete)
				{
					// Unity 4.3+ has per-object destroy undo; older versions only
					// support whole-scene undo registration.
#if !UNITY_4_0 && !UNITY_4_0_1 && !UNITY_4_1 && !UNITY_4_2
					Undo.DestroyObjectImmediate ((t as TestResult).go);
#else
					Undo.RegisterSceneUndo ("Destroy Objects");
					GameObject.DestroyImmediate (t.go);
#endif
				}
				testManager.DeleteTest(testsToDelete);
				selectedTests.Clear ();
				forceRepaint = true;
			}
		}

		// Hierarchy-window hook: overlays the result icon on test objects and
		// opens the context menu on right-click.
		public void OnHierarchyWindowItemOnGui (int id, Rect rect)
		{
			var o = EditorUtility.InstanceIDToObject (id);
			if (o is GameObject)
			{
				var go = o as GameObject;
				if (TestManager.AnyTestsOnScene() && go.GetComponent<TestComponent>() != null)
				{
					if (!EditorApplication.isPlayingOrWillChangePlaymode
						&& rect.Contains (Event.current.mousePosition)
						&& Event.current.type == EventType.MouseDown && Event.current.button == 1)
					{
						DrawContextTestMenu (testManager.GetResultFor(go));
					}
					EditorGUIUtility.SetIconSize (new Vector2 (15, 15));
					EditorGUI.LabelField (new Rect (rect.xMax - 18, rect.yMin - 2, rect.width, rect.height), GetIconBasedOnResultType (testManager.GetResultFor(go)));
					EditorGUIUtility.SetIconSize (Vector2.zero);
				}
			}
		}

		// Draws the options panel. Uses nested change checks: inner ones react
		// to individual toggles, the outer one persists any change at the end.
		public void PrintOptions ()
		{
			EditorGUILayout.BeginVertical ();
			EditorGUI.BeginChangeCheck();
			var style = EditorStyles.toggle;

			//Temporary disabled
			//focusOnSelection = EditorGUILayoutExt.ToggleLeft((new GUIContent("Focus on selected test", "Focus on selected test")),
			//												focusOnSelection,
			//												style);
			addNewGameObjectUnderSelectedTest = EditorGUILayout.Toggle(guiAddGOUderTest, addNewGameObjectUnderSelectedTest, style);
			blockUIWhenRunning = EditorGUILayout.Toggle(guiBlockUI, blockUIWhenRunning, style);

			EditorGUI.BeginChangeCheck ();
			hideTestsInHierarchy = EditorGUILayout.Toggle(guiHideTestInHierarchy, hideTestsInHierarchy, style);
			if (EditorGUI.EndChangeCheck ())
			{
				TestManager.ShowOrHideTestInHierarchy(hideTestsInHierarchy);
				if (selectedTests.Count == 1)
					TestManager.SelectInHierarchy(selectedTests.Single().go, hideTestsInHierarchy);
				EditorApplication.RepaintHierarchyWindow();
			}

			EditorGUI.BeginChangeCheck();
			// The option is "show", the toggle is "hide" — hence the double negation.
			showTestRunnerObjectOnScene = !EditorGUILayout.Toggle(guiHideTestRunner, !showTestRunnerObjectOnScene, style);
			if (EditorGUI.EndChangeCheck ())
				ShowTestRunnerObjectOnScene(showTestRunnerObjectOnScene);

			if (EditorGUI.EndChangeCheck ()) SaveSettings ();
			EditorGUILayout.EndVertical ();
		}

		// Shows or hides the TestRunner object in the hierarchy; it always
		// stays non-editable.
		private void ShowTestRunnerObjectOnScene (bool show)
		{
			var tr = TestRunner.GetTestRunner ();
			if (show)
				tr.gameObject.hideFlags = 0;
			else
				tr.gameObject.hideFlags |= HideFlags.HideInHierarchy;
			tr.hideFlags = HideFlags.NotEditable;
			EditorUtility.SetDirty(tr.gameObject);
		}

		// Replaces the current selection with the given test GameObjects and
		// optionally frames them in the scene view.
		public void SelectInHierarchy (IEnumerable<GameObject> go)
		{
			selectedTests.Clear();
			selectedTests.AddRange(go.Select (o=>testManager.GetResultFor (o)));
			if (selectedTests.Count () == 1)
				TestManager.SelectInHierarchy (selectedTests.Single ().go, hideTestsInHierarchy);
			if (focusOnSelection && SceneView.currentDrawingSceneView != null)
			{
				SceneView.lastActiveSceneView.FrameSelected ();
			}
		}

		// Clears the selection and the cached test list (e.g. on scene change).
		public void InvalidateTestList ()
		{
			selectedTests.Clear ();
			testManager.ClearTestList ();
		}

		// Forwards fresh results from a run to the test manager.
		public void UpdateResults (List<TestResult> testToRun)
		{
			testManager.UpdateResults (testToRun);
		}

		// Reacts to hierarchy changes while idle: reparents newly created
		// objects under the selected test, keeps test objects at the hierarchy
		// root, and mirrors multi-selection of test objects into this renderer.
		public void OnHierarchyChange(bool isRunning)
		{
			if (isRunning || EditorApplication.isPlayingOrWillChangePlaymode) return;

			if (addNewGameObjectUnderSelectedTest
				&& Selection.activeGameObject != null)
			{
				var go = Selection.activeGameObject;
				if (selectedTests.Count == 1
					&& go.transform.parent == null
					&& go.GetComponent<TestComponent>() == null
					&& go.GetComponent<TestRunner>() == null)
				{
					go.transform.parent = selectedTests.Single().go.transform;
				}
			}

			if (keepTestComponentObjectsOnTop)
			{
				foreach (var test in TestManager.GetAllTestGameObjects())
				{
					if (test.transform.parent != null)
					{
						test.transform.parent = null;
						Debug.LogWarning("Tests need to be on top of hierarchy.");
					}
				}
			}

			if (Selection.gameObjects.Count() > 1
				&& Selection.gameObjects.All(o => o is GameObject && o.GetComponent<TestComponent>()))
			{
				selectedTests.Clear ();
				selectedTests.AddRange (Selection.gameObjects.Select (go=>testManager.GetResultFor (go)));
				forceRepaint = true;
			}
		}

		// Re-applies hierarchy selection of the single selected test after a run.
		public void OnTestRunFinished ()
		{
			if(selectedTests.Count==1)
				TestManager.SelectInHierarchy(selectedTests.Single().go, hideTestsInHierarchy);
		}

		// Results for the given set of test GameObjects.
		public List<TestResult> GetTestResultsForGO (IList<GameObject> tests)
		{
			return testManager.GetAllTestsResults ().Where (t => tests.Contains (t.go)).ToList ();
		}

		// True when the test passes the current name substring filter and the
		// per-result-type visibility toggles.
		private bool IsNotFiltered (TestResult testInfo)
		{
			if (!testInfo.name.ToLower ().Contains (filterString.Trim ().ToLower ())) return false;
			if (!showSucceededTest && testInfo.resultType == TestResult.ResultType.Success) return false;
			if (!showFailedTest
				&& (testInfo.resultType == TestResult.ResultType.Failed
					|| testInfo.resultType == TestResult.ResultType.FailedException
					|| testInfo.resultType == TestResult.ResultType.Timeout)) return false;
			if (!showIgnoredTest
				&& (testInfo.resultType == TestResult.ResultType.Ignored || testInfo.TestComponent.ignored)) return false;
			if (!showNotRunnedTest && testInfo.resultType == TestResult.ResultType.NotRun) return false;
			return true;
		}

		// GameObjects of all currently visible, non-ignored tests.
		public List<GameObject> GetVisibleNotIgnoredTests ()
		{
			return testManager.GetAllTestsResults ().Where (tr => tr.TestComponent.ignored != true).Where (IsNotFiltered).Select (result => result.go).ToList ();
		}

		// GameObjects of all currently visible tests, ignored ones included.
		public List<GameObject> GetVisibleTestsIncludingIgnored ()
		{
			return testManager.GetAllTestsResults ().Where (IsNotFiltered).Select (result => result.go).ToList ();
		}
	}
}
/* * Copyright 2021 Google LLC All Rights Reserved. * Use of this source code is governed by a BSD-style * license that can be found in the LICENSE file or at * https://developers.google.com/open-source/licenses/bsd */ // <auto-generated> // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/api/quota.proto // </auto-generated> #pragma warning disable 1591, 0612, 3021 #region Designer generated code using pb = global::Google.Protobuf; using pbc = global::Google.Protobuf.Collections; using pbr = global::Google.Protobuf.Reflection; using scg = global::System.Collections.Generic; namespace Google.Api { /// <summary>Holder for reflection information generated from google/api/quota.proto</summary> public static partial class QuotaReflection { #region Descriptor /// <summary>File descriptor for google/api/quota.proto</summary> public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; static QuotaReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "ChZnb29nbGUvYXBpL3F1b3RhLnByb3RvEgpnb29nbGUuYXBpIl0KBVF1b3Rh", "EiYKBmxpbWl0cxgDIAMoCzIWLmdvb2dsZS5hcGkuUXVvdGFMaW1pdBIsCgxt", "ZXRyaWNfcnVsZXMYBCADKAsyFi5nb29nbGUuYXBpLk1ldHJpY1J1bGUikQEK", "Ck1ldHJpY1J1bGUSEAoIc2VsZWN0b3IYASABKAkSPQoMbWV0cmljX2Nvc3Rz", "GAIgAygLMicuZ29vZ2xlLmFwaS5NZXRyaWNSdWxlLk1ldHJpY0Nvc3RzRW50", "cnkaMgoQTWV0cmljQ29zdHNFbnRyeRILCgNrZXkYASABKAkSDQoFdmFsdWUY", "AiABKAM6AjgBIpUCCgpRdW90YUxpbWl0EgwKBG5hbWUYBiABKAkSEwoLZGVz", "Y3JpcHRpb24YAiABKAkSFQoNZGVmYXVsdF9saW1pdBgDIAEoAxIRCgltYXhf", "bGltaXQYBCABKAMSEQoJZnJlZV90aWVyGAcgASgDEhAKCGR1cmF0aW9uGAUg", "ASgJEg4KBm1ldHJpYxgIIAEoCRIMCgR1bml0GAkgASgJEjIKBnZhbHVlcxgK", "IAMoCzIiLmdvb2dsZS5hcGkuUXVvdGFMaW1pdC5WYWx1ZXNFbnRyeRIUCgxk", "aXNwbGF5X25hbWUYDCABKAkaLQoLVmFsdWVzRW50cnkSCwoDa2V5GAEgASgJ", "Eg0KBXZhbHVlGAIgASgDOgI4AUJsCg5jb20uZ29vZ2xlLmFwaUIKUXVvdGFQ", "cm90b1ABWkVnb29nbGUuZ29sYW5nLm9yZy9nZW5wcm90by9nb29nbGVhcGlz", 
"L2FwaS9zZXJ2aWNlY29uZmlnO3NlcnZpY2Vjb25maWeiAgRHQVBJYgZwcm90", "bzM=")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Api.Quota), global::Google.Api.Quota.Parser, new[]{ "Limits", "MetricRules" }, null, null, null, null), new pbr::GeneratedClrTypeInfo(typeof(global::Google.Api.MetricRule), global::Google.Api.MetricRule.Parser, new[]{ "Selector", "MetricCosts" }, null, null, null, new pbr::GeneratedClrTypeInfo[] { null, }), new pbr::GeneratedClrTypeInfo(typeof(global::Google.Api.QuotaLimit), global::Google.Api.QuotaLimit.Parser, new[]{ "Name", "Description", "DefaultLimit", "MaxLimit", "FreeTier", "Duration", "Metric", "Unit", "Values", "DisplayName" }, null, null, null, new pbr::GeneratedClrTypeInfo[] { null, }) })); } #endregion } #region Messages /// <summary> /// Quota configuration helps to achieve fairness and budgeting in service /// usage. /// /// The metric based quota configuration works this way: /// - The service configuration defines a set of metrics. /// - For API calls, the quota.metric_rules maps methods to metrics with /// corresponding costs. /// - The quota.limits defines limits on the metrics, which will be used for /// quota checks at runtime. /// /// An example quota configuration in yaml format: /// /// quota: /// limits: /// /// - name: apiWriteQpsPerProject /// metric: library.googleapis.com/write_calls /// unit: "1/min/{project}" # rate limit for consumer projects /// values: /// STANDARD: 10000 /// /// # The metric rules bind all methods to the read_calls metric, /// # except for the UpdateBook and DeleteBook methods. These two methods /// # are mapped to the write_calls metric, with the UpdateBook method /// # consuming at twice rate as the DeleteBook method. 
/// metric_rules: /// - selector: "*" /// metric_costs: /// library.googleapis.com/read_calls: 1 /// - selector: google.example.library.v1.LibraryService.UpdateBook /// metric_costs: /// library.googleapis.com/write_calls: 2 /// - selector: google.example.library.v1.LibraryService.DeleteBook /// metric_costs: /// library.googleapis.com/write_calls: 1 /// /// Corresponding Metric definition: /// /// metrics: /// - name: library.googleapis.com/read_calls /// display_name: Read requests /// metric_kind: DELTA /// value_type: INT64 /// /// - name: library.googleapis.com/write_calls /// display_name: Write requests /// metric_kind: DELTA /// value_type: INT64 /// </summary> public sealed partial class Quota : pb::IMessage<Quota> #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE , pb::IBufferMessage #endif { private static readonly pb::MessageParser<Quota> _parser = new pb::MessageParser<Quota>(() => new Quota()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pb::MessageParser<Quota> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pbr::MessageDescriptor Descriptor { get { return global::Google.Api.QuotaReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public Quota() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public Quota(Quota other) : this() { limits_ = other.limits_.Clone(); 
metricRules_ = other.metricRules_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public Quota Clone() { return new Quota(this); } /// <summary>Field number for the "limits" field.</summary> public const int LimitsFieldNumber = 3; private static readonly pb::FieldCodec<global::Google.Api.QuotaLimit> _repeated_limits_codec = pb::FieldCodec.ForMessage(26, global::Google.Api.QuotaLimit.Parser); private readonly pbc::RepeatedField<global::Google.Api.QuotaLimit> limits_ = new pbc::RepeatedField<global::Google.Api.QuotaLimit>(); /// <summary> /// List of `QuotaLimit` definitions for the service. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public pbc::RepeatedField<global::Google.Api.QuotaLimit> Limits { get { return limits_; } } /// <summary>Field number for the "metric_rules" field.</summary> public const int MetricRulesFieldNumber = 4; private static readonly pb::FieldCodec<global::Google.Api.MetricRule> _repeated_metricRules_codec = pb::FieldCodec.ForMessage(34, global::Google.Api.MetricRule.Parser); private readonly pbc::RepeatedField<global::Google.Api.MetricRule> metricRules_ = new pbc::RepeatedField<global::Google.Api.MetricRule>(); /// <summary> /// List of `MetricRule` definitions, each one mapping a selected method to one /// or more metrics. 
/// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public pbc::RepeatedField<global::Google.Api.MetricRule> MetricRules { get { return metricRules_; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override bool Equals(object other) { return Equals(other as Quota); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public bool Equals(Quota other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if(!limits_.Equals(other.limits_)) return false; if(!metricRules_.Equals(other.metricRules_)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override int GetHashCode() { int hash = 1; hash ^= limits_.GetHashCode(); hash ^= metricRules_.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void WriteTo(pb::CodedOutputStream output) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE output.WriteRawMessage(this); #else limits_.WriteTo(output, _repeated_limits_codec); metricRules_.WriteTo(output, _repeated_metricRules_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE [global::System.Diagnostics.DebuggerNonUserCodeAttribute] 
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalWriteTo(ref pb::WriteContext output) { limits_.WriteTo(ref output, _repeated_limits_codec); metricRules_.WriteTo(ref output, _repeated_metricRules_codec); if (_unknownFields != null) { _unknownFields.WriteTo(ref output); } } #endif [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public int CalculateSize() { int size = 0; size += limits_.CalculateSize(_repeated_limits_codec); size += metricRules_.CalculateSize(_repeated_metricRules_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(Quota other) { if (other == null) { return; } limits_.Add(other.limits_); metricRules_.Add(other.metricRules_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(pb::CodedInputStream input) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE input.ReadRawMessage(this); #else uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 26: { limits_.AddEntriesFrom(input, _repeated_limits_codec); break; } case 34: { metricRules_.AddEntriesFrom(input, _repeated_metricRules_codec); break; } } } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalMergeFrom(ref pb::ParseContext input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = 
pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, ref input); break; case 26: { limits_.AddEntriesFrom(ref input, _repeated_limits_codec); break; } case 34: { metricRules_.AddEntriesFrom(ref input, _repeated_metricRules_codec); break; } } } } #endif } /// <summary> /// Bind API methods to metrics. Binding a method to a metric causes that /// metric's configured quota behaviors to apply to the method call. /// </summary> public sealed partial class MetricRule : pb::IMessage<MetricRule> #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE , pb::IBufferMessage #endif { private static readonly pb::MessageParser<MetricRule> _parser = new pb::MessageParser<MetricRule>(() => new MetricRule()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pb::MessageParser<MetricRule> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pbr::MessageDescriptor Descriptor { get { return global::Google.Api.QuotaReflection.Descriptor.MessageTypes[1]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public MetricRule() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public MetricRule(MetricRule other) : this() { selector_ = other.selector_; metricCosts_ = other.metricCosts_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] 
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public MetricRule Clone() { return new MetricRule(this); } /// <summary>Field number for the "selector" field.</summary> public const int SelectorFieldNumber = 1; private string selector_ = ""; /// <summary> /// Selects the methods to which this rule applies. /// /// Refer to [selector][google.api.DocumentationRule.selector] for syntax details. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public string Selector { get { return selector_; } set { selector_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "metric_costs" field.</summary> public const int MetricCostsFieldNumber = 2; private static readonly pbc::MapField<string, long>.Codec _map_metricCosts_codec = new pbc::MapField<string, long>.Codec(pb::FieldCodec.ForString(10, ""), pb::FieldCodec.ForInt64(16, 0L), 18); private readonly pbc::MapField<string, long> metricCosts_ = new pbc::MapField<string, long>(); /// <summary> /// Metrics to update when the selected methods are called, and the associated /// cost applied to each metric. /// /// The key of the map is the metric name, and the values are the amount /// increased for the metric against which the quota limits are defined. /// The value must not be negative. 
/// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public pbc::MapField<string, long> MetricCosts { get { return metricCosts_; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override bool Equals(object other) { return Equals(other as MetricRule); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public bool Equals(MetricRule other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (Selector != other.Selector) return false; if (!MetricCosts.Equals(other.MetricCosts)) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override int GetHashCode() { int hash = 1; if (Selector.Length != 0) hash ^= Selector.GetHashCode(); hash ^= MetricCosts.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void WriteTo(pb::CodedOutputStream output) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE output.WriteRawMessage(this); #else if (Selector.Length != 0) { output.WriteRawTag(10); output.WriteString(Selector); } metricCosts_.WriteTo(output, _map_metricCosts_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE [global::System.Diagnostics.DebuggerNonUserCodeAttribute] 
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalWriteTo(ref pb::WriteContext output) { if (Selector.Length != 0) { output.WriteRawTag(10); output.WriteString(Selector); } metricCosts_.WriteTo(ref output, _map_metricCosts_codec); if (_unknownFields != null) { _unknownFields.WriteTo(ref output); } } #endif [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public int CalculateSize() { int size = 0; if (Selector.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Selector); } size += metricCosts_.CalculateSize(_map_metricCosts_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(MetricRule other) { if (other == null) { return; } if (other.Selector.Length != 0) { Selector = other.Selector; } metricCosts_.Add(other.metricCosts_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(pb::CodedInputStream input) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE input.ReadRawMessage(this); #else uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { Selector = input.ReadString(); break; } case 18: { metricCosts_.AddEntriesFrom(input, _map_metricCosts_codec); break; } } } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalMergeFrom(ref pb::ParseContext input) { uint tag; while ((tag = input.ReadTag()) != 0) { 
switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, ref input); break; case 10: { Selector = input.ReadString(); break; } case 18: { metricCosts_.AddEntriesFrom(ref input, _map_metricCosts_codec); break; } } } } #endif } /// <summary> /// `QuotaLimit` defines a specific limit that applies over a specified duration /// for a limit type. There can be at most one limit for a duration and limit /// type combination defined within a `QuotaGroup`. /// </summary> public sealed partial class QuotaLimit : pb::IMessage<QuotaLimit> #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE , pb::IBufferMessage #endif { private static readonly pb::MessageParser<QuotaLimit> _parser = new pb::MessageParser<QuotaLimit>(() => new QuotaLimit()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pb::MessageParser<QuotaLimit> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pbr::MessageDescriptor Descriptor { get { return global::Google.Api.QuotaReflection.Descriptor.MessageTypes[2]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public QuotaLimit() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public QuotaLimit(QuotaLimit other) : this() { name_ = other.name_; description_ = other.description_; defaultLimit_ = other.defaultLimit_; maxLimit_ = other.maxLimit_; freeTier_ = other.freeTier_; duration_ = 
other.duration_; metric_ = other.metric_; unit_ = other.unit_; values_ = other.values_.Clone(); displayName_ = other.displayName_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public QuotaLimit Clone() { return new QuotaLimit(this); } /// <summary>Field number for the "name" field.</summary> public const int NameFieldNumber = 6; private string name_ = ""; /// <summary> /// Name of the quota limit. /// /// The name must be provided, and it must be unique within the service. The /// name can only include alphanumeric characters as well as '-'. /// /// The maximum length of the limit name is 64 characters. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public string Name { get { return name_; } set { name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "description" field.</summary> public const int DescriptionFieldNumber = 2; private string description_ = ""; /// <summary> /// Optional. User-visible, extended description for this quota limit. /// Should be used only when more context is needed to understand this limit /// than provided by the limit's display name (see: `display_name`). /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public string Description { get { return description_; } set { description_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "default_limit" field.</summary> public const int DefaultLimitFieldNumber = 3; private long defaultLimit_; /// <summary> /// Default number of tokens that can be consumed during the specified /// duration. 
This is the number of tokens assigned when a client /// application developer activates the service for his/her project. /// /// Specifying a value of 0 will block all requests. This can be used if you /// are provisioning quota to selected consumers and blocking others. /// Similarly, a value of -1 will indicate an unlimited quota. No other /// negative values are allowed. /// /// Used by group-based quotas only. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public long DefaultLimit { get { return defaultLimit_; } set { defaultLimit_ = value; } } /// <summary>Field number for the "max_limit" field.</summary> public const int MaxLimitFieldNumber = 4; private long maxLimit_; /// <summary> /// Maximum number of tokens that can be consumed during the specified /// duration. Client application developers can override the default limit up /// to this maximum. If specified, this value cannot be set to a value less /// than the default limit. If not specified, it is set to the default limit. /// /// To allow clients to apply overrides with no upper bound, set this to -1, /// indicating unlimited maximum quota. /// /// Used by group-based quotas only. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public long MaxLimit { get { return maxLimit_; } set { maxLimit_ = value; } } /// <summary>Field number for the "free_tier" field.</summary> public const int FreeTierFieldNumber = 7; private long freeTier_; /// <summary> /// Free tier value displayed in the Developers Console for this limit. /// The free tier is the number of tokens that will be subtracted from the /// billed amount when billing is enabled. /// This field can only be set on a limit with duration "1d", in a billable /// group; it is invalid on any other limit. 
If this field is not set, it /// defaults to 0, indicating that there is no free tier for this service. /// /// Used by group-based quotas only. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public long FreeTier { get { return freeTier_; } set { freeTier_ = value; } } /// <summary>Field number for the "duration" field.</summary> public const int DurationFieldNumber = 5; private string duration_ = ""; /// <summary> /// Duration of this limit in textual notation. Must be "100s" or "1d". /// /// Used by group-based quotas only. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public string Duration { get { return duration_; } set { duration_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "metric" field.</summary> public const int MetricFieldNumber = 8; private string metric_ = ""; /// <summary> /// The name of the metric this quota limit applies to. The quota limits with /// the same metric will be checked together during runtime. The metric must be /// defined within the service config. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public string Metric { get { return metric_; } set { metric_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "unit" field.</summary> public const int UnitFieldNumber = 9; private string unit_ = ""; /// <summary> /// Specify the unit of the quota limit. It uses the same syntax as /// [Metric.unit][]. The supported unit kinds are determined by the quota /// backend system. /// /// Here are some examples: /// * "1/min/{project}" for quota per minute per project. /// /// Note: the order of unit components is insignificant. 
/// The "1" at the beginning is required to follow the metric unit syntax. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public string Unit { get { return unit_; } set { unit_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "values" field.</summary> public const int ValuesFieldNumber = 10; private static readonly pbc::MapField<string, long>.Codec _map_values_codec = new pbc::MapField<string, long>.Codec(pb::FieldCodec.ForString(10, ""), pb::FieldCodec.ForInt64(16, 0L), 82); private readonly pbc::MapField<string, long> values_ = new pbc::MapField<string, long>(); /// <summary> /// Tiered limit values. You must specify this as a key:value pair, with an /// integer value that is the maximum number of requests allowed for the /// specified unit. Currently only STANDARD is supported. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public pbc::MapField<string, long> Values { get { return values_; } } /// <summary>Field number for the "display_name" field.</summary> public const int DisplayNameFieldNumber = 12; private string displayName_ = ""; /// <summary> /// User-visible display name for this limit. /// Optional. If not set, the UI will provide a default display name based on /// the quota configuration. This field can be used to override the default /// display name generated from the configuration. 
/// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public string DisplayName { get { return displayName_; } set { displayName_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override bool Equals(object other) { return Equals(other as QuotaLimit); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public bool Equals(QuotaLimit other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (Name != other.Name) return false; if (Description != other.Description) return false; if (DefaultLimit != other.DefaultLimit) return false; if (MaxLimit != other.MaxLimit) return false; if (FreeTier != other.FreeTier) return false; if (Duration != other.Duration) return false; if (Metric != other.Metric) return false; if (Unit != other.Unit) return false; if (!Values.Equals(other.Values)) return false; if (DisplayName != other.DisplayName) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override int GetHashCode() { int hash = 1; if (Name.Length != 0) hash ^= Name.GetHashCode(); if (Description.Length != 0) hash ^= Description.GetHashCode(); if (DefaultLimit != 0L) hash ^= DefaultLimit.GetHashCode(); if (MaxLimit != 0L) hash ^= MaxLimit.GetHashCode(); if (FreeTier != 0L) hash ^= FreeTier.GetHashCode(); if (Duration.Length != 0) hash ^= Duration.GetHashCode(); if (Metric.Length != 0) hash ^= Metric.GetHashCode(); if (Unit.Length != 0) hash ^= Unit.GetHashCode(); hash ^= Values.GetHashCode(); if (DisplayName.Length != 0) hash ^= DisplayName.GetHashCode(); if (_unknownFields != null) { hash 
^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void WriteTo(pb::CodedOutputStream output) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE output.WriteRawMessage(this); #else if (Description.Length != 0) { output.WriteRawTag(18); output.WriteString(Description); } if (DefaultLimit != 0L) { output.WriteRawTag(24); output.WriteInt64(DefaultLimit); } if (MaxLimit != 0L) { output.WriteRawTag(32); output.WriteInt64(MaxLimit); } if (Duration.Length != 0) { output.WriteRawTag(42); output.WriteString(Duration); } if (Name.Length != 0) { output.WriteRawTag(50); output.WriteString(Name); } if (FreeTier != 0L) { output.WriteRawTag(56); output.WriteInt64(FreeTier); } if (Metric.Length != 0) { output.WriteRawTag(66); output.WriteString(Metric); } if (Unit.Length != 0) { output.WriteRawTag(74); output.WriteString(Unit); } values_.WriteTo(output, _map_values_codec); if (DisplayName.Length != 0) { output.WriteRawTag(98); output.WriteString(DisplayName); } if (_unknownFields != null) { _unknownFields.WriteTo(output); } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalWriteTo(ref pb::WriteContext output) { if (Description.Length != 0) { output.WriteRawTag(18); output.WriteString(Description); } if (DefaultLimit != 0L) { output.WriteRawTag(24); output.WriteInt64(DefaultLimit); } if (MaxLimit != 0L) { output.WriteRawTag(32); output.WriteInt64(MaxLimit); } if (Duration.Length != 0) { output.WriteRawTag(42); output.WriteString(Duration); } if (Name.Length != 0) { output.WriteRawTag(50); 
output.WriteString(Name); } if (FreeTier != 0L) { output.WriteRawTag(56); output.WriteInt64(FreeTier); } if (Metric.Length != 0) { output.WriteRawTag(66); output.WriteString(Metric); } if (Unit.Length != 0) { output.WriteRawTag(74); output.WriteString(Unit); } values_.WriteTo(ref output, _map_values_codec); if (DisplayName.Length != 0) { output.WriteRawTag(98); output.WriteString(DisplayName); } if (_unknownFields != null) { _unknownFields.WriteTo(ref output); } } #endif [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public int CalculateSize() { int size = 0; if (Name.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); } if (Description.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Description); } if (DefaultLimit != 0L) { size += 1 + pb::CodedOutputStream.ComputeInt64Size(DefaultLimit); } if (MaxLimit != 0L) { size += 1 + pb::CodedOutputStream.ComputeInt64Size(MaxLimit); } if (FreeTier != 0L) { size += 1 + pb::CodedOutputStream.ComputeInt64Size(FreeTier); } if (Duration.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Duration); } if (Metric.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Metric); } if (Unit.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Unit); } size += values_.CalculateSize(_map_values_codec); if (DisplayName.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(DisplayName); } if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(QuotaLimit other) { if (other == null) { return; } if (other.Name.Length != 0) { Name = other.Name; } if (other.Description.Length != 0) { Description = other.Description; } if (other.DefaultLimit != 0L) { DefaultLimit = other.DefaultLimit; } if (other.MaxLimit 
!= 0L) { MaxLimit = other.MaxLimit; } if (other.FreeTier != 0L) { FreeTier = other.FreeTier; } if (other.Duration.Length != 0) { Duration = other.Duration; } if (other.Metric.Length != 0) { Metric = other.Metric; } if (other.Unit.Length != 0) { Unit = other.Unit; } values_.Add(other.values_); if (other.DisplayName.Length != 0) { DisplayName = other.DisplayName; } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(pb::CodedInputStream input) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE input.ReadRawMessage(this); #else uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 18: { Description = input.ReadString(); break; } case 24: { DefaultLimit = input.ReadInt64(); break; } case 32: { MaxLimit = input.ReadInt64(); break; } case 42: { Duration = input.ReadString(); break; } case 50: { Name = input.ReadString(); break; } case 56: { FreeTier = input.ReadInt64(); break; } case 66: { Metric = input.ReadString(); break; } case 74: { Unit = input.ReadString(); break; } case 82: { values_.AddEntriesFrom(input, _map_values_codec); break; } case 98: { DisplayName = input.ReadString(); break; } } } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalMergeFrom(ref pb::ParseContext input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, ref input); break; case 18: { Description = input.ReadString(); break; } case 24: { DefaultLimit = input.ReadInt64(); break; } case 32: { MaxLimit = input.ReadInt64(); break; } case 42: { Duration = 
input.ReadString(); break; } case 50: { Name = input.ReadString(); break; } case 56: { FreeTier = input.ReadInt64(); break; } case 66: { Metric = input.ReadString(); break; } case 74: { Unit = input.ReadString(); break; } case 82: { values_.AddEntriesFrom(ref input, _map_values_codec); break; } case 98: { DisplayName = input.ReadString(); break; } } } } #endif } #endregion } #endregion Designer generated code
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Linq;
using Xunit;

namespace System.IO.FileSystem.Tests
{
    /// <summary>
    /// Tests for Directory.CreateDirectory. The Create() hook is virtual so derived
    /// test classes can route the same suite through other creation APIs
    /// (e.g. DirectoryInfo.Create).
    /// </summary>
    public class Directory_CreateDirectory : FileSystemTest
    {
        #region Utilities

        // Single creation entry point exercised by every test below.
        public virtual DirectoryInfo Create(string path)
        {
            return Directory.CreateDirectory(path);
        }

        #endregion

        #region UniversalTests

        [Fact]
        public void NullAsPath_ThrowsArgumentNullException()
        {
            Assert.Throws<ArgumentNullException>(() => Create(null));
        }

        [Fact]
        public void EmptyAsPath_ThrowsArgumentException()
        {
            Assert.Throws<ArgumentException>(() => Create(string.Empty));
        }

        [Fact]
        public void PathWithInvalidCharactersAsPath_ThrowsArgumentException()
        {
            var paths = IOInputs.GetPathsWithInvalidCharacters();
            Assert.All(paths, (path) =>
            {
                Assert.Throws<ArgumentException>(() => Create(path));
            });
        }

        [Fact]
        public void PathAlreadyExistsAsFile()
        {
            string path = GetTestFilePath();
            File.Create(path).Dispose();
            // Creating a directory over an existing file must fail regardless of
            // whether the path carries a trailing separator.
            Assert.Throws<IOException>(() => Create(path));
            Assert.Throws<IOException>(() => Create(IOServices.AddTrailingSlashIfNeeded(path)));
            Assert.Throws<IOException>(() => Create(IOServices.RemoveTrailingSlash(path)));
        }

        [Theory]
        [InlineData(FileAttributes.Hidden)]
        [InlineData(FileAttributes.ReadOnly)]
        [InlineData(FileAttributes.Normal)]
        public void PathAlreadyExistsAsDirectory(FileAttributes attributes)
        {
            // Re-creating an existing directory is a no-op, even when the directory
            // carries special attributes such as Hidden or ReadOnly.
            DirectoryInfo testDir = Create(GetTestFilePath());
            testDir.Attributes = attributes;
            Assert.Equal(testDir.FullName, Create(testDir.FullName).FullName);
        }

        [Fact]
        public void RootPath()
        {
            string dirName = Path.GetPathRoot(Directory.GetCurrentDirectory());
            DirectoryInfo dir = Create(dirName);
            Assert.Equal(dir.FullName, dirName);
        }

        [Fact]
        public void DotIsCurrentDirectory()
        {
            string path = GetTestFilePath();
            DirectoryInfo result = Create(Path.Combine(path, "."));
            Assert.Equal(IOServices.RemoveTrailingSlash(path), result.FullName);

            result = Create(Path.Combine(path, ".") + Path.DirectorySeparatorChar);
            Assert.Equal(IOServices.AddTrailingSlashIfNeeded(path), result.FullName);
        }

        [Fact]
        public void CreateCurrentDirectory()
        {
            DirectoryInfo result = Create(Directory.GetCurrentDirectory());
            Assert.Equal(Directory.GetCurrentDirectory(), result.FullName);
        }

        [Fact]
        public void DotDotIsParentDirectory()
        {
            DirectoryInfo result = Create(Path.Combine(GetTestFilePath(), ".."));
            Assert.Equal(IOServices.RemoveTrailingSlash(TestDirectory), result.FullName);

            result = Create(Path.Combine(GetTestFilePath(), "..") + Path.DirectorySeparatorChar);
            Assert.Equal(IOServices.AddTrailingSlashIfNeeded(TestDirectory), result.FullName);
        }

        [Fact]
        public void ValidPathWithTrailingSlash()
        {
            DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());

            var components = IOInputs.GetValidPathComponentNames();
            Assert.All(components, (component) =>
            {
                string path = IOServices.AddTrailingSlashIfNeeded(Path.Combine(testDir.FullName, component));
                DirectoryInfo result = Create(path);

                Assert.Equal(path, result.FullName);
                Assert.True(result.Exists);
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void ValidExtendedPathWithTrailingSlash()
        {
            // Same as above but through the \\?\ extended-length path prefix.
            DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());

            var components = IOInputs.GetValidPathComponentNames();
            Assert.All(components, (component) =>
            {
                string path = @"\\?\" + IOServices.AddTrailingSlashIfNeeded(Path.Combine(testDir.FullName, component));
                DirectoryInfo result = Create(path);

                Assert.Equal(path, result.FullName);
                Assert.True(result.Exists);
            });
        }

        [Fact]
        public void ValidPathWithoutTrailingSlash()
        {
            DirectoryInfo testDir = Directory.CreateDirectory(GetTestFilePath());

            var components = IOInputs.GetValidPathComponentNames();
            Assert.All(components, (component) =>
            {
                string path = testDir.FullName + Path.DirectorySeparatorChar + component;
                DirectoryInfo result = Create(path);

                Assert.Equal(path, result.FullName);
                Assert.True(Directory.Exists(result.FullName));
            });
        }

        [Fact]
        public void ValidPathWithMultipleSubdirectories()
        {
            // CreateDirectory creates all intermediate directories in one call.
            string dirName = Path.Combine(GetTestFilePath(), "Test", "Test", "Test");
            DirectoryInfo dir = Create(dirName);

            Assert.Equal(dir.FullName, dirName);
        }

        [Fact]
        public void AllowedSymbols()
        {
            string dirName = Path.Combine(TestDirectory, Path.GetRandomFileName() + "!@#$%^&");
            DirectoryInfo dir = Create(dirName);

            Assert.Equal(dir.FullName, dirName);
        }

        [Fact]
        public void DirectoryEqualToMaxDirectory_CanBeCreated()
        {
            // Create the max-length directory one component at a time.
            DirectoryInfo testDir = Create(GetTestFilePath());
            PathInfo path = IOServices.GetPath(testDir.FullName, IOInputs.MaxDirectory, IOInputs.MaxComponent);
            Assert.All(path.SubPaths, (subpath) =>
            {
                DirectoryInfo result = Create(subpath);

                Assert.Equal(subpath, result.FullName);
                Assert.True(Directory.Exists(result.FullName));
            });
        }

        [Fact]
        public void DirectoryEqualToMaxDirectory_CanBeCreatedAllAtOnce()
        {
            DirectoryInfo testDir = Create(GetTestFilePath());
            PathInfo path = IOServices.GetPath(testDir.FullName, IOInputs.MaxDirectory, maxComponent: 10);
            DirectoryInfo result = Create(path.FullPath);

            Assert.Equal(path.FullPath, result.FullName);
            Assert.True(Directory.Exists(result.FullName));
        }

        [Fact]
        public void DirectoryWithComponentLongerThanMaxComponentAsPath_ThrowsPathTooLongException()
        {
            // While paths themselves can be up to 260 characters including trailing null, file systems
            // limit each component of the path to a total of 255 characters.
            var paths = IOInputs.GetPathsWithComponentLongerThanMaxComponent();

            Assert.All(paths, (path) =>
            {
                Assert.Throws<PathTooLongException>(() => Create(path));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void DirectoryLongerThanMaxPathAsPath_ThrowsPathTooLongException()
        {
            var paths = IOInputs.GetPathsLongerThanMaxPath();
            Assert.All(paths, (path) =>
            {
                Assert.Throws<PathTooLongException>(() => Create(path));
            });
        }

        #endregion

        #region PlatformSpecific

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void DirectoryLongerThanMaxDirectoryAsPath_ThrowsPathTooLongException()
        {
            var paths = IOInputs.GetPathsLongerThanMaxDirectory();
            Assert.All(paths, (path) =>
            {
                Assert.Throws<PathTooLongException>(() => Create(path));
            });
        }

        // NOTE(review): the two tests below assert opposite outcomes for the same
        // input; the _Allowed variant is disabled via [ActiveIssue(645)] pending
        // resolution of the expected Unix behavior.
        [Fact]
        [PlatformSpecific(PlatformID.AnyUnix)]
        [ActiveIssue(645)]
        public void UnixPathLongerThan256_Allowed()
        {
            DirectoryInfo testDir = Create(GetTestFilePath());
            PathInfo path = IOServices.GetPath(testDir.FullName, 257, IOInputs.MaxComponent);
            DirectoryInfo result = Create(path.FullPath);

            Assert.Equal(path.FullPath, result.FullName);
            Assert.True(Directory.Exists(result.FullName));
        }

        [Fact]
        [PlatformSpecific(PlatformID.AnyUnix)]
        public void UnixPathLongerThan256_Throws()
        {
            DirectoryInfo testDir = Create(GetTestFilePath());
            PathInfo path = IOServices.GetPath(testDir.FullName, 257, IOInputs.MaxComponent);
            Assert.Throws<PathTooLongException>(() => Create(path.FullPath));
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void WindowsWhiteSpaceAsPath_ThrowsArgumentException()
        {
            var paths = IOInputs.GetWhiteSpace();
            Assert.All(paths, (path) =>
            {
                Assert.Throws<ArgumentException>(() => Create(path));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.AnyUnix)]
        public void UnixWhiteSpaceAsPath_Allowed()
        {
            var paths = IOInputs.GetWhiteSpace();
            Assert.All(paths, (path) =>
            {
                Create(Path.Combine(TestDirectory, path));
                Assert.True(Directory.Exists(Path.Combine(TestDirectory, path)));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void WindowsTrailingWhiteSpace()
        {
            // Windows will remove all nonsignificant whitespace in a path
            DirectoryInfo testDir = Create(GetTestFilePath());
            var components = IOInputs.GetWhiteSpace();

            Assert.All(components, (component) =>
            {
                string path = IOServices.RemoveTrailingSlash(testDir.FullName) + component;
                DirectoryInfo result = Create(path);

                Assert.True(Directory.Exists(result.FullName));
                Assert.Equal(testDir.FullName, IOServices.RemoveTrailingSlash(result.FullName));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void WindowsExtendedSyntaxWhiteSpace()
        {
            // With the \\?\ prefix, trailing whitespace is preserved rather than trimmed.
            var paths = IOInputs.GetSimpleWhiteSpace();
            using (TemporaryDirectory directory = new TemporaryDirectory())
            {
                foreach (var path in paths)
                {
                    string extendedPath = Path.Combine(@"\\?\" + directory.Path, path);
                    Directory.CreateDirectory(extendedPath);
                    Assert.True(Directory.Exists(extendedPath), extendedPath);
                }
            }
        }

        [Fact]
        [PlatformSpecific(PlatformID.AnyUnix)]
        public void UnixNonSignificantTrailingWhiteSpace()
        {
            // Unix treats trailing/prename whitespace as significant and a part of the name.
            DirectoryInfo testDir = Create(GetTestFilePath());
            var components = IOInputs.GetWhiteSpace();

            Assert.All(components, (component) =>
            {
                string path = IOServices.RemoveTrailingSlash(testDir.FullName) + component;
                DirectoryInfo result = Create(path);

                Assert.True(Directory.Exists(result.FullName));
                Assert.NotEqual(testDir.FullName, IOServices.RemoveTrailingSlash(result.FullName));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // alternate data streams
        public void PathWithAlternateDataStreams_ThrowsNotSupportedException()
        {
            var paths = IOInputs.GetPathsWithAlternativeDataStreams();
            Assert.All(paths, (path) =>
            {
                Assert.Throws<NotSupportedException>(() => Create(path));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // device name prefixes
        public void PathWithReservedDeviceNameAsPath_ThrowsDirectoryNotFoundException()
        {
            // Throws DirectoryNotFoundException, when the behavior really should be an invalid path
            var paths = IOInputs.GetPathsWithReservedDeviceNames();
            Assert.All(paths, (path) =>
            {
                Assert.Throws<DirectoryNotFoundException>(() => Create(path));
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // device name prefixes
        public void PathWithReservedDeviceNameAsExtendedPath()
        {
            // Reserved device names (CON, PRN, ...) ARE creatable via the \\?\ prefix.
            var paths = IOInputs.GetReservedDeviceNames();
            using (TemporaryDirectory directory = new TemporaryDirectory())
            {
                Assert.All(paths, (path) =>
                {
                    Assert.True(Create(@"\\?\" + Path.Combine(directory.Path, path)).Exists, path);
                });
            }
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // UNC shares
        public void UncPathWithoutShareNameAsPath_ThrowsArgumentException()
        {
            var paths = IOInputs.GetUncPathsWithoutShareName();
            foreach (var path in paths)
            {
                Assert.Throws<ArgumentException>(() => Create(path));
            }
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // drive labels
        public void CDriveCase()
        {
            // DirectoryInfo preserves the drive-letter casing given by the caller.
            DirectoryInfo dir = Create("c:\\");
            DirectoryInfo dir2 = Create("C:\\");
            Assert.NotEqual(dir.FullName, dir2.FullName);
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)]
        public void DriveLetter_Windows()
        {
            // On Windows, DirectoryInfo will replace "<DriveLetter>:" with "."
            var driveLetter = Create(Directory.GetCurrentDirectory()[0] + ":");
            var current = Create(".");
            Assert.Equal(current.Name, driveLetter.Name);
            Assert.Equal(current.FullName, driveLetter.FullName);
        }

        [Fact]
        [PlatformSpecific(PlatformID.AnyUnix)]
        [ActiveIssue(2459)]
        public void DriveLetter_Unix()
        {
            // On Unix, there's no special casing for drive letters, which are valid file names
            var driveLetter = Create("C:");
            var current = Create(".");
            Assert.Equal("C:", driveLetter.Name);
            Assert.Equal(Path.Combine(current.FullName, "C:"), driveLetter.FullName);
            Directory.Delete("C:");
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // testing drive labels
        public void NonExistentDriveAsPath_ThrowsDirectoryNotFoundException()
        {
            Assert.Throws<DirectoryNotFoundException>(() =>
            {
                Create(IOServices.GetNonExistentDrive());
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // testing drive labels
        public void SubdirectoryOnNonExistentDriveAsPath_ThrowsDirectoryNotFoundException()
        {
            Assert.Throws<DirectoryNotFoundException>(() =>
            {
                Create(Path.Combine(IOServices.GetNonExistentDrive(), "Subdirectory"));
            });
        }

        [Fact]
        [ActiveIssue(1221)]
        [PlatformSpecific(PlatformID.Windows)] // testing drive labels
        public void NotReadyDriveAsPath_ThrowsDirectoryNotFoundException()
        {
            // Behavior is suspect, should really have thrown IOException similar to the SubDirectory case
            var drive = IOServices.GetNotReadyDrive();
            if (drive == null)
            {
                Console.WriteLine("Skipping test. Unable to find a not-ready drive, such as CD-Rom with no disc inserted.");
                return;
            }

            Assert.Throws<DirectoryNotFoundException>(() =>
            {
                Create(drive);
            });
        }

        [Fact]
        [PlatformSpecific(PlatformID.Windows)] // testing drive labels
        [ActiveIssue(1221)]
        public void SubdirectoryOnNotReadyDriveAsPath_ThrowsIOException()
        {
            var drive = IOServices.GetNotReadyDrive();
            if (drive == null)
            {
                Console.WriteLine("Skipping test. Unable to find a not-ready drive, such as CD-Rom with no disc inserted.");
                return;
            }

            // 'Device is not ready'
            Assert.Throws<IOException>(() =>
            {
                Create(Path.Combine(drive, "Subdirectory"));
            });
        }

#if !TEST_WINRT // Cannot set current directory to root from appcontainer with its default ACL
        /*
        [Fact]
        [ActiveIssue(1220)] // SetCurrentDirectory
        public void DotDotAsPath_WhenCurrentDirectoryIsRoot_DoesNotThrow()
        {
            string root = Path.GetPathRoot(Directory.GetCurrentDirectory());

            using (CurrentDirectoryContext context = new CurrentDirectoryContext(root))
            {
                DirectoryInfo result = Create("..");

                Assert.True(Directory.Exists(result.FullName));
                Assert.Equal(root, result.FullName);
            }
        }
        */
#endif
        #endregion
    }
}
//------------------------------------------------------------------------------
// <copyright file="DataBoundLiteralControl.cs" company="Microsoft">
//     Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------

/*
 * Control that holds databinding expressions and literals
 *
 * Copyright (c) 1999 Microsoft Corporation
 */

namespace System.Web.UI {

    using System;
    using System.ComponentModel;
    using System.ComponentModel.Design;
    using System.IO;
    using System.Text;
    using System.Security.Permissions;
    using System.Web.Util;

    /// <devdoc>
    /// Builder that accumulates an alternating sequence of literal strings and
    /// databinding expressions. The sequence invariant (literal, expression,
    /// literal, ...) is maintained by inserting null placeholders, and is what
    /// makes the /2 count arithmetic below valid.
    /// </devdoc>
    internal class DataBoundLiteralControlBuilder : ControlBuilder {

        internal DataBoundLiteralControlBuilder() {
        }

        internal void AddLiteralString(string s) {
            Debug.Assert(!InDesigner, "!InDesigner");

            // Make sure strings and databinding expressions alternate
            object lastBuilder = GetLastBuilder();
            if (lastBuilder != null && lastBuilder is string) {
                // Two literals in a row: insert a null expression placeholder.
                AddSubBuilder(null);
            }

            AddSubBuilder(s);
        }

        internal void AddDataBindingExpression(CodeBlockBuilder codeBlockBuilder) {
            Debug.Assert(!InDesigner, "!InDesigner");

            // Make sure strings and databinding expressions alternate
            object lastBuilder = GetLastBuilder();
            if (lastBuilder == null || lastBuilder is CodeBlockBuilder) {
                // Missing leading literal (or two expressions in a row):
                // insert a null literal placeholder.
                AddSubBuilder(null);
            }

            AddSubBuilder(codeBlockBuilder);
        }

        internal int GetStaticLiteralsCount() {
            // it's divided by 2 because half are strings and half are databinding
            // expressions). '+1' because we always start with a literal string.
            return (SubBuilders.Count+1) / 2;
        }

        internal int GetDataBoundLiteralCount() {
            // it's divided by 2 because half are strings and half are databinding
            // expressions)
            return SubBuilders.Count / 2;
        }
    }

    /// <devdoc>
    /// <para>Defines the properties and methods of the DataBoundLiteralControl class. </para>
    /// </devdoc>
    [
    ToolboxItem(false)
    ]
    public sealed class DataBoundLiteralControl : Control, ITextControl {
        // Literal strings are fixed at compile time; databound strings are
        // filled in by SetDataBoundString during DataBind(). Output interleaves
        // the two arrays: static[0], bound[0], static[1], bound[1], ...
        private string[] _staticLiterals;
        private string[] _dataBoundLiteral;
        private bool _hasDataBoundStrings;

        /// <internalonly/>
        public DataBoundLiteralControl(int staticLiteralsCount,
                                       int dataBoundLiteralCount) {
            _staticLiterals = new string[staticLiteralsCount];
            _dataBoundLiteral = new string[dataBoundLiteralCount];
            PreventAutoID();
        }

        /// <internalonly/>
        public void SetStaticString(int index, string s) {
            _staticLiterals[index] = s;
        }

        /// <internalonly/>
        public void SetDataBoundString(int index, string s) {
            _dataBoundLiteral[index] = s;
            _hasDataBoundStrings = true;
        }

        /// <devdoc>
        /// <para>Gets the text content of the data-bound literal control.</para>
        /// </devdoc>
        public string Text {
            get {
                StringBuilder sb = new StringBuilder();

                int dataBoundLiteralCount = _dataBoundLiteral.Length;

                // Append literal and databound strings alternatively
                for (int i=0; i<_staticLiterals.Length; i++) {
                    if (_staticLiterals[i] != null)
                        sb.Append(_staticLiterals[i]);

                    // Could be null if DataBind() was not called
                    if (i < dataBoundLiteralCount && _dataBoundLiteral[i] != null)
                        sb.Append(_dataBoundLiteral[i]);
                }

                return sb.ToString();
            }
        }

        /// <internalonly/>
        protected override ControlCollection CreateControlCollection() {
            // This control never has children.
            return new EmptyControlCollection(this);
        }

        /// <internalonly/>
        /// <devdoc>
        ///    <para>Loads the previously saved state. Overridden to synchronize Text property with
        ///       LiteralContent.</para>
        /// </devdoc>
        protected override void LoadViewState(object savedState) {
            if (savedState != null) {
                // View state carries only the databound half (see SaveViewState).
                _dataBoundLiteral = (string[]) savedState;
                _hasDataBoundStrings = true;
            }
        }

        /// <internalonly/>
        /// <devdoc>
        ///    <para>The object that contains the state changes. </para>
        /// </devdoc>
        protected override object SaveViewState() {
            // Return null if we didn't get any databound strings
            if (!_hasDataBoundStrings)
                return null;

            // Only save the databound literals to the view state
            return _dataBoundLiteral;
        }

        /// <internalonly/>
        protected internal override void Render(HtmlTextWriter output) {
            int dataBoundLiteralCount = _dataBoundLiteral.Length;

            // Render literal and databound strings alternatively
            for (int i=0; i<_staticLiterals.Length; i++) {
                if (_staticLiterals[i] != null)
                    output.Write(_staticLiterals[i]);

                // Could be null if DataBind() was not called
                if (i < dataBoundLiteralCount && _dataBoundLiteral[i] != null)
                    output.Write(_dataBoundLiteral[i]);
            }
        }

        /// <internalonly/>
        /// <devdoc>
        ///    <para>Implementation of TextControl.Text property. Read-only through
        ///       this interface; the setter always throws.</para>
        /// </devdoc>
        string ITextControl.Text {
            get {
                return Text;
            }
            set {
                throw new NotSupportedException();
            }
        }
    }

    /// <devdoc>
    /// <para>Simpler version of DataBoundLiteralControlBuilder, used at design time. </para>
    /// </devdoc>
    [
    DataBindingHandler("System.Web.UI.Design.TextDataBindingHandler, " + AssemblyRef.SystemDesign),
    ToolboxItem(false)
    ]
    public sealed class DesignerDataBoundLiteralControl : Control {
        private string _text;

        public DesignerDataBoundLiteralControl() {
            PreventAutoID();
        }

        /// <devdoc>
        ///    <para>Gets or sets the text content of the data-bound literal control.
        ///       Never null: a null assignment is normalized to the empty string.</para>
        /// </devdoc>
        public string Text {
            get {
                return _text;
            }
            set {
                _text = (value != null) ? value : String.Empty;
            }
        }

        protected override ControlCollection CreateControlCollection() {
            return new EmptyControlCollection(this);
        }

        /// <devdoc>
        ///    <para>Loads the previously saved state. Overridden to synchronize Text property with
        ///       LiteralContent.</para>
        /// </devdoc>
        protected override void LoadViewState(object savedState) {
            if (savedState != null)
                _text = (string) savedState;
        }

        /// <devdoc>
        ///    <para>Saves any state that was modified after the control began monitoring state changes.</para>
        /// </devdoc>
        protected internal override void Render(HtmlTextWriter output) {
            output.Write(_text);
        }

        /// <devdoc>
        ///    <para>The object that contains the state changes. </para>
        /// </devdoc>
        protected override object SaveViewState() {
            return _text;
        }
    }
}
using System;
using System.Linq;
using System.Runtime.InteropServices;
using Torque6.Engine.SimObjects;
using Torque6.Engine.SimObjects.Scene;
using Torque6.Engine.Namespaces;
using Torque6.Utility;

namespace Torque6.Engine.SimObjects.GuiControls
{
   /// <summary>
   /// Managed wrapper for the native Torque6 GuiSliderCtrl. Each property
   /// forwards to a lazily-bound native function pointer resolved from the
   /// Torque6 library by name.
   /// </summary>
   public unsafe class GuiSliderCtrl : GuiControl
   {
      // Creates a brand-new native slider instance and wraps it.
      public GuiSliderCtrl()
      {
         ObjectPtr = Sim.WrapObject(InternalUnsafeMethods.GuiSliderCtrlCreateInstance());
      }

      // Wrapping constructors for objects that already exist on the native side.
      public GuiSliderCtrl(uint pId) : base(pId)
      {
      }

      public GuiSliderCtrl(string pName) : base(pName)
      {
      }

      public GuiSliderCtrl(IntPtr pObjPtr) : base(pObjPtr)
      {
      }

      public GuiSliderCtrl(Sim.SimObjectPtr* pObjPtr) : base(pObjPtr)
      {
      }

      public GuiSliderCtrl(SimObject pObj) : base(pObj)
      {
      }

      #region UnsafeNativeMethods

      // Each native entry point follows the same pattern: a Cdecl delegate
      // type, a cached static delegate instance, and a wrapper that binds
      // the delegate on first use via GetProcAddress.
      // NOTE(review): the lazy null-check binding is not thread-safe;
      // presumably all engine calls happen on a single thread -- TODO confirm.
      new internal struct InternalUnsafeMethods
      {
         [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
         private delegate void _GuiSliderCtrlGetRange(IntPtr ctrl, out Point2F outRange);
         private static _GuiSliderCtrlGetRange _GuiSliderCtrlGetRangeFunc;
         internal static void GuiSliderCtrlGetRange(IntPtr ctrl, out Point2F outRange)
         {
            if (_GuiSliderCtrlGetRangeFunc == null)
            {
               _GuiSliderCtrlGetRangeFunc =
                  (_GuiSliderCtrlGetRange)Marshal.GetDelegateForFunctionPointer(
                     Interop.Torque6.DllLoadUtils.GetProcAddress(
                        Interop.Torque6.Torque6LibHandle,
                        "GuiSliderCtrlGetRange"), typeof(_GuiSliderCtrlGetRange));
            }

            _GuiSliderCtrlGetRangeFunc(ctrl, out outRange);
         }

         [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
         private delegate void _GuiSliderCtrlSetRange(IntPtr ctrl, Point2F range);
         private static _GuiSliderCtrlSetRange _GuiSliderCtrlSetRangeFunc;
         internal static void GuiSliderCtrlSetRange(IntPtr ctrl, Point2F range)
         {
            if (_GuiSliderCtrlSetRangeFunc == null)
            {
               _GuiSliderCtrlSetRangeFunc =
                  (_GuiSliderCtrlSetRange)Marshal.GetDelegateForFunctionPointer(
                     Interop.Torque6.DllLoadUtils.GetProcAddress(
                        Interop.Torque6.Torque6LibHandle,
                        "GuiSliderCtrlSetRange"), typeof(_GuiSliderCtrlSetRange));
            }

            _GuiSliderCtrlSetRangeFunc(ctrl, range);
         }

         [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
         private delegate int _GuiSliderCtrlGetTicks(IntPtr ctrl);
         private static _GuiSliderCtrlGetTicks _GuiSliderCtrlGetTicksFunc;
         internal static int GuiSliderCtrlGetTicks(IntPtr ctrl)
         {
            if (_GuiSliderCtrlGetTicksFunc == null)
            {
               _GuiSliderCtrlGetTicksFunc =
                  (_GuiSliderCtrlGetTicks)Marshal.GetDelegateForFunctionPointer(
                     Interop.Torque6.DllLoadUtils.GetProcAddress(
                        Interop.Torque6.Torque6LibHandle,
                        "GuiSliderCtrlGetTicks"), typeof(_GuiSliderCtrlGetTicks));
            }

            return _GuiSliderCtrlGetTicksFunc(ctrl);
         }

         [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
         private delegate void _GuiSliderCtrlSetTicks(IntPtr ctrl, int ticks);
         private static _GuiSliderCtrlSetTicks _GuiSliderCtrlSetTicksFunc;
         internal static void GuiSliderCtrlSetTicks(IntPtr ctrl, int ticks)
         {
            if (_GuiSliderCtrlSetTicksFunc == null)
            {
               _GuiSliderCtrlSetTicksFunc =
                  (_GuiSliderCtrlSetTicks)Marshal.GetDelegateForFunctionPointer(
                     Interop.Torque6.DllLoadUtils.GetProcAddress(
                        Interop.Torque6.Torque6LibHandle,
                        "GuiSliderCtrlSetTicks"), typeof(_GuiSliderCtrlSetTicks));
            }

            _GuiSliderCtrlSetTicksFunc(ctrl, ticks);
         }

         [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
         private delegate float _GuiSliderCtrlGetValue(IntPtr ctrl);
         private static _GuiSliderCtrlGetValue _GuiSliderCtrlGetValueFunc;
         internal static float GuiSliderCtrlGetValue(IntPtr ctrl)
         {
            if (_GuiSliderCtrlGetValueFunc == null)
            {
               _GuiSliderCtrlGetValueFunc =
                  (_GuiSliderCtrlGetValue)Marshal.GetDelegateForFunctionPointer(
                     Interop.Torque6.DllLoadUtils.GetProcAddress(
                        Interop.Torque6.Torque6LibHandle,
                        "GuiSliderCtrlGetValue"), typeof(_GuiSliderCtrlGetValue));
            }

            return _GuiSliderCtrlGetValueFunc(ctrl);
         }

         [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
         private delegate void _GuiSliderCtrlSetValue(IntPtr ctrl, float value);
         private static _GuiSliderCtrlSetValue _GuiSliderCtrlSetValueFunc;
         internal static void GuiSliderCtrlSetValue(IntPtr ctrl, float value)
         {
            if (_GuiSliderCtrlSetValueFunc == null)
            {
               _GuiSliderCtrlSetValueFunc =
                  (_GuiSliderCtrlSetValue)Marshal.GetDelegateForFunctionPointer(
                     Interop.Torque6.DllLoadUtils.GetProcAddress(
                        Interop.Torque6.Torque6LibHandle,
                        "GuiSliderCtrlSetValue"), typeof(_GuiSliderCtrlSetValue));
            }

            _GuiSliderCtrlSetValueFunc(ctrl, value);
         }

         [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
         private delegate IntPtr _GuiSliderCtrlCreateInstance();
         private static _GuiSliderCtrlCreateInstance _GuiSliderCtrlCreateInstanceFunc;
         internal static IntPtr GuiSliderCtrlCreateInstance()
         {
            if (_GuiSliderCtrlCreateInstanceFunc == null)
            {
               _GuiSliderCtrlCreateInstanceFunc =
                  (_GuiSliderCtrlCreateInstance)Marshal.GetDelegateForFunctionPointer(
                     Interop.Torque6.DllLoadUtils.GetProcAddress(
                        Interop.Torque6.Torque6LibHandle,
                        "GuiSliderCtrlCreateInstance"), typeof(_GuiSliderCtrlCreateInstance));
            }

            return _GuiSliderCtrlCreateInstanceFunc();
         }
      }

      #endregion

      #region Properties

      // The slider's (min, max) value range. Throws if the native object has
      // been destroyed.
      public Point2F Range
      {
         get
         {
            if (IsDead()) throw new Exceptions.SimObjectPointerInvalidException();
            Point2F outVal;
            InternalUnsafeMethods.GuiSliderCtrlGetRange(ObjectPtr->ObjPtr, out outVal);
            return outVal;
         }
         set
         {
            if (IsDead()) throw new Exceptions.SimObjectPointerInvalidException();
            InternalUnsafeMethods.GuiSliderCtrlSetRange(ObjectPtr->ObjPtr, value);
         }
      }

      // Number of tick marks shown on the slider.
      public int Ticks
      {
         get
         {
            if (IsDead()) throw new Exceptions.SimObjectPointerInvalidException();
            return InternalUnsafeMethods.GuiSliderCtrlGetTicks(ObjectPtr->ObjPtr);
         }
         set
         {
            if (IsDead()) throw new Exceptions.SimObjectPointerInvalidException();
            InternalUnsafeMethods.GuiSliderCtrlSetTicks(ObjectPtr->ObjPtr, value);
         }
      }

      // Current slider value.
      public float Value
      {
         get
         {
            if (IsDead()) throw new Exceptions.SimObjectPointerInvalidException();
            return InternalUnsafeMethods.GuiSliderCtrlGetValue(ObjectPtr->ObjPtr);
         }
         set
         {
            if (IsDead()) throw new Exceptions.SimObjectPointerInvalidException();
            InternalUnsafeMethods.GuiSliderCtrlSetValue(ObjectPtr->ObjPtr, value);
         }
      }

      #endregion

      #region Methods

      #endregion
   }
}
/*
 * (c) 2008 MOSA - The Managed Operating System Alliance
 *
 * Licensed under the terms of the New BSD License.
 *
 * Authors:
 *  Simon Wollwage (rootnode) <kintaro@think-in-co.de>
 */

using System;

namespace Pictor.Transform
{
    ///<summary>
    ///
    /// See Implementation agg_trans_affine.cpp
    ///
    /// Affine transformations are linear transformations in Cartesian Coordinates
    /// (strictly speaking not only in Cartesian, but for the beginning we will
    /// think so). They are Rotation, Scaling, Translation and skewing.
    /// After any affine transformation a Line segment remains a Line segment
    /// and it will never become a curve.
    ///
    /// There will be no math about matrix calculations, since it has been
    /// described many times. Ask yourself a very simple question:
    /// "why do we need to understand and use some matrix stuff instead of just
    /// rotating, Scaling and so on". The answers are:
    ///
    /// 1. Any combination of transformations can be done by only 4 multiplications
    ///    and 4 additions in floating point.
    /// 2. One matrix transformation is equivalent to the number of consecutive
    ///    discrete transformations, i.e. the matrix "accumulates" all transformations
    ///    in the order of their settings. Suppose we have 4 transformations:
    ///       * Rotate by 30 degrees,
    ///       * Scale X to 2.0,
    ///       * Scale Y to 1.5,
    ///       * move to (100, 100).
    ///    The result will depend on the order of these transformations,
    ///    and the advantage of matrix is that the sequence of discrete calls:
    ///    Rotate(30), scaleX(2.0), scaleY(1.5), move(100,100)
    ///    will have exactly the same result as the following matrix transformations:
    ///
    ///    affine_matrix m;
    ///    m *= rotate_matrix(30);
    ///    m *= scaleX_matrix(2.0);
    ///    m *= scaleY_matrix(1.5);
    ///    m *= move_matrix(100,100);
    ///
    ///    m.transform_my_point_at_last(x, y);
    ///
    /// What is the good of it? In real life we will Set-up the matrix only once
    /// and then Transform many points, let alone the convenience to Set any
    /// combination of transformations.
    ///
    /// So, how to use it? Very easy - literally as it's shown above. Not quite,
    /// let us write a correct example:
    ///
    /// Pictor::trans_affine m;
    /// m *= Pictor::trans_affine_rotation(30.0 * 3.1415926 / 180.0);
    /// m *= Pictor::trans_affine_scaling(2.0, 1.5);
    /// m *= Pictor::trans_affine_translation(100.0, 100.0);
    /// m.Transform(x, y);
    ///
    /// The affine matrix is all you need to perform any linear transformation,
    /// but all transformations have origin point (0,0). It means that we need to
    /// use 2 translations if we want to Rotate something around (100,100):
    ///
    /// m *= Pictor::trans_affine_translation(-100.0, -100.0);         // move to (0,0)
    /// m *= Pictor::trans_affine_rotation(30.0 * 3.1415926 / 180.0);  // Rotate
    /// m *= Pictor::trans_affine_translation(100.0, 100.0);           // move back to (100,100)
    ///</summary>
    public struct Affine : ITransform
    {
        ///<summary>Tolerance used by the parameterless IsIdentity() check.</summary>
        static public double AffineEpsilon = 1e-14;

        // Matrix layout (column-vector convention):
        //   | sx  shx tx |
        //   | shy sy  ty |
        public double sx, shy, shx, sy, tx, ty;

        ///<summary>Copy constructor.</summary>
        ///<param name="copyFrom"></param>
        public Affine(Affine copyFrom)
        {
            sx = copyFrom.sx;
            shy = copyFrom.shy;
            shx = copyFrom.shx;
            sy = copyFrom.sy;
            tx = copyFrom.tx;
            ty = copyFrom.ty;
        }

        ///<summary>Constructs from the six coefficients in AGG order
        /// (sx, shy, shx, sy, tx, ty).</summary>
        ///<param name="v0"></param>
        ///<param name="v1"></param>
        ///<param name="v2"></param>
        ///<param name="v3"></param>
        ///<param name="v4"></param>
        ///<param name="v5"></param>
        public Affine(double v0, double v1, double v2, double v3, double v4, double v5)
        {
            sx = v0;
            shy = v1;
            shx = v2;
            sy = v3;
            tx = v4;
            ty = v5;
        }

        ///<summary>Constructs from a six-element array in AGG coefficient order.</summary>
        ///<param name="m"></param>
        public Affine(double[] m)
        {
            sx = m[0];
            shy = m[1];
            shx = m[2];
            sy = m[3];
            tx = m[4];
            ty = m[5];
        }

        ///<summary>Returns the identity matrix.</summary>
        ///<returns></returns>
        public static Affine NewIdentity()
        {
            Affine newAffine = new Affine(1.0, 0.0, 0.0, 1.0, 0.0, 0.0);
            return newAffine;
        }

        ///<summary>Rotation matrix about the origin.</summary>
        ///<param name="angleRadians"></param>
        ///<returns></returns>
        public static Affine NewRotation(double angleRadians)
        {
            return new Affine(Math.Cos(angleRadians), Math.Sin(angleRadians),
                              -Math.Sin(angleRadians), Math.Cos(angleRadians), 0.0, 0.0);
        }

        ///<summary>Uniform scaling matrix.</summary>
        ///<param name="scale"></param>
        ///<returns></returns>
        public static Affine NewScaling(double scale)
        {
            return new Affine(scale, 0.0, 0.0, scale, 0.0, 0.0);
        }

        ///<summary>Non-uniform scaling matrix.</summary>
        ///<param name="x"></param>
        ///<param name="y"></param>
        ///<returns></returns>
        public static Affine NewScaling(double x, double y)
        {
            return new Affine(x, 0.0, 0.0, y, 0.0, 0.0);
        }

        ///<summary>Translation matrix.</summary>
        ///<param name="x"></param>
        ///<param name="y"></param>
        ///<returns></returns>
        public static Affine NewTranslation(double x, double y)
        {
            return new Affine(1.0, 0.0, 0.0, 1.0, x, y);
        }

        ///<summary>Skewing (shear) matrix; x and y are angles in radians.</summary>
        ///<param name="x"></param>
        ///<param name="y"></param>
        ///<returns></returns>
        public static Affine NewSkewing(double x, double y)
        {
            return new Affine(1.0, Math.Tan(y), Math.Tan(x), 1.0, 0.0, 0.0);
        }

        ///<summary>Resets this matrix to identity in place.</summary>
        public void Identity()
        {
            sx = sy = 1.0;
            shy = shx = tx = ty = 0.0;
        }

        ///<summary>Appends a translation to this matrix.</summary>
        ///<param name="x"></param>
        ///<param name="y"></param>
        public void Translate(double x, double y)
        {
            tx += x;
            ty += y;
        }

        ///<summary>Appends a rotation about the origin to this matrix.
        /// Temporaries t0/t2/t4 preserve the pre-rotation values still needed
        /// while the other fields are being overwritten.</summary>
        ///<param name="angleRadians"></param>
        public void Rotate(double angleRadians)
        {
            double ca = Math.Cos(angleRadians);
            double sa = Math.Sin(angleRadians);
            double t0 = sx * ca - shy * sa;
            double t2 = shx * ca - sy * sa;
            double t4 = tx * ca - ty * sa;
            shy = sx * sa + shy * ca;
            sy = shx * sa + sy * ca;
            ty = tx * sa + ty * ca;
            sx = t0;
            shx = t2;
            tx = t4;
        }

        ///<summary>Appends a non-uniform scale to this matrix.</summary>
        ///<param name="x"></param>
        ///<param name="y"></param>
        public void Scale(double x, double y)
        {
            double mm0 = x; // Possible hint for the optimizer
            double mm3 = y;
            sx *= mm0;
            shx *= mm0;
            tx *= mm0;
            shy *= mm3;
            sy *= mm3;
            ty *= mm3;
        }

        ///<summary>Appends a uniform scale to this matrix.</summary>
        ///<param name="scaleAmount"></param>
        public void Scale(double scaleAmount)
        {
            sx *= scaleAmount;
            shx *= scaleAmount;
            tx *= scaleAmount;
            shy *= scaleAmount;
            sy *= scaleAmount;
            ty *= scaleAmount;
        }

        // Multiply matrix to another one (this = this * m).
        // Temporaries keep the old sx/shx/tx values alive until all products
        // that read them have been computed.
        private void Multiply(Affine m)
        {
            double t0 = sx * m.sx + shy * m.shx;
            double t2 = shx * m.sx + sy * m.shx;
            double t4 = tx * m.sx + ty * m.shx + m.tx;
            shy = sx * m.shy + shy * m.sy;
            sy = shx * m.shy + sy * m.sy;
            ty = tx * m.shy + ty * m.sy + m.ty;
            sx = t0;
            shx = t2;
            tx = t4;
        }

        ///<summary>Inverts this matrix in place. Degenerate matrices
        /// (determinant 0) produce infinities/NaNs rather than throwing.</summary>
        public void Invert()
        {
            double d = DeterminantReciprocal;

            double t0 = sy * d;
            sy = sx * d;
            shy = -shy * d;
            shx = -shx * d;

            double t4 = -tx * t0 - ty * shx;
            ty = -tx * shy - ty * sy;

            sx = t0;
            tx = t4;
        }

        ///<summary>Matrix multiplication: result = a * b.</summary>
        ///<param name="a"></param>
        ///<param name="b"></param>
        ///<returns></returns>
        public static Affine operator *(Affine a, Affine b)
        {
            Affine temp = new Affine(a);
            temp.Multiply(b);
            return temp;
        }

        ///<summary>Transforms a point by this matrix (in place).</summary>
        ///<param name="x"></param>
        ///<param name="y"></param>
        public void Transform(ref double x, ref double y)
        {
            double tmp = x;
            x = tmp * sx + y * shx + tx;
            y = tmp * shy + y * sy + ty;
        }

        ///<summary>Transforms a PointD by this matrix (in place).</summary>
        ///<param name="pointToTransform"></param>
        public void Transform(ref PointD pointToTransform)
        {
            Transform(ref pointToTransform.x, ref pointToTransform.y);
        }

        ///<summary>Applies the inverse transform to a point without mutating
        /// this matrix.</summary>
        ///<param name="x"></param>
        ///<param name="y"></param>
        public void InverseTransform(ref double x, ref double y)
        {
            double d = DeterminantReciprocal;
            double a = (x - tx) * d;
            double b = (y - ty) * d;
            x = a * sy - b * shx;
            y = b * sx - a * shy;
        }

        // 1 / det(M); infinite for a degenerate matrix.
        private double DeterminantReciprocal
        {
            get
            {
                return 1.0 / (sx * sy - shy * shx);
            }
        }

        ///<summary>Average scale factor: length of the image of the unit
        /// vector (1/sqrt(2), 1/sqrt(2)) under the linear part.</summary>
        public double GetScale()
        {
            double x = 0.707106781 * sx + 0.707106781 * shx;
            double y = 0.707106781 * shy + 0.707106781 * sy;
            return Math.Sqrt(x * x + y * y);
        }

        ///<summary>
        /// Check to see if the matrix is not degenerate
        ///</summary>
        ///<param name="epsilon"></param>
        ///<returns></returns>
        public bool IsValid(double epsilon)
        {
            return Math.Abs(sx) > epsilon && Math.Abs(sy) > epsilon;
        }

        /// <summary>
        /// Check to see if it's an Identity matrix
        /// </summary>
        /// <returns></returns>
        public bool IsIdentity()
        {
            return IsIdentity(AffineEpsilon);
        }

        ///<summary>Identity check with an explicit tolerance.</summary>
        ///<param name="epsilon"></param>
        ///<returns></returns>
        public bool IsIdentity(double epsilon)
        {
            return Basics.IsEqualEps(sx, 1.0, epsilon) &&
                   Basics.IsEqualEps(shy, 0.0, epsilon) &&
                   Basics.IsEqualEps(shx, 0.0, epsilon) &&
                   Basics.IsEqualEps(sy, 1.0, epsilon) &&
                   Basics.IsEqualEps(tx, 0.0, epsilon) &&
                   Basics.IsEqualEps(ty, 0.0, epsilon);
        }

        ///<summary>Element-wise equality within a tolerance.</summary>
        ///<param name="m"></param>
        ///<param name="epsilon"></param>
        ///<returns></returns>
        public bool IsEqual(Affine m, double epsilon)
        {
            return Basics.IsEqualEps(sx, m.sx, epsilon) &&
                   Basics.IsEqualEps(shy, m.shy, epsilon) &&
                   Basics.IsEqualEps(shx, m.shx, epsilon) &&
                   Basics.IsEqualEps(sy, m.sy, epsilon) &&
                   Basics.IsEqualEps(tx, m.tx, epsilon) &&
                   Basics.IsEqualEps(ty, m.ty, epsilon);
        }

        ///<summary>Extracts the rotation angle by transforming the unit X
        /// vector and measuring the resulting direction.</summary>
        ///<returns></returns>
        public double Rotation()
        {
            double x1 = 0.0;
            double y1 = 0.0;
            double x2 = 1.0;
            double y2 = 0.0;
            Transform(ref x1, ref y1);
            Transform(ref x2, ref y2);
            return Math.Atan2(y2 - y1, x2 - x1);
        }

        ///<summary>Extracts the translation component.</summary>
        ///<param name="dx"></param>
        ///<param name="dy"></param>
        public void Translation(out double dx, out double dy)
        {
            dx = tx;
            dy = ty;
        }

        ///<summary>Extracts the scaling component by first removing the
        /// rotation, then measuring how (1,1) is displaced relative to (0,0).</summary>
        ///<param name="x"></param>
        ///<param name="y"></param>
        public void Scaling(out double x, out double y)
        {
            double x1 = 0.0;
            double y1 = 0.0;
            double x2 = 1.0;
            double y2 = 1.0;
            Affine t = new Affine(this);
            t *= NewRotation(-Rotation());
            t.Transform(ref x1, ref y1);
            t.Transform(ref x2, ref y2);
            x = x2 - x1;
            y = y2 - y1;
        }

        ///<summary>
        /// Used to Calculate Scaling coefficients in image resampling.
        /// When there is considerable shear this method gives us much
        /// better estimation than just sx, sy.
        ///</summary>
        ///<param name="x"></param>
        ///<param name="y"></param>
        public void ScalingAbs(out double x, out double y)
        {
            x = Math.Sqrt(sx * sx + shx * shx);
            y = Math.Sqrt(shy * shy + sy * sy);
        }
    };
}
using System;
using System.Data;
using System.Globalization;
using bv.common.Configuration;
using bv.common.win;
using bv.model.BLToolkit;
using bv.tests.AVR.Helpers;
using bv.tests.AVR.IntegrationTests;
using BLToolkit.Data;
using BLToolkit.Data.DataProvider;
using DevExpress.XtraPivotGrid;
using EIDSS;
using eidss.avr;
using eidss.avr.ChartForm;
using eidss.avr.db.Common;
using eidss.avr.PivotComponents;
using eidss.model.AVR.ServiceData;
using eidss.model.AVR.SourceData;
using eidss.model.Core.CultureInfo;
using eidss.model.WindowsService.Serialization;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using StructureMap;

namespace bv.tests.AVR.UnitTests
{
    /// <summary>
    /// Unit tests for AVR UI components: AvrDataTable round-tripping,
    /// AvrPivotGrid field creation and captions, chart-placeholder titles,
    /// and the PivotGroupInterval date helpers (formatting, IsDate,
    /// interval truncation).
    /// </summary>
    [TestClass]
    public class ComponentsTests
    {
        // Shared presenter transaction opened per test and disposed in cleanup.
        private IDisposable m_PresenterTransaction;

        // todo [ivan] implement
        private static Container StructureMapContainerInit()
        {
            // Returns an intentionally empty StructureMap container (see TODO above).
            Container c = new Container();
            c.Configure(r => { });
            return c;
        }

        /// <summary>
        /// Per-test setup: initializes the lookup cache, points BLToolkit at the
        /// configured EIDSS connection string and opens a shared presenter
        /// transaction bound to a throwaway form.
        /// </summary>
        [TestInitialize]
        public void MyTestInitialize()
        {
            EIDSS_LookupCacheHelper.Init();
            DbManagerFactory.SetSqlFactory(Config.GetSetting("EidssConnectionString"));
            m_PresenterTransaction = PresenterFactory.BeginSharedPresenterTransaction(StructureMapContainerInit(), new BaseForm());
        }

        /// <summary>
        /// Per-test teardown: closes the presenter transaction opened in setup.
        /// </summary>
        [TestCleanup]
        public void MyTestCleanup()
        {
            m_PresenterTransaction.Dispose();
        }

        /// <summary>
        /// DataTable -> AvrDataTable -> DataTable round trip must preserve the
        /// original content once the copy columns are stripped.
        /// </summary>
        [TestMethod]
        public void AvrTableToTableTest()
        {
            var originalTable = DataHelper.GenerateTestTable();
            var avrTable = new AvrDataTable(originalTable);
            AccessTests.RemoveCopyColumns(avrTable);
            var finalTable = avrTable.ToDataTable();
            BinarySerializerTests.AssertTablesAreEqual(originalTable, finalTable);
        }

        /// <summary>
        /// Binding an AvrDataTable to the pivot grid must propagate the expected
        /// field captions (values below come from DataHelper's test fixture).
        /// </summary>
        [TestMethod]
        public void UpdatePivotCaptionTest()
        {
            using (var pivotGridControl = new AvrPivotGrid())
            {
                var dataTable = new AvrDataTable(DataHelper.GenerateTestTable());
                pivotGridControl.SetDataSourceAndCreateFields(dataTable);
                Assert.AreEqual("sflHC_PatientAge_Caption", pivotGridControl.Fields[0].Caption);
                Assert.AreEqual("sflHC_PatientDOB_Caption", pivotGridControl.Fields[2].Caption);
                Assert.AreEqual("sflHC_CaseID_Caption", pivotGridControl.Fields[4].Caption);
            }
        }

        /// <summary>
        /// The pivot grid must create exactly one field per source-table column.
        /// </summary>
        [TestMethod]
        public void UpdatePivotDataTest()
        {
            using (var pivotGridControl = new AvrPivotGrid())
            {
                var dataTable = new AvrDataTable(DataHelper.GenerateTestTable());
                // Sanity check on the fixture: it is expected to produce 6 columns.
                Assert.AreEqual(6, dataTable.Columns.Count);
                pivotGridControl.SetDataSourceAndCreateFields(dataTable);
                Assert.AreEqual(dataTable.Columns.Count, pivotGridControl.Fields.Count);
            }
        }

        #region chart tests

        /// <summary>
        /// ChartPlaceHolder must mirror its ChartName into the chart control's title.
        /// </summary>
        [TestMethod]
        public void ChartTitleTest()
        {
            var mediator = new ChartPlaceHolder {ChartName = "xxx"};
            Assert.AreEqual("xxx", mediator.ChartControl.Titles[0].Text);
        }

        /// <summary>
        /// An empty ChartName must fall back to the "[Untitled]" placeholder
        /// (asserted under en-US so the localized string is deterministic).
        /// </summary>
        [TestMethod]
        public void EmptyChartTitleTest()
        {
            using (new CultureInfoTransaction(CultureInfo.GetCultureInfo("en-US")))
            {
                var mediator = new ChartPlaceHolder {ChartName = string.Empty};
                Assert.AreEqual("[Untitled]", mediator.ChartControl.Titles[0].Text);
            }
        }

        /// <summary>
        /// Builds a small Series/Arguments/Values table (one series, 10 points)
        /// usable as a chart data source in tests.
        /// </summary>
        public static DataTable GenerateChartTestTable()
        {
            var dataTable = new DataTable("testTable");
            dataTable.Columns.Add(DataHelper.GenerateColumn("Series", typeof (string)));
            dataTable.Columns.Add(DataHelper.GenerateColumn("Arguments", typeof (string)));
            dataTable.Columns.Add(DataHelper.GenerateColumn("Values", typeof (int)));
            for (int i = 0; i < 10; i++)
            {
                DataRow workRow = dataTable.NewRow();
                workRow[0] = "series1";
                workRow[1] = "name_" + i;
                workRow[2] = i;
                dataTable.Rows.Add(workRow);
            }
            return dataTable;
        }

        #endregion

        #region calendar tests

        /// <summary>
        /// DateTime.ToObject(interval) must map dates to the expected quarter,
        /// year, month name, week-of-year and week-of-month values.
        /// Runs under en-US so month names and calendar rules are deterministic.
        /// </summary>
        [TestMethod]
        public void PivotFormatDateTest()
        {
            using (new CultureInfoTransaction(new CultureInfo("en-US")))
            {
                // Quarters: Q2, Q4, Q1, Q3.
                object dateQuarter = new DateTime(2009, 5, 10).ToObject(PivotGroupInterval.DateQuarter);
                Assert.AreEqual(2, dateQuarter);
                dateQuarter = new DateTime(2009, 11, 10).ToObject(PivotGroupInterval.DateQuarter);
                Assert.AreEqual(4, dateQuarter);
                dateQuarter = new DateTime(2009, 2, 10).ToObject(PivotGroupInterval.DateQuarter);
                Assert.AreEqual(1, dateQuarter);
                dateQuarter = new DateTime(2009, 8, 10).ToObject(PivotGroupInterval.DateQuarter);
                Assert.AreEqual(3, dateQuarter);
                object dateYear = new DateTime(2009, 5, 10).ToObject(PivotGroupInterval.DateYear);
                Assert.AreEqual(2009, dateYear);
                object dateMonth = new DateTime(2009, 5, 10).ToObject(PivotGroupInterval.DateMonth);
                Assert.AreEqual("May", dateMonth);
                // Week numbering: weeks appear to start on Monday here — Jan 4 2009
                // (Sunday) is still week 1, Jan 5 (Monday) begins week 2.
                object dateWeekOfYear = new DateTime(2009, 1, 1).ToObject(PivotGroupInterval.DateWeekOfYear);
                Assert.AreEqual(1, dateWeekOfYear);
                dateWeekOfYear = new DateTime(2009, 1, 4).ToObject(PivotGroupInterval.DateWeekOfYear);
                Assert.AreEqual(1, dateWeekOfYear);
                dateWeekOfYear = new DateTime(2009, 1, 5).ToObject(PivotGroupInterval.DateWeekOfYear);
                Assert.AreEqual(2, dateWeekOfYear);
                dateWeekOfYear = new DateTime(2009, 1, 11).ToObject(PivotGroupInterval.DateWeekOfYear);
                Assert.AreEqual(2, dateWeekOfYear);
                dateWeekOfYear = new DateTime(2006, 1, 8).ToObject(PivotGroupInterval.DateWeekOfYear);
                Assert.AreEqual(1, dateWeekOfYear);
                object dateWeekOfMonth = new DateTime(2009, 7, 20).ToObject(PivotGroupInterval.DateWeekOfMonth);
                Assert.AreEqual(4, dateWeekOfMonth);
                dateWeekOfMonth = new DateTime(2009, 7, 19).ToObject(PivotGroupInterval.DateWeekOfMonth);
                Assert.AreEqual(3, dateWeekOfMonth);
                dateWeekOfMonth = new DateTime(2009, 3, 1).ToObject(PivotGroupInterval.DateWeekOfMonth);
                Assert.AreEqual(1, dateWeekOfMonth);
                dateWeekOfMonth = new DateTime(2009, 3, 2).ToObject(PivotGroupInterval.DateWeekOfMonth);
                Assert.AreEqual(1, dateWeekOfMonth);
            }
        }

        /// <summary>
        /// IsDate() must be true exactly for the calendar-based group intervals.
        /// </summary>
        [TestMethod]
        public void PivotIsDateTest()
        {
            Assert.IsFalse(PivotGroupInterval.Alphabetical.IsDate());
            Assert.IsFalse(PivotGroupInterval.Custom.IsDate());
            Assert.IsTrue(PivotGroupInterval.Date.IsDate());
            Assert.IsTrue(PivotGroupInterval.DateDay.IsDate());
            Assert.IsTrue(PivotGroupInterval.DateDayOfWeek.IsDate());
            Assert.IsTrue(PivotGroupInterval.DateDayOfYear.IsDate());
            Assert.IsTrue(PivotGroupInterval.DateYear.IsDate());
            Assert.IsFalse(PivotGroupInterval.DayAge.IsDate());
            Assert.IsFalse(PivotGroupInterval.Default.IsDate());
        }

        /// <summary>
        /// TruncateToFirstDateInInterval must snap a timestamp to the first date
        /// of its year/quarter/month/day/week interval (time-of-day dropped).
        /// </summary>
        [TestMethod]
        public void PivotTruncateDateTest()
        {
            var date1 = new DateTime(2014, 02, 10, 2, 3, 4, 5);
            Assert.AreEqual(new DateTime(2014, 1, 1), date1.TruncateToFirstDateInInterval(PivotGroupInterval.DateYear));
            Assert.AreEqual(new DateTime(2014, 1, 1), date1.TruncateToFirstDateInInterval(PivotGroupInterval.DateQuarter));
            Assert.AreEqual(new DateTime(2014, 2, 1), date1.TruncateToFirstDateInInterval(PivotGroupInterval.DateMonth));
            Assert.AreEqual(new DateTime(2014, 2, 10), date1.TruncateToFirstDateInInterval(PivotGroupInterval.Date));
            Assert.AreEqual(new DateTime(2014, 2, 10), date1.TruncateToFirstDateInInterval(PivotGroupInterval.DateDay));
            Assert.AreEqual(new DateTime(2014, 2, 10), date1.TruncateToFirstDateInInterval(PivotGroupInterval.DateDayOfWeek));
            Assert.AreEqual(new DateTime(2014, 2, 10), date1.TruncateToFirstDateInInterval(PivotGroupInterval.DateDayOfYear));
            var date2 = new DateTime(2013, 12, 25, 2, 3, 4, 5);
            Assert.AreEqual(new DateTime(2013, 1, 1), date2.TruncateToFirstDateInInterval(PivotGroupInterval.DateYear));
            Assert.AreEqual(new DateTime(2013, 10, 1), date2.TruncateToFirstDateInInterval(PivotGroupInterval.DateQuarter));
            Assert.AreEqual(new DateTime(2013, 12, 1), date2.TruncateToFirstDateInInterval(PivotGroupInterval.DateMonth));
            Assert.AreEqual(new DateTime(2013, 12, 25), date2.TruncateToFirstDateInInterval(PivotGroupInterval.Date));
            Assert.AreEqual(new DateTime(2013, 12, 25), date2.TruncateToFirstDateInInterval(PivotGroupInterval.DateDay));
            Assert.AreEqual(new DateTime(2013, 12, 25), date2.TruncateToFirstDateInInterval(PivotGroupInterval.DateDayOfWeek));
            Assert.AreEqual(new DateTime(2013, 12, 25), date2.TruncateToFirstDateInInterval(PivotGroupInterval.DateDayOfYear));
            // Week intervals snap to the Monday of the containing week:
            // Fri 2014-01-03 and Sun 2014-01-05 both snap back to Mon 2013-12-30,
            // while Mon 2014-01-06 is already the start of its week.
            var date3 = new DateTime(2014, 01, 03, 2, 3, 4, 5);
            Assert.AreEqual(new DateTime(2013, 12, 30), date3.TruncateToFirstDateInInterval(PivotGroupInterval.DateWeekOfMonth));
            Assert.AreEqual(new DateTime(2013, 12, 30), date3.TruncateToFirstDateInInterval(PivotGroupInterval.DateWeekOfYear));
            var date4 = new DateTime(2014, 01, 05, 2, 3, 4, 5);
            Assert.AreEqual(new DateTime(2013, 12, 30), date4.TruncateToFirstDateInInterval(PivotGroupInterval.DateWeekOfMonth));
            Assert.AreEqual(new DateTime(2013, 12, 30), date4.TruncateToFirstDateInInterval(PivotGroupInterval.DateWeekOfYear));
            var date5 = new DateTime(2014, 01, 06, 2, 3, 4, 5);
            Assert.AreEqual(new DateTime(2014, 01, 06), date5.TruncateToFirstDateInInterval(PivotGroupInterval.DateWeekOfMonth));
            Assert.AreEqual(new DateTime(2014, 01, 06), date5.TruncateToFirstDateInInterval(PivotGroupInterval.DateWeekOfYear));
        }

        #endregion
    }
}
/******************************************************************************* * Copyright (c) 2013, Daniel Murphy * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
******************************************************************************/

/**
 * created at 1:14:57 AM Jan 14, 2011
 */
using System;
using SharpBox2D.Collision.Shapes;
using SharpBox2D.Common;
using SharpBox2D.Dynamics;
using SharpBox2D.Dynamics.Joints;
using SharpBox2D.TestBed.Framework;

namespace SharpBox2D.TestBed.Tests
{
    /**
     * Testbed scene demonstrating switching a body between the DYNAMIC, STATIC
     * and KINEMATIC body types at runtime: a motorized platform on a revolute
     * and prismatic joint carries a payload, and keyboard input changes the
     * platform's type.
     *
     * @author Daniel Murphy
     */
    public class BodyTypes : TestbedTest
    {
        // Serialization tags used to re-identify the two special bodies on load.
        private static long ATTACHMENT_TAG = 19;
        private static long PLATFORM_TAG = 20;

        private Body m_attachment;
        private Body m_platform;
        // Horizontal speed given to the platform when it becomes kinematic.
        private float m_speed;

        /// Maps the two tracked bodies to their save/load tags; defers to the base
        /// implementation for every other body.
        public override long getTag(Body body)
        {
            if (body == m_attachment)
                return ATTACHMENT_TAG;
            if (body == m_platform)
                return PLATFORM_TAG;
            return base.getTag(body);
        }

        /// Inverse of getTag: restores the tracked body references after a load.
        public override void processBody(Body body, long tag)
        {
            if (tag == ATTACHMENT_TAG)
            {
                m_attachment = body;
            }
            else if (tag == PLATFORM_TAG)
            {
                m_platform = body;
            }
            else
            {
                base.processBody(body, tag);
            }
        }

        public override bool isSaveLoadEnabled()
        {
            return true;
        }

        /// Builds the scene: a ground edge, an attachment box, the jointed
        /// platform and a payload box. Skipped when restoring from a saved
        /// world (deserialized == true), since the bodies already exist.
        public override void initTest(bool deserialized)
        {
            m_speed = 3.0f;

            if (deserialized)
            {
                return;
            }

            // Ground: a flat edge the scene rests on.
            Body ground = null;
            {
                BodyDef bd = new BodyDef();
                ground = getWorld().createBody(bd);

                EdgeShape shape = new EdgeShape();
                shape.set(new Vec2(-20.0f, 0.0f), new Vec2(20.0f, 0.0f));

                FixtureDef fd = new FixtureDef();
                fd.shape = shape;

                ground.createFixture(fd);
            }

            // Define attachment
            {
                BodyDef bd = new BodyDef();
                bd.type = BodyType.DYNAMIC;
                bd.position.set(0.0f, 3.0f);
                m_attachment = getWorld().createBody(bd);

                PolygonShape shape = new PolygonShape();
                shape.setAsBox(0.5f, 2.0f);
                m_attachment.createFixture(shape, 2.0f);
            }

            // Define platform
            {
                BodyDef bd = new BodyDef();
                bd.type = BodyType.DYNAMIC;
                bd.position.set(-4.0f, 5.0f);
                m_platform = getWorld().createBody(bd);

                // Box rotated 90 degrees and offset so it hangs off the body origin.
                PolygonShape shape = new PolygonShape();
                shape.setAsBox(0.5f, 4.0f, new Vec2(4.0f, 0.0f), 0.5f*MathUtils.PI);

                FixtureDef fd = new FixtureDef();
                fd.shape = shape;
                fd.friction = 0.6f;
                fd.density = 2.0f;
                m_platform.createFixture(fd);

                // Motorized hinge between the attachment and the platform.
                RevoluteJointDef rjd = new RevoluteJointDef();
                rjd.initialize(m_attachment, m_platform, new Vec2(0.0f, 5.0f));
                rjd.maxMotorTorque = 50.0f;
                rjd.enableMotor = true;
                getWorld().createJoint(rjd);

                // Horizontal slider limiting the platform to +/-10 along the X axis.
                PrismaticJointDef pjd = new PrismaticJointDef();
                pjd.initialize(ground, m_platform, new Vec2(0.0f, 5.0f), new Vec2(1.0f, 0.0f));
                pjd.maxMotorForce = 1000.0f;
                pjd.enableMotor = true;
                pjd.lowerTranslation = -10.0f;
                pjd.upperTranslation = 10.0f;
                pjd.enableLimit = true;
                getWorld().createJoint(pjd);
            }

            // Create a payload
            {
                BodyDef bd = new BodyDef();
                bd.type = BodyType.DYNAMIC;
                bd.position.set(0.0f, 8.0f);
                Body body = getWorld().createBody(bd);

                PolygonShape shape = new PolygonShape();
                shape.setAsBox(0.75f, 0.75f);

                FixtureDef fd = new FixtureDef();
                fd.shape = shape;
                fd.friction = 0.6f;
                fd.density = 2.0f;

                body.createFixture(fd);
            }
        }

        /// Per-frame update: when the platform is kinematic, bounces it between
        /// x = -10 and x = +10 by flipping its linear velocity at the limits.
        public override void step(TestbedSettings settings)
        {
            base.step(settings);

            addTextLine("Keys: (d) dynamic, (s) static, (k) kinematic");

            // Drive the kinematic body.
            if (m_platform.getType() == BodyType.KINEMATIC)
            {
                Vec2 p = m_platform.getTransform().p;
                Vec2 v = m_platform.getLinearVelocity();

                if ((p.x < -10.0f && v.x < 0.0f) || (p.x > 10.0f && v.x > 0.0f))
                {
                    v.x = -v.x;
                    m_platform.setLinearVelocity(v);
                }
            }
        }

        /// Keyboard handler: 'd'/'s'/'k' switch the platform's body type;
        /// kinematic mode also starts it moving left at m_speed.
        public override void keyPressed(char argKeyChar, int argKeyCode)
        {
            switch (argKeyChar)
            {
                case 'd':
                    m_platform.setType(BodyType.DYNAMIC);
                    break;
                case 's':
                    m_platform.setType(BodyType.STATIC);
                    break;
                case 'k':
                    m_platform.setType(BodyType.KINEMATIC);
                    m_platform.setLinearVelocity(new Vec2(-m_speed, 0.0f));
                    m_platform.setAngularVelocity(0.0f);
                    break;
            }
        }

        public override string getTestName()
        {
            return "Body Types";
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Reflection;
using MySql.Data.MySqlClient;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Data;
using RegionFlags = OpenSim.Framework.RegionFlags;

namespace OpenSim.Data.MySQL
{
    /// <summary>
    /// MySQL-backed implementation of IRegionData. Persists grid region records
    /// in the table named by <c>realm</c>; the fixed columns (uuid, ScopeID,
    /// regionName, locX/locY, sizeX/sizeY) are mapped to RegionData properties
    /// and every other column is carried in the RegionData.Data dictionary.
    /// </summary>
    public class MySqlRegionData : MySqlFramework, IRegionData
    {
        // Table name all queries run against (set once in the constructor).
        private string m_Realm;
        // Lazily discovered column names of the realm table; see CheckColumnNames.
        private List<string> m_ColumnNames;
        //private string m_connectionString;

        protected virtual Assembly Assembly
        {
            get { return GetType().Assembly; }
        }

        /// <summary>
        /// Opens a connection once to run the "GridStore" schema migrations,
        /// then stores the connection string for per-call connections.
        /// </summary>
        /// <param name="connectionString">MySQL connection string</param>
        /// <param name="realm">name of the table holding region rows</param>
        public MySqlRegionData(string connectionString, string realm)
                : base(connectionString)
        {
            m_Realm = realm;
            m_connectionString = connectionString;

            using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
            {
                dbcon.Open();
                Migration m = new Migration(dbcon, Assembly, "GridStore");
                m.Update();
            }
        }

        /// <summary>
        /// Looks up regions by name pattern (SQL LIKE semantics), optionally
        /// restricted to a scope, ordered by region name.
        /// </summary>
        public List<RegionData> Get(string regionName, UUID scopeID)
        {
            string command = "select * from `"+m_Realm+"` where regionName like ?regionName";
            if (scopeID != UUID.Zero)
                command += " and ScopeID = ?scopeID";
            command += " order by regionName";

            using (MySqlCommand cmd = new MySqlCommand(command))
            {
                cmd.Parameters.AddWithValue("?regionName", regionName);
                cmd.Parameters.AddWithValue("?scopeID", scopeID.ToString());

                return RunCommand(cmd);
            }
        }

        /// <summary>
        /// Looks up the single region at exact grid coordinates (locX/locY),
        /// or null when no row matches.
        /// </summary>
        public RegionData Get(int posX, int posY, UUID scopeID)
        {
            string command = "select * from `"+m_Realm+"` where locX = ?posX and locY = ?posY";
            if (scopeID != UUID.Zero)
                command += " and ScopeID = ?scopeID";

            using (MySqlCommand cmd = new MySqlCommand(command))
            {
                cmd.Parameters.AddWithValue("?posX", posX.ToString());
                cmd.Parameters.AddWithValue("?posY", posY.ToString());
                cmd.Parameters.AddWithValue("?scopeID", scopeID.ToString());

                List<RegionData> ret = RunCommand(cmd);
                if (ret.Count == 0)
                    return null;

                return ret[0];
            }
        }

        /// <summary>
        /// Looks up a region by its UUID, or null when no row matches.
        /// </summary>
        public RegionData Get(UUID regionID, UUID scopeID)
        {
            string command = "select * from `"+m_Realm+"` where uuid = ?regionID";
            if (scopeID != UUID.Zero)
                command += " and ScopeID = ?scopeID";

            using (MySqlCommand cmd = new MySqlCommand(command))
            {
                cmd.Parameters.AddWithValue("?regionID", regionID.ToString());
                cmd.Parameters.AddWithValue("?scopeID", scopeID.ToString());

                List<RegionData> ret = RunCommand(cmd);
                if (ret.Count == 0)
                    return null;

                return ret[0];
            }
        }

        /// <summary>
        /// Returns all regions whose locX/locY fall inside the inclusive
        /// rectangle (startX..endX, startY..endY).
        /// </summary>
        public List<RegionData> Get(int startX, int startY, int endX, int endY, UUID scopeID)
        {
            string command = "select * from `"+m_Realm+"` where locX between ?startX and ?endX and locY between ?startY and ?endY";
            if (scopeID != UUID.Zero)
                command += " and ScopeID = ?scopeID";

            using (MySqlCommand cmd = new MySqlCommand(command))
            {
                cmd.Parameters.AddWithValue("?startX", startX.ToString());
                cmd.Parameters.AddWithValue("?startY", startY.ToString());
                cmd.Parameters.AddWithValue("?endX", endX.ToString());
                cmd.Parameters.AddWithValue("?endY", endY.ToString());
                cmd.Parameters.AddWithValue("?scopeID", scopeID.ToString());

                return RunCommand(cmd);
            }
        }

        /// <summary>
        /// Executes a prepared SELECT on a fresh connection and materializes
        /// every row into a RegionData. Fixed columns become properties; all
        /// remaining columns go into the Data dictionary (DBNull becomes null).
        /// Note: sizeX/sizeY are not excluded from the dictionary copy; Store
        /// strips them again before writing.
        /// </summary>
        public List<RegionData> RunCommand(MySqlCommand cmd)
        {
            List<RegionData> retList = new List<RegionData>();

            using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
            {
                dbcon.Open();
                cmd.Connection = dbcon;

                using (IDataReader result = cmd.ExecuteReader())
                {
                    while (result.Read())
                    {
                        RegionData ret = new RegionData();
                        ret.Data = new Dictionary<string, object>();

                        ret.RegionID = DBGuid.FromDB(result["uuid"]);
                        ret.ScopeID = DBGuid.FromDB(result["ScopeID"]);

                        ret.RegionName = result["regionName"].ToString();
                        ret.posX = Convert.ToInt32(result["locX"]);
                        ret.posY = Convert.ToInt32(result["locY"]);
                        ret.sizeX = Convert.ToInt32(result["sizeX"]);
                        ret.sizeY = Convert.ToInt32(result["sizeY"]);

                        // Discover the table's column list once, from the first reader.
                        CheckColumnNames(result);

                        foreach (string s in m_ColumnNames)
                        {
                            if (s == "uuid")
                                continue;
                            if (s == "ScopeID")
                                continue;
                            if (s == "regionName")
                                continue;
                            if (s == "locX")
                                continue;
                            if (s == "locY")
                                continue;

                            object value = result[s];
                            if (value is DBNull)
                                ret.Data[s] = null;
                            else
                                ret.Data[s] = result[s].ToString();
                        }

                        retList.Add(ret);
                    }
                }
            }

            return retList;
        }

        /// <summary>
        /// Caches the column names of the realm table from a reader's schema
        /// table. No-op once populated; not thread-safe (assumed single-threaded
        /// use — TODO confirm against callers).
        /// </summary>
        private void CheckColumnNames(IDataReader result)
        {
            if (m_ColumnNames != null)
                return;

            List<string> columnNames = new List<string>();

            DataTable schemaTable = result.GetSchemaTable();
            foreach (DataRow row in schemaTable.Rows)
            {
                if (row["ColumnName"] != null)
                    columnNames.Add(row["ColumnName"].ToString());
            }

            m_ColumnNames = columnNames;
        }

        /// <summary>
        /// Upserts a region row: tries an UPDATE keyed on uuid (and ScopeID if
        /// set); when no row was affected, falls back to an INSERT. The fixed
        /// columns are bound explicitly; every remaining Data entry becomes a
        /// parameterized column. Region names longer than 128 chars are
        /// truncated to fit the column.
        /// NOTE(review): the Data keys are concatenated into the SQL as column
        /// names — safe only while they originate from the table schema.
        /// </summary>
        public bool Store(RegionData data)
        {
            // Drop any dictionary duplicates of the explicitly-bound columns.
            if (data.Data.ContainsKey("uuid"))
                data.Data.Remove("uuid");
            if (data.Data.ContainsKey("ScopeID"))
                data.Data.Remove("ScopeID");
            if (data.Data.ContainsKey("regionName"))
                data.Data.Remove("regionName");
            if (data.Data.ContainsKey("posX"))
                data.Data.Remove("posX");
            if (data.Data.ContainsKey("posY"))
                data.Data.Remove("posY");
            if (data.Data.ContainsKey("sizeX"))
                data.Data.Remove("sizeX");
            if (data.Data.ContainsKey("sizeY"))
                data.Data.Remove("sizeY");
            if (data.Data.ContainsKey("locX"))
                data.Data.Remove("locX");
            if (data.Data.ContainsKey("locY"))
                data.Data.Remove("locY");

            if (data.RegionName.Length > 128)
                data.RegionName = data.RegionName.Substring(0, 128);

            string[] fields = new List<string>(data.Data.Keys).ToArray();

            using (MySqlCommand cmd = new MySqlCommand())
            {
                string update = "update `" + m_Realm + "` set locX=?posX, locY=?posY, sizeX=?sizeX, sizeY=?sizeY";
                foreach (string field in fields)
                {
                    update += ", ";
                    update += "`" + field + "` = ?" + field;

                    cmd.Parameters.AddWithValue("?" + field, data.Data[field]);
                }

                update += " where uuid = ?regionID";

                if (data.ScopeID != UUID.Zero)
                    update += " and ScopeID = ?scopeID";

                cmd.CommandText = update;
                cmd.Parameters.AddWithValue("?regionID", data.RegionID.ToString());
                cmd.Parameters.AddWithValue("?regionName", data.RegionName);
                cmd.Parameters.AddWithValue("?scopeID", data.ScopeID.ToString());
                cmd.Parameters.AddWithValue("?posX", data.posX.ToString());
                cmd.Parameters.AddWithValue("?posY", data.posY.ToString());
                cmd.Parameters.AddWithValue("?sizeX", data.sizeX.ToString());
                cmd.Parameters.AddWithValue("?sizeY", data.sizeY.ToString());

                if (ExecuteNonQuery(cmd) < 1)
                {
                    // No existing row matched — insert, reusing the parameters
                    // already bound above.
                    string insert = "insert into `" + m_Realm + "` (`uuid`, `ScopeID`, `locX`, `locY`, `sizeX`, `sizeY`, `regionName`, `" +
                            String.Join("`, `", fields) +
                            "`) values ( ?regionID, ?scopeID, ?posX, ?posY, ?sizeX, ?sizeY, ?regionName, ?" + String.Join(", ?", fields) + ")";

                    cmd.CommandText = insert;

                    if (ExecuteNonQuery(cmd) < 1)
                    {
                        return false;
                    }
                }
            }

            return true;
        }

        /// <summary>
        /// Sets a single column on a region row; true when a row was updated.
        /// NOTE(review): <paramref name="item"/> is interpolated into the SQL as
        /// a column name — callers must not pass untrusted input.
        /// </summary>
        public bool SetDataItem(UUID regionID, string item, string value)
        {
            using (MySqlCommand cmd = new MySqlCommand("update `" + m_Realm + "` set `" + item + "` = ?" + item + " where uuid = ?UUID"))
            {
                cmd.Parameters.AddWithValue("?" + item, value);
                cmd.Parameters.AddWithValue("?UUID", regionID.ToString());

                if (ExecuteNonQuery(cmd) > 0)
                    return true;
            }

            return false;
        }

        /// <summary>
        /// Deletes a region row by UUID; true when a row was removed.
        /// </summary>
        public bool Delete(UUID regionID)
        {
            using (MySqlCommand cmd = new MySqlCommand("delete from `" + m_Realm + "` where uuid = ?UUID"))
            {
                cmd.Parameters.AddWithValue("?UUID", regionID.ToString());

                if (ExecuteNonQuery(cmd) > 0)
                    return true;
            }

            return false;
        }

        public List<RegionData> GetDefaultRegions(UUID scopeID)
        {
            return Get((int)RegionFlags.DefaultRegion, scopeID);
        }

        public List<RegionData> GetDefaultHypergridRegions(UUID scopeID)
        {
            return Get((int)RegionFlags.DefaultHGRegion, scopeID);
        }

        /// <summary>
        /// Returns the fallback regions sorted by distance from (x, y), nearest first.
        /// </summary>
        public List<RegionData> GetFallbackRegions(UUID scopeID, int x, int y)
        {
            List<RegionData> regions = Get((int)RegionFlags.FallbackRegion, scopeID);
            RegionDataDistanceCompare distanceComparer = new RegionDataDistanceCompare(x, y);
            regions.Sort(distanceComparer);
            return regions;
        }

        public List<RegionData> GetHyperlinks(UUID scopeID)
        {
            return Get((int)RegionFlags.Hyperlink, scopeID);
        }

        /// <summary>
        /// Returns regions whose flags column has any of the given bits set.
        /// regionFlags is an int from our own enum, so inlining it is safe.
        /// </summary>
        private List<RegionData> Get(int regionFlags, UUID scopeID)
        {
            string command = "select * from `" + m_Realm + "` where (flags & " + regionFlags.ToString() + ") <> 0";
            if (scopeID != UUID.Zero)
                command += " and ScopeID = ?scopeID";

            using (MySqlCommand cmd = new MySqlCommand(command))
            {
                cmd.Parameters.AddWithValue("?scopeID", scopeID.ToString());

                return RunCommand(cmd);
            }
        }
    }
}
namespace Exercise { partial class Form1 { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.IContainer components = null; /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param> protected override void Dispose(bool disposing) { if (disposing && (components != null)) { components.Dispose(); } base.Dispose(disposing); } #region Windows Form Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { this.CreatePadFootingButton = new System.Windows.Forms.Button(); this.FootingSize = new System.Windows.Forms.TextBox(); this.SizeLabel1 = new System.Windows.Forms.Label(); this.button2 = new System.Windows.Forms.Button(); this.ColumnsProfileTextBox = new System.Windows.Forms.TextBox(); this.SizeTextBox = new System.Windows.Forms.TextBox(); this.GradeTextBox = new System.Windows.Forms.TextBox(); this.BendingRadiusTextBox = new System.Windows.Forms.TextBox(); this.label2 = new System.Windows.Forms.Label(); this.label3 = new System.Windows.Forms.Label(); this.label4 = new System.Windows.Forms.Label(); this.groupBox1 = new System.Windows.Forms.GroupBox(); this.groupBox3 = new System.Windows.Forms.GroupBox(); this.reinforcementCatalog1 = new Tekla.Structures.Dialog.UIControls.ReinforcementCatalog(); this.groupBox2 = new System.Windows.Forms.GroupBox(); this.label1 = new System.Windows.Forms.Label(); this.ColumnsMaterialTextBox = new System.Windows.Forms.TextBox(); this.MaterialButton = new System.Windows.Forms.Button(); this.profileCatalog1 = new Tekla.Structures.Dialog.UIControls.ProfileCatalog(); this.label5 = new System.Windows.Forms.Label(); this.saveLoad1 = new Tekla.Structures.Dialog.UIControls.SaveLoad(); this.CreateButton = new System.Windows.Forms.Button(); this.label6 = new 
System.Windows.Forms.Label(); this.ViewTitle = new System.Windows.Forms.TextBox(); this.EditDrawingButton = new System.Windows.Forms.Button(); this.groupBox4 = new System.Windows.Forms.GroupBox(); this.OpenDrawingButton = new System.Windows.Forms.Button(); this.label7 = new System.Windows.Forms.Label(); this.listView1 = new System.Windows.Forms.ListView(); this.groupBox1.SuspendLayout(); this.groupBox2.SuspendLayout(); this.groupBox4.SuspendLayout(); this.SuspendLayout(); // // CreatePadFootingButton // this.structuresExtender.SetAttributeName(this.CreatePadFootingButton, null); this.structuresExtender.SetAttributeTypeName(this.CreatePadFootingButton, null); this.structuresExtender.SetBindPropertyName(this.CreatePadFootingButton, null); this.CreatePadFootingButton.Location = new System.Drawing.Point(389, 172); this.CreatePadFootingButton.Name = "CreatePadFootingButton"; this.CreatePadFootingButton.Size = new System.Drawing.Size(126, 23); this.CreatePadFootingButton.TabIndex = 0; this.CreatePadFootingButton.Text = "Footings and Columns"; this.CreatePadFootingButton.UseVisualStyleBackColor = true; this.CreatePadFootingButton.Click += new System.EventHandler(this.CreatePadFootings); // // FootingSize // this.structuresExtender.SetAttributeName(this.FootingSize, "FootingSize"); this.structuresExtender.SetAttributeTypeName(this.FootingSize, "Double"); this.structuresExtender.SetBindPropertyName(this.FootingSize, null); this.FootingSize.Location = new System.Drawing.Point(457, 146); this.FootingSize.Name = "FootingSize"; this.FootingSize.Size = new System.Drawing.Size(58, 20); this.FootingSize.TabIndex = 1; this.FootingSize.TextChanged += new System.EventHandler(this.FootingSize_TextChanged); // // SizeLabel1 // this.structuresExtender.SetAttributeName(this.SizeLabel1, null); this.structuresExtender.SetAttributeTypeName(this.SizeLabel1, null); this.SizeLabel1.AutoSize = true; this.structuresExtender.SetBindPropertyName(this.SizeLabel1, null); this.SizeLabel1.Location = 
new System.Drawing.Point(386, 149); this.SizeLabel1.Name = "SizeLabel1"; this.SizeLabel1.Size = new System.Drawing.Size(65, 13); this.SizeLabel1.TabIndex = 2; this.SizeLabel1.Text = "Footing Size"; // // button2 // this.structuresExtender.SetAttributeName(this.button2, null); this.structuresExtender.SetAttributeTypeName(this.button2, null); this.structuresExtender.SetBindPropertyName(this.button2, null); this.button2.Location = new System.Drawing.Point(389, 202); this.button2.Name = "button2"; this.button2.Size = new System.Drawing.Size(126, 23); this.button2.TabIndex = 3; this.button2.Text = "Create rebars"; this.button2.UseVisualStyleBackColor = true; this.button2.Click += new System.EventHandler(this.button2_Click); // // ColumnsProfileTextBox // this.structuresExtender.SetAttributeName(this.ColumnsProfileTextBox, "Profile"); this.structuresExtender.SetAttributeTypeName(this.ColumnsProfileTextBox, "String"); this.structuresExtender.SetBindPropertyName(this.ColumnsProfileTextBox, null); this.ColumnsProfileTextBox.Location = new System.Drawing.Point(99, 20); this.ColumnsProfileTextBox.Name = "ColumnsProfileTextBox"; this.ColumnsProfileTextBox.Size = new System.Drawing.Size(131, 20); this.ColumnsProfileTextBox.TabIndex = 4; this.ColumnsProfileTextBox.Text = "HEA400"; // // SizeTextBox // this.structuresExtender.SetAttributeName(this.SizeTextBox, "Size"); this.structuresExtender.SetAttributeTypeName(this.SizeTextBox, "String"); this.structuresExtender.SetBindPropertyName(this.SizeTextBox, null); this.SizeTextBox.Location = new System.Drawing.Point(99, 23); this.SizeTextBox.Name = "SizeTextBox"; this.SizeTextBox.Size = new System.Drawing.Size(131, 20); this.SizeTextBox.TabIndex = 6; this.SizeTextBox.Text = "12"; // // GradeTextBox // this.structuresExtender.SetAttributeName(this.GradeTextBox, "Grade"); this.structuresExtender.SetAttributeTypeName(this.GradeTextBox, "String"); this.structuresExtender.SetBindPropertyName(this.GradeTextBox, null); 
this.GradeTextBox.Location = new System.Drawing.Point(99, 49); this.GradeTextBox.Name = "GradeTextBox"; this.GradeTextBox.Size = new System.Drawing.Size(131, 20); this.GradeTextBox.TabIndex = 7; this.GradeTextBox.Text = "A500HW"; // // BendingRadiusTextBox // this.structuresExtender.SetAttributeName(this.BendingRadiusTextBox, "BendingRadius"); this.structuresExtender.SetAttributeTypeName(this.BendingRadiusTextBox, "Distance"); this.structuresExtender.SetBindPropertyName(this.BendingRadiusTextBox, null); this.BendingRadiusTextBox.Location = new System.Drawing.Point(99, 75); this.BendingRadiusTextBox.Name = "BendingRadiusTextBox"; this.BendingRadiusTextBox.Size = new System.Drawing.Size(131, 20); this.BendingRadiusTextBox.TabIndex = 8; this.BendingRadiusTextBox.Text = "40"; // // label2 // this.structuresExtender.SetAttributeName(this.label2, null); this.structuresExtender.SetAttributeTypeName(this.label2, null); this.label2.AutoSize = true; this.structuresExtender.SetBindPropertyName(this.label2, null); this.label2.Location = new System.Drawing.Point(6, 26); this.label2.Name = "label2"; this.label2.Size = new System.Drawing.Size(30, 13); this.label2.TabIndex = 9; this.label2.Text = "Size:"; // // label3 // this.structuresExtender.SetAttributeName(this.label3, null); this.structuresExtender.SetAttributeTypeName(this.label3, null); this.label3.AutoSize = true; this.structuresExtender.SetBindPropertyName(this.label3, null); this.label3.Location = new System.Drawing.Point(6, 53); this.label3.Name = "label3"; this.label3.Size = new System.Drawing.Size(39, 13); this.label3.TabIndex = 10; this.label3.Text = "Grade:"; // // label4 // this.structuresExtender.SetAttributeName(this.label4, null); this.structuresExtender.SetAttributeTypeName(this.label4, null); this.label4.AutoSize = true; this.structuresExtender.SetBindPropertyName(this.label4, null); this.label4.Location = new System.Drawing.Point(6, 78); this.label4.Name = "label4"; this.label4.Size = new 
System.Drawing.Size(80, 13); this.label4.TabIndex = 11; this.label4.Text = "Bending radius:"; // // groupBox1 // this.structuresExtender.SetAttributeName(this.groupBox1, null); this.structuresExtender.SetAttributeTypeName(this.groupBox1, null); this.structuresExtender.SetBindPropertyName(this.groupBox1, null); this.groupBox1.Controls.Add(this.groupBox3); this.groupBox1.Controls.Add(this.reinforcementCatalog1); this.groupBox1.Controls.Add(this.label4); this.groupBox1.Controls.Add(this.label3); this.groupBox1.Controls.Add(this.label2); this.groupBox1.Controls.Add(this.BendingRadiusTextBox); this.groupBox1.Controls.Add(this.GradeTextBox); this.groupBox1.Controls.Add(this.SizeTextBox); this.groupBox1.Location = new System.Drawing.Point(12, 146); this.groupBox1.Name = "groupBox1"; this.groupBox1.Size = new System.Drawing.Size(350, 110); this.groupBox1.TabIndex = 12; this.groupBox1.TabStop = false; this.groupBox1.Text = "Rebars"; // // groupBox3 // this.structuresExtender.SetAttributeName(this.groupBox3, null); this.structuresExtender.SetAttributeTypeName(this.groupBox3, null); this.structuresExtender.SetBindPropertyName(this.groupBox3, null); this.groupBox3.Location = new System.Drawing.Point(238, 19); this.groupBox3.Name = "groupBox3"; this.groupBox3.Size = new System.Drawing.Size(2, 77); this.groupBox3.TabIndex = 13; this.groupBox3.TabStop = false; // // reinforcementCatalog1 // this.structuresExtender.SetAttributeName(this.reinforcementCatalog1, null); this.structuresExtender.SetAttributeTypeName(this.reinforcementCatalog1, null); this.reinforcementCatalog1.BackColor = System.Drawing.Color.Transparent; this.structuresExtender.SetBindPropertyName(this.reinforcementCatalog1, null); this.reinforcementCatalog1.Location = new System.Drawing.Point(247, 48); this.reinforcementCatalog1.Name = "reinforcementCatalog1"; this.reinforcementCatalog1.SelectedRebarBendingRadius = 0D; this.reinforcementCatalog1.SelectedRebarGrade = ""; this.reinforcementCatalog1.SelectedRebarSize = 
""; this.reinforcementCatalog1.Size = new System.Drawing.Size(88, 23); this.reinforcementCatalog1.TabIndex = 12; this.reinforcementCatalog1.SelectClicked += new System.EventHandler(this.reinforcementCatalog1_SelectClicked); this.reinforcementCatalog1.SelectionDone += new System.EventHandler(this.reinforcementCatalog1_SelectionDone); // // groupBox2 // this.structuresExtender.SetAttributeName(this.groupBox2, null); this.structuresExtender.SetAttributeTypeName(this.groupBox2, null); this.structuresExtender.SetBindPropertyName(this.groupBox2, null); this.groupBox2.Controls.Add(this.label1); this.groupBox2.Controls.Add(this.ColumnsMaterialTextBox); this.groupBox2.Controls.Add(this.MaterialButton); this.groupBox2.Controls.Add(this.profileCatalog1); this.groupBox2.Controls.Add(this.label5); this.groupBox2.Controls.Add(this.ColumnsProfileTextBox); this.groupBox2.Location = new System.Drawing.Point(12, 59); this.groupBox2.Name = "groupBox2"; this.groupBox2.Size = new System.Drawing.Size(350, 81); this.groupBox2.TabIndex = 13; this.groupBox2.TabStop = false; this.groupBox2.Text = "Columns"; // // label1 // this.structuresExtender.SetAttributeName(this.label1, null); this.structuresExtender.SetAttributeTypeName(this.label1, null); this.label1.AutoSize = true; this.structuresExtender.SetBindPropertyName(this.label1, null); this.label1.Location = new System.Drawing.Point(6, 50); this.label1.Name = "label1"; this.label1.Size = new System.Drawing.Size(47, 13); this.label1.TabIndex = 9; this.label1.Text = "Material:"; // // ColumnsMaterialTextBox // this.structuresExtender.SetAttributeName(this.ColumnsMaterialTextBox, "Material"); this.structuresExtender.SetAttributeTypeName(this.ColumnsMaterialTextBox, "String"); this.structuresExtender.SetBindPropertyName(this.ColumnsMaterialTextBox, null); this.ColumnsMaterialTextBox.Location = new System.Drawing.Point(99, 46); this.ColumnsMaterialTextBox.Name = "ColumnsMaterialTextBox"; this.ColumnsMaterialTextBox.Size = new 
System.Drawing.Size(131, 20); this.ColumnsMaterialTextBox.TabIndex = 8; this.ColumnsMaterialTextBox.Text = "S235JR"; // // MaterialButton // this.structuresExtender.SetAttributeName(this.MaterialButton, null); this.structuresExtender.SetAttributeTypeName(this.MaterialButton, null); this.structuresExtender.SetBindPropertyName(this.MaterialButton, null); this.MaterialButton.Location = new System.Drawing.Point(247, 45); this.MaterialButton.Name = "MaterialButton"; this.MaterialButton.Size = new System.Drawing.Size(88, 23); this.MaterialButton.TabIndex = 7; this.MaterialButton.Text = "Select..."; this.MaterialButton.UseVisualStyleBackColor = true; this.MaterialButton.Click += new System.EventHandler(this.SelectMaterialButton_Click); // // profileCatalog1 // this.structuresExtender.SetAttributeName(this.profileCatalog1, null); this.structuresExtender.SetAttributeTypeName(this.profileCatalog1, null); this.profileCatalog1.BackColor = System.Drawing.Color.Transparent; this.structuresExtender.SetBindPropertyName(this.profileCatalog1, null); this.profileCatalog1.Location = new System.Drawing.Point(247, 19); this.profileCatalog1.Name = "profileCatalog1"; this.profileCatalog1.SelectedProfile = ""; this.profileCatalog1.Size = new System.Drawing.Size(88, 23); this.profileCatalog1.TabIndex = 6; this.profileCatalog1.SelectClicked += new System.EventHandler(this.profileCatalog1_SelectClicked); this.profileCatalog1.SelectionDone += new System.EventHandler(this.profileCatalog1_SelectionDone); // // label5 // this.structuresExtender.SetAttributeName(this.label5, null); this.structuresExtender.SetAttributeTypeName(this.label5, null); this.label5.AutoSize = true; this.structuresExtender.SetBindPropertyName(this.label5, null); this.label5.Location = new System.Drawing.Point(6, 24); this.label5.Name = "label5"; this.label5.Size = new System.Drawing.Size(39, 13); this.label5.TabIndex = 5; this.label5.Text = "Profile:"; // // saveLoad1 // 
this.structuresExtender.SetAttributeName(this.saveLoad1, null); this.structuresExtender.SetAttributeTypeName(this.saveLoad1, null); this.saveLoad1.AutoSizeMode = System.Windows.Forms.AutoSizeMode.GrowAndShrink; this.structuresExtender.SetBindPropertyName(this.saveLoad1, null); this.saveLoad1.Dock = System.Windows.Forms.DockStyle.Top; this.saveLoad1.Location = new System.Drawing.Point(0, 0); this.saveLoad1.Name = "saveLoad1"; this.saveLoad1.SaveAsText = ""; this.saveLoad1.Size = new System.Drawing.Size(534, 43); this.saveLoad1.TabIndex = 14; // // CreateButton // this.structuresExtender.SetAttributeName(this.CreateButton, null); this.structuresExtender.SetAttributeTypeName(this.CreateButton, null); this.structuresExtender.SetBindPropertyName(this.CreateButton, null); this.CreateButton.Location = new System.Drawing.Point(389, 231); this.CreateButton.Name = "CreateButton"; this.CreateButton.Size = new System.Drawing.Size(126, 23); this.CreateButton.TabIndex = 16; this.CreateButton.Text = "Create..."; this.CreateButton.UseVisualStyleBackColor = true; this.CreateButton.Click += new System.EventHandler(this.CreateButton_Click); // // label6 // this.structuresExtender.SetAttributeName(this.label6, null); this.structuresExtender.SetAttributeTypeName(this.label6, null); this.label6.AutoSize = true; this.structuresExtender.SetBindPropertyName(this.label6, null); this.label6.Location = new System.Drawing.Point(6, 26); this.label6.Name = "label6"; this.label6.Size = new System.Drawing.Size(52, 13); this.label6.TabIndex = 18; this.label6.Text = "View title:"; // // ViewTitle // this.structuresExtender.SetAttributeName(this.ViewTitle, null); this.structuresExtender.SetAttributeTypeName(this.ViewTitle, null); this.structuresExtender.SetBindPropertyName(this.ViewTitle, null); this.ViewTitle.Location = new System.Drawing.Point(99, 19); this.ViewTitle.Name = "ViewTitle"; this.ViewTitle.Size = new System.Drawing.Size(131, 20); this.ViewTitle.TabIndex = 17; // // EditDrawingButton // 
this.structuresExtender.SetAttributeName(this.EditDrawingButton, null); this.structuresExtender.SetAttributeTypeName(this.EditDrawingButton, null); this.structuresExtender.SetBindPropertyName(this.EditDrawingButton, null); this.EditDrawingButton.Location = new System.Drawing.Point(247, 19); this.EditDrawingButton.Name = "EditDrawingButton"; this.EditDrawingButton.Size = new System.Drawing.Size(88, 23); this.EditDrawingButton.TabIndex = 16; this.EditDrawingButton.Text = "Edit drawing"; this.EditDrawingButton.UseVisualStyleBackColor = true; this.EditDrawingButton.Click += new System.EventHandler(this.EditDrawingButton_Click); // // groupBox4 // this.structuresExtender.SetAttributeName(this.groupBox4, null); this.structuresExtender.SetAttributeTypeName(this.groupBox4, null); this.structuresExtender.SetBindPropertyName(this.groupBox4, null); this.groupBox4.Controls.Add(this.OpenDrawingButton); this.groupBox4.Controls.Add(this.label7); this.groupBox4.Controls.Add(this.listView1); this.groupBox4.Controls.Add(this.label6); this.groupBox4.Controls.Add(this.ViewTitle); this.groupBox4.Controls.Add(this.EditDrawingButton); this.groupBox4.Location = new System.Drawing.Point(13, 262); this.groupBox4.Name = "groupBox4"; this.groupBox4.Size = new System.Drawing.Size(349, 120); this.groupBox4.TabIndex = 20; this.groupBox4.TabStop = false; this.groupBox4.Text = "Drawings"; // // OpenDrawingButton // this.structuresExtender.SetAttributeName(this.OpenDrawingButton, null); this.structuresExtender.SetAttributeTypeName(this.OpenDrawingButton, null); this.structuresExtender.SetBindPropertyName(this.OpenDrawingButton, null); this.OpenDrawingButton.Location = new System.Drawing.Point(246, 91); this.OpenDrawingButton.Name = "OpenDrawingButton"; this.OpenDrawingButton.Size = new System.Drawing.Size(88, 23); this.OpenDrawingButton.TabIndex = 25; this.OpenDrawingButton.Text = "Open drawing"; this.OpenDrawingButton.UseVisualStyleBackColor = true; this.OpenDrawingButton.Click += new 
System.EventHandler(this.OpenDrawingButton_Click); // // label7 // this.structuresExtender.SetAttributeName(this.label7, null); this.structuresExtender.SetAttributeTypeName(this.label7, null); this.label7.AutoSize = true; this.structuresExtender.SetBindPropertyName(this.label7, null); this.label7.Location = new System.Drawing.Point(6, 45); this.label7.Name = "label7"; this.label7.Size = new System.Drawing.Size(64, 13); this.label7.TabIndex = 24; this.label7.Text = "Drawing list:"; // // listView1 // this.structuresExtender.SetAttributeName(this.listView1, null); this.structuresExtender.SetAttributeTypeName(this.listView1, null); this.structuresExtender.SetBindPropertyName(this.listView1, null); this.listView1.HoverSelection = true; this.listView1.Location = new System.Drawing.Point(98, 45); this.listView1.MultiSelect = false; this.listView1.Name = "listView1"; this.listView1.Size = new System.Drawing.Size(131, 69); this.listView1.TabIndex = 23; this.listView1.TileSize = new System.Drawing.Size(5, 5); this.listView1.UseCompatibleStateImageBehavior = false; // // Form1 // this.structuresExtender.SetAttributeName(this, null); this.structuresExtender.SetAttributeTypeName(this, null); this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; this.structuresExtender.SetBindPropertyName(this, null); this.ClientSize = new System.Drawing.Size(534, 389); this.Controls.Add(this.groupBox4); this.Controls.Add(this.CreateButton); this.Controls.Add(this.saveLoad1); this.Controls.Add(this.groupBox2); this.Controls.Add(this.groupBox1); this.Controls.Add(this.button2); this.Controls.Add(this.SizeLabel1); this.Controls.Add(this.FootingSize); this.Controls.Add(this.CreatePadFootingButton); this.Name = "Form1"; this.Text = "DotNetExample"; this.groupBox1.ResumeLayout(false); this.groupBox1.PerformLayout(); this.groupBox2.ResumeLayout(false); this.groupBox2.PerformLayout(); this.groupBox4.ResumeLayout(false); 
this.groupBox4.PerformLayout(); this.ResumeLayout(false); this.PerformLayout(); } #endregion private System.Windows.Forms.Button CreatePadFootingButton; private System.Windows.Forms.TextBox FootingSize; private System.Windows.Forms.Label SizeLabel1; private System.Windows.Forms.Button button2; private System.Windows.Forms.TextBox ColumnsProfileTextBox; private System.Windows.Forms.TextBox SizeTextBox; private System.Windows.Forms.TextBox GradeTextBox; private System.Windows.Forms.TextBox BendingRadiusTextBox; private System.Windows.Forms.Label label2; private System.Windows.Forms.Label label3; private System.Windows.Forms.Label label4; private System.Windows.Forms.GroupBox groupBox1; private Tekla.Structures.Dialog.UIControls.ReinforcementCatalog reinforcementCatalog1; private System.Windows.Forms.GroupBox groupBox2; private System.Windows.Forms.Label label5; private System.Windows.Forms.Button MaterialButton; private Tekla.Structures.Dialog.UIControls.ProfileCatalog profileCatalog1; private System.Windows.Forms.Label label1; private System.Windows.Forms.TextBox ColumnsMaterialTextBox; private Tekla.Structures.Dialog.UIControls.SaveLoad saveLoad1; private System.Windows.Forms.GroupBox groupBox3; private System.Windows.Forms.Button CreateButton; private System.Windows.Forms.Label label6; private System.Windows.Forms.TextBox ViewTitle; private System.Windows.Forms.Button EditDrawingButton; private System.Windows.Forms.GroupBox groupBox4; private System.Windows.Forms.Button OpenDrawingButton; private System.Windows.Forms.Label label7; private System.Windows.Forms.ListView listView1; } }
// Copyright 2020 The Tilt Brush Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma warning disable 219
using UnityEngine;

namespace TiltBrush {

// Brush that extrudes a chain of tetrahedron-like solids along the stroke:
// for each knot it emits a small closed "circle" of verts (4 verts, first and
// last coincident) at the previous knot's position plus a single apex vert at
// the current knot's position, then connects them into a tetrahedron.
//
// TODO: Could be slightly more vtx-efficient with a non-textured tube
//   (no need to duplicate verts along seam)
// TODO: remove use of nRight, nSurface
class TetraBrush : GeometryBrush {
  const float M2U = App.METERS_TO_UNITS;
  // NOTE(review): U2M, kCapAspect, kVertsInCap and kBreakAngleScalar appear
  // unused in this class -- possibly kept for parity with sibling brushes;
  // confirm before removing.
  const float U2M = App.UNITS_TO_METERS;
  const float TWOPI = 2 * Mathf.PI;

  // Moves shorter than this (in units) break the strip rather than framing it.
  const float kMinimumMove = 5e-4f * M2U;
  const float kCapAspect = .8f;
  // 4 verts: start and end of the "circle" are coincident (see MakeClosedCircle).
  const ushort kVertsInClosedCircle = 4;
  const ushort kVertsInCap = kVertsInClosedCircle-1;
  const float kBreakAngleScalar = 3.0f;
  // Minimum solid length; combined with pressure-scaled size in GetSpawnInterval.
  const float kSolidMinLengthMeters = 0.002f;
  const float kSolidAspectRatio = 0.2f;

  /// Positive multiplier; 1.0 is standard, higher is more sensitive.
  [SerializeField] float m_BreakAngleMultiplier = 2;

  /// Amount of texture to chop off left and right edges, because
  /// interesting textures have ragged edges which don't work well when
  /// wrapped around tubes.
  [SerializeField] float m_TextureEdgeChop = 0.0f;

  // Distance: u advances along the stroke; Unitized: u is 0 per segment.
  protected enum UVStyle { Distance, Unitized };
  [SerializeField] protected UVStyle m_uvStyle = UVStyle.Distance;

  public TetraBrush() : this(true) {}
  public TetraBrush(bool bCanBatch)
      : base(bCanBatch: bCanBatch,
             upperBoundVertsPerKnot: kVertsInClosedCircle * 2,
             bDoubleSided: false) {
    // Start and end of circle are coincident, and need at least one more point.
    Debug.Assert(kVertsInClosedCircle > 2);
  }

  //
  // GeometryBrush API
  //

  protected override void InitBrush(BrushDescriptor desc, TrTransform localPointerXf) {
    base.InitBrush(desc, localPointerXf);
    m_geometry.Layout = GetVertexLayout(desc);
  }

  // Layout: positions + normals + colors + tangents + a single 2-component UV set.
  override public GeometryPool.VertexLayout GetVertexLayout(BrushDescriptor desc) {
    return new GeometryPool.VertexLayout {
      uv0Size = 2,
      uv1Size = 0,
      bUseNormals = true,
      bUseColors = true,
      bUseTangents = true,
    };
  }

  // Knot spacing: a fixed minimum length plus a pressure-scaled amount,
  // so harder pressure (bigger solids) also spaces knots further apart.
  override public float GetSpawnInterval(float pressure01) {
    return kSolidMinLengthMeters * App.METERS_TO_UNITS +
        (PressuredSize(pressure01) * kSolidAspectRatio);
  }

  override protected void ControlPointsChanged(int iKnot0) {
    // Updating a control point affects geometry generated by previous knot
    // (if there is any). The HasGeometry check is not a micro-optimization:
    // it also keeps us from backing up past knot 0.
    int start = (m_knots[iKnot0 - 1].HasGeometry) ? iKnot0 - 1 : iKnot0;
    // Frames knots, determines how much geometry each knot should get
    OnChanged_FrameKnots(start);
    OnChanged_MakeGeometry(start);
    ResizeGeometry();
  }

  // This approximates parallel transport.
  // Returns the previous frame rotated by the minimal rotation that takes its
  // forward axis onto nTangent (no extra twist about the tangent).
  static Quaternion ComputeMinimalRotationFrame(
      Vector3 nTangent, Quaternion qPrevFrame) {
    Vector3 nPrevTangent = qPrevFrame * Vector3.forward;
    Quaternion minimal = Quaternion.FromToRotation(nPrevTangent, nTangent);
    return minimal * qPrevFrame;
  }

  // Fills in any knot data needed for geometry generation.
  // - fill in length, nRight, nSurface, iVert, iTri
  // - calculate strip-break points
  void OnChanged_FrameKnots(int iKnot0) {
    Knot prev = m_knots[iKnot0-1];
    for (int iKnot = iKnot0; iKnot < m_knots.Count; ++iKnot) {
      Knot cur = m_knots[iKnot];
      bool shouldBreak = false;

      Vector3 vMove = cur.smoothedPos - prev.smoothedPos;
      cur.length = vMove.magnitude;
      if (cur.length < kMinimumMove) {
        // Degenerate segment: too short to define a tangent.
        shouldBreak = true;
      } else {
        Vector3 nTangent = vMove / cur.length;
        if (prev.HasGeometry) {
          // Continue the previous frame with minimal rotation (approx. parallel transport).
          cur.qFrame = ComputeMinimalRotationFrame(nTangent, prev.qFrame);
        } else {
          Vector3 nRight, nUp;
          // No previous orientation; compute a reasonable starting point
          ComputeSurfaceFrameNew(Vector3.zero, nTangent, cur.point.m_Orient, out nRight, out nUp);
          cur.qFrame = Quaternion.LookRotation(nTangent, nUp);
        }
        // More break checking; replicates previous logic
        // TODO: decompose into twist and swing; use different constraints
        // http://www.euclideanspace.com/maths/geometry/rotations/for/decomposition/
        if (prev.HasGeometry && !m_PreviewMode) {
          // Allowed bend scales with segment length relative to solid size:
          // long, thin segments tolerate sharper turns before breaking.
          float fWidthHeightRatio = cur.length / PressuredSize(cur.smoothedPressure);
          float fBreakAngle = Mathf.Atan(fWidthHeightRatio) * Mathf.Rad2Deg * m_BreakAngleMultiplier;
          float angle = Quaternion.Angle(prev.qFrame, cur.qFrame);
          if (angle > fBreakAngle) {
            shouldBreak = true;
          }
        }
      }

      if (shouldBreak) {
        // Zero quaternion is the sentinel for "no valid frame at this knot".
        cur.qFrame = new Quaternion(0,0,0,0);
        cur.nRight = cur.nSurface = Vector3.zero;
      } else {
        cur.nRight = cur.qFrame * Vector3.right;
        cur.nSurface = cur.qFrame * Vector3.up;
      }

      // Just mark whether or not the strip is broken
      // tri/vert allocation will happen next pass
      cur.nTri = cur.nVert = (ushort)(shouldBreak ? 0 : 1);
      m_knots[iKnot] = cur;
      prev = cur;
    }
  }

  // Textures are laid out so u goes along the strip,
  // and v goes across the strip (from left to right)
  void OnChanged_MakeGeometry(int iKnot0) {
    // Invariant: there is a previous knot.
    Knot prev = m_knots[iKnot0-1];
    for (int iKnot = iKnot0; iKnot < m_knots.Count; ++iKnot) {
      // Invariant: all of prev's geometry (if any) is correct and up-to-date.
      // Thus, there is no need to modify anything shared with prev.
      Knot cur = m_knots[iKnot];
      cur.iTri = prev.iTri + prev.nTri;
      cur.iVert = (ushort)(prev.iVert + prev.nVert);
      // Verts are: back cap, back circle, front circle, front cap
      // Back circle is shared with previous knot
      if (cur.HasGeometry) {
        cur.nVert = cur.nTri = 0;
        Vector3 rt = cur.qFrame * Vector3.right;
        Vector3 up = cur.qFrame * Vector3.up;
        Vector3 fwd = cur.qFrame * Vector3.forward;

        // Verts, back half
        float u0, v0, v1;
        // Deterministic per-knot random; also selects the texture-atlas row.
        float random01 = m_rng.In01(cur.iVert - 1);
        if (m_uvStyle == UVStyle.Unitized) {
          u0 = 0;
        } else {
          u0 = random01;
        }
        int numV = m_Desc.m_TextureAtlasV;
        int iAtlas = (int) (random01 * 3331) % numV;
        v0 = (iAtlas + m_TextureEdgeChop) / (float) numV;
        v1 = (iAtlas+1 - m_TextureEdgeChop) / (float) numV;

        float prevSize = PressuredSize(prev.smoothedPressure);
        float prevRadius = prevSize / 2;
        float prevCircumference = TWOPI * prevRadius;
        // NOTE(review): prevURate is computed but never used below -- likely
        // leftover from a tube-style UV scheme; confirm before removing.
        float prevURate = m_Desc.m_TileRate / prevCircumference;
        MakeClosedCircle(ref cur, prev.smoothedPos, prevRadius, kVertsInClosedCircle, up, rt, fwd, u0, v0, v1);

        // Verts, front point
        {
          float size = PressuredSize(cur.smoothedPressure);
          float radius = size / 2;
          float circumference = TWOPI * radius;
          // NOTE(review): uRate is also unused; see prevURate above.
          float uRate = m_Desc.m_TileRate / circumference;
          // NOTE(review): Vector3.zero implicitly converts to Vector2 here
          // (drops z); the apex vert simply gets uv = (0, 0).
          Vector2 uv = Vector3.zero;
          AppendVert(ref cur, cur.smoothedPos, fwd, m_Color, fwd, uv);
        }

        // Tris
        int BC = 0;
        int FC = BC + kVertsInClosedCircle; // vert index of front circle
        // Connect back circle to front point
        for (int i = 0; i < kVertsInClosedCircle-1; ++i) {
          int ii = (i+1);
          AppendTri(ref cur, BC+i, FC, BC+ii);
        }
        // Back of tetrahedron
        AppendTri(ref cur, BC+0, BC+1, BC+2);
        AppendTri(ref cur, BC+2, BC+3, BC+0);
      }
      m_knots[iKnot] = cur;
      prev = cur;
    }
  }

  // Appends `num` coincident verts at `tip` forming an end cap; each vert's
  // tangent points halfway between the corresponding pair of circle verts.
  // NOTE(review): not called anywhere in this class -- possibly used by a
  // subclass or kept for parity with sibling brushes; confirm before removing.
  void MakeCapVerts(
      ref Knot k, int num,
      Vector3 tip, Vector3 circleCenter, float radius,
      float u0, float v0, float v1, float uRate,
      Vector3 up, Vector3 rt, Vector3 fwd) {
    // Length of diagonal between circle and tip
    float diagonal = ((circleCenter + up * radius) - tip).magnitude;
    float u = u0 + uRate * diagonal;
    // Cap normal faces along +/-fwd, whichever side of the circle the tip is on.
    Vector3 normal = Mathf.Sign(Vector3.Dot(tip - circleCenter, fwd)) * fwd;
    for (int i = 0; i < num; ++i) {
      // Endcap vert n tangent points halfway between circle verts n and (n+1)
      float t = (i + .5f) / num;
      float theta = TWOPI * t;
      Vector3 tan = -Mathf.Cos(theta) * up + -Mathf.Sin(theta) * rt;
      Vector2 uv = new Vector2(u, Mathf.Lerp(v0, v1, t));
      AppendVert(ref k, tip, normal, m_Color, tan, uv);
    }
  }

  // Appends a ring of `num` verts of the given radius around `center`;
  // the first and last verts are exactly coincident (closed seam).
  void MakeClosedCircle(
      ref Knot k, Vector3 center, float radius, int num,
      Vector3 up, Vector3 rt, Vector3 fwd,
      float u, float v0, float v1) {
    // When facing down the tangent, circle verts should go clockwise
    // We'd like the seam to be on the bottom
    up *= radius;
    rt *= radius;
    for (int i = 0; i < num; ++i) {
      float t = (float)i / (num-1);
      // Ensure that the first and last verts are exactly coincident
      float theta = (t == 1) ? 0 : TWOPI * t;
      Vector2 uv;
      if (m_uvStyle == UVStyle.Unitized) {
        uv = new Vector2(u,i);
      } else {
        uv = new Vector2(u, Mathf.Lerp(v0, v1, t));
      }
      Vector3 off = -Mathf.Cos(theta) * up + -Mathf.Sin(theta) * rt;
      AppendVert(ref k, center + off, off.normalized, m_Color, fwd, uv);
    }
  }

  /// Resizes arrays if necessary, appends data, mutates knot's vtx count. The
  /// incoming normal n should be normalized.
  void AppendVert(ref Knot k, Vector3 v, Vector3 n, Color32 c, Vector3 tan, Vector2 uv) {
    int i = k.iVert + k.nVert++;
    Vector4 tan4 = tan;
    tan4.w = 1;
    if (i == m_geometry.m_Vertices.Count) {
      // Appending at the end of the pool.
      m_geometry.m_Vertices .Add(v);
      m_geometry.m_Normals  .Add(n);
      m_geometry.m_Colors   .Add(c);
      m_geometry.m_Tangents .Add(tan4);
      m_geometry.m_Texcoord0.v2.Add(uv);
    } else {
      // Overwriting previously-allocated geometry in place.
      m_geometry.m_Vertices[i] = v;
      m_geometry.m_Normals[i] = n;
      m_geometry.m_Colors[i] = c;
      m_geometry.m_Tangents[i] = tan4;
      m_geometry.m_Texcoord0.v2[i] = uv;
    }
  }

  // Appends one triangle (indices relative to the knot's first vert),
  // mutating the knot's tri count; appends or overwrites like AppendVert.
  void AppendTri(ref Knot k, int t0, int t1, int t2) {
    int i = (k.iTri + k.nTri++) * 3;
    if (i == m_geometry.m_Tris.Count) {
      m_geometry.m_Tris.Add(k.iVert + t0);
      m_geometry.m_Tris.Add(k.iVert + t1);
      m_geometry.m_Tris.Add(k.iVert + t2);
    } else {
      m_geometry.m_Tris[i + 0] = k.iVert + t0;
      m_geometry.m_Tris[i + 1] = k.iVert + t1;
      m_geometry.m_Tris[i + 2] = k.iVert + t2;
    }
  }

  // True if iKnot is the last knot, or the last knot with geometry, in its strip.
  // NOTE(review): not called within this class; confirm external use before removing.
  bool IsPenultimate(int iKnot) {
    return (iKnot+1 == m_knots.Count || !m_knots[iKnot+1].HasGeometry);
  }
}
}  // namespace TiltBrush
//-----------------------------------------------------------------------
// <copyright file="Stage.cs" company="Akka.NET Project">
//     Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
//     Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------

using System;
using System.Collections.Generic;
using Akka.Pattern;
using Akka.Streams.Dsl;
using Akka.Streams.Supervision;

namespace Akka.Streams.Stage
{
    /// <summary>
    /// General interface for stream transformation.
    ///
    /// Custom <see cref="IStage{TIn, TOut}"/> implementations are intended to be used with
    /// <see cref="FlowOperations.Transform{TIn,TOut1,TOut2,TMat}"/> to extend the <see cref="FlowOperations"/> API when there
    /// is no specialized operator that performs the transformation.
    ///
    /// Custom implementations are subclasses of <see cref="PushPullStage{TIn, TOut}"/> or
    /// <see cref="DetachedStage{TIn, TOut}"/>. Sometimes it is convenient to extend
    /// <see cref="StatefulStage{TIn, TOut}"/> for support of become-like behavior.
    ///
    /// It is possible to keep state in the concrete <see cref="IStage{TIn, TOut}"/> instance with
    /// ordinary instance variables. The <see cref="ITransformerLike{TIn,TOut}"/> is executed by an actor and
    /// therefore you do not have to add any additional thread safety or memory
    /// visibility constructs to access the state from the callback methods.
    /// </summary>
    /// <typeparam name="TIn">The type of element consumed from upstream.</typeparam>
    /// <typeparam name="TOut">The type of element produced downstream.</typeparam>
    [Obsolete("Please use GraphStage instead.")]
    public interface IStage<in TIn, out TOut> { }

    /// <summary>
    /// <para>
    /// <see cref="PushPullStage{TIn,TOut}"/> implementations participate in 1-bounded regions. For every external non-completion signal these
    /// stages produce *exactly one* push or pull signal.
    /// </para>
    /// <para>
    /// <see cref="AbstractStage{TIn,TOut}.OnPush"/> is called when an element from upstream is available and there is demand from downstream, i.e.
    /// in <see cref="AbstractStage{TIn,TOut}.OnPush"/> you are allowed to call <see cref="IContext.Push"/> to emit one element downstream, or you can absorb the
    /// element by calling <see cref="IContext.Pull"/>. Note that you can only emit zero or one element downstream from <see cref="AbstractStage{TIn,TOut}.OnPull"/>.
    /// To emit more than one element you have to push the remaining elements from <see cref="AbstractStage{TIn,TOut}.OnPush"/>, one-by-one.
    /// <see cref="AbstractStage{TIn,TOut}.OnPush"/> is not called again until <see cref="AbstractStage{TIn,TOut}.OnPull"/> has requested more elements with <see cref="IContext.Pull"/>.
    /// </para>
    /// <para>
    /// <see cref="StatefulStage{TIn,TOut}"/> has support for making it easy to emit more than one element from <see cref="AbstractStage{TIn,TOut}.OnPush"/>.
    /// </para>
    /// <para>
    /// <see cref="AbstractStage{TIn,TOut}.OnPull"/> is called when there is demand from downstream, i.e. you are allowed to push one element
    /// downstream with <see cref="IContext.Push"/>, or request elements from upstreams with <see cref="IContext.Pull"/>. If you
    /// always perform transitive pull by calling <see cref="IContext.Pull"/> from <see cref="AbstractStage{TIn,TOut}.OnPull"/> you can use
    /// <see cref="PushStage{TIn,TOut}"/> instead of <see cref="PushPullStage{TIn,TOut}"/>.
    /// </para>
    /// <para>
    /// Stages are allowed to do early completion of downstream and cancel of upstream. This is done with <see cref="IContext.Finish"/>,
    /// which is a combination of cancel/complete.
    /// </para>
    /// <para>
    /// Since OnComplete is not a backpressured signal it is sometimes preferable to push a final element and then
    /// immediately finish. This combination is exposed as <see cref="IContext.PushAndFinish"/> which enables stages to
    /// propagate completion events without waiting for an extra round of pull.
    /// </para>
    /// <para>
    /// Another peculiarity is how to convert termination events (complete/failure) into elements. The problem
    /// here is that the termination events are not backpressured while elements are. This means that simply calling
    /// <see cref="IContext.Push"/> as a response to <see cref="AbstractStage{TIn,TOut}.OnUpstreamFinish(IContext)"/> or <see cref="AbstractStage{TIn,TOut}.OnUpstreamFailure(Exception,IContext)"/> will very likely break boundedness
    /// and result in a buffer overflow somewhere. Therefore the only allowed command in this case is
    /// <see cref="IContext.AbsorbTermination"/> which stops the propagation of the termination signal, and puts the stage in a
    /// <see cref="IContext.IsFinishing"/> state. Depending on whether the stage has a pending pull signal it
    /// has not yet "consumed" by a push its <see cref="AbstractStage{TIn,TOut}.OnPull"/> handler might be called immediately or later. From
    /// <see cref="AbstractStage{TIn,TOut}.OnPull"/> final elements can be pushed before completing downstream with <see cref="IContext.Finish"/> or
    /// <see cref="IContext.PushAndFinish"/>.
    /// </para>
    /// <para>
    /// <see cref="StatefulStage{TIn,TOut}"/> has support for making it easy to emit final elements.
    /// </para>
    /// <para>
    /// All these rules are enforced by types and runtime checks where needed. Always return the <see cref="Directive"/>
    /// from the call to the <see cref="IContext"/> method, and do only call <see cref="IContext"/> commands once per callback.
    /// </para>
    /// </summary>
    /// <seealso cref="DetachedStage{TIn,TOut}"/>
    /// <seealso cref="StatefulStage{TIn,TOut}"/>
    /// <seealso cref="PushStage{TIn,TOut}"/>
    /// <typeparam name="TIn">The type of element consumed from upstream.</typeparam>
    /// <typeparam name="TOut">The type of element produced downstream.</typeparam>
    [Obsolete("Please use GraphStage instead.")]
    public abstract class PushPullStage<TIn, TOut> : AbstractStage<TIn, TOut, ISyncDirective, ISyncDirective, IContext<TOut>> { }

    /// <summary>
    /// <see cref="PushStage{TIn,TOut}"/> is a <see cref="PushPullStage{TIn,TOut}"/> that always performs transitive pull by calling <see cref="IContext.Pull"/> from <see cref="OnPull"/>.
    /// </summary>
    /// <typeparam name="TIn">The type of element consumed from upstream.</typeparam>
    /// <typeparam name="TOut">The type of element produced downstream.</typeparam>
    [Obsolete("Please use GraphStage instead.")]
    public abstract class PushStage<TIn, TOut> : PushPullStage<TIn, TOut>
    {
        /// <summary>
        /// Always pulls from upstream.
        /// </summary>
        /// <param name="context">The execution context of this stage.</param>
        /// <returns>The directive produced by <see cref="IContext.Pull"/>.</returns>
        public sealed override ISyncDirective OnPull(IContext<TOut> context) => context.Pull();
    }

    /// <summary>
    /// DetachedStage can be used to implement operations similar to <see cref="FlowOperations.Buffer{TIn,TOut,TMat}"/>,
    /// <see cref="FlowOperations.Expand{TIn,TOut1,TOut2,TMat}"/> and <see cref="FlowOperations.Conflate{TIn,TOut,TMat}"/>.
    ///
    /// DetachedStage implementations are boundaries between 1-bounded regions. This means that they need to enforce the
    /// "exactly one" property both on their upstream and downstream regions. As a consequence a DetachedStage can never
    /// answer an <see cref="AbstractStage{TIn,TOut}.OnPull"/> with a <see cref="IContext.Pull"/> or answer an <see cref="AbstractStage{TIn,TOut}.OnPush"/> with a <see cref="IContext.Push"/> since such an action
    /// would "steal" the event from one region (resulting in zero signals) and would inject it to the other region
    /// (resulting in two signals).
    ///
    /// However, DetachedStages have the ability to call <see cref="IDetachedContext.HoldUpstream"/> and <see cref="IDetachedContext.HoldDownstream"/> as a response to
    /// <see cref="AbstractStage{TIn,TOut}.OnPush"/> and <see cref="AbstractStage{TIn,TOut}.OnPull"/> which temporarily takes the signal off and
    /// stops execution, at the same time putting the stage in an <see cref="IDetachedContext.IsHoldingBoth"/> state.
    /// If the stage is in a holding state it contains one absorbed signal, therefore in this state the only possible
    /// command to call is <see cref="IDetachedContext.PushAndPull"/> which results in two events making the
    /// balance right again: 1 hold + 1 external event = 2 external events.
    ///
    /// This mechanism allows synchronization between the upstream and downstream regions which otherwise can progress
    /// independently.
    ///
    /// @see <see cref="PushPullStage{TIn,TOut}"/>
    /// </summary>
    /// <typeparam name="TIn">The type of element consumed from upstream.</typeparam>
    /// <typeparam name="TOut">The type of element produced downstream.</typeparam>
    [Obsolete("Please use GraphStage instead.")]
    public abstract class DetachedStage<TIn, TOut> : AbstractStage<TIn, TOut, IUpstreamDirective, IDownstreamDirective, IDetachedContext<TOut>>
    {
        /// <summary>
        /// Always true: marks this stage as a boundary between 1-bounded regions.
        /// </summary>
        protected internal override bool IsDetached => true;
    }

    /// <summary>
    /// The behavior of <see cref="StatefulStage{TIn,TOut}"/> is defined by these two methods, which
    /// have the same semantics as the corresponding methods in <see cref="PushPullStage{TIn,TOut}"/>.
    /// </summary>
    /// <typeparam name="TIn">The type of element consumed from upstream.</typeparam>
    /// <typeparam name="TOut">The type of element produced downstream.</typeparam>
    public abstract class StageState<TIn, TOut>
    {
        /// <summary>
        /// Called when an element is pushed from upstream while this state is current.
        /// </summary>
        /// <param name="element">The element received from upstream.</param>
        /// <param name="context">The execution context of the enclosing stage.</param>
        /// <returns>The directive describing how to react to the element.</returns>
        public abstract ISyncDirective OnPush(TIn element, IContext<TOut> context);

        /// <summary>
        /// Called on downstream demand while this state is current; pulls from upstream by default.
        /// </summary>
        /// <param name="context">The execution context of the enclosing stage.</param>
        /// <returns>The directive produced by <see cref="IContext.Pull"/> unless overridden.</returns>
        public virtual ISyncDirective OnPull(IContext<TOut> context) => context.Pull();
    }

    /// <summary>
    /// Non-generic companion holding the internal "and then" continuation commands
    /// used by <see cref="StatefulStage{TIn,TOut}"/> after an emitting state drains its enumerator.
    /// </summary>
    public static class StatefulStage
    {
        #region Internal API

        /// <summary>
        /// Marker for what to do after an emitting state has pushed all of its elements.
        /// </summary>
        internal interface IAndThen { }

        /// <summary>
        /// Command: finish the stage after emitting.
        /// </summary>
        [Serializable]
        internal sealed class Finish : IAndThen
        {
            /// <summary>
            /// The singleton instance of this command.
            /// </summary>
            public static readonly Finish Instance = new Finish();
            private Finish() { }
        }

        /// <summary>
        /// Command: switch to another <see cref="StageState{TIn,TOut}"/> after emitting.
        /// </summary>
        /// <typeparam name="TIn">The type of element consumed from upstream.</typeparam>
        /// <typeparam name="TOut">The type of element produced downstream.</typeparam>
        [Serializable]
        internal sealed class Become<TIn, TOut> : IAndThen
        {
            /// <summary>
            /// The state to switch to after emitting.
            /// </summary>
            public readonly StageState<TIn, TOut> State;

            /// <summary>
            /// Initializes the command with the state to become.
            /// </summary>
            /// <param name="state">The state to switch to after emitting.</param>
            public Become(StageState<TIn, TOut> state)
            {
                State = state;
            }
        }

        /// <summary>
        /// Command: remain in the current state after emitting.
        /// </summary>
        [Serializable]
        internal sealed class Stay : IAndThen
        {
            /// <summary>
            /// The singleton instance of this command.
            /// </summary>
            public static readonly Stay Instance = new Stay();
            private Stay() { }
        }

        #endregion
    }

    /// <summary>
    /// <see cref="StatefulStage{TIn,TOut}"/> is a <see cref="PushPullStage{TIn,TOut}"/> that provides convenience to make some things easier.
    ///
    /// The behavior is defined in <see cref="StageState{TIn,TOut}"/> instances. The initial behavior is specified
    /// by subclass implementing the <see cref="Initial"/> method. The behavior can be changed by using <see cref="Become"/>.
/// /// Use <see cref="Emit(IEnumerator{TOut},IContext{TOut},StageState{TIn,TOut})"/> or <see cref="EmitAndFinish"/> to push more than one element from <see cref="StageState{TIn,TOut}.OnPush"/> or /// <see cref="StageState{TIn,TOut}.OnPull"/>. /// /// Use <see cref="TerminationEmit"/> to push final elements from <see cref="OnUpstreamFinish"/> or <see cref="AbstractStage{TIn,TOut}.OnUpstreamFailure"/>. /// </summary> /// <typeparam name="TIn">TBD</typeparam> /// <typeparam name="TOut">TBD</typeparam> [Obsolete("Please use GraphStage instead.")] public abstract class StatefulStage<TIn, TOut> : PushPullStage<TIn, TOut> { private bool _isEmitting; private StageState<TIn, TOut> _current; /// <summary> /// TBD /// </summary> /// <param name="current">TBD</param> protected StatefulStage(StageState<TIn, TOut> current) { _current = current; Become(Initial); } /// <summary> /// Concrete subclass must return the initial behavior from this method. /// **Warning:** This method must not be implemented as `val`. /// </summary> public abstract StageState<TIn, TOut> Initial { get; } /// <summary> /// Current state. /// </summary> public StageState<TIn, TOut> Current => _current; /// <summary> /// Change the behavior to another <see cref="StageState{TIn,TOut}"/>. /// </summary> /// <param name="state">TBD</param> /// <exception cref="ArgumentNullException">TBD</exception> /// <returns>TBD</returns> public void Become(StageState<TIn, TOut> state) { if (state == null) throw new ArgumentNullException(nameof(state)); _current = state; } /// <summary> /// Invokes current state. /// </summary> /// <param name="element">TBD</param> /// <param name="context">TBD</param> /// <returns>TBD</returns> public sealed override ISyncDirective OnPush(TIn element, IContext<TOut> context) => _current.OnPush(element, context); /// <summary> /// Invokes current state. 
/// </summary> /// <param name="context">TBD</param> /// <returns>TBD</returns> public sealed override ISyncDirective OnPull(IContext<TOut> context) => _current.OnPull(context); /// <summary> /// TBD /// </summary> /// <param name="context">TBD</param> /// <returns>TBD</returns> public override ITerminationDirective OnUpstreamFinish(IContext<TOut> context) { return _isEmitting ? context.AbsorbTermination() : context.Finish(); } /// <summary> /// Can be used from <see cref="StageState{TIn,TOut}.OnPush"/> or <see cref="StageState{TIn,TOut}.OnPull"/> to push more than one /// element downstream. /// </summary> /// <param name="enumerator">TBD</param> /// <param name="context">TBD</param> /// <returns>TBD</returns> public ISyncDirective Emit(IEnumerator<TOut> enumerator, IContext<TOut> context) => Emit(enumerator, context, _current); /// <summary> /// Can be used from <see cref="StageState{TIn,TOut}.OnPush"/> or <see cref="StageState{TIn,TOut}.OnPull"/> to push more than one /// element downstream and after that change behavior. /// </summary> /// <param name="enumerator">TBD</param> /// <param name="context">TBD</param> /// <param name="nextState">TBD</param> /// <exception cref="IllegalStateException">TBD</exception> /// <returns>TBD</returns> public ISyncDirective Emit(IEnumerator<TOut> enumerator, IContext<TOut> context, StageState<TIn, TOut> nextState) { if (_isEmitting) throw new IllegalStateException("Already in emitting state"); if (!enumerator.MoveNext()) { Become(nextState); return context.Pull(); } var element = enumerator.Current; if (enumerator.MoveNext()) { _isEmitting = true; Become(EmittingState(enumerator, new StatefulStage.Become<TIn, TOut>(nextState))); } else Become(nextState); return context.Push(element); } /// <summary> /// Can be used from <see cref="OnUpstreamFinish"/> to push final elements downstream /// before completing the stream successfully. 
Note that if this is used from /// <see cref="AbstractStage{TIn,TOut}.OnUpstreamFailure"/> the failure will be absorbed and the stream will be completed /// successfully. /// </summary> /// <param name="enumerator">TBD</param> /// <param name="context">TBD</param> /// <returns>TBD</returns> public ISyncDirective TerminationEmit(IEnumerator<TOut> enumerator, IContext<TOut> context) { if (!enumerator.MoveNext()) return _isEmitting ? context.AbsorbTermination() : context.Finish(); var es = Current as EmittingState<TIn, TOut>; var nextState = es != null && _isEmitting ? es.Copy(enumerator) : EmittingState(enumerator, StatefulStage.Finish.Instance); Become(nextState); return context.AbsorbTermination(); } /// <summary> /// Can be used from <see cref="StageState{TIn,TOut}.OnPush"/> or <see cref="StageState{TIn,TOut}.OnPull"/> to push more than one /// element downstream and after that finish (complete downstream, cancel upstreams). /// </summary> /// <param name="enumerator">TBD</param> /// <param name="context">TBD</param> /// <exception cref="IllegalStateException">TBD</exception> /// <returns>TBD</returns> public ISyncDirective EmitAndFinish(IEnumerator<TOut> enumerator, IContext<TOut> context) { if(_isEmitting) throw new IllegalStateException("Already emitting a state"); if (!enumerator.MoveNext()) return context.Finish(); var elem = enumerator.Current; if (enumerator.MoveNext()) { _isEmitting = true; Become(EmittingState(enumerator, StatefulStage.Finish.Instance)); return context.Push(elem); } return context.PushAndFinish(elem); } private StageState<TIn, TOut> EmittingState(IEnumerator<TOut> enumerator, StatefulStage.IAndThen andThen) { return new EmittingState<TIn, TOut>(enumerator, andThen, context => { if (enumerator.MoveNext()) { var element = enumerator.Current; if (enumerator.MoveNext()) return context.Push(element); if (!context.IsFinishing) { _isEmitting = false; if (andThen is StatefulStage.Stay) ; else if (andThen is StatefulStage.Become<TIn, TOut>) { var 
become = andThen as StatefulStage.Become<TIn, TOut>; Become(become.State); } else if (andThen is StatefulStage.Finish) context.PushAndFinish(element); return context.Push(element); } return context.PushAndFinish(element); } throw new IllegalStateException("OnPull with empty enumerator is not expected in emitting state"); }); } } /// <summary> /// TBD /// </summary> /// <typeparam name="TIn">TBD</typeparam> /// <typeparam name="TOut">TBD</typeparam> internal sealed class EmittingState<TIn, TOut> : StageState<TIn, TOut> { private readonly IEnumerator<TOut> _enumerator; private readonly Func<IContext<TOut>, ISyncDirective> _onPull; private readonly StatefulStage.IAndThen _andThen; /// <summary> /// TBD /// </summary> /// <param name="enumerator">TBD</param> /// <param name="andThen">TBD</param> /// <param name="onPull">TBD</param> public EmittingState(IEnumerator<TOut> enumerator, StatefulStage.IAndThen andThen, Func<IContext<TOut>, ISyncDirective> onPull) { _enumerator = enumerator; _onPull = onPull; _andThen = andThen; } /// <summary> /// TBD /// </summary> /// <param name="context">TBD</param> /// <exception cref="NotImplementedException">TBD</exception> /// <returns>TBD</returns> public override ISyncDirective OnPull(IContext<TOut> context) { throw new NotImplementedException(); } /// <summary> /// TBD /// </summary> /// <param name="element">TBD</param> /// <param name="context">TBD</param> /// <exception cref="IllegalStateException">TBD</exception> /// <returns>TBD</returns> public override ISyncDirective OnPush(TIn element, IContext<TOut> context) { throw new IllegalStateException("OnPush is not allowed in emitting state"); } /// <summary> /// TBD /// </summary> /// <param name="enumerator">TBD</param> /// <exception cref="NotImplementedException">TBD</exception> /// <returns>TBD</returns> public StageState<TIn, TOut> Copy(IEnumerator<TOut> enumerator) { throw new NotImplementedException(); } } }
//! \file ImageWBM.cs //! \date Thu Jul 09 20:59:09 2015 //! \brief Wild Bug's image format. // // Copyright (C) 2015-2016 by morkt // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. 
//
using System;
using System.ComponentModel.Composition;
using System.IO;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using GameRes.Utility;

namespace GameRes.Formats.WildBug
{
    // Metadata extracted from a WPX/WBM header: image dimensions plus the raw
    // section directory needed later to locate the pixel/palette/alpha sections.
    internal class WbmMetaData : ImageMetaData
    {
        public int EntryCount;  // number of entries in the section directory
        public int EntrySize;   // size of one directory entry in bytes
        public byte[] Header;   // raw section directory bytes
    }

    // One entry of the WPX section directory.
    internal class WpxSection
    {
        public int DataFormat;    // compression variant flags for this section
        public int Offset;        // absolute file offset of the section data
        public int UnpackedSize;
        public int PackedSize;

        // Scans the directory for the entry whose first byte equals 'id'.
        // Returns null when the id is not present within 'count' entries or
        // the scan would run past the end of 'header'.
        public static WpxSection Find (byte[] header, byte id, int count, int dir_size)
        {
            int ptr = 0;
            int n = 0;
            while (header[ptr] != id)
            {
                ptr += dir_size;
                if (ptr >= header.Length)
                    return null;
                if (++n >= count)
                    return null;
            }
            return new WpxSection
            {
                DataFormat = header[ptr+1],
                Offset = LittleEndian.ToInt32 (header, ptr+4),
                UnpackedSize = LittleEndian.ToInt32 (header, ptr+8),
                PackedSize = LittleEndian.ToInt32 (header, ptr+12),
            };
        }
    }

    [Export(typeof(ImageFormat))]
    public class WbmFormat : ImageFormat
    {
        public override string Tag { get { return "WBM"; } }
        public override string Description { get { return "Wild Bug's image format"; } }
        public override uint Signature { get { return 0x1A585057; } } // 'WPX'

        // Parses the 'WPX\x1A' ... 'BMP' header, reads the section directory and
        // the image-info section (id 0x10) holding width/height/bpp.
        public override ImageMetaData ReadMetaData (IBinaryStream stream)
        {
            var header = stream.ReadHeader (0x10);
            if (!header.AsciiEqual (4, "BMP"))
                return null;
            int count = header[0xE];
            int dir_size = header[0xF];
            if (1 != header[0xC] || 0 == count || 0 == dir_size)
                return null;
            var dir = stream.ReadBytes (count * dir_size);
            // section id 0x10 carries the image description
            var section = WpxSection.Find (dir, 0x10, count, dir_size);
            if (null == section)
                return null;
            if (section.UnpackedSize < 0x10)
                return null;
            stream.Position = section.Offset;
            var data = stream.ReadBytes (section.UnpackedSize);
            if (data.Length != section.UnpackedSize)
                return null;
            return new WbmMetaData
            {
                Width = LittleEndian.ToUInt16 (data, 4),
                Height = LittleEndian.ToUInt16 (data, 6),
                BPP = data[0xC],
                EntryCount = count,
                EntrySize = dir_size,
                Header = dir,
            };
        }

        // Decodes the pixel section (id 0x11), optional palette (id 0x12, 8bpp only)
        // and optional alpha channel (id 0x13, true-color only), assembling the
        // final ImageData.
        public override ImageData Read (IBinaryStream stream, ImageMetaData info)
        {
            var meta = (WbmMetaData)info;
            var section = WpxSection.Find (meta.Header, 0x11, meta.EntryCount, meta.EntrySize);
            if (null == section)
                throw new InvalidFormatException();
            PixelFormat format;
            int pixel_size;
            switch (meta.BPP)
            {
            case 24: format = PixelFormats.Bgr24; pixel_size = 3; break;
            case 32: format = PixelFormats.Bgr32; pixel_size = 4; break;
            case 16: format = PixelFormats.Bgr555; pixel_size = 2; break;
            case 8: format = PixelFormats.Indexed8; pixel_size = 1; break;
            default: throw new NotSupportedException ("Not supported WBM bitdepth");
            }
            // rows are padded to a 4-byte boundary
            int stride = ((int)meta.Width * pixel_size + 3) & -4;
            var reader = new WbmReader (stream, section);
            var pixels = reader.Unpack (stride, pixel_size, section.DataFormat);
            if (null == pixels)
                throw new InvalidFormatException();
            if (8 == meta.BPP)
            {
                // palette section is optional; fall back to grayscale
                section = WpxSection.Find (meta.Header, 0x12, meta.EntryCount, meta.EntrySize);
                if (null == section)
                    return ImageData.Create (info, PixelFormats.Gray8, null, pixels, stride);
                reader = new WbmReader (stream, section);
                var palette_data = reader.Unpack (48, 3, section.DataFormat);
                var palette = CreatePalette (palette_data);
                return ImageData.Create (info, PixelFormats.Indexed8, palette, pixels, stride);
            }
            if (meta.BPP < 24)
                return ImageData.Create (info, format, null, pixels, stride);
            section = WpxSection.Find (meta.Header, 0x13, meta.EntryCount, meta.EntrySize);
            if (null == section)
                return ImageData.Create (info, format, null, pixels, stride);
            int alpha_stride = ((int)meta.Width + 3) & -4;
            byte[] alpha = null;
            try
            {
                reader = new WbmReader (stream, section);
                alpha = reader.Unpack (alpha_stride, 1, section.DataFormat);
            }
            catch { } // alpha is best-effort: a broken alpha section degrades to opaque output
            if (null == alpha)
                return ImageData.Create (info, format, null, pixels, stride);
            // merge color + alpha planes into a BGRA32 buffer
            byte[] alpha_image = new byte[4*meta.Width*meta.Height];
            int dst = 0;
            for (int y = 0; y < meta.Height; ++y)
            {
                int alpha_src = y * alpha_stride;
                int src = y * stride;
                for (int x = 0; x < meta.Width; ++x)
                {
                    alpha_image[dst++] = pixels[src];
                    alpha_image[dst++] = pixels[src+1];
                    alpha_image[dst++] = pixels[src+2];
                    alpha_image[dst++] = alpha[alpha_src+x];
                    src += pixel_size;
                }
            }
            return ImageData.Create (info, PixelFormats.Bgra32, null, alpha_image, (int)meta.Width*4);
        }

        public override void Write (Stream file, ImageData image)
        {
            throw new System.NotImplementedException ("WbmFormat.Write not implemented");
        }

        // Builds a 256-entry palette from packed RGB triplets; missing entries stay black.
        static BitmapPalette CreatePalette (byte[] palette_data)
        {
            int colors = Math.Min (palette_data.Length/3, 0x100);
            var palette = new Color[0x100];
            for (int i = 0; i < colors; ++i)
            {
                int c = i * 3;
                palette[i] = Color.FromRgb (palette_data[c], palette_data[c+1], palette_data[c+2]);
            }
            return new BitmapPalette (palette);
        }
    }

    // Decompresses one WPX section containing WBM image data.
    internal class WbmReader : WpxDecoder
    {
        public WbmReader (IBinaryStream input, WpxSection section) : base (input.AsStream, section)
        {
        }

        // Fills the 8-entry table of back-reference distances used by the older
        // (v1) decoder variant.  Distances are expressed in bytes relative to the
        // current output position.
        void GenerateOffsetTableV1 (int[] offset_table, int stride, int pixel_size)
        {
            offset_table[4] = pixel_size;
            offset_table[2] = 2 * pixel_size;
            offset_table[5] = 3 * pixel_size;
            if (5 * pixel_size < stride)
            {
                // wide image: prefer offsets into the previous row
                offset_table[6] = stride - pixel_size;
                offset_table[0] = stride;
                offset_table[7] = pixel_size + stride;
                offset_table[3] = 2 * pixel_size + stride;
                offset_table[1] = 2 * stride;
            }
            else
            {
                offset_table[6] = 4 * pixel_size;
                offset_table[0] = 5 * pixel_size;
                offset_table[7] = 6 * pixel_size;
                offset_table[3] = 7 * pixel_size;
                offset_table[1] = 8 * pixel_size;
            }
        }

        // Same distance table in the ordering used by the v2 decoder variant.
        void GenerateOffsetTableV2 (int[] offset_table, int stride, int pixel_size)
        {
            offset_table[0] = pixel_size;
            offset_table[1] = 2 * pixel_size;
            offset_table[2] = 3 * pixel_size;
            if (5 * pixel_size < stride)
            {
                offset_table[3] = stride - pixel_size;
                offset_table[4] = stride;
                offset_table[5] = pixel_size + stride;
                offset_table[6] = 2 * pixel_size + stride;
                offset_table[7] = 2 * stride;
            }
            else
            {
                offset_table[3] = 4 * pixel_size;
                offset_table[4] = 5 * pixel_size;
                offset_table[5] = 6 * pixel_size;
                offset_table[6] = 7 * pixel_size;
                offset_table[7] = 8 * pixel_size;
            }
        }

        int m_version;    // decoder generation currently being attempted (2, 1 or 0)
        int m_condition;  // literal/copy control-bit polarity: 1 for versions > 0, else 0

        // Decompresses the section, retrying with progressively older decoder
        // variants (v2 -> v1 -> v0) when a variant throws.  Returns null on
        // unrecoverable format errors.
        public byte[] Unpack (int stride, int
pixel_size, int flags) // sub_40919C
        {
            int[] offset_table = new int[8];
            GenerateOffsetTableV2 (offset_table, stride, pixel_size);
            // try the newest decoder first, falling back on exception
            for (m_version = 2; m_version >= 0; --m_version)
            {
                m_condition = m_version > 0 ? 1 : 0;
                try
                {
                    ResetInput();
                    if (0 == (flags & 0x80) && 0 != PackedSize)
                    {
                        byte[] ref_table = new byte[0x10000];
                        // low nibble of 'flags' selects the compression variant
                        if (0 != (flags & 1))
                        {
                            if (0 != (flags & 8))
                            {
                                if (0 != (flags & 4))
                                    return UnpackVD (ref_table, offset_table, pixel_size);
                                else if (0 != (flags & 2))
                                    return UnpackVB (ref_table, offset_table, pixel_size);
                                else
                                    return UnpackV9 (offset_table, pixel_size);
                            }
                            else if (0 != (flags & 4))
                                return UnpackV5 (ref_table, offset_table, pixel_size);
                            else if (0 != (flags & 2))
                                return UnpackV3 (ref_table, offset_table, pixel_size);
                            else
                                return UnpackV1 (offset_table, pixel_size);
                        }
                        else if (0 != (flags & 4))
                            return UnpackV4 (ref_table, offset_table, pixel_size);
                        else if (0 != (flags & 2))
                            return UnpackV2 (ref_table, offset_table, pixel_size);
                        else
                            return UnpackV0 (offset_table, pixel_size);
                    }
                    else
                        return ReadUncompressed();
                }
                catch
                {
                    // rethrow only when the oldest variant also failed
                    if (0 == m_version)
                        throw;
                }
                if (1 == m_version)
                    GenerateOffsetTableV1 (offset_table, stride, pixel_size);
            }
            return null;
        }

        // Variant 0x0F: Huffman-coded literals routed through a move-to-front
        // context table (keyed by the previous pixel byte) plus LZ back-references.
        byte[] UnpackVD (byte[] a4, int[] offset_table, int pixel_size) // 0x0F format
        {
            byte[] v47 = BuildTable(); //sub_46C26C();
            int min_count = 1 == pixel_size ? 2 : 1;
            m_available = FillBuffer();
            if (0 == m_available)
                return null;
            int step = (pixel_size + 3) & -4;
            if (m_available < step + 0x80)
                return null;
            int v7 = -pixel_size & 3;
            // first pixel is stored verbatim
            Buffer.BlockCopy (m_buffer, 0, m_output, 0, pixel_size);
            int dst = pixel_size;
            int remaining = m_output.Length - pixel_size;
            m_current = pixel_size + v7 + 128;
            if (!FillRefTable (a4, pixel_size + v7))
                return null;
            int v45 = 16384; // base index of the current MTF context row
            while (remaining > 0)
            {
                while (0 == (GetNextBit() ^ m_condition))
                {
                    // decode one Huffman symbol from the canonical code table in a4
                    int v24 = 0;
                    int v25 = 0;
                    v45 &= ~0xff00;
                    v45 |= m_output[dst - pixel_size] << 8;
                    int v26 = 16384;
                    for (;;)
                    {
                        v24 = (v24 + 1) & 0xFF;
                        if (GetNextBit() != 0)
                            v25 |= v26;
                        if (a4[2 * v25] == v24)
                            break;
                        v26 >>= 1;
                        if (0 == v26)
                            return null;
                    }
                    v24 = a4[2 * v25 + 1];
                    byte v28 = v47[v45 + v24];
                    if (0 != v24)
                    {
                        // move-to-front update of the context row
                        Buffer.BlockCopy (v47, v45, v47, v45+1, v24);
                        v47[v45] = v28;
                    }
                    m_output[dst++] = v28;
                    --remaining;
                    if (0 == remaining)
                        return m_output;
                }
                // back-reference: explicit 8-bit distance or table-indexed distance
                int count;
                int src_offset;
                if (GetNextBit() != 0)
                {
                    int v37 = ReadNext();
                    count = 2;
                    src_offset = dst - 1 - v37;
                }
                else
                {
                    count = min_count;
                    int v36 = GetNextBit() << 2;
                    v36 |= GetNextBit() << 1;
                    v36 |= GetNextBit();
                    src_offset = dst - offset_table[v36];
                }
                if (0 == GetNextBit())
                {
                    count += ReadCount();
                }
                if (remaining < count)
                    return null;
                Binary.CopyOverlapped (m_output, src_offset, dst, count);
                dst += count;
                remaining -= count;
            }
            return m_output;
        }

        // Variant 0x0B: Huffman-coded literals (no MTF context) plus LZ back-references.
        byte[] UnpackVB (byte[] a4, int[] offset_table, int pixel_size) // 0x0B format
        {
            byte[] v47 = BuildTable(); //sub_46C26C();
            int min_count = 1 == pixel_size ? 2 : 1;
            m_available = FillBuffer();
            if (0 == m_available)
                return null;
            int step = (pixel_size + 3) & -4;
            if (m_available < step + 0x80)
                return null;
            int v7 = -pixel_size & 3;
            Buffer.BlockCopy (m_buffer, 0, m_output, 0, pixel_size);
            int dst = pixel_size;
            int remaining = m_output.Length - pixel_size;
            m_current = pixel_size + v7 + 128;
            if (!FillRefTable (a4, pixel_size + v7))
                return null;
            while (remaining > 0)
            {
                while (0 == (GetNextBit() ^ m_condition))
                {
                    int v24 = 0;
                    int v25 = 0;
                    int v26 = 16384;
                    for (;;)
                    {
                        v24 = (v24 + 1) & 0xFF;
                        if (GetNextBit() != 0)
                            v25 |= v26;
                        if (a4[2 * v25] == v24)
                            break;
                        v26 >>= 1;
                        if (0 == v26)
                            return null;
                    }
                    m_output[dst++] = a4[2 * v25 + 1];
                    --remaining;
                    if (0 == remaining)
                        return m_output;
                }
                int count;
                int src_offset;
                if (GetNextBit() != 0)
                {
                    int v37 = ReadNext();
                    count = 2;
                    src_offset = dst - 1 - v37;
                }
                else
                {
                    count = min_count;
                    int v36 = GetNextBit() << 2;
                    v36 |= GetNextBit() << 1;
                    v36 |= GetNextBit();
                    src_offset = dst - offset_table[v36];
                }
                if (0 == GetNextBit())
                {
                    count += ReadCount();
                }
                if (remaining < count)
                    return null;
                Binary.CopyOverlapped (m_output, src_offset, dst, count);
                dst += count;
                remaining -= count;
            }
            return m_output;
        }

        // Variant 0x09: raw byte literals plus LZ back-references.
        byte[] UnpackV9 (int[] offset_table, int pixel_size) // 0x09 format
        {
            int min_count = 1 == pixel_size ? 2 : 1;
            m_available = FillBuffer();
            if (0 == m_available)
                return null;
            int step = (pixel_size + 3) & -4;
            if (m_available < step)
                return null;
            Buffer.BlockCopy (m_buffer, 0, m_output, 0, pixel_size);
            int dst = pixel_size;
            int remaining = m_output.Length - pixel_size;
            m_current = pixel_size + (-pixel_size & 3);
            m_bits = m_buffer[m_current++];
            m_bit_count = 8;
            while (remaining > 0)
            {
                while (0 == (GetNextBit() ^ m_condition))
                {
                    m_output[dst++] = ReadNext();
                    --remaining;
                    if (0 == remaining)
                        return m_output;
                }
                int count, src_offset;
                if (GetNextBit() != 0)
                {
                    src_offset = dst - 1 - ReadNext();
                    count = 2;
                }
                else
                {
                    count = min_count;
                    int v35 = GetNextBit();
                    v35 += v35 + GetNextBit();
                    v35 += v35 + GetNextBit();
                    src_offset = dst - offset_table[v35];
                }
                if (GetNextBit() == 0)
                {
                    count += ReadCount();
                }
                if (remaining < count)
                    return null;
                Binary.CopyOverlapped (m_output, src_offset, dst, count);
                dst += count;
                remaining -= count;
            }
            return m_output;
        }

        // Variant 0x07: Huffman + MTF context literals; back-reference encoding
        // differs between decoder generations (m_version).
        byte[] UnpackV5 (byte[] a4, int[] offset_table, int pixel_size) // 0x07 format
        {
            byte[] v46 = BuildTable();
            int min_count = 1 == pixel_size ? 2 : 1;
            m_available = FillBuffer();
            if (0 == m_available)
                return null;
            int step = (pixel_size + 3) & -4;
            if (m_available < step + 0x80)
                return null;
            int v10 = -pixel_size & 3;
            Buffer.BlockCopy (m_buffer, 0, m_output, 0, pixel_size);
            int dst = pixel_size;
            int remaining = m_output.Length - pixel_size;
            m_current = pixel_size + v10 + 128;
            if (!FillRefTable (a4, pixel_size + v10))
                return null;
            int v43 = 16384;
            while (remaining > 0)
            {
                while (0 == (GetNextBit() ^ m_condition))
                {
                    int v25 = 0;
                    int v26 = 0;
                    v43 &= ~0xff00;
                    v43 |= m_output[dst-pixel_size] << 8;
                    int v27 = 16384;
                    for (;;)
                    {
                        v25 = (v25 + 1) & 0xff;
                        if (GetNextBit() != 0)
                            v26 |= v27;
                        if (a4[2 * v26] == v25)
                            break;
                        v27 >>= 1;
                        if (0 == v27)
                            return null;
                    }
                    v25 = a4[2 * v26 + 1];
                    byte v29 = v46[v43 + v25];
                    if (0 != v25)
                    {
                        Buffer.BlockCopy (v46, v43, v46, v43+1, v25);
                        v46[v43] = v29;
                    }
                    m_output[dst++] = v29;
                    --remaining;
                    if (0 == remaining)
                        return m_output;
                }
                int count, src_offset;
                if (m_version > 1)
                {
                    // v2 stream: 8- or 16-bit explicit distances
                    if (GetNextBit() != 0)
                    {
                        int v35;
                        if (GetNextBit() != 0)
                        {
                            v35 = ReadNext();
                            count = 2;
                        }
                        else
                        {
                            v35 = ReadNext();
                            v35 |= ReadNext() << 8;
                            count = 3;
                        }
                        src_offset = dst - 1 - v35;
                    }
                    else
                    {
                        count = min_count;
                        int v35 = GetNextBit();
                        v35 += v35 + GetNextBit();
                        v35 += v35 + GetNextBit();
                        src_offset = dst - offset_table[v35];
                    }
                }
                else
                {
                    // v0/v1 stream: bit meaning is inverted relative to v2
                    if (GetNextBit() != 0)
                    {
                        count = min_count;
                        int v32 = GetNextBit() << 2;
                        v32 |= GetNextBit() << 1;
                        v32 |= GetNextBit();
                        src_offset = dst - offset_table[v32];
                    }
                    else
                    {
                        byte v35 = ReadNext();
                        count = 2;
                        src_offset = dst - 1 - v35;
                    }
                }
                if (0 == GetNextBit())
                {
                    count += ReadCount();
                }
                if (remaining < count)
                    return null;
                Binary.CopyOverlapped (m_output, src_offset, dst, count);
                dst += count;
                remaining -= count;
            }
            return m_output;
        }

        // Variant 0x06: Huffman + MTF context literals, table-only back-references.
        byte[] UnpackV4 (byte[] a4, int[] offset_table, int pixel_size) // 0x06 format
        {
            byte[] v48 = BuildTable();
            int min_count = 1 == pixel_size ? 2 : 1;
            m_available = FillBuffer();
            if (0 == m_available)
                return null;
            // NOTE(review): this variant uses (pixel_size + 4) where all siblings use
            // (pixel_size + 3); decompiled constant — verify against original binary.
            int step = (pixel_size + 4) & -4;
            if (m_available < step + 0x80)
                return null;
            int v10 = -pixel_size & 3;
            Buffer.BlockCopy (m_buffer, 0, m_output, 0, pixel_size);
            int dst = pixel_size;
            int remaining = m_output.Length - pixel_size;
            m_current = pixel_size + v10 + 128;
            if (!FillRefTable (a4, pixel_size + v10))
                return null;
            int v46 = 16384;
            while (remaining > 0)
            {
                int v28;
                while (0 == (GetNextBit() ^ m_condition))
                {
                    int v27 = 0;
                    v28 = 0;
                    v46 &= ~0xff00;
                    v46 |= m_output[dst - pixel_size] << 8;
                    int v29 = 16384;
                    for (;;)
                    {
                        v27 = (v27 + 1) & 0xff;
                        if (GetNextBit() != 0)
                            v28 |= v29;
                        if (a4[2 * v28] == v27)
                            break;
                        v29 >>= 1;
                        if (0 == v29)
                            return null;
                    }
                    v27 = a4[2 * v28 + 1];
                    byte v31 = v48[v46 + v27];
                    if (0 != v27)
                    {
                        Buffer.BlockCopy (v48, v46, v48, v46+1, v27);
                        v48[v46] = v31;
                    }
                    m_output[dst++] = v31;
                    --remaining;
                    if (0 == remaining)
                        return m_output;
                }
                v28 = GetNextBit() << 1;
                v28 |= GetNextBit();
                v28 <<= 1;
                v28 |= GetNextBit();
                int src_offset = dst - offset_table[v28];
                int count;
                if (GetNextBit() != 0)
                {
                    count = min_count;
                }
                else
                {
                    count = min_count + ReadCount();
                }
                if (remaining < count)
                    return null;
                Binary.CopyOverlapped (m_output, src_offset, dst, count);
                dst += count;
                remaining -= count;
            }
            return m_output;
        }

        // Variant 0x03: Huffman literals (no MTF) with version-dependent back-references.
        byte[] UnpackV3 (byte[] a4, int[] offset_table, int pixel_size) // 0x03 format
        {
            int min_count = 1 == pixel_size ? 2 : 1;
            m_available = FillBuffer();
            if (0 == m_available)
                return null;
            int step = (pixel_size + 3) & -4;
            if (m_available < step + 0x80)
                return null;
            int v9 = -pixel_size & 3;
            Buffer.BlockCopy (m_buffer, 0, m_output, 0, pixel_size);
            int dst = pixel_size;
            int remaining = m_output.Length - pixel_size;
            m_current = pixel_size + v9 + 128;
            if (!FillRefTable (a4, pixel_size + v9))
                return null;
            while (remaining > 0)
            {
                while (0 == (GetNextBit() ^ m_condition))
                {
                    int v24 = 0;
                    int v25 = 0;
                    int v26 = 16384;
                    for (;;)
                    {
                        // NOTE(review): symbol counter is not masked with 0xFF here,
                        // unlike the 0x0F/0x0B variants — decompiled as-is.
                        ++v24;
                        if (GetNextBit() != 0)
                            v25 |= v26;
                        if (a4[2 * v25] == v24)
                            break;
                        v26 >>= 1;
                        if (0 == v26)
                            return null;
                    }
                    m_output[dst++] = a4[2 * v25 + 1];
                    --remaining;
                    if (0 == remaining)
                        return m_output;
                }
                int count, src_offset;
                if (m_version > 1)
                {
                    if (GetNextBit() != 0)
                    {
                        if (GetNextBit() != 0)
                        {
                            count = 2;
                            src_offset = ReadNext();
                        }
                        else
                        {
                            count = 3;
                            src_offset = ReadNext();
                            src_offset |= ReadNext() << 8;
                        }
                        src_offset = dst - 1 - src_offset;
                    }
                    else
                    {
                        count = min_count;
                        int v28 = GetNextBit();
                        v28 += v28 + GetNextBit();
                        v28 += v28 + GetNextBit();
                        src_offset = dst - offset_table[v28];
                    }
                }
                else
                {
                    if (GetNextBit() != 0)
                    {
                        count = min_count;
                        int v28 = GetNextBit() << 1;
                        v28 |= GetNextBit();
                        v28 <<= 1;
                        v28 |= GetNextBit();
                        src_offset = dst - offset_table[v28];
                    }
                    else
                    {
                        count = 2;
                        src_offset = dst - 1 - ReadNext();
                    }
                }
                if (GetNextBit() == 0)
                {
                    count += ReadCount();
                }
                if (remaining < count)
                    return null;
                Binary.CopyOverlapped (m_output, src_offset, dst, count);
                dst += count;
                remaining -= count;
            }
            return m_output;
        }

        // Variant 0x02: Huffman literals, table-only back-references.
        byte[] UnpackV2 (byte[] a4, int[] offset_table, int pixel_size) // 0x02 format
        {
            int min_count = 1 == pixel_size ? 2 : 1;
            m_available = FillBuffer();
            if (0 == m_available)
                return null;
            int step = (pixel_size + 3) & -4;
            if (m_available < step + 0x80)
                return null;
            int v9 = -pixel_size & 3;
            Buffer.BlockCopy (m_buffer, 0, m_output, 0, pixel_size);
            int dst = pixel_size;
            int remaining = m_output.Length - pixel_size;
            m_current = pixel_size + v9 + 128; // within m_buffer
            if (!FillRefTable (a4, pixel_size + v9))
                return null;
            while (remaining > 0)
            {
                while (0 == (GetNextBit() ^ m_condition))
                {
                    int v20 = 0;
                    int v21 = 0;
                    // NOTE(review): v9 (the alignment padding above) is reused here
                    // as the Huffman bit mask — decompiler artifact, behavior intended.
                    v9 = 16384;
                    for (;;)
                    {
                        ++v20;
                        if (0 != GetNextBit())
                            v21 |= v9;
                        if (a4[2 * v21] == v20)
                            break;
                        v9 >>= 1;
                        if (0 == v9)
                            return null;
                    }
                    m_output[dst++] = a4[2 * v21 + 1];
                    --remaining;
                    if (0 == remaining)
                        return m_output;
                }
                int v22 = GetNextBit() << 1;
                v22 |= GetNextBit();
                v22 <<= 1;
                v22 |= GetNextBit();
                int src_offset = dst - offset_table[v22];
                int count;
                if (0 != GetNextBit())
                {
                    count = min_count;
                }
                else
                {
                    count = min_count + ReadCount();
                }
                if (remaining < count)
                    return null;
                Binary.CopyOverlapped (m_output, src_offset, dst, count);
                dst += count;
                remaining -= count;
            }
            return m_output;
        }

        // Variant 0x01: raw literals with version-dependent back-references.
        byte[] UnpackV1 (int[] offset_table, int pixel_size) // 0x01 format
        {
            int min_count = 1 == pixel_size ? 2 : 1;
            m_available = FillBuffer();
            if (0 == m_available)
                return null;
            int step = (pixel_size + 3) & -4;
            if (m_available < step)
                return null;
            Buffer.BlockCopy (m_buffer, 0, m_output, 0, pixel_size);
            int dst = pixel_size;
            int remaining = m_output.Length - pixel_size;
            m_current = pixel_size + (-pixel_size & 3);
            m_bits = m_buffer[m_current++];
            m_bit_count = 8;
            while (remaining > 0)
            {
                while (0 == (GetNextBit() ^ m_condition))
                {
                    m_output[dst++] = ReadNext();
                    --remaining;
                    if (0 == remaining)
                        return m_output;
                }
                int count, src_offset;
                if (m_version > 1)
                {
                    if (GetNextBit() != 0)
                    {
                        if (GetNextBit() != 0)
                        {
                            src_offset = ReadNext();
                            count = 2;
                        }
                        else
                        {
                            src_offset = ReadNext();
                            src_offset |= ReadNext() << 8;
                            count = 3;
                        }
                        src_offset = dst - 1 - src_offset;
                    }
                    else
                    {
                        count = min_count;
                        int v20 = GetNextBit();
                        v20 += v20 + GetNextBit();
                        v20 += v20 + GetNextBit();
                        src_offset = dst - offset_table[v20];
                    }
                }
                else
                {
                    if (GetNextBit() != 0)
                    {
                        count = min_count;
                        int v14 = GetNextBit() << 2;
                        v14 |= GetNextBit() << 1;
                        v14 |= GetNextBit();
                        src_offset = dst - offset_table[v14];
                    }
                    else
                    {
                        count = 2;
                        src_offset = dst - 1 - ReadNext();
                    }
                }
                if (GetNextBit() == 0)
                {
                    count += ReadCount();
                }
                if (remaining < count)
                    return null;
                Binary.CopyOverlapped (m_output, src_offset, dst, count);
                dst += count;
                remaining -= count;
            }
            return m_output;
        }

        // Variant 0x00: raw literals, table-only back-references.
        byte[] UnpackV0 (int[] offset_table, int pixel_size) // 0x00 format
        {
            int min_count = 1 == pixel_size ? 2 : 1;
            m_available = FillBuffer();
            if (0 == m_available)
                return null;
            int step = (pixel_size + 3) & -4;
            if (m_available < step)
                return null;
            Buffer.BlockCopy (m_buffer, 0, m_output, 0, pixel_size);
            int dst = pixel_size;
            int remaining = m_output.Length - pixel_size;
            m_current = pixel_size + (-pixel_size & 3);
            m_bits = m_buffer[m_current++];
            m_bit_count = 8;
            while (remaining > 0)
            {
                while (0 == (GetNextBit() ^ m_condition))
                {
                    m_output[dst++] = ReadNext();
                    --remaining;
                    if (0 == remaining)
                        return m_output;
                }
                int v14 = GetNextBit() << 1;
                v14 |= GetNextBit();
                v14 <<= 1;
                v14 |= GetNextBit();
                int src_offset = dst - offset_table[v14];
                int count;
                if (GetNextBit() != 0)
                {
                    count = min_count;
                }
                else
                {
                    count = min_count + ReadCount();
                }
                if (remaining < count)
                    return null;
                Binary.CopyOverlapped (m_output, src_offset, dst, count);
                dst += count;
                remaining -= count;
            }
            return m_output;
        }
    }

    // Base class handling buffered input, bit reading and the shared Huffman
    // reference-table machinery for WPX-compressed sections.
    internal class WpxDecoder
    {
        Stream m_input;
        protected byte[] m_output;    // decompressed section data
        int m_packed_size;
        int m_start_pos;              // absolute offset of the section in m_input

        public byte[] Data { get { return m_output; } }
        protected int PackedSize { get { return m_packed_size; } }

        protected WpxDecoder (Stream input, WpxSection section)
        {
            m_input = input;
            m_start_pos = section.Offset;
            m_output = new byte[section.UnpackedSize];
            m_packed_size = section.PackedSize;
        }

        // Copies the section verbatim; returns null on a short read.
        protected byte[] ReadUncompressed ()
        {
            if (m_output.Length == m_input.Read (m_output, 0, m_output.Length))
                return m_output;
            else
                return null;
        }

        // Builds the 256x256 move-to-front seed table: row i holds the byte
        // sequence (255-i), (254-i), ... wrapping modulo 256.
        protected static byte[] BuildTable () // sub_4090E0
        {
            var table = new byte[0x100*0x100];
            for (int i = 0; i < 0x100; ++i)
            {
                byte v2 = (byte)(-1 - i);
                for (int j = 0; j < 0x100; ++j)
                {
                    table[0x100*i + j] = v2--;
                }
            }
            return table;
        }

        // Reads 256 4-bit code lengths (packed two per byte starting at 'src'),
        // then reads each symbol's code bits from the bit stream and stores
        // (length, symbol) pairs into 'table' indexed by the left-aligned code.
        // Returns false when the input is exhausted prematurely.
        protected bool FillRefTable (byte[] table, int src)
        {
            m_bits = m_buffer[m_current++];
            m_bit_count = 8;
            for (int n = 0; n < 0x100; )
            {
                byte v16 = m_buffer[src++];
                for (int half = 0; half < 2; ++half)
                {
                    byte v17 = (byte)(v16 & 0xF);
                    if (0 != v17)
                    {
                        int v18 = 0;
                        for (int i = v17; i != 0; --i)
                        {
                            if (0 == m_bit_count)
                            {
                                if (m_current >= m_available)
                                    return false;
                                m_bits = m_buffer[m_current++];
                                m_bit_count = 8;
                            }
                            int bit = m_bits >> 7;
                            m_bits <<= 1;
                            --m_bit_count;
                            v18 += v18 + bit;
                        }
                        // left-align the code within 15 bits
                        if (15 != v17)
                            v18 <<= 15 - v17;
                        table[2 * v18] = v17;
                        table[2 * v18 + 1] = (byte)n;
                    }
                    ++n;
                    v16 >>= 4;
                }
            }
            return true;
        }

        protected byte[] m_buffer = new byte[0x8000];  // input window
        protected int m_current = 0;                   // read position within m_buffer
        protected int m_available = 0;                 // valid bytes in m_buffer

        // Returns the next input byte, refilling the window as needed.
        protected byte ReadNext ()
        {
            if (m_current >= m_available)
            {
                m_available = FillBuffer();
                if (0 == m_available)
                    throw new InvalidFormatException ("Unexpected end of file");
                m_current = 0;
            }
            return m_buffer[m_current++];
        }

        // Decodes a gamma-style variable-length count: a run of zero bits selects
        // the bit width, then that many bits form the value.
        protected int ReadCount ()
        {
            int n = 1;
            while (0 == GetNextBit())
            {
                ++n;
            }
            int count = 1;
            for (int i = 0; i < n; ++i)
            {
                count += count + GetNextBit();
            }
            return count - 1;
        }

        protected int m_input_remaining;  // packed bytes left to consume

        // Rewinds to the start of the section so a different decoder variant can retry.
        protected void ResetInput ()
        {
            m_input.Position = m_start_pos;
            m_input_remaining = m_packed_size;
        }

        // Refills the 32 KB window; returns the number of bytes actually read.
        protected int FillBuffer () // sub_409B02
        {
            int read = 0;
            if (m_input_remaining > 0)
            {
                int size = Math.Min (m_input_remaining, 0x8000);
                m_input_remaining -= size;
                read = m_input.Read (m_buffer, 0, size);
            }
            return read;
        }

        protected byte m_bits;       // current bit-reservoir byte (MSB first)
        protected int m_bit_count = 0;

        // Returns the next bit of the stream (most significant bit first).
        protected int GetNextBit ()
        {
            if (0 == m_bit_count)
            {
                m_bits = ReadNext();
                m_bit_count = 8;
            }
            int bit = m_bits >> 7;
            m_bits <<= 1;
            --m_bit_count;
            return bit;
        }
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: POGOProtos/Networking/Responses/GetGymDetailsResponse.proto
// NOTE(review): generated file — any hand edits (including these comments) will be
// lost when the .proto is recompiled.
#pragma warning disable 1591, 0612, 3021
#region Designer generated code

using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace POGOProtos.Networking.Responses {

  /// <summary>Holder for reflection information generated from POGOProtos/Networking/Responses/GetGymDetailsResponse.proto</summary>
  public static partial class GetGymDetailsResponseReflection {

    #region Descriptor
    /// <summary>File descriptor for POGOProtos/Networking/Responses/GetGymDetailsResponse.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    static GetGymDetailsResponseReflection() {
      // Base64-encoded serialized descriptor of the .proto file.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "CjtQT0dPUHJvdG9zL05ldHdvcmtpbmcvUmVzcG9uc2VzL0dldEd5bURldGFp",
            "bHNSZXNwb25zZS5wcm90bxIfUE9HT1Byb3Rvcy5OZXR3b3JraW5nLlJlc3Bv",
            "bnNlcxoiUE9HT1Byb3Rvcy9EYXRhL0d5bS9HeW1TdGF0ZS5wcm90byKDAgoV",
            "R2V0R3ltRGV0YWlsc1Jlc3BvbnNlEjAKCWd5bV9zdGF0ZRgBIAEoCzIdLlBP",
            "R09Qcm90b3MuRGF0YS5HeW0uR3ltU3RhdGUSDAoEbmFtZRgCIAEoCRIMCgR1",
            "cmxzGAMgAygJEk0KBnJlc3VsdBgEIAEoDjI9LlBPR09Qcm90b3MuTmV0d29y",
            "a2luZy5SZXNwb25zZXMuR2V0R3ltRGV0YWlsc1Jlc3BvbnNlLlJlc3VsdBIT",
            "CgtkZXNjcmlwdGlvbhgFIAEoCSI4CgZSZXN1bHQSCQoFVU5TRVQQABILCgdT",
            "VUNDRVNTEAESFgoSRVJST1JfTk9UX0lOX1JBTkdFEAJiBnByb3RvMw=="));
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { global::POGOProtos.Data.Gym.GymStateReflection.Descriptor, },
          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::POGOProtos.Networking.Responses.GetGymDetailsResponse), global::POGOProtos.Networking.Responses.GetGymDetailsResponse.Parser, new[]{ "GymState", "Name", "Urls", "Result", "Description" }, null, new[]{ typeof(global::POGOProtos.Networking.Responses.GetGymDetailsResponse.Types.Result) }, null)
          }));
    }
    #endregion

  }
  #region Messages
  public sealed partial class GetGymDetailsResponse : pb::IMessage<GetGymDetailsResponse> {
    private static readonly pb::MessageParser<GetGymDetailsResponse> _parser = new pb::MessageParser<GetGymDetailsResponse>(() => new GetGymDetailsResponse());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<GetGymDetailsResponse> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::POGOProtos.Networking.Responses.GetGymDetailsResponseReflection.Descriptor.MessageTypes[0]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GetGymDetailsResponse() {
      OnConstruction();
    }

    partial void OnConstruction();

    // Deep-copy constructor used by Clone().
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GetGymDetailsResponse(GetGymDetailsResponse other) : this() {
      GymState = other.gymState_ != null ? other.GymState.Clone() : null;
      name_ = other.name_;
      urls_ = other.urls_.Clone();
      result_ = other.result_;
      description_ = other.description_;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GetGymDetailsResponse Clone() {
      return new GetGymDetailsResponse(this);
    }

    /// <summary>Field number for the "gym_state" field.</summary>
    public const int GymStateFieldNumber = 1;
    private global::POGOProtos.Data.Gym.GymState gymState_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::POGOProtos.Data.Gym.GymState GymState {
      get { return gymState_; }
      set {
        gymState_ = value;
      }
    }

    /// <summary>Field number for the "name" field.</summary>
    public const int NameFieldNumber = 2;
    private string name_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Name {
      get { return name_; }
      set {
        name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "urls" field.</summary>
    public const int UrlsFieldNumber = 3;
    private static readonly pb::FieldCodec<string> _repeated_urls_codec
        = pb::FieldCodec.ForString(26);
    private readonly pbc::RepeatedField<string> urls_ = new pbc::RepeatedField<string>();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::RepeatedField<string> Urls {
      get { return urls_; }
    }

    /// <summary>Field number for the "result" field.</summary>
    public const int ResultFieldNumber = 4;
    private global::POGOProtos.Networking.Responses.GetGymDetailsResponse.Types.Result result_ = 0;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::POGOProtos.Networking.Responses.GetGymDetailsResponse.Types.Result Result {
      get { return result_; }
      set {
        result_ = value;
      }
    }

    /// <summary>Field number for the "description" field.</summary>
    public const int DescriptionFieldNumber = 5;
    private string description_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Description {
      get { return description_; }
      set {
        description_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as GetGymDetailsResponse);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(GetGymDetailsResponse other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (!object.Equals(GymState, other.GymState)) return false;
      if (Name != other.Name) return false;
      if(!urls_.Equals(other.urls_)) return false;
      if (Result != other.Result) return false;
      if (Description != other.Description) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (gymState_ != null) hash ^= GymState.GetHashCode();
      if (Name.Length != 0) hash ^= Name.GetHashCode();
      hash ^= urls_.GetHashCode();
      if (Result != 0) hash ^= Result.GetHashCode();
      if (Description.Length != 0) hash ^= Description.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    // Serializes set fields only; raw tags encode (field number << 3) | wire type.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (gymState_ != null) {
        output.WriteRawTag(10);
        output.WriteMessage(GymState);
      }
      if (Name.Length != 0) {
        output.WriteRawTag(18);
        output.WriteString(Name);
      }
      urls_.WriteTo(output, _repeated_urls_codec);
      if (Result != 0) {
        output.WriteRawTag(32);
        output.WriteEnum((int) Result);
      }
      if (Description.Length != 0) {
        output.WriteRawTag(42);
        output.WriteString(Description);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (gymState_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(GymState);
      }
      if (Name.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
      }
      size += urls_.CalculateSize(_repeated_urls_codec);
      if (Result != 0) {
        size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Result);
      }
      if (Description.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Description);
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(GetGymDetailsResponse other) {
      if (other == null) {
        return;
      }
      if (other.gymState_ != null) {
        if (gymState_ == null) {
          gymState_ = new global::POGOProtos.Data.Gym.GymState();
        }
        GymState.MergeFrom(other.GymState);
      }
      if (other.Name.Length != 0) {
        Name = other.Name;
      }
      urls_.Add(other.urls_);
      if (other.Result != 0) {
        Result = other.Result;
      }
      if (other.Description.Length != 0) {
        Description = other.Description;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            if (gymState_ == null) {
              gymState_ = new global::POGOProtos.Data.Gym.GymState();
            }
            input.ReadMessage(gymState_);
            break;
          }
          case 18: {
            Name = input.ReadString();
            break;
          }
          case 26: {
            urls_.AddEntriesFrom(input, _repeated_urls_codec);
            break;
          }
          case 32: {
            result_ = (global::POGOProtos.Networking.Responses.GetGymDetailsResponse.Types.Result) input.ReadEnum();
            break;
          }
          case 42: {
            Description = input.ReadString();
            break;
          }
        }
      }
    }

    #region Nested types
    /// <summary>Container for nested types declared in the GetGymDetailsResponse message type.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static partial class Types {
      public enum Result {
        [pbr::OriginalName("UNSET")] Unset = 0,
        [pbr::OriginalName("SUCCESS")] Success = 1,
        [pbr::OriginalName("ERROR_NOT_IN_RANGE")] ErrorNotInRange = 2,
      }
    }
    #endregion

  }

  #endregion

}

#endregion Designer generated code
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // This file defines an internal class used to throw exceptions in BCL code. // The main purpose is to reduce code size. // // The old way to throw an exception generates quite a lot IL code and assembly code. // Following is an example: // C# source // throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key); // IL code: // IL_0003: ldstr "key" // IL_0008: ldstr "ArgumentNull_Key" // IL_000d: call string System.Environment::GetResourceString(string) // IL_0012: newobj instance void System.ArgumentNullException::.ctor(string,string) // IL_0017: throw // which is 21bytes in IL. // // So we want to get rid of the ldstr and call to Environment.GetResource in IL. // In order to do that, I created two enums: ExceptionResource, ExceptionArgument to represent the // argument name and resource name in a small integer. The source code will be changed to // ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key, ExceptionResource.ArgumentNull_Key); // // The IL code will be 7 bytes. // IL_0008: ldc.i4.4 // IL_0009: ldc.i4.4 // IL_000a: call void System.ThrowHelper::ThrowArgumentNullException(valuetype System.ExceptionArgument) // IL_000f: ldarg.0 // // This will also reduce the Jitted code size a lot. // // It is very important we do this for generic classes because we can easily generate the same code // multiple times for different instantiation. 
//

using System.Buffers;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
using System.Runtime.Serialization;

namespace System
{
    // Centralized, non-returning throw helpers: callers emit a short call instead of
    // constructing/throwing inline, which keeps their IL and JITted code small (full
    // rationale in the file header comment above).
    [StackTraceHidden]
    internal static class ThrowHelper
    {
        [DoesNotReturn]
        internal static void ThrowArrayTypeMismatchException()
        {
            throw new ArrayTypeMismatchException();
        }

        [DoesNotReturn]
        internal static void ThrowInvalidTypeWithPointersNotSupported(Type targetType)
        {
            throw new ArgumentException(SR.Format(SR.Argument_InvalidTypeWithPointersNotSupported, targetType));
        }

        [DoesNotReturn]
        internal static void ThrowIndexOutOfRangeException()
        {
            throw new IndexOutOfRangeException();
        }

        [DoesNotReturn]
        internal static void ThrowArgumentOutOfRangeException()
        {
            throw new ArgumentOutOfRangeException();
        }

        [DoesNotReturn]
        internal static void ThrowArgumentException_DestinationTooShort()
        {
            throw new ArgumentException(SR.Argument_DestinationTooShort, "destination");
        }

        [DoesNotReturn]
        internal static void ThrowArgumentException_OverlapAlignmentMismatch()
        {
            throw new ArgumentException(SR.Argument_OverlapAlignmentMismatch);
        }

        [DoesNotReturn]
        internal static void ThrowArgumentException_CannotExtractScalar(ExceptionArgument argument)
        {
            throw GetArgumentException(ExceptionResource.Argument_CannotExtractScalar, argument);
        }

        [DoesNotReturn]
        internal static void ThrowArgumentOutOfRange_IndexException()
        {
            throw GetArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_Index);
        }

        [DoesNotReturn]
        internal static void ThrowIndexArgumentOutOfRange_NeedNonNegNumException()
        {
            throw GetArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
        }

        [DoesNotReturn]
        internal static void ThrowValueArgumentOutOfRange_NeedNonNegNumException()
        {
            throw GetArgumentOutOfRangeException(ExceptionArgument.value, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
        }

        [DoesNotReturn]
        internal static void ThrowLengthArgumentOutOfRange_ArgumentOutOfRange_NeedNonNegNum()
        {
            throw GetArgumentOutOfRangeException(ExceptionArgument.length, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
        }

        [DoesNotReturn]
        internal static void ThrowStartIndexArgumentOutOfRange_ArgumentOutOfRange_Index()
        {
            throw GetArgumentOutOfRangeException(ExceptionArgument.startIndex, ExceptionResource.ArgumentOutOfRange_Index);
        }

        [DoesNotReturn]
        internal static void ThrowCountArgumentOutOfRange_ArgumentOutOfRange_Count()
        {
            throw GetArgumentOutOfRangeException(ExceptionArgument.count, ExceptionResource.ArgumentOutOfRange_Count);
        }

        [DoesNotReturn]
        internal static void ThrowArgumentOutOfRange_Year()
        {
            throw GetArgumentOutOfRangeException(ExceptionArgument.year, ExceptionResource.ArgumentOutOfRange_Year);
        }

        [DoesNotReturn]
        internal static void ThrowArgumentOutOfRange_BadYearMonthDay()
        {
            throw new ArgumentOutOfRangeException(null, SR.ArgumentOutOfRange_BadYearMonthDay);
        }

        [DoesNotReturn]
        internal static void ThrowArgumentOutOfRange_BadHourMinuteSecond()
        {
            throw new ArgumentOutOfRangeException(null, SR.ArgumentOutOfRange_BadHourMinuteSecond);
        }

        [DoesNotReturn]
        internal static void ThrowArgumentOutOfRange_TimeSpanTooLong()
        {
            throw new ArgumentOutOfRangeException(null, SR.Overflow_TimeSpanTooLong);
        }

        [DoesNotReturn]
        internal static void ThrowWrongKeyTypeArgumentException<T>(T key, Type targetType)
        {
            // Generic key to move the boxing to the right hand side of throw
            throw GetWrongKeyTypeArgumentException((object?)key, targetType);
        }

        [DoesNotReturn]
        internal static void ThrowWrongValueTypeArgumentException<T>(T value, Type targetType)
        {
            // Generic key to move the boxing to the right hand side of throw
            throw GetWrongValueTypeArgumentException((object?)value, targetType);
        }

        private static ArgumentException GetAddingDuplicateWithKeyArgumentException(object? key)
        {
            return new ArgumentException(SR.Format(SR.Argument_AddingDuplicateWithKey, key));
        }

        [DoesNotReturn]
        internal static void ThrowAddingDuplicateWithKeyArgumentException<T>(T key)
        {
            // Generic key to move the boxing to the right hand side of throw
            throw GetAddingDuplicateWithKeyArgumentException((object?)key);
        }

        [DoesNotReturn]
        internal static void ThrowKeyNotFoundException<T>(T key)
        {
            // Generic key to move the boxing to the right hand side of throw
            throw GetKeyNotFoundException((object?)key);
        }

        [DoesNotReturn]
        internal static void ThrowArgumentException(ExceptionResource resource)
        {
            throw GetArgumentException(resource);
        }

        [DoesNotReturn]
        internal static void ThrowArgumentException(ExceptionResource resource, ExceptionArgument argument)
        {
            throw GetArgumentException(resource, argument);
        }

        private static ArgumentNullException GetArgumentNullException(ExceptionArgument argument)
        {
            return new ArgumentNullException(GetArgumentName(argument));
        }

        [DoesNotReturn]
        internal static void ThrowArgumentNullException(ExceptionArgument argument)
        {
            throw GetArgumentNullException(argument);
        }

        [DoesNotReturn]
        internal static void ThrowArgumentNullException(ExceptionResource resource)
        {
            throw new ArgumentNullException(GetResourceString(resource));
        }

        [DoesNotReturn]
        internal static void ThrowArgumentNullException(ExceptionArgument argument, ExceptionResource resource)
        {
            throw new ArgumentNullException(GetArgumentName(argument), GetResourceString(resource));
        }

        [DoesNotReturn]
        internal static void ThrowArgumentOutOfRangeException(ExceptionArgument argument)
        {
            throw new ArgumentOutOfRangeException(GetArgumentName(argument));
        }

        [DoesNotReturn]
        internal static void ThrowArgumentOutOfRangeException(ExceptionArgument argument, ExceptionResource resource)
        {
            throw GetArgumentOutOfRangeException(argument, resource);
        }

        [DoesNotReturn]
        internal static void ThrowArgumentOutOfRangeException(ExceptionArgument argument, int paramNumber, ExceptionResource resource)
        {
            throw GetArgumentOutOfRangeException(argument, paramNumber, resource);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException()
        {
            throw new InvalidOperationException();
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException(ExceptionResource resource)
        {
            throw GetInvalidOperationException(resource);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException_OutstandingReferences()
        {
            throw new InvalidOperationException(SR.Memory_OutstandingReferences);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException(ExceptionResource resource, Exception e)
        {
            throw new InvalidOperationException(GetResourceString(resource), e);
        }

        [DoesNotReturn]
        internal static void ThrowSerializationException(ExceptionResource resource)
        {
            throw new SerializationException(GetResourceString(resource));
        }

        [DoesNotReturn]
        internal static void ThrowSecurityException(ExceptionResource resource)
        {
            throw new System.Security.SecurityException(GetResourceString(resource));
        }

        [DoesNotReturn]
        internal static void ThrowRankException(ExceptionResource resource)
        {
            throw new RankException(GetResourceString(resource));
        }

        [DoesNotReturn]
        internal static void ThrowNotSupportedException(ExceptionResource resource)
        {
            throw new NotSupportedException(GetResourceString(resource));
        }

        [DoesNotReturn]
        internal static void ThrowUnauthorizedAccessException(ExceptionResource resource)
        {
            throw new UnauthorizedAccessException(GetResourceString(resource));
        }

        [DoesNotReturn]
        internal static void ThrowObjectDisposedException(string objectName, ExceptionResource resource)
        {
            throw new ObjectDisposedException(objectName, GetResourceString(resource));
        }

        [DoesNotReturn]
        internal static void ThrowObjectDisposedException(ExceptionResource resource)
        {
            throw new ObjectDisposedException(null, GetResourceString(resource));
        }

        [DoesNotReturn]
        internal static void ThrowNotSupportedException()
        {
            throw new NotSupportedException();
        }

        [DoesNotReturn]
        internal static void ThrowAggregateException(List<Exception> exceptions)
        {
            throw new AggregateException(exceptions);
        }

        [DoesNotReturn]
        internal static void ThrowOutOfMemoryException()
        {
            throw new OutOfMemoryException();
        }

        [DoesNotReturn]
        internal static void ThrowArgumentException_Argument_InvalidArrayType()
        {
            throw new ArgumentException(SR.Argument_InvalidArrayType);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException_InvalidOperation_EnumNotStarted()
        {
            throw new InvalidOperationException(SR.InvalidOperation_EnumNotStarted);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException_InvalidOperation_EnumEnded()
        {
            throw new InvalidOperationException(SR.InvalidOperation_EnumEnded);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException_EnumCurrent(int index)
        {
            throw GetInvalidOperationException_EnumCurrent(index);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException_InvalidOperation_EnumFailedVersion()
        {
            throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException_InvalidOperation_EnumOpCantHappen()
        {
            throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException_InvalidOperation_NoValue()
        {
            throw new InvalidOperationException(SR.InvalidOperation_NoValue);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException_ConcurrentOperationsNotSupported()
        {
            throw new InvalidOperationException(SR.InvalidOperation_ConcurrentOperationsNotSupported);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException_HandleIsNotInitialized()
        {
            throw new InvalidOperationException(SR.InvalidOperation_HandleIsNotInitialized);
        }

        [DoesNotReturn]
        internal static void ThrowInvalidOperationException_HandleIsNotPinned()
        {
            throw new InvalidOperationException(SR.InvalidOperation_HandleIsNotPinned);
        }

        [DoesNotReturn]
        internal static void ThrowArraySegmentCtorValidationFailedExceptions(Array? array, int offset, int count)
        {
            throw GetArraySegmentCtorValidationFailedException(array, offset, count);
        }

        [DoesNotReturn]
        internal static void ThrowFormatException_BadFormatSpecifier()
        {
            throw new FormatException(SR.Argument_BadFormatSpecifier);
        }

        [DoesNotReturn]
        internal static void ThrowArgumentOutOfRangeException_PrecisionTooLarge()
        {
            throw new ArgumentOutOfRangeException("precision", SR.Format(SR.Argument_PrecisionTooLarge, StandardFormat.MaxPrecision));
        }

        [DoesNotReturn]
        internal static void ThrowArgumentOutOfRangeException_SymbolDoesNotFit()
        {
            throw new ArgumentOutOfRangeException("symbol", SR.Argument_BadFormatSpecifier);
        }

        // Picks the most specific exception for an ArraySegment ctor failure; only
        // reached after validation has already failed, hence the assert before the
        // catch-all ArgumentException.
        private static Exception GetArraySegmentCtorValidationFailedException(Array? array, int offset, int count)
        {
            if (array == null)
                return new ArgumentNullException(nameof(array));
            if (offset < 0)
                return new ArgumentOutOfRangeException(nameof(offset), SR.ArgumentOutOfRange_NeedNonNegNum);
            if (count < 0)
                return new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);

            Debug.Assert(array.Length - offset < count);
            return new ArgumentException(SR.Argument_InvalidOffLen);
        }

        private static ArgumentException GetArgumentException(ExceptionResource resource)
        {
            return new ArgumentException(GetResourceString(resource));
        }

        private static InvalidOperationException GetInvalidOperationException(ExceptionResource resource)
        {
            return new InvalidOperationException(GetResourceString(resource));
        }

        private static ArgumentException GetWrongKeyTypeArgumentException(object? key, Type targetType)
        {
            return new ArgumentException(SR.Format(SR.Arg_WrongType, key, targetType), nameof(key));
        }

        private static ArgumentException GetWrongValueTypeArgumentException(object? value, Type targetType)
        {
            return new ArgumentException(SR.Format(SR.Arg_WrongType, value, targetType), nameof(value));
        }

        private static KeyNotFoundException GetKeyNotFoundException(object? key)
        {
            return new KeyNotFoundException(SR.Format(SR.Arg_KeyNotFoundWithKey, key));
        }

        private static ArgumentOutOfRangeException GetArgumentOutOfRangeException(ExceptionArgument argument, ExceptionResource resource)
        {
            return new ArgumentOutOfRangeException(GetArgumentName(argument), GetResourceString(resource));
        }

        private static ArgumentException GetArgumentException(ExceptionResource resource, ExceptionArgument argument)
        {
            return new ArgumentException(GetResourceString(resource), GetArgumentName(argument));
        }

        private static ArgumentOutOfRangeException GetArgumentOutOfRangeException(ExceptionArgument argument, int paramNumber, ExceptionResource resource)
        {
            return new ArgumentOutOfRangeException(GetArgumentName(argument) + "[" + paramNumber.ToString() + "]", GetResourceString(resource));
        }

        private static InvalidOperationException GetInvalidOperationException_EnumCurrent(int index)
        {
            return new InvalidOperationException(
                index < 0 ?
                SR.InvalidOperation_EnumNotStarted :
                SR.InvalidOperation_EnumEnded);
        }

        // Allow nulls for reference types and Nullable<U>, but not for value types.
        // Aggressively inline so the jit evaluates the if in place and either drops the call altogether
        // Or just leaves null test and call to the Non-returning ThrowHelper.ThrowArgumentNullException
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void IfNullAndNullsAreIllegalThenThrow<T>(object? value, ExceptionArgument argName)
        {
            // Note that default(T) is not equal to null for value types except when T is Nullable<U>.
            if (!(default(T)! == null) && value == null) // TODO-NULLABLE: default(T) == null warning (https://github.com/dotnet/roslyn/issues/34757)
                ThrowHelper.ThrowArgumentNullException(argName);
        }

        // Throws if 'T' is disallowed in Vector<T> / Vector128<T> / other related types in the
        // Numerics or Intrinsics namespaces. If 'T' is allowed, no-ops. JIT will elide the method
        // entirely if 'T' is supported and we're on an optimized release build.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal static void ThrowForUnsupportedVectorBaseType<T>() where T : struct
        {
            // typeof(T) comparisons are resolvable at JIT time, so for supported T the
            // entire method compiles away in optimized builds.
            if (typeof(T) != typeof(byte) && typeof(T) != typeof(sbyte) &&
                typeof(T) != typeof(short) && typeof(T) != typeof(ushort) &&
                typeof(T) != typeof(int) && typeof(T) != typeof(uint) &&
                typeof(T) != typeof(long) && typeof(T) != typeof(ulong) &&
                typeof(T) != typeof(float) && typeof(T) != typeof(double))
            {
                ThrowNotSupportedException(ExceptionResource.Arg_TypeNotSupported);
            }
        }

#if false // Reflection-based implementation does not work for CoreRT/ProjectN
        // This function will convert an ExceptionArgument enum value to the argument name string.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private static string GetArgumentName(ExceptionArgument argument)
        {
            Debug.Assert(Enum.IsDefined(typeof(ExceptionArgument), argument),
                "The enum value is not defined, please check the ExceptionArgument Enum.");

            return argument.ToString();
        }
#endif

        // Hand-written mapping from ExceptionArgument to its parameter-name string;
        // avoids the reflection used by the disabled variant above.  Every
        // ExceptionArgument member needs a case here.
        private static string GetArgumentName(ExceptionArgument argument)
        {
            switch (argument)
            {
                case ExceptionArgument.obj: return "obj";
                case ExceptionArgument.dictionary: return "dictionary";
                case ExceptionArgument.array: return "array";
                case ExceptionArgument.info: return "info";
                case ExceptionArgument.key: return "key";
                case ExceptionArgument.text: return "text";
                case ExceptionArgument.values: return "values";
                case ExceptionArgument.value: return "value";
                case ExceptionArgument.startIndex: return "startIndex";
                case ExceptionArgument.task: return "task";
                case ExceptionArgument.bytes: return "bytes";
                case ExceptionArgument.byteIndex: return "byteIndex";
                case ExceptionArgument.byteCount: return "byteCount";
                case ExceptionArgument.ch: return "ch";
                case ExceptionArgument.chars: return "chars";
                case ExceptionArgument.charIndex: return "charIndex";
                case ExceptionArgument.charCount: return "charCount";
                case ExceptionArgument.s: return "s";
                case ExceptionArgument.input: return "input";
                case ExceptionArgument.ownedMemory: return "ownedMemory";
                case ExceptionArgument.list: return "list";
                case ExceptionArgument.index: return "index";
                case ExceptionArgument.capacity: return "capacity";
                case ExceptionArgument.collection: return "collection";
                case ExceptionArgument.item: return "item";
                case ExceptionArgument.converter: return "converter";
                case ExceptionArgument.match: return "match";
                case ExceptionArgument.count: return "count";
                case ExceptionArgument.action: return "action";
                case ExceptionArgument.comparison: return "comparison";
                case ExceptionArgument.exceptions: return "exceptions";
                case ExceptionArgument.exception: return "exception";
                case ExceptionArgument.pointer: return "pointer";
                case ExceptionArgument.start: return "start";
                case ExceptionArgument.format: return "format";
                case ExceptionArgument.culture: return "culture";
                case ExceptionArgument.comparer: return "comparer";
                case ExceptionArgument.comparable: return "comparable";
                case ExceptionArgument.source: return "source";
                case ExceptionArgument.state: return "state";
                case ExceptionArgument.length: return "length";
                case ExceptionArgument.comparisonType: return "comparisonType";
                case ExceptionArgument.manager: return "manager";
                case ExceptionArgument.sourceBytesToCopy: return "sourceBytesToCopy";
                case ExceptionArgument.callBack: return "callBack";
                case ExceptionArgument.creationOptions: return "creationOptions";
                case ExceptionArgument.function: return "function";
                case ExceptionArgument.scheduler: return "scheduler";
                case ExceptionArgument.continuationAction: return "continuationAction";
                case ExceptionArgument.continuationFunction: return "continuationFunction";
                case ExceptionArgument.tasks: return "tasks";
                case ExceptionArgument.asyncResult: return "asyncResult";
                case ExceptionArgument.beginMethod: return "beginMethod";
                case ExceptionArgument.endMethod: return "endMethod";
                case ExceptionArgument.endFunction: return "endFunction";
                case ExceptionArgument.cancellationToken: return "cancellationToken";
                case ExceptionArgument.continuationOptions: return "continuationOptions";
                case ExceptionArgument.delay: return "delay";
                case ExceptionArgument.millisecondsDelay: return "millisecondsDelay";
                case ExceptionArgument.millisecondsTimeout: return "millisecondsTimeout";
                case ExceptionArgument.stateMachine: return "stateMachine";
                case ExceptionArgument.timeout: return "timeout";
                case ExceptionArgument.type: return "type";
                case ExceptionArgument.sourceIndex: return "sourceIndex";
                case ExceptionArgument.sourceArray: return "sourceArray";
                case ExceptionArgument.destinationIndex: return "destinationIndex";
                case ExceptionArgument.destinationArray: return "destinationArray";
                case ExceptionArgument.pHandle: return "pHandle";
                case ExceptionArgument.other: return "other";
                case ExceptionArgument.newSize: return "newSize";
                case ExceptionArgument.lowerBounds: return "lowerBounds";
                case ExceptionArgument.lengths: return "lengths";
                case ExceptionArgument.len: return "len";
                case ExceptionArgument.keys: return "keys";
                case ExceptionArgument.indices: return "indices";
                case ExceptionArgument.index1: return "index1";
                case ExceptionArgument.index2: return "index2";
                case ExceptionArgument.index3: return "index3";
                case ExceptionArgument.length1: return "length1";
                case ExceptionArgument.length2: return "length2";
                case ExceptionArgument.length3: return "length3";
                case ExceptionArgument.endIndex: return "endIndex";
                case ExceptionArgument.elementType: return "elementType";
                case ExceptionArgument.arrayIndex: return "arrayIndex";
                case ExceptionArgument.year: return "year";
                default:
                    Debug.Fail("The enum value is not defined, please check the ExceptionArgument Enum.");
                    return "";
            }
        }

#if false // Reflection-based implementation does not work for CoreRT/ProjectN
        // This function will convert an ExceptionResource enum value to the resource string.
[MethodImpl(MethodImplOptions.NoInlining)] private static string GetResourceString(ExceptionResource resource) { Debug.Assert(Enum.IsDefined(typeof(ExceptionResource), resource), "The enum value is not defined, please check the ExceptionResource Enum."); return SR.GetResourceString(resource.ToString()); } #endif private static string GetResourceString(ExceptionResource resource) { switch (resource) { case ExceptionResource.ArgumentOutOfRange_Index: return SR.ArgumentOutOfRange_Index; case ExceptionResource.ArgumentOutOfRange_IndexCount: return SR.ArgumentOutOfRange_IndexCount; case ExceptionResource.ArgumentOutOfRange_IndexCountBuffer: return SR.ArgumentOutOfRange_IndexCountBuffer; case ExceptionResource.ArgumentOutOfRange_Count: return SR.ArgumentOutOfRange_Count; case ExceptionResource.ArgumentOutOfRange_Year: return SR.ArgumentOutOfRange_Year; case ExceptionResource.Arg_ArrayPlusOffTooSmall: return SR.Arg_ArrayPlusOffTooSmall; case ExceptionResource.NotSupported_ReadOnlyCollection: return SR.NotSupported_ReadOnlyCollection; case ExceptionResource.Arg_RankMultiDimNotSupported: return SR.Arg_RankMultiDimNotSupported; case ExceptionResource.Arg_NonZeroLowerBound: return SR.Arg_NonZeroLowerBound; case ExceptionResource.ArgumentOutOfRange_ListInsert: return SR.ArgumentOutOfRange_ListInsert; case ExceptionResource.ArgumentOutOfRange_NeedNonNegNum: return SR.ArgumentOutOfRange_NeedNonNegNum; case ExceptionResource.ArgumentOutOfRange_SmallCapacity: return SR.ArgumentOutOfRange_SmallCapacity; case ExceptionResource.Argument_InvalidOffLen: return SR.Argument_InvalidOffLen; case ExceptionResource.Argument_CannotExtractScalar: return SR.Argument_CannotExtractScalar; case ExceptionResource.ArgumentOutOfRange_BiggerThanCollection: return SR.ArgumentOutOfRange_BiggerThanCollection; case ExceptionResource.Serialization_MissingKeys: return SR.Serialization_MissingKeys; case ExceptionResource.Serialization_NullKey: return SR.Serialization_NullKey; case 
ExceptionResource.NotSupported_KeyCollectionSet: return SR.NotSupported_KeyCollectionSet; case ExceptionResource.NotSupported_ValueCollectionSet: return SR.NotSupported_ValueCollectionSet; case ExceptionResource.InvalidOperation_NullArray: return SR.InvalidOperation_NullArray; case ExceptionResource.TaskT_TransitionToFinal_AlreadyCompleted: return SR.TaskT_TransitionToFinal_AlreadyCompleted; case ExceptionResource.TaskCompletionSourceT_TrySetException_NullException: return SR.TaskCompletionSourceT_TrySetException_NullException; case ExceptionResource.TaskCompletionSourceT_TrySetException_NoExceptions: return SR.TaskCompletionSourceT_TrySetException_NoExceptions; case ExceptionResource.NotSupported_StringComparison: return SR.NotSupported_StringComparison; case ExceptionResource.ConcurrentCollection_SyncRoot_NotSupported: return SR.ConcurrentCollection_SyncRoot_NotSupported; case ExceptionResource.Task_MultiTaskContinuation_NullTask: return SR.Task_MultiTaskContinuation_NullTask; case ExceptionResource.InvalidOperation_WrongAsyncResultOrEndCalledMultiple: return SR.InvalidOperation_WrongAsyncResultOrEndCalledMultiple; case ExceptionResource.Task_MultiTaskContinuation_EmptyTaskList: return SR.Task_MultiTaskContinuation_EmptyTaskList; case ExceptionResource.Task_Start_TaskCompleted: return SR.Task_Start_TaskCompleted; case ExceptionResource.Task_Start_Promise: return SR.Task_Start_Promise; case ExceptionResource.Task_Start_ContinuationTask: return SR.Task_Start_ContinuationTask; case ExceptionResource.Task_Start_AlreadyStarted: return SR.Task_Start_AlreadyStarted; case ExceptionResource.Task_RunSynchronously_Continuation: return SR.Task_RunSynchronously_Continuation; case ExceptionResource.Task_RunSynchronously_Promise: return SR.Task_RunSynchronously_Promise; case ExceptionResource.Task_RunSynchronously_TaskCompleted: return SR.Task_RunSynchronously_TaskCompleted; case ExceptionResource.Task_RunSynchronously_AlreadyStarted: return 
SR.Task_RunSynchronously_AlreadyStarted; case ExceptionResource.AsyncMethodBuilder_InstanceNotInitialized: return SR.AsyncMethodBuilder_InstanceNotInitialized; case ExceptionResource.Task_ContinueWith_ESandLR: return SR.Task_ContinueWith_ESandLR; case ExceptionResource.Task_ContinueWith_NotOnAnything: return SR.Task_ContinueWith_NotOnAnything; case ExceptionResource.Task_Delay_InvalidDelay: return SR.Task_Delay_InvalidDelay; case ExceptionResource.Task_Delay_InvalidMillisecondsDelay: return SR.Task_Delay_InvalidMillisecondsDelay; case ExceptionResource.Task_Dispose_NotCompleted: return SR.Task_Dispose_NotCompleted; case ExceptionResource.Task_ThrowIfDisposed: return SR.Task_ThrowIfDisposed; case ExceptionResource.Task_WaitMulti_NullTask: return SR.Task_WaitMulti_NullTask; case ExceptionResource.ArgumentException_OtherNotArrayOfCorrectLength: return SR.ArgumentException_OtherNotArrayOfCorrectLength; case ExceptionResource.ArgumentNull_Array: return SR.ArgumentNull_Array; case ExceptionResource.ArgumentNull_SafeHandle: return SR.ArgumentNull_SafeHandle; case ExceptionResource.ArgumentOutOfRange_EndIndexStartIndex: return SR.ArgumentOutOfRange_EndIndexStartIndex; case ExceptionResource.ArgumentOutOfRange_Enum: return SR.ArgumentOutOfRange_Enum; case ExceptionResource.ArgumentOutOfRange_HugeArrayNotSupported: return SR.ArgumentOutOfRange_HugeArrayNotSupported; case ExceptionResource.Argument_AddingDuplicate: return SR.Argument_AddingDuplicate; case ExceptionResource.Argument_InvalidArgumentForComparison: return SR.Argument_InvalidArgumentForComparison; case ExceptionResource.Arg_LowerBoundsMustMatch: return SR.Arg_LowerBoundsMustMatch; case ExceptionResource.Arg_MustBeType: return SR.Arg_MustBeType; case ExceptionResource.Arg_Need1DArray: return SR.Arg_Need1DArray; case ExceptionResource.Arg_Need2DArray: return SR.Arg_Need2DArray; case ExceptionResource.Arg_Need3DArray: return SR.Arg_Need3DArray; case ExceptionResource.Arg_NeedAtLeast1Rank: return 
SR.Arg_NeedAtLeast1Rank; case ExceptionResource.Arg_RankIndices: return SR.Arg_RankIndices; case ExceptionResource.Arg_RanksAndBounds: return SR.Arg_RanksAndBounds; case ExceptionResource.InvalidOperation_IComparerFailed: return SR.InvalidOperation_IComparerFailed; case ExceptionResource.NotSupported_FixedSizeCollection: return SR.NotSupported_FixedSizeCollection; case ExceptionResource.Rank_MultiDimNotSupported: return SR.Rank_MultiDimNotSupported; case ExceptionResource.Arg_TypeNotSupported: return SR.Arg_TypeNotSupported; default: Debug.Fail("The enum value is not defined, please check the ExceptionResource Enum."); return ""; } } } // // The convention for this enum is using the argument name as the enum name // internal enum ExceptionArgument { obj, dictionary, array, info, key, text, values, value, startIndex, task, bytes, byteIndex, byteCount, ch, chars, charIndex, charCount, s, input, ownedMemory, list, index, capacity, collection, item, converter, match, count, action, comparison, exceptions, exception, pointer, start, format, culture, comparer, comparable, source, state, length, comparisonType, manager, sourceBytesToCopy, callBack, creationOptions, function, scheduler, continuationAction, continuationFunction, tasks, asyncResult, beginMethod, endMethod, endFunction, cancellationToken, continuationOptions, delay, millisecondsDelay, millisecondsTimeout, stateMachine, timeout, type, sourceIndex, sourceArray, destinationIndex, destinationArray, pHandle, other, newSize, lowerBounds, lengths, len, keys, indices, index1, index2, index3, length1, length2, length3, endIndex, elementType, arrayIndex, year, } // // The convention for this enum is using the resource name as the enum name // internal enum ExceptionResource { ArgumentOutOfRange_Index, ArgumentOutOfRange_IndexCount, ArgumentOutOfRange_IndexCountBuffer, ArgumentOutOfRange_Count, ArgumentOutOfRange_Year, Arg_ArrayPlusOffTooSmall, NotSupported_ReadOnlyCollection, Arg_RankMultiDimNotSupported, 
Arg_NonZeroLowerBound, ArgumentOutOfRange_ListInsert, ArgumentOutOfRange_NeedNonNegNum, ArgumentOutOfRange_SmallCapacity, Argument_InvalidOffLen, Argument_CannotExtractScalar, ArgumentOutOfRange_BiggerThanCollection, Serialization_MissingKeys, Serialization_NullKey, NotSupported_KeyCollectionSet, NotSupported_ValueCollectionSet, InvalidOperation_NullArray, TaskT_TransitionToFinal_AlreadyCompleted, TaskCompletionSourceT_TrySetException_NullException, TaskCompletionSourceT_TrySetException_NoExceptions, NotSupported_StringComparison, ConcurrentCollection_SyncRoot_NotSupported, Task_MultiTaskContinuation_NullTask, InvalidOperation_WrongAsyncResultOrEndCalledMultiple, Task_MultiTaskContinuation_EmptyTaskList, Task_Start_TaskCompleted, Task_Start_Promise, Task_Start_ContinuationTask, Task_Start_AlreadyStarted, Task_RunSynchronously_Continuation, Task_RunSynchronously_Promise, Task_RunSynchronously_TaskCompleted, Task_RunSynchronously_AlreadyStarted, AsyncMethodBuilder_InstanceNotInitialized, Task_ContinueWith_ESandLR, Task_ContinueWith_NotOnAnything, Task_Delay_InvalidDelay, Task_Delay_InvalidMillisecondsDelay, Task_Dispose_NotCompleted, Task_ThrowIfDisposed, Task_WaitMulti_NullTask, ArgumentException_OtherNotArrayOfCorrectLength, ArgumentNull_Array, ArgumentNull_SafeHandle, ArgumentOutOfRange_EndIndexStartIndex, ArgumentOutOfRange_Enum, ArgumentOutOfRange_HugeArrayNotSupported, Argument_AddingDuplicate, Argument_InvalidArgumentForComparison, Arg_LowerBoundsMustMatch, Arg_MustBeType, Arg_Need1DArray, Arg_Need2DArray, Arg_Need3DArray, Arg_NeedAtLeast1Rank, Arg_RankIndices, Arg_RanksAndBounds, InvalidOperation_IComparerFailed, NotSupported_FixedSizeCollection, Rank_MultiDimNotSupported, Arg_TypeNotSupported, } }
using System;
using System.Buffers;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Jasper.Serialization;
using Jasper.Serialization.Json;
using Jasper.Util;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.WebUtilities;
using Microsoft.Extensions.ObjectPool;
using Newtonsoft.Json;

namespace Jasper.Http.ContentHandling
{
    /// <summary>
    /// Strategy for writing a model object to an outgoing HTTP response body.
    /// </summary>
    public interface IResponseWriter : IWriterStrategy
    {
        /// <summary>
        /// Called during HTTP requests
        /// </summary>
        /// <param name="model"></param>
        /// <param name="response"></param>
        /// <returns></returns>
        Task WriteToStream(object model, HttpResponse response);
    }

    /// <summary>
    /// Strategy for deserializing an incoming HTTP request body into a model object.
    /// </summary>
    public interface IRequestReader : IReaderStrategy
    {
        Task<T> ReadFromRequest<T>(HttpRequest request);
    }

    /// <summary>
    /// Builds JSON readers/writers backed by Newtonsoft.Json, sharing pooled
    /// serializers and buffers to reduce per-request allocation.
    /// </summary>
    public class NewtonsoftReaderWriterFactory : ISerializerFactory<IRequestReader, IResponseWriter>
    {
        private readonly ArrayPool<byte> _bytePool;
        private readonly ArrayPool<char> _charPool;
        private readonly ObjectPool<JsonSerializer> _serializerPool;

        public NewtonsoftReaderWriterFactory(JsonSerializerSettings settings, ObjectPoolProvider pooling)
        {
            // One pooled JsonSerializer policy shared by every reader/writer this factory creates
            _serializerPool = pooling.Create(new JsonSerializerObjectPolicy(settings));
            _charPool = ArrayPool<char>.Shared;
            _bytePool = ArrayPool<byte>.Shared;
        }

        public string ContentType => EnvelopeConstants.JsonContentType;

        /// <summary>
        /// Deserializes a message body from the given stream using a pooled serializer.
        /// Returns the deserialized object (type is determined by the JSON payload/serializer settings).
        /// </summary>
        public object Deserialize(Stream message)
        {
            var serializer = _serializerPool.Get();

            try
            {
                var reader = new JsonTextReader(new StreamReader(message))
                {
                    ArrayPool = new JsonArrayPool<char>(_charPool),
                    CloseInput = true
                };

                return serializer.Deserialize(reader);
            }
            finally
            {
                // Always hand the serializer back to the pool, even on failure
                _serializerPool.Return(serializer);
            }
        }

        public IRequestReader ReaderFor(Type messageType)
        {
            return new NewtonsoftJsonReader(messageType, _charPool, _bytePool, _serializerPool);
        }

        public IResponseWriter WriterFor(Type messageType)
        {
            return new NewtonsoftJsonWriter(messageType, _charPool, _bytePool, _serializerPool);
        }
    }

    /// <summary>
    /// Reads a JSON request body into a strongly typed message using pooled
    /// buffers and a pooled Newtonsoft serializer.
    /// </summary>
    internal class NewtonsoftJsonReader : IRequestReader
    {
        private readonly int _bufferSize = 1024;
        private readonly ArrayPool<byte> _bytePool;
        private readonly ArrayPool<char> _charPool;
        private readonly JsonArrayPool<char> _jsonCharPool;
        private readonly ObjectPool<JsonSerializer> _serializerPool;

        internal NewtonsoftJsonReader(
            Type messageType,
            ArrayPool<char> charPool,
            ArrayPool<byte> bytePool,
            ObjectPool<JsonSerializer> serializerPool
        )
            : this(messageType, charPool, bytePool, serializerPool, EnvelopeConstants.JsonContentType)
        {
        }

        internal NewtonsoftJsonReader(
            Type messageType,
            ArrayPool<char> charPool,
            ArrayPool<byte> bytePool,
            ObjectPool<JsonSerializer> serializerPool,
            string contentType)
        {
            _charPool = charPool;
            _bytePool = bytePool;
            _serializerPool = serializerPool;
            _jsonCharPool = new JsonArrayPool<char>(charPool);

            DotNetType = messageType;
            MessageType = messageType.ToMessageTypeName();
            ContentType = contentType;
        }

        public string MessageType { get; }
        public Type DotNetType { get; }
        public string ContentType { get; }

        /// <summary>
        /// Deserializes the request body into <typeparamref name="T"/>.
        /// Buffers non-seekable bodies first because Newtonsoft performs synchronous reads.
        /// </summary>
        public async Task<T> ReadFromRequest<T>(HttpRequest request)
        {
            // TODO -- the encoding should vary here
            var targetType = typeof(T);
            object ret;

            if (!request.Body.CanSeek)
            {
                // JSON.Net does synchronous reads. In order to avoid blocking on the stream, we asynchronously
                // read everything into a buffer, and then seek back to the beginning.
                request.EnableBuffering();

                await request.Body.DrainAsync(CancellationToken.None);
                request.Body.Seek(0L, SeekOrigin.Begin);
            }

            using (var streamReader = new HttpRequestStreamReader(request.Body, Encoding.UTF8, _bufferSize, _bytePool, _charPool))
            {
                using (var jsonReader = new JsonTextReader(streamReader))
                {
                    jsonReader.ArrayPool = _jsonCharPool;
                    // Leave the request body open; ASP.NET Core owns its lifetime
                    jsonReader.CloseInput = false;

                    var serializer = _serializerPool.Get();
                    try
                    {
                        ret = serializer.Deserialize(jsonReader, targetType);
                    }
                    finally
                    {
                        _serializerPool.Return(serializer);
                    }
                }
            }

            return (T) ret;
        }
    }

    /// <summary>
    /// Writes a model as JSON, either to an HTTP response or to a byte array,
    /// using pooled buffers and a pooled Newtonsoft serializer.
    /// </summary>
    public class NewtonsoftJsonWriter : IResponseWriter
    {
        private readonly ArrayPool<byte> _bytePool;
        private readonly ArrayPool<char> _charPool;
        private readonly JsonArrayPool<char> _jsonCharPool;
        private readonly ObjectPool<JsonSerializer> _serializerPool;
        // Adaptive: grown by Write() when a payload overflows the rented buffer.
        // NOTE(review): mutated without synchronization — assumes writers are not
        // hit concurrently, or that a benign race on the size hint is acceptable.
        private int _bufferSize = 1024;

        public NewtonsoftJsonWriter(Type messageType, ArrayPool<char> charPool, ArrayPool<byte> bytePool,
            ObjectPool<JsonSerializer> serializerPool)
            : this(messageType, EnvelopeConstants.JsonContentType, charPool, bytePool, serializerPool)
        {
        }

        public NewtonsoftJsonWriter(Type messageType, string contentType, ArrayPool<char> charPool,
            ArrayPool<byte> bytePool, ObjectPool<JsonSerializer> serializerPool)
        {
            DotNetType = messageType;
            ContentType = contentType;
            _charPool = charPool;
            _bytePool = bytePool;
            _serializerPool = serializerPool;

            _jsonCharPool = new JsonArrayPool<char>(charPool);
        }

        public string ContentType { get; }
        public Type DotNetType { get; }

        /// <summary>
        /// Serializes <paramref name="model"/> as JSON directly onto the response body.
        /// </summary>
        public async Task WriteToStream(object model, HttpResponse response)
        {
            response.Headers["content-type"] = ContentType;

            using (var textWriter = new HttpResponseStreamWriter(response.Body, Encoding.UTF8, 1024, _bytePool, _charPool))
            using (var jsonWriter = new JsonTextWriter(textWriter)
            {
                ArrayPool = _jsonCharPool,
                CloseOutput = false,
                AutoCompleteOnClose = false
            })
            {
                var serializer = _serializerPool.Get();
                try
                {
                    serializer.Serialize(jsonWriter, model);
                    await textWriter.FlushAsync();
                }
                finally
                {
                    _serializerPool.Return(serializer);
                }
            }
        }

        /// <summary>
        /// Serializes <paramref name="model"/> to a byte array. First attempts to write
        /// into a rented fixed-size buffer; if the payload overflows (non-expandable
        /// MemoryStream throws NotSupportedException), falls back to an expandable
        /// stream and grows the buffer-size hint for subsequent calls.
        /// </summary>
        public byte[] Write(object model)
        {
            var serializer = _serializerPool.Get();

            var bytes = _bytePool.Rent(_bufferSize); // TODO -- should this be configurable?
            var stream = new MemoryStream(bytes);

            try
            {
                using (var textWriter = new StreamWriter(stream) {AutoFlush = true})
                using (var jsonWriter = new JsonTextWriter(textWriter)
                {
                    ArrayPool = _jsonCharPool,
                    CloseOutput = false

                    //AutoCompleteOnClose = false // TODO -- put this in if we upgrade Newtonsoft
                })
                {
                    serializer.Serialize(jsonWriter, model);
                    if (stream.Position < _bufferSize)
                    {
                        // Copy only the bytes actually written; the rented array is returned below
                        return bytes.Take((int) stream.Position).ToArray();
                    }

                    return stream.ToArray();
                }
            }
            catch (NotSupportedException e)
            {
                // NOTE(review): detection by exception message text is fragile across
                // runtime versions/localizations — confirm this message is stable.
                if (e.Message.Contains("Memory stream is not expandable"))
                {
                    var data = writeWithNoBuffer(model, serializer);

                    // Grow the hint to the next power of two that fits this payload
                    var bufferSize = 1024;
                    while (bufferSize < data.Length)
                    {
                        bufferSize = bufferSize * 2;
                    }

                    _bufferSize = bufferSize;

                    return data;
                }

                throw;
            }
            finally
            {
                _bytePool.Return(bytes);
                _serializerPool.Return(serializer);
            }
        }

        /// <summary>
        /// Fallback serialization path using an expandable MemoryStream (allocates).
        /// </summary>
        private byte[] writeWithNoBuffer(object model, JsonSerializer serializer)
        {
            var stream = new MemoryStream();
            using (var textWriter = new StreamWriter(stream) {AutoFlush = true})
            using (var jsonWriter = new JsonTextWriter(textWriter)
            {
                ArrayPool = _jsonCharPool,
                CloseOutput = false

                //AutoCompleteOnClose = false // TODO -- put this in if we upgrade Newtonsoft
            })
            {
                serializer.Serialize(jsonWriter, model);
                return stream.ToArray();
            }
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using Nini.Config;
using OpenMetaverse;
using OpenMetaverse.Imaging;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using log4net;
using System.Reflection;
using Mono.Addins;

namespace OpenSim.Region.CoreModules.Scripting.DynamicTexture
{
    [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "DynamicTextureModule")]
    public class DynamicTextureModule : ISharedRegionModule, IDynamicTextureManager
    {
//        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        private const int ALL_SIDES = -1;

        public const int DISP_EXPIRE = 1;
        public const int DISP_TEMP = 2;

        /// <summary>
        /// If true then where possible dynamic textures are reused.
        /// </summary>
        public bool ReuseTextures { get; set; }

        /// <summary>
        /// If false, then textures which have a low data size are not reused when ReuseTextures = true.
        /// </summary>
        /// <remarks>
        /// LL viewers 3.3.4 and before appear to not fully render textures pulled from the viewer cache if those
        /// textures have a relatively high pixel surface but a small data size. Typically, this appears to happen
        /// if the data size is smaller than the viewer's discard level 2 size estimate. So if this setting is
        /// false, textures smaller than the calculation in IsSizeReuseable are always regenerated rather than reused
        /// to work around this problem.</remarks>
        public bool ReuseLowDataTextures { get; set; }

        // Scenes this shared module is servicing, keyed by region ID
        private Dictionary<UUID, Scene> RegisteredScenes = new Dictionary<UUID, Scene>();

        // Render plugins keyed by the content type they handle
        private Dictionary<string, IDynamicTextureRender> RenderPlugins =
            new Dictionary<string, IDynamicTextureRender>();

        // In-flight texture requests keyed by updater ID
        private Dictionary<UUID, DynamicTextureUpdater> Updaters = new Dictionary<UUID, DynamicTextureUpdater>();

        /// <summary>
        /// Record dynamic textures that we can reuse for a given data and parameter combination rather than
        /// regenerate.
        /// </summary>
        /// <remarks>
        /// Key is string.Format("{0}{1}", data, extraParams) — see GenerateReusableTextureKey.
        /// </remarks>
        private Cache m_reuseableDynamicTextures;

        /// <summary>
        /// This constructor is only here because of the Unit Tests...
        /// Don't use it.
        /// </summary>
        public DynamicTextureModule()
        {
            m_reuseableDynamicTextures = new Cache(CacheMedium.Memory, CacheStrategy.Conservative);
            m_reuseableDynamicTextures.DefaultTTL = new TimeSpan(24, 0, 0);
        }

        #region IDynamicTextureManager Members

        // Registers a render plugin for the given content type; first registration wins.
        public void RegisterRender(string handleType, IDynamicTextureRender render)
        {
            if (!RenderPlugins.ContainsKey(handleType))
            {
                RenderPlugins.Add(handleType, render);
            }
        }

        /// <summary>
        /// Called by code which actually renders the dynamic texture to supply texture data.
/// </summary> /// <param name="updaterId"></param> /// <param name="texture"></param> public void ReturnData(UUID updaterId, IDynamicTexture texture) { DynamicTextureUpdater updater = null; lock (Updaters) { if (Updaters.ContainsKey(updaterId)) { updater = Updaters[updaterId]; } } if (updater != null) { if (RegisteredScenes.ContainsKey(updater.SimUUID)) { Scene scene = RegisteredScenes[updater.SimUUID]; UUID newTextureID = updater.DataReceived(texture.Data, scene); if (ReuseTextures && !updater.BlendWithOldTexture && texture.IsReuseable && (ReuseLowDataTextures || IsDataSizeReuseable(texture))) { m_reuseableDynamicTextures.Store( GenerateReusableTextureKey(texture.InputCommands, texture.InputParams), newTextureID); } } } if (updater.UpdateTimer == 0) { lock (Updaters) { if (!Updaters.ContainsKey(updater.UpdaterID)) { Updaters.Remove(updater.UpdaterID); } } } } /// <summary> /// Determines whether the texture is reuseable based on its data size. /// </summary> /// <remarks> /// This is a workaround for a viewer bug where very small data size textures relative to their pixel size /// are not redisplayed properly when pulled from cache. The calculation here is based on the typical discard /// level of 2, a 'rate' of 0.125 and 4 components (which makes for a factor of 0.5). 
        /// </remarks>
        /// <returns></returns>
        private bool IsDataSizeReuseable(IDynamicTexture texture)
        {
//            Console.WriteLine("{0} {1}", texture.Size.Width, texture.Size.Height);

            // Estimated discard-level-2 data size: (w/4) * (h/4) * 0.5 (rate 0.125 * 4 components)
            int discardLevel2DataThreshold
                = (int)Math.Ceiling((texture.Size.Width >> 2) * (texture.Size.Height >> 2) * 0.5);

//            m_log.DebugFormat(
//                "[DYNAMIC TEXTURE MODULE]: Discard level 2 threshold {0}, texture data length {1}",
//                discardLevel2DataThreshold, texture.Data.Length);

            return discardLevel2DataThreshold < texture.Data.Length;
        }

        // Convenience overload: no blending, fully opaque.
        public UUID AddDynamicTextureURL(UUID simID, UUID primID, string contentType, string url,
                                         string extraParams, int updateTimer)
        {
            return AddDynamicTextureURL(simID, primID, contentType, url, extraParams, updateTimer, false, 255);
        }

        // Convenience overload: temporary + expiring display flags, all faces.
        public UUID AddDynamicTextureURL(UUID simID, UUID primID, string contentType, string url,
                                         string extraParams, int updateTimer, bool SetBlending, byte AlphaValue)
        {
            return AddDynamicTextureURL(simID, primID, contentType, url,
                                        extraParams, updateTimer, SetBlending,
                                        (int)(DISP_TEMP|DISP_EXPIRE), AlphaValue, ALL_SIDES);
        }

        /// <summary>
        /// Kicks off an asynchronous render of the given URL through the plugin
        /// registered for <paramref name="contentType"/>. Returns the updater ID,
        /// or UUID.Zero when no plugin handles the content type.
        /// </summary>
        public UUID AddDynamicTextureURL(UUID simID, UUID primID, string contentType, string url,
                                         string extraParams, int updateTimer, bool SetBlending,
                                         int disp, byte AlphaValue, int face)
        {
            if (RenderPlugins.ContainsKey(contentType))
            {
                DynamicTextureUpdater updater = new DynamicTextureUpdater();
                updater.SimUUID = simID;
                updater.PrimID = primID;
                updater.ContentType = contentType;
                updater.Url = url;
                updater.UpdateTimer = updateTimer;
                updater.UpdaterID = UUID.Random();
                updater.Params = extraParams;
                updater.BlendWithOldTexture = SetBlending;
                updater.FrontAlpha = AlphaValue;
                updater.Face = face;
                updater.Disp = disp;

                lock (Updaters)
                {
                    if (!Updaters.ContainsKey(updater.UpdaterID))
                    {
                        Updaters.Add(updater.UpdaterID, updater);
                    }
                }

                RenderPlugins[contentType].AsyncConvertUrl(updater.UpdaterID, url, extraParams);
                return updater.UpdaterID;
            }
            return UUID.Zero;
        }

        // Convenience overload: no blending, fully opaque.
        public UUID AddDynamicTextureData(UUID simID, UUID primID, string contentType, string data,
                                          string extraParams, int updateTimer)
        {
            return AddDynamicTextureData(simID, primID, contentType, data, extraParams, updateTimer, false, 255);
        }

        // Convenience overload: temporary + expiring display flags, all faces.
        public UUID AddDynamicTextureData(UUID simID, UUID primID, string contentType, string data,
                                          string extraParams, int updateTimer, bool SetBlending, byte AlphaValue)
        {
            return AddDynamicTextureData(simID, primID, contentType, data, extraParams, updateTimer, SetBlending,
                                         (int) (DISP_TEMP|DISP_EXPIRE), AlphaValue, ALL_SIDES);
        }

        /// <summary>
        /// Renders the given inline data (e.g. draw commands) as a texture for the prim,
        /// reusing a previously generated texture where possible. Returns the updater ID,
        /// or UUID.Zero when no plugin/scene/part is found.
        /// </summary>
        public UUID AddDynamicTextureData(UUID simID, UUID primID, string contentType, string data,
                                          string extraParams, int updateTimer, bool SetBlending,
                                          int disp, byte AlphaValue, int face)
        {
            if (!RenderPlugins.ContainsKey(contentType))
                return UUID.Zero;

            Scene scene;
            RegisteredScenes.TryGetValue(simID, out scene);

            if (scene == null)
                return UUID.Zero;

            SceneObjectPart part = scene.GetSceneObjectPart(primID);

            if (part == null)
                return UUID.Zero;

            // If we want to reuse dynamic textures then we have to ignore any request from the caller to expire
            // them.
            if (ReuseTextures)
                disp = disp & ~DISP_EXPIRE;

            DynamicTextureUpdater updater = new DynamicTextureUpdater();
            updater.SimUUID = simID;
            updater.PrimID = primID;
            updater.ContentType = contentType;
            updater.BodyData = data;
            updater.UpdateTimer = updateTimer;
            updater.UpdaterID = UUID.Random();
            updater.Params = extraParams;
            updater.BlendWithOldTexture = SetBlending;
            updater.FrontAlpha = AlphaValue;
            updater.Face = face;
            updater.Url = "Local image";
            updater.Disp = disp;

            object objReusableTextureUUID = null;

            if (ReuseTextures && !updater.BlendWithOldTexture)
            {
                string reuseableTextureKey = GenerateReusableTextureKey(data, extraParams);
                objReusableTextureUUID = m_reuseableDynamicTextures.Get(reuseableTextureKey);

                if (objReusableTextureUUID != null)
                {
                    // If something else has removed this temporary asset from the cache, detect and invalidate
                    // our cached uuid.
                    if (scene.AssetService.GetMetadata(objReusableTextureUUID.ToString()) == null)
                    {
                        m_reuseableDynamicTextures.Invalidate(reuseableTextureKey);
                        objReusableTextureUUID = null;
                    }
                }
            }

            // We cannot reuse a dynamic texture if the data is going to be blended with something already there.
            if (objReusableTextureUUID == null)
            {
                lock (Updaters)
                {
                    if (!Updaters.ContainsKey(updater.UpdaterID))
                    {
                        Updaters.Add(updater.UpdaterID, updater);
                    }
                }

//                m_log.DebugFormat(
//                    "[DYNAMIC TEXTURE MODULE]: Requesting generation of new dynamic texture for {0} in {1}",
//                    part.Name, part.ParentGroup.Scene.Name);

                RenderPlugins[contentType].AsyncConvertData(updater.UpdaterID, data, extraParams);
            }
            else
            {
//                m_log.DebugFormat(
//                    "[DYNAMIC TEXTURE MODULE]: Reusing cached texture {0} for {1} in {2}",
//                    objReusableTextureUUID, part.Name, part.ParentGroup.Scene.Name);

                // No need to add to updaters as the texture is always the same.  Not that this functionality
                // apppears to be implemented anyway.
                updater.UpdatePart(part, (UUID)objReusableTextureUUID);
            }

            return updater.UpdaterID;
        }

        // Cache key for reusable dynamic textures: raw concatenation of data + params.
        private string GenerateReusableTextureKey(string data, string extraParams)
        {
            return string.Format("{0}{1}", data, extraParams);
        }

        // Delegates size measurement to the plugin for the content type; outputs 0,0 when none.
        public void GetDrawStringSize(string contentType, string text, string fontName, int fontSize,
                                      out double xSize, out double ySize)
        {
            xSize = 0;
            ySize = 0;
            if (RenderPlugins.ContainsKey(contentType))
            {
                RenderPlugins[contentType].GetDrawStringSize(text, fontName, fontSize, out xSize, out ySize);
            }
        }

        #endregion

        #region ISharedRegionModule Members

        // Reads texture-reuse settings from the [Textures] config section.
        public void Initialise(IConfigSource config)
        {
            IConfig texturesConfig = config.Configs["Textures"];
            if (texturesConfig != null)
            {
                ReuseTextures = texturesConfig.GetBoolean("ReuseDynamicTextures", false);
                ReuseLowDataTextures = texturesConfig.GetBoolean("ReuseDynamicLowDataTextures", false);

                if (ReuseTextures)
                {
                    m_reuseableDynamicTextures = new Cache(CacheMedium.Memory, CacheStrategy.Conservative);
                    m_reuseableDynamicTextures.DefaultTTL = new TimeSpan(24, 0, 0);
                }
            }
        }

        public void PostInitialise()
        {
        }

        public void AddRegion(Scene scene)
        {
            if (!RegisteredScenes.ContainsKey(scene.RegionInfo.RegionID))
            {
                RegisteredScenes.Add(scene.RegionInfo.RegionID, scene);
                scene.RegisterModuleInterface<IDynamicTextureManager>(this);
            }
        }

        public void RegionLoaded(Scene scene)
        {
        }

        public void RemoveRegion(Scene scene)
        {
            if (RegisteredScenes.ContainsKey(scene.RegionInfo.RegionID))
                RegisteredScenes.Remove(scene.RegionInfo.RegionID);
        }

        public void Close()
        {
        }

        public string Name
        {
            get { return "DynamicTextureModule"; }
        }

        public Type ReplaceableInterface
        {
            get { return null; }
        }

        #endregion

        #region Nested type: DynamicTextureUpdater

        /// <summary>
        /// Tracks a single outstanding dynamic texture request and applies the
        /// rendered result to the target prim when data arrives.
        /// </summary>
        public class DynamicTextureUpdater
        {
            private static readonly ILog m_log =
                LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

            public bool BlendWithOldTexture = false;
            public string BodyData;
            public string ContentType;
            public byte FrontAlpha = 255;
            public string Params;
            public UUID PrimID;
            public bool SetNewFrontAlpha = false;
            public UUID SimUUID;
            public UUID UpdaterID;
            public int UpdateTimer;
            public int Face;
            public int Disp;
            public string Url;

            public DynamicTextureUpdater()
            {
                UpdateTimer = 0;
                BodyData = null;
            }

            /// <summary>
            /// Update the given part with the new texture.
            /// </summary>
            /// <returns>
            /// The old texture UUID.
            /// </returns>
            public UUID UpdatePart(SceneObjectPart part, UUID textureID)
            {
                UUID oldID;

                lock (part)
                {
                    // mostly keep the values from before
                    Primitive.TextureEntry tmptex = part.Shape.Textures;

                    // FIXME: Need to return the appropriate ID if only a single face is replaced.
                    oldID = tmptex.DefaultTexture.TextureID;

                    if (Face == ALL_SIDES)
                    {
                        oldID = tmptex.DefaultTexture.TextureID;
                        tmptex.DefaultTexture.TextureID = textureID;
                    }
                    else
                    {
                        try
                        {
                            Primitive.TextureEntryFace texface = tmptex.CreateFace((uint)Face);
                            texface.TextureID = textureID;
                            tmptex.FaceTextures[Face] = texface;
                        }
                        catch (Exception)
                        {
                            // Fall back to replacing the default texture if the face is invalid
                            tmptex.DefaultTexture.TextureID = textureID;
                        }
                    }

                    // I'm pretty sure we always want to force this to true
                    // I'm pretty sure noone whats to set fullbright true if it wasn't true before.
                    // tmptex.DefaultTexture.Fullbright = true;

                    part.UpdateTextureEntry(tmptex.GetBytes());
                }

                return oldID;
            }

            /// <summary>
            /// Called once new texture data has been received for this updater.
            /// </summary>
            /// <param name="data"></param>
            /// <param name="scene"></param>
            /// <returns>The asset UUID given to the incoming data.</returns>
            public UUID DataReceived(byte[] data, Scene scene)
            {
                SceneObjectPart part = scene.GetSceneObjectPart(PrimID);

                if (part == null || data == null || data.Length <= 1)
                {
                    // NOTE(review): if part == null this SimChat call dereferences
                    // part.ParentGroup and will throw NullReferenceException — the
                    // error path needs a null guard before using part.
                    string msg =
                        String.Format("DynamicTextureModule: Error preparing image using URL {0}", Url);
                    scene.SimChat(Utils.StringToBytes(msg), ChatTypeEnum.Say,
                                  0, part.ParentGroup.RootPart.AbsolutePosition, part.Name, part.UUID, false);

                    return UUID.Zero;
                }

                byte[] assetData = null;
                AssetBase oldAsset = null;

                if (BlendWithOldTexture)
                {
                    Primitive.TextureEntryFace defaultFace = part.Shape.Textures.DefaultTexture;
                    if (defaultFace != null)
                    {
                        oldAsset = scene.AssetService.Get(defaultFace.TextureID.ToString());

                        if (oldAsset != null)
                            assetData = BlendTextures(data, oldAsset.Data, SetNewFrontAlpha, FrontAlpha);
                    }
                }

                if (assetData == null)
                {
                    assetData = new byte[data.Length];
                    Array.Copy(data, assetData, data.Length);
                }

                // Create a new asset for user
                AssetBase asset
                    = new AssetBase(
                        UUID.Random(), "DynamicImage" + Util.RandomClass.Next(1, 10000),
                        (sbyte)AssetType.Texture, scene.RegionInfo.RegionID.ToString());
                asset.Data = assetData;
                asset.Description = String.Format("URL image : {0}", Url);
                if (asset.Description.Length > 128)
                    asset.Description = asset.Description.Substring(0, 128);
                asset.Local = true;     // dynamic images aren't saved in the assets server
                asset.Temporary = ((Disp & DISP_TEMP) != 0);
                scene.AssetService.Store(asset); // this will only save the asset in the local asset cache

                IJ2KDecoder cacheLayerDecode = scene.RequestModuleInterface<IJ2KDecoder>();
                if (cacheLayerDecode != null)
                {
                    if (!cacheLayerDecode.Decode(asset.FullID, asset.Data))
                        m_log.WarnFormat(
                            "[DYNAMIC TEXTURE MODULE]: Decoding of dynamically generated asset {0} for {1} in {2} failed",
                            asset.ID, part.Name, part.ParentGroup.Scene.Name);
                }

                UUID oldID = UpdatePart(part, asset.FullID);

                if (oldID != UUID.Zero && ((Disp & DISP_EXPIRE) != 0))
                {
                    if (oldAsset == null)
                        oldAsset = scene.AssetService.Get(oldID.ToString());

                    if (oldAsset != null)
                    {
                        if (oldAsset.Temporary)
                        {
                            scene.AssetService.Delete(oldID.ToString());
                        }
                    }
                }

                return asset.FullID;
            }

            // Decodes both JPEG2000 images, optionally overrides the front image's alpha,
            // composites front over back, and re-encodes. Returns null if either decode fails.
            private byte[] BlendTextures(byte[] frontImage, byte[] backImage,
                                         bool setNewAlpha, byte newAlpha)
            {
                ManagedImage managedImage;
                Image image;

                if (!OpenJPEG.DecodeToImage(frontImage, out managedImage, out image) || image == null)
                    return null;

                Bitmap image1 = new Bitmap(image);
                image.Dispose();

                if (!OpenJPEG.DecodeToImage(backImage, out managedImage, out image) || image == null)
                {
                    image1.Dispose();
                    return null;
                }

                Bitmap image2 = new Bitmap(image);
                image.Dispose();

                if (setNewAlpha)
                    SetAlpha(ref image1, newAlpha);

                using(Bitmap joint = MergeBitMaps(image1, image2))
                {
                    image1.Dispose();
                    image2.Dispose();

                    byte[] result = new byte[0];

                    try
                    {
                        result = OpenJPEG.EncodeFromImage(joint, true);
                    }
                    catch (Exception e)
                    {
                        m_log.ErrorFormat(
                            "[DYNAMICTEXTUREMODULE]: OpenJpeg Encode Failed. Exception {0}{1}",
                            e.Message, e.StackTrace);
                    }

                    return result;
                }
            }

            // Draws back then front at the back image's dimensions into a new 32bpp ARGB bitmap.
            public Bitmap MergeBitMaps(Bitmap front, Bitmap back)
            {
                Bitmap joint;
                Graphics jG;

                joint = new Bitmap(back.Width, back.Height, PixelFormat.Format32bppArgb);
                using(jG = Graphics.FromImage(joint))
                {
                    jG.DrawImage(back, 0, 0, back.Width, back.Height);
                    jG.DrawImage(front, 0, 0, back.Width, back.Height);
                    return joint;
                }
            }

            // Rewrites every pixel's alpha channel in place (per-pixel; O(w*h) and slow for large images).
            private void SetAlpha(ref Bitmap b, byte alpha)
            {
                for (int w = 0; w < b.Width; w++)
                {
                    for (int h = 0; h < b.Height; h++)
                    {
                        b.SetPixel(w, h, Color.FromArgb(alpha, b.GetPixel(w, h)));
                    }
                }
            }
        }

        #endregion
    }
}
#region License
// Copyright (c) Jeremy Skinner (http://www.jeremyskinner.co.uk)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// The latest version of this file can be found at http://www.codeplex.com/FluentValidation
#endregion

namespace FluentValidation.Tests {
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;
    using Internal;
#if !CoreCLR
    using Moq;
#endif
    using Xunit;
    using Resources;
    using Results;
    using Validators;
    using System.Reflection;

    /// <summary>
    /// Tests for <c>RuleBuilder&lt;T, TProperty&gt;</c>: property-name resolution,
    /// validator registration, message/name overrides, When/Unless conditions
    /// (sync and async), and null-argument validation.
    /// </summary>
    public class RuleBuilderTests {
        RuleBuilder<Person, string> builder;

        public RuleBuilderTests() {
            // Fresh builder per test (xUnit constructs the class for each test case).
            var rule = PropertyRule.Create<Person, string>(x => x.Surname);
            builder = new RuleBuilder<Person, string>(rule);
        }

        [Fact]
        public void Should_build_property_name() {
            builder.Rule.PropertyName.ShouldEqual("Surname");
        }

        [Fact]
        public void Should_compile_expression() {
            var person = new Person { Surname = "Foo" };
            builder.Rule.PropertyFunc(person).ShouldEqual("Foo");
        }

        [Fact]
        public void Adding_a_validator_should_return_builder() {
            var builderWithOptions = builder.SetValidator(new TestPropertyValidator());
            builderWithOptions.ShouldBeTheSameAs(builder);
        }

        [Fact]
        public void Adding_a_validator_should_store_validator() {
            var validator = new TestPropertyValidator();
            builder.SetValidator(validator);
            builder.Rule.CurrentValidator.ShouldBeTheSameAs(validator);
        }

        [Fact]
        public void Should_set_cutom_property_name() {
            builder.SetValidator(new TestPropertyValidator()).WithName("Foo");
            // Fixed: the original called Assert.Equal(actual, expected) with the
            // arguments reversed (xUnit's signature is Assert.Equal(expected, actual)).
            // Use ShouldEqual for consistency with every other test in this file.
            builder.Rule.DisplayName.GetString().ShouldEqual("Foo");
        }

        [Fact]
        public void Should_set_custom_error() {
            builder.SetValidator(new TestPropertyValidator()).WithMessage("Bar");
            builder.Rule.CurrentValidator.ErrorMessageSource.GetString().ShouldEqual("Bar");
        }

        [Fact]
        public void Should_throw_if_validator_is_null() {
            typeof(ArgumentNullException).ShouldBeThrownBy(() => builder.SetValidator((IPropertyValidator)null));
        }

        [Fact]
        public void Should_throw_if_overriding_validator_is_null() {
            typeof(ArgumentNullException).ShouldBeThrownBy(() => builder.SetValidator((IValidator<string>)null));
        }

        [Fact]
        public void Should_throw_if_overriding_validator_provider_is_null() {
            typeof(ArgumentNullException).ShouldBeThrownBy(() => builder.SetValidator((Func<Person, IValidator<string>>)null));
        }

        [Fact]
        public void Should_throw_if_message_is_null() {
            typeof(ArgumentNullException).ShouldBeThrownBy(() => builder.SetValidator(new TestPropertyValidator()).WithMessage(null));
        }

        [Fact]
        public void Should_throw_if_property_name_is_null() {
            typeof(ArgumentNullException).ShouldBeThrownBy(() => builder.SetValidator(new TestPropertyValidator()).WithName(null));
        }

        [Fact]
        public void Should_throw_when_predicate_is_null() {
            typeof(ArgumentNullException).ShouldBeThrownBy(() => builder.SetValidator(new TestPropertyValidator()).When(null));
        }

        [Fact]
        public void Should_throw_when_async_predicate_is_null() {
            typeof(ArgumentNullException).ShouldBeThrownBy(() => builder.SetValidator(new TestPropertyValidator()).WhenAsync(null));
        }

        [Fact]
        public void Should_throw_when_inverse_predicate_is_null() {
            typeof(ArgumentNullException).ShouldBeThrownBy(() => builder.SetValidator(new TestPropertyValidator()).Unless(null));
        }

        [Fact]
        public void Should_throw_when_async_inverse_predicate_is_null() {
            typeof(ArgumentNullException).ShouldBeThrownBy(() => builder.SetValidator(new TestPropertyValidator()).UnlessAsync(null));
        }

        [Fact]
        public void Calling_when_should_replace_current_validator_with_predicate_validator() {
            var validator = new TestPropertyValidator();
            builder.SetValidator(validator).When(x => true);
            // When() wraps the validator rather than replacing it outright.
            builder.Rule.CurrentValidator.ShouldBe<DelegatingValidator>();
            var predicateValidator = (DelegatingValidator)builder.Rule.CurrentValidator;
            predicateValidator.InnerValidator.ShouldBeTheSameAs(validator);
        }

        [Fact]
        public void Calling_when_async_should_replace_current_validator_with_predicate_validator() {
            var validator = new TestPropertyValidator();
            builder.SetValidator(validator).WhenAsync(x => TaskHelpers.FromResult(true));
            builder.Rule.CurrentValidator.ShouldBe<DelegatingValidator>();
            var predicateValidator = (DelegatingValidator)builder.Rule.CurrentValidator;
            predicateValidator.InnerValidator.ShouldBeTheSameAs(validator);
        }

        [Fact]
        public void Calling_validate_should_delegate_to_underlying_validator() {
#if CoreCLR
            Assert.True(false, "No mocking on coreclr");
#else
            var person = new Person { Surname = "Foo" };
            var validator = new Mock<IPropertyValidator>();
            builder.SetValidator(validator.Object);

            builder.Rule.Validate(new ValidationContext<Person>(person, new PropertyChain(), new DefaultValidatorSelector())).ToList();

            validator.Verify(x => x.Validate(It.Is<PropertyValidatorContext>(c => (string)c.PropertyValue == "Foo")));
#endif
        }

        [Fact]
        public void Calling_ValidateAsync_should_delegate_to_underlying_sync_validator() {
#if CoreCLR
            Assert.True(false, "No mocking on coreclr");
#else
            var person = new Person { Surname = "Foo" };
            var validator = new Mock<IPropertyValidator>();
            builder.SetValidator(validator.Object);

            // A sync-only validator is invoked synchronously even via ValidateAsync.
            builder.Rule.ValidateAsync(new ValidationContext<Person>(person, new PropertyChain(), new DefaultValidatorSelector()), new CancellationToken()).Result.ToList();

            validator.Verify(x => x.Validate(It.Is<PropertyValidatorContext>(c => (string)c.PropertyValue == "Foo")));
#endif
        }

        [Fact]
        public void Calling_ValidateAsync_should_delegate_to_underlying_async_validator() {
#if CoreCLR
            Assert.True(false, "No mocking on coreclr");
#else
            var person = new Person { Surname = "Foo" };
            var validator = new Mock<AsyncValidatorBase>(MockBehavior.Loose, Messages.predicate_error) { CallBase = true };
            validator.Setup(v => v.ValidateAsync(It.IsAny<PropertyValidatorContext>(), It.IsAny<CancellationToken>())).Returns(TaskHelpers.FromResult(Enumerable.Empty<ValidationFailure>()));
            builder.SetValidator(validator.Object);

            builder.Rule.ValidateAsync(new ValidationContext<Person>(person, new PropertyChain(), new DefaultValidatorSelector()), new CancellationToken()).Result.ToList();

            validator.Verify(x => x.ValidateAsync(It.Is<PropertyValidatorContext>(c => (string)c.PropertyValue == "Foo"), It.IsAny<CancellationToken>()));
#endif
        }

        [Fact]
        public void PropertyDescription_should_return_property_name_split() {
            var builder = new RuleBuilder<Person, DateTime>(PropertyRule.Create<Person, DateTime>(x => x.DateOfBirth));
            builder.Rule.GetDisplayName().ShouldEqual("Date Of Birth");
        }

        [Fact]
        public void PropertyDescription_should_return_custom_property_name() {
            var builder = new RuleBuilder<Person, DateTime>(PropertyRule.Create<Person, DateTime>(x => x.DateOfBirth));
            builder.NotEqual(default(DateTime)).WithName("Foo");
            builder.Rule.GetDisplayName().ShouldEqual("Foo");
        }

        [Fact]
        public void Nullable_object_with_condition_should_not_throw() {
            var builder = new RuleBuilder<Person, int>(PropertyRule.Create<Person, int>(x => x.NullableInt.Value));
            builder.GreaterThanOrEqualTo(3).When(x => x.NullableInt != null);
            builder.Rule.Validate(new ValidationContext<Person>(new Person(), new PropertyChain(), new DefaultValidatorSelector()));
        }

        [Fact]
        public void Nullable_object_with_async_condition_should_not_throw() {
            var builder = new RuleBuilder<Person, int>(PropertyRule.Create<Person, int>(x => x.NullableInt.Value));
            builder.GreaterThanOrEqualTo(3).WhenAsync(x => TaskHelpers.FromResult(x.NullableInt != null));
            builder.Rule.Validate(new ValidationContext<Person>(new Person(), new PropertyChain(), new DefaultValidatorSelector()));
        }

        [Fact]
        public void Rule_for_a_non_memberexpression_should_not_generate_property_name() {
            var builder = new RuleBuilder<Person, int>(PropertyRule.Create<Person, int>(x => x.CalculateSalary()));
            builder.Rule.GetDisplayName().ShouldBeNull();
            builder.Rule.PropertyName.ShouldBeNull();
        }

        [Fact]
        public void Property_should_return_property_being_validated() {
#if CoreCLR
            var property = typeof(Person).GetRuntimeProperty("Surname");
#else
            var property = typeof(Person).GetProperty("Surname");
#endif
            builder.Rule.Member.ShouldEqual(property);
        }

        [Fact]
        public void Property_should_return_null_when_it_is_not_a_property_being_validated() {
            builder = new RuleBuilder<Person, string>(PropertyRule.Create<Person, string>(x => "Foo"));
            builder.Rule.Member.ShouldBeNull();
        }

        [Fact]
        public void Result_should_use_custom_property_name_when_no_property_name_can_be_determined() {
            var builder = new RuleBuilder<Person, int>(PropertyRule.Create<Person, int>(x => x.CalculateSalary()));
            builder.GreaterThan(100).WithName("Foo");

            var results = builder.Rule.Validate(new ValidationContext<Person>(new Person(), new PropertyChain(), new DefaultValidatorSelector()));
            results.Single().PropertyName.ShouldEqual("Foo");
        }

        // Minimal always-valid validator used to exercise the builder plumbing.
        class TestPropertyValidator : PropertyValidator {
            public TestPropertyValidator() : base(() => Messages.notnull_error) {
            }

            protected override bool IsValid(PropertyValidatorContext context) {
                return true;
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// NOTE(review): generated file — do not hand-edit logic; regenerate instead.

namespace Fixtures.Azure.AcceptanceTestsHeadExceptions
{
    using System;
    using System.Linq;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Net;
    using System.Net.Http;
    using System.Net.Http.Headers;
    using System.Text;
    using System.Text.RegularExpressions;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;
    using Microsoft.Rest.Serialization;
    using Newtonsoft.Json;
    using Microsoft.Rest.Azure;

    /// <summary>
    /// Test Infrastructure for AutoRest
    /// </summary>
    public partial class AutoRestHeadExceptionTestService : ServiceClient<AutoRestHeadExceptionTestService>, IAutoRestHeadExceptionTestService, IAzureClient
    {
        /// <summary>
        /// The base URI of the service.
        /// </summary>
        public Uri BaseUri { get; set; }

        /// <summary>
        /// Gets or sets json serialization settings.
        /// </summary>
        public JsonSerializerSettings SerializationSettings { get; private set; }

        /// <summary>
        /// Gets or sets json deserialization settings.
        /// </summary>
        public JsonSerializerSettings DeserializationSettings { get; private set; }

        /// <summary>
        /// Gets Azure subscription credentials.
        /// </summary>
        public ServiceClientCredentials Credentials { get; private set; }

        /// <summary>
        /// Gets or sets the preferred language for the response.
        /// </summary>
        public string AcceptLanguage { get; set; }

        /// <summary>
        /// Gets or sets the retry timeout in seconds for Long Running Operations.
        /// Default value is 30.
        /// </summary>
        public int? LongRunningOperationRetryTimeout { get; set; }

        /// <summary>
        /// When set to true a unique x-ms-client-request-id value is generated and
        /// included in each request. Default is true.
        /// </summary>
        public bool? GenerateClientRequestId { get; set; }

        /// <summary>
        /// Gets the IHeadExceptionOperations.
        /// </summary>
        public virtual IHeadExceptionOperations HeadException { get; private set; }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadExceptionTestService class.
        /// </summary>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected AutoRestHeadExceptionTestService(params DelegatingHandler[] handlers) : base(handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadExceptionTestService class.
        /// </summary>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected AutoRestHeadExceptionTestService(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadExceptionTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected AutoRestHeadExceptionTestService(Uri baseUri, params DelegatingHandler[] handlers) : this(handlers)
        {
            if (baseUri == null)
            {
                throw new ArgumentNullException("baseUri");
            }
            // Overrides the http://localhost default assigned by Initialize().
            this.BaseUri = baseUri;
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadExceptionTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected AutoRestHeadExceptionTestService(Uri baseUri, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (baseUri == null)
            {
                throw new ArgumentNullException("baseUri");
            }
            this.BaseUri = baseUri;
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadExceptionTestService class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Gets Azure subscription credentials.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestHeadExceptionTestService(ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)
        {
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            this.Credentials = credentials;
            // Generated pattern: the null re-check is redundant after the throw
            // above, but is kept as emitted by the code generator.
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadExceptionTestService class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Gets Azure subscription credentials.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestHeadExceptionTestService(ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadExceptionTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Gets Azure subscription credentials.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestHeadExceptionTestService(Uri baseUri, ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)
        {
            if (baseUri == null)
            {
                throw new ArgumentNullException("baseUri");
            }
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            this.BaseUri = baseUri;
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestHeadExceptionTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Gets Azure subscription credentials.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestHeadExceptionTestService(Uri baseUri, ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (baseUri == null)
            {
                throw new ArgumentNullException("baseUri");
            }
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            this.BaseUri = baseUri;
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes client properties.
        /// </summary>
        private void Initialize()
        {
            this.HeadException = new HeadExceptionOperations(this);
            // Defaults; constructors taking a baseUri overwrite BaseUri afterwards.
            this.BaseUri = new Uri("http://localhost");
            this.AcceptLanguage = "en-US";
            this.LongRunningOperationRetryTimeout = 30;
            this.GenerateClientRequestId = true;
            SerializationSettings = new JsonSerializerSettings
            {
                Formatting = Formatting.Indented,
                DateFormatHandling = DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = DateTimeZoneHandling.Utc,
                NullValueHandling = NullValueHandling.Ignore,
                ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
                ContractResolver = new ReadOnlyJsonContractResolver(),
                Converters = new List<JsonConverter>
                    {
                        new Iso8601TimeSpanConverter()
                    }
            };
            DeserializationSettings = new JsonSerializerSettings
            {
                DateFormatHandling = DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = DateTimeZoneHandling.Utc,
                NullValueHandling = NullValueHandling.Ignore,
                ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
                ContractResolver = new ReadOnlyJsonContractResolver(),
                Converters = new List<JsonConverter>
                    {
                        new Iso8601TimeSpanConverter()
                    }
            };
            // Azure clients deserialize CloudError payloads from error responses.
            DeserializationSettings.Converters.Add(new CloudErrorJsonConverter());
        }
    }
}
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Npgsql;
using Npgsql.EntityFrameworkCore.PostgreSQL;
using System.Reflection;
using Microsoft.EntityFrameworkCore.Storage;
using System.Text.RegularExpressions;
using System.ComponentModel.DataAnnotations.Schema;
using System.ComponentModel.DataAnnotations;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Metadata.Internal;
using SchoolBusAPI.Models;

namespace SchoolBusAPI
{
    /// <summary>
    /// Utility class used to update database column comments or descriptions.
    /// </summary>
    /// <typeparam name="TContext">The application's DbContext type.</typeparam>
    public class DbCommentsUpdater<TContext> where TContext : DbAppContext
    {
        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="context">Context whose model drives the comment updates.</param>
        public DbCommentsUpdater(TContext context)
        {
            this.context = context;
        }

        readonly TContext context;
        IDbContextTransaction transaction;

        /// <summary>
        /// Update the database descriptions for every DbSet exposed by the context.
        /// All COMMENT statements run inside a single transaction; any failure
        /// rolls the whole batch back and rethrows.
        /// </summary>
        public void UpdateDatabaseDescriptions()
        {
            Type contextType = typeof(TContext);
            var props = contextType.GetProperties(BindingFlags.Instance | BindingFlags.Public);
            DbConnection con = context.Database.GetDbConnection();
            try
            {
                con.Open();
                transaction = context.Database.BeginTransaction();
                foreach (var prop in props)
                {
                    // Only DbSet<TEntity> properties correspond to tables.
                    if (prop.PropertyType.InheritsOrImplements(typeof(DbSet<>)))
                    {
                        var tableType = prop.PropertyType.GetGenericArguments()[0];
                        SetTableDescriptions(tableType);
                    }
                }
                transaction.Commit();
            }
            catch
            {
                if (transaction != null)
                    transaction.Rollback();
                throw;
            }
            finally
            {
                if (con.State == System.Data.ConnectionState.Open)
                {
                    con.Close();
                }
            }
        }

        /// <summary>
        /// Set table and column comments for a single entity type, using
        /// MetaDataExtension attributes as the description source.
        /// </summary>
        /// <param name="tableType">The CLR entity type mapped to the table.</param>
        private void SetTableDescriptions(Type tableType)
        {
            var entityType = context.Model.FindEntityType(tableType);
            string fullTableName = entityType.Relational().TableName;

            // Strip an optional "[schema]." prefix and surrounding brackets.
            Regex regex = new Regex(@"(\[\w+\]\.)?\[(?<table>.*)\]");
            Match match = regex.Match(fullTableName);
            string tableName = match.Success ? match.Groups["table"].Value : fullTableName;

            // An explicit [Table] attribute overrides the model's table name.
            var tableAttrs = tableType.GetTypeInfo().GetCustomAttributes(typeof(TableAttribute), false);
            var tableAttrsArray = tableAttrs.ToArray<Attribute>();
            if (tableAttrsArray.Length > 0)
            {
                tableName = ((TableAttribute)tableAttrsArray[0]).Name;
            }

            // Table-level description, if present.
            var tableExtAttrs = tableType.GetTypeInfo().GetCustomAttributes(typeof(MetaDataExtension), false);
            var tableExtAttrssArray = tableExtAttrs.ToArray<Attribute>();
            if (tableExtAttrssArray.Length > 0)
            {
                SetTableDescription(tableName, ((MetaDataExtension)tableExtAttrssArray[0]).Description);
            }

            foreach (Property entityProperty in entityType.GetProperties().OfType<Property>())
            {
                // Not all properties have MemberInfo (e.g. shadow properties),
                // so a null check is required.
                if (entityProperty.MemberInfo != null)
                {
                    var attrs = entityProperty.MemberInfo.GetCustomAttributes(typeof(MetaDataExtension), false);
                    var attrsArray = attrs.ToArray<Attribute>();
                    if (attrsArray.Length > 0)
                    {
                        SetColumnDescription(tableName, entityProperty.Relational().ColumnName,
                            ((MetaDataExtension)attrsArray[0]).Description);
                    }
                }
            }
        }

        /// <summary>
        /// Escape a string for embedding in a PostgreSQL single-quoted literal.
        /// BUG FIX: the original used Replace("'", "\'"); in C# the escape
        /// sequence "\'" is just "'", so the replace was a no-op and any
        /// description containing an apostrophe produced broken SQL. PostgreSQL
        /// escapes a quote inside a literal by doubling it.
        /// </summary>
        private static string EscapeSqlLiteral(string value)
        {
            return value.Replace("'", "''");
        }

        /// <summary>
        /// Set a column comment.
        /// </summary>
        /// <param name="tableName">Name of the table</param>
        /// <param name="columnName">Name of the column</param>
        /// <param name="description">Description text for the column</param>
        private void SetColumnDescription(string tableName, string columnName, string description)
        {
            // Postgres has the COMMENT command to update a description.
            // COMMENT does not support parameter binding, so the literal is
            // escaped by quote-doubling instead.
            string query = "COMMENT ON COLUMN \"" + tableName + "\".\"" + columnName
                + "\" IS '" + EscapeSqlLiteral(description) + "'";
            context.Database.ExecuteSqlCommand(query);
        }

        /// <summary>
        /// Set a table description.
        /// </summary>
        /// <param name="tableName">Name of the table</param>
        /// <param name="description">Description text for the table</param>
        private void SetTableDescription(string tableName, string description)
        {
            // Postgres has the COMMENT command to update a description.
            string query = "COMMENT ON TABLE \"" + tableName + "\" IS '"
                + EscapeSqlLiteral(description) + "'";
            context.Database.ExecuteSqlCommand(query);
        }
    }

    /// <summary>
    /// Reflection helpers for walking a type's inheritance/interface graph.
    /// </summary>
    public static class ReflectionUtil
    {
        /// <summary>
        /// Returns true when <paramref name="child"/> inherits from or implements
        /// <paramref name="parent"/>, including open generic definitions
        /// (e.g. typeof(DbSet&lt;&gt;)).
        /// </summary>
        public static bool InheritsOrImplements(this Type child, Type parent)
        {
            parent = ResolveGenericTypeDefinition(parent);

            var currentChild = child.GetTypeInfo().IsGenericType
                ? child.GetGenericTypeDefinition()
                : child;

            while (currentChild != typeof(object))
            {
                if (parent == currentChild || HasAnyInterfaces(parent, currentChild))
                    return true;

                // Walk up the base-type chain, normalizing generics to their
                // open definition so typeof(List<>) matches List<int>, etc.
                currentChild = currentChild.GetTypeInfo().BaseType != null
                               && currentChild.GetTypeInfo().BaseType.GetTypeInfo().IsGenericType
                    ? currentChild.GetTypeInfo().BaseType.GetGenericTypeDefinition()
                    : currentChild.GetTypeInfo().BaseType;

                if (currentChild == null)
                    return false;
            }
            return false;
        }

        // True when any interface of child (normalized to its open generic
        // definition) matches parent.
        private static bool HasAnyInterfaces(Type parent, Type child)
        {
            return child.GetInterfaces()
                .Any(childInterface =>
                {
                    var currentInterface = childInterface.GetTypeInfo().IsGenericType
                        ? childInterface.GetGenericTypeDefinition()
                        : childInterface;
                    return currentInterface == parent;
                });
        }

        // Normalize a constructed generic (List<int>) to its open definition
        // (List<>) unless the caller already passed a partially-resolved form.
        private static Type ResolveGenericTypeDefinition(Type parent)
        {
            var shouldUseGenericType = true;
            if (parent.GetTypeInfo().IsGenericType && parent.GetGenericTypeDefinition() != parent)
                shouldUseGenericType = false;

            if (parent.GetTypeInfo().IsGenericType && shouldUseGenericType)
                parent = parent.GetGenericTypeDefinition();
            return parent;
        }
    }
}
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.Graphics;
using OpenKh.Common;
using OpenKh.Engine;
using OpenKh.Engine.MonoGame;
using OpenKh.Engine.Parsers;
using OpenKh.Kh2;
using OpenKh.Kh2.Ard;
using OpenKh.Kh2.Extensions;
using OpenKh.Kh2.Models;
using OpenKh.Tools.Kh2MapStudio.Interfaces;
using OpenKh.Tools.Kh2MapStudio.Models;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Numerics;
using xna = Microsoft.Xna.Framework;

namespace OpenKh.Tools.Kh2MapStudio
{
    /// <summary>
    /// Loads, renders and saves a KH2 map (MAP bar) and its area data (ARD bar):
    /// map/skybox meshes, background objects (BOBs), collision trees and spawn
    /// points.
    /// </summary>
    class MapRenderer : ILayerController, ISpawnPointController
    {
        // Standard premultiplied-style alpha blending used for all map geometry.
        private readonly static BlendState DefaultBlendState = new BlendState()
        {
            ColorSourceBlend = Blend.SourceAlpha,
            AlphaSourceBlend = Blend.SourceAlpha,
            ColorDestinationBlend = Blend.InverseSourceAlpha,
            AlphaDestinationBlend = Blend.InverseSourceAlpha,
            ColorBlendFunction = BlendFunction.Add,
            AlphaBlendFunction = BlendFunction.Add,
            BlendFactor = xna.Color.White,
            MultiSampleMask = int.MaxValue,
            IndependentBlendEnable = false
        };

        private readonly xna.GraphicsDeviceManager _graphicsManager;
        private readonly GraphicsDevice _graphics;
        private readonly KingdomShader _shader;
        private readonly Texture2D _whiteTexture;
        private bool _showBobs = true;

        public Camera Camera { get; }
        public IObjEntryController ObjEntryController { get; set; }

        public bool? ShowMap
        {
            get => MapMeshGroups.FirstOrDefault(x => x.Name == "MAP")?.IsVisible;
            set
            {
                var mesh = MapMeshGroups.FirstOrDefault(x => x.Name == "MAP");
                if (mesh != null)
                    mesh.IsVisible = value ?? true;
            }
        }

        public bool? ShowSk0
        {
            get => MapMeshGroups.FirstOrDefault(x => x.Name == "SK0")?.IsVisible;
            set
            {
                var mesh = MapMeshGroups.FirstOrDefault(x => x.Name == "SK0");
                if (mesh != null)
                    mesh.IsVisible = value ?? true;
            }
        }

        public bool? ShowSk1
        {
            get => MapMeshGroups.FirstOrDefault(x => x.Name == "SK1")?.IsVisible;
            set
            {
                var mesh = MapMeshGroups.FirstOrDefault(x => x.Name == "SK1");
                if (mesh != null)
                    mesh.IsVisible = value ?? true;
            }
        }

        public bool? ShowBobs
        {
            get => BobDescriptors.Any() ? (bool?)_showBobs : null;
            set => _showBobs = value ?? true;
        }

        // FIX: the setters below dereferenced a possibly-null collision model;
        // guard them like the getters already do.
        public bool? ShowMapCollision
        {
            get => MapCollision != null ? (bool?)MapCollision.IsVisible : null;
            set
            {
                if (MapCollision != null)
                    MapCollision.IsVisible = value ?? false;
            }
        }

        public bool? ShowCameraCollision
        {
            get => CameraCollision != null ? (bool?)CameraCollision.IsVisible : null;
            set
            {
                if (CameraCollision != null)
                    CameraCollision.IsVisible = value ?? false;
            }
        }

        public bool? ShowLightCollision
        {
            get => LightCollision != null ? (bool?)LightCollision.IsVisible : null;
            set
            {
                if (LightCollision != null)
                    LightCollision.IsVisible = value ?? false;
            }
        }

        internal List<Bar.Entry> MapBarEntries { get; private set; }
        internal List<Bar.Entry> ArdBarEntries { get; private set; }
        internal List<MeshGroupModel> MapMeshGroups { get; }
        internal List<MeshGroupModel> BobMeshGroups { get; }
        internal List<BobDescriptor> BobDescriptors { get; }
        internal CollisionModel MapCollision { get; set; }
        internal CollisionModel CameraCollision { get; set; }
        internal CollisionModel LightCollision { get; set; }

        public List<SpawnPointModel> SpawnPoints { get; private set; }
        public SpawnPointModel CurrentSpawnPoint { get; private set; }
        public string SelectSpawnPoint
        {
            get => CurrentSpawnPoint?.Name ?? string.Empty;
            set => CurrentSpawnPoint = SpawnPoints.FirstOrDefault(x => x.Name == value);
        }

        public SpawnScriptModel SpawnScriptMap { get; private set; }
        public SpawnScriptModel SpawnScriptBattle { get; private set; }
        public SpawnScriptModel SpawnScriptEvent { get; private set; }

        public MapRenderer(ContentManager content, xna.GraphicsDeviceManager graphics)
        {
            _graphicsManager = graphics;
            _graphics = graphics.GraphicsDevice;
            _shader = new KingdomShader(content);
            MapMeshGroups = new List<MeshGroupModel>();
            BobMeshGroups = new List<MeshGroupModel>();
            BobDescriptors = new List<BobDescriptor>();
            Camera = new Camera()
            {
                CameraPosition = new Vector3(0, 100, 200),
                CameraRotationYawPitchRoll = new Vector3(90, 0, 10),
            };

            // 2x2 opaque white texture used when drawing untextured geometry
            // (collision meshes, event activator boxes).
            _whiteTexture = new Texture2D(_graphics, 2, 2);
            _whiteTexture.SetData(Enumerable.Range(0, 2 * 2 * sizeof(int)).Select(_ => (byte)0xff).ToArray());
        }

        /// <summary>
        /// Loads a MAP bar file: skybox/map meshes, BOB models and the
        /// map/camera/light collision trees. Any previously loaded map is closed.
        /// </summary>
        public void OpenMap(string fileName)
        {
            Close();
            MapBarEntries = File.OpenRead(fileName).Using(Bar.Read);
            LoadMapComponent(MapBarEntries, "SK0");
            LoadMapComponent(MapBarEntries, "SK1");
            LoadMapComponent(MapBarEntries, "MAP");

            var bobDescEntry = MapBarEntries
                .Where(x => x.Name == "out" && x.Type == Bar.EntryType.BgObjPlacement)
                .FirstOrDefault();
            if (bobDescEntry != null)
                BobDescriptors.AddRange(BobDescriptor.Read(bobDescEntry.Stream));

            // BOB models and textures are paired by position within the bar.
            var bobModel = MapBarEntries.Where(x => x.Name == "BOB" && x.Type == Bar.EntryType.Model).ToArray();
            var bobTexture = MapBarEntries.Where(x => x.Name == "BOB" && x.Type == Bar.EntryType.ModelTexture).ToArray();
            var bobCount = Math.Min(bobModel.Length, bobTexture.Length);
            for (var i = 0; i < bobCount; i++)
            {
                var model = Mdlx.Read(bobModel[i].Stream);
                var textures = ModelTexture.Read(bobTexture[i].Stream).Images;
                BobMeshGroups.Add(new MeshGroupModel(_graphics, "BOB", model, textures, i));
            }

            var mapCollisionEntry = MapBarEntries
                .Where(x => x.Name.StartsWith("ID_") && x.Type == Bar.EntryType.CollisionOctalTree)
                .FirstOrDefault();
            if (mapCollisionEntry != null)
                MapCollision = new CollisionModel(Coct.Read(mapCollisionEntry.Stream));

            var cameraCollisionEntry = MapBarEntries
                .Where(x => x.Name.StartsWith("CH_") && x.Type == Bar.EntryType.CameraOctalTree)
                .FirstOrDefault();
            if (cameraCollisionEntry != null)
                CameraCollision = new CollisionModel(Coct.Read(cameraCollisionEntry.Stream));

            var lightCollisionEntry = MapBarEntries
                .Where(x => x.Name == "COL_" && x.Type == Bar.EntryType.ColorOctalTree)
                .FirstOrDefault();
            if (lightCollisionEntry != null)
                LightCollision = new CollisionModel(Coct.Read(lightCollisionEntry.Stream));
        }

        /// <summary>
        /// Writes the (possibly edited) BOB placements back into the bar and
        /// saves the whole MAP bar to disk.
        /// </summary>
        public void SaveMap(string fileName)
        {
            var memStream = new MemoryStream();
            BobDescriptor.Write(memStream, BobDescriptors);
            MapBarEntries.AddOrReplace(new Bar.Entry
            {
                Name = "out",
                Type = Bar.EntryType.BgObjPlacement,
                Stream = memStream
            });
            File.Create(fileName).Using(stream => Bar.Write(stream, MapBarEntries));
        }

        /// <summary>
        /// Loads an ARD bar file: spawn points and the map/battle/event spawn
        /// scripts. Selects the conventional default spawn point "m_00".
        /// </summary>
        public void OpenArd(string fileName)
        {
            ArdBarEntries = File.OpenRead(fileName).Using(Bar.Read);
            SpawnPoints = ArdBarEntries
                .Where(x => x.Type == Bar.EntryType.AreaDataSpawn && x.Stream.Length > 0)
                .Select(x => new SpawnPointModel(ObjEntryController, x.Name, SpawnPoint.Read(x.Stream.SetPosition(0))))
                .ToList();
            SelectSpawnPoint = "m_00";

            SpawnScriptMap = SpawnScriptModel.Create(ArdBarEntries, "map");
            SpawnScriptBattle = SpawnScriptModel.Create(ArdBarEntries, "btl");
            SpawnScriptEvent = SpawnScriptModel.Create(ArdBarEntries, "evt");
        }

        /// <summary>
        /// Writes spawn points and spawn scripts back into the bar and saves the
        /// ARD bar to disk.
        /// </summary>
        public void SaveArd(string fileName)
        {
            foreach (var spawnPointModel in SpawnPoints)
            {
                var memStream = new MemoryStream();
                SpawnPoint.Write(memStream, spawnPointModel.SpawnPoints);
                ArdBarEntries.AddOrReplace(new Bar.Entry
                {
                    Name = spawnPointModel.Name,
                    Type = Bar.EntryType.AreaDataSpawn,
                    Stream = memStream
                });
            }
            SpawnScriptMap?.SaveToBar(ArdBarEntries);
            SpawnScriptBattle?.SaveToBar(ArdBarEntries);
            SpawnScriptEvent?.SaveToBar(ArdBarEntries);

            File.Create(fileName).Using(stream => Bar.Write(stream, ArdBarEntries));
        }

        /// <summary>
        /// Releases all GPU resources belonging to the currently loaded map.
        /// </summary>
        public void Close()
        {
            foreach (var meshGroup in MapMeshGroups)
                meshGroup?.Dispose();
            MapMeshGroups.Clear();

            foreach (var meshGroup in BobMeshGroups)
                meshGroup?.Dispose();
            BobMeshGroups.Clear();
            BobDescriptors.Clear();

            // FIX: null the references after disposing; otherwise reopening a
            // map that lacks one of these entries would leave a disposed model
            // behind that Draw()/the Show* properties would still touch.
            MapCollision?.Dispose();
            MapCollision = null;
            CameraCollision?.Dispose();
            CameraCollision = null;
            LightCollision?.Dispose();
            LightCollision = null;
        }

        public void Update(float deltaTime)
        {
        }

        /// <summary>
        /// Renders the scene: map meshes (opaque then translucent), collision
        /// overlays, BOBs, and the current spawn point's entities and event
        /// activator boxes.
        /// </summary>
        public void Draw()
        {
            var viewport = _graphics.Viewport;
            Camera.AspectRatio = viewport.Width / (float)viewport.Height;

            // FIX: use MonoGame's shared immutable state objects instead of
            // allocating new RasterizerState/DepthStencilState every frame.
            _graphics.RasterizerState = RasterizerState.CullClockwise;
            _graphics.DepthStencilState = DepthStencilState.Default;
            _graphics.BlendState = DefaultBlendState;

            _shader.Pass(pass =>
            {
                _shader.SetProjectionView(Camera.Projection);
                _shader.SetWorldView(Camera.World);
                _shader.SetModelViewIdentity();
                pass.Apply();

                // Two passes over the map meshes: opaque first, then translucent.
                foreach (var mesh in MapMeshGroups.Where(x => x.IsVisible))
                    RenderMeshNew(pass, mesh.MeshGroup, true);
                foreach (var mesh in MapMeshGroups.Where(x => x.IsVisible))
                    RenderMeshNew(pass, mesh.MeshGroup, false);

                _shader.SetRenderTexture(pass, _whiteTexture);
                MapCollision?.Draw(_graphics);
                // FIX: was CameraCollision.Draw(_graphics) — NullReferenceException
                // for any map without a camera octal tree.
                CameraCollision?.Draw(_graphics);
                LightCollision?.Draw(_graphics);

                if (_showBobs)
                {
                    // BobDescriptors is readonly and never null; the original
                    // "?? new List<BobDescriptor>()" guard was dead code.
                    foreach (var entity in BobDescriptors)
                    {
                        if (entity.BobIndex < 0 || entity.BobIndex >= BobMeshGroups.Count)
                            continue;

                        _shader.SetModelView(
                            Matrix4x4.CreateRotationX(entity.RotationX) *
                            Matrix4x4.CreateRotationY(entity.RotationY) *
                            Matrix4x4.CreateRotationZ(entity.RotationZ) *
                            Matrix4x4.CreateScale(entity.ScalingX, entity.ScalingY, entity.ScalingZ) *
                            Matrix4x4.CreateTranslation(entity.PositionX, -entity.PositionY, -entity.PositionZ));
                        RenderMeshNew(pass, BobMeshGroups[entity.BobIndex].MeshGroup, true);
                    }
                }

                if (CurrentSpawnPoint != null)
                {
                    foreach (var spawnPoint in CurrentSpawnPoint.SpawnPoints)
                    {
                        foreach (var entity in spawnPoint.Entities)
                        {
                            _shader.SetModelView(
                                Matrix4x4.CreateRotationX(entity.RotationX) *
                                Matrix4x4.CreateRotationY(entity.RotationY) *
                                Matrix4x4.CreateRotationZ(entity.RotationZ) *
                                Matrix4x4.CreateTranslation(entity.PositionX, -entity.PositionY, -entity.PositionZ));
                            RenderMeshNew(pass, CurrentSpawnPoint.ObjEntryCtrl[entity.ObjectId], true);
                        }

                        // Event activator boxes are drawn double-sided.
                        _graphics.RasterizerState = RasterizerState.CullNone;
                        _shader.SetRenderTexture(pass, _whiteTexture);
                        foreach (var item in spawnPoint.EventActivators)
                        {
                            _shader.SetModelView(
                                Matrix4x4.CreateRotationX(item.RotationX) *
                                Matrix4x4.CreateRotationY(item.RotationY) *
                                Matrix4x4.CreateRotationZ(item.RotationZ) *
                                Matrix4x4.CreateScale(item.ScaleX, item.ScaleY, item.ScaleZ) *
                                Matrix4x4.CreateTranslation(item.PositionX, -item.PositionY, -item.PositionZ));
                            pass.Apply();

                            // Unit cube, semi-transparent red (the color is baked
                            // into the vertices; the original's unused `color`
                            // local has been removed).
                            var vertices = new PositionColoredTextured[]
                            {
                                new PositionColoredTextured(-1, -1, -1, 0, 0, 1f, 0f, 0f, 1f),
                                new PositionColoredTextured(+1, -1, -1, 0, 0, 1f, 0f, 0f, 1f),
                                new PositionColoredTextured(+1, +1, -1, 0, 0, 1f, 0f, 0f, 1f),
                                new PositionColoredTextured(-1, +1, -1, 0, 0, 1f, 0f, 0f, 1f),
                                new PositionColoredTextured(-1, -1, +1, 0, 0, 1f, 0f, 0f, 1f),
                                new PositionColoredTextured(+1, -1, +1, 0, 0, 1f, 0f, 0f, 1f),
                                new PositionColoredTextured(+1, +1, +1, 0, 0, 1f, 0f, 0f, 1f),
                                new PositionColoredTextured(-1, +1, +1, 0, 0, 1f, 0f, 0f, 1f),
                            };
                            var indices = new int[]
                            {
                                0, 1, 3, 3, 1, 2,
                                1, 5, 2, 2, 5, 6,
                                5, 4, 6, 6, 4, 7,
                                4, 0, 7, 7, 0, 3,
                                3, 2, 7, 7, 2, 6,
                                4, 5, 0, 0, 5, 1
                            };

                            _graphics.DrawUserIndexedPrimitives(
                                PrimitiveType.TriangleList, vertices, 0, 8, indices, 0, 12,
                                MeshLoader.PositionColoredTexturedVertexDeclaration);
                        }
                    }
                }
            });
        }

        // Draws all descriptors of a mesh group matching the requested
        // opaque/translucent pass.
        private void RenderMeshNew(EffectPass pass, MeshGroup mesh, bool passRenderOpaque)
        {
            if (mesh.MeshDescriptors == null)
                return;
            foreach (var meshDescriptor in mesh.MeshDescriptors)
            {
                if (meshDescriptor.IsOpaque != passRenderOpaque)
                    continue;
                if (meshDescriptor.Indices.Length == 0)
                    continue;

                // Lower 16 bits hold the texture index; keep the previously
                // bound texture when the index is out of range.
                var textureIndex = meshDescriptor.TextureIndex & 0xffff;
                if (textureIndex < mesh.Textures.Length)
                    _shader.SetRenderTexture(pass, mesh.Textures[textureIndex]);

                _graphics.DrawUserIndexedPrimitives(
                    PrimitiveType.TriangleList,
                    meshDescriptor.Vertices, 0, meshDescriptor.Vertices.Length,
                    meshDescriptor.Indices, 0, meshDescriptor.Indices.Length / 3,
                    MeshLoader.PositionColoredTexturedVertexDeclaration);
            }
        }

        // Loads one named model+texture pair (MAP/SK0/SK1) from the bar, if both
        // entries exist.
        private void LoadMapComponent(List<Bar.Entry> entries, string componentName)
        {
            var modelEntry = entries.FirstOrDefault(x => x.Name == componentName && x.Type == Bar.EntryType.Model);
            var textureEntry = entries.FirstOrDefault(x => x.Name == componentName && x.Type == Bar.EntryType.ModelTexture);
            if (modelEntry == null || textureEntry == null)
                return;

            var model = Mdlx.Read(modelEntry.Stream);
            var textures = ModelTexture.Read(textureEntry.Stream).Images;
            MapMeshGroups.Add(new MeshGroupModel(_graphics, componentName, model, textures, 0));
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Xml;

using Sce.Atf.Dom;
using Sce.Atf;

namespace SettingsEditor
{
    /// <summary>
    /// Class to read DOM data defined by an XML schema.
    /// This is a customized reader that is a copy of the original Sce.Atf.Dom.DomXmlReader but can
    /// skip elements that aren't defined in the schema; the ATF reader just stops reading and
    /// throws an exception.
    /// For differences search for: se_diff
    /// </summary>
    public class DomXmlReader
    {
        /// <summary>
        /// Constructor</summary>
        /// <param name="typeLoader">Type loader to translate element names to DOM node types</param>
        public DomXmlReader(XmlSchemaTypeLoader typeLoader)
        {
            m_typeLoader = typeLoader;
        }

        /// <summary>
        /// Gets the type loader that defines DOM node types</summary>
        public XmlSchemaTypeLoader TypeLoader
        {
            get { return m_typeLoader; }
        }

        /// <summary>
        /// Gets the URI for the current read</summary>
        public Uri Uri
        {
            get { return m_uri; }
        }

        /// <summary>
        /// Gets the root node for the current read</summary>
        public DomNode Root
        {
            get { return m_root; }
        }

        /// <summary>
        /// Gets dictionary with keys for DomNodes</summary>
        public IDictionary<string, DomNode> NodeDictionary
        {
            get { return m_nodeDictionary; }
        }

        /// <summary>
        /// Gets an enumeration of unresolved XML node references</summary>
        public IEnumerable<XmlNodeReference> UnresolvedReferences
        {
            get { return m_nodeReferences; }
            protected set { m_nodeReferences = value.ToList(); }
        }

        /// <summary>
        /// Reads a node tree from a stream</summary>
        /// <param name="stream">Read stream</param>
        /// <param name="uri">URI of stream</param>
        /// <returns>Node tree, from stream</returns>
        public virtual DomNode Read(Stream stream, Uri uri)
        {
            m_uri = uri;
            m_root = null;
            m_nodeDictionary.Clear();
            m_nodeReferences.Clear();

            XmlReaderSettings settings = new XmlReaderSettings();
            settings.IgnoreComments = true;
            settings.IgnoreProcessingInstructions = true;
            //settings.IgnoreWhitespace = true;

            using (XmlReader reader = XmlReader.Create(stream, settings))
            {
                reader.MoveToContent();
                ChildInfo rootElement = CreateRootElement(reader, m_uri);
                if (rootElement == null)
                    throw new InvalidOperationException(
                        "No root element was found in the XML document, probably " +
                        "due to a namespace mismatch with the schema file");

                m_root = ReadElement(rootElement, reader);
                ResolveReferences();
            }

            return m_root;
        }

        /// <summary>
        /// Gets the root element metadata for the reader's current XML node</summary>
        /// <param name="reader">XML reader</param>
        /// <param name="rootUri">URI of XML data</param>
        /// <returns>Root element metadata for the reader's current XML node</returns>
        protected virtual ChildInfo CreateRootElement(XmlReader reader, Uri rootUri)
        {
            string ns = reader.NamespaceURI;
            if (string.IsNullOrEmpty(ns))
            {
                // no xmlns declaration in the file, so grab the first type collection's target namespace
                foreach (XmlSchemaTypeCollection typeCollection in m_typeLoader.GetTypeCollections())
                {
                    ns = typeCollection.DefaultNamespace;
                    break;
                }
            }

            ChildInfo rootElement = m_typeLoader.GetRootElement(ns + ":" + reader.LocalName);
            return rootElement;
        }

        /// <summary>
        /// Gets NodeReferences. A subclass needs this property to process attributes.</summary>
        protected IList<XmlNodeReference> NodeReferences
        {
            get { return m_nodeReferences; }
        }

        /// <summary>
        /// Converts the given string to an attribute value and sets it on the given node using attributeInfo</summary>
        /// <param name="node">DomNode to receive the attribute</param>
        /// <param name="attributeInfo">attributeInfo to set</param>
        /// <param name="valueString">The string representation of the attribute value</param>
        protected virtual void ReadAttribute(DomNode node, AttributeInfo attributeInfo, string valueString)
        {
            if (IsReferenceAttribute(attributeInfo))
            {
                // save reference so it can be resolved after all nodes have been read
                m_nodeReferences.Add(new XmlNodeReference(node, attributeInfo, valueString));
            }
            else
            {
                object value = attributeInfo.Type.Convert(valueString);
                node.SetAttribute(attributeInfo, value);
            }
        }

        /// <summary>
        /// Reads the node specified by the child metadata</summary>
        /// <param name="nodeInfo">Child metadata for node</param>
        /// <param name="reader">XML reader</param>
        /// <returns>DomNode specified by the child metadata, or null when the element's
        /// type is not defined in the schema (se_diff: caller skips such elements)</returns>
        protected virtual DomNode ReadElement(ChildInfo nodeInfo, XmlReader reader)
        {
            // handle polymorphism, if necessary
            DomNodeType type = null;

            // se_diff
            // error CS0122: 'Sce.Atf.Dom.SubstitutionGroupChildRule' is inaccessible due to its protection level
            // SubstitutionGroupChildRule is defined internal in XmlSchemaTypeLoader.cs
            //
            //var substitutionGroupRule = nodeInfo.Rules.OfType<SubstitutionGroupChildRule>().FirstOrDefault();
            //if (substitutionGroupRule != null)
            //{
            //    foreach (var sub in substitutionGroupRule.Substitutions)
            //    {
            //        if (sub.Name == reader.LocalName)
            //        {
            //            type = sub.Type;
            //            break;
            //        }
            //    }
            //    // Fallback to non-substituted version (for example loading an old schema).
            //    if (type == null)
            //        type = GetChildType(nodeInfo.Type, reader);
            //    if (type == null)
            //        throw new InvalidOperationException("Could not match substitution group for child " + nodeInfo.Name);
            //}
            //else
            //{
            type = GetChildType(nodeInfo.Type, reader);
            //}

            if (type == null)
            {
                // se_diff
                // just skip this element
                // BUG FIX: this return was commented out, so type.Name below threw a
                // NullReferenceException for any element not defined in the schema.
                // The caller explicitly handles a null result by calling reader.Skip(),
                // which is the whole point of this customization.
                return null;
            }

            int index = type.Name.LastIndexOf(':');
            string typeNS = type.Name.Substring(0, index);

            DomNode node = new DomNode(type, nodeInfo);

            // read attributes
            while (reader.MoveToNextAttribute())
            {
                if (reader.Prefix == string.Empty ||
                    reader.LookupNamespace(reader.Prefix) == typeNS)
                {
                    AttributeInfo attributeInfo = type.GetAttributeInfo(reader.LocalName);
                    if (attributeInfo != null)
                    {
                        ReadAttribute(node, attributeInfo, reader.Value);
                    }
                }
            }

            // add node to map if it has an id
            if (node.Type.IdAttribute != null)
            {
                string id = node.GetId();
                if (!string.IsNullOrEmpty(id))
                    m_nodeDictionary[id] = node; // don't Add, in case there are multiple DomNodes with the same id
            }

            reader.MoveToElement();

            if (!reader.IsEmptyElement)
            {
                // read child elements
                while (reader.Read())
                {
                    if (reader.NodeType == XmlNodeType.Element)
                    {
                        // look up metadata for this element
                        ChildInfo childInfo = type.GetChildInfo(reader.LocalName);

                        // se_diff
                        // error CS0122: 'Sce.Atf.Dom.SubstitutionGroupChildRule' is inaccessible due to its protection level
                        // SubstitutionGroupChildRule is defined internal in XmlSchemaTypeLoader.cs
                        //
                        //if (childInfo == null)
                        //{
                        //    // Try and get substitution group
                        //    childInfo = GetSubsitutionGroup(type, reader.LocalName);
                        //}

                        if (childInfo != null)
                        {
                            DomNode childNode = ReadElement(childInfo, reader);
                            if (childNode != null)
                            {
                                // childNode is fully populated sub-tree
                                if (childInfo.IsList)
                                {
                                    node.GetChildList(childInfo).Add(childNode);
                                }
                                else
                                {
                                    node.SetChild(childInfo, childNode);
                                }
                            }
                            else
                            {
                                // se_diff
                                // skip unrecognized element
                                reader.Skip();
                                // if that takes us to the end of the enclosing element, break
                                if (reader.NodeType == XmlNodeType.EndElement)
                                    break;
                            }
                        }
                        else
                        {
                            // try reading as an attribute
                            AttributeInfo attributeInfo = type.GetAttributeInfo(reader.LocalName);
                            if (attributeInfo != null)
                            {
                                reader.MoveToElement();
                                if (!reader.IsEmptyElement)
                                {
                                    // read element text
                                    while (reader.Read())
                                    {
                                        if (reader.NodeType == XmlNodeType.Text)
                                        {
                                            ReadAttribute(node, attributeInfo, reader.Value);
                                            // skip child elements, as this is an attribute value
                                            reader.Skip();
                                            break;
                                        }
                                        if (reader.NodeType == XmlNodeType.EndElement)
                                        {
                                            break;
                                        }
                                    }

                                    reader.MoveToContent();
                                }
                            }
                            else
                            {
                                // skip unrecognized element
                                reader.Skip();
                                // if that takes us to the end of the enclosing element, break
                                if (reader.NodeType == XmlNodeType.EndElement)
                                    break;
                            }
                        }
                    }
                    else if (reader.NodeType == XmlNodeType.Text)
                    {
                        AttributeInfo attributeInfo = type.GetAttributeInfo(string.Empty);
                        if (attributeInfo != null)
                        {
                            ReadAttribute(node, attributeInfo, reader.Value);
                        }
                    }
                    else if (reader.NodeType == XmlNodeType.EndElement)
                    {
                        break;
                    }
                }
            }

            reader.MoveToContent();

            return node;
        }

        // se_diff
        // error CS0122: 'Sce.Atf.Dom.SubstitutionGroupChildRule' is inaccessible due to its protection level
        // SubstitutionGroupChildRule is defined internal in XmlSchemaTypeLoader.cs
        //
        //private ChildInfo GetSubsitutionGroup(DomNodeType type, string localName)
        //{
        //    foreach (var childInfo in type.Children)
        //    {
        //        var substitutionGroupRule = childInfo.Rules.OfType<SubstitutionGroupChildRule>().FirstOrDefault();
        //        if (substitutionGroupRule != null)
        //        {
        //            // This is a candidate group
        //            foreach (var substitutechildInfo in substitutionGroupRule.Substitutions)
        //            {
        //                // If name of substitutechildInfo matches localName then return childInfo
        //                if (substitutechildInfo.Name == localName)
        //                {
        //                    return childInfo;
        //                }
        //            }
        //        }
        //    }
        //    return null;
        //}

        /// <summary>
        /// Determines if attribute is a reference</summary>
        /// <param name="attributeInfo">Attribute</param>
        /// <returns>True iff attribute is reference</returns>
        protected virtual bool IsReferenceAttribute(AttributeInfo attributeInfo)
        {
            return (attributeInfo.Type.Type == AttributeTypes.Reference);
        }

        /// <summary>
        /// Gets a derived node type, given a base type, namespace, and type name</summary>
        /// <param name="baseType">Base node type</param>
        /// <param name="ns">Type namespace</param>
        /// <param name="typeName">Type name</param>
        /// <returns>Derived node type</returns>
        protected virtual DomNodeType GetDerivedType(DomNodeType baseType, string ns, string typeName)
        {
            return m_typeLoader.GetNodeType(ns + ":" + typeName);
        }

        /// <summary>
        /// Resolves XML node references</summary>
        protected virtual void ResolveReferences()
        {
            List<XmlNodeReference> unresolved = new List<XmlNodeReference>();
            foreach (XmlNodeReference nodeReference in m_nodeReferences)
            {
                // ID fixup ported from ATF 2 DomXmlResolver.Resolve(DomUri)
                string id = nodeReference.Value.TrimStart('#');
                id = Uri.UnescapeDataString(id); // remove escape characters
                id = id.TrimStart(s_trimChars);

                DomNode refNode;
                if (m_nodeDictionary.TryGetValue(id, out refNode))
                {
                    nodeReference.Node.SetAttribute(nodeReference.AttributeInfo, refNode);
                }
                else
                {
                    unresolved.Add(nodeReference);
                    // keep the raw string value so the reference is not silently lost
                    object value = nodeReference.AttributeInfo.Type.Convert(nodeReference.Value);
                    nodeReference.Node.SetAttribute(nodeReference.AttributeInfo, value);
                }
            }

            m_nodeReferences = unresolved;
        }

        /// <summary>
        /// Gets node type of child of a node type</summary>
        /// <param name="type">Node type</param>
        /// <param name="reader">XML reader</param>
        /// <returns>Child's node type, or null if an xsi:type is present but unknown
        /// (se_diff: warns instead of throwing)</returns>
        protected DomNodeType GetChildType(DomNodeType type, XmlReader reader)
        {
            DomNodeType result = type;

            // check for xsi:type attribute, for polymorphic elements
            string typeName = reader.GetAttribute("xsi:type");
            if (typeName != null)
            {
                // check for qualified type name
                string prefix = string.Empty;
                int index = typeName.IndexOf(':');
                if (index >= 0)
                {
                    prefix = typeName.Substring(0, index);
                    index++;
                    typeName = typeName.Substring(index, typeName.Length - index);
                }

                string ns = reader.LookupNamespace(prefix);
                result = GetDerivedType(result, ns, typeName);

                if (result == null)
                {
                    string baseTypeName = type != null ? type.Name : "<none>";
                    // se_diff
                    // just warn instead of throwing
                    //
                    //throw new InvalidOperationException( string.Format(
                    //    "No type was found with the name {0} in namespace {1} that derives from {2}", typeName, ns, baseTypeName ) );
                    Outputs.WriteLine(
                        OutputMessageType.Warning,
                        string.Format("No type was found with the name {0} in namespace {1} that derives from {2}", typeName, ns, baseTypeName));
                }
            }

            return result;
        }

        private static readonly char[] s_trimChars = new[] { '|' };

        private readonly XmlSchemaTypeLoader m_typeLoader;
        private DomNode m_root;
        private Uri m_uri;
        private readonly Dictionary<string, DomNode> m_nodeDictionary = new Dictionary<string, DomNode>();
        private List<XmlNodeReference> m_nodeReferences = new List<XmlNodeReference>();
    }
}
#region License /* * WebSocketFrame.cs * * The MIT License * * Copyright (c) 2012-2021 sta.blockhead * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
 */
#endregion

#region Contributors
/*
 * Contributors:
 * - Chris Swiedler
 */
#endregion

using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Text;

namespace WebSocketSharp
{
  // Represents a single WebSocket frame (RFC 6455, section 5.2): FIN/RSV flags,
  // opcode, payload length (with its 2- or 8-byte extension), optional masking
  // key and the payload data.
  internal class WebSocketFrame : IEnumerable<byte>
  {
    #region Private Fields

    private byte[] _extPayloadLength;
    private Fin _fin;
    private Mask _mask;
    private byte[] _maskingKey;
    private Opcode _opcode;
    private PayloadData _payloadData;
    private byte _payloadLength;  // 0-125 literal, 126/127 = extended length follows
    private Rsv _rsv1;
    private Rsv _rsv2;
    private Rsv _rsv3;

    #endregion

    #region Private Constructors

    // Used only by processHeader, which fills the fields in manually.
    private WebSocketFrame ()
    {
    }

    #endregion

    #region Internal Constructors

    internal WebSocketFrame (Opcode opcode, PayloadData payloadData, bool mask)
      : this (Fin.Final, opcode, payloadData, false, mask)
    {
    }

    internal WebSocketFrame (
      Fin fin, Opcode opcode, byte[] data, bool compressed, bool mask
    )
      : this (fin, opcode, new PayloadData (data), compressed, mask)
    {
    }

    // Core constructor: computes the payload-length encoding (7-bit, 16-bit or
    // 64-bit) and, when requested, masks the payload in place.
    internal WebSocketFrame (
      Fin fin, Opcode opcode, PayloadData payloadData, bool compressed, bool mask
    )
    {
      _fin = fin;
      _opcode = opcode;

      // RSV1 signals per-message compression, valid only on data frames.
      _rsv1 = opcode.IsData () && compressed ? Rsv.On : Rsv.Off;
      _rsv2 = Rsv.Off;
      _rsv3 = Rsv.Off;

      var len = payloadData.Length;

      if (len < 126) {
        _payloadLength = (byte) len;
        _extPayloadLength = WebSocket.EmptyBytes;
      }
      else if (len < 0x010000) {
        _payloadLength = (byte) 126;
        _extPayloadLength = ((ushort) len).ToByteArray (ByteOrder.Big);
      }
      else {
        _payloadLength = (byte) 127;
        _extPayloadLength = len.ToByteArray (ByteOrder.Big);
      }

      if (mask) {
        _mask = Mask.On;
        _maskingKey = createMaskingKey ();
        payloadData.Mask (_maskingKey);
      }
      else {
        _mask = Mask.Off;
        _maskingKey = WebSocket.EmptyBytes;
      }

      _payloadData = payloadData;
    }

    #endregion

    #region Internal Properties

    // The real payload length, decoded from the extended-length bytes when the
    // 7-bit field holds 126 or 127.
    internal ulong ExactPayloadLength {
      get {
        return _payloadLength < 126
               ? _payloadLength
               : _payloadLength == 126
                 ? _extPayloadLength.ToUInt16 (ByteOrder.Big)
                 : _extPayloadLength.ToUInt64 (ByteOrder.Big);
      }
    }

    // Number of bytes the extended payload length occupies on the wire: 0, 2 or 8.
    internal int ExtendedPayloadLengthWidth {
      get {
        return _payloadLength < 126
               ? 0
               : _payloadLength == 126
                 ? 2
                 : 8;
      }
    }

    #endregion

    #region Public Properties

    public byte[] ExtendedPayloadLength {
      get {
        return _extPayloadLength;
      }
    }

    public Fin Fin {
      get {
        return _fin;
      }
    }

    public bool IsBinary {
      get {
        return _opcode == Opcode.Binary;
      }
    }

    public bool IsClose {
      get {
        return _opcode == Opcode.Close;
      }
    }

    public bool IsCompressed {
      get {
        return _rsv1 == Rsv.On;
      }
    }

    public bool IsContinuation {
      get {
        return _opcode == Opcode.Cont;
      }
    }

    public bool IsControl {
      get {
        return _opcode >= Opcode.Close;
      }
    }

    public bool IsData {
      get {
        return _opcode == Opcode.Text || _opcode == Opcode.Binary;
      }
    }

    public bool IsFinal {
      get {
        return _fin == Fin.Final;
      }
    }

    public bool IsFragment {
      get {
        return _fin == Fin.More || _opcode == Opcode.Cont;
      }
    }

    public bool IsMasked {
      get {
        return _mask == Mask.On;
      }
    }

    public bool IsPing {
      get {
        return _opcode == Opcode.Ping;
      }
    }

    public bool IsPong {
      get {
        return _opcode == Opcode.Pong;
      }
    }

    public bool IsText {
      get {
        return _opcode == Opcode.Text;
      }
    }

    // Total on-wire length of the frame in bytes.
    public ulong Length {
      get {
        return 2
               + (ulong) (_extPayloadLength.Length + _maskingKey.Length)
               + _payloadData.Length;
      }
    }

    public Mask Mask {
      get {
        return _mask;
      }
    }

    public byte[] MaskingKey {
      get {
        return _maskingKey;
      }
    }

    public Opcode Opcode {
      get {
        return _opcode;
      }
    }

    public PayloadData PayloadData {
      get {
        return _payloadData;
      }
    }

    public byte PayloadLength {
      get {
        return _payloadLength;
      }
    }

    public Rsv Rsv1 {
      get {
        return _rsv1;
      }
    }

    public Rsv Rsv2 {
      get {
        return _rsv2;
      }
    }

    public Rsv Rsv3 {
      get {
        return _rsv3;
      }
    }

    #endregion

    #region Private Methods

    // Generates the 4-byte client masking key (RFC 6455 section 5.3).
    private static byte[] createMaskingKey ()
    {
      var key = new byte[4];
      WebSocket.RandomNumber.GetBytes (key);

      return key;
    }

    // Renders the raw frame bytes as a binary table, 4 bytes per row, for debugging.
    private static string dump (WebSocketFrame frame)
    {
      var len = frame.Length;
      var cnt = (long) (len / 4);
      var rem = (int) (len % 4);

      int cntDigit;
      string cntFmt;

      if (cnt < 10000) {
        cntDigit = 4;
        cntFmt = "{0,4}";
      }
      else if (cnt < 0x010000) {
        cntDigit = 4;
        cntFmt = "{0,4:X}";
      }
      else if (cnt < 0x0100000000) {
        cntDigit = 8;
        cntFmt = "{0,8:X}";
      }
      else {
        cntDigit = 16;
        cntFmt = "{0,16:X}";
      }

      var spFmt = String.Format ("{{0,{0}}}", cntDigit);

      var headerFmt = String.Format (
                        @"
{0} 01234567 89ABCDEF 01234567 89ABCDEF
{0}+--------+--------+--------+--------+\n", spFmt
                      );

      var lineFmt = String.Format (
                      "{0}|{{1,8}} {{2,8}} {{3,8}} {{4,8}}|\n", cntFmt
                    );

      var footerFmt = String.Format (
                        "{0}+--------+--------+--------+--------+", spFmt
                      );

      var buff = new StringBuilder (64);

      // Closure-based line printer that keeps its own running line counter.
      Func<Action<string, string, string, string>> linePrinter =
        () => {
          long lineCnt = 0;

          return (arg1, arg2, arg3, arg4) => {
                   buff.AppendFormat (
                     lineFmt, ++lineCnt, arg1, arg2, arg3, arg4
                   );
                 };
        };

      var printLine = linePrinter ();
      var bytes = frame.ToArray ();

      buff.AppendFormat (headerFmt, String.Empty);

      for (long i = 0; i <= cnt; i++) {
        var j = i * 4;

        if (i < cnt) {
          printLine (
            Convert.ToString (bytes[j], 2).PadLeft (8, '0'),
            Convert.ToString (bytes[j + 1], 2).PadLeft (8, '0'),
            Convert.ToString (bytes[j + 2], 2).PadLeft (8, '0'),
            Convert.ToString (bytes[j + 3], 2).PadLeft (8, '0')
          );

          continue;
        }

        // Last, partial row (1-3 leftover bytes).
        if (rem > 0) {
          printLine (
            Convert.ToString (bytes[j], 2).PadLeft (8, '0'),
            rem >= 2
            ? Convert.ToString (bytes[j + 1], 2).PadLeft (8, '0')
            : String.Empty,
            rem == 3
            ? Convert.ToString (bytes[j + 2], 2).PadLeft (8, '0')
            : String.Empty,
            String.Empty
          );
        }
      }

      buff.AppendFormat (footerFmt, String.Empty);

      return buff.ToString ();
    }

    // Renders the decoded frame fields as human-readable text for debugging.
    private static string print (WebSocketFrame frame)
    {
      // Payload Length
      var payloadLen = frame._payloadLength;

      // Extended Payload Length
      var extPayloadLen = payloadLen > 125
                          ? frame.ExactPayloadLength.ToString ()
                          : String.Empty;

      // Masking Key
      var maskingKey = BitConverter.ToString (frame._maskingKey);

      // Payload Data: only plain, final, unmasked, uncompressed text frames are
      // decoded as UTF-8; long payloads are elided as "---".
      var payload = payloadLen == 0
                    ? String.Empty
                    : payloadLen > 125
                      ? "---"
                      : !frame.IsText
                        || frame.IsFragment
                        || frame.IsMasked
                        || frame.IsCompressed
                        ? frame._payloadData.ToString ()
                        : utf8Decode (frame._payloadData.ApplicationData);

      var fmt = @"
                    FIN: {0}
                   RSV1: {1}
                   RSV2: {2}
                   RSV3: {3}
                 Opcode: {4}
                   MASK: {5}
         Payload Length: {6}
Extended Payload Length: {7}
            Masking Key: {8}
           Payload Data: {9}";

      return String.Format (
               fmt,
               frame._fin,
               frame._rsv1,
               frame._rsv2,
               frame._rsv3,
               frame._opcode,
               frame._mask,
               payloadLen,
               extPayloadLen,
               maskingKey,
               payload
             );
    }

    // Parses the fixed 2-byte frame header and validates it against the protocol
    // rules (supported opcode, no compressed control frames, no fragmented or
    // over-long control frames).
    private static WebSocketFrame processHeader (byte[] header)
    {
      if (header.Length != 2) {
        var msg = "The header part of a frame could not be read.";

        throw new WebSocketException (msg);
      }

      // FIN
      var fin = (header[0] & 0x80) == 0x80 ? Fin.Final : Fin.More;

      // RSV1
      var rsv1 = (header[0] & 0x40) == 0x40 ? Rsv.On : Rsv.Off;

      // RSV2
      var rsv2 = (header[0] & 0x20) == 0x20 ? Rsv.On : Rsv.Off;

      // RSV3
      var rsv3 = (header[0] & 0x10) == 0x10 ? Rsv.On : Rsv.Off;

      // Opcode
      var opcode = (byte) (header[0] & 0x0f);

      // MASK
      var mask = (header[1] & 0x80) == 0x80 ? Mask.On : Mask.Off;

      // Payload Length
      var payloadLen = (byte) (header[1] & 0x7f);

      if (!opcode.IsSupported ()) {
        var msg = "A frame has an unsupported opcode.";

        throw new WebSocketException (CloseStatusCode.ProtocolError, msg);
      }

      if (!opcode.IsData () && rsv1 == Rsv.On) {
        var msg = "A non data frame is compressed.";

        throw new WebSocketException (CloseStatusCode.ProtocolError, msg);
      }

      if (opcode.IsControl ()) {
        if (fin == Fin.More) {
          var msg = "A control frame is fragmented.";

          throw new WebSocketException (CloseStatusCode.ProtocolError, msg);
        }

        if (payloadLen > 125) {
          var msg = "A control frame has too long payload length.";

          throw new WebSocketException (CloseStatusCode.ProtocolError, msg);
        }
      }

      var frame = new WebSocketFrame ();
      frame._fin = fin;
      frame._rsv1 = rsv1;
      frame._rsv2 = rsv2;
      frame._rsv3 = rsv3;
      frame._opcode = (Opcode) opcode;
      frame._mask = mask;
      frame._payloadLength = payloadLen;

      return frame;
    }

    // Reads the 2- or 8-byte extended payload length, if the header requires one.
    private static WebSocketFrame readExtendedPayloadLength (
      Stream stream, WebSocketFrame frame
    )
    {
      var len = frame.ExtendedPayloadLengthWidth;

      if (len == 0) {
        frame._extPayloadLength = WebSocket.EmptyBytes;

        return frame;
      }

      var bytes = stream.ReadBytes (len);

      if (bytes.Length != len) {
        var msg = "The extended payload length of a frame could not be read.";

        throw new WebSocketException (msg);
      }

      frame._extPayloadLength = bytes;

      return frame;
    }

    private static void readExtendedPayloadLengthAsync (
      Stream stream,
      WebSocketFrame frame,
      Action<WebSocketFrame> completed,
      Action<Exception> error
    )
    {
      var len = frame.ExtendedPayloadLengthWidth;

      if (len == 0) {
        frame._extPayloadLength = WebSocket.EmptyBytes;

        completed (frame);

        return;
      }

      stream.ReadBytesAsync (
        len,
        bytes => {
          if (bytes.Length != len) {
            var msg = "The extended payload length of a frame could not be read.";

            throw new WebSocketException (msg);
          }

          frame._extPayloadLength = bytes;

          completed (frame);
        },
        error
      );
    }

    private static WebSocketFrame readHeader (Stream stream)
    {
      var bytes = stream.ReadBytes (2);

      return processHeader (bytes);
    }

    private static void readHeaderAsync (
      Stream stream, Action<WebSocketFrame> completed, Action<Exception> error
    )
    {
      stream.ReadBytesAsync (
        2,
        bytes => {
          var frame = processHeader (bytes);

          completed (frame);
        },
        error
      );
    }

    // Reads the 4-byte masking key when the MASK bit is set.
    private static WebSocketFrame readMaskingKey (
      Stream stream, WebSocketFrame frame
    )
    {
      if (!frame.IsMasked) {
        frame._maskingKey = WebSocket.EmptyBytes;

        return frame;
      }

      var len = 4;
      var bytes = stream.ReadBytes (len);

      if (bytes.Length != len) {
        var msg = "The masking key of a frame could not be read.";

        throw new WebSocketException (msg);
      }

      frame._maskingKey = bytes;

      return frame;
    }

    private static void readMaskingKeyAsync (
      Stream stream,
      WebSocketFrame frame,
      Action<WebSocketFrame> completed,
      Action<Exception> error
    )
    {
      if (!frame.IsMasked) {
        frame._maskingKey = WebSocket.EmptyBytes;

        completed (frame);

        return;
      }

      var len = 4;

      stream.ReadBytesAsync (
        len,
        bytes => {
          if (bytes.Length != len) {
            var msg = "The masking key of a frame could not be read.";

            throw new WebSocketException (msg);
          }

          frame._maskingKey = bytes;

          completed (frame);
        },
        error
      );
    }

    // Reads the payload; payloads needing the 8-byte length field are read in
    // 1024-byte chunks, everything else in a single read.
    private static WebSocketFrame readPayloadData (
      Stream stream, WebSocketFrame frame
    )
    {
      var exactLen = frame.ExactPayloadLength;

      if (exactLen > PayloadData.MaxLength) {
        var msg = "A frame has too long payload length.";

        throw new WebSocketException (CloseStatusCode.TooBig, msg);
      }

      if (exactLen == 0) {
        frame._payloadData = PayloadData.Empty;

        return frame;
      }

      var len = (long) exactLen;
      var bytes = frame._payloadLength < 127
                  ? stream.ReadBytes ((int) exactLen)
                  : stream.ReadBytes (len, 1024);

      if (bytes.LongLength != len) {
        var msg = "The payload data of a frame could not be read.";

        throw new WebSocketException (msg);
      }

      frame._payloadData = new PayloadData (bytes, len);

      return frame;
    }

    private static void readPayloadDataAsync (
      Stream stream,
      WebSocketFrame frame,
      Action<WebSocketFrame> completed,
      Action<Exception> error
    )
    {
      var exactLen = frame.ExactPayloadLength;

      if (exactLen > PayloadData.MaxLength) {
        var msg = "A frame has too long payload length.";

        throw new WebSocketException (CloseStatusCode.TooBig, msg);
      }

      if (exactLen == 0) {
        frame._payloadData = PayloadData.Empty;

        completed (frame);

        return;
      }

      var len = (long) exactLen;

      Action<byte[]> comp =
        bytes => {
          if (bytes.LongLength != len) {
            var msg = "The payload data of a frame could not be read.";

            throw new WebSocketException (msg);
          }

          frame._payloadData = new PayloadData (bytes, len);

          completed (frame);
        };

      if (frame._payloadLength < 127) {
        stream.ReadBytesAsync ((int) exactLen, comp, error);

        return;
      }

      stream.ReadBytesAsync (len, 1024, comp, error);
    }

    // Best-effort UTF-8 decode; returns null on invalid input instead of throwing.
    private static string utf8Decode (byte[] bytes)
    {
      try {
        return Encoding.UTF8.GetString (bytes);
      }
      catch {
        return null;
      }
    }

    #endregion

    #region Internal Methods

    internal static WebSocketFrame CreateCloseFrame (
      PayloadData payloadData, bool mask
    )
    {
      return new WebSocketFrame (
               Fin.Final, Opcode.Close, payloadData, false, mask
             );
    }

    internal static WebSocketFrame CreatePingFrame (bool mask)
    {
      return new WebSocketFrame (
               Fin.Final, Opcode.Ping, PayloadData.Empty, false, mask
             );
    }

    internal static WebSocketFrame CreatePingFrame (byte[] data, bool mask)
    {
      return new WebSocketFrame (
               Fin.Final, Opcode.Ping, new PayloadData (data), false, mask
             );
    }

    internal static WebSocketFrame CreatePongFrame (
      PayloadData payloadData, bool mask
    )
    {
      return new WebSocketFrame (
               Fin.Final, Opcode.Pong, payloadData, false, mask
             );
    }

    // Synchronously reads one complete frame from the stream:
    // header -> extended length -> masking key -> payload.
    internal static WebSocketFrame ReadFrame (Stream stream, bool unmask)
    {
      var frame = readHeader (stream);

      readExtendedPayloadLength (stream, frame);
      readMaskingKey (stream, frame);
      readPayloadData (stream, frame);

      if (unmask)
        frame.Unmask ();

      return frame;
    }

    // Asynchronous counterpart of ReadFrame; the four read stages are chained
    // via continuations, each forwarding failures to <paramref name="error"/>.
    internal static void ReadFrameAsync (
      Stream stream,
      bool unmask,
      Action<WebSocketFrame> completed,
      Action<Exception> error
    )
    {
      readHeaderAsync (
        stream,
        frame =>
          readExtendedPayloadLengthAsync (
            stream,
            frame,
            frame1 =>
              readMaskingKeyAsync (
                stream,
                frame1,
                frame2 =>
                  readPayloadDataAsync (
                    stream,
                    frame2,
                    frame3 => {
                      if (unmask)
                        frame3.Unmask ();

                      completed (frame3);
                    },
                    error
                  ),
                error
              ),
            error
          ),
        error
      );
    }

    // Unmasks the payload in place and clears the MASK state (masking is an
    // involution, so applying the key again restores the original data).
    internal void Unmask ()
    {
      if (_mask == Mask.Off)
        return;

      _payloadData.Mask (_maskingKey);

      _maskingKey = WebSocket.EmptyBytes;
      _mask = Mask.Off;
    }

    #endregion

    #region Public Methods

    public IEnumerator<byte> GetEnumerator ()
    {
      foreach (var b in ToArray ())
        yield return b;
    }

    public void Print (bool dumped)
    {
      var val = dumped ? dump (this) : print (this);

      Console.WriteLine (val);
    }

    public string PrintToString (bool dumped)
    {
      return dumped ? dump (this) : print (this);
    }

    // Serializes the frame to its wire format: 2-byte header, optional extended
    // length, optional masking key, then the payload.
    public byte[] ToArray ()
    {
      using (var buff = new MemoryStream ()) {
        // Pack FIN(1) RSV1-3(3) Opcode(4) MASK(1) PayloadLength(7) into 16 bits.
        var header = (int) _fin;
        header = (header << 1) + (int) _rsv1;
        header = (header << 1) + (int) _rsv2;
        header = (header << 1) + (int) _rsv3;
        header = (header << 4) + (int) _opcode;
        header = (header << 1) + (int) _mask;
        header = (header << 7) + (int) _payloadLength;

        var headerAsUshort = (ushort) header;
        var headerAsBytes = headerAsUshort.ToByteArray (ByteOrder.Big);

        buff.Write (headerAsBytes, 0, 2);

        if (_payloadLength > 125) {
          var cnt = _payloadLength == 126 ? 2 : 8;

          buff.Write (_extPayloadLength, 0, cnt);
        }

        if (_mask == Mask.On)
          buff.Write (_maskingKey, 0, 4);

        if (_payloadLength > 0) {
          var bytes = _payloadData.ToArray ();

          if (_payloadLength < 127)
            buff.Write (bytes, 0, bytes.Length);
          else
            buff.WriteBytes (bytes, 1024);
        }

        buff.Close ();

        return buff.ToArray ();
      }
    }

    public override string ToString ()
    {
      var val = ToArray ();

      return BitConverter.ToString (val);
    }

    #endregion

    #region Explicit Interface Implementations

    IEnumerator IEnumerable.GetEnumerator ()
    {
      return GetEnumerator ();
    }

    #endregion
  }
}
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using MyTextBox;

namespace NotePad__
{
    // Find / Find-and-Replace dialog. Operates on the currently active
    // TextArea exposed by TabControlClass; highlights matches and walks
    // them with an internal cursor (indexOfSearchText).
    public partial class FindForm : Form
    {
        //int preSearchText_Length = 0;
        // Index into textsFound of the currently selected match; -1 = none yet.
        int indexOfSearchText = -1;
        // Start offsets of every match of the current search term.
        List<int> textsFound = new List<int>();
        // Back color applied to every found occurrence.
        Color AllFoundTextBackColor = Color.Aquamarine;
        //Color SelectedFoundTextBackColor = Color.Orange;
        //Color MyDefaultBackColor = Color.White;
        // Snapshot of the text area content at the last search; used to
        // detect edits that invalidate the cached match positions.
        string previousText = "";

        public FindForm()
        {
            InitializeComponent();
        }

        // "Find": clear previous highlights, find and highlight all matches,
        // and reset the match cursor.
        private void findButton_Click(object sender, EventArgs e)
        {
            TextArea currentTextArea = TabControlClass.CurrentTextArea;
            previousText = currentTextArea.Text;
            ////Remove highlighted text of previous search
            currentTextArea.ClearBackColor(currentTextArea.BackColor);
            // Highlighted backcolor of all found text
            //TextFound = currentTextArea.FindAll(searchTermTextBox.Text);
            //currentTextArea.ColorBackGround(TextFound, searchTermTextBox.Text.Length, AllFoundTextBackColor);
            textsFound.Clear();
            textsFound = currentTextArea.FindAndColorAll(searchTermTextBox.Text, AllFoundTextBackColor);
            indexOfSearchText = -1;
            this.Focus();
        }

        // "Find Next": advance the cursor (wrapping to the first match) and
        // select that occurrence in the text area.
        private void findNextButton_Click(object sender, EventArgs e)
        {
            TextArea currentTextArea = TabControlClass.CurrentTextArea;
            if(previousText != currentTextArea.Text)
            {
                previousText = currentTextArea.Text;
                // Re-run the search: edits may have shifted the cached match positions.
                textsFound.Clear();
                textsFound = currentTextArea.FindAll(searchTermTextBox.Text);
            }
            // NOTE(review): presumably suppresses the TextArea's own event
            // handling while we move the selection — confirm in TextArea.
            currentTextArea.BlockAllAction = true;
            if (textsFound.Count != 0)
            {
                if (searchTermTextBox.Text.Length == 0) return;
                ////Change backcolor of current found text
                //if (indexOfSearchText != -1)
                //{
                //    currentTextArea.Select(TextFound[indexOfSearchText], searchTermTextBox.Text.Length);
                //    currentTextArea.SelectionBackColor = AllFoundTextBackColor;
                //}
                // Advance the cursor, wrapping past the last match.
                indexOfSearchText++;
                if (indexOfSearchText == textsFound.Count)
                {
                    indexOfSearchText = 0;
                }
                // Select the current match.
                currentTextArea.Select(textsFound[indexOfSearchText], searchTermTextBox.Text.Length);
                //currentTextArea.SelectionBackColor = SelectedFoundTextBackColor;
            }
            currentTextArea.BlockAllAction = false;
            currentTextArea.Focus();
        }

        // "Find Previous": mirror of findNextButton_Click, moving the cursor
        // backwards and wrapping to the last match.
        private void findPreviousButton_Click(object sender, EventArgs e)
        {
            TextArea currentTextArea = TabControlClass.CurrentTextArea;
            if (previousText != currentTextArea.Text)
            {
                previousText = currentTextArea.Text;
                // Re-run the search: edits may have shifted the cached match positions.
                textsFound.Clear();
                textsFound = currentTextArea.FindAll(searchTermTextBox.Text);
            }
            currentTextArea.BlockAllAction = true;
            if (textsFound.Count != 0)
            {
                if (searchTermTextBox.Text.Length == 0) return;
                ////Change backcolor of current found text
                //if (indexOfSearchText != -1)
                //{
                //    currentTextArea.Select(TextFound[indexOfSearchText], searchTermTextBox.Text.Length);
                //    currentTextArea.SelectionBackColor = AllFoundTextBackColor;
                //}
                // Step the cursor back, wrapping before the first match.
                indexOfSearchText--;
                if (indexOfSearchText <= -1)
                {
                    indexOfSearchText = textsFound.Count - 1;
                }
                // Select the current match.
                currentTextArea.Select(textsFound[indexOfSearchText], searchTermTextBox.Text.Length);
                //currentTextArea.SelectionBackColor = SelectedFoundTextBackColor;
            }
            currentTextArea.BlockAllAction = false;
            currentTextArea.Focus();
        }

        // "Replace": replace the currently selected match with the replacement
        // text. No-op when nothing has been found/selected yet or the
        // replacement equals the search term.
        private void replaceButton_Click(object sender, EventArgs e)
        {
            TextArea currentTextArea = TabControlClass.CurrentTextArea;
            currentTextArea.Focus();
            if (indexOfSearchText == -1 || searchTermTextBox.Text.Equals(replacementTextBox.Text) || currentTextArea.SelectionLength == 0)
            {
                return;
            }
            //currentTextArea.StopRecordingUndo();
            // Re-run the search: edits may have shifted the cached match positions.
            // NOTE(review): if edits reduced the match count, indexOfSearchText
            // can exceed textsFound.Count here and the Select below would throw
            // — TODO confirm and clamp if needed.
            textsFound.Clear();
            textsFound = currentTextArea.FindAll(searchTermTextBox.Text);
            currentTextArea.Select(textsFound[indexOfSearchText], searchTermTextBox.Text.Length);
            //currentTextArea.SelectionBackColor = MyDefaultBackColor;
            //currentTextArea.SelectedText = replacementTextBox.Text;
            currentTextArea.ReplaceSelectedText(replacementTextBox.Text);
            // Keep the replacement selected.
            currentTextArea.Select(textsFound[indexOfSearchText], replacementTextBox.Text.Length);
            //since we have changed the found text, the number of position of found text now is off by one
            //that's why we lessen indexOFSearchText by one for the purpose of find next
            //indexOfSearchText--;
            //currentTextArea.ContinueRecordingUndo();
        }

        // "Replace All": single string.Replace over the whole document,
        // applied through ReplaceSelectedText so the edit is undoable.
        private void replaceAllButton_Click(object sender, EventArgs e)
        {
            if (searchTermTextBox.Text.Equals(replacementTextBox.Text))
            {
                return;
            }
            TextArea currentTextArea = TabControlClass.CurrentTextArea;
            //currentTextArea.StopRecordingUndo();
            ////get this again because we might have changed the text in text area and it made some of the found text position changed
            //TextFound.Clear();
            //TextFound = currentTextArea.FindAll(searchTermTextBox.Text);
            //int index = 0;
            //int offset = 0;
            //while (index < TextFound.Count)
            //{
            //    currentTextArea.Select(TextFound[index], searchTermTextBox.Text.Length);
            //    //currentTextArea.SelectionBackColor = MyDefaultBackColor;
            //    currentTextArea.SelectedText = replacementTextBox.Text;
            //    index++;
            //    if (index < TextFound.Count)
            //    {
            //        offset += searchTermTextBox.Text.Length - replacementTextBox.Text.Length;
            //        TextFound[index] = TextFound[index] - offset;
            //    }
            //}
            string textToReplace = currentTextArea.Text.Replace(searchTermTextBox.Text, replacementTextBox.Text);
            currentTextArea.Select(0, currentTextArea.TextLength);
            currentTextArea.ReplaceSelectedText(textToReplace);
            //currentTextArea.ContinueRecordingUndo();
        }

        // Fade the dialog when it loses focus so it doesn't obscure the text.
        private void FindForm_Deactivate(object sender, EventArgs e)
        {
            try
            {
                this.Opacity = 0.3;
            }
            catch { } // setting Opacity can throw while the form is disposing
        }

        private void FindForm_Activated(object sender, EventArgs e)
        {
            this.Opacity = 1;
        }

        // Show the dialog in find-only mode (replace controls hidden).
        public void ShowFindForm()
        {
            this.Text = "Find";
            replacementLabel.Visible = false;
            replacementTextBox.Visible = false;
            replaceButton.Visible = false;
            replaceAllButton.Visible = false;
            mathCaseCheckBox.Location = replacementLabel.Location;
            this.Width = 380;
            this.Show();
        }

        // Show the dialog with the replace controls visible.
        public void ShowFindAndReplaceForm()
        {
            this.Text = "Find And Replace";
            replacementLabel.Visible = true;
            replacementTextBox.Visible = true;
            replaceButton.Visible = true;
            replaceAllButton.Visible = true;
            mathCaseCheckBox.Location = new Point(12, 103);
            this.AutoSize = true;
            this.Show();
        }

        // Ctrl+F / Ctrl+H toggle between the two modes while the dialog has focus.
        protected override bool ProcessCmdKey(ref Message msg, Keys keyData)
        {
            if (keyData == (Keys.H | Keys.Control))
            {
                ShowFindAndReplaceForm();
                return true;
            }
            if (keyData == (Keys.F | Keys.Control))
            {
                ShowFindForm();
                return true;
            }
            return base.ProcessCmdKey(ref msg, keyData);
        }

        // Closing is intercepted: clear highlights in every open tab and
        // hide the dialog instead of disposing it.
        private void FindForm_FormClosing(object sender, FormClosingEventArgs e)
        {
            foreach (TabPage tabPage in TabControlClass.TabControl.TabPages)
            {
                TextArea textArea = (tabPage.Controls[0] as MyRichTextBox).TextArea;
                textArea.ClearBackColor(textArea.BackColor);
            }
            this.Visible = false;
            e.Cancel = true;
        }
    }
}
//==============================================================================
// TorqueLab -> Datablock Editor Persistence Manager
// Copyright (c) 2015 All Right Reserved, http://nordiklab.com/
//------------------------------------------------------------------------------
//==============================================================================
//==============================================================================
// Scene Datablock Saving (PersistenceManager)
//==============================================================================
//==============================================================================
// Save every datablock the persistence manager has flagged as dirty.
function SceneEd::saveAllDatablocks(%this)
{
   SceneEd.DBPM.saveDirty();
}
//------------------------------------------------------------------------------
//==============================================================================
// Save the datablocks currently selected in SceneDatablockTree.
function SceneEd::save( %this )
{
   // Clear the first responder to capture any inspector changes
   %ctrl = canvas.getFirstResponder();
   if( isObject(%ctrl) )
      %ctrl.clearFirstResponder();

   %tree = SceneDatablockTree;
   %count = %tree.getSelectedItemsCount();
   %selected = %tree.getSelectedItemList();

   // Single selection goes through the rename-aware path.
   // NOTE(review): this uses DbEd.activeDatablock, not the tree selection —
   // presumably they are kept in sync; verify against the editor UI code.
   if (%count == 1)
   {
      %this.saveSingleData(DbEd.activeDatablock);
      return;
   }

   for( %i = 0; %i < %count; %i ++ )
   {
      %id = getWord( %selected, %i );
      %db = %tree.getItemValue( %id );

      if( %this.DBPM.isDirty( %db ) )
      {
         %this.DBPM.saveDirtyObject( %db );
         %this.flagDatablockAsDirty( %db, false );
      }
   }
}
//------------------------------------------------------------------------------
//==============================================================================
// Save one datablock; if the filename edit box differs from the datablock's
// current file, move the datablock to the new file first.
function SceneEd::saveSingleData( %this,%db )
{
   // Clear the first responder to capture any inspector changes
   if (DbEd_DatablockNameEdit.getText() !$= %db.getFileName())
   {
      %oldFileName = %db.getFileName();

      if( %oldFileName !$= "" )
         %this.DBPM.removeObjectFromFile( %db, %oldFileName );

      // Save to new file.
      %this.DBPM.setDirty( %db, DbEd_DatablockNameEdit.getText() );
   }

   if( %this.DBPM.isDirty( %db ) )
   {
      %this.DBPM.saveDirtyObject( %db );
   }

   %this.flagDatablockAsDirty( %db, %this.DBPM.isDirty( %db ) );
}
//------------------------------------------------------------------------------
//==============================================================================
// Update the tree caption (" *" suffix) and the save-button visibility to
// reflect a datablock's dirty state. %dirty defaults to true when omitted.
function SceneEd::setDatablockDirty(%this, %datablock, %dirty )
{
   if (%dirty $= "")
      %dirty = true;

   %tree = SceneDatablockTree;
   %id = %tree.findItemByValue( %datablock.getId() );

   if( %id != 0 ){
      if( %dirty )
      {
         SceneDatablockTree.editItem( %id, %datablock.getName() @ " *", %datablock.getId() );
      }
      else
      {
         SceneDatablockTree.editItem( %id, %datablock.getName(), %datablock.getId() );
      }
   }

   DbEd_ActiveDbIcons-->saveButton.visible = %dirty;

   //Hack: For unknown reason the icon visibility need to be set twice using a schedule for the second
   if (%dirty)
      DbEd_ActiveIconSave.schedule(100,"setVisible","1");
}
//------------------------------------------------------------------------------
//==============================================================================
//- Return true if there is any datablock with unsaved changes.
function SceneEd::isDirty( %this )
{
   return %this.DBPM.hasDirty();
}
//------------------------------------------------------------------------------
//==============================================================================
//- Return true if any of the currently selected datablocks has unsaved changes.
// Return true if any of the currently selected datablocks has unsaved changes.
function SceneEd::selectedDatablockIsDirty( %this )
{
   %tree = SceneDatablockTree;
   %selected = %tree.getSelectedItemList();

   foreach$( %id in %selected )
   {
      %db = %tree.getItemValue( %id );

      if( %this.DBPM.isDirty( %db ) )
         return true;
   }

   return false;
}
//------------------------------------------------------------------------------
//==============================================================================
// Re-sync the tree captions and inspector title with the persistence
// manager's dirty state for every selected datablock.
function SceneEd::syncDirtyState( %this )
{
   %tree = SceneDatablockTree;
   %selected = %tree.getSelectedItemList();
   %haveDirty = false;

   foreach$( %id in %selected )
   {
      %db = %tree.getItemValue( %id );

      if( %this.DBPM.isDirty( %db ) )
      {
         %this.flagDatablockAsDirty( %db, true );
         %haveDirty = true;
      }
      else
         %this.flagDatablockAsDirty( %db, false );
   }

   %this.flagInspectorAsDirty( %haveDirty );
}
//------------------------------------------------------------------------------
//==============================================================================
//- Tag the inspector window title and toggle the reload button to show
//- whether the inspected datablock has unsaved changes.
function SceneEd::flagInspectorAsDirty( %this, %dirty )
{
   if( %dirty )
   {
      DatablockEditorInspectorWindow.text = "Datablock *";
      show(DbEd_ReloadDataButton);
   }
   else
   {
      DatablockEditorInspectorWindow.text = "Datablock";
      hide(DbEd_ReloadDataButton);
   }
}
//------------------------------------------------------------------------------
//==============================================================================
// Mark/unmark one datablock as dirty: sync the persistence manager, the
// tree caption, and the inspector dirty indicator.
function SceneEd::flagDatablockAsDirty(%this, %datablock, %dirty )
{
   if( !isObject(%datablock))
      return;

   // Tag the item caption and sync the persistence manager.
   if( %dirty )
   {
      %this.DBPM.setDirty( %datablock );
   }
   else
   {
      %this.DBPM.removeDirty( %datablock );
   }

   %this.setDatablockDirty(%datablock, %dirty);

   // Sync the inspector dirty state.
   %this.flagInspectorAsDirty( %this.DBPM.hasDirty() );
}
//------------------------------------------------------------------------------
//==============================================================================
// Open a file dialog to pick a new .cs file for the selected datablock;
// saveNewFileFinish is invoked with the chosen path.
function SceneEd::showSaveNewFileDialog(%this)
{
   %currentFile = %this.getSelectedDatablock().getFilename();
   getSaveFilename( "TorqueScript Files|*.cs|All Files|*.*", %this @ ".saveNewFileFinish", %currentFile, false );
}
//------------------------------------------------------------------------------
//==============================================================================
// Move every selected datablock to %newFileName and save it there.
function SceneEd::saveNewFileFinish( %this, %newFileName )
{
   // Clear the first responder to capture any inspector changes
   %ctrl = canvas.getFirstResponder();
   %newFileName = makeRelativePath(%newFileName);

   if( isObject(%ctrl) )
      %ctrl.clearFirstResponder();

   %tree = SceneDatablockTree;
   %selected = %tree.getSelectedItemList();

   foreach$( %id in %selected )
   {
      %db = %tree.getItemValue( %id );
      // BUG FIX: %db was immediately overwritten here with
      // %this.getSelectedDatablock(), so every iteration re-saved the same
      // datablock instead of each selected one. Use the tree item's value.

      // Remove from current file.
      %oldFileName = %db.getFileName();

      if( %oldFileName !$= "" )
         %this.DBPM.removeObjectFromFile( %db, %oldFileName );

      // Save to new file.
      %this.DBPM.setDirty( %db, %newFileName );

      if( %this.DBPM.saveDirtyObject( %db ) )
      {
         // Clear dirty state.
         %this.flagDatablockAsDirty( %db, false );
      }
   }

   DatablockEditorInspectorWindow-->DatablockFile.setText( %newFileName );
}
//------------------------------------------------------------------------------
// ***********************************************************************
// Copyright (c) 2012-2014 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************

#if PARALLEL
//#define NO_PARALLEL_CASES
using System;
using System.Collections.Generic;
using System.Threading;

namespace NUnit.Framework.Internal.Execution
{
    /// <summary>
    /// ParallelWorkItemDispatcher handles execution of work items by
    /// queuing them for worker threads to process.
    /// </summary>
    public class ParallelWorkItemDispatcher : IWorkItemDispatcher
    {
        private static readonly Logger log = InternalTrace.GetLogger("Dispatcher");

        private readonly int _levelOfParallelism;
        private int _itemsDispatched;

        // WorkShifts - Dispatcher processes tests in three non-overlapping shifts.
        // See comment in Workshift.cs for a more detailed explanation.
        private readonly WorkShift _parallelShift = new WorkShift("Parallel");
        private readonly WorkShift _nonParallelShift = new WorkShift("NonParallel");
        private readonly WorkShift _nonParallelSTAShift = new WorkShift("NonParallelSTA");

        /// <summary>
        /// Enumerates all the shifts supported by the dispatcher
        /// </summary>
        public IEnumerable<WorkShift> Shifts { get; private set; }

        // Queues used by WorkShifts; created lazily so that workers are only
        // spun up for the queues actually used by this test run.
        private readonly Lazy<WorkItemQueue> _parallelQueue;
        private readonly Lazy<WorkItemQueue> _nonParallelQueue;
        private readonly Lazy<WorkItemQueue> _parallelSTAQueue;
        private readonly Lazy<WorkItemQueue> _nonParallelSTAQueue;

        #region Constructor

        /// <summary>
        /// Construct a ParallelWorkItemDispatcher
        /// </summary>
        /// <param name="levelOfParallelism">Number of workers to use</param>
        public ParallelWorkItemDispatcher(int levelOfParallelism)
        {
            _levelOfParallelism = levelOfParallelism;

            // Initialize WorkShifts
            Shifts = new WorkShift[]
            {
                _parallelShift,
                _nonParallelShift,
                _nonParallelSTAShift
            };

            foreach (var shift in Shifts)
                shift.EndOfShift += OnEndOfShift;

            // Set up queues for lazy initialization
            _parallelQueue = new Lazy<WorkItemQueue>(() =>
            {
                var parallelQueue = new WorkItemQueue("ParallelQueue");
                _parallelShift.AddQueue(parallelQueue);

                for (int i = 1; i <= _levelOfParallelism; i++)
                {
                    // FIX: was string.Format("Worker#" + i.ToString()) — a
                    // pre-concatenated value passed as a format string (no
                    // placeholders), so the Format call was useless; plain
                    // concatenation produces the identical name.
                    string name = "Worker#" + i;
                    _parallelShift.Assign(new TestWorker(parallelQueue, name, ApartmentState.MTA));
                }

                return parallelQueue;
            });

            _parallelSTAQueue = new Lazy<WorkItemQueue>(() =>
            {
                var parallelSTAQueue = new WorkItemQueue("ParallelSTAQueue");
                _parallelShift.AddQueue(parallelSTAQueue);
                _parallelShift.Assign(new TestWorker(parallelSTAQueue, "Worker#STA", ApartmentState.STA));

                return parallelSTAQueue;
            });

            _nonParallelQueue = new Lazy<WorkItemQueue>(() =>
            {
                var nonParallelQueue = new WorkItemQueue("NonParallelQueue");
                _nonParallelShift.AddQueue(nonParallelQueue);
                // NOTE(review): the worker name says "STA" but the apartment
                // is MTA — preserved as-is; names appear to be labels only.
                _nonParallelShift.Assign(new TestWorker(nonParallelQueue, "Worker#STA_NP", ApartmentState.MTA));

                return nonParallelQueue;
            });

            _nonParallelSTAQueue = new Lazy<WorkItemQueue>(() =>
            {
                var nonParallelSTAQueue = new WorkItemQueue("NonParallelSTAQueue");
                _nonParallelSTAShift.AddQueue(nonParallelSTAQueue);
                _nonParallelSTAShift.Assign(new TestWorker(nonParallelSTAQueue, "Worker#NP_STA", ApartmentState.STA));

                return nonParallelSTAQueue;
            });
        }

        #endregion

        #region IWorkItemDispatcher Members

        /// <summary>
        /// Start execution, setting the top level work,
        /// enqueuing it and starting a shift to execute it.
        /// </summary>
        public void Start(WorkItem topLevelWorkItem)
        {
            var strategy = topLevelWorkItem.ParallelScope.HasFlag(ParallelScope.None)
                ? ExecutionStrategy.NonParallel
                : ExecutionStrategy.Parallel;
            Dispatch(topLevelWorkItem, strategy);

            StartNextShift();
        }

        /// <summary>
        /// Dispatch a single work item for execution. The first
        /// work item dispatched is saved as the top-level
        /// work item and used when stopping the run.
        /// </summary>
        /// <param name="work">The item to dispatch</param>
        public void Dispatch(WorkItem work)
        {
            Dispatch(work, GetExecutionStrategy(work));
        }

        // Routes the work item to the queue matching its strategy and target
        // apartment, or runs it inline for the Direct strategy.
        private void Dispatch(WorkItem work, ExecutionStrategy strategy)
        {
            log.Debug("Using {0} strategy for {1}", strategy, work.Name);

            switch (strategy)
            {
                default:
                case ExecutionStrategy.Direct:
                    work.Execute();
                    break;
                case ExecutionStrategy.Parallel:
                    if (work.TargetApartment == ApartmentState.STA)
                        ParallelSTAQueue.Enqueue(work);
                    else
                        ParallelQueue.Enqueue(work);
                    break;
                case ExecutionStrategy.NonParallel:
                    if (work.TargetApartment == ApartmentState.STA)
                        NonParallelSTAQueue.Enqueue(work);
                    else
                        NonParallelQueue.Enqueue(work);
                    break;
            }

            Interlocked.Increment(ref _itemsDispatched);
        }

        /// <summary>
        /// Cancel the ongoing run completely.
        /// If no run is in process, the call has no effect.
        /// </summary>
        public void CancelRun(bool force)
        {
            foreach (var shift in Shifts)
                shift.Cancel(force);
        }

        #endregion

        #region Private Queue Properties

        // Queues are not actually created until the first time the property
        // is referenced by the Dispatch method adding a WorkItem to it.

        private WorkItemQueue ParallelQueue
        {
            get { return _parallelQueue.Value; }
        }

        private WorkItemQueue ParallelSTAQueue
        {
            get { return _parallelSTAQueue.Value; }
        }

        private WorkItemQueue NonParallelQueue
        {
            get { return _nonParallelQueue.Value; }
        }

        private WorkItemQueue NonParallelSTAQueue
        {
            get { return _nonParallelSTAQueue.Value; }
        }

        #endregion

        #region Helper Methods

        // When a shift ends, start the next one that has work; if none does,
        // shut all shifts down.
        private void OnEndOfShift(object sender, EventArgs ea)
        {
            if (!StartNextShift())
            {
                foreach (var shift in Shifts)
                    shift.ShutDown();
            }
        }

        // Starts the first shift that has queued work; returns false when
        // no shift has anything to do.
        private bool StartNextShift()
        {
            foreach (var shift in Shifts)
            {
                if (shift.HasWork)
                {
                    shift.Start();
                    return true;
                }
            }

            return false;
        }

        private enum ExecutionStrategy
        {
            Direct,
            Parallel,
            NonParallel,
        }

        // Decides how a work item should be executed based on its fixture,
        // context and parallel scope.
        private static ExecutionStrategy GetExecutionStrategy(WorkItem work)
        {
            // If there is no fixture and so nothing to do but dispatch
            // grandchildren we run directly. This saves time that would
            // otherwise be spent enqueuing and dequeing items.
            // TODO: It would be even better if we could avoid creating
            // these "do-nothing" work items in the first place.
            if (work.Test.TypeInfo == null)
                return ExecutionStrategy.Direct;

            // If the context is single-threaded we are required to run
            // the tests one by one on the same thread as the fixture.
            if (work.Context.IsSingleThreaded)
                return ExecutionStrategy.Direct;

#if NO_PARALLEL_CASES
            // For now, if this represents a test case, run directly.
            // This avoids issues caused by tests that access the fixture
            // state and allows handling ApartmentState preferences set on
            // the fixture more easily.
            if (work is SimpleWorkItem)
                return ExecutionStrategy.Direct;
#endif

            if (work.ParallelScope.HasFlag(ParallelScope.Self) ||
                work.Context.ParallelScope.HasFlag(ParallelScope.Children) ||
                work.Test is TestFixture && work.Context.ParallelScope.HasFlag(ParallelScope.Fixtures))
            {
                return ExecutionStrategy.Parallel;
            }
            else if (work.ParallelScope.HasFlag(ParallelScope.None))
            {
                return ExecutionStrategy.NonParallel;
            }
            else
            {
                return ExecutionStrategy.Direct;
            }
        }

        #endregion
    }

#if NET_2_0 || NET_3_5
    static class ParallelScopeHelper
    {
        public static bool HasFlag(this ParallelScope scope, ParallelScope value)
        {
            return (scope & value) != 0;
        }
    }
#endif
}
#endif
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using System.Text;
using Microsoft.AspNetCore.Cryptography.KeyDerivation.PBKDF2;
using Microsoft.AspNetCore.DataProtection.Test.Shared;
using Microsoft.AspNetCore.Testing;
using Xunit;

namespace Microsoft.AspNetCore.Cryptography.KeyDerivation
{
    // Exercises each IPbkdf2Provider implementation against fixed PBKDF2
    // test vectors, comparing derived keys as base64 strings.
    public class Pbkdf2Tests
    {
        // The 'numBytesRequested' parameters below are chosen to exercise code paths where
        // this value straddles the digest length of the PRF. We only use 5 iterations so
        // that our unit tests are fast.

        // This provider is only available in .NET Core because .NET Standard only supports HMACSHA1
        [Theory]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 - 1, "efmxNcKD/U1urTEDGvsThlPnHA==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 + 0, "efmxNcKD/U1urTEDGvsThlPnHDI=")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 + 1, "efmxNcKD/U1urTEDGvsThlPnHDLk")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 - 1, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRA==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 + 0, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRLo=")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 + 1, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRLpk")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 - 1, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm9")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 + 0, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm90Q==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 + 1, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm90Wk=")]
        public void RunTest_Normal_NetCore(string password, KeyDerivationPrf prf, int iterationCount, int numBytesRequested, string expectedValueAsBase64)
        {
            // Arrange
            // Deterministic 256-byte salt: 0x00, 0x01, ..., 0xFF.
            byte[] salt = new byte[256];
            for (int i = 0; i < salt.Length; i++)
            {
                salt[i] = (byte)i;
            }

            // Act & assert
#if NETFRAMEWORK
            TestProvider<ManagedPbkdf2Provider>(password, salt, prf, iterationCount, numBytesRequested, expectedValueAsBase64);
#elif NETCOREAPP
            TestProvider<NetCorePbkdf2Provider>(password, salt, prf, iterationCount, numBytesRequested, expectedValueAsBase64);
#else
#error Update target frameworks
#endif
        }

        [Fact]
        public void RunTest_WithLongPassword_NetCore_FallbackToManaged()
        {
            // salt is less than 8 bytes (forces the managed fallback path)
            byte[] salt = Encoding.UTF8.GetBytes("salt");
            const string expectedDerivedKeyBase64 = "Sc+V/c3fiZq5Z5qH3iavAiojTsW97FAp2eBNmCQAwCNzA8hfhFFYyQLIMK65qPnBFHOHXQPwAxNQNhaEAH9hzfiaNBSRJpF9V4rpl02d5ZpI6cZbsQFF7TJW7XJzQVpYoPDgJlg0xVmYLhn1E9qMtUVUuXsBjOOdd7K1M+ZI00c=";

#if NETFRAMEWORK
            RunTest_WithLongPassword_Impl<ManagedPbkdf2Provider>(salt, expectedDerivedKeyBase64);
#elif NETCOREAPP
            RunTest_WithLongPassword_Impl<NetCorePbkdf2Provider>(salt, expectedDerivedKeyBase64);
#else
#error Update target frameworks
#endif
        }

        [Fact]
        public void RunTest_WithLongPassword_NetCore()
        {
            // salt longer than 8 bytes
            var salt = Encoding.UTF8.GetBytes("abcdefghijkl");

#if NETFRAMEWORK
            RunTest_WithLongPassword_Impl<ManagedPbkdf2Provider>(salt, "NGJtFzYUaaSxu+3ZsMeZO5d/qPJDUYW4caLkFlaY0cLSYdh1PN4+nHUVp4pUUubJWu3UeXNMnHKNDfnn8GMfnDVrAGTv1lldszsvUJ0JQ6p4+daQEYBc//Tj/ejuB3luwW0IinyE7U/ViOQKbfi5pCZFMQ0FFx9I+eXRlyT+I74=");
#elif NETCOREAPP
            RunTest_WithLongPassword_Impl<NetCorePbkdf2Provider>(salt, "NGJtFzYUaaSxu+3ZsMeZO5d/qPJDUYW4caLkFlaY0cLSYdh1PN4+nHUVp4pUUubJWu3UeXNMnHKNDfnn8GMfnDVrAGTv1lldszsvUJ0JQ6p4+daQEYBc//Tj/ejuB3luwW0IinyE7U/ViOQKbfi5pCZFMQ0FFx9I+eXRlyT+I74=");
#else
#error Update target frameworks
#endif
        }

        // The 'numBytesRequested' parameters below are chosen to exercise code paths where
        // this value straddles the digest length of the PRF. We only use 5 iterations so
        // that our unit tests are fast.
        [Theory]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 - 1, "efmxNcKD/U1urTEDGvsThlPnHA==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 + 0, "efmxNcKD/U1urTEDGvsThlPnHDI=")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 + 1, "efmxNcKD/U1urTEDGvsThlPnHDLk")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 - 1, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRA==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 + 0, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRLo=")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 + 1, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRLpk")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 - 1, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm9")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 + 0, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm90Q==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 + 1, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm90Wk=")]
        public void RunTest_Normal_Managed(string password, KeyDerivationPrf prf, int iterationCount, int numBytesRequested, string expectedValueAsBase64)
        {
            // Arrange
            byte[] salt = new byte[256];
            for (int i = 0; i < salt.Length; i++)
            {
                salt[i] = (byte)i;
            }

            // Act & assert
            TestProvider<ManagedPbkdf2Provider>(password, salt, prf, iterationCount, numBytesRequested, expectedValueAsBase64);
        }

        // The 'numBytesRequested' parameters below are chosen to exercise code paths where
        // this value straddles the digest length of the PRF. We only use 5 iterations so
        // that our unit tests are fast.
        [ConditionalTheory]
        [ConditionalRunTestOnlyOnWindows]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 - 1, "efmxNcKD/U1urTEDGvsThlPnHA==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 + 0, "efmxNcKD/U1urTEDGvsThlPnHDI=")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 + 1, "efmxNcKD/U1urTEDGvsThlPnHDLk")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 - 1, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRA==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 + 0, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRLo=")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 + 1, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRLpk")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 - 1, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm9")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 + 0, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm90Q==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 + 1, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm90Wk=")]
        public void RunTest_Normal_Win7(string password, KeyDerivationPrf prf, int iterationCount, int numBytesRequested, string expectedValueAsBase64)
        {
            // Arrange
            byte[] salt = new byte[256];
            for (int i = 0; i < salt.Length; i++)
            {
                salt[i] = (byte)i;
            }

            // Act & assert
            TestProvider<Win7Pbkdf2Provider>(password, salt, prf, iterationCount, numBytesRequested, expectedValueAsBase64);
        }

        // The 'numBytesRequested' parameters below are chosen to exercise code paths where
        // this value straddles the digest length of the PRF. We only use 5 iterations so
        // that our unit tests are fast.
        [ConditionalTheory]
        [ConditionalRunTestOnlyOnWindows8OrLater]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 - 1, "efmxNcKD/U1urTEDGvsThlPnHA==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 + 0, "efmxNcKD/U1urTEDGvsThlPnHDI=")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA1, 5, 160 / 8 + 1, "efmxNcKD/U1urTEDGvsThlPnHDLk")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 - 1, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRA==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 + 0, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRLo=")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA256, 5, 256 / 8 + 1, "JRNz8bPKS02EG1vf7eWjA64IeeI+TI8gBEwb1oVvRLpk")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 - 1, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm9")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 + 0, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm90Q==")]
        [InlineData("my-password", KeyDerivationPrf.HMACSHA512, 5, 512 / 8 + 1, "ZTallQJrFn0279xIzaiA1XqatVTGei+ZjKngA7bIMtKMDUw6YJeGUQpFG8iGTgN+ri3LNDktNbzwfcSyZmm90Wk=")]
        public void RunTest_Normal_Win8(string password, KeyDerivationPrf prf, int iterationCount, int numBytesRequested, string expectedValueAsBase64)
        {
            // Arrange
            byte[] salt = new byte[256];
            for (int i = 0; i < salt.Length; i++)
            {
                salt[i] = (byte)i;
            }

            // Act & assert
            TestProvider<Win8Pbkdf2Provider>(password, salt, prf, iterationCount, numBytesRequested, expectedValueAsBase64);
        }

        [Fact]
        public void RunTest_WithLongPassword_Managed()
        {
            RunTest_WithLongPassword_Impl<ManagedPbkdf2Provider>();
        }

        [ConditionalFact]
        [ConditionalRunTestOnlyOnWindows]
        public void RunTest_WithLongPassword_Win7()
        {
            RunTest_WithLongPassword_Impl<Win7Pbkdf2Provider>();
        }

        [ConditionalFact]
        [ConditionalRunTestOnlyOnWindows8OrLater]
        public void RunTest_WithLongPassword_Win8()
        {
            RunTest_WithLongPassword_Impl<Win8Pbkdf2Provider>();
        }

        // Convenience overload using the fixed short salt and its known-good
        // derived key.
        private static void RunTest_WithLongPassword_Impl<TProvider>()
            where TProvider : IPbkdf2Provider, new()
        {
            byte[] salt = Encoding.UTF8.GetBytes("salt");
            const string expectedDerivedKeyBase64 = "Sc+V/c3fiZq5Z5qH3iavAiojTsW97FAp2eBNmCQAwCNzA8hfhFFYyQLIMK65qPnBFHOHXQPwAxNQNhaEAH9hzfiaNBSRJpF9V4rpl02d5ZpI6cZbsQFF7TJW7XJzQVpYoPDgJlg0xVmYLhn1E9qMtUVUuXsBjOOdd7K1M+ZI00c=";
            RunTest_WithLongPassword_Impl<TProvider>(salt, expectedDerivedKeyBase64);
        }

        // Derives a key from a very long (50,000 char) password so providers
        // exercise their long-input handling.
        private static void RunTest_WithLongPassword_Impl<TProvider>(byte[] salt, string expectedDerivedKeyBase64)
            where TProvider : IPbkdf2Provider, new()
        {
            // Arrange
            string password = new String('x', 50000); // 50,000 char password
            const KeyDerivationPrf prf = KeyDerivationPrf.HMACSHA256;
            const int iterationCount = 5;
            const int numBytesRequested = 128;

            // Act & assert
            TestProvider<TProvider>(password, salt, prf, iterationCount, numBytesRequested, expectedDerivedKeyBase64);
        }

        // Shared assertion helper: derive a key with the given provider and
        // compare length and base64-encoded value against expectations.
        private static void TestProvider<TProvider>(string password, byte[] salt, KeyDerivationPrf prf, int iterationCount, int numBytesRequested, string expectedDerivedKeyAsBase64)
            where TProvider : IPbkdf2Provider, new()
        {
            byte[] derivedKey = new TProvider().DeriveKey(password, salt, prf, iterationCount, numBytesRequested);
            Assert.Equal(numBytesRequested, derivedKey.Length);
            Assert.Equal(expectedDerivedKeyAsBase64, Convert.ToBase64String(derivedKey));
        }
    }
}
// ----------------------------------------------------------------------------
// <copyright file="PhotonView.cs" company="Exit Games GmbH">
//   PhotonNetwork Framework for Unity - Copyright (C) 2011 Exit Games GmbH
// </copyright>
// <summary>
//
// </summary>
// <author>developer@exitgames.com</author>
// ----------------------------------------------------------------------------

using System;
using UnityEngine;
using System.Reflection;
using System.Collections.Generic;
using ExitGames.Client.Photon;

#if UNITY_EDITOR
using UnityEditor;
#endif

// NOTE: member order of the following enums must not change — the values are
// serialized by Unity on PhotonView components.

/// <summary>Options for how a PhotonView synchronizes its observed component(s).</summary>
public enum ViewSynchronization { Off, ReliableDeltaCompressed, Unreliable, UnreliableOnChange }

/// <summary>Which parts of an observed Transform get written/read in OnSerialize.</summary>
public enum OnSerializeTransform { OnlyPosition, OnlyRotation, OnlyScale, PositionAndRotation, All }

/// <summary>Which parts of an observed Rigidbody/Rigidbody2D get written/read in OnSerialize.</summary>
public enum OnSerializeRigidBody { OnlyVelocity, OnlyAngularVelocity, All }

/// <summary>
/// Options to define how Ownership Transfer is handled per PhotonView.
/// </summary>
/// <remarks>
/// This setting affects how RequestOwnership and TransferOwnership work at runtime.
/// </remarks>
public enum OwnershipOption
{
    /// <summary>
    /// Ownership is fixed. Instantiated objects stick with their creator, scene objects always belong to the Master Client.
    /// </summary>
    Fixed,

    /// <summary>
    /// Ownership can be taken away from the current owner who can't object.
    /// </summary>
    Takeover,

    /// <summary>
    /// Ownership can be requested with PhotonView.RequestOwnership but the current owner has to agree to give up ownership.
    /// </summary>
    /// <remarks>The current owner has to implement IPunCallbacks.OnOwnershipRequest to react to the ownership request.</remarks>
    Request
}

/// <summary>
/// PUN's NetworkView replacement class for networking. Use it like a NetworkView.
/// </summary>
/// \ingroup publicApi
[AddComponentMenu("Photon Networking/Photon View &v")]
public class PhotonView : Photon.MonoBehaviour
{
#if UNITY_EDITOR
    [ContextMenu("Open PUN Wizard")]
    void OpenPunWizard()
    {
        EditorApplication.ExecuteMenuItem("Window/Photon Unity Networking");
    }
#endif

    // Actor number of the owning player; 0 means a scene object (see isSceneView remarks).
    public int ownerId;

    public int group = 0;

    protected internal bool mixedModeIsReliable = false;

    // NOTE: this is now an integer because unity won't serialize short (needed for instantiation). we SEND only a short though!
    // NOTE: prefabs have a prefixBackup of -1. this is replaced with any currentLevelPrefix that's used at runtime. instantiated GOs get their prefix set pre-instantiation (so those are not -1 anymore)
    public int prefix
    {
        get
        {
            // Lazily pick up the current level prefix the first time a -1 (prefab) value is read.
            if (this.prefixBackup == -1 && PhotonNetwork.networkingPeer != null)
            {
                this.prefixBackup = PhotonNetwork.networkingPeer.currentLevelPrefix;
            }

            return this.prefixBackup;
        }
        set
        {
            this.prefixBackup = value;
        }
    }

    // this field is serialized by unity. that means it is copied when instantiating a persistent obj into the scene
    public int prefixBackup = -1;

    /// <summary>
    /// This is the instantiationData that was passed when calling PhotonNetwork.Instantiate* (if that was used to spawn this prefab)
    /// </summary>
    public object[] instantiationData
    {
        get
        {
            if (!this.didAwake)
            {
                // even though viewID and instantiationID are setup before the GO goes live, this data can't be set. as workaround: fetch it if needed
                this.instantiationDataField = PhotonNetwork.networkingPeer.FetchInstantiationData(this.instantiationId);
            }

            return this.instantiationDataField;
        }
        set
        {
            this.instantiationDataField = value;
        }
    }

    private object[] instantiationDataField;

    /// <summary>
    /// For internal use only, don't use
    /// </summary>
    protected internal object[] lastOnSerializeDataSent = null;

    /// <summary>
    /// For internal use only, don't use
    /// </summary>
    protected internal object[] lastOnSerializeDataReceived = null;

    // Single observed component; ObservedComponents below is the list-based variant.
    public Component observed;

    public ViewSynchronization synchronization;

    public OnSerializeTransform onSerializeTransformOption = OnSerializeTransform.PositionAndRotation;

    public OnSerializeRigidBody onSerializeRigidBodyOption = OnSerializeRigidBody.All;

    /// <summary>Defines if ownership of this PhotonView is fixed, can be requested or simply taken.</summary>
    /// <remarks>
    /// Note that you can't edit this value at runtime.
    /// The options are described in enum OwnershipOption.
    /// The current owner has to implement IPunCallbacks.OnOwnershipRequest to react to the ownership request.
    /// </remarks>
    public OwnershipOption ownershipTransfer = OwnershipOption.Fixed;

    public List<Component> ObservedComponents;

    // Per-component cache of the reflected OnPhotonSerializeView method (null entry =
    // the component has no such method; see ExecuteComponentOnSerialize).
    Dictionary<Component, MethodInfo> m_OnSerializeMethodInfos = new Dictionary<Component, MethodInfo>();

    //These fields are only used in the CustomEditor for this script and would trigger a
    //"this variable is never used" warning, which I am suppressing here
#pragma warning disable 0414
    [SerializeField]
    bool ObservedComponentsFoldoutOpen = true;
#pragma warning restore 0414

    [SerializeField]
    private int viewIdField = 0;

    /// <summary>
    /// The ID of the PhotonView. Identifies it in a networked game (per room).
    /// </summary>
    /// <remarks>See: [Network Instantiation](@ref instantiateManual)</remarks>
    public int viewID
    {
        get { return this.viewIdField; }
        set
        {
            // if ID was 0 for an awakened PhotonView, the view should add itself into the networkingPeer.photonViewList after setup
            bool viewMustRegister = this.didAwake && this.viewIdField == 0;

            // TODO: decide if a viewID can be changed once it wasn't 0. most likely that is not a good idea
            // check if this view is in networkingPeer.photonViewList and UPDATE said list (so we don't keep the old viewID with a reference to this object)
            // PhotonNetwork.networkingPeer.RemovePhotonView(this, true);

            // The owner's actor number is encoded in the high part of the viewID.
            this.ownerId = value / PhotonNetwork.MAX_VIEW_IDS;

            this.viewIdField = value;

            if (viewMustRegister)
            {
                PhotonNetwork.networkingPeer.RegisterPhotonView(this);
            }
            //Debug.Log("Set viewID: " + value + " -> owner: " + this.ownerId + " subId: " + this.subId);
        }
    }

    public int instantiationId; // if the view was instantiated with a GO, this GO has a instantiationID (first view's viewID)

    /// <summary>True if the PhotonView was loaded with the scene (game object) or instantiated with InstantiateSceneObject.</summary>
    /// <remarks>
    /// Scene objects are not owned by a particular player but belong to the scene. Thus they don't get destroyed when their
    /// creator leaves the game and the current Master Client can control them (whoever that is).
    /// The ownerId is 0 (player IDs are 1 and up).
    /// </remarks>
    public bool isSceneView
    {
        get { return this.CreatorActorNr == 0; }
    }

    /// <summary>
    /// The owner of a PhotonView is the player who created the GameObject with that view. Objects in the scene don't have an owner.
    /// </summary>
    /// <remarks>
    /// The owner/controller of a PhotonView is also the client which sends position updates of the GameObject.
    ///
    /// Ownership can be transferred to another player with PhotonView.TransferOwnership or any player can request
    /// ownership by calling the PhotonView's RequestOwnership method.
    /// The current owner has to implement IPunCallbacks.OnOwnershipRequest to react to the ownership request.
    /// </remarks>
    public PhotonPlayer owner
    {
        get { return PhotonPlayer.Find(this.ownerId); }
    }

    public int OwnerActorNr
    {
        get { return this.ownerId; }
    }

    // True while the owning actor is still present in the room's actor list.
    public bool isOwnerActive
    {
        get { return this.ownerId != 0 && PhotonNetwork.networkingPeer.mActors.ContainsKey(this.ownerId); }
    }

    // Creator is derived from the (immutable) viewIdField, unlike ownerId which can change via TransferOwnership.
    public int CreatorActorNr
    {
        get { return this.viewIdField / PhotonNetwork.MAX_VIEW_IDS; }
    }

    /// <summary>
    /// True if the PhotonView is "mine" and can be controlled by this client.
    /// </summary>
    /// <remarks>
    /// PUN has an ownership concept that defines who can control and destroy each PhotonView.
    /// True in case the owner matches the local PhotonPlayer.
    /// True if this is a scene photonview on the Master client.
    /// </remarks>
    public bool isMine
    {
        get
        {
            return (this.ownerId == PhotonNetwork.player.ID) || (!this.isOwnerActive && PhotonNetwork.isMasterClient);
        }
    }

    protected internal bool didAwake;

    protected internal bool destroyedByPhotonNetworkOrQuit;

    /// <summary>Called by Unity on start of the application and does a setup the PhotonView.</summary>
    protected internal void Awake()
    {
        // registration might be too late when some script (on this GO) searches this view BUT GetPhotonView() can search ALL in that case
        PhotonNetwork.networkingPeer.RegisterPhotonView(this);

        this.instantiationDataField = PhotonNetwork.networkingPeer.FetchInstantiationData(this.instantiationId);

        this.didAwake = true;
    }

    /// <summary>
    /// Depending on the PhotonView's ownershipTransfer setting, any client can request to become owner of the PhotonView.
    /// </summary>
    /// <remarks>
    /// Requesting ownership can give you control over a PhotonView, if the ownershipTransfer setting allows that.
    /// The current owner might have to implement IPunCallbacks.OnOwnershipRequest to react to the ownership request.
    ///
    /// The owner/controller of a PhotonView is also the client which sends position updates of the GameObject.
    /// </remarks>
    public void RequestOwnership()
    {
        PhotonNetwork.networkingPeer.RequestOwnership(this.viewID, this.ownerId);
    }

    /// <summary>
    /// Transfers the ownership of this PhotonView (and GameObject) to another player.
    /// </summary>
    /// <remarks>
    /// The owner/controller of a PhotonView is also the client which sends position updates of the GameObject.
    /// </remarks>
    public void TransferOwnership(PhotonPlayer newOwner)
    {
        this.TransferOwnership(newOwner.ID);
    }

    /// <summary>
    /// Transfers the ownership of this PhotonView (and GameObject) to another player.
    /// </summary>
    /// <remarks>
    /// The owner/controller of a PhotonView is also the client which sends position updates of the GameObject.
    /// </remarks>
    public void TransferOwnership(int newOwnerId)
    {
        PhotonNetwork.networkingPeer.TransferOwnership(this.viewID, newOwnerId);
        this.ownerId = newOwnerId;  // immediately switch ownership locally, to avoid more updates sent from this client.
    }

    protected internal void OnApplicationQuit()
    {
        destroyedByPhotonNetworkOrQuit = true;  // on stop-playing its ok Destroy is being called directly (not by PN.Destroy())
    }

    protected internal void OnDestroy()
    {
        if (!this.destroyedByPhotonNetworkOrQuit)
        {
            PhotonNetwork.networkingPeer.LocalCleanPhotonView(this);
        }

        if (!this.destroyedByPhotonNetworkOrQuit && !Application.isLoadingLevel)
        {
            if (this.instantiationId > 0)
            {
                // if this viewID was not manually assigned (and we're not shutting down or loading a level), you should use PhotonNetwork.Destroy() to get rid of GOs with PhotonViews
                Debug.LogError("OnDestroy() seems to be called without PhotonNetwork.Destroy()?! GameObject: " + this.gameObject + " Application.isLoadingLevel: " + Application.isLoadingLevel);
            }
            else
            {
                // this seems to be a manually instantiated PV. if it's local, we could warn if the ID is not in the allocated-list
                if (this.viewID <= 0)
                {
                    Debug.LogWarning(string.Format("OnDestroy manually allocated PhotonView {0}. The viewID is 0. Was it ever (manually) set?", this));
                }
                else if (this.isMine && !PhotonNetwork.manuallyAllocatedViewIds.Contains(this.viewID))
                {
                    Debug.LogWarning(string.Format("OnDestroy manually allocated PhotonView {0}. The viewID is local (isMine) but not in manuallyAllocatedViewIds list. Use UnAllocateViewID() after you destroyed the PV.", this));
                }
            }
        }
    }

    private MethodInfo OnSerializeMethodInfo;

    private bool failedToFindOnSerialize;

    // Writes the state of the observed component(s) into the stream.
    // NOTE(review): assumes ObservedComponents is non-null — confirm it is always
    // initialized before SerializeView/DeserializeView are called.
    public void SerializeView( PhotonStream stream, PhotonMessageInfo info )
    {
        SerializeComponent( observed, stream, info );

        for( int i = 0; i < ObservedComponents.Count; ++i )
        {
            SerializeComponent( ObservedComponents[ i ], stream, info );
        }
    }

    // Reads incoming state for the observed component(s) in the same order it was written.
    public void DeserializeView( PhotonStream stream, PhotonMessageInfo info )
    {
        DeserializeComponent( observed, stream, info );

        for( int i = 0; i < ObservedComponents.Count; ++i )
        {
            DeserializeComponent( ObservedComponents[ i ], stream, info );
        }
    }

    internal protected void DeserializeComponent( Component component, PhotonStream stream, PhotonMessageInfo info )
    {
        if( component == null )
        {
            return;
        }

        // Use incoming data according to observed type
        if( component is MonoBehaviour )
        {
            ExecuteComponentOnSerialize( component, stream, info );
        }
        else if( component is Transform )
        {
            Transform trans = (Transform)component;

            // Read order must mirror the write order in SerializeComponent.
            switch( onSerializeTransformOption )
            {
            case OnSerializeTransform.All:
                trans.localPosition = (Vector3)stream.ReceiveNext();
                trans.localRotation = (Quaternion)stream.ReceiveNext();
                trans.localScale = (Vector3)stream.ReceiveNext();
                break;
            case OnSerializeTransform.OnlyPosition:
                trans.localPosition = (Vector3)stream.ReceiveNext();
                break;
            case OnSerializeTransform.OnlyRotation:
                trans.localRotation = (Quaternion)stream.ReceiveNext();
                break;
            case OnSerializeTransform.OnlyScale:
                trans.localScale = (Vector3)stream.ReceiveNext();
                break;
            case OnSerializeTransform.PositionAndRotation:
                trans.localPosition = (Vector3)stream.ReceiveNext();
                trans.localRotation = (Quaternion)stream.ReceiveNext();
                break;
            }
        }
        else if( component is Rigidbody )
        {
            Rigidbody rigidB = (Rigidbody)component;

            switch( onSerializeRigidBodyOption )
            {
            case OnSerializeRigidBody.All:
                rigidB.velocity = (Vector3)stream.ReceiveNext();
                rigidB.angularVelocity = (Vector3)stream.ReceiveNext();
                break;
            case OnSerializeRigidBody.OnlyAngularVelocity:
                rigidB.angularVelocity = (Vector3)stream.ReceiveNext();
                break;
            case OnSerializeRigidBody.OnlyVelocity:
                rigidB.velocity = (Vector3)stream.ReceiveNext();
                break;
            }
        }
        else if( component is Rigidbody2D )
        {
            Rigidbody2D rigidB = (Rigidbody2D)component;

            switch( onSerializeRigidBodyOption )
            {
            case OnSerializeRigidBody.All:
                rigidB.velocity = (Vector2)stream.ReceiveNext();
                rigidB.angularVelocity = (float)stream.ReceiveNext();
                break;
            case OnSerializeRigidBody.OnlyAngularVelocity:
                rigidB.angularVelocity = (float)stream.ReceiveNext();
                break;
            case OnSerializeRigidBody.OnlyVelocity:
                rigidB.velocity = (Vector2)stream.ReceiveNext();
                break;
            }
        }
        else
        {
            Debug.LogError( "Type of observed is unknown when receiving." );
        }
    }

    internal protected void SerializeComponent( Component component, PhotonStream stream, PhotonMessageInfo info )
    {
        if( component == null )
        {
            return;
        }

        if( component is MonoBehaviour )
        {
            ExecuteComponentOnSerialize( component, stream, info );
        }
        else if( component is Transform )
        {
            Transform trans = (Transform)component;

            switch( onSerializeTransformOption )
            {
            case OnSerializeTransform.All:
                stream.SendNext( trans.localPosition );
                stream.SendNext( trans.localRotation );
                stream.SendNext( trans.localScale );
                break;
            case OnSerializeTransform.OnlyPosition:
                stream.SendNext( trans.localPosition );
                break;
            case OnSerializeTransform.OnlyRotation:
                stream.SendNext( trans.localRotation );
                break;
            case OnSerializeTransform.OnlyScale:
                stream.SendNext( trans.localScale );
                break;
            case OnSerializeTransform.PositionAndRotation:
                stream.SendNext( trans.localPosition );
                stream.SendNext( trans.localRotation );
                break;
            }
        }
        else if( component is Rigidbody )
        {
            Rigidbody rigidB = (Rigidbody)component;

            switch( onSerializeRigidBodyOption )
            {
            case OnSerializeRigidBody.All:
                stream.SendNext( rigidB.velocity );
                stream.SendNext( rigidB.angularVelocity );
                break;
            case OnSerializeRigidBody.OnlyAngularVelocity:
                stream.SendNext( rigidB.angularVelocity );
                break;
            case OnSerializeRigidBody.OnlyVelocity:
                stream.SendNext( rigidB.velocity );
                break;
            }
        }
        else if( component is Rigidbody2D )
        {
            Rigidbody2D rigidB = (Rigidbody2D)component;

            switch( onSerializeRigidBodyOption )
            {
            case OnSerializeRigidBody.All:
                stream.SendNext( rigidB.velocity );
                stream.SendNext( rigidB.angularVelocity );
                break;
            case OnSerializeRigidBody.OnlyAngularVelocity:
                stream.SendNext( rigidB.angularVelocity );
                break;
            case OnSerializeRigidBody.OnlyVelocity:
                stream.SendNext( rigidB.velocity );
                break;
            }
        }
        else
        {
            Debug.LogError( "Observed type is not serializable: " + component.GetType() );
        }
    }

    // Invokes the component's OnPhotonSerializeView via a cached MethodInfo. A missing
    // method is logged once and cached as null so the error doesn't repeat per update.
    internal protected void ExecuteComponentOnSerialize( Component component, PhotonStream stream, PhotonMessageInfo info )
    {
        if( component != null )
        {
            if( m_OnSerializeMethodInfos.ContainsKey( component ) == false )
            {
                MethodInfo newMethod = null;
                bool foundMethod = NetworkingPeer.GetMethod( component as MonoBehaviour, PhotonNetworkingMessage.OnPhotonSerializeView.ToString(), out newMethod );

                if( foundMethod == false )
                {
                    Debug.LogError( "The observed monobehaviour (" + component.name + ") of this PhotonView does not implement OnPhotonSerializeView()!" );
                    newMethod = null;
                }

                m_OnSerializeMethodInfos.Add( component, newMethod );
            }

            if( m_OnSerializeMethodInfos[ component ] != null )
            {
                m_OnSerializeMethodInfos[ component ].Invoke( component, new object[] { stream, info } );
            }
        }
    }

    /// <summary>
    /// Call a RPC method of this GameObject on remote clients of this room (or on all, including this client).
    /// </summary>
    /// <remarks>
    /// [Remote Procedure Calls](@ref rpcManual) are an essential tool in making multiplayer games with PUN.
    /// It enables you to make every client in a room call a specific method.
    ///
    /// RPC calls can target "All" or the "Others".
    /// Usually, the target "All" gets executed locally immediately after sending the RPC.
    /// The "*ViaServer" options send the RPC to the server and execute it on this client when it's sent back.
    /// Of course, calls are affected by this client's lag and that of remote clients.
    ///
    /// Each call automatically is routed to the same PhotonView (and GameObject) that was used on the
    /// originating client.
    ///
    /// See: [Remote Procedure Calls](@ref rpcManual).
    /// </remarks>
    /// <param name="methodName">The name of a fitting method that has the RPC attribute.</param>
    /// <param name="target">The group of targets and the way the RPC gets sent.</param>
    /// <param name="parameters">The parameters that the RPC method has (must fit this call!).</param>
    public void RPC(string methodName, PhotonTargets target, params object[] parameters)
    {
        // After a Master Client switch, re-route MasterClient-targeted RPCs to the new master.
        if(PhotonNetwork.networkingPeer.hasSwitchedMC && target == PhotonTargets.MasterClient)
        {
            PhotonNetwork.RPC(this, methodName, PhotonNetwork.masterClient, parameters);
        }
        else
        {
            PhotonNetwork.RPC(this, methodName, target, parameters);
        }
    }

    /// <summary>
    /// Call a RPC method of this GameObject on remote clients of this room (or on all, including this client).
    /// </summary>
    /// <remarks>
    /// [Remote Procedure Calls](@ref rpcManual) are an essential tool in making multiplayer games with PUN.
    /// It enables you to make every client in a room call a specific method.
    ///
    /// This method allows you to make an RPC calls on a specific player's client.
    /// Of course, calls are affected by this client's lag and that of remote clients.
    ///
    /// Each call automatically is routed to the same PhotonView (and GameObject) that was used on the
    /// originating client.
    ///
    /// See: [Remote Procedure Calls](@ref rpcManual).
    /// </remarks>
    /// <param name="methodName">The name of a fitting method that has the RPC attribute.</param>
    /// <param name="targetPlayer">The group of targets and the way the RPC gets sent.</param>
    /// <param name="parameters">The parameters that the RPC method has (must fit this call!).</param>
    public void RPC(string methodName, PhotonPlayer targetPlayer, params object[] parameters)
    {
        PhotonNetwork.RPC(this, methodName, targetPlayer, parameters);
    }

    public static PhotonView Get(Component component)
    {
        return component.GetComponent<PhotonView>();
    }

    public static PhotonView Get(GameObject gameObj)
    {
        return gameObj.GetComponent<PhotonView>();
    }

    public static PhotonView Find(int viewID)
    {
        return PhotonNetwork.networkingPeer.GetPhotonView(viewID);
    }

    public override string ToString()
    {
        return string.Format("View ({3}){0} on {1} {2}", this.viewID, (this.gameObject != null) ? this.gameObject.name : "GO==null", (this.isSceneView) ? "(scene)" : string.Empty, this.prefix);
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace System.Xml.Serialization
{
    using System.Reflection;
    using System.Collections;
    using System.Xml.Schema;
    using System;
    using System.Text;
    using System.ComponentModel;
    using System.Xml;
    using System.CodeDom.Compiler;
    using System.Collections.Generic;

    // These classes represent a mapping between classes and a particular XML format.
    // There are two class of mapping information: accessors (such as elements and
    // attributes), and mappings (which specify the type of an accessor).

    // Base class for everything addressable in an XML document: holds the XML name,
    // namespace, schema form, and the TypeMapping describing the accessor's content.
    internal abstract class Accessor
    {
        private string _name;
        private object _defaultValue = null;
        private string _ns;
        private TypeMapping _mapping;
        private bool _any;
        private string _anyNs;
        private bool _topLevelInSchema;
        private XmlSchemaForm _form = XmlSchemaForm.None;

        internal Accessor() { }

        internal TypeMapping Mapping
        {
            get { return _mapping; }
            set { _mapping = value; }
        }

        internal object Default
        {
            get { return _defaultValue; }
            set { _defaultValue = value; }
        }

        // Never returns null: an unset name reads back as the empty string.
        internal virtual string Name
        {
            get { return _name == null ? string.Empty : _name; }
            set { _name = value; }
        }

        internal bool Any
        {
            get { return _any; }
            set { _any = value; }
        }

        internal string AnyNamespaces
        {
            get { return _anyNs; }
            set { _anyNs = value; }
        }

        internal string Namespace
        {
            get { return _ns; }
            set { _ns = value; }
        }

        internal XmlSchemaForm Form
        {
            get { return _form; }
            set { _form = value; }
        }

        internal bool IsTopLevelInSchema
        {
            get { return _topLevelInSchema; }
            set { _topLevelInSchema = value; }
        }

        // Encodes a possibly-prefixed name ("prefix:local") so both parts are valid
        // XML names. Throws when the colon is leading or trailing (empty prefix/local).
        internal static string EscapeQName(string name)
        {
            if (name == null || name.Length == 0) return name;
            int colon = name.LastIndexOf(':');
            if (colon < 0)
                return XmlConvert.EncodeLocalName(name);
            else
            {
                if (colon == 0 || colon == name.Length - 1)
                    throw new ArgumentException(SR.Format(SR.Xml_InvalidNameChars, name), nameof(name));
                return new XmlQualifiedName(XmlConvert.EncodeLocalName(name.Substring(colon + 1)), XmlConvert.EncodeLocalName(name.Substring(0, colon))).ToString();
            }
        }

        internal static string UnescapeName(string name)
        {
            return XmlConvert.DecodeName(name);
        }
    }

    // Accessor for an XML element.
    internal class ElementAccessor : Accessor
    {
        private bool _nullable;
        private bool _unbounded = false;

        internal bool IsSoap
        {
            get { return false; }
        }

        internal bool IsNullable
        {
            get { return _nullable; }
            set { _nullable = value; }
        }

        internal bool IsUnbounded
        {
            get { return _unbounded; }
            set { _unbounded = value; }
        }

        // Shallow copy of all accessor state except _unbounded, which stays false.
        internal ElementAccessor Clone()
        {
            ElementAccessor newAccessor = new ElementAccessor();
            newAccessor._nullable = _nullable;
            newAccessor.IsTopLevelInSchema = this.IsTopLevelInSchema;
            newAccessor.Form = this.Form;
            newAccessor.Name = this.Name;
            newAccessor.Default = this.Default;
            newAccessor.Namespace = this.Namespace;
            newAccessor.Mapping = this.Mapping;
            newAccessor.Any = this.Any;
            return newAccessor;
        }
    }

    // Accessor describing a choice-identifier member and its candidate member ids.
    internal class ChoiceIdentifierAccessor : Accessor
    {
        private string _memberName;
        private string[] _memberIds;
        private MemberInfo _memberInfo;

        internal string MemberName
        {
            get { return _memberName; }
            set { _memberName = value; }
        }

        internal string[] MemberIds
        {
            get { return _memberIds; }
            set { _memberIds = value; }
        }

        internal MemberInfo MemberInfo
        {
            get { return _memberInfo; }
            set { _memberInfo = value; }
        }
    }

    // Accessor for character (text) content.
    internal class TextAccessor : Accessor
    {
    }

    // Accessor for xmlns namespace declarations.
    internal class XmlnsAccessor : Accessor
    {
    }

    // Accessor for an XML attribute.
    internal class AttributeAccessor : Accessor
    {
        private bool _isSpecial;
        private bool _isList;

        internal bool IsSpecialXmlNamespace
        {
            get { return _isSpecial; }
        }

        internal bool IsList
        {
            get { return _isList; }
            set { _isList = value; }
        }

        // Detects attributes in the reserved "xml:" namespace (e.g. xml:lang) and
        // normalizes them: strips the prefix, sets the xml namespace and forces
        // Qualified form. Any other prefixed name is rejected.
        internal void CheckSpecial()
        {
            int colon = Name.LastIndexOf(':');
            if (colon >= 0)
            {
                if (!Name.StartsWith("xml:", StringComparison.Ordinal))
                {
                    throw new InvalidOperationException(SR.Format(SR.Xml_InvalidNameChars, Name));
                }
                Name = Name.Substring("xml:".Length);
                Namespace = XmlReservedNs.NsXml;
                _isSpecial = true;
            }
            else
            {
                if (Namespace == XmlReservedNs.NsXml)
                {
                    _isSpecial = true;
                }
                else
                {
                    _isSpecial = false;
                }
            }
            if (_isSpecial)
            {
                Form = XmlSchemaForm.Qualified;
            }
        }
    }

    internal abstract class Mapping
    {
        internal Mapping() { }

        protected Mapping(Mapping mapping) { }

        internal bool IsSoap
        {
            get { return false; }
        }
    }

    // Base class for mappings that describe a .NET type's XML representation.
    internal abstract class TypeMapping : Mapping
    {
        private TypeDesc _typeDesc;
        private string _typeNs;
        private string _typeName;
        private bool _includeInSchema = true;

        internal string Namespace
        {
            get { return _typeNs; }
            set { _typeNs = value; }
        }

        internal string TypeName
        {
            get { return _typeName; }
            set { _typeName = value; }
        }

        internal TypeDesc TypeDesc
        {
            get { return _typeDesc; }
            set { _typeDesc = value; }
        }

        internal bool IncludeInSchema
        {
            get { return _includeInSchema; }
            set { _includeInSchema = value; }
        }

        internal virtual bool IsList
        {
            get { return false; }
            set { }
        }

        // A mapping with no XML type name is an anonymous (unnamed) schema type.
        internal bool IsAnonymousType
        {
            get { return _typeName == null || _typeName.Length == 0; }
        }

        internal virtual string DefaultElementName
        {
            get { return IsAnonymousType ? XmlConvert.EncodeLocalName(_typeDesc.Name) : _typeName; }
        }
    }

    internal class PrimitiveMapping : TypeMapping
    {
        private bool _isList;

        internal override bool IsList
        {
            get { return _isList; }
            set { _isList = value; }
        }
    }

    // Mapping for Nullable<T>: delegates the default element name to the mapping of
    // the underlying type.
    internal class NullableMapping : TypeMapping
    {
        private TypeMapping _baseMapping;

        internal TypeMapping BaseMapping
        {
            get { return _baseMapping; }
            set { _baseMapping = value; }
        }

        internal override string DefaultElementName
        {
            get { return BaseMapping.DefaultElementName; }
        }
    }

    internal class ArrayMapping : TypeMapping
    {
        private ElementAccessor[] _elements;
        private ElementAccessor[] _sortedElements;
        private ArrayMapping _next;

        internal ElementAccessor[] Elements
        {
            get { return _elements; }
            // Invalidate the cached sorted copy whenever the element set changes.
            set { _elements = value; _sortedElements = null; }
        }

        // Elements ordered most-derived first; computed lazily and cached.
        internal ElementAccessor[] ElementsSortedByDerivation
        {
            get
            {
                if (_sortedElements != null)
                    return _sortedElements;
                if (_elements == null)
                    return null;
                _sortedElements = new ElementAccessor[_elements.Length];
                Array.Copy(_elements, 0, _sortedElements, 0, _elements.Length);
                AccessorMapping.SortMostToLeastDerived(_sortedElements);
                return _sortedElements;
            }
        }

        internal ArrayMapping Next
        {
            get { return _next; }
            set { _next = value; }
        }
    }

    internal class EnumMapping : PrimitiveMapping
    {
        private ConstantMapping[] _constants;
        private bool _isFlags;

        internal bool IsFlags
        {
            get { return _isFlags; }
            set { _isFlags = value; }
        }

        internal ConstantMapping[] Constants
        {
            get { return _constants; }
            set { _constants = value; }
        }
    }

    // Mapping for a single enum member: its XML name, CLR name and numeric value.
    internal class ConstantMapping : Mapping
    {
        private string _xmlName;
        private string _name;
        private long _value;

        internal string XmlName
        {
            get { return _xmlName == null ? string.Empty : _xmlName; }
            set { _xmlName = value; }
        }

        internal string Name
        {
            get { return _name == null ? string.Empty : _name; }
            set { _name = value; }
        }

        internal long Value
        {
            get { return _value; }
            set { _value = value; }
        }
    }

    // Mapping for a class/struct; tracks the base/derived mapping graph, member lists,
    // and the content model (simpleContent vs. element content, sequence-ness).
    internal class StructMapping : TypeMapping, INameScope
    {
        private MemberMapping[] _members;
        private StructMapping _baseMapping;
        private StructMapping _derivedMappings;
        private StructMapping _nextDerivedMapping;
        private MemberMapping _xmlnsMember = null;
        private bool _hasSimpleContent;
        private bool _isSequence;
        private NameTable _elements;
        private NameTable _attributes;

        internal StructMapping BaseMapping
        {
            get { return _baseMapping; }
            set
            {
                _baseMapping = value;
                // Link this mapping into the base's singly-linked derived-mappings list
                // (named types only).
                if (!IsAnonymousType && _baseMapping != null)
                {
                    _nextDerivedMapping = _baseMapping._derivedMappings;
                    _baseMapping._derivedMappings = this;
                }
                // Sequence-ness flows from base to derived; propagate down the chain.
                if (value._isSequence && !_isSequence)
                {
                    _isSequence = true;
                    if (_baseMapping.IsSequence)
                    {
                        for (StructMapping derived = _derivedMappings; derived != null; derived = derived.NextDerivedMapping)
                        {
                            derived.SetSequence();
                        }
                    }
                }
            }
        }

        internal StructMapping DerivedMappings
        {
            get { return _derivedMappings; }
        }

        internal bool IsFullyInitialized
        {
            get { return _baseMapping != null && Members != null; }
        }

        internal NameTable LocalElements
        {
            get
            {
                if (_elements == null)
                    _elements = new NameTable();
                return _elements;
            }
        }

        internal NameTable LocalAttributes
        {
            get
            {
                if (_attributes == null)
                    _attributes = new NameTable();
                return _attributes;
            }
        }

        // Element lookup that falls back to the base mapping's scope; writes always go
        // to the local scope.
        object INameScope.this[string name, string ns]
        {
            get
            {
                object named = LocalElements[name, ns];
                if (named != null)
                    return named;
                if (_baseMapping != null)
                    return ((INameScope)_baseMapping)[name, ns];
                return null;
            }
            set
            {
                LocalElements[name, ns] = value;
            }
        }

        internal StructMapping NextDerivedMapping
        {
            get { return _nextDerivedMapping; }
        }

        internal bool HasSimpleContent
        {
            get { return _hasSimpleContent; }
        }

        // True if this mapping or any base declares an xmlns member.
        internal bool HasXmlnsMember
        {
            get
            {
                StructMapping mapping = this;
                while (mapping != null)
                {
                    if (mapping.XmlnsMember != null)
                        return true;
                    mapping = mapping.BaseMapping;
                }
                return false;
            }
        }

        internal MemberMapping[] Members
        {
            get { return _members; }
            set { _members = value; }
        }

        internal MemberMapping XmlnsMember
        {
            get { return _xmlnsMember; }
            set { _xmlnsMember = value; }
        }

        // Searches base mappings first, then this type's members, for the mapping that
        // declares 'member'. Throws when a same-named member hides one of a different
        // type (XmlHiddenMember) or with an incompatible XML shape (XmlInvalidXmlOverride).
        internal MemberMapping FindDeclaringMapping(MemberMapping member, out StructMapping declaringMapping, string parent)
        {
            declaringMapping = null;
            if (BaseMapping != null)
            {
                MemberMapping baseMember = BaseMapping.FindDeclaringMapping(member, out declaringMapping, parent);
                if (baseMember != null) return baseMember;
            }
            if (_members == null) return null;

            for (int i = 0; i < _members.Length; i++)
            {
                if (_members[i].Name == member.Name)
                {
                    if (_members[i].TypeDesc != member.TypeDesc)
                        throw new InvalidOperationException(SR.Format(SR.XmlHiddenMember, parent, member.Name, member.TypeDesc.FullName, this.TypeName, _members[i].Name, _members[i].TypeDesc.FullName));
                    else if (!_members[i].Match(member))
                    {
                        throw new InvalidOperationException(SR.Format(SR.XmlInvalidXmlOverride, parent, member.Name, this.TypeName, _members[i].Name));
                    }
                    declaringMapping = this;
                    return _members[i];
                }
            }
            return null;
        }

        internal bool Declares(MemberMapping member, string parent)
        {
            StructMapping m;
            return (FindDeclaringMapping(member, out m, parent) != null);
        }

        // Decides whether this type maps to schema simpleContent (text-only, no child
        // elements) and validates that text content is legal given the base mapping.
        internal void SetContentModel(TextAccessor text, bool hasElements)
        {
            if (BaseMapping == null || BaseMapping.TypeDesc.IsRoot)
            {
                _hasSimpleContent = !hasElements && text != null && !text.Mapping.IsList;
            }
            else if (BaseMapping.HasSimpleContent)
            {
                if (text != null || hasElements)
                {
                    // we can only extend a simpleContent type with attributes
                    throw new InvalidOperationException(SR.Format(SR.XmlIllegalSimpleContentExtension, TypeDesc.FullName, BaseMapping.TypeDesc.FullName));
                }
                else
                {
                    _hasSimpleContent = true;
                }
            }
            else
            {
                _hasSimpleContent = false;
            }
            if (!_hasSimpleContent && text != null && !text.Mapping.TypeDesc.CanBeTextValue)
            {
                throw new InvalidOperationException(SR.Format(SR.XmlIllegalTypedTextAttribute, TypeDesc.FullName, text.Name, text.Mapping.TypeDesc.FullName));
            }
        }

        // True if this mapping or any base has a member that is an explicit sequence particle.
        internal bool HasExplicitSequence()
        {
            if (_members != null)
            {
                for (int i = 0; i < _members.Length; i++)
                {
                    if (_members[i].IsParticle && _members[i].IsSequence)
                    {
                        return true;
                    }
                }
            }
            return (_baseMapping != null && _baseMapping.HasExplicitSequence());
        }

        // Marks the topmost non-sequence ancestor (below the root) as a sequence, then
        // propagates the flag to all of its derived mappings.
        internal void SetSequence()
        {
            if (TypeDesc.IsRoot)
                return;

            StructMapping start = this;

            // find first mapping that does not have the sequence set
            while (start.BaseMapping != null && !start.BaseMapping.IsSequence && !start.BaseMapping.TypeDesc.IsRoot)
                start = start.BaseMapping;

            start.IsSequence = true;

            for (StructMapping derived = start.DerivedMappings; derived != null; derived = derived.NextDerivedMapping)
            {
                derived.SetSequence();
            }
        }

        // Root types never report as sequences even when the flag is set.
        internal bool IsSequence
        {
            get { return _isSequence && !TypeDesc.IsRoot; }
            set { _isSequence = value; }
        }
    }

    // Base class for mappings that aggregate the accessors (attribute, elements, text,
    // choice identifier, xmlns) describing a single member's XML shape.
    internal abstract class AccessorMapping : Mapping
    {
        private TypeDesc _typeDesc;
        private AttributeAccessor _attribute;
        private ElementAccessor[] _elements;
        private ElementAccessor[] _sortedElements;
        private TextAccessor _text;
        private ChoiceIdentifierAccessor _choiceIdentifier;
        private XmlnsAccessor _xmlns;
        private bool _ignore;

        internal AccessorMapping() { }

        // Copy constructor: shares accessor instances with the source mapping.
        protected AccessorMapping(AccessorMapping mapping) : base(mapping)
        {
            _typeDesc = mapping._typeDesc;
            _attribute = mapping._attribute;
            _elements = mapping._elements;
            _sortedElements = mapping._sortedElements;
            _text = mapping._text;
            _choiceIdentifier = mapping._choiceIdentifier;
            _xmlns = mapping._xmlns;
            _ignore = mapping._ignore;
        }

        internal bool IsAttribute
        {
            get { return _attribute != null; }
        }

        // Text-only members have a text accessor and no element accessors.
        internal bool IsText
        {
            get { return _text != null && (_elements == null || _elements.Length == 0); }
        }

        internal bool IsParticle
        {
            get { return (_elements != null && _elements.Length > 0); }
        }

        internal TypeDesc TypeDesc
        {
            get { return _typeDesc; }
            set { _typeDesc = value; }
        }

        internal AttributeAccessor Attribute
        {
            get { return _attribute; }
            set { _attribute = value; }
        }

        internal ElementAccessor[] Elements
        {
            get { return _elements; }
            // Invalidate the cached sorted copy whenever the element set changes.
            set { _elements = value; _sortedElements = null; }
        }

        internal static void SortMostToLeastDerived(ElementAccessor[] elements)
        {
            Array.Sort(elements, new AccessorComparer());
        }

        // Orders accessors by descending TypeDesc.Weight (most derived first).
        internal class AccessorComparer : IComparer
        {
            public int Compare(object o1, object o2)
            {
                if (o1 == o2)
                    return 0;
                Accessor a1 = (Accessor)o1;
                Accessor a2 = (Accessor)o2;
                int w1 = a1.Mapping.TypeDesc.Weight;
                int w2 = a2.Mapping.TypeDesc.Weight;
                if (w1 == w2)
                    return 0;
                if (w1 < w2)
                    return 1;
                return -1;
            }
        }

        // Elements ordered most-derived first; computed lazily and cached.
        internal ElementAccessor[] ElementsSortedByDerivation
        {
            get
            {
                if (_sortedElements != null)
                    return _sortedElements;
                if (_elements == null)
                    return null;
                _sortedElements = new ElementAccessor[_elements.Length];
                Array.Copy(_elements, 0, _sortedElements, 0, _elements.Length);
                SortMostToLeastDerived(_sortedElements);
                return _sortedElements;
            }
        }

        internal TextAccessor Text
        {
            get { return _text; }
            set { _text = value; }
        }

        internal ChoiceIdentifierAccessor ChoiceIdentifier
        {
            get { return _choiceIdentifier; }
            set { _choiceIdentifier = value; }
        }

        internal XmlnsAccessor Xmlns
        {
            get { return _xmlns; }
            set { _xmlns = value; }
        }

        internal bool Ignore
        {
            get { return _ignore; }
            set { _ignore = value; }
        }

        // The "primary" accessor, in priority order: xmlns, attribute, first element, text.
        internal Accessor Accessor
        {
            get
            {
                if (_xmlns != null) return _xmlns;
                if (_attribute != null) return _attribute;
                if (_elements != null && _elements.Length > 0) return _elements[0];
                return _text;
            }
        }

        // Positional comparison: both arrays must agree on name, namespace, form and
        // nullability at every index.
        internal static bool ElementsMatch(ElementAccessor[] a, ElementAccessor[] b)
        {
            if (a == null)
            {
                if (b == null)
                    return true;
                return false;
            }
            if (b == null)
                return false;
            if (a.Length != b.Length)
                return false;
            for (int i = 0; i < a.Length; i++)
            {
                if (a[i].Name != b[i].Name || a[i].Namespace != b[i].Namespace || a[i].Form != b[i].Form || a[i].IsNullable != b[i].IsNullable)
                    return false;
            }
            return true;
        }

        // True when 'mapping' has a compatible XML shape (used when validating that a
        // member override does not change the XML contract).
        internal bool Match(AccessorMapping mapping)
        {
            if (Elements != null && Elements.Length > 0)
            {
                if (!ElementsMatch(Elements, mapping.Elements))
                {
                    return false;
                }
                if (Text == null)
                {
                    return (mapping.Text == null);
                }
            }
            if (Attribute != null)
            {
                if (mapping.Attribute == null)
                    return false;
                return (Attribute.Name == mapping.Attribute.Name && Attribute.Namespace == mapping.Attribute.Namespace && Attribute.Form == mapping.Attribute.Form);
            }
            if (Text != null)
            {
                return (mapping.Text != null);
            }
            return (mapping.Accessor == null);
        }
    }

    // Orders members so that sequence members sort by SequenceId (non-sequence members
    // after them) and text content always sorts last.
    internal class MemberMappingComparer : IComparer<MemberMapping>
    {
        public int Compare(MemberMapping m1, MemberMapping m2)
        {
            bool m1Text = m1.IsText;
            if (m1Text)
            {
                if (m2.IsText)
                    return 0;
                return 1;
            }
            else if (m2.IsText)
                return -1;

            if (m1.SequenceId < 0 && m2.SequenceId < 0)
                return 0;
            if (m1.SequenceId < 0)
                return 1;
            if (m2.SequenceId < 0)
                return -1;
            if (m1.SequenceId < m2.SequenceId)
                return -1;
            if (m1.SequenceId > m2.SequenceId)
                return 1;
            return 0;
        }
    }

    // Mapping for a single member of a struct mapping. NOTE(review): this class
    // continues beyond this chunk.
    internal class MemberMapping : AccessorMapping
    {
        private string _name;
        private bool _checkShouldPersist;
        private SpecifiedAccessor _checkSpecified;
        private bool _isReturnValue;
        private bool _readOnly = false;
        private int _sequenceId = -1;
        private MemberInfo _memberInfo;
        private MemberInfo _checkSpecifiedMemberInfo;
        private MethodInfo _checkShouldPersistMethodInfo;

        internal MemberMapping() { }

        private MemberMapping(MemberMapping mapping) : base(mapping)
        {
            _name = mapping._name;
            _checkShouldPersist = mapping._checkShouldPersist;
            _checkSpecified = mapping._checkSpecified;
            _isReturnValue = mapping._isReturnValue;
            _readOnly = mapping._readOnly;
            _sequenceId = mapping._sequenceId;
            _memberInfo = mapping._memberInfo;
            _checkSpecifiedMemberInfo = mapping._checkSpecifiedMemberInfo;
            _checkShouldPersistMethodInfo = mapping._checkShouldPersistMethodInfo;
        }

        internal bool CheckShouldPersist
        {
            get { return _checkShouldPersist; }
            set { _checkShouldPersist = value; }
        }

        internal SpecifiedAccessor CheckSpecified
        {
            get { return _checkSpecified; }
            set { _checkSpecified = value; }
        }

        internal string Name
        {
            get { return _name == null ?
string.Empty : _name; } set { _name = value; } } internal MemberInfo MemberInfo { get { return _memberInfo; } set { _memberInfo = value; } } internal MemberInfo CheckSpecifiedMemberInfo { get { return _checkSpecifiedMemberInfo; } set { _checkSpecifiedMemberInfo = value; } } internal MethodInfo CheckShouldPersistMethodInfo { get { return _checkShouldPersistMethodInfo; } set { _checkShouldPersistMethodInfo = value; } } internal bool IsReturnValue { get { return _isReturnValue; } set { _isReturnValue = value; } } internal bool ReadOnly { get { return _readOnly; } set { _readOnly = value; } } internal bool IsSequence { get { return _sequenceId >= 0; } } internal int SequenceId { get { return _sequenceId; } set { _sequenceId = value; } } private string GetNullableType(TypeDesc td) { // SOAP encoded arrays not mapped to Nullable<T> since they always derive from soapenc:Array if (td.IsMappedType || (!td.IsValueType && (Elements[0].IsSoap || td.ArrayElementTypeDesc == null))) return td.FullName; if (td.ArrayElementTypeDesc != null) { return GetNullableType(td.ArrayElementTypeDesc) + "[]"; } return "System.Nullable`1[" + td.FullName + "]"; } internal MemberMapping Clone() { return new MemberMapping(this); } } internal class MembersMapping : TypeMapping { private MemberMapping[] _members; private bool _hasWrapperElement = true; private bool _writeAccessors = true; private MemberMapping _xmlnsMember = null; internal MemberMapping[] Members { get { return _members; } set { _members = value; } } internal MemberMapping XmlnsMember { get { return _xmlnsMember; } set { _xmlnsMember = value; } } internal bool HasWrapperElement { get { return _hasWrapperElement; } set { _hasWrapperElement = value; } } internal bool WriteAccessors { get { return _writeAccessors; } } } internal class SpecialMapping : TypeMapping { } internal class SerializableMapping : SpecialMapping { private Type _type; private bool _needSchema = true; // new implementation of the IXmlSerializable private MethodInfo 
_getSchemaMethod; private XmlQualifiedName _xsiType; private XmlSchemaSet _schemas; private bool _any; private SerializableMapping _derivedMappings; private SerializableMapping _nextDerivedMapping; internal SerializableMapping() { } internal SerializableMapping(MethodInfo getSchemaMethod, bool any, string ns) { _getSchemaMethod = getSchemaMethod; _any = any; this.Namespace = ns; _needSchema = getSchemaMethod != null; } internal bool IsAny { get { if (_any) return true; if (_getSchemaMethod == null) return false; if (_needSchema && typeof(XmlSchemaType).IsAssignableFrom(_getSchemaMethod.ReturnType)) return false; RetrieveSerializableSchema(); return _any; } } internal SerializableMapping DerivedMappings { get { return _derivedMappings; } } internal SerializableMapping NextDerivedMapping { get { return _nextDerivedMapping; } } internal Type Type { get { return _type; } set { _type = value; } } internal XmlQualifiedName XsiType { get { if (!_needSchema) return _xsiType; if (_getSchemaMethod == null) return null; if (typeof(XmlSchemaType).IsAssignableFrom(_getSchemaMethod.ReturnType)) return null; RetrieveSerializableSchema(); return _xsiType; } } private void RetrieveSerializableSchema() { if (_needSchema) { _needSchema = false; if (_getSchemaMethod != null) { // get the type info object typeInfo = _getSchemaMethod.Invoke(null, new object[] { _schemas }); _xsiType = XmlQualifiedName.Empty; if (typeInfo != null) { if (typeof(XmlSchemaType).IsAssignableFrom(_getSchemaMethod.ReturnType)) { throw Globals.NotSupported("No XmlSchemaType in SL"); } else if (typeof(XmlQualifiedName).IsAssignableFrom(_getSchemaMethod.ReturnType)) { _xsiType = (XmlQualifiedName)typeInfo; if (_xsiType.IsEmpty) { throw new InvalidOperationException(SR.Format(SR.XmlGetSchemaEmptyTypeName, _type.FullName, _getSchemaMethod.Name)); } } else { throw new InvalidOperationException(SR.Format(SR.XmlGetSchemaMethodReturnType, _type.Name, _getSchemaMethod.Name, typeof(XmlSchemaProviderAttribute).Name, 
typeof(XmlQualifiedName).FullName)); } } else { _any = true; } } } } } }
//
// Encog(tm) Core v3.2 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System;
using System.Collections;

namespace Encog.MathUtil.LIBSVM
{
    // This class was taken from the libsvm package. We have made some
    // modifications for use in Encog.
    //
    // http://www.csie.ntu.edu.tw/~cjlin/libsvm/
    //
    // The libsvm Copyright/license is listed here.
    //
    // Copyright (c) 2000-2010 Chih-Chung Chang and Chih-Jen Lin
    // All rights reserved.
    //
    // Redistribution and use in source and binary forms, with or without
    // modification, are permitted provided that the following conditions
    // are met:
    //
    // 1. Redistributions of source code must retain the above copyright
    // notice, this list of conditions and the following disclaimer.
    //
    // 2. Redistributions in binary form must reproduce the above copyright
    // notice, this list of conditions and the following disclaimer in the
    // documentation and/or other materials provided with the distribution.
    //
    // 3. Neither name of copyright holders nor the names of its contributors
    // may be used to endorse or promote products derived from this software
    // without specific prior written permission.
    //
    //
    // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
    // ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
    // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
    // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
    // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
    // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
    // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
    // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
    // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
    // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
    // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
    //
    // In order to convert some functionality to Visual C#, the Java Language Conversion Assistant
    // creates "support classes" that duplicate the original functionality.
    //
    // Support classes replicate the functionality of the original code, but in some cases they are
    // substantially different architecturally. Although every effort is made to preserve the
    // original architecture of the application in the converted project, the user should be aware that
    // the primary goal of these support classes is to replicate functionality, and that at times
    // the architecture of the resulting solution may differ somewhat.
    //

    /// <summary>
    /// Contains conversion support elements such as classes, interfaces and static methods.
    /// </summary>
    public class SupportClass
    {
        /// <summary>
        /// Provides access to a static System.Random class instance
        /// </summary>
        public static Random Random = new Random();

        /*******************************/

        #region Nested type: Tokenizer

        /// <summary>
        /// The class performs token processing in strings, mirroring the
        /// semantics of Java's StringTokenizer (runs of delimiters separate
        /// tokens; leading/trailing delimiters produce no empty tokens).
        /// </summary>
        public class Tokenizer : IEnumerator
        {
            /// Char representation of the String to tokenize.
            private readonly char[] chars;

            /// Include delimiters in the results.
            private readonly bool includeDelims;

            /// Position over the string.
            private long currentPos;

            // The tokenizer uses the default delimiter set: the space character,
            // the tab character, the newline character, the carriage-return
            // character and the form-feed character.
            private string delimiters = " \t\n\r\f";

            /// <summary>
            /// Initializes a new class instance with a specified string to process
            /// </summary>
            /// <param name="source">String to tokenize</param>
            public Tokenizer(String source)
            {
                chars = source.ToCharArray();
            }

            /// <summary>
            /// Initializes a new class instance with a specified string to process
            /// and the specified token delimiters to use
            /// </summary>
            /// <param name="source">String to tokenize</param>
            /// <param name="delimiters">String containing the delimiters</param>
            public Tokenizer(String source, String delimiters)
                : this(source)
            {
                this.delimiters = delimiters;
            }

            /// <summary>
            /// Initializes a new class instance with a specified string to process, the specified token
            /// delimiters to use, and whether the delimiters must be included in the results.
            /// </summary>
            /// <param name="source">String to tokenize</param>
            /// <param name="delimiters">String containing the delimiters</param>
            /// <param name="includeDelims">Determines if delimiters are included in the results.</param>
            public Tokenizer(String source, String delimiters, bool includeDelims)
                : this(source, delimiters)
            {
                this.includeDelims = includeDelims;
            }

            /// <summary>
            /// Remaining tokens count. Counts by consuming tokens until the end
            /// is signalled, then restores the current position.
            /// </summary>
            public int Count
            {
                get
                {
                    // keeping the current pos so counting is side-effect free
                    long pos = currentPos;
                    int i = 0;

                    try
                    {
                        while (true)
                        {
                            NextToken();
                            i++;
                        }
                    }
                    catch (ArgumentOutOfRangeException)
                    {
                        currentPos = pos;
                        return i;
                    }
                }
            }

            #region IEnumerator Members

            /// <summary>
            /// Performs the same action as NextToken.
            /// NOTE: unlike a conventional IEnumerator, reading Current advances
            /// the position (behavior inherited from the Java conversion).
            /// </summary>
            public Object Current
            {
                get { return NextToken(); }
            }

            /// <summary>
            /// Performs the same action as HasMoreTokens.
            /// </summary>
            /// <returns>True or false, depending if there are more tokens</returns>
            public bool MoveNext()
            {
                return HasMoreTokens();
            }

            /// <summary>
            /// Does nothing (the enumerator cannot be rewound).
            /// </summary>
            public void Reset()
            {
            }

            #endregion

            /// <summary>
            /// Returns the next token from the token list
            /// </summary>
            /// <returns>The string value of the token</returns>
            public String NextToken()
            {
                return NextToken(delimiters);
            }

            /// <summary>
            /// Returns the next token from the source string, using the provided
            /// token delimiters
            /// </summary>
            /// <param name="delimiters">String containing the delimiters to use</param>
            /// <returns>The string value of the token</returns>
            /// <exception cref="ArgumentOutOfRangeException">Thrown when no more tokens remain.</exception>
            public String NextToken(String delimiters)
            {
                // According to documentation, the usage of the received delimiters
                // should be temporary (only for this call). However, it seems it is
                // not true, so the following line is necessary.
                this.delimiters = delimiters;

                // at the end
                if (currentPos == chars.Length)
                    throw new ArgumentOutOfRangeException();

                // if over a delimiter and delimiters must be returned
                if ((Array.IndexOf(delimiters.ToCharArray(), chars[currentPos]) != -1) && includeDelims)
                    return "" + chars[currentPos++];

                // need to get the token without delimiters
                return nextToken(delimiters.ToCharArray());
            }

            // Returns the next token without delimiters.
            private String nextToken(char[] delimiters)
            {
                long pos = currentPos;

                // skip possible delimiters
                while (Array.IndexOf(delimiters, chars[currentPos]) != -1)
                {
                    // The last one is a delimiter (i.e there are no more tokens)
                    if (++currentPos == chars.Length)
                    {
                        currentPos = pos;
                        throw new ArgumentOutOfRangeException();
                    }
                }

                // getting the token: advance to the next delimiter (or the end),
                // then slice the contiguous run in one step instead of building
                // the token with repeated string concatenation (was O(n^2)).
                long start = currentPos;
                while (Array.IndexOf(delimiters, chars[currentPos]) == -1)
                {
                    // the last one is not a delimiter
                    if (++currentPos == chars.Length)
                        break;
                }

                return new string(chars, (int)start, (int)(currentPos - start));
            }

            /// <summary>
            /// Determines if there are more tokens to return from the source string
            /// </summary>
            /// <returns>True or false, depending if there are more tokens</returns>
            public bool HasMoreTokens()
            {
                // keeping the current pos; NextToken is probed and the position restored
                long pos = currentPos;

                try
                {
                    NextToken();
                }
                catch (ArgumentOutOfRangeException)
                {
                    return false;
                }
                finally
                {
                    currentPos = pos;
                }
                return true;
            }
        }

        #endregion
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
namespace ParquetColumnTests.Column.Values.Rle
{
    using System.Collections.Generic;
    using ParquetSharp.Bytes;
    using ParquetSharp.Column.Values.BitPacking;
    using ParquetSharp.Column.Values.Rle;
    using ParquetSharp.External;
    using Xunit;

    /**
     * Exercises the RLE / bit-packing hybrid encoding: an RLE run has header
     * (count << 1) followed by the repeated value; a bit-packed run has header
     * ((groups-of-8 values / 8) << 1) | 1 followed by the packed bytes.
     *
     * @author Alex Levenson
     */
    public class TestRunLengthBitPackingHybridEncoder
    {
        // Default encoder: bit width 3, deliberately tiny buffers so that the
        // buffer-growth paths are exercised by every test.
        private RunLengthBitPackingHybridEncoder getRunLengthBitPackingHybridEncoder()
        {
            return getRunLengthBitPackingHybridEncoder(3, 5, 10);
        }

        private RunLengthBitPackingHybridEncoder getRunLengthBitPackingHybridEncoder(
            int bitWidth, int initialCapacity, int pageSize)
        {
            return new RunLengthBitPackingHybridEncoder(
                bitWidth, initialCapacity, pageSize, new DirectByteBufferAllocator());
        }

        [Fact]
        public void testRLEOnly()
        {
            RunLengthBitPackingHybridEncoder enc = getRunLengthBitPackingHybridEncoder();
            for (int i = 0; i < 100; i++)
            {
                enc.writeInt(4);
            }
            for (int i = 0; i < 100; i++)
            {
                enc.writeInt(5);
            }

            ByteArrayInputStream input = new ByteArrayInputStream(enc.toBytes().toByteArray());

            // first run: header = 100 << 1 = 200, payload = 4
            Assert.Equal(200, BytesUtils.readUnsignedVarInt(input));
            Assert.Equal(4, BytesUtils.readIntLittleEndianOnOneByte(input));

            // second run: header = 100 << 1 = 200, payload = 5
            Assert.Equal(200, BytesUtils.readUnsignedVarInt(input));
            Assert.Equal(5, BytesUtils.readIntLittleEndianOnOneByte(input));

            // end of stream
            Assert.Equal(-1, input.ReadByte());
        }

        [Fact]
        public void testRepeatedZeros()
        {
            // previousValue is initialized to 0; make sure that repeated 0s at
            // the beginning of the stream don't trip up the repeat count
            RunLengthBitPackingHybridEncoder enc = getRunLengthBitPackingHybridEncoder();
            for (int i = 0; i < 10; i++)
            {
                enc.writeInt(0);
            }

            ByteArrayInputStream input = new ByteArrayInputStream(enc.toBytes().toByteArray());

            // header = 10 << 1 = 20, payload = 0
            Assert.Equal(20, BytesUtils.readUnsignedVarInt(input));
            Assert.Equal(0, BytesUtils.readIntLittleEndianOnOneByte(input));

            // end of stream
            Assert.Equal(-1, input.ReadByte());
        }

        [Fact]
        public void testBitWidthZero()
        {
            // with a zero bit width an RLE run has a header but no payload byte
            RunLengthBitPackingHybridEncoder enc = getRunLengthBitPackingHybridEncoder(0, 5, 10);
            for (int i = 0; i < 10; i++)
            {
                enc.writeInt(0);
            }

            ByteArrayInputStream input = new ByteArrayInputStream(enc.toBytes().toByteArray());

            // header = 10 << 1 = 20
            Assert.Equal(20, BytesUtils.readUnsignedVarInt(input));

            // end of stream
            Assert.Equal(-1, input.ReadByte());
        }

        [Fact]
        public void testBitPackingOnly()
        {
            RunLengthBitPackingHybridEncoder enc = getRunLengthBitPackingHybridEncoder();
            for (int i = 0; i < 100; i++)
            {
                enc.writeInt(i % 3);
            }

            ByteArrayInputStream input = new ByteArrayInputStream(enc.toBytes().toByteArray());

            // 100 values are padded up to 104 (a multiple of 8);
            // header = ((104/8) << 1) | 1 = 27
            Assert.Equal(27, BytesUtils.readUnsignedVarInt(input));

            List<int> decoded = unpack(3, 104, input);
            for (int i = 0; i < 100; i++)
            {
                Assert.Equal(i % 3, (int)decoded[i]);
            }

            // end of stream
            Assert.Equal(-1, input.ReadByte());
        }

        [Fact]
        public void testBitPackingOverflow()
        {
            RunLengthBitPackingHybridEncoder enc = getRunLengthBitPackingHybridEncoder();
            for (int i = 0; i < 1000; i++)
            {
                enc.writeInt(i % 3);
            }

            ByteArrayInputStream input = new ByteArrayInputStream(enc.toBytes().toByteArray());

            // 504 is the max number of values in a bit packed run
            // that still has a header of 1 byte:
            // header = ((504/8) << 1) | 1 = 127
            Assert.Equal(127, BytesUtils.readUnsignedVarInt(input));
            List<int> decoded = unpack(3, 504, input);
            for (int i = 0; i < 504; i++)
            {
                Assert.Equal(i % 3, (int)decoded[i]);
            }

            // there should now be 496 values in another bit-packed run:
            // header = ((496/8) << 1) | 1 = 125
            Assert.Equal(125, BytesUtils.readUnsignedVarInt(input));
            decoded = unpack(3, 496, input);
            for (int i = 0; i < 496; i++)
            {
                Assert.Equal((i + 504) % 3, (int)decoded[i]);
            }

            // end of stream
            Assert.Equal(-1, input.ReadByte());
        }

        [Fact]
        public void testTransitionFromBitPackingToRle()
        {
            RunLengthBitPackingHybridEncoder enc = getRunLengthBitPackingHybridEncoder();

            // 5 obviously bit-packed values
            enc.writeInt(0);
            enc.writeInt(1);
            enc.writeInt(0);
            enc.writeInt(1);
            enc.writeInt(0);

            // three repeated values, that ought to be bit-packed as well
            enc.writeInt(2);
            enc.writeInt(2);
            enc.writeInt(2);

            // lots more repeated values, that should be rle-encoded
            for (int i = 0; i < 100; i++)
            {
                enc.writeInt(2);
            }

            ByteArrayInputStream input = new ByteArrayInputStream(enc.toBytes().toByteArray());

            // bit-packed run of the first 8 values:
            // header = ((8/8) << 1) | 1 = 3
            Assert.Equal(3, BytesUtils.readUnsignedVarInt(input));
            List<int> decoded = unpack(3, 8, input);
            Assert.Equal(new int[] { 0, 1, 0, 1, 0, 2, 2, 2 }, decoded);

            // rle run: header = 100 << 1 = 200, payload = 2
            Assert.Equal(200, BytesUtils.readUnsignedVarInt(input));
            Assert.Equal(2, BytesUtils.readIntLittleEndianOnOneByte(input));

            // end of stream
            Assert.Equal(-1, input.ReadByte());
        }

        [Fact]
        public void testPaddingZerosOnUnfinishedBitPackedRuns()
        {
            RunLengthBitPackingHybridEncoder enc = getRunLengthBitPackingHybridEncoder(5, 5, 10);
            for (int i = 0; i < 9; i++)
            {
                enc.writeInt(i + 1);
            }

            ByteArrayInputStream input = new ByteArrayInputStream(enc.toBytes().toByteArray());

            // 9 values are padded with zeros up to 16;
            // header = ((16/8) << 1) | 1 = 5
            Assert.Equal(5, BytesUtils.readUnsignedVarInt(input));

            List<int> decoded = unpack(5, 16, input);
            Assert.Equal(new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0, 0, 0, 0, 0 }, decoded);

            Assert.Equal(-1, input.ReadByte());
        }

        [Fact]
        public void testSwitchingModes()
        {
            RunLengthBitPackingHybridEncoder enc = getRunLengthBitPackingHybridEncoder(9, 100, 1000);

            // rle first
            for (int i = 0; i < 25; i++)
            {
                enc.writeInt(17);
            }

            // bit-packing
            for (int i = 0; i < 7; i++)
            {
                enc.writeInt(7);
            }
            enc.writeInt(8);
            enc.writeInt(9);
            enc.writeInt(10);

            // bit-packing followed by rle
            for (int i = 0; i < 25; i++)
            {
                enc.writeInt(6);
            }

            // followed by a different rle
            for (int i = 0; i < 8; i++)
            {
                enc.writeInt(5);
            }

            ByteArrayInputStream input = new ByteArrayInputStream(enc.toBytes().toByteArray());

            // rle: header = 25 << 1 = 50, payload = 17 stored in 2 bytes
            Assert.Equal(50, BytesUtils.readUnsignedVarInt(input));
            Assert.Equal(17, BytesUtils.readIntLittleEndianOnTwoBytes(input));

            // bit-packed: header = ((16/8) << 1) | 1 = 5
            // (6 of the subsequent 6s are absorbed into this run as padding-to-8)
            Assert.Equal(5, BytesUtils.readUnsignedVarInt(input));
            List<int> decoded = unpack(9, 16, input);

            int idx = 0;
            for (int i = 0; i < 7; i++)
            {
                Assert.Equal(7, (int)decoded[idx]);
                idx++;
            }
            Assert.Equal(8, (int)decoded[idx++]);
            Assert.Equal(9, (int)decoded[idx++]);
            Assert.Equal(10, (int)decoded[idx++]);
            for (int i = 0; i < 6; i++)
            {
                Assert.Equal(6, (int)decoded[idx]);
                idx++;
            }

            // rle: header = 19 << 1 = 38, payload = 6 stored in 2 bytes
            Assert.Equal(38, BytesUtils.readUnsignedVarInt(input));
            Assert.Equal(6, BytesUtils.readIntLittleEndianOnTwoBytes(input));

            // rle: header = 8 << 1 = 16, payload = 5 stored in 2 bytes
            Assert.Equal(16, BytesUtils.readUnsignedVarInt(input));
            Assert.Equal(5, BytesUtils.readIntLittleEndianOnTwoBytes(input));

            // end of stream
            Assert.Equal(-1, input.ReadByte());
        }

        [Fact]
        public void testGroupBoundary()
        {
            // Create an RLE byte stream that has 3 values (1 literal group) with
            // bit width 2; the decoder must not read past the declared group.
            byte[] bytes = new byte[2];
            bytes[0] = (1 << 1) | 1;
            bytes[1] = (1 << 0) | (2 << 2) | (3 << 4);

            ByteArrayInputStream stream = new ByteArrayInputStream(bytes);
            RunLengthBitPackingHybridDecoder decoder = new RunLengthBitPackingHybridDecoder(2, stream);

            Assert.Equal(1, decoder.readInt());
            Assert.Equal(2, decoder.readInt());
            Assert.Equal(3, decoder.readInt());
            Assert.Equal(0, stream.available());
        }

        // Reads ceil(numValues/8) groups of bit-packed values from the stream
        // and returns them unpacked (including any zero padding values).
        private static List<int> unpack(int bitWidth, int numValues, ByteArrayInputStream input)
        {
            BytePacker packer = Packer.LITTLE_ENDIAN.newBytePacker(bitWidth);
            int[] scratch = new int[8];
            byte[] packedGroup = new byte[bitWidth];
            List<int> result = new List<int>(numValues);

            while (result.Count < numValues)
            {
                // each group of 8 values occupies exactly bitWidth bytes
                for (int i = 0; i < bitWidth; i++)
                {
                    packedGroup[i] = (byte)input.ReadByte();
                }
                packer.unpack8Values(packedGroup, 0, scratch, 0);
                for (int i = 0; i < 8; i++)
                {
                    result.Add(scratch[i]);
                }
            }

            return result;
        }
    }
}
using System; using System.Collections; using System.Collections.Specialized; using System.ComponentModel; using System.Diagnostics; using BLToolkit.EditableObjects; using BLToolkit.Reflection; namespace BLToolkit.ComponentModel { // BVChanges: adding creator public delegate object ObjectCreatorCallback(); // BVChanges: adding creator public class BindingListImpl: IBindingListView, ICancelAddNew, INotifyCollectionChanged { // BVChanges: adding creator private ObjectCreatorCallback _creator; public ObjectCreatorCallback Creator { set { _creator = value; } } // BVChanges: adding creator #region Init public BindingListImpl(IList list, Type itemType) { if (list == null) throw new ArgumentNullException("list"); if (itemType == null) throw new ArgumentNullException("itemType"); _list = list; _itemType = itemType; AddInternal(_list); } // BVChanges: /*~BindingListImpl() { for (int i = 0; i < _list.Count; i++) { if (_list[i] is INotifyPropertyChanged) ((INotifyPropertyChanged)_list[i]).PropertyChanged -= ItemPropertyChanged; } }*/ // BVChanges: #endregion #region Protected Members private readonly IList _list; private readonly Type _itemType; private void ApplySort(IComparer comparer) { if (_list is ISortable) ((ISortable)_list).Sort(0, _list.Count, comparer); else if (_list is ArrayList) ((ArrayList)_list).Sort(0, _list.Count, comparer); else if (_list is Array) Array.Sort((Array)_list, comparer); else { object[] items = new object[_list.Count]; _list.CopyTo(items, 0); Array.Sort(items, comparer); for (int i = 0; i < _list.Count; i++) _list[i] = items[i]; } _isSorted = true; } #endregion #region IBindingList Members #region Command private int _newItemIndex = -1; private INotifyObjectEdit _newObject; public object AddNew() { if (AllowNew == false) throw new NotSupportedException(); EndNew(); // BVChanges: adding creator object o = null; if(_creator != null) o = _creator(); if (o == null) o = TypeAccessor.CreateInstanceEx(_itemType); // BVChanges: adding creator _newObject 
= o as INotifyObjectEdit; if (_newObject != null) _newObject.ObjectEdit += NewObject_ObjectEdit; _newItemIndex = _list.Add(o); // BVChanges: adding creator if (o is INotifyPropertyChanged) ((INotifyPropertyChanged)o).PropertyChanged += ItemPropertyChanged; // BVChanges: adding creator OnAddItem(o, _newItemIndex); Debug.WriteLine(string.Format("AddNew - ({0})", o.GetType().Name)); return o; } void NewObject_ObjectEdit(object sender, ObjectEditEventArgs args) { if (sender == _newObject) { switch (args.EditType) { case ObjectEditType.End: EndNew(); break; case ObjectEditType.Cancel: CancelNew(_newItemIndex); break; default: return; } } } public bool AllowNew { get { return !_list.IsFixedSize; } } public bool AllowEdit { get { return !_list.IsReadOnly; } } public bool AllowRemove { get { return !_list.IsFixedSize; } } #endregion #region Change Notification private bool _notifyChanges = true; public bool NotifyChanges { get { return _notifyChanges; } set { _notifyChanges = value; } } public bool SupportsChangeNotification { get { return true; } } public event ListChangedEventHandler ListChanged; private void FireListChangedEvent(object sender, ListChangedEventArgs e) { if (_notifyChanges && ListChanged != null) ListChanged(sender, e); } protected virtual void OnListChanged(EditableListChangedEventArgs e) { FireListChangedEvent(this, e); } protected void OnListChanged(ListChangedType listChangedType, int index) { OnListChanged(new EditableListChangedEventArgs(listChangedType, index)); } private void ItemPropertyChanged(object sender, PropertyChangedEventArgs e) { if (_notifyChanges && sender != null) { int indexOfSender = _list.IndexOf(sender); if (indexOfSender >= 0) { MemberAccessor ma = TypeAccessor.GetAccessor(sender.GetType())[e.PropertyName]; if (ma != null) OnListChanged(new EditableListChangedEventArgs(indexOfSender, ma.PropertyDescriptor)); else OnListChanged(new EditableListChangedEventArgs(ListChangedType.ItemChanged, indexOfSender)); // Do not fire an event 
for OnCollectionChanged here. if (_isSorted && _list.Count > 1) { int newIndex = GetItemSortedPosition(indexOfSender, sender); if (newIndex != indexOfSender) { _list.RemoveAt(indexOfSender); _list.Insert(newIndex, sender); OnMoveItem(sender, indexOfSender, newIndex); } } } } } #endregion #region Sorting public bool SupportsSorting { get { return true; } } [NonSerialized] private bool _isSorted; public bool IsSorted { get { return _isSorted; } } [NonSerialized] private PropertyDescriptor _sortProperty; public PropertyDescriptor SortProperty { get { return _sortProperty; } } [NonSerialized] private ListSortDirection _sortDirection; public ListSortDirection SortDirection { get { return _sortDirection; } } public void ApplySort(PropertyDescriptor property, ListSortDirection direction) { Debug.WriteLine(string.Format("Begin ApplySort(\"{0}\", {1})", property.Name, direction)); _sortProperty = property; _sortDirection = direction; _sortDescriptions = null; ApplySort(GetSortComparer(_sortProperty, _sortDirection)); if (_list.Count > 0) OnReset(); Debug.WriteLine(string.Format("End ApplySort(\"{0}\", {1})", property.Name, direction)); } public void RemoveSort() { _isSorted = false; _sortProperty = null; _sortDescriptions = null; OnReset(); } #endregion #region Searching public bool SupportsSearching { get { return true; } } public int Find(PropertyDescriptor property, object key) { if (property == null) throw new ArgumentException("property"); if (key != null) for (int i = 0; i < _list.Count; i++) if (key.Equals(property.GetValue(_list[i]))) return i; return -1; } #endregion #region Indexes public void AddIndex(PropertyDescriptor property) { } public void RemoveIndex(PropertyDescriptor property) { } #endregion #endregion #region ICancelAddNew Members public void CancelNew(int itemIndex) { if (itemIndex >= 0 && itemIndex == _newItemIndex) { _list.RemoveAt(itemIndex); OnRemoveItem(_newObject, itemIndex); EndNew(); } } public void EndNew(int itemIndex) { if (itemIndex == 
_newItemIndex) EndNew(); } public void EndNew() { _newItemIndex = -1; if (_newObject != null) _newObject.ObjectEdit -= NewObject_ObjectEdit; _newObject = null; } #endregion #region IList Members public int Add(object value) { int index; if (!_isSorted) index = _list.Add(value); else { index = GetSortedInsertIndex(value); _list.Insert(index, value); } AddInternal(value); OnAddItem(value, index); return index; } public void Clear() { if (_list.Count > 0) { RemoveInternal(_list); _list.Clear(); OnReset(); } } public bool Contains(object value) { return _list.Contains(value); } public int IndexOf(object value) { return _list.IndexOf(value); } public void Insert(int index, object value) { if (_isSorted) index = GetSortedInsertIndex(value); _list.Insert(index, value); AddInternal(value); OnAddItem(value, index); } public bool IsFixedSize { get { return _list.IsFixedSize; } } public bool IsReadOnly { get { return _list.IsReadOnly; } } public void Remove(object value) { int index = IndexOf(value); if (index >= 0) RemoveInternal(value); _list.Remove(value); if (index >= 0) OnRemoveItem(value, index); } public void RemoveAt(int index) { object value = this[index]; RemoveInternal(value); _list.RemoveAt(index); OnRemoveItem(value, index); } public object this[int index] { get { return _list[index]; } set { object o = _list[index]; if (o != value) { RemoveInternal(o); _list[index] = value; AddInternal(value); OnChangeItem(o, value, index); if (_isSorted) { int newIndex = GetItemSortedPosition(index, value); if (newIndex != index) { _list.RemoveAt(index); _list.Insert(newIndex, value); } OnMoveItem(value, index, newIndex); } } } } #endregion #region ICollection Members public void CopyTo(Array array, int index) { _list.CopyTo(array, index); } public int Count { get { return _list.Count; } } public bool IsSynchronized { get { return _list.IsSynchronized; } } public object SyncRoot { get { return _list.SyncRoot; } } #endregion #region IEnumerable Members public IEnumerator 
GetEnumerator() { return _list.GetEnumerator(); } #endregion #region SortPropertyComparer class SortPropertyComparer : IComparer { readonly PropertyDescriptor _property; readonly ListSortDirection _direction; public SortPropertyComparer(PropertyDescriptor property, ListSortDirection direction) { _property = property; _direction = direction; } public int Compare(object x, object y) { object a = _property.GetValue(x); object b = _property.GetValue(y); int n = Comparer.Default.Compare(a, b); return _direction == ListSortDirection.Ascending? n: -n; } } #endregion #region IComparer Accessor public IComparer GetSortComparer() { if (_isSorted) { if (_sortDescriptions != null) return GetSortComparer(_sortDescriptions); return GetSortComparer(_sortProperty, _sortDirection); } return null; } private IComparer GetSortComparer(PropertyDescriptor sortProperty, ListSortDirection sortDirection) { if (_sortSubstitutions.ContainsKey(sortProperty.Name)) sortProperty = ((SortSubstitutionPair)_sortSubstitutions[sortProperty.Name]).Substitute; return new SortPropertyComparer(sortProperty, sortDirection); } private IComparer GetSortComparer(ListSortDescriptionCollection sortDescriptions) { bool needSubstitution = false; if (_sortSubstitutions.Count > 0) { foreach (ListSortDescription sortDescription in sortDescriptions) { if (_sortSubstitutions.ContainsKey(sortDescription.PropertyDescriptor.Name)) { needSubstitution = true; break; } } if (needSubstitution) { ListSortDescription[] sorts = new ListSortDescription[sortDescriptions.Count]; sortDescriptions.CopyTo(sorts, 0); for (int i = 0; i < sorts.Length; i++) if (_sortSubstitutions.ContainsKey(sorts[i].PropertyDescriptor.Name)) sorts[i] = new ListSortDescription(((SortSubstitutionPair)_sortSubstitutions[sorts[i].PropertyDescriptor.Name]).Substitute, sorts[i].SortDirection); sortDescriptions = new ListSortDescriptionCollection(sorts); } } return new SortListPropertyComparer(sortDescriptions); } #endregion #region IBindingListView Members 
/// <summary>Always true: multi-property sorting is supported.</summary>
public bool SupportsAdvancedSorting { get { return true; } }

/// <summary>
/// Applies a multi-property sort. Clears any single-property sort state
/// and raises a Reset notification when the list is not empty.
/// </summary>
public void ApplySort(ListSortDescriptionCollection sorts)
{
    _sortDescriptions = sorts;
    _isSorted = true;
    _sortProperty = null;

    ApplySort(GetSortComparer(sorts));

    if (_list.Count > 0)
        OnReset();
}

[NonSerialized]
private ListSortDescriptionCollection _sortDescriptions;

/// <summary>The current multi-property sort, or null when none is applied.</summary>
public ListSortDescriptionCollection SortDescriptions { get { return _sortDescriptions; } }

/// <summary>Always false: filtering is not implemented.</summary>
public bool SupportsFiltering { get { return false; } }

/// <summary>Not supported; both accessors always throw.</summary>
public string Filter
{
    get { throw new NotImplementedException("The method 'BindingListImpl.get_Filter' is not implemented."); }
    set { throw new NotImplementedException("The method 'BindingListImpl.set_Filter' is not implemented."); }
}

/// <summary>Not supported; always throws.</summary>
public void RemoveFilter()
{
    throw new NotImplementedException("The method 'BindingListImpl.RemoveFilter()' is not implemented.");
}

#endregion

#region SortListPropertyComparer

// Compares two items by each sort description in turn; the first property
// whose values differ decides the order.
class SortListPropertyComparer : IComparer
{
    readonly ListSortDescriptionCollection _sorts;

    public SortListPropertyComparer(ListSortDescriptionCollection sorts)
    {
        _sorts = sorts;
    }

    public int Compare(object x, object y)
    {
        for (int i = 0; i < _sorts.Count; i++)
        {
            PropertyDescriptor property = _sorts[i].PropertyDescriptor;

            object a = property.GetValue(x);
            object b = property.GetValue(y);

            int n = Comparer.Default.Compare(a, b);

            if (n != 0)
                return _sorts[i].SortDirection == ListSortDirection.Ascending? n: -n;
        }

        return 0;
    }
}

#endregion

#region Sorting enhancement

// Maps an original property name to a SortSubstitutionPair whose Substitute
// descriptor is used in its place whenever a sort comparer is built.
private readonly Hashtable _sortSubstitutions = new Hashtable();

private class SortSubstitutionPair
{
    public SortSubstitutionPair(PropertyDescriptor original, PropertyDescriptor substitute)
    {
        Original   = original;
        Substitute = substitute;
    }

    public readonly PropertyDescriptor Original;
    public readonly PropertyDescriptor Substitute;
}

/// <summary>
/// Registers a substitution so that sorting by <paramref name="originalProperty"/>
/// actually compares <paramref name="substituteProperty"/> values.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Either property cannot be resolved on the item type.
/// </exception>
public void CreateSortSubstitution(string originalProperty, string substituteProperty)
{
    TypeAccessor typeAccessor = TypeAccessor.GetAccessor(_itemType);

    PropertyDescriptor originalDescriptor   = typeAccessor.PropertyDescriptors[originalProperty];
    PropertyDescriptor substituteDescriptor = typeAccessor.PropertyDescriptors[substituteProperty];

    if (originalDescriptor == null)
        throw new InvalidOperationException("Can not retrieve PropertyDescriptor for original property: " + originalProperty);

    if (substituteDescriptor == null)
        throw new InvalidOperationException("Can not retrieve PropertyDescriptor for substitute property: " + substituteProperty);

    _sortSubstitutions[originalProperty] = new SortSubstitutionPair(originalDescriptor, substituteDescriptor);
}

/// <summary>Removes a previously registered sort substitution.</summary>
public void RemoveSortSubstitution(string originalProperty)
{
    _sortSubstitutions.Remove(originalProperty);
}

#endregion

#region Sort enforcement

/// <summary>
/// Returns the position the item currently at <paramref name="index"/> should
/// occupy to keep the list sorted, or <paramref name="index"/> itself when the
/// list is unsorted or the item is already correctly placed.
/// </summary>
public int GetItemSortedPosition(int index, object sender)
{
    IComparer comparer = GetSortComparer();

    if (comparer == null)
        return index;

    // Only search when the item is out of order relative to a neighbor.
    if ((index > 0 && comparer.Compare(_list[index - 1], sender) > 0) ||
        (index < _list.Count - 1 && comparer.Compare(_list[index + 1], sender) < 0))
    {
        for (int i = 0; i < _list.Count; i++)
        {
            if (i != index && comparer.Compare(_list[i], sender) > 0)
            {
                // Moving past the old slot shifts later positions down by one.
                if (i > index)
                    return i - 1;

                return i;
            }
        }

        // Greater than every other item: it belongs at the end.
        return _list.Count - 1;
    }

    return index;
}

/// <summary>
/// Returns the index at which <paramref name="value"/> should be inserted to
/// keep the list sorted, or -1 when the list is unsorted.
/// </summary>
public int GetSortedInsertIndex(object value)
{
    IComparer comparer = GetSortComparer();

    if (comparer == null)
        return -1;

    for (int i = 0; i < _list.Count; i++)
        if (comparer.Compare(_list[i], value) > 0)
            return i;

    return _list.Count;
}

#endregion

#region Misc/Range Operations

/// <summary>
/// Moves an item; when the list is sorted, the item goes to its sorted
/// position instead of <paramref name="newIndex"/>.
/// </summary>
public void Move(int newIndex, int oldIndex)
{
    if (oldIndex != newIndex)
    {
        EndNew();

        object o = _list[oldIndex];

        _list.RemoveAt(oldIndex);

        if (!_isSorted)
            _list.Insert(newIndex, o);
        else
            _list.Insert(newIndex = GetSortedInsertIndex(o), o);

        OnMoveItem(o, oldIndex, newIndex);
    }
}

/// <summary>Appends (or sort-inserts) every item of <paramref name="c"/>, then raises Reset.</summary>
public void AddRange(ICollection c)
{
    foreach (object o in c)
    {
        if (!_isSorted)
            _list.Add(o);
        else
            _list.Insert(GetSortedInsertIndex(o), o);
    }

    AddInternal(c);
    OnReset();
}

/// <summary>
/// Inserts the items of <paramref name="c"/> starting at <paramref name="index"/>;
/// a sorted list ignores the index and sort-inserts each item instead.
/// </summary>
public void InsertRange(int index, ICollection c)
{
    if (c.Count == 0)
        return;

    foreach (object o in c)
    {
        if (!_isSorted)
            _list.Insert(index++, o);
        else
            _list.Insert(GetSortedInsertIndex(o), o);
    }

    AddInternal(c);
    OnReset();
}

/// <summary>Removes up to <paramref name="count"/> items starting at <paramref name="index"/>.</summary>
public void RemoveRange(int index, int count)
{
    object[] toRemove = new object[count];

    for (int i = index; i < _list.Count && i < index + count; i++)
        toRemove[i - index] = _list[i];

    RemoveInternal(toRemove);

    foreach (object o in toRemove)
        _list.Remove(o);

    OnReset();
}

/// <summary>
/// Replaces the items in [index, index + c.Count) with the items of
/// <paramref name="c"/>, re-sorts if needed, and raises Reset.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">
/// The replacement range does not fit inside the list.
/// </exception>
public void SetRange(int index, ICollection c)
{
    int cCount = c.Count;

    // Fixed: the last valid start position is _list.Count - cCount, so only
    // indexes strictly greater than it are invalid (the old '>=' check
    // rejected a perfectly legal call that replaces the tail of the list).
    if (index < 0 || index > _list.Count - cCount)
        throw new ArgumentOutOfRangeException("index");

    bool oldNotifyChanges = _notifyChanges;
    _notifyChanges = false;

    // Fixed: the original indexed with 'i + index' (a double offset, since
    // 'i' already started at 'index') and never incremented 'i', so every
    // new item overwrote the same slot.
    int i = index;

    foreach (object newObject in c)
    {
        RemoveInternal(_list[i]);
        _list[i] = newObject;
        i++;
    }

    AddInternal(c);

    if (_isSorted)
        ApplySort(GetSortComparer());

    _notifyChanges = oldNotifyChanges;
    OnReset();
}

#endregion

#region Add/Remove Internal

// Hooks an item into the list: commits any pending AddNew and subscribes to
// the item's PropertyChanged notifications when it supports them.
private void AddInternal(object value)
{
    EndNew();

    if (value is INotifyPropertyChanged)
        ((INotifyPropertyChanged)value).PropertyChanged += ItemPropertyChanged;
}

// Unhooks an item: commits any pending AddNew and unsubscribes from the
// item's PropertyChanged notifications when it supports them.
private void RemoveInternal(object value)
{
    EndNew();

    if (value is INotifyPropertyChanged)
        ((INotifyPropertyChanged)value).PropertyChanged -= ItemPropertyChanged;
}

private void AddInternal(IEnumerable e)
{
    foreach (object o in e)
        AddInternal(o);
}

private void RemoveInternal(IEnumerable e)
{
    foreach (object o in e)
        RemoveInternal(o);
}
// Raises the paired ListChanged (ItemAdded) and CollectionChanged (Add)
// notifications for an item just inserted at <paramref name="index"/>.
private void OnAddItem(object item, int index)
{
    EditableListChangedEventArgs listArgs =
        new EditableListChangedEventArgs(ListChangedType.ItemAdded, index);

    OnListChanged(listArgs);

    NotifyCollectionChangedEventArgs collectionArgs =
        new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Add, item, index);

    OnCollectionChanged(collectionArgs);
}

// Raises the paired ListChanged (ItemDeleted) and CollectionChanged (Remove)
// notifications for an item just removed from <paramref name="index"/>.
private void OnRemoveItem(object item, int index)
{
    EditableListChangedEventArgs listArgs =
        new EditableListChangedEventArgs(ListChangedType.ItemDeleted, index);

    OnListChanged(listArgs);

    NotifyCollectionChangedEventArgs collectionArgs =
        new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Remove, item, index);

    OnCollectionChanged(collectionArgs);
}

// Raises the paired ListChanged and CollectionChanged (Move) notifications
// for an item relocated from oldIndex to newIndex.
private void OnMoveItem(object item, int oldIndex, int newIndex)
{
    EditableListChangedEventArgs listArgs =
        new EditableListChangedEventArgs(newIndex, oldIndex);

    OnListChanged(listArgs);

    NotifyCollectionChangedEventArgs collectionArgs =
        new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Move, item, newIndex, oldIndex);

    OnCollectionChanged(collectionArgs);
}

// Raises the paired ListChanged (ItemChanged) and CollectionChanged (Replace)
// notifications when the item at <paramref name="index"/> is substituted.
private void OnChangeItem(object oldValue, object newValue, int index)
{
    EditableListChangedEventArgs listArgs =
        new EditableListChangedEventArgs(ListChangedType.ItemChanged, index);

    OnListChanged(listArgs);

    NotifyCollectionChangedEventArgs collectionArgs =
        new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Replace, oldValue, newValue, index);

    OnCollectionChanged(collectionArgs);
}

// Raises the paired ListChanged (Reset) and CollectionChanged (Reset)
// notifications after a bulk change.
private void OnReset()
{
    EditableListChangedEventArgs listArgs =
        new EditableListChangedEventArgs(ListChangedType.Reset);

    OnListChanged(listArgs);

    NotifyCollectionChangedEventArgs collectionArgs =
        new NotifyCollectionChangedEventArgs(NotifyCollectionChangedAction.Reset);

    OnCollectionChanged(collectionArgs);
}

#endregion

#region INotifyCollectionChanged Members

public event NotifyCollectionChangedEventHandler CollectionChanged;

// Invokes the CollectionChanged event, but only while change notification
// is enabled and at least one handler is attached.
private void FireCollectionChangedEvent(object sender, NotifyCollectionChangedEventArgs ea)
{
    if (!_notifyChanges)
        return;

    NotifyCollectionChangedEventHandler handler = CollectionChanged;

    if (handler != null)
        handler(sender, ea);
}

protected virtual void OnCollectionChanged(NotifyCollectionChangedEventArgs ea)
{
    FireCollectionChangedEvent(this, ea);
}

#endregion
}
}
using System;

/// <summary>
/// Test case for System.Array.LastIndexOf&lt;T&gt;(T[], T, int, int).
/// </summary>
public class ArrayIndexOf3
{
    #region Public Methods

    /// <summary>Runs every positive and negative scenario; true when all pass.</summary>
    public bool RunTests()
    {
        bool retVal = true;

        TestLibrary.TestFramework.LogInformation("[Positive]");
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;
        retVal = PosTest3() && retVal;
        retVal = PosTest4() && retVal;
        retVal = PosTest5() && retVal;
        retVal = PosTest6() && retVal;

        TestLibrary.TestFramework.LogInformation("[Negative]");
        retVal = NegTest1() && retVal;
        retVal = NegTest2() && retVal;
        retVal = NegTest3() && retVal;
        retVal = NegTest4() && retVal;
        retVal = NegTest5() && retVal;

        return retVal;
    }

    #region Positive Test Cases

    /// <summary>Every element equal: the last occurrence within [0, i] is always i.</summary>
    public bool PosTest1()
    {
        bool retVal = true;

        TestLibrary.TestFramework.BeginScenario("PosTest1:Test the array of elements which have the same value ");

        try
        {
            int length = TestLibrary.Generator.GetInt16(-55);
            int value = TestLibrary.Generator.GetByte(-55);
            int[] i1 = new int[length];

            for (int i = 0; i < length; i++)
            {
                i1[i] = value;
            }

            for (int i = length - 1; i >= 0; i--) // travel the array
            {
                // Search i + 1 elements backward from index i.
                if (Array.LastIndexOf<int>(i1, value, i, i + 1) != i)
                {
                    TestLibrary.TestFramework.LogError("001", "The result is not the value as expected");
                    retVal = false;
                }
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("002", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>Searching for the empty string in an all-empty-string array.</summary>
    public bool PosTest2()
    {
        bool retVal = true;

        TestLibrary.TestFramework.BeginScenario("PosTest2: Test the empty string ");

        try
        {
            int length = TestLibrary.Generator.GetByte(-55);
            string[] s1 = new string[length];

            for (int i = 0; i < length; i++)
            {
                s1[i] = "";
            }

            for (int i = length - 1; i >= 0; i--) // travel the array
            {
                if (Array.LastIndexOf<string>(s1, "", i, i + 1) != i)
                {
                    TestLibrary.TestFramework.LogError("003", "The result is not the value as expected");
                    retVal = false;
                }
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("004", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>A byte search value is implicitly widened to int before the search.</summary>
    public bool PosTest3()
    {
        bool retVal = true;

        TestLibrary.TestFramework.BeginScenario("PosTest3: Generic convert byte to int32");

        try
        {
            int[] i1 = new int[6] { 2356, 255, 988874, 90875, 255, 123334564 };
            byte b1 = 255;

            if (Array.LastIndexOf<int>(i1, b1, 5, 6) != 4)
            {
                TestLibrary.TestFramework.LogError("005", "The result is not the value as expected");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("006", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>Char array: last occurrence within the searched range, not the whole array.</summary>
    public bool PosTest4()
    {
        bool retVal = true;

        TestLibrary.TestFramework.BeginScenario("PosTest4: Test the array of char");

        try
        {
            char[] i1 = new char[6] { 't', 'r', 'c', '4', 'r', 'c' };
            char b1 = 'c';

            if (Array.LastIndexOf<char>(i1, b1, 4, 5) != 2)
            {
                TestLibrary.TestFramework.LogError("007", "The result is not the value as expected");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("008", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>A null element can be found by searching for null.</summary>
    public bool PosTest5()
    {
        bool retVal = true;

        TestLibrary.TestFramework.BeginScenario("PosTest5: Test the null element of the string array ");

        try
        {
            string[] s1 = new string[6] { "Jack", "Mary", "Mike", "Peter", null, "Mary" };

            if (Array.LastIndexOf<string>(s1, null, 5, 6) != 4)
            {
                // Fixed: error IDs were duplicates of PosTest3's ("005"/"006").
                TestLibrary.TestFramework.LogError("009", "The result is not the value as expected");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("010", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>A case-mismatched value is not found (ordinal comparison): result is -1.</summary>
    public bool PosTest6()
    {
        bool retVal = true;

        TestLibrary.TestFramework.BeginScenario("PosTest6: Find out no result ");

        try
        {
            string[] s1 = new string[6] { "Jack", "Mary", "Mike", "Peter", "Tim", "Mary" };

            if (Array.LastIndexOf<string>(s1, "mary", 5, 6) != -1)
            {
                // Fixed: error IDs were duplicates of PosTest3's ("005"/"006").
                TestLibrary.TestFramework.LogError("011", "The result is not the value as expected");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("012", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    #endregion

    #region Negative Test Cases

    /// <summary>A null array must raise ArgumentNullException.</summary>
    public bool NegTest1()
    {
        bool retVal = true;

        TestLibrary.TestFramework.BeginScenario("NegTest1:The array is a null reference ");

        try
        {
            string[] s1 = null;
            int i1 = Array.LastIndexOf<string>(s1, "", 1, 0);

            TestLibrary.TestFramework.LogError("101", "The ArgumentNullException was not thrown as expected");
            retVal = false;
        }
        catch (ArgumentNullException)
        {
        }
        catch (Exception e)
        {
            // Fixed: this ID duplicated the one above ("101").
            TestLibrary.TestFramework.LogError("102", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>A negative startIndex must raise ArgumentOutOfRangeException.</summary>
    public bool NegTest2()
    {
        bool retVal = true;

        TestLibrary.TestFramework.BeginScenario("NegTest2: Set the negative startIndex argument");

        try
        {
            string[] s1 = new string[6] { "Jack", "Mary", "Mike", "Peter", "Mary", "Joan" };
            int i1 = Array.LastIndexOf<string>(s1, "", -1, 3);

            TestLibrary.TestFramework.LogError("103", "The ArgumentOutOfRangeException was not thrown as expected");
            retVal = false;
        }
        catch (ArgumentOutOfRangeException)
        {
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("104", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>startIndex beyond the last index must raise ArgumentOutOfRangeException.</summary>
    public bool NegTest3()
    {
        bool retVal = true;

        TestLibrary.TestFramework.BeginScenario("NegTest3: Set the startIndex greater than the max index of the array");

        try
        {
            int[] i1 = new int[6] { 2, 34, 56, 87, 23, 209 };
            int i2 = Array.LastIndexOf<int>(i1, 56, 6, 3);

            TestLibrary.TestFramework.LogError("105", "The ArgumentOutOfRangeException was not thrown as expected");
            retVal = false;
        }
        catch (ArgumentOutOfRangeException)
        {
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("106", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>A negative count must raise ArgumentOutOfRangeException.</summary>
    public bool NegTest4()
    {
        bool retVal = true;

        TestLibrary.TestFramework.BeginScenario("NegTest4: Count argument is less than zero");

        try
        {
            int[] i1 = new int[6] { 2, 34, 56, 87, 23, 209 };
            int i2 = Array.LastIndexOf<int>(i1, 56, 3, -3);

            TestLibrary.TestFramework.LogError("107", "The ArgumentOutOfRangeException was not thrown as expected");
            retVal = false;
        }
        catch (ArgumentOutOfRangeException)
        {
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("108", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>count reaching below index 0 must raise ArgumentOutOfRangeException.</summary>
    public bool NegTest5()
    {
        bool retVal = true;

        TestLibrary.TestFramework.BeginScenario("NegTest5: Count argument do not specify a valid section in array");

        try
        {
            int[] i1 = new int[6] { 2, 34, 56, 87, 23, 209 };
            int i2 = Array.LastIndexOf<int>(i1, 56, 3, 5);

            TestLibrary.TestFramework.LogError("109", "The ArgumentOutOfRangeException was not thrown as expected");
            retVal = false;
        }
        catch (ArgumentOutOfRangeException)
        {
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("110", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    #endregion
    #endregion

    /// <summary>Harness entry point: 100 on success, 0 on failure.</summary>
    public static int Main()
    {
        ArrayIndexOf3 test = new ArrayIndexOf3();

        TestLibrary.TestFramework.BeginTestCase("ArrayIndexOf3");

        if (test.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using Xunit;

namespace System.Collections.Concurrent.Tests
{
    /// <summary>The class that contains the unit tests of ConcurrentBag&lt;T&gt;.</summary>
    public class ConcurrentBagTests
    {
        // One producer adds 4/5/6 while a consumer drains the bag concurrently;
        // every taken item must be one of the three produced values.
        [Fact]
        public static void TestBasicScenarios()
        {
            ConcurrentBag<int> cb = new ConcurrentBag<int>();
            Task[] tks = new Task[2];
            tks[0] = Task.Run(() =>
            {
                cb.Add(4);
                cb.Add(5);
                cb.Add(6);
            });

            // Consume the items in the bag
            tks[1] = Task.Run(() =>
            {
                int item;
                while (!cb.IsEmpty)
                {
                    bool ret = cb.TryTake(out item);
                    Assert.True(ret);
                    // loose check
                    if (item != 4 && item != 5 && item != 6)
                    {
                        Assert.False(true, "Expected: 4|5|6; actual: " + item.ToString());
                    }
                }
            });

            Task.WaitAll(tks);
        }

        // The IEnumerable constructor copies the seed collection; a null seed throws.
        [Fact]
        public static void RTest1_Ctor()
        {
            ConcurrentBag<int> bag = new ConcurrentBag<int>(new int[] { 1, 2, 3 });
            Assert.False(bag.IsEmpty);
            Assert.Equal(3, bag.Count);

            Assert.Throws<ArgumentNullException>( () => {bag = new ConcurrentBag<int>(null);} );
        }

        [Fact]
        public static void RTest2_Add()
        {
            RTest2_Add(1, 10);
            RTest2_Add(3, 100);
        }

        [Fact]
        [OuterLoop]
        public static void RTest2_Add01()
        {
            RTest2_Add(8, 1000);
        }

        [Fact]
        public static void RTest3_TakeOrPeek()
        {
            ConcurrentBag<int> bag = CreateBag(100);
            RTest3_TakeOrPeek(bag, 1, 100, true);

            bag = CreateBag(100);
            RTest3_TakeOrPeek(bag, 4, 10, false);

            bag = CreateBag(1000);
            RTest3_TakeOrPeek(bag, 11, 100, true);
        }

        [Fact]
        public static void RTest4_AddAndTake()
        {
            RTest4_AddAndTake(8);
            RTest4_AddAndTake(16);
        }

        // CopyTo argument validation: null array, negative index, and indexes
        // that leave too little room must throw.
        [Fact]
        public static void RTest5_CopyTo()
        {
            const int SIZE = 10;
            Array array = new int[SIZE];
            int index = 0;

            ConcurrentBag<int> bag = CreateBag(SIZE);
            bag.CopyTo((int[])array, index);

            Assert.Throws<ArgumentNullException>(() => bag.CopyTo(null, index));
            Assert.Throws<ArgumentOutOfRangeException>(() => bag.CopyTo((int[]) array, -1));
            Assert.Throws<ArgumentException>(() => bag.CopyTo((int[])array, SIZE));
            Assert.Throws<ArgumentException>(() => bag.CopyTo((int[])array, SIZE-2));
        }

        // Same validation through the non-generic ICollection.CopyTo, plus a
        // multidimensional destination array, which must be rejected.
        [Fact]
        public static void RTest5_ICollectionCopyTo()
        {
            const int SIZE = 10;
            Array array = new int[SIZE];
            int index = 0;

            ConcurrentBag<int> bag = CreateBag(SIZE);
            ICollection collection = bag as ICollection;
            Assert.NotNull(collection);
            collection.CopyTo(array, index);

            Assert.Throws<ArgumentNullException>(() => collection.CopyTo(null, index));
            Assert.Throws<ArgumentOutOfRangeException>(() => collection.CopyTo((int[])array, -1));
            Assert.Throws<ArgumentException>(() => collection.CopyTo((int[])array, SIZE));
            Assert.Throws<ArgumentException>(() => collection.CopyTo((int[])array, SIZE - 2));

            Array array2 = new int[SIZE, 5];
            Assert.Throws<ArgumentException>(() => collection.CopyTo(array2, 0));
        }

        /// <summary>
        /// Test bag addition
        /// </summary>
        /// <param name="threadsCount">number of concurrent adder tasks</param>
        /// <param name="itemsPerThread">items each task adds</param>
        /// <returns>True if succeeded, false otherwise</returns>
        private static void RTest2_Add(int threadsCount, int itemsPerThread)
        {
            int failures = 0;
            ConcurrentBag<int> bag = new ConcurrentBag<int>();

            Task[] threads = new Task[threadsCount];
            for (int i = 0; i < threads.Length; i++)
            {
                threads[i] = Task.Run(() =>
                {
                    for (int j = 0; j < itemsPerThread; j++)
                    {
                        try
                        {
                            bag.Add(j);
                            int item;
                            // Peek should see this thread's most recent add
                            // (ConcurrentBag keeps per-thread local lists).
                            if (!bag.TryPeek(out item) || item != j)
                            {
                                Interlocked.Increment(ref failures);
                            }
                        }
                        catch
                        {
                            Interlocked.Increment(ref failures);
                        }
                    }
                });
            }

            Task.WaitAll(threads);
            Assert.Equal(0, failures);
            Assert.Equal(itemsPerThread * threadsCount, bag.Count);
        }

        /// <summary>
        /// Test bag Take and Peek operations
        /// </summary>
        /// <param name="bag">pre-populated bag under test</param>
        /// <param name="threadsCount">number of concurrent consumer tasks</param>
        /// <param name="itemsPerThread">attempts each task makes</param>
        /// <param name="take">true = TryTake, false = TryPeek</param>
        /// <returns>True if succeeded, false otherwise</returns>
        private static void RTest3_TakeOrPeek(ConcurrentBag<int> bag, int threadsCount, int itemsPerThread, bool take)
        {
            int bagCount = bag.Count;
            int succeeded = 0;
            int failures = 0;
            Task[] threads = new Task[threadsCount];
            for (int i = 0; i < threads.Length; i++)
            {
                threads[i] = Task.Run(() =>
                {
                    for (int j = 0; j < itemsPerThread; j++)
                    {
                        int data;
                        bool result = false;
                        if (take)
                        {
                            result = bag.TryTake(out data);
                        }
                        else
                        {
                            result = bag.TryPeek(out data);
                        }

                        if (result)
                        {
                            Interlocked.Increment(ref succeeded);
                        }
                        else
                        {
                            Interlocked.Increment(ref failures);
                        }
                    }
                });
            }

            Task.WaitAll(threads);

            if (take)
            {
                // Every successful take removes exactly one item.
                if (bag.Count != bagCount - succeeded)
                {
                    Console.WriteLine("* RTest3_TakeOrPeek(" + threadsCount + "," + itemsPerThread + ")");
                    Assert.False(true, "TryTake failed, the remaining count doesn't match the expected count");
                }
            }
            else
            {
                // Peeking a non-empty bag must never fail.
                Assert.Equal(0, failures);
            }
        }

        /// <summary>
        /// Test parallel Add/Take, insert unique elements in the bag, and each element should be removed once
        /// </summary>
        /// <param name="threadsCount">total number of tasks (half add, half take); must be even</param>
        /// <returns>True if succeeded, false otherwise</returns>
        private static void RTest4_AddAndTake(int threadsCount)
        {
            ConcurrentBag<int> bag = new ConcurrentBag<int>();

            Task[] threads = new Task[threadsCount];
            int start = 0;
            int end = 10;

            // validation[v] counts how many times value v was taken.
            int[] validation = new int[(end - start) * threads.Length / 2];

            for (int i = 0; i < threads.Length; i += 2)
            {
                // Each producer adds a disjoint interval [start, end); the paired
                // consumer takes one fewer item than the producer adds.
                Interval v = new Interval(start, end);
                threads[i] = Task.Factory.StartNew(
                    (o) =>
                    {
                        Interval n = (Interval)o;
                        Add(bag, n.m_start, n.m_end);
                    }, v, CancellationToken.None, TaskCreationOptions.DenyChildAttach, TaskScheduler.Default);

                threads[i + 1] = Task.Run(() => Take(bag, end - start - 1, validation));

                int step = end - start;
                start = end;
                end += step;
            }

            Task.WaitAll(threads);

            int value = -1;

            //validation
            for (int i = 0; i < validation.Length; i++)
            {
                if (validation[i] > 1)
                {
                    Console.WriteLine("* RTest4_AddAndTake(" + threadsCount + " )");
                    Assert.False(true, "Add/Take failed, item " + i + " has been taken more than once");
                }
                else if (validation[i] == 0)
                {
                    // Item i was never taken, so the bag must still contain something.
                    Assert.True(bag.TryTake(out value), String.Format("Add/Take failed, the list is not empty and TryTake returned false; thread count={0}", threadsCount));
                }
            }

            Assert.False(bag.Count > 0 || bag.TryTake(out value), String.Format("Add/Take failed, this list is not empty after all remove operations; thread count={0}", threadsCount));
        }

        // Enumerating an empty bag yields nothing; after 100 adds the
        // enumeration count matches Count.
        [Fact]
        public static void RTest6_GetEnumerator()
        {
            ConcurrentBag<int> bag = new ConcurrentBag<int>();

            foreach (int x in bag)
            {
                Assert.False(true, "RTest6_GetEnumerator: GetEnumeration failed, returned items when the bag is empty");
            }

            for (int i = 0; i < 100; i++)
            {
                bag.Add(i);
            }

            int count = 0;
            foreach (int x in bag)
            {
                count++;
            }

            Assert.Equal(count, bag.Count);
        }

        // Regression test: hammer a ConcurrentBag-backed BlockingCollection with
        // two adders and two takers; Take must never throw.
        [Fact]
        public static void RTest7_BugFix575975()
        {
            BlockingCollection<int> bc = new BlockingCollection<int>(new ConcurrentBag<int>());
            bool succeeded = true;

            Task[] threads = new Task[4];
            for (int t = 0; t < threads.Length; t++)
            {
                threads[t] = Task.Factory.StartNew((obj) =>
                {
                    int index = (int)obj;
                    for (int i = 0; i < 100000; i++)
                    {
                        if (index < threads.Length / 2)
                        {
                            // Busy loop to vary the interleaving before adding.
                            int k = 0;
                            for (int j = 0; j < 1000; j++)
                                k++;
                            bc.Add(i);
                        }
                        else
                        {
                            try
                            {
                                bc.Take();
                            }
                            catch // Take must not fail
                            {
                                succeeded = false;
                                break;
                            }
                        }
                    }
                }, t, CancellationToken.None, TaskCreationOptions.DenyChildAttach, TaskScheduler.Default);
            }

            Task.WaitAll(threads);
            Assert.True(succeeded);
        }

        // Verifies the IProducerConsumerCollection<T>, ICollection and
        // IEnumerable views of the bag behave consistently.
        [Fact]
        public static void RTest8_Interfaces()
        {
            ConcurrentBag<int> bag = new ConcurrentBag<int>();

            //IPCC
            IProducerConsumerCollection<int> ipcc = bag as IProducerConsumerCollection<int>;
            Assert.False(ipcc == null, "RTest8_Interfaces: ConcurrentBag<T> doesn't implement IPCC<T>");
            Assert.True(ipcc.TryAdd(1), "RTest8_Interfaces: IPCC<T>.TryAdd failed");
            Assert.Equal(1, bag.Count);

            int result = -1;
            Assert.True(ipcc.TryTake(out result), "RTest8_Interfaces: IPCC<T>.TryTake failed");
            Assert.True(1 == result, "RTest8_Interfaces: IPCC<T>.TryTake failed");
            Assert.Equal(0, bag.Count);

            //ICollection
            ICollection collection = bag as ICollection;
            Assert.False(collection == null, "RTest8_Interfaces: ConcurrentBag<T> doesn't implement ICollection");
            Assert.False(collection.IsSynchronized, "RTest8_Interfaces: IsSynchronized returned true");

            //IEnumerable
            IEnumerable enumerable = bag as IEnumerable;
            Assert.False(enumerable == null, "RTest8_Interfaces: ConcurrentBag<T> doesn't implement IEnumerable");

            foreach (object o in enumerable)
            {
                Assert.True(false, "RTest8_Interfaces: Enumerable returned items when the bag is empty");
            }
        }

        // ICollection.SyncRoot is not supported on ConcurrentBag.
        [Fact]
        public static void RTest8_Interfaces_Negative()
        {
            ConcurrentBag<int> bag = new ConcurrentBag<int>();

            //IPCC
            IProducerConsumerCollection<int> ipcc = bag as IProducerConsumerCollection<int>;
            ICollection collection = bag as ICollection;

            Assert.Throws<NotSupportedException>(() => { object obj = collection.SyncRoot; });
        }

        // ToArray never returns null; concurrent snapshots of an unchanging bag
        // always see the full 10000 items.
        [Fact]
        public static void RTest9_ToArray()
        {
            var bag = new ConcurrentBag<int>();

            Assert.NotNull(bag.ToArray());
            Assert.Equal(0, bag.ToArray().Length);

            int[] allItems = new int[10000];
            for (int i = 0; i < allItems.Length; i++)
                allItems[i] = i;

            bag = new ConcurrentBag<int>(allItems);
            int failCount = 0;

            Task[] tasks = new Task[10];
            for (int i = 0; i < tasks.Length; i++)
            {
                tasks[i] = Task.Run(() =>
                {
                    int[] array = bag.ToArray();
                    if (array == null || array.Length != 10000)
                        Interlocked.Increment(ref failCount);
                });
            }

            Task.WaitAll(tasks);
            Assert.True(0 == failCount, "RTest9_ToArray: One or more thread failed to get the correct bag items from ToArray");
        }

        // Sanity checks on the DebuggerDisplay/DebuggerTypeProxy attributes.
        [Fact]
        public static void RTest10_DebuggerAttributes()
        {
            DebuggerAttributes.ValidateDebuggerDisplayReferences(new ConcurrentBag<int>());
            DebuggerAttributes.ValidateDebuggerTypeProxyProperties(new ConcurrentBag<int>());
        }

        #region Helper Methods / Classes

        // Half-open value range [m_start, m_end) handed to producer tasks.
        private struct Interval
        {
            public Interval(int start, int end)
            {
                m_start = start;
                m_end = end;
            }
            internal int m_start;
            internal int m_end;
        }

        /// <summary>
        /// Create a ConcurrentBag object
        /// </summary>
        /// <param name="numbers">number of the elements in the bag</param>
        /// <returns>The bag object</returns>
        private static ConcurrentBag<int> CreateBag(int numbers)
        {
            ConcurrentBag<int> bag = new ConcurrentBag<int>();
            for (int i = 0; i < numbers; i++)
            {
                bag.Add(i);
            }
            return bag;
        }

        // Adds every value in [start, end) to the bag.
        private static void Add(ConcurrentBag<int> bag, int start, int end)
        {
            for (int i = start; i < end; i++)
            {
                bag.Add(i);
            }
        }

        // Attempts 'count' takes; each taken value v bumps validation[v] so the
        // caller can verify no value was removed more than once.
        private static void Take(ConcurrentBag<int> bag, int count, int[] validation)
        {
            for (int i = 0; i < count; i++)
            {
                int value = -1;

                if (bag.TryTake(out value) && validation != null)
                {
                    Interlocked.Increment(ref validation[value]);
                }
            }
        }

        #endregion
    }
}
using System;
using System.Data;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using C1.Win.C1TrueDBGrid;
using PCSComUtils.Common;
using PCSComUtils.Common.BO;
using PCSComUtils.Common.DS;
using PCSComSale.Order.BO;
using PCSComSale.Order.DS;
using PCSComUtils.MasterSetup.DS;
using PCSComUtils.PCSExc;
using PCSUtils.Log;
using PCSUtils.Utils;

namespace PCSSale.Order
{
	/// <summary>
	/// Windows Form used to cancel commitments on a sale order
	/// (see btnCancelCommitment and SOCancelCommitmentBO below).
	/// </summary>
	public class SOCancelCommitment : System.Windows.Forms.Form
	{
		// Designer-created labels and input controls.
		private System.Windows.Forms.Label lblLable3;
		private System.Windows.Forms.Label lblLable7;
		private System.Windows.Forms.Label lblLable12;
		private System.Windows.Forms.Label lblLable11;
		private System.Windows.Forms.Label lblLable1;
		// CCN selector combo (ComponentOne list control).
		private C1.Win.C1List.C1Combo cboCCN;
		private System.Windows.Forms.CheckBox chkSelectAll;
		private System.Windows.Forms.TextBox txtSaleOrderCode;
		private System.Windows.Forms.Button btnFindSaleOrder;
		private System.Windows.Forms.Button btnClose;
		private System.Windows.Forms.Button btnHelp;
		/// <summary>
		/// Required designer variable.
		/// </summary>
		private System.ComponentModel.Container components = null;

		// Saved layout of the data grid — NOTE(review): how/where it is loaded
		// is not visible in this part of the file; confirm before relying on it.
		DataTable dtbGridLayout;

		// Grid column captions (presumably localized elsewhere — confirm).
		string CaptionLine, CaptionDelivery, CaptionMasterLocation, CaptionCommitQuantity,
			CaptionUnitOfMeasure, CaptionLocation, CaptionBin, CaptionProductCode,
			CaptionProductDes, CaptionCancel;

		// Keys identifying the grid columns.
		const string DELIVERY = "Delivery";
		const string UNITOFMEASURE = "Measure";
		const string MASTERLOCATION = "MasterLocation";
		const string LOCATION = "Location";
		const string BIN = "Bin";
		const string PRODUCTCODE = "ProductCode";
		const string PRODUCTDES = "ProductDes";
		const string CANCEL = "Cancel";
		const string TRUE = "True";

		// Fully qualified class name (presumably used for logging/error
		// reporting — usage is outside this view).
		private const string THIS = "PCSSale.Order.SOCancelCommitment";

		private bool blnStateOfCheck = false;
		public EnumAction enumAction = EnumAction.Default;
		private bool blnHasError = false;

		// Data backing the cancel-commitment grid.
		private DataSet dstCancelCommit = new DataSet();

		private System.Windows.Forms.Button btnCancelCommitment;
		private System.Windows.Forms.Button btnSearch;
		private C1.Win.C1TrueDBGrid.C1TrueDBGrid dgrdData;

		// Business object handling the cancel-commitment operations.
		private SOCancelCommitmentBO boCancelCommitment = new SOCancelCommitmentBO();

		private System.Windows.Forms.TextBox txtBuyingLoc;
		private System.Windows.Forms.TextBox txtCustomer;
		private System.Windows.Forms.TextBox txtCustomerName;

		// Master record of the sale order currently loaded in the form.
		private SO_SaleOrderMasterVO voSOMaster = new SO_SaleOrderMasterVO();

		/// <summary>
		/// Initializes the form and its designer-generated controls.
		/// </summary>
		public SOCancelCommitment()
		{
			//
			// Required for Windows Form Designer support
			//
			InitializeComponent();

			//
			// TODO: Add any constructor code after InitializeComponent call
			//
		}

		/// <summary>
		/// Clean up any resources being used.
		/// </summary>
		protected override void Dispose( bool disposing )
		{
			if( disposing )
			{
				if(components != null)
				{
					components.Dispose();
				}
			}
			base.Dispose( disposing );
		}

		#region Windows Form Designer generated code
		/// <summary>
		/// Required method for Designer support - do not modify
		/// the contents of this method with the code editor.
		/// </summary>
		private void InitializeComponent()
		{
			// NOTE: machine-generated by the Windows Forms designer; do not hand-edit.
			System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(SOCancelCommitment));
			this.btnClose = new System.Windows.Forms.Button();
			this.btnHelp = new System.Windows.Forms.Button();
			this.btnCancelCommitment = new System.Windows.Forms.Button();
			this.lblLable12 = new System.Windows.Forms.Label();
			this.cboCCN = new C1.Win.C1List.C1Combo();
			this.lblLable11 = new System.Windows.Forms.Label();
			this.chkSelectAll = new System.Windows.Forms.CheckBox();
			this.lblLable3 = new System.Windows.Forms.Label();
			this.txtSaleOrderCode = new System.Windows.Forms.TextBox();
			this.lblLable7 = new System.Windows.Forms.Label();
			this.dgrdData = new C1.Win.C1TrueDBGrid.C1TrueDBGrid();
			this.btnFindSaleOrder = new System.Windows.Forms.Button();
			this.lblLable1 = new System.Windows.Forms.Label();
			this.btnSearch = new System.Windows.Forms.Button();
			this.txtBuyingLoc = new System.Windows.Forms.TextBox();
			this.txtCustomer = new System.Windows.Forms.TextBox();
			this.txtCustomerName = new System.Windows.Forms.TextBox();
			((System.ComponentModel.ISupportInitialize)(this.cboCCN)).BeginInit();
			((System.ComponentModel.ISupportInitialize)(this.dgrdData)).BeginInit();
			this.SuspendLayout();
			//
			// btnClose
			//
			this.btnClose.AccessibleDescription = "";
			this.btnClose.AccessibleName = "";
			this.btnClose.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
			this.btnClose.FlatStyle = System.Windows.Forms.FlatStyle.System;
			this.btnClose.ImeMode = System.Windows.Forms.ImeMode.NoControl;
			this.btnClose.Location = new System.Drawing.Point(657, 422);
			this.btnClose.Name = "btnClose";
			this.btnClose.Size = new System.Drawing.Size(65, 23);
			this.btnClose.TabIndex = 17;
			this.btnClose.Text = "&Close";
			this.btnClose.Click += new System.EventHandler(this.btnClose_Click);
			//
			// btnHelp
			//
			this.btnHelp.AccessibleDescription = "";
			this.btnHelp.AccessibleName = "";
			this.btnHelp.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
			this.btnHelp.FlatStyle = System.Windows.Forms.FlatStyle.System;
			this.btnHelp.ImeMode = System.Windows.Forms.ImeMode.NoControl;
			this.btnHelp.Location = new System.Drawing.Point(591, 422);
			this.btnHelp.Name = "btnHelp";
			this.btnHelp.Size = new System.Drawing.Size(65, 23);
			this.btnHelp.TabIndex = 16;
			this.btnHelp.Text = "&Help";
			//
			// btnCancelCommitment
			//
			this.btnCancelCommitment.AccessibleDescription = "";
			this.btnCancelCommitment.AccessibleName = "";
			this.btnCancelCommitment.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
			this.btnCancelCommitment.FlatStyle = System.Windows.Forms.FlatStyle.System;
			this.btnCancelCommitment.ImeMode = System.Windows.Forms.ImeMode.NoControl;
			this.btnCancelCommitment.Location = new System.Drawing.Point(6, 422);
			this.btnCancelCommitment.Name = "btnCancelCommitment";
			this.btnCancelCommitment.Size = new System.Drawing.Size(110, 23);
			this.btnCancelCommitment.TabIndex = 14;
			this.btnCancelCommitment.Text = "Cance&l Commitment";
			this.btnCancelCommitment.Click += new System.EventHandler(this.btnCancelCommitment_Click);
			//
			// lblLable12
			//
			this.lblLable12.AccessibleDescription = "";
			this.lblLable12.AccessibleName = "";
			this.lblLable12.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
			this.lblLable12.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
			this.lblLable12.ForeColor = System.Drawing.Color.Maroon;
			this.lblLable12.ImeMode = System.Windows.Forms.ImeMode.NoControl;
			this.lblLable12.Location = new System.Drawing.Point(602, 8);
			this.lblLable12.Name = "lblLable12";
			this.lblLable12.Size = new System.Drawing.Size(32, 20);
			this.lblLable12.TabIndex = 0;
			this.lblLable12.Text = "CCN";
			this.lblLable12.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
			//
			// cboCCN
			//
			this.cboCCN.AccessibleDescription = "";
			this.cboCCN.AccessibleName = "";
			this.cboCCN.AddItemSeparator = ';';
			this.cboCCN.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
			this.cboCCN.Caption = "";
			this.cboCCN.CaptionHeight = 17;
			this.cboCCN.CharacterCasing = System.Windows.Forms.CharacterCasing.Normal;
			this.cboCCN.ColumnCaptionHeight = 17;
			this.cboCCN.ColumnFooterHeight = 17;
			this.cboCCN.ComboStyle = C1.Win.C1List.ComboStyleEnum.DropdownList;
			this.cboCCN.ContentHeight = 15;
			this.cboCCN.DeadAreaBackColor = System.Drawing.Color.Empty;
			this.cboCCN.EditorBackColor = System.Drawing.SystemColors.Window;
			this.cboCCN.EditorFont = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
			this.cboCCN.EditorForeColor = System.Drawing.SystemColors.WindowText;
			this.cboCCN.EditorHeight = 15;
			this.cboCCN.FlatStyle = C1.Win.C1List.FlatModeEnum.System;
			this.cboCCN.GapHeight = 2;
			this.cboCCN.ItemHeight = 15;
			this.cboCCN.Location = new System.Drawing.Point(634, 6);
			this.cboCCN.MatchEntryTimeout = ((long)(2000));
			this.cboCCN.MaxDropDownItems = ((short)(5));
			this.cboCCN.MaxLength = 32767;
			this.cboCCN.MouseCursor = System.Windows.Forms.Cursors.Default;
			this.cboCCN.Name = "cboCCN";
			this.cboCCN.RowDivider.Color = System.Drawing.Color.DarkGray;
			this.cboCCN.RowDivider.Style = C1.Win.C1List.LineStyleEnum.None;
			this.cboCCN.RowSubDividerColor = System.Drawing.Color.DarkGray;
			this.cboCCN.Size = new System.Drawing.Size(88, 21);
			this.cboCCN.TabIndex = 1;
			this.cboCCN.PropBag = "<?xml version=\"1.0\"?><Blob><Styles type=\"C1.Win.C1List.Design.ContextWrapper\"><Da" +
				"ta>Group{AlignVert:Center;Border:None,,0, 0, 0, 0;BackColor:ControlDark;}Style2{" +
				"}Style5{}Style4{}Style7{}Style6{}EvenRow{BackColor:Aqua;}Selected{ForeColor:High" +
				"lightText;BackColor:Highlight;}Style3{}Inactive{ForeColor:InactiveCaptionText;Ba" +
				"ckColor:InactiveCaption;}Footer{}Caption{AlignHorz:Center;}Normal{BackColor:Wind" +
				"ow;}HighlightRow{ForeColor:HighlightText;BackColor:Highlight;}Style1{}OddRow{}Re" +
				"cordSelector{AlignImage:Center;}Heading{Wrap:True;BackColor:Control;Border:Raise" +
				"d,,1, 1, 1, 1;ForeColor:ControlText;AlignVert:Center;}Style8{}Style10{}Style11{}" +
				"Style9{AlignHorz:Near;}</Data></Styles><Splits><C1.Win.C1List.ListBoxView AllowC" +
				"olSelect=\"False\" Name=\"\" CaptionHeight=\"17\" ColumnCaptionHeight=\"17\" ColumnFoote" +
				"rHeight=\"17\" VerticalScrollGroup=\"1\" HorizontalScrollGroup=\"1\"><ClientRect>0, 0," +
				" 116, 156</ClientRect><VScrollBar><Width>16</Width></VScrollBar><HScrollBar><Hei" +
				"ght>16</Height></HScrollBar><CaptionStyle parent=\"Style2\" me=\"Style9\" /><EvenRow" +
				"Style parent=\"EvenRow\" me=\"Style7\" /><FooterStyle parent=\"Footer\" me=\"Style3\" />" +
				"<GroupStyle parent=\"Group\" me=\"Style11\" /><HeadingStyle parent=\"Heading\" me=\"Sty" +
				"le2\" /><HighLightRowStyle parent=\"HighlightRow\" me=\"Style6\" /><InactiveStyle par" +
				"ent=\"Inactive\" me=\"Style4\" /><OddRowStyle parent=\"OddRow\" me=\"Style8\" /><RecordS" +
				"electorStyle parent=\"RecordSelector\" me=\"Style10\" /><SelectedStyle parent=\"Selec" +
				"ted\" me=\"Style5\" /><Style parent=\"Normal\" me=\"Style1\" /></C1.Win.C1List.ListBoxV" +
				"iew></Splits><NamedStyles><Style parent=\"\" me=\"Normal\" /><Style parent=\"Normal\" " +
				"me=\"Heading\" /><Style parent=\"Heading\" me=\"Footer\" /><Style parent=\"Heading\" me=" +
				"\"Caption\" /><Style parent=\"Heading\" me=\"Inactive\" /><Style parent=\"Normal\" me=\"S" +
				"elected\" /><Style parent=\"Normal\" me=\"HighlightRow\" /><Style parent=\"Normal\" me=" +
				"\"EvenRow\" /><Style parent=\"Normal\" me=\"OddRow\" /><Style parent=\"Heading\" me=\"Rec" +
				"ordSelector\" /><Style parent=\"Caption\" me=\"Group\" /></NamedStyles><vertSplits>1<" +
				"/vertSplits><horzSplits>1</horzSplits><Layout>Modified</Layout><DefaultRecSelWid" +
				"th>16</DefaultRecSelWidth></Blob>";
			//
			// lblLable11
			//
			this.lblLable11.AccessibleDescription = "";
			this.lblLable11.AccessibleName = "";
			this.lblLable11.ImeMode = System.Windows.Forms.ImeMode.NoControl;
			this.lblLable11.Location = new System.Drawing.Point(10, 28);
			this.lblLable11.Name = "lblLable11";
			this.lblLable11.Size = new System.Drawing.Size(87, 20);
			this.lblLable11.TabIndex = 5;
			this.lblLable11.Text = "Customer";
			this.lblLable11.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
			//
			// chkSelectAll
			//
			this.chkSelectAll.AccessibleDescription = "";
			this.chkSelectAll.AccessibleName = "";
			this.chkSelectAll.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
			this.chkSelectAll.FlatStyle = System.Windows.Forms.FlatStyle.System;
			this.chkSelectAll.ImeMode = System.Windows.Forms.ImeMode.NoControl;
			this.chkSelectAll.Location = new System.Drawing.Point(120, 424);
			this.chkSelectAll.Name = "chkSelectAll";
			this.chkSelectAll.Size = new System.Drawing.Size(66, 20);
			this.chkSelectAll.TabIndex = 15;
			this.chkSelectAll.Text = "Select &All";
			this.chkSelectAll.Enter += new System.EventHandler(this.chkSelectAll_Enter);
			this.chkSelectAll.Leave += new System.EventHandler(this.chkSelectAll_Leave);
			this.chkSelectAll.CheckedChanged += new System.EventHandler(this.chkSelectAll_CheckedChanged);
			//
			// lblLable3
			//
			this.lblLable3.AccessibleDescription = "";
			this.lblLable3.AccessibleName = "";
			this.lblLable3.ForeColor = System.Drawing.Color.Maroon;
			this.lblLable3.ImeMode = System.Windows.Forms.ImeMode.NoControl;
			this.lblLable3.Location = new System.Drawing.Point(10, 6);
			this.lblLable3.Name = "lblLable3";
			this.lblLable3.Size = new System.Drawing.Size(87, 20);
			this.lblLable3.TabIndex = 2;
			this.lblLable3.Text = "Sale Order";
			this.lblLable3.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
			//
			// txtSaleOrderCode
			//
			this.txtSaleOrderCode.AccessibleDescription = "";
			this.txtSaleOrderCode.AccessibleName = "";
			this.txtSaleOrderCode.Location = new System.Drawing.Point(91, 6);
			this.txtSaleOrderCode.Name = "txtSaleOrderCode";
			this.txtSaleOrderCode.Size = new System.Drawing.Size(139, 20);
			this.txtSaleOrderCode.TabIndex = 3;
			this.txtSaleOrderCode.Text = "Sale Order";
			this.txtSaleOrderCode.KeyDown += new System.Windows.Forms.KeyEventHandler(this.txtSaleOrderCode_KeyDown);
			this.txtSaleOrderCode.Leave += new System.EventHandler(this.txtSaleOrderCode_Leave);
			this.txtSaleOrderCode.Enter += new System.EventHandler(this.OnEnterControl);
			//
			// lblLable7
			//
			this.lblLable7.AccessibleDescription = "";
			this.lblLable7.AccessibleName = "";
			this.lblLable7.ImeMode = System.Windows.Forms.ImeMode.NoControl;
			this.lblLable7.Location = new System.Drawing.Point(10, 76);
			this.lblLable7.Name = "lblLable7";
			this.lblLable7.Size = new System.Drawing.Size(87, 18);
			this.lblLable7.TabIndex = 7;
			this.lblLable7.Text = "Buying Loc.";
			this.lblLable7.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
			//
			// dgrdData
			//
			this.dgrdData.AccessibleDescription = "";
			this.dgrdData.AccessibleName = "";
			this.dgrdData.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right)));
			this.dgrdData.CaptionHeight = 17;
			this.dgrdData.CollapseColor = System.Drawing.Color.Black;
			this.dgrdData.ExpandColor = System.Drawing.Color.Black;
			this.dgrdData.FlatStyle = C1.Win.C1TrueDBGrid.FlatModeEnum.System;
			this.dgrdData.GroupByCaption = "Drag a column header here to group by that column";
			this.dgrdData.Images.Add(((System.Drawing.Image)(resources.GetObject("resource"))));
			this.dgrdData.Location = new System.Drawing.Point(6, 100);
			this.dgrdData.MarqueeStyle = C1.Win.C1TrueDBGrid.MarqueeEnum.DottedCellBorder;
			this.dgrdData.Name = "dgrdData";
			this.dgrdData.PreviewInfo.Location = new System.Drawing.Point(0, 0);
			this.dgrdData.PreviewInfo.Size = new System.Drawing.Size(0, 0);
			this.dgrdData.PreviewInfo.ZoomFactor = 75;
			this.dgrdData.PrintInfo.ShowOptionsDialog = false;
			this.dgrdData.RecordSelectorWidth = 16;
			this.dgrdData.RowDivider.Color = System.Drawing.Color.DarkGray;
			this.dgrdData.RowDivider.Style = C1.Win.C1TrueDBGrid.LineStyleEnum.Single;
			this.dgrdData.RowHeight = 15;
			this.dgrdData.RowSubDividerColor = System.Drawing.Color.DarkGray;
			this.dgrdData.Size = new System.Drawing.Size(716, 316);
			this.dgrdData.TabIndex = 13;
			this.dgrdData.Text = "c1TrueDBGrid1";
			this.dgrdData.AfterColEdit += new C1.Win.C1TrueDBGrid.ColEventHandler(this.dgrdData_AfterColEdit);
			this.dgrdData.KeyDown += new System.Windows.Forms.KeyEventHandler(this.dgrdData_KeyDown);
			this.dgrdData.PropBag = "<?xml version=\"1.0\"?><Blob><DataCols><C1DataColumn Level=\"0\" Caption=\"Sale Order " +
				"No.\" DataField=\"Code\"><ValueItems /><GroupInfo /></C1DataColumn><C1DataColumn Le" +
				"vel=\"0\" Caption=\"Del. Line\" DataField=\"Delivery\"><ValueItems /><GroupInfo /></C1" +
				"DataColumn><C1DataColumn Level=\"0\" Caption=\"Master Location\" DataField=\"MasterLo" +
				"cation\"><ValueItems /><GroupInfo /></C1DataColumn><C1DataColumn Level=\"0\" Captio" +
				"n=\"Committed Qty\" DataField=\"CommitQuantity\"><ValueItems /><GroupInfo /></C1Data" +
				"Column><C1DataColumn Level=\"0\" Caption=\"Unit\" DataField=\"Measure\"><ValueItems />" +
				"<GroupInfo /></C1DataColumn><C1DataColumn Level=\"0\" Caption=\"Location\" DataField" +
				"=\"Location\"><ValueItems /><GroupInfo /></C1DataColumn><C1DataColumn Level=\"0\" Ca" +
				"ption=\"Bin\" DataField=\"Bin\"><ValueItems /><GroupInfo /></C1DataCol" +
				"umn Level=\"0\" Caption=\"Part Number\" DataField=\"ProductCode\"><ValueItems /><Group" +
				"Info /></C1DataColumn><C1DataColumn Level=\"0\" Caption=\"Part Name\" DataField=\"Pro" +
				"ductDes\"><ValueItems /><GroupInfo /></C1DataColumn><C1DataColumn Level=\"0\" Capti" +
				"on=\"Cancel\" DataField=\"Cancel\"><ValueItems /><GroupInfo /></C1DataColumn><C1Data" +
				"Column Level=\"0\" Caption=\"Model\" DataField=\"Revision\"><ValueItems /><GroupInfo /" +
				"></C1DataColumn><C1DataColumn Level=\"0\" Caption=\"Schedule Date\" DataField=\"Sched" +
				"uleDate\"><ValueItems /><GroupInfo /></C1DataColumn></DataCols><Styles type=\"C1.W" +
				"in.C1TrueDBGrid.Design.ContextWrapper\"><Data>HighlightRow{ForeColor:HighlightTex" +
				"t;BackColor:Highlight;}Inactive{ForeColor:InactiveCaptionText;BackColor:Inactive" +
				"Caption;}Style78{}Style79{}Style85{}Editor{}Style72{}Style73{}Style70{AlignHorz:" +
				"Near;}Style71{AlignHorz:Near;}Style76{AlignHorz:Near;}Style77{AlignHorz:Near;}St" +
				"yle74{}Style75{}Style84{}Style87{}Style86{}Style81{}Style80{}Style83{AlignHorz:N" +
				"ear;}Style82{AlignHorz:Near;}FilterBar{}Heading{Wrap:True;BackColor:Control;Bord" +
				"er:Raised,,1, 1, 1, 1;ForeColor:ControlText;AlignVert:Center;}Style18{}Style19{A" +
				"lignHorz:Far;}Style14{}Style15{}Style16{AlignHorz:Near;}Style17{AlignHorz:Near;}" +
				"Style10{AlignHorz:Near;}Style11{}Style12{}Style13{}Selected{ForeColor:HighlightT" +
				"ext;BackColor:Highlight;}Style29{AlignHorz:Near;}Style28{AlignHorz:Near;}Style27" +
				"{}Style25{}Style22{AlignHorz:Near;}Style9{}Style8{}Style24{}Style26{}Style5{}Sty" +
				"le4{}Style7{}Style6{}Style1{}Style23{AlignHorz:Near;}Style3{}Style2{}Style21{}St" +
				"yle20{}OddRow{}Style38{}Style39{}Style36{}Style37{}Style34{AlignHorz:Near;}Style" +
				"35{AlignHorz:Near;}Style32{}Style33{}Style30{}Style49{}Style48{}Style31{}Normal{" +
				"}Style41{AlignHorz:Near;}Style40{AlignHorz:Near;}Style43{AlignHorz:Far;}Style42{" +
				"}Style45{}Style44{}Style47{AlignHorz:Near;}Style46{AlignHorz:Near;}EvenRow{BackC" +
				"olor:Aqua;}Style59{AlignHorz:Near;}Style58{AlignHorz:Near;}RecordSelector{AlignI" +
				"mage:Center;}Style51{}Style50{}Footer{}Style52{AlignHorz:Near;}Style53{AlignHorz" +
				":Near;}Style54{}Style55{}Style56{}Style57{}Caption{AlignHorz:Center;}Style69{}St" +
				"yle68{}Style63{}Style62{}Style61{}Style60{}Style67{}Style66{}Style65{AlignHorz:N" +
				"ear;}Style64{AlignHorz:Near;}Group{AlignVert:Center;Border:None,,0, 0, 0, 0;Back" +
				"Color:ControlDark;}</Data></Styles><Splits><C1.Win.C1TrueDBGrid.MergeView Name=\"" +
				"\" CaptionHeight=\"17\" ColumnCaptionHeight=\"17\" ColumnFooterHeight=\"17\" MarqueeSty" +
				"le=\"DottedCellBorder\" RecordSelectorWidth=\"16\" DefRecSelWidth=\"16\" VerticalScrol" +
				"lGroup=\"1\" HorizontalScrollGroup=\"1\"><ClientRect>0, 0, 712, 312</ClientRect><Bor" +
				"derSide>0</BorderSide><CaptionStyle parent=\"Style2\" me=\"Style10\" /><EditorStyle " +
				"parent=\"Editor\" me=\"Style5\" /><EvenRowStyle parent=\"EvenRow\" me=\"Style8\" /><Filt" +
				"erBarStyle parent=\"FilterBar\" me=\"Style13\" /><FooterStyle parent=\"Footer\" me=\"St" +
				"yle3\" /><GroupStyle parent=\"Group\" me=\"Style12\" /><HeadingStyle parent=\"Heading\"" +
				" me=\"Style2\" /><HighLightRowStyle parent=\"HighlightRow\" me=\"Style7\" /><InactiveS" +
				"tyle parent=\"Inactive\" me=\"Style4\" /><OddRowStyle parent=\"OddRow\" me=\"Style9\" />" +
				"<RecordSelectorStyle parent=\"RecordSelector\" me=\"Style11\" /><SelectedStyle paren" +
				"t=\"Selected\" me=\"Style6\" /><Style parent=\"Normal\" me=\"Style1\" /><internalCols><C" +
				"1DisplayColumn><HeadingStyle parent=\"Style2\" me=\"Style16\" /><Style parent=\"Style" +
				"1\" me=\"Style17\" /><FooterStyle parent=\"Style3\" me=\"Style18\" /><EditorStyle paren" +
				"t=\"Style5\" me=\"Style19\" /><GroupHeaderStyle parent=\"Style1\" me=\"Style21\" /><Grou" +
				"pFooterStyle parent=\"Style1\" me=\"Style20\" /><Visible>True</Visible><ColumnDivide" +
				"r>DarkGray,Single</ColumnDivider><Width>112</Width><Height>15</Height><DCIdx>0</" +
				"DCIdx></C1DisplayColumn><C1DisplayColumn><HeadingStyle parent=\"Style2\" me=\"Style" +
				"22\" /><Style parent=\"Style1\" me=\"Style23\" /><FooterStyle parent=\"Style3\" me=\"Sty" +
				"le24\" /><EditorStyle parent=\"Style5\" me=\"Style25\" /><GroupHeaderStyle parent=\"St" +
				"yle1\" me=\"Style27\" /><GroupFooterStyle parent=\"Style1\" me=\"Style26\" /><Visible>T" +
				"rue</Visible><ColumnDivider>DarkGray,Single</ColumnDivider><Width>54</Width><Hei" +
				"ght>15</Height><DCIdx>1</DCIdx></C1DisplayColumn><C1DisplayColumn><HeadingStyle " +
				"parent=\"Style2\" me=\"Style82\" /><Style parent=\"Style1\" me=\"Style83\" /><FooterStyl" +
				"e parent=\"Style3\" me=\"Style84\" /><EditorStyle parent=\"Style5\" me=\"Style85\" /><Gr" +
				"oupHeaderStyle parent=\"Style1\" me=\"Style87\" /><GroupFooterStyle parent=\"Style1\" " +
				"me=\"Style86\" /><Visible>True</Visible><ColumnDivider>DarkGray,Single</ColumnDivi" +
				"der><Width>93</Width><Height>15</Height><DCIdx>11</DCIdx></C1DisplayColumn><C1Di" +
				"splayColumn><HeadingStyle parent=\"Style2\" me=\"Style40\" /><Style parent=\"Style1\" " +
				"me=\"Style41\" /><FooterStyle parent=\"Style3\" me=\"Style42\" /><EditorStyle parent=\"" +
				"Style5\" me=\"Style43\" /><GroupHeaderStyle parent=\"Style1\" me=\"Style45\" /><GroupFo" +
				"oterStyle parent=\"Style1\" me=\"Style44\" /><Visible>True</Visible><ColumnDivider>D" +
				"arkGray,Single</ColumnDivider><Width>81</Width><Height>15</Height><DCIdx>3</DCId" +
				"x></C1DisplayColumn><C1DisplayColumn><HeadingStyle parent=\"Style2\" me=\"Style64\" " +
				"/><Style parent=\"Style1\" me=\"Style65\" /><FooterStyle parent=\"Style3\" me=\"Style66" +
				"\" /><EditorStyle parent=\"Style5\" me=\"Style67\" /><GroupHeaderStyle parent=\"Style1" +
				"\" me=\"Style69\" /><GroupFooterStyle parent=\"Style1\" me=\"Style68\" /><Visible>True<" +
				"/Visible><ColumnDivider>DarkGray,Single</ColumnDivider><Width>120</Width><Height" +
				">15</Height><DCIdx>7</DCIdx></C1DisplayColumn><C1DisplayColumn><HeadingStyle par" +
				"ent=\"Style2\" me=\"Style70\" /><Style parent=\"Style1\" me=\"Style71\" /><FooterStyle p" +
				"arent=\"Style3\" me=\"Style72\" /><EditorStyle parent=\"Style5\" me=\"Style73\" /><Group" +
				"HeaderStyle parent=\"Style1\" me=\"Style75\" /><GroupFooterStyle parent=\"Style1\" me=" +
				"\"Style74\" /><Visible>True</Visible><ColumnDivider>DarkGray,Single</ColumnDivider" +
				"><Width>149</Width><Height>15</Height><DCIdx>8</DCIdx></C1DisplayColumn><C1Displ" +
				"ayColumn><HeadingStyle parent=\"Style2\" me=\"Style76\" /><Style parent=\"Style1\" me=" +
				"\"Style77\" /><FooterStyle parent=\"Style3\" me=\"Style78\" /><EditorStyle parent=\"Sty" +
				"le5\" me=\"Style79\" /><GroupHeaderStyle parent=\"Style1\" me=\"Style81\" /><GroupFoote" +
				"rStyle parent=\"Style1\" me=\"Style80\" /><Visible>True</Visible><ColumnDivider>Dark" +
				"Gray,Single</ColumnDivider><Width>70</Width><Height>15</Height><DCIdx>10</DCIdx>" +
				"</C1DisplayColumn><C1DisplayColumn><HeadingStyle parent=\"Style2\" me=\"Style46\" />" +
				"<Style parent=\"Style1\" me=\"Style47\" /><FooterStyle parent=\"Style3\" me=\"Style48\" " +
				"/><EditorStyle parent=\"Style5\" me=\"Style49\" /><GroupHeaderStyle parent=\"Style1\" " +
				"me=\"Style51\" /><GroupFooterStyle parent=\"Style1\" me=\"Style50\" /><Visible>True</V" +
				"isible><ColumnDivider>DarkGray,Single</ColumnDivider><Width>49</Width><Height>15" +
				"</Height><DCIdx>4</DCIdx></C1DisplayColumn><C1DisplayColumn><HeadingStyle parent" +
				"=\"Style2\" me=\"Style28\" /><Style parent=\"Style1\" me=\"Style29\" /><FooterStyle pare" +
				"nt=\"Style3\" me=\"Style30\" /><EditorStyle parent=\"Style5\" me=\"Style31\" /><GroupHea" +
				"derStyle parent=\"Style1\" me=\"Style33\" /><GroupFooterStyle parent=\"Style1\" me=\"St" +
				"yle32\" /><Visible>True</Visible><ColumnDivider>DarkGray,Single</ColumnDivider><W" +
				"idth>87</Width><Height>15</Height><DCIdx>2</DCIdx></C1DisplayColumn><C1DisplayCo" +
				"lumn><HeadingStyle parent=\"Style2\" me=\"Style52\" /><Style parent=\"Style1\" me=\"Sty" +
				"le53\" /><FooterStyle parent=\"Style3\" me=\"Style54\" /><EditorStyle parent=\"Style5\"" +
				" me=\"Style55\" /><GroupHeaderStyle parent=\"Style1\" me=\"Style57\" /><GroupFooterSty" +
				"le parent=\"Style1\" me=\"Style56\" /><Visible>True</Visible><ColumnDivider>DarkGray" +
				",Single</ColumnDivider><Width>85</Width><Height>15</Height><DCIdx>5</DCIdx></C1D" +
				"isplayColumn><C1DisplayColumn><HeadingStyle parent=\"Style2\" me=\"Style58\" /><Styl" +
				"e parent=\"Style1\" me=\"Style59\" /><FooterStyle parent=\"Style3\" me=\"Style60\" /><Ed" +
				"itorStyle parent=\"Style5\" me=\"Style61\" /><GroupHeaderStyle parent=\"Style1\" me=\"S" +
				"tyle63\" /><GroupFooterStyle parent=\"Style1\" me=\"Style62\" /><Visible>True</Visibl" +
				"e><ColumnDivider>DarkGray,Single</ColumnDivider><Width>94</Width><Height>15</Hei" +
				"ght><DCIdx>6</DCIdx></C1DisplayColumn><C1DisplayColumn><HeadingStyle parent=\"Sty" +
				"le2\" me=\"Style34\" /><Style parent=\"Style1\" me=\"Style35\" /><FooterStyle parent=\"S" +
				"tyle3\" me=\"Style36\" /><EditorStyle parent=\"Style5\" me=\"Style37\" /><GroupHeaderSt" +
				"yle parent=\"Style1\" me=\"Style39\" /><GroupFooterStyle parent=\"Style1\" me=\"Style38" +
				"\" /><Visible>True</Visible><ColumnDivider>DarkGray,Single</ColumnDivider><Width>" +
				"51</Width><Height>15</Height><DCIdx>9</DCIdx></C1DisplayColumn></internalCols></" +
				"C1.Win.C1TrueDBGrid.MergeView></Splits><NamedStyles><Style parent=\"\" me=\"Normal\"" +
				" /><Style parent=\"Normal\" me=\"Heading\" /><Style parent=\"Heading\" me=\"Footer\" /><" +
				"Style parent=\"Heading\" me=\"Caption\" /><Style parent=\"Heading\" me=\"Inactive\" /><S" +
				"tyle parent=\"Normal\" me=\"Selected\" /><Style parent=\"Normal\" me=\"Editor\" /><Style" +
				" parent=\"Normal\" me=\"HighlightRow\" /><Style parent=\"Normal\" me=\"EvenRow\" /><Styl" +
				"e parent=\"Normal\" me=\"OddRow\" /><Style parent=\"Heading\" me=\"RecordSelector\" /><S" +
				"tyle parent=\"Normal\" me=\"FilterBar\" /><Style parent=\"Caption\" me=\"Group\" /></Nam" +
				"edStyles><vertSplits>1</vertSplits><horzSplits>1</horzSplits><Layout>Modified</L" +
				"ayout><DefaultRecSelWidth>16</DefaultRecSelWidth><ClientArea>0, 0, 712, 312</Cli" +
				"entArea><PrintPageHeaderStyle parent=\"\" me=\"Style14\" /><PrintPageFooterStyle par" +
				"ent=\"\" me=\"Style15\" /></Blob>";
			//
			// btnFindSaleOrder
			//
			this.btnFindSaleOrder.AccessibleDescription = "";
			this.btnFindSaleOrder.AccessibleName = "";
			this.btnFindSaleOrder.FlatStyle = System.Windows.Forms.FlatStyle.System;
			this.btnFindSaleOrder.ImeMode = System.Windows.Forms.ImeMode.NoControl;
			this.btnFindSaleOrder.Location = new System.Drawing.Point(232, 6);
			this.btnFindSaleOrder.Name = "btnFindSaleOrder";
			this.btnFindSaleOrder.Size = new System.Drawing.Size(24, 20);
			this.btnFindSaleOrder.TabIndex = 4;
			this.btnFindSaleOrder.Text = "...";
			this.btnFindSaleOrder.Click += new System.EventHandler(this.btnFindSaleOrder_Click);
			//
			// lblLable1
			//
			this.lblLable1.AccessibleDescription = "";
			this.lblLable1.AccessibleName = "";
			this.lblLable1.ImeMode = System.Windows.Forms.ImeMode.NoControl;
			this.lblLable1.Location = new System.Drawing.Point(10, 50);
			this.lblLable1.Name = "lblLable1";
			this.lblLable1.Size = new System.Drawing.Size(87, 20);
			this.lblLable1.TabIndex = 9;
			this.lblLable1.Text = "Customer Name";
			this.lblLable1.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
			//
			// btnSearch
			//
			this.btnSearch.AccessibleDescription = "";
			this.btnSearch.AccessibleName = "";
			this.btnSearch.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
			this.btnSearch.ImeMode = System.Windows.Forms.ImeMode.NoControl;
			this.btnSearch.Location = new System.Drawing.Point(646, 71);
			this.btnSearch.Name = "btnSearch";
			this.btnSearch.TabIndex = 12;
			this.btnSearch.Text = "&Search";
			this.btnSearch.Click += new System.EventHandler(this.btnSearch_Click);
			//
			// txtBuyingLoc
			//
			this.txtBuyingLoc.Location = new System.Drawing.Point(91, 74);
			this.txtBuyingLoc.Name = "txtBuyingLoc";
			this.txtBuyingLoc.ReadOnly = true;
			this.txtBuyingLoc.Size = new System.Drawing.Size(101, 20);
			this.txtBuyingLoc.TabIndex = 8;
			this.txtBuyingLoc.TabStop = false;
			this.txtBuyingLoc.Text = "";
			//
			// txtCustomer
			//
			this.txtCustomer.Location = new System.Drawing.Point(91, 28);
			this.txtCustomer.Name = "txtCustomer";
			this.txtCustomer.ReadOnly = true;
			this.txtCustomer.Size = new System.Drawing.Size(101, 20);
			this.txtCustomer.TabIndex = 6;
			this.txtCustomer.TabStop = false;
			this.txtCustomer.Text = "";
			//
			// txtCustomerName
			//
			this.txtCustomerName.Location = new System.Drawing.Point(91, 51);
			this.txtCustomerName.Name = "txtCustomerName";
			this.txtCustomerName.ReadOnly = true;
			this.txtCustomerName.Size = new System.Drawing.Size(252, 20);
			this.txtCustomerName.TabIndex = 10;
			this.txtCustomerName.TabStop = false;
			this.txtCustomerName.Text = "";
			//
			// SOCancelCommitment
			//
			this.AccessibleDescription = "";
			this.AccessibleName = "";
			this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
			this.ClientSize = new System.Drawing.Size(726, 451);
			this.Controls.Add(this.txtCustomerName);
			this.Controls.Add(this.txtCustomer);
			this.Controls.Add(this.txtBuyingLoc);
			this.Controls.Add(this.txtSaleOrderCode);
			this.Controls.Add(this.dgrdData);
			this.Controls.Add(this.btnSearch);
			this.Controls.Add(this.btnFindSaleOrder);
			this.Controls.Add(this.chkSelectAll);
			this.Controls.Add(this.lblLable7);
			this.Controls.Add(this.lblLable1);
			this.Controls.Add(this.btnClose);
			this.Controls.Add(this.btnCancelCommitment);
			this.Controls.Add(this.btnHelp);
			this.Controls.Add(this.lblLable11);
			this.Controls.Add(this.lblLable3);
			this.Controls.Add(this.lblLable12);
			this.Controls.Add(this.cboCCN);
			this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle;
			this.KeyPreview = true;
			this.Name = "SOCancelCommitment";
			this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
			this.Text = "Cancel Sales Order Commitment";
			this.KeyDown += new System.Windows.Forms.KeyEventHandler(this.SOCancelCommitment_KeyDown);
			this.Closing += new System.ComponentModel.CancelEventHandler(this.SOCancelCommitment_Closing);
			this.Load += new System.EventHandler(this.SOCancelCommitment_Load);
			((System.ComponentModel.ISupportInitialize)(this.cboCCN)).EndInit();
			((System.ComponentModel.ISupportInitialize)(this.dgrdData)).EndInit();
			this.ResumeLayout(false);
		}
		#endregion

		//**************************************************************************
		/// <Description>
		///    Get current column's headers on the grid
		/// </Description>
		/// <Inputs>
		/// </Inputs>
		/// <Outputs>
		/// </Outputs>
		/// <Returns>
		///    void
		/// </Returns>
		/// <Authors>
		///    TuanDm
		/// </Authors>
		/// <History>
		///    Wednesday, March 02, 2005
		/// </History>
		/// <Notes>
		/// </Notes>
		//**************************************************************************
		private void GetCaptionOnGrid()
		{
			// Cache each grid column's caption into a field for later use.
			CaptionLine = dgrdData.Splits[0].DisplayColumns[SO_CommitInventoryDetailTable.LINE_FLD].DataColumn.Caption;
			CaptionDelivery = dgrdData.Splits[0].DisplayColumns[DELIVERY].DataColumn.Caption;
			CaptionMasterLocation = dgrdData.Splits[0].DisplayColumns[MASTERLOCATION].DataColumn.Caption;
			CaptionCommitQuantity = dgrdData.Splits[0].DisplayColumns[SO_CommitInventoryDetailTable.COMMITQUANTITY_FLD].DataColumn.Caption;
			CaptionUnitOfMeasure = dgrdData.Splits[0].DisplayColumns[UNITOFMEASURE].DataColumn.Caption;
			CaptionLocation = dgrdData.Splits[0].DisplayColumns[LOCATION].DataColumn.Caption;
			CaptionBin = dgrdData.Splits[0].DisplayColumns[BIN].DataColumn.Caption;
			CaptionProductCode = dgrdData.Splits[0].DisplayColumns[PRODUCTCODE].DataColumn.Caption;
			CaptionProductDes = dgrdData.Splits[0].DisplayColumns[PRODUCTDES].DataColumn.Caption;
			CaptionCancel = dgrdData.Splits[0].DisplayColumns[CANCEL].DataColumn.Caption;
			// Center-align every column heading.
			for(int i =0; i<dgrdData.Splits[0].DisplayColumns.Count; i++)
			{
				dgrdData.Splits[0].DisplayColumns[i].HeadingStyle.HorizontalAlignment = AlignHorzEnum.Center;
			}
		}

		//**************************************************************************
		/// <Description>
		///    Reset all control and set focus into txtSaleOrderCode
		/// </Description>
		/// <Inputs>
		/// </Inputs>
		/// <Outputs>
		/// </Outputs>
		/// <Returns>
		///    void
		/// </Returns>
		/// <Authors>
		///    TuanDm
		/// </Authors>
		/// <History>
		///    Wednesday, March 02, 2005
		/// </History>
		/// <Notes>
		/// </Notes>
		//**************************************************************************
		private void ResetForm()
		{
			// Clear the search fields, uncheck "Select All", empty the grid and
			// return focus to the sale-order code box.
			txtSaleOrderCode.Text = string.Empty;
			txtCustomer.Text = string.Empty;
			txtCustomerName.Text = string.Empty;
			txtBuyingLoc.Text = string.Empty;
			chkSelectAll.Checked = false;
			txtSaleOrderCode.Focus();
			dgrdData.Splits[0].Rows.Clear();
		}

		//**************************************************************************
		/// <Description>
		///    Load all data in to Combo CCN
		/// </Description>
		/// <Inputs>
		/// </Inputs>
		/// <Outputs>
		/// </Outputs>
		/// <Returns>
		///    void
		/// </Returns>
		/// <Authors>
		///    TuanDm
		/// </Authors>
		/// <History>
		///    Friday, February 25, 2005
		/// </History>
		/// <Notes>
		/// </Notes>
		//**************************************************************************
		private void LoadComboCCN()
		{
			try
			{
				UtilsBO boUtils = new UtilsBO();
				DataSet dstCCN = boUtils.ListCCN();
				cboCCN.DataSource = dstCCN.Tables[MST_CCNTable.TABLE_NAME];
				cboCCN.DisplayMember =
MST_CCNTable.CODE_FLD;
				cboCCN.ValueMember = MST_CCNTable.CCNID_FLD;
				FormControlComponents.PutDataIntoC1ComboBox(cboCCN,dstCCN.Tables[MST_CCNTable.TABLE_NAME],MST_CCNTable.CODE_FLD,MST_CCNTable.CCNID_FLD,MST_CCNTable.TABLE_NAME);
				if (SystemProperty.CCNID != 0)
				{
					// NOTE(review): always selects the first row regardless of the
					// value of SystemProperty.CCNID — confirm this is intended.
					cboCCN.SelectedIndex = 0;
				}
			}
			// Rethrow with "throw;" to preserve the original stack trace (CA2200);
			// the previous "throw ex;" reset it.
			catch (PCSException)
			{
				throw;
			}
			catch (Exception)
			{
				throw;
			}
		}

		//**************************************************************************
		/// <Description>
		///    Fetch the cancelable commitment rows of a sale order and bind them
		///    to the grid, adding an editable "Cancel" checkbox column.
		/// </Description>
		/// <Inputs>
		///    pintSOID is ID of SaleOrder
		/// </Inputs>
		/// <Outputs>
		/// </Outputs>
		/// <Returns>
		///    void
		/// </Returns>
		/// <Authors>
		///    TuanDm
		/// </Authors>
		/// <History>
		///    Friday, February 25, 2005
		/// </History>
		/// <Notes>
		/// </Notes>
		//**************************************************************************
		private void BindDataToGrid(int pintSOID)
		{
			try
			{
				chkSelectAll.Checked = false;
				// Load the cancelable rows and add the boolean "Cancel" column,
				// initialized to false for every row.
				dstCancelCommit = boCancelCommitment.ListCancelable(pintSOID);
				dstCancelCommit.Tables[SO_CommitInventoryDetailTable.TABLE_NAME].Columns.Add(CANCEL, typeof(bool));
				if (dstCancelCommit.Tables[0].Rows.Count != 0)
				{
					foreach (DataRow drow in dstCancelCommit.Tables[SO_CommitInventoryDetailTable.TABLE_NAME].Rows)
					{
						drow[CANCEL] = false;
					}
				}
				dgrdData.DataSource = dstCancelCommit.Tables[SO_CommitInventoryDetailTable.TABLE_NAME];
				FormControlComponents.RestoreGridLayout(dgrdData, dtbGridLayout);
				// Apply display formats, then lock every column except "Cancel".
				dgrdData.Splits[0].DisplayColumns[SO_CommitInventoryDetailTable.COMMITQUANTITY_FLD].DataColumn.NumberFormat = Constants.DECIMAL_NUMBERFORMAT;
				dgrdData.Splits[0].DisplayColumns[SO_DeliveryScheduleTable.SCHEDULEDATE_FLD].DataColumn.NumberFormat = Constants.DATETIME_FORMAT_HOUR;
				for (int i = 0; i < dgrdData.Splits[0].DisplayColumns.Count; i++)
				{
					if (dgrdData.Splits[0].DisplayColumns[i].DataColumn.DataField != CANCEL)
					{
						dgrdData.Splits[0].DisplayColumns[i].Locked = true;
					}
				}
				// Enable the action button / row deletion only when rows exist.
				btnCancelCommitment.Enabled = dgrdData.RowCount > 0;
				dgrdData.AllowDelete = dgrdData.RowCount > 0;
			}
			// Preserve the stack trace on rethrow (was "throw ex;", CA2200).
			catch (PCSException)
			{
				throw;
			}
			catch (Exception)
			{
				throw;
			}
		}

		//**************************************************************************
		/// <Description>
		///    Verify that at least one grid row is checked for cancellation.
		/// </Description>
		/// <Inputs>
		/// </Inputs>
		/// <Outputs>
		///    Throw PCSExecption if have no item is check
		/// </Outputs>
		/// <Returns>
		///    void
		/// </Returns>
		/// <Authors>
		///    TuanDm
		/// </Authors>
		/// <History>
		///    Thursday, March 3, 2005
		/// </History>
		/// <Notes>
		/// </Notes>
		//**************************************************************************
		private void ValidateData()
		{
			const string METHOD_NAME = THIS + ".ValidateData()";
			foreach (DataRow drow in dstCancelCommit.Tables[SO_CommitInventoryDetailTable.TABLE_NAME].Rows)
			{
				// A non-deleted row whose "Cancel" cell reads "True" satisfies
				// the validation; no counting is needed (the old intCountCheck
				// counter was dead code — it returned on the first hit anyway).
				if (drow.RowState != DataRowState.Deleted
					&& drow[CANCEL].ToString().Trim() == TRUE)
				{
					return;
				}
			}
			throw new PCSException(ErrorCode.MESSAGE_CANCELCOMMIT_ATLISTITEMCHECK, METHOD_NAME, null);
		}

		//**************************************************************************
		/// <Description>
		///    Load form event
		///    Reset and init data
		/// </Description>
		/// <Inputs>
		/// </Inputs>
		/// <Outputs>
		/// </Outputs>
		/// <Returns>
		///    void
		/// </Returns>
		/// <Authors>
		///    TuanDm
		/// </Authors>
		/// <History>
		///    Thursday, March 3, 2005
		/// </History>
		/// <Notes>
		/// </Notes>
		//**************************************************************************
		private void SOCancelCommitment_Load(object sender, System.EventArgs e)
		{
			const string METHOD_NAME = THIS + ".SOCancelCommitment_Load()";
			try
			{
				// Apply per-user permissions; close the form when the user has
				// no rights on it.
				Security objSecurity = new Security();
				this.Name = THIS;
				if (objSecurity.SetRightForUserOnForm(this, SystemProperty.UserName) == 0)
				{
					this.Close();
					return;
				}
				btnCancelCommitment.Enabled = false;
				ResetForm();
				LoadComboCCN();
				// Remember the pristine grid layout so BindDataToGrid can restore it.
				dtbGridLayout = FormControlComponents.StoreGridLayout(dgrdData);
			}
			catch (PCSException ex)
			{
PCSMessageBox.Show(ex.mCode, MessageBoxIcon.Error); try { Logger.LogMessage(ex.CauseException, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxIcon.Error); } } catch (Exception ex) { PCSMessageBox.Show(ErrorCode.OTHER_ERROR, MessageBoxIcon.Error); try { Logger.LogMessage(ex, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxIcon.Error); } } } //************************************************************************** /// <Description> /// Get custumer's inf and Buying location's inf by CustomerID and LocationID /// </Description> /// <Inputs> /// </Inputs> /// <Outputs> /// </Outputs> /// <Returns> /// void /// </Returns> /// <Authors> /// TuanDm /// </Authors> /// <History> /// Thursday, March 3, 2005 /// </History> /// <Notes> /// </Notes> //************************************************************************** private void GetInfors(int pintPartyID, int pintLocationID) { var voCustomer = Utilities.Instance.GetCustomerInfo(pintPartyID); txtCustomer.Text = voCustomer.Code; txtCustomerName.Text = voCustomer.Name; txtBuyingLoc.Text = new SOCancelCommitmentBO().GetBuyingLocName(pintLocationID); } //************************************************************************** /// <Description> /// Find a SaleOrder /// fill data if search success /// </Description> /// <Inputs> /// </Inputs> /// <Outputs> /// </Outputs> /// <Returns> /// void /// </Returns> /// <Authors> /// TuanDm /// </Authors> /// <History> /// Thursday, March 3, 2005 /// </History> /// <Notes> /// </Notes> //************************************************************************** private void btnFindSaleOrder_Click(object sender, System.EventArgs e) { const string METHOD_NAME = THIS + ".btnFindSaleOrder_Click()"; try { DataRowView drwResult = null; Hashtable htbCriteria = new Hashtable(); if (cboCCN.SelectedValue != null) { htbCriteria.Add(SO_SaleOrderMasterTable.CCNID_FLD, cboCCN.SelectedValue.ToString()); } 
else { PCSMessageBox.Show(ErrorCode.MESSAGE_RGA_CCN, MessageBoxIcon.Warning); cboCCN.Focus(); return; } drwResult = FormControlComponents.OpenSearchForm(v_SOCancelCommitment.VIEW_NAME , SO_SaleOrderMasterTable.CODE_FLD, txtSaleOrderCode.Text, htbCriteria, true); if (drwResult != null) { txtSaleOrderCode.Text = drwResult[SO_SaleOrderMasterTable.CODE_FLD].ToString(); GetInfors(int.Parse(drwResult[SO_SaleOrderMasterTable.PARTYID_FLD].ToString()), int.Parse(drwResult[SO_SaleOrderMasterTable.BUYINGLOCID_FLD].ToString())); voSOMaster.SaleOrderMasterID = int.Parse(drwResult[SO_SaleOrderMasterTable.SALEORDERMASTERID_FLD].ToString()); } else { txtSaleOrderCode.Focus(); } } catch (PCSException ex) { PCSMessageBox.Show(ex.mCode, MessageBoxIcon.Error); try { Logger.LogMessage(ex.CauseException, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxIcon.Error); } } catch (Exception ex) { PCSMessageBox.Show(ErrorCode.OTHER_ERROR, MessageBoxIcon.Error); try { Logger.LogMessage(ex, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxIcon.Error); } } } //************************************************************************** /// <Description> /// Check all checkbox on grid and then autocheck chkCheckAll /// </Description> /// <Inputs> /// </Inputs> /// <Outputs> /// </Outputs> /// <Returns> /// void /// </Returns> /// <Authors> /// TuanDm /// </Authors> /// <History> /// Wednesday, March 02, 2005 /// </History> /// <Notes> /// </Notes> //************************************************************************** private void CheckOrNochkCheckAll() { for (int i =0; i <dgrdData.RowCount; i++) { if (dgrdData[i, CANCEL].ToString().Trim() != TRUE) { chkSelectAll.Checked = false; return; } } chkSelectAll.Checked = true; } //************************************************************************** /// <Description> /// Check all on uncheck all item on grid /// </Description> /// <Inputs> /// </Inputs> /// 
<Outputs> /// </Outputs> /// <Returns> /// void /// </Returns> /// <Authors> /// TuanDm /// </Authors> /// <History> /// Thursday, March 3, 2005 /// </History> /// <Notes> /// </Notes> //************************************************************************** private void chkSelectAll_CheckedChanged(object sender, System.EventArgs e) { if (blnStateOfCheck) { dgrdData.UpdateData(); if (dstCancelCommit.Tables.Count == 0) return; if (chkSelectAll.Checked) { foreach (DataRow drow in dstCancelCommit.Tables[SO_CommitInventoryDetailTable.TABLE_NAME].Rows) { if (drow.RowState != DataRowState.Deleted) drow[CANCEL] = true; } } else { foreach (DataRow drow in dstCancelCommit.Tables[SO_CommitInventoryDetailTable.TABLE_NAME].Rows) { if (drow.RowState != DataRowState.Deleted) drow[CANCEL] = false; } } } } private void dgrdData_AfterColEdit(object sender, C1.Win.C1TrueDBGrid.ColEventArgs e) { if (e.Column.DataColumn.DataField == CANCEL) { CheckOrNochkCheckAll(); } } private void chkSelectAll_Enter(object sender, System.EventArgs e) { blnStateOfCheck = true; } private void chkSelectAll_Leave(object sender, System.EventArgs e) { blnStateOfCheck = false; } //************************************************************************** /// <Description> /// Check data and call UpdateSOCancelCommit of BO class /// </Description> /// <Inputs> /// </Inputs> /// <Outputs> /// </Outputs> /// <Returns> /// void /// </Returns> /// <Authors> /// TuanDm /// </Authors> /// <History> /// Thursday, March 3, 2005 /// </History> /// <Notes> /// </Notes> //************************************************************************** private void btnCancelCommitment_Click(object sender, System.EventArgs e) { const string METHOD_NAME = THIS + ". 
btnCancelCommitment_Click()"; blnHasError = true; try { dgrdData.UpdateData(); ValidateData(); if (PCSMessageBox.Show(ErrorCode.MESSAGE_CANCELCOMIIT_AREYOURSURE, MessageBoxButtons.YesNo, MessageBoxIcon.Question) == DialogResult.Yes) { boCancelCommitment.CancelCommitment(dstCancelCommit.Tables[0],int.Parse(cboCCN.SelectedValue.ToString())); blnHasError = false; PCSMessageBox.Show(ErrorCode.MESSAGE_AFTER_SAVE_DATA); BindDataToGrid(voSOMaster.SaleOrderMasterID); } } catch (PCSException ex) { PCSMessageBox.Show(ex.mCode, MessageBoxIcon.Error); if (ex.CauseException != null) { try { Logger.LogMessage(ex.CauseException, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxIcon.Error); } } } catch (Exception ex) { PCSMessageBox.Show(ErrorCode.OTHER_ERROR, MessageBoxIcon.Error); try { Logger.LogMessage(ex, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxIcon.Error); } } } //************************************************************************** /// <Description> /// Close form event /// </Description> /// <Inputs> /// </Inputs> /// <Outputs> /// </Outputs> /// <Returns> /// void /// </Returns> /// <Authors> /// TuanDm /// </Authors> /// <History> /// Friday, March 4, 2005 /// </History> /// <Notes> /// </Notes> //************************************************************************** private void btnClose_Click(object sender, System.EventArgs e) { this.Close(); } //************************************************************************** /// <Description> /// Check to throw question if user check but not save before close /// </Description> /// <Inputs> /// </Inputs> /// <Outputs> /// </Outputs> /// <Returns> /// true : if user has changed on form /// false: if else /// </Returns> /// <Authors> /// TuanDm /// </Authors> /// <History> /// Friday, March 4, 2005 /// </History> /// <Notes> /// </Notes> //************************************************************************** private bool 
CheckBeforeExit() { if (enumAction != EnumAction.Default) { foreach (DataRow drow in dstCancelCommit.Tables[SO_CommitInventoryDetailTable.TABLE_NAME].Rows) { if (drow.RowState != DataRowState.Deleted) { if (drow[CANCEL].ToString() == TRUE) { return true; } } } return false; } return false; } //************************************************************************** /// <Description> /// Close form /// check and throw question for user /// </Description> /// <Inputs> /// </Inputs> /// <Outputs> /// </Outputs> /// <Returns> /// void /// </Returns> /// <Authors> /// TuanDm /// </Authors> /// <History> /// Friday, March 4, 2005 /// </History> /// <Notes> /// </Notes> //************************************************************************** private void SOCancelCommitment_Closing(object sender, System.ComponentModel.CancelEventArgs e) { if (CheckBeforeExit()) { DialogResult confirmDialog = PCSMessageBox.Show(ErrorCode.MESSAGE_QUESTION_STORE_INTO_DATABASE, MessageBoxButtons.YesNoCancel, MessageBoxIcon.Question); switch (confirmDialog) { case DialogResult.Yes: //Save before exit btnCancelCommitment_Click( btnCancelCommitment, new EventArgs()); if (blnHasError) { e.Cancel = true; } break; case DialogResult.No: break; case DialogResult.Cancel: e.Cancel = true; break; } } } private void txtSaleOrderCode_KeyDown(object sender, System.Windows.Forms.KeyEventArgs e) { if (e.KeyCode == Keys.F4) { btnFindSaleOrder_Click(sender, new EventArgs()); } } //************************************************************************** /// <Description> /// Change the backgroud and open the search form if need /// </Description> /// <Inputs> /// </Inputs> /// <Outputs> /// </Outputs> /// <Returns> /// void /// </Returns> /// <Authors> /// TuanDm /// </Authors> /// <History> /// Tuesday, Mar 21, 2005 /// </History> /// <Notes> /// </Notes> //************************************************************************** private void txtSaleOrderCode_Leave(object sender, System.EventArgs e) { 
string METHOD_NAME = THIS + ".txtSaleOrderCode_Leave()"; OnLeaveControl(sender, e); if (!txtSaleOrderCode.Modified) return; if (txtSaleOrderCode.Text == string.Empty) { return; } try { DataRowView drwResult = null; Hashtable htbCriteria = new Hashtable(); if (cboCCN.SelectedValue != null) { htbCriteria.Add(SO_SaleOrderMasterTable.CCNID_FLD, cboCCN.SelectedValue.ToString()); } else { PCSMessageBox.Show(ErrorCode.MESSAGE_RGA_CCN, MessageBoxIcon.Warning); cboCCN.Focus(); return; } drwResult = FormControlComponents.OpenSearchForm(v_SOCancelCommitment.VIEW_NAME , SO_SaleOrderMasterTable.CODE_FLD, txtSaleOrderCode.Text, htbCriteria, false); if (drwResult != null) { txtSaleOrderCode.Text = drwResult[SO_SaleOrderMasterTable.CODE_FLD].ToString(); GetInfors(int.Parse(drwResult[SO_SaleOrderMasterTable.PARTYID_FLD].ToString()), int.Parse(drwResult[SO_SaleOrderMasterTable.BUYINGLOCID_FLD].ToString())); voSOMaster.SaleOrderMasterID = int.Parse(drwResult[SO_SaleOrderMasterTable.SALEORDERMASTERID_FLD].ToString()); } else { txtSaleOrderCode.Focus(); } } catch(PCSException ex) { PCSMessageBox.Show(ex.mCode, MessageBoxIcon.Error); try { Logger.LogMessage(ex.CauseException, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxIcon.Error); } } catch(Exception ex) { PCSMessageBox.Show(ErrorCode.OTHER_ERROR, MessageBoxIcon.Error); try { Logger.LogMessage(ex, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxIcon.Error); } } } #region Change background when focus private void OnEnterControl(object sender, System.EventArgs e) { const string METHOD_NAME = THIS + ". OnEnterControl()"; try { FormControlComponents.OnEnterControl(sender, e); } catch (Exception ex) { // displays the error message. PCSMessageBox.Show(ErrorCode.OTHER_ERROR, MessageBoxButtons.OK, MessageBoxIcon.Error); // log message. 
try { Logger.LogMessage(ex, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxButtons.OK, MessageBoxIcon.Error); } } } private void OnLeaveControl(object sender, System.EventArgs e) { const string METHOD_NAME = THIS + ". OnLeaveControl()"; try { FormControlComponents.OnLeaveControl(sender, e); } catch (Exception ex) { // displays the error message. PCSMessageBox.Show(ErrorCode.OTHER_ERROR, MessageBoxButtons.OK, MessageBoxIcon.Error); // log message. try { Logger.LogMessage(ex, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxButtons.OK, MessageBoxIcon.Error); } } } #endregion private void SOCancelCommitment_KeyDown(object sender, System.Windows.Forms.KeyEventArgs e) { if (e.KeyCode == Keys.Escape) { this.Close(); } } //************************************************************************** /// <Description> /// List all cancelable /// </Description> /// <Inputs> /// </Inputs> /// <Outputs> /// </Outputs> /// <Returns> /// void /// </Returns> /// <Authors> /// TuanDm /// </Authors> /// <History> /// Wednesday, Apr 27, 2005 /// </History> /// <Notes> /// </Notes> //************************************************************************** private void btnSearch_Click(object sender, System.EventArgs e) { const string METHOD_NAME = THIS + ".btnSearch_Click()"; try { if (FormControlComponents.CheckMandatory(txtSaleOrderCode)) { PCSMessageBox.Show(ErrorCode.MANDATORY_INVALID); BindDataToGrid(0); txtSaleOrderCode.Focus(); txtSaleOrderCode.Select(); return; } BindDataToGrid(voSOMaster.SaleOrderMasterID); enumAction = EnumAction.Edit; } catch (PCSException ex) { PCSMessageBox.Show(ex.mCode, MessageBoxIcon.Error); try { Logger.LogMessage(ex.CauseException, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxIcon.Error); } } catch (Exception ex) { PCSMessageBox.Show(ErrorCode.OTHER_ERROR, MessageBoxIcon.Error); try { Logger.LogMessage(ex, METHOD_NAME, 
Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxIcon.Error); } } } private void dgrdData_KeyDown(object sender, System.Windows.Forms.KeyEventArgs e) { const string METHOD_NAME = THIS + ".dgrdData_KeyDown()"; try { if (e.KeyCode == Keys.Delete) { FormControlComponents.DeleteMultiRowsOnTrueDBGrid(dgrdData); } } catch (Exception ex) { // displays the error message. PCSMessageBox.Show(ErrorCode.OTHER_ERROR, MessageBoxButtons.OK, MessageBoxIcon.Error); // log message. try { Logger.LogMessage(ex, METHOD_NAME, Level.ERROR); } catch { PCSMessageBox.Show(ErrorCode.LOG_EXCEPTION, MessageBoxButtons.OK, MessageBoxIcon.Error); } } } } }
//---------------------------------------------------------------------------
//
// <copyright file="PointKeyFrameCollection.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved.
// </copyright>
//
// This file was generated, please do not edit it directly.
//
// Please see http://wiki/default.aspx/Microsoft.Projects.Avalon/MilCodeGen.html for more information.
//
//---------------------------------------------------------------------------

using MS.Internal;

using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.Windows.Media.Animation;
using System.Windows.Media.Media3D;

namespace System.Windows.Media.Animation
{
    /// <summary>
    /// This collection is used in conjunction with a KeyFramePointAnimation
    /// to animate a Point property value along a set of key frames.
    /// </summary>
    public class PointKeyFrameCollection : Freezable, IList
    {
        #region Data

        // Backing store for the key frames; never null after construction.
        private List<PointKeyFrame> _keyFrames;

        // Lazily-created, frozen singleton returned by the Empty property.
        private static PointKeyFrameCollection s_emptyCollection;

        #endregion

        #region Constructors

        /// <Summary>
        /// Creates a new PointKeyFrameCollection.
        /// </Summary>
        public PointKeyFrameCollection()
            : base()
        {
            _keyFrames = new List<PointKeyFrame>(2);
        }

        #endregion

        #region Static Methods

        /// <summary>
        /// An empty PointKeyFrameCollection.
        /// </summary>
        public static PointKeyFrameCollection Empty
        {
            get
            {
                if (s_emptyCollection == null)
                {
                    // Build and freeze a local instance first, then publish it,
                    // so racing readers never observe a mutable singleton.
                    PointKeyFrameCollection emptyCollection = new PointKeyFrameCollection();
                    emptyCollection._keyFrames = new List<PointKeyFrame>(0);
                    emptyCollection.Freeze();
                    s_emptyCollection = emptyCollection;
                }

                return s_emptyCollection;
            }
        }

        #endregion

        #region Freezable

        /// <summary>
        /// Creates a freezable copy of this PointKeyFrameCollection.
        /// </summary>
        /// <returns>The copy</returns>
        public new PointKeyFrameCollection Clone()
        {
            return (PointKeyFrameCollection)base.Clone();
        }

        /// <summary>
        /// Implementation of <see cref="System.Windows.Freezable.CreateInstanceCore">Freezable.CreateInstanceCore</see>.
        /// </summary>
        /// <returns>The new Freezable.</returns>
        protected override Freezable CreateInstanceCore()
        {
            return new PointKeyFrameCollection();
        }

        /// <summary>
        /// Implementation of <see cref="System.Windows.Freezable.CloneCore(System.Windows.Freezable)">Freezable.CloneCore</see>.
        /// </summary>
        protected override void CloneCore(Freezable sourceFreezable)
        {
            PointKeyFrameCollection sourceCollection = (PointKeyFrameCollection)sourceFreezable;
            base.CloneCore(sourceFreezable);

            int count = sourceCollection._keyFrames.Count;

            _keyFrames = new List<PointKeyFrame>(count);

            for (int i = 0; i < count; i++)
            {
                // Deep-copy each key frame and hook it into this Freezable's
                // property-change tracking.
                PointKeyFrame keyFrame = (PointKeyFrame)sourceCollection._keyFrames[i].Clone();
                _keyFrames.Add(keyFrame);
                OnFreezablePropertyChanged(null, keyFrame);
            }
        }

        /// <summary>
        /// Implementation of <see cref="System.Windows.Freezable.CloneCurrentValueCore(System.Windows.Freezable)">Freezable.CloneCurrentValueCore</see>.
        /// </summary>
        protected override void CloneCurrentValueCore(Freezable sourceFreezable)
        {
            PointKeyFrameCollection sourceCollection = (PointKeyFrameCollection)sourceFreezable;
            base.CloneCurrentValueCore(sourceFreezable);

            int count = sourceCollection._keyFrames.Count;

            _keyFrames = new List<PointKeyFrame>(count);

            for (int i = 0; i < count; i++)
            {
                PointKeyFrame keyFrame = (PointKeyFrame)sourceCollection._keyFrames[i].CloneCurrentValue();
                _keyFrames.Add(keyFrame);
                OnFreezablePropertyChanged(null, keyFrame);
            }
        }

        /// <summary>
        /// Implementation of <see cref="System.Windows.Freezable.GetAsFrozenCore(System.Windows.Freezable)">Freezable.GetAsFrozenCore</see>.
        /// </summary>
        protected override void GetAsFrozenCore(Freezable sourceFreezable)
        {
            PointKeyFrameCollection sourceCollection = (PointKeyFrameCollection)sourceFreezable;
            base.GetAsFrozenCore(sourceFreezable);

            int count = sourceCollection._keyFrames.Count;

            _keyFrames = new List<PointKeyFrame>(count);

            for (int i = 0; i < count; i++)
            {
                PointKeyFrame keyFrame = (PointKeyFrame)sourceCollection._keyFrames[i].GetAsFrozen();
                _keyFrames.Add(keyFrame);
                OnFreezablePropertyChanged(null, keyFrame);
            }
        }

        /// <summary>
        /// Implementation of <see cref="System.Windows.Freezable.GetCurrentValueAsFrozenCore(System.Windows.Freezable)">Freezable.GetCurrentValueAsFrozenCore</see>.
        /// </summary>
        protected override void GetCurrentValueAsFrozenCore(Freezable sourceFreezable)
        {
            PointKeyFrameCollection sourceCollection = (PointKeyFrameCollection)sourceFreezable;
            base.GetCurrentValueAsFrozenCore(sourceFreezable);

            int count = sourceCollection._keyFrames.Count;

            _keyFrames = new List<PointKeyFrame>(count);

            for (int i = 0; i < count; i++)
            {
                PointKeyFrame keyFrame = (PointKeyFrame)sourceCollection._keyFrames[i].GetCurrentValueAsFrozen();
                _keyFrames.Add(keyFrame);
                OnFreezablePropertyChanged(null, keyFrame);
            }
        }

        /// <summary>
        /// Returns whether this collection (and all of its key frames) can be,
        /// or has been, frozen.
        /// </summary>
        protected override bool FreezeCore(bool isChecking)
        {
            bool canFreeze = base.FreezeCore(isChecking);

            // Stop early once any key frame reports it cannot be frozen.
            for (int i = 0; i < _keyFrames.Count && canFreeze; i++)
            {
                canFreeze &= Freezable.Freeze(_keyFrames[i], isChecking);
            }

            return canFreeze;
        }

        #endregion

        #region IEnumerable

        /// <summary>
        /// Returns an enumerator of the PointKeyFrames in the collection.
        /// </summary>
        public IEnumerator GetEnumerator()
        {
            ReadPreamble();

            return _keyFrames.GetEnumerator();
        }

        #endregion

        #region ICollection

        /// <summary>
        /// Returns the number of PointKeyFrames in the collection.
        /// </summary>
        public int Count
        {
            get
            {
                ReadPreamble();

                return _keyFrames.Count;
            }
        }

        /// <summary>
        /// See <see cref="System.Collections.ICollection.IsSynchronized">ICollection.IsSynchronized</see>.
        /// </summary>
        public bool IsSynchronized
        {
            get
            {
                ReadPreamble();

                // A frozen collection is immutable and therefore trivially
                // synchronized; otherwise access is serialized by the Dispatcher.
                return (IsFrozen || Dispatcher != null);
            }
        }

        /// <summary>
        /// See <see cref="System.Collections.ICollection.SyncRoot">ICollection.SyncRoot</see>.
        /// </summary>
        public object SyncRoot
        {
            get
            {
                ReadPreamble();

                return ((ICollection)_keyFrames).SyncRoot;
            }
        }

        /// <summary>
        /// Copies all of the PointKeyFrames in the collection to an
        /// array.
        /// </summary>
        void ICollection.CopyTo(Array array, int index)
        {
            ReadPreamble();

            ((ICollection)_keyFrames).CopyTo(array, index);
        }

        /// <summary>
        /// Copies all of the PointKeyFrames in the collection to an
        /// array of PointKeyFrames.
        /// </summary>
        public void CopyTo(PointKeyFrame[] array, int index)
        {
            ReadPreamble();

            _keyFrames.CopyTo(array, index);
        }

        #endregion

        #region IList

        /// <summary>
        /// Adds a PointKeyFrame to the collection.
        /// </summary>
        int IList.Add(object keyFrame)
        {
            return Add((PointKeyFrame)keyFrame);
        }

        /// <summary>
        /// Adds a PointKeyFrame to the collection.
        /// </summary>
        public int Add(PointKeyFrame keyFrame)
        {
            if (keyFrame == null)
            {
                throw new ArgumentNullException("keyFrame");
            }

            WritePreamble();

            OnFreezablePropertyChanged(null, keyFrame);

            _keyFrames.Add(keyFrame);

            WritePostscript();

            // Return the index at which the key frame was added.
            return _keyFrames.Count - 1;
        }

        /// <summary>
        /// Removes all PointKeyFrames from the collection.
        /// </summary>
        public void Clear()
        {
            WritePreamble();

            if (_keyFrames.Count > 0)
            {
                // Detach every key frame from this Freezable before clearing.
                for (int i = 0; i < _keyFrames.Count; i++)
                {
                    OnFreezablePropertyChanged(_keyFrames[i], null);
                }

                _keyFrames.Clear();

                WritePostscript();
            }
        }

        /// <summary>
        /// Returns true of the collection contains the given PointKeyFrame.
        /// </summary>
        bool IList.Contains(object keyFrame)
        {
            return Contains((PointKeyFrame)keyFrame);
        }

        /// <summary>
        /// Returns true of the collection contains the given PointKeyFrame.
        /// </summary>
        public bool Contains(PointKeyFrame keyFrame)
        {
            ReadPreamble();

            return _keyFrames.Contains(keyFrame);
        }

        /// <summary>
        /// Returns the index of a given PointKeyFrame in the collection.
        /// </summary>
        int IList.IndexOf(object keyFrame)
        {
            return IndexOf((PointKeyFrame)keyFrame);
        }

        /// <summary>
        /// Returns the index of a given PointKeyFrame in the collection.
        /// </summary>
        public int IndexOf(PointKeyFrame keyFrame)
        {
            ReadPreamble();

            return _keyFrames.IndexOf(keyFrame);
        }

        /// <summary>
        /// Inserts a PointKeyFrame into a specific location in the collection.
        /// </summary>
        void IList.Insert(int index, object keyFrame)
        {
            Insert(index, (PointKeyFrame)keyFrame);
        }

        /// <summary>
        /// Inserts a PointKeyFrame into a specific location in the collection.
        /// </summary>
        public void Insert(int index, PointKeyFrame keyFrame)
        {
            if (keyFrame == null)
            {
                throw new ArgumentNullException("keyFrame");
            }

            WritePreamble();

            OnFreezablePropertyChanged(null, keyFrame);

            _keyFrames.Insert(index, keyFrame);

            WritePostscript();
        }

        /// <summary>
        /// Returns true if the collection is frozen.
        /// </summary>
        public bool IsFixedSize
        {
            get
            {
                ReadPreamble();

                return IsFrozen;
            }
        }

        /// <summary>
        /// Returns true if the collection is frozen.
        /// </summary>
        public bool IsReadOnly
        {
            get
            {
                ReadPreamble();

                return IsFrozen;
            }
        }

        /// <summary>
        /// Removes a PointKeyFrame from the collection.
        /// </summary>
        void IList.Remove(object keyFrame)
        {
            Remove((PointKeyFrame)keyFrame);
        }

        /// <summary>
        /// Removes a PointKeyFrame from the collection.
        /// </summary>
        public void Remove(PointKeyFrame keyFrame)
        {
            WritePreamble();

            if (_keyFrames.Contains(keyFrame))
            {
                OnFreezablePropertyChanged(keyFrame, null);

                _keyFrames.Remove(keyFrame);

                WritePostscript();
            }
        }

        /// <summary>
        /// Removes the PointKeyFrame at the specified index from the collection.
        /// </summary>
        public void RemoveAt(int index)
        {
            WritePreamble();

            OnFreezablePropertyChanged(_keyFrames[index], null);

            _keyFrames.RemoveAt(index);

            WritePostscript();
        }

        /// <summary>
        /// Gets or sets the PointKeyFrame at a given index.
        /// </summary>
        object IList.this[int index]
        {
            get
            {
                return this[index];
            }
            set
            {
                this[index] = (PointKeyFrame)value;
            }
        }

        /// <summary>
        /// Gets or sets the PointKeyFrame at a given index.
        /// </summary>
        public PointKeyFrame this[int index]
        {
            get
            {
                ReadPreamble();

                return _keyFrames[index];
            }
            set
            {
                if (value == null)
                {
                    throw new ArgumentNullException(String.Format(CultureInfo.InvariantCulture, "PointKeyFrameCollection[{0}]", index));
                }

                WritePreamble();

                if (value != _keyFrames[index])
                {
                    OnFreezablePropertyChanged(_keyFrames[index], value);

                    _keyFrames[index] = value;

                    Debug.Assert(_keyFrames[index] != null);

                    WritePostscript();
                }
            }
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Linq;
using Xunit;

namespace System.Collections.Tests
{
    /// <summary>
    /// Tests for BitArray element access (Get/Set/indexer), enumeration,
    /// Length resizing, and ICollection.CopyTo.
    /// </summary>
    public class BitArray_GetSetTests
    {
        // Shared 320-bit fixtures used by the CopyTo member data.
        private static BitArray s_allTrue = new BitArray(320, true);
        private static BitArray s_allFalse = new BitArray(320, false);
        // 0xaaaaaaaa has every odd bit set, so bit i is true exactly when i is odd.
        private static BitArray s_alternating = new BitArray(Enumerable.Repeat(unchecked((int)0xaaaaaaaa), 10).ToArray());

        /// <summary>
        /// Set(i, value) must be observable through both the indexer and Get(i).
        /// </summary>
        [Theory]
        [InlineData(new bool[] { true })]
        [InlineData(new bool[] { false })]
        [InlineData(new bool[] { true, false, true, true, false, true })]
        public static void Get_Set(bool[] newValues)
        {
            BitArray bitArray = new BitArray(newValues.Length, false);
            for (int i = 0; i < newValues.Length; i++)
            {
                bitArray.Set(i, newValues[i]);
                Assert.Equal(newValues[i], bitArray[i]);
                Assert.Equal(newValues[i], bitArray.Get(i));
            }
        }

        /// <summary>
        /// Reading out-of-range indices must throw ArgumentOutOfRangeException.
        /// </summary>
        [Fact]
        public static void Get_InvalidIndex_ThrowsArgumentOutOfRangeException()
        {
            BitArray bitArray = new BitArray(4);

            Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray.Get(-1));
            Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray.Get(bitArray.Length));

            Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray[-1]);
            Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray[bitArray.Length]);
        }

        /// <summary>
        /// Writing out-of-range indices must throw ArgumentOutOfRangeException.
        /// </summary>
        [Fact]
        public static void Set_InvalidIndex_ThrowsArgumentOutOfRangeException()
        {
            BitArray bitArray = new BitArray(4);

            Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray.Set(-1, true));
            Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray.Set(bitArray.Length, true));

            Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray[-1] = true);
            Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray[bitArray.Length] = true);
        }

        /// <summary>
        /// SetAll must flip every bit, including arrays larger than one int word.
        /// </summary>
        [Theory]
        [InlineData(6, true)]
        [InlineData(6, false)]
        [InlineData(0x1000F, true)]
        public static void SetAll(int size, bool defaultValue)
        {
            // Fix: use the 'size' parameter (was hard-coded to 6, which made the
            // multi-word 0x1000F case exercise nothing beyond the 6-bit case).
            BitArray bitArray = new BitArray(size, defaultValue);

            bitArray.SetAll(!defaultValue);
            for (int i = 0; i < bitArray.Length; i++)
            {
                Assert.Equal(!defaultValue, bitArray[i]);
                Assert.Equal(!defaultValue, bitArray.Get(i));
            }

            bitArray.SetAll(defaultValue);
            for (int i = 0; i < bitArray.Length; i++)
            {
                Assert.Equal(defaultValue, bitArray[i]);
                Assert.Equal(defaultValue, bitArray.Get(i));
            }
        }

        /// <summary>
        /// The enumerator must yield each bit in order and be reusable after Reset.
        /// </summary>
        [Theory]
        [InlineData(new bool[0])]
        [InlineData(new bool[] { true, false, true, false, true, false, true, false, true, false })]
        public static void GetEnumerator(bool[] values)
        {
            BitArray bitArray = new BitArray(values);
            Assert.NotSame(bitArray.GetEnumerator(), bitArray.GetEnumerator());
            IEnumerator enumerator = bitArray.GetEnumerator();
            for (int i = 0; i < 2; i++)
            {
                int counter = 0;
                while (enumerator.MoveNext())
                {
                    Assert.Equal(bitArray[counter], enumerator.Current);
                    counter++;
                }
                Assert.Equal(bitArray.Length, counter);
                enumerator.Reset();
            }
        }

        /// <summary>
        /// Current must throw outside the valid range, and any mutation of the
        /// BitArray must invalidate an in-flight enumerator.
        /// </summary>
        [Fact]
        public static void GetEnumerator_Invalid()
        {
            BitArray bitArray = new BitArray(10, true);
            IEnumerator enumerator = bitArray.GetEnumerator();

            // Has not started enumerating
            Assert.Throws<InvalidOperationException>(() => enumerator.Current);

            // Has finished enumerating
            while (enumerator.MoveNext()) ;
            Assert.Throws<InvalidOperationException>(() => enumerator.Current);

            // Has been reset, so enumeration has not restarted yet
            enumerator.Reset();
            Assert.Throws<InvalidOperationException>(() => enumerator.Current);

            // Has modified underlying collection: Current keeps the stale value,
            // but MoveNext/Reset must throw.
            enumerator.MoveNext();
            bitArray[0] = false;
            Assert.True((bool)enumerator.Current);
            Assert.Throws<InvalidOperationException>(() => enumerator.MoveNext());
            Assert.Throws<InvalidOperationException>(() => enumerator.Reset());
        }

        /// <summary>
        /// Resizing via Length must preserve existing bits up to the shorter of
        /// the two sizes and zero-fill any newly-added bits.
        /// </summary>
        [Theory]
        [InlineData(16, 48)]
        [InlineData(48, 24)]
        [InlineData(16384, 256)]
        [InlineData(48, 48)]
        public static void Length_Set(int originalSize, int newSize)
        {
            BitArray bitArray = new BitArray(originalSize, true);
            bitArray.Length = newSize;
            Assert.Equal(newSize, bitArray.Length);

            // Surviving bits keep their old value (true).
            for (int i = 0; i < Math.Min(originalSize, bitArray.Length); i++)
            {
                Assert.True(bitArray[i]);
                Assert.True(bitArray.Get(i));
            }

            // Bits added by growing default to false.
            for (int i = originalSize; i < newSize; i++)
            {
                Assert.False(bitArray[i]);
                Assert.False(bitArray.Get(i));
            }

            Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray[newSize]);
            Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray.Get(newSize));

            // Decrease then increase size
            bitArray.Length = 0;
            Assert.Equal(0, bitArray.Length);

            bitArray.Length = newSize;
            Assert.Equal(newSize, bitArray.Length);
            Assert.False(bitArray.Get(newSize - 1));
        }

        /// <summary>
        /// Negative lengths are rejected.
        /// </summary>
        [Fact]
        public static void Length_Set_InvalidLength_ThrowsArgumentOutOfRangeException()
        {
            BitArray bitArray = new BitArray(1);
            Assert.Throws<ArgumentOutOfRangeException>(() => bitArray.Length = -1);
        }

        // Each row: source BitArray, destination array, start index, expected
        // packed contents, destination element type.
        public static IEnumerable<object[]> CopyTo_IntArray_TestData()
        {
            yield return new object[] { s_allTrue, new int[10], 0, Enumerable.Repeat(unchecked((int)0xffffffff), 10).ToArray(), typeof(int) };
            yield return new object[] { s_allFalse, new int[11], 1, Enumerable.Repeat(0, 10).ToArray(), typeof(int) };
            yield return new object[] { s_alternating, new int[12], 1, Enumerable.Repeat(unchecked((int)0xaaaaaaaa), 10).ToArray(), typeof(int) };

            yield return new object[] { s_allTrue, new bool[320], 0, Enumerable.Repeat(true, 320).ToArray(), typeof(bool) };
            yield return new object[] { s_allFalse, new bool[321], 1, Enumerable.Repeat(false, 320).ToArray(), typeof(bool) };
            yield return new object[] { s_alternating, new bool[322], 1, Enumerable.Range(0, 320).Select(i => i % 2 == 1).ToArray(), typeof(bool) };

            yield return new object[] { s_allTrue, new byte[40], 0, Enumerable.Repeat((byte)255, 40).ToArray(), typeof(byte) };
            yield return new object[] { s_allFalse, new byte[41], 1, Enumerable.Repeat((byte)0, 40).ToArray(), typeof(byte) };
            yield return new object[] { s_alternating, new byte[42], 1, Enumerable.Repeat((byte)170, 40).ToArray(), typeof(byte) };
        }

        /// <summary>
        /// ICollection.CopyTo must write only the packed payload starting at
        /// 'index' and leave the surrounding destination elements untouched.
        /// </summary>
        [Theory]
        [MemberData(nameof(CopyTo_IntArray_TestData))]
        public void CopyTo(BitArray bitArray, Array array, int index, Array expected, Type arrayType)
        {
            object defaultValue = Activator.CreateInstance(arrayType);

            ICollection collection = bitArray;
            collection.CopyTo(array, index);

            for (int i = 0; i < index; i++)
            {
                Assert.Equal(defaultValue, array.GetValue(i));
            }
            for (int i = 0; i < expected.Length; i++)
            {
                Assert.Equal(expected.GetValue(i), array.GetValue(i + index));
            }
            for (int i = index + expected.Length; i < array.Length; i++)
            {
                Assert.Equal(defaultValue, array.GetValue(i));
            }
        }

        /// <summary>
        /// CopyTo rejects null, wrong-typed, multidimensional, and undersized
        /// destinations, and negative indices.
        /// </summary>
        [Fact]
        public void CopyTo_Invalid()
        {
            ICollection bitArray = new BitArray(10);

            // Invalid array
            Assert.Throws<ArgumentNullException>("array", () => bitArray.CopyTo(null, 0));
            Assert.Throws<ArgumentException>(null, () => bitArray.CopyTo(new long[10], 0));
            Assert.Throws<ArgumentException>(null, () => bitArray.CopyTo(new int[10, 10], 0));

            // Invalid index
            Assert.Throws<ArgumentOutOfRangeException>("index", () => bitArray.CopyTo(new byte[10], -1));
            Assert.Throws<ArgumentException>(null, () => bitArray.CopyTo(new byte[1], 2));
            Assert.Throws<ArgumentException>(null, () => bitArray.CopyTo(new bool[10], 2));
        }

        /// <summary>
        /// SyncRoot must be stable across calls for the same instance.
        /// </summary>
        [Fact]
        public void SyncRoot()
        {
            ICollection bitArray = new BitArray(10);
            Assert.Same(bitArray.SyncRoot, bitArray.SyncRoot);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Runtime.InteropServices; using System.Threading; using Xunit; namespace System.IO.Tests { public abstract class FileSystemWatcherTest : FileCleanupTestBase { // Events are reported asynchronously by the OS, so allow an amount of time for // them to arrive before testing an assertion. If we expect an event to occur, // we can wait for it for a relatively long time, as if it doesn't arrive, we're // going to fail the test. If we don't expect an event to occur, then we need // to keep the timeout short, as in a successful run we'll end up waiting for // the entire timeout specified. public const int WaitForExpectedEventTimeout = 500; // ms to wait for an event to happen public const int LongWaitTimeout = 50000; // ms to wait for an event that takes a longer time than the average operation public const int SubsequentExpectedWait = 10; // ms to wait for checks that occur after the first. public const int WaitForExpectedEventTimeout_NoRetry = 3000;// ms to wait for an event that isn't surrounded by a retry. public const int DefaultAttemptsForExpectedEvent = 3; // Number of times an expected event should be retried if failing. public const int DefaultAttemptsForUnExpectedEvent = 2; // Number of times an unexpected event should be retried if failing. /// <summary> /// Watches the Changed WatcherChangeType and unblocks the returned AutoResetEvent when a /// Changed event is thrown by the watcher. 
/// </summary> public static AutoResetEvent WatchChanged(FileSystemWatcher watcher, string[] expectedPaths = null) { AutoResetEvent eventOccurred = new AutoResetEvent(false); watcher.Changed += (o, e) => { Assert.Equal(WatcherChangeTypes.Changed, e.ChangeType); if (expectedPaths != null) { Assert.Contains(Path.GetFullPath(e.FullPath), expectedPaths); } eventOccurred.Set(); }; return eventOccurred; } /// <summary> /// Watches the Created WatcherChangeType and unblocks the returned AutoResetEvent when a /// Created event is thrown by the watcher. /// </summary> public static AutoResetEvent WatchCreated(FileSystemWatcher watcher, string[] expectedPaths = null) { AutoResetEvent eventOccurred = new AutoResetEvent(false); watcher.Created += (o, e) => { Assert.Equal(WatcherChangeTypes.Created, e.ChangeType); if (expectedPaths != null) { Assert.Contains(Path.GetFullPath(e.FullPath), expectedPaths); } eventOccurred.Set(); }; return eventOccurred; } /// <summary> /// Watches the Renamed WatcherChangeType and unblocks the returned AutoResetEvent when a /// Renamed event is thrown by the watcher. /// </summary> public static AutoResetEvent WatchDeleted(FileSystemWatcher watcher, string[] expectedPaths = null) { AutoResetEvent eventOccurred = new AutoResetEvent(false); watcher.Deleted += (o, e) => { Assert.Equal(WatcherChangeTypes.Deleted, e.ChangeType); if (expectedPaths != null) { Assert.Contains(Path.GetFullPath(e.FullPath), expectedPaths); } eventOccurred.Set(); }; return eventOccurred; } /// <summary> /// Watches the Renamed WatcherChangeType and unblocks the returned AutoResetEvent when a /// Renamed event is thrown by the watcher. 
/// </summary>
        public static AutoResetEvent WatchRenamed(FileSystemWatcher watcher, string[] expectedPaths = null)
        {
            AutoResetEvent eventOccurred = new AutoResetEvent(false);

            watcher.Renamed += (o, e) =>
            {
                Assert.Equal(WatcherChangeTypes.Renamed, e.ChangeType);
                if (expectedPaths != null)
                {
                    Assert.Contains(Path.GetFullPath(e.FullPath), expectedPaths);
                }
                eventOccurred.Set();
            };

            return eventOccurred;
        }

        /// <summary>
        /// Asserts that the given handle will be signaled within the default timeout.
        /// </summary>
        public static void ExpectEvent(WaitHandle eventOccurred, string eventName_NoRetry)
        {
            string message = String.Format("Didn't observe a {0} event within {1}ms", eventName_NoRetry, WaitForExpectedEventTimeout_NoRetry);
            Assert.True(eventOccurred.WaitOne(WaitForExpectedEventTimeout_NoRetry), message);
        }

        /// <summary>
        /// Does verification that the given watcher will throw exactly/only the events in "expectedEvents" when
        /// "action" is executed.
        /// </summary>
        /// <param name="watcher">The FileSystemWatcher to test</param>
        /// <param name="expectedEvents">All of the events that are expected to be raised by this action</param>
        /// <param name="action">The Action that will trigger events.</param>
        /// <param name="cleanup">Optional. Undoes the action and cleans up the watcher so the test may be run again if necessary.</param>
        public static void ExpectEvent(FileSystemWatcher watcher, WatcherChangeTypes expectedEvents, Action action, Action cleanup = null)
        {
            ExpectEvent(watcher, expectedEvents, action, cleanup, (string[])null);
        }

        /// <summary>
        /// Does verification that the given watcher will throw exactly/only the events in "expectedEvents" when
        /// "action" is executed.
        /// </summary>
        /// <param name="watcher">The FileSystemWatcher to test</param>
        /// <param name="expectedEvents">All of the events that are expected to be raised by this action</param>
        /// <param name="action">The Action that will trigger events.</param>
        /// <param name="cleanup">Optional. Undoes the action and cleans up the watcher so the test may be run again if necessary.</param>
        /// <param name="expectedPath">Optional. Adds path verification to all expected events.</param>
        /// <param name="attempts">Optional. Number of times the test should be executed if it's failing.</param>
        public static void ExpectEvent(FileSystemWatcher watcher, WatcherChangeTypes expectedEvents, Action action, Action cleanup = null, string expectedPath = null, int attempts = DefaultAttemptsForExpectedEvent, int timeout = WaitForExpectedEventTimeout)
        {
            ExpectEvent(watcher, expectedEvents, action, cleanup, expectedPath == null ? null : new string[] { expectedPath }, attempts, timeout);
        }

        /// <summary>
        /// Does verification that the given watcher will throw exactly/only the events in "expectedEvents" when
        /// "action" is executed.
        /// </summary>
        /// <param name="watcher">The FileSystemWatcher to test</param>
        /// <param name="expectedEvents">All of the events that are expected to be raised by this action</param>
        /// <param name="action">The Action that will trigger events.</param>
        /// <param name="cleanup">Optional. Undoes the action and cleans up the watcher so the test may be run again if necessary.</param>
        /// <param name="expectedPaths">Optional. Adds path verification to all expected events.</param>
        /// <param name="attempts">Optional. Number of times the test should be executed if it's failing.</param>
        public static void ExpectEvent(FileSystemWatcher watcher, WatcherChangeTypes expectedEvents, Action action, Action cleanup = null, string[] expectedPaths = null, int attempts = DefaultAttemptsForExpectedEvent, int timeout = WaitForExpectedEventTimeout)
        {
            int attemptsCompleted = 0;
            bool result = false;
            while (!result && attemptsCompleted++ < attempts)
            {
                if (attemptsCompleted > 1)
                {
                    // Re-create the watcher to get a clean iteration.
                    watcher = new FileSystemWatcher()
                    {
                        IncludeSubdirectories = watcher.IncludeSubdirectories,
                        NotifyFilter = watcher.NotifyFilter,
                        Filter = watcher.Filter,
                        Path = watcher.Path,
                        InternalBufferSize = watcher.InternalBufferSize
                    };
                    // Most intermittent failures in FSW are caused by either a shortage of resources (e.g. inotify instances)
                    // or by insufficient time to execute (e.g. CI gets bogged down). Immediately re-running a failed test
                    // won't resolve the first issue, so we wait a little while hoping that things clear up for the next run.
                    Thread.Sleep(500);
                }

                // Only assert on the final attempt (attemptsCompleted == attempts); earlier failures just retry.
                result = ExecuteAndVerifyEvents(watcher, expectedEvents, action, attemptsCompleted == attempts, expectedPaths, timeout);

                if (cleanup != null)
                    cleanup();
            }
        }

        /// <summary>
        /// Helper for the ExpectEvent function.
        /// </summary>
        /// <param name="watcher">The FileSystemWatcher to test</param>
        /// <param name="expectedEvents">All of the events that are expected to be raised by this action</param>
        /// <param name="action">The Action that will trigger events.</param>
        /// <param name="assertExpected">True if results should be asserted. Used if there is no retry.</param>
        /// <param name="expectedPaths"> Adds path verification to all expected events.</param>
        /// <returns>True if the events raised correctly; else, false.</returns>
        public static bool ExecuteAndVerifyEvents(FileSystemWatcher watcher, WatcherChangeTypes expectedEvents, Action action, bool assertExpected, string[] expectedPaths, int timeout)
        {
            bool result = true, verifyChanged = true, verifyCreated = true, verifyDeleted = true, verifyRenamed = true;
            AutoResetEvent changed = null, created = null, deleted = null, renamed = null;
            // NOTE(review): expectedFullPaths is computed but never read below; the Watch* helpers
            // normalize paths themselves. Confirm before removing.
            string[] expectedFullPaths = expectedPaths == null ? null : expectedPaths.Select(e => Path.GetFullPath(e)).ToArray();

            // On OSX we get a number of extra events tacked onto valid events. As such, we can not ever confidently
            // say that a event won't occur, only that one will occur.
            if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
            {
                if (verifyChanged = ((expectedEvents & WatcherChangeTypes.Changed) > 0))
                    changed = WatchChanged(watcher, expectedPaths);
                if (verifyCreated = ((expectedEvents & WatcherChangeTypes.Created) > 0))
                    created = WatchCreated(watcher, expectedPaths);
                if (verifyDeleted = ((expectedEvents & WatcherChangeTypes.Deleted) > 0))
                    deleted = WatchDeleted(watcher, expectedPaths);
                if (verifyRenamed = ((expectedEvents & WatcherChangeTypes.Renamed) > 0))
                    renamed = WatchRenamed(watcher, expectedPaths);
            }
            else
            {
                changed = WatchChanged(watcher, (expectedEvents & WatcherChangeTypes.Changed) > 0 ? expectedPaths : null);
                created = WatchCreated(watcher, (expectedEvents & WatcherChangeTypes.Created) > 0 ? expectedPaths : null);
                deleted = WatchDeleted(watcher, (expectedEvents & WatcherChangeTypes.Deleted) > 0 ? expectedPaths : null);
                renamed = WatchRenamed(watcher, (expectedEvents & WatcherChangeTypes.Renamed) > 0 ? expectedPaths : null);
            }

            watcher.EnableRaisingEvents = true;
            action();

            // Verify Changed
            if (verifyChanged)
            {
                bool Changed_expected = ((expectedEvents & WatcherChangeTypes.Changed) > 0);
                bool Changed_actual = changed.WaitOne(timeout);
                result = Changed_expected == Changed_actual;
                if (assertExpected)
                    Assert.True(Changed_expected == Changed_actual, "Changed event did not occur as expected");
            }

            // Verify Created. Only the first verification waits the full timeout; later ones
            // use the short SubsequentExpectedWait since the OS has already had time to deliver.
            if (verifyCreated)
            {
                bool Created_expected = ((expectedEvents & WatcherChangeTypes.Created) > 0);
                bool Created_actual = created.WaitOne(verifyChanged ? SubsequentExpectedWait : timeout);
                result = result && Created_expected == Created_actual;
                if (assertExpected)
                    Assert.True(Created_expected == Created_actual, "Created event did not occur as expected");
            }

            // Verify Deleted
            if (verifyDeleted)
            {
                bool Deleted_expected = ((expectedEvents & WatcherChangeTypes.Deleted) > 0);
                bool Deleted_actual = deleted.WaitOne(verifyChanged || verifyCreated ? SubsequentExpectedWait : timeout);
                result = result && Deleted_expected == Deleted_actual;
                if (assertExpected)
                    Assert.True(Deleted_expected == Deleted_actual, "Deleted event did not occur as expected");
            }

            // Verify Renamed
            if (verifyRenamed)
            {
                bool Renamed_expected = ((expectedEvents & WatcherChangeTypes.Renamed) > 0);
                bool Renamed_actual = renamed.WaitOne(verifyChanged || verifyCreated || verifyDeleted ? SubsequentExpectedWait : timeout);
                result = result && Renamed_expected == Renamed_actual;
                if (assertExpected)
                    Assert.True(Renamed_expected == Renamed_actual, "Renamed event did not occur as expected");
            }

            watcher.EnableRaisingEvents = false;
            return result;
        }

        /// <summary>
        /// Does verification that the given watcher will throw an Error when the given action is executed.
        /// </summary>
        /// <param name="watcher">The FileSystemWatcher to test</param>
        /// <param name="action">The Action that will trigger a failure.</param>
        /// <param name="cleanup">Undoes the action and cleans up the watcher so the test may be run again if necessary.</param>
        /// <param name="attempts">Optional. Number of times the test should be executed if it's failing.</param>
        public static void ExpectError(FileSystemWatcher watcher, Action action, Action cleanup, int attempts = DefaultAttemptsForExpectedEvent)
        {
            string message = string.Format("Did not observe an error event within {0}ms and {1} attempts.", WaitForExpectedEventTimeout, attempts);
            Assert.True(TryErrorEvent(watcher, action, cleanup, attempts, expected: true), message);
        }

        /// <summary>
        /// Does verification that the given watcher will <b>not</b> throw an Error when the given action is executed.
        /// </summary>
        /// <param name="watcher">The FileSystemWatcher to test</param>
        /// <param name="action">The Action that will not trigger a failure.</param>
        /// <param name="cleanup">Undoes the action and cleans up the watcher so the test may be run again if necessary.</param>
        /// <param name="attempts">Optional.
Number of times the test should be executed if it's failing.</param>
        public static void ExpectNoError(FileSystemWatcher watcher, Action action, Action cleanup, int attempts = DefaultAttemptsForUnExpectedEvent)
        {
            string message = string.Format("Should not observe an error event within {0}ms. Attempted {1} times and received the event each time.", WaitForExpectedEventTimeout, attempts);
            // Fix: pass expected: false (was expected: true). TryErrorEvent loops while the observed
            // result differs from 'expected', so with expected: false it retries while the error event
            // keeps occurring and returns true only when the error was seen on EVERY attempt — which is
            // exactly what the failure message above describes and what retrying an unexpected event
            // (DefaultAttemptsForUnExpectedEvent) is for. With expected: true the loop returned true as
            // soon as a single (possibly spurious) error occurred and never retried.
            Assert.False(TryErrorEvent(watcher, action, cleanup, attempts, expected: false), message);
        }

        ///
        /// <summary>
        /// Helper method for the ExpectError/ExpectNoError functions.
        /// </summary>
        /// <param name="watcher">The FileSystemWatcher to test</param>
        /// <param name="action">The Action to execute.</param>
        /// <param name="cleanup">Undoes the action and cleans up the watcher so the test may be run again if necessary.</param>
        /// <param name="attempts">Number of times the test should be executed if it's failing.</param>
        /// <param name="expected">Whether it is expected that an error event will be arisen.</param>
        /// <returns>True if an Error event was raised by the watcher when the given action was executed; else, false.</returns>
        public static bool TryErrorEvent(FileSystemWatcher watcher, Action action, Action cleanup, int attempts, bool expected)
        {
            int attemptsCompleted = 0;
            bool result = !expected;
            while (result != expected && attemptsCompleted++ < attempts)
            {
                if (attemptsCompleted > 1)
                {
                    // Re-create the watcher to get a clean iteration.
                    watcher = new FileSystemWatcher()
                    {
                        IncludeSubdirectories = watcher.IncludeSubdirectories,
                        NotifyFilter = watcher.NotifyFilter,
                        Filter = watcher.Filter,
                        Path = watcher.Path,
                        InternalBufferSize = watcher.InternalBufferSize
                    };
                    // Most intermittent failures in FSW are caused by either a shortage of resources (e.g. inotify instances)
                    // or by insufficient time to execute (e.g. CI gets bogged down). Immediately re-running a failed test
                    // won't resolve the first issue, so we wait a little while hoping that things clear up for the next run.
                    Thread.Sleep(500);
                }

                AutoResetEvent errorOccurred = new AutoResetEvent(false);
                watcher.Error += (o, e) =>
                {
                    errorOccurred.Set();
                };

                // Enable raising events but be careful with the possibility of the max user inotify instances
                // being reached already. (Inside the loop the post-incremented 'attemptsCompleted' always
                // satisfies attemptsCompleted <= attempts, so the former unguarded 'else' branch was
                // unreachable dead code and has been removed.)
                try
                {
                    watcher.EnableRaisingEvents = true;
                }
                catch (IOException) // Max User INotify instances. Isn't the type of error we're checking for.
                {
                    continue;
                }

                action();

                result = errorOccurred.WaitOne(WaitForExpectedEventTimeout);
                watcher.EnableRaisingEvents = false;

                cleanup();
            }
            return result;
        }

        /// <summary>
        /// In some cases (such as when running without elevated privileges),
        /// the symbolic link may fail to create. Only run this test if it creates
        /// links successfully.
        /// </summary>
        protected static bool CanCreateSymbolicLinks
        {
            get
            {
                bool success = true;

                // Verify file symlink creation
                string path = Path.GetTempFileName();
                string linkPath = path + ".link";
                success = CreateSymLink(path, linkPath, isDirectory: false);
                try { File.Delete(path); } catch { }
                try { File.Delete(linkPath); } catch { }

                // Verify directory symlink creation
                // NOTE(review): GetTempFileName creates a FILE, yet this probe deletes it with
                // Directory.Delete — presumably only link creation (not target kind) matters here; confirm.
                path = Path.GetTempFileName();
                linkPath = path + ".link";
                success = success && CreateSymLink(path, linkPath, isDirectory: true);
                try { Directory.Delete(path); } catch { }
                try { Directory.Delete(linkPath); } catch { }

                return success;
            }
        }

        /// <summary>
        /// Creates a symbolic link at <paramref name="linkPath"/> pointing to <paramref name="targetPath"/>,
        /// using 'cmd /c mklink' on Windows and '/bin/ln -s' elsewhere. Returns true on a zero exit code.
        /// </summary>
        public static bool CreateSymLink(string targetPath, string linkPath, bool isDirectory)
        {
            Process symLinkProcess = new Process();
            if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                symLinkProcess.StartInfo.FileName = "cmd";
                symLinkProcess.StartInfo.Arguments = string.Format("/c mklink{0} \"{1}\" \"{2}\"", isDirectory ?
" /D" : "", Path.GetFullPath(linkPath), Path.GetFullPath(targetPath)); } else { symLinkProcess.StartInfo.FileName = "/bin/ln"; symLinkProcess.StartInfo.Arguments = string.Format("-s \"{0}\" \"{1}\"", Path.GetFullPath(targetPath), Path.GetFullPath(linkPath)); } symLinkProcess.StartInfo.RedirectStandardOutput = true; symLinkProcess.Start(); if (symLinkProcess != null) { symLinkProcess.WaitForExit(); return (0 == symLinkProcess.ExitCode); } else { return false; } } public static IEnumerable<object[]> FilterTypes() { foreach (NotifyFilters filter in Enum.GetValues(typeof(NotifyFilters))) yield return new object[] { filter }; } // Linux and OSX systems have less precise filtering systems than Windows, so most // metadata filters are effectively equivalent to each other on those systems. For example // there isn't a way to filter only LastWrite events on either system; setting // Filters to LastWrite will allow events from attribute change, creation time // change, size change, etc. public const NotifyFilters LinuxFiltersForAttribute = NotifyFilters.Attributes | NotifyFilters.CreationTime | NotifyFilters.LastAccess | NotifyFilters.LastWrite | NotifyFilters.Security | NotifyFilters.Size; public const NotifyFilters LinuxFiltersForModify = NotifyFilters.LastAccess | NotifyFilters.LastWrite | NotifyFilters.Security | NotifyFilters.Size; public const NotifyFilters OSXFiltersForModify = NotifyFilters.Attributes | NotifyFilters.CreationTime | NotifyFilters.LastAccess | NotifyFilters.LastWrite | NotifyFilters.Size; } }
using Orleans.CodeGenerator.SyntaxGeneration;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using static Orleans.CodeGenerator.InvokableGenerator;
using static Orleans.CodeGenerator.SerializerGenerator;
using static Microsoft.CodeAnalysis.CSharp.SyntaxFactory;

namespace Orleans.CodeGenerator
{
    /// <summary>
    /// Emits Roslyn syntax trees for generated Copier_* classes, which implement deep copying
    /// for types marked for serialization.
    /// </summary>
    internal static class CopierGenerator
    {
        private const string BaseTypeCopierFieldName = "_baseTypeCopier";
        private const string ActivatorFieldName = "_activator";
        private const string DeepCopyMethodName = "DeepCopy";

        /// <summary>
        /// Builds the copier class declaration for <paramref name="type"/>: either a trivial
        /// pass-through copier (immutable types) or a memberwise deep copier with supporting
        /// fields and constructor.
        /// </summary>
        public static ClassDeclarationSyntax GenerateCopier(
            LibraryTypes libraryTypes,
            ISerializableTypeDescription type)
        {
            var simpleClassName = GetSimpleClassName(type);

            var members = new List<ISerializableMember>();
            foreach (var member in type.Members)
            {
                if (member is ISerializableMember serializable)
                {
                    members.Add(serializable);
                }
                else if (member is IFieldDescription field)
                {
                    members.Add(new SerializableMember(libraryTypes, type, field, members.Count));
                }
                else if (member is MethodParameterFieldDescription methodParameter)
                {
                    members.Add(new SerializableMethodMember(methodParameter));
                }
            }

            var accessibility = type.Accessibility switch
            {
                Accessibility.Public => SyntaxKind.PublicKeyword,
                _ => SyntaxKind.InternalKeyword,
            };
            var classDeclaration = ClassDeclaration(simpleClassName)
                .AddBaseListTypes(SimpleBaseType(libraryTypes.DeepCopier_1.ToTypeSyntax(type.TypeSyntax)))
                .AddModifiers(Token(accessibility), Token(SyntaxKind.SealedKeyword))
                .AddAttributeLists(AttributeList(SingletonSeparatedList(CodeGenerator.GetGeneratedCodeAttributeSyntax())));

            if (type.IsImmutable)
            {
                // Immutable types need no copying: emit a method that returns the input unchanged.
                var copyMethod = GenerateImmutableTypeCopyMethod(type, libraryTypes);
                classDeclaration = classDeclaration.AddMembers(copyMethod);
            }
            else
            {
                var fieldDescriptions = GetFieldDescriptions(type, members, libraryTypes);
                var fieldDeclarations = GetFieldDeclarations(fieldDescriptions);
                var ctor = GenerateConstructor(libraryTypes, simpleClassName, fieldDescriptions);
                var copyMethod = GenerateMemberwiseDeepCopyMethod(type, fieldDescriptions, members, libraryTypes);
                classDeclaration = classDeclaration
                    .AddMembers(copyMethod)
                    .AddMembers(fieldDeclarations)
                    .AddMembers(ctor);

                if (!type.IsSealedType)
                {
                    // Unsealed types also get a BaseCopier implementation so derived-type copiers can
                    // delegate copying of inherited state.
                    classDeclaration = classDeclaration
                        .AddMembers(GenerateBaseCopierDeepCopyMethod(type, fieldDescriptions, members, libraryTypes))
                        .AddBaseListTypes(SimpleBaseType(libraryTypes.BaseCopier_1.ToTypeSyntax(type.TypeSyntax)));
                }
            }

            if (type.IsGenericType)
            {
                classDeclaration = SyntaxFactoryUtility.AddGenericTypeParameters(classDeclaration, type.TypeParameters);
            }

            return classDeclaration;
        }

        public static string GetSimpleClassName(ISerializableTypeDescription serializableType) => GetSimpleClassName(serializableType.Name);

        public static string GetSimpleClassName(string name) => $"Copier_{name}";

        // Generated copiers live under <CodeGeneratorName>[.<original namespace/nesting>].
        public static string GetGeneratedNamespaceName(ITypeSymbol type) => type.GetNamespaceAndNesting() switch
        {
            { Length: > 0 } ns => $"{CodeGenerator.CodeGeneratorName}.{ns}",
            _ => CodeGenerator.CodeGeneratorName
        };

        /// <summary>
        /// Emits one private readonly field declaration per generated field description.
        /// </summary>
        private static MemberDeclarationSyntax[] GetFieldDeclarations(List<GeneratedFieldDescription> fieldDescriptions)
        {
            return fieldDescriptions.Select(GetFieldDeclaration).ToArray();

            static MemberDeclarationSyntax GetFieldDeclaration(GeneratedFieldDescription description)
            {
                switch (description)
                {
                    case SetterFieldDescription setter:
                        {
                            var fieldSetterVariable = VariableDeclarator(setter.FieldName);

                            return
                                FieldDeclaration(VariableDeclaration(setter.FieldType).AddVariables(fieldSetterVariable))
                                    .AddModifiers(
                                        Token(SyntaxKind.PrivateKeyword),
                                        Token(SyntaxKind.ReadOnlyKeyword));
                        }
                    case GetterFieldDescription getter:
                        {
                            var fieldGetterVariable = VariableDeclarator(getter.FieldName);

                            return
                                FieldDeclaration(VariableDeclaration(getter.FieldType).AddVariables(fieldGetterVariable))
                                    .AddModifiers(
                                        Token(SyntaxKind.PrivateKeyword),
                                        Token(SyntaxKind.ReadOnlyKeyword));
                        }
                    default:
                        return FieldDeclaration(VariableDeclaration(description.FieldType, SingletonSeparatedList(VariableDeclarator(description.FieldName))))
                            .AddModifiers(Token(SyntaxKind.PrivateKeyword), Token(SyntaxKind.ReadOnlyKeyword));
                }
            }
        }

        /// <summary>
        /// Emits the copier's constructor: injected fields arrive as parameters, non-injected
        /// copier fields are resolved from the trailing ICodecProvider parameter.
        /// </summary>
        private static ConstructorDeclarationSyntax GenerateConstructor(LibraryTypes libraryTypes, string simpleClassName, List<GeneratedFieldDescription> fieldDescriptions)
        {
            var injected = fieldDescriptions.Where(f => f.IsInjected).ToList();
            var parameters = new List<ParameterSyntax>(injected.Select(f => Parameter(f.FieldName.ToIdentifier()).WithType(f.FieldType)));

            const string CodecProviderParameterName = "codecProvider";
            parameters.Add(Parameter(Identifier(CodecProviderParameterName)).WithType(libraryTypes.ICodecProvider.ToTypeSyntax()));

            IEnumerable<StatementSyntax> GetStatements()
            {
                foreach (var field in fieldDescriptions)
                {
                    switch (field)
                    {
                        case GetterFieldDescription getter:
                            yield return getter.InitializationSyntax;
                            break;
                        case SetterFieldDescription setter:
                            yield return setter.InitializationSyntax;
                            break;
                        case GeneratedFieldDescription _ when field.IsInjected:
                            yield return ExpressionStatement(
                                AssignmentExpression(
                                    SyntaxKind.SimpleAssignmentExpression,
                                    ThisExpression().Member(field.FieldName.ToIdentifierName()),
                                    Unwrapped(field.FieldName.ToIdentifierName())));
                            break;
                        case CopierFieldDescription codec when !field.IsInjected:
                            {
                                yield return ExpressionStatement(
                                    AssignmentExpression(
                                        SyntaxKind.SimpleAssignmentExpression,
                                        ThisExpression().Member(field.FieldName.ToIdentifierName()),
                                        GetService(field.FieldType)));
                            }
                            break;
                    }
                }
            }

            return ConstructorDeclaration(simpleClassName)
                .AddModifiers(Token(SyntaxKind.PublicKeyword))
                .AddParameterListParameters(parameters.ToArray())
                .AddBodyStatements(GetStatements().ToArray());

            // C#: OrleansGeneratedCodeHelper.UnwrapService(this, <expr>)
            static ExpressionSyntax Unwrapped(ExpressionSyntax expr)
            {
                return InvocationExpression(
                    MemberAccessExpression(SyntaxKind.SimpleMemberAccessExpression,
IdentifierName("OrleansGeneratedCodeHelper"), IdentifierName("UnwrapService")),
                    ArgumentList(SeparatedList(new[] { Argument(ThisExpression()), Argument(expr) })));
            }

            // C#: OrleansGeneratedCodeHelper.GetService<T>(this, codecProvider)
            static ExpressionSyntax GetService(TypeSyntax type)
            {
                return InvocationExpression(
                    MemberAccessExpression(SyntaxKind.SimpleMemberAccessExpression,
                        IdentifierName("OrleansGeneratedCodeHelper"),
                        GenericName(Identifier("GetService"), TypeArgumentList(SingletonSeparatedList(type)))),
                    ArgumentList(SeparatedList(new[] { Argument(ThisExpression()), Argument(IdentifierName(CodecProviderParameterName)) })));
            }
        }

        /// <summary>
        /// Collects every field the generated copier needs: an optional base-type copier and
        /// activator, one copier field per distinct member type without a static copier, any
        /// getter/setter accessor fields, and one field per serialization hook.
        /// </summary>
        private static List<GeneratedFieldDescription> GetFieldDescriptions(
            ISerializableTypeDescription serializableTypeDescription,
            List<ISerializableMember> members,
            LibraryTypes libraryTypes)
        {
            var fields = new List<GeneratedFieldDescription>();

            if (serializableTypeDescription.HasComplexBaseType)
            {
                fields.Add(new BaseCopierFieldDescription(libraryTypes.BaseCopier_1.ToTypeSyntax(serializableTypeDescription.BaseTypeSyntax), BaseTypeCopierFieldName));
            }

            if (serializableTypeDescription.UseActivator)
            {
                fields.Add(new ActivatorFieldDescription(libraryTypes.IActivator_1.ToTypeSyntax(serializableTypeDescription.TypeSyntax), ActivatorFieldName));
            }

            // Add a codec field for any field in the target which does not have a static codec.
            fields.AddRange(serializableTypeDescription.Members
                .Distinct(MemberDescriptionTypeComparer.Default)
                .Where(t => !libraryTypes.StaticCopiers.Any(c => SymbolEqualityComparer.Default.Equals(c.UnderlyingType, t.Type)))
                .Select(member => GetCopierDescription(member)));

            foreach (var member in members)
            {
                if (member.GetGetterFieldDescription() is { } getterFieldDescription)
                {
                    fields.Add(getterFieldDescription);
                }

                if (member.GetSetterFieldDescription() is { } setterFieldDescription)
                {
                    fields.Add(setterFieldDescription);
                }
            }

            for (var hookIndex = 0; hookIndex < serializableTypeDescription.SerializationHooks.Count; ++hookIndex)
            {
                var hookType = serializableTypeDescription.SerializationHooks[hookIndex];
                fields.Add(new SerializationHookFieldDescription(hookType.ToTypeSyntax(), $"_hook{hookIndex}"));
            }

            return fields;

            // Chooses the most specific copier type available for a member: a concrete generated
            // copier, a well-known (possibly generic) copier, or the IDeepCopier<T> interface.
            CopierFieldDescription GetCopierDescription(IMemberDescription member)
            {
                TypeSyntax copierType = null;
                var t = member.Type;
                if (t.HasAttribute(libraryTypes.GenerateSerializerAttribute)
                    && (!SymbolEqualityComparer.Default.Equals(t.ContainingAssembly, libraryTypes.Compilation.Assembly) || t.ContainingAssembly.HasAttribute(libraryTypes.TypeManifestProviderAttribute)))
                {
                    // Use the concrete generated type and avoid expensive interface dispatch
                    if (t is INamedTypeSymbol namedTypeSymbol && namedTypeSymbol.IsGenericType)
                    {
                        // Construct the full generic type name
                        var ns = GetGeneratedNamespaceName(t);
                        var name = GenericName(Identifier(GetSimpleClassName(t.Name)), TypeArgumentList(SeparatedList(namedTypeSymbol.TypeArguments.Select(arg => arg.ToTypeSyntax()))));
                        copierType = QualifiedName(ParseName(ns), name);
                    }
                    else
                    {
                        var simpleName = $"{GetGeneratedNamespaceName(t)}.{GetSimpleClassName(t.Name)}";
                        copierType = ParseTypeName(simpleName);
                    }
                }
                else if (libraryTypes.WellKnownCopiers.FirstOrDefault(c => SymbolEqualityComparer.Default.Equals(c.UnderlyingType, t)) is WellKnownCopierDescription codec)
                {
                    // The copier is not a static copier and is also not a generic copier.
                    copierType = codec.CopierType.ToTypeSyntax();
                }
                // NOTE(review): the lambda below re-declares pattern variable 'named' while the enclosing
                // 'named' is in scope — verify this compiles as intended (CS0136 shadowing rules).
                else if (t is INamedTypeSymbol named && libraryTypes.WellKnownCopiers.FirstOrDefault(c => t is INamedTypeSymbol named && named.ConstructedFrom is ISymbol unboundFieldType && SymbolEqualityComparer.Default.Equals(c.UnderlyingType, unboundFieldType)) is WellKnownCopierDescription genericCopier)
                {
                    // Construct the generic copier type using the field's type arguments.
                    copierType = genericCopier.CopierType.Construct(named.TypeArguments.ToArray()).ToTypeSyntax();
                }
                else
                {
                    // Use the IDeepCopier<T> interface
                    copierType = libraryTypes.DeepCopier_1.ToTypeSyntax(member.TypeSyntax);
                }

                var fieldName = '_' + ToLowerCamelCase(member.TypeNameIdentifier) + "Copier";
                return new CopierFieldDescription(copierType, fieldName, t);
            }

            static string ToLowerCamelCase(string input) => char.IsLower(input, 0) ? input : char.ToLowerInvariant(input[0]) + input.Substring(1);
        }

        /// <summary>
        /// Emits the DeepCopy(original, context) method body: reference tracking, polymorphism
        /// dispatch, instance creation, base-copier delegation, hooks and memberwise copying.
        /// </summary>
        private static MemberDeclarationSyntax GenerateMemberwiseDeepCopyMethod(
            ISerializableTypeDescription type,
            List<GeneratedFieldDescription> copierFields,
            List<ISerializableMember> members,
            LibraryTypes libraryTypes)
        {
            var returnType = type.TypeSyntax;

            var originalParam = "original".ToIdentifierName();
            var contextParam = "context".ToIdentifierName();
            var resultVar = "result".ToIdentifierName();

            var body = new List<StatementSyntax>();

            ExpressionSyntax createValueExpression = type.UseActivator switch
            {
                true => InvocationExpression(copierFields.OfType<ActivatorFieldDescription>().Single().FieldName.ToIdentifierName().Member("Create")),
                false => type.GetObjectCreationExpression(libraryTypes)
            };

            if (!type.IsValueType)
            {
                // C#: if (context.TryGetCopy(original, out T result)) { return result; }
                var tryGetCopy = InvocationExpression(
                    contextParam.Member("TryGetCopy"),
                    ArgumentList(SeparatedList(new[]
                    {
                        Argument(originalParam),
                        Argument(DeclarationExpression(
                            type.TypeSyntax,
                            SingleVariableDesignation(Identifier("result"))))
                        .WithRefKindKeyword(Token(SyntaxKind.OutKeyword))
                    })));
                body.Add(IfStatement(tryGetCopy, ReturnStatement(resultVar)));

                if (!type.IsSealedType)
                {
                    // C#: if (original.GetType() != typeof(<codec>)) { return context.Copy(original); }
                    var exactTypeMatch = BinaryExpression(
                        SyntaxKind.NotEqualsExpression,
                        InvocationExpression(
                            MemberAccessExpression(SyntaxKind.SimpleMemberAccessExpression, originalParam, IdentifierName("GetType"))),
                        TypeOfExpression(type.TypeSyntax));
                    var contextCopy = InvocationExpression(MemberAccessExpression(SyntaxKind.SimpleMemberAccessExpression, contextParam, IdentifierName("Copy")))
                        .WithArgumentList(ArgumentList(SingletonSeparatedList(Argument(originalParam))));
                    body.Add(IfStatement(exactTypeMatch, ReturnStatement(contextCopy)));
                }

                // C#: result = _activator.Create();
                body.Add(ExpressionStatement(AssignmentExpression(SyntaxKind.SimpleAssignmentExpression, resultVar, createValueExpression)));

                // C#: context.RecordCopy(original, result);
                body.Add(ExpressionStatement(InvocationExpression(contextParam.Member("RecordCopy"), ArgumentList(SeparatedList(new[]
                {
                    Argument(originalParam),
                    Argument(resultVar)
                })))));

                if (type.HasComplexBaseType)
                {
                    // C#: _baseTypeCopier.DeepCopy(original, result, context);
                    body.Add(
                        ExpressionStatement(
                            InvocationExpression(
                                ThisExpression().Member(BaseTypeCopierFieldName.ToIdentifierName()).Member(DeepCopyMethodName),
                                ArgumentList(SeparatedList(new[]
                                {
                                    Argument(originalParam),
                                    Argument(resultVar),
                                    Argument(contextParam)
                                })))));
                }
            }
            else
            {
                // C#: TField result = _activator.Create();
                // or C#: TField result = new TField();
                body.Add(LocalDeclarationStatement(
                    VariableDeclaration(
                        type.TypeSyntax,
                        SingletonSeparatedList(VariableDeclarator(resultVar.Identifier)
                            .WithInitializer(EqualsValueClause(createValueExpression))))));
            }

            body.AddRange(AddSerializationCallbacks(type, originalParam, resultVar, "OnCopying"));

            body.AddRange(GenerateMemberwiseCopy(copierFields, members, libraryTypes, originalParam, contextParam, resultVar));

            body.AddRange(AddSerializationCallbacks(type,
originalParam, resultVar, "OnCopied"));

            body.Add(ReturnStatement(resultVar));

            var parameters = new[]
            {
                Parameter(originalParam.Identifier).WithType(type.TypeSyntax),
                Parameter(contextParam.Identifier).WithType(libraryTypes.CopyContext.ToTypeSyntax())
            };

            return MethodDeclaration(returnType, DeepCopyMethodName)
                .AddModifiers(Token(SyntaxKind.PublicKeyword))
                .AddParameterListParameters(parameters)
                .AddAttributeLists(AttributeList(SingletonSeparatedList(CodeGenerator.GetMethodImplAttributeSyntax())))
                .AddBodyStatements(body.ToArray());
        }

        /// <summary>
        /// Emits the BaseCopier-style void DeepCopy(input, output, context) method used by
        /// derived-type copiers to copy inherited state into an existing instance.
        /// </summary>
        private static MemberDeclarationSyntax GenerateBaseCopierDeepCopyMethod(
            ISerializableTypeDescription type,
            List<GeneratedFieldDescription> copierFields,
            List<ISerializableMember> members,
            LibraryTypes libraryTypes)
        {
            var inputParam = "input".ToIdentifierName();
            var resultParam = "output".ToIdentifierName();
            var contextParam = "context".ToIdentifierName();
            var body = new List<StatementSyntax>();

            if (type.HasComplexBaseType)
            {
                // C#: _baseTypeCopier.DeepCopy(original, result, context);
                body.Add(
                    ExpressionStatement(
                        InvocationExpression(
                            ThisExpression().Member(BaseTypeCopierFieldName.ToIdentifierName()).Member(DeepCopyMethodName),
                            ArgumentList(SeparatedList(new[]
                            {
                                Argument(inputParam),
                                Argument(resultParam),
                                Argument(contextParam)
                            })))));
            }

            body.AddRange(AddSerializationCallbacks(type, inputParam, resultParam, "OnCopying"));

            body.AddRange(GenerateMemberwiseCopy(copierFields, members, libraryTypes, inputParam, contextParam, resultParam));

            body.AddRange(AddSerializationCallbacks(type, inputParam, resultParam, "OnCopied"));

            var parameters = new[]
            {
                Parameter(inputParam.Identifier).WithType(type.TypeSyntax),
                Parameter(resultParam.Identifier).WithType(type.TypeSyntax),
                Parameter(contextParam.Identifier).WithType(libraryTypes.CopyContext.ToTypeSyntax())
            };

            return MethodDeclaration(PredefinedType(Token(SyntaxKind.VoidKeyword)), DeepCopyMethodName)
                .AddModifiers(Token(SyntaxKind.PublicKeyword))
                .AddParameterListParameters(parameters)
                .AddAttributeLists(AttributeList(SingletonSeparatedList(CodeGenerator.GetMethodImplAttributeSyntax())))
                .AddBodyStatements(body.ToArray());
        }

        /// <summary>
        /// Emits one assignment per member (ordered by field id): shallow-copyable members are
        /// assigned directly, others go through the appropriate static or instance copier.
        /// </summary>
        private static IEnumerable<StatementSyntax> GenerateMemberwiseCopy(
            List<GeneratedFieldDescription> copierFields,
            List<ISerializableMember> members,
            LibraryTypes libraryTypes,
            IdentifierNameSyntax sourceVar,
            IdentifierNameSyntax contextVar,
            IdentifierNameSyntax destinationVar)
        {
            var codecs = copierFields.OfType<ICopierDescription>()
                .Concat(libraryTypes.StaticCopiers)
                .ToList();

            var orderedMembers = members.OrderBy(m => m.Member.FieldId).ToList();
            foreach (var member in orderedMembers)
            {
                var description = member.Member;

                // Copiers can either be static classes or injected into the constructor.
                // Either way, the member signatures are the same.
                var codec = codecs.First(f => SymbolEqualityComparer.Default.Equals(f.UnderlyingType, description.Type));
                var memberType = description.Type;
                var staticCopier = libraryTypes.StaticCopiers.FirstOrDefault(c => SymbolEqualityComparer.Default.Equals(c.UnderlyingType, memberType));
                ExpressionSyntax codecExpression;
                if (staticCopier != null)
                {
                    codecExpression = staticCopier.CopierType.ToNameSyntax();
                }
                else
                {
                    var instanceCopier = copierFields.OfType<CopierFieldDescription>().First(f => SymbolEqualityComparer.Default.Equals(f.UnderlyingType, memberType));
                    codecExpression = ThisExpression().Member(instanceCopier.FieldName);
                }

                ExpressionSyntax getValueExpression;

                if (member.IsShallowCopyable)
                {
                    getValueExpression = member.GetGetter(sourceVar);
                }
                else
                {
                    getValueExpression = InvocationExpression(
                        codecExpression.Member(DeepCopyMethodName),
                        ArgumentList(SeparatedList(new[] { Argument(member.GetGetter(sourceVar)), Argument(contextVar) })));
                    if (!SymbolEqualityComparer.Default.Equals(codec.UnderlyingType, member.Member.Type))
                    {
                        // If the member type differs from the codec type (eg because the member is an array), cast the result.
                        getValueExpression = CastExpression(description.TypeSyntax, getValueExpression);
                    }
                }

                var memberAssignment = ExpressionStatement(member.GetSetter(destinationVar, getValueExpression));
                yield return memberAssignment;
            }
        }

        /// <summary>
        /// Emits a DeepCopy method for an immutable type: simply returns the input.
        /// </summary>
        private static MemberDeclarationSyntax GenerateImmutableTypeCopyMethod(
            ISerializableTypeDescription type,
            LibraryTypes libraryTypes)
        {
            var returnType = type.TypeSyntax;

            var inputParam = "input".ToIdentifierName();

            var body = new StatementSyntax[] { ReturnStatement(inputParam) };

            var parameters = new[]
            {
                Parameter("input".ToIdentifier()).WithType(returnType),
                Parameter("_".ToIdentifier()).WithType(libraryTypes.CopyContext.ToTypeSyntax()),
            };

            return MethodDeclaration(returnType, DeepCopyMethodName)
                .AddModifiers(Token(SyntaxKind.PublicKeyword))
                .AddParameterListParameters(parameters)
                .AddAttributeLists(AttributeList(SingletonSeparatedList(CodeGenerator.GetMethodImplAttributeSyntax())))
                .AddBodyStatements(body.ToArray());
        }

        /// <summary>
        /// Emits one hook invocation statement per serialization hook that exposes a public
        /// two-parameter callback with the given name; ref parameters are forwarded by ref.
        /// </summary>
        private static IEnumerable<StatementSyntax> AddSerializationCallbacks(ISerializableTypeDescription type, IdentifierNameSyntax originalInstance, IdentifierNameSyntax resultInstance, string callbackMethodName)
        {
            for (var hookIndex = 0; hookIndex < type.SerializationHooks.Count; ++hookIndex)
            {
                var hookType = type.SerializationHooks[hookIndex];
                var member = hookType.GetAllMembers<IMethodSymbol>(callbackMethodName, Accessibility.Public).FirstOrDefault();
                if (member is null || member.Parameters.Length != 2)
                {
                    continue;
                }

                var originalArgument = Argument(originalInstance);
                if (member.Parameters[0].RefKind == RefKind.Ref)
                {
                    originalArgument = originalArgument.WithRefOrOutKeyword(Token(SyntaxKind.RefKeyword));
                }

                var resultArgument = Argument(resultInstance);
                if (member.Parameters[1].RefKind == RefKind.Ref)
                {
                    resultArgument = resultArgument.WithRefOrOutKeyword(Token(SyntaxKind.RefKeyword));
                }

                yield return ExpressionStatement(InvocationExpression(
                    ThisExpression().Member($"_hook{hookIndex}").Member(callbackMethodName),
ArgumentList(SeparatedList(new[] { originalArgument, resultArgument })))); } } internal class BaseCopierFieldDescription : GeneratedFieldDescription { public BaseCopierFieldDescription(TypeSyntax fieldType, string fieldName) : base(fieldType, fieldName) { } public override bool IsInjected => true; } internal class CopierFieldDescription : GeneratedFieldDescription, ICopierDescription { public CopierFieldDescription(TypeSyntax fieldType, string fieldName, ITypeSymbol underlyingType) : base(fieldType, fieldName) { UnderlyingType = underlyingType; } public ITypeSymbol UnderlyingType { get; } public override bool IsInjected => false; } } }
/*
Project Orleans Cloud Service SDK ver. 1.0

Copyright (c) Microsoft Corporation

All rights reserved.

MIT License

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
//#define TRACE_SERIALIZATION

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net;
using System.Runtime.Serialization;
using System.Text;

using Orleans.Runtime;
using Orleans.CodeGeneration;

namespace Orleans.Serialization
{
    /// <summary>
    /// Reader for Orleans binary token streams
    /// </summary>
    public class BinaryTokenStreamReader
    {
        private readonly IList<ArraySegment<byte>> buffers;   // all input segments, read in order
        private int currentSegmentIndex;                      // index into buffers of the segment being read
        private ArraySegment<byte> currentSegment;            // the segment currently being read
        private byte[] currentBuffer;                         // currentSegment.Array, cached for fast access
        private int currentOffset;                            // absolute offset into currentBuffer
        private int totalProcessedBytes;                      // total length of all fully-consumed segments
        private readonly int totalLength;                     // total byte count across all segments

        private static readonly ArraySegment<byte> emptySegment = new ArraySegment<byte>(new byte[0]);

        /// <summary>
        /// Create a new BinaryTokenStreamReader to read from the specified input byte array.
        /// </summary>
        /// <param name="input">Input binary data to be tokenized.</param>
        public BinaryTokenStreamReader(byte[] input)
            : this(new List<ArraySegment<byte>> { new ArraySegment<byte>(input) })
        {
        }

        /// <summary>
        /// Create a new BinaryTokenStreamReader to read from the specified input buffers.
        /// </summary>
        /// <param name="buffs">The list of ArraySegments to use for the data.</param>
        public BinaryTokenStreamReader(IList<ArraySegment<byte>> buffs)
        {
            buffers = buffs;
            totalProcessedBytes = 0;
            currentSegmentIndex = 0;
            currentSegment = buffs[0];
            currentBuffer = currentSegment.Array;
            currentOffset = currentSegment.Offset;
            totalLength = buffs.Sum(b => b.Count);
            Trace("Starting new stream reader");
        }

        /// <summary> Current read position in the stream. </summary>
        public int CurrentPosition
        {
            get { return currentOffset + totalProcessedBytes - currentSegment.Offset; }
        }

        // Advances the reader to the next input segment, or to the shared empty segment
        // when all input segments have been consumed.
        private void StartNextSegment()
        {
            totalProcessedBytes += currentSegment.Count;
            currentSegmentIndex++;
            if (currentSegmentIndex < buffers.Count)
            {
                currentSegment = buffers[currentSegmentIndex];
                currentBuffer = currentSegment.Array;
                currentOffset = currentSegment.Offset;
            }
            else
            {
                currentSegment = emptySegment;
                currentBuffer = null;
                currentOffset = 0;
            }
        }

        private ArraySegment<byte> CheckLength(int n)
        {
            bool ignore;
            return CheckLength(n, out ignore);
        }

        // Returns a segment holding the next n bytes and advances the read position.
        // safeToUse == true means the returned array belongs to the caller (a fresh copy
        // spanning multiple segments, or the shared empty segment); false means the
        // segment aliases one of the underlying input buffers and must not be retained.
        private ArraySegment<byte> CheckLength(int n, out bool safeToUse)
        {
            safeToUse = false;
            if (n == 0)
            {
                safeToUse = true;
                return emptySegment;
            }

            if ((CurrentPosition + n > totalLength))
            {
                throw new SerializationException(
                    String.Format("Attempt to read past the end of the input stream: CurrentPosition={0}, n={1}, totalLength={2}", CurrentPosition, n, totalLength));
            }

            if (currentSegmentIndex >= buffers.Count)
            {
                throw new SerializationException(
                    String.Format("Attempt to read past buffers.Count: currentSegmentIndex={0}, buffers.Count={1}.", currentSegmentIndex, buffers.Count));
            }

            if (currentOffset == currentSegment.Offset + currentSegment.Count)
            {
                StartNextSegment();
            }

            // Fast path: the whole read fits inside the current segment, so hand back a
            // view over the existing buffer without copying.
            if (currentOffset + n <= currentSegment.Offset + currentSegment.Count)
            {
                var result = new ArraySegment<byte>(currentBuffer, currentOffset, n);
                currentOffset += n;
                if (currentOffset >= currentSegment.Offset + currentSegment.Count)
                {
                    StartNextSegment();
                }
                return result;
            }

            // Slow path: the read straddles segment boundaries — assemble into a private copy.
            var temp = new byte[n];
            var i = 0;

            while (i < n)
            {
                var bytesFromThisBuffer = Math.Min(currentSegment.Offset + currentSegment.Count - currentOffset, n - i);
                Buffer.BlockCopy(currentBuffer, currentOffset, temp, i, bytesFromThisBuffer);
                i += bytesFromThisBuffer;
                currentOffset += bytesFromThisBuffer;
                if (currentOffset >= currentSegment.Offset + currentSegment.Count)
                {
                    StartNextSegment();
                }
            }

            safeToUse = true;
            return new ArraySegment<byte>(temp);
        }

        /// <summary> Read an <c>Int32</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public int ReadInt()
        {
            var buff = CheckLength(sizeof(int));
            var val = BitConverter.ToInt32(buff.Array, buff.Offset);
            Trace("--Read int {0}", val);
            return val;
        }

        /// <summary> Read an <c>UInt32</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public uint ReadUInt()
        {
            var buff = CheckLength(sizeof(uint));
            var val = BitConverter.ToUInt32(buff.Array, buff.Offset);
            Trace("--Read uint {0}", val);
            return val;
        }

        /// <summary> Read an <c>Int16</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public short ReadShort()
        {
            var buff = CheckLength(sizeof(short));
            var val = BitConverter.ToInt16(buff.Array, buff.Offset);
            Trace("--Read short {0}", val);
            return val;
        }

        /// <summary> Read an <c>UInt16</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public ushort ReadUShort()
        {
            var buff = CheckLength(sizeof(ushort));
            var val = BitConverter.ToUInt16(buff.Array, buff.Offset);
            Trace("--Read ushort {0}", val);
            return val;
        }

        /// <summary> Read an <c>Int64</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public long ReadLong()
        {
            var buff = CheckLength(sizeof(long));
            var val = BitConverter.ToInt64(buff.Array, buff.Offset);
            Trace("--Read long {0}", val);
            return val;
        }

        /// <summary> Read an <c>UInt64</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public ulong ReadULong()
        {
            var buff = CheckLength(sizeof(ulong));
            var val = BitConverter.ToUInt64(buff.Array, buff.Offset);
            Trace("--Read ulong {0}", val);
            return val;
        }

        /// <summary> Read a <c>float</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public float ReadFloat()
        {
            var buff = CheckLength(sizeof(float));
            var val = BitConverter.ToSingle(buff.Array, buff.Offset);
            Trace("--Read float {0}", val);
            return val;
        }

        /// <summary> Read a <c>double</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public double ReadDouble()
        {
            var buff = CheckLength(sizeof(double));
            var val = BitConverter.ToDouble(buff.Array, buff.Offset);
            Trace("--Read double {0}", val);
            return val;
        }

        /// <summary> Read a <c>decimal</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public decimal ReadDecimal()
        {
            // A decimal is serialized as its four Int32 components.
            var buff = CheckLength(4 * sizeof(int));
            var raw = new int[4];
            Trace("--Read decimal");
            var n = buff.Offset;
            for (var i = 0; i < 4; i++)
            {
                raw[i] = BitConverter.ToInt32(buff.Array, n);
                n += sizeof(int);
            }
            return new decimal(raw);
        }

        /// <summary> Read a <c>string</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public string ReadString()
        {
            var n = ReadInt();
            if (n == 0)
            {
                Trace("--Read empty string");
                return String.Empty;
            }

            string s = null;
            // a length of -1 indicates that the string is null.
            if (-1 != n)
            {
                var buff = CheckLength(n);
                s = Encoding.UTF8.GetString(buff.Array, buff.Offset, n);
            }

            Trace("--Read string '{0}'", s);
            return s;
        }

        /// <summary> Read the next bytes from the stream. </summary>
        /// <param name="count">Number of bytes to read.</param>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public byte[] ReadBytes(int count)
        {
            if (count == 0)
            {
                return new byte[0];
            }
            bool safeToUse;
            var buff = CheckLength(count, out safeToUse);
            Trace("--Read byte array of length {0}", count);
            if (!safeToUse)
            {
                // The segment aliases an input buffer — copy before handing it to the caller.
                var result = new byte[count];
                Array.Copy(buff.Array, buff.Offset, result, 0, count);
                return result;
            }
            else
            {
                // CheckLength already produced a private array of exactly 'count' bytes.
                return buff.Array;
            }
        }

        /// <summary> Read the next bytes from the stream. </summary>
        /// <param name="destination">Output array to store the returned data in.</param>
        /// <param name="offset">Offset into the destination array to write to.</param>
        /// <param name="count">Number of bytes to read.</param>
        public void ReadByteArray(byte[] destination, int offset, int count)
        {
            if (offset + count > destination.Length)
            {
                throw new ArgumentOutOfRangeException("count", "Reading into an array that is too small");
            }

            var buff = CheckLength(count);
            Buffer.BlockCopy(buff.Array, buff.Offset, destination, offset, count);
        }

        /// <summary> Read a <c>char</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public char ReadChar()
        {
            Trace("--Read char");
            // NOTE(review): chars are stored as shorts; Convert.ToChar throws OverflowException
            // for negative values — assumes the writer only emits non-negative shorts; confirm.
            return Convert.ToChar(ReadShort());
        }

        /// <summary> Read a <c>byte</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public byte ReadByte()
        {
            var buff = CheckLength(1);
            Trace("--Read byte");
            return buff.Array[buff.Offset];
        }

        /// <summary> Read an <c>sbyte</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public sbyte ReadSByte()
        {
            var buff = CheckLength(1);
            Trace("--Read sbyte");
            return unchecked((sbyte)(buff.Array[buff.Offset]));
        }

        /// <summary> Read an <c>IPAddress</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public IPAddress ReadIPAddress()
        {
            // Addresses are always serialized as 16 bytes; an IPv4 address has its first
            // 12 bytes zeroed and the address itself in the last 4.
            var buff = CheckLength(16);
            bool v4 = true;
            for (var i = 0; i < 12; i++)
            {
                if (buff.Array[buff.Offset + i] != 0)
                {
                    v4 = false;
                    break;
                }
            }

            if (v4)
            {
                var v4Bytes = new byte[4];
                for (var i = 0; i < 4; i++)
                {
                    v4Bytes[i] = buff.Array[buff.Offset + 12 + i];
                }
                return new IPAddress(v4Bytes);
            }
            else
            {
                var v6Bytes = new byte[16];
                for (var i = 0; i < 16; i++)
                {
                    v6Bytes[i] = buff.Array[buff.Offset + i];
                }
                return new IPAddress(v6Bytes);
            }
        }

        /// <summary> Read an <c>IPEndPoint</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public IPEndPoint ReadIPEndPoint()
        {
            var addr = ReadIPAddress();
            var port = ReadInt();
            return new IPEndPoint(addr, port);
        }

        /// <summary> Read an <c>SiloAddress</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public SiloAddress ReadSiloAddress()
        {
            var ep = ReadIPEndPoint();
            var gen = ReadInt();
            return SiloAddress.New(ep, gen);
        }

        /// <summary> Read an <c>GrainId</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        internal GrainId ReadGrainId()
        {
            UniqueKey key = ReadUniqueKey();
            return GrainId.GetGrainId(key);
        }

        /// <summary> Read an <c>ActivationId</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        internal ActivationId ReadActivationId()
        {
            UniqueKey key = ReadUniqueKey();
            return ActivationId.GetActivationId(key);
        }

        internal UniqueKey ReadUniqueKey()
        {
            ulong n0 = ReadULong();
            ulong n1 = ReadULong();
            ulong typeCodeData = ReadULong();
            string keyExt = ReadString();
            return UniqueKey.NewKey(n0, n1, typeCodeData, keyExt);
        }

        /// <summary> Read an <c>ActivationAddress</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        internal ActivationAddress ReadActivationAddress()
        {
            var silo = ReadSiloAddress();
            var grain = ReadGrainId();
            var act = ReadActivationId();

            // Zero sentinels on the wire map back to nulls in the address.
            if (silo.Equals(SiloAddress.Zero))
                silo = null;
            if (act.Equals(ActivationId.Zero))
                act = null;
            return ActivationAddress.GetAddress(silo, grain, act);
        }

        /// <summary>
        /// Read a block of data into the specified output <c>Array</c>.
        /// </summary>
        /// <param name="array">Array to output the data to.</param>
        /// <param name="n">Number of bytes to read.</param>
        public void ReadBlockInto(Array array, int n)
        {
            var buff = CheckLength(n);
            Buffer.BlockCopy(buff.Array, buff.Offset, array, 0, n);
            Trace("--Read block of {0} bytes", n);
        }

        /// <summary>
        /// Peek at the next token in this input stream.
        /// </summary>
        /// <returns>Next token that will be read from the stream.</returns>
        internal SerializationTokenType PeekToken()
        {
            // Does not consume the token; only rolls to the next segment when the current
            // one is exhausted.
            if (currentOffset == currentSegment.Count + currentSegment.Offset)
                StartNextSegment();

            return (SerializationTokenType)currentBuffer[currentOffset];
        }

        /// <summary> Read a <c>SerializationTokenType</c> value from the stream. </summary>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        internal SerializationTokenType ReadToken()
        {
            var buff = CheckLength(1);
            Trace("--Read token {0}", (SerializationTokenType)buff.Array[buff.Offset]);
            return (SerializationTokenType)buff.Array[buff.Offset];
        }

        // Attempts to read a value whose token denotes a simple (self-describing) type.
        // Returns false — with result null — when the token is not a simple type; the token
        // itself is always consumed and returned via the out parameter.
        internal bool TryReadSimpleType(out object result, out SerializationTokenType token)
        {
            token = ReadToken();
            byte[] bytes;
            switch (token)
            {
                case SerializationTokenType.True:
                    result = true;
                    break;
                case SerializationTokenType.False:
                    result = false;
                    break;
                case SerializationTokenType.Null:
                    result = null;
                    break;
                case SerializationTokenType.Object:
                    result = new object();
                    break;
                case SerializationTokenType.Int:
                    result = ReadInt();
                    break;
                case SerializationTokenType.Uint:
                    result = ReadUInt();
                    break;
                case SerializationTokenType.Short:
                    result = ReadShort();
                    break;
                case SerializationTokenType.Ushort:
                    result = ReadUShort();
                    break;
                case SerializationTokenType.Long:
                    result = ReadLong();
                    break;
                case SerializationTokenType.Ulong:
                    result = ReadULong();
                    break;
                case SerializationTokenType.Byte:
                    result = ReadByte();
                    break;
                case SerializationTokenType.Sbyte:
                    result = ReadSByte();
                    break;
                case SerializationTokenType.Float:
                    result = ReadFloat();
                    break;
                case SerializationTokenType.Double:
                    result = ReadDouble();
                    break;
                case SerializationTokenType.Decimal:
                    result = ReadDecimal();
                    break;
                case SerializationTokenType.String:
                    result = ReadString();
                    break;
                case SerializationTokenType.Character:
                    result = ReadChar();
                    break;
                case SerializationTokenType.Guid:
                    bytes = ReadBytes(16);
                    result = new Guid(bytes);
                    break;
                case SerializationTokenType.Date:
                    result = new DateTime(ReadLong());
                    break;
                case SerializationTokenType.TimeSpan:
                    result = new TimeSpan(ReadLong());
                    break;
                case SerializationTokenType.GrainId:
                    result = ReadGrainId();
                    break;
                case SerializationTokenType.ActivationId:
                    result = ReadActivationId();
                    break;
                case SerializationTokenType.SiloAddress:
                    result = ReadSiloAddress();
                    break;
                case SerializationTokenType.ActivationAddress:
                    result = ReadActivationAddress();
                    break;
                case SerializationTokenType.IpAddress:
                    result = ReadIPAddress();
                    break;
                case SerializationTokenType.IpEndPoint:
                    result = ReadIPEndPoint();
                    break;
                case SerializationTokenType.CorrelationId:
                    result = new CorrelationId(ReadBytes(CorrelationId.SIZE_BYTES));
                    break;
                default:
                    result = null;
                    return false;
            }
            return true;
        }

        /// <summary> Read a <c>Type</c> value from the stream. </summary>
        /// <param name="expected">Expected Type, if known.</param>
        /// <returns>Data from current position in stream, converted to the appropriate output type.</returns>
        public Type ReadFullTypeHeader(Type expected = null)
        {
            var token = ReadToken();

            if (token == SerializationTokenType.ExpectedType)
            {
                return expected;
            }

            var t = CheckSpecialTypeCode(token);
            if (t != null)
            {
                return t;
            }

            if (token == SerializationTokenType.SpecifiedType)
            {
#if TRACE_SERIALIZATION
                var tt = ReadSpecifiedTypeHeader();
                Trace("--Read specified type header for type {0}", tt);
                return tt;
#else
                return ReadSpecifiedTypeHeader();
#endif
            }

            throw new SerializationException("Invalid '" + token + "'token in input stream where full type header is expected");
        }

        // Maps a single-token type code to its Type, or null when the token is not a
        // special type code.
        internal static Type CheckSpecialTypeCode(SerializationTokenType token)
        {
            switch (token)
            {
                case SerializationTokenType.Boolean:
                    return typeof(bool);
                case SerializationTokenType.Int:
                    return typeof(int);
                case SerializationTokenType.Short:
                    return typeof(short);
                case SerializationTokenType.Long:
                    return typeof(long);
                case SerializationTokenType.Sbyte:
                    return typeof(sbyte);
                case SerializationTokenType.Uint:
                    return typeof(uint);
                case SerializationTokenType.Ushort:
                    return typeof(ushort);
                case SerializationTokenType.Ulong:
                    return typeof(ulong);
                case SerializationTokenType.Byte:
                    return typeof(byte);
                case SerializationTokenType.Float:
                    return typeof(float);
                case SerializationTokenType.Double:
                    return typeof(double);
                case SerializationTokenType.Decimal:
                    return typeof(decimal);
                case SerializationTokenType.String:
                    return typeof(string);
                case SerializationTokenType.Character:
                    return typeof(char);
                case SerializationTokenType.Guid:
                    return typeof(Guid);
                case SerializationTokenType.Date:
                    return typeof(DateTime);
                case SerializationTokenType.TimeSpan:
                    return typeof(TimeSpan);
                case SerializationTokenType.IpAddress:
                    return typeof(IPAddress);
                case SerializationTokenType.IpEndPoint:
                    return typeof(IPEndPoint);
                case SerializationTokenType.GrainId:
                    return typeof(GrainId);
                case SerializationTokenType.ActivationId:
                    return typeof(ActivationId);
                case SerializationTokenType.SiloAddress:
                    return typeof(SiloAddress);
                case SerializationTokenType.ActivationAddress:
                    return typeof(ActivationAddress);
                case SerializationTokenType.CorrelationId:
                    return typeof(CorrelationId);
#if false // Note: not yet implemented as simple types on the Writer side
                case SerializationTokenType.Object:
                    return typeof(Object);
                case SerializationTokenType.ByteArray:
                    return typeof(byte[]);
                case SerializationTokenType.ShortArray:
                    return typeof(short[]);
                case SerializationTokenType.IntArray:
                    return typeof(int[]);
                case SerializationTokenType.LongArray:
                    return typeof(long[]);
                case SerializationTokenType.UShortArray:
                    return typeof(ushort[]);
                case SerializationTokenType.UIntArray:
                    return typeof(uint[]);
                case SerializationTokenType.ULongArray:
                    return typeof(ulong[]);
                case SerializationTokenType.FloatArray:
                    return typeof(float[]);
                case SerializationTokenType.DoubleArray:
                    return typeof(double[]);
                case SerializationTokenType.CharArray:
                    return typeof(char[]);
                case SerializationTokenType.BoolArray:
                    return typeof(bool[]);
#endif
                default:
                    break;
            }
            return null;
        }

        /// <summary> Read a <c>Type</c> value from the stream. </summary>
        internal Type ReadSpecifiedTypeHeader()
        {
            // Assumes that the SpecifiedType token has already been read
            var token = ReadToken();
            switch (token)
            {
                case SerializationTokenType.Boolean:
                    return typeof(bool);
                case SerializationTokenType.Int:
                    return typeof(int);
                case SerializationTokenType.Short:
                    return typeof(short);
                case SerializationTokenType.Long:
                    return typeof(long);
                case SerializationTokenType.Sbyte:
                    return typeof(sbyte);
                case SerializationTokenType.Uint:
                    return typeof(uint);
                case SerializationTokenType.Ushort:
                    return typeof(ushort);
                case SerializationTokenType.Ulong:
                    return typeof(ulong);
                case SerializationTokenType.Byte:
                    return typeof(byte);
                case SerializationTokenType.Float:
                    return typeof(float);
                case SerializationTokenType.Double:
                    return typeof(double);
                case SerializationTokenType.Decimal:
                    return typeof(decimal);
                case SerializationTokenType.String:
                    return typeof(string);
                case SerializationTokenType.Character:
                    return typeof(char);
                case SerializationTokenType.Guid:
                    return typeof(Guid);
                case SerializationTokenType.Date:
                    return typeof(DateTime);
                case SerializationTokenType.TimeSpan:
                    return typeof(TimeSpan);
                case SerializationTokenType.IpAddress:
                    return typeof(IPAddress);
                case SerializationTokenType.IpEndPoint:
                    return typeof(IPEndPoint);
                case SerializationTokenType.GrainId:
                    return typeof(GrainId);
                case SerializationTokenType.ActivationId:
                    return typeof(ActivationId);
                case SerializationTokenType.SiloAddress:
                    return typeof(SiloAddress);
                case SerializationTokenType.ActivationAddress:
                    return typeof(ActivationAddress);
                case SerializationTokenType.CorrelationId:
                    return typeof(CorrelationId);
                case SerializationTokenType.Request:
                    return typeof(InvokeMethodRequest);
                case SerializationTokenType.Response:
                    return typeof(Response);
                case SerializationTokenType.StringObjDict:
                    return typeof(Dictionary<string, object>);
                case SerializationTokenType.Object:
                    return typeof(Object);
                // Tuple tokens encode the arity as an offset from the base Tuple token;
                // the element types follow as recursively-encoded type headers.
                case SerializationTokenType.Tuple + 1:
                    Trace("----Reading type info for a Tuple'1");
                    return typeof(Tuple<>).MakeGenericType(ReadGenericArguments(1));
                case SerializationTokenType.Tuple + 2:
                    Trace("----Reading type info for a Tuple'2");
                    return typeof(Tuple<,>).MakeGenericType(ReadGenericArguments(2));
                case SerializationTokenType.Tuple + 3:
                    Trace("----Reading type info for a Tuple'3");
                    return typeof(Tuple<,,>).MakeGenericType(ReadGenericArguments(3));
                case SerializationTokenType.Tuple + 4:
                    Trace("----Reading type info for a Tuple'4");
                    return typeof(Tuple<,,,>).MakeGenericType(ReadGenericArguments(4));
                case SerializationTokenType.Tuple + 5:
                    Trace("----Reading type info for a Tuple'5");
                    return typeof(Tuple<,,,,>).MakeGenericType(ReadGenericArguments(5));
                case SerializationTokenType.Tuple + 6:
                    Trace("----Reading type info for a Tuple'6");
                    return typeof(Tuple<,,,,,>).MakeGenericType(ReadGenericArguments(6));
                case SerializationTokenType.Tuple + 7:
                    Trace("----Reading type info for a Tuple'7");
                    return typeof(Tuple<,,,,,,>).MakeGenericType(ReadGenericArguments(7));
                // Array tokens encode the rank as an offset from the base Array token;
                // the element type follows as a full type header.
                case SerializationTokenType.Array + 1:
                    var et1 = ReadFullTypeHeader();
                    return et1.MakeArrayType();
                case SerializationTokenType.Array + 2:
                    var et2 = ReadFullTypeHeader();
                    return et2.MakeArrayType(2);
                case SerializationTokenType.Array + 3:
                    var et3 = ReadFullTypeHeader();
                    return et3.MakeArrayType(3);
                case SerializationTokenType.Array + 4:
                    var et4 = ReadFullTypeHeader();
                    return et4.MakeArrayType(4);
                case SerializationTokenType.Array + 5:
                    var et5 = ReadFullTypeHeader();
                    return et5.MakeArrayType(5);
                case SerializationTokenType.Array + 6:
                    var et6 = ReadFullTypeHeader();
                    return et6.MakeArrayType(6);
                case SerializationTokenType.Array + 7:
                    var et7 = ReadFullTypeHeader();
                    return et7.MakeArrayType(7);
                case SerializationTokenType.Array + 8:
                    var et8 = ReadFullTypeHeader();
                    return et8.MakeArrayType(8);
                case SerializationTokenType.List:
                    return typeof(List<>).MakeGenericType(ReadGenericArguments(1));
                case SerializationTokenType.Dictionary:
                    return typeof(Dictionary<,>).MakeGenericType(ReadGenericArguments(2));
                case SerializationTokenType.KeyValuePair:
                    return typeof(KeyValuePair<,>).MakeGenericType(ReadGenericArguments(2));
                case SerializationTokenType.Set:
                    return typeof(HashSet<>).MakeGenericType(ReadGenericArguments(1));
                case SerializationTokenType.SortedList:
                    return typeof(SortedList<,>).MakeGenericType(ReadGenericArguments(2));
                case SerializationTokenType.SortedSet:
                    return typeof(SortedSet<>).MakeGenericType(ReadGenericArguments(1));
                case SerializationTokenType.Stack:
                    return typeof(Stack<>).MakeGenericType(ReadGenericArguments(1));
                case SerializationTokenType.Queue:
                    return typeof(Queue<>).MakeGenericType(ReadGenericArguments(1));
                case SerializationTokenType.LinkedList:
                    return typeof(LinkedList<>).MakeGenericType(ReadGenericArguments(1));
                case SerializationTokenType.Nullable:
                    return typeof(Nullable<>).MakeGenericType(ReadGenericArguments(1));
                case SerializationTokenType.ByteArray:
                    return typeof(byte[]);
                case SerializationTokenType.ShortArray:
                    return typeof(short[]);
                case SerializationTokenType.IntArray:
                    return typeof(int[]);
                case SerializationTokenType.LongArray:
                    return typeof(long[]);
                case SerializationTokenType.UShortArray:
                    return typeof(ushort[]);
                case SerializationTokenType.UIntArray:
                    return typeof(uint[]);
                case SerializationTokenType.ULongArray:
                    return typeof(ulong[]);
                case SerializationTokenType.FloatArray:
                    return typeof(float[]);
                case SerializationTokenType.DoubleArray:
                    return typeof(double[]);
                case SerializationTokenType.CharArray:
                    return typeof(char[]);
                case SerializationTokenType.BoolArray:
                    return typeof(bool[]);
                case SerializationTokenType.NamedType:
                    var typeName = ReadString();
                    try
                    {
                        return SerializationManager.ResolveTypeName(typeName);
                    }
                    catch (TypeAccessException ex)
                    {
                        throw new TypeAccessException("Named type \"" + typeName + "\" is invalid: " + ex.Message);
                    }
                default:
                    break;
            }

            throw new SerializationException("Unexpected '" + token + "' found when expecting a type reference");
        }

        // Reads n recursively-encoded type headers, used for generic type arguments.
        private Type[] ReadGenericArguments(int n)
        {
            Trace("About to read {0} generic arguments", n);
            var args = new Type[n];
            for (var i = 0; i < n; i++)
            {
                args[i] = ReadFullTypeHeader();
            }
            Trace("Finished reading {0} generic arguments", n);
            return args;
        }

        private StreamWriter trace;

        // Debug-only tracing; compiled away unless TRACE_SERIALIZATION is defined.
        // NOTE(review): the trace path is hard-coded to the d:\ drive — developer-machine
        // convenience only; confirm this is never enabled in production builds.
        [Conditional("TRACE_SERIALIZATION")]
        private void Trace(string format, params object[] args)
        {
            if (trace == null)
            {
                var path = String.Format("d:\\Trace-{0}.{1}.{2}.txt", DateTime.UtcNow.Hour, DateTime.UtcNow.Minute, DateTime.UtcNow.Ticks);
                Console.WriteLine("Opening trace file at '{0}'", path);
                trace = File.CreateText(path);
            }
            trace.Write(format, args);
            trace.WriteLine(" at offset {0}", CurrentPosition);
            trace.Flush();
        }
    }
}
//---------------------------------------------------------------------------
//
// File: KeyConverter.cs
//
// Description:
//
// KeyConverter : Converts a key string to the *Type* that the string represents and vice-versa
//
// Features:
//
// History:
// 05/28/2003 created: Chandrasekhar Rentachintala
//
// Copyright (C) 2003 by Microsoft Corporation. All rights reserved.
//
//---------------------------------------------------------------------------

using System;
using System.ComponentModel;    // for TypeConverter
using System.Globalization;     // for CultureInfo
using System.Reflection;
using System.Windows;
using System.Windows.Input;
using System.Windows.Markup;
using System.Security.Permissions;
using MS.Utility;
using MS.Internal.WindowsBase;

namespace System.Windows.Input
{
    /// <summary>
    /// Key Converter class for converting between a string and the Type of a Key.
    /// Strings are matched case-insensitively against well-known aliases
    /// ("ESC", "CTRL", ...) and against the <see cref="Key"/> enum member names.
    /// </summary>
    /// <ExternalAPI/>
    public class KeyConverter : TypeConverter
    {
        /// <summary>
        /// CanConvertFrom() - only conversion from string is supported.
        /// </summary>
        /// <param name="context">typically unused</param>
        /// <param name="sourceType">type being converted from</param>
        /// <returns>true if <paramref name="sourceType"/> is string</returns>
        /// <ExternalAPI/>
        public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType)
        {
            // Strings are the only supported source representation.
            return sourceType == typeof(string);
        }

        /// <summary>
        /// TypeConverter method override.
        /// </summary>
        /// <param name="context">ITypeDescriptorContext</param>
        /// <param name="destinationType">Type to convert to</param>
        /// <returns>true if conversion is possible</returns>
        public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType)
        {
            // We can convert to a string.
            if (destinationType == typeof(string))
            {
                // When invoked by the serialization engine we can convert to string
                // only for a known (in-range) Key value.
                if (context != null && context.Instance != null)
                {
                    Key key = (Key)context.Instance;
                    return ((int)key >= (int)Key.None && (int)key <= (int)Key.DeadCharProcessed);
                }
            }
            return false;
        }

        /// <summary>
        /// ConvertFrom() - converts a string into the Key it names.
        /// </summary>
        /// <param name="context">typically unused</param>
        /// <param name="culture">typically unused; matching uses the invariant culture</param>
        /// <param name="source">the string to convert</param>
        /// <returns>the boxed <see cref="Key"/> value</returns>
        /// <exception cref="NotSupportedException">the string does not name a key</exception>
        /// <ExternalAPI/>
        public override object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object source)
        {
            if (source is string)
            {
                string fullName = ((string)source).Trim();

                // GetKey returns either a boxed Key, a boxed int (for letters/digits,
                // unboxable to Key because int is the enum's underlying type), or null.
                object key = GetKey(fullName, CultureInfo.InvariantCulture);
                if (key != null)
                {
                    return ((Key)key);
                }

                throw new NotSupportedException(SR.Get(SRID.Unsupported_Key, fullName));
            }
            throw GetConvertFromException(source);
        }

        /// <summary>
        /// ConvertTo() - converts a Key into its canonical string form.
        /// </summary>
        /// <param name="context">typically unused</param>
        /// <param name="culture">culture passed through to MatchKey (currently unused there)</param>
        /// <param name="value">the Key to convert</param>
        /// <param name="destinationType">must be string</param>
        /// <returns>the string form of the key</returns>
        /// <ExternalAPI/>
        public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType)
        {
            if (destinationType == null)
                throw new ArgumentNullException("destinationType");

            if (destinationType == typeof(string) && value != null)
            {
                Key key = (Key)value;

                if (key == Key.None)
                {
                    return String.Empty;
                }

                // Digits and letters map straight back to their character.
                if (key >= Key.D0 && key <= Key.D9)
                {
                    return Char.ToString((char)(int)(key - Key.D0 + '0'));
                }
                if (key >= Key.A && key <= Key.Z)
                {
                    return Char.ToString((char)(int)(key - Key.A + 'A'));
                }

                String strKey = MatchKey(key, culture);

                // NOTE: the original condition was
                // "strKey != null && (strKey.Length != 0 || strKey == String.Empty)",
                // whose second clause is always true for a non-null string; it has been
                // simplified without changing behavior.
                if (strKey != null)
                {
                    return strKey;
                }
            }
            throw GetConvertToException(value, destinationType);
        }

        /// <summary>
        /// Maps a key token (already trimmed) to a boxed Key or boxed int,
        /// or returns null when the token is not recognized.
        /// </summary>
        /// <exception cref="ArgumentException">
        /// single-character tokens that are letters/digits outside 'A'-'Z' / '0'-'9'
        /// (e.g. non-ASCII digits), or unrecognized multi-character tokens
        /// (thrown by Enum.Parse).
        /// </exception>
        private object GetKey(string keyToken, CultureInfo culture)
        {
            if (keyToken == String.Empty)
            {
                return Key.None;
            }

            keyToken = keyToken.ToUpper(culture);

            if (keyToken.Length == 1 && Char.IsLetterOrDigit(keyToken[0]))
            {
                // The explicit range checks are not redundant with IsDigit/IsLetter:
                // they reject non-ASCII digits/letters that IsLetterOrDigit accepts.
                if (Char.IsDigit(keyToken[0]) && (keyToken[0] >= '0' && keyToken[0] <= '9'))
                {
                    return ((int)(Key)(Key.D0 + keyToken[0] - '0'));
                }
                else if (Char.IsLetter(keyToken[0]) && (keyToken[0] >= 'A' && keyToken[0] <= 'Z'))
                {
                    return ((int)(Key)(Key.A + keyToken[0] - 'A'));
                }
                else
                {
                    throw new ArgumentException(SR.Get(SRID.CannotConvertStringToType, keyToken, typeof(Key)));
                }
            }

            // Well-known aliases first, then fall back to the Key enum member names.
            Key keyFound = (Key)(-1);
            switch (keyToken)
            {
                case "ENTER": keyFound = Key.Return; break;
                case "ESC": keyFound = Key.Escape; break;
                case "PGUP": keyFound = Key.PageUp; break;
                case "PGDN": keyFound = Key.PageDown; break;
                case "PRTSC": keyFound = Key.PrintScreen; break;
                case "INS": keyFound = Key.Insert; break;
                case "DEL": keyFound = Key.Delete; break;
                case "WINDOWS":
                case "WIN":
                case "LEFTWINDOWS": keyFound = Key.LWin; break;
                case "RIGHTWINDOWS": keyFound = Key.RWin; break;
                case "APPS":
                case "APPLICATION": keyFound = Key.Apps; break;
                case "BREAK": keyFound = Key.Cancel; break;
                case "BACKSPACE":
                case "BKSP":
                case "BS": keyFound = Key.Back; break;
                case "SHIFT":
                case "LEFTSHIFT": keyFound = Key.LeftShift; break;
                case "RIGHTSHIFT": keyFound = Key.RightShift; break;
                case "CONTROL":
                case "CTRL":
                case "LEFTCTRL": keyFound = Key.LeftCtrl; break;
                case "RIGHTCTRL": keyFound = Key.RightCtrl; break;
                case "ALT":
                case "LEFTALT": keyFound = Key.LeftAlt; break;
                case "RIGHTALT": keyFound = Key.RightAlt; break;
                case "SEMICOLON": keyFound = Key.OemSemicolon; break;
                case "PLUS": keyFound = Key.OemPlus; break;
                case "COMMA": keyFound = Key.OemComma; break;
                case "MINUS": keyFound = Key.OemMinus; break;
                case "PERIOD": keyFound = Key.OemPeriod; break;
                case "QUESTION": keyFound = Key.OemQuestion; break;
                case "TILDE": keyFound = Key.OemTilde; break;
                case "OPENBRACKETS": keyFound = Key.OemOpenBrackets; break;
                case "PIPE": keyFound = Key.OemPipe; break;
                case "CLOSEBRACKETS": keyFound = Key.OemCloseBrackets; break;
                case "QUOTES": keyFound = Key.OemQuotes; break;
                case "BACKSLASH": keyFound = Key.OemBackslash; break;
                case "FINISH": keyFound = Key.OemFinish; break;
                case "ATTN": keyFound = Key.Attn; break;
                case "CRSEL": keyFound = Key.CrSel; break;
                case "EXSEL": keyFound = Key.ExSel; break;
                case "ERASEEOF": keyFound = Key.EraseEof; break;
                case "PLAY": keyFound = Key.Play; break;
                case "ZOOM": keyFound = Key.Zoom; break;
                case "PA1": keyFound = Key.Pa1; break;
                default:
                    // Case-insensitive match against enum member names;
                    // throws ArgumentException for unknown tokens (as before).
                    keyFound = (Key)Enum.Parse(typeof(Key), keyToken, true);
                    break;
            }

            if ((int)keyFound != -1)
            {
                return keyFound;
            }
            return null;
        }

        /// <summary>
        /// Maps a Key to its canonical string, handling the few keys whose
        /// display name differs from the enum member name; returns null for
        /// out-of-range values.
        /// </summary>
        private static string MatchKey(Key key, CultureInfo culture)
        {
            if (key == Key.None)
                return String.Empty;

            switch (key)
            {
                case Key.Back: return "Backspace";
                case Key.LineFeed: return "Clear";
                case Key.Escape: return "Esc";
            }

            if ((int)key >= (int)Key.None && (int)key <= (int)Key.DeadCharProcessed)
                return key.ToString();

            return null;
        }
    }
}
// IExpressionEvaluator.cs // // Author: // Lluis Sanchez Gual <lluis@novell.com> // // Copyright (c) 2008 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
//
//
using System;
using System.Text;
using System.Globalization;
using System.Collections.Generic;
using System.Runtime.Serialization;
using Mono.Debugging.Backend;
using Mono.Debugging.Client;

namespace Mono.Debugging.Evaluation
{
	/// <summary>
	/// Base class for language-specific expression evaluators. Provides a
	/// fallback name-based lookup (locals, parameters, "this" and its members)
	/// plus helpers for rendering target objects as display strings.
	/// </summary>
	public abstract class ExpressionEvaluator
	{
		public ValueReference Evaluate (EvaluationContext ctx, string expression)
		{
			return Evaluate (ctx, expression, null);
		}

		/// <summary>
		/// Resolves <paramref name="expression"/> as a simple name against
		/// locals, parameters, "this" and the members of "this", in that order.
		/// </summary>
		/// <exception cref="EvaluatorException">the name cannot be resolved</exception>
		public virtual ValueReference Evaluate (EvaluationContext ctx, string expression, object expectedType)
		{
			foreach (var variable in ctx.Adapter.GetLocalVariables (ctx))
				if (variable.Name == expression)
					return variable;

			foreach (var parameter in ctx.Adapter.GetParameters (ctx))
				if (parameter.Name == expression)
					return parameter;

			var thisVar = ctx.Adapter.GetThisReference (ctx);
			if (thisVar != null) {
				if (thisVar.Name == expression)
					return thisVar;
				foreach (var cv in thisVar.GetChildReferences (ctx.Options))
					if (cv.Name == expression)
						return cv;
			}

			throw new EvaluatorException ("Invalid Expression: '{0}'", expression);
		}

		/// <summary>Base implementation accepts any expression.</summary>
		public virtual ValidationResult ValidateExpression (EvaluationContext ctx, string expression)
		{
			return new ValidationResult (true, null);
		}

		/// <summary>
		/// Renders a target object as a plain string (DisplayValue preferred
		/// when the adapter produced an EvaluationResult).
		/// </summary>
		public string TargetObjectToString (EvaluationContext ctx, object obj)
		{
			var res = ctx.Adapter.TargetObjectToObject (ctx, obj);
			if (res == null)
				return null;
			if (res is EvaluationResult er)
				return er.DisplayValue ?? er.Value;
			return res.ToString ();
		}

		public EvaluationResult TargetObjectToExpression (EvaluationContext ctx, object obj)
		{
			return ToExpression (ctx, ctx.Adapter.TargetObjectToObject (ctx, obj));
		}

		/// <summary>
		/// Formats a primitive value as source-like text (quoted/escaped
		/// strings and chars, "true"/"false", optional hexadecimal integers).
		/// </summary>
		public virtual EvaluationResult ToExpression (EvaluationContext ctx, object obj)
		{
			if (obj == null)
				return new EvaluationResult ("null");

			if (obj is IntPtr ptr)
				return new EvaluationResult ("0x" + ptr.ToInt64 ().ToString ("x"));

			if (obj is char c) {
				string str;
				if (c == '\'')
					str = @"'\''";
				else if (c == '"')
					str = "'\"'";
				else
					str = EscapeString ("'" + c + "'");
				// Display form includes the numeric code point, e.g. "65 'A'"-style.
				return new EvaluationResult (str, ((int) c) + " " + str);
			}

			if (obj is string s)
				return new EvaluationResult ("\"" + EscapeString (s) + "\"");

			if (obj is bool b)
				return new EvaluationResult (b ? "true" : "false");

			if (obj is decimal d)
				return new EvaluationResult (d.ToString (CultureInfo.InvariantCulture));

			if (obj is EvaluationResult result)
				return result;

			if (ctx.Options.IntegerDisplayFormat == IntegerDisplayFormat.Hexadecimal) {
				// BUGFIX: pad each integer type to its natural width (two hex
				// digits per byte). The 16-bit and 32-bit widths were previously
				// swapped (int/uint used "x4" and short/ushort used "x8").
				string fval = null;
				if (obj is sbyte i8)
					fval = i8.ToString ("x2");
				else if (obj is byte u8)
					fval = u8.ToString ("x2");
				else if (obj is short i16)
					fval = i16.ToString ("x4");
				else if (obj is ushort u16)
					fval = u16.ToString ("x4");
				else if (obj is int i32)
					fval = i32.ToString ("x8");
				else if (obj is uint u32)
					fval = u32.ToString ("x8");
				else if (obj is long i64)
					fval = i64.ToString ("x16");
				else if (obj is ulong u64)
					fval = u64.ToString ("x16");

				if (fval != null)
					return new EvaluationResult ("0x" + fval);
			}

			return new EvaluationResult (obj.ToString ());
		}

		/// <summary>
		/// Escapes a string for display as a C#-style literal body: quotes,
		/// backslashes and control characters become escape sequences, and
		/// unassigned code points become \uXXXX.
		/// </summary>
		public static string EscapeString (string text)
		{
			var sb = new StringBuilder ();

			for (int i = 0; i < text.Length; i++) {
				char c = text[i];
				string txt;
				switch (c) {
				case '"': txt = "\\\""; break;
				case '\0': txt = @"\0"; break;
				case '\\': txt = @"\\"; break;
				case '\a': txt = @"\a"; break;
				case '\b': txt = @"\b"; break;
				case '\f': txt = @"\f"; break;
				case '\v': txt = @"\v"; break;
				case '\n': txt = @"\n"; break;
				case '\r': txt = @"\r"; break;
				case '\t': txt = @"\t"; break;
				default:
					if (char.GetUnicodeCategory (c) == UnicodeCategory.OtherNotAssigned) {
						sb.AppendFormat ("\\u{0:x4}", (int) c);
					} else {
						sb.Append (c);
					}
					continue;
				}
				sb.Append (txt);
			}

			return sb.ToString ();
		}

		/// <summary>Whether identifier lookup is case sensitive in this language.</summary>
		public virtual bool CaseSensitive {
			get { return true; }
		}

		/// <summary>Resolves an expression in the context of a source location.</summary>
		public abstract string Resolve (DebuggerSession session, SourceLocation location, string expression);

		public virtual IEnumerable<ValueReference> GetLocalVariables (EvaluationContext ctx)
		{
			return ctx.Adapter.GetLocalVariables (ctx);
		}

		public virtual ValueReference GetThisReference (EvaluationContext ctx)
		{
			return ctx.Adapter.GetThisReference (ctx);
		}

		public virtual IEnumerable<ValueReference> GetParameters (EvaluationContext ctx)
		{
			return ctx.Adapter.GetParameters (ctx);
		}

		public virtual ValueReference GetCurrentException (EvaluationContext ctx)
		{
			return ctx.Adapter.GetCurrentException (ctx);
		}
	}

	/// <summary>General evaluation failure; the message is a format string.</summary>
	[Serializable]
	public class EvaluatorException: Exception
	{
		protected EvaluatorException (SerializationInfo info, StreamingContext context) : base (info, context)
		{
		}

		public EvaluatorException (string msg, params object[] args): base (string.Format (msg, args))
		{
		}

		public EvaluatorException (Exception innerException, string msg, params object [] args)
			: base (string.Format (msg, args), innerException)
		{
		}
	}

	/// <summary>Thrown when an evaluation is cancelled by the user or a timeout.</summary>
	[Serializable]
	public class EvaluatorAbortedException: EvaluatorException
	{
		protected EvaluatorAbortedException (SerializationInfo info, StreamingContext context) : base (info, context)
		{
		}

		public EvaluatorAbortedException () : base ("Aborted.")
		{
		}
	}

	/// <summary>Thrown for expression constructs the evaluator does not implement.</summary>
	[Serializable]
	public class NotSupportedExpressionException: EvaluatorException
	{
		protected NotSupportedExpressionException (SerializationInfo info, StreamingContext context) : base (info, context)
		{
		}

		public NotSupportedExpressionException () : base ("Expression not supported.")
		{
		}
	}

	/// <summary>Thrown when the session options forbid implicit member evaluation.</summary>
	[Serializable]
	public class ImplicitEvaluationDisabledException: EvaluatorException
	{
		protected ImplicitEvaluationDisabledException (SerializationInfo info, StreamingContext context) : base (info, context)
		{
		}

		public ImplicitEvaluationDisabledException () : base ("Implicit property and method evaluation is disabled.")
		{
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections;
using System.Collections.Generic;
using System.Security.Principal;

namespace System.Security.AccessControl
{
    /// <summary>Whether an access rule grants (Allow) or denies (Deny) access.</summary>
    public enum AccessControlType
    {
        Allow = 0,
        Deny = 1,
    }

    /// <summary>
    /// Common base for access and audit rules: an identity, an access mask,
    /// and inheritance/propagation settings, all validated at construction.
    /// </summary>
    public abstract class AuthorizationRule
    {
        #region Private Members

        private readonly IdentityReference _identity;
        private readonly int _accessMask;
        private readonly bool _isInherited;
        private readonly InheritanceFlags _inheritanceFlags;
        private readonly PropagationFlags _propagationFlags;

        #endregion

        #region Constructors

        /// <summary>
        /// Validates and stores the rule data.
        /// </summary>
        /// <param name="identity">Identity the rule applies to; must be translatable to a SecurityIdentifier.</param>
        /// <param name="accessMask">Non-zero access mask.</param>
        /// <param name="isInherited">True if the rule was inherited from a parent container.</param>
        /// <param name="inheritanceFlags">How the rule is inherited by child objects.</param>
        /// <param name="propagationFlags">How inheritance propagates; ignored when no inheritance flags are set.</param>
        /// <exception cref="ArgumentNullException"><paramref name="identity"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="accessMask"/> is zero, or the identity type is invalid.</exception>
        /// <exception cref="ArgumentOutOfRangeException">Either flags value is outside its defined range.</exception>
        protected internal AuthorizationRule(
            IdentityReference identity,
            int accessMask,
            bool isInherited,
            InheritanceFlags inheritanceFlags,
            PropagationFlags propagationFlags)
        {
            if (identity == null)
            {
                throw new ArgumentNullException(nameof(identity));
            }

            if (accessMask == 0)
            {
                throw new ArgumentException(SR.Argument_ArgumentZero, nameof(accessMask));
            }

            if (inheritanceFlags < InheritanceFlags.None ||
                inheritanceFlags > (InheritanceFlags.ObjectInherit | InheritanceFlags.ContainerInherit))
            {
                throw new ArgumentOutOfRangeException(
                    nameof(inheritanceFlags),
                    SR.Format(SR.Argument_InvalidEnumValue, inheritanceFlags, "InheritanceFlags"));
            }

            if (propagationFlags < PropagationFlags.None ||
                propagationFlags > (PropagationFlags.NoPropagateInherit | PropagationFlags.InheritOnly))
            {
                // BUGFIX: the message previously reported the inheritanceFlags value
                // rather than the offending propagationFlags value.
                throw new ArgumentOutOfRangeException(
                    nameof(propagationFlags),
                    SR.Format(SR.Argument_InvalidEnumValue, propagationFlags, "PropagationFlags"));
            }

            if (!identity.IsValidTargetType(typeof(SecurityIdentifier)))
            {
                throw new ArgumentException(
                    SR.Arg_MustBeIdentityReferenceType,
                    nameof(identity));
            }

            _identity = identity;
            _accessMask = accessMask;
            _isInherited = isInherited;
            _inheritanceFlags = inheritanceFlags;

            // Propagation flags only apply when some inheritance flag is present.
            if (inheritanceFlags != InheritanceFlags.None)
            {
                _propagationFlags = propagationFlags;
            }
            else
            {
                _propagationFlags = PropagationFlags.None;
            }
        }

        #endregion

        #region Properties

        /// <summary>The identity the rule applies to.</summary>
        public IdentityReference IdentityReference
        {
            get { return _identity; }
        }

        /// <summary>The raw access mask for this rule.</summary>
        protected internal int AccessMask
        {
            get { return _accessMask; }
        }

        /// <summary>True if the rule was inherited from a parent container.</summary>
        public bool IsInherited
        {
            get { return _isInherited; }
        }

        /// <summary>How the rule is inherited by child objects.</summary>
        public InheritanceFlags InheritanceFlags
        {
            get { return _inheritanceFlags; }
        }

        /// <summary>How inheritance propagates to child objects.</summary>
        public PropagationFlags PropagationFlags
        {
            get { return _propagationFlags; }
        }

        #endregion
    }

    /// <summary>An authorization rule that allows or denies access.</summary>
    public abstract class AccessRule : AuthorizationRule
    {
        #region Private Methods

        private readonly AccessControlType _type;

        #endregion

        #region Constructors

        /// <summary>
        /// Validates the access control type (and re-validates the flags) before
        /// delegating to the base constructor.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">
        /// <paramref name="type"/> is neither Allow nor Deny, or a flags value is
        /// outside its defined range.
        /// </exception>
        protected AccessRule(
            IdentityReference identity,
            int accessMask,
            bool isInherited,
            InheritanceFlags inheritanceFlags,
            PropagationFlags propagationFlags,
            AccessControlType type)
            : base(identity, accessMask, isInherited, inheritanceFlags, propagationFlags)
        {
            if (type != AccessControlType.Allow &&
                type != AccessControlType.Deny)
            {
                throw new ArgumentOutOfRangeException(nameof(type), SR.ArgumentOutOfRange_Enum);
            }

            if (inheritanceFlags < InheritanceFlags.None ||
                inheritanceFlags > (InheritanceFlags.ObjectInherit | InheritanceFlags.ContainerInherit))
            {
                throw new ArgumentOutOfRangeException(
                    nameof(inheritanceFlags),
                    SR.Format(SR.Argument_InvalidEnumValue, inheritanceFlags, "InheritanceFlags"));
            }

            if (propagationFlags < PropagationFlags.None ||
                propagationFlags > (PropagationFlags.NoPropagateInherit | PropagationFlags.InheritOnly))
            {
                // BUGFIX: the message previously reported the inheritanceFlags value
                // rather than the offending propagationFlags value.
                throw new ArgumentOutOfRangeException(
                    nameof(propagationFlags),
                    SR.Format(SR.Argument_InvalidEnumValue, propagationFlags, "PropagationFlags"));
            }

            _type = type;
        }

        #endregion

        #region Properties

        /// <summary>Whether this rule allows or denies access.</summary>
        public AccessControlType AccessControlType
        {
            get { return _type; }
        }

        #endregion
    }

    /// <summary>An access rule qualified by object-type GUIDs (directory-object ACEs).</summary>
    public abstract class ObjectAccessRule : AccessRule
    {
        #region Private Members

        private readonly Guid _objectType;
        private readonly Guid _inheritedObjectType;
        private readonly ObjectAceFlags _objectFlags = ObjectAceFlags.None;

        #endregion

        #region Constructors

        /// <summary>
        /// Stores the object-type GUIDs; each GUID is kept only when it is
        /// non-empty AND relevant (object-type mask bits set / container inherit set),
        /// and the corresponding ObjectAceFlags bit is recorded.
        /// </summary>
        protected ObjectAccessRule(IdentityReference identity, int accessMask, bool isInherited, InheritanceFlags inheritanceFlags, PropagationFlags propagationFlags, Guid objectType, Guid inheritedObjectType, AccessControlType type)
            : base(identity, accessMask, isInherited, inheritanceFlags, propagationFlags, type)
        {
            if ((!objectType.Equals(Guid.Empty)) &&
                ((accessMask & ObjectAce.AccessMaskWithObjectType) != 0))
            {
                _objectType = objectType;
                _objectFlags |= ObjectAceFlags.ObjectAceTypePresent;
            }
            else
            {
                _objectType = Guid.Empty;
            }

            if ((!inheritedObjectType.Equals(Guid.Empty)) &&
                ((inheritanceFlags & InheritanceFlags.ContainerInherit) != 0))
            {
                _inheritedObjectType = inheritedObjectType;
                _objectFlags |= ObjectAceFlags.InheritedObjectAceTypePresent;
            }
            else
            {
                _inheritedObjectType = Guid.Empty;
            }
        }

        #endregion

        #region Properties

        /// <summary>GUID of the object type the rule applies to (Guid.Empty if none).</summary>
        public Guid ObjectType
        {
            get { return _objectType; }
        }

        /// <summary>GUID of the child object type that inherits the rule (Guid.Empty if none).</summary>
        public Guid InheritedObjectType
        {
            get { return _inheritedObjectType; }
        }

        /// <summary>Which of the two GUIDs are present.</summary>
        public ObjectAceFlags ObjectFlags
        {
            get { return _objectFlags; }
        }

        #endregion
    }

    /// <summary>An authorization rule that audits access attempts.</summary>
    public abstract class AuditRule : AuthorizationRule
    {
        #region Private Members

        private readonly AuditFlags _flags;

        #endregion

        #region Constructors

        /// <summary>
        /// Validates the audit flags (must include at least one of
        /// Success/Failure and nothing else) before delegating to the base.
        /// </summary>
        /// <exception cref="ArgumentException"><paramref name="auditFlags"/> is None.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="auditFlags"/> has undefined bits.</exception>
        protected AuditRule(
            IdentityReference identity,
            int accessMask,
            bool isInherited,
            InheritanceFlags inheritanceFlags,
            PropagationFlags propagationFlags,
            AuditFlags auditFlags)
            : base(identity, accessMask, isInherited, inheritanceFlags, propagationFlags)
        {
            if (auditFlags == AuditFlags.None)
            {
                throw new ArgumentException(SR.Arg_EnumAtLeastOneFlag, nameof(auditFlags));
            }
            else if ((auditFlags & ~(AuditFlags.Success | AuditFlags.Failure)) != 0)
            {
                throw new ArgumentOutOfRangeException(nameof(auditFlags), SR.ArgumentOutOfRange_Enum);
            }

            _flags = auditFlags;
        }

        #endregion

        #region Public Properties

        /// <summary>Which access outcomes (success/failure) are audited.</summary>
        public AuditFlags AuditFlags
        {
            get { return _flags; }
        }

        #endregion
    }

    /// <summary>An audit rule qualified by object-type GUIDs (directory-object ACEs).</summary>
    public abstract class ObjectAuditRule : AuditRule
    {
        #region Private Members

        private readonly Guid _objectType;
        private readonly Guid _inheritedObjectType;
        private readonly ObjectAceFlags _objectFlags = ObjectAceFlags.None;

        #endregion

        #region Constructors

        /// <summary>
        /// Stores the object-type GUIDs using the same relevance rules as
        /// <see cref="ObjectAccessRule"/>.
        /// </summary>
        protected ObjectAuditRule(IdentityReference identity, int accessMask, bool isInherited, InheritanceFlags inheritanceFlags, PropagationFlags propagationFlags, Guid objectType, Guid inheritedObjectType, AuditFlags auditFlags)
            : base(identity, accessMask, isInherited, inheritanceFlags, propagationFlags, auditFlags)
        {
            if ((!objectType.Equals(Guid.Empty)) &&
                ((accessMask & ObjectAce.AccessMaskWithObjectType) != 0))
            {
                _objectType = objectType;
                _objectFlags |= ObjectAceFlags.ObjectAceTypePresent;
            }
            else
            {
                _objectType = Guid.Empty;
            }

            if ((!inheritedObjectType.Equals(Guid.Empty)) &&
                ((inheritanceFlags & InheritanceFlags.ContainerInherit) != 0))
            {
                _inheritedObjectType = inheritedObjectType;
                _objectFlags |= ObjectAceFlags.InheritedObjectAceTypePresent;
            }
            else
            {
                _inheritedObjectType = Guid.Empty;
            }
        }

        #endregion

        #region Public Properties

        /// <summary>GUID of the object type the rule applies to (Guid.Empty if none).</summary>
        public Guid ObjectType
        {
            get { return _objectType; }
        }

        /// <summary>GUID of the child object type that inherits the rule (Guid.Empty if none).</summary>
        public Guid InheritedObjectType
        {
            get { return _inheritedObjectType; }
        }

        /// <summary>Which of the two GUIDs are present.</summary>
        public ObjectAceFlags ObjectFlags
        {
            get { return _objectFlags; }
        }

        #endregion
    }

    /// <summary>A read-only, index-accessible collection of authorization rules.</summary>
    public sealed class AuthorizationRuleCollection : ReadOnlyCollectionBase
    {
        #region Constructors

        public AuthorizationRuleCollection()
            : base()
        {
        }

        #endregion

        #region Public methods

        /// <summary>Appends a rule (internal mutation of the read-only collection).</summary>
        public void AddRule(AuthorizationRule rule)
        {
            InnerList.Add(rule);
        }

        #endregion

        #region ICollection Members

        /// <summary>Copies the rules into <paramref name="rules"/> starting at <paramref name="index"/>.</summary>
        public void CopyTo(AuthorizationRule[] rules, int index)
        {
            ((ICollection)this).CopyTo(rules, index);
        }

        #endregion

        #region Public properties

        /// <summary>Indexer; returns null if the stored element is not an AuthorizationRule.</summary>
        public AuthorizationRule this[int index]
        {
            get { return InnerList[index] as AuthorizationRule; }
        }

        #endregion
    }
}
using System; using Org.BouncyCastle.Crypto.Parameters; using Org.BouncyCastle.Crypto.Utilities; namespace Org.BouncyCastle.Crypto.Engines { /** * an implementation of the AES (Rijndael), from FIPS-197. * <p> * For further details see: <a href="http://csrc.nist.gov/encryption/aes/">http://csrc.nist.gov/encryption/aes/</a>. * * This implementation is based on optimizations from Dr. Brian Gladman's paper and C code at * <a href="http://fp.gladman.plus.com/cryptography_technology/rijndael/">http://fp.gladman.plus.com/cryptography_technology/rijndael/</a> * * There are three levels of tradeoff of speed vs memory * Because java has no preprocessor, they are written as three separate classes from which to choose * * The fastest uses 8Kbytes of static tables to precompute round calculations, 4 256 word tables for encryption * and 4 for decryption. * * The middle performance version uses only one 256 word table for each, for a total of 2Kbytes, * adding 12 rotate operations per round to compute the values contained in the other tables from * the contents of the first. * * The slowest version uses no static tables at all and computes the values in each round. * </p> * <p> * This file contains the middle performance version with 2Kbytes of static tables for round precomputation. 
* </p> */ public class AesEngine : IBlockCipher { // The S box private static readonly byte[] S = { 99, 124, 119, 123, 242, 107, 111, 197, 48, 1, 103, 43, 254, 215, 171, 118, 202, 130, 201, 125, 250, 89, 71, 240, 173, 212, 162, 175, 156, 164, 114, 192, 183, 253, 147, 38, 54, 63, 247, 204, 52, 165, 229, 241, 113, 216, 49, 21, 4, 199, 35, 195, 24, 150, 5, 154, 7, 18, 128, 226, 235, 39, 178, 117, 9, 131, 44, 26, 27, 110, 90, 160, 82, 59, 214, 179, 41, 227, 47, 132, 83, 209, 0, 237, 32, 252, 177, 91, 106, 203, 190, 57, 74, 76, 88, 207, 208, 239, 170, 251, 67, 77, 51, 133, 69, 249, 2, 127, 80, 60, 159, 168, 81, 163, 64, 143, 146, 157, 56, 245, 188, 182, 218, 33, 16, 255, 243, 210, 205, 12, 19, 236, 95, 151, 68, 23, 196, 167, 126, 61, 100, 93, 25, 115, 96, 129, 79, 220, 34, 42, 144, 136, 70, 238, 184, 20, 222, 94, 11, 219, 224, 50, 58, 10, 73, 6, 36, 92, 194, 211, 172, 98, 145, 149, 228, 121, 231, 200, 55, 109, 141, 213, 78, 169, 108, 86, 244, 234, 101, 122, 174, 8, 186, 120, 37, 46, 28, 166, 180, 198, 232, 221, 116, 31, 75, 189, 139, 138, 112, 62, 181, 102, 72, 3, 246, 14, 97, 53, 87, 185, 134, 193, 29, 158, 225, 248, 152, 17, 105, 217, 142, 148, 155, 30, 135, 233, 206, 85, 40, 223, 140, 161, 137, 13, 191, 230, 66, 104, 65, 153, 45, 15, 176, 84, 187, 22, }; // The inverse S-box private static readonly byte[] Si = { 82, 9, 106, 213, 48, 54, 165, 56, 191, 64, 163, 158, 129, 243, 215, 251, 124, 227, 57, 130, 155, 47, 255, 135, 52, 142, 67, 68, 196, 222, 233, 203, 84, 123, 148, 50, 166, 194, 35, 61, 238, 76, 149, 11, 66, 250, 195, 78, 8, 46, 161, 102, 40, 217, 36, 178, 118, 91, 162, 73, 109, 139, 209, 37, 114, 248, 246, 100, 134, 104, 152, 22, 212, 164, 92, 204, 93, 101, 182, 146, 108, 112, 72, 80, 253, 237, 185, 218, 94, 21, 70, 87, 167, 141, 157, 132, 144, 216, 171, 0, 140, 188, 211, 10, 247, 228, 88, 5, 184, 179, 69, 6, 208, 44, 30, 143, 202, 63, 15, 2, 193, 175, 189, 3, 1, 19, 138, 107, 58, 145, 17, 65, 79, 103, 220, 234, 151, 242, 207, 206, 240, 180, 230, 115, 150, 
172, 116, 34, 231, 173, 53, 133, 226, 249, 55, 232, 28, 117, 223, 110, 71, 241, 26, 113, 29, 41, 197, 137, 111, 183, 98, 14, 170, 24, 190, 27, 252, 86, 62, 75, 198, 210, 121, 32, 154, 219, 192, 254, 120, 205, 90, 244, 31, 221, 168, 51, 136, 7, 199, 49, 177, 18, 16, 89, 39, 128, 236, 95, 96, 81, 127, 169, 25, 181, 74, 13, 45, 229, 122, 159, 147, 201, 156, 239, 160, 224, 59, 77, 174, 42, 245, 176, 200, 235, 187, 60, 131, 83, 153, 97, 23, 43, 4, 126, 186, 119, 214, 38, 225, 105, 20, 99, 85, 33, 12, 125, }; // vector used in calculating key schedule (powers of x in GF(256)) private static readonly byte[] rcon = { 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91 }; // precomputation tables of calculations for rounds private static readonly uint[] T0 = { 0xa56363c6, 0x847c7cf8, 0x997777ee, 0x8d7b7bf6, 0x0df2f2ff, 0xbd6b6bd6, 0xb16f6fde, 0x54c5c591, 0x50303060, 0x03010102, 0xa96767ce, 0x7d2b2b56, 0x19fefee7, 0x62d7d7b5, 0xe6abab4d, 0x9a7676ec, 0x45caca8f, 0x9d82821f, 0x40c9c989, 0x877d7dfa, 0x15fafaef, 0xeb5959b2, 0xc947478e, 0x0bf0f0fb, 0xecadad41, 0x67d4d4b3, 0xfda2a25f, 0xeaafaf45, 0xbf9c9c23, 0xf7a4a453, 0x967272e4, 0x5bc0c09b, 0xc2b7b775, 0x1cfdfde1, 0xae93933d, 0x6a26264c, 0x5a36366c, 0x413f3f7e, 0x02f7f7f5, 0x4fcccc83, 0x5c343468, 0xf4a5a551, 0x34e5e5d1, 0x08f1f1f9, 0x937171e2, 0x73d8d8ab, 0x53313162, 0x3f15152a, 0x0c040408, 0x52c7c795, 0x65232346, 0x5ec3c39d, 0x28181830, 0xa1969637, 0x0f05050a, 0xb59a9a2f, 0x0907070e, 0x36121224, 0x9b80801b, 0x3de2e2df, 0x26ebebcd, 0x6927274e, 0xcdb2b27f, 0x9f7575ea, 0x1b090912, 0x9e83831d, 0x742c2c58, 0x2e1a1a34, 0x2d1b1b36, 0xb26e6edc, 0xee5a5ab4, 0xfba0a05b, 0xf65252a4, 0x4d3b3b76, 0x61d6d6b7, 0xceb3b37d, 0x7b292952, 0x3ee3e3dd, 0x712f2f5e, 0x97848413, 0xf55353a6, 0x68d1d1b9, 0x00000000, 0x2cededc1, 0x60202040, 0x1ffcfce3, 0xc8b1b179, 0xed5b5bb6, 0xbe6a6ad4, 0x46cbcb8d, 0xd9bebe67, 0x4b393972, 
0xde4a4a94, 0xd44c4c98, 0xe85858b0, 0x4acfcf85, 0x6bd0d0bb, 0x2aefefc5, 0xe5aaaa4f, 0x16fbfbed, 0xc5434386, 0xd74d4d9a, 0x55333366, 0x94858511, 0xcf45458a, 0x10f9f9e9, 0x06020204, 0x817f7ffe, 0xf05050a0, 0x443c3c78, 0xba9f9f25, 0xe3a8a84b, 0xf35151a2, 0xfea3a35d, 0xc0404080, 0x8a8f8f05, 0xad92923f, 0xbc9d9d21, 0x48383870, 0x04f5f5f1, 0xdfbcbc63, 0xc1b6b677, 0x75dadaaf, 0x63212142, 0x30101020, 0x1affffe5, 0x0ef3f3fd, 0x6dd2d2bf, 0x4ccdcd81, 0x140c0c18, 0x35131326, 0x2fececc3, 0xe15f5fbe, 0xa2979735, 0xcc444488, 0x3917172e, 0x57c4c493, 0xf2a7a755, 0x827e7efc, 0x473d3d7a, 0xac6464c8, 0xe75d5dba, 0x2b191932, 0x957373e6, 0xa06060c0, 0x98818119, 0xd14f4f9e, 0x7fdcdca3, 0x66222244, 0x7e2a2a54, 0xab90903b, 0x8388880b, 0xca46468c, 0x29eeeec7, 0xd3b8b86b, 0x3c141428, 0x79dedea7, 0xe25e5ebc, 0x1d0b0b16, 0x76dbdbad, 0x3be0e0db, 0x56323264, 0x4e3a3a74, 0x1e0a0a14, 0xdb494992, 0x0a06060c, 0x6c242448, 0xe45c5cb8, 0x5dc2c29f, 0x6ed3d3bd, 0xefacac43, 0xa66262c4, 0xa8919139, 0xa4959531, 0x37e4e4d3, 0x8b7979f2, 0x32e7e7d5, 0x43c8c88b, 0x5937376e, 0xb76d6dda, 0x8c8d8d01, 0x64d5d5b1, 0xd24e4e9c, 0xe0a9a949, 0xb46c6cd8, 0xfa5656ac, 0x07f4f4f3, 0x25eaeacf, 0xaf6565ca, 0x8e7a7af4, 0xe9aeae47, 0x18080810, 0xd5baba6f, 0x887878f0, 0x6f25254a, 0x722e2e5c, 0x241c1c38, 0xf1a6a657, 0xc7b4b473, 0x51c6c697, 0x23e8e8cb, 0x7cdddda1, 0x9c7474e8, 0x211f1f3e, 0xdd4b4b96, 0xdcbdbd61, 0x868b8b0d, 0x858a8a0f, 0x907070e0, 0x423e3e7c, 0xc4b5b571, 0xaa6666cc, 0xd8484890, 0x05030306, 0x01f6f6f7, 0x120e0e1c, 0xa36161c2, 0x5f35356a, 0xf95757ae, 0xd0b9b969, 0x91868617, 0x58c1c199, 0x271d1d3a, 0xb99e9e27, 0x38e1e1d9, 0x13f8f8eb, 0xb398982b, 0x33111122, 0xbb6969d2, 0x70d9d9a9, 0x898e8e07, 0xa7949433, 0xb69b9b2d, 0x221e1e3c, 0x92878715, 0x20e9e9c9, 0x49cece87, 0xff5555aa, 0x78282850, 0x7adfdfa5, 0x8f8c8c03, 0xf8a1a159, 0x80898909, 0x170d0d1a, 0xdabfbf65, 0x31e6e6d7, 0xc6424284, 0xb86868d0, 0xc3414182, 0xb0999929, 0x772d2d5a, 0x110f0f1e, 0xcbb0b07b, 0xfc5454a8, 0xd6bbbb6d, 0x3a16162c }; private static readonly 
// Tinv0: the decryption "T-table". For each byte value b, Tinv0[b] packs
// InvSubBytes followed by InvMixColumns into one 32-bit word, so a single
// lookup plus rotations (see Shift) computes a whole inverse-round column.
// Used by DecryptBlock; the other three columns come from rotating these entries.
uint[] Tinv0 = { 0x50a7f451, 0x5365417e, 0xc3a4171a, 0x965e273a, 0xcb6bab3b, 0xf1459d1f, 0xab58faac, 0x9303e34b, 0x55fa3020, 0xf66d76ad, 0x9176cc88, 0x254c02f5, 0xfcd7e54f, 0xd7cb2ac5, 0x80443526, 0x8fa362b5, 0x495ab1de, 0x671bba25, 0x980eea45, 0xe1c0fe5d, 0x02752fc3, 0x12f04c81, 0xa397468d, 0xc6f9d36b, 0xe75f8f03, 0x959c9215, 0xeb7a6dbf, 0xda595295, 0x2d83bed4, 0xd3217458, 0x2969e049, 0x44c8c98e, 0x6a89c275, 0x78798ef4, 0x6b3e5899, 0xdd71b927, 0xb64fe1be, 0x17ad88f0, 0x66ac20c9, 0xb43ace7d, 0x184adf63, 0x82311ae5, 0x60335197, 0x457f5362, 0xe07764b1, 0x84ae6bbb, 0x1ca081fe, 0x942b08f9, 0x58684870, 0x19fd458f, 0x876cde94, 0xb7f87b52, 0x23d373ab, 0xe2024b72, 0x578f1fe3, 0x2aab5566, 0x0728ebb2, 0x03c2b52f, 0x9a7bc586, 0xa50837d3, 0xf2872830, 0xb2a5bf23, 0xba6a0302, 0x5c8216ed, 0x2b1ccf8a, 0x92b479a7, 0xf0f207f3, 0xa1e2694e, 0xcdf4da65, 0xd5be0506, 0x1f6234d1, 0x8afea6c4, 0x9d532e34, 0xa055f3a2, 0x32e18a05, 0x75ebf6a4, 0x39ec830b, 0xaaef6040, 0x069f715e, 0x51106ebd, 0xf98a213e, 0x3d06dd96, 0xae053edd, 0x46bde64d, 0xb58d5491, 0x055dc471, 0x6fd40604, 0xff155060, 0x24fb9819, 0x97e9bdd6, 0xcc434089, 0x779ed967, 0xbd42e8b0, 0x888b8907, 0x385b19e7, 0xdbeec879, 0x470a7ca1, 0xe90f427c, 0xc91e84f8, 0x00000000, 0x83868009, 0x48ed2b32, 0xac70111e, 0x4e725a6c, 0xfbff0efd, 0x5638850f, 0x1ed5ae3d, 0x27392d36, 0x64d90f0a, 0x21a65c68, 0xd1545b9b, 0x3a2e3624, 0xb1670a0c, 0x0fe75793, 0xd296eeb4, 0x9e919b1b, 0x4fc5c080, 0xa220dc61, 0x694b775a, 0x161a121c, 0x0aba93e2, 0xe52aa0c0, 0x43e0223c, 0x1d171b12, 0x0b0d090e, 0xadc78bf2, 0xb9a8b62d, 0xc8a91e14, 0x8519f157, 0x4c0775af, 0xbbdd99ee, 0xfd607fa3, 0x9f2601f7, 0xbcf5725c, 0xc53b6644, 0x347efb5b, 0x7629438b, 0xdcc623cb, 0x68fcedb6, 0x63f1e4b8, 0xcadc31d7, 0x10856342, 0x40229713, 0x2011c684, 0x7d244a85, 0xf83dbbd2, 0x1132f9ae, 0x6da129c7, 0x4b2f9e1d, 0xf330b2dc, 0xec52860d, 0xd0e3c177, 0x6c16b32b, 0x99b970a9, 0xfa489411, 0x2264e947, 0xc48cfca8, 0x1a3ff0a0, 0xd82c7d56, 0xef903322, 0xc74e4987, 0xc1d138d9, 0xfea2ca8c, 0x360bd498, 0xcf81f5a6,
0x28de7aa5, 0x268eb7da, 0xa4bfad3f, 0xe49d3a2c, 0x0d927850, 0x9bcc5f6a, 0x62467e54, 0xc2138df6, 0xe8b8d890, 0x5ef7392e, 0xf5afc382, 0xbe805d9f, 0x7c93d069, 0xa92dd56f, 0xb31225cf, 0x3b99acc8, 0xa77d1810, 0x6e639ce8, 0x7bbb3bdb, 0x097826cd, 0xf418596e, 0x01b79aec, 0xa89a4f83, 0x656e95e6, 0x7ee6ffaa, 0x08cfbc21, 0xe6e815ef, 0xd99be7ba, 0xce366f4a, 0xd4099fea, 0xd67cb029, 0xafb2a431, 0x31233f2a, 0x3094a5c6, 0xc066a235, 0x37bc4e74, 0xa6ca82fc, 0xb0d090e0, 0x15d8a733, 0x4a9804f1, 0xf7daec41, 0x0e50cd7f, 0x2ff69117, 0x8dd64d76, 0x4db0ef43, 0x544daacc, 0xdf0496e4, 0xe3b5d19e, 0x1b886a4c, 0xb81f2cc1, 0x7f516546, 0x04ea5e9d, 0x5d358c01, 0x737487fa, 0x2e410bfb, 0x5a1d67b3, 0x52d2db92, 0x335610e9, 0x1347d66d, 0x8c61d79a, 0x7a0ca137, 0x8e14f859, 0x893c13eb, 0xee27a9ce, 0x35c961b7, 0xede51ce1, 0x3cb1477a, 0x59dfd29c, 0x3f73f255, 0x79ce1418, 0xbf37c773, 0xeacdf753, 0x5baafd5f, 0x146f3ddf, 0x86db4478, 0x81f3afca, 0x3ec468b9, 0x2c342438, 0x5f40a3c2, 0x72c31d16, 0x0c25e2bc, 0x8b493c28, 0x41950dff, 0x7101a839, 0xdeb30c08, 0x9ce4b4d8, 0x90c15664, 0x6184cb7b, 0x70b632d5, 0x745c6c48, 0x4257b8d0 };

// Rotate the 32-bit word 'r' right by 'shift' bits (bits shifted out the
// bottom re-enter at the top). Used to derive the rotated T-table columns.
private uint Shift(
    uint r,
    int shift)
{
    return (r >> shift) | (r << (32 - shift));
}

/* multiply four bytes in GF(2^8) by 'x' {02} in parallel */
private const uint m1 = 0x80808080; // high bit of each byte lane
private const uint m2 = 0x7f7f7f7f; // low seven bits of each byte lane
private const uint m3 = 0x0000001b; // AES reduction constant (x^8 ≡ x^4+x^3+x+1)

// SIMD-within-a-register multiply of all four bytes of x by {02} in GF(2^8):
// shift each byte left one bit, then xor in the reduction constant for every
// byte whose high bit was set (the multiply broadcasts m3 per set lane).
private uint FFmulX(
    uint x)
{
    return ((x & m2) << 1) ^ (((x & m1) >> 7) * m3);
}

/* 
The following defines provide alternative definitions of FFmulX that might give improved performance if a fast 32-bit multiply is not available. 
private int FFmulX(int x) { int u = x & m1; u |= (u >> 1); return ((x & m2) << 1) ^ ((u >>> 3) | (u >>> 6)); } private static final int m4 = 0x1b1b1b1b; private int FFmulX(int x) { int u = x & m1; return ((x & m2) << 1) ^ ((u - (u >>> 7)) & m4); } 
*/

// Inverse MixColumns applied to one packed column: builds {09,0b,0d,0e}
// multiples of x out of repeated FFmulX doublings and byte rotations.
private uint Inv_Mcol(
    uint x)
{
    uint f2 = FFmulX(x);
    uint f4 = FFmulX(f2);
    uint f8 = FFmulX(f4);
    uint f9 = x ^ f8;

    return f2 ^ f4 ^ f8 ^ Shift(f2 ^ f9, 8) ^ Shift(f4 ^ f9, 16) ^ Shift(f9, 24);
}

// Apply the forward S-box to each of the four bytes of x independently
// (the SubWord step of the key schedule).
private uint SubWord(
    uint x)
{
    return (uint)S[x&255] | (((uint)S[(x>>8)&255]) << 8) | (((uint)S[(x>>16)&255]) << 16) | (((uint)S[(x>>24)&255]) << 24);
}

/**
* Calculate the necessary round keys
* The number of calculations depends on key size and block size
* AES specified a fixed block size of 128 bits and key sizes 128/192/256 bits
* This code is written assuming those are the only possible values
*
* Side effect: sets ROUNDS from the key length. For decryption the schedule
* is additionally passed through Inv_Mcol (equivalent inverse cipher form).
*/
private uint[,] GenerateWorkingKey(
    byte[] key,
    bool forEncryption)
{
    int KC = key.Length / 4;  // key length in words
    int t;

    if ((KC != 4) && (KC != 6) && (KC != 8))
        throw new ArgumentException("Key length not 128/192/256 bits.");

    ROUNDS = KC + 6;  // This is not always true for the generalized Rijndael that allows larger block sizes
    uint[,] W = new uint[ROUNDS+1, 4];   // 4 words in a block

    //
    // copy the key into the round key array
    //
    t = 0;
    for (int i = 0; i < key.Length; t++)
    {
        W[t >> 2, t & 3] = Pack.LE_To_UInt32(key, i);
        i+=4;
    }

    //
    // while not enough round key material calculated
    // calculate new values
    //
    int k = (ROUNDS + 1) << 2;
    for (int i = KC; (i < k); i++)
    {
        uint temp = W[(i-1)>>2, (i-1)&3];
        if ((i % KC) == 0)
        {
            // every KC-th word: rotate, substitute and mix in the round constant
            temp = SubWord(Shift(temp, 8)) ^ rcon[(i / KC)-1];
        }
        else if ((KC > 6) && ((i % KC) == 4))
        {
            // 256-bit keys only: extra SubWord in the middle of each cycle
            temp = SubWord(temp);
        }

        W[i>>2, i&3] = W[(i - KC)>>2, (i-KC)&3] ^ temp;
    }

    if (!forEncryption)
    {
        // transform all but the first and last round keys for the
        // equivalent inverse cipher used by DecryptBlock
        for (int j = 1; j < ROUNDS; j++)
        {
            for (int i = 0; i < 4; i++)
            {
                W[j, i] = Inv_Mcol(W[j, i]);
            }
        }
    }

    return W;
}

private int ROUNDS;          // number of rounds; set from key size in GenerateWorkingKey
private uint[,] WorkingKey;  // expanded round-key schedule, set by Init
private uint C0, C1, C2, C3; // current 128-bit block state as four little-endian words
private bool forEncryption;  // cipher direction selected at Init time
private const int BLOCK_SIZE = 16;

/**
* default constructor - 128 bit block size.
*/
public AesEngine()
{
}

/**
* initialise an AES cipher.
*
* @param forEncryption whether or not we are for encryption.
* @param parameters the parameters required to set up the cipher.
* @exception ArgumentException if the parameters argument is
* inappropriate.
*/
public void Init(
    bool forEncryption,
    ICipherParameters parameters)
{
    KeyParameter keyParameter = parameters as KeyParameter;

    if (keyParameter == null)
        throw new ArgumentException("invalid parameter passed to AES init - " + parameters.GetType().Name);

    WorkingKey = GenerateWorkingKey(keyParameter.GetKey(), forEncryption);

    this.forEncryption = forEncryption;
}

public string AlgorithmName
{
    get { return "AES"; }
}

public bool IsPartialBlockOkay
{
    get { return false; }
}

public int GetBlockSize()
{
    return BLOCK_SIZE;
}

/**
* Process one 16-byte block from input[inOff..] into output[outOff..].
*
* @exception InvalidOperationException if Init was never called.
* @exception DataLengthException if either buffer is too short for a full block.
* @return the number of bytes processed (always BLOCK_SIZE).
*/
public int ProcessBlock(
    byte[] input,
    int inOff,
    byte[] output,
    int outOff)
{
    if (WorkingKey == null)
    {
        throw new InvalidOperationException("AES engine not initialised");
    }

    // Fixed: the bounds checks previously used the opaque expression
    // (32 / 2) instead of the BLOCK_SIZE constant declared above.
    if ((inOff + BLOCK_SIZE) > input.Length)
    {
        throw new DataLengthException("input buffer too short");
    }

    if ((outOff + BLOCK_SIZE) > output.Length)
    {
        throw new DataLengthException("output buffer too short");
    }

    UnPackBlock(input, inOff);

    if (forEncryption)
    {
        EncryptBlock(WorkingKey);
    }
    else
    {
        DecryptBlock(WorkingKey);
    }

    PackBlock(output, outOff);

    return BLOCK_SIZE;
}

public void Reset()
{
}

// Load a 16-byte block into the C0..C3 state words (little-endian).
private void UnPackBlock(
    byte[] bytes,
    int off)
{
    C0 = Pack.LE_To_UInt32(bytes, off);
    C1 = Pack.LE_To_UInt32(bytes, off + 4);
    C2 = Pack.LE_To_UInt32(bytes, off + 8);
    C3 = Pack.LE_To_UInt32(bytes, off + 12);
}

// Store the C0..C3 state words back into a 16-byte block (little-endian).
private void PackBlock(
    byte[] bytes,
    int off)
{
    Pack.UInt32_To_LE(C0, bytes, off);
    Pack.UInt32_To_LE(C1, bytes, off + 4);
    Pack.UInt32_To_LE(C2, bytes, off + 8);
    Pack.UInt32_To_LE(C3, bytes, off + 12);
}

// Encrypt the current state in C0..C3 using the round keys in KW.
// The loop processes two rounds per iteration via the T0 table; the final
// round uses the raw S-box because it omits MixColumns.
private void EncryptBlock(
    uint[,] KW)
{
    uint r, r0, r1, r2, r3;

    C0 ^= KW[0, 0];
    C1 ^= KW[0, 1];
    C2 ^= KW[0, 2];
    C3 ^= KW[0, 3];

    for (r = 1; r < ROUNDS - 1;)
    {
        r0 = T0[C0&255] ^ Shift(T0[(C1>>8)&255], 24) ^ Shift(T0[(C2>>16)&255], 16) ^ Shift(T0[(C3>>24)&255], 8) ^ KW[r,0];
        r1 = T0[C1&255] ^ Shift(T0[(C2>>8)&255], 24) ^ Shift(T0[(C3>>16)&255], 16) ^ Shift(T0[(C0>>24)&255], 8) ^ KW[r,1];
        r2 = T0[C2&255] ^ Shift(T0[(C3>>8)&255], 24) ^ Shift(T0[(C0>>16)&255], 16) ^ Shift(T0[(C1>>24)&255], 8) ^ KW[r,2];
        r3 = T0[C3&255] ^ Shift(T0[(C0>>8)&255], 24) ^ Shift(T0[(C1>>16)&255], 16) ^ Shift(T0[(C2>>24)&255], 8) ^ KW[r++,3];
        C0 = T0[r0&255] ^ Shift(T0[(r1>>8)&255], 24) ^ Shift(T0[(r2>>16)&255], 16) ^ Shift(T0[(r3>>24)&255], 8) ^ KW[r,0];
        C1 = T0[r1&255] ^ Shift(T0[(r2>>8)&255], 24) ^ Shift(T0[(r3>>16)&255], 16) ^ Shift(T0[(r0>>24)&255], 8) ^ KW[r,1];
        C2 = T0[r2&255] ^ Shift(T0[(r3>>8)&255], 24) ^ Shift(T0[(r0>>16)&255], 16) ^ Shift(T0[(r1>>24)&255], 8) ^ KW[r,2];
        C3 = T0[r3&255] ^ Shift(T0[(r0>>8)&255], 24) ^ Shift(T0[(r1>>16)&255], 16) ^ Shift(T0[(r2>>24)&255], 8) ^ KW[r++,3];
    }

    r0 = T0[C0&255] ^ Shift(T0[(C1>>8)&255], 24) ^ Shift(T0[(C2>>16)&255], 16) ^ Shift(T0[(C3>>24)&255], 8) ^ KW[r,0];
    r1 = T0[C1&255] ^ Shift(T0[(C2>>8)&255], 24) ^ Shift(T0[(C3>>16)&255], 16) ^ Shift(T0[(C0>>24)&255], 8) ^ KW[r,1];
    r2 = T0[C2&255] ^ Shift(T0[(C3>>8)&255], 24) ^ Shift(T0[(C0>>16)&255], 16) ^ Shift(T0[(C1>>24)&255], 8) ^ KW[r,2];
    r3 = T0[C3&255] ^ Shift(T0[(C0>>8)&255], 24) ^ Shift(T0[(C1>>16)&255], 16) ^ Shift(T0[(C2>>24)&255], 8) ^ KW[r++,3];

    // the final round's table is a simple function of S so we don't use a whole other four tables for it
    C0 = (uint)S[r0&255] ^ (((uint)S[(r1>>8)&255])<<8) ^ (((uint)S[(r2>>16)&255])<<16) ^ (((uint)S[(r3>>24)&255])<<24) ^ KW[r,0];
    C1 = (uint)S[r1&255] ^ (((uint)S[(r2>>8)&255])<<8) ^ (((uint)S[(r3>>16)&255])<<16) ^ (((uint)S[(r0>>24)&255])<<24) ^ KW[r,1];
    C2 = (uint)S[r2&255] ^ (((uint)S[(r3>>8)&255])<<8) ^ (((uint)S[(r0>>16)&255])<<16) ^ (((uint)S[(r1>>24)&255])<<24) ^ KW[r,2];
    C3 = (uint)S[r3&255] ^ (((uint)S[(r0>>8)&255])<<8) ^ (((uint)S[(r1>>16)&255])<<16) ^ (((uint)S[(r2>>24)&255])<<24) ^ KW[r,3];
}
// Decrypt the current state in C0..C3 using the (Inv_Mcol-transformed) round
// keys in KW, walking the schedule backwards from ROUNDS to 0. The loop
// processes two rounds per iteration via the Tinv0 table; the final round
// uses the raw inverse S-box Si because it omits InvMixColumns.
private void DecryptBlock(
    uint[,] KW)
{
    int r;
    uint r0, r1, r2, r3;

    C0 ^= KW[ROUNDS,0];
    C1 ^= KW[ROUNDS,1];
    C2 ^= KW[ROUNDS,2];
    C3 ^= KW[ROUNDS,3];

    for (r = ROUNDS-1; r>1;)
    {
        r0 = Tinv0[C0&255] ^ Shift(Tinv0[(C3>>8)&255], 24) ^ Shift(Tinv0[(C2>>16)&255], 16) ^ Shift(Tinv0[(C1>>24)&255], 8) ^ KW[r,0];
        r1 = Tinv0[C1&255] ^ Shift(Tinv0[(C0>>8)&255], 24) ^ Shift(Tinv0[(C3>>16)&255], 16) ^ Shift(Tinv0[(C2>>24)&255], 8) ^ KW[r,1];
        r2 = Tinv0[C2&255] ^ Shift(Tinv0[(C1>>8)&255], 24) ^ Shift(Tinv0[(C0>>16)&255], 16) ^ Shift(Tinv0[(C3>>24)&255], 8) ^ KW[r,2];
        r3 = Tinv0[C3&255] ^ Shift(Tinv0[(C2>>8)&255], 24) ^ Shift(Tinv0[(C1>>16)&255], 16) ^ Shift(Tinv0[(C0>>24)&255], 8) ^ KW[r--,3];
        C0 = Tinv0[r0&255] ^ Shift(Tinv0[(r3>>8)&255], 24) ^ Shift(Tinv0[(r2>>16)&255], 16) ^ Shift(Tinv0[(r1>>24)&255], 8) ^ KW[r,0];
        C1 = Tinv0[r1&255] ^ Shift(Tinv0[(r0>>8)&255], 24) ^ Shift(Tinv0[(r3>>16)&255], 16) ^ Shift(Tinv0[(r2>>24)&255], 8) ^ KW[r,1];
        C2 = Tinv0[r2&255] ^ Shift(Tinv0[(r1>>8)&255], 24) ^ Shift(Tinv0[(r0>>16)&255], 16) ^ Shift(Tinv0[(r3>>24)&255], 8) ^ KW[r,2];
        C3 = Tinv0[r3&255] ^ Shift(Tinv0[(r2>>8)&255], 24) ^ Shift(Tinv0[(r1>>16)&255], 16) ^ Shift(Tinv0[(r0>>24)&255], 8) ^ KW[r--,3];
    }

    r0 = Tinv0[C0&255] ^ Shift(Tinv0[(C3>>8)&255], 24) ^ Shift(Tinv0[(C2>>16)&255], 16) ^ Shift(Tinv0[(C1>>24)&255], 8) ^ KW[r,0];
    r1 = Tinv0[C1&255] ^ Shift(Tinv0[(C0>>8)&255], 24) ^ Shift(Tinv0[(C3>>16)&255], 16) ^ Shift(Tinv0[(C2>>24)&255], 8) ^ KW[r,1];
    r2 = Tinv0[C2&255] ^ Shift(Tinv0[(C1>>8)&255], 24) ^ Shift(Tinv0[(C0>>16)&255], 16) ^ Shift(Tinv0[(C3>>24)&255], 8) ^ KW[r,2];
    r3 = Tinv0[C3&255] ^ Shift(Tinv0[(C2>>8)&255], 24) ^ Shift(Tinv0[(C1>>16)&255], 16) ^ Shift(Tinv0[(C0>>24)&255], 8) ^ KW[r,3];

    // the final round's table is a simple function of Si so we don't use a whole other four tables for it
    C0 = (uint)Si[r0&255] ^ (((uint)Si[(r3>>8)&255])<<8) ^ (((uint)Si[(r2>>16)&255])<<16) ^ (((uint)Si[(r1>>24)&255])<<24) ^ KW[0,0];
    C1 = (uint)Si[r1&255] ^ (((uint)Si[(r0>>8)&255])<<8) ^ (((uint)Si[(r3>>16)&255])<<16) ^ (((uint)Si[(r2>>24)&255])<<24) ^ KW[0,1];
    C2 = (uint)Si[r2&255] ^ (((uint)Si[(r1>>8)&255])<<8) ^ (((uint)Si[(r0>>16)&255])<<16) ^ (((uint)Si[(r3>>24)&255])<<24) ^ KW[0,2];
    C3 = (uint)Si[r3&255] ^ (((uint)Si[(r2>>8)&255])<<8) ^ (((uint)Si[(r1>>16)&255])<<16) ^ (((uint)Si[(r0>>24)&255])<<24) ^ KW[0,3];
}
}
}
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
using SimpleJSON ;

/// <summary>
/// Polls a simulation hub (localhost:26666) for patient vitals and game-over
/// status once per second, and submits drug injections / call codes back to
/// it through simple HTTP endpoints. GUI is drawn with immediate-mode OnGUI.
/// </summary>
public class Getinformation : MonoBehaviour {

    // Hub endpoints. Fixed: these two strings lacked the "http://" scheme
    // that every POST endpoint in this class already uses; WWW needs an
    // absolute URL with a scheme.
    private string url = "http://localhost:26666/unity/status";
    private string report_url = "http://localhost:26666/unity/report";

    public int i;
    public string dplabel = "start";

    // Current form input and last-known vitals (shown/updated by RepeatedGet).
    string drugname = "Drug Name";
    string dos = "Dosage";
    string heart = "null";
    string oxygen = "null";
    string diastolic_arterialpressure = "null";
    string systolic_arterial_pressure = "null";
    string respiration_rate = "null";

    private string gameStatus = "gaming";     // "gaming" or "debrief"
    private bool game_over_sign = false;      // latched once the hub reports GameOver == "true"
    private string game_over = "false";

    string oxygenStr = "Oxygen Level: ";
    private GUIStyle guiStyle = new GUIStyle();

    void Start(){
        StartCoroutine(RepeatedGet());
    }

    void OnGUI () {
        guiStyle.fontSize = 20;
        guiStyle.normal.textColor = Color.white;
        GUI.contentColor = Color.black;

        if (gameStatus == "gaming") {
            if (GUI.Button(new Rect(10, 120, 100, 30), "Submit"))
            {
                Debug.Log("Submit input to hub");
                StartCoroutine(SendDrug(drugname, dos));
                Debug.Log("FInished Submit input to hub");
            }

            if (GUI.Button(new Rect(600, 10, 100, 30), "Call Code"))
            {
                Debug.Log ("Call code");
                StartCoroutine(SendCallCode("blue"));
            }

            if (GUI.Button (new Rect (600, 40, 100, 30), "Debrief"))
            {
                Debug.Log ("Debrief");
                gameStatus = "debrief";
                // Unity has no built-in HTML renderer, so the debrief page is
                // opened in the system browser rather than in-game.
                Application.OpenURL("http://unity3d.com/");
            }
        }

        if (gameStatus == "debrief") {
            // Debrief content is shown by the external browser page; nothing
            // is drawn in-game for this state.
        }
    }

    /// <summary>
    /// Forever: fetch the status and report endpoints, wait one second, and
    /// refresh the cached vitals / game-over flag from the JSON responses.
    /// </summary>
    IEnumerator RepeatedGet() {
        while (true) {
            WWW w = new WWW (url);
            WWW k = new WWW (report_url);
            yield return w;
            yield return k; // Fixed: k was never awaited before k.text was read below.
            yield return new WaitForSeconds(1);

            if (w.error == null) {
                // Example payload:
                // {"heart_rate":"72","respiration_rate":"16.2651","systolic_arterial_pressure":"106.976",
                //  "diastolic_arterialpressure":"63.8783","oxygen_saturation":"0.968285"}
                var HubResponse = JSON.Parse(w.text);
                dplabel = HubResponse["heart_rate"].Value;
                oxygen = HubResponse["oxygen_saturation"].Value;
                oxygenStr = "Oxygen Level: " + oxygen;
                heart = HubResponse["heart_rate"];
                systolic_arterial_pressure = HubResponse["systolic_arterial_pressure"];
                diastolic_arterialpressure = HubResponse["diastolic_arterialpressure"];
                respiration_rate = HubResponse["respiration_rate"];
            } else {
                Debug.Log("error");
            }

            // Fixed: the report response is now checked for its own error and
            // parsed independently of the status response.
            if (k.error == null) {
                var report_response = JSON.Parse(k.text);
                game_over = report_response ["GameOver"];
                if (game_over.Equals ("true")) {
                    game_over_sign = true;
                }
            }
        }
    }

    /// <summary>POST a drug name and dosage to the hub's inject endpoint.</summary>
    IEnumerator SendDrug(string drugName, string dose){
        Debug.Log(drugName);
        Debug.Log(dose);
        WWWForm form = new WWWForm();
        form.AddField( "drug_name", drugName );
        form.AddField( "dose", dose );
        return PostForm("http://localhost:26666/unity/action/inject", form);
    }

    /// <summary>POST an on/off status to the anesthesia-machine (oxygen) endpoint.</summary>
    IEnumerator SendAnesthesiaMachine(string status) {
        Debug.Log("Adjusting Anesthedia Machine");
        WWWForm form = new WWWForm();
        form.AddField( "status", status);
        return PostForm("http://localhost:26666/unity/action/oxygen", form);
    }

    /// <summary>POST a call-code (e.g. "blue") to the hub's call_code endpoint.</summary>
    IEnumerator SendCallCode(string code) {
        Debug.Log("Call Code");
        WWWForm form = new WWWForm();
        form.AddField( "code", code);
        return PostForm("http://localhost:26666/unity/action/call_code", form);
    }

    // Shared POST helper: submit 'form' to 'endpoint', wait for completion,
    // then log either the error or the response body.
    private IEnumerator PostForm(string endpoint, WWWForm form) {
        WWW download = new WWW( endpoint, form );
        yield return download;
        if(!string.IsNullOrEmpty(download.error)) {
            print( "Error downloading: " + download.error );
        } else {
            Debug.Log(download.text);
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System.Composition;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Microsoft.VisualStudio.LanguageServices.Implementation.F1Help;
using Roslyn.Utilities;

namespace Microsoft.VisualStudio.LanguageServices.CSharp.LanguageService
{
    /// <summary>
    /// Computes the F1 help keyword for the token/trivia at the user's caret
    /// in C# code, so that pressing F1 opens the matching documentation page.
    /// </summary>
    [ExportLanguageService(typeof(IHelpContextService), LanguageNames.CSharp), Shared]
    internal class CSharpHelpContextService : AbstractHelpContextService
    {
        public override string Language
        {
            get
            {
                return "csharp";
            }
        }

        public override string Product
        {
            get
            {
                return "csharp";
            }
        }

        // Decorate a keyword's text with the suffix the help system expects.
        private static string Keyword(string text)
        {
            return text + "_CSharpKeyword";
        }

        /// <summary>
        /// Returns the help term for the given span: tries the touching token
        /// (then its predecessor), then preprocessor/region trivia, then the
        /// word under the caret inside comments. Empty string when nothing applies.
        /// </summary>
        public override async Task<string> GetHelpTermAsync(Document document, TextSpan span, CancellationToken cancellationToken)
        {
            var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
            var syntaxFacts = document.GetLanguageService<ISyntaxFactsService>();

            // For now, find the token under the start of the selection.
            var syntaxTree = await document.GetSyntaxTreeAsync(cancellationToken).ConfigureAwait(false);
            var token = syntaxTree.GetTouchingToken(span.Start, cancellationToken, findInsideTrivia: true);

            if (IsValid(token, span))
            {
                var semanticModel = await document.GetSemanticModelForSpanAsync(span, cancellationToken).ConfigureAwait(false);

                var result = TryGetText(token, semanticModel, document, syntaxFacts, cancellationToken);
                if (string.IsNullOrEmpty(result))
                {
                    // fall back to the previous token (caret may sit just past it)
                    var previousToken = token.GetPreviousToken();
                    if (IsValid(previousToken, span))
                    {
                        result = TryGetText(previousToken, semanticModel, document, syntaxFacts, cancellationToken);
                    }
                }

                return result;
            }

            // caret is inside a #region's message text
            var trivia = root.FindTrivia(span.Start, findInsideTrivia: true);
            if (trivia.Span.IntersectsWith(span) && trivia.Kind() == SyntaxKind.PreprocessingMessageTrivia &&
                trivia.Token.GetAncestor<RegionDirectiveTriviaSyntax>() != null)
            {
                return "#region";
            }

            if (trivia.IsRegularOrDocComment())
            {
                // just find the first "word" that intersects with our position
                var text = await syntaxTree.GetTextAsync(cancellationToken).ConfigureAwait(false);
                int start = span.Start;
                int end = span.Start;

                while (start > 0 && syntaxFacts.IsIdentifierPartCharacter(text[start - 1]))
                {
                    start--;
                }

                while (end < text.Length - 1 && syntaxFacts.IsIdentifierPartCharacter(text[end]))
                {
                    end++;
                }

                return text.GetSubText(TextSpan.FromBounds(start, end)).ToString();
            }

            return string.Empty;
        }

        private bool IsValid(SyntaxToken token, TextSpan span)
        {
            // If the token doesn't actually intersect with our position, give up
            return token.Kind() == SyntaxKind.EndIfDirectiveTrivia || token.Span.IntersectsWith(span);
        }

        // Try each keyword/symbol category in priority order; empty string if none match.
        private string TryGetText(SyntaxToken token, SemanticModel semanticModel, Document document, ISyntaxFactsService syntaxFacts, CancellationToken cancellationToken)
        {
            string text = null;
            if (TryGetTextForContextualKeyword(token, document, syntaxFacts, out text) ||
                TryGetTextForKeyword(token, document, syntaxFacts, out text) ||
                TryGetTextForPreProcessor(token, document, syntaxFacts, out text) ||
                TryGetTextForSymbol(token, semanticModel, document, cancellationToken, out text) ||
                TryGetTextForOperator(token, document, out text))
            {
                return text;
            }

            return string.Empty;
        }

        // Resolve the token to a symbol and format it as a help keyword
        // (e.g. "Namespace.Type.Member"). Fails for built-in operators.
        private bool TryGetTextForSymbol(SyntaxToken token, SemanticModel semanticModel, Document document, CancellationToken cancellationToken, out string text)
        {
            ISymbol symbol;
            if (token.Parent is TypeArgumentListSyntax)
            {
                var genericName = token.GetAncestor<GenericNameSyntax>();
                symbol = semanticModel.GetSymbolInfo(genericName, cancellationToken).Symbol ?? semanticModel.GetTypeInfo(genericName, cancellationToken).Type;
            }
            else if (token.Parent is NullableTypeSyntax && token.IsKind(SyntaxKind.QuestionToken))
            {
                // "T?" maps to the Nullable<T> help page
                text = "System.Nullable`1";
                return true;
            }
            else
            {
                var symbols = semanticModel.GetSymbols(token, document.Project.Solution.Workspace, bindLiteralsToUnderlyingType: true, cancellationToken: cancellationToken);
                symbol = symbols.FirstOrDefault();

                if (symbol == null)
                {
                    var bindableParent = document.GetLanguageService<ISyntaxFactsService>().GetBindableParent(token);
                    var overloads = semanticModel.GetMemberGroup(bindableParent);
                    symbol = overloads.FirstOrDefault();
                }
            }

            // Local: return the name if it's the declaration, otherwise the type
            if (symbol is ILocalSymbol && !symbol.DeclaringSyntaxReferences.Any(d => d.GetSyntax().DescendantTokens().Contains(token)))
            {
                symbol = ((ILocalSymbol)symbol).Type;
            }

            // Range variable: use the type
            if (symbol is IRangeVariableSymbol)
            {
                var info = semanticModel.GetTypeInfo(token.Parent, cancellationToken);
                symbol = info.Type;
            }

            // Just use syntaxfacts for operators
            if (symbol is IMethodSymbol && ((IMethodSymbol)symbol).MethodKind == MethodKind.BuiltinOperator)
            {
                text = null;
                return false;
            }

            text = symbol != null ? Format(symbol) : null;
            return symbol != null;
        }

        // Map operator tokens (including "::", "?:", "=>", compound assignments)
        // to their help keywords.
        private bool TryGetTextForOperator(SyntaxToken token, Document document, out string text)
        {
            var syntaxFacts = document.GetLanguageService<ISyntaxFactsService>();
            if (syntaxFacts.IsOperator(token) || syntaxFacts.IsPredefinedOperator(token) || SyntaxFacts.IsAssignmentExpressionOperatorToken(token.Kind()))
            {
                text = Keyword(syntaxFacts.GetText(token.RawKind));
                return true;
            }

            if (token.IsKind(SyntaxKind.ColonColonToken))
            {
                text = "::_CSharpKeyword";
                return true;
            }

            if (token.Kind() == SyntaxKind.ColonToken && token.Parent is NameColonSyntax)
            {
                text = "cs_namedParameter";
                return true;
            }

            if (token.IsKind(SyntaxKind.QuestionToken) && token.Parent is ConditionalExpressionSyntax)
            {
                text = "?_CSharpKeyword";
                return true;
            }

            if (token.IsKind(SyntaxKind.EqualsGreaterThanToken))
            {
                text = "=>_CSharpKeyword";
                return true;
            }

            if (token.IsKind(SyntaxKind.PlusEqualsToken))
            {
                text = "+=_CSharpKeyword";
                return true;
            }

            if (token.IsKind(SyntaxKind.MinusEqualsToken))
            {
                text = "-=_CSharpKeyword";
                return true;
            }

            text = null;
            return false;
        }

        // "#if"-style directives and the end of a #region line.
        private bool TryGetTextForPreProcessor(SyntaxToken token, Document document, ISyntaxFactsService syntaxFacts, out string text)
        {
            if (syntaxFacts.IsPreprocessorKeyword(token))
            {
                text = "#" + token.Text;
                return true;
            }

            if (token.IsKind(SyntaxKind.EndOfDirectiveToken) && token.GetAncestor<RegionDirectiveTriviaSyntax>() != null)
            {
                text = "#region";
                return true;
            }

            text = null;
            return false;
        }

        // Contextual keywords whose help topic depends on where they appear
        // ("partial" on a method vs. a type, "where" as constraint vs. clause).
        private bool TryGetTextForContextualKeyword(SyntaxToken token, Document document, ISyntaxFactsService syntaxFacts, out string text)
        {
            if (token.IsContextualKeyword())
            {
                switch (token.Kind())
                {
                    case SyntaxKind.PartialKeyword:
                        if (token.Parent.GetAncestorOrThis<MethodDeclarationSyntax>() != null)
                        {
                            text = "partialmethod_CSharpKeyword";
                            return true;
                        }
                        else if (token.Parent.GetAncestorOrThis<ClassDeclarationSyntax>() != null)
                        {
                            text = "partialtype_CSharpKeyword";
                            return true;
                        }

                        break;
                    case SyntaxKind.WhereKeyword:
                        if (token.Parent.GetAncestorOrThis<TypeParameterConstraintClauseSyntax>() != null)
                        {
                            text = "whereconstraint_CSharpKeyword";
                        }
                        else
                        {
                            text = "whereclause_CSharpKeyword";
                        }

                        return true;
                }
            }

            text = null;
            return false;
        }

        // Plain keywords, plus the special cases of "in" inside query clauses,
        // implicitly-typed "var", and the "dynamic" type name.
        private bool TryGetTextForKeyword(SyntaxToken token, Document document, ISyntaxFactsService syntaxFacts, out string text)
        {
            if (token.Kind() == SyntaxKind.InKeyword)
            {
                if (token.GetAncestor<FromClauseSyntax>() != null)
                {
                    text = "from_CSharpKeyword";
                    return true;
                }

                if (token.GetAncestor<JoinClauseSyntax>() != null)
                {
                    text = "join_CSharpKeyword";
                    return true;
                }
            }

            if (token.IsKeyword())
            {
                text = Keyword(token.Text);
                return true;
            }

            if (token.ValueText == "var" && token.IsKind(SyntaxKind.IdentifierToken) &&
                token.Parent.Parent is VariableDeclarationSyntax && token.Parent == ((VariableDeclarationSyntax)token.Parent.Parent).Type)
            {
                text = "var_CSharpKeyword";
                return true;
            }

            if (syntaxFacts.IsTypeNamedDynamic(token, token.Parent))
            {
                text = "dynamic_CSharpKeyword";
                return true;
            }

            text = null;
            return false;
        }

        // Documentation-comment ID style for types/namespaces, with the
        // Nullable<T> special case and a "`n" arity suffix for generics.
        private string FormatTypeOrNamespace(INamespaceOrTypeSymbol symbol)
        {
            var displayString = symbol.ToDisplayString(TypeFormat);

            var type = symbol as ITypeSymbol;
            if (type != null && type.OriginalDefinition.SpecialType == SpecialType.System_Nullable_T)
            {
                return "System.Nullable`1";
            }

            if (symbol.GetTypeArguments().Any())
            {
                return string.Format("{0}`{1}", displayString, symbol.GetTypeArguments().Length);
            }

            return displayString;
        }

        // Format any symbol as a help keyword: types/namespaces directly,
        // locals/parameters/aliases via their type, constructors as ".#ctor",
        // generic members with a "``n" method-arity suffix.
        private string Format(ISymbol symbol)
        {
            if (symbol is ITypeSymbol || symbol is INamespaceSymbol)
            {
                return FormatTypeOrNamespace((INamespaceOrTypeSymbol)symbol);
            }

            if (symbol.MatchesKind(SymbolKind.Alias, SymbolKind.Local, SymbolKind.Parameter))
            {
                return Format(symbol.GetSymbolType());
            }

            var containingType = FormatTypeOrNamespace(symbol.ContainingType);
            var name = symbol.ToDisplayString(NameFormat);

            if (symbol.IsConstructor())
            {
                return string.Format("{0}.#ctor", containingType);
            }

            if (symbol.GetTypeArguments().Any())
            {
                return string.Format("{0}.{1}``{2}", containingType, name, symbol.GetTypeArguments().Length);
            }

            return string.Format("{0}.{1}", containingType, name);
        }
    }
}
// XPlat Apps licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

#if __ANDROID__
namespace XPlat.Device.Geolocation
{
    using System;
    using System.Linq;
    using System.Threading.Tasks;
    using Android.Content;
    using Android.Locations;
    using Android.OS;
    using Java.Lang;
    using XPlat.Foundation;
    using Exception = System.Exception;

    /// <summary>Provides access to the current geographic location.</summary>
    public class Geolocator : IGeolocator
    {
        private readonly object obj = new object(); // gate for LastKnownPosition access

        private readonly LocationManager locationManager;

        private readonly string[] locationProviders; // all providers except the passive one

        private GeolocatorLocationListener locationListener;

        private uint reportInterval = 1;

        private PositionAccuracy desiredAccuracy;

        /// <summary>
        /// Initializes a new instance of the <see cref="Geolocator"/> class using the default <see cref="Android.App.Application.Context"/>.
        /// </summary>
        public Geolocator()
            : this(Android.App.Application.Context)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="Geolocator"/> class.
        /// </summary>
        /// <param name="context">
        /// The Android context.
        /// </param>
        public Geolocator(Context context)
        {
            this.locationManager = (LocationManager)context.GetSystemService(Context.LocationService);
            this.locationProviders = this.locationManager.GetProviders(false).Where(p => p != LocationManager.PassiveProvider).ToArray();
        }

        /// <summary>Raised when the location is updated.</summary>
        public event TypedEventHandler<IGeolocator, PositionChangedEventArgs> PositionChanged;

        /// <summary>Raised when the ability of the Geolocator to provide updated location changes.</summary>
        public event TypedEventHandler<IGeolocator, StatusChangedEventArgs> StatusChanged;

        /// <summary>Gets the time used for converting the Android <see cref="Location"/> objects Time property to a DateTime.</summary>
        public static DateTime AndroidLocationTime => new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

        /// <summary>Gets the last known position recorded by the Geolocator.</summary>
        public Geoposition LastKnownPosition { get; private set; }

        /// <summary>Gets or sets the requested minimum time interval between location updates, in milliseconds. If your application requires updates infrequently, set this value so that location services can conserve power by calculating location only when needed.</summary>
        public uint ReportInterval
        {
            get => this.reportInterval;
            set
            {
                this.reportInterval = value;

                // propagate to the active listener if updates are already running
                if (this.locationListener != null)
                {
                    this.locationListener.ReportInterval = value;
                }
            }
        }

        /// <summary>Gets or sets the distance of movement, in meters, relative to the coordinate from the last PositionChanged event, that is required for the Geolocator to raise a PositionChanged event.</summary>
        public double MovementThreshold { get; set; }

        /// <summary>Gets the status that indicates the ability of the Geolocator to provide location updates.</summary>
        public PositionStatus LocationStatus { get; private set; }

        /// <summary>Gets or sets the accuracy level at which the Geolocator provides location updates.</summary>
        public PositionAccuracy DesiredAccuracy
        {
            get => this.desiredAccuracy;
            set
            {
                this.desiredAccuracy = value;

                // map the coarse enum onto a concrete meter radius
                switch (value)
                {
                    case PositionAccuracy.Default:
                        if (this.DesiredAccuracyInMeters != 500)
                        {
                            this.DesiredAccuracyInMeters = 500;
                        }

                        break;
                    case PositionAccuracy.High:
                        if (this.DesiredAccuracyInMeters != 10)
                        {
                            this.DesiredAccuracyInMeters = 10;
                        }

                        break;
                }
            }
        }

        /// <summary>Gets or sets the desired accuracy in meters for data returned from the location service.</summary>
        public uint DesiredAccuracyInMeters { get; set; }

        /// <summary>Starts an asynchronous operation to retrieve the current location of the device.</summary>
        /// <returns>An asynchronous operation that, upon completion, returns a Geoposition marking the found location.</returns>
        public Task<Geoposition> GetGeopositionAsync()
        {
            // NOTE(review): TimeSpan.MaxValue as maximumAge would make
            // DateTime.UtcNow.Subtract(maximumAge) below throw if the
            // access-denied path is ever taken with a non-null LastKnownPosition — confirm.
            return this.GetGeopositionAsync(TimeSpan.MaxValue, TimeSpan.FromMinutes(1));
        }

        /// <summary>
        /// Starts an asynchronous operation to retrieve the current location of the device.
        /// </summary>
        /// <param name="maximumAge">
        /// The maximum acceptable age of cached location data.
        /// </param>
        /// <param name="timeout">
        /// The timeout.
        /// </param>
        /// <returns>
        /// An asynchronous operation that, upon completion, returns a Geoposition marking the found location.
        /// </returns>
        public async Task<Geoposition> GetGeopositionAsync(TimeSpan maximumAge, TimeSpan timeout)
        {
            var tcs = new TaskCompletionSource<Geoposition>();

            GeolocationAccessStatus access = await this.RequestAccessAsync();
            if (access == GeolocationAccessStatus.Allowed)
            {
                // single-element array so the cleanup lambda can capture the retriever
                LocationRetriever[] retriever = { null };
                retriever[0] = new LocationRetriever(
                    this.DesiredAccuracyInMeters,
                    timeout,
                    this.locationProviders.Where(this.locationManager.IsProviderEnabled),
                    () =>
                    {
                        try
                        {
                            this.locationManager.RemoveUpdates(retriever[0]);
                        }
                        catch (Exception ex)
                        {
                            System.Diagnostics.Debug.WriteLine(ex.ToString());
                        }
                    });

                try
                {
                    Looper looperThread = Looper.MyLooper() ?? Looper.MainLooper;

                    int numEnabledProviders = 0;
                    foreach (string provider in this.locationProviders)
                    {
                        if (this.locationManager.IsProviderEnabled(provider))
                        {
                            numEnabledProviders++;
                        }

                        // note: updates are requested even for disabled providers
                        this.locationManager.RequestLocationUpdates(provider, 0, 0, retriever[0], looperThread);
                    }

                    if (numEnabledProviders == 0)
                    {
                        // nothing can deliver a fix; tear down and fail
                        try
                        {
                            this.locationManager.RemoveUpdates(retriever[0]);
                        }
                        catch (Exception ex)
                        {
                            System.Diagnostics.Debug.WriteLine(ex.ToString());
                        }

                        tcs.TrySetException(
                            new GeolocatorException("A location cannot be retrieved as the provider is unavailable."));

                        return await tcs.Task;
                    }
                }
                catch (SecurityException ex)
                {
                    tcs.TrySetException(
                        new GeolocatorException("A location cannot be retrieved as the access was unauthorized.", ex));

                    return await tcs.Task;
                }

                return await retriever[0].Task;
            }

            lock (this.obj)
            {
                // NOTE(review): this branch only runs when access was NOT allowed.
                // The condition waits for a NEW position when the cached one is
                // fresh, and returns the cached one when it is stale — that
                // looks inverted relative to the maximumAge contract; confirm
                // against callers before changing.
                if (this.LastKnownPosition == null || (this.LastKnownPosition.Coordinate != null
                                                       && !(this.LastKnownPosition.Coordinate.Timestamp
                                                            <= DateTime.UtcNow.Subtract(maximumAge))))
                {
                    // Attempts to get the current location based on the event handler of this Geolocator.
                    TypedEventHandler<IGeolocator, PositionChangedEventArgs> positionResponse = null;
                    positionResponse = (s, e) =>
                    {
                        tcs.TrySetResult(e.Position);
                        this.PositionChanged -= positionResponse;
                    };

                    this.PositionChanged += positionResponse;
                }
                else
                {
                    tcs.SetResult(this.LastKnownPosition);
                }
            }

            return await tcs.Task;
        }

        /// <summary>Requests permission to access location data.</summary>
        /// <returns>A GeolocationAccessStatus that indicates if permission to location data has been granted.</returns>
        public Task<GeolocationAccessStatus> RequestAccessAsync()
        {
            // "allowed" here means at least one non-passive provider is enabled
            GeolocationAccessStatus status = this.locationProviders.Any(this.locationManager.IsProviderEnabled)
                                                 ? GeolocationAccessStatus.Allowed
                                                 : GeolocationAccessStatus.Denied;

            // only wire up the listener once, and only when allowed
            if (this.locationListener != null || status != GeolocationAccessStatus.Allowed)
            {
                return Task.FromResult(status);
            }

            this.locationListener = new GeolocatorLocationListener(
                this.locationManager,
                this.ReportInterval,
                this.locationProviders);

            this.locationListener.PositionChanged += this.LocationListener_PositionChanged;
            this.locationListener.StatusChanged += this.LocationListener_StatusChanged;

            Looper looperThread = Looper.MyLooper() ?? Looper.MainLooper;
            foreach (string provider in this.locationProviders)
            {
                this.locationManager.RequestLocationUpdates(
                    provider,
                    this.reportInterval,
                    (float)this.MovementThreshold,
                    this.locationListener,
                    looperThread);
            }

            return Task.FromResult(status);
        }

        // Cache the newest fix and re-raise the event under the lock.
        private void LocationListener_PositionChanged(GeolocatorLocationListener sender, PositionChangedEventArgs args)
        {
            lock (this.obj)
            {
                this.LastKnownPosition = args.Position;
                this.PositionChanged?.Invoke(this, args);
            }
        }

        // Track provider status and re-raise the event.
        private void LocationListener_StatusChanged(GeolocatorLocationListener sender, StatusChangedEventArgs args)
        {
            this.LocationStatus = args.Status;
            this.StatusChanged?.Invoke(this, args);
        }
    }
}
#endif
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using Xunit;

/// <summary>
/// Test helpers for enumerating logical drives, probing drive state/format, and
/// building directories, files and exact-length paths for IO tests.
/// Windows-specific: drive queries go through Win32 APIs via <c>DllImports</c>.
/// </summary>
internal class IOServices
{
    /// <summary>Enumerates all logical drives that are currently ready (media present).</summary>
    public static IEnumerable<string> GetReadyDrives()
    {
        foreach (string drive in GetLogicalDrives())
        {
            if (IsReady(drive))
                yield return drive;
        }
    }

    /// <summary>Returns the first drive that is not ready (e.g. an empty optical drive), or null if none exists.</summary>
    public static string GetNotReadyDrive()
    {
        string[] drives = GetLogicalDrives();
        foreach (string drive in drives)
        {
            if (!IsReady(drive))
                return drive;
        }

        return null;
    }

    /// <summary>Returns a drive root (e.g. @"Q:\") that does not exist on this machine, or null if A-Z are all in use.</summary>
    public static string GetNonExistentDrive()
    {
        string[] availableDrives = GetLogicalDrives();
        for (char drive = 'A'; drive <= 'Z'; drive++)
        {
            if (!availableDrives.Contains(drive + @":\"))
                return drive + @":\";
        }

        return null;
    }

    public static string GetNtfsDriveOtherThanCurrent()
    {
        return GetNtfsDriveOtherThan(GetCurrentDrive());
    }

    /// <summary>Returns a ready, fixed, NTFS-formatted drive different from <paramref name="drive"/>, or null.</summary>
    public static string GetNtfsDriveOtherThan(string drive)
    {
        foreach (string otherDrive in GetLogicalDrives())
        {
            if (string.Equals(drive, otherDrive, StringComparison.OrdinalIgnoreCase))
                continue;

            if (!IsFixed(otherDrive))
                continue;

            if (!IsReady(otherDrive))
                continue;

            if (IsDriveNTFS(otherDrive))
                return otherDrive;
        }

        return null;
    }

    public static string GetNonNtfsDriveOtherThanCurrent()
    {
        return GetNonNtfsDriveOtherThan(GetCurrentDrive());
    }

    /// <summary>Returns a ready non-NTFS drive different from <paramref name="drive"/>, or null.</summary>
    public static string GetNonNtfsDriveOtherThan(string drive)
    {
        foreach (string otherDrive in GetLogicalDrives())
        {
            if (string.Equals(drive, otherDrive, StringComparison.OrdinalIgnoreCase))
                continue;

            if (!IsReady(otherDrive))
                continue;

            if (!IsDriveNTFS(otherDrive))
                return otherDrive;
        }

        return null;
    }

    /// <summary>Builds a path of exactly <paramref name="characterCount"/> characters, optionally prefixed with the extended-path (\\?\) prefix.</summary>
    public static PathInfo GetPath(string rootPath, int characterCount, bool extended)
    {
        if (extended)
            rootPath = IOInputs.ExtendedPrefix + rootPath;

        return GetPath(rootPath, characterCount);
    }

    /// <summary>
    /// Builds a directory path of exactly <paramref name="characterCount"/> characters under
    /// <paramref name="rootPath"/> by appending GUID-named segments, collecting each intermediate path.
    /// </summary>
    /// <remarks>NOTE(review): <paramref name="maxComponent"/> is currently unused; kept for source compatibility with existing callers.</remarks>
    public static PathInfo GetPath(string rootPath, int characterCount, int maxComponent = IOInputs.MaxComponent)
    {
        List<string> paths = new List<string>();

        rootPath = rootPath.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);

        StringBuilder path = new StringBuilder(characterCount);
        path.Append(rootPath);

        while (path.Length < characterCount)
        {
            // Add directory separator after each dir but not at the end of the path
            path.Append(Path.DirectorySeparatorChar);

            // Continue adding guids until the character count is hit
            string guid = Guid.NewGuid().ToString();
            path.Append(guid.Substring(0, Math.Min(characterCount - path.Length, guid.Length)));

            if (path.Length + 1 == characterCount)
            {
                // If only one character is missing add a k!
                path.Append('k');
            }

            paths.Add(path.ToString());
        }

        // xUnit convention is Assert.Equal(expected, actual); characterCount is the expected length.
        Assert.Equal(characterCount, path.Length);

        return new PathInfo(paths.ToArray());
    }

    /// <summary>Creates one directory per name under <paramref name="rootPath"/> and returns the full paths.</summary>
    public static IEnumerable<string> CreateDirectories(string rootPath, params string[] names)
    {
        List<string> paths = new List<string>();

        foreach (string name in names)
        {
            string path = Path.Combine(rootPath, name);

            Directory.CreateDirectory(path);

            paths.Add(path);
        }

        return paths;
    }

    /// <summary>Creates one empty file per name under <paramref name="rootPath"/> and returns the full paths.</summary>
    public static IEnumerable<string> CreateFiles(string rootPath, params string[] names)
    {
        List<string> paths = new List<string>();

        foreach (string name in names)
        {
            string path = Path.Combine(rootPath, name);

            // Dispose deterministically (even if Add were to throw) so the handle is
            // released before the caller operates on the path.
            using (FileStream stream = File.Create(path))
            {
            }

            paths.Add(path);
        }

        return paths;
    }

    /// <summary>Appends a trailing directory separator unless the path is empty or already ends with one.</summary>
    public static string AddTrailingSlashIfNeeded(string path)
    {
        if (path.Length > 0 && path[path.Length - 1] != Path.DirectorySeparatorChar && path[path.Length - 1] != Path.AltDirectorySeparatorChar)
        {
            path = path + Path.DirectorySeparatorChar;
        }

        return path;
    }

    /// <summary>Removes all trailing directory separators (primary and alternate).</summary>
    public static string RemoveTrailingSlash(string path)
    {
        return path.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
    }

    private static string[] GetLogicalDrives()
    {
        // From .NET Framework's Directory.GetLogicalDrives.
        // The Win32 call returns a bitmask: bit 0 = A:, bit 1 = B:, and so on.
        int drives = DllImports.GetLogicalDrives();
        if (drives == 0)
            throw new InvalidOperationException();

        // First pass: count the set bits to size the result array.
        uint d = (uint)drives;
        int count = 0;
        while (d != 0)
        {
            if (((int)d & 1) != 0)
                count++;
            d >>= 1;
        }

        // Second pass: materialize each root as "X:\".
        string[] result = new string[count];
        char[] root = new char[] { 'A', ':', '\\' };
        d = (uint)drives;
        count = 0;
        while (d != 0)
        {
            if (((int)d & 1) != 0)
            {
                result[count++] = new string(root);
            }
            d >>= 1;
            root[0]++;
        }

        return result;
    }

    public static string GetCurrentDrive()
    {
        return Path.GetPathRoot(Directory.GetCurrentDirectory());
    }

    public static bool IsDriveNTFS(string drive)
    {
        if (PlatformDetection.IsWinRT)
        {
            // we cannot determine filesystem so assume NTFS
            return true;
        }

        var di = new DriveInfo(drive);
        return string.Equals(di.DriveFormat, "NTFS", StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>Returns the number of free bytes available to the current user on <paramref name="drive"/>.</summary>
    /// <exception cref="IOException">The underlying Win32 call failed.</exception>
    public static long GetAvailableFreeBytes(string drive)
    {
        long ignored;
        long userBytes;
        if (!DllImports.GetDiskFreeSpaceEx(drive, out userBytes, out ignored, out ignored))
        {
            throw new IOException("DriveName: " + drive + " ErrorCode:" + Marshal.GetLastWin32Error());
        }

        return userBytes;
    }

    private static bool IsReady(string drive)
    {
        const int ERROR_NOT_READY = 0x00000015;

        long ignored;
        if (!DllImports.GetDiskFreeSpaceEx(drive, out ignored, out ignored, out ignored))
        {
            // Any failure other than ERROR_NOT_READY is still treated as "ready" here,
            // mirroring the original behavior (e.g. access denied does not mean no media).
            return Marshal.GetLastWin32Error() != ERROR_NOT_READY;
        }

        return true;
    }

    private static bool IsFixed(string drive)
    {
        // 3 == DRIVE_FIXED per the WinAPI GetDriveType documentation.
        return DllImports.GetDriveType(drive) == 3;
    }
}
using System;

namespace Xevle.Core.DataTypes
{
	/// <summary>
	/// Class for changing the byte-order of values and arrays of value types.
	/// </summary>
	/// <threadsafety static="true" instance="true"/>
	public static class ByteOrder
	{
		#region Swap for values
		/// <summary>
		/// Reverses the byte-order of a value.
		/// </summary>
		/// <param name="value">The value which byte-order is to be reversed.</param>
		/// <returns>The byte-order reversed value.</returns>
		public static char Swap(char value)
		{
			// Low byte to high position, high byte to low position; cast truncates to 16 bits.
			return (char)(value << 8 | (value >> 8 & 255));
		}

		/// <summary>
		/// Reverses the byte-order of a value.
		/// </summary>
		/// <param name="value">The value which byte-order is to be reversed.</param>
		/// <returns>The byte-order reversed value.</returns>
		public static short Swap(short value)
		{
			return (short)(value << 8 | (value >> 8 & 255));
		}

		/// <summary>
		/// Reverses the byte-order of a value.
		/// </summary>
		/// <param name="value">The value which byte-order is to be reversed.</param>
		/// <returns>The byte-order reversed value.</returns>
		public static ushort Swap(ushort value)
		{
			return (ushort)(value << 8 | (value >> 8 & 255));
		}

		/// <summary>
		/// Reverses the byte-order of a value.
		/// </summary>
		/// <param name="value">The value which byte-order is to be reversed.</param>
		/// <returns>The byte-order reversed value.</returns>
		public static int Swap(int value)
		{
			// The &-masks strip the bits dragged in by the arithmetic right shift on int.
			return value << 24 | (value & 0xff00) << 8 | (value >> 8 & 0xff00) | (value >> 24 & 0xff);
		}

		/// <summary>
		/// Reverses the byte-order of a value.
		/// </summary>
		/// <param name="value">The value which byte-order is to be reversed.</param>
		/// <returns>The byte-order reversed value.</returns>
		public static uint Swap(uint value)
		{
			return value << 24 | (value & 0xff00) << 8 | (value >> 8 & 0xff00) | (value >> 24 & 0xff);
		}

		/// <summary>
		/// Reverses the byte-order of a value.
		/// </summary>
		/// <param name="value">The value which byte-order is to be reversed.</param>
		/// <returns>The byte-order reversed value.</returns>
		public static long Swap(long value)
		{
			// Swap each 32-bit half independently, then exchange the halves.
			// (A pointer-based variant was measured a tiny bit slower and requires unsafe code.)
			uint a = (uint)value;
			a = a << 24 | (a & 0xff00) << 8 | (a >> 8 & 0xff00) | (a >> 24 & 0xff);
			uint b = (uint)(value >> 32);
			return (long)a << 32 | b << 24 | (b & 0xff00) << 8 | (b >> 8 & 0xff00) | (b >> 24 & 0xff);
		}

		/// <summary>
		/// Reverses the byte-order of a value.
		/// </summary>
		/// <param name="value">The value which byte-order is to be reversed.</param>
		/// <returns>The byte-order reversed value.</returns>
		public static ulong Swap(ulong value)
		{
			// Same half-swap scheme as the long overload.
			uint a = (uint)value;
			a = a << 24 | (a & 0xff00) << 8 | (a >> 8 & 0xff00) | (a >> 24 & 0xff);
			uint b = (uint)(value >> 32);
			return (ulong)a << 32 | b << 24 | (b & 0xff00) << 8 | (b >> 8 & 0xff00) | (b >> 24 & 0xff);
		}

		/// <summary>
		/// Reverses the byte-order of a value.
		/// </summary>
		/// <param name="value">The value which byte-order is to be reversed.</param>
		/// <returns>The byte-order reversed value.</returns>
		public static UInt128 Swap(UInt128 value)
		{
			// Byte-reversing a 128-bit value swaps the two 64-bit halves and reverses each.
			return new UInt128(Swap(value.Low), Swap(value.High));
		}

		/// <summary>
		/// Reverses the byte-order of a value.
		/// </summary>
		/// <param name="value">The value which byte-order is to be reversed.</param>
		/// <returns>The byte-order reversed value.</returns>
		public static unsafe float Swap(float value)
		{
			// Reinterpret the bits as int, swap, and reinterpret back (no numeric conversion).
			float ret = 0;
			int v = *(int*)&value;
			*(int*)&ret = v << 24 | (v & 0xff00) << 8 | (v >> 8 & 0xff00) | (v >> 24 & 0xff);
			return ret;
		}

		/// <summary>
		/// Reverses the byte-order of a value.
		/// </summary>
		/// <param name="value">The value which byte-order is to be reversed.</param>
		/// <returns>The byte-order reversed value.</returns>
		public static unsafe double Swap(double value)
		{
			// Treat the double as two 32-bit words: swap each word and store them crosswise.
			double ret;
			int* pVal = (int*)&value;
			int* pRet = (int*)&ret;
			int a = *pVal;
			pRet[1] = a << 24 | (a & 0xff00) << 8 | (a >> 8 & 0xff00) | (a >> 24 & 0xff);
			int b = pVal[1];
			*pRet = b << 24 | (b & 0xff00) << 8 | (b >> 8 & 0xff00) | (b >> 24 & 0xff);
			return ret;
		}
		#endregion

		#region Swap for arrays of value types
		/// <summary>
		/// Reverses the byte-order of the values of an array.
		/// </summary>
		/// <param name="values">The array of values which byte-order is to be reversed; null yields null.</param>
		/// <returns>The byte-order reversed values.</returns>
		/// <overloads>These functions reverse the byte-order of the values of an array.</overloads>
		public static unsafe char[] Swap(char[] values)
		{
			if (values == null) return null;
			if (values.Length == 0) return new char[0];

			int length = values.Length;
			char[] ret = new char[length];

			fixed (char* pValue = values, pRet = ret)
			{
				byte* dest = (byte*)pRet;
				byte* src = (byte*)pValue;
				for (int i = 0; i < length; i++)
				{
					dest[0] = src[1];
					dest[1] = src[0];
					dest += 2;
					src += 2;
				}
			}

			return ret;
		}

		/// <summary>
		/// Reverses the byte-order of the values of an array.
		/// </summary>
		/// <param name="values">The array of values which byte-order is to be reversed; null yields null.</param>
		/// <returns>The byte-order reversed values.</returns>
		public static unsafe short[] Swap(short[] values)
		{
			if (values == null) return null;
			if (values.Length == 0) return new short[0];

			int length = values.Length;
			short[] ret = new short[length];

			fixed (short* pValue = values, pRet = ret)
			{
				byte* dest = (byte*)pRet;
				byte* src = (byte*)pValue;
				for (int i = 0; i < length; i++)
				{
					dest[0] = src[1];
					dest[1] = src[0];
					dest += 2;
					src += 2;
				}
			}

			return ret;
		}

		/// <summary>
		/// Reverses the byte-order of the values of an array.
		/// </summary>
		/// <param name="values">The array of values which byte-order is to be reversed; null yields null.</param>
		/// <returns>The byte-order reversed values.</returns>
		public static unsafe ushort[] Swap(ushort[] values)
		{
			if (values == null) return null;
			if (values.Length == 0) return new ushort[0];

			int length = values.Length;
			ushort[] ret = new ushort[length];

			fixed (ushort* pValue = values, pRet = ret)
			{
				byte* dest = (byte*)pRet;
				byte* src = (byte*)pValue;
				for (int i = 0; i < length; i++)
				{
					dest[0] = src[1];
					dest[1] = src[0];
					dest += 2;
					src += 2;
				}
			}

			return ret;
		}

		/// <summary>
		/// Reverses the byte-order of the values of an array.
		/// </summary>
		/// <param name="values">The array of values which byte-order is to be reversed; null yields null.</param>
		/// <returns>The byte-order reversed values.</returns>
		public static unsafe int[] Swap(int[] values)
		{
			if (values == null) return null;
			if (values.Length == 0) return new int[0];

			int length = values.Length;
			int[] ret = new int[length];

			fixed (int* pValue = values, pRet = ret)
			{
				byte* dest = (byte*)pRet;
				byte* src = (byte*)pValue;
				for (int i = 0; i < length; i++)
				{
					dest[0] = src[3];
					dest[1] = src[2];
					dest[2] = src[1];
					dest[3] = src[0];
					dest += 4;
					src += 4;
				}
			}

			return ret;
		}

		/// <summary>
		/// Reverses the byte-order of the values of an array.
		/// </summary>
		/// <param name="values">The array of values which byte-order is to be reversed; null yields null.</param>
		/// <returns>The byte-order reversed values.</returns>
		public static unsafe uint[] Swap(uint[] values)
		{
			if (values == null) return null;
			if (values.Length == 0) return new uint[0];

			int length = values.Length;
			uint[] ret = new uint[length];

			fixed (uint* pValue = values, pRet = ret)
			{
				byte* dest = (byte*)pRet;
				byte* src = (byte*)pValue;
				for (int i = 0; i < length; i++)
				{
					dest[0] = src[3];
					dest[1] = src[2];
					dest[2] = src[1];
					dest[3] = src[0];
					dest += 4;
					src += 4;
				}
			}

			return ret;
		}

		/// <summary>
		/// Reverses the byte-order of the values of an array.
		/// </summary>
		/// <param name="values">The array of values which byte-order is to be reversed; null yields null.</param>
		/// <returns>The byte-order reversed values.</returns>
		public static unsafe long[] Swap(long[] values)
		{
			if (values == null) return null;
			if (values.Length == 0) return new long[0];

			int length = values.Length;
			long[] ret = new long[length];

			fixed (long* pValue = values, pRet = ret)
			{
				byte* dest = (byte*)pRet;
				byte* src = (byte*)pValue;
				for (int i = 0; i < length; i++)
				{
					dest[0] = src[7];
					dest[1] = src[6];
					dest[2] = src[5];
					dest[3] = src[4];
					dest[4] = src[3];
					dest[5] = src[2];
					dest[6] = src[1];
					dest[7] = src[0];
					dest += 8;
					src += 8;
				}
			}

			return ret;
		}

		/// <summary>
		/// Reverses the byte-order of the values of an array.
		/// </summary>
		/// <param name="values">The array of values which byte-order is to be reversed; null yields null.</param>
		/// <returns>The byte-order reversed values.</returns>
		public static unsafe ulong[] Swap(ulong[] values)
		{
			if (values == null) return null;
			if (values.Length == 0) return new ulong[0];

			int length = values.Length;
			ulong[] ret = new ulong[length];

			fixed (ulong* pValue = values, pRet = ret)
			{
				byte* dest = (byte*)pRet;
				byte* src = (byte*)pValue;
				for (int i = 0; i < length; i++)
				{
					dest[0] = src[7];
					dest[1] = src[6];
					dest[2] = src[5];
					dest[3] = src[4];
					dest[4] = src[3];
					dest[5] = src[2];
					dest[6] = src[1];
					dest[7] = src[0];
					dest += 8;
					src += 8;
				}
			}

			return ret;
		}

		/// <summary>
		/// Reverses the byte-order of the values of an array.
		/// </summary>
		/// <param name="values">The array of values which byte-order is to be reversed; null yields null.</param>
		/// <returns>The byte-order reversed values.</returns>
		public static unsafe UInt128[] Swap(UInt128[] values)
		{
			if (values == null) return null;
			if (values.Length == 0) return new UInt128[0];

			int length = values.Length;
			UInt128[] ret = new UInt128[length];

			fixed (UInt128* pValue = values, pRet = ret)
			{
				byte* dest = (byte*)pRet;
				byte* src = (byte*)pValue;
				for (int i = 0; i < length; i++)
				{
					dest[0] = src[15];
					dest[1] = src[14];
					dest[2] = src[13];
					dest[3] = src[12];
					dest[4] = src[11];
					dest[5] = src[10];
					dest[6] = src[9];
					dest[7] = src[8];
					dest[8] = src[7];
					dest[9] = src[6];
					dest[10] = src[5];
					dest[11] = src[4];
					dest[12] = src[3];
					dest[13] = src[2];
					dest[14] = src[1];
					dest[15] = src[0];
					dest += 16;
					src += 16;
				}
			}

			return ret;
		}

		/// <summary>
		/// Reverses the byte-order of the values of an array.
		/// </summary>
		/// <param name="values">The array of values which byte-order is to be reversed; null yields null.</param>
		/// <returns>The byte-order reversed values.</returns>
		public static unsafe float[] Swap(float[] values)
		{
			if (values == null) return null;
			if (values.Length == 0) return new float[0];

			int length = values.Length;
			float[] ret = new float[length];

			fixed (float* pValue = values, pRet = ret)
			{
				byte* dest = (byte*)pRet;
				byte* src = (byte*)pValue;
				for (int i = 0; i < length; i++)
				{
					dest[0] = src[3];
					dest[1] = src[2];
					dest[2] = src[1];
					dest[3] = src[0];
					dest += 4;
					src += 4;
				}
			}

			return ret;
		}

		/// <summary>
		/// Reverses the byte-order of the values of an array.
		/// </summary>
		/// <param name="values">The array of values which byte-order is to be reversed; null yields null.</param>
		/// <returns>The byte-order reversed values.</returns>
		public static unsafe double[] Swap(double[] values)
		{
			if (values == null) return null;
			if (values.Length == 0) return new double[0];

			int length = values.Length;
			double[] ret = new double[length];

			fixed (double* pValue = values, pRet = ret)
			{
				byte* dest = (byte*)pRet;
				byte* src = (byte*)pValue;
				for (int i = 0; i < length; i++)
				{
					dest[0] = src[7];
					dest[1] = src[6];
					dest[2] = src[5];
					dest[3] = src[4];
					dest[4] = src[3];
					dest[5] = src[2];
					dest[6] = src[1];
					dest[7] = src[0];
					dest += 8;
					src += 8;
				}
			}

			return ret;
		}
		#endregion

		/// <summary>
		/// Reverses the order of the bytes of an array.
		/// </summary>
		/// <param name="bytes">The byte array which order is to be reversed; null yields null.</param>
		/// <returns>The order reversed byte array.</returns>
		public static byte[] Reverse(byte[] bytes)
		{
			// Propagate null like the array Swap overloads instead of throwing NullReferenceException.
			if (bytes == null) return null;

			byte[] ret = new byte[bytes.Length];
			for (int i = 0, j = bytes.Length - 1; i < bytes.Length; i++, j--) ret[i] = bytes[j];
			return ret;
		}
	}
}
using System.Threading;

namespace MediatR.Tests;

using System;
using System.Threading.Tasks;
using Shouldly;
using StructureMap;
using Xunit;

/// <summary>
/// Verifies the exception behavior of <see cref="IMediator"/>: missing handlers,
/// null/invalid arguments, and exceptions thrown from within handlers.
/// </summary>
public class ExceptionTests
{
    private readonly IMediator _mediator;

    public class Ping : IRequest<Pong> { }
    public class Pong { }
    public class VoidPing : IRequest { }
    public class Pinged : INotification { }
    public class AsyncPing : IRequest<Pong> { }
    public class AsyncVoidPing : IRequest { }
    public class AsyncPinged : INotification { }
    public class NullPing : IRequest<Pong> { }
    public class VoidNullPing : IRequest { }
    public class NullPinged : INotification { }

    public class NullPingHandler : IRequestHandler<NullPing, Pong>
    {
        public Task<Pong> Handle(NullPing request, CancellationToken cancellationToken)
        {
            return Task.FromResult(new Pong());
        }
    }

    public class VoidNullPingHandler : IRequestHandler<VoidNullPing, Unit>
    {
        public Task<Unit> Handle(VoidNullPing request, CancellationToken cancellationToken)
        {
            return Unit.Task;
        }
    }

    public ExceptionTests()
    {
        // Deliberately registers no handlers, so Send() on the handled request
        // types below throws InvalidOperationException.
        var container = new Container(cfg =>
        {
            cfg.For<ServiceFactory>().Use<ServiceFactory>(ctx => ctx.GetInstance);
            cfg.For<IMediator>().Use<Mediator>();
        });
        _mediator = container.GetInstance<IMediator>();
    }

    /// <summary>
    /// Builds a mediator whose container scans this test assembly for handlers.
    /// All marker types in this file live in the same assembly, so the scan result is
    /// identical regardless of which marker is passed; the parameter is kept so each
    /// test documents the type it cares about.
    /// </summary>
    private static IMediator BuildMediator(Type handlerAssemblyMarkerType)
    {
        var container = new Container(cfg =>
        {
            cfg.Scan(scanner =>
            {
                scanner.AssemblyContainingType(handlerAssemblyMarkerType);
                scanner.IncludeNamespaceContainingType<Ping>();
                scanner.WithDefaultConventions();
                scanner.AddAllTypesOf(typeof(IRequestHandler<,>));
            });
            cfg.For<ServiceFactory>().Use<ServiceFactory>(ctx => t => ctx.GetInstance(t));
            cfg.For<IMediator>().Use<Mediator>();
        });
        return container.GetInstance<IMediator>();
    }

    [Fact]
    public async Task Should_throw_for_send()
    {
        await Should.ThrowAsync<InvalidOperationException>(async () => await _mediator.Send(new Ping()));
    }

    [Fact]
    public async Task Should_throw_for_void_send()
    {
        await Should.ThrowAsync<InvalidOperationException>(async () => await _mediator.Send(new VoidPing()));
    }

    [Fact]
    public async Task Should_not_throw_for_publish()
    {
        // Publishing with no registered handlers is a no-op, not an error.
        Exception ex = null;
        try
        {
            await _mediator.Publish(new Pinged());
        }
        catch (Exception e)
        {
            ex = e;
        }
        ex.ShouldBeNull();
    }

    [Fact]
    public async Task Should_throw_for_async_send()
    {
        await Should.ThrowAsync<InvalidOperationException>(async () => await _mediator.Send(new AsyncPing()));
    }

    [Fact]
    public async Task Should_throw_for_async_void_send()
    {
        await Should.ThrowAsync<InvalidOperationException>(async () => await _mediator.Send(new AsyncVoidPing()));
    }

    [Fact]
    public async Task Should_not_throw_for_async_publish()
    {
        Exception ex = null;
        try
        {
            await _mediator.Publish(new AsyncPinged());
        }
        catch (Exception e)
        {
            ex = e;
        }
        ex.ShouldBeNull();
    }

    [Fact]
    public async Task Should_throw_argument_exception_for_send_when_request_is_null()
    {
        var mediator = BuildMediator(typeof(NullPing));

        NullPing request = null;

        await Should.ThrowAsync<ArgumentNullException>(async () => await mediator.Send(request));
    }

    [Fact]
    public async Task Should_throw_argument_exception_for_void_send_when_request_is_null()
    {
        var mediator = BuildMediator(typeof(VoidNullPing));

        VoidNullPing request = null;

        await Should.ThrowAsync<ArgumentNullException>(async () => await mediator.Send(request));
    }

    [Fact]
    public async Task Should_throw_argument_exception_for_publish_when_request_is_null()
    {
        var mediator = BuildMediator(typeof(NullPinged));

        NullPinged notification = null;

        await Should.ThrowAsync<ArgumentNullException>(async () => await mediator.Publish(notification));
    }

    [Fact]
    public async Task Should_throw_argument_exception_for_publish_when_request_is_null_object()
    {
        var mediator = BuildMediator(typeof(NullPinged));

        object notification = null;

        await Should.ThrowAsync<ArgumentNullException>(async () => await mediator.Publish(notification));
    }

    [Fact]
    public async Task Should_throw_argument_exception_for_publish_when_request_is_not_notification()
    {
        var mediator = BuildMediator(typeof(NullPinged));

        object notification = "totally not notification";

        await Should.ThrowAsync<ArgumentException>(async () => await mediator.Publish(notification));
    }

    public class PingException : IRequest { }

    public class PingExceptionHandler : IRequestHandler<PingException>
    {
        public Task<Unit> Handle(PingException request, CancellationToken cancellationToken)
        {
            throw new NotImplementedException();
        }
    }

    [Fact]
    public async Task Should_throw_exception_for_non_generic_send_when_exception_occurs()
    {
        var mediator = BuildMediator(typeof(NullPinged));

        object pingException = new PingException();

        await Should.ThrowAsync<NotImplementedException>(async () => await mediator.Send(pingException));
    }

    [Fact]
    public async Task Should_throw_exception_for_non_request_send()
    {
        var mediator = BuildMediator(typeof(NullPinged));

        object nonRequest = new NonRequest();

        var argumentException = await Should.ThrowAsync<ArgumentException>(async () => await mediator.Send(nonRequest));
        Assert.StartsWith("NonRequest does not implement IRequest", argumentException.Message);
    }

    public class NonRequest { }

    [Fact]
    public async Task Should_throw_exception_for_generic_send_when_exception_occurs()
    {
        var mediator = BuildMediator(typeof(NullPinged));

        PingException pingException = new PingException();

        await Should.ThrowAsync<NotImplementedException>(async () => await mediator.Send(pingException));
    }
}
using NUnit.Framework;
using System;
using System.Collections.Generic;
using System.IO;
using Assert = Lucene.Net.TestFramework.Assert;
using Console = Lucene.Net.Util.SystemConsole;

namespace Lucene.Net.Store
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using Analyzer = Lucene.Net.Analysis.Analyzer;
    using BytesRef = Lucene.Net.Util.BytesRef;
    using DirectoryReader = Lucene.Net.Index.DirectoryReader;
    using Document = Documents.Document;
    using IndexSearcher = Lucene.Net.Search.IndexSearcher;
    using IndexWriter = Lucene.Net.Index.IndexWriter;
    using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
    using LineFileDocs = Lucene.Net.Util.LineFileDocs;
    using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
    using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
    using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter;
    using Term = Lucene.Net.Index.Term;
    using TermQuery = Lucene.Net.Search.TermQuery;
    using TestUtil = Lucene.Net.Util.TestUtil;
    using TopDocs = Lucene.Net.Search.TopDocs;

    /// <summary>
    /// Tests for <c>NRTCachingDirectory</c>, a Directory wrapper that caches
    /// small, recently flushed files in RAM until they are sync'd.
    /// </summary>
    [TestFixture]
    public class TestNRTCachingDirectory : LuceneTestCase
    {
        // Indexes a random number of docs through the caching directory, periodically
        // (re)opening an NRT reader, then verifies the cache is empty after the writer
        // is disposed and that every doc is present in the delegate directory.
        [Test]
        public virtual void TestNRTAndCommit()
        {
            Directory dir = NewDirectory();
            NRTCachingDirectory cachedDir = new NRTCachingDirectory(dir, 2.0, 25.0);
            MockAnalyzer analyzer = new MockAnalyzer(Random);
            analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH);
            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
            RandomIndexWriter w = new RandomIndexWriter(Random, cachedDir, conf);
            LineFileDocs docs = new LineFileDocs(Random, DefaultCodecSupportsDocValues);
            int numDocs = TestUtil.NextInt32(Random, 100, 400);
            if (Verbose)
            {
                Console.WriteLine("TEST: numDocs=" + numDocs);
            }
            IList<BytesRef> ids = new List<BytesRef>();
            DirectoryReader r = null;
            for (int docCount = 0; docCount < numDocs; docCount++)
            {
                Document doc = docs.NextDoc();
                ids.Add(new BytesRef(doc.Get("docid")));
                w.AddDocument(doc);
                // Occasionally open (or refresh) a near-real-time reader mid-indexing.
                if (Random.Next(20) == 17)
                {
                    if (r == null)
                    {
                        r = DirectoryReader.Open(w.IndexWriter, false);
                    }
                    else
                    {
                        DirectoryReader r2 = DirectoryReader.OpenIfChanged(r);
                        if (r2 != null)
                        {
                            r.Dispose();
                            r = r2;
                        }
                    }
                    // The NRT reader must see every doc added so far.
                    Assert.AreEqual(1 + docCount, r.NumDocs);
                    IndexSearcher s = NewSearcher(r);
                    // Just make sure search can run; we can't assert
                    // totHits since it could be 0
                    TopDocs hits = s.Search(new TermQuery(new Term("body", "the")), 10);
                    // System.out.println("tot hits " + hits.totalHits);
                }
            }
            if (r != null)
            {
                r.Dispose();
            }

            // Close should force cache to clear since all files are sync'd
            w.Dispose();

            string[] cachedFiles = cachedDir.ListCachedFiles();
            foreach (string file in cachedFiles)
            {
                Console.WriteLine("FAIL: cached file " + file + " remains after sync");
            }
            Assert.AreEqual(0, cachedFiles.Length);

            // Open the delegate (non-caching) directory directly and verify every id made it to disk.
            r = DirectoryReader.Open(dir);
            foreach (BytesRef id in ids)
            {
                Assert.AreEqual(1, r.DocFreq(new Term("docid", id)));
            }
            r.Dispose();
            cachedDir.Dispose();
            docs.Dispose();
        }

        // NOTE: not a test; just here to make sure the code frag
        // in the javadocs is correct!
        public virtual void VerifyCompiles()
        {
            Analyzer analyzer = null;
            Directory fsDir = FSDirectory.Open(new DirectoryInfo("/path/to/index"));
            NRTCachingDirectory cachedFSDir = new NRTCachingDirectory(fsDir, 2.0, 25.0);
            IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
            IndexWriter writer = new IndexWriter(cachedFSDir, conf);
        }

        // Deleting a cached file must remove it from the directory listing too.
        [Test]
        public virtual void TestDeleteFile()
        {
            Directory dir = new NRTCachingDirectory(NewDirectory(), 2.0, 25.0);
            dir.CreateOutput("foo.txt", IOContext.DEFAULT).Dispose();
            dir.DeleteFile("foo.txt");
            Assert.AreEqual(0, dir.ListAll().Length);
            dir.Dispose();
        }

        // LUCENE-3382 -- make sure we get exception if the directory really does not exist.
        [Test]
        public virtual void TestNoDir()
        {
            var tempDir = CreateTempDir("doesnotexist").FullName;
            System.IO.Directory.Delete(tempDir, true);
            using (Directory dir = new NRTCachingDirectory(NewFSDirectory(new DirectoryInfo(tempDir)), 2.0, 25.0))
            {
                try
                {
                    Assert.False(System.IO.Directory.Exists(tempDir));
                    DirectoryReader.Open(dir);
                    Assert.Fail("did not hit expected exception");
                }
                catch (DirectoryNotFoundException)
                {
                    // expected
                }
            }
        }

        private static bool ContainsFile(Directory directory, string file) // LUCENENET specific method to prevent having to use Arrays.AsList(), which creates unnecessary memory allocations
        {
            return Array.IndexOf(directory.ListAll(), file) > -1;
        }

        // LUCENE-3382 test that we can add a file, and then when we call list() we get it back
        [Test]
        public virtual void TestDirectoryFilter()
        {
            Directory dir = new NRTCachingDirectory(NewFSDirectory(CreateTempDir("foo")), 2.0, 25.0);
            string name = "file";
            try
            {
                dir.CreateOutput(name, NewIOContext(Random)).Dispose();
                Assert.IsTrue(SlowFileExists(dir, name));
                Assert.IsTrue(ContainsFile(dir, name));
            }
            finally
            {
                dir.Dispose();
            }
        }

        // LUCENE-3382 test that delegate compound files correctly.
        [Test]
        public virtual void TestCompoundFileAppendTwice()
        {
            Directory newDir = new NRTCachingDirectory(NewDirectory(), 2.0, 25.0);
            CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), true);
            CreateSequenceFile(newDir, "d1", (sbyte)0, 15);
            IndexOutput @out = csw.CreateOutput("d.xyz", NewIOContext(Random));
            @out.WriteInt32(0);
            @out.Dispose();
            // Only the file written through the compound writer is listed; "d1" lives in newDir.
            Assert.AreEqual(1, csw.ListAll().Length);
            Assert.AreEqual("d.xyz", csw.ListAll()[0]);
            csw.Dispose();

            // Re-open the compound file for reading and verify the same single entry.
            CompoundFileDirectory cfr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), false);
            Assert.AreEqual(1, cfr.ListAll().Length);
            Assert.AreEqual("d.xyz", cfr.ListAll()[0]);
            cfr.Dispose();
            newDir.Dispose();
        }

        /// <summary>
        /// Creates a file of the specified size with sequential data. The first
        /// byte is written as the start byte provided. All subsequent bytes are
        /// computed as start + offset where offset is the number of the byte.
        /// </summary>
        private void CreateSequenceFile(Directory dir, string name, sbyte start, int size)
        {
            IndexOutput os = dir.CreateOutput(name, NewIOContext(Random));
            for (int i = 0; i < size; i++)
            {
                os.WriteByte((byte)start);
                start++;
            }
            os.Dispose();
        }
    }
}
/*
 * MindTouch Dream - a distributed REST framework
 * Copyright (C) 2006-2011 MindTouch, Inc.
 * www.mindtouch.com oss@mindtouch.com
 *
 * For community documentation and downloads visit wiki.developer.mindtouch.com;
 * please review the licensing section.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
using System;

namespace MindTouch.Collections {

    /// <summary>
    /// Provides a queue that allows items to pushed and popped by a single thread while
    /// other threads may attempt steal from it. The implementation is based on the work done by
    /// Danny Hendler, Yossi Lev, Mark Moir, and Nir Shavit: "A dynamic-sized nonblocking work stealing deque",
    /// Distributed Computing, Volume 18, Issue 3 (February 2006), pp189-207, ISSN:0178-2770
    /// </summary>
    /// <remarks>
    /// NOTE(review): the deque grows toward LOWER indices: the owner pushes at _bottom and
    /// _bottom.Index counts DOWN; thieves steal at _top and walk Index down as well.
    /// BottomData/TopData are immutable snapshots so that a single reference read/write of
    /// _bottom/_top is an atomic publish of a consistent (node, index) pair.
    /// </remarks>
    /// <typeparam name="T">Collection item type.</typeparam>
    public sealed class WorkStealingDeque<T> {

        //--- Constants ---

        // Default number of slots per linked-list node.
        private const int DEFAULT_CAPACITY = 32;

        //--- Types ---

        // Immutable (node, index) snapshot of the owner end of the deque.
        internal class BottomData {

            //--- Fields ---
            internal readonly DequeNode Node;
            internal readonly int Index;

            //--- Constructors ---
            internal BottomData(DequeNode node, int index) {
                this.Node = node;
                this.Index = index;
            }
        }

        // Immutable (tag, node, index) snapshot of the thief end of the deque.
        // Tag is a version counter used to avoid the ABA problem on CAS of _top.
        internal class TopData {

            //--- Fields ---
            internal readonly int Tag;
            internal readonly DequeNode Node;
            internal readonly int Index;

            //--- Constructors ---
            internal TopData(int tag, DequeNode node, int index) {
                this.Tag = tag;
                this.Node = node;
                this.Index = index;
            }
        }

        // One fixed-size array segment in the doubly-linked list of segments.
        internal class DequeNode {

            //--- Fields ---
            internal readonly T[] Data;
            internal DequeNode Next;
            internal DequeNode Prev;

            //--- Constructors ---
            // Links the new node in FRONT of 'next' (new node becomes next.Prev).
            internal DequeNode(int capacity, DequeNode next) {
                Data = new T[capacity];
                if(next != null) {
                    this.Next = next;
                    next.Prev = this;
                }
            }
        }

        //--- Class Methods ---

        // True when the (bottom, top) snapshot pair describes an empty deque,
        // including the wrap case where bottom sits at slot 0 of the node in
        // front of top while top sits at the last slot of its node.
        private static bool IsEmpty(BottomData bottom, TopData top, int capacity) {
            if(ReferenceEquals(bottom.Node, top.Node) && ((bottom.Index == top.Index) || (bottom.Index == (top.Index + 1)))) {
                return true;
            } else if(ReferenceEquals(bottom.Node, top.Node.Next) && (bottom.Index == 0) && (top.Index == (capacity - 1))) {
                return true;
            }
            return false;
        }

        //--- Fields ---
        private readonly int _capacity;

        // _bottom is written only by the owner thread; _top is CASed by thieves (and by
        // the owner in the pop-last-item race). Both are replaced wholesale, never mutated.
        private BottomData _bottom;
        private TopData _top;

        //--- Constructors ---

        /// <summary>
        /// Create a new instance.
        /// </summary>
        public WorkStealingDeque() : this(DEFAULT_CAPACITY) { }

        /// <summary>
        /// Create a new instance.
        /// </summary>
        /// <param name="capacity">Maximum number of items in the queue.</param>
        public WorkStealingDeque(int capacity) {
            _capacity = capacity;

            // start with two linked nodes; both ends point at the last slot of the
            // front node, which IsEmpty() recognizes as the empty state
            DequeNode nodeB = new DequeNode(_capacity, null);
            DequeNode nodeA = new DequeNode(_capacity, nodeB);
            _bottom = new BottomData(nodeA, _capacity - 1);
            _top = new TopData(0, nodeA, _capacity - 1);
        }

        //--- Properties ---

        /// <summary>
        /// Total number of items in queue.
        /// </summary>
        /// <remarks>
        /// NOTE(review): computed from two independent snapshot reads of _bottom and _top,
        /// so under concurrent mutation the result is only an approximation.
        /// </remarks>
        public int Count {
            get {
                BottomData curBottom = _bottom;
                TopData curTop = _top;
                int count;

                // check if top and bottom share the same node
                if(ReferenceEquals(curBottom.Node, curTop.Node)) {
                    count = Math.Max(0, curTop.Index - curBottom.Index);
                } else if(ReferenceEquals(curBottom.Node, curTop.Node.Next) && (curBottom.Index == 0) && (curTop.Index == (_capacity - 1))) {

                    // wrap case: bottom is at slot 0 of the node in front of top --> empty
                    count = 0;
                } else {

                    // sum the partial front node, any full middle nodes, and the partial top node
                    count = _capacity - (curBottom.Index + 1);
                    for(var node = curBottom.Node.Next; (node != curTop.Node) && (node != null); node = node.Next) {
                        count += _capacity;
                    }
                    count += curTop.Index + 1;
                }
                return count;
            }
        }

        //--- Methods ---

        /// <summary>
        /// Push an item onto the tail of the queue.
        /// </summary>
        /// <remarks>
        /// NOTE: Push() and TryPop() <strong>MUST</strong> be called from the same thread.
        /// </remarks>
        /// <param name="data">Item to push onto the tail of the queue.</param>
        public void Push(T data) {

            // read bottom data
            BottomData curBottom = _bottom;

            // write data in current bottom cell
            curBottom.Node.Data[curBottom.Index] = data;
            BottomData newBottom;
            if(curBottom.Index != 0) {
                newBottom = new BottomData(curBottom.Node, curBottom.Index - 1);
            } else {

                // allocate and link a new node
                DequeNode newNode = new DequeNode(_capacity, curBottom.Node);
                newBottom = new BottomData(newNode, _capacity - 1);
            }

            // update bottom (single reference write publishes the new item to thieves)
            _bottom = newBottom;
        }

        /// <summary>
        /// Pop an item from the tail of the queue.
        /// </summary>
        /// <remarks>
        /// NOTE: Push() and TryPop() <strong>MUST</strong> be called from the same thread.
        /// </remarks>
        /// <param name="item">Tail item of the queue when operation is successful.</param>
        /// <returns><see langword="True"/> if operation was successful.</returns>
        public bool TryPop(out T item) {
            item = default(T);

            // read bottom data
            BottomData curBottom = _bottom;
            BottomData newBottom;
            if(curBottom.Index != (_capacity - 1)) {
                newBottom = new BottomData(curBottom.Node, curBottom.Index + 1);
            } else {
                newBottom = new BottomData(curBottom.Node.Next, 0);
            }

            // update bottom BEFORE reading top; this ordering is what the algorithm's
            // empty/race checks below rely on
            _bottom = newBottom;

            // read top
            TopData curTop = _top;

            // read data to be popped
            T retVal = newBottom.Node.Data[newBottom.Index];

            // case 1: if _top has crossed _bottom
            if(ReferenceEquals(curBottom.Node, curTop.Node) && (curBottom.Index == curTop.Index)) {

                // return bottom to its old position
                _bottom = curBottom;
                return false;
            }

            // case 2: when popping the last entry in the deque (i.e. deque is empty after the update of bottom)
            if(ReferenceEquals(newBottom.Node, curTop.Node) && (newBottom.Index == curTop.Index)) {

                // try to update _top's tag so no concurrent Steal operation will also pop the same entry
                TopData newTopVal = new TopData(curTop.Tag + 1, curTop.Node, curTop.Index);
                if(SysUtil.CAS(ref _top, curTop, newTopVal)) {

                    // TODO (steveb): clear out the entry we read, so the GC can reclaim it

                    // free old node if needed
                    if(!ReferenceEquals(curBottom.Node, newBottom.Node)) {
                        newBottom.Node.Prev = null;
                    }
                    item = retVal;
                    return true;
                } else {

                    // if CAS failed (i.e. a concurrent Steal operation alrady popped that last entry)
                    // return bottom to its old position
                    _bottom = curBottom;
                    return false;
                }
            }

            // case 3: regular case (i.e. there was a least one entry in the deque _after_ bottom's update)

            // free old node if needed
            if(!ReferenceEquals(curBottom.Node, newBottom.Node)) {
                newBottom.Node.Prev = null;
            }
            item = retVal;
            return true;
        }

        /// <summary>
        /// Pop an item from the head of the queue.
        /// </summary>
        /// <remarks>
        /// NOTE: TrySteal() can be invoked from any thread.
        /// </remarks>
        /// <param name="item">Head item of the queue when operation is successful.</param>
        /// <returns><see langword="True"/> if operation was successful.</returns>
        public bool TrySteal(out T item) {

            // read top
            TopData curTop = _top;

            // read bottom
            BottomData curBottom = _bottom;
            if(IsEmpty(curBottom, curTop, _capacity)) {
                item = default(T);
                if(ReferenceEquals(curTop, _top)) {
                    return false;
                } else {

                    // NOTE (steveb): this is contentious access case; we currently return 'false' but may want to differentiate in the future
                    return false;
                }
            }

            // if deque isn't empty, calcuate next top pointer
            TopData newTop;
            if(curTop.Index != 0) {

                // stay at current node
                newTop = new TopData(curTop.Tag, curTop.Node, curTop.Index - 1);
            } else {

                // move to next node and update tag
                newTop = new TopData(curTop.Tag + 1, curTop.Node.Prev, _capacity - 1);
            }

            // read value BEFORE the CAS; after a successful CAS the slot may be reused
            T retVal = curTop.Node.Data[curTop.Index];

            // try updating _top using CAS
            if(SysUtil.CAS(ref _top, curTop, newTop)) {

                // TODO (steveb): clear out the entry we read, so the GC can reclaim it

                // free old node
                curTop.Node.Next = null;
                item = retVal;
                return true;
            } else {
                item = default(T);

                // NOTE (steveb): this is contentious access case; we currently return 'false' but may want to differentiate in the future
                return false;
            }
        }
    }
}
#region License, Terms and Author(s)
//
// ELMAH - Error Logging Modules and Handlers for ASP.NET
// Copyright (c) 2004-9 Atif Aziz. All rights reserved.
//
//  Author(s):
//
//      Scott Wilson <sw@scratchstudio.net>
//      Atif Aziz, http://www.raboof.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion

[assembly: Elmah.Scc("$Id: XmlFileErrorLog.cs 795 2011-02-16 22:29:34Z azizatif $")]

namespace Elmah
{
    #region Imports

    using System;
    using System.Globalization;
    using System.IO;
    using System.Linq;
    using System.Runtime.CompilerServices;
    using System.Text;
    using System.Xml;
    using System.Collections.Generic;
    using IDictionary = System.Collections.IDictionary;

    #endregion

    /// <summary>
    /// An <see cref="ErrorLog"/> implementation that uses XML files stored on
    /// disk as its backing store.
    /// </summary>
    public class XmlFileErrorLog : ErrorLog
    {
        // Root folder in which one XML file per logged error is stored.
        private readonly string _logPath;

        /// <summary>
        /// Initializes a new instance of the <see cref="XmlFileErrorLog"/> class
        /// using a dictionary of configured settings.
        /// </summary>
        /// <param name="config">Configuration dictionary; must contain a
        /// "logPath" (or legacy "LogPath") entry.</param>
        /// <exception cref="ArgumentNullException"><paramref name="config"/> is null.</exception>
        /// <exception cref="ApplicationException">No log path is configured.</exception>
        public XmlFileErrorLog(IDictionary config)
        {
            if (config == null)
                throw new ArgumentNullException("config");

            var logPath = config.Find("logPath", string.Empty);
            if (logPath.Length == 0)
            {
                //
                // For compatibility reasons with older version of this
                // implementation, we also try "LogPath".
                //
                logPath = config.Find("LogPath", string.Empty);
                if (logPath.Length == 0)
                    throw new ApplicationException("Log path is missing for the XML file-based error log.");
            }

            // Resolve an app-relative path ("~/...") against the hosting environment.
            if (logPath.StartsWith("~/"))
                logPath = MapPath(logPath);

            _logPath = logPath;
        }

        /// <remarks>
        /// This method is excluded from inlining so that if
        /// HostingEnvironment does not need JIT-ing if it is not implicated
        /// by the caller.
        /// </remarks>
        [ MethodImpl(MethodImplOptions.NoInlining) ]
        private static string MapPath(string path)
        {
            return System.Web.Hosting.HostingEnvironment.MapPath(path);
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="XmlFileErrorLog"/> class
        /// to use a specific path to store/load XML files.
        /// </summary>
        /// <param name="logPath">Folder in which error XML files are stored.</param>
        public XmlFileErrorLog(string logPath)
        {
            if (logPath == null)
                throw new ArgumentNullException("logPath");
            if (logPath.Length == 0)
                throw new ArgumentException(null, "logPath");

            _logPath = logPath;
        }

        /// <summary>
        /// Gets the path to where the log is stored.
        /// </summary>
        public virtual string LogPath
        {
            get { return _logPath; }
        }

        /// <summary>
        /// Gets the name of this error log implementation.
        /// </summary>
        public override string Name
        {
            get { return "XML File-Based Error Log"; }
        }

        /// <summary>
        /// Logs an error to the database.
        /// </summary>
        /// <remarks>
        /// Logs an error as a single XML file stored in a folder. XML files are named with a
        /// sortable date and a unique identifier. Currently the XML files are stored indefinately.
        /// As they are stored as files, they may be managed using standard scheduled jobs.
        /// </remarks>
        /// <returns>The GUID string assigned to the logged error.</returns>
        public override string Log(Error error)
        {
            string logPath = LogPath;
            if (!Directory.Exists(logPath))
                Directory.CreateDirectory(logPath);

            string errorId = Guid.NewGuid().ToString();

            // Fall back to "now" when the error carries no timestamp.
            DateTime timeStamp = (error.Time > DateTime.MinValue ? error.Time : DateTime.Now);

            // File name sorts chronologically (UTC) and embeds the error id so
            // GetError can locate it with a wildcard search.
            string fileName = string.Format(CultureInfo.InvariantCulture,
                                  @"error-{0:yyyy-MM-ddHHmmssZ}-{1}.xml",
                                  /* 0 */ timeStamp.ToUniversalTime(),
                                  /* 1 */ errorId);

            string path = Path.Combine(logPath, fileName);

            using (var writer = new XmlTextWriter(path, Encoding.UTF8))
            {
                writer.Formatting = Formatting.Indented;
                writer.WriteStartElement("error");
                writer.WriteAttributeString("errorId", errorId);
                ErrorXml.Encode(error, writer);
                writer.WriteEndElement();
                writer.Flush();
            }

            return errorId;
        }

        /// <summary>
        /// Returns a page of errors from the folder in descending order
        /// of logged time as defined by the sortable filenames.
        /// </summary>
        /// <returns>Total number of error files found (not just the page size).</returns>
        public override int GetErrors(int pageIndex, int pageSize, IList<ErrorLogEntry> errorEntryList)
        {
            if (pageIndex < 0) throw new ArgumentOutOfRangeException("pageIndex", pageIndex, null);
            if (pageSize < 0) throw new ArgumentOutOfRangeException("pageSize", pageSize, null);

            var logPath = LogPath;
            var dir = new DirectoryInfo(logPath);
            if (!dir.Exists)
                return 0;

            var infos = dir.GetFiles("error-*.xml");
            if (!infos.Any())
                return 0;

            // Names sort chronologically, so reversing an ordinal sort yields newest-first.
            var files = infos.Where(info => IsUserFile(info.Attributes))
                             .OrderBy(info => info.Name, StringComparer.OrdinalIgnoreCase)
                             .Select(info => Path.Combine(logPath, info.Name))
                             .Reverse()
                             .ToArray();

            if (errorEntryList != null)
            {
                var entries = files.Skip(pageIndex * pageSize)
                                   .Take(pageSize)
                                   .Select(LoadErrorLogEntry);

                foreach (var entry in entries)
                    errorEntryList.Add(entry);
            }

            return files.Length; // Return total
        }

        // Reads one error XML file back into an entry; returns null when the
        // file does not start with the expected <error> element.
        private ErrorLogEntry LoadErrorLogEntry(string path)
        {
            using (var reader = XmlReader.Create(path))
            {
                if (!reader.IsStartElement("error"))
                    return null;

                var id = reader.GetAttribute("errorId");
                var error = ErrorXml.Decode(reader);
                return new ErrorLogEntry(this, id, error);
            }
        }

        /// <summary>
        /// Returns the specified error from the filesystem, or throws an exception if it does not exist.
        /// </summary>
        /// <param name="id">GUID string identifying the error.</param>
        /// <exception cref="ArgumentException"><paramref name="id"/> is not a valid GUID.</exception>
        public override ErrorLogEntry GetError(string id)
        {
            try
            {
                id = (new Guid(id)).ToString(); // validate GUID
            }
            catch (FormatException e)
            {
                // BUGFIX: the second argument of this ArgumentException overload is the
                // PARAMETER NAME; previously the id VALUE was passed here, producing a
                // misleading "Parameter name: <guid>" in the exception text.
                throw new ArgumentException(e.Message, "id", e);
            }

            var file = new DirectoryInfo(LogPath).GetFiles(string.Format("error-*-{0}.xml", id))
                                                 .FirstOrDefault();
            if (file == null)
                return null;

            if (!IsUserFile(file.Attributes))
                return null;

            using (var reader = XmlReader.Create(file.FullName))
                return new ErrorLogEntry(this, id, ErrorXml.Decode(reader));
        }

        // Only plain files count; skip directories and hidden/system files.
        private static bool IsUserFile(FileAttributes attributes)
        {
            return 0 == (attributes & (FileAttributes.Directory |
                                       FileAttributes.Hidden |
                                       FileAttributes.System));
        }
    }
}
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------

using Microsoft.Azure.Commands.Common.Authentication;
using Microsoft.Azure.Commands.Common.Authentication.Models;
using Microsoft.Azure.Commands.Sql.Auditing.Model;
using Microsoft.Azure.Management.Resources;
using Microsoft.Azure.Management.Resources.Models;
using Microsoft.Azure.Management.Sql.LegacySdk;
using Microsoft.Azure.Management.Storage;
using Microsoft.WindowsAzure.Management.Storage;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

namespace Microsoft.Azure.Commands.Sql.Common
{
    /// <summary>
    /// This class is responsible for all the REST communication with the management libraries
    /// </summary>
    public class AzureEndpointsCommunicator
    {
        /// <summary>
        /// The Sql management client used by this communicator
        /// </summary>
        private static SqlManagementClient SqlClient { get; set; }

        /// <summary>
        /// The storage management client used by this communicator
        /// </summary>
        private static Microsoft.WindowsAzure.Management.Storage.StorageManagementClient StorageClient { get; set; }

        // ARM (v2) storage management client; lazily created, cached per subscription.
        private static Microsoft.Azure.Management.Storage.StorageManagementClient StorageV2Client { get; set; }

        /// <summary>
        /// Gets or sets the Azure subscription
        /// </summary>
        private static AzureSubscription Subscription { get; set; }

        /// <summary>
        /// The resources management client used by this communicator
        /// </summary>
        private static ResourceManagementClient ResourcesClient { get; set; }

        /// <summary>
        /// Gets or sets the Azure profile
        /// </summary>
        public AzureContext Context { get; set; }

        /// <summary>
        /// Default Constructor.
        /// </summary>
        /// <param name="context">The Azure context</param>
        public AzureEndpointsCommunicator(AzureContext context)
        {
            Context = context;

            // The cached clients are static; invalidate them when the subscription changes
            // so a new communicator never reuses clients bound to the previous subscription.
            if (context.Subscription != Subscription)
            {
                Subscription = context.Subscription;
                StorageClient = null;
                ResourcesClient = null;
                StorageV2Client = null;
            }
        }

        /// <summary>
        /// Provides the storage keys for the storage account within the given resource group
        /// </summary>
        /// <remarks>
        /// First tries the classic (ASM) "listKeys" REST endpoint; if its response cannot be
        /// parsed into a primary/secondary key pair, falls back to the ARM (v2) storage client.
        /// </remarks>
        /// <returns>A dictionary with two entries, one for each possible key type with the appropriate key</returns>
        public async Task<Dictionary<StorageKeyKind, string>> GetStorageKeysAsync(string resourceGroupName, string storageAccountName)
        {
            SqlManagementClient client = GetCurrentSqlClient("none");

            // Build the classic-storage listKeys URL by hand off the ARM endpoint.
            string url = Context.Environment.GetEndpointAsUri(AzureEnvironment.Endpoint.ResourceManager).ToString();
            if (!url.EndsWith("/"))
            {
                url = url + "/";
            }
            url = url + "subscriptions/" + (client.Credentials.SubscriptionId != null ? client.Credentials.SubscriptionId.Trim() : "");
            url = url + "/resourceGroups/" + resourceGroupName;
            url = url + "/providers/Microsoft.ClassicStorage/storageAccounts/" + storageAccountName;
            url = url + "/listKeys?api-version=2014-06-01";

            HttpRequestMessage httpRequest = new HttpRequestMessage();
            httpRequest.Method = HttpMethod.Post;
            httpRequest.RequestUri = new Uri(url);

            // Reuse the SQL client's credentials to authenticate the raw request.
            await client.Credentials.ProcessHttpRequestAsync(httpRequest, CancellationToken.None).ConfigureAwait(false);
            HttpResponseMessage httpResponse = await client.HttpClient.SendAsync(httpRequest, CancellationToken.None).ConfigureAwait(false);
            string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);

            Dictionary<StorageKeyKind, string> result = new Dictionary<StorageKeyKind, string>();
            try
            {
                JToken responseDoc = JToken.Parse(responseContent);
                string primaryKey = (string)responseDoc["primaryKey"];
                string secondaryKey = (string)responseDoc["secondaryKey"];
                if (string.IsNullOrEmpty(primaryKey) || string.IsNullOrEmpty(secondaryKey))
                    throw new Exception(); // this is caught by the synced wrapper
                result.Add(StorageKeyKind.Primary, primaryKey);
                result.Add(StorageKeyKind.Secondary, secondaryKey);
                return result;
            }
            catch
            {
                // Classic endpoint failed or returned an unexpected payload; try the
                // ARM (v2) storage client instead. (A redundant inner try/catch that
                // only rethrew was removed here; behavior is unchanged.)
                return GetV2Keys(resourceGroupName, storageAccountName);
            }
        }

        // Fetches the key pair through the ARM (v2) storage management client.
        private Dictionary<StorageKeyKind, string> GetV2Keys(string resourceGroupName, string storageAccountName)
        {
            Microsoft.Azure.Management.Storage.StorageManagementClient storageClient = GetCurrentStorageV2Client(Context);
            var r = storageClient.StorageAccounts.ListKeys(resourceGroupName, storageAccountName);
            string k1 = r.StorageAccountKeys.Key1;
            string k2 = r.StorageAccountKeys.Key2;
            Dictionary<StorageKeyKind, String> result = new Dictionary<StorageKeyKind, String>();
            result.Add(StorageKeyKind.Primary, k1);
            result.Add(StorageKeyKind.Secondary, k2);
            return result;
        }

        /// <summary>
        /// Gets the storage keys for the given storage account.
        /// </summary>
        /// <remarks>
        /// Synchronous wrapper over <see cref="GetStorageKeysAsync"/>; any failure is
        /// surfaced as a "storage account not found" exception.
        /// </remarks>
        public Dictionary<StorageKeyKind, string> GetStorageKeys(string resourceGroupName, string storageAccountName)
        {
            try
            {
                return Task.Factory.StartNew(
                    (object epc) => (((AzureEndpointsCommunicator)epc).GetStorageKeysAsync(resourceGroupName, storageAccountName)),
                    this,
                    CancellationToken.None,
                    TaskCreationOptions.None,
                    TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }
            catch
            {
                throw new Exception(string.Format(Microsoft.Azure.Commands.Sql.Properties.Resources.StorageAccountNotFound, storageAccountName));
            }
        }

        /// <summary>
        /// Returns the resource group of the provided storage account
        /// </summary>
        /// <remarks>
        /// Searches classic storage accounts first, then ARM storage accounts, and extracts
        /// the resource group segment from the matching resource id.
        /// </remarks>
        public string GetStorageResourceGroup(string storageAccountName)
        {
            ResourceManagementClient resourcesClient = GetCurrentResourcesClient(Context);

            Func<string, string> getResourceGroupName = resourceType =>
            {
                ResourceListResult res = resourcesClient.Resources.List(new ResourceListParameters
                {
                    ResourceGroupName = null,
                    ResourceType = resourceType,
                    TagName = null,
                    TagValue = null
                });
                var allResources = new List<GenericResourceExtended>(res.Resources);
                GenericResourceExtended account = allResources.Find(r => r.Name == storageAccountName);
                if (account != null)
                {
                    // Resource id has the form .../resourceGroups/<name>/...; take the
                    // segment immediately after "resourceGroups".
                    String resId = account.Id;
                    String[] segments = resId.Split('/');
                    int indexOfResoureGroup = new List<string>(segments).IndexOf("resourceGroups") + 1;
                    return segments[indexOfResoureGroup];
                }
                else
                {
                    throw new Exception(string.Format(Microsoft.Azure.Commands.Sql.Properties.Resources.StorageAccountNotFound, storageAccountName));
                }
            };

            try
            {
                return getResourceGroupName("Microsoft.ClassicStorage/storageAccounts");
            }
            catch
            {
                return getResourceGroupName("Microsoft.Storage/storageAccounts");
            }
        }

        // Convenience overload: resolve the resource group first, then fetch the keys.
        public Dictionary<StorageKeyKind, string> GetStorageKeys(string storageName)
        {
            var resourceGroup = GetStorageResourceGroup(storageName);
            return GetStorageKeys(resourceGroup, storageName);
        }

        /// <summary>
        /// Lazy creation of a single instance of a storage client
        /// </summary>
        private Microsoft.WindowsAzure.Management.Storage.StorageManagementClient GetCurrentStorageClient(AzureContext context)
        {
            if (StorageClient == null)
                StorageClient = AzureSession.ClientFactory.CreateClient<Microsoft.WindowsAzure.Management.Storage.StorageManagementClient>(Context, AzureEnvironment.Endpoint.ServiceManagement);
            return StorageClient;
        }

        /// <summary>
        /// Lazy creation of a single instance of a storage client
        /// </summary>
        private Microsoft.Azure.Management.Storage.StorageManagementClient GetCurrentStorageV2Client(AzureContext context)
        {
            if (StorageV2Client == null)
                StorageV2Client = AzureSession.ClientFactory.CreateClient<Microsoft.Azure.Management.Storage.StorageManagementClient>(Context, AzureEnvironment.Endpoint.ResourceManager);
            return StorageV2Client;
        }

        /// <summary>
        /// Lazy creation of a single instance of a resoures client
        /// </summary>
        private ResourceManagementClient GetCurrentResourcesClient(AzureContext context)
        {
            if (ResourcesClient == null)
                ResourcesClient = AzureSession.ClientFactory.CreateClient<ResourceManagementClient>(Context, AzureEnvironment.Endpoint.ResourceManager);
            return ResourcesClient;
        }

        /// <summary>
        /// Retrieve the SQL Management client for the currently selected subscription, adding the session and request
        /// id tracing headers for the current cmdlet invocation.
        /// </summary>
        /// <returns>The SQL Management client for the currently selected subscription.</returns>
        private SqlManagementClient GetCurrentSqlClient(String clientRequestId)
        {
            // Get the SQL management client for the current subscription
            if (SqlClient == null)
            {
                SqlClient = AzureSession.ClientFactory.CreateClient<SqlManagementClient>(Context, AzureEnvironment.Endpoint.ResourceManager);
            }

            // Replace (not append) the tracing header for this invocation.
            SqlClient.HttpClient.DefaultRequestHeaders.Remove(Constants.ClientRequestIdHeaderName);
            SqlClient.HttpClient.DefaultRequestHeaders.Add(Constants.ClientRequestIdHeaderName, clientRequestId);
            return SqlClient;
        }
    }
}
//------------------------------------------------------------------------------
//                                  Symbooglix
//
//
// Copyright 2014-2017 Daniel Liew
//
// This file is licensed under the MIT license.
// See LICENSE.txt for details.
//------------------------------------------------------------------------------
using NUnit.Framework;
using System;
using Symbooglix;
using Microsoft.Basetypes;
using Microsoft.Boogie;
using System.Numerics;

namespace ExprBuilderTests
{
    /// <summary>
    /// Tests for constant construction through the simple expression builder:
    /// booleans, integers, reals and (two's complement) bitvectors.
    /// </summary>
    [TestFixture()]
    public class ConstantsSimpleBuilder : SimpleExprBuilderTestBase
    {
        [Test()]
        public void True()
        {
            var builder = GetSimpleBuilder();
            var constant = builder.True;
            Assert.AreEqual("true", constant.ToString());
            CheckType(constant, t => t.IsBool);

            var constant2 = builder.ConstantBool(true);
            Assert.AreEqual("true", constant2.ToString());
            CheckType(constant2, t => t.IsBool);
        }

        [Test()]
        public void False()
        {
            var builder = GetSimpleBuilder();
            var constant = builder.False;
            Assert.AreEqual("false", constant.ToString());
            CheckType(constant, t => t.IsBool);

            var constant2 = builder.ConstantBool(false);
            Assert.AreEqual("false", constant2.ToString());
            // BUGFIX: previously re-checked `constant` here (copy-paste error),
            // leaving constant2's type unverified.
            CheckType(constant2, t => t.IsBool);
        }

        [Test()]
        public void PositiveInteger()
        {
            var builder = GetSimpleBuilder();
            var constant = builder.ConstantInt(5);
            Assert.AreEqual("5", constant.ToString());
            Assert.AreEqual(5, constant.asBigNum.ToInt);
            CheckType(constant, t => t.IsInt);
        }

        [Test()]
        public void PositiveIntegerFromBigInt()
        {
            var builder = GetSimpleBuilder();
            var constant = builder.ConstantInt(new BigInteger(5));
            Assert.AreEqual("5", constant.ToString());
            Assert.AreEqual(5, constant.asBigNum.ToInt);
            CheckType(constant, t => t.IsInt);
        }

        [Test()]
        public void NegativeInteger()
        {
            var builder = GetSimpleBuilder();
            var constant = builder.ConstantInt(-5);
            Assert.AreEqual("-5", constant.ToString());
            Assert.AreEqual(-5, constant.asBigNum.ToInt);
            CheckType(constant, t => t.IsInt);
        }

        [Test()]
        public void NegativeIntegerFromBigInt()
        {
            var builder = GetSimpleBuilder();
            var constant = builder.ConstantInt(new BigInteger(-5));
            Assert.AreEqual("-5", constant.ToString());
            Assert.AreEqual(-5, constant.asBigNum.ToInt);
            CheckType(constant, t => t.IsInt);
        }

        [Test()]
        public void PositiveReal()
        {
            var builder = GetSimpleBuilder();
            var constant = builder.ConstantReal("5.0");
            Assert.AreEqual("5e0", constant.ToString());
            CheckType(constant, t => t.IsReal);
        }

        [Test()]
        public void PositiveRealFromBigDec()
        {
            var builder = GetSimpleBuilder();
            var constant = builder.ConstantReal(BigDec.FromInt(5));
            Assert.AreEqual("5e0", constant.ToString());
            CheckType(constant, t => t.IsReal);
        }

        [Test()]
        public void NegativeReal()
        {
            var builder = GetSimpleBuilder();
            var constant = builder.ConstantReal("-5.0");
            Assert.AreEqual("-5e0", constant.ToString());
            CheckType(constant, t => t.IsReal);
        }

        [Test()]
        public void NegativeRealFromBigDec()
        {
            var builder = GetSimpleBuilder();
            var constant = builder.ConstantReal(BigDec.FromInt(-5));
            Assert.AreEqual("-5e0", constant.ToString());
            CheckType(constant, t => t.IsReal);
        }

        [TestCase(5, 4, "5bv4")]
        [TestCase(11, 32, "11bv32")]
        [TestCase(0, 4, "0bv4")]
        [TestCase(15, 4, "15bv4")]
        public void PositiveBV(int decimalValue, int width, string expectedString)
        {
            _PositiveBV(decimalValue, width, expectedString);
        }

        // Values >= 2^width must be rejected by the builder.
        [TestCase(16, 4)]
        [TestCase(20, 2)]
        [TestCase(256, 8)]
        [TestCase(311, 8)]
        [ExpectedException(typeof(ArgumentException))]
        public void PositiveBVOutOfRange(int decimalValue, int width)
        {
            _PositiveBV(decimalValue, width, "dummy");
        }

        // Shared driver: exercises both ConstantBV overloads (int and BigInteger)
        // and checks the printed form, type, and stored value.
        private void _PositiveBV(int decimalValue, int width, string expectedString)
        {
            Assert.IsTrue(decimalValue >= 0);
            var builder = GetSimpleBuilder();

            // Test both versions of the API
            var constants = new Microsoft.Boogie.LiteralExpr[] {
                builder.ConstantBV(decimalValue, width),
                builder.ConstantBV(new BigInteger(decimalValue), width)};
            foreach (var constant in constants)
            {
                Assert.AreEqual(expectedString, constant.ToString());
                CheckType(constant, t => t.IsBv);
                Assert.AreEqual(width, constant.asBvConst.Bits);
                Assert.AreEqual(width, constant.Type.BvBits);
                Assert.AreEqual(decimalValue, constant.asBvConst.Value.ToInt);
            }
        }

        [TestCase(-5, 4, "11bv4")]
        [TestCase(-11, 32, "4294967285bv32")]
        [TestCase(0, 4, "0bv4")]
        [TestCase(-8, 4, "8bv4")]
        public void NegativeBV(int decimalValue, int width, string expectedString)
        {
            _NegativeBV(decimalValue, width, expectedString);
        }

        // Values < -2^(width-1) must be rejected by the builder.
        [TestCase(-9, 4)]
        [TestCase(-3, 2)]
        [TestCase(-129, 8)]
        [TestCase(-200, 8)]
        [ExpectedException(typeof(ArgumentException))]
        public void NegativeBVOutOfRange(int decimalValue, int width)
        {
            _NegativeBV(decimalValue, width, "dummy");
        }

        // Shared driver for negative values: the stored bitvector must be the
        // two's complement encoding of decimalValue in `width` bits.
        // NOTE(review): declared public (unlike private _PositiveBV); kept as-is
        // for compatibility, but it should ideally be private for consistency.
        public void _NegativeBV(int decimalValue, int width, string expectedString)
        {
            Assert.IsTrue(decimalValue <= 0);
            var builder = GetSimpleBuilder();

            // Test both versions of the API
            var constants = new Microsoft.Boogie.LiteralExpr[] {
                builder.ConstantBV(decimalValue, width),
                builder.ConstantBV(new BigInteger(decimalValue), width)};
            foreach (var constant in constants)
            {
                Assert.AreEqual(expectedString, constant.ToString());
                CheckType(constant, t => t.IsBv);
                Assert.AreEqual(width, constant.asBvConst.Bits);
                Assert.AreEqual(width, constant.Type.BvBits);

                // Compute decimal representation of two's complement bv
                var MaxValuePlusOne = BigInteger.Pow(2, width);
                var expectedValue = (MaxValuePlusOne + decimalValue) % MaxValuePlusOne;
                Assert.AreEqual(expectedValue, constant.asBvConst.Value.ToBigInteger);
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.Tracing; using System.IO; using System.Linq; using System.Net.Http.Headers; using System.Net.Test.Common; using System.Reflection; using System.Threading; using System.Threading.Tasks; using Xunit; namespace System.Net.Http.Functional.Tests { using Configuration = System.Net.Test.Common.Configuration; [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, ".NET Core specific diagnostic test")] public class DiagnosticsTest : RemoteExecutorTestBase { [Fact] public static void EventSource_ExistsWithCorrectId() { Type esType = typeof(HttpClient).GetTypeInfo().Assembly.GetType("System.Net.NetEventSource", throwOnError: true, ignoreCase: false); Assert.NotNull(esType); Assert.Equal("Microsoft-System-Net-Http", EventSource.GetName(esType)); Assert.Equal(Guid.Parse("bdd9a83e-1929-5482-0d73-2fe5e1c0e16d"), EventSource.GetGuid(esType)); Assert.NotEmpty(EventSource.GenerateManifest(esType, "assemblyPathToIncludeInManifest")); } // Diagnostic tests are each invoked in their own process as they enable/disable // process-wide EventSource-based tracing, and other tests in the same process // could interfere with the tests, as well as the enabling of tracing interfering // with those tests. /// <remarks> /// This test must be in the same test collection as any others testing HttpClient/WinHttpHandler /// DiagnosticSources, since the global logging mechanism makes them conflict inherently. 
/// </remarks> [OuterLoop] // TODO: Issue #11345 [Fact] public void SendAsync_ExpectedDiagnosticSourceLogging() { RemoteInvoke(() => { bool requestLogged = false; Guid requestGuid = Guid.Empty; bool responseLogged = false; Guid responseGuid = Guid.Empty; bool exceptionLogged = false; bool activityLogged = false; var diagnosticListenerObserver = new FakeDiagnosticListenerObserver(kvp => { if (kvp.Key.Equals("System.Net.Http.Request")) { Assert.NotNull(kvp.Value); GetPropertyValueFromAnonymousTypeInstance<HttpRequestMessage>(kvp.Value, "Request"); requestGuid = GetPropertyValueFromAnonymousTypeInstance<Guid>(kvp.Value, "LoggingRequestId"); requestLogged = true; } else if (kvp.Key.Equals("System.Net.Http.Response")) { Assert.NotNull(kvp.Value); GetPropertyValueFromAnonymousTypeInstance<HttpResponseMessage>(kvp.Value, "Response"); responseGuid = GetPropertyValueFromAnonymousTypeInstance<Guid>(kvp.Value, "LoggingRequestId"); var requestStatus = GetPropertyValueFromAnonymousTypeInstance<TaskStatus>(kvp.Value, "RequestTaskStatus"); Assert.Equal(TaskStatus.RanToCompletion, requestStatus); responseLogged = true; } else if (kvp.Key.Equals("System.Net.Http.Exception")) { exceptionLogged = true; } else if (kvp.Key.StartsWith("System.Net.Http.HttpRequestOut")) { activityLogged = true; } }); using (DiagnosticListener.AllListeners.Subscribe(diagnosticListenerObserver)) { diagnosticListenerObserver.Enable( s => !s.Contains("HttpRequestOut")); using (var client = new HttpClient()) { var response = client.GetAsync(Configuration.Http.RemoteEchoServer).Result; } Assert.True(requestLogged, "Request was not logged."); // Poll with a timeout since logging response is not synchronized with returning a response. 
WaitForTrue(() => responseLogged, TimeSpan.FromSeconds(1), "Response was not logged within 1 second timeout."); Assert.Equal(requestGuid, responseGuid); Assert.False(exceptionLogged, "Exception was logged for successful request"); Assert.False(activityLogged, "HttpOutReq was logged while HttpOutReq logging was disabled"); diagnosticListenerObserver.Disable(); } return SuccessExitCode; }).Dispose(); } /// <remarks> /// This test must be in the same test collection as any others testing HttpClient/WinHttpHandler /// DiagnosticSources, since the global logging mechanism makes them conflict inherently. /// </remarks> [OuterLoop] // TODO: Issue #11345 [Fact] public void SendAsync_ExpectedDiagnosticSourceNoLogging() { RemoteInvoke(() => { bool requestLogged = false; bool responseLogged = false; bool activityStartLogged = false; bool activityStopLogged = false; var diagnosticListenerObserver = new FakeDiagnosticListenerObserver(kvp => { if (kvp.Key.Equals("System.Net.Http.Request")) { requestLogged = true; } else if (kvp.Key.Equals("System.Net.Http.Response")) { responseLogged = true; } else if (kvp.Key.Equals("System.Net.Http.HttpRequestOut.Start")) { activityStartLogged = true; } else if (kvp.Key.Equals("System.Net.Http.HttpRequestOut.Stop")) { activityStopLogged = true; } }); using (DiagnosticListener.AllListeners.Subscribe(diagnosticListenerObserver)) { using (var client = new HttpClient()) { LoopbackServer.CreateServerAsync(async (server, url) => { Task<List<string>> requestLines = LoopbackServer.AcceptSocketAsync(server, (s, stream, reader, writer) => LoopbackServer.ReadWriteAcceptedAsync(s, reader, writer)); Task response = client.GetAsync(url); await Task.WhenAll(response, requestLines); AssertNoHeadersAreInjected(requestLines.Result); }).Wait(); } Assert.False(requestLogged, "Request was logged while logging disabled."); Assert.False(activityStartLogged, "HttpRequestOut.Start was logged while logging disabled."); WaitForFalse(() => responseLogged, 
TimeSpan.FromSeconds(1), "Response was logged while logging disabled."); Assert.False(activityStopLogged, "HttpRequestOut.Stop was logged while logging disabled."); } return SuccessExitCode; }).Dispose(); } [OuterLoop] // TODO: Issue #11345 [Fact] public void SendAsync_HttpTracingEnabled_Succeeds() { RemoteInvoke(async () => { using (var listener = new TestEventListener("Microsoft-System-Net-Http", EventLevel.Verbose)) { var events = new ConcurrentQueue<EventWrittenEventArgs>(); await listener.RunWithCallbackAsync(events.Enqueue, async () => { // Exercise various code paths to get coverage of tracing using (var client = new HttpClient()) { // Do a get to a loopback server await LoopbackServer.CreateServerAsync(async (server, url) => { await TestHelper.WhenAllCompletedOrAnyFailed( LoopbackServer.ReadRequestAndSendResponseAsync(server), client.GetAsync(url)); }); // Do a post to a remote server byte[] expectedData = Enumerable.Range(0, 20000).Select(i => unchecked((byte)i)).ToArray(); HttpContent content = new ByteArrayContent(expectedData); content.Headers.ContentMD5 = TestHelper.ComputeMD5Hash(expectedData); using (HttpResponseMessage response = await client.PostAsync(Configuration.Http.RemoteEchoServer, content)) { Assert.Equal(HttpStatusCode.OK, response.StatusCode); } } }); // We don't validate receiving specific events, but rather that we do at least // receive some events, and that enabling tracing doesn't cause other failures // in processing. 
Assert.DoesNotContain(events, ev => ev.EventId == 0); // make sure there are no event source error messages Assert.InRange(events.Count, 1, int.MaxValue); } return SuccessExitCode; }).Dispose(); } [OuterLoop] // TODO: Issue #11345 [Fact] public void SendAsync_ExpectedDiagnosticExceptionLogging() { RemoteInvoke(() => { bool exceptionLogged = false; bool responseLogged = false; var diagnosticListenerObserver = new FakeDiagnosticListenerObserver(kvp => { if (kvp.Key.Equals("System.Net.Http.Response")) { Assert.NotNull(kvp.Value); var requestStatus = GetPropertyValueFromAnonymousTypeInstance<TaskStatus>(kvp.Value, "RequestTaskStatus"); Assert.Equal(TaskStatus.Faulted, requestStatus); responseLogged = true; } else if (kvp.Key.Equals("System.Net.Http.Exception")) { Assert.NotNull(kvp.Value); GetPropertyValueFromAnonymousTypeInstance<Exception>(kvp.Value, "Exception"); exceptionLogged = true; } }); using (DiagnosticListener.AllListeners.Subscribe(diagnosticListenerObserver)) { diagnosticListenerObserver.Enable(s => !s.Contains("HttpRequestOut")); using (var client = new HttpClient()) { Assert.ThrowsAsync<HttpRequestException>(() => client.GetAsync($"http://{Guid.NewGuid()}.com")).Wait(); } // Poll with a timeout since logging response is not synchronized with returning a response. 
WaitForTrue(() => responseLogged, TimeSpan.FromSeconds(1), "Response with exception was not logged within 1 second timeout."); Assert.True(exceptionLogged, "Exception was not logged"); diagnosticListenerObserver.Disable(); } return SuccessExitCode; }).Dispose(); } [OuterLoop] // TODO: Issue #11345 [Fact] public void SendAsync_ExpectedDiagnosticCancelledLogging() { RemoteInvoke(() => { bool cancelLogged = false; var diagnosticListenerObserver = new FakeDiagnosticListenerObserver(kvp => { if (kvp.Key.Equals("System.Net.Http.Response")) { Assert.NotNull(kvp.Value); var status = GetPropertyValueFromAnonymousTypeInstance<TaskStatus>(kvp.Value, "RequestTaskStatus"); Assert.Equal(TaskStatus.Canceled, status); Volatile.Write(ref cancelLogged, true); } }); using (DiagnosticListener.AllListeners.Subscribe(diagnosticListenerObserver)) { diagnosticListenerObserver.Enable(s => !s.Contains("HttpRequestOut")); using (var client = new HttpClient()) { LoopbackServer.CreateServerAsync(async (server, url) => { CancellationTokenSource tcs = new CancellationTokenSource(); Task request = LoopbackServer.AcceptSocketAsync(server, (s, stream, reader, writer) => { tcs.Cancel(); return LoopbackServer.ReadWriteAcceptedAsync(s, reader, writer); }); Task response = client.GetAsync(url, tcs.Token); await Assert.ThrowsAnyAsync<Exception>(() => TestHelper.WhenAllCompletedOrAnyFailed(response, request)); }).Wait(); } } // Poll with a timeout since logging response is not synchronized with returning a response. 
WaitForTrue(() => Volatile.Read(ref cancelLogged), TimeSpan.FromSeconds(1), "Cancellation was not logged within 1 second timeout."); diagnosticListenerObserver.Disable(); return SuccessExitCode; }).Dispose(); } [OuterLoop] // TODO: Issue #11345 [Fact] public void SendAsync_ExpectedDiagnosticSourceActivityLogging() { RemoteInvoke(() => { bool requestLogged = false; bool responseLogged = false; bool activityStartLogged = false; bool activityStopLogged = false; bool exceptionLogged = false; Activity parentActivity = new Activity("parent"); parentActivity.AddBaggage("correlationId", Guid.NewGuid().ToString()); parentActivity.AddBaggage("moreBaggage", Guid.NewGuid().ToString()); parentActivity.AddTag("tag", "tag"); //add tag to ensure it is not injected into request parentActivity.Start(); var diagnosticListenerObserver = new FakeDiagnosticListenerObserver(kvp => { if (kvp.Key.Equals("System.Net.Http.Request")) { requestLogged = true; } else if (kvp.Key.Equals("System.Net.Http.Response")) { responseLogged = true;} else if (kvp.Key.Equals("System.Net.Http.Exception")) { exceptionLogged = true; } else if (kvp.Key.Equals("System.Net.Http.HttpRequestOut.Start")) { Assert.NotNull(kvp.Value); Assert.NotNull(Activity.Current); Assert.Equal(parentActivity, Activity.Current.Parent); GetPropertyValueFromAnonymousTypeInstance<HttpRequestMessage>(kvp.Value, "Request"); activityStartLogged = true; } else if (kvp.Key.Equals("System.Net.Http.HttpRequestOut.Stop")) { Assert.NotNull(kvp.Value); Assert.NotNull(Activity.Current); Assert.Equal(parentActivity, Activity.Current.Parent); Assert.True(Activity.Current.Duration != TimeSpan.Zero); GetPropertyValueFromAnonymousTypeInstance<HttpRequestMessage>(kvp.Value, "Request"); GetPropertyValueFromAnonymousTypeInstance<HttpResponseMessage>(kvp.Value, "Response"); var requestStatus = GetPropertyValueFromAnonymousTypeInstance<TaskStatus>(kvp.Value, "RequestTaskStatus"); Assert.Equal(TaskStatus.RanToCompletion, requestStatus); activityStopLogged 
= true; } }); using (DiagnosticListener.AllListeners.Subscribe(diagnosticListenerObserver)) { diagnosticListenerObserver.Enable(); using (var client = new HttpClient()) { LoopbackServer.CreateServerAsync(async (server, url) => { Task<List<string>> requestLines = LoopbackServer.AcceptSocketAsync(server, (s, stream, reader, writer) => LoopbackServer.ReadWriteAcceptedAsync(s, reader, writer)); Task response = client.GetAsync(url); await Task.WhenAll(response, requestLines); AssertHeadersAreInjected(requestLines.Result, parentActivity); }).Wait(); } Assert.True(activityStartLogged, "HttpRequestOut.Start was not logged."); Assert.False(requestLogged, "Request was logged when Activity logging was enabled."); // Poll with a timeout since logging response is not synchronized with returning a response. WaitForTrue(() => activityStopLogged, TimeSpan.FromSeconds(1), "HttpRequestOut.Stop was not logged within 1 second timeout."); Assert.False(exceptionLogged, "Exception was logged for successful request"); Assert.False(responseLogged, "Response was logged when Activity logging was enabled."); diagnosticListenerObserver.Disable(); } return SuccessExitCode; }).Dispose(); } [OuterLoop] // TODO: Issue #11345 [Fact] public void SendAsync_ExpectedDiagnosticSourceUrlFilteredActivityLogging() { RemoteInvoke(() => { bool activityStartLogged = false; bool activityStopLogged = false; var diagnosticListenerObserver = new FakeDiagnosticListenerObserver(kvp => { if (kvp.Key.Equals("System.Net.Http.HttpRequestOut.Start")){activityStartLogged = true;} else if (kvp.Key.Equals("System.Net.Http.HttpRequestOut.Stop")) {activityStopLogged = true;} }); using (DiagnosticListener.AllListeners.Subscribe(diagnosticListenerObserver)) { diagnosticListenerObserver.Enable((s, r, _) => { if (s.StartsWith("System.Net.Http.HttpRequestOut")) { var request = r as HttpRequestMessage; if (request != null) return !request.RequestUri.Equals(Configuration.Http.RemoteEchoServer); } return true; }); using (var client 
= new HttpClient()) { var response = client.GetAsync(Configuration.Http.RemoteEchoServer).Result; } Assert.False(activityStartLogged, "HttpRequestOut.Start was logged while URL disabled."); // Poll with a timeout since logging response is not synchronized with returning a response. Assert.False(activityStopLogged, "HttpRequestOut.Stop was logged while URL disabled."); diagnosticListenerObserver.Disable(); } return SuccessExitCode; }).Dispose(); } [OuterLoop] // TODO: Issue #11345 [Fact] public void SendAsync_ExpectedDiagnosticExceptionActivityLogging() { RemoteInvoke(() => { bool exceptionLogged = false; bool activityStopLogged = false; var diagnosticListenerObserver = new FakeDiagnosticListenerObserver(kvp => { if (kvp.Key.Equals("System.Net.Http.HttpRequestOut.Stop")) { Assert.NotNull(kvp.Value); GetPropertyValueFromAnonymousTypeInstance<HttpRequestMessage>(kvp.Value, "Request"); var requestStatus = GetPropertyValueFromAnonymousTypeInstance<TaskStatus>(kvp.Value, "RequestTaskStatus"); Assert.Equal(TaskStatus.Faulted, requestStatus); activityStopLogged = true; } else if (kvp.Key.Equals("System.Net.Http.Exception")) { Assert.NotNull(kvp.Value); GetPropertyValueFromAnonymousTypeInstance<Exception>(kvp.Value, "Exception"); exceptionLogged = true; } }); using (DiagnosticListener.AllListeners.Subscribe(diagnosticListenerObserver)) { diagnosticListenerObserver.Enable(); using (var client = new HttpClient()) { Assert.ThrowsAsync<HttpRequestException>(() => client.GetAsync($"http://{Guid.NewGuid()}.com")).Wait(); } // Poll with a timeout since logging response is not synchronized with returning a response. 
WaitForTrue(() => activityStopLogged, TimeSpan.FromSeconds(1), "Response with exception was not logged within 1 second timeout."); Assert.True(exceptionLogged, "Exception was not logged"); diagnosticListenerObserver.Disable(); } return SuccessExitCode; }).Dispose(); } [OuterLoop] // TODO: Issue #11345 [Fact] public void SendAsync_ExpectedDiagnosticExceptionOnlyActivityLogging() { RemoteInvoke(() => { bool exceptionLogged = false; bool activityLogged = false; var diagnosticListenerObserver = new FakeDiagnosticListenerObserver(kvp => { if (kvp.Key.Equals("System.Net.Http.HttpRequestOut.Stop")) { activityLogged = true; } else if (kvp.Key.Equals("System.Net.Http.Exception")) { Assert.NotNull(kvp.Value); GetPropertyValueFromAnonymousTypeInstance<Exception>(kvp.Value, "Exception"); exceptionLogged = true; } }); using (DiagnosticListener.AllListeners.Subscribe(diagnosticListenerObserver)) { diagnosticListenerObserver.Enable(s => s.Equals("System.Net.Http.Exception")); using (var client = new HttpClient()) { Assert.ThrowsAsync<HttpRequestException>(() => client.GetAsync($"http://{Guid.NewGuid()}.com")).Wait(); } // Poll with a timeout since logging response is not synchronized with returning a response. 
WaitForTrue(() => exceptionLogged, TimeSpan.FromSeconds(1), "Exception was not logged within 1 second timeout."); Assert.False(activityLogged, "HttpOutReq was logged when logging was disabled"); diagnosticListenerObserver.Disable(); } return SuccessExitCode; }).Dispose(); } [OuterLoop] // TODO: Issue #11345 [Fact] public void SendAsync_ExpectedDiagnosticStopOnlyActivityLogging() { RemoteInvoke(() => { bool activityStartLogged = false; bool activityStopLogged = false; var diagnosticListenerObserver = new FakeDiagnosticListenerObserver(kvp => { if (kvp.Key.Equals("System.Net.Http.HttpRequestOut.Start")) { activityStartLogged = true; } else if (kvp.Key.Equals("System.Net.Http.HttpRequestOut.Stop")) { Assert.NotNull(Activity.Current); activityStopLogged = true; } }); using (DiagnosticListener.AllListeners.Subscribe(diagnosticListenerObserver)) { diagnosticListenerObserver.Enable(s => s.Equals("System.Net.Http.HttpRequestOut")); using (var client = new HttpClient()) { var response = client.GetAsync(Configuration.Http.RemoteEchoServer).Result; } // Poll with a timeout since logging response is not synchronized with returning a response. 
WaitForTrue(() => activityStopLogged, TimeSpan.FromSeconds(1), "HttpRequestOut.Stop was not logged within 1 second timeout."); Assert.False(activityStartLogged, "HttpRequestOut.Start was logged when start logging was disabled"); diagnosticListenerObserver.Disable(); } return SuccessExitCode; }).Dispose(); } [OuterLoop] // TODO: Issue #11345 [Fact] public void SendAsync_ExpectedDiagnosticCancelledActivityLogging() { RemoteInvoke(() => { bool cancelLogged = false; var diagnosticListenerObserver = new FakeDiagnosticListenerObserver(kvp => { if (kvp.Key == "System.Net.Http.HttpRequestOut.Stop") { Assert.NotNull(kvp.Value); GetPropertyValueFromAnonymousTypeInstance<HttpRequestMessage>(kvp.Value, "Request"); var status = GetPropertyValueFromAnonymousTypeInstance<TaskStatus>(kvp.Value, "RequestTaskStatus"); Assert.Equal(TaskStatus.Canceled, status); Volatile.Write(ref cancelLogged, true); } }); using (DiagnosticListener.AllListeners.Subscribe(diagnosticListenerObserver)) { diagnosticListenerObserver.Enable(); using (var client = new HttpClient()) { LoopbackServer.CreateServerAsync(async (server, url) => { CancellationTokenSource tcs = new CancellationTokenSource(); Task request = LoopbackServer.AcceptSocketAsync(server, (s, stream, reader, writer) => { tcs.Cancel(); return LoopbackServer.ReadWriteAcceptedAsync(s, reader, writer); }); Task response = client.GetAsync(url, tcs.Token); await Assert.ThrowsAnyAsync<Exception>(() => TestHelper.WhenAllCompletedOrAnyFailed(response, request)); }).Wait(); } } // Poll with a timeout since logging response is not synchronized with returning a response. 
WaitForTrue(() => Volatile.Read(ref cancelLogged), TimeSpan.FromSeconds(1), "Cancellation was not logged within 1 second timeout."); diagnosticListenerObserver.Disable(); return SuccessExitCode; }).Dispose(); } private static T GetPropertyValueFromAnonymousTypeInstance<T>(object obj, string propertyName) { Type t = obj.GetType(); PropertyInfo p = t.GetRuntimeProperty(propertyName); object propertyValue = p.GetValue(obj); Assert.NotNull(propertyValue); Assert.IsAssignableFrom<T>(propertyValue); return (T)propertyValue; } private static void WaitForTrue(Func<bool> p, TimeSpan timeout, string message) { // Assert that spin doesn't time out. Assert.True(SpinWait.SpinUntil(p, timeout), message); } private static void WaitForFalse(Func<bool> p, TimeSpan timeout, string message) { // Assert that spin times out. Assert.False(SpinWait.SpinUntil(p, timeout), message); } private void AssertHeadersAreInjected(List<string> requestLines, Activity parent) { string requestId = null; var correlationContext = new List<NameValueHeaderValue>(); foreach (var line in requestLines) { if (line.StartsWith("Request-Id")) { requestId = line.Substring("Request-Id".Length).Trim(' ', ':'); } if (line.StartsWith("Correlation-Context")) { var corrCtxString = line.Substring("Correlation-Context".Length).Trim(' ', ':'); foreach (var kvp in corrCtxString.Split(',')) { correlationContext.Add(NameValueHeaderValue.Parse(kvp)); } } } Assert.True(requestId != null, "Request-Id was not injected when instrumentation was enabled"); Assert.True(requestId.StartsWith(parent.Id)); Assert.NotEqual(parent.Id, requestId); List<KeyValuePair<string, string>> baggage = parent.Baggage.ToList(); Assert.Equal(baggage.Count, correlationContext.Count); foreach (var kvp in baggage) { Assert.Contains(new NameValueHeaderValue(kvp.Key, kvp.Value), correlationContext); } } private void AssertNoHeadersAreInjected(List<string> requestLines) { foreach (var line in requestLines) { Assert.False(line.StartsWith("Request-Id"), 
"Request-Id header was injected when instrumentation was disabled"); Assert.False(line.StartsWith("Correlation-Context"), "Correlation-Context header was injected when instrumentation was disabled"); } } } }
// * **************************************************************************
// * Copyright (c) McCreary, Veselka, Bragg & Allen, P.C.
// * This source code is subject to terms and conditions of the MIT License.
// * A copy of the license can be found in the License.txt file
// * at the root of this distribution.
// * By using this source code in any fashion, you are agreeing to be bound by
// * the terms of the MIT License.
// * You must not remove this notice from this software.
// * **************************************************************************

using System.Collections.Generic;
using System.Linq;
using System.Text;

using JetBrains.Annotations;

using MvbaCore.Collections;

namespace System.Linq
{
    public static class IEnumerableTExtensions
    {
        /// <summary>
        ///     Projects each item through <paramref name="selector" /> and returns the maximum value,
        ///     or <paramref name="default" /> when <paramref name="items" /> is null or empty.
        /// </summary>
        /// <param name="items">source sequence; may be null</param>
        /// <param name="selector">maps an item to the int value being maximized</param>
        /// <param name="default">value returned for a null or empty sequence</param>
        public static int Max<T>([CanBeNull] this IEnumerable<T> items, Func<T, int> selector, int @default)
        {
            if (items == null)
            {
                return @default;
            }
            // DefaultIfEmpty guards against InvalidOperationException on an empty sequence.
            return items.Select(selector).DefaultIfEmpty(@default).Max();
        }
    }
}

namespace System.Collections.Generic
{
    public static class IEnumerableTExtensions
    {
        /// <summary>
        ///     Returns the items of <paramref name="a" /> whose join key does not appear in <paramref name="other" />.
        ///     Uses the default "null or default(T)" test to detect unmatched join results, so for value types an item
        ///     of <paramref name="other" /> equal to default(T) is treated as "no match".
        /// </summary>
        [NotNull]
        public static IEnumerable<T> Except<T, TKey>([NotNull] [ItemCanBeNull] this IEnumerable<T> a,
                                                     [CanBeNull] [ItemCanBeNull] IEnumerable<T> other,
                                                     Func<T, TKey> getJoinKey)
        {
            return a.Except(other, getJoinKey, getJoinKey, item => ReferenceEquals(item, null) || item.Equals(default(T)));
        }

        /// <summary>
        ///     Returns the items of <paramref name="a" /> whose join key does not appear in <paramref name="other" />,
        ///     using <paramref name="isNullOrEmptyComparer" /> to decide whether a joined item counts as "no match".
        /// </summary>
        [NotNull]
        public static IEnumerable<T> Except<T, TKey>([NotNull] [ItemCanBeNull] this IEnumerable<T> a,
                                                     [CanBeNull] [ItemCanBeNull] IEnumerable<T> other,
                                                     Func<T, TKey> getJoinKey,
                                                     Func<T, bool> isNullOrEmptyComparer)
        {
            return a.Except(other, getJoinKey, getJoinKey, isNullOrEmptyComparer);
        }

        /// <summary>
        ///     Set difference by key between sequences of two different element types: yields each item of
        ///     <paramref name="a" /> that has no key-matching counterpart in <paramref name="other" />.
        /// </summary>
        /// <param name="a">sequence being filtered; must not be null</param>
        /// <param name="other">sequence whose keys exclude items; null or empty means nothing is excluded</param>
        /// <param name="getJoinKeyForItemsInThis">key selector for items of <paramref name="a" /></param>
        /// <param name="getJoinKeyForItemsInOther">key selector for items of <paramref name="other" /></param>
        /// <param name="isNullOrEmptyComparer">returns true when a left-joined item represents "no match"</param>
        /// <exception cref="ArgumentNullException">when <paramref name="a" /> is null</exception>
        [NotNull]
        [ContractAnnotation("a:null => halt; other:null => notnull")]
        public static IEnumerable<T> Except<T, TK, TKey>([NotNull] [ItemCanBeNull] this IEnumerable<T> a,
                                                         [CanBeNull] [ItemCanBeNull] IEnumerable<TK> other,
                                                         Func<T, TKey> getJoinKeyForItemsInThis,
                                                         Func<TK, TKey> getJoinKeyForItemsInOther,
                                                         Func<TK, bool> isNullOrEmptyComparer)
        {
            if (a == null)
            {
                throw new ArgumentNullException("a", "list being selected from cannot be null");
            }
            if (other == null)
            {
                return a;
            }
            var otherLocal = other.ToList();
            if (!otherLocal.Any())
            {
                return a;
            }
            var aLocal = a.ToList();
            if (!aLocal.Any())
            {
                return aLocal;
            }
            // Left outer join; items of a whose join group is empty (per the comparer) survive.
            return from itemA in aLocal
                   join itemB in otherLocal
                       on getJoinKeyForItemsInThis(itemA) equals getJoinKeyForItemsInOther(itemB) into c
                   from itemC in c.DefaultIfEmpty()
                   where isNullOrEmptyComparer(itemC)
                   select itemA;
        }

        /// <summary>
        ///     Expands each <see cref="Range{T}" /> into one (index, payload) pair per integer in
        ///     [Start, End] inclusive.
        /// </summary>
        [NotNull]
        public static IEnumerable<KeyValuePair<int, T>> FlattenRanges<T>([NotNull] [ItemNotNull] this IEnumerable<Range<T>> items)
        {
            return items.SelectMany(x => Enumerable.Range(x.Start, x.End - x.Start + 1)
                                                   .Select(y => new KeyValuePair<int, T>(y, x.Payload)));
        }

        /// <summary>
        ///     Invokes <paramref name="action" /> on each item and returns the original sequence for chaining.
        ///     NOTE: the sequence is enumerated here and again by the caller if the return value is iterated.
        /// </summary>
        /// <exception cref="ArgumentNullException">when <paramref name="items" /> is null</exception>
        [NotNull]
        [ContractAnnotation("items:null => halt; action:null => halt")]
        public static IEnumerable<T> ForEach<T>([NotNull] this IEnumerable<T> items, [NotNull] Action<T> action)
        {
            if (items == null)
            {
                throw new ArgumentNullException("items", "collection cannot be null");
            }
            // ReSharper disable PossibleMultipleEnumeration
            foreach (var item in items)
            // ReSharper restore PossibleMultipleEnumeration
            {
                action(item);
            }
            // ReSharper disable PossibleMultipleEnumeration
            return items;
            // ReSharper restore PossibleMultipleEnumeration
        }

        /// <summary>
        ///     Lazily partitions <paramref name="input" /> into consecutive groups. A group continues while
        ///     <paramref name="keepGrouping" />(current, previous) returns true. Each yielded group shares the
        ///     single underlying enumerator, so each group must be consumed before requesting the next.
        /// </summary>
        [ItemNotNull]
        [NotNull]
        public static IEnumerable<IEnumerable<T>> Group<T>([NotNull] this IEnumerable<T> input,
                                                           [NotNull] Func<T, T, bool> keepGrouping)
        {
            using (var enumerator = input.GetEnumerator())
            {
                if (!enumerator.MoveNext())
                {
                    yield break;
                }
                bool hasMore;
                do
                {
                    var group = new ContinuingEnumerator<T>(enumerator, keepGrouping, enumerator.Current);
                    yield return group;
                    // The group sets HasNext when it stopped because the predicate failed (more data remains).
                    hasMore = group.HasNext;
                } while (hasMore);
            }
        }

        /// <summary>
        ///     Splits the sequence into consecutive lists of at most <paramref name="setSize" /> items.
        /// </summary>
        [NotNull]
        public static IEnumerable<List<T>> InSetsOf<T>([NotNull] this IEnumerable<T> items, int setSize)
        {
            return items.InSetsOf(setSize, false, default(T));
        }

        /// <summary>
        ///     Splits the sequence into consecutive lists of <paramref name="setSize" /> items, optionally padding
        ///     a short final list with <paramref name="defaultItemToFillGroups" />.
        /// </summary>
        [NotNull]
        public static IEnumerable<List<T>> InSetsOf<T>([NotNull] this IEnumerable<T> items,
                                                       int setSize,
                                                       bool fillPartialSetWithDefaultItems,
                                                       [CanBeNull] T defaultItemToFillGroups)
        {
            var counter = 0;
            // Predicate closes over counter: returns false exactly once per setSize items, ending the group.
            Func<T, T, bool> keepGoing = (current, previous) =>
            {
                if (++counter > setSize)
                {
                    counter = 0;
                }
                return counter != 0;
            };
            foreach (var list in items.Group(keepGoing).Select(set => set.ToList()))
            {
                if (list.Count < setSize && fillPartialSetWithDefaultItems)
                {
                    list.AddRange(Enumerable.Repeat(defaultItemToFillGroups, setSize - list.Count));
                }
                yield return list;
            }
        }

        /// <summary>
        ///     Returns the items of <paramref name="a" /> whose join key also appears in <paramref name="other" />.
        /// </summary>
        /// <exception cref="ArgumentNullException">when <paramref name="a" /> is null</exception>
        [NotNull]
        [ContractAnnotation("a:null => halt; other:null => notnull; getJoinKey:null => halt")]
        public static IEnumerable<T> Intersect<T, TKey>([NotNull] [ItemCanBeNull] this IEnumerable<T> a,
                                                        [CanBeNull] [ItemCanBeNull] IEnumerable<T> other,
                                                        [NotNull] Func<T, TKey> getJoinKey)
        {
            if (a == null)
            {
                throw new ArgumentNullException("a", "list being selected from cannot be null");
            }
            var aLocal = a.ToList();
            if (!aLocal.Any())
            {
                return aLocal;
            }
            if (other == null)
            {
                return new List<T>();
            }
            var otherLocal = other.ToList();
            if (!otherLocal.Any())
            {
                return new List<T>();
            }
            return from itemA in aLocal
                   join itemB in otherLocal on getJoinKey(itemA) equals getJoinKey(itemB)
                   select itemA;
        }

        /// <summary>
        ///     True when the sequence is null or contains no items.
        /// </summary>
        [ContractAnnotation("list:null => true")]
        public static bool IsNullOrEmpty<T>([CanBeNull] this IEnumerable<T> list)
        {
            return list == null || !list.Any();
        }

        /// <summary>
        ///     Concatenates the items' string representations separated by <paramref name="delimiter" />.
        ///     A null sequence yields ""; a null delimiter is treated as "".
        /// </summary>
        [NotNull]
        public static string Join<T>([CanBeNull] this IEnumerable<T> items, [CanBeNull] string delimiter)
        {
            var result = new StringBuilder();
            if (items != null)
            {
                delimiter = delimiter ?? "";
                foreach (var item in items)
                {
                    result.Append(item);
                    result.Append(delimiter);
                }
                if (result.Length > 0)
                {
                    // Drop the trailing delimiter appended after the last item.
                    result.Length -= delimiter.Length;
                }
            }
            return result.ToString();
        }

        /// <summary>
        ///     Wraps the sequence in a <see cref="CachedEnumerable{T}" /> so repeated enumeration replays
        ///     cached items instead of re-running the source.
        /// </summary>
        /// <param name="enumerable">source sequence</param>
        /// <param name="max">optional cap passed through to the cache; semantics defined by CachedEnumerable</param>
        [NotNull]
        public static CachedEnumerable<T> Memoize<T>([NotNull] this IEnumerable<T> enumerable, int? max = null)
        {
            return new CachedEnumerable<T>(enumerable, max);
        }

        /// <summary>
        ///     Projects each item to a string and joins the results with <paramref name="separator" />.
        /// </summary>
        [NotNull]
        public static string SeparateWith<T>([NotNull] this IEnumerable<T> input,
                                             [NotNull] Func<T, string> getValueToSeparate,
                                             [NotNull] string separator)
        {
            return input.Select(getValueToSeparate).Join(separator);
        }

        /// <summary>
        ///     synchronizes previousItems with newItems and returns the results.
        /// </summary>
        /// <typeparam name="TKey"></typeparam>
        /// <typeparam name="T"></typeparam>
        /// <param name="newItems">all current items, order does not matter</param>
        /// <param name="getItemKeyValue"> e.g. county=>county.Id</param>
        /// <param name="equals">e.g. (county1,County2)=>county1.Name == county2.Name &amp;&amp; county1.Gps == county2.Gps</param>
        /// <param name="previousItems">all previous items, order does not matter</param>
        [NotNull]
        public static IEnumerable<SynchronizationResult<T>> Synchronize<T, TKey>(
            [NotNull] this IEnumerable<T> previousItems,
            [NotNull] IEnumerable<T> newItems,
            [NotNull] Func<T, TKey> getItemKeyValue,
            [NotNull] Func<T, T, bool> equals) where TKey : IComparable
        {
            return SynchronizeInternal(previousItems.OrderBy(getItemKeyValue), newItems.OrderBy(getItemKeyValue), getItemKeyValue, equals);
        }

        /// <summary>
        ///     synchronizes previousItems with newItems and returns the results.
        /// </summary>
        /// <typeparam name="TKey"></typeparam>
        /// <typeparam name="T"></typeparam>
        /// <param name="newItems">all current items, order does not matter</param>
        /// <param name="getItemKeyValue"> e.g. county=>county.Id</param>
        /// <param name="equals">e.g. (county1,County2)=>county1.Name == county2.Name &amp;&amp; county1.Gps == county2.Gps</param>
        /// <param name="previousItems">all previous items, sorted by KeyValue</param>
        [NotNull]
        public static IEnumerable<SynchronizationResult<T>> Synchronize<T, TKey>(
            this IOrderedEnumerable<T> previousItems,
            [NotNull] IEnumerable<T> newItems,
            [NotNull] Func<T, TKey> getItemKeyValue,
            [NotNull] Func<T, T, bool> equals) where TKey : IComparable
        {
            return SynchronizeInternal(previousItems, newItems.OrderBy(getItemKeyValue), getItemKeyValue, equals);
        }

        /// <summary>
        ///     synchronizes previousItems with newItems and returns the results.
        /// </summary>
        /// <typeparam name="TKey"></typeparam>
        /// <typeparam name="T"></typeparam>
        /// <param name="newItems">all current items, sorted by KeyValue</param>
        /// <param name="getItemKeyValue"> e.g. county=>county.Id</param>
        /// <param name="equals">e.g. (county1,County2)=>county1.Name == county2.Name &amp;&amp; county1.Gps == county2.Gps</param>
        /// <param name="previousItems">all previous items, sorted by KeyValue</param>
        public static IEnumerable<SynchronizationResult<T>> Synchronize<T, TKey>(
            [NotNull] this IOrderedEnumerable<T> previousItems,
            [NotNull] IOrderedEnumerable<T> newItems,
            [NotNull] Func<T, TKey> getItemKeyValue,
            [NotNull] Func<T, T, bool> equals) where TKey : IComparable
        {
            // NOTE(review): newItems is re-sorted even though the overload documents it as already ordered;
            // OrderBy is stable over the same key so the result is unchanged, just redundant work.
            return SynchronizeInternal(previousItems, newItems.OrderBy(getItemKeyValue), getItemKeyValue, equals);
        }

        /// <summary>
        ///     synchronizes previousItems with newItems and returns the results.
        /// </summary>
        /// <typeparam name="TKey"></typeparam>
        /// <typeparam name="T"></typeparam>
        /// <param name="newItems">all current items, sorted by KeyValue</param>
        /// <param name="getItemKeyValue"> e.g. county=>county.Id</param>
        /// <param name="equals">e.g. (county1,County2)=>county1.Name == county2.Name &amp;&amp; county1.Gps == county2.Gps</param>
        /// <param name="previousItems">all previous items, order does not matter</param>
        public static IEnumerable<SynchronizationResult<T>> Synchronize<T, TKey>(
            [NotNull] this IEnumerable<T> previousItems,
            [NotNull] IOrderedEnumerable<T> newItems,
            [NotNull] Func<T, TKey> getItemKeyValue,
            [NotNull] Func<T, T, bool> equals) where TKey : IComparable
        {
            return SynchronizeInternal(previousItems.OrderBy(getItemKeyValue), newItems, getItemKeyValue, equals);
        }

        /// <summary>
        ///     Merge-walks two key-sorted sequences and classifies each element as Unchanged, Changed,
        ///     Added (only in new) or Removed (only in previous). Both inputs MUST already be sorted by key.
        /// </summary>
        private static IEnumerable<SynchronizationResult<T>> SynchronizeInternal<T, TKey>(
            // ReSharper disable ParameterTypeCanBeEnumerable.Local
            [NotNull] this IEnumerable<T> previousSortedByKey,
            [NotNull] IEnumerable<T> newSortedByKey,
            // ReSharper restore ParameterTypeCanBeEnumerable.Local
            [NotNull] Func<T, TKey> getItemKeyValue,
            [NotNull] Func<T, T, bool> equals) where TKey : IComparable
        {
            // FIX: the enumerators were previously never disposed; IEnumerator<T> is IDisposable and
            // resource-backed sources (files, readers) would leak. Iterator disposal now flows through.
            using (var previousEnumerator = previousSortedByKey.GetEnumerator())
            using (var newEnumerator = newSortedByKey.GetEnumerator())
            {
                var havePrevious = previousEnumerator.MoveNext();
                var haveNew = newEnumerator.MoveNext();
                var previousItem = default(T);
                var previousItemKey = default(TKey);
                if (havePrevious)
                {
                    previousItem = previousEnumerator.Current;
                    previousItemKey = getItemKeyValue(previousItem);
                }
                var newItem = default(T);
                var newItemKey = default(TKey);
                if (haveNew)
                {
                    newItem = newEnumerator.Current;
                    newItemKey = getItemKeyValue(newItem);
                }
                while (havePrevious && haveNew)
                {
                    var keyComparisonResult = previousItemKey.CompareTo(newItemKey);
                    if (keyComparisonResult == 0)
                    {
                        // item key matched, check for data changes
                        yield return new SynchronizationResult<T>(
                            previousItem,
                            newItem,
                            equals(previousItem, newItem)
                                ? SynchronizationStatus.Unchanged
                                : SynchronizationStatus.Changed);
                        havePrevious = previousEnumerator.MoveNext();
                        if (havePrevious)
                        {
                            previousItem = previousEnumerator.Current;
                            previousItemKey = getItemKeyValue(previousItem);
                        }
                        haveNew = newEnumerator.MoveNext();
                        if (haveNew)
                        {
                            newItem = newEnumerator.Current;
                            newItemKey = getItemKeyValue(newItem);
                        }
                        continue;
                    }
                    if (keyComparisonResult > 0)
                    {
                        // newItem was added
                        yield return new SynchronizationResult<T>(default(T), newItem, SynchronizationStatus.Added);
                        haveNew = newEnumerator.MoveNext();
                        if (haveNew)
                        {
                            newItem = newEnumerator.Current;
                            newItemKey = getItemKeyValue(newItem);
                            continue;
                        }
                        break;
                    }
                    // previousItem was removed
                    yield return new SynchronizationResult<T>(previousItem, default(T), SynchronizationStatus.Removed);
                    havePrevious = previousEnumerator.MoveNext();
                    if (havePrevious)
                    {
                        previousItem = previousEnumerator.Current;
                        previousItemKey = getItemKeyValue(previousItem);
                        continue;
                    }
                    break;
                }
                while (haveNew) // remainder of newSortedByKey were added
                {
                    yield return new SynchronizationResult<T>(default(T), newEnumerator.Current, SynchronizationStatus.Added);
                    haveNew = newEnumerator.MoveNext();
                }
                while (havePrevious) // remainder of previousSortedByKey were deleted
                {
                    yield return new SynchronizationResult<T>(previousEnumerator.Current, default(T), SynchronizationStatus.Removed);
                    havePrevious = previousEnumerator.MoveNext();
                }
            }
        }

        /// <summary>
        ///     synchronizes previousItems with newItems and returns the results.
        /// </summary>
        /// <typeparam name="TKey"></typeparam>
        /// <typeparam name="T"></typeparam>
        /// <param name="newItems">all current items, sorted by KeyValue</param>
        /// <param name="getItemKeyValue"> e.g. county=>county.Id</param>
        /// <param name="equals">e.g. (county1,County2)=>county1.Name == county2.Name &amp;&amp; county1.Gps == county2.Gps</param>
        /// <param name="previousItems">all previous items, sorted by KeyValue</param>
        public static IEnumerable<SynchronizationResult<T>> SynchronizeOrdered<T, TKey>(
            [NotNull] this IEnumerable<T> previousItems,
            [NotNull] IEnumerable<T> newItems,
            [NotNull] Func<T, TKey> getItemKeyValue,
            [NotNull] Func<T, T, bool> equals) where TKey : IComparable
        {
            return SynchronizeInternal(previousItems, newItems, getItemKeyValue, equals);
        }

        /// <summary>
        ///     Pages the sequence: the first page holds up to <paramref name="firstPageSize" /> items
        ///     (always yielded, even if empty), subsequent pages hold up to <paramref name="nthPageSize" />.
        ///     A null sequence yields nothing.
        /// </summary>
        [NotNull]
        public static IEnumerable<List<T>> ToPageSets<T>([CanBeNull] this IEnumerable<T> items,
                                                         int firstPageSize,
                                                         int nthPageSize)
        {
            if (items != null)
            {
                var toEnumerate = items.ToList();
                yield return new List<T>(toEnumerate.Take(firstPageSize));
                var remainder = toEnumerate.Skip(firstPageSize).ToList();
                if (remainder.Any())
                {
                    foreach (var item in remainder.InSetsOf(nthPageSize))
                    {
                        yield return item;
                    }
                }
            }
        }
    }

    /// <summary>
    ///     Pairs an old and a new item with the <see cref="SynchronizationStatus" /> describing how they differ.
    /// </summary>
    public class SynchronizationResult<T>
    {
        public SynchronizationResult(T oldItem, T newItem, SynchronizationStatus status)
        {
            OldItem = oldItem;
            NewItem = newItem;
            Status = status;
        }

        public T NewItem { get; private set; }
        public T OldItem { get; private set; }
        public SynchronizationStatus Status { get; private set; }
    }

    public enum SynchronizationStatus
    {
        Unchanged = 0,
        Added = 10,
        Removed = 20,
        Changed = 30
    }

    /// <summary>
    ///     An inclusive integer range [Start, End] carrying a payload; consumed by FlattenRanges.
    /// </summary>
    public class Range<T>
    {
        public int End { get; set; }
        public T Payload { get; set; }
        public int Start { get; set; }
    }

    /// <summary>
    ///     Adapter over a shared <see cref="IEnumerator{T}" /> that yields items while
    ///     <c>keepGrouping(current, previous)</c> holds. When the predicate fails, it records the
    ///     boundary item in <see cref="Current" /> and sets <see cref="HasNext" /> so the caller
    ///     (see Group) knows more data remains on the shared enumerator.
    /// </summary>
    public class ContinuingEnumerator<T> : IEnumerable<T>
    {
        public ContinuingEnumerator([NotNull] IEnumerator<T> enumerator,
                                    [NotNull] Func<T, T, bool> keepGrouping,
                                    [CanBeNull] T current)
        {
            Current = current;
            _enumerator = enumerator;
            _keepGrouping = keepGrouping;
        }

        private readonly IEnumerator<T> _enumerator;
        private readonly Func<T, T, bool> _keepGrouping;

        public IEnumerator<T> GetEnumerator()
        {
            var previous = Current;
            do
            {
                if (_keepGrouping(_enumerator.Current, previous))
                {
                    previous = _enumerator.Current;
                    yield return _enumerator.Current;
                    continue;
                }
                // Predicate failed: remember the boundary item and signal that data remains.
                HasNext = true;
                Current = _enumerator.Current;
                yield break;
            } while (_enumerator.MoveNext());
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        public T Current { get; private set; }
        public bool HasNext { get; private set; }
    }
}
S/W Version Information Model: Emulator Tizen-Version: 2.2.1 Build-Number: Tizen_EMULATOR_20131107.2308 Build-Date: 2013.11.07 23:08:36 Crash Information Process Name: QRDemo PID: 4651 Date: 2013-11-30 07:24:07(GMT+0900) Executable File Path: /opt/apps/vd84JCg9vN/bin/QRDemo This process is multi-thread process pid=4651 tid=4651 Signal: 11 (SIGSEGV) si_code: -6 signal sent by tkill (sent by pid 4651, uid 5000) Register Information gs = 0x00000033, fs = 0x00000000 es = 0x0000007b, ds = 0x0000007b edi = 0xb23f0735, esi = 0x00000040 ebp = 0xbfd7ed38, esp = 0xbfd7ed00 eax = 0x2e6f732e, ebx = 0xb23f5b80 ecx = 0x00000000, edx = 0x00000000 eip = 0xb23e9a34 Memory Information MemTotal: 1029696 KB MemFree: 215696 KB Buffers: 33604 KB Cached: 545784 KB VmPeak: 239492 KB VmSize: 233912 KB VmLck: 0 KB VmPin: 0 KB VmHWM: 84200 KB VmRSS: 79788 KB VmData: 66936 KB VmStk: 136 KB VmExe: 40 KB VmLib: 119120 KB VmPTE: 200 KB VmSwap: 0 KB Maps Information 08048000 08052000 r-xp /usr/bin/launchpad_preloading_preinitializing_daemon abd67000 abdb1000 r-xp /usr/lib/gstreamer-0.10/libgstcoreelements.so abdb3000 abdde000 r-xp /usr/lib/libgstcontroller-0.10.so.0.30.0 abdf1000 abe0e000 r-xp /usr/lib/gstreamer-0.10/libgstvideo4linux2.so ae73d000 ae7b0000 r-xp /usr/lib/host-gl/libGL.so.1.2 ae7dc000 ae7ea000 r-xp /usr/lib/evas/modules/engines/software_generic/linux-gnu-i686-1.7.99/module.so ae7eb000 ae822000 r-xp /usr/lib/evas/modules/engines/gl_x11/linux-gnu-i686-1.7.99/module.so ae826000 ae828000 r-xp /usr/lib/libtzsvc.so.0.0.1 ae829000 ae82c000 r-xp /usr/lib/libemail-network.so.1.1.0 ae82d000 ae901000 r-xp /usr/lib/libuw-imap-toolkit.so.0.0.0 ae907000 ae90c000 r-xp /usr/lib/libss-client.so.1.0.0 ae90d000 ae914000 r-xp /usr/lib/libmmutil_jpeg.so.0.0.0 ae915000 ae93a000 r-xp /usr/lib/libnfc.so.1.0.0 ae93b000 ae953000 r-xp /usr/lib/libnfc-common-lib.so.1.0.0 ae954000 ae98c000 r-xp /usr/lib/libbluetooth-api.so.1.0.0 ae98d000 ae9cf000 r-xp /usr/lib/libzmq.so.3.0.0 ae9d2000 ae9df000 r-xp 
/usr/lib/libpims-ipc.so.0.0.30 ae9e0000 ae9e6000 r-xp /usr/lib/libmemenv.so.1.1.0 ae9e7000 aea30000 r-xp /usr/lib/libleveldb.so.1.1.0 aea32000 aea3d000 r-xp /usr/lib/libgstfft-0.10.so.0.25.0 aea3e000 aea7a000 r-xp /usr/lib/libgstaudio-0.10.so.0.25.0 aea7c000 aea91000 r-xp /usr/lib/libgstvideo-0.10.so.0.25.0 aea92000 aeab2000 r-xp /usr/lib/libgstpbutils-0.10.so.0.25.0 aeab4000 aeaea000 r-xp /usr/lib/libxslt.so.1.1.16 aeaeb000 aeaf0000 r-xp /usr/lib/libeukit.so.1.7.99 aeaf1000 aeafb000 r-xp /usr/lib/libui-gadget-1.so.0.1.0 aeafc000 aeb2b000 r-xp /usr/lib/host-gl/libGLESv2.so.1.0 aeb31000 aeb3b000 r-xp /usr/lib/libmsg_vobject.so aeb3c000 aeb49000 r-xp /usr/lib/libdrm-client.so.0.0.1 aeb4a000 aeb68000 r-xp /usr/lib/libmsg_plugin_manager.so aeb69000 aeba9000 r-xp /usr/lib/libmsg_framework_handler.so aebaa000 aebe6000 r-xp /usr/lib/libmsg_transaction_proxy.so aebe7000 aec32000 r-xp /usr/lib/libmsg_utils.so aec33000 aec48000 r-xp /usr/lib/libemail-common-use.so.1.1.0 aec49000 aeceb000 r-xp /usr/lib/libemail-core.so.1.1.0 aecf4000 aed56000 r-xp /usr/lib/libemail-storage.so.1.1.0 aed57000 aed67000 r-xp /usr/lib/libemail-ipc.so.1.1.0 aed68000 aedaa000 r-xp /usr/lib/libSLP-location.so.0.0.0 aedab000 aedb6000 r-xp /usr/lib/libdownload-provider-interface.so.1.1.6 aedb7000 aedc0000 r-xp /usr/lib/libmedia-utils.so.0.0.0 aedc1000 aedc3000 r-xp /usr/lib/libmedia-hash.so.1.0.0 aedc4000 aede5000 r-xp /usr/lib/libmedia-thumbnail.so.1.0.0 aede6000 aede8000 r-xp /usr/lib/libmedia-svc-hash.so.1.0.0 aede9000 aee08000 r-xp /usr/lib/libmedia-service.so.1.0.0 aee09000 aee12000 r-xp /usr/lib/libbadge.so.0.0.1 aee13000 aee1b000 r-xp /usr/lib/libcapi-appfw-app-manager.so.0.1.0 aee1c000 aee24000 r-xp /usr/lib/libshortcut.so.0.0.1 aee25000 aee29000 r-xp /usr/lib/libminicontrol-provider.so.0.0.1 aee2a000 aee38000 r-xp /usr/lib/liblivebox-service.so.0.0.1 aee39000 aee55000 r-xp /usr/lib/liblivebox-viewer.so.0.0.1 aee56000 aee5c000 r-xp /usr/lib/libcapi-appfw-package-manager.so.0.0.30 aee5d000 
aee6b000 r-xp /usr/lib/libstt.so aee6c000 af098000 r-xp /usr/lib/libface-engine-plugin.so af0f0000 af0f9000 r-xp /usr/lib/libcapi-network-nfc.so.0.0.11 af0fa000 af118000 r-xp /usr/lib/libcapi-network-bluetooth.so.0.1.40 af119000 af124000 r-xp /usr/lib/libcapi-network-wifi.so.0.1.2_24 af125000 af13f000 r-xp /usr/lib/libnetwork.so.0.0.0 af141000 af161000 r-xp /usr/lib/libaccounts-svc.so.0.2.66 af162000 af1ed000 r-xp /usr/lib/libcontacts-service2.so.0.9.114.7 af205000 af265000 r-xp /usr/lib/libcalendar-service2.so.0.1.44 af268000 af273000 r-xp /usr/lib/libcapi-web-favorites.so af274000 b1227000 r-xp /usr/lib/libewebkit2.so.0.11.113 b1315000 b1320000 r-xp /usr/lib/libpush.so.0.2.12 b1321000 b1343000 r-xp /usr/lib/libmsg_mapi.so.0.1.0 b1344000 b1364000 r-xp /usr/lib/libemail-api.so.1.1.0 b1365000 b1371000 r-xp /usr/lib/libcapi-system-sensor.so.0.1.17 b1373000 b1377000 r-xp /usr/lib/libcapi-telephony-sim.so.0.1.7 b1378000 b137c000 r-xp /usr/lib/libcapi-telephony-network-info.so.0.1.0 b137d000 b1389000 r-xp /usr/lib/libcapi-location-manager.so.0.1.11 b138a000 b138f000 r-xp /usr/lib/libcapi-web-url-download.so.0.1.0 b1390000 b13bc000 r-xp /usr/lib/libcapi-content-media-content.so.0.2.59 b13bd000 b13bf000 r-xp /usr/lib/libcamsrcjpegenc.so.0.0.0 b13c0000 b13dd000 r-xp /usr/lib/libwifi-direct.so.0.0 b13de000 b13e9000 r-xp /usr/lib/libcapi-network-tethering.so.0.1.0 b13ea000 b13f7000 r-xp /usr/lib/libcapi-network-connection.so.0.1.3_18 b13f8000 b1423000 r-xp /usr/lib/osp/libosp-shell-core.so.1.2.2.1 b1425000 b14cd000 r-xp /usr/lib/osp/libosp-shell.so.1.2.2.1 b14d8000 b14df000 r-xp /usr/lib/osp/libosp-speech-stt.so.1.2.2.0 b14e0000 b14e7000 r-xp /usr/lib/osp/libosp-speech-tts.so.1.2.2.0 b14e8000 b151d000 r-xp /usr/lib/osp/libosp-face.so.1.2.2.0 b151f000 b15ab000 r-xp /usr/lib/osp/libosp-nfc.so.1.2.2.0 b15b0000 b1638000 r-xp /usr/lib/osp/libosp-bluetooth.so.1.2.2.0 b163d000 b16e7000 r-xp /usr/lib/osp/libosp-wifi.so.1.2.2.0 b16ec000 b1827000 r-xp 
/usr/lib/osp/libosp-social.so.1.2.2.0 b182b000 b18e6000 r-xp /usr/lib/osp/libosp-web.so.1.2.2.0 b18ed000 b1964000 r-xp /usr/lib/osp/libosp-messaging.so.1.2.2.0 b1966000 b1990000 r-xp /usr/lib/osp/libosp-uix.so.1.2.2.0 b1993000 b19c0000 r-xp /usr/lib/osp/libosp-telephony.so.1.2.2.0 b19c2000 b19e7000 r-xp /usr/lib/osp/libosp-locations.so.1.2.2.3 b19e8000 b1a7d000 r-xp /usr/lib/osp/libosp-content.so.1.2.2.0 b1a7f000 b1a9a000 r-xp /usr/lib/osp/libosp-ime.so.1.2.2.0 b1a9b000 b1ac1000 r-xp /usr/lib/osp/libosp-json.so.1.2.2.0 b1ac3000 b1adb000 r-xp /usr/lib/libmmfile_utils.so.0.0.0 b1adc000 b1ae2000 r-xp /usr/lib/libmmffile.so.0.0.0 b1ae3000 b1b70000 r-xp /usr/lib/libmmfcamcorder.so.0.0.0 b1b76000 b1c9c000 r-xp /usr/lib/osp/libosp-net.so.1.2.2.0 b1ca3000 b20f1000 r-xp /usr/lib/osp/libarengine.so b216f000 b2174000 r-xp /usr/lib/libcapi-media-metadata-extractor.so b2175000 b2184000 r-xp /usr/lib/libcapi-media-camera.so.0.1.4 b2185000 b2188000 r-xp /usr/lib/libcapi-media-sound-manager.so.0.1.1 b2189000 b219b000 r-xp /usr/lib/libcapi-media-player.so.0.1.1 b219c000 b21cb000 r-xp /usr/lib/libopencore-amrnb.so.0.0.2 b21cc000 b21f7000 r-xp /usr/lib/libvorbis.so.0.4.3 b21f8000 b21fd000 r-xp /usr/lib/libcapi-media-audio-io.so.0.2.0 b21fe000 b222b000 r-xp /usr/lib/osp/libosp-image.so.1.2.2.0 b222c000 b2255000 r-xp /usr/lib/osp/libosp-vision.so.1.2.2.0 b2257000 b23c4000 r-xp /usr/lib/osp/libosp-media.so.1.2.2.0 b23d8000 b23d9000 r-xp /usr/lib/libX11-xcb.so.1.0.0 b23e2000 b23f5000 r-xp /opt/usr/apps/vd84JCg9vN/bin/QRDemo.exe b23f7000 b2453000 r-xp /usr/lib/libosp-env-config.so.1.2.2.1 b2454000 b24a7000 r-xp /usr/lib/libpulsecommon-0.9.23.so b24a8000 b24ae000 r-xp /usr/lib/libascenario-0.2.so.0.0.0 b24af000 b24b4000 r-xp /usr/lib/libmmfsoundcommon.so.0.0.0 b24b5000 b24fd000 r-xp /usr/lib/libpulse.so.0.12.4 b24fe000 b2502000 r-xp /usr/lib/libpulse-simple.so.0.0.3 b2503000 b25f5000 r-xp /usr/lib/libasound.so.2.0.0 b25f9000 b261e000 r-xp /usr/lib/libavsysaudio.so.0.0.1 b261f000 b2633000 
r-xp /usr/lib/libmmfsound.so.0.1.0 b2634000 b2715000 r-xp /usr/lib/libgstreamer-0.10.so.0.30.0 b271a000 b2779000 r-xp /usr/lib/libgstbase-0.10.so.0.30.0 b277a000 b2786000 r-xp /usr/lib/libgstapp-0.10.so.0.25.0 b2787000 b279a000 r-xp /usr/lib/libgstinterfaces-0.10.so.0.25.0 b279b000 b279e000 r-xp /usr/lib/libmm_ta.so.0.0.0 b279f000 b27b6000 r-xp /usr/lib/libICE.so.6.3.0 b27b9000 b27c0000 r-xp /usr/lib/libSM.so.6.0.1 b27c1000 b27c2000 r-xp /usr/lib/libmmfkeysound.so.0.0.0 b27c3000 b27ce000 r-xp /usr/lib/libmmfcommon.so.0.0.0 b27cf000 b27da000 r-xp /usr/lib/libaudio-session-mgr.so.0.0.0 b27de000 b27e2000 r-xp /usr/lib/libmmfsession.so.0.0.0 b27e3000 b2841000 r-xp /usr/lib/libmmfplayer.so.0.0.0 b2843000 b284b000 r-xp /usr/lib/libxcb-render.so.0.0.0 b284c000 b284e000 r-xp /usr/lib/libxcb-shm.so.0.0.0 b284f000 b28b2000 r-xp /usr/lib/libtiff.so.5.1.0 b28b5000 b2907000 r-xp /usr/lib/libturbojpeg.so b2918000 b291f000 r-xp /usr/lib/libmmutil_imgp.so.0.0.0 b2920000 b2929000 r-xp /usr/lib/libgif.so.4.1.6 b292a000 b2950000 r-xp /usr/lib/libavutil.so.51.73.101 b2957000 b299c000 r-xp /usr/lib/libswscale.so.2.1.101 b299d000 b2d02000 r-xp /usr/lib/libavcodec.so.54.59.100 b3023000 b304a000 r-xp /usr/lib/libpng12.so.0.50.0 b304b000 b3052000 r-xp /usr/lib/libfeedback.so.0.1.4 b3053000 b3062000 r-xp /usr/lib/libtts.so b3063000 b3079000 r-xp /usr/lib/host-gl/libEGL.so.1.0 b307a000 b3194000 r-xp /usr/lib/libcairo.so.2.11200.12 b3197000 b31bc000 r-xp /usr/lib/osp/libosp-image-core.so.1.2.2.0 b31bd000 b4021000 r-xp /usr/lib/osp/libosp-uifw.so.1.2.2.1 b4094000 b409a000 r-xp /usr/lib/libslp_devman_plugin.so b409b000 b409f000 r-xp /usr/lib/libsyspopup_caller.so.0.1.0 b40a0000 b40a5000 r-xp /usr/lib/libsysman.so.0.2.0 b40a6000 b40be000 r-xp /usr/lib/libsecurity-server-commons.so.1.0.0 b40bf000 b40c1000 r-xp /usr/lib/libsystemd-daemon.so.0.0.1 b40c2000 b40c4000 r-xp /usr/lib/libdeviced.so.0.1.0 b40c5000 b40e2000 r-xp /usr/lib/libpkgmgr_parser.so.0.1.0 b40e3000 b40e5000 r-xp 
/usr/lib/libpkgmgr_installer_status_broadcast_server.so.0.1.0 b40e6000 b40e9000 r-xp /usr/lib/libpkgmgr_installer_client.so.0.1.0 b40ea000 b40ee000 r-xp /usr/lib/libdevice-node.so.0.1 b40ef000 b40f3000 r-xp /usr/lib/libheynoti.so.0.0.2 b40f4000 b415d000 r-xp /usr/lib/libsoup-2.4.so.1.5.0 b415f000 b417e000 r-xp /usr/lib/libsecurity-server-client.so.1.0.1 b417f000 b4184000 r-xp /usr/lib/libcapi-system-info.so.0.2.0 b4185000 b418b000 r-xp /usr/lib/libcapi-system-system-settings.so.0.0.2 b418c000 b418e000 r-xp /usr/lib/libcapi-system-power.so.0.1.1 b418f000 b4193000 r-xp /usr/lib/libcapi-system-device.so.0.1.0 b4194000 b4199000 r-xp /usr/lib/libcapi-system-runtime-info.so.0.0.3 b419a000 b419d000 r-xp /usr/lib/libcapi-network-serial.so.0.0.8 b419e000 b419f000 r-xp /usr/lib/libcapi-content-mime-type.so.0.0.2 b41a0000 b41b3000 r-xp /usr/lib/libcapi-appfw-application.so.0.1.0 b41b5000 b41e5000 r-xp /usr/lib/libSLP-tapi.so.0.0.0 b41e6000 b41ea000 r-xp /usr/lib/libuuid.so.1.3.0 b41eb000 b4212000 r-xp /usr/lib/libpkgmgr-info.so.0.0.17 b4213000 b4228000 r-xp /usr/lib/libpkgmgr-client.so.0.1.68 b4229000 b422a000 r-xp /usr/lib/libpmapi.so.1.2 b422b000 b4237000 r-xp /usr/lib/libminizip.so.1.0.0 b4238000 b4247000 r-xp /usr/lib/libmessage-port.so.1.2.2.1 b4248000 b4391000 r-xp /usr/lib/libxml2.so.2.7.8 b4397000 b43bf000 r-xp /usr/lib/libpcre.so.0.0.1 b43c0000 b43c3000 r-xp /usr/lib/libiniparser.so.0 b43c5000 b43ca000 r-xp /usr/lib/libhaptic.so.0.1 b43cb000 b43cc000 r-xp /usr/lib/libcryptsvc.so.0.0.1 b43cd000 b43d4000 r-xp /usr/lib/libdevman.so.0.1 b43d5000 b43db000 r-xp /usr/lib/libchromium.so.1.0 b43dc000 b43e4000 r-xp /usr/lib/libappsvc.so.0.1.0 b43e5000 b43e7000 r-xp /usr/lib/osp/libappinfo.so.1.2.2.1 b43e8000 b43f0000 r-xp /usr/lib/libalarm.so.0.0.0 b43f1000 b43fb000 r-xp /usr/lib/libcapi-security-privilege-manager.so.0.0.3 b43fc000 b4415000 r-xp /usr/lib/libprivacy-manager-client.so.0.0.5 b4416000 b48a9000 r-xp /usr/lib/osp/libosp-appfw.so.1.2.2.1 b48ca000 b48d4000 r-xp 
/lib/libnss_files-2.13.so b48d6000 b48df000 r-xp /lib/libnss_nis-2.13.so b48e1000 b48f4000 r-xp /lib/libnsl-2.13.so b48f8000 b48fe000 r-xp /lib/libnss_compat-2.13.so b4c20000 b4c38000 r-xp /usr/lib/libcom-core.so.0.0.1 b4c39000 b4c3c000 r-xp /usr/lib/libdri2.so.0.0.0 b4c3d000 b4c48000 r-xp /usr/lib/libdrm.so.2.4.0 b4c49000 b4c4e000 r-xp /usr/lib/libtbm.so.1.0.0 b4c4f000 b4c53000 r-xp /usr/lib/libXv.so.1.0.0 b4c54000 b4d71000 r-xp /usr/lib/libscim-1.0.so.8.2.3 b4d80000 b4d96000 r-xp /usr/lib/libnotification.so.0.1.0 b4d97000 b4da0000 r-xp /usr/lib/libutilX.so.1.1.0 b4da1000 b4dd4000 r-xp /usr/lib/ecore/immodules/libisf-imf-module.so b4dd6000 b4de7000 r-xp /lib/libresolv-2.13.so b4deb000 b4dee000 r-xp /usr/lib/libgmodule-2.0.so.0.3200.3 b4def000 b4f5f000 r-xp /usr/lib/libcrypto.so.1.0.0 b4f77000 b4fcd000 r-xp /usr/lib/libssl.so.1.0.0 b4fd2000 b5001000 r-xp /usr/lib/libidn.so.11.5.44 b5002000 b5011000 r-xp /usr/lib/libcares.so.2.0.0 b5012000 b5039000 r-xp /lib/libexpat.so.1.5.2 b503b000 b506e000 r-xp /usr/lib/libicule.so.48.1 b506f000 b507a000 r-xp /usr/lib/libsf_common.so b507b000 b5157000 r-xp /usr/lib/libstdc++.so.6.0.14 b5163000 b52c8000 r-xp /usr/lib/libgio-2.0.so.0.3200.3 b52cc000 b52fd000 r-xp /usr/lib/libexif.so.12.3.3 b530a000 b5316000 r-xp /usr/lib/libethumb.so.1.7.99 b5317000 b537b000 r-xp /usr/lib/libsndfile.so.1.0.25 b5381000 b5384000 r-xp /usr/lib/libctxdata.so.0.0.0 b5385000 b539c000 r-xp /usr/lib/libremix.so.0.0.0 b539d000 b539f000 r-xp /usr/lib/libecore_imf_evas.so.1.7.99 b53a0000 b53cd000 r-xp /usr/lib/liblua-5.1.so b53ce000 b53d8000 r-xp /usr/lib/libembryo.so.1.7.99 b53d9000 b53dc000 r-xp /usr/lib/libecore_input_evas.so.1.7.99 b53dd000 b543e000 r-xp /usr/lib/libcurl.so.4.3.0 b5440000 b5446000 r-xp /usr/lib/libecore_ipc.so.1.7.99 b5447000 b54de000 r-xp /usr/lib/libpixman-1.so.0.28.2 b54e3000 b5518000 r-xp /usr/lib/libfontconfig.so.1.5.0 b551a000 b559f000 r-xp /usr/lib/libharfbuzz.so.0.907.0 b55a9000 b55bf000 r-xp /usr/lib/libfribidi.so.0.3.1 b55c0000 
b5645000 r-xp /usr/lib/libfreetype.so.6.8.1 b5649000 b5690000 r-xp /usr/lib/libjpeg.so.8.0.2 b56a1000 b56c0000 r-xp /lib/libz.so.1.2.5 b56c1000 b56d1000 r-xp /usr/lib/libsensor.so.1.1.0 b56d4000 b56d7000 r-xp /usr/lib/libapp-checker.so.0.1.0 b56d8000 b56e1000 r-xp /usr/lib/libxdgmime.so.1.1.0 b67ea000 b6945000 r-xp /usr/lib/libicuuc.so.48.1 b6953000 b6b32000 r-xp /usr/lib/libicui18n.so.48.1 b6b39000 b6b3c000 r-xp /usr/lib/libSLP-db-util.so.0.1.0 b6b3d000 b6b49000 r-xp /usr/lib/libvconf.so.0.2.45 b6b4a000 b6b5c000 r-xp /usr/lib/libail.so.0.1.0 b6b5d000 b6b82000 r-xp /usr/lib/libdbus-glib-1.so.2.2.2 b6b83000 b6b88000 r-xp /usr/lib/libffi.so.5.0.10 b6b89000 b6b8a000 r-xp /usr/lib/libgthread-2.0.so.0.3200.3 b6b8b000 b6b9c000 r-xp /usr/lib/libXext.so.6.4.0 b6b9d000 b6ba2000 r-xp /usr/lib/libXtst.so.6.1.0 b6ba3000 b6bab000 r-xp /usr/lib/libXrender.so.1.3.0 b6bac000 b6bb5000 r-xp /usr/lib/libXrandr.so.2.2.0 b6bb6000 b6bc4000 r-xp /usr/lib/libXi.so.6.1.0 b6bc5000 b6bc9000 r-xp /usr/lib/libXfixes.so.3.1.0 b6bca000 b6bcc000 r-xp /usr/lib/libXgesture.so.7.0.0 b6bcd000 b6bcf000 r-xp /usr/lib/libXcomposite.so.1.0.0 b6bd0000 b6bd2000 r-xp /usr/lib/libXdamage.so.1.1.0 b6bd3000 b6bdd000 r-xp /usr/lib/libXcursor.so.1.0.2 b6bde000 b6bea000 r-xp /usr/lib/libemotion.so.1.7.99 b6beb000 b6c16000 r-xp /usr/lib/libecore_con.so.1.7.99 b6c18000 b6c20000 r-xp /usr/lib/libecore_imf.so.1.7.99 b6c21000 b6c2c000 r-xp /usr/lib/libethumb_client.so.1.7.99 b6c2d000 b6c30000 r-xp /usr/lib/libefreet_trash.so.1.7.99 b6c31000 b6c37000 r-xp /usr/lib/libefreet_mime.so.1.7.99 b6c38000 b6c5a000 r-xp /usr/lib/libefreet.so.1.7.99 b6c5c000 b6c68000 r-xp /usr/lib/libedbus.so.1.7.99 b6c69000 b6d00000 r-xp /usr/lib/libedje.so.1.7.99 b6d02000 b6d19000 r-xp /usr/lib/libecore_input.so.1.7.99 b6d2d000 b6d34000 r-xp /usr/lib/libecore_file.so.1.7.99 b6d35000 b6d62000 r-xp /usr/lib/libecore_evas.so.1.7.99 b6d64000 b6dbf000 r-xp /usr/lib/libeina.so.1.7.99 b6dc1000 b6ecb000 r-xp /usr/lib/libevas.so.1.7.99 b6ee6000 
b6f03000 r-xp /usr/lib/libeet.so.1.7.99 b6f04000 b6f28000 r-xp /lib/libm-2.13.so b6f2a000 b6f30000 r-xp /usr/lib/libappcore-common.so.1.1 b6f31000 b6f41000 r-xp /usr/lib/libaul.so.0.1.0 b6f42000 b6f92000 r-xp /usr/lib/libgobject-2.0.so.0.3200.3 b6f93000 b6fd6000 r-xp /usr/lib/libecore_x.so.1.7.99 b6fd8000 b6ff7000 r-xp /usr/lib/libecore.so.1.7.99 b7006000 b71d8000 r-xp /usr/lib/libelementary.so.1.7.99 b71e3000 b71ea000 r-xp /usr/lib/libcapi-media-recorder.so.0.1.3 b71eb000 b71f1000 r-xp /usr/lib/libogg.so.0.7.1 b71f2000 b71f4000 r-xp /opt/usr/apps/vd84JCg9vN/bin/QRDemo b71f7000 b71fb000 r-xp /lib/libattr.so.1.1.0 b71fc000 b71fe000 r-xp /usr/lib/libXau.so.6.0.0 b71ff000 b7206000 r-xp /lib/librt-2.13.so b7209000 b7211000 r-xp /lib/libcrypt-2.13.so b723a000 b723d000 r-xp /lib/libcap.so.2.21 b723e000 b7240000 r-xp /usr/lib/libiri.so b7241000 b725b000 r-xp /lib/libgcc_s-4.5.3.so.1 b725c000 b727c000 r-xp /usr/lib/libxcb.so.1.1.0 b727e000 b7287000 r-xp /lib/libunwind.so.8.0.1 b7291000 b73e7000 r-xp /lib/libc-2.13.so b73ed000 b73f2000 r-xp /usr/lib/libsmack.so.1.0.0 b73f3000 b743f000 r-xp /usr/lib/libdbus-1.so.3.7.2 b7440000 b7445000 r-xp /usr/lib/libbundle.so.0.1.22 b7446000 b7448000 r-xp /lib/libdl-2.13.so b744a000 b7573000 r-xp /usr/lib/libglib-2.0.so.0.3200.3 b7574000 b761e000 r-xp /usr/lib/libsqlite3.so.0.8.6 b7621000 b7633000 r-xp /usr/lib/libprivilege-control.so.0.0.2 b7634000 b7769000 r-xp /usr/lib/libX11.so.6.3.0 b776d000 b7782000 r-xp /lib/libpthread-2.13.so b7788000 b7789000 r-xp /usr/lib/libdlog.so.0.0.0 b778a000 b778c000 r-xp /usr/lib/libXinerama.so.1.0.0 b778d000 b7793000 r-xp /usr/lib/libecore_fb.so.1.7.99 b7795000 b779a000 r-xp /usr/lib/libappcore-efl.so.1.1 b779c000 b77a0000 r-xp /usr/lib/libsys-assert.so b77a4000 b77a5000 r-xp [vdso] b77a5000 b77c1000 r-xp /lib/ld-2.13.so End of Maps Information Callstack Information (PID:4651) Call Stack Count: 21 0: (0xb22f1744) [/usr/lib/osp/libosp-media.so.1] + 0x9a744 1: 
Tizen::Base::Runtime::_Event::ProcessListeners(std::tr1::shared_ptr<Tizen::Base::Runtime::IEventArg>) + 0x41a (0xb45c986a) [/usr/lib/osp/libosp-appfw.so] + 0x1b386a 2: Tizen::Base::Runtime::_Event::Fire(std::tr1::shared_ptr<Tizen::Base::Runtime::IEventArg>) + 0x62 (0xb45c9d22) [/usr/lib/osp/libosp-appfw.so] + 0x1b3d22 3: (0xb45d16b4) [/usr/lib/osp/libosp-appfw.so] + 0x1bb6b4 4: g_io_unix_dispatch + 0x4b (0xb74d550b) [/usr/lib/libglib-2.0.so.0] + 0x8b50b 5: g_main_context_dispatch + 0x133 (0xb7492a13) [/usr/lib/libglib-2.0.so.0] + 0x48a13 6: _ecore_glib_select + 0x3fb (0xb6febdeb) [/usr/lib/libecore.so.1] + 0x13deb 7: _ecore_main_select + 0x3a5 (0xb6fe5625) [/usr/lib/libecore.so.1] + 0xd625 8: _ecore_main_loop_iterate_internal + 0x3b9 (0xb6fe6179) [/usr/lib/libecore.so.1] + 0xe179 9: ecore_main_loop_begin + 0x3f (0xb6fe64ef) [/usr/lib/libecore.so.1] + 0xe4ef 10: elm_run + 0x17 (0xb70f7717) [/usr/lib/libelementary.so.1] + 0xf1717 11: appcore_efl_main + 0x42e (0xb779812e) [/usr/lib/libappcore-efl.so.1] + 0x312e 12: app_efl_main + 0xe8 (0xb41a4bf8) [/usr/lib/libcapi-appfw-application.so.0] + 0x4bf8 13: Tizen::App::_AppImpl::Execute(Tizen::App::_IAppImpl*) + 0x129 (0xb4517779) [/usr/lib/osp/libosp-appfw.so] + 0x101779 14: Tizen::App::UiApp::Execute(Tizen::App::UiApp* (*)(), Tizen::Base::Collection::IList const*) + 0xa1 (0xb3bd7721) [/usr/lib/osp/libosp-uifw.so] + 0xa1a721 15: OspMain + 0x15a (0xb23e9e8a) [/opt/apps/vd84JCg9vN/bin/QRDemo.exe] + 0x7e8a 16: main + 0x25a (0xb71f2f4a) [/opt/apps/vd84JCg9vN/bin/QRDemo] + 0xf4a 17: __launchpad_main_loop + 0x17e0 (0x804bae0) [/usr/bin/launchpad_preloading_preinitializing_daemon] + 0x804bae0 18: main + 0x685 (0x804ca25) [/usr/bin/launchpad_preloading_preinitializing_daemon] + 0x804ca25 19: __libc_start_main + 0xe6 (0xb72a7da6) [/lib/libc.so.6] + 0x16da6 20: (0x8049e01) [/usr/bin/launchpad_preloading_preinitializing_daemon] + 0x8049e01 End of Call Stack Package Information Package Name: vd84JCg9vN.QRDemo Package ID : vd84JCg9vN 
Version: 1.0.0 Package Type: tpk App Name: QRDemo App ID: vd84JCg9vN.QRDemo Type: Application Categories: (NULL)
using Android.Content;
using Android.Graphics;
using Android.Support.Design.Widget;
using Android.Support.V4.View;
using Android.Views;
using Android.Widget;
using eShopOnContainers.Core.Controls;
using eShopOnContainers.Droid.Extensions;
using eShopOnContainers.Droid.Renderers;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Xamarin.Forms;
using Xamarin.Forms.Platform.Android;
using Xamarin.Forms.Platform.Android.AppCompat;

[assembly: ExportRenderer(typeof(TabbedPage), typeof(CustomTabbedPageRenderer))]
namespace eShopOnContainers.Droid.Renderers
{
    /// <summary>
    /// TabbedPage renderer that swaps the stock tab icons for custom <see cref="ImageView"/>s
    /// and overlays a <see cref="BadgeView"/> on every tab. Badge text and color are driven
    /// by the CustomTabbedPage.BadgeText / BadgeColor attached properties.
    /// </summary>
    public class CustomTabbedPageRenderer : TabbedPageRenderer
    {
        // Small delay (ms) so the native tab view exists before a badge is attached to it.
        private const int DelayBeforeTabAdded = 10;

        // Maps each Forms child page to the badge shown on its native tab.
        protected readonly Dictionary<Element, BadgeView> BadgeViews = new Dictionary<Element, BadgeView>();

        private TabLayout _tabLayout;
        private LinearLayout _tabStrip;
        private ViewPager _viewPager;
        private TabbedPage _tabbedPage;
        private bool _firstTime = true;

        public CustomTabbedPageRenderer(Context context) : base(context)
        {
        }

        protected override void OnElementChanged(ElementChangedEventArgs<TabbedPage> e)
        {
            base.OnElementChanged(e);

            // FIX: detach handlers from the previous element so a recycled renderer does not
            // keep the old page alive or fire OnTabAdded/OnTabRemoved twice.
            if (e.OldElement != null)
            {
                e.OldElement.ChildAdded -= OnTabAdded;
                e.OldElement.ChildRemoved -= OnTabRemoved;
            }

            _tabLayout = ViewGroup.FindChildOfType<TabLayout>();
            if (_tabLayout == null)
            {
                // Without a TabLayout there is nothing to decorate; keep the stock rendering.
                Console.WriteLine("No TabLayout found. Badge not added.");
                return;
            }

            _tabbedPage = e.NewElement as TabbedPage;
            _viewPager = (ViewPager)GetChildAt(0);

            // FIX: use a named handler instead of a lambda so the subscription can be
            // removed in Dispose (the original lambda leaked). Remove-before-add keeps
            // the subscription single even if OnElementChanged runs more than once.
            _tabLayout.TabSelected -= OnTabSelected;
            _tabLayout.TabSelected += OnTabSelected;

            _tabStrip = _tabLayout.FindChildOfType<LinearLayout>();

            for (var i = 0; i < _tabLayout.TabCount; i++)
            {
                AddTabBadge(i);
            }

            Element.ChildAdded += OnTabAdded;
            Element.ChildRemoved += OnTabRemoved;
        }

        // Keeps the custom icon in sync and moves the pager when a tab is tapped.
        private void OnTabSelected(object sender, TabLayout.TabSelectedEventArgs e)
        {
            var page = _tabbedPage.Children[e.Tab.Position];

            // FIX: Icon may be null for a page without an icon; SetTab ignores empty names.
            SetTab(e.Tab, page.Icon?.File);
            _viewPager.SetCurrentItem(e.Tab.Position, false);
        }

        /// <summary>
        /// Replaces the tab's stock icon with an ImageView backed by the drawable
        /// resource whose name matches <paramref name="name"/>.
        /// </summary>
        private void SetTab(TabLayout.Tab tab, string name)
        {
            // FIX: no icon file -> nothing to look up; avoids a pointless exception below.
            if (string.IsNullOrEmpty(name))
            {
                return;
            }

            try
            {
                int id = Resources.GetIdentifier(name, "drawable", Context.PackageName);
                tab.SetIcon(null);

                // Negative top margin + bottom padding nudge the custom icon into the
                // vertical slot the stock icon occupied.
                LinearLayout.LayoutParams linearLayoutParams =
                    new LinearLayout.LayoutParams(LayoutParams.MatchParent, LayoutParams.MatchParent);
                linearLayoutParams.SetMargins(0, -48, 0, 0);

                ImageView img = new ImageView(Context);
                img.LayoutParameters = linearLayoutParams;
                img.SetPadding(0, 0, 0, 48);
                img.SetImageResource(id);
                tab.SetCustomView(img);
            }
            catch (Exception ex)
            {
                // Best-effort: a missing drawable should not crash the page.
                System.Diagnostics.Debug.WriteLine(ex.StackTrace);
            }
        }

        protected override void DispatchDraw(Canvas canvas)
        {
            base.DispatchDraw(canvas);

            // One-shot initialization: set the custom icons once the native tabs exist.
            if (!_firstTime)
            {
                return;
            }

            for (int i = 0; i < _tabLayout.TabCount; i++)
            {
                var tab = _tabLayout.GetTabAt(i);
                var page = _tabbedPage.Children[tab.Position];

                // The original special-cased TabbedPage children but both branches read
                // page.Icon.File, so a single call is equivalent. Icon may be null.
                SetTab(tab, page.Icon?.File);

                if (!string.IsNullOrEmpty(_tabbedPage.Title))
                {
                    tab.SetText(string.Empty);
                }
            }

            _firstTime = false;
        }

        /// <summary>
        /// Creates (or reuses) the badge for the tab at <paramref name="tabIndex"/> and
        /// starts listening for BadgeText/BadgeColor changes on the corresponding page.
        /// </summary>
        private void AddTabBadge(int tabIndex)
        {
            var element = Element.Children[tabIndex];
            var view = _tabLayout?.GetTabAt(tabIndex).CustomView ?? _tabStrip?.GetChildAt(tabIndex);

            var badgeView = (view as ViewGroup)?.FindChildOfType<BadgeView>();
            if (badgeView == null)
            {
                // Anchor the badge to the tab's image if it has one, otherwise its text.
                var imageView = (view as ViewGroup)?.FindChildOfType<ImageView>();
                var badgeTarget = imageView?.Drawable != null
                    ? (Android.Views.View)imageView
                    : (view as ViewGroup)?.FindChildOfType<TextView>();

                // Create badge for tab
                badgeView = new BadgeView(Context, badgeTarget);
            }

            BadgeViews[element] = badgeView;

            // Get text
            var badgeText = CustomTabbedPage.GetBadgeText(element);
            badgeView.Text = badgeText;

            // Set color if not default
            var tabColor = CustomTabbedPage.GetBadgeColor(element);
            if (tabColor != Xamarin.Forms.Color.Default)
            {
                badgeView.BadgeColor = tabColor.ToAndroid();
            }

            element.PropertyChanged += OnTabbedPagePropertyChanged;
        }

        /// <summary>Pushes BadgeText/BadgeColor attached-property changes into the native badge.</summary>
        protected virtual void OnTabbedPagePropertyChanged(object sender, System.ComponentModel.PropertyChangedEventArgs e)
        {
            var element = sender as Element;
            if (element == null)
                return;

            BadgeView badgeView;
            if (!BadgeViews.TryGetValue(element, out badgeView))
            {
                return;
            }

            if (e.PropertyName == CustomTabbedPage.BadgeTextProperty.PropertyName)
            {
                badgeView.Text = CustomTabbedPage.GetBadgeText(element);
                return;
            }

            if (e.PropertyName == CustomTabbedPage.BadgeColorProperty.PropertyName)
            {
                badgeView.BadgeColor = CustomTabbedPage.GetBadgeColor(element).ToAndroid();
            }
        }

        private void OnTabRemoved(object sender, ElementEventArgs e)
        {
            e.Element.PropertyChanged -= OnTabbedPagePropertyChanged;
            BadgeViews.Remove(e.Element);
        }

        // async void is acceptable here only because this is a top-level event handler.
        private async void OnTabAdded(object sender, ElementEventArgs e)
        {
            // Give the platform a moment to create the native tab view first.
            await Task.Delay(DelayBeforeTabAdded);

            var page = e.Element as Page;
            if (page == null)
                return;

            var tabIndex = Element.Children.IndexOf(page);
            AddTabBadge(tabIndex);
        }

        protected override void Dispose(bool disposing)
        {
            if (Element != null)
            {
                foreach (var tab in Element.Children)
                {
                    tab.PropertyChanged -= OnTabbedPagePropertyChanged;
                }

                Element.ChildRemoved -= OnTabRemoved;
                Element.ChildAdded -= OnTabAdded;
                BadgeViews.Clear();
            }

            // FIX: drop the TabSelected subscription taken in OnElementChanged; the
            // original subscribed a lambda that could never be removed.
            if (_tabLayout != null)
            {
                _tabLayout.TabSelected -= OnTabSelected;
            }

            base.Dispose(disposing);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.ComponentModel.Composition.Factories;
using System.ComponentModel.Composition.Primitives;
using System.UnitTesting;
using Xunit;

namespace System.ComponentModel.Composition
{
    // Unit tests for the MEF Export type. Naming convention: "ConstructorN_..." refers
    // to the N-th Export constructor overload:
    //   1: Export()                              (protected, via NoOverridesExport)
    //   2: Export(ExportDefinition, Func<object>)
    //   3: Export(string, Func<object>)
    //   4: Export(string, IDictionary<string, object>, Func<object>)
    // The explicit casts on null arguments (e.g. (Func<object>)null) are deliberate:
    // they pin which overload each test exercises.
    public class ExportTests
    {
        [Fact]
        public void Constructor1_ShouldNotThrow()
        {
            // The protected parameterless constructor must be usable by derived types.
            new NoOverridesExport();
        }

        // --- null exported-value getter is rejected by every overload that takes one ---

        [Fact]
        public void Constructor2_NullAsExportedValueGetterArgument_ShouldThrowArgumentNull()
        {
            var definition = ExportDefinitionFactory.Create();

            Assert.Throws<ArgumentNullException>("exportedValueGetter", () =>
            {
                new Export(definition, (Func<object>)null);
            });
        }

        [Fact]
        public void Constructor3_NullAsExportedValueGetterArgument_ShouldThrowArgumentNull()
        {
            Assert.Throws<ArgumentNullException>("exportedValueGetter", () =>
            {
                new Export("ContractName", (Func<object>)null);
            });
        }

        [Fact]
        public void Constructor4_NullAsExportedValueGetterArgument_ShouldThrowArgumentNull()
        {
            var metadata = new Dictionary<string, object>();

            Assert.Throws<ArgumentNullException>("exportedValueGetter", () =>
            {
                new Export("ContractName", metadata, (Func<object>)null);
            });
        }

        // --- definition argument validation and propagation ---

        [Fact]
        public void Constructor2_NullAsDefinitionArgument_ShouldThrowArgumentNull()
        {
            Assert.Throws<ArgumentNullException>("definition", () =>
            {
                new Export((ExportDefinition)null, () => null);
            });
        }

        [Fact]
        public void Constructor2_DefinitionAsDefinitionArgument_ShouldSetDefinitionProperty()
        {
            var definition = ExportDefinitionFactory.Create();

            var export = new Export(definition, () => null);

            // Same instance, not an equal copy.
            Assert.Same(definition, export.Definition);
        }

        // --- contract-name argument validation and propagation ---

        [Fact]
        public void Constructor3_NullAsContractNameArgument_ShouldThrowArgumentNull()
        {
            Assert.Throws<ArgumentNullException>("contractName", () =>
            {
                new Export((string)null, () => null);
            });
        }

        [Fact]
        public void Constructor4_NullAsContractNameArgument_ShouldThrowArgumentNull()
        {
            Assert.Throws<ArgumentNullException>("contractName", () =>
            {
                new Export((string)null, new Dictionary<string, object>(), () => null);
            });
        }

        [Fact]
        public void Constructor3_EmptyStringAsContractNameArgument_ShouldThrowArgument()
        {
            Assert.Throws<ArgumentException>("contractName", () =>
            {
                new Export(string.Empty, () => null);
            });
        }

        [Fact]
        public void Constructor4_EmptyStringAsContractNameArgument_ShouldThrowArgument()
        {
            Assert.Throws<ArgumentException>("contractName", () =>
            {
                new Export(string.Empty, new Dictionary<string, object>(), () => null);
            });
        }

        [Fact]
        public void Constructor3_ValueAsContractNameArgument_ShouldSetDefinitionContractNameProperty()
        {
            var expectations = Expectations.GetContractNames();

            foreach (var e in expectations)
            {
                var export = new Export(e, () => null);

                Assert.Equal(e, export.Definition.ContractName);
            }
        }

        [Fact]
        public void Constructor4_ValueAsContractNameArgument_ShouldSetDefinitionContractNameProperty()
        {
            var expectations = Expectations.GetContractNames();

            foreach (var e in expectations)
            {
                var export = new Export(e, new Dictionary<string, object>(), () => null);

                Assert.Equal(e, export.Definition.ContractName);
            }
        }

        // --- Export.Metadata: defaults, read-only wrapping, and propagation ---

        [Fact]
        public void Constructor3_ShouldSetMetadataPropertyToEmptyDictionary()
        {
            var export = new Export("ContractName", () => null);

            Assert.Empty(export.Metadata);
        }

        [Fact]
        public void Constructor4_NullAsMetadataArgument_ShouldSetMetadataPropertyToEmptyDictionary()
        {
            // Null metadata is normalized to an empty dictionary rather than rejected.
            var export = new Export("ContractName", (IDictionary<string, object>)null, () => null);

            Assert.Empty(export.Metadata);
        }

        [Fact]
        public void Constructor3_NullAsMetadataArgument_ShouldSetMetadataPropertyToReadOnlyDictionary()
        {
            var export = new Export("ContractName", () => null);

            ExceptionAssert.Throws<NotSupportedException>(() =>
            {
                export.Metadata["Value"] = "Value";
            });
        }

        [Fact]
        public void Constructor4_NullAsMetadataArgument_ShouldSetMetadataPropertyToReadOnlyDictionary()
        {
            var export = new Export("ContractName", (IDictionary<string, object>)null, () => null);

            ExceptionAssert.Throws<NotSupportedException>(() =>
            {
                export.Metadata["Value"] = "Value";
            });
        }

        [Fact]
        public void Constructor4_WritableDictionaryAsMetadataArgument_ShouldSetMetadataPropertyToReadOnlyDictionary()
        {
            // Even a writable caller-supplied dictionary must surface as read-only.
            var export = new Export("ContractName", new Dictionary<string, object>(), () => null);

            ExceptionAssert.Throws<NotSupportedException>(() =>
            {
                export.Metadata["Value"] = "Value";
            });
        }

        [Fact]
        public void Constructor4_DictionaryAsMetadataArgument_ShouldSetMetadataProperty()
        {
            var expectations = Expectations.GetMetadata();

            foreach (var e in expectations)
            {
                var export = new Export("ContractName", e, () => null);

                EnumerableAssert.AreEqual(e, export.Metadata);
            }
        }

        // --- Export.Definition.Metadata mirrors the same contract as Export.Metadata ---

        [Fact]
        public void Constructor3_ShouldSetDefinitionMetadataPropertyToEmptyDictionary()
        {
            var export = new Export("ContractName", () => null);

            Assert.Empty(export.Definition.Metadata);
        }

        [Fact]
        public void Constructor4_NullAsMetadataArgument_ShouldSetDefinitionMetadataPropertyToEmptyDictionary()
        {
            var export = new Export("ContractName", (IDictionary<string, object>)null, () => null);

            Assert.Empty(export.Definition.Metadata);
        }

        [Fact]
        public void Constructor3_ShouldSetDefinitionMetadataPropertyToReadOnlyDictionary()
        {
            var export = new Export("ContractName", () => null);

            ExceptionAssert.Throws<NotSupportedException>(() =>
            {
                export.Definition.Metadata["Value"] = "Value";
            });
        }

        [Fact]
        public void Constructor4_NullAsMetadataArgument_ShouldSetDefinitionMetadataPropertyToReadOnlyDictionary()
        {
            var export = new Export("ContractName", (IDictionary<string, object>)null, () => null);

            ExceptionAssert.Throws<NotSupportedException>(() =>
            {
                export.Definition.Metadata["Value"] = "Value";
            });
        }

        [Fact]
        public void Constructor4_WritableDictionaryAsMetadataArgument_ShouldSetDefinitionMetadataPropertyToReadOnlyDictionary()
        {
            var export = new Export("ContractName", new Dictionary<string, object>(), () => null);

            ExceptionAssert.Throws<NotSupportedException>(() =>
            {
                export.Definition.Metadata["Value"] = "Value";
            });
        }

        [Fact]
        public void Constructor4_DictionaryAsMetadataArgument_ShouldSetDefinitionMetadataProperty()
        {
            var expectations = Expectations.GetMetadata();

            foreach (var e in expectations)
            {
                var export = new Export("ContractName", e, () => null);

                EnumerableAssert.AreEqual(e, export.Definition.Metadata);
            }
        }

        // --- Export.Value forwards the getter's result, including null ---

        [Fact]
        public void Constructor2_FuncReturningAStringAsExportedValueGetter_ShouldBeReturnedByGetExportedValue()
        {
            var definition = ExportDefinitionFactory.Create();

            var export = new Export(definition, () => "Value");

            Assert.Equal("Value", export.Value);
        }

        [Fact]
        public void Constructor3_FuncReturningAStringAsExportedValueGetter_ShouldBeReturnedByGetExportedValue()
        {
            var export = new Export("ContractName", () => "Value");

            Assert.Equal("Value", export.Value);
        }

        [Fact]
        public void Constructor4_FuncReturningAStringAsExportedValueGetter_ShouldBeReturnedByGetExportedValue()
        {
            var export = new Export("ContractName", new Dictionary<string, object>(), () => "Value");

            Assert.Equal("Value", export.Value);
        }

        [Fact]
        public void Constructor2_FuncReturningNullAsExportedValueGetter_ShouldBeReturnedByGetExportedValue()
        {
            var definition = ExportDefinitionFactory.Create();

            var export = new Export(definition, () => null);

            Assert.Null(export.Value);
        }

        [Fact]
        public void Constructor3_FuncReturningNullAsExportedValueGetter_ShouldBeReturnedByGetExportedValue()
        {
            var export = new Export("ContractName", () => null);

            Assert.Null(export.Value);
        }

        [Fact]
        public void Constructor4_FuncReturningNullAsExportedValueGetter_ShouldBeReturnedByGetExportedValue()
        {
            var export = new Export("ContractName", new Dictionary<string, object>(), () => null);

            Assert.Null(export.Value);
        }

        // --- behavior of derived Export types ---

        [Fact]
        public void Metadata_DerivedExportDefinition_ShouldReturnDefinitionMetadata()
        {
            // Export.Metadata is expected to delegate to the overridden Definition.
            var expectations = Expectations.GetMetadata();

            foreach (var e in expectations)
            {
                var definition = ExportDefinitionFactory.Create("ContractName", e);

                var export = new DerivedExport(definition);

                EnumerableAssert.AreEqual(e, export.Metadata);
            }
        }

        [Fact]
        public void Definition_WhenNotOverridden_ShouldThrowNotImplemented()
        {
            var export = new NoOverridesExport();

            ExceptionAssert.Throws<NotImplementedException>(() =>
            {
                var definition = export.Definition;
            });
        }

        [Fact]
        public void Metadata_WhenDefinitionNotOverridden_ShouldThrowNotImplemented()
        {
            // Metadata reads through Definition, so it surfaces the same exception.
            var export = new NoOverridesExport();

            ExceptionAssert.Throws<NotImplementedException>(() =>
            {
                var definition = export.Metadata;
            });
        }

        [Fact]
        public void GetExportedValue_WhenGetExportedValueCoreNotOverridden_ShouldThrowNotImplemented()
        {
            var export = new NoOverridesExport();

            ExceptionAssert.Throws<NotImplementedException>(() =>
            {
                var value = export.Value;
            });
        }

        [Fact]
        public void GetExportedValue_ShouldCacheExportedValueGetter()
        {
            // The getter must run exactly once; later reads return the cached value.
            int count = 0;
            var export = new Export("ContractName", () =>
            {
                count++;
                return count;
            });

            Assert.Equal(1, export.Value);
            Assert.Equal(1, export.Value);
            Assert.Equal(1, export.Value);
        }

        [Fact]
        public void GetExportedValue_ShouldCacheOverrideGetExportedValueCore()
        {
            // Same caching guarantee when the value comes from GetExportedValueCore.
            int count = 0;
            var export = new DerivedExport(() =>
            {
                count++;
                return count;
            });

            Assert.Equal(1, export.Value);
            Assert.Equal(1, export.Value);
            Assert.Equal(1, export.Value);
        }

        [Fact]
        public void GetExportedValue_ThrowingFuncAsObjectGetterArgument_ShouldThrow()
        {
            var exceptionToThrow = new Exception();

            var export = new Export("ContractName", new Dictionary<string, object>(), () =>
            {
                throw exceptionToThrow;
            });

            // RetryMode.Retry: the getter's exception must be rethrown on every access,
            // i.e. a failed evaluation is not cached as a result.
            ExceptionAssert.Throws(exceptionToThrow, RetryMode.Retry, () =>
            {
                var value = export.Value;
            });
        }

        // Derived Export that overrides nothing; used to probe the base-class
        // NotImplementedException defaults.
        private class NoOverridesExport : Export
        {
        }

        // Derived Export that supplies either a Definition or a value getter,
        // used to test delegation through the protected extension points.
        private class DerivedExport : Export
        {
            private readonly Func<object> _exportedValueGetter;
            private readonly ExportDefinition _definition;

            public DerivedExport(ExportDefinition definition)
            {
                _definition = definition;
            }

            public DerivedExport(Func<object> exportedValueGetter)
            {
                _exportedValueGetter = exportedValueGetter;
            }

            public override ExportDefinition Definition
            {
                get { return _definition; }
            }

            protected override object GetExportedValueCore()
            {
                return _exportedValueGetter();
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Runtime.CompilerServices;
using System.IO;
using System.Collections;
using System.Globalization;
using System.Text;
using System.Threading;
using Xunit;

// Legacy test harness (ported to xUnit) exercising FileInfo.Attributes get/set:
// missing-file behavior, then setting Hidden / System / Normal / Temporary /
// ReadOnly|Archive on a real temp file. Errors are accumulated in a counter and
// asserted once at the end, preserving the original harness structure.
public class FileInfo_get_Attributes
{
    public static String s_strActiveBugNums = "38078";
    public static String s_strDtTmVer = "2000/05/09 11:28";
    public static String s_strClassMethod = "File.Directory()";
    public static String s_strTFName = "get_Attributes.cs";
    public static String s_strTFPath = Directory.GetCurrentDirectory();

    [Fact]
    public static void runTest()
    {
        String strLoc = "Loc_000oo";
        String strValue = String.Empty;
        int iCountErrors = 0;
        int iCountTestcases = 0;
        try
        {
            ///////////////////////// START TESTS ////////////////////////////
            ///////////////////////////////////////////////////////////////////
            // Random file name keeps parallel test runs from colliding.
            String filName = Path.Combine(TestInfo.CurrentDirectory, Path.GetRandomFileName());
            FileInfo fil2;
            /*
              ReadOnly = 0x1,
              Hidden = 0x2,
              System = 0x4,
              Directory = 0x10,
              Archive = 0x20,
              Encrypted = 0x40,
              Normal = 0x80,
              Temporary = 0x100,
              SparseFile = 0x200,
              ReparsePoint = 0x400,
              Compressed = 0x800,
              Offline = 0x1000,
              NotContentIndexed = 0x2000
            */
            // Pre-test cleanup: clear attributes first so a leftover ReadOnly
            // flag cannot make the Delete fail.
            if (File.Exists(filName))
            {
                fil2 = new FileInfo(filName);
                fil2.Attributes = new FileAttributes();
                File.Delete(filName);
            }

            // [] File does not exist
            //--------------------------------------------------------
            strLoc = "loc_2yg8c";

            fil2 = new FileInfo(Path.Combine(TestInfo.CurrentDirectory, "FileDoesNotExist"));
            iCountTestcases++;
            try
            {
                // Setting attributes on a non-existent file must throw.
                fil2.Attributes = new FileAttributes();
                iCountErrors++;
                printerr("Error_27t8b! Expected exception not thrown");
            }
            catch (FileNotFoundException)
            {
                // Expected.
            }
            catch (Exception exc)
            {
                iCountErrors++;
                printerr("Error_21409! Incorrect exception thrown, exc==" + exc.ToString());
            }
            //-----------------------------------------------------------------

            File.Create(filName).Dispose();

            /*
            // [] Invalid enum value
            //-----------------------------------------------------------------
            strLoc = "Loc_298g8";

            fil2 = new FileInfo(filName);
            iCountTestcases++;
            try {
                fil2.Attributes = (FileAttributes)(-1);
                iCountErrors++;
                printerr( "Error_28g8b! Expected exception not thrown");
            } catch (ArgumentException aexc) {
            } catch (Exception exc) {
                iCountErrors++;
                printerr( "Error_t94u9! Incorrect exception thrown, exc=="+exc.ToString());
            }
            //-----------------------------------------------------------------
            */

            try
            {
                // [] valid data
                //-----------------------------------------------------------------
                strLoc = "Loc_48yx9";

                fil2 = new FileInfo(filName);
                fil2.Attributes = FileAttributes.Hidden;
                iCountTestcases++;
#if TEST_WINRT  // WinRT doesn't support hidden
                if ((fil2.Attributes & FileAttributes.Hidden) != 0)
                {
#else
                if ((fil2.Attributes & FileAttributes.Hidden) == 0 && Interop.IsWindows) // setting Hidden not support on Unix
                {
#endif
                    iCountErrors++;
                    printerr("ERror_2g985! Hidden not set");
                }
                fil2.Refresh();
                fil2.Attributes = FileAttributes.System;
                iCountTestcases++;
                fil2.Refresh();
#if TEST_WINRT  // WinRT doesn't support system
                if ((fil2.Attributes & FileAttributes.System) == FileAttributes.System)
                {
#else
                if ((fil2.Attributes & FileAttributes.System) != FileAttributes.System && Interop.IsWindows) // setting System not support on Unix
                {
#endif
                    iCountErrors++;
                    printerr("Error_298g7! System not set");
                }
                fil2.Attributes = FileAttributes.Normal;
                fil2.Refresh();
                iCountTestcases++;
                if ((fil2.Attributes & FileAttributes.Normal) != FileAttributes.Normal)
                {
                    // NOTE: brace-less nested ifs below are intentional legacy
                    // structure — the error block only runs when all active
                    // conditions hold.
                    if ((fil2.Attributes & FileAttributes.Compressed) == 0 && Interop.IsWindows) // setting Compressed not support on Unix
#if TEST_WINRT
                    if ((fil2.Attributes & FileAttributes.Archive) == 0)
#endif
                    {
                        iCountErrors++;
                        printerr("Error_286b7! Normal not set");
                    }
                }
                fil2.Attributes = FileAttributes.Temporary;
                fil2.Refresh();
                iCountTestcases++;
                if ((fil2.Attributes & FileAttributes.Temporary) == 0 && Interop.IsWindows) // setting Temporary not support on Unix
                {
                    iCountErrors++;
                    printerr("Error_87tg8! Temporary not set");
                }
                //-----------------------------------------------------------------

                // []
                //-----------------------------------------------------------------
                strLoc = "Loc_29gy7";

                fil2 = new FileInfo(filName);
                fil2.Attributes = FileAttributes.Archive;
                fil2.Refresh();
                fil2.Attributes = FileAttributes.ReadOnly | FileAttributes.Archive; // setting Archive not support on Unix
                fil2.Refresh();
                iCountTestcases++;
                if ((fil2.Attributes & FileAttributes.ReadOnly) != FileAttributes.ReadOnly)
                {
                    iCountErrors++;
                    printerr("Error_g58y8! ReadOnly attribute not set");
                }
                iCountTestcases++;
                if ((fil2.Attributes & FileAttributes.Archive) != FileAttributes.Archive && Interop.IsWindows) // setting Archive not support on Unix
                {
                    iCountErrors++;
                    printerr("Error_2g78b! Archive attribute not set");
                }
                // Clear ReadOnly so the cleanup Delete below can succeed.
                fil2.Attributes = new FileAttributes();
                //-----------------------------------------------------------------
            }
            catch (Exception exc)
            {
                iCountErrors++;
                printerr("Error_284y8! Unexpected exception thrown, bug 29808, exc==" + exc.ToString());
            }

            // Post-test cleanup mirrors the pre-test cleanup.
            if (File.Exists(filName))
            {
                fil2 = new FileInfo(filName);
                fil2.Attributes = new FileAttributes();
                File.Delete(filName);
            }
            ///////////////////////////////////////////////////////////////////
            /////////////////////////// END TESTS /////////////////////////////
        }
        catch (Exception exc_general)
        {
            ++iCountErrors;
            Console.WriteLine("Error Err_8888yyy! strLoc==" + strLoc + ", exc_general==" + exc_general.ToString());
        }

        //// Finish Diagnostics
        if (iCountErrors != 0)
        {
            Console.WriteLine("FAiL! " + s_strTFName + " ,iCountErrors==" + iCountErrors.ToString());
        }

        Assert.Equal(0, iCountErrors);
    }

    // Logs a test failure together with the calling member, file and line,
    // supplied automatically by the caller-info attributes.
    public static void printerr(String err, [CallerMemberName] string memberName = "", [CallerFilePath] string filePath = "", [CallerLineNumber] int lineNumber = 0)
    {
        Console.WriteLine("ERROR: ({0}, {1}, {2}) {3}", memberName, filePath, lineNumber, err);
    }
}
#region Copyright
//
// Copyright (c) 2015
// by Satrabel
//
#endregion

#region Using Statements

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using DotNetNuke.Web.Api;
using Newtonsoft.Json.Linq;
using System.Web.Hosting;
using System.IO;
using DotNetNuke.Instrumentation;
using Satrabel.OpenContent.Components;
using DotNetNuke.Security;
using Satrabel.OpenContent.Components.Json;
using DotNetNuke.Entities.Modules;
using DotNetNuke.Entities.Tabs;
using DotNetNuke.Common;
using DotNetNuke.Services.FileSystem;
using System.Drawing;
using System.Drawing.Imaging;
using DotNetNuke.Entities.Content.Common;
using DotNetNuke.Entities.Modules.Definitions;
using DotNetNuke.Entities.Portals;
using Satrabel.OpenContent.Components.TemplateHelpers;
using System.Text.RegularExpressions;

#endregion

namespace Satrabel.OpenContent.Components
{
    /// <summary>
    /// Web API controller exposing DNN entity lookups (tabs, files, folders, images)
    /// and image crop/resize operations used by the OpenContent edit UI.
    /// All endpoints require Edit access on the module and a valid anti-forgery token.
    /// </summary>
    public class DnnEntitiesAPIController : DnnApiController
    {
        private static readonly ILog Logger = LoggerSource.Instance.GetLogger(typeof(DnnEntitiesAPIController));

        /// <summary>
        /// Returns portal tabs whose name contains <paramref name="q"/> (case insensitive),
        /// excluding children of the Admin tab, as {name, value} pairs where value is the
        /// tab's relative URL for locale <paramref name="l"/>.
        /// </summary>
        [ValidateAntiForgeryToken]
        [DnnModuleAuthorize(AccessLevel = SecurityAccessLevel.Edit)]
        [HttpGet]
        public HttpResponseMessage Tabs(string q, string l)
        {
            try
            {
                var tabs = TabController.GetTabsBySortOrder(PortalSettings.PortalId)
                    .Where(t => t.ParentId != PortalSettings.AdminTabId)
                    .Where(t => t.TabName.ToLower().Contains(q.ToLower()))
                    .Select(t => new
                    {
                        name = t.TabName + " (" + t.TabPath.Replace("//", "/").Replace("/" + t.TabName + "/", "") + " " + l + ")",
                        value = (new System.Uri(NavigateUrl(t, l, PortalSettings))).PathAndQuery
                    });
                return Request.CreateResponse(HttpStatusCode.OK, tabs);
            }
            catch (Exception exc)
            {
                Logger.Error(exc);
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
            }
        }

        // Builds the absolute navigation URL for a tab in the given culture.
        private static string NavigateUrl(TabInfo t, string culture, PortalSettings portalsettings)
        {
            return Globals.NavigateURL(t.TabID, false, portalsettings, "", culture);
        }

        /// <summary>
        /// Returns image files under portal folder <paramref name="d"/> (recursive) whose
        /// name contains <paramref name="q"/> ("*" matches all), as {value, name} pairs
        /// where value is the portal-relative file path.
        /// </summary>
        [ValidateAntiForgeryToken]
        [DnnModuleAuthorize(AccessLevel = SecurityAccessLevel.Edit)]
        [HttpGet]
        public HttpResponseMessage Images(string q, string d)
        {
            try
            {
                var folderManager = FolderManager.Instance;
                var portalFolder = folderManager.GetFolder(PortalSettings.PortalId, d ?? "");
                var files = folderManager.GetFiles(portalFolder, true);
                files = files.Where(f => IsImageFile(f));
                if (q != "*")
                {
                    files = files.Where(f => f.FileName.ToLower().Contains(q.ToLower()));
                }
                var res = files.Select(f => new
                {
                    value = PortalSettings.HomeDirectory + f.RelativePath,
                    name = f.FileName + " (" + f.Folder + ")"
                });
                return Request.CreateResponse(HttpStatusCode.OK, res);
            }
            catch (Exception exc)
            {
                Logger.Error(exc);
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
            }
        }

        /// <summary>
        /// Imageses the lookup.
        /// </summary>
        /// <param name="q">The string that should be Contained in the name of the file (case insensitive). Use * to get all the files.</param>
        /// <param name="d">The Folder path to retrieve</param>
        /// <returns>Up to 1000 {value, url, text} items; value is the file id, url a 40x40 thumbnail.</returns>
        [ValidateAntiForgeryToken]
        [DnnModuleAuthorize(AccessLevel = SecurityAccessLevel.Edit)]
        [HttpGet]
        public HttpResponseMessage ImagesLookup(string q, string d)
        {
            try
            {
                if (string.IsNullOrEmpty(d))
                {
                    var exc = new ArgumentException("Folder path not specified. Missing ['folder': 'FolderPath'] in optionfile? ");
                    Logger.Error(exc);
                    return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
                }
                var folderManager = FolderManager.Instance;
                var portalFolder = folderManager.GetFolder(PortalSettings.PortalId, d ?? "");
                if (portalFolder == null)
                {
                    var exc = new ArgumentException("Folder path not found. Adjust ['folder': " + d + "] in optionfile. ");
                    Logger.Error(exc);
                    return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
                }
                var files = folderManager.GetFiles(portalFolder, true);
                files = files.Where(f => IsImageFile(f));
                if (q != "*" && !string.IsNullOrEmpty(q))
                {
                    files = files.Where(f => f.FileName.ToLower().Contains(q.ToLower()));
                }
                int folderLength = d.Length;
                var res = files.Select(f => new
                {
                    value = f.FileId.ToString(),
                    url = ImageHelper.GetImageUrl(f, new Ratio(40, 40)),
                    //todo for install in application folder is dat niet voldoende ???
                    text = f.Folder.Substring(folderLength).TrimStart('/') + f.FileName
                })
                .Take(1000); // cap the payload for the autocomplete widget
                return Request.CreateResponse(HttpStatusCode.OK, res);
            }
            catch (Exception exc)
            {
                Logger.Error(exc);
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
            }
        }

        /// <summary>
        /// Returns files under folder <paramref name="d"/> (recursive), filtered by name
        /// substring <paramref name="q"/> and optional regex <paramref name="filter"/>,
        /// as {value, url, text} items.
        /// </summary>
        [ValidateAntiForgeryToken]
        [DnnModuleAuthorize(AccessLevel = SecurityAccessLevel.Edit)]
        [HttpGet]
        public HttpResponseMessage FilesLookup(string q, string d, string filter = "")
        {
            try
            {
                var folderManager = FolderManager.Instance;
                var fileManager = FileManager.Instance;
                var portalFolder = folderManager.GetFolder(PortalSettings.PortalId, d ?? "");
                var files = folderManager.GetFiles(portalFolder, true);
                if (q != "*" && !string.IsNullOrEmpty(q))
                {
                    files = files.Where(f => f.FileName.ToLower().Contains(q.ToLower()));
                }
                if (!string.IsNullOrEmpty(filter))
                {
                    var rx = new Regex(filter, RegexOptions.IgnoreCase);
                    files = files.Where(f => rx.IsMatch(f.FileName));
                }
                int folderLength = (d == null) ? 0 : d.Length;
                var res = files.Select(f => new
                {
                    value = f.FileId.ToString(),
                    url = fileManager.GetUrl(f),
                    text = f.Folder.Substring(folderLength).TrimStart('/') + f.FileName
                });
                return Request.CreateResponse(HttpStatusCode.OK, res);
            }
            catch (Exception exc)
            {
                Logger.Error(exc);
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
            }
        }

        /// <summary>
        /// Returns all descendant folders of <paramref name="d"/>, filtered by name
        /// substring <paramref name="q"/> and optional regex <paramref name="filter"/>,
        /// as {value, url, text} items (value is the folder id).
        /// </summary>
        [ValidateAntiForgeryToken]
        [DnnModuleAuthorize(AccessLevel = SecurityAccessLevel.Edit)]
        [HttpGet]
        public HttpResponseMessage FoldersLookup(string q, string d, string filter = "")
        {
            try
            {
                IEnumerable<IFolderInfo> folders = new List<IFolderInfo>();
                var folderManager = FolderManager.Instance;
                var portalFolder = folderManager.GetFolder(PortalSettings.PortalId, d ?? "");
                if (portalFolder != null)
                {
                    folders = GetFolders(folderManager, portalFolder);
                    if (q != "*" && !string.IsNullOrEmpty(q))
                    {
                        folders = folders.Where(f => f.FolderName.ToLower().Contains(q.ToLower()));
                    }
                    if (!string.IsNullOrEmpty(filter))
                    {
                        var rx = new Regex(filter, RegexOptions.IgnoreCase);
                        folders = folders.Where(f => rx.IsMatch(f.FolderName));
                    }
                }
                int folderLength = (d == null) ? 0 : d.Length;
                var res = folders.Select(f => new
                {
                    value = f.FolderID.ToString(),
                    url = f.FolderPath,
                    text = f.FolderPath.Substring(folderLength).Trim('/')
                });
                return Request.CreateResponse(HttpStatusCode.OK, res);
            }
            catch (Exception exc)
            {
                Logger.Error(exc);
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
            }
        }

        // Depth-first recursive flattening of the folder tree below portalFolder.
        private static IEnumerable<IFolderInfo> GetFolders(IFolderManager folderManager, IFolderInfo portalFolder)
        {
            var folders = new List<IFolderInfo>();
            foreach (var item in folderManager.GetFolders(portalFolder))
            {
                folders.Add(item);
                folders.AddRange(GetFolders(folderManager, item));
            }
            return folders;
        }

        /// <summary>
        /// Returns portal tabs (excluding Admin children) filtered by name substring
        /// <paramref name="q"/>, as {value, text, url} items for locale <paramref name="l"/>.
        /// </summary>
        [ValidateAntiForgeryToken]
        [DnnModuleAuthorize(AccessLevel = SecurityAccessLevel.Edit)]
        [HttpGet]
        public HttpResponseMessage TabsLookup(string q, string l)
        {
            try
            {
                var tabs = TabController.GetTabsBySortOrder(PortalSettings.PortalId)
                    .Where(t => t.ParentId != PortalSettings.AdminTabId);
                if (q != "*" && !string.IsNullOrEmpty(q))
                {
                    tabs = tabs.Where(t => t.TabName.ToLower().Contains(q.ToLower()));
                }
                var tabsDtos = tabs.Select(t => new
                {
                    value = t.TabID.ToString(),
                    text = t.TabName + " (" + t.TabPath.Replace("//", "/").Replace("/" + t.TabName + "/", "") + " " + l + ")",
                    url = (new System.Uri(NavigateUrl(t, l, PortalSettings))).PathAndQuery
                });
                return Request.CreateResponse(HttpStatusCode.OK, tabsDtos);
            }
            catch (Exception exc)
            {
                Logger.Error(exc);
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
            }
        }

        /// <summary>Returns the URL for the file with the given id.</summary>
        [ValidateAntiForgeryToken]
        [DnnModuleAuthorize(AccessLevel = SecurityAccessLevel.Edit)]
        [HttpGet]
        public HttpResponseMessage FileUrl(int fileid)
        {
            try
            {
                var fileManager = FileManager.Instance;
                IFileInfo File = fileManager.GetFile(fileid);
                return Request.CreateResponse(HttpStatusCode.OK, fileManager.GetUrl(File));
            }
            catch (Exception exc)
            {
                Logger.Error(exc);
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
            }
        }

        /// <summary>
        /// Returns files under folder <paramref name="d"/> (recursive) whose name contains
        /// <paramref name="q"/> ("*" matches all), as {value, name} pairs.
        /// </summary>
        [ValidateAntiForgeryToken]
        [DnnModuleAuthorize(AccessLevel = SecurityAccessLevel.Edit)]
        [HttpGet]
        public HttpResponseMessage Files(string q, string d)
        {
            try
            {
                var folderManager = FolderManager.Instance;
                var portalFolder = folderManager.GetFolder(PortalSettings.PortalId, d ?? "");
                var files = folderManager.GetFiles(portalFolder, true);
                if (q != "*")
                {
                    files = files.Where(f => f.FileName.ToLower().Contains(q.ToLower()));
                }
                var res = files.Select(f => new
                {
                    value = PortalSettings.HomeDirectory + f.RelativePath,
                    name = f.FileName + " (" + f.Folder + ")"
                });
                return Request.CreateResponse(HttpStatusCode.OK, res);
            }
            catch (Exception exc)
            {
                Logger.Error(exc);
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
            }
        }

        // True when the file's extension appears in DNN's configured image types.
        private bool IsImageFile(IFileInfo file)
        {
            return (Globals.glbImageFileTypes + ",").IndexOf(file.Extension.ToLower().Replace(".", "") + ",") > -1;
        }

        // Maps a file's extension to the GDI+ ImageFormat used when saving crops.
        // BUGFIX: ".jpeg" previously fell through to BMP (only "jpg" was matched),
        // producing BMP bytes in a .jpeg-named file. Default remains BMP.
        private static ImageFormat GetImageFormat(IFileInfo file)
        {
            switch (file.Extension.ToLowerInvariant())
            {
                case "png":
                    return ImageFormat.Png;
                case "gif":
                    return ImageFormat.Gif;
                case "jpg":
                case "jpeg":
                    return ImageFormat.Jpeg;
                default:
                    return ImageFormat.Bmp;
            }
        }

        /// <summary>
        /// Crops (and optionally resizes) a single image. Negative crop x AND y request an
        /// auto-centered crop. The result is saved as "{name}-{id}{ext}" in the module's
        /// crop folder (or <c>cropData.cropfolder</c> when supplied) and its URL returned.
        /// </summary>
        [ValidateAntiForgeryToken]
        [DnnModuleAuthorize(AccessLevel = SecurityAccessLevel.Edit)]
        [HttpPost]
        public HttpResponseMessage CropImage(CropResizeDTO cropData)
        {
            try
            {
                var res = new CropResizeResultDTO()
                {
                    crop = new CropDTO()
                    {
                        x = cropData.crop.x,
                        y = cropData.crop.y,
                        width = cropData.crop.width,
                        height = cropData.crop.height
                    }
                };
                var folderManager = FolderManager.Instance;
                var fileManager = FileManager.Instance;
                string RawImageUrl = cropData.url;
                if (RawImageUrl.IndexOf('?') > 0)
                {
                    // Strip the query string (e.g. cache-buster) before resolving the file.
                    RawImageUrl = RawImageUrl.Substring(0, RawImageUrl.IndexOf('?'));
                }
                RawImageUrl = RawImageUrl.Replace(PortalSettings.HomeDirectory, "");
                var file = fileManager.GetFile(ActiveModule.PortalID, RawImageUrl);
                if (file == null)
                {
                    // BUGFIX: file.FileName was previously dereferenced before this null
                    // check (NullReferenceException), and the FilesStatus below would have
                    // stayed null, causing a second NRE at res.url. Fail explicitly instead.
                    var exc = new ArgumentException("File not found: " + RawImageUrl);
                    Logger.Error(exc);
                    return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
                }
                string cropfolder = "OpenContent/Files/" + ActiveModule.ModuleID;
                if (!string.IsNullOrEmpty(cropData.cropfolder))
                {
                    cropfolder = cropData.cropfolder;
                }
                var userFolder = folderManager.GetFolder(PortalSettings.PortalId, cropfolder);
                if (userFolder == null)
                {
                    userFolder = folderManager.AddFolder(PortalSettings.PortalId, cropfolder);
                }
                string newFilename = Path.GetFileNameWithoutExtension(file.FileName) + "-" + cropData.id + Path.GetExtension(file.FileName);
                // NOTE(review): Image instances are not disposed here (pre-existing);
                // confirm ImageHelper ownership semantics before adding using blocks.
                var image = Image.FromFile(file.PhysicalPath);
                Image imageCropped;
                if (cropData.crop.x < 0 && cropData.crop.y < 0) // center
                {
                    int left = 0;
                    int top = 0;
                    int width = 0;
                    int height = 0;
                    imageCropped = ImageHelper.SaveCroppedImage(image, cropData.crop.width, cropData.crop.height, out left, out top, out width, out height);
                    // Report back the crop rectangle the helper actually used.
                    res.crop.x = left;
                    res.crop.y = top;
                    res.crop.width = width;
                    res.crop.height = height;
                }
                else
                {
                    imageCropped = ImageHelper.Crop(image, cropData.crop.x, cropData.crop.y, cropData.crop.width, cropData.crop.height);
                    if (cropData.resize != null && cropData.resize.width > 0 && cropData.resize.height > 0)
                    {
                        imageCropped = ImageHelper.Resize(imageCropped, cropData.resize.width, cropData.resize.height);
                    }
                }
                Stream content = new MemoryStream();
                imageCropped.Save(content, GetImageFormat(file));
                var newFile = fileManager.AddFile(userFolder, newFilename, content, true);
                var fs = new FilesStatus()
                {
                    success = true,
                    name = newFile.FileName,
                    extension = newFile.Extension,
                    type = newFile.ContentType,
                    size = newFile.Size,
                    progress = "1.0",
                    url = FileManager.Instance.GetUrl(newFile),
                    message = "success",
                    id = newFile.FileId,
                };
                res.url = fs.url;
                return Request.CreateResponse(HttpStatusCode.OK, res);
            }
            catch (Exception exc)
            {
                Logger.Error(exc);
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
            }
        }

        /// <summary>
        /// Crops one source image into multiple named variants ("croppers"), each with its
        /// own resize target and optional explicit crop rectangle. Returns the per-key
        /// crop results; an unresolved source file yields an empty result set.
        /// </summary>
        [ValidateAntiForgeryToken]
        [DnnModuleAuthorize(AccessLevel = SecurityAccessLevel.Edit)]
        [HttpPost]
        public HttpResponseMessage CropImages(CroppersDTO cropData)
        {
            try
            {
                var res = new CroppersResultDTO();
                var folderManager = FolderManager.Instance;
                var fileManager = FileManager.Instance;
                string rawImageUrl = cropData.url;
                if (rawImageUrl.IndexOf('?') > 0)
                {
                    rawImageUrl = rawImageUrl.Substring(0, rawImageUrl.IndexOf('?'));
                }
                rawImageUrl = rawImageUrl.Replace(PortalSettings.HomeDirectory, "");
                var file = fileManager.GetFile(ActiveModule.PortalID, rawImageUrl);
                if (file != null)
                {
                    string cropfolder = "OpenContent/Files/" + ActiveModule.ModuleID;
                    if (!string.IsNullOrEmpty(cropData.cropfolder))
                    {
                        cropfolder = cropData.cropfolder;
                    }
                    var userFolder = folderManager.GetFolder(PortalSettings.PortalId, cropfolder);
                    if (userFolder == null)
                    {
                        userFolder = folderManager.AddFolder(PortalSettings.PortalId, cropfolder);
                    }
                    foreach (var cropper in cropData.croppers)
                    {
                        string key = cropper.Key;
                        string newFilename = Path.GetFileNameWithoutExtension(file.FileName) + "-" + key + Path.GetExtension(file.FileName);
                        var resizeInfo = cropper.Value;
                        CropDTO cropInfo = null;
                        if (cropData.cropdata.ContainsKey(key))
                        {
                            cropInfo = cropData.cropdata[key].cropper;
                        }
                        var cropResult = CropFile(file, newFilename, cropInfo, resizeInfo, userFolder);
                        res.cropdata.Add(key, cropResult);
                    }
                }
                return Request.CreateResponse(HttpStatusCode.OK, res);
            }
            catch (Exception exc)
            {
                Logger.Error(exc);
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, exc);
            }
        }

        // Crops one variant of `file` and stores it as `newFilename` in `userFolder`.
        // A null crop or a negative x/y requests an auto-centered crop sized by `resize`.
        // Returns the crop result, or null when no crop rectangle could be produced.
        private CropResizeResultDTO CropFile(IFileInfo file, string newFilename, CropDTO crop, ResizeDTO resize, IFolderInfo userFolder)
        {
            var cropresult = new CropResizeResultDTO();
            cropresult.crop = crop ?? new CropDTO();
            var fileManager = FileManager.Instance;
            // NOTE(review): Image instances are not disposed here (pre-existing);
            // confirm ImageHelper ownership semantics before adding using blocks.
            var image = Image.FromFile(file.PhysicalPath);
            Image imageCropped = null;
            if (crop == null || crop.x < 0 || crop.y < 0) // center
            {
                int left = 0;
                int top = 0;
                int width = 0;
                int height = 0;
                imageCropped = ImageHelper.SaveCroppedImage(image, resize.width, resize.height, out left, out top, out width, out height);
                cropresult.crop.x = left;
                cropresult.crop.y = top;
                cropresult.crop.width = width;
                cropresult.crop.height = height;
            }
            else if (crop.width > 0 && crop.height > 0) // BUGFIX: was `crop.width > 0 && crop.width > 0` — height was never validated
            {
                imageCropped = ImageHelper.Crop(image, crop.x, crop.y, crop.width, crop.height);
                if (resize != null && resize.width > 0 && resize.height > 0)
                {
                    imageCropped = ImageHelper.Resize(imageCropped, resize.width, resize.height);
                }
            }
            if (imageCropped != null)
            {
                Stream content = new MemoryStream();
                imageCropped.Save(content, GetImageFormat(file));
                var newFile = fileManager.AddFile(userFolder, newFilename, content, true);
                cropresult.url = newFile.ToUrl();
                return cropresult;
            }
            return null;
        }

        // Request payload for CropImages: per-key resize targets plus optional per-key crop data.
        public class CroppersDTO
        {
            public Dictionary<string, CroppperDTO> cropdata { get; set; }
            public Dictionary<string, ResizeDTO> croppers { get; set; }
            public string cropfolder { get; set; }
            public string url { get; set; }
        }

        // Response payload for CropImages: per-key crop results.
        public class CroppersResultDTO
        {
            public CroppersResultDTO()
            {
                cropdata = new Dictionary<string, CropResizeResultDTO>();
            }
            public Dictionary<string, CropResizeResultDTO> cropdata { get; set; }
            public string url { get; set; }
        }

        // Request payload for CropImage.
        public class CropResizeDTO
        {
            public string id { get; set; }
            public string url { get; set; }
            public CropDTO crop { get; set; }
            public ResizeDTO resize { get; set; }
            public string cropfolder { get; set; }
        }

        // Result of a single crop: the stored file's URL and the crop rectangle used.
        public class CropResizeResultDTO
        {
            public string url { get; set; }
            public CropDTO crop { get; set; }
        }

        // Per-key entry in CroppersDTO.cropdata. (Name kept as-is: part of the public JSON contract.)
        public class CroppperDTO
        {
            public string url { get; set; }
            public CropDTO cropper { get; set; }
        }

        // Crop rectangle in source-image pixels.
        public class CropDTO
        {
            public int x { get; set; }
            public int y { get; set; }
            public int width { get; set; }
            public int height { get; set; }
            public int rotate { get; set; }
        }

        // Resize target in pixels.
        public class ResizeDTO
        {
            public int width { get; set; }
            public int height { get; set; }
        }
    }
}
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on // an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. //------------------------------------------------------------------------------ // <auto-generated> // This code was generated by google-apis-code-generator 1.5.1 // C# generator version: 1.14.1 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ /** * \brief * Accelerated Mobile Pages (AMP) URL API Version v1 * * \section ApiInfo API Version Information * <table> * <tr><th>API * <td><a href='https://developers.google.com/amp/cache/'>Accelerated Mobile Pages (AMP) URL API</a> * <tr><th>API Version<td>v1 * <tr><th>API Rev<td>20160728 (574) * <tr><th>API Docs * <td><a href='https://developers.google.com/amp/cache/'> * https://developers.google.com/amp/cache/</a> * <tr><th>Discovery Name<td>acceleratedmobilepageurl * </table> * * \section ForMoreInfo For More Information * * The complete API documentation for using Accelerated Mobile Pages (AMP) URL API can be found at * <a href='https://developers.google.com/amp/cache/'>https://developers.google.com/amp/cache/</a>. 
* * For more information about the Google APIs Client Library for .NET, see * <a href='https://developers.google.com/api-client-library/dotnet/get_started'> * https://developers.google.com/api-client-library/dotnet/get_started</a> */ namespace Google.Apis.Acceleratedmobilepageurl.v1 { /// <summary>The Acceleratedmobilepageurl Service.</summary> public class AcceleratedmobilepageurlService : Google.Apis.Services.BaseClientService { /// <summary>The API version.</summary> public const string Version = "v1"; /// <summary>The discovery version used to generate this service.</summary> public static Google.Apis.Discovery.DiscoveryVersion DiscoveryVersionUsed = Google.Apis.Discovery.DiscoveryVersion.Version_1_0; /// <summary>Constructs a new service.</summary> public AcceleratedmobilepageurlService() : this(new Google.Apis.Services.BaseClientService.Initializer()) {} /// <summary>Constructs a new service.</summary> /// <param name="initializer">The service initializer.</param> public AcceleratedmobilepageurlService(Google.Apis.Services.BaseClientService.Initializer initializer) : base(initializer) { ampUrls = new AmpUrlsResource(this); } /// <summary>Gets the service supported features.</summary> public override System.Collections.Generic.IList<string> Features { get { return new string[0]; } } /// <summary>Gets the service name.</summary> public override string Name { get { return "acceleratedmobilepageurl"; } } /// <summary>Gets the service base URI.</summary> public override string BaseUri { get { return "https://acceleratedmobilepageurl.googleapis.com/"; } } /// <summary>Gets the service base path.</summary> public override string BasePath { get { return ""; } } private readonly AmpUrlsResource ampUrls; /// <summary>Gets the AmpUrls resource.</summary> public virtual AmpUrlsResource AmpUrls { get { return ampUrls; } } } ///<summary>A base abstract class for Acceleratedmobilepageurl requests.</summary> public abstract class 
AcceleratedmobilepageurlBaseServiceRequest<TResponse> : Google.Apis.Requests.ClientServiceRequest<TResponse> { ///<summary>Constructs a new AcceleratedmobilepageurlBaseServiceRequest instance.</summary> protected AcceleratedmobilepageurlBaseServiceRequest(Google.Apis.Services.IClientService service) : base(service) { } /// <summary>V1 error format.</summary> [Google.Apis.Util.RequestParameterAttribute("$.xgafv", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<XgafvEnum> Xgafv { get; set; } /// <summary>V1 error format.</summary> public enum XgafvEnum { /// <summary>v1 error format</summary> [Google.Apis.Util.StringValueAttribute("1")] Value1, /// <summary>v2 error format</summary> [Google.Apis.Util.StringValueAttribute("2")] Value2, } /// <summary>OAuth access token.</summary> [Google.Apis.Util.RequestParameterAttribute("access_token", Google.Apis.Util.RequestParameterType.Query)] public virtual string AccessToken { get; set; } /// <summary>Data format for response.</summary> /// [default: json] [Google.Apis.Util.RequestParameterAttribute("alt", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<AltEnum> Alt { get; set; } /// <summary>Data format for response.</summary> public enum AltEnum { /// <summary>Responses with Content-Type of application/json</summary> [Google.Apis.Util.StringValueAttribute("json")] Json, /// <summary>Media download with context-dependent Content-Type</summary> [Google.Apis.Util.StringValueAttribute("media")] Media, /// <summary>Responses with Content-Type of application/x-protobuf</summary> [Google.Apis.Util.StringValueAttribute("proto")] Proto, } /// <summary>OAuth bearer token.</summary> [Google.Apis.Util.RequestParameterAttribute("bearer_token", Google.Apis.Util.RequestParameterType.Query)] public virtual string BearerToken { get; set; } /// <summary>JSONP</summary> [Google.Apis.Util.RequestParameterAttribute("callback", Google.Apis.Util.RequestParameterType.Query)] public virtual 
string Callback { get; set; } /// <summary>Selector specifying which fields to include in a partial response.</summary> [Google.Apis.Util.RequestParameterAttribute("fields", Google.Apis.Util.RequestParameterType.Query)] public virtual string Fields { get; set; } /// <summary>API key. Your API key identifies your project and provides you with API access, quota, and reports. /// Required unless you provide an OAuth 2.0 token.</summary> [Google.Apis.Util.RequestParameterAttribute("key", Google.Apis.Util.RequestParameterType.Query)] public virtual string Key { get; set; } /// <summary>OAuth 2.0 token for the current user.</summary> [Google.Apis.Util.RequestParameterAttribute("oauth_token", Google.Apis.Util.RequestParameterType.Query)] public virtual string OauthToken { get; set; } /// <summary>Pretty-print response.</summary> /// [default: true] [Google.Apis.Util.RequestParameterAttribute("pp", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<bool> Pp { get; set; } /// <summary>Returns response with indentations and line breaks.</summary> /// [default: true] [Google.Apis.Util.RequestParameterAttribute("prettyPrint", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<bool> PrettyPrint { get; set; } /// <summary>Available to use for quota purposes for server-side applications. Can be any arbitrary string /// assigned to a user, but should not exceed 40 characters.</summary> [Google.Apis.Util.RequestParameterAttribute("quotaUser", Google.Apis.Util.RequestParameterType.Query)] public virtual string QuotaUser { get; set; } /// <summary>Legacy upload protocol for media (e.g. "media", "multipart").</summary> [Google.Apis.Util.RequestParameterAttribute("uploadType", Google.Apis.Util.RequestParameterType.Query)] public virtual string UploadType { get; set; } /// <summary>Upload protocol for media (e.g. 
"raw", "multipart").</summary>
[Google.Apis.Util.RequestParameterAttribute("upload_protocol", Google.Apis.Util.RequestParameterType.Query)]
public virtual string UploadProtocol { get; set; }

/// <summary>Initializes Acceleratedmobilepageurl parameter list.</summary>
protected override void InitParameters()
{
    base.InitParameters();
    // Standard Discovery query parameters shared by every request of this service.
    RequestParameters.Add(
        "$.xgafv", new Google.Apis.Discovery.Parameter
        {
            Name = "$.xgafv",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
    RequestParameters.Add(
        "access_token", new Google.Apis.Discovery.Parameter
        {
            Name = "access_token",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
    RequestParameters.Add(
        "alt", new Google.Apis.Discovery.Parameter
        {
            Name = "alt",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = "json",
            Pattern = null,
        });
    RequestParameters.Add(
        "bearer_token", new Google.Apis.Discovery.Parameter
        {
            Name = "bearer_token",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
    RequestParameters.Add(
        "callback", new Google.Apis.Discovery.Parameter
        {
            Name = "callback",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
    RequestParameters.Add(
        "fields", new Google.Apis.Discovery.Parameter
        {
            Name = "fields",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
    RequestParameters.Add(
        "key", new Google.Apis.Discovery.Parameter
        {
            Name = "key",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
    RequestParameters.Add(
        "oauth_token", new Google.Apis.Discovery.Parameter
        {
            Name = "oauth_token",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
    RequestParameters.Add(
        "pp", new Google.Apis.Discovery.Parameter
        {
            Name = "pp",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = "true",
            Pattern = null,
        });
    RequestParameters.Add(
        "prettyPrint", new Google.Apis.Discovery.Parameter
        {
            Name = "prettyPrint",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = "true",
            Pattern = null,
        });
    RequestParameters.Add(
        "quotaUser", new Google.Apis.Discovery.Parameter
        {
            Name = "quotaUser",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
    RequestParameters.Add(
        "uploadType", new Google.Apis.Discovery.Parameter
        {
            Name = "uploadType",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
    RequestParameters.Add(
        "upload_protocol", new Google.Apis.Discovery.Parameter
        {
            Name = "upload_protocol",
            IsRequired = false,
            ParameterType = "query",
            DefaultValue = null,
            Pattern = null,
        });
}
}

// NOTE(review): the classes below appear to be auto-generated from the API
// discovery document (Google APIs client generator style) — prefer
// regenerating over hand-editing.

/// <summary>The "ampUrls" collection of methods.</summary>
public class AmpUrlsResource
{
    private const string Resource = "ampUrls";

    /// <summary>The service which this resource belongs to.</summary>
    private readonly Google.Apis.Services.IClientService service;

    /// <summary>Constructs a new resource.</summary>
    public AmpUrlsResource(Google.Apis.Services.IClientService service)
    {
        this.service = service;
    }

    /// <summary>Returns AMP URL(s) and equivalent [AMP Cache URL(s)](/amp/cache/overview#amp-cache-url-
    /// format).</summary>
    /// <param name="body">The body of the request.</param>
    public virtual BatchGetRequest BatchGet(Google.Apis.Acceleratedmobilepageurl.v1.Data.BatchGetAmpUrlsRequest body)
    {
        return new BatchGetRequest(service, body);
    }

    /// <summary>Returns AMP URL(s) and equivalent [AMP Cache URL(s)](/amp/cache/overview#amp-cache-url-
    /// format).</summary>
    public class BatchGetRequest : AcceleratedmobilepageurlBaseServiceRequest<Google.Apis.Acceleratedmobilepageurl.v1.Data.BatchGetAmpUrlsResponse>
    {
        /// <summary>Constructs a new BatchGet request.</summary>
        public BatchGetRequest(Google.Apis.Services.IClientService service, Google.Apis.Acceleratedmobilepageurl.v1.Data.BatchGetAmpUrlsRequest body)
            : base(service)
        {
            Body = body;
            InitParameters();
        }

        /// <summary>Gets or sets the body of this request.</summary>
        Google.Apis.Acceleratedmobilepageurl.v1.Data.BatchGetAmpUrlsRequest Body { get; set; }

        ///<summary>Returns the body of the request.</summary>
        protected override object GetBody()
        {
            return Body;
        }

        ///<summary>Gets the method name.</summary>
        public override string MethodName
        {
            get { return "batchGet"; }
        }

        ///<summary>Gets the HTTP method.</summary>
        public override string HttpMethod
        {
            get { return "POST"; }
        }

        ///<summary>Gets the REST path.</summary>
        public override string RestPath
        {
            get { return "v1/ampUrls:batchGet"; }
        }

        /// <summary>Initializes BatchGet parameter list.</summary>
        protected override void InitParameters()
        {
            base.InitParameters();
        }
    }
}
}

namespace Google.Apis.Acceleratedmobilepageurl.v1.Data
{
    /// <summary>AMP URL response for a requested URL.</summary>
    public class AmpUrl : Google.Apis.Requests.IDirectResponseSchema
    {
        /// <summary>The AMP URL pointing to the publisher's web server.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("ampUrl")]
        public virtual string AmpUrlValue { get; set; }

        /// <summary>The [AMP Cache URL](/amp/cache/overview#amp-cache-url-format) pointing to the cached document in
        /// the Google AMP Cache.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("cdnAmpUrl")]
        public virtual string CdnAmpUrl { get; set; }

        /// <summary>The original non-AMP URL.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("originalUrl")]
        public virtual string OriginalUrl { get; set; }

        /// <summary>The ETag of the item.</summary>
        public virtual string ETag { get; set; }
    }

    /// <summary>AMP URL Error resource for a requested URL that couldn't be found.</summary>
    public class AmpUrlError : Google.Apis.Requests.IDirectResponseSchema
    {
        /// <summary>The error code of an API call.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("errorCode")]
        public virtual string ErrorCode { get; set; }

        /// <summary>An optional descriptive error message.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("errorMessage")]
        public virtual string ErrorMessage { get; set; }

        /// <summary>The original non-AMP URL.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("originalUrl")]
        public virtual string OriginalUrl { get; set; }

        /// <summary>The ETag of the item.</summary>
        public virtual string ETag { get; set; }
    }

    /// <summary>AMP URL request for a batch of URLs.</summary>
    public class BatchGetAmpUrlsRequest : Google.Apis.Requests.IDirectResponseSchema
    {
        /// <summary>The lookup_strategy being requested.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("lookupStrategy")]
        public virtual string LookupStrategy { get; set; }

        /// <summary>List of URLs to look up for the paired AMP URLs. The URLs are case-sensitive. Up to 50 URLs per
        /// lookup (see [Usage Limits](/amp/cache/reference/limits)).</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("urls")]
        public virtual System.Collections.Generic.IList<string> Urls { get; set; }

        /// <summary>The ETag of the item.</summary>
        public virtual string ETag { get; set; }
    }

    /// <summary>Batch AMP URL response.</summary>
    public class BatchGetAmpUrlsResponse : Google.Apis.Requests.IDirectResponseSchema
    {
        /// <summary>For each URL in BatchAmpUrlsRequest, the URL response. The response might not be in the same order
        /// as URLs in the batch request. If BatchAmpUrlsRequest contains duplicate URLs, AmpUrl is generated only
        /// once.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("ampUrls")]
        public virtual System.Collections.Generic.IList<AmpUrl> AmpUrls { get; set; }

        /// <summary>The errors for requested URLs that have no AMP URL.</summary>
        [Newtonsoft.Json.JsonPropertyAttribute("urlErrors")]
        public virtual System.Collections.Generic.IList<AmpUrlError> UrlErrors { get; set; }

        /// <summary>The ETag of the item.</summary>
        public virtual string ETag { get; set; }
    }
}
using UnityEngine;
using System.Collections.Generic;

/// <summary>
/// A named, serializable snapshot of the 16 Bezier control points of a
/// <see cref="MegaBezPatch"/>, stored row-major:
/// points[0..3] = p11..p14, points[4..7] = p21..p24,
/// points[8..11] = p31..p34, points[12..15] = p41..p44.
/// </summary>
[System.Serializable]
public class Warp
{
	public string name = "None";
	public Vector3[] points = new Vector3[16];

	/// <summary>Loads this warp's stored control points into the given patch.</summary>
	public void GetWarp(MegaBezPatch mod)
	{
		Vector3[] p = points;

		// Row 1
		mod.p11 = p[0];  mod.p12 = p[1];  mod.p13 = p[2];  mod.p14 = p[3];
		// Row 2
		mod.p21 = p[4];  mod.p22 = p[5];  mod.p23 = p[6];  mod.p24 = p[7];
		// Row 3
		mod.p31 = p[8];  mod.p32 = p[9];  mod.p33 = p[10]; mod.p34 = p[11];
		// Row 4
		mod.p41 = p[12]; mod.p42 = p[13]; mod.p43 = p[14]; mod.p44 = p[15];
	}

	/// <summary>Captures the given patch's current control points into this warp.</summary>
	public void SetWarp(MegaBezPatch mod)
	{
		Vector3[] p = points;

		// Row 1
		p[0]  = mod.p11; p[1]  = mod.p12; p[2]  = mod.p13; p[3]  = mod.p14;
		// Row 2
		p[4]  = mod.p21; p[5]  = mod.p22; p[6]  = mod.p23; p[7]  = mod.p24;
		// Row 3
		p[8]  = mod.p31; p[9]  = mod.p32; p[10] = mod.p33; p[11] = mod.p34;
		// Row 4
		p[12] = mod.p41; p[13] = mod.p42; p[14] = mod.p43; p[15] = mod.p44;
	}

	/// <summary>Rescales every stored point by (wr, hr) in x/y; z is left untouched.</summary>
	public void AdjustLattice(float wr, float hr)
	{
		Vector3 scale = new Vector3(wr, hr, 1.0f);

		for ( int i = 0; i < 16; i++ )
			points[i] = Vector3.Scale(points[i], scale);
	}
}

/// <summary>
/// Procedural quad mesh deformed by a 4x4 Bezier control lattice; also runs in
/// edit mode so the patch can be shaped in the editor.
/// </summary>
[ExecuteInEditMode]
public class MegaBezPatch : MonoBehaviour
{
	public float Width = 1.0f;
	public float Height = 1.0f;
	public int WidthSegs = 20;
	public int HeightSegs = 20;
	public bool GenUVs = true;
	public bool recalcBounds = false;
	public bool recalcTangents = true;
	public bool recalcCollider = false;
public bool showgizmos = true; public bool showlatticepoints = false; public Color latticecol = Color.white; public float handlesize = 0.075f; public bool positionhandles = false; public bool showlabels = true; public Vector2 snap = new Vector2(0.25f, 0.25f); public List<Warp> warps = new List<Warp>(); public int srcwarp; public int destwarp; [HideInInspector] public Vector3[] verts; [HideInInspector] public Vector2[] uvs; [HideInInspector] public int[] tris; [HideInInspector] public Vector3[] norms; [HideInInspector] public bool rebuild = true; public Vector2 UVOffset = Vector2.zero; public Vector2 UVScale = Vector2.one; public int currentwarp = 0; [HideInInspector] public Mesh mesh; public float switchtime = 1.0f; public float time = 1000.0f; public Vector3 p11; public Vector3 p21; public Vector3 p31; public Vector3 p41; public Vector3 p12; public Vector3 p22; public Vector3 p32; public Vector3 p42; public Vector3 p13; public Vector3 p23; public Vector3 p33; public Vector3 p43; public Vector3 p14; public Vector3 p24; public Vector3 p34; public Vector3 p44; public void AddWarp() { Warp warp = new Warp(); warp.SetWarp(this); warps.Add(warp); } public void UpdateWarp(int i) { Warp warp = warps[i]; warp.SetWarp(this); } public void SetWarp(int i) { if ( Application.isPlaying ) { time = 0.0f; srcwarp = currentwarp; destwarp = i; } else { time = 100.0f; currentwarp = i; warps[i].GetWarp(this); } } void Start() { time = 0.0f; } public void Reset() { InitLattice(); Rebuild(); } public void Rebuild() { MeshFilter mf = GetComponent<MeshFilter>(); if ( mf != null ) { Mesh mesh1 = mf.sharedMesh; if ( mesh1 == null ) { mesh1 = new Mesh(); mf.sharedMesh = mesh1; } mesh = mesh1; } } void Update() { ChangeWarp(srcwarp, destwarp); if ( mesh == null ) Rebuild(); if ( rebuild ) BuildMesh(mesh); } void MakeQuad1(int f, int a, int b, int c, int d) { tris[f++] = a; tris[f++] = b; tris[f++] = c; tris[f++] = c; tris[f++] = d; tris[f++] = a; } // Put in utils int MaxComponent(Vector3 v) 
{ if ( Mathf.Abs(v.x) > Mathf.Abs(v.y) ) { if ( Mathf.Abs(v.x) > Mathf.Abs(v.z) ) return 0; else return 2; } else { if ( Mathf.Abs(v.y) > Mathf.Abs(v.z) ) return 1; else return 2; } } void UpdateSurface() { } // Only call this on size or seg change void BuildMesh(Mesh mesh) { if ( WidthSegs < 1 ) WidthSegs = 1; if ( HeightSegs < 1 ) HeightSegs = 1; Vector3 p = Vector3.zero; int numverts = (WidthSegs + 1) * (HeightSegs + 1); if ( verts == null ) { InitLattice(); } if ( verts == null || verts.Length != numverts ) { verts = new Vector3[numverts]; uvs = new Vector2[numverts]; tris = new int[HeightSegs * WidthSegs * 2 * 3]; norms = new Vector3[numverts]; for ( int i = 0; i < norms.Length; i++ ) norms[i] = Vector3.back; } Vector2 uv = Vector2.zero; int index = 0; p = Vector3.zero; for ( int i = 0; i <= HeightSegs; i++ ) { index = i * (WidthSegs + 1); for ( int j = 0; j <= WidthSegs; j++ ) { float xIndex = (float)j / (float)WidthSegs; float yIndex = (float)i / (float)HeightSegs; float omx = 1.0f - xIndex; float omy = 1.0f - yIndex; float x1 = omx * omx * omx; float x2 = (3.0f * omx) * omx * xIndex; float x3 = (3.0f * omx) * xIndex * xIndex; float x4 = xIndex * xIndex * xIndex; float y1 = omy * omy * omy; float y2 = (3.0f * omy) * omy * yIndex; float y3 = (3.0f * omy) * yIndex * yIndex; float y4 = yIndex * yIndex * yIndex; p.x = (x1 * p11.x * y1) + (x2 * p12.x * y1) + (x3 * p13.x * y1) + (x4 * p14.x * y1) + (x1 * p21.x * y2) + (x2 * p22.x * y2) + (x3 * p23.x * y2) + (x4 * p24.x * y2) + (x1 * p31.x * y3) + (x2 * p32.x * y3) + (x3 * p33.x * y3) + (x4 * p34.x * y3) + (x1 * p41.x * y4) + (x2 * p42.x * y4) + (x3 * p43.x * y4) + (x4 * p44.x * y4); p.y = (x1 * p11.y * y1) + (x2 * p12.y * y1) + (x3 * p13.y * y1) + (x4 * p14.y * y1) + (x1 * p21.y * y2) + (x2 * p22.y * y2) + (x3 * p23.y * y2) + (x4 * p24.y * y2) + (x1 * p31.y * y3) + (x2 * p32.y * y3) + (x3 * p33.y * y3) + (x4 * p34.y * y3) + (x1 * p41.y * y4) + (x2 * p42.y * y4) + (x3 * p43.y * y4) + (x4 * p44.y * y4); verts[index 
+ j] = p; if ( GenUVs ) { uv.x = (xIndex + UVOffset.x) * UVScale.x; uv.y = (yIndex + UVOffset.y) * UVScale.y; uvs[index + j] = uv; } } } int f = 0; for ( int iz = 0; iz < HeightSegs; iz++ ) { int kv = iz * (WidthSegs + 1); for ( int ix = 0; ix < WidthSegs; ix++ ) { tris[f++] = kv; tris[f++] = kv + WidthSegs + 1; tris[f++] = kv + WidthSegs + 2; tris[f++] = kv + WidthSegs + 2; tris[f++] = kv + 1; tris[f++] = kv; kv++; } } mesh.Clear(); mesh.subMeshCount = 1; mesh.vertices = verts; mesh.uv = uvs; mesh.SetTriangles(tris, 0); mesh.normals = norms; mesh.RecalculateBounds(); if ( recalcTangents ) BuildTangents(mesh, verts, norms, tris, uvs); } public void InitLattice() { float w = Width; float h = Height; p11 = new Vector3(-1.5f * w, -1.5f * h, 0.0f); p12 = new Vector3(-0.5f * w, -1.5f * h, 0.0f); p13 = new Vector3(0.5f * w, -1.5f * h, 0.0f); p14 = new Vector3(1.5f * w, -1.5f * h, 0.0f); p21 = new Vector3(-1.5f * w, -0.5f * h, 0.0f); p22 = new Vector3(-0.5f * w, -0.5f * h, 0.0f); p23 = new Vector3(0.5f * w, -0.5f * h, 0.0f); p24 = new Vector3(1.5f * w, -0.5f * h, 0.0f); p31 = new Vector3(-1.5f * w, 0.5f * h, 0.0f); p32 = new Vector3(-0.5f * w, 0.5f * h, 0.0f); p33 = new Vector3(0.5f * w, 0.5f * h, 0.0f); p34 = new Vector3(1.5f * w, 0.5f * h, 0.0f); p41 = new Vector3(-1.5f * w, 1.5f * h, 0.0f); p42 = new Vector3(-0.5f * w, 1.5f * h, 0.0f); p43 = new Vector3(0.5f * w, 1.5f * h, 0.0f); p44 = new Vector3(1.5f * w, 1.5f * h, 0.0f); } public Vector3[] lpoints; public void AdjustLattice(float w, float h) { float wr = w / Width; float hr = h / Height; Vector3 r = new Vector3(wr, hr, 1.0f); p11 = Vector3.Scale(p11, r); p12 = Vector3.Scale(p12, r); p13 = Vector3.Scale(p13, r); p14 = Vector3.Scale(p14, r); p21 = Vector3.Scale(p21, r); p22 = Vector3.Scale(p22, r); p23 = Vector3.Scale(p23, r); p24 = Vector3.Scale(p24, r); p31 = Vector3.Scale(p31, r); p32 = Vector3.Scale(p32, r); p33 = Vector3.Scale(p33, r); p34 = Vector3.Scale(p34, r); p41 = Vector3.Scale(p41, r); p42 = 
Vector3.Scale(p42, r); p43 = Vector3.Scale(p43, r); p44 = Vector3.Scale(p44, r); for ( int i = 0; i < warps.Count; i++ ) { warps[i].AdjustLattice(wr, hr); } Height = h; Width = w; } static public void BuildTangents(Mesh mesh, Vector3[] verts, Vector3[] norms, int[] tris, Vector2[] uvs) { int vertexCount = mesh.vertices.Length; Vector3[] tan1 = new Vector3[vertexCount]; Vector3[] tan2 = new Vector3[vertexCount]; Vector4[] tangents = new Vector4[vertexCount]; for ( int a = 0; a < tris.Length; a += 3 ) { long i1 = tris[a]; long i2 = tris[a + 1]; long i3 = tris[a + 2]; Vector3 v1 = verts[i1]; Vector3 v2 = verts[i2]; Vector3 v3 = verts[i3]; Vector2 w1 = uvs[i1]; Vector2 w2 = uvs[i2]; Vector2 w3 = uvs[i3]; float x1 = v2.x - v1.x; float x2 = v3.x - v1.x; float y1 = v2.y - v1.y; float y2 = v3.y - v1.y; float z1 = v2.z - v1.z; float z2 = v3.z - v1.z; float s1 = w2.x - w1.x; float s2 = w3.x - w1.x; float t1 = w2.y - w1.y; float t2 = w3.y - w1.y; float r = 1.0f / (s1 * t2 - s2 * t1); Vector3 sdir = new Vector3((t2 * x1 - t1 * x2) * r, (t2 * y1 - t1 * y2) * r, (t2 * z1 - t1 * z2) * r); Vector3 tdir = new Vector3((s1 * x2 - s2 * x1) * r, (s1 * y2 - s2 * y1) * r, (s1 * z2 - s2 * z1) * r); tan1[i1] += sdir; tan1[i2] += sdir; tan1[i3] += sdir; tan2[i1] += tdir; tan2[i2] += tdir; tan2[i3] += tdir; } for ( int a = 0; a < vertexCount; a++ ) { Vector3 n = norms[a]; Vector3 t = tan1[a]; Vector3.OrthoNormalize(ref n, ref t); tangents[a].x = t.x; tangents[a].y = t.y; tangents[a].z = t.z; tangents[a].w = (Vector3.Dot(Vector3.Cross(n, t), tan2[a]) < 0.0f) ? 
-1.0f : 1.0f; } mesh.tangents = tangents; } Vector3 bounce(Vector3 start, Vector3 end, float value) { value /= 1.0f; end -= start; if ( value < (1.0f / 2.75f) ) { return end * (7.5625f * value * value) + start; } else { if ( value < (2.0f / 2.75f) ) { value -= (1.5f / 2.75f); return end * (7.5625f * (value) * value + 0.75f) + start; } else { if ( value < (2.5f / 2.75f) ) { value -= (2.25f / 2.75f); return end * (7.5625f * (value) * value + .9375f) + start; } else { value -= (2.625f / 2.75f); return end * (7.5625f * (value) * value + .984375f) + start; } } } } Vector3 easeInOutSine(Vector3 start, Vector3 end, float value) { end -= start; return -end / 2.0f * (Mathf.Cos(Mathf.PI * value / 1.0f) - 1.0f) + start; } float delay = -1.0f; public void ChangeWarp(int f, int t) { if ( !Application.isPlaying ) return; if ( delay > 0.0f ) { delay -= Time.deltaTime; return; } if ( time <= switchtime ) { time += Time.deltaTime; Warp from = warps[f]; Warp to = warps[t]; float a = time / switchtime; if ( a > 1.0f ) { a = 1.0f; currentwarp = t; t++; destwarp = t; if ( destwarp >= warps.Count ) destwarp = 0; srcwarp = currentwarp; time = 0.0f; delay = 1.0f; } p11 = easeInOutSine(from.points[0], to.points[0], a); p12 = easeInOutSine(from.points[1], to.points[1], a); p13 = easeInOutSine(from.points[2], to.points[2], a); p14 = easeInOutSine(from.points[3], to.points[3], a); p21 = easeInOutSine(from.points[4], to.points[4], a); p22 = easeInOutSine(from.points[5], to.points[5], a); p23 = easeInOutSine(from.points[6], to.points[6], a); p24 = easeInOutSine(from.points[7], to.points[7], a); p31 = easeInOutSine(from.points[8], to.points[8], a); p32 = easeInOutSine(from.points[9], to.points[9], a); p33 = easeInOutSine(from.points[10], to.points[10], a); p34 = easeInOutSine(from.points[11], to.points[11], a); p41 = easeInOutSine(from.points[12], to.points[12], a); p42 = easeInOutSine(from.points[13], to.points[13], a); p43 = easeInOutSine(from.points[14], to.points[14], a); p44 = 
easeInOutSine(from.points[15], to.points[15], a); } } }