context
stringlengths
2.52k
185k
gt
stringclasses
1 value
//------------------------------------------------------------------------------
// <copyright file="BaseTemplateParser.cs" company="Microsoft">
//     Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------

/*
 * Implements the ASP.NET template parser
 *
 * Copyright (c) 1998 Microsoft Corporation
 */

namespace System.Web.UI {

    using System.Text;
    using System;
    using System.IO;
    using System.Collections;
    using System.Collections.Specialized;
    using System.ComponentModel;
    using System.ComponentModel.Design;
    using System.Reflection;
    using System.Configuration;
    using System.Web.Caching;
    using System.Web.Util;
    using System.Web.Hosting;
    using System.Web.Compilation;
    using HttpException = System.Web.HttpException;
    using System.Text.RegularExpressions;
    using System.Globalization;
    using System.Security.Permissions;

    /*
     * Parser for Template Files (TemplateControls and PageTheme)
     */

    /// <internalonly/>
    /// <devdoc>
    ///    <para>[To be supplied.]</para>
    /// </devdoc>
    public abstract class BaseTemplateParser : TemplateParser {

        // Attribute names used by the <%@ Register %> directive. Kept as consts
        // so the same literal is never duplicated inline (see ProcessDirective).
        private const string _sourceString = "src";
        private const string _namespaceString = "namespace";
        private const string _tagnameString = "tagname";

        // Resolves the Type to use for a user control tag at design time.
        // Falls back to typeof(UserControl) when no designer service is
        // available or the service throws (best-effort by design: a designer
        // resolution failure must never take down the parse).
        internal Type GetDesignTimeUserControlType(string tagPrefix, string tagName) {
            Debug.Assert(FInDesigner);

            Type type = typeof(UserControl);

            IDesignerHost host = DesignerHost;
            if (host != null) {
                IUserControlTypeResolutionService ucTypeResService =
                    (IUserControlTypeResolutionService)host.GetService(typeof(IUserControlTypeResolutionService));

                if (ucTypeResService != null) {
                    try {
                        type = ucTypeResService.GetType(tagPrefix, tagName);
                    }
                    catch {
                        // Deliberately swallowed: keep the UserControl fallback.
                    }
                }
            }

            return type;
        }

        /*
         * Compile a nested .ascx file (a User Control) and return its Type
         */
        protected internal Type GetUserControlType(string virtualPath) {
            return GetUserControlType(VirtualPath.Create(virtualPath));
        }

        // Compiles the user control at virtualPath and returns its Type.
        // No-compile user controls have no usable Type, so they are rejected
        // unless a PageParserFilter supplies a substitute type.
        internal Type GetUserControlType(VirtualPath virtualPath) {
            Type t = GetReferencedType(virtualPath, false /*allowNoCompile*/);

            // Fail if it's a no compile uc, since it doesn't have a Type we can use
            if (t == null) {
                // First, check whether there is a PageParserFilter that can give us a type
                if (_pageParserFilter != null)
                    t = _pageParserFilter.GetNoCompileUserControlType();

                if (t == null)
                    ProcessError(SR.GetString(SR.Cant_use_nocompile_uc, virtualPath));
            }
            else {
                // Make sure it has the correct base type
                Util.CheckAssignableType(typeof(UserControl), t);
            }

            return t;
        }

        /*
         * Compile a .aspx/.ascx file and return its Type
         */
        protected Type GetReferencedType(string virtualPath) {
            return GetReferencedType(VirtualPath.Create(virtualPath));
        }

        internal Type GetReferencedType(VirtualPath virtualPath) {
            return GetReferencedType(virtualPath, true /*allowNoCompile*/);
        }

        // Builds the page/control at virtualPath and returns its Type, or the
        // base type for a no-compile template control (null if allowNoCompile
        // is false). Also records source/type/build-result dependencies so the
        // cached parse result is invalidated correctly.
        internal Type GetReferencedType(VirtualPath virtualPath, bool allowNoCompile) {

            virtualPath = ResolveVirtualPath(virtualPath);

            // If we have a page parser filter, make sure the reference is allowed
            if (_pageParserFilter != null && !_pageParserFilter.AllowVirtualReference(CompConfig, virtualPath)) {
                ProcessError(SR.GetString(SR.Reference_not_allowed, virtualPath));
            }

            BuildResult result = null;
            Type t = null;
            try {
                result = BuildManager.GetVPathBuildResult(virtualPath);
            }
            catch (HttpCompileException e) {
                // Add the path depdencies properly so we know when
                // to invalidate the cached result.
                if (e.VirtualPathDependencies != null) {
                    foreach (string vPath in e.VirtualPathDependencies) {
                        AddSourceDependency(VirtualPath.Create(vPath));
                    }
                }

                throw;
            }
            catch {
                // Add the virtualPath to the dependency so that
                // we know when to check again. This could happen if the
                // virtualPath points to a file not created yet.
                // This only affects designtime code path since we do want to return
                // partial result even if there is an error, and that result is
                // cached. VSWhidbey 372585
                if (IgnoreParseErrors) {
                    AddSourceDependency(virtualPath);
                }
                throw;
            }

            // Is it a no-compile page/uc
            BuildResultNoCompileTemplateControl noCompileResult =
                result as BuildResultNoCompileTemplateControl;

            if (noCompileResult != null) {
                // If no-compile is not acceptable, return null
                if (!allowNoCompile)
                    return null;

                // In the no-compile case, use the base type, since we don't compile a type
                t = noCompileResult.BaseType;
            }
            else if (result is BuildResultCompiledType) {
                BuildResultCompiledType compiledResult = (BuildResultCompiledType) result;
                Debug.Assert(compiledResult != null);
                t = compiledResult.ResultType;
            }
            else {
                throw new HttpException(SR.GetString(SR.Invalid_typeless_reference, _sourceString));
            }

            Debug.Assert(t != null);

            // Add a dependency on the Type
            AddTypeDependency(t);

            // Add a dependency on the BuildResult
            AddBuildResultDependency(result);

            return t;
        }

        // Handles the <%@ Register %> directive; all other directives are
        // delegated to the base TemplateParser.
        internal override void ProcessDirective(string directiveName, IDictionary directive) {

            if (StringUtil.EqualsIgnoreCase(directiveName, "register")) {
                // Register directive

                // Get the tagprefix, which is required
                string tagPrefix = Util.GetAndRemoveNonEmptyIdentifierAttribute(directive,
                    "tagprefix", true /*required*/);

                string tagName = Util.GetAndRemoveNonEmptyIdentifierAttribute(directive,
                    _tagnameString, false /*required*/);

                VirtualPath src = Util.GetAndRemoveVirtualPathAttribute(directive,
                    _sourceString, false /*required*/);

                string ns = Util.GetAndRemoveNonEmptyNoSpaceAttribute(directive,
                    _namespaceString, false /*required*/);

                // An Assembly can optionally be specified (ASURT 61326/VSWhidbey 87050)
                string assemblyName = Util.GetAndRemoveNonEmptyAttribute(directive,
                    "assembly", false /*required*/);

                RegisterDirectiveEntry registerEntry;

                if (tagName != null) {
                    // It's a user control registration

                    // 'src' is required
                    if (src == null) {
                        throw new HttpException(SR.GetString(SR.Missing_attr, _sourceString));
                    }

                    // 'namespace' is not allowed
                    // Note: use _tagnameString rather than repeating the literal,
                    // consistent with the attribute-name consts declared above.
                    if (ns != null) {
                        throw new HttpException(
                            SR.GetString(SR.Invalid_attr, _namespaceString, _tagnameString));
                    }

                    // 'assembly' is not allowed
                    if (assemblyName != null) {
                        throw new HttpException(
                            SR.GetString(SR.Invalid_attr, "assembly", _tagnameString));
                    }

                    UserControlRegisterEntry ucRegisterEntry =
                        new UserControlRegisterEntry(tagPrefix, tagName);
                    ucRegisterEntry.UserControlSource = src;
                    registerEntry = ucRegisterEntry;

                    TypeMapper.ProcessUserControlRegistration(ucRegisterEntry);
                }
                else if (src != null) {
                    // It's missing the tagname attribute.
                    throw new HttpException(SR.GetString(SR.Missing_attr, _tagnameString));
                }
                else {
                    // It's a namespace prefix registration

                    // 'namespace' is required
                    if (ns == null) {
                        throw new HttpException(SR.GetString(SR.Missing_attr, _namespaceString));
                    }

                    TagNamespaceRegisterEntry nsRegisterEntry =
                        new TagNamespaceRegisterEntry(tagPrefix, ns, assemblyName);
                    registerEntry = nsRegisterEntry;

                    TypeMapper.ProcessTagNamespaceRegistration(nsRegisterEntry);
                }

                registerEntry.Line = _lineNumber;
                registerEntry.VirtualPath = CurrentVirtualPathString;

                // If there are some attributes left, fail
                Util.CheckUnknownDirectiveAttributes(directiveName, directive);
            }
            else {
                base.ProcessDirective(directiveName, directive);
            }
        }
    }

    /*
     * Entry representing a register directive
     * e.g. <%@ Register tagprefix="tagprefix" Namespace="namespace" Assembly="assembly" %> OR
     * e.g. <%@ Register tagprefix="tagprefix" Tagname="tagname" Src="pathname" %>
     */
    internal abstract class RegisterDirectiveEntry: SourceLineInfo {

        internal RegisterDirectiveEntry(string tagPrefix) {
            _tagPrefix = tagPrefix;
        }

        private string _tagPrefix;
        internal string TagPrefix {
            get { return _tagPrefix;}
        }
    }

    /*
     * Entry representing the registration of a tag namespace
     * e.g. <%@ Register tagprefix="tagprefix" Namespace="namespace" Assembly="assembly" %>
     */
    internal class TagNamespaceRegisterEntry: RegisterDirectiveEntry {

        internal TagNamespaceRegisterEntry(string tagPrefix, string namespaceName, string assemblyName)
            : base(tagPrefix) {
            _ns = namespaceName;
            _assemblyName = assemblyName;
        }

        private string _ns;
        internal string Namespace {
            get { return _ns;}
        }

        private string _assemblyName;
        internal string AssemblyName {
            get { return _assemblyName;}
        }

#if DONT_COMPILE
        internal string Key {
            get {
                return TagPrefix + ":" + _ns + ":" + (_assemblyName == null ? String.Empty : _assemblyName);
            }
        }
#endif
    }

    /*
     * Entry representing the registration of a user control
     * e.g. <%@ Register tagprefix="tagprefix" Tagname="tagname" Src="pathname" %>
     */
    internal class UserControlRegisterEntry: RegisterDirectiveEntry {

        internal UserControlRegisterEntry(string tagPrefix, string tagName)
            : base(tagPrefix) {
            _tagName = tagName;
        }

        private string _tagName;
        internal string TagName {
            get { return _tagName;}
        }

        private VirtualPath _source;
        internal VirtualPath UserControlSource {
            get { return _source;}
            set { _source = value;}
        }

        private bool _comesFromConfig;
        internal bool ComesFromConfig {
            get { return _comesFromConfig;}
            set { _comesFromConfig = value;}
        }

        internal string Key {
            get {
                return TagPrefix + ":" + _tagName;
            }
        }
    }

    internal class TagNamespaceRegisterEntryTable : Hashtable {
        public TagNamespaceRegisterEntryTable() : base(StringComparer.OrdinalIgnoreCase) {
        }

        public override object Clone() {
            // We override clone to perform a deep copy of the hashtable contents but a shallow copy of
            // the contained arraylist itself
            TagNamespaceRegisterEntryTable newTable = new TagNamespaceRegisterEntryTable();
            foreach (DictionaryEntry entry in this) {
                newTable[entry.Key] = ((ArrayList)entry.Value).Clone();
            }
            return newTable;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using UnityEngine;
using UnityEditor.Graphing;
using UnityEditor.ShaderGraph.Drawing.Controls;
using UnityEngine.Rendering;
using UnityEditor.Experimental.GraphView;
using UnityEditor.Rendering;
using UnityEngine.UIElements;
using Node = UnityEditor.Experimental.GraphView.Node;

namespace UnityEditor.ShaderGraph.Drawing
{
    // GraphView node visual for a single AbstractMaterialNode: builds the
    // control/preview/port-input UI, keeps it in sync with node modifications,
    // and exposes the contextual menu (Copy Shader / Show Generated Code / ...).
    sealed class MaterialNodeView : Node, IShaderNodeView
    {
        PreviewRenderData m_PreviewRenderData;
        Image m_PreviewImage;
        // The floating preview that sits on top of the node; m_PreviewFiller
        // below is a same-sized placeholder that reserves layout space for it.
        VisualElement m_PreviewContainer;
        VisualElement m_ControlItems;
        VisualElement m_PreviewFiller;
        VisualElement m_ControlsDivider;
        IEdgeConnectorListener m_ConnectorListener;
        // Pixel cache holding one PortInputView per unconnected input slot.
        VisualElement m_PortInputContainer;
        VisualElement m_SettingsContainer;
        bool m_ShowSettings = false;
        VisualElement m_SettingsButton;
        VisualElement m_Settings;
        VisualElement m_NodeSettingsView;
        GraphView m_GraphView;

        // One-time setup: wires the view to the node, preview manager and edge
        // connector listener. Must be called before the view is used.
        public void Initialize(AbstractMaterialNode inNode, PreviewManager previewManager, IEdgeConnectorListener connectorListener, GraphView graphView)
        {
            styleSheets.Add(Resources.Load<StyleSheet>("Styles/MaterialNodeView"));
            AddToClassList("MaterialNode");

            if (inNode == null)
                return;

            var contents = this.Q("contents");

            m_GraphView = graphView;
            m_ConnectorListener = connectorListener;
            node = inNode;
            viewDataKey = node.guid.ToString();
            UpdateTitle();

            // Add controls container
            var controlsContainer = new VisualElement { name = "controls" };
            {
                m_ControlsDivider = new VisualElement { name = "divider" };
                m_ControlsDivider.AddToClassList("horizontal");
                controlsContainer.Add(m_ControlsDivider);
                m_ControlItems = new VisualElement { name = "items" };
                controlsContainer.Add(m_ControlItems);

                // Instantiate control views from node: any property carrying an
                // IControlAttribute contributes a control element.
                foreach (var propertyInfo in node.GetType().GetProperties(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic))
                    foreach (IControlAttribute attribute in propertyInfo.GetCustomAttributes(typeof(IControlAttribute), false))
                        m_ControlItems.Add(attribute.InstantiateControl(node, propertyInfo));
            }
            if (m_ControlItems.childCount > 0)
                contents.Add(controlsContainer);

            if (node.hasPreview)
            {
                // Add actual preview which floats on top of the node
                m_PreviewContainer = new VisualElement
                {
                    name = "previewContainer",
                    cacheAsBitmap = true,
                    style = { overflow = Overflow.Hidden },
                    pickingMode = PickingMode.Ignore
                };
                m_PreviewImage = new Image
                {
                    name = "preview",
                    pickingMode = PickingMode.Ignore,
                    image = Texture2D.whiteTexture,
                };
                {
                    // Add preview collapse button on top of preview
                    var collapsePreviewButton = new VisualElement { name = "collapse" };
                    collapsePreviewButton.Add(new VisualElement { name = "icon" });
                    collapsePreviewButton.AddManipulator(new Clickable(() =>
                    {
                        node.owner.owner.RegisterCompleteObjectUndo("Collapse Preview");
                        UpdatePreviewExpandedState(false);
                    }));
                    m_PreviewImage.Add(collapsePreviewButton);
                }
                m_PreviewContainer.Add(m_PreviewImage);

                // Hook up preview image to preview manager
                m_PreviewRenderData = previewManager.GetPreview(inNode);
                m_PreviewRenderData.onPreviewChanged += UpdatePreviewTexture;
                UpdatePreviewTexture();

                // Add fake preview which pads out the node to provide space for the floating preview
                m_PreviewFiller = new VisualElement { name = "previewFiller" };
                m_PreviewFiller.AddToClassList("expanded");
                {
                    var previewDivider = new VisualElement { name = "divider" };
                    previewDivider.AddToClassList("horizontal");
                    m_PreviewFiller.Add(previewDivider);

                    var expandPreviewButton = new VisualElement { name = "expand" };
                    expandPreviewButton.Add(new VisualElement { name = "icon" });
                    expandPreviewButton.AddManipulator(new Clickable(() =>
                    {
                        node.owner.owner.RegisterCompleteObjectUndo("Expand Preview");
                        UpdatePreviewExpandedState(true);
                    }));
                    m_PreviewFiller.Add(expandPreviewButton);
                }
                contents.Add(m_PreviewFiller);

                UpdatePreviewExpandedState(node.previewExpanded);
            }

            // Add port input container, which acts as a pixel cache for all port inputs
            m_PortInputContainer = new VisualElement
            {
                name = "portInputContainer",
                cacheAsBitmap = true,
                style = { overflow = Overflow.Hidden },
                pickingMode = PickingMode.Ignore
            };
            Add(m_PortInputContainer);

            AddSlots(node.GetSlots<MaterialSlot>());
            UpdatePortInputs();
            base.expanded = node.drawState.expanded;
            RefreshExpandedState(); //This should not be needed. GraphView needs to improve the extension api here
            UpdatePortInputVisibilities();

            SetPosition(new Rect(node.drawState.position.x, node.drawState.position.y, 0, 0));

            if (node is SubGraphNode)
            {
                RegisterCallback<MouseDownEvent>(OnSubGraphDoubleClick);
            }

            var masterNode = node as IMasterNode;
            if (masterNode != null)
            {
                AddToClassList("master");

                if (!masterNode.IsPipelineCompatible(GraphicsSettings.renderPipelineAsset))
                {
                    AttachMessage("The current render pipeline is not compatible with this master node.", ShaderCompilerMessageSeverity.Error);
                }
            }

            m_PortInputContainer.SendToBack();

            // Remove this after updated to the correct API call has landed in trunk. ------------
            VisualElement m_TitleContainer;
            VisualElement m_ButtonContainer;
            m_TitleContainer = this.Q("title");
            // -----------------------------------------------------------------------------------

            var settings = node as IHasSettings;
            if (settings != null)
            {
                m_NodeSettingsView = new NodeSettingsView();
                m_NodeSettingsView.visible = false;

                Add(m_NodeSettingsView);

                m_SettingsButton = new VisualElement {name = "settings-button"};
                m_SettingsButton.Add(new VisualElement { name = "icon" });

                m_Settings = settings.CreateSettingsElement();

                m_SettingsButton.AddManipulator(new Clickable(() =>
                {
                    UpdateSettingsExpandedState();
                }));

                // Remove this after updated to the correct API call has landed in trunk. ------------
                m_ButtonContainer = new VisualElement { name = "button-container" };
                m_ButtonContainer.style.flexDirection = FlexDirection.Row;

                m_ButtonContainer.Add(m_SettingsButton);
                m_ButtonContainer.Add(m_CollapseButton);

                m_TitleContainer.Add(m_ButtonContainer);
                // -----------------------------------------------------------------------------------

                //titleButtonContainer.Add(m_SettingsButton);
                //titleButtonContainer.Add(m_CollapseButton);
            }
        }

        // Replaces any existing badge with an error/comment badge attached to
        // the title bar.
        public void AttachMessage(string errString, ShaderCompilerMessageSeverity severity)
        {
            ClearMessage();
            IconBadge badge;
            if (severity == ShaderCompilerMessageSeverity.Error)
            {
                badge = IconBadge.CreateError(errString);
            }
            else
            {
                badge = IconBadge.CreateComment(errString);
            }

            Add(badge);
            var myTitle = this.Q("title");
            badge.AttachTo(myTitle, SpriteAlignment.RightCenter);
        }

        // Detaches and removes the current badge, if any.
        public void ClearMessage()
        {
            var badge = this.Q<IconBadge>();
            if(badge != null)
            {
                badge.Detach();
                badge.RemoveFromHierarchy();
            }
        }

        // Repositions the settings popup next to the settings button whenever
        // layout changes while the popup is open.
        void OnGeometryChanged(GeometryChangedEvent evt)
        {
            // style.positionTop and style.positionLeft are in relation to the parent,
            // so we translate the layout of the settings button to be in the coordinate
            // space of the settings view's parent.
            var settingsButtonLayout = m_SettingsButton.ChangeCoordinatesTo(m_NodeSettingsView.parent, m_SettingsButton.layout);
            m_NodeSettingsView.style.top = settingsButtonLayout.yMax - 18f;
            m_NodeSettingsView.style.left = settingsButtonLayout.xMin - 16f;
        }

        // Double-click (left button) on a sub-graph node opens the sub-graph
        // asset in its own graph editor window.
        void OnSubGraphDoubleClick(MouseDownEvent evt)
        {
            if (evt.clickCount == 2 && evt.button == 0)
            {
                SubGraphNode subgraphNode = node as SubGraphNode;

                var path = AssetDatabase.GUIDToAssetPath(subgraphNode.subGraphGuid);
                ShaderGraphImporterEditor.ShowGraphEditWindow(path);
            }
        }

        public Node gvNode => this;

        public AbstractMaterialNode node { get; private set; }

        // Keeps the GraphView expanded state and the node's persisted
        // drawState.expanded in sync with each other.
        public override bool expanded
        {
            get { return base.expanded; }
            set
            {
                if (base.expanded != value)
                    base.expanded = value;

                if (node.drawState.expanded != value)
                {
                    var ds = node.drawState;
                    ds.expanded = value;
                    node.drawState = ds;
                }

                RefreshExpandedState(); //This should not be needed. GraphView needs to improve the extension api here
                UpdatePortInputVisibilities();
            }
        }

        public override void BuildContextualMenu(ContextualMenuPopulateEvent evt)
        {
            if (evt.target is Node)
            {
                var isMaster = node is IMasterNode;
                var isActive = node.guid == node.owner.activeOutputNodeGuid;
                if (isMaster)
                {
                    evt.menu.AppendAction("Set Active", SetMasterAsActive,
                        _ => isActive ? DropdownMenuAction.Status.Checked : DropdownMenuAction.Status.Normal);
                }

                var canViewShader = node.hasPreview || node is IMasterNode || node is SubGraphOutputNode;
                evt.menu.AppendAction("Copy Shader", CopyToClipboard,
                    _ => canViewShader ? DropdownMenuAction.Status.Normal : DropdownMenuAction.Status.Hidden,
                    GenerationMode.ForReals);
                evt.menu.AppendAction("Show Generated Code", ShowGeneratedCode,
                    _ => canViewShader ? DropdownMenuAction.Status.Normal : DropdownMenuAction.Status.Hidden,
                    GenerationMode.ForReals);

                if (Unsupported.IsDeveloperMode())
                {
                    evt.menu.AppendAction("Show Preview Code", ShowGeneratedCode,
                        _ => canViewShader ? DropdownMenuAction.Status.Normal : DropdownMenuAction.Status.Hidden,
                        GenerationMode.Preview);
                }
            }

            base.BuildContextualMenu(evt);
        }

        void SetMasterAsActive(DropdownMenuAction action)
        {
            node.owner.activeOutputNodeGuid = node.guid;
        }

        // Generates the shader (mode comes via the menu action's userData) and
        // puts it on the system clipboard.
        void CopyToClipboard(DropdownMenuAction action)
        {
            GUIUtility.systemCopyBuffer = ConvertToShader((GenerationMode) action.userData);
        }

        // Strips all whitespace so the name is safe to embed in a file name.
        public string SanitizeName(string name)
        {
            return new string(name.Where(c => !Char.IsWhiteSpace(c)).ToArray());
        }

        // Writes the generated shader to Temp/ and opens it in the default
        // editor.
        public void ShowGeneratedCode(DropdownMenuAction action)
        {
            string name = GetFirstAncestorOfType<GraphEditorView>().assetName;
            var mode = (GenerationMode)action.userData;

            string path = String.Format("Temp/GeneratedFromGraph-{0}-{1}-{2}{3}.shader", SanitizeName(name),
                SanitizeName(node.name), node.guid, mode == GenerationMode.Preview ? "-Preview" : "");
            if (GraphUtil.WriteToFile(path, ConvertToShader(mode)))
                GraphUtil.OpenFile(path);
        }

        // Produces shader source for this node: master nodes generate their own
        // shader, everything else goes through the owning graph.
        string ConvertToShader(GenerationMode mode)
        {
            List<PropertyCollector.TextureInfo> textureInfo;
            if (node is IMasterNode masterNode)
                return masterNode.GetShader(mode, node.name, out textureInfo);

            return node.owner.GetShader(node, mode, node.name).shader;
        }

        // Rebuilds the settings element from the node; used after topological
        // changes so the settings UI reflects the current node state.
        void RecreateSettings()
        {
            var settings = node as IHasSettings;
            if (settings != null)
            {
                m_Settings.RemoveFromHierarchy();
                m_Settings = settings.CreateSettingsElement();
                m_NodeSettingsView.Add(m_Settings);
            }
        }

        // Toggles the settings popup; while open, geometry changes keep the
        // popup anchored next to the button (see OnGeometryChanged).
        void UpdateSettingsExpandedState()
        {
            m_ShowSettings = !m_ShowSettings;
            if (m_ShowSettings)
            {
                m_NodeSettingsView.Add(m_Settings);
                m_NodeSettingsView.visible = true;

                m_GraphView.ClearSelection();
                m_GraphView.AddToSelection(this);

                m_SettingsButton.AddToClassList("clicked");
                RegisterCallback<GeometryChangedEvent>(OnGeometryChanged);
                OnGeometryChanged(null);
            }
            else
            {
                m_Settings.RemoveFromHierarchy();

                m_NodeSettingsView.visible = false;
                m_SettingsButton.RemoveFromClassList("clicked");
                UnregisterCallback<GeometryChangedEvent>(OnGeometryChanged);
            }
        }

        // Shows/hides the floating preview and swaps the filler's CSS classes.
        void UpdatePreviewExpandedState(bool expanded)
        {
            node.previewExpanded = expanded;
            if (m_PreviewFiller == null)
                return;
            if (expanded)
            {
                if (m_PreviewContainer.parent != this)
                {
                    Add(m_PreviewContainer);
                    m_PreviewContainer.PlaceBehind(this.Q("selection-border"));
                }
                m_PreviewFiller.AddToClassList("expanded");
                m_PreviewFiller.RemoveFromClassList("collapsed");
            }
            else
            {
                // NOTE(review): the expand branch parents m_PreviewContainer
                // under `this`, yet this check only removes it when parented
                // under m_PreviewFiller — confirm the collapse path actually
                // detaches the container as intended.
                if (m_PreviewContainer.parent == m_PreviewFiller)
                {
                    m_PreviewContainer.RemoveFromHierarchy();
                }
                m_PreviewFiller.RemoveFromClassList("expanded");
                m_PreviewFiller.AddToClassList("collapsed");
            }
            UpdatePreviewTexture();
        }

        // Title shows the sub-graph asset name for sub-graph nodes, otherwise
        // the node's own name.
        // NOTE(review): the guard checks subGraphData for null but then reads
        // subGraphAsset.name — verify these two members can't diverge.
        void UpdateTitle()
        {
            if (node is SubGraphNode subGraphNode && subGraphNode.subGraphData != null)
                title = subGraphNode.subGraphAsset.name;
            else
                title = node.name;
        }

        // Reacts to node modifications; on Topological changes the ports and
        // inline port inputs are diffed against the node's current slots.
        public void OnModified(ModificationScope scope)
        {
            UpdateTitle();
            if (node.hasPreview)
                UpdatePreviewExpandedState(node.previewExpanded);

            base.expanded = node.drawState.expanded;

            // Update slots to match node modification
            if (scope == ModificationScope.Topological)
            {
                RecreateSettings();

                var slots = node.GetSlots<MaterialSlot>().ToList();

                var inputPorts = inputContainer.Children().OfType<ShaderPort>().ToList();
                foreach (var port in inputPorts)
                {
                    var currentSlot = port.slot;
                    var newSlot = slots.FirstOrDefault(s => s.id == currentSlot.id);
                    if (newSlot == null)
                    {
                        // Slot doesn't exist anymore, remove it
                        inputContainer.Remove(port);

                        // We also need to remove the inline input
                        var portInputView = m_PortInputContainer.Children().OfType<PortInputView>().FirstOrDefault(v => Equals(v.slot, port.slot));
                        if (portInputView != null)
                            portInputView.RemoveFromHierarchy();
                    }
                    else
                    {
                        port.slot = newSlot;
                        var portInputView = m_PortInputContainer.Children().OfType<PortInputView>().FirstOrDefault(x => x.slot.id == currentSlot.id);
                        if (newSlot.isConnected)
                        {
                            // Connected slots have no inline editor.
                            portInputView?.RemoveFromHierarchy();
                        }
                        else
                        {
                            portInputView?.UpdateSlot(newSlot);
                        }
                        slots.Remove(newSlot);
                    }
                }

                var outputPorts = outputContainer.Children().OfType<ShaderPort>().ToList();
                foreach (var port in outputPorts)
                {
                    var currentSlot = port.slot;
                    var newSlot = slots.FirstOrDefault(s => s.id == currentSlot.id);
                    if (newSlot == null)
                    {
                        outputContainer.Remove(port);
                    }
                    else
                    {
                        port.slot = newSlot;
                        slots.Remove(newSlot);
                    }
                }

                // Whatever is left in `slots` is new — add ports for them.
                AddSlots(slots);

                slots.Clear();
                slots.AddRange(node.GetSlots<MaterialSlot>());

                // Re-sort ports into the node's declared slot order.
                if (inputContainer.childCount > 0)
                    inputContainer.Sort((x, y) => slots.IndexOf(((ShaderPort)x).slot) - slots.IndexOf(((ShaderPort)y).slot));
                if (outputContainer.childCount > 0)
                    outputContainer.Sort((x, y) => slots.IndexOf(((ShaderPort)x).slot) - slots.IndexOf(((ShaderPort)y).slot));

                UpdatePortInputs();
                UpdatePortInputVisibilities();
            }

            RefreshExpandedState(); //This should not be needed. GraphView needs to improve the extension api here

            foreach (var listener in m_ControlItems.Children().OfType<AbstractMaterialNodeModificationListener>())
            {
                if (listener != null)
                    listener.OnNodeModified(scope);
            }
        }

        // Creates a ShaderPort for each non-hidden slot and files it under the
        // input or output container.
        void AddSlots(IEnumerable<MaterialSlot> slots)
        {
            foreach (var slot in slots)
            {
                if (slot.hidden)
                    continue;

                var port = ShaderPort.Create(slot, m_ConnectorListener);
                if (slot.isOutputSlot)
                    outputContainer.Add(port);
                else
                    inputContainer.Add(port);
            }
        }

        // Ensures every unconnected input port has a matching PortInputView in
        // the pixel cache, positioned next to its port (deferred via a
        // geometry callback when the port hasn't been laid out yet).
        void UpdatePortInputs()
        {
            foreach (var port in inputContainer.Children().OfType<ShaderPort>())
            {
                if (port.slot.isConnected || m_PortInputContainer.Children().OfType<PortInputView>().Any(a => Equals(a.slot, port.slot)))
                {
                    continue;
                }

                var portInputView = new PortInputView(port.slot) { style = { position = Position.Absolute } };
                m_PortInputContainer.Add(portInputView);
                if (float.IsNaN(port.layout.width))
                {
                    port.RegisterCallback<GeometryChangedEvent>(UpdatePortInput);
                }
                else
                {
                    SetPortInputPosition(port, portInputView);
                }
            }
        }

        // One-shot geometry callback: positions the port's input view once the
        // port has a layout, then unregisters itself.
        void UpdatePortInput(GeometryChangedEvent evt)
        {
            var port = (ShaderPort)evt.target;
            var inputView = m_PortInputContainer.Children().OfType<PortInputView>().First(x => Equals(x.slot, port.slot));
            SetPortInputPosition(port, inputView);
            port.UnregisterCallback<GeometryChangedEvent>(UpdatePortInput);
        }

        void SetPortInputPosition(ShaderPort port, PortInputView inputView)
        {
            inputView.style.top = port.layout.y;
            inputView.parent.style.height = inputContainer.layout.height;
        }

        // Hides the inline port inputs while the node is collapsed.
        void UpdatePortInputVisibilities()
        {
            if (expanded)
            {
                m_PortInputContainer.style.display = StyleKeyword.Null;
            }
            else
            {
                m_PortInputContainer.style.display = DisplayStyle.None;
            }
        }

        // Refreshes port labels/classes and inline input types after slot type
        // changes, then notifies control listeners.
        public void UpdatePortInputTypes()
        {
            foreach (var anchor in inputContainer.Children().Concat(outputContainer.Children()).OfType<ShaderPort>())
            {
                var slot = anchor.slot;
                anchor.portName = slot.displayName;
                anchor.visualClass = slot.concreteValueType.ToClassName();
            }

            foreach (var portInputView in m_PortInputContainer.Children().OfType<PortInputView>())
                portInputView.UpdateSlotType();

            foreach (var control in m_ControlItems.Children())
            {
                var listener = control as AbstractMaterialNodeModificationListener;
                if (listener != null)
                    listener.OnNodeModified(ModificationScope.Graph);
            }
        }

        // Applies a drag-resize delta to the preview dimensions (PreviewNode
        // only).
        void OnResize(Vector2 deltaSize)
        {
            var updatedWidth = topContainer.layout.width + deltaSize.x;
            var updatedHeight = m_PreviewImage.layout.height + deltaSize.y;

            var previewNode = node as PreviewNode;
            if (previewNode != null)
            {
                previewNode.SetDimensions(updatedWidth, updatedHeight);
                UpdateSize();
            }
        }

        // Pushes the latest render texture into the preview image; dims the
        // image while the preview shader is still compiling.
        void UpdatePreviewTexture()
        {
            if (m_PreviewRenderData.texture == null || !node.previewExpanded)
            {
                m_PreviewImage.visible = false;
                m_PreviewImage.image = Texture2D.blackTexture;
            }
            else
            {
                m_PreviewImage.visible = true;
                m_PreviewImage.AddToClassList("visible");
                m_PreviewImage.RemoveFromClassList("hidden");
                if (m_PreviewImage.image != m_PreviewRenderData.texture)
                    m_PreviewImage.image = m_PreviewRenderData.texture;
                else
                    m_PreviewImage.MarkDirtyRepaint();

                if (m_PreviewRenderData.shaderData.isCompiling)
                    m_PreviewImage.tintColor = new Color(1.0f, 1.0f, 1.0f, 0.3f);
                else
                    m_PreviewImage.tintColor = Color.white;
            }
        }

        // Applies the PreviewNode's stored width/height to the preview image.
        void UpdateSize()
        {
            var previewNode = node as PreviewNode;

            if (previewNode == null)
                return;

            var width = previewNode.width;
            var height = previewNode.height;

            m_PreviewImage.style.height = height;
            m_PreviewImage.style.width = width;
        }

        // Releases per-view resources and unhooks the preview callback so the
        // view can be garbage collected.
        public void Dispose()
        {
            foreach (var portInputView in m_PortInputContainer.Children().OfType<PortInputView>())
                portInputView.Dispose();

            node = null;
            ((VisualElement)this).userData = null;
            if (m_PreviewRenderData != null)
            {
                m_PreviewRenderData.onPreviewChanged -= UpdatePreviewTexture;
                m_PreviewRenderData = null;
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;

namespace Lucene.Net.Codecs.Perfield
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using FieldInfo = Lucene.Net.Index.FieldInfo;
    using IOUtils = Lucene.Net.Util.IOUtils;
    using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator;
    using SegmentReadState = Lucene.Net.Index.SegmentReadState;
    using SegmentWriteState = Lucene.Net.Index.SegmentWriteState;
    using Terms = Lucene.Net.Index.Terms;

    /// <summary>
    /// Enables per field postings support.
    /// <p>
    /// Note, when extending this class, the name (<seealso cref="#getName"/>) is
    /// written into the index. In order for the field to be read, the
    /// name must resolve to your implementation via <seealso cref="#forName(String)"/>.
    /// this method uses Java's
    /// <seealso cref="ServiceLoader Service Provider Interface"/> to resolve format names.
    /// <p>
    /// Files written by each posting format have an additional suffix containing the
    /// format name. For example, in a per-field configuration instead of <tt>_1.prx</tt>
    /// filenames would look like <tt>_1_Lucene40_0.prx</tt>.
    /// </summary>
    /// <seealso cref= ServiceLoader
    /// @lucene.experimental </seealso>
    public abstract class PerFieldPostingsFormat : PostingsFormat
    {
        /// <summary>
        /// Name of this <seealso cref="PostingsFormat"/>. </summary>
        public const string PER_FIELD_NAME = "PerField40";

        /// <summary>
        /// <seealso cref="FieldInfo"/> attribute name used to store the
        /// format name for each field.
        /// </summary>
        public static readonly string PER_FIELD_FORMAT_KEY = typeof(PerFieldPostingsFormat).Name + ".format";

        /// <summary>
        /// <seealso cref="FieldInfo"/> attribute name used to store the
        /// segment suffix name for each field.
        /// </summary>
        public static readonly string PER_FIELD_SUFFIX_KEY = typeof(PerFieldPostingsFormat).Name + ".suffix";

        /// <summary>
        /// Sole constructor. </summary>
        public PerFieldPostingsFormat()
            : base(PER_FIELD_NAME)
        {
        }

        public override sealed FieldsConsumer FieldsConsumer(SegmentWriteState state)
        {
            return new FieldsWriter(this, state);
        }

        // Pairs a delegate FieldsConsumer with the numeric suffix that
        // distinguishes multiple instances of the same format in one segment.
        internal class FieldsConsumerAndSuffix : IDisposable
        {
            internal FieldsConsumer Consumer;
            internal int Suffix;

            public void Dispose()
            {
                Consumer.Dispose();
            }
        }

        // Routes each added field to the PostingsFormat chosen by
        // GetPostingsFormatForField, lazily creating one delegate consumer per
        // distinct format and recording format/suffix as field attributes so
        // the reader can reconstruct the mapping.
        private class FieldsWriter : FieldsConsumer
        {
            private readonly PerFieldPostingsFormat OuterInstance;

            internal readonly IDictionary<PostingsFormat, FieldsConsumerAndSuffix> Formats = new Dictionary<PostingsFormat, FieldsConsumerAndSuffix>();
            internal readonly IDictionary<string, int> Suffixes = new Dictionary<string, int>();

            internal readonly SegmentWriteState SegmentWriteState;

            public FieldsWriter(PerFieldPostingsFormat outerInstance, SegmentWriteState state)
            {
                this.OuterInstance = outerInstance;
                SegmentWriteState = state;
            }

            public override TermsConsumer AddField(FieldInfo field)
            {
                PostingsFormat format = OuterInstance.GetPostingsFormatForField(field.Name);
                if (format == null)
                {
                    throw new InvalidOperationException("invalid null PostingsFormat for field=\"" + field.Name + "\"");
                }
                string formatName = format.Name;

                string previousValue = field.PutAttribute(PER_FIELD_FORMAT_KEY, formatName);
                //Debug.Assert(previousValue == null);

                int suffix;

                FieldsConsumerAndSuffix consumer;
                Formats.TryGetValue(format, out consumer);
                if (consumer == null)
                {
                    // First time we are seeing this format; create a new instance

                    // bump the suffix
                    if (!Suffixes.TryGetValue(formatName, out suffix))
                    {
                        suffix = 0;
                    }
                    else
                    {
                        suffix = suffix + 1;
                    }
                    Suffixes[formatName] = suffix;

                    string segmentSuffix = GetFullSegmentSuffix(field.Name, SegmentWriteState.SegmentSuffix, GetSuffix(formatName, Convert.ToString(suffix)));
                    consumer = new FieldsConsumerAndSuffix();
                    consumer.Consumer = format.FieldsConsumer(new SegmentWriteState(SegmentWriteState, segmentSuffix));
                    consumer.Suffix = suffix;
                    Formats[format] = consumer;
                }
                else
                {
                    // we've already seen this format, so just grab its suffix
                    Debug.Assert(Suffixes.ContainsKey(formatName));
                    suffix = consumer.Suffix;
                }

                previousValue = field.PutAttribute(PER_FIELD_SUFFIX_KEY, Convert.ToString(suffix));
                //Debug.Assert(previousValue == null);

                // TODO: we should only provide the "slice" of FIS
                // that this PF actually sees ... then stuff like
                // .hasProx could work correctly?
                // NOTE: .hasProx is already broken in the same way for the non-perfield case,
                // if there is a fieldinfo with prox that has no postings, you get a 0 byte file.
                return consumer.Consumer.AddField(field);
            }

            public override void Dispose()
            {
                // Close all subs
                IOUtils.Close(Formats.Values.ToArray());
            }
        }

        // Builds the per-format segment suffix, e.g. "Lucene40_0".
        internal static string GetSuffix(string formatName, string suffix)
        {
            return formatName + "_" + suffix;
        }

        internal static string GetFullSegmentSuffix(string fieldName, string outerSegmentSuffix, string segmentSuffix)
        {
            if (outerSegmentSuffix.Length == 0)
            {
                return segmentSuffix;
            }
            else
            {
                // TODO: support embedding; I think it should work but
                // we need a test confirm to confirm
                // return outerSegmentSuffix + "_" + segmentSuffix;
                throw new InvalidOperationException("cannot embed PerFieldPostingsFormat inside itself (field \"" + fieldName + "\" returned PerFieldPostingsFormat)");
            }
        }

        // Reads back the per-field format/suffix attributes written by
        // FieldsWriter and opens one delegate FieldsProducer per distinct
        // (format, suffix) pair, sharing producers between fields.
        private class FieldsReader : FieldsProducer
        {
            private readonly PerFieldPostingsFormat OuterInstance;

            // Sorted so GetEnumerator yields field names in order.
            internal readonly IDictionary<string, FieldsProducer> Fields = new SortedDictionary<string, FieldsProducer>();
            // Keyed by segment suffix, not format name, so two suffixes of the
            // same format get separate producers.
            internal readonly IDictionary<string, FieldsProducer> Formats = new Dictionary<string, FieldsProducer>();

            public FieldsReader(PerFieldPostingsFormat outerInstance, SegmentReadState readState)
            {
                this.OuterInstance = outerInstance;

                // Read _X.per and init each format:
                bool success = false;
                try
                {
                    // Read field name -> format name
                    foreach (FieldInfo fi in readState.FieldInfos)
                    {
                        if (fi.Indexed)
                        {
                            string fieldName = fi.Name;
                            string formatName = fi.GetAttribute(PER_FIELD_FORMAT_KEY);
                            if (formatName != null)
                            {
                                // null formatName means the field is in fieldInfos, but has no postings!
                                string suffix = fi.GetAttribute(PER_FIELD_SUFFIX_KEY);
                                Debug.Assert(suffix != null);
                                PostingsFormat format = PostingsFormat.ForName(formatName);
                                string segmentSuffix = GetSuffix(formatName, suffix);
                                if (!Formats.ContainsKey(segmentSuffix))
                                {
                                    Formats[segmentSuffix] = format.FieldsProducer(new SegmentReadState(readState, segmentSuffix));
                                }
                                Fields[fieldName] = Formats[segmentSuffix];
                            }
                        }
                    }
                    success = true;
                }
                finally
                {
                    if (!success)
                    {
                        // Don't leak already-opened producers on a partial failure.
                        IOUtils.CloseWhileHandlingException(Formats.Values);
                    }
                }
            }

            public override IEnumerator<string> GetEnumerator()
            {
                return Fields.Keys.GetEnumerator();
            }

            public override Terms Terms(string field)
            {
                FieldsProducer fieldsProducer;
                Fields.TryGetValue(field, out fieldsProducer);
                return fieldsProducer == null ? null : fieldsProducer.Terms(field);
            }

            public override int Size
            {
                get
                {
                    return Fields.Count;
                }
            }

            public override void Dispose()
            {
                IOUtils.Close(Formats.Values.ToArray());
            }

            public override long RamBytesUsed()
            {
                long sizeInBytes = 0;
                foreach (KeyValuePair<string, FieldsProducer> entry in Formats)
                {
                    sizeInBytes += entry.Key.Length * RamUsageEstimator.NUM_BYTES_CHAR;
                    sizeInBytes += entry.Value.RamBytesUsed();
                }
                return sizeInBytes;
            }

            public override void CheckIntegrity()
            {
                foreach (FieldsProducer producer in Formats.Values)
                {
                    producer.CheckIntegrity();
                }
            }
        }

        public override sealed FieldsProducer FieldsProducer(SegmentReadState state)
        {
            return new FieldsReader(this, state);
        }

        /// <summary>
        /// Returns the postings format that should be used for writing
        /// new segments of <code>field</code>.
        /// <p>
        /// The field to format mapping is written to the index, so
        /// this method is only invoked when writing, not when reading.
        /// </summary>
        public abstract PostingsFormat GetPostingsFormatForField(string field);
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Threading.Tasks.Dataflow.Internal;
using Xunit;

namespace System.Threading.Tasks.Dataflow.Tests
{
    /// <summary>
    /// Unit tests for TransformManyBlock, covering both the synchronous
    /// (IEnumerable-returning) and asynchronous (Task-returning) constructors.
    /// </summary>
    public class TransformManyBlockTests
    {
        // Freshly constructed blocks are empty and incomplete; a pre-canceled
        // token completes the block in a canceled state.
        [Fact]
        public async Task TestCtor()
        {
            var blocks = new[] {
                new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable),
                new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable, new ExecutionDataflowBlockOptions { MaxMessagesPerTask = 1 }),
                new TransformManyBlock<int, int>(i => TaskShim.Run(() => DataflowTestHelpers.ToEnumerable(i)), new ExecutionDataflowBlockOptions { MaxMessagesPerTask = 1 })
            };
            foreach (var block in blocks)
            {
                Assert.Equal(expected: 0, actual: block.InputCount);
                Assert.Equal(expected: 0, actual: block.OutputCount);
                Assert.False(block.Completion.IsCompleted);
            }

            blocks = new[] {
                new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable, new ExecutionDataflowBlockOptions { CancellationToken = new CancellationToken(true) }),
                new TransformManyBlock<int, int>(i => TaskShim.Run(() => DataflowTestHelpers.ToEnumerable(i)), new ExecutionDataflowBlockOptions { CancellationToken = new CancellationToken(true) })
            };
            foreach (var block in blocks)
            {
                Assert.Equal(expected: 0, actual: block.InputCount);
                Assert.Equal(expected: 0, actual: block.OutputCount);
                await Assert.ThrowsAnyAsync<OperationCanceledException>(() => block.Completion);
            }
        }

        // Null transform delegates and null options must be rejected.
        [Fact]
        public void TestArgumentExceptions()
        {
            Assert.Throws<ArgumentNullException>(() => new TransformManyBlock<int, int>((Func<int, IEnumerable<int>>)null));
            Assert.Throws<ArgumentNullException>(() => new TransformManyBlock<int, int>((Func<int, Task<IEnumerable<int>>>)null));
            Assert.Throws<ArgumentNullException>(() => new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable, null));
            Assert.Throws<ArgumentNullException>(() => new TransformManyBlock<int, int>(i => TaskShim.Run(() => DataflowTestHelpers.ToEnumerable(i)), null));

            DataflowTestHelpers.TestArgumentsExceptions(new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable));
        }

        [Fact]
        public void TestToString()
        {
            DataflowTestHelpers.TestToString(nameFormat =>
                nameFormat != null ?
                    new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable, new ExecutionDataflowBlockOptions() { NameFormat = nameFormat }) :
                    new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable));
        }

        // Exercises the ITargetBlock.OfferMessage contract directly and via linking.
        [Fact]
        public async Task TestOfferMessage()
        {
            var generators = new Func<TransformManyBlock<int, int>>[] {
                () => new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable),
                () => new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable, new ExecutionDataflowBlockOptions { BoundedCapacity = 10 }),
                () => new TransformManyBlock<int, int>(i => TaskShim.Run(() => DataflowTestHelpers.ToEnumerable(i)), new ExecutionDataflowBlockOptions { BoundedCapacity = 10, MaxMessagesPerTask = 1 })
            };
            foreach (var generator in generators)
            {
                DataflowTestHelpers.TestOfferMessage_ArgumentValidation(generator());

                var target = generator();
                DataflowTestHelpers.TestOfferMessage_AcceptsDataDirectly(target);
                DataflowTestHelpers.TestOfferMessage_CompleteAndOffer(target);

                target = generator();
                await DataflowTestHelpers.TestOfferMessage_AcceptsViaLinking(target);
                DataflowTestHelpers.TestOfferMessage_CompleteAndOffer(target);
            }
        }

        // Post succeeds before Complete and fails after, bounded or unbounded.
        [Fact]
        public void TestPost()
        {
            foreach (bool bounded in DataflowTestHelpers.BooleanValues)
            foreach (var tb in new[] {
                new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable, new ExecutionDataflowBlockOptions { BoundedCapacity = bounded ? 1 : -1 }),
                new TransformManyBlock<int, int>(i => TaskShim.Run(() => DataflowTestHelpers.ToEnumerable(i)), new ExecutionDataflowBlockOptions { BoundedCapacity = bounded ? 1 : -1 })})
            {
                Assert.True(tb.Post(0));
                tb.Complete();
                Assert.False(tb.Post(0));
            }
        }

        [Fact]
        public Task TestCompletionTask()
        {
            return DataflowTestHelpers.TestCompletionTask(() => new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable));
        }

        // Append controls whether new links go to the end or front of the link list;
        // delivery order to the single-message targets reflects that.
        [Fact]
        public async Task TestLinkToOptions()
        {
            const int Messages = 1;
            foreach (bool append in DataflowTestHelpers.BooleanValues)
            foreach (var tb in new[] {
                new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable),
                new TransformManyBlock<int, int>(i => TaskShim.Run(() => DataflowTestHelpers.ToEnumerable(i))) })
            {
                var values = new int[Messages];
                var targets = new ActionBlock<int>[Messages];
                for (int i = 0; i < Messages; i++)
                {
                    int slot = i;
                    targets[i] = new ActionBlock<int>(item => values[slot] = item);
                    tb.LinkTo(targets[i], new DataflowLinkOptions { MaxMessages = 1, Append = append });
                }

                tb.PostRange(0, Messages);
                tb.Complete();
                await tb.Completion;

                for (int i = 0; i < Messages; i++)
                {
                    Assert.Equal(
                        expected: append ? i : Messages - i - 1,
                        actual: values[i]);
                }
            }
        }

        // Outputs arrive in posting order; once drained, TryReceive/TryReceiveAll fail.
        [Fact]
        public async Task TestReceives()
        {
            for (int test = 0; test < 2; test++)
            {
                foreach (var tb in new[] {
                    new TransformManyBlock<int, int>(i => Enumerable.Repeat(i * 2, 1)),
                    new TransformManyBlock<int, int>(i => TaskShim.Run(() => Enumerable.Repeat(i * 2, 1))) })
                {
                    tb.PostRange(0, 5);

                    for (int i = 0; i < 5; i++)
                    {
                        Assert.Equal(expected: i * 2, actual: await tb.ReceiveAsync());
                    }

                    int item;
                    IList<int> items;
                    Assert.False(tb.TryReceive(out item));
                    Assert.False(tb.TryReceiveAll(out items));
                }
            }
        }

        // A block linked to itself keeps re-feeding incremented values until Iters.
        [Fact]
        public async Task TestCircularLinking()
        {
            const int Iters = 200;
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                var tcs = new TaskCompletionSource<bool>();
                Func<int, IEnumerable<int>> body = i => {
                    if (i >= Iters) tcs.SetResult(true);
                    return Enumerable.Repeat(i + 1, 1);
                };

                TransformManyBlock<int, int> tb = sync ?
                    new TransformManyBlock<int, int>(body) :
                    new TransformManyBlock<int, int>(i => TaskShim.Run(() => body(i)));

                using (tb.LinkTo(tb))
                {
                    tb.Post(0);
                    await tcs.Task;
                    tb.Complete();
                }
            }
        }

        // Concurrent producer/consumer over a matrix of scheduler/bounding/dop options;
        // output order must match input order regardless of configuration.
        [Fact]
        public async Task TestProducerConsumer()
        {
            foreach (TaskScheduler scheduler in new[] { TaskScheduler.Default, new ConcurrentExclusiveSchedulerPair().ConcurrentScheduler })
            foreach (int maxMessagesPerTask in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
            foreach (int boundedCapacity in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
            foreach (int dop in new[] { 1, 2 })
            foreach (int elementsPerItem in new[] { 1, 3, 5 })
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                const int Messages = 50;
                var options = new ExecutionDataflowBlockOptions
                {
                    BoundedCapacity = boundedCapacity,
                    MaxDegreeOfParallelism = dop,
                    MaxMessagesPerTask = maxMessagesPerTask,
                    TaskScheduler = scheduler
                };
                TransformManyBlock<int, int> tb = sync ?
                    new TransformManyBlock<int, int>(i => Enumerable.Repeat(i, elementsPerItem), options) :
                    new TransformManyBlock<int, int>(i => TaskShim.Run(() => Enumerable.Repeat(i, elementsPerItem)), options);

                await TaskShim.WhenAll(
                    TaskShim.Run(async delegate { // consumer
                        int i = 0;
                        int processed = 0;
                        while (await tb.OutputAvailableAsync())
                        {
                            Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
                            processed++;
                            if (processed % elementsPerItem == 0)
                            {
                                i++;
                            }
                        }
                    }),
                    TaskShim.Run(async delegate { // producer
                        for (int i = 0; i < Messages; i++)
                        {
                            await tb.SendAsync(i);
                        }
                        tb.Complete();
                    }));
            }
        }

        // Sends beyond BoundedCapacity are postponed; completing the block declines them.
        [Fact]
        public async Task TestMessagePostponement()
        {
            const int Excess = 10;
            foreach (int boundedCapacity in new[] { 1, 3 })
            {
                var options = new ExecutionDataflowBlockOptions { BoundedCapacity = boundedCapacity };
                foreach (var tb in new[] {
                    new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable, options),
                    new TransformManyBlock<int, int>(i => TaskShim.Run(() => DataflowTestHelpers.ToEnumerable(i)), options) })
                {
                    var sendAsync = new Task<bool>[boundedCapacity + Excess];
                    for (int i = 0; i < boundedCapacity + Excess; i++)
                    {
                        sendAsync[i] = tb.SendAsync(i);
                    }
                    tb.Complete();

                    // Sends within capacity were accepted synchronously...
                    for (int i = 0; i < boundedCapacity; i++)
                    {
                        Assert.True(sendAsync[i].IsCompleted);
                        Assert.True(sendAsync[i].Result);
                    }

                    // ...postponed sends are declined once the block completes.
                    for (int i = 0; i < Excess; i++)
                    {
                        Assert.False(await sendAsync[boundedCapacity + i]);
                    }
                }
            }
        }

        // One input may yield many outputs; completion waits for all of them.
        [Fact]
        public async Task TestMultipleYields()
        {
            const int Messages = 10;
            var t = new TransformManyBlock<int, int>(i => Enumerable.Range(0, Messages));
            t.Post(42);
            t.Complete();
            for (int i = 0; i < Messages; i++)
            {
                Assert.False(t.Completion.IsCompleted);
                Assert.Equal(expected: i, actual: await t.ReceiveAsync());
            }
            await t.Completion;
        }

        [Fact]
        public async Task TestReserveReleaseConsume()
        {
            var tb = new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable);
            tb.Post(1);
            await DataflowTestHelpers.TestReserveAndRelease(tb);

            tb = new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable);
            tb.Post(2);
            await DataflowTestHelpers.TestReserveAndConsume(tb);
        }

        // After cancellation or fault, both counts must drop back to zero.
        [Fact]
        public async Task TestCountZeroAtCompletion()
        {
            var cts = new CancellationTokenSource();
            var tb = new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable, new ExecutionDataflowBlockOptions() { CancellationToken = cts.Token });
            tb.Post(1);
            cts.Cancel();
            await Assert.ThrowsAnyAsync<OperationCanceledException>(() => tb.Completion);
            Assert.Equal(expected: 0, actual: tb.InputCount);
            Assert.Equal(expected: 0, actual: tb.OutputCount);

            cts = new CancellationTokenSource();
            tb = new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable);
            tb.Post(1);
            ((IDataflowBlock)tb).Fault(new InvalidOperationException());
            await Assert.ThrowsAnyAsync<InvalidOperationException>(() => tb.Completion);
            Assert.Equal(expected: 0, actual: tb.InputCount);
            Assert.Equal(expected: 0, actual: tb.OutputCount);
        }

        // Barriers pause the transform so InputCount can be observed mid-processing.
        [Fact]
        public void TestInputCount()
        {
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                Barrier barrier1 = new Barrier(2), barrier2 = new Barrier(2);
                Func<int, IEnumerable<int>> body = item => {
                    barrier1.SignalAndWait();
                    // will test InputCount here
                    barrier2.SignalAndWait();
                    return new[] { item };
                };

                TransformManyBlock<int, int> tb = sync ?
                    new TransformManyBlock<int, int>(body) :
                    new TransformManyBlock<int, int>(i => TaskShim.Run(() => body(i)));

                for (int iter = 0; iter < 2; iter++)
                {
                    tb.PostItems(1, 2);
                    for (int i = 1; i >= 0; i--)
                    {
                        barrier1.SignalAndWait();
                        Assert.Equal(expected: i, actual: tb.InputCount);
                        barrier2.SignalAndWait();
                    }
                }
            }
        }

        [Fact]
        //[OuterLoop] // spins waiting for a condition to be true, though it should happen very quickly
        public async Task TestCount()
        {
            var tb = new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable);
            Assert.Equal(expected: 0, actual: tb.InputCount);
            Assert.Equal(expected: 0, actual: tb.OutputCount);

            tb.PostRange(1, 11);
            await TaskShim.Run(() => SpinWait.SpinUntil(() => tb.OutputCount == 10));
            for (int i = 10; i > 0; i--)
            {
                int item;
                Assert.True(tb.TryReceive(out item));
                Assert.Equal(expected: 11 - i, actual: item);
                Assert.Equal(expected: i - 1, actual: tb.OutputCount);
            }
        }

        // Four chained x2 blocks => each input comes out multiplied by 16.
        [Fact]
        public async Task TestChainedSendReceive()
        {
            foreach (bool post in DataflowTestHelpers.BooleanValues)
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                Func<TransformManyBlock<int, int>> func = sync ?
                    (Func<TransformManyBlock<int, int>>)(() => new TransformManyBlock<int, int>(i => new[] { i * 2 })) :
                    (Func<TransformManyBlock<int, int>>)(() => new TransformManyBlock<int, int>(i => TaskShim.Run(() => Enumerable.Repeat(i * 2, 1))));
                var network = DataflowTestHelpers.Chain<TransformManyBlock<int, int>, int>(4, func);

                const int Iters = 10;
                for (int i = 0; i < Iters; i++)
                {
                    if (post)
                    {
                        network.Post(i);
                    }
                    else
                    {
                        await network.SendAsync(i);
                    }
                    Assert.Equal(expected: i * 16, actual: await network.ReceiveAsync());
                }
            }
        }

        // Same x16 chain, but all inputs are sent before any output is received.
        [Fact]
        public async Task TestSendAllThenReceive()
        {
            foreach (bool post in DataflowTestHelpers.BooleanValues)
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                Func<TransformManyBlock<int, int>> func = sync ?
                    (Func<TransformManyBlock<int, int>>)(() => new TransformManyBlock<int, int>(i => new[] { i * 2 })) :
                    (Func<TransformManyBlock<int, int>>)(() => new TransformManyBlock<int, int>(i => TaskShim.Run(() => Enumerable.Repeat(i * 2, 1))));
                var network = DataflowTestHelpers.Chain<TransformManyBlock<int, int>, int>(4, func);

                const int Iters = 10;
                if (post)
                {
                    network.PostRange(0, Iters);
                }
                else
                {
                    await TaskShim.WhenAll(from i in Enumerable.Range(0, Iters) select network.SendAsync(i));
                }

                for (int i = 0; i < Iters; i++)
                {
                    Assert.Equal(expected: i * 16, actual: await network.ReceiveAsync());
                }
            }
        }

        // A block born with a canceled token declines everything and completes canceled.
        [Fact]
        public async Task TestPrecanceled()
        {
            var bb = new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable,
                new ExecutionDataflowBlockOptions { CancellationToken = new CancellationToken(canceled: true) });

            int ignoredValue;
            IList<int> ignoredValues;

            IDisposable link = bb.LinkTo(DataflowBlock.NullTarget<int>());
            Assert.NotNull(link);
            link.Dispose();

            Assert.False(bb.Post(42));
            var t = bb.SendAsync(42);
            Assert.True(t.IsCompleted);
            Assert.False(t.Result);

            Assert.False(bb.TryReceiveAll(out ignoredValues));
            Assert.False(bb.TryReceive(out ignoredValue));

            Assert.NotNull(bb.Completion);
            await Assert.ThrowsAnyAsync<OperationCanceledException>(() => bb.Completion);
            bb.Complete(); // just make sure it doesn't throw
        }

        // Exceptions thrown synchronously, from the returned task, or from the
        // yielded enumerable all fault the block with that exception type.
        [Fact]
        public async Task TestExceptions()
        {
            var tb1 = new TransformManyBlock<int, int>((Func<int, IEnumerable<int>>)(i => { throw new InvalidCastException(); }));
            var tb2 = new TransformManyBlock<int, int>((Func<int, Task<IEnumerable<int>>>)(i => { throw new InvalidProgramException(); }));
            var tb3 = new TransformManyBlock<int, int>((Func<int, Task<IEnumerable<int>>>)(i => TaskShim.Run((Func<IEnumerable<int>>)(() => { throw new InvalidTimeZoneException(); }))));
            var tb4 = new TransformManyBlock<int, int>(i => ExceptionAfter(3));
            var tb5 = new TransformManyBlock<int, int>(i => TaskShim.Run(() => ExceptionAfter(3)));

            for (int i = 0; i < 3; i++)
            {
                tb1.Post(i);
                tb2.Post(i);
                tb3.Post(i);
                tb4.Post(i);
                tb5.Post(i);
            }

            await Assert.ThrowsAsync<InvalidCastException>(() => tb1.Completion);
            await Assert.ThrowsAsync<InvalidProgramException>(() => tb2.Completion);
            await Assert.ThrowsAsync<InvalidTimeZoneException>(() => tb3.Completion);
            await Assert.ThrowsAsync<FormatException>(() => tb4.Completion);
            await Assert.ThrowsAsync<FormatException>(() => tb5.Completion);

            Assert.All(new[] { tb1, tb2, tb3 }, tb => Assert.True(tb.InputCount == 0 && tb.OutputCount == 0));
        }

        // Iterator that yields 0..iterations-1 and then throws FormatException.
        private IEnumerable<int> ExceptionAfter(int iterations)
        {
            for (int i = 0; i < iterations; i++)
            {
                yield return i;
            }
            throw new FormatException();
        }

        // Fault(null) is rejected; a real fault or cancellation empties the block.
        [Fact]
        public async Task TestFaultingAndCancellation()
        {
            foreach (bool fault in DataflowTestHelpers.BooleanValues)
            {
                var cts = new CancellationTokenSource();
                var tb = new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable, new ExecutionDataflowBlockOptions { CancellationToken = cts.Token });
                tb.PostRange(0, 4);
                Assert.Equal(expected: 0, actual: await tb.ReceiveAsync());
                Assert.Equal(expected: 1, actual: await tb.ReceiveAsync());

                if (fault)
                {
                    Assert.Throws<ArgumentNullException>(() => ((IDataflowBlock)tb).Fault(null));
                    ((IDataflowBlock)tb).Fault(new InvalidCastException());
                    await Assert.ThrowsAsync<InvalidCastException>(() => tb.Completion);
                }
                else
                {
                    cts.Cancel();
                    await Assert.ThrowsAnyAsync<OperationCanceledException>(() => tb.Completion);
                }

                Assert.Equal(expected: 0, actual: tb.InputCount);
                Assert.Equal(expected: 0, actual: tb.OutputCount);
            }
        }

        // OperationCanceledException from the transform skips the item without
        // faulting the block.
        [Fact]
        public async Task TestCancellationExceptionsIgnored()
        {
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                Func<int, IEnumerable<int>> body = i => {
                    if ((i % 2) == 0) throw new OperationCanceledException();
                    return new[] { i };
                };

                TransformManyBlock<int, int> t = sync ?
                    new TransformManyBlock<int, int>(body) :
                    new TransformManyBlock<int, int>(async i => await TaskShim.Run(() => body(i)));
                t.PostRange(0, 2);
                t.Complete();
                for (int i = 0; i < 2; i++)
                {
                    if ((i % 2) != 0)
                    {
                        Assert.Equal(expected: i, actual: await t.ReceiveAsync());
                    }
                }

                await t.Completion;
            }
        }

        // A null Task returned by the async transform drops the item silently.
        [Fact]
        public async Task TestNullTasksIgnored()
        {
            foreach (int dop in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
            {
                var tb = new TransformManyBlock<int, int>(i => {
                    if ((i % 2) == 0) return null;
                    return TaskShim.Run(() => (IEnumerable<int>)new[] { i });
                }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop });

                const int Iters = 100;
                tb.PostRange(0, Iters);
                tb.Complete();
                for (int i = 0; i < Iters; i++)
                {
                    if ((i % 2) != 0)
                    {
                        Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
                    }
                }
                await tb.Completion;
            }
        }

        // Transforms may yield 0, 1, or 2 elements per input; totals still add up
        // (1 + 0 + 2 elements per group of Modes inputs).
        [Fact]
        public async Task TestYieldingNoResults()
        {
            foreach (int dop in new[] { 1, Environment.ProcessorCount })
            foreach (int boundedCapacity in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
            {
                const int Modes = 3, Iters = 100;
                var tb = new TransformManyBlock<int, int>(i => {
                    switch (i % Modes)
                    {
                        default:
                        case 0:
                            return new List<int> { i };
                        case 1:
                            return new int[0];
                        case 2:
                            return new Collection<int> { i, i + 1 };
                    }
                }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, BoundedCapacity = boundedCapacity });

                var source = new BufferBlock<int>();
                source.PostRange(0, Modes * Iters);
                source.Complete();
                source.LinkTo(tb, new DataflowLinkOptions { PropagateCompletion = true });

                int received = 0;
                while (await tb.OutputAvailableAsync())
                {
                    await tb.ReceiveAsync();
                    received++;
                }
                Assert.Equal(expected: Modes * Iters, actual: received);
            }
        }

        // At dop == 1 the same IList instance may be mutated and returned repeatedly;
        // at higher dop a fresh collection is returned per call. A verifier block in a
        // loop with the transform checks the monotonically increasing sequence.
        [Fact]
        public async Task TestArrayListReusePossibleForDop1()
        {
            foreach (int boundedCapacity in new[] { DataflowBlockOptions.Unbounded, 2 })
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                foreach (int dop in new[] { 1, Environment.ProcessorCount })
                {
                    var dbo = new ExecutionDataflowBlockOptions { BoundedCapacity = boundedCapacity, MaxDegreeOfParallelism = dop };
                    foreach (IList<int> list in new IList<int>[] { new int[1], new List<int> { 0 }, new Collection<int> { 0 } })
                    {
                        int nextExpectedValue = 1;

                        TransformManyBlock<int, int> transform = null;
                        Func<int, IEnumerable<int>> body = i => {
                            if (i == 100) // we're done iterating
                            {
                                transform.Complete();
                                return (IEnumerable<int>)null;
                            }
                            else if (dop == 1)
                            {
                                list[0] = i + 1; // reuse the list over and over, but only at dop == 1
                                return (IEnumerable<int>)list;
                            }
                            else if (list is int[])
                            {
                                return new int[1] { i + 1 };
                            }
                            else if (list is List<int>)
                            {
                                return new List<int>() { i + 1 };
                            }
                            else
                            {
                                return new Collection<int>() { i + 1 };
                            }
                        };

                        transform = sync ?
                            new TransformManyBlock<int, int>(body, dbo) :
                            new TransformManyBlock<int, int>(i => TaskShim.Run(() => body(i)), dbo);

                        TransformBlock<int, int> verifier = new TransformBlock<int, int>(i => {
                            Assert.Equal(expected: nextExpectedValue, actual: i);
                            nextExpectedValue++;
                            return i;
                        });

                        transform.LinkTo(verifier);
                        verifier.LinkTo(transform);

                        await transform.SendAsync(0);
                        await transform.Completion;
                    }
                }
            }
        }

        // Output ordering must be preserved even with parallelism > 1.
        [Fact]
        public async Task TestOrdering()
        {
            const int iters = 9999;
            foreach (int mmpt in new[] { DataflowBlockOptions.Unbounded, 1 })
            foreach (int dop in new[] { 1, 2, DataflowBlockOptions.Unbounded })
            {
                var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, MaxMessagesPerTask = mmpt };
                var tb = new TransformManyBlock<int, int>(i => new[] { i, i + 1, i + 2 }, options);
                for (int i = 0; i < iters; i += 3)
                {
                    Assert.True(tb.Post(i));
                }
                for (int i = 0; i < iters; i++)
                {
                    Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
                }
                tb.Complete();
                await tb.Completion;
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Batch.Protocol.Models
{
    using System.Linq;

    /// <summary>
    /// A job schedule that allows recurring jobs by specifying when to run
    /// jobs and a specification used to create each job.
    /// </summary>
    public partial class CloudJobSchedule
    {
        /// <summary>
        /// Initializes a new instance of the CloudJobSchedule class.
        /// </summary>
        public CloudJobSchedule() { }

        /// <summary>
        /// Initializes a new instance of the CloudJobSchedule class.
        /// </summary>
        /// <param name="id">A string that uniquely identifies the schedule
        /// within the account.</param>
        /// <param name="displayName">The display name for the
        /// schedule.</param>
        /// <param name="url">The URL of the job schedule.</param>
        /// <param name="eTag">The ETag of the job schedule.</param>
        /// <param name="lastModified">The last modified time of the job
        /// schedule.</param>
        /// <param name="creationTime">The creation time of the job
        /// schedule.</param>
        /// <param name="state">The current state of the job schedule.</param>
        /// <param name="stateTransitionTime">The time at which the job
        /// schedule entered the current state.</param>
        /// <param name="previousState">The previous state of the job
        /// schedule.</param>
        /// <param name="previousStateTransitionTime">The time at which the job
        /// schedule entered its previous state.</param>
        /// <param name="schedule">The schedule according to which jobs will be
        /// created.</param>
        /// <param name="jobSpecification">The details of the jobs to be
        /// created on this schedule.</param>
        /// <param name="executionInfo">Information about jobs that have been
        /// and will be run under this schedule.</param>
        /// <param name="metadata">A list of name-value pairs associated with
        /// the schedule as metadata.</param>
        /// <param name="stats">The lifetime resource usage statistics for the
        /// job schedule.</param>
        public CloudJobSchedule(string id = default(string), string displayName = default(string), string url = default(string), string eTag = default(string), System.DateTime? lastModified = default(System.DateTime?), System.DateTime? creationTime = default(System.DateTime?), JobScheduleState? state = default(JobScheduleState?), System.DateTime? stateTransitionTime = default(System.DateTime?), JobScheduleState? previousState = default(JobScheduleState?), System.DateTime? previousStateTransitionTime = default(System.DateTime?), Schedule schedule = default(Schedule), JobSpecification jobSpecification = default(JobSpecification), JobScheduleExecutionInformation executionInfo = default(JobScheduleExecutionInformation), System.Collections.Generic.IList<MetadataItem> metadata = default(System.Collections.Generic.IList<MetadataItem>), JobScheduleStatistics stats = default(JobScheduleStatistics))
        {
            Id = id;
            DisplayName = displayName;
            Url = url;
            ETag = eTag;
            LastModified = lastModified;
            CreationTime = creationTime;
            State = state;
            StateTransitionTime = stateTransitionTime;
            PreviousState = previousState;
            PreviousStateTransitionTime = previousStateTransitionTime;
            Schedule = schedule;
            JobSpecification = jobSpecification;
            ExecutionInfo = executionInfo;
            Metadata = metadata;
            Stats = stats;
        }

        /// <summary>
        /// Gets or sets a string that uniquely identifies the schedule within
        /// the account.
        /// </summary>
        /// <remarks>
        /// It is common to use a GUID for the id.
        /// </remarks>
        [Newtonsoft.Json.JsonProperty(PropertyName = "id")]
        public string Id { get; set; }

        /// <summary>
        /// Gets or sets the display name for the schedule.
        /// </summary>
        [Newtonsoft.Json.JsonProperty(PropertyName = "displayName")]
        public string DisplayName { get; set; }

        /// <summary>
        /// Gets or sets the URL of the job schedule.
        /// </summary>
        [Newtonsoft.Json.JsonProperty(PropertyName = "url")]
        public string Url { get; set; }

        /// <summary>
        /// Gets or sets the ETag of the job schedule.
        /// </summary>
        /// <remarks>
        /// This is an opaque string. You can use it to detect whether the job
        /// schedule has changed between requests. In particular, you can
        /// pass the ETag with an Update Job Schedule request to specify that
        /// your changes should take effect only if nobody else has modified
        /// the schedule in the meantime.
        /// </remarks>
        [Newtonsoft.Json.JsonProperty(PropertyName = "eTag")]
        public string ETag { get; set; }

        /// <summary>
        /// Gets or sets the last modified time of the job schedule.
        /// </summary>
        /// <remarks>
        /// This is the last time at which the schedule level data, such as the
        /// job specification or recurrence information, changed. It does not
        /// factor in job-level changes such as new jobs being created or jobs
        /// changing state.
        /// </remarks>
        [Newtonsoft.Json.JsonProperty(PropertyName = "lastModified")]
        public System.DateTime? LastModified { get; set; }

        /// <summary>
        /// Gets or sets the creation time of the job schedule.
        /// </summary>
        [Newtonsoft.Json.JsonProperty(PropertyName = "creationTime")]
        public System.DateTime? CreationTime { get; set; }

        /// <summary>
        /// Gets or sets the current state of the job schedule.
        /// </summary>
        /// <remarks>
        /// Possible values include: 'active', 'completed', 'disabled',
        /// 'terminating', 'deleting'
        /// </remarks>
        [Newtonsoft.Json.JsonProperty(PropertyName = "state")]
        public JobScheduleState? State { get; set; }

        /// <summary>
        /// Gets or sets the time at which the job schedule entered the current
        /// state.
        /// </summary>
        [Newtonsoft.Json.JsonProperty(PropertyName = "stateTransitionTime")]
        public System.DateTime? StateTransitionTime { get; set; }

        /// <summary>
        /// Gets or sets the previous state of the job schedule.
        /// </summary>
        /// <remarks>
        /// This property is not present if the job schedule is in its initial
        /// active state. Possible values include: 'active', 'completed',
        /// 'disabled', 'terminating', 'deleting'
        /// </remarks>
        [Newtonsoft.Json.JsonProperty(PropertyName = "previousState")]
        public JobScheduleState? PreviousState { get; set; }

        /// <summary>
        /// Gets or sets the time at which the job schedule entered its
        /// previous state.
        /// </summary>
        /// <remarks>
        /// This property is not present if the job schedule is in its initial
        /// active state.
        /// </remarks>
        [Newtonsoft.Json.JsonProperty(PropertyName = "previousStateTransitionTime")]
        public System.DateTime? PreviousStateTransitionTime { get; set; }

        /// <summary>
        /// Gets or sets the schedule according to which jobs will be created.
        /// </summary>
        [Newtonsoft.Json.JsonProperty(PropertyName = "schedule")]
        public Schedule Schedule { get; set; }

        /// <summary>
        /// Gets or sets the details of the jobs to be created on this
        /// schedule.
        /// </summary>
        [Newtonsoft.Json.JsonProperty(PropertyName = "jobSpecification")]
        public JobSpecification JobSpecification { get; set; }

        /// <summary>
        /// Gets or sets information about jobs that have been and will be run
        /// under this schedule.
        /// </summary>
        [Newtonsoft.Json.JsonProperty(PropertyName = "executionInfo")]
        public JobScheduleExecutionInformation ExecutionInfo { get; set; }

        /// <summary>
        /// Gets or sets a list of name-value pairs associated with the
        /// schedule as metadata.
        /// </summary>
        /// <remarks>
        /// The Batch service does not assign any meaning to metadata; it is
        /// solely for the use of user code.
        /// </remarks>
        [Newtonsoft.Json.JsonProperty(PropertyName = "metadata")]
        public System.Collections.Generic.IList<MetadataItem> Metadata { get; set; }

        /// <summary>
        /// Gets or sets the lifetime resource usage statistics for the job
        /// schedule.
        /// </summary>
        [Newtonsoft.Json.JsonProperty(PropertyName = "stats")]
        public JobScheduleStatistics Stats { get; set; }

        /// <summary>
        /// Validate the object.
        /// </summary>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown if validation fails
        /// </exception>
        public virtual void Validate()
        {
            // Recursively validate nested model objects that define validation.
            if (this.JobSpecification != null)
            {
                this.JobSpecification.Validate();
            }
            if (this.Metadata != null)
            {
                foreach (var element in this.Metadata)
                {
                    if (element != null)
                    {
                        element.Validate();
                    }
                }
            }
            if (this.Stats != null)
            {
                this.Stats.Validate();
            }
        }
    }
}
using System; using System.Collections.Generic; using System.Data; using System.Data.SqlClient; using Csla; using Csla.Data; using ParentLoadROSoftDelete.DataAccess; using ParentLoadROSoftDelete.DataAccess.ERCLevel; namespace ParentLoadROSoftDelete.DataAccess.Sql.ERCLevel { /// <summary> /// DAL SQL Server implementation of <see cref="IF01_ContinentCollDal"/> /// </summary> public partial class F01_ContinentCollDal : IF01_ContinentCollDal { private List<F03_Continent_ChildDto> _f03_Continent_Child = new List<F03_Continent_ChildDto>(); private List<F03_Continent_ReChildDto> _f03_Continent_ReChild = new List<F03_Continent_ReChildDto>(); private List<F04_SubContinentDto> _f03_SubContinentColl = new List<F04_SubContinentDto>(); private List<F05_SubContinent_ChildDto> _f05_SubContinent_Child = new List<F05_SubContinent_ChildDto>(); private List<F05_SubContinent_ReChildDto> _f05_SubContinent_ReChild = new List<F05_SubContinent_ReChildDto>(); private List<F06_CountryDto> _f05_CountryColl = new List<F06_CountryDto>(); private List<F07_Country_ChildDto> _f07_Country_Child = new List<F07_Country_ChildDto>(); private List<F07_Country_ReChildDto> _f07_Country_ReChild = new List<F07_Country_ReChildDto>(); private List<F08_RegionDto> _f07_RegionColl = new List<F08_RegionDto>(); private List<F09_Region_ChildDto> _f09_Region_Child = new List<F09_Region_ChildDto>(); private List<F09_Region_ReChildDto> _f09_Region_ReChild = new List<F09_Region_ReChildDto>(); private List<F10_CityDto> _f09_CityColl = new List<F10_CityDto>(); private List<F11_City_ChildDto> _f11_City_Child = new List<F11_City_ChildDto>(); private List<F11_City_ReChildDto> _f11_City_ReChild = new List<F11_City_ReChildDto>(); private List<F12_CityRoadDto> _f11_CityRoadColl = new List<F12_CityRoadDto>(); /// <summary> /// Gets the F03 Continent Single Object. 
/// </summary> /// <value>A list of <see cref="F03_Continent_ChildDto"/>.</value> public List<F03_Continent_ChildDto> F03_Continent_Child { get { return _f03_Continent_Child; } } /// <summary> /// Gets the F03 Continent ASingle Object. /// </summary> /// <value>A list of <see cref="F03_Continent_ReChildDto"/>.</value> public List<F03_Continent_ReChildDto> F03_Continent_ReChild { get { return _f03_Continent_ReChild; } } /// <summary> /// Gets the F03 SubContinent Objects. /// </summary> /// <value>A list of <see cref="F04_SubContinentDto"/>.</value> public List<F04_SubContinentDto> F03_SubContinentColl { get { return _f03_SubContinentColl; } } /// <summary> /// Gets the F05 SubContinent Single Object. /// </summary> /// <value>A list of <see cref="F05_SubContinent_ChildDto"/>.</value> public List<F05_SubContinent_ChildDto> F05_SubContinent_Child { get { return _f05_SubContinent_Child; } } /// <summary> /// Gets the F05 SubContinent ASingle Object. /// </summary> /// <value>A list of <see cref="F05_SubContinent_ReChildDto"/>.</value> public List<F05_SubContinent_ReChildDto> F05_SubContinent_ReChild { get { return _f05_SubContinent_ReChild; } } /// <summary> /// Gets the F05 Country Objects. /// </summary> /// <value>A list of <see cref="F06_CountryDto"/>.</value> public List<F06_CountryDto> F05_CountryColl { get { return _f05_CountryColl; } } /// <summary> /// Gets the F07 Country Single Object. /// </summary> /// <value>A list of <see cref="F07_Country_ChildDto"/>.</value> public List<F07_Country_ChildDto> F07_Country_Child { get { return _f07_Country_Child; } } /// <summary> /// Gets the F07 Country ASingle Object. /// </summary> /// <value>A list of <see cref="F07_Country_ReChildDto"/>.</value> public List<F07_Country_ReChildDto> F07_Country_ReChild { get { return _f07_Country_ReChild; } } /// <summary> /// Gets the F07 Region Objects. 
/// </summary> /// <value>A list of <see cref="F08_RegionDto"/>.</value> public List<F08_RegionDto> F07_RegionColl { get { return _f07_RegionColl; } } /// <summary> /// Gets the F09 Region Single Object. /// </summary> /// <value>A list of <see cref="F09_Region_ChildDto"/>.</value> public List<F09_Region_ChildDto> F09_Region_Child { get { return _f09_Region_Child; } } /// <summary> /// Gets the F09 Region ASingle Object. /// </summary> /// <value>A list of <see cref="F09_Region_ReChildDto"/>.</value> public List<F09_Region_ReChildDto> F09_Region_ReChild { get { return _f09_Region_ReChild; } } /// <summary> /// Gets the F09 City Objects. /// </summary> /// <value>A list of <see cref="F10_CityDto"/>.</value> public List<F10_CityDto> F09_CityColl { get { return _f09_CityColl; } } /// <summary> /// Gets the F11 City Single Object. /// </summary> /// <value>A list of <see cref="F11_City_ChildDto"/>.</value> public List<F11_City_ChildDto> F11_City_Child { get { return _f11_City_Child; } } /// <summary> /// Gets the F11 City ASingle Object. /// </summary> /// <value>A list of <see cref="F11_City_ReChildDto"/>.</value> public List<F11_City_ReChildDto> F11_City_ReChild { get { return _f11_City_ReChild; } } /// <summary> /// Gets the F11 CityRoad Objects. /// </summary> /// <value>A list of <see cref="F12_CityRoadDto"/>.</value> public List<F12_CityRoadDto> F11_CityRoadColl { get { return _f11_CityRoadColl; } } /// <summary> /// Loads a F01_ContinentColl collection from the database. 
/// </summary>
/// <returns>A list of <see cref="F02_ContinentDto"/>.</returns>
public List<F02_ContinentDto> Fetch()
{
    // The stored procedure returns one result set for the root collection
    // followed by one result set per child level ("deep load").
    using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
    {
        using (var cmd = new SqlCommand("GetF01_ContinentColl", ctx.Connection))
        {
            cmd.CommandType = CommandType.StoredProcedure;
            var dr = cmd.ExecuteReader();
            return LoadCollection(dr);
        }
    }
}

/// <summary>
/// Materializes the root continent rows and, when any exist, the child result sets.
/// </summary>
/// <param name="data">The open data reader positioned on the first result set.</param>
/// <returns>The root collection of <see cref="F02_ContinentDto"/>.</returns>
private List<F02_ContinentDto> LoadCollection(IDataReader data)
{
    var f01_ContinentColl = new List<F02_ContinentDto>();
    // SafeDataReader disposes the underlying reader when done.
    using (var dr = new SafeDataReader(data))
    {
        while (dr.Read())
        {
            f01_ContinentColl.Add(Fetch(dr));
        }
        // Child result sets are only consumed when at least one root row exists.
        if (f01_ContinentColl.Count > 0)
            FetchChildren(dr);
    }
    return f01_ContinentColl;
}

/// <summary>
/// Maps one root result-set row to a <see cref="F02_ContinentDto"/>.
/// </summary>
private F02_ContinentDto Fetch(SafeDataReader dr)
{
    var f02_Continent = new F02_ContinentDto();
    // Value properties
    f02_Continent.Continent_ID = dr.GetInt32("Continent_ID");
    f02_Continent.Continent_Name = dr.GetString("Continent_Name");
    return f02_Continent;
}

/// <summary>
/// Consumes the remaining 15 result sets in stored-procedure order and fills
/// the corresponding child collections. The order here must match the
/// SELECT order in GetF01_ContinentColl exactly.
/// </summary>
private void FetchChildren(SafeDataReader dr)
{
    dr.NextResult();
    while (dr.Read())
    {
        _f03_Continent_Child.Add(FetchF03_Continent_Child(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f03_Continent_ReChild.Add(FetchF03_Continent_ReChild(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f03_SubContinentColl.Add(FetchF04_SubContinent(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f05_SubContinent_Child.Add(FetchF05_SubContinent_Child(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f05_SubContinent_ReChild.Add(FetchF05_SubContinent_ReChild(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f05_CountryColl.Add(FetchF06_Country(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f07_Country_Child.Add(FetchF07_Country_Child(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f07_Country_ReChild.Add(FetchF07_Country_ReChild(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f07_RegionColl.Add(FetchF08_Region(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f09_Region_Child.Add(FetchF09_Region_Child(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f09_Region_ReChild.Add(FetchF09_Region_ReChild(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f09_CityColl.Add(FetchF10_City(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f11_City_Child.Add(FetchF11_City_Child(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f11_City_ReChild.Add(FetchF11_City_ReChild(dr));
    }
    dr.NextResult();
    while (dr.Read())
    {
        _f11_CityRoadColl.Add(FetchF12_CityRoad(dr));
    }
}

/// <summary>
/// Maps one row to a <see cref="F03_Continent_ChildDto"/>.
/// NOTE(review): the parent key is read from aliased column "Continent_ID1" —
/// confirm against the stored procedure's column aliases.
/// </summary>
private F03_Continent_ChildDto FetchF03_Continent_Child(SafeDataReader dr)
{
    var f03_Continent_Child = new F03_Continent_ChildDto();
    // Value properties
    f03_Continent_Child.Continent_Child_Name = dr.GetString("Continent_Child_Name");
    // parent properties
    f03_Continent_Child.Parent_Continent_ID = dr.GetInt32("Continent_ID1");
    return f03_Continent_Child;
}

/// <summary>
/// Maps one row to a <see cref="F03_Continent_ReChildDto"/> (parent key aliased as "Continent_ID2").
/// </summary>
private F03_Continent_ReChildDto FetchF03_Continent_ReChild(SafeDataReader dr)
{
    var f03_Continent_ReChild = new F03_Continent_ReChildDto();
    // Value properties
    f03_Continent_ReChild.Continent_Child_Name = dr.GetString("Continent_Child_Name");
    // parent properties
    f03_Continent_ReChild.Parent_Continent_ID = dr.GetInt32("Continent_ID2");
    return f03_Continent_ReChild;
}

/// <summary>
/// Maps one row to a <see cref="F04_SubContinentDto"/>.
/// </summary>
private F04_SubContinentDto FetchF04_SubContinent(SafeDataReader dr)
{
    var f04_SubContinent = new F04_SubContinentDto();
    // Value properties
    f04_SubContinent.SubContinent_ID = dr.GetInt32("SubContinent_ID");
    f04_SubContinent.SubContinent_Name = dr.GetString("SubContinent_Name");
    // parent properties
    f04_SubContinent.Parent_Continent_ID = dr.GetInt32("Parent_Continent_ID");
    return f04_SubContinent;
}

/// <summary>
/// Maps one row to a <see cref="F05_SubContinent_ChildDto"/> (parent key aliased as "SubContinent_ID1").
/// </summary>
private F05_SubContinent_ChildDto FetchF05_SubContinent_Child(SafeDataReader dr)
{
    var f05_SubContinent_Child = new F05_SubContinent_ChildDto();
    // Value properties
    f05_SubContinent_Child.SubContinent_Child_Name = dr.GetString("SubContinent_Child_Name");
    // parent properties
    f05_SubContinent_Child.Parent_SubContinent_ID = dr.GetInt32("SubContinent_ID1");
    return f05_SubContinent_Child;
}

/// <summary>
/// Maps one row to a <see cref="F05_SubContinent_ReChildDto"/> (parent key aliased as "SubContinent_ID2").
/// </summary>
private F05_SubContinent_ReChildDto FetchF05_SubContinent_ReChild(SafeDataReader dr)
{
    var f05_SubContinent_ReChild = new F05_SubContinent_ReChildDto();
    // Value properties
    f05_SubContinent_ReChild.SubContinent_Child_Name = dr.GetString("SubContinent_Child_Name");
    // parent properties
    f05_SubContinent_ReChild.Parent_SubContinent_ID = dr.GetInt32("SubContinent_ID2");
    return f05_SubContinent_ReChild;
}

/// <summary>
/// Maps one row to a <see cref="F06_CountryDto"/>.
/// </summary>
private F06_CountryDto FetchF06_Country(SafeDataReader dr)
{
    var f06_Country = new F06_CountryDto();
    // Value properties
    f06_Country.Country_ID = dr.GetInt32("Country_ID");
    f06_Country.Country_Name = dr.GetString("Country_Name");
    // parent properties
    f06_Country.Parent_SubContinent_ID = dr.GetInt32("Parent_SubContinent_ID");
    return f06_Country;
}

/// <summary>
/// Maps one row to a <see cref="F07_Country_ChildDto"/> (parent key aliased as "Country_ID1").
/// </summary>
private F07_Country_ChildDto FetchF07_Country_Child(SafeDataReader dr)
{
    var f07_Country_Child = new F07_Country_ChildDto();
    // Value properties
    f07_Country_Child.Country_Child_Name = dr.GetString("Country_Child_Name");
    // parent properties
    f07_Country_Child.Parent_Country_ID = dr.GetInt32("Country_ID1");
    return f07_Country_Child;
}

/// <summary>
/// Maps one row to a <see cref="F07_Country_ReChildDto"/> (parent key aliased as "Country_ID2").
/// </summary>
private F07_Country_ReChildDto FetchF07_Country_ReChild(SafeDataReader dr)
{
    var f07_Country_ReChild = new F07_Country_ReChildDto();
    // Value properties
    f07_Country_ReChild.Country_Child_Name = dr.GetString("Country_Child_Name");
    // parent properties
    f07_Country_ReChild.Parent_Country_ID = dr.GetInt32("Country_ID2");
    return f07_Country_ReChild;
}

/// <summary>
/// Maps one row to a <see cref="F08_RegionDto"/>.
/// </summary>
private F08_RegionDto FetchF08_Region(SafeDataReader dr)
{
    var f08_Region = new F08_RegionDto();
    // Value properties
    f08_Region.Region_ID = dr.GetInt32("Region_ID");
    f08_Region.Region_Name = dr.GetString("Region_Name");
    // parent properties
    f08_Region.Parent_Country_ID = dr.GetInt32("Parent_Country_ID");
    return f08_Region;
}

/// <summary>
/// Maps one row to a <see cref="F09_Region_ChildDto"/> (parent key aliased as "Region_ID1").
/// </summary>
private F09_Region_ChildDto FetchF09_Region_Child(SafeDataReader dr)
{
    var f09_Region_Child = new F09_Region_ChildDto();
    // Value properties
    f09_Region_Child.Region_Child_Name = dr.GetString("Region_Child_Name");
    // parent properties
    f09_Region_Child.Parent_Region_ID = dr.GetInt32("Region_ID1");
    return f09_Region_Child;
}

/// <summary>
/// Maps one row to a <see cref="F09_Region_ReChildDto"/> (parent key aliased as "Region_ID2").
/// </summary>
private F09_Region_ReChildDto FetchF09_Region_ReChild(SafeDataReader dr)
{
    var f09_Region_ReChild = new F09_Region_ReChildDto();
    // Value properties
    f09_Region_ReChild.Region_Child_Name = dr.GetString("Region_Child_Name");
    // parent properties
    f09_Region_ReChild.Parent_Region_ID = dr.GetInt32("Region_ID2");
    return f09_Region_ReChild;
}

/// <summary>
/// Maps one row to a <see cref="F10_CityDto"/>.
/// </summary>
private F10_CityDto FetchF10_City(SafeDataReader dr)
{
    var f10_City = new F10_CityDto();
    // Value properties
    f10_City.City_ID = dr.GetInt32("City_ID");
    f10_City.City_Name = dr.GetString("City_Name");
    // parent properties
    f10_City.Parent_Region_ID = dr.GetInt32("Parent_Region_ID");
    return f10_City;
}

/// <summary>
/// Maps one row to a <see cref="F11_City_ChildDto"/> (parent key aliased as "City_ID1").
/// </summary>
private F11_City_ChildDto FetchF11_City_Child(SafeDataReader dr)
{
    var f11_City_Child = new F11_City_ChildDto();
    // Value properties
    f11_City_Child.City_Child_Name = dr.GetString("City_Child_Name");
    // parent properties
    f11_City_Child.Parent_City_ID = dr.GetInt32("City_ID1");
    return f11_City_Child;
}

/// <summary>
/// Maps one row to a <see cref="F11_City_ReChildDto"/> (parent key aliased as "City_ID2").
/// </summary>
private F11_City_ReChildDto FetchF11_City_ReChild(SafeDataReader dr)
{
    var f11_City_ReChild = new F11_City_ReChildDto();
    // Value properties
    f11_City_ReChild.City_Child_Name = dr.GetString("City_Child_Name");
    // parent properties
    f11_City_ReChild.Parent_City_ID = dr.GetInt32("City_ID2");
    return f11_City_ReChild;
}

/// <summary>
/// Maps one row to a <see cref="F12_CityRoadDto"/>.
/// </summary>
private F12_CityRoadDto FetchF12_CityRoad(SafeDataReader dr)
{
    var f12_CityRoad = new F12_CityRoadDto();
    // Value properties
    f12_CityRoad.CityRoad_ID = dr.GetInt32("CityRoad_ID");
    f12_CityRoad.CityRoad_Name = dr.GetString("CityRoad_Name");
    // parent properties
    f12_CityRoad.Parent_City_ID = dr.GetInt32("Parent_City_ID");
    return f12_CityRoad;
}
}
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.ComponentModel;
using System.IO;

namespace Tomato.Hardware
{
    /// <summary>
    /// Emulates the 3.5" floppy drive (M35FD): 512-word sectors, 18 sectors
    /// per track, transfers of <see cref="wordsPerTick"/> words per tick plus
    /// a simulated seek delay proportional to the track distance.
    /// </summary>
    public class M35FD : Device
    {
        public M35FD()
        {
            DeviceState = M35FDStateCode.STATE_NO_MEDIA;
            LastError = M35FDErrorCode.ERROR_NONE;
        }

        /// <summary>Raw disk contents, or null when no media is inserted.</summary>
        public ushort[] Disk { get; set; }

        [Category("Disk Information")]
        public bool Writable { get; set; }

        /// <summary>
        /// Last error code. Setting it fires the interrupt when one is armed,
        /// per the device's "interrupt on state/error change" contract.
        /// </summary>
        [Category("Device Status")]
        public M35FDErrorCode LastError
        {
            get { return lastError; }
            set
            {
                lastError = value;
                if (InterruptMessage != 0)
                    AttachedCPU.FireInterrupt(InterruptMessage);
            }
        }
        private M35FDErrorCode lastError;

        [Category("Device Status")]
        public ushort InterruptMessage { get; set; }

        /// <summary>Current drive state; setting it also fires the armed interrupt.</summary>
        [Category("Device Status")]
        public M35FDStateCode DeviceState
        {
            get { return deviceState; }
            set
            {
                deviceState = value;
                if (InterruptMessage != 0)
                    AttachedCPU.FireInterrupt(InterruptMessage);
            }
        }
        private M35FDStateCode deviceState;

        [Category("Device Information")]
        [TypeConverter(typeof(HexTypeEditor))]
        public override uint DeviceID
        {
            get { return 0x4fd524c5; }
        }

        [Category("Device Information")]
        [TypeConverter(typeof(HexTypeEditor))]
        public override uint ManufacturerID
        {
            get { return 0x1eb37e91; }
        }

        [Category("Device Information")]
        [TypeConverter(typeof(HexTypeEditor))]
        public override ushort Version
        {
            get { return 0x000b; }
        }

        [Browsable(false)]
        public override string FriendlyName
        {
            get { return "3.5\" Floppy Drive (M35FD)"; }
        }

        /// <summary>
        /// Handles the HWI protocol: 0 = poll, 1 = set interrupt,
        /// 2 = read sector X into memory at Y, 3 = write memory at Y to sector X.
        /// B is set to 1 on success, 0 on failure.
        /// </summary>
        public override int HandleInterrupt()
        {
            switch (AttachedCPU.A)
            {
                case 0: // Poll device
                    AttachedCPU.B = (ushort)DeviceState;
                    AttachedCPU.C = (ushort)LastError;
                    break;
                case 1: // Set interrupt
                    InterruptMessage = AttachedCPU.X;
                    break;
                case 2: // Read sector
                    if (DeviceState == M35FDStateCode.STATE_NO_MEDIA)
                    {
                        LastError = M35FDErrorCode.ERROR_NO_MEDIA;
                        AttachedCPU.B = 0;
                        break;
                    }
                    if (isReading || isWriting)
                    {
                        LastError = M35FDErrorCode.ERROR_BUSY;
                        AttachedCPU.B = 0;
                        break;
                    }
                    // FIX: X is a *sector* index, so the track it lives on is
                    // X / sectorsPerTrack. The original divided by wordsPerTrack,
                    // which made targetTrack always 0 and disabled seek delays.
                    targetTrack = (uint)(AttachedCPU.X / sectorsPerTrack);
                    seekTicks = (int)(tracksPerTick * Math.Abs(targetTrack - currentTrack));
                    fromAddress = (uint)AttachedCPU.X * wordsPerSector;
                    toAddress = AttachedCPU.Y;
                    AttachedCPU.B = 1;
                    wordsWritten = 0;
                    isReading = true;
                    LastError = M35FDErrorCode.ERROR_NONE;
                    DeviceState = M35FDStateCode.STATE_BUSY;
                    break;
                case 3: // Write sector
                    if (DeviceState == M35FDStateCode.STATE_NO_MEDIA)
                    {
                        LastError = M35FDErrorCode.ERROR_NO_MEDIA;
                        AttachedCPU.B = 0;
                        break;
                    }
                    if (isReading || isWriting)
                    {
                        LastError = M35FDErrorCode.ERROR_BUSY;
                        AttachedCPU.B = 0;
                        break;
                    }
                    if (!Writable)
                    {
                        LastError = M35FDErrorCode.ERROR_PROTECTED;
                        AttachedCPU.B = 0;
                        break;
                    }
                    targetTrack = (uint)(AttachedCPU.X / sectorsPerTrack); // see FIX above
                    seekTicks = (int)(tracksPerTick * Math.Abs(targetTrack - currentTrack));
                    toAddress = (uint)AttachedCPU.X * wordsPerSector;
                    fromAddress = AttachedCPU.Y;
                    AttachedCPU.B = 1;
                    wordsWritten = 0;
                    isWriting = true;
                    LastError = M35FDErrorCode.ERROR_NONE;
                    DeviceState = M35FDStateCode.STATE_BUSY;
                    break;
            }
            return 0;
        }

        private bool isReading = false;
        private bool isWriting = false;
        private uint fromAddress, toAddress, currentTrack, targetTrack;
        private int seekTicks, wordsWritten;
        private const int wordsPerTick = 512;
        private const int wordsPerSector = 512, wordsPerTrack = 512 * 18;
        private const int sectorsPerTrack = wordsPerTrack / wordsPerSector; // 18
        private const float tracksPerTick = 0.144f;

        /// <summary>
        /// Advances an in-flight transfer by one tick: first burns off the seek
        /// delay, then copies up to <see cref="wordsPerTick"/> words per tick.
        /// </summary>
        public override void Tick()
        {
            if (isReading)
            {
                // Handle seeking
                if (seekTicks != 0)
                    seekTicks--;
                else
                {
                    currentTrack = targetTrack;
                    int wordsToWrite = wordsPerTick;
                    if (wordsToWrite + wordsWritten > wordsPerSector)
                        wordsToWrite = wordsPerSector - wordsWritten;
                    if ((wordsToWrite + fromAddress > Disk.Length) ||
                        (wordsToWrite + toAddress > AttachedCPU.Memory.Length))
                    {
                        LastError = M35FDErrorCode.ERROR_BROKEN;
                        DeviceState = M35FDStateCode.STATE_READY;
                        isReading = false;
                        return;
                    }
                    Array.Copy(Disk, fromAddress, AttachedCPU.Memory, toAddress, wordsToWrite);
                    // FIX: advance by the (possibly clamped) word count and move
                    // BOTH pointers. The original added wordsPerTick and never
                    // advanced the source, which is only correct while
                    // wordsPerTick == wordsPerSector.
                    fromAddress += (uint)wordsToWrite;
                    toAddress += (uint)wordsToWrite;
                    wordsWritten += wordsToWrite;
                    if (wordsWritten >= wordsPerSector)
                    {
                        isReading = false;
                        DeviceState = M35FDStateCode.STATE_READY;
                    }
                }
            }
            else if (isWriting)
            {
                // Handle seeking
                if (seekTicks != 0)
                    seekTicks--;
                else
                {
                    currentTrack = targetTrack;
                    int wordsToWrite = wordsPerTick;
                    if (wordsToWrite + wordsWritten > wordsPerSector)
                        wordsToWrite = wordsPerSector - wordsWritten;
                    if ((wordsToWrite + toAddress > Disk.Length) ||
                        (wordsToWrite + fromAddress > AttachedCPU.Memory.Length))
                    {
                        LastError = M35FDErrorCode.ERROR_BROKEN;
                        DeviceState = M35FDStateCode.STATE_READY;
                        isWriting = false;
                        return;
                    }
                    Array.Copy(AttachedCPU.Memory, fromAddress, Disk, toAddress, wordsToWrite);
                    // FIX: same pointer-advance correction as the read path.
                    fromAddress += (uint)wordsToWrite;
                    toAddress += (uint)wordsToWrite;
                    wordsWritten += wordsToWrite;
                    if (wordsWritten >= wordsPerSector)
                    {
                        isWriting = false;
                        DeviceState = M35FDStateCode.STATE_READY;
                    }
                }
            }
        }

        /// <summary>
        /// Inserts media. The disk must be exactly 737280 words (1440 sectors).
        /// </summary>
        /// <exception cref="IOException">Thrown when the disk size is invalid.</exception>
        public void InsertDisk(ref ushort[] disk, bool writable)
        {
            if (disk.Length != 737280)
                throw new IOException("Invalid disk size.");
            Disk = disk;
            Writable = writable;
            if (writable)
                DeviceState = M35FDStateCode.STATE_READY;
            else
                DeviceState = M35FDStateCode.STATE_READY_WP;
            currentTrack = 0;
        }

        /// <summary>
        /// Ejects the media; an in-flight transfer is aborted with ERROR_EJECT.
        /// </summary>
        /// <exception cref="IOException">Thrown when no disk is present.</exception>
        public void Eject()
        {
            if (Disk == null)
                throw new IOException("No disk present.");
            Disk = null;
            DeviceState = M35FDStateCode.STATE_NO_MEDIA;
            if (isReading || isWriting)
            {
                isReading = isWriting = false;
                LastError = M35FDErrorCode.ERROR_EJECT;
            }
        }

        /// <summary>Aborts any transfer and clears error/interrupt state; media stays inserted.</summary>
        public override void Reset()
        {
            isReading = false;
            isWriting = false;
            if (DeviceState == M35FDStateCode.STATE_BUSY)
            {
                if (Writable)
                    DeviceState = M35FDStateCode.STATE_READY;
                else
                    DeviceState = M35FDStateCode.STATE_READY_WP;
            }
            LastError = M35FDErrorCode.ERROR_NONE;
            InterruptMessage = 0;
        }
    }

    public enum M35FDStateCode
    {
        STATE_NO_MEDIA = 0,
        STATE_READY = 1,
        STATE_READY_WP = 2,
        STATE_BUSY = 3
    }

    public enum M35FDErrorCode
    {
        ERROR_NONE = 0,
        ERROR_BUSY = 1,
        ERROR_NO_MEDIA = 2,
        ERROR_PROTECTED = 3,
        ERROR_EJECT = 4,
        ERROR_BAD_SECTOR = 5,
        ERROR_BROKEN = 6
    }
}
//
// ClientEngine.cs
//
// Authors:
//   Alan McGovern alan.mcgovern@gmail.com
//
// Copyright (C) 2006 Alan McGovern
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//

namespace OctoTorrent.Client
{
    using System;
    using System.Collections.Generic;
    using System.Collections.ObjectModel;
    using System.IO;
    using System.Linq;
    using System.Net;
    using System.Text;
    using BEncoding;
    using Common;
    using PieceWriters;

    /// <summary>
    /// The Engine that contains the TorrentManagers
    /// </summary>
    public class ClientEngine : IDisposable
    {
        internal static readonly MainLoop MainLoop = new MainLoop("Client Engine Loop");
        private static readonly Random Random = new Random();

        #region Global Constants

        // To support this I need to ensure that the transition from
        // InitialSeeding -> Regular seeding either closes all existing
        // connections or sends HaveAll messages, or sends HaveMessages.
        public const bool SupportsInitialSeed = true;
        public const bool SupportsLocalPeerDiscovery = true;
        public const bool SupportsWebSeed = true;
        public const bool SupportsExtended = true;
        public const bool SupportsFastPeer = true;
        public const bool SupportsEncryption = true;
        public const bool SupportsEndgameMode = true;
#if !DISABLE_DHT
        public const bool SupportsDht = true;
#else
        public const bool SupportsDht = false;
#endif
        internal const int TickLength = 500; // A logic tick will be performed every TickLength miliseconds

        #endregion

        #region Events

        public event EventHandler<StatsUpdateEventArgs> StatsUpdate;
        public event EventHandler<CriticalExceptionEventArgs> CriticalException;
        public event EventHandler<TorrentEventArgs> TorrentRegistered;
        public event EventHandler<TorrentEventArgs> TorrentUnregistered;

        #endregion

        #region Member Variables

        internal static readonly BufferManager BufferManager = new BufferManager();
        private readonly ConnectionManager connectionManager;
        private IDhtEngine dhtEngine;
        private readonly DiskManager diskManager;
        private bool disposed;
        private bool isRunning;
        private readonly PeerListener listener;
        private readonly ListenManager listenManager; // Listens for incoming connections and passes them off to the correct TorrentManager
        private readonly LocalPeerManager localPeerManager;
        private readonly LocalPeerListener localPeerListener;
        private readonly string peerId;
        private readonly EngineSettings settings;
        private int tickCount;
        private readonly List<TorrentManager> torrents;
        private readonly ReadOnlyCollection<TorrentManager> torrentsReadonly;
        private RateLimiterGroup uploadLimiter;
        private RateLimiterGroup downloadLimiter;
        private readonly IEnumerable<FastResume> _fastResume;

        #endregion

        #region Properties

        public ConnectionManager ConnectionManager
        {
            get { return connectionManager; }
        }

#if !DISABLE_DHT
        public IDhtEngine DhtEngine
        {
            get { return dhtEngine; }
        }
#endif

        public DiskManager DiskManager
        {
            get { return diskManager; }
        }

        public bool Disposed
        {
            get { return disposed; }
        }

        public PeerListener Listener
        {
            get { return listener; }
        }

        public bool LocalPeerSearchEnabled
        {
            get { return localPeerListener.Status != ListenerStatus.NotListening; }
            set
            {
                if (value && !LocalPeerSearchEnabled)
                    localPeerListener.Start();
                else if (!value && LocalPeerSearchEnabled)
                    localPeerListener.Stop();
            }
        }

        public bool IsRunning
        {
            get { return isRunning; }
        }

        public string PeerId
        {
            get { return peerId; }
        }

        public EngineSettings Settings
        {
            get { return settings; }
        }

        public IList<TorrentManager> Torrents
        {
            get { return torrentsReadonly; }
        }

        #endregion

        #region Constructors

        public ClientEngine(EngineSettings settings, string peerId = null)
            : this(settings, new DiskWriter(), peerId)
        {
        }

        public ClientEngine(EngineSettings settings, PieceWriter writer, string peerId = null)
            : this(settings, new SocketListener(new IPEndPoint(IPAddress.Any, 0)), writer, peerId)
        {
        }

        public ClientEngine(EngineSettings settings, PeerListener listener, string peerId = null)
            : this(settings, listener, new DiskWriter(), peerId)
        {
        }

        public ClientEngine(EngineSettings settings, PeerListener listener, PieceWriter writer, string peerId = null)
        {
            Check.Settings(settings);
            Check.Listener(listener);
            Check.Writer(writer);

            this.listener = listener;
            this.settings = settings;

            // Preload any fast-resume data saved by a previous session.
            if (settings.FastResumePath != null && File.Exists(settings.FastResumePath))
            {
                var encodedListData = File.ReadAllBytes(settings.FastResumePath);
                var encodedList = (BEncodedList) BEncodedValue.Decode(encodedListData);
                _fastResume = encodedList.Cast<BEncodedDictionary>()
                                         .Select(x => new FastResume(x));
            }

            connectionManager = new ConnectionManager(this);
            RegisterDht(new NullDhtEngine());
            diskManager = new DiskManager(this, writer);
            listenManager = new ListenManager(this);
            MainLoop.QueueTimeout(TimeSpan.FromMilliseconds(TickLength),
                                  () =>
                                      {
                                          if (IsRunning && !disposed)
                                              LogicTick();
                                          return !disposed;
                                      });
            torrents = new List<TorrentManager>();
            torrentsReadonly = new ReadOnlyCollection<TorrentManager>(torrents);
            CreateRateLimiters();
            this.peerId = peerId ?? GeneratePeerId();

            localPeerListener = new LocalPeerListener(this);
            localPeerManager = new LocalPeerManager();
            LocalPeerSearchEnabled = SupportsLocalPeerDiscovery;
            listenManager.Register(listener);
            // This means we created the listener in the constructor
            if (listener.Endpoint.Port == 0)
                listener.ChangeEndpoint(new IPEndPoint(IPAddress.Any, settings.ListenPort));
        }

        /// <summary>
        /// Builds the download/upload limiter groups and schedules the
        /// once-per-second chunk refresh for both rate limiters.
        /// </summary>
        private void CreateRateLimiters()
        {
            // Download group: throttled by both disk-write backlog and the
            // global download rate.
            var downloader = new RateLimiter();
            downloadLimiter = new RateLimiterGroup();
            downloadLimiter.Add(new DiskWriterLimiter(DiskManager));
            downloadLimiter.Add(downloader);

            // Upload group: throttled by the global upload rate only.
            // FIX: the original added a *second* DiskWriterLimiter to
            // downloadLimiter here (copy/paste slip), double-throttling
            // downloads while the upload group got nothing from it.
            var uploader = new RateLimiter();
            uploadLimiter = new RateLimiterGroup();
            uploadLimiter.Add(uploader);

            MainLoop.QueueTimeout(TimeSpan.FromSeconds(1),
                                  () =>
                                      {
                                          downloader.UpdateChunks(
                                              Settings.GlobalMaxDownloadSpeed,
                                              TotalDownloadSpeed);
                                          uploader.UpdateChunks(Settings.GlobalMaxUploadSpeed,
                                                                TotalUploadSpeed);
                                          return !disposed;
                                      });
        }

        #endregion

        #region Methods

        public void ChangeListenEndpoint(IPEndPoint endpoint)
        {
            Check.Endpoint(endpoint);

            Settings.ListenPort = endpoint.Port;
            listener.ChangeEndpoint(endpoint);
        }

        private void CheckDisposed()
        {
            if (disposed)
                throw new ObjectDisposedException(GetType().Name);
        }

        public bool Contains(InfoHash infoHash)
        {
            CheckDisposed();
            if (infoHash == null)
                return false;

            return torrents.Exists(m => m.InfoHash.Equals(infoHash));
        }

        public bool Contains(Torrent torrent)
        {
            CheckDisposed();
            if (torrent == null)
                return false;

            return Contains(torrent.InfoHash);
        }

        public bool Contains(TorrentManager manager)
        {
            CheckDisposed();
            if (manager == null)
                return false;

            return Contains(manager.Torrent);
        }

        public void Dispose()
        {
            if (disposed)
                return;

            disposed = true;
            MainLoop.QueueWait(() =>
                                   {
                                       dhtEngine.Dispose();
                                       diskManager.Dispose();
                                       listenManager.Dispose();
                                       localPeerListener.Stop();
                                       localPeerManager.Dispose();
                                       MainLoop.Dispose();
                                   });
        }

        // Peer id = client version prefix padded to 20 chars with random digits.
        private static string GeneratePeerId()
        {
            var sb = new StringBuilder(20);
            sb.Append(VersionInfo.ClientVersion);
            lock (Random)
                while (sb.Length < 20)
                    sb.Append(Random.Next(0, 9));
            return sb.ToString();
        }

        public void PauseAll()
        {
            CheckDisposed();
            MainLoop.QueueWait(() =>
                                   {
                                       foreach (var manager in torrents)
                                           manager.Pause();
                                   });
        }

        /// <summary>
        /// Persists fast-resume data for every hash-checked torrent to
        /// <c>Settings.FastResumePath</c>, if one is configured.
        /// </summary>
        public void SaveFastResume()
        {
            if (string.IsNullOrWhiteSpace(settings.FastResumePath))
                return;

            var encodedList = new BEncodedList();
            var fastResumeData = torrentsReadonly
                .Where(x => x.HashChecked)
                .Select(tm => tm.SaveFastResume().Encode());

            foreach (var data in fastResumeData)
                encodedList.Add(data);

            File.WriteAllBytes(settings.FastResumePath, encodedList.Encode());
        }

        public void Register(TorrentManager manager)
        {
            CheckDisposed();
            Check.Manager(manager);

            MainLoop.QueueWait(() =>
                                   {
                                       if (manager.Engine != null)
                                           throw new TorrentException("This manager has already been registered");

                                       if (Contains(manager.Torrent))
                                           throw new TorrentException(
                                               "A manager for this torrent has already been registered");

                                       torrents.Add(manager);

                                       manager.PieceHashed += PieceHashed;
                                       manager.Engine = this;
                                       manager.DownloadLimiter.Add(downloadLimiter);
                                       manager.UploadLimiter.Add(uploadLimiter);

                                       if (_fastResume != null)
                                       {
                                           var fastResume = _fastResume
                                               .SingleOrDefault(fr => manager.InfoHash == fr.Infohash);
                                           if (fastResume != null)
                                               manager.LoadFastResume(fastResume);
                                       }

                                       if (dhtEngine != null && manager.Torrent != null &&
                                           manager.Torrent.Nodes != null && dhtEngine.State != DhtState.Ready)
                                       {
                                           try
                                           {
                                               dhtEngine.Add(manager.Torrent.Nodes);
                                           }
                                           catch
                                           {
                                               // FIXME: Should log this somewhere, though it's not critical
                                           }
                                       }
                                   });

            if (TorrentRegistered != null)
                TorrentRegistered(this, new TorrentEventArgs(manager));
        }

        public void RegisterDht(IDhtEngine engine)
        {
            MainLoop.QueueWait(() =>
                                   {
                                       if (dhtEngine != null)
                                       {
                                           dhtEngine.StateChanged -= DhtEngineStateChanged;
                                           dhtEngine.Stop();
                                           dhtEngine.Dispose();
                                       }
                                       dhtEngine = engine ?? new NullDhtEngine();
                                   });

            dhtEngine.StateChanged += DhtEngineStateChanged;
        }

        private void DhtEngineStateChanged(object o, EventArgs e)
        {
            if (dhtEngine.State != DhtState.Ready)
                return;

            MainLoop.Queue(() =>
                               {
                                   foreach (var manager in torrents.Where(manager => manager.CanUseDht))
                                   {
                                       dhtEngine.Announce(manager.InfoHash, Listener.Endpoint.Port);
                                       dhtEngine.GetPeers(manager.InfoHash);
                                   }
                               });
        }

        public void StartAll()
        {
            CheckDisposed();
            MainLoop.QueueWait(() =>
                                   {
                                       foreach (var torrentManager in torrents)
                                           torrentManager.Start();
                                   });
        }

        public void StopAll()
        {
            CheckDisposed();
            MainLoop.QueueWait(() =>
                                   {
                                       foreach (var torrentManager in torrents)
                                           torrentManager.Stop();
                                   });
        }

        public int TotalDownloadSpeed
        {
            get { return torrents.Sum(x => x.Monitor.DownloadSpeed); }
        }

        public int TotalUploadSpeed
        {
            get { return torrents.Sum(x => x.Monitor.UploadSpeed); }
        }

        public void Unregister(TorrentManager manager)
        {
            CheckDisposed();
            Check.Manager(manager);

            MainLoop.QueueWait(() =>
                                   {
                                       if (manager.Engine != this)
                                           throw new TorrentException(
                                               "The manager has not been registered with this engine");

                                       if (manager.State != TorrentState.Stopped)
                                           throw new TorrentException(
                                               "The manager must be stopped before it can be unregistered");

                                       torrents.Remove(manager);

                                       manager.PieceHashed -= PieceHashed;
                                       manager.Engine = null;
                                       manager.DownloadLimiter.Remove(downloadLimiter);
                                       manager.UploadLimiter.Remove(uploadLimiter);
                                   });

            if (TorrentUnregistered != null)
                TorrentUnregistered(this, new TorrentEventArgs(manager));
        }

        #endregion

        #region Private/Internal methods

        internal void Broadcast(TorrentManager manager)
        {
            if (LocalPeerSearchEnabled)
                localPeerManager.Broadcast(manager);
        }

        private void LogicTick()
        {
            tickCount++;

            // Refresh disk-rate chunk budgets once per second.
            if (tickCount%(1000/TickLength) == 0)
            {
                diskManager.WriteLimiter.UpdateChunks(settings.MaxWriteRate, diskManager.WriteRate);
                diskManager.ReadLimiter.UpdateChunks(settings.MaxReadRate, diskManager.ReadRate);
            }

            ConnectionManager.TryConnect();
            foreach (var torrentManager in torrents)
                torrentManager.Mode.Tick(tickCount);

            RaiseStatsUpdate(new StatsUpdateEventArgs());
        }

        internal void RaiseCriticalException(CriticalExceptionEventArgs e)
        {
            Toolbox.RaiseAsyncEvent(CriticalException, this, e);
        }

        private void PieceHashed(object sender, PieceHashedEventArgs e)
        {
            if (e.TorrentManager.State != TorrentState.Hashing)
                diskManager.QueueFlush(e.TorrentManager, e.PieceIndex);
        }

        internal void RaiseStatsUpdate(StatsUpdateEventArgs args)
        {
            Toolbox.RaiseAsyncEvent(StatsUpdate, this, args);
        }

        internal void Start()
        {
            CheckDisposed();
            isRunning = true;
            if (listener.Status == ListenerStatus.NotListening)
                listener.Start();
        }

        internal void Stop()
        {
            CheckDisposed();
            // If all the torrents are stopped, stop ticking
            isRunning = torrents.Exists(x => x.State != TorrentState.Stopped);
            if (!isRunning)
                listener.Stop();
        }

        #endregion
    }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for Additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

using TestCases.SS.UserModel;
using NPOI.SS.UserModel;
using NPOI.OpenXmlFormats.Spreadsheet;
using NUnit.Framework;
using System;
using NPOI.Util;
using System.Text;

namespace NPOI.XSSF.UserModel
{
    /// <summary>
    /// Unit tests for <see cref="XSSFFont"/>: each test builds a CT_Font,
    /// verifies the wrapper reads it correctly, then mutates via the wrapper
    /// and verifies the change is written back to the CT_Font.
    /// </summary>
    [TestFixture]
    public class TestXSSFFont : BaseTestFont
    {
        public TestXSSFFont()
            : base(XSSFITestDataProvider.instance)
        {
        }

        [Test]
        public void TestDefaultFont()
        {
            BaseTestDefaultFont("Calibri", 220, IndexedColors.Black.Index);
        }

        [Test]
        public void TestConstructor()
        {
            XSSFFont xssfFont = new XSSFFont();
            Assert.IsNotNull(xssfFont.GetCTFont());
        }

        [Test]
        public void TestBoldweight()
        {
            CT_Font ctFont = new CT_Font();
            CT_BooleanProperty bool1 = ctFont.AddNewB();
            bool1.val = (false);
            ctFont.SetBArray(0, bool1);
            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual(false, xssfFont.IsBold);

            xssfFont.IsBold = (true);
            // FIX: expected/actual were swapped relative to NUnit's
            // AreEqual(expected, actual) convention used elsewhere in this fixture.
            Assert.AreEqual(1, ctFont.b.Count);
            Assert.AreEqual(true, ctFont.GetBArray(0).val);
        }

        [Test]
        public void TestCharSet()
        {
            CT_Font ctFont = new CT_Font();
            CT_IntProperty prop = ctFont.AddNewCharset();
            prop.val = (FontCharset.ANSI.Value);

            ctFont.SetCharsetArray(0, prop);
            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual(FontCharset.ANSI.Value, xssfFont.Charset);

            xssfFont.SetCharSet(FontCharset.DEFAULT);
            Assert.AreEqual(FontCharset.DEFAULT.Value, ctFont.GetCharsetArray(0).val);

            // Try with a few less usual ones:
            // Set with the Charset itself
            xssfFont.SetCharSet(FontCharset.RUSSIAN);
            Assert.AreEqual(FontCharset.RUSSIAN.Value, xssfFont.Charset);
            // And Set with the Charset index
            xssfFont.SetCharSet(FontCharset.ARABIC.Value);
            Assert.AreEqual(FontCharset.ARABIC.Value, xssfFont.Charset);

            // This one isn't allowed
            Assert.AreEqual(null, FontCharset.ValueOf(9999));
            try
            {
                xssfFont.SetCharSet(9999);
                Assert.Fail("Shouldn't be able to Set an invalid charset");
            }
            catch (POIXMLException)
            {
            }

            // Now try with a few sample files

            // Normal charset
            XSSFWorkbook workbook = XSSFTestDataSamples.OpenSampleWorkbook("Formatting.xlsx");
            Assert.AreEqual(0,
                ((XSSFCellStyle)workbook.GetSheetAt(0).GetRow(0).GetCell(0).CellStyle).GetFont().Charset
            );

            // GB2312 charact Set
            workbook = XSSFTestDataSamples.OpenSampleWorkbook("49273.xlsx");
            Assert.AreEqual(134,
                ((XSSFCellStyle)workbook.GetSheetAt(0).GetRow(0).GetCell(0).CellStyle).GetFont().Charset
            );
        }

        [Test]
        public void TestFontName()
        {
            CT_Font ctFont = new CT_Font();
            CT_FontName fname = ctFont.AddNewName();
            fname.val = "Arial";
            ctFont.name = fname;

            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual("Arial", xssfFont.FontName);

            xssfFont.FontName = "Courier";
            Assert.AreEqual("Courier", ctFont.name.val);
        }

        [Test]
        public void TestItalic()
        {
            CT_Font ctFont = new CT_Font();
            CT_BooleanProperty bool1 = ctFont.AddNewI();
            bool1.val = (false);
            ctFont.SetIArray(0, bool1);
            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual(false, xssfFont.IsItalic);

            xssfFont.IsItalic = (true);
            Assert.AreEqual(1, ctFont.i.Count);
            // FIX: removed an exact duplicate of the following assertion
            // (copy/paste) — asserting the same thing twice adds nothing.
            Assert.AreEqual(true, ctFont.GetIArray(0).val);
        }

        [Test]
        public void TestStrikeout()
        {
            CT_Font ctFont = new CT_Font();
            CT_BooleanProperty bool1 = ctFont.AddNewStrike();
            bool1.val = (false);
            ctFont.SetStrikeArray(0, bool1);
            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual(false, xssfFont.IsStrikeout);

            xssfFont.IsStrikeout = (true);
            Assert.AreEqual(1, ctFont.strike.Count);
            // FIX: removed an exact duplicate of the following assertion (copy/paste).
            Assert.AreEqual(true, ctFont.GetStrikeArray(0).val);
        }

        [Test]
        public void TestFontHeight()
        {
            CT_Font ctFont = new CT_Font();
            CT_FontSize size = ctFont.AddNewSz();
            size.val = (11);
            ctFont.SetSzArray(0, size);
            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual(11, xssfFont.FontHeightInPoints);

            xssfFont.FontHeight = 20;
            Assert.AreEqual(20.0, ctFont.GetSzArray(0).val, 0.0);
        }

        [Test]
        public void TestFontHeightInPoint()
        {
            CT_Font ctFont = new CT_Font();
            CT_FontSize size = ctFont.AddNewSz();
            size.val = (14);
            ctFont.SetSzArray(0, size);
            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual(14, xssfFont.FontHeightInPoints);

            xssfFont.FontHeightInPoints = (short)20;
            Assert.AreEqual(20.0, ctFont.GetSzArray(0).val, 0.0);
        }

        [Test]
        public void TestUnderline()
        {
            CT_Font ctFont = new CT_Font();
            CT_UnderlineProperty underlinePropr = ctFont.AddNewU();
            underlinePropr.val = (ST_UnderlineValues.single);
            ctFont.SetUArray(0, underlinePropr);
            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual(FontUnderlineType.Single, xssfFont.Underline);

            xssfFont.SetUnderline(FontUnderlineType.Double);
            Assert.AreEqual(1, ctFont.u.Count);
            Assert.AreEqual(ST_UnderlineValues.@double, ctFont.GetUArray(0).val);

            xssfFont.SetUnderline(FontUnderlineType.DoubleAccounting);
            Assert.AreEqual(1, ctFont.u.Count);
            Assert.AreEqual(ST_UnderlineValues.doubleAccounting, ctFont.GetUArray(0).val);
        }

        [Test]
        public void TestColor()
        {
            CT_Font ctFont = new CT_Font();
            CT_Color color = ctFont.AddNewColor();
            color.indexed = (uint)(XSSFFont.DEFAULT_FONT_COLOR);
            ctFont.SetColorArray(0, color);
            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual(IndexedColors.Black.Index, xssfFont.Color);

            xssfFont.Color = IndexedColors.Red.Index;
            Assert.AreEqual((uint)IndexedColors.Red.Index, ctFont.GetColorArray(0).indexed);
        }

        [Test]
        public void TestRgbColor()
        {
            CT_Font ctFont = new CT_Font();
            CT_Color color = ctFont.AddNewColor();
            //Integer.toHexString(0xFFFFFF).getBytes() = [102, 102, 102, 102, 102, 102]
            color.SetRgb(Encoding.ASCII.GetBytes("ffffff"));
            ctFont.SetColorArray(0, color);
            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual(ctFont.GetColorArray(0).GetRgb()[0], xssfFont.GetXSSFColor().GetRgb()[0]);
            Assert.AreEqual(ctFont.GetColorArray(0).GetRgb()[1], xssfFont.GetXSSFColor().GetRgb()[1]);
            Assert.AreEqual(ctFont.GetColorArray(0).GetRgb()[2], xssfFont.GetXSSFColor().GetRgb()[2]);
            Assert.AreEqual(ctFont.GetColorArray(0).GetRgb()[3], xssfFont.GetXSSFColor().GetRgb()[3]);

            //Integer.toHexString(0xF1F1F1).getBytes() = [102, 49, 102, 49, 102, 49]
            color.SetRgb(Encoding.ASCII.GetBytes("f1f1f1"));
            XSSFColor newColor = new XSSFColor(color);
            xssfFont.SetColor(newColor);
            Assert.AreEqual(ctFont.GetColorArray(0).GetRgb()[2], newColor.GetRgb()[2]);
        }

        [Test]
        public void TestThemeColor()
        {
            CT_Font ctFont = new CT_Font();
            CT_Color color = ctFont.AddNewColor();
            color.theme = (1);
            color.themeSpecified = true;
            ctFont.SetColorArray(0, color);
            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual((short)ctFont.GetColorArray(0).theme, xssfFont.GetThemeColor());

            xssfFont.SetThemeColor(IndexedColors.Red.Index);
            Assert.AreEqual((uint)IndexedColors.Red.Index, ctFont.GetColorArray(0).theme);
        }

        [Test]
        public void TestFamily()
        {
            CT_Font ctFont = new CT_Font();
            CT_IntProperty family = ctFont.AddNewFamily();
            family.val = (FontFamily.MODERN.Value);
            ctFont.SetFamilyArray(0, family);
            XSSFFont xssfFont = new XSSFFont(ctFont);
            Assert.AreEqual(FontFamily.MODERN.Value, xssfFont.Family);
        }

        [Test]
        public void TestScheme()
        {
            CT_Font ctFont = new CT_Font();
            CT_FontScheme scheme = ctFont.AddNewScheme();
            scheme.val = (ST_FontScheme.major);
            ctFont.SetSchemeArray(0, scheme);
            XSSFFont font = new XSSFFont(ctFont);
            Assert.AreEqual(FontScheme.MAJOR, font.GetScheme());

            font.SetScheme(FontScheme.NONE);
            Assert.AreEqual(ST_FontScheme.none, ctFont.GetSchemeArray(0).val);
        }

        [Test]
        public void TestTypeOffset()
        {
            CT_Font ctFont = new CT_Font();
            CT_VerticalAlignFontProperty valign = ctFont.AddNewVertAlign();
            valign.val = (ST_VerticalAlignRun.baseline);
            ctFont.SetVertAlignArray(0, valign);
            XSSFFont font = new XSSFFont(ctFont);
            Assert.AreEqual(FontSuperScript.None, font.TypeOffset);

            font.TypeOffset = FontSuperScript.Super;
            Assert.AreEqual(ST_VerticalAlignRun.superscript, ctFont.GetVertAlignArray(0).val);
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using Microsoft.CodeAnalysis.CSharp.Symbols;
using Roslyn.Utilities;
using System.Collections.Immutable;
using System.Diagnostics;

namespace Microsoft.CodeAnalysis.CSharp.ExpressionEvaluator
{
    /// <summary>
    /// The kind of variable a display-class field captures: a local,
    /// a parameter, or the enclosing "this" reference.
    /// </summary>
    internal enum DisplayClassVariableKind
    {
        Local,
        Parameter,
        This,
    }

    /// <summary>
    /// A field in a display class that represents a captured
    /// variable: either a local, a parameter, or "this".
    /// </summary>
    internal sealed class DisplayClassVariable
    {
        internal readonly string Name;
        internal readonly DisplayClassVariableKind Kind;
        internal readonly DisplayClassInstance DisplayClassInstance;
        // Chain of field accesses needed to reach the variable from the display
        // class instance. The list head is the field that directly holds the
        // variable's value (see Type below); ToBoundExpression reverses the
        // list before emitting the field accesses.
        internal readonly ConsList<FieldSymbol> DisplayClassFields;

        internal DisplayClassVariable(string name, DisplayClassVariableKind kind, DisplayClassInstance displayClassInstance, ConsList<FieldSymbol> displayClassFields)
        {
            // At least one field is required to reach the captured variable.
            Debug.Assert(displayClassFields.Any());

            this.Name = name;
            this.Kind = kind;
            this.DisplayClassInstance = displayClassInstance;
            this.DisplayClassFields = displayClassFields;

            // Verify all type parameters are substituted.
            Debug.Assert(this.ContainingSymbol.IsContainingSymbolOfAllTypeParameters(this.Type));
        }

        // The variable's type is the type of the head field — the field that
        // directly stores the captured value.
        internal TypeSymbol Type
        {
            get { return this.DisplayClassFields.Head.Type; }
        }

        internal Symbol ContainingSymbol
        {
            get { return this.DisplayClassInstance.ContainingSymbol; }
        }

        /// <summary>
        /// Re-target this variable to another method, substituting type
        /// parameters on both the display-class instance and the field chain.
        /// </summary>
        internal DisplayClassVariable ToOtherMethod(MethodSymbol method, TypeMap typeMap)
        {
            var otherInstance = this.DisplayClassInstance.ToOtherMethod(method, typeMap);
            return SubstituteFields(otherInstance, typeMap);
        }

        /// <summary>
        /// Build the bound expression instance.f1.f2...fn that reads the
        /// captured variable. The field list is reversed so the head field
        /// (the one holding the value) is applied last, i.e. outermost.
        /// </summary>
        internal BoundExpression ToBoundExpression(CSharpSyntaxNode syntax)
        {
            var expr = this.DisplayClassInstance.ToBoundExpression(syntax);
            var fields = ArrayBuilder<FieldSymbol>.GetInstance();
            fields.AddRange(this.DisplayClassFields);
            fields.ReverseContents();
            foreach (var field in fields)
            {
                expr = new BoundFieldAccess(syntax, expr, field, constantValueOpt: null) { WasCompilerGenerated = true };
            }
            fields.Free();
            return expr;
        }

        /// <summary>
        /// Return a copy of this variable over a different display-class
        /// instance, with <paramref name="typeMap"/> applied to each field.
        /// </summary>
        internal DisplayClassVariable SubstituteFields(DisplayClassInstance otherInstance, TypeMap typeMap)
        {
            var otherFields = SubstituteFields(this.DisplayClassFields, typeMap);
            return new DisplayClassVariable(this.Name, this.Kind, otherInstance, otherFields);
        }

        // Recursively substitutes every field in the cons list, preserving the
        // original order (tail is substituted first, then the head is prepended).
        private static ConsList<FieldSymbol> SubstituteFields(ConsList<FieldSymbol> fields, TypeMap typeMap)
        {
            if (!fields.Any())
            {
                return ConsList<FieldSymbol>.Empty;
            }

            var head = SubstituteField(fields.Head, typeMap);
            var tail = SubstituteFields(fields.Tail, typeMap);
            return tail.Prepend(head);
        }

        private static FieldSymbol SubstituteField(FieldSymbol field, TypeMap typeMap)
        {
            // Display-class fields are instance fields; readonly is only
            // expected for anonymous-type backing fields, and no custom
            // modifiers are expected.
            Debug.Assert(!field.IsStatic);
            Debug.Assert(!field.IsReadOnly || GeneratedNames.GetKind(field.Name) == GeneratedNameKind.AnonymousTypeField);
            Debug.Assert(field.CustomModifiers.Length == 0);

            // CONSIDER: Instead of digging fields out of the unsubstituted type and then performing substitution
            // on each one individually, we could dig fields out of the substituted type.
            return new EEDisplayClassFieldSymbol(typeMap.SubstituteNamedType(field.ContainingType), field.Name, typeMap.SubstituteType(field.Type));
        }

        /// <summary>
        /// Minimal FieldSymbol standing in for a substituted display-class
        /// field: only container, name, and type are meaningful. Members that
        /// throw ExceptionUtilities.Unreachable are never expected to be
        /// called in expression-evaluator scenarios.
        /// </summary>
        private sealed class EEDisplayClassFieldSymbol : FieldSymbol
        {
            private readonly NamedTypeSymbol _container;
            private readonly string _name;
            private readonly TypeSymbol _type;

            internal EEDisplayClassFieldSymbol(NamedTypeSymbol container, string name, TypeSymbol type)
            {
                _container = container;
                _name = name;
                _type = type;
            }

            public override Symbol AssociatedSymbol
            {
                get { throw ExceptionUtilities.Unreachable; }
            }

            public override Symbol ContainingSymbol
            {
                get { return _container; }
            }

            public override ImmutableArray<CustomModifier> CustomModifiers
            {
                // SubstituteField asserts the source field has no custom modifiers.
                get { return ImmutableArray<CustomModifier>.Empty; }
            }

            public override Accessibility DeclaredAccessibility
            {
                get { throw ExceptionUtilities.Unreachable; }
            }

            public override ImmutableArray<SyntaxReference> DeclaringSyntaxReferences
            {
                get { throw ExceptionUtilities.Unreachable; }
            }

            public override bool IsConst
            {
                get { return false; }
            }

            public override bool IsReadOnly
            {
                get { return false; }
            }

            public override bool IsStatic
            {
                get { return false; }
            }

            public override bool IsVolatile
            {
                get { return false; }
            }

            public override ImmutableArray<Location> Locations
            {
                get { throw ExceptionUtilities.Unreachable; }
            }

            public override string Name
            {
                get { return _name; }
            }

            internal override bool HasRuntimeSpecialName
            {
                get { throw ExceptionUtilities.Unreachable; }
            }

            internal override bool HasSpecialName
            {
                get { throw ExceptionUtilities.Unreachable; }
            }

            internal override bool IsNotSerialized
            {
                get { throw ExceptionUtilities.Unreachable; }
            }

            internal override MarshalPseudoCustomAttributeData MarshallingInformation
            {
                get { throw ExceptionUtilities.Unreachable; }
            }

            internal override ObsoleteAttributeData ObsoleteAttributeData
            {
                get { throw ExceptionUtilities.Unreachable; }
            }

            internal override int? TypeLayoutOffset
            {
                get { throw ExceptionUtilities.Unreachable; }
            }

            internal override ConstantValue GetConstantValue(ConstantFieldsInProgress inProgress, bool earlyDecodingWellKnownAttributes)
            {
                throw ExceptionUtilities.Unreachable;
            }

            internal override TypeSymbol GetFieldType(ConsList<FieldSymbol> fieldsBeingBound)
            {
                return _type;
            }
        }
    }
}
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using System; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using System.Text.RegularExpressions; using System.Threading; using System.Threading.Tasks; using Microsoft.Azure.Management.Resources; using Microsoft.Azure.Management.Resources.Models; using Microsoft.WindowsAzure; using Microsoft.WindowsAzure.Common; using Microsoft.WindowsAzure.Common.Internals; using Newtonsoft.Json; using Newtonsoft.Json.Linq; namespace Microsoft.Azure.Management.Resources { /// <summary> /// Operations for managing resources. /// </summary> internal partial class ResourceOperations : IServiceOperations<ResourceManagementClient>, IResourceOperations { /// <summary> /// Initializes a new instance of the ResourceOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> internal ResourceOperations(ResourceManagementClient client) { this._client = client; } private ResourceManagementClient _client; /// <summary> /// Gets a reference to the /// Microsoft.Azure.Management.Resources.ResourceManagementClient. 
/// </summary> public ResourceManagementClient Client { get { return this._client; } } /// <summary> /// Checks whether resource exists. /// </summary> /// <param name='resourceGroupName'> /// Required. The name of the resource group. The name is case /// insensitive. /// </param> /// <param name='identity'> /// Required. Resource identity. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Resource group information. /// </returns> public async Task<ResourceExistsResult> CheckExistenceAsync(string resourceGroupName, ResourceIdentity identity, CancellationToken cancellationToken) { // Validate if (resourceGroupName == null) { throw new ArgumentNullException("resourceGroupName"); } if (resourceGroupName != null && resourceGroupName.Length > 1000) { throw new ArgumentOutOfRangeException("resourceGroupName"); } if (Regex.IsMatch(resourceGroupName, "^[-\\w\\._]+$") == false) { throw new ArgumentOutOfRangeException("resourceGroupName"); } if (identity == null) { throw new ArgumentNullException("identity"); } if (identity.ResourceName == null) { throw new ArgumentNullException("identity.ResourceName"); } if (identity.ResourceProviderApiVersion == null) { throw new ArgumentNullException("identity.ResourceProviderApiVersion"); } if (identity.ResourceProviderNamespace == null) { throw new ArgumentNullException("identity.ResourceProviderNamespace"); } if (identity.ResourceType == null) { throw new ArgumentNullException("identity.ResourceType"); } // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("identity", identity); Tracing.Enter(invocationId, this, "CheckExistenceAsync", tracingParameters); } // Construct URL string url = 
"/subscriptions/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/resourcegroups/" + resourceGroupName.Trim() + "/providers/" + identity.ResourceProviderNamespace.Trim() + "/" + (identity.ParentResourcePath != null ? identity.ParentResourcePath.Trim() : "") + "/" + identity.ResourceType.Trim() + "/" + identity.ResourceName.Trim() + "?"; url = url + "api-version=" + Uri.EscapeDataString(identity.ResourceProviderApiVersion.Trim()); string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Get; httpRequest.RequestUri = new Uri(url); // Set Headers // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.NotFound) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result ResourceExistsResult 
result = null; result = new ResourceExistsResult(); result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (statusCode == HttpStatusCode.OK) { result.Exists = true; } else { result.Exists = false; } if (shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } /// <summary> /// Create a resource. /// </summary> /// <param name='resourceGroupName'> /// Required. The name of the resource group. The name is case /// insensitive. /// </param> /// <param name='identity'> /// Required. Resource identity. /// </param> /// <param name='parameters'> /// Required. Create or update resource parameters. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Resource information. /// </returns> public async Task<ResourceCreateOrUpdateResult> CreateOrUpdateAsync(string resourceGroupName, ResourceIdentity identity, BasicResource parameters, CancellationToken cancellationToken) { // Validate if (resourceGroupName == null) { throw new ArgumentNullException("resourceGroupName"); } if (resourceGroupName != null && resourceGroupName.Length > 1000) { throw new ArgumentOutOfRangeException("resourceGroupName"); } if (Regex.IsMatch(resourceGroupName, "^[-\\w\\._]+$") == false) { throw new ArgumentOutOfRangeException("resourceGroupName"); } if (identity == null) { throw new ArgumentNullException("identity"); } if (identity.ResourceName == null) { throw new ArgumentNullException("identity.ResourceName"); } if (identity.ResourceProviderApiVersion == null) { throw new ArgumentNullException("identity.ResourceProviderApiVersion"); } if (identity.ResourceProviderNamespace == null) { throw new ArgumentNullException("identity.ResourceProviderNamespace"); } if (identity.ResourceType 
== null) { throw new ArgumentNullException("identity.ResourceType"); } if (parameters == null) { throw new ArgumentNullException("parameters"); } if (parameters.Location == null) { throw new ArgumentNullException("parameters.Location"); } // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("identity", identity); tracingParameters.Add("parameters", parameters); Tracing.Enter(invocationId, this, "CreateOrUpdateAsync", tracingParameters); } // Construct URL string url = "/subscriptions/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/resourcegroups/" + resourceGroupName.Trim() + "/providers/" + identity.ResourceProviderNamespace.Trim() + "/" + (identity.ParentResourcePath != null ? identity.ParentResourcePath.Trim() : "") + "/" + identity.ResourceType.Trim() + "/" + identity.ResourceName.Trim() + "?"; url = url + "api-version=" + Uri.EscapeDataString(identity.ResourceProviderApiVersion.Trim()); string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. 
if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Put; httpRequest.RequestUri = new Uri(url); // Set Headers // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Serialize Request string requestContent = null; JToken requestDoc = null; JObject basicResourceValue = new JObject(); requestDoc = basicResourceValue; basicResourceValue["location"] = parameters.Location; if (parameters.Properties != null) { basicResourceValue["properties"] = JObject.Parse(parameters.Properties); } if (parameters.Tags != null) { if (parameters.Tags is ILazyCollection == false || ((ILazyCollection)parameters.Tags).IsInitialized) { JObject tagsDictionary = new JObject(); foreach (KeyValuePair<string, string> pair in parameters.Tags) { string tagsKey = pair.Key; string tagsValue = pair.Value; tagsDictionary[tagsKey] = tagsValue; } basicResourceValue["tags"] = tagsDictionary; } } if (parameters.Plan != null) { JObject planValue = new JObject(); basicResourceValue["plan"] = planValue; if (parameters.Plan.Name != null) { planValue["name"] = parameters.Plan.Name; } if (parameters.Plan.Publisher != null) { planValue["publisher"] = parameters.Plan.Publisher; } if (parameters.Plan.Product != null) { planValue["product"] = parameters.Plan.Product; } if (parameters.Plan.PromotionCode != null) { planValue["promotionCode"] = parameters.Plan.PromotionCode; } } if (parameters.ProvisioningState != null) { basicResourceValue["provisioningState"] = parameters.ProvisioningState; } requestContent = requestDoc.ToString(Formatting.Indented); httpRequest.Content = new 
StringContent(requestContent, Encoding.UTF8); httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Created) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result ResourceCreateOrUpdateResult result = null; // Deserialize Response cancellationToken.ThrowIfCancellationRequested(); string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); result = new ResourceCreateOrUpdateResult(); JToken responseDoc = null; if (string.IsNullOrEmpty(responseContent) == false) { responseDoc = JToken.Parse(responseContent); } if (responseDoc != null && responseDoc.Type != JTokenType.Null) { Resource resourceInstance = new Resource(); result.Resource = resourceInstance; JToken idValue = responseDoc["id"]; if (idValue != null && idValue.Type != JTokenType.Null) { string idInstance = ((string)idValue); resourceInstance.Id = idInstance; } JToken nameValue = responseDoc["name"]; if (nameValue != null && nameValue.Type != JTokenType.Null) { string nameInstance = ((string)nameValue); resourceInstance.Name = nameInstance; } JToken typeValue = responseDoc["type"]; if (typeValue != null && typeValue.Type != JTokenType.Null) { string typeInstance = ((string)typeValue); resourceInstance.Type = 
typeInstance; } JToken propertiesValue = responseDoc["properties"]; if (propertiesValue != null && propertiesValue.Type != JTokenType.Null) { JToken provisioningStateValue = propertiesValue["provisioningState"]; if (provisioningStateValue != null && provisioningStateValue.Type != JTokenType.Null) { string provisioningStateInstance = ((string)provisioningStateValue); resourceInstance.ProvisioningState = provisioningStateInstance; } } JToken locationValue = responseDoc["location"]; if (locationValue != null && locationValue.Type != JTokenType.Null) { string locationInstance = ((string)locationValue); resourceInstance.Location = locationInstance; } JToken propertiesValue2 = responseDoc["properties"]; if (propertiesValue2 != null && propertiesValue2.Type != JTokenType.Null) { string propertiesInstance = propertiesValue2.ToString(Formatting.Indented); resourceInstance.Properties = propertiesInstance; } JToken tagsSequenceElement = ((JToken)responseDoc["tags"]); if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null) { foreach (JProperty property in tagsSequenceElement) { string tagsKey2 = ((string)property.Name); string tagsValue2 = ((string)property.Value); resourceInstance.Tags.Add(tagsKey2, tagsValue2); } } JToken planValue2 = responseDoc["plan"]; if (planValue2 != null && planValue2.Type != JTokenType.Null) { Plan planInstance = new Plan(); resourceInstance.Plan = planInstance; JToken nameValue2 = planValue2["name"]; if (nameValue2 != null && nameValue2.Type != JTokenType.Null) { string nameInstance2 = ((string)nameValue2); planInstance.Name = nameInstance2; } JToken publisherValue = planValue2["publisher"]; if (publisherValue != null && publisherValue.Type != JTokenType.Null) { string publisherInstance = ((string)publisherValue); planInstance.Publisher = publisherInstance; } JToken productValue = planValue2["product"]; if (productValue != null && productValue.Type != JTokenType.Null) { string productInstance = ((string)productValue); 
planInstance.Product = productInstance; } JToken promotionCodeValue = planValue2["promotionCode"]; if (promotionCodeValue != null && promotionCodeValue.Type != JTokenType.Null) { string promotionCodeInstance = ((string)promotionCodeValue); planInstance.PromotionCode = promotionCodeInstance; } } JToken provisioningStateValue2 = responseDoc["provisioningState"]; if (provisioningStateValue2 != null && provisioningStateValue2.Type != JTokenType.Null) { string provisioningStateInstance2 = ((string)provisioningStateValue2); resourceInstance.ProvisioningState = provisioningStateInstance2; } } result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } /// <summary> /// Delete resource and all of its resources. /// </summary> /// <param name='resourceGroupName'> /// Required. The name of the resource group. The name is case /// insensitive. /// </param> /// <param name='identity'> /// Required. Resource identity. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// A standard service response including an HTTP status code and /// request ID. 
/// </returns> public async Task<OperationResponse> DeleteAsync(string resourceGroupName, ResourceIdentity identity, CancellationToken cancellationToken) { // Validate if (resourceGroupName == null) { throw new ArgumentNullException("resourceGroupName"); } if (resourceGroupName != null && resourceGroupName.Length > 1000) { throw new ArgumentOutOfRangeException("resourceGroupName"); } if (Regex.IsMatch(resourceGroupName, "^[-\\w\\._]+$") == false) { throw new ArgumentOutOfRangeException("resourceGroupName"); } if (identity == null) { throw new ArgumentNullException("identity"); } if (identity.ResourceName == null) { throw new ArgumentNullException("identity.ResourceName"); } if (identity.ResourceProviderApiVersion == null) { throw new ArgumentNullException("identity.ResourceProviderApiVersion"); } if (identity.ResourceProviderNamespace == null) { throw new ArgumentNullException("identity.ResourceProviderNamespace"); } if (identity.ResourceType == null) { throw new ArgumentNullException("identity.ResourceType"); } // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("identity", identity); Tracing.Enter(invocationId, this, "DeleteAsync", tracingParameters); } // Construct URL string url = "/subscriptions/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/resourcegroups/" + resourceGroupName.Trim() + "/providers/" + identity.ResourceProviderNamespace.Trim() + "/" + (identity.ParentResourcePath != null ? 
identity.ParentResourcePath.Trim() : "") + "/" + identity.ResourceType.Trim() + "/" + identity.ResourceName.Trim() + "?"; url = url + "api-version=" + Uri.EscapeDataString(identity.ResourceProviderApiVersion.Trim()); string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Delete; httpRequest.RequestUri = new Uri(url); // Set Headers // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Accepted && statusCode != HttpStatusCode.NoContent) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if (shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result OperationResponse result = null; result = new OperationResponse(); result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if 
(shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); } } } finally { if (httpRequest != null) { httpRequest.Dispose(); } } } /// <summary> /// Returns a resource belonging to a resource group. /// </summary> /// <param name='resourceGroupName'> /// Required. The name of the resource group. The name is case /// insensitive. /// </param> /// <param name='identity'> /// Required. Resource identity. /// </param> /// <param name='cancellationToken'> /// Cancellation token. /// </param> /// <returns> /// Resource information. /// </returns> public async Task<ResourceGetResult> GetAsync(string resourceGroupName, ResourceIdentity identity, CancellationToken cancellationToken) { // Validate if (resourceGroupName == null) { throw new ArgumentNullException("resourceGroupName"); } if (resourceGroupName != null && resourceGroupName.Length > 1000) { throw new ArgumentOutOfRangeException("resourceGroupName"); } if (Regex.IsMatch(resourceGroupName, "^[-\\w\\._]+$") == false) { throw new ArgumentOutOfRangeException("resourceGroupName"); } if (identity == null) { throw new ArgumentNullException("identity"); } if (identity.ResourceName == null) { throw new ArgumentNullException("identity.ResourceName"); } if (identity.ResourceProviderApiVersion == null) { throw new ArgumentNullException("identity.ResourceProviderApiVersion"); } if (identity.ResourceProviderNamespace == null) { throw new ArgumentNullException("identity.ResourceProviderNamespace"); } if (identity.ResourceType == null) { throw new ArgumentNullException("identity.ResourceType"); } // Tracing bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled; string invocationId = null; if (shouldTrace) { invocationId = Tracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("identity", identity); 
Tracing.Enter(invocationId, this, "GetAsync", tracingParameters); } // Construct URL string url = "/subscriptions/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/resourcegroups/" + resourceGroupName.Trim() + "/providers/" + identity.ResourceProviderNamespace.Trim() + "/" + (identity.ParentResourcePath != null ? identity.ParentResourcePath.Trim() : "") + "/" + identity.ResourceType.Trim() + "/" + identity.ResourceName.Trim() + "?"; url = url + "api-version=" + Uri.EscapeDataString(identity.ResourceProviderApiVersion.Trim()); string baseUrl = this.Client.BaseUri.AbsoluteUri; // Trim '/' character from the end of baseUrl and beginning of url. if (baseUrl[baseUrl.Length - 1] == '/') { baseUrl = baseUrl.Substring(0, baseUrl.Length - 1); } if (url[0] == '/') { url = url.Substring(1); } url = baseUrl + "/" + url; url = url.Replace(" ", "%20"); // Create HTTP transport objects HttpRequestMessage httpRequest = null; try { httpRequest = new HttpRequestMessage(); httpRequest.Method = HttpMethod.Get; httpRequest.RequestUri = new Uri(url); // Set Headers // Set Credentials cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); // Send Request HttpResponseMessage httpResponse = null; try { if (shouldTrace) { Tracing.SendRequest(invocationId, httpRequest); } cancellationToken.ThrowIfCancellationRequested(); httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); if (shouldTrace) { Tracing.ReceiveResponse(invocationId, httpResponse); } HttpStatusCode statusCode = httpResponse.StatusCode; if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.NoContent) { cancellationToken.ThrowIfCancellationRequested(); CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false)); if 
(shouldTrace) { Tracing.Error(invocationId, ex); } throw ex; } // Create Result ResourceGetResult result = null; // Deserialize Response cancellationToken.ThrowIfCancellationRequested(); string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); result = new ResourceGetResult(); JToken responseDoc = null; if (string.IsNullOrEmpty(responseContent) == false) { responseDoc = JToken.Parse(responseContent); } if (responseDoc != null && responseDoc.Type != JTokenType.Null) { Resource resourceInstance = new Resource(); result.Resource = resourceInstance; JToken idValue = responseDoc["id"]; if (idValue != null && idValue.Type != JTokenType.Null) { string idInstance = ((string)idValue); resourceInstance.Id = idInstance; } JToken nameValue = responseDoc["name"]; if (nameValue != null && nameValue.Type != JTokenType.Null) { string nameInstance = ((string)nameValue); resourceInstance.Name = nameInstance; } JToken typeValue = responseDoc["type"]; if (typeValue != null && typeValue.Type != JTokenType.Null) { string typeInstance = ((string)typeValue); resourceInstance.Type = typeInstance; } JToken propertiesValue = responseDoc["properties"]; if (propertiesValue != null && propertiesValue.Type != JTokenType.Null) { JToken provisioningStateValue = propertiesValue["provisioningState"]; if (provisioningStateValue != null && provisioningStateValue.Type != JTokenType.Null) { string provisioningStateInstance = ((string)provisioningStateValue); resourceInstance.ProvisioningState = provisioningStateInstance; } } JToken locationValue = responseDoc["location"]; if (locationValue != null && locationValue.Type != JTokenType.Null) { string locationInstance = ((string)locationValue); resourceInstance.Location = locationInstance; } JToken propertiesValue2 = responseDoc["properties"]; if (propertiesValue2 != null && propertiesValue2.Type != JTokenType.Null) { string propertiesInstance = propertiesValue2.ToString(Formatting.Indented); resourceInstance.Properties 
= propertiesInstance; } JToken tagsSequenceElement = ((JToken)responseDoc["tags"]); if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null) { foreach (JProperty property in tagsSequenceElement) { string tagsKey = ((string)property.Name); string tagsValue = ((string)property.Value); resourceInstance.Tags.Add(tagsKey, tagsValue); } } JToken planValue = responseDoc["plan"]; if (planValue != null && planValue.Type != JTokenType.Null) { Plan planInstance = new Plan(); resourceInstance.Plan = planInstance; JToken nameValue2 = planValue["name"]; if (nameValue2 != null && nameValue2.Type != JTokenType.Null) { string nameInstance2 = ((string)nameValue2); planInstance.Name = nameInstance2; } JToken publisherValue = planValue["publisher"]; if (publisherValue != null && publisherValue.Type != JTokenType.Null) { string publisherInstance = ((string)publisherValue); planInstance.Publisher = publisherInstance; } JToken productValue = planValue["product"]; if (productValue != null && productValue.Type != JTokenType.Null) { string productInstance = ((string)productValue); planInstance.Product = productInstance; } JToken promotionCodeValue = planValue["promotionCode"]; if (promotionCodeValue != null && promotionCodeValue.Type != JTokenType.Null) { string promotionCodeInstance = ((string)promotionCodeValue); planInstance.PromotionCode = promotionCodeInstance; } } JToken provisioningStateValue2 = responseDoc["provisioningState"]; if (provisioningStateValue2 != null && provisioningStateValue2.Type != JTokenType.Null) { string provisioningStateInstance2 = ((string)provisioningStateValue2); resourceInstance.ProvisioningState = provisioningStateInstance2; } } result.StatusCode = statusCode; if (httpResponse.Headers.Contains("x-ms-request-id")) { result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (shouldTrace) { Tracing.Exit(invocationId, result); } return result; } finally { if (httpResponse != null) { httpResponse.Dispose(); 
} } }
// (Tail of the preceding generated operation: the request is disposed on all paths.)
finally
{
    if (httpRequest != null)
    {
        httpRequest.Dispose();
    }
}
}

/// <summary>
/// Get all of the resources under a subscription.
/// </summary>
/// <param name='parameters'>
/// Optional. Query parameters. If null is passed returns all resource
/// groups.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// List of resource groups.
/// </returns>
public async Task<ResourceListResult> ListAsync(ResourceListParameters parameters, CancellationToken cancellationToken)
{
    // Validate

    // Tracing
    bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = Tracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("parameters", parameters);
        Tracing.Enter(invocationId, this, "ListAsync", tracingParameters);
    }

    // Construct URL
    string url = "/subscriptions/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/";
    if (parameters != null && parameters.ResourceGroupName != null)
    {
        url = url + "resourceGroups/" + Uri.EscapeDataString(parameters.ResourceGroupName != null ? parameters.ResourceGroupName.Trim() : "") + "/";
    }
    url = url + "resources?";
    // Build an optional OData $filter from the supplied parameters; appendFilter
    // tracks whether the "$filter=" prefix still has to be emitted.
    bool appendFilter = true;
    if (parameters != null && parameters.ResourceType != null)
    {
        appendFilter = false;
        url = url + "$filter=resourceType eq '" + Uri.EscapeDataString(parameters.ResourceType != null ? parameters.ResourceType.Trim() : "") + "'";
    }
    if (parameters != null && parameters.TagName != null)
    {
        if (appendFilter == true)
        {
            appendFilter = false;
            url = url + "$filter=";
        }
        else
        {
            url = url + " and ";
        }
        url = url + "tagname eq '" + Uri.EscapeDataString(parameters.TagName != null ? parameters.TagName.Trim() : "") + "'";
    }
    if (parameters != null && parameters.TagValue != null)
    {
        if (appendFilter == true)
        {
            appendFilter = false;
            url = url + "$filter=";
        }
        else
        {
            url = url + " and ";
        }
        url = url + "tagvalue eq '" + Uri.EscapeDataString(parameters.TagValue != null ? parameters.TagValue.Trim() : "") + "'";
    }
    if (parameters != null && parameters.Top != null)
    {
        url = url + "&$top=" + Uri.EscapeDataString(parameters.Top.Value.ToString());
    }
    // NOTE(review): when no filter/$top was appended this yields "resources?&api-version=..."
    // — a harmless but odd "?&" sequence produced by the generator.
    url = url + "&api-version=2014-04-01-preview";
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    // Percent-encode the literal spaces introduced by the OData "eq" / " and " fragments above.
    url = url.Replace(" ", "%20");

    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Get;
        httpRequest.RequestUri = new Uri(url);

        // Set Headers

        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                Tracing.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                Tracing.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    Tracing.Error(invocationId, ex);
                }
                throw ex;
            }

            // Create Result
            ResourceListResult result = null;
            // Deserialize Response: generated code that manually walks the JSON payload.
            cancellationToken.ThrowIfCancellationRequested();
            string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            result = new ResourceListResult();
            JToken responseDoc = null;
            if (string.IsNullOrEmpty(responseContent) == false)
            {
                responseDoc = JToken.Parse(responseContent);
            }

            if (responseDoc != null && responseDoc.Type != JTokenType.Null)
            {
                JToken valueArray = responseDoc["value"];
                if (valueArray != null && valueArray.Type != JTokenType.Null)
                {
                    foreach (JToken valueValue in ((JArray)valueArray))
                    {
                        Resource resourceJsonFormatInstance = new Resource();
                        result.Resources.Add(resourceJsonFormatInstance);

                        JToken idValue = valueValue["id"];
                        if (idValue != null && idValue.Type != JTokenType.Null)
                        {
                            string idInstance = ((string)idValue);
                            resourceJsonFormatInstance.Id = idInstance;
                        }

                        JToken nameValue = valueValue["name"];
                        if (nameValue != null && nameValue.Type != JTokenType.Null)
                        {
                            string nameInstance = ((string)nameValue);
                            resourceJsonFormatInstance.Name = nameInstance;
                        }

                        JToken typeValue = valueValue["type"];
                        if (typeValue != null && typeValue.Type != JTokenType.Null)
                        {
                            string typeInstance = ((string)typeValue);
                            resourceJsonFormatInstance.Type = typeInstance;
                        }

                        JToken propertiesValue = valueValue["properties"];
                        if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
                        {
                            JToken provisioningStateValue = propertiesValue["provisioningState"];
                            if (provisioningStateValue != null && provisioningStateValue.Type != JTokenType.Null)
                            {
                                string provisioningStateInstance = ((string)provisioningStateValue);
                                resourceJsonFormatInstance.ProvisioningState = provisioningStateInstance;
                            }
                        }

                        JToken locationValue = valueValue["location"];
                        if (locationValue != null && locationValue.Type != JTokenType.Null)
                        {
                            string locationInstance = ((string)locationValue);
                            resourceJsonFormatInstance.Location = locationInstance;
                        }

                        // The raw "properties" payload is also preserved verbatim as indented JSON text.
                        JToken propertiesValue2 = valueValue["properties"];
                        if (propertiesValue2 != null && propertiesValue2.Type != JTokenType.Null)
                        {
                            string propertiesInstance = propertiesValue2.ToString(Formatting.Indented);
                            resourceJsonFormatInstance.Properties = propertiesInstance;
                        }

                        JToken tagsSequenceElement = ((JToken)valueValue["tags"]);
                        if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
                        {
                            foreach (JProperty property in tagsSequenceElement)
                            {
                                string tagsKey = ((string)property.Name);
                                string tagsValue = ((string)property.Value);
                                resourceJsonFormatInstance.Tags.Add(tagsKey, tagsValue);
                            }
                        }

                        JToken planValue = valueValue["plan"];
                        if (planValue != null && planValue.Type != JTokenType.Null)
                        {
                            Plan planInstance = new Plan();
                            resourceJsonFormatInstance.Plan = planInstance;

                            JToken nameValue2 = planValue["name"];
                            if (nameValue2 != null && nameValue2.Type != JTokenType.Null)
                            {
                                string nameInstance2 = ((string)nameValue2);
                                planInstance.Name = nameInstance2;
                            }

                            JToken publisherValue = planValue["publisher"];
                            if (publisherValue != null && publisherValue.Type != JTokenType.Null)
                            {
                                string publisherInstance = ((string)publisherValue);
                                planInstance.Publisher = publisherInstance;
                            }

                            JToken productValue = planValue["product"];
                            if (productValue != null && productValue.Type != JTokenType.Null)
                            {
                                string productInstance = ((string)productValue);
                                planInstance.Product = productInstance;
                            }

                            JToken promotionCodeValue = planValue["promotionCode"];
                            if (promotionCodeValue != null && promotionCodeValue.Type != JTokenType.Null)
                            {
                                string promotionCodeInstance = ((string)promotionCodeValue);
                                planInstance.PromotionCode = promotionCodeInstance;
                            }
                        }

                        JToken provisioningStateValue2 = valueValue["provisioningState"];
                        if (provisioningStateValue2 != null && provisioningStateValue2.Type != JTokenType.Null)
                        {
                            string provisioningStateInstance2 = ((string)provisioningStateValue2);
                            resourceJsonFormatInstance.ProvisioningState = provisioningStateInstance2;
                        }
                    }
                }

                // Continuation link for paged results (consumed by ListNextAsync).
                JToken odatanextLinkValue = responseDoc["@odata.nextLink"];
                if (odatanextLinkValue != null && odatanextLinkValue.Type != JTokenType.Null)
                {
                    string odatanextLinkInstance = ((string)odatanextLinkValue);
                    result.NextLink = odatanextLinkInstance;
                }
            }

            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }

            if (shouldTrace)
            {
                Tracing.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}

/// <summary>
/// Get the next page of resources, using the NextLink returned by a
/// previous successful List operation.
/// </summary>
/// <param name='nextLink'>
/// Required. NextLink from the previous successful call to List
/// operation.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// List of resource groups.
/// </returns>
public async Task<ResourceListResult> ListNextAsync(string nextLink, CancellationToken cancellationToken)
{
    // Validate
    if (nextLink == null)
    {
        throw new ArgumentNullException("nextLink");
    }

    // Tracing
    bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = Tracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("nextLink", nextLink);
        Tracing.Enter(invocationId, this, "ListNextAsync", tracingParameters);
    }

    // Construct URL: the continuation link is already absolute, so it is used as-is.
    string url = nextLink.Trim();

    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Get;
        httpRequest.RequestUri = new Uri(url);

        // Set Headers

        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                Tracing.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                Tracing.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    Tracing.Error(invocationId, ex);
                }
                throw ex;
            }

            // Create Result
            ResourceListResult result = null;
            // Deserialize Response: identical generated walk to the one in ListAsync.
            cancellationToken.ThrowIfCancellationRequested();
            string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            result = new ResourceListResult();
            JToken responseDoc = null;
            if (string.IsNullOrEmpty(responseContent) == false)
            {
                responseDoc = JToken.Parse(responseContent);
            }

            if (responseDoc != null && responseDoc.Type != JTokenType.Null)
            {
                JToken valueArray = responseDoc["value"];
                if (valueArray != null && valueArray.Type != JTokenType.Null)
                {
                    foreach (JToken valueValue in ((JArray)valueArray))
                    {
                        Resource resourceJsonFormatInstance = new Resource();
                        result.Resources.Add(resourceJsonFormatInstance);

                        JToken idValue = valueValue["id"];
                        if (idValue != null && idValue.Type != JTokenType.Null)
                        {
                            string idInstance = ((string)idValue);
                            resourceJsonFormatInstance.Id = idInstance;
                        }

                        JToken nameValue = valueValue["name"];
                        if (nameValue != null && nameValue.Type != JTokenType.Null)
                        {
                            string nameInstance = ((string)nameValue);
                            resourceJsonFormatInstance.Name = nameInstance;
                        }

                        JToken typeValue = valueValue["type"];
                        if (typeValue != null && typeValue.Type != JTokenType.Null)
                        {
                            string typeInstance = ((string)typeValue);
                            resourceJsonFormatInstance.Type = typeInstance;
                        }

                        JToken propertiesValue = valueValue["properties"];
                        if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
                        {
                            JToken provisioningStateValue = propertiesValue["provisioningState"];
                            if (provisioningStateValue != null && provisioningStateValue.Type != JTokenType.Null)
                            {
                                string provisioningStateInstance = ((string)provisioningStateValue);
                                resourceJsonFormatInstance.ProvisioningState = provisioningStateInstance;
                            }
                        }

                        JToken locationValue = valueValue["location"];
                        if (locationValue != null && locationValue.Type != JTokenType.Null)
                        {
                            string locationInstance = ((string)locationValue);
                            resourceJsonFormatInstance.Location = locationInstance;
                        }

                        // The raw "properties" payload is also preserved verbatim as indented JSON text.
                        JToken propertiesValue2 = valueValue["properties"];
                        if (propertiesValue2 != null && propertiesValue2.Type != JTokenType.Null)
                        {
                            string propertiesInstance = propertiesValue2.ToString(Formatting.Indented);
                            resourceJsonFormatInstance.Properties = propertiesInstance;
                        }

                        JToken tagsSequenceElement = ((JToken)valueValue["tags"]);
                        if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
                        {
                            foreach (JProperty property in tagsSequenceElement)
                            {
                                string tagsKey = ((string)property.Name);
                                string tagsValue = ((string)property.Value);
                                resourceJsonFormatInstance.Tags.Add(tagsKey, tagsValue);
                            }
                        }

                        JToken planValue = valueValue["plan"];
                        if (planValue != null && planValue.Type != JTokenType.Null)
                        {
                            Plan planInstance = new Plan();
                            resourceJsonFormatInstance.Plan = planInstance;

                            JToken nameValue2 = planValue["name"];
                            if (nameValue2 != null && nameValue2.Type != JTokenType.Null)
                            {
                                string nameInstance2 = ((string)nameValue2);
                                planInstance.Name = nameInstance2;
                            }

                            JToken publisherValue = planValue["publisher"];
                            if (publisherValue != null && publisherValue.Type != JTokenType.Null)
                            {
                                string publisherInstance = ((string)publisherValue);
                                planInstance.Publisher = publisherInstance;
                            }

                            JToken productValue = planValue["product"];
                            if (productValue != null && productValue.Type != JTokenType.Null)
                            {
                                string productInstance = ((string)productValue);
                                planInstance.Product = productInstance;
                            }

                            JToken promotionCodeValue = planValue["promotionCode"];
                            if (promotionCodeValue != null && promotionCodeValue.Type != JTokenType.Null)
                            {
                                string promotionCodeInstance = ((string)promotionCodeValue);
                                planInstance.PromotionCode = promotionCodeInstance;
                            }
                        }

                        JToken provisioningStateValue2 = valueValue["provisioningState"];
                        if (provisioningStateValue2 != null && provisioningStateValue2.Type != JTokenType.Null)
                        {
                            string provisioningStateInstance2 = ((string)provisioningStateValue2);
                            resourceJsonFormatInstance.ProvisioningState = provisioningStateInstance2;
                        }
                    }
                }

                JToken odatanextLinkValue = responseDoc["@odata.nextLink"];
                if (odatanextLinkValue != null && odatanextLinkValue.Type != JTokenType.Null)
                {
                    string odatanextLinkInstance = ((string)odatanextLinkValue);
                    result.NextLink = odatanextLinkInstance;
                }
            }

            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }

            if (shouldTrace)
            {
                Tracing.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
}
}
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Collections;
using ID3.ID3v2Frames;
using System.Text.RegularExpressions;

namespace ID3
{
    /// <summary>
    /// The main class for any type of frame to inherit
    /// </summary>
    public abstract class Frame
    {
        private string _FrameID;        // Contain FrameID of current Frame
        private FrameFlags _FrameFlags; // Contain Flags of current frame
        // After reading frame, if the drop value is true it means the frame is not readable
        protected bool _MustDrop;
        private bool _IsLinked;         // indicate if current frame is a linked frame or not
        private string _ErrorMessage;   // Contain Error Message if one occurs

        /// <summary>
        /// Create a new Frame class
        /// </summary>
        /// <param name="FrameID">4 Characters tag identifier</param>
        /// <param name="Flags">Frame Flags</param>
        protected Frame(string FrameID, FrameFlags Flags)
        {
            // All FrameID letters must be capital
            FrameID = FrameID.ToUpper();
            if (!ValidatingFrameID(FrameID, ValidatingErrorTypes.Exception))
            {
                _MustDrop = true;
                return;
            }
            _FrameFlags = Flags;
            _FrameID = FrameID;
            _MustDrop = false;
            _IsLinked = false;
        }

        /// <summary>
        /// Get or Set flags of current frame
        /// </summary>
        protected FrameFlags FrameFlag
        {
            get { return _FrameFlags; }
            set { _FrameFlags = value; }
        }

        /// <summary>
        /// Get header of current frame according to specific Size
        /// </summary>
        /// <param name="MinorVersion">Minor version of ID3v2</param>
        /// <returns>MemoryStream containing the frame header, or null when the
        /// frame does not exist in the requested version</returns>
        protected MemoryStream FrameHeader(int MinorVersion)
        {
            byte[] Buf;
            MemoryStream ms = new MemoryStream();
            int FrameIDLength = MinorVersion == 2 ? 3 : 4; // Length of FrameID according to version
            string Temp = _FrameID;
            // if minor version of ID3 is 2, the FrameID is 3 characters long
            if (MinorVersion == 2)
            {
                Temp = FramesInfo.Get3CharID(Temp);
                if (Temp == null) // This frame is not available in this version
                    return null;
            }
            ms.Write(Encoding.ASCII.GetBytes(Temp), 0, FrameIDLength); // Write FrameID
            // Frame size is stored big-endian; v2 uses only 3 size bytes.
            Buf = BitConverter.GetBytes(Length);
            Array.Reverse(Buf);
            if (MinorVersion == 2)
                ms.Write(Buf, 1, Buf.Length - 1); // Write Frame Size
            else
                ms.Write(Buf, 0, Buf.Length);     // Write Frame Size
            if (MinorVersion != 2)
            {
                // Versions newer than 2 carry a 16-bit flags field
                Buf = BitConverter.GetBytes((ushort)_FrameFlags);
                Array.Reverse(Buf);
                ms.Write(Buf, 0, Buf.Length); // Write Frame Flag
            }
            return ms;
        }

        /// <summary>
        /// Indicate if this frame is readable
        /// </summary>
        public bool IsReadableFrame
        {
            // FIX: use short-circuit '||' instead of bitwise '|' so the abstract
            // IsAvailable getter is only evaluated when the frame was dropped.
            get { return (!_MustDrop || IsAvailable); }
        }

        // Record an error message and optionally mark the frame as dropped.
        protected void ErrorOccur(string Message, bool MustDrop)
        {
            _ErrorMessage = Message;
            _MustDrop = MustDrop;
        }

        #region -> Static Get Members <-

        /// <summary>
        /// Get length of Specific string according to Encoding
        /// </summary>
        /// <param name="Text">Text to get length</param>
        /// <param name="TEncoding">TextEncoding to use for Length calculation</param>
        /// <param name="AddNullCharacter">whether a terminating null must be counted</param>
        /// <returns>Length of text in bytes, including any BOM and terminator</returns>
        protected static int GetTextLength(string Text, TextEncodings TEncoding, bool AddNullCharacter)
        {
            int StringLength;
            StringLength = Text.Length;
            // NOTE(review): non-ASCII text is sized as UTF-16 (2 bytes/char + 2-byte BOM)
            // even when an 8-bit encoding was requested — presumably the writer upgrades
            // the encoding in that case; confirm against WriteText's callers.
            if (TEncoding == TextEncodings.UTF_16 || TEncoding == TextEncodings.UTF_16BE || !IsAscii(Text))
            {
                StringLength *= 2; // in UTF-16 each character is 2 bytes
                StringLength += 2; // byte order mark
            }
            if (TEncoding == TextEncodings.UTF8)
            {
                StringLength += 3; // UTF-8 BOM (EF BB BF)
            }
            if (AddNullCharacter)
            {
                if (TEncoding == TextEncodings.UTF_16 || TEncoding == TextEncodings.UTF_16BE)
                    StringLength += 2;
                else
                    StringLength++;
            }
            return StringLength;
        }

        #endregion

        #region -> Write Methods <-

        /// <summary>
        /// Write specific string to specific MemoryStream
        /// </summary>
        /// <param name="Data">MemoryStream to write text to</param>
        /// <param name="Text">Text to write in MemoryStream</param>
        /// <param name="TEncoding">TextEncoding used for text</param>
        /// <param name="AddNullCharacter">indicate if a null terminator must be appended</param>
        protected void WriteText(MemoryStream Data, string Text, TextEncodings TEncoding, bool AddNullCharacter)
        {
            byte[] BufTemp = FileStreamEx.GetEncoding(TEncoding).GetBytes(Text);
            // FIX: write the byte order mark directly to the stream instead of
            // allocating and copying an intermediate buffer (the original also
            // allocated a throwaway array in the no-BOM branch). The emitted
            // byte sequence is identical.
            if (TEncoding == TextEncodings.UTF_16)
            {
                Data.WriteByte(0xFF); // little-endian BOM
                Data.WriteByte(0xFE);
            }
            else if (TEncoding == TextEncodings.UTF_16BE)
            {
                Data.WriteByte(0xFE); // big-endian BOM
                Data.WriteByte(0xFF);
            }
            else if (TEncoding == TextEncodings.UTF8)
            {
                Data.WriteByte(0xEF); // UTF-8 BOM: EF BB BF
                Data.WriteByte(0xBB);
                Data.WriteByte(0xBF);
            }
            Data.Write(BufTemp, 0, BufTemp.Length);
            if (AddNullCharacter)
            {
                Data.WriteByte(0);
                if (TEncoding == TextEncodings.UTF_16 || TEncoding == TextEncodings.UTF_16BE)
                    Data.WriteByte(0); // UTF-16 terminator is two zero bytes
            }
        }

        #endregion

        #region -> Validating Methods <-

        // How a validation failure is reported.
        protected enum ValidatingErrorTypes
        {
            Nothing = 0,
            ID3Error,
            Exception
        }

        /// <summary>
        /// Indicate if the value of an Enumeration is valid for that enum
        /// </summary>
        /// <param name="Enumeration">Enumeration to control value for</param>
        /// <param name="ErrorType">if not valid, how the error is raised</param>
        /// <returns>true if valid otherwise false</returns>
        protected bool IsValidEnumValue(Enum Enumeration, ValidatingErrorTypes ErrorType)
        {
            if (Enum.IsDefined(Enumeration.GetType(), Enumeration))
                return true;
            if (ErrorType == ValidatingErrorTypes.ID3Error)
            {
                ErrorOccur(Enumeration.ToString() + " is out of range of " + Enumeration.GetType().ToString(), true);
            }
            else if (ErrorType == ValidatingErrorTypes.Exception)
                throw (new ArgumentOutOfRangeException(Enumeration.ToString() + " is out of range of " + Enumeration.GetType().ToString()));
            return false;
        }

        // Validate a FrameID (4 capital letters); report failure per ErrorType.
        protected bool ValidatingFrameID(string FrameIdentifier, ValidatingErrorTypes ErrorType)
        {
            bool IsValid = FramesInfo.IsValidFrameID(FrameIdentifier);
            if (!IsValid)
            {
                if (ErrorType == ValidatingErrorTypes.Exception)
                    throw (new ArgumentException("FrameID must be 4 capital letters"));
                else if (ErrorType == ValidatingErrorTypes.ID3Error)
                    ErrorOccur(FrameIdentifier + " is not valid FrameID", true);
            }
            return IsValid;
        }

        #endregion

        #region -> Abstract method and properties <-

        /// <summary>
        /// Indicate if this frame is available
        /// </summary>
        public abstract bool IsAvailable { get; }

        /// <summary>
        /// Get stream containing this frame information
        /// </summary>
        /// <param name="MinorVersion">Minor version of ID3v2</param>
        /// <returns>MemoryStream according to this frame</returns>
        public abstract MemoryStream FrameStream(int MinorVersion);

        /// <summary>
        /// Get Length of current frame
        /// </summary>
        public abstract int Length { get; }

        #endregion

        #region -> Frame Flags Properties <-

        /// <summary>
        /// Get FrameID of current frame
        /// </summary>
        public string FrameID
        {
            get { return _FrameID; }
        }

        /// <summary>
        /// Gets or sets if current frame is ReadOnly
        /// </summary>
        public bool ReadOnly
        {
            get { return (_FrameFlags & FrameFlags.ReadOnly) == FrameFlags.ReadOnly; }
            set
            {
                if (value)
                    _FrameFlags |= FrameFlags.ReadOnly;
                else
                    _FrameFlags &= ~FrameFlags.ReadOnly;
            }
        }

        /// <summary>
        /// Gets or sets if current frame is Encrypted
        /// </summary>
        public bool Encryption
        {
            get { return (_FrameFlags & FrameFlags.Encryption) == FrameFlags.Encryption; }
            set
            {
                if (value)
                    _FrameFlags |= FrameFlags.Encryption;
                else
                    _FrameFlags &= ~FrameFlags.Encryption;
            }
        }

        /// <summary>
        /// Gets or sets whether or not frame belongs in a group with other frames
        /// </summary>
        public bool GroupIdentity
        {
            get { return (_FrameFlags & FrameFlags.GroupingIdentity) == FrameFlags.GroupingIdentity; }
            set
            {
                if (value)
                    _FrameFlags |= FrameFlags.GroupingIdentity;
                else
                    _FrameFlags &= ~FrameFlags.GroupingIdentity;
            }
        }

        /// <summary>
        /// Gets or sets whether or not this frame was compressed
        /// </summary>
        public bool Compression
        {
            get { return (_FrameFlags & FrameFlags.Compression) == FrameFlags.Compression; }
            set
            {
                if (value)
                    _FrameFlags |= FrameFlags.Compression;
                else
                    _FrameFlags &= ~FrameFlags.Compression;
            }
        }

        /// <summary>
        /// Gets or sets whether an unknown frame should be preserved or discarded on tag alteration
        /// </summary>
        public bool TagAlterPreservation
        {
            get { return (_FrameFlags & FrameFlags.TagAlterPreservation) == FrameFlags.TagAlterPreservation; }
            set
            {
                if (value)
                    _FrameFlags |= FrameFlags.TagAlterPreservation;
                else
                    _FrameFlags &= ~FrameFlags.TagAlterPreservation;
            }
        }

        /// <summary>
        /// Gets or sets what to do if the file changes excluding the frame: preserved or discarded
        /// </summary>
        public bool FileAlterPreservation
        {
            get { return (_FrameFlags & FrameFlags.FileAlterPreservation) == FrameFlags.FileAlterPreservation; }
            set
            {
                if (value)
                    _FrameFlags |= FrameFlags.FileAlterPreservation;
                else
                    _FrameFlags &= ~FrameFlags.FileAlterPreservation;
            }
        }

        /// <summary>
        /// Gets or sets if current frame is a linked frame
        /// </summary>
        public bool IsLinked
        {
            get { return _IsLinked; }
            set { _IsLinked = value; }
        }

        #endregion

        /// <summary>
        /// Return a string that represents the FrameID of current Frame
        /// </summary>
        /// <returns>FrameID of current Frame</returns>
        public override string ToString()
        {
            return _FrameID;
        }

        /// <summary>
        /// Get error message of current Frame
        /// </summary>
        internal string ErrorMessage
        {
            get { return _ErrorMessage; }
        }

        /// <summary>
        /// Indicate if specific text is Ascii
        /// </summary>
        /// <param name="Text">Text to detect</param>
        /// <returns>true if it is ascii otherwise false</returns>
        internal static bool IsAscii(string Text)
        {
            // Round-tripping through ASCII is lossy exactly when a non-ASCII char is present.
            return (Encoding.ASCII.GetString(Encoding.ASCII.GetBytes(Text)) == Text);
        }
    }
}
using System.Collections.Generic;
using BEPUphysics.Constraints.SolverGroups;
using BEPUphysics.DeactivationManagement;
using BEPUphysics.Entities;
using BEPUutilities.DataStructures;

namespace BEPUphysics.Constraints
{
    /// <summary>
    /// Superclass of objects types which require solving by the velocity solver.
    /// These are updated within the internal iterative solver when owned by a space.
    /// </summary>
    public abstract class SolverUpdateable : ISimulationIslandConnectionOwner, ISpaceObject
    {
        // Index assigned by the owning Solver; managed externally.
        internal int solverIndex;

        protected internal Solver solver;
        ///<summary>
        /// Gets the solver to which the solver updateable belongs.
        ///</summary>
        public virtual Solver Solver //Note: this is virtual because some child classes (SolverGroups) need to perform their own logic when a solver gets set.
        {
            get
            {
                return solver;
            }
            protected internal set
            {
                solver = value;
            }
        }

        protected internal SimulationIslandConnection simulationIslandConnection;
        /// <summary>
        /// Gets the simulation island connection associated with this updateable.
        /// </summary>
        public SimulationIslandConnection SimulationIslandConnection
        {
            get
            {
                return simulationIslandConnection;
            }
        }

        /// <summary>
        /// List of all entities affected by this updateable.
        /// </summary>
        protected internal readonly RawList<Entity> involvedEntities = new RawList<Entity>(2);
        ///<summary>
        /// Gets the entities that this solver updateable is involved with.
        ///</summary>
        public ReadOnlyList<Entity> InvolvedEntities
        {
            get
            {
                return new ReadOnlyList<Entity>(involvedEntities);
            }
        }

        /// <summary>
        /// Number of entities used in the solver updateable.
        /// Note that this is set automatically by the sortInvolvedEntities method
        /// if it is called.
        /// </summary>
        protected internal int numberOfInvolvedEntities;

        protected internal SolverSettings solverSettings = new SolverSettings();
        ///<summary>
        /// Gets the solver settings that manage how the solver updates.
        ///</summary>
        public SolverSettings SolverSettings
        {
            get
            {
                return solverSettings;
            }
        }

        protected internal bool isActive = true;
        /// <summary>
        /// Gets or sets whether or not this solver updateable is active.
        ///
        /// When set to false, this solver updateable will be idle and its
        /// isActiveInSolver field will always be false.
        ///
        /// When set to true, the solver updateable will run normally and update if
        /// the type's activity conditions allow it.
        /// </summary>
        public bool IsActive
        {
            get { return isActive; }
            set
            {
                if (value != isActive)
                {
                    //A constraint appearing or disappearing changes the constraint system.
                    //Something that went to sleep because a constraint reached a rest state
                    //could need to solve if the constraint turned on or off.
                    ActivateInvolvedEntities();
                    isActive = value;
                }
            }
        }

        protected internal bool isActiveInSolver = true;
        /// <summary>
        /// Gets whether or not the space's solver should try to solve this object.
        /// Depends on conditions specific to each solver updateable type and whether or not
        /// it has completed its computations early. Recomputed each frame.
        /// </summary>
        public bool IsActiveInSolver
        {
            get
            {
                return isActiveInSolver;
            }
        }

        /// <summary>
        /// Activates all entities involved with this solver updateable.
        /// </summary>
        public void ActivateInvolvedEntities()
        {
            for (int i = 0; i < involvedEntities.Count; i++)
            {
                if (involvedEntities[i].isDynamic)
                {
                    //Only need to wake up one dynamic entity. That will wake up the rest.
                    //Wouldn't want to pointlessly force-wake a kinematic object.
                    involvedEntities[i].activityInformation.Activate();
                    break;
                }
            }
        }

        /// <summary>
        /// Gets the solver group that manages this solver updateable, if any.
        /// Null if not owned by a solver group.
        /// </summary>
        public SolverGroup SolverGroup { get; protected internal set; }

        protected SolverUpdateable()
        {
            //Initialize the connection.
            //It will usually be overridden and end up floating on back to the resource pool.
            simulationIslandConnection = PhysicsResources.GetSimulationIslandConnection();
            simulationIslandConnection.Owner = this;
        }

        /// <summary>
        /// Acquires exclusive access to all entities involved in the solver updateable.
        /// Entities are locked in list order; SortInvolvedEntities orders the list by
        /// instance id so all updateables acquire locks in a consistent global order.
        /// </summary>
        public void EnterLock()
        {
            for (int i = 0; i < numberOfInvolvedEntities; i++)
            {
                if (involvedEntities.Elements[i].isDynamic) //Only need to lock dynamic entities.
                {
                    involvedEntities.Elements[i].locker.Enter();
                }
            }
        }

        /// <summary>
        /// Releases exclusive access to the updateable's entities (in reverse acquisition order).
        /// This should be called within a 'finally' block following a 'try' block containing the locked operations.
        /// </summary>
        public void ExitLock()
        {
            for (int i = numberOfInvolvedEntities - 1; i >= 0; i--)
            {
                if (involvedEntities.Elements[i].isDynamic) //Only need to lock dynamic entities.
                    involvedEntities.Elements[i].locker.Exit();
            }
        }

        /// <summary>
        /// Attempts to acquire exclusive access to all entities involved in the solver updateable.
        /// If it is contested, the lock attempt is aborted.
        /// </summary>
        /// <returns>True if the lock was entered successfully, false otherwise.</returns>
        public bool TryEnterLock()
        {
            for (int i = 0; i < numberOfInvolvedEntities; i++)
            {
                if (involvedEntities.Elements[i].isDynamic) //Only need to lock dynamic entities.
                    if (!involvedEntities.Elements[i].locker.TryEnter())
                    {
                        //Turns out we can't take all the resources! Immediately drop everything.
                        for (i = i - 1 /*failed on the ith element, so start at the previous*/; i >= 0; i--)
                        {
                            if (involvedEntities[i].isDynamic)
                                involvedEntities.Elements[i].locker.Exit();
                        }
                        return false;
                    }
            }
            return true;
        }

        /// <summary>
        /// Updates the activity state of the solver updateable based on its members.
        /// </summary>
        public virtual void UpdateSolverActivity()
        {
            if (isActive)
            {
                //This is a simulation island connection. We already know that all connected objects share the
                //same simulation island (or don't have one, in the case of kinematics). All we have to do is test to see if that island is active!
                for (int i = 0; i < simulationIslandConnection.entries.Count; i++)
                {
                    var island = simulationIslandConnection.entries.Elements[i].Member.SimulationIsland;
                    if (island != null && island.isActive)
                    {
                        isActiveInSolver = true;
                        return;
                    }
                }
            }
            isActiveInSolver = false;
        }

        ///<summary>
        /// Performs the frame's configuration step.
        ///</summary>
        ///<param name="dt">Timestep duration.</param>
        public abstract void Update(float dt);
        //Will be locked by the solver during multithreaded updates.
        /// <summary>
        /// Performs any pre-solve iteration work that needs exclusive
        /// access to the members of the solver updateable.
        /// Usually, this is used for applying warmstarting impulses.
        /// </summary>
        public abstract void ExclusiveUpdate();
        //Will be locked by the solver during multithreaded updates.
        /// <summary>
        /// Computes one iteration of the constraint to meet the solver updateable's goal.
        /// </summary>
        /// <returns>The rough applied impulse magnitude.</returns>
        public abstract float SolveIteration();

        /// <summary>
        /// Handle any bookkeeping needed when the entities involved in this SolverUpdateable change.
        /// </summary>
        protected internal virtual void OnInvolvedEntitiesChanged()
        {
            //First verify that something really changed.
            bool entitiesChanged = false;
            RawList<Entity> newInvolvedEntities = PhysicsResources.GetEntityRawList();
            CollectInvolvedEntities(newInvolvedEntities);
            if (newInvolvedEntities.Count == involvedEntities.Count)
            {
                // Same count: element-wise comparison (lists are order-sensitive).
                for (int i = 0; i < newInvolvedEntities.Count; i++)
                {
                    if (newInvolvedEntities.Elements[i] != involvedEntities.Elements[i])
                    {
                        entitiesChanged = true;
                        break;
                    }
                }
            }
            else
            {
                entitiesChanged = true;
            }
            if (entitiesChanged)
            {
                //Probably need to wake things up given that such a significant change was made.
                for (int i = 0; i < involvedEntities.Count; i++)
                {
                    Entity e = involvedEntities.Elements[i];
                    if (e.isDynamic)
                    {
                        e.activityInformation.Activate();
                        break;//Don't bother activating other entities; they are all a part of the same simulation island.
                    }
                }
                //CollectInvolvedEntities will give the updateable a new simulationIslandConnection and get rid of the old one.
                CollectInvolvedEntities();

                if (SolverGroup != null)
                    SolverGroup.OnInvolvedEntitiesChanged();

                //We woke up the FORMER involved entities, now wake up the current involved entities.
                for (int i = 0; i < involvedEntities.Count; i++)
                {
                    Entity e = involvedEntities.Elements[i];
                    if (e.isDynamic)
                    {
                        e.activityInformation.Activate();
                        break; //Don't bother activating other entities; they are all a part of the same simulation island.
                    }
                }
            }
            PhysicsResources.GiveBack(newInvolvedEntities);
        }

        /// <summary>
        /// Collects the entities involved in a solver updateable and sets up the internal listings.
        /// </summary>
        protected internal void CollectInvolvedEntities()
        {
            involvedEntities.Clear();
            CollectInvolvedEntities(involvedEntities);
            SortInvolvedEntities();
            UpdateConnectedMembers();
        }

        /// <summary>
        /// Adds entities associated with the solver item to the involved entities list.
        /// This allows the non-batched multithreading system to lock properly.
        /// </summary>
        protected internal abstract void CollectInvolvedEntities(RawList<Entity> outputInvolvedEntities);

        /// <summary>
        /// Sorts the involved entities according to their hashcode to allow non-batched multithreading to avoid deadlocks.
        /// </summary>
        protected internal void SortInvolvedEntities()
        {
            numberOfInvolvedEntities = involvedEntities.Count;
            involvedEntities.Sort(comparer);
        }

        // Replaces the simulation island connection with a fresh one covering the
        // current involvedEntities, notifying the deactivation manager as required.
        void UpdateConnectedMembers()
        {
            //Since we're about to change this updateable's connections, make sure the
            //simulation islands hear about it. This is NOT thread safe.
            var deactivationManager = simulationIslandConnection.DeactivationManager;

            //Orphan the simulation island connection since it's about to get replaced.
            //There's three possible situations here:
            //1) We belong to the DeactivationManager.
            //2) We don't belong to a DeactivationManager and the connection is slated for removal (we were in the deactivation manager before).
            //   This can happen when a solver updateable associated with a pair gets removed and cleaned up.
            //3) We don't belong to a DeactivationManager and the connection is not slated for removal (we weren't in a deactivation manager before).
            //In Case #1, all we have to do is orphan the connection and remove it from the manager. This performs any splits necessary. The replacement connection will force any necessary merges.
            //In Case #2, we were just removed but the connection is still considered to have an owner.
            //It won't get cleaned up by the removal, and doing it here would be premature: orphan the connection so the next deactivation manager splits flush cleans it up!
            //In Case #3, we have full control over the simulation island connection because there is no interaction with a deactivation manager. We can just get rid of it directly.
            simulationIslandConnection.Owner = null;
            if (deactivationManager != null)
            {
                deactivationManager.Remove(simulationIslandConnection);
            }
            else if (!simulationIslandConnection.SlatedForRemoval) //If it's already been removed, cleaning it ourselves would prevent proper simulation island splits in the deactivation manager split flush.
                PhysicsResources.GiveBack(simulationIslandConnection); //Well, since we're going to orphan the connection, we'll need to take care of its trash.

            //The SimulationIslandConnection is immutable.
            //So create a new one!
            //Assume we've already dealt with the old connection.
            simulationIslandConnection = PhysicsResources.GetSimulationIslandConnection();
            for (int i = 0; i < involvedEntities.Count; i++)
            {
                simulationIslandConnection.Add(involvedEntities.Elements[i].activityInformation);
            }
            simulationIslandConnection.Owner = this;
            //Add the new reference back.
            if (deactivationManager != null)
                deactivationManager.Add(simulationIslandConnection);
        }

        private static EntityComparer comparer = new EntityComparer();
        // Orders entities by InstanceId; gives all updateables a consistent lock order.
        private class EntityComparer : IComparer<Entity>
        {
            #region IComparer<Entity> Members

            int IComparer<Entity>.Compare(Entity x, Entity y)
            {
                if (x.InstanceId > y.InstanceId)
                    return 1;
                if (x.InstanceId < y.InstanceId)
                    return -1;
                return 0;
            }

            #endregion
        }

        protected internal Space space;
        Space ISpaceObject.Space
        {
            get
            {
                return space;
            }
            set
            {
                space = value;
            }
        }

        /// <summary>
        /// Gets or sets the user data associated with this object.
        /// </summary>
        public object Tag { get; set; }

        /// <summary>
        /// Called after the object is added to a space.
        /// </summary>
        /// <param name="newSpace">Space to which this object was added.</param>
        public virtual void OnAdditionToSpace(Space newSpace)
        {
        }

        /// <summary>
        /// Called before an object is removed from its space.
        /// </summary>
        public virtual void OnRemovalFromSpace(Space oldSpace)
        {
        }

        ///<summary>
        /// Called when the updateable is added to a solver.
        ///</summary>
        ///<param name="newSolver">Solver to which the updateable was added.</param>
        public virtual void OnAdditionToSolver(Solver newSolver)
        {
        }

        /// <summary>
        /// Called when the updateable is removed from its solver.
        /// </summary>
        /// <param name="oldSolver">Solver from which the updateable was removed.</param>
        public virtual void OnRemovalFromSolver(Solver oldSolver)
        {
        }
    }
}
namespace Microsoft.Protocols.TestSuites.MS_ASRM
{
    using System.Xml.XPath;
    using Common.DataStructures;
    using Microsoft.Protocols.TestSuites.Common;
    using Microsoft.Protocols.TestTools;
    using Request = Microsoft.Protocols.TestSuites.Common.Request;

    /// <summary>
    /// Adapter class of MS-ASRM.
    /// </summary>
    public partial class MS_ASRMAdapter : ManagedAdapterBase, IMS_ASRMAdapter
    {
        #region Variables

        /// <summary>
        /// The ActiveSync client used to carry every request/response exchanged with the SUT.
        /// </summary>
        private ActiveSyncClient activeSyncClient;

        #endregion

        #region IMS_ASRMAdapter Properties

        /// <summary>
        /// Gets the XML request sent to protocol SUT.
        /// </summary>
        public IXPathNavigable LastRawRequestXml
        {
            get
            {
                return this.activeSyncClient.LastRawRequestXml;
            }
        }

        /// <summary>
        /// Gets the XML response received from protocol SUT.
        /// </summary>
        public IXPathNavigable LastRawResponseXml
        {
            get
            {
                return this.activeSyncClient.LastRawResponseXml;
            }
        }

        #endregion

        #region Initialize TestSuite

        /// <summary>
        /// Overrides IAdapter's Initialize() and sets default protocol short name of the testSite.
        /// </summary>
        /// <param name="testSite">Transfer ITestSite into adapter, make adapter can use ITestSite's function.</param>
        public override void Initialize(TestTools.ITestSite testSite)
        {
            base.Initialize(testSite);
            testSite.DefaultProtocolDocShortName = "MS-ASRM";

            // Merge the common configuration
            Common.MergeConfiguration(testSite);

            // Build the client with the default user's credentials from configuration.
            ActiveSyncClient client = new ActiveSyncClient(testSite);
            client.AcceptLanguage = "en-us";
            client.UserName = Common.GetConfigurationPropertyValue("User1Name", testSite);
            client.Password = Common.GetConfigurationPropertyValue("User1Password", testSite);
            this.activeSyncClient = client;
        }

        #endregion

        #region MS-ASRMAdapter Members

        /// <summary>
        /// Sync data from the server.
        /// </summary>
        /// <param name="syncRequest">The request for Sync command.</param>
        /// <returns>The sync result which is returned from server.</returns>
        public SyncStore Sync(SyncRequest syncRequest)
        {
            SyncResponse rawResponse = this.activeSyncClient.Sync(syncRequest, true);
            Site.Assert.IsNotNull(rawResponse, "If the command is successful, the response should not be null.");
            return Common.LoadSyncResponse(rawResponse);
        }

        /// <summary>
        /// Find an e-mail with specific subject.
        /// </summary>
        /// <param name="request">The request for Sync command.</param>
        /// <param name="subject">The subject of the e-mail to find.</param>
        /// <param name="isRetryNeeded">A boolean value specifies whether need retry.</param>
        /// <returns>The Sync result.</returns>
        public Sync SyncEmail(SyncRequest request, string subject, bool isRetryNeeded)
        {
            Sync syncResult = this.activeSyncClient.SyncEmail(request, subject, isRetryNeeded);

            // Run the capture-code verifications against the exchanged data.
            this.VerifyTransport();
            this.VerifyWBXMLCapture();
            this.VerifySyncResponse(syncResult);
            return syncResult;
        }

        /// <summary>
        /// Fetch all information about exchange object.
        /// </summary>
        /// <param name="itemOperationsRequest">The request for ItemOperations command.</param>
        /// <returns>The ItemOperations result which is returned from server.</returns>
        public ItemOperationsStore ItemOperations(ItemOperationsRequest itemOperationsRequest)
        {
            ItemOperationsResponse rawResponse = this.activeSyncClient.ItemOperations(itemOperationsRequest, DeliveryMethodForFetch.Inline);
            Site.Assert.IsNotNull(rawResponse, "If the command is successful, the response should not be null.");

            ItemOperationsStore store = Common.LoadItemOperationsResponse(rawResponse);
            this.VerifyTransport();
            this.VerifyWBXMLCapture();
            this.VerifyItemOperationsResponse(store);
            return store;
        }

        /// <summary>
        /// Search items on server.
        /// </summary>
        /// <param name="searchRequest">The request for Search command.</param>
        /// <returns>The Search result which is returned from server.</returns>
        public SearchStore Search(SearchRequest searchRequest)
        {
            SearchResponse rawResponse = this.activeSyncClient.Search(searchRequest, true);
            Site.Assert.IsNotNull(rawResponse, "If the command is successful, the response should not be null.");

            SearchStore store = Common.LoadSearchResponse(rawResponse, Common.GetConfigurationPropertyValue("ActiveSyncProtocolVersion", this.Site));
            this.VerifyTransport();
            this.VerifyWBXMLCapture();
            this.VerifySearchResponse(store);
            return store;
        }

        /// <summary>
        /// Synchronize the collection hierarchy.
        /// </summary>
        /// <param name="request">The request for FolderSync command.</param>
        /// <returns>The FolderSync response which is returned from server.</returns>
        public FolderSyncResponse FolderSync(FolderSyncRequest request)
        {
            FolderSyncResponse folderSyncResponse = this.activeSyncClient.FolderSync(request);
            Site.Assert.IsNotNull(folderSyncResponse, "If the command is successful, the response should not be null.");
            return folderSyncResponse;
        }

        /// <summary>
        /// Gets the RightsManagementInformation by Settings command.
        /// </summary>
        /// <returns>The Settings response which is returned from server.</returns>
        public SettingsResponse Settings()
        {
            // An empty Get element asks the server to return the rights management information.
            SettingsRequest settingsRequest = new SettingsRequest();
            settingsRequest.RequestData.RightsManagementInformation = new Request.SettingsRightsManagementInformation { Get = string.Empty };
            settingsRequest.RequestData.UserInformation = new Request.SettingsUserInformation { Item = string.Empty };

            SettingsResponse settingsResponse = this.activeSyncClient.Settings(settingsRequest);
            Site.Assert.IsNotNull(settingsResponse, "If the command is successful, the response should not be null.");

            this.VerifyWBXMLCapture();
            this.VerifySettingsResponse(settingsResponse);
            return settingsResponse;
        }

        /// <summary>
        /// Sends MIME-formatted e-mail messages to the server.
        /// </summary>
        /// <param name="sendMailRequest">The request for SendMail command.</param>
        /// <returns>The SendMail response which is returned from server.</returns>
        public SendMailResponse SendMail(SendMailRequest sendMailRequest)
        {
            SendMailResponse sendMailResponse = this.activeSyncClient.SendMail(sendMailRequest);
            Site.Assert.IsNotNull(sendMailResponse, "If the command is successful, the response should not be null.");
            return sendMailResponse;
        }

        /// <summary>
        /// Reply to messages without retrieving the full, original message from the server.
        /// </summary>
        /// <param name="smartReplyRequest">The request for SmartReply command.</param>
        /// <returns>The SmartReply response which is returned from server.</returns>
        public SmartReplyResponse SmartReply(SmartReplyRequest smartReplyRequest)
        {
            SmartReplyResponse smartReplyResponse = this.activeSyncClient.SmartReply(smartReplyRequest);
            Site.Assert.IsNotNull(smartReplyResponse, "If the command is successful, the response should not be null.");
            return smartReplyResponse;
        }

        /// <summary>
        /// Forwards messages without retrieving the full, original message from the server.
        /// </summary>
        /// <param name="smartForwardRequest">The request for SmartForward command.</param>
        /// <returns>The SmartForward response which is returned from server.</returns>
        public SmartForwardResponse SmartForward(SmartForwardRequest smartForwardRequest)
        {
            SmartForwardResponse smartForwardResponse = this.activeSyncClient.SmartForward(smartForwardRequest);
            Site.Assert.IsNotNull(smartForwardResponse, "If the command is successful, the response should not be null.");
            return smartForwardResponse;
        }

        /// <summary>
        /// Change user to call ActiveSync command.
        /// </summary>
        /// <param name="userName">The name of a user.</param>
        /// <param name="userPassword">The password of a user.</param>
        /// <param name="userDomain">The domain which the user belongs to.</param>
        public void SwitchUser(string userName, string userPassword, string userDomain)
        {
            this.activeSyncClient.UserName = userName;
            this.activeSyncClient.Password = userPassword;
            this.activeSyncClient.Domain = userDomain;
        }

        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Drawing;
using System.IO;
using System.Text;
using System.Windows.Forms;
using Appccelerate.EventBroker;
using Ninject;
using NLog;
using SharpFlame.Core.Extensions;
using SharpFlame.Core;
using SharpFlame.Core.Collections;
using SharpFlame.Core.Domain;
using SharpFlame.Core.Domain.Colors;
using SharpFlame.Domain;
using SharpFlame.Domain.ObjData;
using SharpFlame.FileIO;
using SharpFlame.Graphics.OpenGL;
using SharpFlame.Mapping;
using SharpFlame.Mapping.Objects;
using SharpFlame.Mapping.Tiles;
using SharpFlame.Maths;
using SharpFlame.MouseTools;
using SharpFlame.Painters;
using SharpFlame.Settings;
using SharpFlame.Util;

namespace SharpFlame
{
    // Global application state holder for the SharpFlame map editor.
    // NOTE(review): almost everything here is mutable static state; access is assumed
    // single-threaded (UI thread) — TODO confirm.
    public sealed class App
    {
        private static readonly Logger logger = LogManager.GetCurrentClassLogger();
        public static SRgb MinimapFeatureColour;
        public static bool DebugGL = false;
        public static string SettingsPath;
        public static string AutoSavePath;
        public static Random Random;
        public static bool ProgramInitialized = false;
        public static bool ProgramInitializeFinished = false;
        public static Icon ProgramIcon;
        public static ObservableCollection<string> CommandLinePaths = new ObservableCollection<string>();
        public static int GLTexture_NoTile;
        public static int GLTexture_OverflowTile;
        public static clsKeysActive IsViewKeyDown = new clsKeysActive();
        // Editing brushes; the constructor arguments are (radius, shape).
        public static clsBrush TextureBrush = new clsBrush(0.0D, ShapeType.Circle);
        public static clsBrush TerrainBrush = new clsBrush(2.0D, ShapeType.Circle);
        public static clsBrush HeightBrush = new clsBrush(2.0D, ShapeType.Circle);
        public static clsBrush CliffBrush = new clsBrush(2.0D, ShapeType.Circle);
        public static clsBrush SmoothRadius = new clsBrush(1.0D, ShapeType.Square);
        public static bool DisplayTileOrientation;
        public static ObjectData ObjectData = null;
        // -1 means no texture currently selected.
        public static int SelectedTextureNum = -1;
        public static TileOrientation TextureOrientation = new TileOrientation(false, false, false);
        public static Terrain SelectedTerrain;
        public static Road SelectedRoad;
        public static ObservableCollection<clsTileType> TileTypes = new ObservableCollection<clsTileType>();
        // Grown manually by TemplateDroidType_Add; TemplateDroidTypeCount tracks the used length.
        public static DroidDesign.clsTemplateDroidType[] TemplateDroidTypes = new DroidDesign.clsTemplateDroidType[0];
        public static int TemplateDroidTypeCount;
        public static readonly UTF8Encoding UTF8Encoding = new UTF8Encoding(false, false);
        public static readonly ASCIIEncoding ASCIIEncoding = new ASCIIEncoding();
        // Vision radius exponent; actual radius is derived in VisionRadius_2E_Changed.
        public static int VisionRadius_2E;
        public static double VisionRadius;
        public static Map Copied_Map;
        public static Tileset Tileset_Arizona;
        public static Tileset Tileset_Urban;
        public static Tileset Tileset_Rockies;
        public static Painter Painter_Arizona;
        public static Painter Painter_Urban;
        public static Painter Painter_Rockies;
        public static Font UnitLabelBaseFont;
        public static GLFont UnitLabelFont;
        //Public TextureViewFont As GLFont
        public static Player[] PlayerColour = new Player[Constants.InternalPlayerCountMax + 1];
        public static DroidDesign.clsTemplateDroidType TemplateDroidType_Droid;
        public static DroidDesign.clsTemplateDroidType TemplateDroidType_Cyborg;
        public static DroidDesign.clsTemplateDroidType TemplateDroidType_CyborgConstruct;
        public static DroidDesign.clsTemplateDroidType TemplateDroidType_CyborgRepair;
        public static DroidDesign.clsTemplateDroidType TemplateDroidType_CyborgSuper;
        public static DroidDesign.clsTemplateDroidType TemplateDroidType_Transporter;
        public static DroidDesign.clsTemplateDroidType TemplateDroidType_Person;
        public static DroidDesign.clsTemplateDroidType TemplateDroidType_Null;
        public static bool ShowIDErrorMessage = true;
        // Rendering toggles.
        public static bool Draw_TileTextures = true;
        public static DrawLighting Draw_Lighting = DrawLighting.Half;
        public static bool Draw_TileWireframe;
        public static bool Draw_Units = true;
        public static bool Draw_VertexTerrain;
        public static bool Draw_Gateways;
        public static bool Draw_ScriptMarkers = true;
        public static ViewMoveType ViewMoveType = ViewMoveType.RTS;
        public static bool RTSOrbit = true;
        public static Matrix3DMath.Matrix3D SunAngleMatrix = new Matrix3DMath.Matrix3D();
        public static clsBrush VisionSectors = new clsBrush(0.0D, ShapeType.Circle);
        public static sLayerList LayerList;

        private static ObservableCollection<Tileset> tilesets = new ObservableCollection<Tileset>();

        public static ObservableCollection<Tileset> Tilesets
        {
            get { return tilesets; }
        }

        // TODO: Remove these once everthing uses ninject.
        public static SettingsManager SettingsManager { get; set; }
        public static KeyboardManager KeyboardManager { get; set; }
        public static ToolOptions ToolOptions { get; set; }

        /// <summary>
        /// Holder for the Status form.
        /// </summary>
        /// <value>The status dialog.</value>
        public static Gui.Dialogs.Status StatusDialog { get; set; }

        /// <summary>
        /// The Ninject Kernel
        /// </summary>
        /// <value>The kernel.</value>
        public static IKernel Kernel { get; set; }

        // Populates the static tile-type, player-colour and droid-template tables.
        // (Method name "Initalize" is a pre-existing typo kept for caller compatibility.)
        public static void Initalize()
        {
            createTileTypes();
            createPlayerColours();
            CreateTemplateDroidTypes();
        }

        // Fills PlayerColour with the fixed per-player palette, then derives minimap colours.
        private static void createPlayerColours()
        {
            for ( var i = 0; i <= Constants.InternalPlayerCountMax; i++ )
            {
                App.PlayerColour[i] = new Player();
            }
            App.PlayerColour[0].Colour.Red = 0.0F;
            App.PlayerColour[0].Colour.Green = 96.0F / 255.0F;
            App.PlayerColour[0].Colour.Blue = 0.0F;
            App.PlayerColour[1].Colour.Red = 160.0F / 255.0F;
            App.PlayerColour[1].Colour.Green = 112.0F / 255.0F;
            App.PlayerColour[1].Colour.Blue = 0.0F;
            App.PlayerColour[2].Colour.Red = 128.0F / 255.0F;
            App.PlayerColour[2].Colour.Green = 128.0F / 255.0F;
            App.PlayerColour[2].Colour.Blue = 128.0F / 255.0F;
            App.PlayerColour[3].Colour.Red = 0.0F;
            App.PlayerColour[3].Colour.Green = 0.0F;
            App.PlayerColour[3].Colour.Blue = 0.0F;
            App.PlayerColour[4].Colour.Red = 128.0F / 255.0F;
            App.PlayerColour[4].Colour.Green = 0.0F;
            App.PlayerColour[4].Colour.Blue = 0.0F;
            App.PlayerColour[5].Colour.Red = 32.0F / 255.0F;
            App.PlayerColour[5].Colour.Green = 48.0F / 255.0F;
            App.PlayerColour[5].Colour.Blue = 96.0F / 255.0F;
            App.PlayerColour[6].Colour.Red = 144.0F / 255.0F;
            App.PlayerColour[6].Colour.Green = 0.0F;
            App.PlayerColour[6].Colour.Blue = 112 / 255.0F;
            App.PlayerColour[7].Colour.Red = 0.0F;
            App.PlayerColour[7].Colour.Green = 128.0F / 255.0F;
            App.PlayerColour[7].Colour.Blue = 128.0F / 255.0F;
            App.PlayerColour[8].Colour.Red = 128.0F / 255.0F;
            App.PlayerColour[8].Colour.Green = 192.0F / 255.0F;
            App.PlayerColour[8].Colour.Blue = 0.0F;
            App.PlayerColour[9].Colour.Red = 176.0F / 255.0F;
            App.PlayerColour[9].Colour.Green = 112.0F / 255.0F;
            App.PlayerColour[9].Colour.Blue = 112.0F / 255.0F;
            App.PlayerColour[10].Colour.Red = 224.0F / 255.0F;
            App.PlayerColour[10].Colour.Green = 224.0F / 255.0F;
            App.PlayerColour[10].Colour.Blue = 224.0F / 255.0F;
            App.PlayerColour[11].Colour.Red = 32.0F / 255.0F;
            App.PlayerColour[11].Colour.Green = 32.0F / 255.0F;
            App.PlayerColour[11].Colour.Blue = 255.0F / 255.0F;
            App.PlayerColour[12].Colour.Red = 0.0F;
            App.PlayerColour[12].Colour.Green = 160.0F / 255.0F;
            App.PlayerColour[12].Colour.Blue = 0.0F;
            App.PlayerColour[13].Colour.Red = 64.0F / 255.0F;
            App.PlayerColour[13].Colour.Green = 0.0F;
            App.PlayerColour[13].Colour.Blue = 0.0F;
            App.PlayerColour[14].Colour.Red = 16.0F / 255.0F;
            App.PlayerColour[14].Colour.Green = 0.0F;
            App.PlayerColour[14].Colour.Blue = 64.0F / 255.0F;
            App.PlayerColour[15].Colour.Red = 64.0F / 255.0F;
            App.PlayerColour[15].Colour.Green = 96.0F / 255.0F;
            App.PlayerColour[15].Colour.Blue = 0.0F;
            for ( var i = 0; i <= Constants.InternalPlayerCountMax; i++ )
            {
                App.PlayerColour[i].CalcMinimapColour();
            }
            // Features use a fixed mid-grey on the minimap.
            App.MinimapFeatureColour.Red = 0.5F;
            App.MinimapFeatureColour.Green = 0.5F;
            App.MinimapFeatureColour.Blue = 0.5F;
        }

        // Registers the fixed list of tile types with their editor display colours.
        private static void createTileTypes()
        {
            clsTileType newTileType;
            newTileType = new clsTileType();
            newTileType.Name = "Sand";
            newTileType.DisplayColour.Red = 1.0F;
            newTileType.DisplayColour.Green = 1.0F;
            newTileType.DisplayColour.Blue = 0.0F;
            App.TileTypes.Add(newTileType);
            newTileType = new clsTileType();
            newTileType.Name = "Sandy Brush";
            newTileType.DisplayColour.Red = 0.5F;
            newTileType.DisplayColour.Green = 0.5F;
            newTileType.DisplayColour.Blue = 0.0F;
            App.TileTypes.Add(newTileType);
            newTileType = new clsTileType();
            newTileType.Name = "Rubble";
            newTileType.DisplayColour.Red = 0.25F;
            newTileType.DisplayColour.Green = 0.25F;
            newTileType.DisplayColour.Blue = 0.25F;
            App.TileTypes.Add(newTileType);
            newTileType = new clsTileType();
            newTileType.Name = "Green Mud";
            newTileType.DisplayColour.Red = 0.0F;
            newTileType.DisplayColour.Green = 0.5F;
            newTileType.DisplayColour.Blue = 0.0F;
            App.TileTypes.Add(newTileType);
            newTileType = new clsTileType();
            newTileType.Name = "Red Brush";
            newTileType.DisplayColour.Red = 1.0F;
            newTileType.DisplayColour.Green = 0.0F;
            newTileType.DisplayColour.Blue = 0.0F;
            App.TileTypes.Add(newTileType);
            newTileType = new clsTileType();
            newTileType.Name = "Pink Rock";
            newTileType.DisplayColour.Red = 1.0F;
            newTileType.DisplayColour.Green = 0.5F;
            newTileType.DisplayColour.Blue = 0.5F;
            App.TileTypes.Add(newTileType);
            newTileType = new clsTileType();
            newTileType.Name = "Road";
            newTileType.DisplayColour.Red = 0.0F;
            newTileType.DisplayColour.Green = 0.0F;
            newTileType.DisplayColour.Blue = 0.0F;
            App.TileTypes.Add(newTileType);
            newTileType = new clsTileType();
            newTileType.Name = "Water";
            newTileType.DisplayColour.Red = 0.0F;
            newTileType.DisplayColour.Green = 0.0F;
            newTileType.DisplayColour.Blue = 1.0F;
            App.TileTypes.Add(newTileType);
            newTileType = new clsTileType();
            newTileType.Name = "Cliff Face";
            newTileType.DisplayColour.Red = 0.5F;
            newTileType.DisplayColour.Green = 0.5F;
            newTileType.DisplayColour.Blue = 0.5F;
            App.TileTypes.Add(newTileType);
            newTileType = new clsTileType();
            newTileType.Name = "Baked Earth";
            newTileType.DisplayColour.Red = 0.5F;
            newTileType.DisplayColour.Green = 0.0F;
            newTileType.DisplayColour.Blue = 0.0F;
            App.TileTypes.Add(newTileType);
            newTileType = new clsTileType();
            newTileType.Name = "Sheet Ice";
            newTileType.DisplayColour.Red = 1.0F;
            newTileType.DisplayColour.Green = 1.0F;
            newTileType.DisplayColour.Blue = 1.0F;
            App.TileTypes.Add(newTileType);
            newTileType = new clsTileType();
            newTileType.Name = "Slush";
            newTileType.DisplayColour.Red = 0.75F;
            newTileType.DisplayColour.Green = 0.75F;
            newTileType.DisplayColour.Blue = 0.75F;
            App.TileTypes.Add(newTileType);
        }

        // Resolves SettingsPath/AutoSavePath (per-user or portable) and creates the autosave folder.
        public static void SetProgramSubDirs()
        {
#if !Portable
            var myDocumentsProgramPath = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments).CombinePathWith(".flaME", true);
            SettingsPath = myDocumentsProgramPath.CombinePathWith("settings.json");
            AutoSavePath = myDocumentsProgramPath.CombinePathWith("autosave", true);
#else
            SettingsPath = AppDomain.CurrentDomain.BaseDirectory.CombinePathWith("settings.json");
            AutoSavePath = AppDomain.CurrentDomain.BaseDirectory.CombinePathWith("autosave", true);
#endif
            // Create the directories.
            if ( !Directory.Exists(AutoSavePath) )
            {
                try
                {
                    Directory.CreateDirectory(AutoSavePath);
                }
                catch ( Exception ex )
                {
                    // Autosave is essential; bail out of the app if we cannot create it.
                    logger.Error("Unable to create folder \"{0}\": {1}", AutoSavePath, ex.Message);
                    Application.Exit();
                }
            }
        }

        // Recomputes VisionRadius from the exponent and requests a redraw.
        public static void VisionRadius_2E_Changed()
        {
            var eve = Kernel.Get<IEventBroker>();
            VisionRadius = 256.0D * Math.Pow(2.0D, (VisionRadius_2E / 2.0D));
            if (eve != null)
            {
                View_Radius_Set(VisionRadius);
                eve.DrawLater(typeof(App));
            }
        }

        // Left-pads a number with zeros to at least the given digit count.
        public static string MinDigits(int Number, int Digits)
        {
            var ReturnResult = Number.ToStringInvariant();
            ReturnResult = ReturnResult.PadLeft(Digits, '0');
            return ReturnResult;
        }

        public static void ViewKeyDown_Clear()
        {
            IsViewKeyDown.Deactivate();
            // foreach ( Option<KeyboardControl> control in KeyboardManager.OptionsKeyboardControls.Options )
            // {
            // ((KeyboardControl)(KeyboardManager.KeyboardProfile.GetValue(control))).KeysChanged(IsViewKeyDown);
            // }
        }

        // Creates and registers every known droid template type.
        public static void CreateTemplateDroidTypes()
        {
            TemplateDroidType_Droid = new DroidDesign.clsTemplateDroidType("Droid", "DROID");
            TemplateDroidType_Droid.Num = TemplateDroidType_Add(TemplateDroidType_Droid);
            TemplateDroidType_Cyborg = new DroidDesign.clsTemplateDroidType("Cyborg", "CYBORG");
            TemplateDroidType_Cyborg.Num = TemplateDroidType_Add(TemplateDroidType_Cyborg);
            TemplateDroidType_CyborgConstruct = new DroidDesign.clsTemplateDroidType("Cyborg Construct", "CYBORG_CONSTRUCT");
            TemplateDroidType_CyborgConstruct.Num = TemplateDroidType_Add(TemplateDroidType_CyborgConstruct);
            TemplateDroidType_CyborgRepair = new DroidDesign.clsTemplateDroidType("Cyborg Repair", "CYBORG_REPAIR");
            TemplateDroidType_CyborgRepair.Num = TemplateDroidType_Add(TemplateDroidType_CyborgRepair);
            TemplateDroidType_CyborgSuper = new DroidDesign.clsTemplateDroidType("Cyborg Super", "CYBORG_SUPER");
            TemplateDroidType_CyborgSuper.Num = TemplateDroidType_Add(TemplateDroidType_CyborgSuper);
            TemplateDroidType_Transporter = new DroidDesign.clsTemplateDroidType("Transporter", "TRANSPORTER");
            TemplateDroidType_Transporter.Num = TemplateDroidType_Add(TemplateDroidType_Transporter);
            TemplateDroidType_Person = new DroidDesign.clsTemplateDroidType("Person", "PERSON");
            TemplateDroidType_Person.Num = TemplateDroidType_Add(TemplateDroidType_Person);
            TemplateDroidType_Null = new DroidDesign.clsTemplateDroidType("Null Droid", "ZNULLDROID");
            TemplateDroidType_Null.Num = TemplateDroidType_Add(TemplateDroidType_Null);
        }

        // Case-insensitive lookup of a template droid type by its template code; null if not found.
        public static DroidDesign.clsTemplateDroidType GetTemplateDroidTypeFromTemplateCode(string code)
        {
            var lCaseCode = code.ToLower();
            for (var a = 0; a <= TemplateDroidTypeCount - 1; a++ )
            {
                if ( TemplateDroidTypes[a].TemplateCode.ToLower() == lCaseCode )
                {
                    return TemplateDroidTypes[a];
                }
            }
            return null;
        }

        // Appends a droid type to the registry and returns its assigned index.
        public static int TemplateDroidType_Add(DroidDesign.clsTemplateDroidType NewDroidType)
        {
            Array.Resize(ref TemplateDroidTypes, TemplateDroidTypeCount + 1);
            TemplateDroidTypes[TemplateDroidTypeCount] = NewDroidType;
            var returnResult = TemplateDroidTypeCount;
            TemplateDroidTypeCount++;
            return returnResult;
        }

        // Shows the warnings dialog when the result carries any warnings.
        public static void ShowWarnings(Result result)
        {
            if ( !result.HasWarnings )
            {
                return;
            }
            var warningsForm = new frmWarnings(result, result.Text);
            warningsForm.Show();
            warningsForm.Activate();
        }

        // Maps a turret name (case-insensitive) to its TurretType; Unknown for unrecognized names.
        public static TurretType GetTurretTypeFromName(string turretTypeName)
        {
            switch ( turretTypeName.ToLower() )
            {
                case "weapon":
                    return TurretType.Weapon;
                case "construct":
                    return TurretType.Construct;
                case "repair":
                    return TurretType.Repair;
                case "sensor":
                    return TurretType.Sensor;
                case "brain":
                    return TurretType.Brain;
                case "ecm":
                    return TurretType.ECM;
                default:
                    return TurretType.Unknown;
            }
        }

        // Warns (once, unless the user opts out) when a unit's ID differs from the intended ID.
        public static void ErrorIDChange(UInt32 intendedID, Unit idUnit, string nameOfErrorSource)
        {
            if ( !ShowIDErrorMessage )
            {
                return;
            }
            if ( idUnit.ID == intendedID )
            {
                return;
            }
            var messageText = string.Format("An object\'s ID has been changed unexpectedly. The error was in \"{0}\"\n\n" +
                "The object is of type {1} and is at map position {2}. " +
                "It\'s ID was {3}, but is now {4}.\n\n" +
                "Click Cancel to stop seeing this message. Otherwise, click OK.",
                nameOfErrorSource, idUnit.TypeBase.GetDisplayTextCode(), idUnit.GetPosText(), intendedID.ToStringInvariant(), idUnit.ID.ToStringInvariant());
            const string caption = "An object\'s ID has been changed unexpectedly.";
            var result = MessageBox.Show(messageText, caption, MessageBoxButtons.OKCancel, MessageBoxIcon.None);
            if ( result == DialogResult.Cancel )
            {
                ShowIDErrorMessage = false;
            }
        }

        // Records a warning when a unit with the invalid ID 0 was reassigned a fresh ID.
        public static void ZeroIDWarning(Unit IDUnit, UInt32 NewID, Result Output)
        {
            var MessageText = string.Format ("An object\'s ID has been changed from 0 to {0}. Zero is not a valid ID. The object is of type {1} and is at map position {2}.",
                NewID.ToStringInvariant (), IDUnit.TypeBase.GetDisplayTextCode (), IDUnit.GetPosText ());
            //MsgBox(MessageText, MsgBoxStyle.OkOnly)
            Output.WarningAdd(MessageText);
        }

        // True when the world position falls inside the half-open tile rectangle
        // [startTile, finishTile) expressed in world units.
        public static bool PosIsWithinTileArea(XYInt worldHorizontal, XYInt startTile, XYInt finishTile)
        {
            return worldHorizontal.X >= startTile.X * Constants.TerrainGridSpacing &
                   worldHorizontal.Y >= startTile.Y * Constants.TerrainGridSpacing &
                   worldHorizontal.X < finishTile.X * Constants.TerrainGridSpacing &
                   worldHorizontal.Y < finishTile.Y * Constants.TerrainGridSpacing;
        }

        // True when Size is a power of two (checked via log2 being an integer).
        public static bool SizeIsPowerOf2(int Size)
        {
            var Power = Math.Log(Size) / Math.Log(2.0D);
            return Power == Power.ToInt();
        }

        // Loads every tileset directory found under TilesetsPath, tagging the three
        // original Warzone tilesets with friendly names and background colours.
        public static Result LoadTilesets(string TilesetsPath)
        {
            var returnResult = new Result("Loading tilesets", false);
            logger.Info("Loading tilesets");
            string[] tilesetDirs;
            try
            {
                tilesetDirs = Directory.GetDirectories(TilesetsPath);
            }
            catch ( Exception ex )
            {
                returnResult.ProblemAdd(ex.Message);
                return returnResult;
            }
            var tmpTilesets = new List<Tileset> ();;
            foreach ( var path in tilesetDirs )
            {
                var tileset = new Tileset();
                var loader = new TilesetLoader (ref tileset);
                var result = loader.Load (path);
                returnResult.Add(result);
                if ( !result.HasProblems )
                {
                    tmpTilesets.Add(tileset);
                }
            }
            foreach ( var tileset in tmpTilesets )
            {
                if ( tileset.Name == "tertilesc1hw" )
                {
                    tileset.Name = "Arizona";
                    Tileset_Arizona = tileset;
                    tileset.IsOriginal = true;
                    tileset.BGColour = new SRgb(204.0f / 255.0f, 149.0f / 255.0f, 70.0f / 255.0f);
                }
                else if ( tileset.Name == "tertilesc2hw" )
                {
                    tileset.Name = "Urban";
                    Tileset_Urban = tileset;
                    tileset.IsOriginal = true;
                    tileset.BGColour = new SRgb(118.0f / 255.0f, 165.0f / 255.0f, 203.0f / 255.0f);
                }
                else if ( tileset.Name == "tertilesc3hw" )
                {
                    tileset.Name = "Rocky Mountains";
                    Tileset_Rockies = tileset;
                    tileset.IsOriginal = true;
                    tileset.BGColour = new SRgb(182.0f / 255.0f, 225.0f / 255.0f, 236.0f / 255.0f);
                }
                Tilesets.Add (tileset);
            }
            if ( Tileset_Arizona == null )
            {
                returnResult.WarningAdd("Arizona tileset is missing.");
            }
            if ( Tileset_Urban == null )
            {
                returnResult.WarningAdd("Urban tileset is missing.");
            }
            if ( Tileset_Rockies == null )
            {
                returnResult.WarningAdd("Rocky Mountains tileset is missing.");
            }
            return returnResult;
        }

        // Converts a world-space radius into a sector-space radius for the vision brush.
        public static void View_Radius_Set(double radius)
        {
            VisionSectors.Radius = radius / (Constants.TerrainGridSpacing * Constants.SectorTileSize);
        }

        // Mean horizontal position of the units. NOTE(review): divides by units.Count,
        // so an empty collection yields NaN components — callers are assumed to pass
        // at least one unit; TODO confirm.
        public static XYDouble CalcUnitsCentrePos(ObservableCollection<Unit> units)
        {
            var result = default(XYDouble);
            result.X = 0.0D;
            result.Y = 0.0D;
            foreach ( var unit in units )
            {
                result += unit.Pos.Horizontal.ToDoubles();
            }
            result /= units.Count;
            return result;
        }
    }
}
// ===========================================================================================================
//
// Class/Library: Demo Database Script -- Used to test connection and interaction with MSSQL, MySQL and SQLite Databases.
// Author: Michael Marzilli ( http://www.linkedin.com/in/michaelmarzilli )
// Created: Sep 25, 2007
//
// VERS 1.0.000 : Sep 25, 2007 : Original File Created.
//
// ===========================================================================================================

using UnityEngine;
using UnityEngine.UI;
using System;
using System.Data;

/// <summary>
/// Demo UI controller that exercises the DatabaseManager against MSSQL, MySQL
/// and SQLite back-ends. Connection settings are mirrored between the UI input
/// fields, the serialized private fields, and a PlayerPrefs string.
/// </summary>
public class _DemoDatabaseScript : MonoBehaviour
{
	#region "PRIVATE VARIABLES"

		// Connection settings serialized so they survive in the editor but hidden from the Inspector.
		[SerializeField,HideInInspector]
		private string _strServer = "";
		[SerializeField,HideInInspector]
		private string _strDatabase = "";
		[SerializeField,HideInInspector]
		private string _strUsername = "";
		[SerializeField,HideInInspector]
		private string _strPassword = "";
		[SerializeField,HideInInspector]
		private string _strFileLoc = "";
		[SerializeField,HideInInspector]
		private string _strTableName = "";
		[SerializeField,HideInInspector]
		private int _intPort = 1433;
		[SerializeField,HideInInspector]
		private int _intType = 0;

		private string _strOutput = "";
		private DatabaseManager _dbm = null;

	#endregion

	#region "PRIVATE PROPERTIES"

		// Lazily-cached handle to the DatabaseManager singleton.
		private DatabaseManager DB
		{
			get
			{
				if (_dbm == null)
					_dbm = DatabaseManager.Instance;
				return _dbm;
			}
		}

	#endregion

	#region "PUBLIC EDITOR PROPERTIES"

		// UI widgets wired up in the Unity Inspector.
		public InputField ServerInp;
		public InputField PortInp;
		public InputField DatabaseInp;
		public InputField UsernameInp;
		public InputField PasswordInp;
		public InputField FileLocInp;
		public InputField TableNameInp;
		public Dropdown TypeDDL;
		public Text OutputText;
		public GameObject UpdateButton;
		public InputField IDinp;

	#endregion

	#region "PUBLIC PROPERTIES"

		// Each property keeps the backing field and its input widget in sync.
		public string Server
		{
			get { return _strServer; }
			set { _strServer = value.Trim(); if (ServerInp != null) ServerInp.text = _strServer; }
		}
		public int Port
		{
			get { return _intPort; }
			set { _intPort = value; if (PortInp != null) PortInp.text = _intPort.ToString(); }
		}
		public string Database
		{
			get { return _strDatabase; }
			set { _strDatabase = value.Trim(); if (DatabaseInp != null) DatabaseInp.text = _strDatabase; }
		}
		public string Username
		{
			get { return _strUsername; }
			set { _strUsername = value.Trim(); if (UsernameInp != null) UsernameInp.text = _strUsername; }
		}
		public string Password
		{
			get { return _strPassword; }
			set { _strPassword = value.Trim(); if (PasswordInp != null) PasswordInp.text = _strPassword; }
		}
		public string FileLocation
		{
			get { return _strFileLoc; }
			set { _strFileLoc = value.Trim(); if (FileLocInp != null) FileLocInp.text = _strFileLoc; }
		}
		public string TableName
		{
			get { return _strTableName; }
			set { _strTableName = value.Trim(); if (TableNameInp != null) TableNameInp.text = _strTableName; }
		}
		public string Output
		{
			get { return _strOutput; }
			set { _strOutput = value.Trim(); if (OutputText != null) OutputText.text = _strOutput; }
		}
		public ClsDAL.DBtypes DatabaseType
		{
			get
			{
				if (TypeDDL != null)
					_intType = TypeDDL.value;
				return (ClsDAL.DBtypes) _intType;
			}
			set
			{
				_intType = (int) value;
				if (TypeDDL != null)
					TypeDDL.value = _intType;
			}
		}

	#endregion

	#region "PRIVATE FUNCTIONS"

		// Restore the last-used settings, push them into the UI, and connect.
		private void Start()
		{
			DeserializeData();
			TypeDDL.value = _intType;
			UpdateDatabaseSettings();
			ButtonUpdateDatabaseOnClick();
		}

		/// <summary>
		/// Shows/hides the fields relevant to the selected database type,
		/// pre-fills them, pushes the settings into the DatabaseManager,
		/// and persists them to PlayerPrefs.
		/// </summary>
		private void UpdateDatabaseSettings()
		{
			// SET THE DROPDOWN LIST TO THE APPROPRIATE DATABASE TYPE
			DatabaseType = (ClsDAL.DBtypes) TypeDDL.value;

			// SHOW OR HIDE THE APPROPRIATE FIELDS BASED ON DATABASE TYPE
			switch ((ClsDAL.DBtypes) _intType)
			{
				case ClsDAL.DBtypes.MSSQL:
				case ClsDAL.DBtypes.MYSQL:
					ServerInp.gameObject.SetActive(true);
					PortInp.gameObject.SetActive(true);
					DatabaseInp.gameObject.SetActive(true);
					UsernameInp.gameObject.SetActive(true);
					PasswordInp.gameObject.SetActive(true);
					FileLocInp.gameObject.SetActive(false);
					break;
				case ClsDAL.DBtypes.SQLITE:
					ServerInp.gameObject.SetActive(false);
					PortInp.gameObject.SetActive(false);
					DatabaseInp.gameObject.SetActive(false);
					UsernameInp.gameObject.SetActive(false);
					PasswordInp.gameObject.SetActive(false);
					FileLocInp.gameObject.SetActive(true);
					break;
			}

			// PRE-FILL FIELDS WITH APPROPRIATE DATA
			ServerInp.text = _strServer;
			PortInp.text = _intPort.ToString();
			DatabaseInp.text = _strDatabase;
			UsernameInp.text = _strUsername;
			PasswordInp.text = _strPassword;
			FileLocInp.text = _strFileLoc;
			TableNameInp.text = _strTableName;

			// UPDATE DATABASEMANAGER AND DAL
			DB.DatabaseType = DatabaseType;
			DB.DBserver = Server;
			DB.DBport = Port;
			DB.DBdatabase = Database;
			DB.DBuser = Username;
			DB.DBpassword = Password;
			DB.SQLiteDBfileLocation = FileLocation;

			// STORE THE SETTINGS IN A PLAYER PREF
			SerializeData();
		}

		// Write a result to the output panel, appending any DAL errors.
		private void UpdateOutput(string strOut)
		{
			if (OutputText != null)
				OutputText.text = strOut + "\n\n" + ((DB.DBerrors != "") ? DB.DBerrors : "");
		}

		// Persist the settings as a single '|'-delimited PlayerPrefs string
		// (8 fields; DeserializeData must stay in sync with this layout).
		private void SerializeData()
		{
			string strSer = "";
			strSer += _intType.ToString() + "|";
			strSer += _strServer + "|";
			strSer += _intPort.ToString() + "|";
			strSer += _strDatabase + "|";
			strSer += _strUsername + "|";
			strSer += _strPassword + "|";
			strSer += _strFileLoc + "|";
			strSer += _strTableName;
			PlayerPrefs.SetString("CBT.Demo.Settings", strSer);
		}

		// Restore settings previously written by SerializeData().
		private void DeserializeData()
		{
			string strSer = PlayerPrefs.GetString("CBT.Demo.Settings");
			string[] s = strSer.Split('|');
			// FIX: on a fresh install the preference is missing, GetString()
			// returns "" and Split yields a single element; indexing s[1..7]
			// then threw IndexOutOfRangeException inside Start(). Keep the
			// serialized defaults when the stored string is absent/malformed.
			if (s.Length < 8)
				return;
			_intType = Util.ConvertToInt(s[0]);
			_strServer = s[1];
			_intPort = Util.ConvertToInt(s[2]);
			_strDatabase = s[3];
			_strUsername = s[4];
			_strPassword = s[5];
			_strFileLoc = s[6];
			_strTableName = s[7];
		}

	#endregion

	#region "PUBLIC EVENTS"

		#region "DROP DOWN LIST EVENTS"

			// Switch the default port to match the selected engine.
			public void TypeDDLonChange()
			{
				switch (TypeDDL.value)
				{
					case 0:	// MSSQL
						_intPort = 1433;
						break;
					case 1:	// MYSQL
						_intPort = 3306;
						break;
				}
				UpdateDatabaseSettings();
				if (UpdateButton != null)
					UpdateButton.SetActive(true);
			}

		#endregion

		#region "INPUT FIELD CHANGE EVENTS"

			// Each handler copies its field into the matching property and
			// reveals the "Update" button so the user can reconnect.
			public void ServerOnChange()
			{
				Server = ServerInp.text;
				UpdateDatabaseSettings();
				if (UpdateButton != null)
					UpdateButton.SetActive(true);
			}
			public void PortOnChange()
			{
				Port = Util.ConvertToInt(PortInp.text);
				UpdateDatabaseSettings();
				if (UpdateButton != null)
					UpdateButton.SetActive(true);
			}
			public void DatabaseOnChange()
			{
				Database = DatabaseInp.text;
				UpdateDatabaseSettings();
				if (UpdateButton != null)
					UpdateButton.SetActive(true);
			}
			public void UsernameOnChange()
			{
				Username = UsernameInp.text;
				UpdateDatabaseSettings();
				if (UpdateButton != null)
					UpdateButton.SetActive(true);
			}
			public void PasswordOnChange()
			{
				Password = PasswordInp.text;
				UpdateDatabaseSettings();
				if (UpdateButton != null)
					UpdateButton.SetActive(true);
			}
			public void FileLocationOnChange()
			{
				FileLocation = FileLocInp.text;
				UpdateDatabaseSettings();
				if (UpdateButton != null)
					UpdateButton.SetActive(true);
			}
			// Changing the table name does not require a reconnect.
			public void TableNameOnChange()
			{
				TableName = TableNameInp.text;
				UpdateDatabaseSettings();
			}

		#endregion

		#region "BUTTON EVENTS"

			// (Re)initialize the DAL with the current settings.
			public void ButtonUpdateDatabaseOnClick()
			{
				DB.InitializeDAL();
				if (UpdateButton != null)
					UpdateButton.SetActive(false);
			}

		#endregion

		#region "--- HERE'S WHERE THE SQL MAGIC HAPPENS ---"
		#endregion

		#region "DIRECT SQL BUTTONS"

			/// <summary>SELECT * from the demo table and dump every row to the output panel.</summary>
			public void ButtonBasicSelectOnClick()
			{
				// QUERY THE DATABASE -- THE RESULTS ARE PLACED INTO A DATATABLE
				string strSQL = "SELECT * FROM " + _strTableName.ToLower();
				DB.ClearParams();
				DataTable dt = DB.GetSQLSelectDataTable(strSQL);

				// DISPLAY THE DATA
				if (dt != null && dt.Rows.Count > 0)
				{
					string strOut = "\n\nTable: " + _strTableName + ", " + dt.Rows.Count.ToString() + " Rows selected\n\n ";
					for (int h = 0; h < dt.Columns.Count; h++)
						strOut += dt.Columns[h].ColumnName + ", ";
					strOut += "\n";
					for (int i = 0; i < dt.Rows.Count; i++)
					{
						strOut += "Row #" + (i + 1).ToString() + ": ";
						for (int c = 0; c < dt.Columns.Count; c++)
						{
							strOut += dt.Rows[i][c].ToString() + ", ";
						}
						strOut += "\n";
					}
					UpdateOutput(strOut + "\n\n" + strSQL);
				}
				else
					UpdateOutput("\n\nNo Records Selected for Table: " + _strTableName + "\n\n" + strSQL);
			}

			/// <summary>SELECT a single scalar (Name of the first record) and display it.</summary>
			public void ButtonSelectFirstRecordNameOnClick()
			{
				// QUERY THE DATABASE -- THE RESULT IS RETURNED AS A STRING
				string strSQL = "SELECT Name FROM " + _strTableName + " WHERE ID=1";
				DB.ClearParams();
				string strOut = DB.GetSQLSelectString(strSQL);

				// DISPLAY THE DATA
				if (strOut != "")
				{
					strOut = "\n\nTable: " + _strTableName + "\n\n" + "Output: " + strOut + "\n";
					UpdateOutput(strOut + "\n\n" + strSQL);
				}
				else
					UpdateOutput("\n\nNo Records Selected for Table: " + _strTableName + "\n\n" + strSQL);
			}

			/// <summary>Increment the first record's Age via direct SQL, then re-query and display it.</summary>
			public void ButtonUpdateFirstRecordAgeOnClick()
			{
				string strOut = "";
				string strSQL = "UPDATE " + _strTableName + " SET Age=Age+1 WHERE ID=1";

				// UPDATE THE DATABASE
				// DISPLAY THE RESULTS
				DB.ClearParams();
				if (DB.DoSQLUpdateDelete(strSQL))
				{
					// NOTE(review): strSQL shown below is the scalar query, while the
					// actual re-select uses "SELECT *" -- intentional for display only.
					strSQL = "SELECT Name FROM " + _strTableName + " WHERE ID=1";
					DataTable dt = DB.DAL.GetSQLSelectDataTable("SELECT * FROM " + _strTableName + " WHERE ID=1");
					if (dt != null && dt.Rows.Count > 0)
					{
						strOut = "\n\nTable: " + _strTableName + " - " + dt.Rows[0]["NAME"].ToString() + " is now " + dt.Rows[0]["AGE"].ToString() + " years old.\n";
						UpdateOutput(strOut + "\n\n" + strSQL);
					}
				}
				else
					UpdateOutput("\n\nNo Records Updated for Table: " + _strTableName + "\n\n" + strSQL);
			}

		#endregion

		#region "STORED PROCEDURE BUTTONS"

			/// <summary>Run the GetTestByID stored procedure for the ID typed into IDinp (MSSQL/MySQL only).</summary>
			public void ButtonSPgetByIDOnClick()
			{
				// DATA INTEGRITY CHECKS
				if (IDinp == null)
					return;
				if (DatabaseType == ClsDAL.DBtypes.SQLITE)
				{
					UpdateOutput("\n\nSQLite is unable to execute stored procedures.");
					Debug.LogError("SQLite is unable to execute stored procedures.");
					return;
				}
				int intID = Util.ConvertToInt(IDinp.text);
				if (intID < 1)
				{
					UpdateOutput("\n\nID should be greater than zero.");
					Debug.LogError("ID should be greater than zero.");
					return;
				}

				// PERFORM THE STORED PROCEDURE -- RESULTS ARE RETURNED AS A DATATABLE
				DB.ClearParams();
				DB.AddParam("SEARCHID", intID);
				DataTable dt = DB.GetSPDataTable("GetTestByID");

				// DISPLAY THE DATA
				if (dt != null && dt.Rows.Count > 0)
				{
					string strOut = "\n\n" + dt.Rows.Count.ToString() + " Rows selected\n\n ";
					for (int h = 0; h < dt.Columns.Count; h++)
						strOut += dt.Columns[h].ColumnName + ", ";
					strOut += "\n";
					for (int i = 0; i < dt.Rows.Count; i++)
					{
						strOut += "Row #" + (i + 1).ToString() + ": ";
						for (int c = 0; c < dt.Columns.Count; c++)
						{
							strOut += dt.Rows[i][c].ToString() + ", ";
						}
						strOut += "\n";
					}
					UpdateOutput(strOut + "\n\n");
				}
				else
					UpdateOutput("\n\nNo Records Selected for Table: " + _strTableName + "\n\n");
			}

			/// <summary>Run the UpdateAgeByID stored procedure, then redisplay the record (MSSQL/MySQL only).</summary>
			public void ButtonSPupdateByIDOnClick()
			{
				// DATA INTEGRITY CHECKS
				if (IDinp == null)
					return;
				if (DatabaseType == ClsDAL.DBtypes.SQLITE)
				{
					UpdateOutput("\n\nSQLite is unable to execute stored procedures.");
					Debug.LogError("SQLite is unable to execute stored procedures.");
					return;
				}
				int intID = Util.ConvertToInt(IDinp.text);
				if (intID < 1)
				{
					UpdateOutput("\n\nID should be greater than zero.");
					Debug.LogError("ID should be greater than zero.");
					return;
				}

				// PERFORM THE UPDATE STORED PROCEDURE
				DB.ClearParams();
				DB.AddParam("SEARCHID", intID);
				DB.ExecuteSP("UpdateAgeByID");

				// DISPLAY THE RECORD
				ButtonSPgetByIDOnClick();
			}

		#endregion

	#endregion
}
// // Options.cs // // Authors: // Jonathan Pryor <jpryor@novell.com> // // Copyright (C) 2008 Novell (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // // Compile With: // gmcs -debug+ -r:System.Core Options.cs -o:NDesk.Options.dll // gmcs -debug+ -d:LINQ -r:System.Core Options.cs -o:NDesk.Options.dll // // The LINQ version just changes the implementation of // OptionSet.Parse(IEnumerable<string>), and confers no semantic changes. // // A Getopt::Long-inspired option parsing library for C#. // // NDesk.Options.OptionSet is built upon a key/value table, where the // key is a option format string and the value is a delegate that is // invoked when the format string is matched. // // Option format strings: // Regex-like BNF Grammar: // name: .+ // type: [=:] // sep: ( [^{}]+ | '{' .+ '}' )? // aliases: ( name type sep ) ( '|' name type sep )* // // Each '|'-delimited name is an alias for the associated action. 
If the // format string ends in a '=', it has a required value. If the format // string ends in a ':', it has an optional value. If neither '=' or ':' // is present, no value is supported. `=' or `:' need only be defined on one // alias, but if they are provided on more than one they must be consistent. // // Each alias portion may also end with a "key/value separator", which is used // to split option values if the option accepts > 1 value. If not specified, // it defaults to '=' and ':'. If specified, it can be any character except // '{' and '}' OR the *string* between '{' and '}'. If no separator should be // used (i.e. the separate values should be distinct arguments), then "{}" // should be used as the separator. // // Options are extracted either from the current option by looking for // the option name followed by an '=' or ':', or is taken from the // following option IFF: // - The current option does not contain a '=' or a ':' // - The current option requires a value (i.e. not a Option type of ':') // // The `name' used in the option format string does NOT include any leading // option indicator, such as '-', '--', or '/'. All three of these are // permitted/required on any named option. // // Option bundling is permitted so long as: // - '-' is used to start the option group // - all of the bundled options are a single character // - at most one of the bundled options accepts a value, and the value // provided starts from the next character to the end of the string. // // This allows specifying '-a -b -c' as '-abc', and specifying '-D name=value' // as '-Dname=value'. // // Option processing is disabled by specifying "--". All options after "--" // are returned by OptionSet.Parse() unchanged and unprocessed. // // Unprocessed options are returned from OptionSet.Parse(). 
// // Examples: // int verbose = 0; // OptionSet p = new OptionSet () // .Add ("v", v => ++verbose) // .Add ("name=|value=", v => Console.WriteLine (v)); // p.Parse (new string[]{"-v", "--v", "/v", "-name=A", "/name", "B", "extra"}); // // The above would parse the argument string array, and would invoke the // lambda expression three times, setting `verbose' to 3 when complete. // It would also print out "A" and "B" to standard output. // The returned array would contain the string "extra". // // C# 3.0 collection initializers are supported and encouraged: // var p = new OptionSet () { // { "h|?|help", v => ShowHelp () }, // }; // // System.ComponentModel.TypeConverter is also supported, allowing the use of // custom data types in the callback type; TypeConverter.ConvertFromString() // is used to convert the value option to an instance of the specified // type: // // var p = new OptionSet () { // { "foo=", (Foo f) => Console.WriteLine (f.ToString ()) }, // }; // // Random other tidbits: // - Boolean options (those w/o '=' or ':' in the option format string) // are explicitly enabled if they are followed with '+', and explicitly // disabled if they are followed with '-': // string a = null; // var p = new OptionSet () { // { "a", s => a = s }, // }; // p.Parse (new string[]{"-a"}); // sets v != null // p.Parse (new string[]{"-a+"}); // sets v != null // p.Parse (new string[]{"-a-"}); // sets v == null // using System; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.ComponentModel; using System.IO; using System.Runtime.Serialization; using System.Security.Permissions; using System.Text; using System.Text.RegularExpressions; #if LINQ using System.Linq; #endif #if TEST using NDesk.Options; #endif // HACK: Disables warnings about missing XML-documentation. 
#pragma warning disable 1591
namespace NDesk.Options
{
    /// <summary>
    /// The value(s) collected for the option currently being parsed.
    /// Wraps a List&lt;string&gt; and validates indexed reads against the owning
    /// Option's MaxValueCount / OptionValueType via the shared OptionContext.
    /// </summary>
    public class OptionValueCollection : IList, IList<string>
    {
        private List<string> values = new List<string>();
        private OptionContext c;   // context that owns this collection; supplies Option/OptionName for validation

        internal OptionValueCollection(OptionContext c) { this.c = c; }

        #region ICollection
        void ICollection.CopyTo(Array array, int index) { (values as ICollection).CopyTo(array, index); }
        bool ICollection.IsSynchronized { get { return (values as ICollection).IsSynchronized; } }
        object ICollection.SyncRoot { get { return (values as ICollection).SyncRoot; } }
        #endregion ICollection

        #region ICollection<T>
        public void Add(string item) { values.Add(item); }
        public void Clear() { values.Clear(); }
        public bool Contains(string item) { return values.Contains(item); }
        public void CopyTo(string[] array, int arrayIndex) { values.CopyTo(array, arrayIndex); }
        public bool Remove(string item) { return values.Remove(item); }
        public int Count { get { return values.Count; } }
        public bool IsReadOnly { get { return false; } }
        #endregion ICollection<T>

        #region IEnumerable
        IEnumerator IEnumerable.GetEnumerator() { return values.GetEnumerator(); }
        #endregion IEnumerable

        #region IEnumerable<T>
        public IEnumerator<string> GetEnumerator() { return values.GetEnumerator(); }
        #endregion IEnumerable<T>

        #region IList
        // Non-generic IList surface delegates straight to the backing list.
        int IList.Add(object value) { return (values as IList).Add(value); }
        bool IList.Contains(object value) { return (values as IList).Contains(value); }
        int IList.IndexOf(object value) { return (values as IList).IndexOf(value); }
        void IList.Insert(int index, object value) { (values as IList).Insert(index, value); }
        void IList.Remove(object value) { (values as IList).Remove(value); }
        void IList.RemoveAt(int index) { (values as IList).RemoveAt(index); }
        bool IList.IsFixedSize { get { return false; } }
        object IList.this[int index] { get { return this[index]; } set { (values as IList)[index] = value; } }
        #endregion IList

        #region IList<T>
        public int IndexOf(string item) { return values.IndexOf(item); }
        public void Insert(int index, string item) { values.Insert(index, item); }
        public void RemoveAt(int index) { values.RemoveAt(index); }

        // Throws when the indexed read is out of range for the current option,
        // or when a Required value is missing (reported via OptionException).
        private void AssertValid(int index)
        {
            if (c.Option == null)
                throw new InvalidOperationException("OptionContext.Option is null.");
            if (index >= c.Option.MaxValueCount)
                throw new ArgumentOutOfRangeException("index");
            if (c.Option.OptionValueType == OptionValueType.Required &&
                    index >= values.Count)
                throw new OptionException(string.Format(
                            c.OptionSet.MessageLocalizer("Missing required value for option '{0}'."), c.OptionName),
                        c.OptionName);
        }

        // Indexed get validates first; Optional values beyond Count read as null.
        public string this[int index]
        {
            get
            {
                AssertValid(index);
                return index >= values.Count ? null : values[index];
            }
            set
            {
                values[index] = value;
            }
        }
        #endregion IList<T>

        public List<string> ToList() { return new List<string>(values); }
        public string[] ToArray() { return values.ToArray(); }
        public override string ToString() { return string.Join(", ", values.ToArray()); }
    }

    /// <summary>
    /// Mutable parsing state shared between OptionSet and the Option being
    /// matched: the current option, the name it was matched under, its index
    /// in the argument list, and the values collected so far.
    /// </summary>
    public class OptionContext
    {
        private Option option;
        private string name;
        private int index;
        private OptionSet set;
        private OptionValueCollection c;

        public OptionContext(OptionSet set)
        {
            this.set = set;
            this.c = new OptionValueCollection(this);
        }

        public Option Option { get { return option; } set { option = value; } }
        public string OptionName { get { return name; } set { name = value; } }
        public int OptionIndex { get { return index; } set { index = value; } }
        public OptionSet OptionSet { get { return set; } }
        public OptionValueCollection OptionValues { get { return c; } }
    }

    /// <summary>
    /// Whether an option takes no value, an optional value (':' suffix in the
    /// prototype), or a required value ('=' suffix).
    /// </summary>
    public enum OptionValueType
    {
        None,
        Optional,
        Required,
    }

    /// <summary>
    /// One registered option: parses its prototype string ("name=|alias:" etc.)
    /// into aliases, value type, and key/value separators, and invokes the
    /// subclass callback via OnParseComplete when matched.
    /// </summary>
    public abstract class Option
    {
        private string prototype, description;
        private string[] names;          // '|'-separated aliases, type suffix stripped by ParsePrototype
        private OptionValueType type;
        private int count;               // maximum number of values this option accepts
        private string[] separators;     // key/value separators (only when count > 1)

        protected Option(string prototype, string description) : this(prototype, description, 1)
        {
        }

        protected Option(string prototype, string description, int maxValueCount)
        {
            if (prototype == null)
                throw new ArgumentNullException("prototype");
            if (prototype.Length == 0)
                throw new ArgumentException("Cannot be the empty string.", "prototype");
            if (maxValueCount < 0)
                throw new ArgumentOutOfRangeException("maxValueCount");

            this.prototype = prototype;
            this.names = prototype.Split('|');
            this.description = description;
            this.count = maxValueCount;
            this.type = ParsePrototype();

            // A value-taking option must accept at least one value.
            if (this.count == 0 && type != OptionValueType.None)
                throw new ArgumentException(
                        "Cannot provide maxValueCount of 0 for OptionValueType.Required or " +
                            "OptionValueType.Optional.",
                        "maxValueCount");
            // A no-value option cannot accept more than one value.
            if (this.type == OptionValueType.None && maxValueCount > 1)
                throw new ArgumentException(
                        string.Format("Cannot provide maxValueCount of {0} for OptionValueType.None.", maxValueCount),
                        "maxValueCount");
            // The default handler "<>" must not require values.
            if (Array.IndexOf(names, "<>") >= 0 &&
                    ((names.Length == 1 && this.type != OptionValueType.None) ||
                     (names.Length > 1 && this.MaxValueCount > 1)))
                throw new ArgumentException(
                        "The default option handler '<>' cannot require values.",
                        "prototype");
        }

        public string Prototype { get { return prototype; } }
        public string Description { get { return description; } }
        public OptionValueType OptionValueType { get { return type; } }
        public int MaxValueCount { get { return count; } }

        public string[] GetNames()
        {
            return (string[])names.Clone();
        }

        public string[] GetValueSeparators()
        {
            if (separators == null)
                return new string[0];
            return (string[])separators.Clone();
        }

        // Converts a raw option value to T via its TypeConverter; conversion
        // failures are rethrown as OptionException with a localized message.
        protected static T Parse<T>(string value, OptionContext c)
        {
            TypeConverter conv = TypeDescriptor.GetConverter(typeof(T));
            T t = default(T);
            try
            {
                if (value != null)
                    t = (T)conv.ConvertFromString(value);
            }
            catch (Exception e)
            {
                throw new OptionException(
                        string.Format(
                            c.OptionSet.MessageLocalizer("Could not convert string `{0}' to type {1} for option `{2}'."),
                            value, typeof(T).Name, c.OptionName),
                        c.OptionName, e);
            }
            return t;
        }

        internal string[] Names { get { return names; } }
        internal string[] ValueSeparators { get { return separators; } }

        private static readonly char[] NameTerminator = new char[] { '=', ':' };

        // Strips the '='/':' type suffix from each alias, checks that all
        // aliases agree on the value type, and collects key/value separators.
        private OptionValueType ParsePrototype()
        {
            char type = '\0';
            List<string> seps = new List<string>();
            for (int i = 0; i < names.Length; ++i)
            {
                string name = names[i];
                if (name.Length == 0)
                    throw new ArgumentException("Empty option names are not supported.", "prototype");

                int end = name.IndexOfAny(NameTerminator);
                if (end == -1)
                    continue;
                names[i] = name.Substring(0, end);
                if (type == '\0' || type == name[end])
                    type = name[end];
                else
                    throw new ArgumentException(
                            string.Format("Conflicting option types: '{0}' vs. '{1}'.", type, name[end]),
                            "prototype");
                AddSeparators(name, end, seps);
            }

            if (type == '\0')
                return OptionValueType.None;

            if (count <= 1 && seps.Count != 0)
                throw new ArgumentException(
                        string.Format("Cannot provide key/value separators for Options taking {0} value(s).", count),
                        "prototype");
            if (count > 1)
            {
                if (seps.Count == 0)
                    this.separators = new string[] { ":", "=" };   // default separators
                else if (seps.Count == 1 && seps[0].Length == 0)
                    this.separators = null;                        // "{}" => no separator
                else
                    this.separators = seps.ToArray();
            }

            return type == '=' ? OptionValueType.Required : OptionValueType.Optional;
        }

        // Parses the separator spec that may follow the type character:
        // single chars, or multi-char separators enclosed in '{'..'}'.
        private static void AddSeparators(string name, int end, ICollection<string> seps)
        {
            int start = -1;
            for (int i = end + 1; i < name.Length; ++i)
            {
                switch (name[i])
                {
                    case '{':
                        if (start != -1)
                            throw new ArgumentException(
                                    string.Format("Ill-formed name/value separator found in \"{0}\".", name),
                                    "prototype");
                        start = i + 1;
                        break;
                    case '}':
                        if (start == -1)
                            throw new ArgumentException(
                                    string.Format("Ill-formed name/value separator found in \"{0}\".", name),
                                    "prototype");
                        seps.Add(name.Substring(start, i - start));
                        start = -1;
                        break;
                    default:
                        if (start == -1)
                            seps.Add(name[i].ToString());
                        break;
                }
            }
            if (start != -1)
                throw new ArgumentException(
                        string.Format("Ill-formed name/value separator found in \"{0}\".", name),
                        "prototype");
        }

        // Fires the subclass callback, then resets the context for the next option.
        public void Invoke(OptionContext c)
        {
            OnParseComplete(c);
            c.OptionName = null;
            c.Option = null;
            c.OptionValues.Clear();
        }

        protected abstract void OnParseComplete(OptionContext c);

        public override string ToString()
        {
            return Prototype;
        }
    }

    /// <summary>
    /// Thrown for user-level parsing errors (missing/invalid values,
    /// unregistered bundled options); carries the offending option name.
    /// </summary>
    [Serializable]
    public class OptionException : Exception
    {
        private string option;

        public OptionException()
        {
        }

        public OptionException(string message, string optionName)
            : base(message)
        {
            this.option = optionName;
        }

        public OptionException(string message, string optionName, Exception innerException)
            : base(message, innerException)
        {
            this.option = optionName;
        }

        protected OptionException(SerializationInfo info, StreamingContext context)
            : base(info, context)
        {
            this.option = info.GetString("OptionName");
        }

        public string OptionName { get { return this.option; } }

        [SecurityPermission(SecurityAction.LinkDemand, SerializationFormatter = true)]
        public override void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            base.GetObjectData(info, context);
            info.AddValue("OptionName", option);
        }
    }

    // Callback shape for two-value options (e.g. "-Dkey=value").
    public delegate void OptionAction<TKey, TValue>(TKey key, TValue value);

    /// <summary>
    /// The option table: maps each registered alias to its Option and drives
    /// argument parsing. Keyed by the first alias via KeyedCollection.
    /// </summary>
    public class OptionSet : KeyedCollection<string, Option>
    {
        public OptionSet() :
this(delegate (string f) { return f; }) { } public OptionSet(Converter<string, string> localizer) { this.localizer = localizer; } private Converter<string, string> localizer; public Converter<string, string> MessageLocalizer { get { return localizer; } } protected override string GetKeyForItem(Option item) { if (item == null) throw new ArgumentNullException("option"); if (item.Names != null && item.Names.Length > 0) return item.Names[0]; // This should never happen, as it's invalid for Option to be // constructed w/o any names. throw new InvalidOperationException("Option has no names!"); } [Obsolete("Use KeyedCollection.this[string]")] protected Option GetOptionForName(string option) { if (option == null) throw new ArgumentNullException("option"); try { return base[option]; } catch (KeyNotFoundException) { return null; } } protected override void InsertItem(int index, Option item) { base.InsertItem(index, item); AddImpl(item); } protected override void RemoveItem(int index) { base.RemoveItem(index); Option p = Items[index]; // KeyedCollection.RemoveItem() handles the 0th item for (int i = 1; i < p.Names.Length; ++i) { Dictionary.Remove(p.Names[i]); } } protected override void SetItem(int index, Option item) { base.SetItem(index, item); RemoveItem(index); AddImpl(item); } private void AddImpl(Option option) { if (option == null) throw new ArgumentNullException("option"); List<string> added = new List<string>(option.Names.Length); try { // KeyedCollection.InsertItem/SetItem handle the 0th name. 
for (int i = 1; i < option.Names.Length; ++i) { Dictionary.Add(option.Names[i], option); added.Add(option.Names[i]); } } catch (Exception) { foreach (string name in added) Dictionary.Remove(name); throw; } } public new OptionSet Add(Option option) { base.Add(option); return this; } private sealed class ActionOption : Option { private Action<OptionValueCollection> action; public ActionOption(string prototype, string description, int count, Action<OptionValueCollection> action) : base(prototype, description, count) { if (action == null) throw new ArgumentNullException("action"); this.action = action; } protected override void OnParseComplete(OptionContext c) { action(c.OptionValues); } } public OptionSet Add(string prototype, Action<string> action) { return Add(prototype, null, action); } public OptionSet Add(string prototype, string description, Action<string> action) { if (action == null) throw new ArgumentNullException("action"); Option p = new ActionOption(prototype, description, 1, delegate (OptionValueCollection v) { action(v[0]); }); base.Add(p); return this; } public OptionSet Add(string prototype, OptionAction<string, string> action) { return Add(prototype, null, action); } public OptionSet Add(string prototype, string description, OptionAction<string, string> action) { if (action == null) throw new ArgumentNullException("action"); Option p = new ActionOption(prototype, description, 2, delegate (OptionValueCollection v) { action(v[0], v[1]); }); base.Add(p); return this; } private sealed class ActionOption<T> : Option { private Action<T> action; public ActionOption(string prototype, string description, Action<T> action) : base(prototype, description, 1) { if (action == null) throw new ArgumentNullException("action"); this.action = action; } protected override void OnParseComplete(OptionContext c) { action(Parse<T>(c.OptionValues[0], c)); } } private sealed class ActionOption<TKey, TValue> : Option { private OptionAction<TKey, TValue> action; public 
ActionOption(string prototype, string description, OptionAction<TKey, TValue> action) : base(prototype, description, 2) { if (action == null) throw new ArgumentNullException("action"); this.action = action; } protected override void OnParseComplete(OptionContext c) { action( Parse<TKey>(c.OptionValues[0], c), Parse<TValue>(c.OptionValues[1], c)); } } public OptionSet Add<T>(string prototype, Action<T> action) { return Add(prototype, null, action); } public OptionSet Add<T>(string prototype, string description, Action<T> action) { return Add(new ActionOption<T>(prototype, description, action)); } public OptionSet Add<TKey, TValue>(string prototype, OptionAction<TKey, TValue> action) { return Add(prototype, null, action); } public OptionSet Add<TKey, TValue>(string prototype, string description, OptionAction<TKey, TValue> action) { return Add(new ActionOption<TKey, TValue>(prototype, description, action)); } protected virtual OptionContext CreateOptionContext() { return new OptionContext(this); } #if LINQ public List<string> Parse (IEnumerable<string> arguments) { bool process = true; OptionContext c = CreateOptionContext (); c.OptionIndex = -1; var def = GetOptionForName ("<>"); var unprocessed = from argument in arguments where ++c.OptionIndex >= 0 && (process || def != null) ? process ? argument == "--" ? (process = false) : !Parse (argument, c) ? def != null ? Unprocessed (null, def, c, argument) : true : false : def != null ? Unprocessed (null, def, c, argument) : true : true select argument; List<string> r = unprocessed.ToList (); if (c.Option != null) c.Option.Invoke (c); return r; } #else public List<string> Parse(IEnumerable<string> arguments) { OptionContext c = CreateOptionContext(); c.OptionIndex = -1; bool process = true; List<string> unprocessed = new List<string>(); Option def = Contains("<>") ? 
this["<>"] : null; foreach (string argument in arguments) { ++c.OptionIndex; if (argument == "--") { process = false; continue; } if (!process) { Unprocessed(unprocessed, def, c, argument); continue; } if (!Parse(argument, c)) Unprocessed(unprocessed, def, c, argument); } if (c.Option != null) c.Option.Invoke(c); return unprocessed; } #endif private static bool Unprocessed(ICollection<string> extra, Option def, OptionContext c, string argument) { if (def == null) { extra.Add(argument); return false; } c.OptionValues.Add(argument); c.Option = def; c.Option.Invoke(c); return false; } private readonly Regex ValueOption = new Regex( @"^(?<flag>--|-|/)(?<name>[^:=]+)((?<sep>[:=])(?<value>.*))?$"); protected bool GetOptionParts(string argument, out string flag, out string name, out string sep, out string value) { if (argument == null) throw new ArgumentNullException("argument"); flag = name = sep = value = null; Match m = ValueOption.Match(argument); if (!m.Success) { return false; } flag = m.Groups["flag"].Value; name = m.Groups["name"].Value; if (m.Groups["sep"].Success && m.Groups["value"].Success) { sep = m.Groups["sep"].Value; value = m.Groups["value"].Value; } return true; } protected virtual bool Parse(string argument, OptionContext c) { if (c.Option != null) { ParseValue(argument, c); return true; } string f, n, s, v; if (!GetOptionParts(argument, out f, out n, out s, out v)) return false; Option p; if (Contains(n)) { p = this[n]; c.OptionName = f + n; c.Option = p; switch (p.OptionValueType) { case OptionValueType.None: c.OptionValues.Add(n); c.Option.Invoke(c); break; case OptionValueType.Optional: case OptionValueType.Required: ParseValue(v, c); break; } return true; } // no match; is it a bool option? if (ParseBool(argument, n, c)) return true; // is it a bundled option? 
if (ParseBundledValue(f, string.Concat(n + s + v), c)) return true; return false; } private void ParseValue(string option, OptionContext c) { if (option != null) foreach (string o in c.Option.ValueSeparators != null ? option.Split(c.Option.ValueSeparators, StringSplitOptions.None) : new string[] { option }) { c.OptionValues.Add(o); } if (c.OptionValues.Count == c.Option.MaxValueCount || c.Option.OptionValueType == OptionValueType.Optional) c.Option.Invoke(c); else if (c.OptionValues.Count > c.Option.MaxValueCount) { throw new OptionException(localizer(string.Format( "Error: Found {0} option values when expecting {1}.", c.OptionValues.Count, c.Option.MaxValueCount)), c.OptionName); } } private bool ParseBool(string option, string n, OptionContext c) { Option p; string rn; if (n.Length >= 1 && (n[n.Length - 1] == '+' || n[n.Length - 1] == '-') && Contains((rn = n.Substring(0, n.Length - 1)))) { p = this[rn]; string v = n[n.Length - 1] == '+' ? option : null; c.OptionName = option; c.Option = p; c.OptionValues.Add(v); p.Invoke(c); return true; } return false; } private bool ParseBundledValue(string f, string n, OptionContext c) { if (f != "-") return false; for (int i = 0; i < n.Length; ++i) { Option p; string opt = f + n[i].ToString(); string rn = n[i].ToString(); if (!Contains(rn)) { if (i == 0) return false; throw new OptionException(string.Format(localizer( "Cannot bundle unregistered option '{0}'."), opt), opt); } p = this[rn]; switch (p.OptionValueType) { case OptionValueType.None: Invoke(c, opt, n, p); break; case OptionValueType.Optional: case OptionValueType.Required: { string v = n.Substring(i + 1); c.Option = p; c.OptionName = opt; ParseValue(v.Length != 0 ? 
v : null, c); return true; } default: throw new InvalidOperationException("Unknown OptionValueType: " + p.OptionValueType); } } return true; } private static void Invoke(OptionContext c, string name, string value, Option option) { c.OptionName = name; c.Option = option; c.OptionValues.Add(value); option.Invoke(c); } private const int OptionWidth = 29; public void WriteOptionDescriptions(TextWriter o) { foreach (Option p in this) { int written = 0; if (!WriteOptionPrototype(o, p, ref written)) continue; if (written < OptionWidth) o.Write(new string(' ', OptionWidth - written)); else { o.WriteLine(); o.Write(new string(' ', OptionWidth)); } List<string> lines = GetLines(localizer(GetDescription(p.Description))); o.WriteLine(lines[0]); string prefix = new string(' ', OptionWidth + 2); for (int i = 1; i < lines.Count; ++i) { o.Write(prefix); o.WriteLine(lines[i]); } } } private bool WriteOptionPrototype(TextWriter o, Option p, ref int written) { string[] names = p.Names; int i = GetNextOptionIndex(names, 0); if (i == names.Length) return false; if (names[i].Length == 1) { Write(o, ref written, " -"); Write(o, ref written, names[0]); } else { Write(o, ref written, " --"); Write(o, ref written, names[0]); } for (i = GetNextOptionIndex(names, i + 1); i < names.Length; i = GetNextOptionIndex(names, i + 1)) { Write(o, ref written, ", "); Write(o, ref written, names[i].Length == 1 ? "-" : "--"); Write(o, ref written, names[i]); } if (p.OptionValueType == OptionValueType.Optional || p.OptionValueType == OptionValueType.Required) { if (p.OptionValueType == OptionValueType.Optional) { Write(o, ref written, localizer("[")); } Write(o, ref written, localizer("=" + GetArgumentName(0, p.MaxValueCount, p.Description))); string sep = p.ValueSeparators != null && p.ValueSeparators.Length > 0 ? 
p.ValueSeparators[0] : " "; for (int c = 1; c < p.MaxValueCount; ++c) { Write(o, ref written, localizer(sep + GetArgumentName(c, p.MaxValueCount, p.Description))); } if (p.OptionValueType == OptionValueType.Optional) { Write(o, ref written, localizer("]")); } } return true; } private static int GetNextOptionIndex(string[] names, int i) { while (i < names.Length && names[i] == "<>") { ++i; } return i; } private static void Write(TextWriter o, ref int n, string s) { n += s.Length; o.Write(s); } private static string GetArgumentName(int index, int maxIndex, string description) { if (description == null) return maxIndex == 1 ? "VALUE" : "VALUE" + (index + 1); string[] nameStart; if (maxIndex == 1) nameStart = new string[] { "{0:", "{" }; else nameStart = new string[] { "{" + index + ":" }; for (int i = 0; i < nameStart.Length; ++i) { int start, j = 0; do { start = description.IndexOf(nameStart[i], j); } while (start >= 0 && j != 0 ? description[j++ - 1] == '{' : false); if (start == -1) continue; int end = description.IndexOf("}", start); if (end == -1) continue; return description.Substring(start + nameStart[i].Length, end - start - nameStart[i].Length); } return maxIndex == 1 ? 
"VALUE" : "VALUE" + (index + 1); } private static string GetDescription(string description) { if (description == null) return string.Empty; StringBuilder sb = new StringBuilder(description.Length); int start = -1; for (int i = 0; i < description.Length; ++i) { switch (description[i]) { case '{': if (i == start) { sb.Append('{'); start = -1; } else if (start < 0) start = i + 1; break; case '}': if (start < 0) { if ((i + 1) == description.Length || description[i + 1] != '}') throw new InvalidOperationException("Invalid option description: " + description); ++i; sb.Append("}"); } else { sb.Append(description.Substring(start, i - start)); start = -1; } break; case ':': if (start < 0) goto default; start = i + 1; break; default: if (start < 0) sb.Append(description[i]); break; } } return sb.ToString(); } private static List<string> GetLines(string description) { List<string> lines = new List<string>(); if (string.IsNullOrEmpty(description)) { lines.Add(string.Empty); return lines; } int length = 80 - OptionWidth - 2; int start = 0, end; do { end = GetLineEnd(start, length, description); bool cont = false; if (end < description.Length) { char c = description[end]; if (c == '-' || (char.IsWhiteSpace(c) && c != '\n')) ++end; else if (c != '\n') { cont = true; --end; } } lines.Add(description.Substring(start, end - start)); if (cont) { lines[lines.Count - 1] += "-"; } start = end; if (start < description.Length && description[start] == '\n') ++start; } while (end < description.Length); return lines; } private static int GetLineEnd(int start, int length, string description) { int end = Math.Min(start + length, description.Length); int sep = -1; for (int i = start; i < end; ++i) { switch (description[i]) { case ' ': case '\t': case '\v': case '-': case ',': case '.': case ';': sep = i; break; case '\n': return i; } } if (sep == -1 || end == description.Length) return end; return sep; } } }
///////////////////////////////////////////////////////////////
// FSXml - A library for representing file system data as    //
// Xml.                                                      //
// Shukri Adams (shukri.adams@gmail.com)                     //
// https://github.com/shukriadams/browsemonkey               //
// MIT License (MIT) Copyright (c) 2014 Shukri Adams         //
///////////////////////////////////////////////////////////////

using System.Text;
using System.Xml;
using vcFramework;
using vcFramework.Arrays;
using vcFramework.Delegates;
using vcFramework.Parsers;
using vcFramework.Maths;
using vcFramework.Xml;

namespace FSXml
{
    /// <summary>
    /// Exports nested FSXml data to flattened text.
    /// </summary>
    public class FSXmlTextExporter : IProgress
    {
        #region FIELDS

        /// <summary>
        /// Folder item codes ("n" name, "p" path, "dc" date created, "dm" date modified) to include in output.
        /// </summary>
        private string[] _folderItemsToShow;

        /// <summary>
        /// File item codes ("n", "p", "dc", "dm", "s" size, "e" extension) to include in output.
        /// </summary>
        private string[] _fileItemsToShow;

        /// <summary>
        /// Text repeated once per nesting level to indent a line.
        /// </summary>
        private string _indentWith;

        /// <summary>
        /// Text used as a spacer between columns.
        /// </summary>
        private string _spaceWith;

        /// <summary>
        /// True to pad file columns so values line up vertically.
        /// </summary>
        private bool _alignText;

        /// <summary>
        /// True to draw decoration lines around folder and file blocks.
        /// </summary>
        private bool _decorateWithLines;

        /// <summary>
        /// True to render a header row above each folder's data.
        /// </summary>
        private bool _showFolderHeaders;

        /// <summary>
        /// True to render a header row above each folder's file list.
        /// </summary>
        private bool _showFileHeaders;

        /// <summary>
        /// Root FSXml node to export.
        /// </summary>
        private XmlNode _dataToDump;

        /// <summary>
        /// Raised each time a directory node is processed.
        /// </summary>
        public event System.EventHandler OnNext;

        /// <summary>
        /// Raised when processing completes.
        /// </summary>
        public event System.EventHandler OnEnd;

        /// <summary>
        /// Accumulates the exported text.
        /// </summary>
        private StringBuilder _x;

        /// <summary>
        /// Total number of directory ("d") nodes that will be processed.
        /// </summary>
        private long _steps;

        /// <summary>
        /// Number of directory nodes processed so far.
        /// </summary>
        private long _currentStep;

        /// <summary>
        /// Set to true if object is busy processing.
        /// </summary>
        private bool _running;

        /// <summary>
        /// Set to true if object is to stop processing.
        /// </summary>
        private bool _stop;

        #endregion

        #region PROPERTIES

        /// <summary>
        /// Gets the number of steps this object will take to complete its task.
        /// </summary>
        public long Steps
        {
            get
            {
                return _steps;
            }
        }

        /// <summary>
        /// Gets the current step.
        /// </summary>
        public long CurrentStep
        {
            get
            {
                return _currentStep;
            }
        }

        /// <summary>
        /// Gets if object is busy with progressible processing.
        /// </summary>
        public bool Running
        {
            get
            {
                return _running;
            }
        }

        /// <summary>
        /// Gets the exported text produced by this object.
        /// </summary>
        public string Output
        {
            get
            {
                return _x.ToString();
            }
        }

        #endregion

        #region CONSTRUCTORS

        /// <summary>
        /// Initializes the exporter. No processing happens until <see cref="Start"/> is called.
        /// </summary>
        /// <param name="dXmlDumpData">Root FSXml node to export.</param>
        /// <param name="folderItemsToShow">Folder item codes to include ("n", "p", "dc", "dm").</param>
        /// <param name="fileItemsToShow">File item codes to include ("n", "p", "dc", "dm", "s", "e").</param>
        /// <param name="indentWith">Text repeated once per nesting level to indent lines.</param>
        /// <param name="spaceWith">Column spacer text.</param>
        /// <param name="align">True to pad file columns to equal widths.</param>
        /// <param name="decorateWithLines">True to draw decoration lines around blocks.</param>
        /// <param name="showFolderHeaders">True to render folder header rows.</param>
        /// <param name="showFileHeaders">True to render file header rows.</param>
        public FSXmlTextExporter(
            XmlNode dXmlDumpData,
            string[] folderItemsToShow,
            string[] fileItemsToShow,
            string indentWith,
            string spaceWith,
            bool align,
            bool decorateWithLines,
            bool showFolderHeaders,
            bool showFileHeaders
            )
        {
            _x = new StringBuilder();
            _dataToDump = dXmlDumpData;
            _folderItemsToShow = folderItemsToShow;
            _fileItemsToShow = fileItemsToShow;
            _indentWith = indentWith;
            _spaceWith = spaceWith;
            _alignText = align;
            _decorateWithLines = decorateWithLines;
            _showFolderHeaders = showFolderHeaders;
            _showFileHeaders = showFileHeaders;
            _stop = false;

            // calculates how many nodes must be processed to carry out dump
            // this is done by counting the number of directory nodes in
            // data
            XmlLib.NamedNodeCount(
                _dataToDump,
                "d",
                ref _steps);
        }

        #endregion

        #region METHODS

        /// <summary>
        /// Runs the export. When done, fires <see cref="OnEnd"/>; the result is
        /// available from <see cref="Output"/>.
        /// </summary>
        public void Start(
            )
        {
            _running = true;

            // call text dumper
            DumpListViewDataToText(
                _dataToDump,
                0);

            // fires OnFinish event
            DelegateLib.InvokeSubscribers(
                OnEnd,
                this);

            _running = false;
        }

        /// <summary>
        /// Stops the progressible process in this object.
        /// </summary>
        public void Stop(
            )
        {
            _stop = true;
        }

        /// <summary>
        /// Recursively renders one directory node (folder row, file rows, then
        /// child directories) into the output buffer.
        /// </summary>
        /// <param name="nXmlLevelData">Directory node to render.</param>
        /// <param name="depth">Nesting depth, used to size indentation.</param>
        private void DumpListViewDataToText(
            XmlNode nXmlLevelData,
            int depth
            )
        {
            string folderInfoText = "";
            string folderText = "";
            string fileInfoText = "";
            string fileText = "";
            string charLine = "";
            // widths of the widest value per file column, needed for proper formatting
            int[] maxLengths = new int[6];
            int fullPathLength = 0;
            int itemCount = 0;

            // #############################################################
            // fires on advance dump event
            // -------------------------------------------------------------
            DelegateLib.InvokeSubscribers(
                OnNext,
                this);

            _currentStep ++;

            // #############################################################
            // implements "abortion" of processing
            // -------------------------------------------------------------
            if (_stop)
                return;

            // #############################################################
            // generates info text and associated decoration for folders
            // -------------------------------------------------------------
            if (_showFolderHeaders)
            {
                // builds up folder info. This will look like :
                // Folder.......Path......DateCreated..........DateModified, except the "." padding
                // is " "

                // NAME
                if (StringArrayLib.Contains(_folderItemsToShow, "n"))
                    folderInfoText += StringFormatLib.PadText("Folder", " ", nXmlLevelData.SelectSingleNode(".//n").InnerText.Length + _spaceWith.Length);

                // PATH
                if (StringArrayLib.Contains(_folderItemsToShow, "p"))
                    folderInfoText += StringFormatLib.PadText("Path", " ", FSXmlLib.GetFullPathForFolder(nXmlLevelData, "\\").Length + _spaceWith.Length);

                // DATE CREATED
                if (StringArrayLib.Contains(_folderItemsToShow, "dc"))
                    folderInfoText += StringFormatLib.PadText("Date Created", " ", nXmlLevelData.SelectSingleNode(".//dc").InnerText.Length + _spaceWith.Length);

                // DATE MODIFIED
                if (StringArrayLib.Contains(_folderItemsToShow, "dm"))
                    folderInfoText += StringFormatLib.PadText("Date Modified", " ", nXmlLevelData.SelectSingleNode(".//dm").InnerText.Length + _spaceWith.Length);

                // if there is any folder header text, that text will require indentation
                // and decoration
                if (folderInfoText.Length > 0)
                {
                    // first indent the line - the amount of indentation is a product of the length
                    // of the indenting text, and the nesting depth of the current item being
                    // processed
                    charLine = StringFormatLib.CharLine(_indentWith, _indentWith.Length*depth);

                    // builds up the actual visible line which will be drawn under folder header text
                    charLine += StringFormatLib.CharLine("_", folderInfoText.Length) + "\r\n";

                    // indents folder header text in the same way the char line decoration text
                    // was indented.
                    folderInfoText = StringFormatLib.CharLine(_indentWith, _indentWith.Length*depth) + folderInfoText;

                    // adds charLine UNDERNEATH header text (using the linebreak)
                    folderInfoText += "\r\n" + charLine;
                } //if
            }// if

            // #############################################################
            // 1- generates FOLDER text for this node. note that the spacer
            // is added to each property's value to ensure that, for the
            // longest strings, there is still some space before the next
            // property
            // -------------------------------------------------------------
            // NAME
            if (StringArrayLib.Contains(_folderItemsToShow, "n"))
                folderText += nXmlLevelData.SelectSingleNode(".//n").InnerText + _spaceWith;

            // PATH
            if (StringArrayLib.Contains(_folderItemsToShow, "p"))
                folderText += FSXmlLib.GetFullPathForFolder(nXmlLevelData, "\\") + _spaceWith;

            // DATE CREATED
            if (StringArrayLib.Contains(_folderItemsToShow, "dc"))
                folderText += nXmlLevelData.SelectSingleNode(".//dc").InnerText + _spaceWith;

            // DATE MODIFIED
            if (StringArrayLib.Contains(_folderItemsToShow, "dm"))
                folderText += nXmlLevelData.SelectSingleNode(".//dm").InnerText + _spaceWith;

            if (folderText.Length > 0)
            {
                charLine = "";

                // makes decoration lines if necessary - if not using decoration, charLine remains empty and can just
                // be added to the final output
                if (_decorateWithLines)
                {
                    // makes charlines - must do this before folderText is indented, as needs the unindented length of folderText
                    charLine = StringFormatLib.CharLine("*", folderText.Length);

                    // indents charline, so char lines appear flush with the strings they wrap
                    charLine = StringFormatLib.CharLine(_indentWith, _indentWith.Length*depth) + charLine + "\r\n";
                }

                // indents folder text
                folderText = StringFormatLib.CharLine(_indentWith, _indentWith.Length*depth) + folderText;
                folderText += "\r\n";

                // puts it all together
                if (_showFolderHeaders)
                {
                    if (_decorateWithLines)
                        _x.Append ( charLine + folderInfoText + folderText + charLine ) ;
                    else
                        _x.Append ("\r\n" + folderInfoText + folderText + charLine); // add an extra linebreak if displaying header but not displaying decoration lines - the extra space make the resulting text more readable
                }
                else
                    _x.Append (charLine + folderText + charLine);
            }//if

            // 2 - generates FILES text for this node
            // enters "file" processing section only if there are files on this node
            if (nXmlLevelData.SelectSingleNode(".//fs").ChildNodes.Count > 0)
            {
                XmlNode nXmlFiles = nXmlLevelData.SelectSingleNode(".//fs");

                // 2.1 - finds max width of file properties - needed for text align. this operation builds up
                // the int array only, nothing else. max width is required for neat aligning of text
                if (_alignText)
                {
                    for (int i = 0 ; i < nXmlFiles.ChildNodes.Count ; i ++)
                    {
                        if (StringArrayLib.Contains(_fileItemsToShow, "n"))
                        {
                            if (nXmlFiles.ChildNodes[i].SelectSingleNode(".//n").InnerText.Length > maxLengths[0])
                                maxLengths[0] = nXmlFiles.ChildNodes[i].SelectSingleNode(".//n").InnerText.Length;
                        }

                        if (StringArrayLib.Contains(_fileItemsToShow, "p"))
                        {
                            fullPathLength = FSXmlLib.GetFullPathForFile(nXmlFiles.ChildNodes[i], "\\").Length;
                            if (fullPathLength > maxLengths[1]){maxLengths[1] = fullPathLength;}
                        }

                        if (StringArrayLib.Contains(_fileItemsToShow, "dc"))
                        {
                            if (nXmlFiles.ChildNodes[i].SelectSingleNode(".//dc").InnerText.Length > maxLengths[2])
                                maxLengths[2] = nXmlFiles.ChildNodes[i].SelectSingleNode(".//dc").InnerText.Length;
                        }

                        if (StringArrayLib.Contains(_fileItemsToShow, "dm"))
                        {
                            if (nXmlFiles.ChildNodes[i].SelectSingleNode(".//dm").InnerText.Length > maxLengths[3])
                                maxLengths[3] = nXmlFiles.ChildNodes[i].SelectSingleNode(".//dm").InnerText.Length;
                        }

                        if (StringArrayLib.Contains(_fileItemsToShow, "s"))
                        {
                            if (nXmlFiles.ChildNodes[i].SelectSingleNode(".//s").InnerText.Length > maxLengths[4])
                                maxLengths[4] = nXmlFiles.ChildNodes[i].SelectSingleNode(".//s").InnerText.Length;
                        }

                        if (StringArrayLib.Contains(_fileItemsToShow, "e"))
                        {
                            if (ParserLib.ReturnAfterLast(nXmlFiles.ChildNodes[i].SelectSingleNode(".//n").InnerText, ".").Length > maxLengths[5])
                                maxLengths[5] = ParserLib.ReturnAfterLast(nXmlFiles.ChildNodes[i].SelectSingleNode(".//n").InnerText, ".").Length;
                        }
                    }// for
                }// if

                // 2.2 - makes info text for file sections
                fileInfoText = "";

                if (StringArrayLib.Contains(_fileItemsToShow, "n") ||
                    StringArrayLib.Contains(_fileItemsToShow, "p") ||
                    StringArrayLib.Contains(_fileItemsToShow, "dc") ||
                    StringArrayLib.Contains(_fileItemsToShow, "dm") ||
                    StringArrayLib.Contains(_fileItemsToShow, "s") ||
                    StringArrayLib.Contains(_fileItemsToShow, "e"))
                {
                    if (StringArrayLib.Contains(_fileItemsToShow, "n"))
                    {
                        fileInfoText += StringFormatLib.PadText("File", " ", maxLengths[0]) + _spaceWith;
                        itemCount++;
                    }
                    if (StringArrayLib.Contains(_fileItemsToShow, "p"))
                    {
                        fileInfoText += StringFormatLib.PadText("Path", " ", maxLengths[1]) + _spaceWith;
                        itemCount++;
                    }
                    if (StringArrayLib.Contains(_fileItemsToShow, "dc"))
                    {
                        fileInfoText += StringFormatLib.PadText("Date Created", " ", maxLengths[2]) + _spaceWith;
                        itemCount++;
                    }
                    if (StringArrayLib.Contains(_fileItemsToShow, "dm"))
                    {
                        fileInfoText += StringFormatLib.PadText("Date Modified", " ", maxLengths[3]) + _spaceWith;
                        itemCount++;
                    }
                    if (StringArrayLib.Contains(_fileItemsToShow, "s"))
                    {
                        fileInfoText += StringFormatLib.PadText("Size", " ", maxLengths[4]) + _spaceWith;
                        itemCount++;
                    }
                    if (StringArrayLib.Contains(_fileItemsToShow, "e"))
                    {
                        fileInfoText += StringFormatLib.PadText("Type", " ", maxLengths[5]) + _spaceWith;
                        itemCount++;
                    }

                    // indents file item - add 1 to file indentation counter so files are pushed in further than folders on the same level
                    fileInfoText = StringFormatLib.CharLine(_indentWith, _indentWith.Length*(depth + 1)) + fileInfoText;
                    fileInfoText += "\r\n";

                    // makes _ charline and indents it, adds it to header
                    charLine = StringFormatLib.CharLine(_indentWith, _indentWith.Length*(depth + 1));
                    charLine += StringFormatLib.CharLine("_", AggregationLib.Sum(maxLengths) + _spaceWith.Length*itemCount);
                    charLine += "\r\n";
                    fileInfoText += charLine;
                }//if

                // put it together !
                charLine = "";
                if (_decorateWithLines)
                {
                    // creates text decoration line
                    charLine = StringFormatLib.CharLine(_indentWith, _indentWith.Length*(depth + 1));
                    charLine += StringFormatLib.CharLine(".", AggregationLib.Sum(maxLengths) + _spaceWith.Length*itemCount);
                    charLine += "\r\n";
                }

                // BUGFIX: the outer condition here was previously a duplicated
                // "if (_decorateWithLines)", which made the inner else branch
                // unreachable and left the _showFileHeaders setting with no
                // effect. Mirroring the folder section above, the outer test
                // must be _showFileHeaders.
                if (_showFileHeaders)
                {
                    if (_decorateWithLines)
                        _x.Append ( charLine + fileInfoText );
                    else
                        _x.Append ("\r\n" + fileInfoText); // add an extra linebreak if displaying header but not displaying decoration lines - the extra space make the resulting text more readable
                }
                else
                    _x.Append ( charLine );

                // 2.3 - generates text for each file item on this node
                for (int i = 0 ; i < nXmlFiles.ChildNodes.Count ; i ++)
                {
                    fileText = "";

                    // NAME
                    if (StringArrayLib.Contains(_fileItemsToShow, "n"))
                        fileText += StringFormatLib.PadText(nXmlFiles.ChildNodes[i].SelectSingleNode(".//n").InnerText, " ", maxLengths[0]) + _spaceWith;

                    // PATH
                    if (StringArrayLib.Contains(_fileItemsToShow, "p"))
                        fileText += StringFormatLib.PadText(FSXmlLib.GetFullPathForFile(nXmlFiles.ChildNodes[i], "\\"), " ", maxLengths[1]) + _spaceWith;

                    // DATE CREATED
                    if (StringArrayLib.Contains(_fileItemsToShow, "dc"))
                        fileText += StringFormatLib.PadText(nXmlFiles.ChildNodes[i].SelectSingleNode(".//dc").InnerText, " ", maxLengths[2]) + _spaceWith;

                    // DATE MODIFIED
                    if (StringArrayLib.Contains(_fileItemsToShow, "dm"))
                        fileText += StringFormatLib.PadText(nXmlFiles.ChildNodes[i].SelectSingleNode(".//dm").InnerText, " ", maxLengths[3]) + _spaceWith;

                    // SIZE
                    if (StringArrayLib.Contains(_fileItemsToShow, "s"))
                        fileText += StringFormatLib.PadText(nXmlFiles.ChildNodes[i].SelectSingleNode(".//s").InnerText, " ", maxLengths[4]) + _spaceWith;

                    // EXTENSION (FILE TYPE)
                    if (StringArrayLib.Contains(_fileItemsToShow, "e"))
                        fileText += StringFormatLib.PadText(ParserLib.ReturnAfterLast(nXmlFiles.ChildNodes[i].SelectSingleNode(".//n").InnerText, "."), " ", maxLengths[5]) + _spaceWith;

                    if (fileText.Length > 0)
                    {
                        // indents file item - add 1 to file indentation counter so files are pushed in further than folders on the same level
                        fileText = StringFormatLib.CharLine(_indentWith, _indentWith.Length*(depth + 1)) + fileText;

                        // adds file item to main output string
                        _x.Append ( fileText + "\r\n" );
                    }
                }//for

                // entered on final run of loop - adds the char line to close off file block
                // NOTE(review): THIS IS NOT SAFE - assumes charLine still contains the correct
                // decoration text built in the "put it together" step above
                if (_decorateWithLines)
                    _x.Append ( charLine + "\r\n" );

            }// if - file processing section - only entered if there are files on this node

            // 3. reinvokes this method if child elements exist
            // HANDLES CHILD NODES - RECURSION HAPPENS HERE
            for (int i = 0 ; i < nXmlLevelData.SelectSingleNode(".//ds").ChildNodes.Count ; i ++)
                DumpListViewDataToText(nXmlLevelData.SelectSingleNode(".//ds").ChildNodes[i], depth + 1);
        }

        #endregion
    }
}
using System; using System.Data; using System.Data.SqlClient; using Csla; using Csla.Data; namespace ParentLoad.Business.ERCLevel { /// <summary> /// B04_SubContinent (editable child object).<br/> /// This is a generated base class of <see cref="B04_SubContinent"/> business object. /// </summary> /// <remarks> /// This class contains one child collection:<br/> /// - <see cref="B05_CountryObjects"/> of type <see cref="B05_CountryColl"/> (1:M relation to <see cref="B06_Country"/>)<br/> /// This class is an item of <see cref="B03_SubContinentColl"/> collection. /// </remarks> [Serializable] public partial class B04_SubContinent : BusinessBase<B04_SubContinent> { #region Static Fields private static int _lastID; #endregion #region State Fields [NotUndoable] [NonSerialized] internal int parent_Continent_ID = 0; #endregion #region Business Properties /// <summary> /// Maintains metadata about <see cref="SubContinent_ID"/> property. /// </summary> public static readonly PropertyInfo<int> SubContinent_IDProperty = RegisterProperty<int>(p => p.SubContinent_ID, "Sub Continent ID"); /// <summary> /// Gets the Sub Continent ID. /// </summary> /// <value>The Sub Continent ID.</value> public int SubContinent_ID { get { return GetProperty(SubContinent_IDProperty); } } /// <summary> /// Maintains metadata about <see cref="SubContinent_Name"/> property. /// </summary> public static readonly PropertyInfo<string> SubContinent_NameProperty = RegisterProperty<string>(p => p.SubContinent_Name, "Sub Continent Name"); /// <summary> /// Gets or sets the Sub Continent Name. /// </summary> /// <value>The Sub Continent Name.</value> public string SubContinent_Name { get { return GetProperty(SubContinent_NameProperty); } set { SetProperty(SubContinent_NameProperty, value); } } /// <summary> /// Maintains metadata about child <see cref="B05_SubContinent_SingleObject"/> property. 
/// </summary> public static readonly PropertyInfo<B05_SubContinent_Child> B05_SubContinent_SingleObjectProperty = RegisterProperty<B05_SubContinent_Child>(p => p.B05_SubContinent_SingleObject, "B05 SubContinent Single Object", RelationshipTypes.Child); /// <summary> /// Gets the B05 Sub Continent Single Object ("parent load" child property). /// </summary> /// <value>The B05 Sub Continent Single Object.</value> public B05_SubContinent_Child B05_SubContinent_SingleObject { get { return GetProperty(B05_SubContinent_SingleObjectProperty); } private set { LoadProperty(B05_SubContinent_SingleObjectProperty, value); } } /// <summary> /// Maintains metadata about child <see cref="B05_SubContinent_ASingleObject"/> property. /// </summary> public static readonly PropertyInfo<B05_SubContinent_ReChild> B05_SubContinent_ASingleObjectProperty = RegisterProperty<B05_SubContinent_ReChild>(p => p.B05_SubContinent_ASingleObject, "B05 SubContinent ASingle Object", RelationshipTypes.Child); /// <summary> /// Gets the B05 Sub Continent ASingle Object ("parent load" child property). /// </summary> /// <value>The B05 Sub Continent ASingle Object.</value> public B05_SubContinent_ReChild B05_SubContinent_ASingleObject { get { return GetProperty(B05_SubContinent_ASingleObjectProperty); } private set { LoadProperty(B05_SubContinent_ASingleObjectProperty, value); } } /// <summary> /// Maintains metadata about child <see cref="B05_CountryObjects"/> property. /// </summary> public static readonly PropertyInfo<B05_CountryColl> B05_CountryObjectsProperty = RegisterProperty<B05_CountryColl>(p => p.B05_CountryObjects, "B05 Country Objects", RelationshipTypes.Child); /// <summary> /// Gets the B05 Country Objects ("parent load" child property). 
/// </summary> /// <value>The B05 Country Objects.</value> public B05_CountryColl B05_CountryObjects { get { return GetProperty(B05_CountryObjectsProperty); } private set { LoadProperty(B05_CountryObjectsProperty, value); } } #endregion #region Factory Methods /// <summary> /// Factory method. Creates a new <see cref="B04_SubContinent"/> object. /// </summary> /// <returns>A reference to the created <see cref="B04_SubContinent"/> object.</returns> internal static B04_SubContinent NewB04_SubContinent() { return DataPortal.CreateChild<B04_SubContinent>(); } /// <summary> /// Factory method. Loads a <see cref="B04_SubContinent"/> object from the given SafeDataReader. /// </summary> /// <param name="dr">The SafeDataReader to use.</param> /// <returns>A reference to the fetched <see cref="B04_SubContinent"/> object.</returns> internal static B04_SubContinent GetB04_SubContinent(SafeDataReader dr) { B04_SubContinent obj = new B04_SubContinent(); // show the framework that this is a child object obj.MarkAsChild(); obj.Fetch(dr); obj.LoadProperty(B05_CountryObjectsProperty, B05_CountryColl.NewB05_CountryColl()); obj.MarkOld(); return obj; } #endregion #region Constructor /// <summary> /// Initializes a new instance of the <see cref="B04_SubContinent"/> class. /// </summary> /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks> [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)] public B04_SubContinent() { // Use factory methods and do not use direct creation. // show the framework that this is a child object MarkAsChild(); } #endregion #region Data Access /// <summary> /// Loads default values for the <see cref="B04_SubContinent"/> object properties. 
/// </summary> [Csla.RunLocal] protected override void Child_Create() { LoadProperty(SubContinent_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID)); LoadProperty(B05_SubContinent_SingleObjectProperty, DataPortal.CreateChild<B05_SubContinent_Child>()); LoadProperty(B05_SubContinent_ASingleObjectProperty, DataPortal.CreateChild<B05_SubContinent_ReChild>()); LoadProperty(B05_CountryObjectsProperty, DataPortal.CreateChild<B05_CountryColl>()); var args = new DataPortalHookArgs(); OnCreate(args); base.Child_Create(); } /// <summary> /// Loads a <see cref="B04_SubContinent"/> object from the given SafeDataReader. /// </summary> /// <param name="dr">The SafeDataReader to use.</param> private void Fetch(SafeDataReader dr) { // Value properties LoadProperty(SubContinent_IDProperty, dr.GetInt32("SubContinent_ID")); LoadProperty(SubContinent_NameProperty, dr.GetString("SubContinent_Name")); // parent properties parent_Continent_ID = dr.GetInt32("Parent_Continent_ID"); var args = new DataPortalHookArgs(dr); OnFetchRead(args); } /// <summary> /// Loads child <see cref="B05_SubContinent_Child"/> object. /// </summary> /// <param name="child">The child object to load.</param> internal void LoadChild(B05_SubContinent_Child child) { LoadProperty(B05_SubContinent_SingleObjectProperty, child); } /// <summary> /// Loads child <see cref="B05_SubContinent_ReChild"/> object. /// </summary> /// <param name="child">The child object to load.</param> internal void LoadChild(B05_SubContinent_ReChild child) { LoadProperty(B05_SubContinent_ASingleObjectProperty, child); } /// <summary> /// Inserts a new <see cref="B04_SubContinent"/> object in the database. 
/// </summary> /// <param name="parent">The parent object.</param> [Transactional(TransactionalTypes.TransactionScope)] private void Child_Insert(B02_Continent parent) { using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad")) { using (var cmd = new SqlCommand("AddB04_SubContinent", ctx.Connection)) { cmd.CommandType = CommandType.StoredProcedure; cmd.Parameters.AddWithValue("@Parent_Continent_ID", parent.Continent_ID).DbType = DbType.Int32; cmd.Parameters.AddWithValue("@SubContinent_ID", ReadProperty(SubContinent_IDProperty)).Direction = ParameterDirection.Output; cmd.Parameters.AddWithValue("@SubContinent_Name", ReadProperty(SubContinent_NameProperty)).DbType = DbType.String; var args = new DataPortalHookArgs(cmd); OnInsertPre(args); cmd.ExecuteNonQuery(); OnInsertPost(args); LoadProperty(SubContinent_IDProperty, (int) cmd.Parameters["@SubContinent_ID"].Value); } // flushes all pending data operations FieldManager.UpdateChildren(this); } } /// <summary> /// Updates in the database all changes made to the <see cref="B04_SubContinent"/> object. /// </summary> [Transactional(TransactionalTypes.TransactionScope)] private void Child_Update() { if (!IsDirty) return; using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad")) { using (var cmd = new SqlCommand("UpdateB04_SubContinent", ctx.Connection)) { cmd.CommandType = CommandType.StoredProcedure; cmd.Parameters.AddWithValue("@SubContinent_ID", ReadProperty(SubContinent_IDProperty)).DbType = DbType.Int32; cmd.Parameters.AddWithValue("@SubContinent_Name", ReadProperty(SubContinent_NameProperty)).DbType = DbType.String; var args = new DataPortalHookArgs(cmd); OnUpdatePre(args); cmd.ExecuteNonQuery(); OnUpdatePost(args); } // flushes all pending data operations FieldManager.UpdateChildren(this); } } /// <summary> /// Self deletes the <see cref="B04_SubContinent"/> object from database. 
/// </summary> [Transactional(TransactionalTypes.TransactionScope)] private void Child_DeleteSelf() { using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad")) { // flushes all pending data operations FieldManager.UpdateChildren(this); using (var cmd = new SqlCommand("DeleteB04_SubContinent", ctx.Connection)) { cmd.CommandType = CommandType.StoredProcedure; cmd.Parameters.AddWithValue("@SubContinent_ID", ReadProperty(SubContinent_IDProperty)).DbType = DbType.Int32; var args = new DataPortalHookArgs(cmd); OnDeletePre(args); cmd.ExecuteNonQuery(); OnDeletePost(args); } } // removes all previous references to children LoadProperty(B05_SubContinent_SingleObjectProperty, DataPortal.CreateChild<B05_SubContinent_Child>()); LoadProperty(B05_SubContinent_ASingleObjectProperty, DataPortal.CreateChild<B05_SubContinent_ReChild>()); LoadProperty(B05_CountryObjectsProperty, DataPortal.CreateChild<B05_CountryColl>()); } #endregion #region DataPortal Hooks /// <summary> /// Occurs after setting all defaults for object creation. /// </summary> partial void OnCreate(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation. /// </summary> partial void OnDeletePre(DataPortalHookArgs args); /// <summary> /// Occurs in DataPortal_Delete, after the delete operation, before Commit(). /// </summary> partial void OnDeletePost(DataPortalHookArgs args); /// <summary> /// Occurs after setting query parameters and before the fetch operation. /// </summary> partial void OnFetchPre(DataPortalHookArgs args); /// <summary> /// Occurs after the fetch operation (object or collection is fully loaded and set up). /// </summary> partial void OnFetchPost(DataPortalHookArgs args); /// <summary> /// Occurs after the low level fetch operation, before the data reader is destroyed. 
/// </summary>
partial void OnFetchRead(DataPortalHookArgs args);

/// <summary>
/// Occurs after setting query parameters and before the update operation.
/// </summary>
partial void OnUpdatePre(DataPortalHookArgs args);

/// <summary>
/// Occurs in Child_Update, after the update operation, before setting back row identifiers (RowVersion) and Commit().
/// </summary>
partial void OnUpdatePost(DataPortalHookArgs args);

/// <summary>
/// Occurs in Child_Insert, after setting query parameters and before the insert operation.
/// </summary>
partial void OnInsertPre(DataPortalHookArgs args);

/// <summary>
/// Occurs in Child_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
/// </summary>
partial void OnInsertPost(DataPortalHookArgs args);

#endregion

}
}
// -------------------------------------
// Domain : IBT / Realtime.co
// Author : Nicholas Ventimiglia
// Product : Messaging and Storage
// Published : 2014
// -------------------------------------
using System;
using System.Collections.Generic;
using System.IO;

namespace Realtime.LITJson
{
    /// <summary>
    /// The kind of token the reader is currently positioned on.
    /// </summary>
    public enum JsonToken
    {
        None,

        ObjectStart,
        PropertyName,
        ObjectEnd,

        ArrayStart,
        ArrayEnd,

        Int,
        Long,
        Double,

        String,

        Boolean,
        Null
    }

    /// <summary>
    /// Pull-style (forward-only) JSON token reader. Each call to <see cref="Read"/>
    /// advances to the next token, driven by a table-based LL(1) parser whose
    /// prediction stack is <c>automaton_stack</c> and whose grammar lives in the
    /// shared <c>parse_table</c> built once in the static constructor.
    /// </summary>
    public class JsonReader
    {
        #region Fields
        // Shared LL(1) parse table: non-terminal -> (lookahead -> production symbols).
        // Built once in the static constructor; read-only afterwards.
        private static IDictionary<int, IDictionary<int, int[]>> parse_table;

        private Stack<int> automaton_stack;   // LL(1) prediction stack of ParserToken values.
        private int current_input;            // current lookahead token from the lexer
        private int current_symbol;           // symbol popped from the prediction stack
        private bool end_of_json;             // true after a complete top-level JSON value
        private bool end_of_input;            // true after the underlying reader is exhausted/closed
        private Lexer lexer;
        private bool parser_in_string;        // true between the opening and closing '"' of a string
        private bool parser_return;           // set by ProcessSymbol when a token is ready to surface
        private bool read_started;
        private TextReader reader;
        private bool reader_is_owned;         // whether Close() should also close `reader`
        private bool skip_non_members;
        private object token_value;
        private JsonToken token;
        #endregion


        #region Public Properties
        public bool AllowComments {
            get { return lexer.AllowComments; }
            set { lexer.AllowComments = value; }
        }

        public bool AllowSingleQuotedStrings {
            get { return lexer.AllowSingleQuotedStrings; }
            set { lexer.AllowSingleQuotedStrings = value; }
        }

        public bool SkipNonMembers {
            get { return skip_non_members; }
            set { skip_non_members = value; }
        }

        public bool EndOfInput {
            get { return end_of_input; }
        }

        public bool EndOfJson {
            get { return end_of_json; }
        }

        /// <summary>Kind of the token produced by the last successful <see cref="Read"/>.</summary>
        public JsonToken Token {
            get { return token; }
        }

        /// <summary>Value of the last token (string/number/bool), or null.</summary>
        public object Value {
            get { return token_value; }
        }
        #endregion


        #region Constructors
        static JsonReader ()
        {
            PopulateParseTable ();
        }

        public JsonReader (string json_text) : this (new StringReader (json_text), true)
        {
        }

        public JsonReader (TextReader reader) : this (reader, false)
        {
        }

        // `owned` controls whether Close() disposes the TextReader (true for the
        // StringReader we create ourselves; false for a caller-supplied reader).
        private JsonReader (TextReader reader, bool owned)
        {
            if (reader == null)
                throw new ArgumentNullException ("reader");

            parser_in_string = false;
            parser_return = false;

            read_started = false;
            automaton_stack = new Stack<int> ();
            // Initial stack: [Text, End] — expect one top-level JSON text, then end.
            automaton_stack.Push ((int) ParserToken.End);
            automaton_stack.Push ((int) ParserToken.Text);

            lexer = new Lexer (reader);

            end_of_input = false;
            end_of_json = false;

            skip_non_members = true;

            this.reader = reader;
            reader_is_owned = owned;
        }
        #endregion


        #region Static Methods
        // Builds the LL(1) parse table for the JSON grammar.
        // See section A.2. of the manual for details
        private static void PopulateParseTable ()
        {
            parse_table = new Dictionary<int, IDictionary<int, int[]>> ();

            TableAddRow (ParserToken.Array);
            TableAddCol (ParserToken.Array, '[', '[', (int) ParserToken.ArrayPrime);

            TableAddRow (ParserToken.ArrayPrime);
            TableAddCol (ParserToken.ArrayPrime, '"', (int) ParserToken.Value, (int) ParserToken.ValueRest, ']');
            TableAddCol (ParserToken.ArrayPrime, '[', (int) ParserToken.Value, (int) ParserToken.ValueRest, ']');
            TableAddCol (ParserToken.ArrayPrime, ']', ']');
            TableAddCol (ParserToken.ArrayPrime, '{', (int) ParserToken.Value, (int) ParserToken.ValueRest, ']');
            TableAddCol (ParserToken.ArrayPrime, (int) ParserToken.Number, (int) ParserToken.Value, (int) ParserToken.ValueRest, ']');
            TableAddCol (ParserToken.ArrayPrime, (int) ParserToken.True, (int) ParserToken.Value, (int) ParserToken.ValueRest, ']');
            TableAddCol (ParserToken.ArrayPrime, (int) ParserToken.False, (int) ParserToken.Value, (int) ParserToken.ValueRest, ']');
            TableAddCol (ParserToken.ArrayPrime, (int) ParserToken.Null, (int) ParserToken.Value, (int) ParserToken.ValueRest, ']');

            TableAddRow (ParserToken.Object);
            TableAddCol (ParserToken.Object, '{', '{', (int) ParserToken.ObjectPrime);

            TableAddRow (ParserToken.ObjectPrime);
            TableAddCol (ParserToken.ObjectPrime, '"', (int) ParserToken.Pair, (int) ParserToken.PairRest, '}');
            TableAddCol (ParserToken.ObjectPrime, '}', '}');

            TableAddRow (ParserToken.Pair);
            TableAddCol (ParserToken.Pair, '"', (int) ParserToken.String, ':', (int) ParserToken.Value);

            TableAddRow (ParserToken.PairRest);
            TableAddCol (ParserToken.PairRest, ',', ',', (int) ParserToken.Pair, (int) ParserToken.PairRest);
            TableAddCol (ParserToken.PairRest, '}', (int) ParserToken.Epsilon);

            TableAddRow (ParserToken.String);
            TableAddCol (ParserToken.String, '"', '"', (int) ParserToken.CharSeq, '"');

            TableAddRow (ParserToken.Text);
            TableAddCol (ParserToken.Text, '[', (int) ParserToken.Array);
            TableAddCol (ParserToken.Text, '{', (int) ParserToken.Object);

            TableAddRow (ParserToken.Value);
            TableAddCol (ParserToken.Value, '"', (int) ParserToken.String);
            TableAddCol (ParserToken.Value, '[', (int) ParserToken.Array);
            TableAddCol (ParserToken.Value, '{', (int) ParserToken.Object);
            TableAddCol (ParserToken.Value, (int) ParserToken.Number, (int) ParserToken.Number);
            TableAddCol (ParserToken.Value, (int) ParserToken.True, (int) ParserToken.True);
            TableAddCol (ParserToken.Value, (int) ParserToken.False, (int) ParserToken.False);
            TableAddCol (ParserToken.Value, (int) ParserToken.Null, (int) ParserToken.Null);

            TableAddRow (ParserToken.ValueRest);
            TableAddCol (ParserToken.ValueRest, ',', ',', (int) ParserToken.Value, (int) ParserToken.ValueRest);
            TableAddCol (ParserToken.ValueRest, ']', (int) ParserToken.Epsilon);
        }

        private static void TableAddCol (ParserToken row, int col, params int[] symbols)
        {
            parse_table[(int) row].Add (col, symbols);
        }

        private static void TableAddRow (ParserToken rule)
        {
            parse_table.Add ((int) rule, new Dictionary<int, int[]> ());
        }
        #endregion


        #region Private Methods
        // Classifies a numeric literal as Double (has '.', 'e' or 'E'),
        // Int (fits in Int32) or Long, and stores the parsed value.
        // NOTE(review): TryParse here uses the current culture; in locales where
        // ',' is the decimal separator, "1.5" would fail to parse as a double and
        // fall through to the 0 fallback — consider CultureInfo.InvariantCulture.
        private void ProcessNumber (string number)
        {
            if (number.IndexOf ('.') != -1 ||
                number.IndexOf ('e') != -1 ||
                number.IndexOf ('E') != -1) {

                double n_double;
                if (Double.TryParse (number, out n_double)) {
                    token = JsonToken.Double;
                    token_value = n_double;

                    return;
                }
            }

            int n_int32;
            if (Int32.TryParse (number, out n_int32)) {
                token = JsonToken.Int;
                token_value = n_int32;

                return;
            }

            long n_int64;
            if (Int64.TryParse (number, out n_int64)) {
                token = JsonToken.Long;
                token_value = n_int64;

                return;
            }

            // Shouldn't happen, but just in case, return something
            token = JsonToken.Int;
            token_value = 0;
        }

        // Maps the symbol just popped from the prediction stack to the public
        // JsonToken surfaced by Read(). Sets parser_return when a complete token
        // is ready to be handed to the caller.
        private void ProcessSymbol ()
        {
            if (current_symbol == '[') {
                token = JsonToken.ArrayStart;
                parser_return = true;

            } else if (current_symbol == ']') {
                token = JsonToken.ArrayEnd;
                parser_return = true;

            } else if (current_symbol == '{') {
                token = JsonToken.ObjectStart;
                parser_return = true;

            } else if (current_symbol == '}') {
                token = JsonToken.ObjectEnd;
                parser_return = true;

            } else if (current_symbol == '"') {
                // A closing quote completes the string token; an opening quote only
                // flips state (the value arrives later via the CharSeq symbol).
                if (parser_in_string) {
                    parser_in_string = false;

                    parser_return = true;

                } else {
                    if (token == JsonToken.None)
                        token = JsonToken.String;

                    parser_in_string = true;
                }

            } else if (current_symbol == (int) ParserToken.CharSeq) {
                token_value = lexer.StringValue;

            } else if (current_symbol == (int) ParserToken.False) {
                token = JsonToken.Boolean;
                token_value = false;
                parser_return = true;

            } else if (current_symbol == (int) ParserToken.Null) {
                token = JsonToken.Null;
                parser_return = true;

            } else if (current_symbol == (int) ParserToken.Number) {
                ProcessNumber (lexer.StringValue);

                parser_return = true;

            } else if (current_symbol == (int) ParserToken.Pair) {
                token = JsonToken.PropertyName;

            } else if (current_symbol == (int) ParserToken.True) {
                token = JsonToken.Boolean;
                token_value = true;
                parser_return = true;

            }
        }

        // Pulls the next token from the lexer into current_input.
        // Returns false (and closes the reader) at end of input.
        private bool ReadToken ()
        {
            if (end_of_input)
                return false;

            lexer.NextToken ();

            if (lexer.EndOfInput) {
                Close ();

                return false;
            }

            current_input = lexer.Token;

            return true;
        }
        #endregion


        /// <summary>
        /// Marks the reader as finished and disposes the underlying TextReader
        /// if this instance owns it. Safe to call more than once.
        /// </summary>
        public void Close ()
        {
            if (end_of_input)
                return;

            end_of_input = true;
            end_of_json = true;

            if (reader_is_owned)
                reader.Close ();

            reader = null;
        }

        /// <summary>
        /// Advances to the next JSON token. Returns true when a token was read
        /// (inspect <see cref="Token"/>/<see cref="Value"/>), false at end of input.
        /// Throws <see cref="JsonException"/> on malformed input.
        /// </summary>
        public bool Read ()
        {
            if (end_of_input)
                return false;

            if (end_of_json) {
                // Previous top-level value finished; reset the automaton so a
                // stream of concatenated JSON texts can be read back-to-back.
                end_of_json = false;

                automaton_stack.Clear ();
                automaton_stack.Push ((int) ParserToken.End);
                automaton_stack.Push ((int) ParserToken.Text);
            }

            parser_in_string = false;
            parser_return = false;

            token = JsonToken.None;
            token_value = null;

            if (! read_started) {
                read_started = true;

                if (! ReadToken ())
                    return false;
            }

            // Standard LL(1) loop: pop a symbol; if it matches the lookahead,
            // consume input; otherwise expand it via the parse table.
            int[] entry_symbols;

            while (true) {
                if (parser_return) {
                    if (automaton_stack.Peek () == (int) ParserToken.End)
                        end_of_json = true;

                    return true;
                }

                current_symbol = automaton_stack.Pop ();

                ProcessSymbol ();

                if (current_symbol == current_input) {
                    if (! ReadToken ()) {
                        if (automaton_stack.Peek () != (int) ParserToken.End)
                            throw new JsonException (
                                "Input doesn't evaluate to proper JSON text");

                        if (parser_return)
                            return true;

                        return false;
                    }

                    continue;
                }

                try {

                    entry_symbols =
                        parse_table[current_symbol][current_input];

                } catch (KeyNotFoundException e) {
                    // No production for (non-terminal, lookahead): syntax error.
                    throw new JsonException ((ParserToken) current_input, e);
                }

                if (entry_symbols[0] == (int) ParserToken.Epsilon)
                    continue;

                // Push the production right-to-left so it unwinds left-to-right.
                for (int i = entry_symbols.Length - 1; i >= 0; i--)
                    automaton_stack.Push (entry_symbols[i]);
            }
        }

    }
}
//
// Grant.cs
// s.im.pl serialization
//
// Generated by MetaMetadataDotNetTranslator.
// Copyright 2017 Interface Ecology Lab.
//

using Ecologylab.BigSemantics.Generated.Library;
using Ecologylab.BigSemantics.Generated.Library.CreativeWorkNS.ScholarlyArticleNS;
using Ecologylab.BigSemantics.Generated.Library.PersonNS;
using Ecologylab.BigSemantics.MetaMetadataNS;
using Ecologylab.BigSemantics.MetadataNS;
using Ecologylab.BigSemantics.MetadataNS.Builtins;
using Ecologylab.BigSemantics.MetadataNS.Builtins.CreativeWorkNS;
using Ecologylab.BigSemantics.MetadataNS.Scalar;
using Ecologylab.Collections;
using Simpl.Fundamental.Generic;
using Simpl.Fundamental.Net;
using Simpl.Serialization;
using Simpl.Serialization.Attributes;
using System;
using System.Collections;
using System.Collections.Generic;

namespace Ecologylab.BigSemantics.Generated.Library.CreativeWorkNS
{
    /// <summary>
    /// Metadata type for a research grant: funding programs, grant number,
    /// start/expiration dates, amount, staff, sponsor and related publications.
    /// Field names and Simpl attributes are part of the serialization contract
    /// and must not be renamed.
    /// </summary>
    [SimplInherit]
    public class Grant : CreativeWork
    {
        [SimplCollection("grant_program")]
        [MmName("programs")]
        private List<GrantProgram> programs;

        [SimplScalar]
        private MetadataString grantNumber;

        [SimplScalar]
        private MetadataDate startDate;

        [SimplScalar]
        private MetadataDate expireDate;

        [SimplScalar]
        private MetadataInteger amount;

        [SimplCollection("grant_staff")]
        [MmName("managers")]
        private List<GrantStaff> managers;

        [SimplComposite]
        [MmName("sponsor")]
        private PostalAddress sponsor;

        [SimplCollection("scholarly_article")]
        [MmName("publications")]
        private List<ScholarlyArticle> publications;

        [SimplCollection("scholarly_article")]
        [MmName("proceedings")]
        private List<ScholarlyArticle> proceedings;

        [SimplCollection("relevant_location")]
        [MmName("relevant_locations")]
        private List<Ecologylab.BigSemantics.MetadataNS.Scalar.MetadataParsedURL> relevantLocations;

        /// <summary>Parameterless constructor required by Simpl deserialization.</summary>
        public Grant()
        { }

        /// <summary>Constructs a Grant bound to its meta-metadata description.</summary>
        public Grant(MetaMetadataCompositeField mmd) : base(mmd)
        { }

        // Each property below assigns only when the value actually changed;
        // property-change notification is still pending (see generator TODO).

        /// <summary>Funding programs this grant belongs to.</summary>
        public List<GrantProgram> Programs
        {
            get { return programs; }
            set
            {
                if (this.programs == value)
                    return;
                this.programs = value;
                // TODO: raise property-change notification once the mechanism exists.
            }
        }

        /// <summary>The grant's identifying number.</summary>
        public MetadataString GrantNumber
        {
            get { return grantNumber; }
            set
            {
                if (this.grantNumber == value)
                    return;
                this.grantNumber = value;
                // TODO: raise property-change notification once the mechanism exists.
            }
        }

        /// <summary>Date the grant period begins.</summary>
        public MetadataDate StartDate
        {
            get { return startDate; }
            set
            {
                if (this.startDate == value)
                    return;
                this.startDate = value;
                // TODO: raise property-change notification once the mechanism exists.
            }
        }

        /// <summary>Date the grant expires.</summary>
        public MetadataDate ExpireDate
        {
            get { return expireDate; }
            set
            {
                if (this.expireDate == value)
                    return;
                this.expireDate = value;
                // TODO: raise property-change notification once the mechanism exists.
            }
        }

        /// <summary>Awarded amount.</summary>
        public MetadataInteger Amount
        {
            get { return amount; }
            set
            {
                if (this.amount == value)
                    return;
                this.amount = value;
                // TODO: raise property-change notification once the mechanism exists.
            }
        }

        /// <summary>Staff members managing the grant.</summary>
        public List<GrantStaff> Managers
        {
            get { return managers; }
            set
            {
                if (this.managers == value)
                    return;
                this.managers = value;
                // TODO: raise property-change notification once the mechanism exists.
            }
        }

        /// <summary>Sponsoring organization's address.</summary>
        public PostalAddress Sponsor
        {
            get { return sponsor; }
            set
            {
                if (this.sponsor == value)
                    return;
                this.sponsor = value;
                // TODO: raise property-change notification once the mechanism exists.
            }
        }

        /// <summary>Articles published under this grant.</summary>
        public List<ScholarlyArticle> Publications
        {
            get { return publications; }
            set
            {
                if (this.publications == value)
                    return;
                this.publications = value;
                // TODO: raise property-change notification once the mechanism exists.
            }
        }

        /// <summary>Proceedings papers associated with this grant.</summary>
        public List<ScholarlyArticle> Proceedings
        {
            get { return proceedings; }
            set
            {
                if (this.proceedings == value)
                    return;
                this.proceedings = value;
                // TODO: raise property-change notification once the mechanism exists.
            }
        }

        /// <summary>URLs of locations relevant to the grant.</summary>
        public List<Ecologylab.BigSemantics.MetadataNS.Scalar.MetadataParsedURL> RelevantLocations
        {
            get { return relevantLocations; }
            set
            {
                if (this.relevantLocations == value)
                    return;
                this.relevantLocations = value;
                // TODO: raise property-change notification once the mechanism exists.
            }
        }
    }
}
using Microsoft.VisualStudio.Services.Agent.Listener;
using Microsoft.VisualStudio.Services.Agent.Listener.Configuration;
using Microsoft.VisualStudio.Services.Agent.Util;
using Moq;
using System;
using System.Runtime.CompilerServices;
using Xunit;

namespace Microsoft.VisualStudio.Services.Agent.Tests
{
    /// <summary>
    /// L0 tests for <see cref="CommandSettings"/>: command/flag/arg parsing,
    /// the VSTS_AGENT_INPUT_* environment-variable fallback (values are consumed
    /// and removed; secrets are registered with the secret masker), prompting when
    /// a value is missing or invalid, and command-line validation.
    /// </summary>
    public sealed class CommandSettingsL0
    {
        private readonly Mock<IPromptManager> _promptManager = new Mock<IPromptManager>();

        // It is sufficient to test one arg only. All individual args are tested by the PromptsFor___ methods.
        // The PromptsFor___ methods suffice to cover the interesting differences between each of the args.
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsArg()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--agent", "some agent" });

                // Act.
                string actual = command.GetAgentName();

                // Assert.
                Assert.Equal("some agent", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsArgFromEnvVar()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                try
                {
                    // Arrange.
                    Environment.SetEnvironmentVariable("VSTS_AGENT_INPUT_AGENT", "some agent");
                    var command = new CommandSettings(hc, args: new string[0]);

                    // Act.
                    string actual = command.GetAgentName();

                    // Assert.
                    Assert.Equal("some agent", actual);
                    Assert.Equal(string.Empty, Environment.GetEnvironmentVariable("VSTS_AGENT_INPUT_AGENT") ?? string.Empty); // Should remove.
                    // Non-secret arg must NOT be registered with the masker.
                    // (Fixed: expected value goes first in Assert.Equal.)
                    Assert.Equal("some agent", hc.SecretMasker.MaskSecrets("some agent"));
                }
                finally
                {
                    Environment.SetEnvironmentVariable("VSTS_AGENT_INPUT_AGENT", null);
                }
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsArgSecretFromEnvVar()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                try
                {
                    // Arrange.
                    Environment.SetEnvironmentVariable("VSTS_AGENT_INPUT_TOKEN", "some secret token value");
                    var command = new CommandSettings(hc, args: new string[0]);

                    // Act.
                    string actual = command.GetToken();

                    // Assert.
                    Assert.Equal("some secret token value", actual);
                    Assert.Equal(string.Empty, Environment.GetEnvironmentVariable("VSTS_AGENT_INPUT_TOKEN") ?? string.Empty); // Should remove.
                    // Secret value must be registered with the masker.
                    // (Fixed: expected value goes first in Assert.Equal.)
                    Assert.Equal("***", hc.SecretMasker.MaskSecrets("some secret token value"));
                }
                finally
                {
                    Environment.SetEnvironmentVariable("VSTS_AGENT_INPUT_TOKEN", null);
                }
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsCommandConfigure()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "configure" });

                // Act.
                bool actual = command.Configure;

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsCommandRun()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "run" });

                // Act.
                bool actual = command.Run;

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsCommandUnconfigure()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "remove" });

                // Act.
                bool actual = command.Remove;

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagAcceptTeeEula()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--acceptteeeula" });

                // Act.
                bool actual = command.GetAcceptTeeEula();

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagCommit()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--commit" });

                // Act.
                bool actual = command.Commit;

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagHelp()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--help" });

                // Act.
                bool actual = command.Help;

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagReplace()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--replace" });

                // Act.
                bool actual = command.GetReplace();

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagRunAsService()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--runasservice" });

                // Act.
                bool actual = command.GetRunAsService();

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagUnattended()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--unattended" });

                // Act.
                bool actual = command.Unattended;

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagUnattendedFromEnvVar()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                try
                {
                    // Arrange.
                    Environment.SetEnvironmentVariable("VSTS_AGENT_INPUT_UNATTENDED", "true");
                    var command = new CommandSettings(hc, args: new string[0]);

                    // Act.
                    bool actual = command.Unattended;

                    // Assert.
                    // Fixed: use Assert.True for booleans instead of Assert.Equal(true, ...) (xUnit2004).
                    Assert.True(actual);
                    Assert.Equal(string.Empty, Environment.GetEnvironmentVariable("VSTS_AGENT_INPUT_UNATTENDED") ?? string.Empty); // Should remove.
                }
                finally
                {
                    Environment.SetEnvironmentVariable("VSTS_AGENT_INPUT_UNATTENDED", null);
                }
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagVersion()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--version" });

                // Act.
                bool actual = command.Version;

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PassesUnattendedToReadBool()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--unattended" });
                _promptManager
                    .Setup(x => x.ReadBool(
                        Constants.Agent.CommandLine.Flags.AcceptTeeEula, // argName
                        StringUtil.Loc("AcceptTeeEula"), // description
                        false, // defaultValue
                        true)) // unattended
                    .Returns(true);

                // Act.
                bool actual = command.GetAcceptTeeEula();

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PassesUnattendedToReadValue()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--unattended" });
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.Agent, // argName
                        StringUtil.Loc("AgentName"), // description
                        false, // secret
                        Environment.MachineName, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        true)) // unattended
                    .Returns("some agent");

                // Act.
                string actual = command.GetAgentName();

                // Assert.
                Assert.Equal("some agent", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForAcceptTeeEula()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadBool(
                        Constants.Agent.CommandLine.Flags.AcceptTeeEula, // argName
                        StringUtil.Loc("AcceptTeeEula"), // description
                        false, // defaultValue
                        false)) // unattended
                    .Returns(true);

                // Act.
                bool actual = command.GetAcceptTeeEula();

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForAgent()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.Agent, // argName
                        StringUtil.Loc("AgentName"), // description
                        false, // secret
                        Environment.MachineName, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("some agent");

                // Act.
                string actual = command.GetAgentName();

                // Assert.
                Assert.Equal("some agent", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForAuth()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.Auth, // argName
                        StringUtil.Loc("AuthenticationType"), // description
                        false, // secret
                        "some default auth", // defaultValue
                        Validators.AuthSchemeValidator, // validator
                        false)) // unattended
                    .Returns("some auth");

                // Act.
                string actual = command.GetAuth("some default auth");

                // Assert.
                Assert.Equal("some auth", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForPassword()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.Password, // argName
                        StringUtil.Loc("Password"), // description
                        true, // secret
                        string.Empty, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("some password");

                // Act.
                string actual = command.GetPassword();

                // Assert.
                Assert.Equal("some password", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForPool()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.Pool, // argName
                        StringUtil.Loc("AgentMachinePoolNameLabel"), // description
                        false, // secret
                        "default", // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("some pool");

                // Act.
                string actual = command.GetPool();

                // Assert.
                Assert.Equal("some pool", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForReplace()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadBool(
                        Constants.Agent.CommandLine.Flags.Replace, // argName
                        StringUtil.Loc("Replace"), // description
                        false, // defaultValue
                        false)) // unattended
                    .Returns(true);

                // Act.
                bool actual = command.GetReplace();

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForRunAsService()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadBool(
                        Constants.Agent.CommandLine.Flags.RunAsService, // argName
                        StringUtil.Loc("RunAgentAsServiceDescription"), // description
                        false, // defaultValue
                        false)) // unattended
                    .Returns(true);

                // Act.
                bool actual = command.GetRunAsService();

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForToken()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.Token, // argName
                        StringUtil.Loc("PersonalAccessToken"), // description
                        true, // secret
                        string.Empty, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("some token");

                // Act.
                string actual = command.GetToken();

                // Assert.
                Assert.Equal("some token", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForUrl()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.Url, // argName
                        StringUtil.Loc("ServerUrl"), // description
                        false, // secret
                        string.Empty, // defaultValue
                        Validators.ServerUrlValidator, // validator
                        false)) // unattended
                    .Returns("some url");

                // Act.
                string actual = command.GetUrl();

                // Assert.
                Assert.Equal("some url", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForUserName()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.UserName, // argName
                        StringUtil.Loc("UserName"), // description
                        false, // secret
                        string.Empty, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("some user name");

                // Act.
                string actual = command.GetUserName();

                // Assert.
                Assert.Equal("some user name", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForWindowsLogonAccount()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.WindowsLogonAccount, // argName
                        StringUtil.Loc("WindowsLogonAccountNameDescription"), // description
                        false, // secret
                        "some default account", // defaultValue
                        Validators.NTAccountValidator, // validator
                        false)) // unattended
                    .Returns("some windows logon account");

                // Act.
                string actual = command.GetWindowsLogonAccount("some default account", StringUtil.Loc("WindowsLogonAccountNameDescription"));

                // Assert.
                Assert.Equal("some windows logon account", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForWindowsLogonPassword()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                string accountName = "somewindowsaccount";
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.WindowsLogonPassword, // argName
                        StringUtil.Loc("WindowsLogonPasswordDescription", accountName), // description
                        true, // secret
                        string.Empty, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("some windows logon password");

                // Act.
                string actual = command.GetWindowsLogonPassword(accountName);

                // Assert.
                Assert.Equal("some windows logon password", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForWork()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.Work, // argName
                        StringUtil.Loc("WorkFolderDescription"), // description
                        false, // secret
                        "_work", // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("some work");

                // Act.
                string actual = command.GetWork();

                // Assert.
                Assert.Equal("some work", actual);
            }
        }

        // It is sufficient to test one arg only. All individual args are tested by the PromptsFor___ methods.
        // The PromptsFor___ methods suffice to cover the interesting differences between each of the args.
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsWhenEmpty()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--url", "" });
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.Url, // argName
                        StringUtil.Loc("ServerUrl"), // description
                        false, // secret
                        string.Empty, // defaultValue
                        Validators.ServerUrlValidator, // validator
                        false)) // unattended
                    .Returns("some url");

                // Act.
                string actual = command.GetUrl();

                // Assert.
                Assert.Equal("some url", actual);
            }
        }

        // It is sufficient to test one arg only. All individual args are tested by the PromptsFor___ methods.
        // The PromptsFor___ methods suffice to cover the interesting differences between each of the args.
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsWhenInvalid()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--url", "notValid" });
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.Url, // argName
                        StringUtil.Loc("ServerUrl"), // description
                        false, // secret
                        string.Empty, // defaultValue
                        Validators.ServerUrlValidator, // validator
                        false)) // unattended
                    .Returns("some url");

                // Act.
                string actual = command.GetUrl();

                // Assert.
                Assert.Equal("some url", actual);
            }
        }

        /*
         * Deployment Agent Tests
         */
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagDeploymentAgentWithBackCompat()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--machinegroup" });

                // Act.
                bool actual = command.DeploymentGroup;

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagDeploymentAgent()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--deploymentgroup" });

                // Act.
                bool actual = command.DeploymentGroup;

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagAddDeploymentGroupTagsBackCompat()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--addmachinegrouptags" });

                // Act.
                bool actual = command.GetDeploymentGroupTagsRequired();

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void GetsFlagAddDeploymentGroupTags()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--adddeploymentgrouptags" });

                // Act.
                bool actual = command.GetDeploymentGroupTagsRequired();

                // Assert.
                Assert.True(actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForProjectName()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.ProjectName, // argName
                        StringUtil.Loc("ProjectName"), // description
                        false, // secret
                        string.Empty, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("TestProject");

                // Act.
                string actual = command.GetProjectName(string.Empty);

                // Assert.
                Assert.Equal("TestProject", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForCollectionName()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.CollectionName, // argName
                        StringUtil.Loc("CollectionName"), // description
                        false, // secret
                        "DefaultCollection", // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("TestCollection");

                // Act.
                string actual = command.GetCollectionName();

                // Assert.
                Assert.Equal("TestCollection", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForDeploymentGroupName()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.DeploymentGroupName, // argName
                        StringUtil.Loc("DeploymentGroupName"), // description
                        false, // secret
                        string.Empty, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("Test Deployment Group");

                // Act.
                string actual = command.GetDeploymentGroupName();

                // Assert.
                Assert.Equal("Test Deployment Group", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForDeploymentPoolName()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.DeploymentPoolName, // argName
                        StringUtil.Loc("DeploymentPoolName"), // description
                        false, // secret
                        string.Empty, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("Test Deployment Pool Name");

                // Act.
                string actual = command.GetDeploymentPoolName();

                // Assert.
                Assert.Equal("Test Deployment Pool Name", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void DeploymentGroupNameBackCompat()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(
                    hc,
                    new[]
                    {
                        "--machinegroupname", "Test-MachineGroupName",
                        "--deploymentgroupname", "Test-DeploymentGroupName"
                    });
                _promptManager.Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.DeploymentGroupName, // argName
                        StringUtil.Loc("DeploymentGroupName"), // description
                        false, // secret
                        string.Empty, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("This Method should not get called!");

                // Act.
                string actual = command.GetDeploymentGroupName();

                // Validate if --machinegroupname parameter is working
                Assert.Equal("Test-MachineGroupName", actual);

                // Validate Read Value should not get invoked.
                _promptManager.Verify(x => x.ReadValue(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<string>(), It.IsAny<Func<string, bool>>(), It.IsAny<bool>()), Times.Never);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void PromptsForDeploymentGroupTags()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[0]);
                _promptManager
                    .Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.DeploymentGroupTags, // argName
                        StringUtil.Loc("DeploymentGroupTags"), // description
                        false, // secret
                        string.Empty, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("Test-Tag1,Test-Tg2");

                // Act.
                string actual = command.GetDeploymentGroupTags();

                // Assert.
                Assert.Equal("Test-Tag1,Test-Tg2", actual);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void DeploymentGroupTagsBackCompat()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(
                    hc,
                    new[]
                    {
                        "--machinegrouptags", "Test-MachineGrouptag1,Test-MachineGrouptag2",
                        "--deploymentgrouptags", "Test-DeploymentGrouptag1,Test-DeploymentGrouptag2"
                    });
                _promptManager.Setup(x => x.ReadValue(
                        Constants.Agent.CommandLine.Args.DeploymentGroupTags, // argName
                        StringUtil.Loc("DeploymentGroupTags"), // description
                        false, // secret
                        string.Empty, // defaultValue
                        Validators.NonEmptyValidator, // validator
                        false)) // unattended
                    .Returns("This Method should not get called!");

                // Act.
                string actual = command.GetDeploymentGroupTags();

                // Validate if --machinegrouptags parameter is working fine
                Assert.Equal("Test-MachineGrouptag1,Test-MachineGrouptag2", actual);

                // Validate Read Value should not get invoked.
                _promptManager.Verify(x => x.ReadValue(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<string>(), It.IsAny<Func<string, bool>>(), It.IsAny<bool>()), Times.Never);
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void ValidateCommands()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "badcommand" });

                // Assert.
                Assert.True(command.Validate().Contains("badcommand"));
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void ValidateFlags()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--badflag" });

                // Assert.
                Assert.True(command.Validate().Contains("badflag"));
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void ValidateArgs()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(hc, args: new string[] { "--badargname", "bad arg value" });

                // Assert.
                Assert.True(command.Validate().Contains("badargname"));
            }
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", nameof(CommandSettings))]
        public void ValidateGoodCommandline()
        {
            using (TestHostContext hc = CreateTestContext())
            {
                // Arrange.
                var command = new CommandSettings(
                    hc,
                    args: new string[]
                    {
                        "configure",
                        "--unattended",
                        "--agent",
                        "test agent"
                    });

                // Assert.
                Assert.True(command.Validate().Count == 0);
            }
        }

        // Creates a host context wired with the mocked prompt manager; one per test.
        private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
        {
            TestHostContext hc = new TestHostContext(this, testName);
            hc.SetSingleton<IPromptManager>(_promptManager.Object);
            return hc;
        }
    }
}
//------------------------------------------------------------------------------
// <copyright file="DMLibDataInfo.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Text;

    /// <summary>
    /// Describes a test data set: a root path plus a tree of directory and file nodes.
    /// </summary>
    public class DMLibDataInfo : IDataInfo
    {
        public DMLibDataInfo(string rootPath)
        {
            this.RootPath = rootPath;
            this.RootNode = new DirNode(string.Empty);
        }

        /// <summary>Gets the total number of file nodes in the whole tree.</summary>
        public int FileCount
        {
            get
            {
                return this.RootNode.FileNodeCountRecursive;
            }
        }

        public string RootPath { get; set; }

        public DirNode RootNode { get; set; }

        /// <summary>Enumerates every file node in the tree, depth-first.</summary>
        public IEnumerable<FileNode> EnumerateFileNodes()
        {
            return this.RootNode.EnumerateFileNodesRecursively();
        }

        IDataInfo IDataInfo.Clone()
        {
            return this.Clone();
        }

        /// <summary>Deep-copies the node tree; the root path string is shared.</summary>
        public DMLibDataInfo Clone()
        {
            return new DMLibDataInfo(this.RootPath)
            {
                RootNode = this.RootNode.Clone(),
            };
        }

        public override string ToString()
        {
            // DetailedInfo() is an extension method defined elsewhere in the test library.
            return this.DetailedInfo();
        }
    }

    /// <summary>
    /// Base class for nodes in the data tree: a name plus a link to the parent directory.
    /// </summary>
    public class DataInfoNode
    {
        public string Name { get; set; }

        public DirNode Parent { get; set; }

        /// <summary>
        /// Yields the path components from the root (exclusive) down to this node.
        /// The unnamed root yields nothing because its Parent is null.
        /// </summary>
        public IEnumerable<string> PathComponents
        {
            get
            {
                if (this.Parent != null)
                {
                    foreach (string component in Parent.PathComponents)
                    {
                        yield return component;
                    }

                    yield return this.Name;
                }
            }
        }
    }

    /// <summary>
    /// A file in the data tree, carrying the blob/file properties the tests compare.
    /// </summary>
    public class FileNode : DataInfoNode, IComparable<FileNode>
    {
        public FileNode(string name)
        {
            this.Name = name;
        }

        public int SnapshotsCount { get; set; }

        public string MD5 { get; set; }

        public string CacheControl { get; set; }

        public string ContentDisposition { get; set; }

        public string ContentEncoding { get; set; }

        public string ContentLanguage { get; set; }

        public IDictionary<string, string> Metadata { get; set; }

        public string ContentType { get; set; }

        public DateTime? LastModifiedTime { get; set; }

        public long SizeInByte { get; set; }

        public FileAttributes? FileAttr { get; set; }

        public string AbsolutePath { get; set; }

        /// <summary>Orders file nodes by name, case-insensitively.</summary>
        public int CompareTo(FileNode other)
        {
            return string.Compare(this.Name, other.Name, StringComparison.OrdinalIgnoreCase);
        }

        /// <summary>
        /// Copies this node (optionally renamed). Metadata is shallow-copied into a
        /// new dictionary; the Parent link is intentionally not copied — it is set
        /// when the clone is added to a DirNode.
        /// </summary>
        public FileNode Clone(string name = null)
        {
            // Clone metadata
            Dictionary<string, string> cloneMetaData = null;
            if (this.Metadata != null)
            {
                cloneMetaData = new Dictionary<string, string>(this.Metadata);
            }

            return new FileNode(name ?? this.Name)
            {
                SnapshotsCount = this.SnapshotsCount,
                CacheControl = this.CacheControl,
                ContentDisposition = this.ContentDisposition,
                ContentEncoding = this.ContentEncoding,
                ContentLanguage = this.ContentLanguage,
                ContentType = this.ContentType,
                MD5 = this.MD5,
                Metadata = cloneMetaData,
                LastModifiedTime = this.LastModifiedTime,
                SizeInByte = this.SizeInByte,
                FileAttr = this.FileAttr,
                AbsolutePath = this.AbsolutePath,
            };
        }
    }

    /// <summary>
    /// A directory in the data tree, holding named child directories and files.
    /// </summary>
    public class DirNode : DataInfoNode, IComparable<DirNode>
    {
        private Dictionary<string, DirNode> dirNodeMap;
        private Dictionary<string, FileNode> fileNodeMap;

        public DirNode(string name)
        {
            this.Name = name;
            this.dirNodeMap = new Dictionary<string, DirNode>();
            this.fileNodeMap = new Dictionary<string, FileNode>();
        }

        /// <summary>Gets the number of file nodes in this directory and all subdirectories.</summary>
        public int FileNodeCountRecursive
        {
            get
            {
                int totalCount = this.FileNodeCount;
                foreach (DirNode subDirNode in this.DirNodes)
                {
                    totalCount += subDirNode.FileNodeCountRecursive;
                }

                return totalCount;
            }
        }

        public int FileNodeCount
        {
            get
            {
                return fileNodeMap.Count;
            }
        }

        public int DirNodeCount
        {
            get
            {
                return dirNodeMap.Count;
            }
        }

        /// <summary>Gets the number of immediate subdirectories that contain at least one file.</summary>
        public int NonEmptyDirNodeCount
        {
            get
            {
                int count = 0;
                foreach (DirNode subDirNode in dirNodeMap.Values)
                {
                    if (!subDirNode.IsEmpty)
                    {
                        count++;
                    }
                }

                return count;
            }
        }

        /// <summary>True when neither this directory nor any subdirectory contains a file.</summary>
        public bool IsEmpty
        {
            get
            {
                if (this.FileNodeCount != 0)
                {
                    return false;
                }

                foreach (DirNode subDirNode in dirNodeMap.Values)
                {
                    if (!subDirNode.IsEmpty)
                    {
                        return false;
                    }
                }

                return true;
            }
        }

        public IEnumerable<DirNode> DirNodes
        {
            get
            {
                return dirNodeMap.Values;
            }
        }

        public IEnumerable<FileNode> FileNodes
        {
            get
            {
                return fileNodeMap.Values;
            }
        }

        /// <summary>Orders directory nodes by name, case-insensitively.</summary>
        public int CompareTo(DirNode other)
        {
            return string.Compare(this.Name, other.Name, StringComparison.OrdinalIgnoreCase);
        }

        /// <summary>Returns the named child file, or null when not present.</summary>
        public FileNode GetFileNode(string name)
        {
            FileNode result = null;
            if (this.fileNodeMap.TryGetValue(name, out result))
            {
                return result;
            }

            return null;
        }

        /// <summary>Returns the named child directory, or null when not present.</summary>
        public DirNode GetDirNode(string name)
        {
            DirNode result = null;
            if (this.dirNodeMap.TryGetValue(name, out result))
            {
                return result;
            }

            return null;
        }

        public void AddDirNode(DirNode dirNode)
        {
            dirNode.Parent = this;
            this.dirNodeMap.Add(dirNode.Name, dirNode);
        }

        public void AddFileNode(FileNode fileNode)
        {
            fileNode.Parent = this;
            this.fileNodeMap.Add(fileNode.Name, fileNode);
        }

        /// <summary>Removes and returns the named child file (detached from its parent), or null.</summary>
        public FileNode DeleteFileNode(string name)
        {
            FileNode fn = null;
            if (this.fileNodeMap.ContainsKey(name))
            {
                fn = this.fileNodeMap[name];
                fn.Parent = null;
                this.fileNodeMap.Remove(name);
            }

            return fn;
        }

        /// <summary>Removes and returns the named child directory (detached from its parent), or null.</summary>
        public DirNode DeleteDirNode(string name)
        {
            DirNode dn = null;
            if (this.dirNodeMap.ContainsKey(name))
            {
                dn = this.dirNodeMap[name];

                // Detach the parent link so the removed subtree does not report a
                // stale path via PathComponents — consistent with DeleteFileNode.
                dn.Parent = null;
                this.dirNodeMap.Remove(name);
            }

            return dn;
        }

        /// <summary>Deep-copies this directory subtree; clones are re-parented as they are added.</summary>
        public DirNode Clone()
        {
            DirNode newDirNode = new DirNode(this.Name);

            foreach (FileNode fileNode in this.FileNodes)
            {
                newDirNode.AddFileNode(fileNode.Clone());
            }

            foreach (DirNode dirNode in this.DirNodes)
            {
                newDirNode.AddDirNode(dirNode.Clone());
            }

            return newDirNode;
        }

        /// <summary>Enumerates files in this directory first, then files of each subdirectory.</summary>
        public IEnumerable<FileNode> EnumerateFileNodesRecursively()
        {
            foreach (var fileNode in this.FileNodes)
            {
                yield return fileNode;
            }

            foreach (DirNode subDirNode in this.DirNodes)
            {
                foreach (var fileNode in subDirNode.EnumerateFileNodesRecursively())
                {
                    yield return fileNode;
                }
            }
        }

        /// <summary>Enumerates subdirectories bottom-up (children before their parent).</summary>
        public IEnumerable<DirNode> EnumerateDirNodesRecursively()
        {
            foreach (DirNode subDirNode in this.DirNodes)
            {
                foreach (var dirNode in subDirNode.EnumerateDirNodesRecursively())
                {
                    yield return dirNode;
                }

                yield return subDirNode;
            }
        }

        /// <summary>
        /// for debug use, show DataInfo in tree format
        /// </summary>
        public void Display(int level)
        {
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < level; ++i)
            {
                sb.Append("--");
            }

            sb.Append(this.Name);
            Console.WriteLine(sb.ToString());

            foreach (FileNode fn in fileNodeMap.Values)
            {
                StringBuilder fileNode = new StringBuilder();
                for (int i = 0; i < level + 1; ++i)
                {
                    fileNode.Append("--");
                }

                fileNode.Append(fn.Name);
                Console.WriteLine(fileNode.ToString());
            }

            foreach (DirNode dn in dirNodeMap.Values)
            {
                dn.Display(level + 1);
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

// The native type of Vector3 is struct {float x,y,z} whose size is 12 bytes. RyuJit uses 16-byte
// register or stack location to store a Vector3 variable with the assumptions below. New testcases
// are added to check whether:
//
// - RyuJit correctly generates code and memory layout that matches the native side.
//
// - RyuJIt back-end assumptions about Vector3 types are satisfied.
//
//   - Assumption1: Vector3 type args passed in registers or on stack is rounded to POINTER_SIZE
//     and hence on 64-bit targets it can be read/written as if it were TYP_SIMD16.
//
//   - Assumption2: Vector3 args passed in registers (e.g. unix) or on stack have their upper
//     4-bytes being zero. Similarly Vector3 return type value returned from a method will have
//     its upper 4-bytes zeroed out

using System;
using System.Diagnostics;
using System.Numerics;
using System.Runtime.InteropServices;
using System.Text;

// Pair of Vector3 values; mirrors the native struct of the same layout.
public struct DT
{
    public Vector3 a;
    public Vector3 b;
};

// Mixed struct (int + vectors + fixed-size string) used to exercise marshaling
// of Vector3 fields embedded among differently-sized members.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
public struct ComplexDT
{
    public int iv;
    public DT vecs;
    [MarshalAs(UnmanagedType.ByValTStr, SizeConst=256)]
    public string str;
    public Vector3 v3;
};

// Managed -> native direction: managed code calls native entry points that read
// the marshaled Vector3 data and return checksums for verification.
class PInvokeTest
{
    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern int nativeCall_PInvoke_CheckVector3Size();

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern float nativeCall_PInvoke_Vector3Arg(
        int i,
        Vector3 v1,
        [MarshalAs(UnmanagedType.LPStr)] string s,
        Vector3 v2);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern float nativeCall_PInvoke_Vector3Arg_Unix(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem1,
        float f32_mem2,
        float f32_mem3);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern float nativeCall_PInvoke_Vector3Arg_Unix2(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem1,
        float f32_mem2,
        float f32_mem3,
        Vector3 v3f32_mem4,
        float f32_mem5);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern Vector3 nativeCall_PInvoke_Vector3Ret();

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern float nativeCall_PInvoke_Vector3Array(Vector3[] v_array);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern DT nativeCall_PInvoke_Vector3InStruct(DT d);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_PInvoke_Vector3InComplexStruct(ref ComplexDT cdt);

    // Runs all managed->native scenarios; returns false on the first failure.
    public static bool test()
    {
        // Expected return value is 12 bytes.
        if (nativeCall_PInvoke_CheckVector3Size() != 12)
        {
            Console.WriteLine("The size of native Vector3 type is not 12 bytes");
            return false;
        }

        // Argument passing test.
        // The native code accesses only 12 bytes for each Vector object.
        {
            int iv = 123;
            Vector3 v1 = new Vector3(1,2,3);
            string str = "abcdefg";
            Vector3 v2 = new Vector3(10,11,12);

            // Expected return value = 1 + 2 + 3 + 10 + 11 + 12 = 39
            if (nativeCall_PInvoke_Vector3Arg(iv, v1, str, v2) != 39)
            {
                Console.Write("PInvoke Vector3Arg test failed\n");
                return false;
            }
        }

        // Argument passing test for Unix.
        // Few arguments are passed onto stack.
        {
            Vector3 v1 = new Vector3(1, 2, 3);
            Vector3 v2 = new Vector3(10, 20, 30);
            float f0 = 100, f1 = 101, f2 = 102, f3 = 103, f4 = 104, f5 = 105, f6 = 106, f7 = 107, f8 = 108;

            float sum = nativeCall_PInvoke_Vector3Arg_Unix(
                v1, // register
                f0, f1, f2, f3, f4, f5, // register
                f6, v2, // stack
                f7, f8); // stack

            if (sum != 1002)
            {
                Console.Write("PInvoke Vector3Arg_Unix test failed\n");
                return false;
            }
        }

        // Argument passing test for Unix.
        // Few arguments are passed onto stack.
        {
            Vector3 v1 = new Vector3(1, 2, 3);
            Vector3 v2 = new Vector3(4, 5, 6);
            Vector3 v3 = new Vector3(7, 8, 9);
            float f0 = 100, f1 = 101, f2 = 102, f3 = 103, f4 = 104, f5 = 105, f6 = 106, f7 = 107, f8 = 108, f9 = 109;

            float sum = nativeCall_PInvoke_Vector3Arg_Unix2(
                v1, // register
                f0, f1, f2, f3, f4, f5, // register
                f6, v2, // stack
                f7, f8, // stack
                v3, // stack
                f9); // stack

            if (sum != 1090)
            {
                Console.Write("PInvoke Vector3Arg_Unix2 test failed\n");
                return false;
            }
        }

        // NOTE(review): an earlier comment here said "JIT crashes with this testcase.
        // Disabled temporarily." — the block below is active, so confirm the crash
        // is resolved and drop this note.
        {
            Vector3 ret = nativeCall_PInvoke_Vector3Ret();

            // Expected return value = (1, 2, 3) dot (1, 2, 3) = 14
            float sum = Vector3.Dot(ret, ret);
            if (sum != 14)
            {
                Console.WriteLine("PInvoke Vector3Ret test failed");
                return false;
            }
        }

        // Array argument test.
        // Both the managed and native code assumes 12 bytes for each element.
        {
            Vector3[] v3_array = new Vector3[2];
            v3_array[0].X = 1;
            v3_array[0].Y = 2;
            v3_array[0].Z = 3;
            v3_array[1].X = 5;
            v3_array[1].Y = 6;
            v3_array[1].Z = 7;

            // Expected return value = 1 + 2 + 3 + 5 + 6 + 7 = 24
            if (nativeCall_PInvoke_Vector3Array(v3_array) != 24)
            {
                Console.WriteLine("PInvoke Vector3Array test failed");
                return false;
            }
        }

        // Structure pass and return test.
        // Both the managed and native side use 12 bytes for each Vector3 object.
        // Dot product makes sure that the backend assumption 1 and 2 are met.
        {
            DT data = new DT();
            data.a = new Vector3(1,2,3);
            data.b = new Vector3(5,6,7);
            DT ret = nativeCall_PInvoke_Vector3InStruct(data);

            // Expected return value = (2, 3, 4) dot (6, 7, 8) = 12 + 21 + 32 = 65
            float sum = Vector3.Dot(ret.a, ret.b);
            if (sum != 65)
            {
                Console.WriteLine("PInvoke Vector3InStruct test failed");
                return false;
            }
        }

        // Complex struct test
        // Dot product makes sure that the backend assumption 1 and 2 are met.
        {
            ComplexDT cdt = new ComplexDT();
            cdt.iv = 99;
            cdt.str = "arg_string";
            cdt.vecs.a = new Vector3(1,2,3);
            cdt.vecs.b = new Vector3(5,6,7);
            cdt.v3 = new Vector3(10, 20, 30);

            nativeCall_PInvoke_Vector3InComplexStruct(ref cdt);

            Console.WriteLine(" Managed ival: {0}", cdt.iv);
            Console.WriteLine(" Managed Vector3 v1: ({0} {1} {2})", cdt.vecs.a.X, cdt.vecs.a.Y, cdt.vecs.a.Z);
            Console.WriteLine(" Managed Vector3 v2: ({0} {1} {2})", cdt.vecs.b.X, cdt.vecs.b.Y, cdt.vecs.b.Z);
            Console.WriteLine(" Managed Vector3 v3: ({0} {1} {2})", cdt.v3.X, cdt.v3.Y, cdt.v3.Z);
            Console.WriteLine(" Managed string arg: {0}", cdt.str);

            // (2, 3, 4) dot (6, 7 , 8) = 12 + 21 + 32 = 65
            float t0 = Vector3.Dot(cdt.vecs.a, cdt.vecs.b);
            // (6, 7, 8) dot (11, 21, 31) = 66 + 147 + 248 = 461
            float t1 = Vector3.Dot(cdt.vecs.b, cdt.v3);
            // (11, 21, 31) dot (2, 3, 4) = 209
            float t2 = Vector3.Dot(cdt.v3, cdt.vecs.a);
            float sum = t0 + t1 + t2;
            Console.WriteLine(" Managed Sum = {0}", sum);

            if ((sum != 735) || (cdt.iv != 100) || (cdt.str.ToString() != "ret_string"))
            {
                Console.WriteLine("PInvoke Vector3InStruct test failed");
                return false;
            }
        }

        Console.WriteLine("All PInvoke testcases passed");
        return true;
    }
}

// Native -> managed direction: native code invokes managed callbacks through
// delegates; each callback validates the marshaled Vector3 data it receives
// and records the verdict in the static 'result' field.
class RPInvokeTest
{
    public delegate void CallBackDelegate_RPInvoke_Vector3Arg(
        int i,
        Vector3 v1,
        [MarshalAs(UnmanagedType.LPStr)] string s,
        Vector3 v2);

    public delegate void CallBackDelegate_RPInvoke_Vector3Arg_Unix(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem1,
        float f32_mem2,
        float f32_mem3);

    public delegate void CallBackDelegate_RPInvoke_Vector3Arg_Unix2(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem1,
        float f32_mem2,
        float f32_mem3,
        Vector3 v3f32_mem4,
        float f32_mem5);

    public delegate Vector3 CallBackDelegate_RPInvoke_Vector3Ret();

    public delegate void CallBackDelegate_RPInvoke_Vector3Array(
        [In, MarshalAs(UnmanagedType.LPArray, SizeParamIndex=1)] Vector3[] v,
        int size);

    public delegate void CallBackDelegate_RPInvoke_Vector3InStruct(
        DT v);

    public delegate void CallBackDelegate_RPInvoke_Vector3InComplexStruct(
        ref ComplexDT v);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_RPInvoke_Vector3Arg(
        CallBackDelegate_RPInvoke_Vector3Arg callBack);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_RPInvoke_Vector3Arg_Unix(
        CallBackDelegate_RPInvoke_Vector3Arg_Unix callBack);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_RPInvoke_Vector3Arg_Unix2(
        CallBackDelegate_RPInvoke_Vector3Arg_Unix2 callBack);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern bool nativeCall_RPInvoke_Vector3Ret(
        CallBackDelegate_RPInvoke_Vector3Ret callBack);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_RPInvoke_Vector3Array(
        CallBackDelegate_RPInvoke_Vector3Array callBack, int v);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern void nativeCall_RPInvoke_Vector3InStruct(
        CallBackDelegate_RPInvoke_Vector3InStruct callBack, int v);

    [DllImport(@"Vector3TestNative", CallingConvention = CallingConvention.StdCall)]
    public static extern bool nativeCall_RPInvoke_Vector3InComplexStruct(
        CallBackDelegate_RPInvoke_Vector3InComplexStruct callBack);

    // Set by each callback; read by test() after the native call returns.
    static bool result = false;
    // NOTE(review): x, y, z are never referenced in this file — candidates for removal.
    static float x,y,z;

    // Argument pass test
    // Test if the managed side correctly reads 12-byte Vector3 argument from the native side
    // and meet the backend assumption 1 and 2.
    static void callBack_RPInvoke_Vector3Arg(
        int i,
        Vector3 v1,
        [MarshalAs(UnmanagedType.LPStr)] string s,
        Vector3 v2)
    {
        // sum = (1, 2, 3) dot (1, 2, 3) = 14
        float sum0 = Vector3.Dot(v1, v1);
        // sum = (10, 20, 30) dot (10, 20, 30) = 1400
        float sum1 = Vector3.Dot(v2, v2);
        // sum = (10, 20, 30) dot (1, 2, 3) = 140
        float sum2 = Vector3.Dot(v2, v1);

        Console.WriteLine("callBack_RPInvoke_Vector3Arg:");
        Console.WriteLine(" iVal {0}", i);
        Console.WriteLine(" Sum0,1,2 = {0}, {1}, {2}", sum0, sum1, sum2);
        Console.WriteLine(" str {0}", s);

        result = (sum0 == 14) && (sum1 == 1400) && (sum2 == 140) && (s == "abcdefg") && (i == 123);
    }

    // Argument test for Unix
    // Some arguments are mapped onto stack
    static void callBack_RPInvoke_Vector3Arg_Unix(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem0,
        float f32_mem1,
        float f32_mem2)
    {
        // sum = (1, 2, 3) dot (1, 2, 3) = 14
        float sum0 = Vector3.Dot(v3f32_xmm0, v3f32_xmm0);
        // sum = (10, 20, 30) dot (10, 20, 30) = 1400
        float sum1 = Vector3.Dot(v3f32_mem0, v3f32_mem0);
        // sum = (1, 2, 3) dot (10, 20, 30) = 140
        float sum2 = Vector3.Dot(v3f32_xmm0, v3f32_mem0);
        // sum = 100 + 101 + 102 + 103 + 104 + 105 + 106 + 107 + 108 = 936
        float sum3 = f32_xmm2 + f32_xmm3 + f32_xmm4 + f32_xmm5 + f32_xmm6 + f32_xmm7
            + f32_mem0 + f32_mem1 + f32_mem2;

        Console.WriteLine("callBack_RPInvoke_Vector3Arg_Unix:");
        Console.WriteLine(" {0}, {1}, {2}", v3f32_xmm0.X, v3f32_xmm0.Y, v3f32_xmm0.Z);
        Console.WriteLine(" {0}, {1}, {2}", v3f32_mem0.X, v3f32_mem0.Y, v3f32_mem0.Z);
        Console.WriteLine(" Sum0,1,2,3 = {0}, {1}, {2}, {3}", sum0, sum1, sum2, sum3);

        result = (sum0 == 14) && (sum1 == 1400) && (sum2 == 140) && (sum3==936);
    }

    // Argument test for Unix
    // Some arguments are mapped onto stack
    static void callBack_RPInvoke_Vector3Arg_Unix2(
        Vector3 v3f32_xmm0,
        float f32_xmm2,
        float f32_xmm3,
        float f32_xmm4,
        float f32_xmm5,
        float f32_xmm6,
        float f32_xmm7,
        float f32_mem0,
        Vector3 v3f32_mem0,
        float f32_mem1,
        float f32_mem2,
        Vector3 v3f32_mem3,
        float f32_mem4)
    {
        // sum = (1, 2, 3) dot (1, 2, 3) = 14
        float sum0 = Vector3.Dot(v3f32_xmm0, v3f32_xmm0);
        // sum = (4, 5, 6) dot (4, 5, 6) = 77
        float sum1 = Vector3.Dot(v3f32_mem0, v3f32_mem0);
        // sum = (7, 8, 9) dot (7, 8, 9) = 194
        float sum2 = Vector3.Dot(v3f32_mem3, v3f32_mem3);
        // sum = (1, 2, 3) dot (4, 5, 6) = 32
        float sum3 = Vector3.Dot(v3f32_xmm0, v3f32_mem0);
        // sum = (4, 5, 6) dot (7, 8, 9) = 122
        float sum4 = Vector3.Dot(v3f32_mem0, v3f32_mem3);
        // sum = 100 + 101 + 102 + 103 + 104 + 105 + 106 + 107 + 108 + 109 = 1045
        float sum5 = f32_xmm2 + f32_xmm3 + f32_xmm4 + f32_xmm5 + f32_xmm6 + f32_xmm7
            + f32_mem0 + f32_mem1 + f32_mem2 + f32_mem4;

        Console.WriteLine("callBack_RPInvoke_Vector3Arg_Unix2:");
        Console.WriteLine(" {0}, {1}, {2}", v3f32_xmm0.X, v3f32_xmm0.Y, v3f32_xmm0.Z);
        Console.WriteLine(" {0}, {1}, {2}", v3f32_mem0.X, v3f32_mem0.Y, v3f32_mem0.Z);
        Console.WriteLine(" {0}, {1}, {2}", v3f32_mem3.X, v3f32_mem3.Y, v3f32_mem3.Z);
        Console.WriteLine(" Sum0,1,2,3,4,5 = {0}, {1}, {2}, {3}, {4}, {5}", sum0, sum1, sum2, sum3, sum4, sum5);

        result = (sum0 == 14) && (sum1 == 77) && (sum2 == 194)
            && (sum3 == 32) && (sum4 == 122) && (sum5 == 1045);
    }

    // Return test.
    static Vector3 callBack_RPInvoke_Vector3Ret()
    {
        Vector3 tmp = new Vector3(1, 2, 3);
        return tmp;
    }

    // Test if the managed side correctly reads an array of 12-byte Vector3 elements
    // from the native side and meets the backend assumptions.
static void callBack_RPInvoke_Vector3Array(
        [In, MarshalAs(UnmanagedType.LPArray, SizeParamIndex=1)] Vector3[] v, int size)
    {
        // sum0 = (2,3,4) dot (2,3,4) = 4 + 9 + 16 = 29
        float sum0 = Vector3.Dot(v[0], v[0]);
        // sum1 = (11,21,31) dot (11,21,31) = 121 + 441 + 961 = 1523
        float sum1 = Vector3.Dot(v[1], v[1]);
        // sum2 = (11,21,31) dot (2,3,4) = 22 + 63 + 124 = 209
        float sum2 = Vector3.Dot(v[0], v[1]);

        Console.WriteLine("callBack_RPInvoke_Vector3Array:");
        Console.WriteLine(" Sum0 = {0} Sum1 = {1} Sum2 = {2}", sum0, sum1, sum2);

        result = (sum0 == 29) && (sum1 == 1523) && (sum2 == 209);
    }

    // Verifies that a DT struct marshaled from native code carries both Vector3
    // fields intact (backend assumptions 1 and 2).
    static void callBack_RPInvoke_Vector3InStruct(DT v)
    {
        // sum0 = (2,3,4) dot (2,3,4) = 29
        float sum0 = Vector3.Dot(v.a, v.a);
        // sum1 = (11,21,31) dot (11,21,31) = 121 + 441 + 961 = 1523
        float sum1 = Vector3.Dot(v.b, v.b);
        // sum2 = (2,3,4) dot (11,21,31) = 209
        float sum2 = Vector3.Dot(v.a, v.b);

        Console.WriteLine("callBack_RPInvoke_Vector3InStruct:");
        Console.WriteLine(" Sum0 = {0} Sum1 = {1} Sum2 = {2}", sum0, sum1, sum2);

        // BUG FIX: was "(sum0 == 29) && (sum1 == 1523) == (sum2 == 209)".
        // '==' binds tighter than '&&', so that compared two booleans instead of
        // requiring all three sums to match; it only passed by accident.
        result = (sum0 == 29) && (sum1 == 1523) && (sum2 == 209);
    }

    // Round-trips a ComplexDT by reference: prints the incoming values,
    // increments every numeric field, and replaces the string, so the native
    // caller can verify the writes landed in the right 12-byte slots.
    static void callBack_RPInvoke_Vector3InComplexStruct(ref ComplexDT arg)
    {
        Console.WriteLine("callBack_RPInvoke_Vector3InComplexStruct");
        Console.WriteLine(" Arg ival: {0}", arg.iv);
        Console.WriteLine(" Arg Vector3 v1: ({0} {1} {2})", arg.vecs.a.X, arg.vecs.a.Y, arg.vecs.a.Z);
        Console.WriteLine(" Arg Vector3 v2: ({0} {1} {2})", arg.vecs.b.X, arg.vecs.b.Y, arg.vecs.b.Z);
        Console.WriteLine(" Arg Vector3 v3: ({0} {1} {2})", arg.v3.X, arg.v3.Y, arg.v3.Z);
        Console.WriteLine(" Arg string arg: {0}", arg.str);

        // Bump every component by one so the native side can detect the update.
        arg.vecs.a.X = arg.vecs.a.X + 1;
        arg.vecs.a.Y = arg.vecs.a.Y + 1;
        arg.vecs.a.Z = arg.vecs.a.Z + 1;
        arg.vecs.b.X = arg.vecs.b.X + 1;
        arg.vecs.b.Y = arg.vecs.b.Y + 1;
        arg.vecs.b.Z = arg.vecs.b.Z + 1;
        arg.v3.X = arg.v3.X + 1;
        arg.v3.Y = arg.v3.Y + 1;
        arg.v3.Z = arg.v3.Z + 1;
        arg.iv = arg.iv + 1;
        arg.str = "ret_string";

        Console.WriteLine(" Return ival: {0}", arg.iv);
        Console.WriteLine(" Return Vector3 v1: ({0} {1} {2})", arg.vecs.a.X, arg.vecs.a.Y, arg.vecs.a.Z);
        Console.WriteLine(" Return Vector3 v2: ({0} {1} {2})", arg.vecs.b.X, arg.vecs.b.Y, arg.vecs.b.Z);
        Console.WriteLine(" Return Vector3 v3: ({0} {1} {2})", arg.v3.X, arg.v3.Y, arg.v3.Z);
        Console.WriteLine(" Return string arg: {0}", arg.str);

        float sum = arg.vecs.a.X + arg.vecs.a.Y + arg.vecs.a.Z
            + arg.vecs.b.X + arg.vecs.b.Y + arg.vecs.b.Z
            + arg.v3.X + arg.v3.Y + arg.v3.Z;
        Console.WriteLine(" Sum of all return float scalar values = {0}", sum);
    }

    // Runs all native->managed scenarios; returns false on the first failure.
    public static bool test()
    {
        // Element count handed to the array/struct entry points.
        // (Renamed from 'x', which shadowed the unused static field of the same name.)
        int size = 1;

        nativeCall_RPInvoke_Vector3Arg(callBack_RPInvoke_Vector3Arg);
        if (!result)
        {
            Console.WriteLine("RPInvoke Vector3Arg test failed");
            return false;
        }

        nativeCall_RPInvoke_Vector3Arg_Unix(callBack_RPInvoke_Vector3Arg_Unix);
        if (!result)
        {
            Console.WriteLine("RPInvoke Vector3Arg_Unix test failed");
            return false;
        }

        nativeCall_RPInvoke_Vector3Arg_Unix2(callBack_RPInvoke_Vector3Arg_Unix2);
        if (!result)
        {
            Console.WriteLine("RPInvoke Vector3Arg_Unix2 test failed");
            return false;
        }

        result = nativeCall_RPInvoke_Vector3Ret(callBack_RPInvoke_Vector3Ret);
        if (!result)
        {
            Console.WriteLine("RPInvoke Vector3Ret test failed");
            return false;
        }

        nativeCall_RPInvoke_Vector3Array(callBack_RPInvoke_Vector3Array, size);
        if (!result)
        {
            Console.WriteLine("RPInvoke Vector3Array test failed");
            return false;
        }

        nativeCall_RPInvoke_Vector3InStruct(callBack_RPInvoke_Vector3InStruct, size);
        if (!result)
        {
            Console.WriteLine("RPInvoke Vector3InStruct test failed");
            return false;
        }

        result = nativeCall_RPInvoke_Vector3InComplexStruct(callBack_RPInvoke_Vector3InComplexStruct);
        if (!result)
        {
            Console.WriteLine("RPInvoke Vector3InComplexStruct test failed");
            return false;
        }

        Console.WriteLine("All RPInvoke testcases passed");
        return true;
    }
}

// Entry point: 100 = success, 101 = failure (standard CoreCLR test convention).
class Test
{
    public static int Main()
    {
        if (!PInvokeTest.test())
        {
            return 101;
        }

        if (!RPInvokeTest.test())
        {
            return 101;
        }

        return 100;
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Xunit;

namespace System.Reflection.Tests
{
    // MemberInfo.Name Property
    // Checks that Name reports the declared identifier for every member kind,
    // both for members in the executing module and in referenced modules.
    public class ReflectionMemberInfoName
    {
        // PosTest1: type in current executing module
        [Fact]
        public void PosTest1()
        {
            Assert.Equal("ReflectionMemberInfoName", typeof(ReflectionMemberInfoName).Name);
        }

        // PosTest2: nested type in current executing module
        [Fact]
        public void PosTest2()
        {
            Assert.Equal("TestClass1", typeof(TestClass1).Name);
        }

        // PosTest3: private instance field in current executing module
        [Fact]
        public void PosTest3()
        {
            MemberInfo member = typeof(TestClass1).GetField("_data1", BindingFlags.NonPublic | BindingFlags.Instance);
            Assert.Equal("_data1", member.Name);
        }

        // PosTest4: private static field in current executing module
        [Fact]
        public void PosTest4()
        {
            MemberInfo member = typeof(TestClass1).GetField("s_count", BindingFlags.NonPublic | BindingFlags.Static);
            Assert.Equal("s_count", member.Name);
        }

        // PosTest5: public instance method in current executing module
        [Fact]
        public void PosTest5()
        {
            MemberInfo member = typeof(TestClass1).GetMethod("Do", BindingFlags.Public | BindingFlags.Instance);
            Assert.Equal("Do", member.Name);
        }

        // PosTest6: public static method in current executing module
        // (the compiler-generated getter of a static property)
        [Fact]
        public void PosTest6()
        {
            PropertyInfo property = typeof(TestClass1).GetProperty("InstanceCount", BindingFlags.Static | BindingFlags.Public);
            MemberInfo getter = property.GetGetMethod();
            Assert.Equal("get_InstanceCount", getter.Name);
        }

        // PosTest7: public static property in current executing module
        [Fact]
        public void PosTest7()
        {
            MemberInfo member = typeof(TestClass1).GetProperty("InstanceCount", BindingFlags.Static | BindingFlags.Public);
            Assert.Equal("InstanceCount", member.Name);
        }

        // PosTest8: public instance property in current executing module
        [Fact]
        public void PosTest8()
        {
            MemberInfo member = typeof(TestClass1).GetProperty("Data1", BindingFlags.Instance | BindingFlags.Public);
            Assert.Equal("Data1", member.Name);
        }

        // PosTest9: public instance event in current executing module
        [Fact]
        public void PosTest9()
        {
            MemberInfo member = typeof(TestButton).GetEvent("Click", BindingFlags.Instance | BindingFlags.Public);
            Assert.Equal("Click", member.Name);
        }

        // PosTest10: public constructor in current executing module
        [Fact]
        public void PosTest10()
        {
            MemberInfo member = typeof(TestClass1).GetConstructor(new[] { typeof(int) });
            Assert.Equal(".ctor", member.Name);
        }

        // PosTest11: type in referenced module
        [Fact]
        public void PosTest11()
        {
            Assert.Equal("String", typeof(String).Name);
        }

        // PosTest12: method in referenced module
        [Fact]
        public void PosTest12()
        {
            MemberInfo member = typeof(String).GetMethod("CompareOrdinal", new[] { typeof(String), typeof(String) });
            Assert.Equal("CompareOrdinal", member.Name);
        }

        // PosTest13: property in referenced module
        [Fact]
        public void PosTest13()
        {
            MemberInfo member = typeof(String).GetProperty("Length");
            Assert.Equal("Length", member.Name);
        }

        // PosTest14: constructor in referenced module
        [Fact]
        public void PosTest14()
        {
            MemberInfo member = typeof(String).GetConstructor(new[] { typeof(char), typeof(int) });
            Assert.Equal(".ctor", member.Name);
        }

        // Reflection target: exercises fields, properties, methods and constructors.
        // Member names/signatures must stay exactly as the tests above expect.
        private class TestClass1
        {
            private static int s_count = 0;
            private int _data1;

            // Default constructor.
            public TestClass1()
            {
                ++s_count;
            }

            public TestClass1(int data1)
            {
                ++s_count;
                _data1 = data1;
            }

            public static int InstanceCount
            {
                get { return s_count; }
            }

            public int Data1
            {
                get { return _data1; }
                set { _data1 = value; }
            }

            public void Do()
            {
            }
        }

        // Reflection target: exercises event lookup.
        private class TestButton
        {
            public event EventHandler Click;

            protected void OnClick(EventArgs e)
            {
                if (null != Click)
                {
                    Click(this, e);
                }
            }
        }
    }
}
#region Copyright notice and license // Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#endregion

using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Text;
#if !NET35
using System.Threading;
using System.Threading.Tasks;
#endif
#if NET35
using Google.Protobuf.Compatibility;
#endif

namespace Google.Protobuf
{
    /// <summary>
    /// Immutable array of bytes.
    /// </summary>
    public sealed class ByteString : IEnumerable<byte>, IEquatable<ByteString>
    {
        // Shared singleton returned by Empty; safe because instances are immutable.
        private static readonly ByteString empty = new ByteString(new byte[0]);

        // Backing data. Never mutated after construction and never handed out
        // directly except via the documented Unsafe/internal paths below.
        private readonly byte[] bytes;

        /// <summary>
        /// Unsafe operations that can cause IO Failure and/or other catastrophic side-effects.
        /// </summary>
        internal static class Unsafe
        {
            /// <summary>
            /// Constructs a new ByteString from the given byte array. The array is
            /// *not* copied, and must not be modified after this constructor is called.
            /// </summary>
            internal static ByteString FromBytes(byte[] bytes)
            {
                return new ByteString(bytes);
            }
        }

        /// <summary>
        /// Internal use only. Ensure that the provided array is not mutated and belongs to this instance.
        /// </summary>
        internal static ByteString AttachBytes(byte[] bytes)
        {
            return new ByteString(bytes);
        }

        /// <summary>
        /// Constructs a new ByteString from the given byte array. The array is
        /// *not* copied, and must not be modified after this constructor is called.
        /// </summary>
        private ByteString(byte[] bytes)
        {
            this.bytes = bytes;
        }

        /// <summary>
        /// Returns an empty ByteString.
        /// </summary>
        public static ByteString Empty
        {
            get { return empty; }
        }

        /// <summary>
        /// Returns the length of this ByteString in bytes.
        /// </summary>
        public int Length
        {
            get { return bytes.Length; }
        }

        /// <summary>
        /// Returns <c>true</c> if this byte string is empty, <c>false</c> otherwise.
        /// </summary>
        public bool IsEmpty
        {
            get { return Length == 0; }
        }

#if NETSTANDARD2_0
        /// <summary>
        /// Provides read-only access to the data of this <see cref="ByteString"/>.
        /// No data is copied so this is the most efficient way of accessing.
        /// </summary>
        public ReadOnlySpan<byte> Span => new ReadOnlySpan<byte>(bytes);
#endif

        /// <summary>
        /// Converts this <see cref="ByteString"/> into a byte array.
        /// </summary>
        /// <remarks>The data is copied - changes to the returned array will not be reflected in this <c>ByteString</c>.</remarks>
        /// <returns>A byte array with the same data as this <c>ByteString</c>.</returns>
        public byte[] ToByteArray()
        {
            return (byte[]) bytes.Clone();
        }

        /// <summary>
        /// Converts this <see cref="ByteString"/> into a standard base64 representation.
        /// </summary>
        /// <returns>A base64 representation of this <c>ByteString</c>.</returns>
        public string ToBase64()
        {
            return Convert.ToBase64String(bytes);
        }

        /// <summary>
        /// Constructs a <see cref="ByteString" /> from the Base64 Encoded String.
        /// </summary>
        public static ByteString FromBase64(string bytes)
        {
            // By handling the empty string explicitly, we not only optimize but we fix a
            // problem on CF 2.0. See issue 61 for details.
            return bytes == "" ? Empty : new ByteString(Convert.FromBase64String(bytes));
        }

        /// <summary>
        /// Constructs a <see cref="ByteString"/> from data in the given stream, synchronously.
        /// </summary>
        /// <remarks>If successful, <paramref name="stream"/> will be read completely, from the position
        /// at the start of the call.</remarks>
        /// <param name="stream">The stream to copy into a ByteString.</param>
        /// <returns>A ByteString with content read from the given stream.</returns>
        public static ByteString FromStream(Stream stream)
        {
            ProtoPreconditions.CheckNotNull(stream, nameof(stream));
            // Presize from the remaining stream length when the stream is seekable;
            // otherwise fall back to MemoryStream's default growth.
            int capacity = stream.CanSeek ? checked((int) (stream.Length - stream.Position)) : 0;
            var memoryStream = new MemoryStream(capacity);
            stream.CopyTo(memoryStream);
#if NETSTANDARD1_0 || NETSTANDARD2_0
            byte[] bytes = memoryStream.ToArray();
#else
            // Avoid an extra copy if we can.
            byte[] bytes = memoryStream.Length == memoryStream.Capacity
                ? memoryStream.GetBuffer()
                : memoryStream.ToArray();
#endif
            return AttachBytes(bytes);
        }

#if !NET35
        /// <summary>
        /// Constructs a <see cref="ByteString"/> from data in the given stream, asynchronously.
        /// </summary>
        /// <remarks>If successful, <paramref name="stream"/> will be read completely, from the position
        /// at the start of the call.</remarks>
        /// <param name="stream">The stream to copy into a ByteString.</param>
        /// <param name="cancellationToken">The cancellation token to use when reading from the stream, if any.</param>
        /// <returns>A ByteString with content read from the given stream.</returns>
        public async static Task<ByteString> FromStreamAsync(Stream stream, CancellationToken cancellationToken = default(CancellationToken))
        {
            ProtoPreconditions.CheckNotNull(stream, nameof(stream));
            int capacity = stream.CanSeek ? checked((int) (stream.Length - stream.Position)) : 0;
            var memoryStream = new MemoryStream(capacity);
            // We have to specify the buffer size here, as there's no overload accepting the cancellation token
            // alone. But it's documented to use 81920 by default if not specified.
            await stream.CopyToAsync(memoryStream, 81920, cancellationToken);
#if NETSTANDARD1_0 || NETSTANDARD2_0
            byte[] bytes = memoryStream.ToArray();
#else
            // Avoid an extra copy if we can.
            byte[] bytes = memoryStream.Length == memoryStream.Capacity
                ? memoryStream.GetBuffer()
                : memoryStream.ToArray();
#endif
            return AttachBytes(bytes);
        }
#endif

        /// <summary>
        /// Constructs a <see cref="ByteString" /> from the given array. The contents
        /// are copied, so further modifications to the array will not
        /// be reflected in the returned ByteString.
        /// This method can also be invoked in <c>ByteString.CopyFrom(0xaa, 0xbb, ...)</c> form
        /// which is primarily useful for testing.
        /// </summary>
        public static ByteString CopyFrom(params byte[] bytes)
        {
            return new ByteString((byte[]) bytes.Clone());
        }

        /// <summary>
        /// Constructs a <see cref="ByteString" /> from a portion of a byte array.
        /// </summary>
        public static ByteString CopyFrom(byte[] bytes, int offset, int count)
        {
            byte[] portion = new byte[count];
            ByteArray.Copy(bytes, offset, portion, 0, count);
            return new ByteString(portion);
        }

#if NETSTANDARD2_0
        /// <summary>
        /// Constructs a <see cref="ByteString" /> from a read only span. The contents
        /// are copied, so further modifications to the span will not
        /// be reflected in the returned <see cref="ByteString" />.
        /// </summary>
        public static ByteString CopyFrom(ReadOnlySpan<byte> bytes)
        {
            return new ByteString(bytes.ToArray());
        }
#endif

        /// <summary>
        /// Creates a new <see cref="ByteString" /> by encoding the specified text with
        /// the given encoding.
        /// </summary>
        public static ByteString CopyFrom(string text, Encoding encoding)
        {
            return new ByteString(encoding.GetBytes(text));
        }

        /// <summary>
        /// Creates a new <see cref="ByteString" /> by encoding the specified text in UTF-8.
        /// </summary>
        public static ByteString CopyFromUtf8(string text)
        {
            return CopyFrom(text, Encoding.UTF8);
        }

        /// <summary>
        /// Returns the byte at the given index.
        /// </summary>
        public byte this[int index]
        {
            get { return bytes[index]; }
        }

        /// <summary>
        /// Converts this <see cref="ByteString"/> into a string by applying the given encoding.
        /// </summary>
        /// <remarks>
        /// This method should only be used to convert binary data which was the result of encoding
        /// text with the given encoding.
        /// </remarks>
        /// <param name="encoding">The encoding to use to decode the binary data into text.</param>
        /// <returns>The result of decoding the binary data with the given encoding.</returns>
        public string ToString(Encoding encoding)
        {
            return encoding.GetString(bytes, 0, bytes.Length);
        }

        /// <summary>
        /// Converts this <see cref="ByteString"/> into a string by applying the UTF-8 encoding.
        /// </summary>
        /// <remarks>
        /// This method should only be used to convert binary data which was the result of encoding
        /// text with UTF-8.
        /// </remarks>
        /// <returns>The result of decoding the binary data with the UTF-8 encoding.</returns>
        public string ToStringUtf8()
        {
            return ToString(Encoding.UTF8);
        }

        /// <summary>
        /// Returns an iterator over the bytes in this <see cref="ByteString"/>.
        /// </summary>
        /// <returns>An iterator over the bytes in this object.</returns>
        public IEnumerator<byte> GetEnumerator()
        {
            return ((IEnumerable<byte>) bytes).GetEnumerator();
        }

        /// <summary>
        /// Returns an iterator over the bytes in this <see cref="ByteString"/>.
        /// </summary>
        /// <returns>An iterator over the bytes in this object.</returns>
        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        /// <summary>
        /// Creates a CodedInputStream from this ByteString's data.
        /// </summary>
        public CodedInputStream CreateCodedInput()
        {
            // We trust CodedInputStream not to reveal the provided byte array or modify it
            return new CodedInputStream(bytes);
        }

        /// <summary>
        /// Compares two byte strings for equality.
        /// </summary>
        /// <param name="lhs">The first byte string to compare.</param>
        /// <param name="rhs">The second byte string to compare.</param>
        /// <returns><c>true</c> if the byte strings are equal; false otherwise.</returns>
        public static bool operator ==(ByteString lhs, ByteString rhs)
        {
            // ReferenceEquals handles both operands null (equal) and same instance.
            if (ReferenceEquals(lhs, rhs))
            {
                return true;
            }
            if (ReferenceEquals(lhs, null) || ReferenceEquals(rhs, null))
            {
                return false;
            }
            if (lhs.bytes.Length != rhs.bytes.Length)
            {
                return false;
            }
            for (int i = 0; i < lhs.Length; i++)
            {
                if (rhs.bytes[i] != lhs.bytes[i])
                {
                    return false;
                }
            }
            return true;
        }

        /// <summary>
        /// Compares two byte strings for inequality.
        /// </summary>
        /// <param name="lhs">The first byte string to compare.</param>
        /// <param name="rhs">The second byte string to compare.</param>
        /// <returns><c>false</c> if the byte strings are equal; true otherwise.</returns>
        public static bool operator !=(ByteString lhs, ByteString rhs)
        {
            return !(lhs == rhs);
        }

        /// <summary>
        /// Compares this byte string with another object.
        /// </summary>
        /// <param name="obj">The object to compare this with.</param>
        /// <returns><c>true</c> if <paramref name="obj"/> refers to an equal <see cref="ByteString"/>; <c>false</c> otherwise.</returns>
        public override bool Equals(object obj)
        {
            return this == (obj as ByteString);
        }

        /// <summary>
        /// Returns a hash code for this object. Two equal byte strings
        /// will return the same hash code.
        /// </summary>
        /// <returns>A hash code for this object.</returns>
        public override int GetHashCode()
        {
            // Simple 23/31 polynomial hash over the contents; content-based so it
            // agrees with the value-equality defined by operator ==.
            int ret = 23;
            foreach (byte b in bytes)
            {
                ret = (ret * 31) + b;
            }
            return ret;
        }

        /// <summary>
        /// Compares this byte string with another.
        /// </summary>
        /// <param name="other">The <see cref="ByteString"/> to compare this with.</param>
        /// <returns><c>true</c> if <paramref name="other"/> refers to an equal byte string; <c>false</c> otherwise.</returns>
        public bool Equals(ByteString other)
        {
            return this == other;
        }

        /// <summary>
        /// Used internally by CodedOutputStream to avoid creating a copy for the write
        /// </summary>
        internal void WriteRawBytesTo(CodedOutputStream outputStream)
        {
            outputStream.WriteRawBytes(bytes, 0, bytes.Length);
        }

        /// <summary>
        /// Copies the entire byte array to the destination array provided at the offset specified.
        /// </summary>
        public void CopyTo(byte[] array, int position)
        {
            ByteArray.Copy(bytes, 0, array, position, bytes.Length);
        }

        /// <summary>
        /// Writes the entire byte array to the provided stream
        /// </summary>
        public void WriteTo(Stream outputStream)
        {
            outputStream.Write(bytes, 0, bytes.Length);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.Net.Sockets;

namespace System.Net
{
    /// <devdoc>
    ///   <para>
    ///     Provides an internet protocol (IP) address.
    ///   </para>
    /// </devdoc>
    public class IPAddress
    {
        // Well-known IPv4 addresses, stored in network byte order within the long
        // (e.g. Loopback 0x0100007F is 127.0.0.1 little-endian).
        public static readonly IPAddress Any = new IPAddress(0x0000000000000000);
        public static readonly IPAddress Loopback = new IPAddress(0x000000000100007F);
        public static readonly IPAddress Broadcast = new IPAddress(0x00000000FFFFFFFF);
        public static readonly IPAddress None = Broadcast;

        // Mask for the first octet (127.x.x.x loopback check) in the internal representation.
        internal const long LoopbackMask = 0x00000000000000FF;

        public static readonly IPAddress IPv6Any = new IPAddress(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, 0);
        public static readonly IPAddress IPv6Loopback = new IPAddress(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 }, 0);
        public static readonly IPAddress IPv6None = new IPAddress(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, 0);

        /// <summary>
        /// For IPv4 addresses, this field stores the Address.
        /// For IPv6 addresses, this field stores the ScopeId.
        /// Instead of accessing this field directly, use the <see cref="PrivateAddress"/> or <see cref="PrivateScopeId"/> properties.
        /// </summary>
        private uint _addressOrScopeId;

        /// <summary>
        /// This field is only used for IPv6 addresses. A null value indicates that this instance is an IPv4 address.
        /// </summary>
        private readonly ushort[] _numbers;

        /// <summary>
        /// A lazily initialized cache of the result of calling <see cref="ToString"/>.
        /// </summary>
        private string _toString;

        /// <summary>
        /// This field is only used for IPv6 addresses. A lazily initialized cache of the <see cref="GetHashCode"/> value.
        /// </summary>
        private int _hashCode;

        // Maximum length of address literals (potentially including a port number)
        // generated by any address-to-string conversion routine. This length can
        // be used when declaring buffers used with getnameinfo, WSAAddressToString,
        // inet_ntoa, etc. We just provide one define, rather than one per api,
        // to avoid confusion.
        //
        // The totals are derived from the following data:
        //  15: IPv4 address
        //  45: IPv6 address including embedded IPv4 address
        //  11: Scope Id
        //   2: Brackets around IPv6 address when port is present
        //   6: Port (including colon)
        //   1: Terminating null byte
        internal const int NumberOfLabels = IPAddressParserStatics.IPv6AddressBytes / 2;

        // IPv4 and IPv6 discriminate on whether the ushort[] label array was allocated.
        private bool IsIPv4
        {
            get { return _numbers == null; }
        }

        private bool IsIPv6
        {
            get { return _numbers != null; }
        }

        // Typed accessor over _addressOrScopeId; only valid for IPv4 instances.
        private uint PrivateAddress
        {
            get
            {
                Debug.Assert(IsIPv4);
                return _addressOrScopeId;
            }
            set
            {
                Debug.Assert(IsIPv4);
                _addressOrScopeId = value;
            }
        }

        // Typed accessor over _addressOrScopeId; only valid for IPv6 instances.
        private uint PrivateScopeId
        {
            get
            {
                Debug.Assert(IsIPv6);
                return _addressOrScopeId;
            }
            set
            {
                Debug.Assert(IsIPv6);
                _addressOrScopeId = value;
            }
        }

        /// <devdoc>
        ///   <para>
        ///     Initializes a new instance of the <see cref='System.Net.IPAddress'/>
        ///     class with the specified address.
        ///   </para>
        /// </devdoc>
        public IPAddress(long newAddress)
        {
            if (newAddress < 0 || newAddress > 0x00000000FFFFFFFF)
            {
                throw new ArgumentOutOfRangeException("newAddress");
            }
            PrivateAddress = (uint)newAddress;
        }

        /// <devdoc>
        ///   <para>
        ///     Constructor for an IPv6 Address with a specified Scope.
        ///   </para>
        /// </devdoc>
        public IPAddress(byte[] address, long scopeid)
        {
            if (address == null)
            {
                throw new ArgumentNullException("address");
            }
            if (address.Length != IPAddressParserStatics.IPv6AddressBytes)
            {
                throw new ArgumentException(SR.dns_bad_ip_address, "address");
            }

            // Pack pairs of bytes into big-endian 16-bit labels.
            _numbers = new ushort[NumberOfLabels];
            for (int i = 0; i < NumberOfLabels; i++)
            {
                _numbers[i] = (ushort)(address[i * 2] * 256 + address[i * 2 + 1]);
            }

            // Consider: Since scope is only valid for link-local and site-local
            //           addresses we could implement some more robust checking here
            if (scopeid < 0 || scopeid > 0x00000000FFFFFFFF)
            {
                throw new ArgumentOutOfRangeException("scopeid");
            }

            PrivateScopeId = (uint)scopeid;
        }

        // Non-validating IPv6 constructor: adopts the label array directly (no copy).
        private IPAddress(ushort[] numbers, uint scopeid)
        {
            Debug.Assert(numbers != null);

            _numbers = numbers;
            PrivateScopeId = scopeid;
        }

        /// <devdoc>
        ///   <para>
        ///     Constructor for IPv4 and IPv6 Address.
        ///   </para>
        /// </devdoc>
        public IPAddress(byte[] address)
        {
            if (address == null)
            {
                throw new ArgumentNullException("address");
            }

            if (address.Length != IPAddressParserStatics.IPv4AddressBytes &&
                address.Length != IPAddressParserStatics.IPv6AddressBytes)
            {
                throw new ArgumentException(SR.dns_bad_ip_address, "address");
            }

            if (address.Length == IPAddressParserStatics.IPv4AddressBytes)
            {
                PrivateAddress = (uint)((address[3] << 24 | address[2] << 16 | address[1] << 8 | address[0]) & 0x0FFFFFFFF);
            }
            else
            {
                _numbers = new ushort[NumberOfLabels];

                for (int i = 0; i < NumberOfLabels; i++)
                {
                    _numbers[i] = (ushort)(address[i * 2] * 256 + address[i * 2 + 1]);
                }
            }
        }

        // We need this internally since we need to interface with winsock,
        // and winsock only understands Int32.
        internal IPAddress(int newAddress)
        {
            PrivateAddress = (uint)newAddress;
        }

        /// <devdoc>
        ///   <para>
        ///     Converts an IP address string to an <see cref='System.Net.IPAddress'/> instance.
        ///   </para>
        /// </devdoc>
        public static bool TryParse(string ipString, out IPAddress address)
        {
            address = IPAddressParser.Parse(ipString, true);
            return (address != null);
        }

        public static IPAddress Parse(string ipString)
        {
            return IPAddressParser.Parse(ipString, false);
        }

        /// <devdoc>
        ///   <para>
        ///     Provides a copy of the IPAddress internals as an array of bytes.
        ///   </para>
        /// </devdoc>
        public byte[] GetAddressBytes()
        {
            byte[] bytes;
            if (IsIPv6)
            {
                // Unpack big-endian 16-bit labels into bytes.
                bytes = new byte[NumberOfLabels * 2];

                int j = 0;
                for (int i = 0; i < NumberOfLabels; i++)
                {
                    bytes[j++] = (byte)((_numbers[i] >> 8) & 0xFF);
                    bytes[j++] = (byte)((_numbers[i]) & 0xFF);
                }
            }
            else
            {
                uint address = PrivateAddress;
                bytes = new byte[IPAddressParserStatics.IPv4AddressBytes];

                bytes[0] = (byte)(address);
                bytes[1] = (byte)(address >> 8);
                bytes[2] = (byte)(address >> 16);
                bytes[3] = (byte)(address >> 24);
            }
            return bytes;
        }

        public AddressFamily AddressFamily
        {
            get
            {
                return IsIPv4 ? AddressFamily.InterNetwork : AddressFamily.InterNetworkV6;
            }
        }

        // When IPv6 support was added to the .NET Framework, the public Address property was marked as Obsolete.
        // The public obsolete Address property has not been carried forward in .NET Core, but remains here as
        // internal to allow internal types that understand IPv4 to still access it without obsolete warnings.
        internal long Address
        {
            get
            {
                return PrivateAddress;
            }
        }

        /// <devdoc>
        ///   <para>
        ///     IPv6 Scope identifier. This is really a uint32, but that isn't CLS compliant
        ///   </para>
        /// </devdoc>
        public long ScopeId
        {
            get
            {
                // Not valid for IPv4 addresses
                if (IsIPv4)
                {
                    throw new SocketException(SocketError.OperationNotSupported);
                }

                return PrivateScopeId;
            }
            set
            {
                // Not valid for IPv4 addresses
                if (IsIPv4)
                {
                    throw new SocketException(SocketError.OperationNotSupported);
                }

                // Consider: Since scope is only valid for link-local and site-local
                //           addresses we could implement some more robust checking here
                if (value < 0 || value > 0x00000000FFFFFFFF)
                {
                    throw new ArgumentOutOfRangeException("value");
                }

                PrivateScopeId = (uint)value;
            }
        }

        /// <devdoc>
        ///   <para>
        ///     Converts the Internet address to either standard dotted quad format
        ///     or standard IPv6 representation.
        ///   </para>
        /// </devdoc>
        public override string ToString()
        {
            // NOTE: benign race — concurrent callers may both compute the string;
            // the result is identical, so the last writer wins harmlessly.
            if (_toString == null)
            {
                _toString = IsIPv4 ?
                    IPAddressParser.IPv4AddressToString(GetAddressBytes()) :
                    IPAddressParser.IPv6AddressToString(GetAddressBytes(), PrivateScopeId);
            }

            return _toString;
        }

        public static long HostToNetworkOrder(long host)
        {
#if BIGENDIAN
            return host;
#else
            return (((long)HostToNetworkOrder((int)host) & 0xFFFFFFFF) << 32)
                    | ((long)HostToNetworkOrder((int)(host >> 32)) & 0xFFFFFFFF);
#endif
        }

        public static int HostToNetworkOrder(int host)
        {
#if BIGENDIAN
            return host;
#else
            return (((int)HostToNetworkOrder((short)host) & 0xFFFF) << 16)
                    | ((int)HostToNetworkOrder((short)(host >> 16)) & 0xFFFF);
#endif
        }

        public static short HostToNetworkOrder(short host)
        {
#if BIGENDIAN
            return host;
#else
            return (short)((((int)host & 0xFF) << 8) | (int)((host >> 8) & 0xFF));
#endif
        }

        // Network-to-host is the same byte swap as host-to-network.
        public static long NetworkToHostOrder(long network)
        {
            return HostToNetworkOrder(network);
        }

        public static int NetworkToHostOrder(int network)
        {
            return HostToNetworkOrder(network);
        }

        public static short NetworkToHostOrder(short network)
        {
            return HostToNetworkOrder(network);
        }

        public static bool IsLoopback(IPAddress address)
        {
            if (address == null)
            {
                throw new ArgumentNullException("address");
            }

            if (address.IsIPv6)
            {
                // Do Equals test for IPv6 addresses
                return address.Equals(IPv6Loopback);
            }
            else
            {
                // IPv4: anything in 127.0.0.0/8 is loopback; only the first octet is compared.
                return ((address.PrivateAddress & LoopbackMask) == (Loopback.PrivateAddress & LoopbackMask));
            }
        }

        /// <devdoc>
        ///   <para>
        ///     Determines if an address is an IPv6 Multicast address
        ///   </para>
        /// </devdoc>
        public bool IsIPv6Multicast
        {
            get
            {
                return IsIPv6 && ((_numbers[0] & 0xFF00) == 0xFF00);
            }
        }

        /// <devdoc>
        ///   <para>
        ///     Determines if an address is an IPv6 Link Local address
        ///   </para>
        /// </devdoc>
        public bool IsIPv6LinkLocal
        {
            get
            {
                return IsIPv6 && ((_numbers[0] & 0xFFC0) == 0xFE80);
            }
        }

        /// <devdoc>
        ///   <para>
        ///     Determines if an address is an IPv6 Site Local address
        ///   </para>
        /// </devdoc>
        public bool IsIPv6SiteLocal
        {
            get
            {
                return IsIPv6 && ((_numbers[0] & 0xFFC0) == 0xFEC0);
            }
        }

        // Teredo addresses have the fixed 2001:0000::/32 prefix.
        public bool IsIPv6Teredo
        {
            get
            {
                return IsIPv6 &&
                       (_numbers[0] == 0x2001) &&
                       (_numbers[1] == 0);
            }
        }

        // 0:0:0:0:0:FFFF:x.x.x.x
        public bool IsIPv4MappedToIPv6
        {
            get
            {
                if (IsIPv4)
                {
                    return false;
                }
                for (int i = 0; i < 5; i++)
                {
                    if (_numbers[i] != 0)
                    {
                        return false;
                    }
                }
                return (_numbers[5] == 0xFFFF);
            }
        }

        // Core equality: compares family, full address representation and
        // (optionally, for IPv6) the scope id.
        internal bool Equals(object comparandObj, bool compareScopeId)
        {
            IPAddress comparand = comparandObj as IPAddress;

            if (comparand == null)
            {
                return false;
            }

            // Compare families before address representations
            if (AddressFamily != comparand.AddressFamily)
            {
                return false;
            }
            if (IsIPv6)
            {
                // For IPv6 addresses, we must compare the full 128-bit representation.
                for (int i = 0; i < NumberOfLabels; i++)
                {
                    if (comparand._numbers[i] != _numbers[i])
                    {
                        return false;
                    }
                }

                // The scope IDs must also match
                return comparand.PrivateScopeId == PrivateScopeId || !compareScopeId;
            }
            else
            {
                // For IPv4 addresses, compare the integer representation.
                return comparand.PrivateAddress == PrivateAddress;
            }
        }

        /// <devdoc>
        ///   <para>
        ///     Compares two IP addresses.
        ///   </para>
        /// </devdoc>
        public override bool Equals(object comparand)
        {
            return Equals(comparand, true);
        }

        public override int GetHashCode()
        {
            // For IPv6 addresses, we cannot simply return the integer
            // representation as the hashcode. Instead, we calculate
            // the hashcode from the string representation of the address.
            if (IsIPv6)
            {
                // NOTE: benign race — hash is deterministic, so concurrent initialization is safe.
                if (_hashCode == 0)
                {
                    _hashCode = StringComparer.OrdinalIgnoreCase.GetHashCode(ToString());
                }

                return _hashCode;
            }
            else
            {
                // For IPv4 addresses, we can simply use the integer representation.
                return unchecked((int)PrivateAddress);
            }
        }

        // For security, we need to be able to take an IPAddress and make a copy that's immutable and not derived.
        internal IPAddress Snapshot()
        {
            return IsIPv4 ? new IPAddress(PrivateAddress) : new IPAddress(_numbers, PrivateScopeId);
        }

        // IPv4 192.168.1.1 maps as ::FFFF:192.168.1.1
        public IPAddress MapToIPv6()
        {
            if (IsIPv6)
            {
                return this;
            }

            uint address = PrivateAddress;
            ushort[] labels = new ushort[NumberOfLabels];
            labels[5] = 0xFFFF;
            // Swap byte pairs so each label is big-endian, matching the internal layout.
            labels[6] = (ushort)(((address & 0x0000FF00) >> 8) | ((address & 0x000000FF) << 8));
            labels[7] = (ushort)(((address & 0xFF000000) >> 24) | ((address & 0x00FF0000) >> 8));
            return new IPAddress(labels, 0);
        }

        // Takes the last 4 bytes of an IPv6 address and converts it to an IPv4 address.
        // This does not restrict to address with the ::FFFF: prefix because other types of
        // addresses display the tail segments as IPv4 like Terado.
        public IPAddress MapToIPv4()
        {
            if (IsIPv4)
            {
                return this;
            }

            // Cast the ushort values to a uint and mask with unsigned literal before bit shifting.
            // Otherwise, we can end up getting a negative value for any IPv4 address that ends with
            // a byte higher than 127 due to sign extension of the most significant 1 bit.
            long address = ((((uint)_numbers[6] & 0x0000FF00u) >> 8) | (((uint)_numbers[6] & 0x000000FFu) << 8)) |
                    (((((uint)_numbers[7] & 0x0000FF00u) >> 8) | (((uint)_numbers[7] & 0x000000FFu) << 8)) << 16);

            return new IPAddress(address);
        }
    }
}
// UrlRewriter - A .NET URL Rewriter module
// Version 2.0
//
// Copyright 2007 Intelligencia
// Copyright 2007 Seth Yates
//

namespace Intelligencia.UrlRewriter
{
    using System;
    using System.Collections;
    using System.IO;
    using System.Linq;
    using System.Net;
    using System.Text;
    using System.Text.RegularExpressions;
    using System.Web;
    using Intelligencia.UrlRewriter.Configuration;
    using Intelligencia.UrlRewriter.Utilities;

    /// <summary>
    /// The core RewriterEngine class.
    /// </summary>
    public class RewriterEngine
    {
        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="contextFacade">The context facade to use.</param>
        /// <param name="configuration">The configuration to use.</param>
        public RewriterEngine(IContextFacade contextFacade, RewriterConfiguration configuration)
        {
            if (contextFacade == null)
            {
                throw new ArgumentNullException("contextFacade");
            }
            if (configuration == null)
            {
                throw new ArgumentNullException("configuration");
            }
            ContextFacade = contextFacade;
            _configuration = configuration;
        }

        /// <summary>
        /// Resolves an Application-path relative location
        /// </summary>
        /// <param name="location">The location</param>
        /// <returns>The absolute location.</returns>
        public string ResolveLocation(string location)
        {
            if (location == null)
            {
                throw new ArgumentNullException("location");
            }
            string appPath = ContextFacade.GetApplicationPath();
            if (appPath.Length > 1)
            {
                appPath += "/";
            }
            // NOTE(review): replaces every "~/" occurrence, not just a leading one — confirm intended.
            return location.Replace("~/", appPath);
        }

        /// <summary>
        /// Performs the rewriting.
        /// </summary>
        public void Rewrite()
        {
            // NOTE(review): this decodes '+' to space across the WHOLE raw URL, including the path,
            // which can corrupt paths that legitimately contain '+'.
            string originalUrl = ContextFacade.GetRawUrl().Replace("+", " ");
            RawUrl = originalUrl;

            // Create the context
            RewriteContext context = new RewriteContext(
                this,
                originalUrl,
                ContextFacade.GetHttpMethod(),
                ContextFacade.MapPath,
                ContextFacade.GetServerVariables(),
                ContextFacade.GetHeaders(),
                ContextFacade.GetCookies());

            // Process each rule.
            ProcessRules(context);

            // Append any headers defined.
            AppendHeaders(context);

            // Append any cookies defined.
            AppendCookies(context);

            // Rewrite the path if the location has changed.
            ContextFacade.SetStatusCode((int)context.StatusCode);
            if ((context.Location != originalUrl) && ((int)context.StatusCode < 400))
            {
                if ((int)context.StatusCode < 300)
                {
                    // Successful status if less than 300
                    _configuration.Logger.Info(
                        MessageProvider.FormatString(Message.RewritingXtoY,
                        ContextFacade.GetRawUrl(), context.Location));

                    // Verify that the url exists on this server.
                    HandleDefaultDocument(context);
                    // VerifyResultExists(context);

                    // De-duplicate repeated query-string keys, keeping the first value of each.
                    if (context.Location.Contains(@"&"))
                    {
                        var queryStringCollection = HttpUtility.ParseQueryString(new Uri(this.ContextFacade.GetRequestUrl(), context.Location).Query);
                        StringBuilder builder = new StringBuilder();
                        foreach (string value in queryStringCollection.AllKeys.Distinct())
                        {
                            builder.AppendFormat("{0}={1}&", value, queryStringCollection.GetValues(value).FirstOrDefault());
                        }
                        // NOTE(review): IndexOf(string) is culture-sensitive by default; an ordinal
                        // comparison (StringComparison.Ordinal) would be safer for URL text.
                        context.Location = context.Location.Remove(context.Location.IndexOf("?") + 1);
                        context.Location = context.Location + builder;
                        if (context.Location.EndsWith(@"&"))
                        {
                            context.Location = context.Location.Remove(context.Location.Length - 1);
                        }
                    }

                    ContextFacade.RewritePath(context.Location);
                }
                else
                {
                    // Redirection
                    _configuration.Logger.Info(
                        MessageProvider.FormatString(
                        Message.RedirectingXtoY, ContextFacade.GetRawUrl(), context.Location));

                    ContextFacade.SetRedirectLocation(context.Location);
                }
            }
            else if ((int)context.StatusCode >= 400)
            {
                HandleError(context);
            }
            else if (HandleDefaultDocument(context))
            {
                ContextFacade.RewritePath(context.Location);
            }

            // Sets the context items.
            SetContextItems(context);
        }

        /// <summary>
        /// Expands the given input based on the current context.
        /// </summary>
        /// <param name="context">The current context</param>
        /// <param name="input">The input to expand.</param>
        /// <returns>The expanded input</returns>
        public string Expand(RewriteContext context, string input)
        {
            if (context == null)
            {
                throw new ArgumentNullException("context");
            }
            if (input == null)
            {
                throw new ArgumentNullException("input");
            }

            /* replacement :- $n
             *              | ${[a-zA-Z0-9\-]+}
             *              | ${fn( <replacement> )}
             *              | ${<replacement-or-id>:<replacement-or-value>:<replacement-or-value>}
             *
             * replacement-or-id :- <replacement> | <id>
             * replacement-or-value :- <replacement> | <value>
             */

            /* $1 - regex replacement
             * ${propertyname}
             * ${map-name:value}             map-name is replacement, value is replacement
             * ${map-name:value|default-value}  map-name is replacement, value is replacement, default-value is replacement
             * ${fn(value)}                  value is replacement
             */

            using (StringReader reader = new StringReader(input))
            {
                using (StringWriter writer = new StringWriter())
                {
                    // NOTE(review): TextReader.Read() returns -1 at end of input; casting to char
                    // yields 0xFFFF, so a literal U+FFFF in the input would also terminate the loop.
                    char ch = (char)reader.Read();
                    while (ch != (char)65535)
                    {
                        if ((char)ch == '$')
                        {
                            writer.Write(Reduce(context, reader));
                        }
                        else
                        {
                            writer.Write((char)ch);
                        }
                        ch = (char)reader.Read();
                    }

                    return writer.GetStringBuilder().ToString();
                }
            }
        }

        // Runs every configured rule against the context, honoring Stop/Restart directives.
        private void ProcessRules(RewriteContext context)
        {
            const int MaxRestart = 10; // Controls the number of restarts so we don't get into an infinite loop

            IList rewriteRules = _configuration.Rules;
            int restarts = 0;
            for (int i = 0; i < rewriteRules.Count; i++)
            {
                // If the rule is conditional, ensure the conditions are met.
                IRewriteCondition condition = rewriteRules[i] as IRewriteCondition;
                if (condition == null || condition.IsMatch(context))
                {
                    // Execute the action.
                    IRewriteAction action = rewriteRules[i] as IRewriteAction;
                    RewriteProcessing processing = action.Execute(context);

                    // If the action is Stop, then break out of the processing loop
                    if (processing == RewriteProcessing.StopProcessing)
                    {
                        _configuration.Logger.Debug(MessageProvider.FormatString(Message.StoppingBecauseOfRule));
                        break;
                    }
                    else if (processing == RewriteProcessing.RestartProcessing)
                    {
                        _configuration.Logger.Debug(MessageProvider.FormatString(Message.RestartingBecauseOfRule));

                        // Restart from the first rule.
                        // NOTE(review): i is set to 0 and then incremented by the loop, so the
                        // restart effectively resumes at rule index 1, skipping rule 0 — confirm intended.
                        i = 0;
                        if (++restarts > MaxRestart)
                        {
                            throw new InvalidOperationException(MessageProvider.FormatString(Message.TooManyRestarts));
                        }
                    }
                }
            }
        }

        // If the rewritten location maps to a directory with a configured default document,
        // points the location at that document and returns true.
        private bool HandleDefaultDocument(RewriteContext context)
        {
            Uri uri = new Uri(ContextFacade.GetRequestUrl(), context.Location);
            UriBuilder b = new UriBuilder(uri);
            b.Path += "/";
            uri = b.Uri;
            if (uri.Host == ContextFacade.GetRequestUrl().Host)
            {
                string filename = ContextFacade.MapPath(uri.AbsolutePath);
                if (Directory.Exists(filename))
                {
                    foreach (string document in RewriterConfiguration.Current.DefaultDocuments)
                    {
                        string pathName = Path.Combine(filename, document);
                        if (File.Exists(pathName))
                        {
                            context.Location = new Uri(uri, document).AbsolutePath;
                            return true;
                        }
                    }
                }
            }
            return false;
        }

        // NOTE(review): currently unused — the only call site in Rewrite() is commented out.
        private void VerifyResultExists(RewriteContext context)
        {
            // NOTE(review): String.Compare without a StringComparison argument is
            // culture-sensitive (CA1307); ordinal comparison is safer for URLs.
            if ((String.Compare(context.Location, ContextFacade.GetRawUrl()) != 0) && ((int)context.StatusCode < 300))
            {
                Uri uri = new Uri(ContextFacade.GetRequestUrl(), context.Location);
                if (uri.Host == ContextFacade.GetRequestUrl().Host)
                {
                    string filename = ContextFacade.MapPath(uri.AbsolutePath);
                    if (!File.Exists(filename))
                    {
                        _configuration.Logger.Debug(MessageProvider.FormatString(Message.ResultNotFound, filename));
                        context.StatusCode = HttpStatusCode.NotFound;
                    }
                    else
                    {
                        HandleDefaultDocument(context);
                    }
                }
            }
        }

        // Routes an error status to the configured error handler, or throws an HttpException.
        private void HandleError(RewriteContext context)
        {
            // Return the status code.
            ContextFacade.SetStatusCode((int)context.StatusCode);

            // Get the error handler if there is one.
            IRewriteErrorHandler handler = _configuration.ErrorHandlers[(int)context.StatusCode] as IRewriteErrorHandler;
            if (handler != null)
            {
                try
                {
                    _configuration.Logger.Debug(MessageProvider.FormatString(Message.CallingErrorHandler));

                    // Execute the error handler.
                    ContextFacade.HandleError(handler);
                }
                catch (HttpException)
                {
                    throw;
                }
                catch (Exception exc)
                {
                    _configuration.Logger.Fatal(exc.Message, exc);
                    throw new HttpException(
                        (int)HttpStatusCode.InternalServerError,
                        HttpStatusCode.InternalServerError.ToString());
                }
            }
            else
            {
                throw new HttpException((int)context.StatusCode, context.StatusCode.ToString());
            }
        }

        // Copies any headers accumulated during rule processing onto the response.
        private void AppendHeaders(RewriteContext context)
        {
            foreach (string headerKey in context.Headers)
            {
                ContextFacade.AppendHeader(headerKey, context.Headers[headerKey]);
            }
        }

        // Copies any cookies accumulated during rule processing onto the response.
        private void AppendCookies(RewriteContext context)
        {
            for (int i = 0; i < context.Cookies.Count; i++)
            {
                HttpCookie cookie = context.Cookies[i];
                ContextFacade.AppendCookie(cookie);
            }
        }

        // Publishes the original/final query strings and rule-set properties as context items.
        private void SetContextItems(RewriteContext context)
        {
            this.OriginalQueryString = new Uri(this.ContextFacade.GetRequestUrl(), this.ContextFacade.GetRawUrl()).Query.Replace("?", string.Empty);
            this.QueryString = new Uri(this.ContextFacade.GetRequestUrl(), context.Location).Query.Replace("?", string.Empty);

            // Add in the properties as context items, so these will be accessible to the handler
            foreach (string key in context.Properties.Keys)
            {
                ContextFacade.SetItem(string.Format("Rewriter.{0}", key), context.Properties[key]);
            }
        }

        /// <summary>
        /// The raw url.
        /// </summary>
        public string RawUrl
        {
            get { return (string)ContextFacade.GetItem(ContextRawUrl); }
            set { ContextFacade.SetItem(ContextRawUrl, value); }
        }

        /// <summary>
        /// The original query string.
        /// </summary>
        public string OriginalQueryString
        {
            get { return (string)ContextFacade.GetItem(ContextOriginalQueryString); }
            set { ContextFacade.SetItem(ContextOriginalQueryString, value); }
        }

        /// <summary>
        /// The final querystring, after rewriting.
        /// </summary>
        public string QueryString
        {
            get { return (string)ContextFacade.GetItem(ContextQueryString); }
            set { ContextFacade.SetItem(ContextQueryString, value); }
        }

        // Recursive-descent expansion of a single '$'-prefixed token read from the reader.
        // Handles $n (regex group by number), $<name> (group by name), ${...} (property,
        // map lookup, or function call); any other character is emitted literally.
        private string Reduce(RewriteContext context, StringReader reader)
        {
            string result;
            char ch = (char)reader.Read();
            if (char.IsDigit(ch))
            {
                // $n or $nn — numeric regex-group reference (at most two digits).
                string num = ch.ToString();
                if (char.IsDigit((char)reader.Peek()))
                {
                    ch = (char)reader.Read();
                    num += ch.ToString();
                }
                if (context.LastMatch != null)
                {
                    Group group = context.LastMatch.Groups[Convert.ToInt32(num)];
                    result = @group != null ? @group.Value : string.Empty;
                }
                else
                {
                    result = string.Empty;
                }
            }
            else switch (ch)
            {
                case '<':
                {
                    // $<name> — named regex-group reference; the name may itself contain $-expansions.
                    string expr;

                    using (StringWriter writer = new StringWriter())
                    {
                        ch = (char)reader.Read();
                        while (ch != '>' && ch != (char)65535)
                        {
                            if (ch == '$')
                            {
                                writer.Write(this.Reduce(context, reader));
                            }
                            else
                            {
                                writer.Write(ch);
                            }
                            ch = (char)reader.Read();
                        }

                        expr = writer.GetStringBuilder().ToString();
                    }

                    if (context.LastMatch != null)
                    {
                        Group group = context.LastMatch.Groups[expr];
                        if (@group != null)
                        {
                            result = @group.Value;
                        }
                        else
                        {
                            result = string.Empty;
                        }
                    }
                    else
                    {
                        result = string.Empty;
                    }
                }
                    break;

                case '{':
                {
                    // ${...} — property lookup, ${map:value|default} map transform,
                    // or ${fn(arg)} function transform, distinguished by ':' vs '(' markers.
                    string expr;
                    bool isMap = false;
                    bool isFunction = false;

                    using (StringWriter writer = new StringWriter())
                    {
                        ch = (char)reader.Read();
                        while (ch != '}' && ch != (char)65535)
                        {
                            if (ch == '$')
                            {
                                writer.Write(this.Reduce(context, reader));
                            }
                            else
                            {
                                if (ch == ':') isMap = true;
                                else if (ch == '(') isFunction = true;
                                writer.Write(ch);
                            }
                            ch = (char)reader.Read();
                        }

                        expr = writer.GetStringBuilder().ToString();
                    }

                    if (isMap)
                    {
                        Match match = Regex.Match(expr, @"^([^\:]+)\:([^\|]+)(\|(.+))?$");
                        string mapName = match.Groups[1].Value;
                        string mapArgument = match.Groups[2].Value;
                        string mapDefault = match.Groups[4].Value;
                        result = this._configuration.TransformFactory.GetTransform(mapName).ApplyTransform(
                                     mapArgument) ?? mapDefault;
                    }
                    else if (isFunction)
                    {
                        Match match = Regex.Match(expr, @"^([^\(]+)\((.+)\)$");
                        string functionName = match.Groups[1].Value;
                        string functionArgument = match.Groups[2].Value;
                        IRewriteTransform tx = this._configuration.TransformFactory.GetTransform(functionName);
                        result = tx != null ? tx.ApplyTransform(functionArgument) : expr;
                    }
                    else
                    {
                        result = context.Properties[expr];
                    }
                }
                    break;

                default:
                    // Not a recognized expansion — emit the character literally.
                    result = ch.ToString();
                    break;
            }

            return result;
        }

        // Keys under which rewrite state is stored in the per-request context items.
        private const string ContextQueryString = "UrlRewriter.NET.QueryString";
        private const string ContextOriginalQueryString = "UrlRewriter.NET.OriginalQueryString";
        private const string ContextRawUrl = "UrlRewriter.NET.RawUrl";
        private RewriterConfiguration _configuration;
        // NOTE(review): field named in PascalCase like a property; consider renaming to _contextFacade.
        private IContextFacade ContextFacade;
    }
}
//-----------------------------------------------------------------------
// <copyright file="KeyedMatrix.cs" company="Iain Sproat">
//     Copyright Iain Sproat, 2013.
//
//     Based in part on:
//
//     Math.NET Numerics, part of the Math.NET Project
//     http://numerics.mathdotnet.com
//     http://github.com/mathnet/mathnet-numerics
//     http://mathnetnumerics.codeplex.com
//
//     Copyright (c) 2009-2010 Math.NET
//
//     Permission is hereby granted, free of charge, to any person
//     obtaining a copy of this software and associated documentation
//     files (the "Software"), to deal in the Software without
//     restriction, including without limitation the rights to use,
//     copy, modify, merge, publish, distribute, sublicense, and/or sell
//     copies of the Software, and to permit persons to whom the
//     Software is furnished to do so, subject to the following
//     conditions:
//     The above copyright notice and this permission notice shall be
//     included in all copies or substantial portions of the Software.
//     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
//     EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
//     OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
//     NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
//     HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
//     WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
//     FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
//     OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
//-----------------------------------------------------------------------

namespace SharpFE
{
    using System;
    using System.Collections.Generic;
    using MathNet.Numerics.LinearAlgebra.Generic;

    /// <summary>
    /// A KeyedMatrix is a matrix whose elements can be accessed by Keys, rather than just index integers.
    /// This is roughly analagous to what a Dictionary is to a List.
    /// A KeyedSquareMatrix additionally shares one key set between rows and columns, and its
    /// operations return KeyedSquareMatrix instances rather than the rectangular base type.
    /// </summary>
    /// <typeparam name="TKey">The type of the instances which form the keys to this KeyedMatrix</typeparam>
    public class KeyedSquareMatrix<TKey> : KeyedRowColumnMatrix<TKey, TKey>
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="KeyedSquareMatrix{TKey}" /> class,
        /// using the same keys for both rows and columns.
        /// </summary>
        /// <param name="keys">The keys which will be used to look up rows and columns of this square matrix. One unique key is expected per row.</param>
        public KeyedSquareMatrix(IList<TKey> keys)
            : base(keys, keys)
        {
            // empty
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="KeyedSquareMatrix{TKey}" /> class.
        /// </summary>
        /// <param name="keysForRows">The keys which will be used to look up rows of this matrix. One unique key is expected per row.</param>
        /// <param name="keysForColumns">The keys which will be used to look up columns of this matrix. One unique key is expected per column.</param>
        public KeyedSquareMatrix(IList<TKey> keysForRows, IList<TKey> keysForColumns)
            : base(keysForRows, keysForColumns)
        {
            // empty
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="KeyedSquareMatrix{TKey}" /> class.
        /// </summary>
        /// <param name="keysForRows">The keys which will be used to look up rows of this matrix. One unique key is expected per row.</param>
        /// <param name="keysForColumns">The keys which will be used to look up columns of this matrix. One unique key is expected per column.</param>
        /// <param name="initialValueOfAllElements">The value to which we assign to each element of the matrix</param>
        public KeyedSquareMatrix(IList<TKey> keysForRows, IList<TKey> keysForColumns, double initialValueOfAllElements)
            : base(keysForRows, keysForColumns, initialValueOfAllElements)
        {
            // empty
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="KeyedSquareMatrix{TKey}" /> class.
        /// </summary>
        /// <param name="keysForRows">The keys which will be used to look up rows of this matrix. One unique key is expected per row.</param>
        /// <param name="keysForColumns">The keys which will be used to look up columns of this matrix. One unique key is expected per column.</param>
        /// <param name="matrix">The matrix whose element values are copied into this new matrix</param>
        public KeyedSquareMatrix(IList<TKey> keysForRows, IList<TKey> keysForColumns, KeyedSquareMatrix<TKey> matrix)
            : base(keysForRows, keysForColumns, matrix)
        {
            // empty
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="KeyedSquareMatrix{TKey}" /> class.
        /// </summary>
        /// <param name="keysForRows">The keys which will be used to look up rows of this matrix. One unique key is expected per row.</param>
        /// <param name="keysForColumns">The keys which will be used to look up columns of this matrix. One unique key is expected per column.</param>
        /// <param name="matrix">The matrix whose element values are copied into this new matrix</param>
        public KeyedSquareMatrix(IList<TKey> keysForRows, IList<TKey> keysForColumns, Matrix<double> matrix)
            : base(keysForRows, keysForColumns, matrix)
        {
            // empty
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="KeyedSquareMatrix{TKey}" /> class
        /// by copying keys and data from a rectangular keyed matrix.
        /// </summary>
        /// <param name="matrix">The matrix which holds the keys and data to copy into this new matrix</param>
        public KeyedSquareMatrix(KeyedRowColumnMatrix<TKey, TKey> matrix)
            : base(matrix)
        {
            // empty
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="KeyedSquareMatrix{TKey}" /> class.
        /// </summary>
        /// <param name="matrix">The matrix which holds the keys and data to copy into this new matrix</param>
        public KeyedSquareMatrix(KeyedSquareMatrix<TKey> matrix)
            : base(matrix)
        {
            // empty
        }

        /// <summary>
        /// Multiplies this matrix by another keyed matrix, returning a square keyed result.
        /// </summary>
        /// <param name="other">The matrix to multiply this matrix by</param>
        /// <returns>The product, wrapped as a <see cref="KeyedSquareMatrix{TKey}" /></returns>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1061:DoNotHideBaseClassMethods", Justification = "hiding base method avoids the need for calling members to cast")]
        public KeyedSquareMatrix<TKey> Multiply(KeyedRowColumnMatrix<TKey, TKey> other)
        {
            return new KeyedSquareMatrix<TKey>(base.Multiply(other));
        }

        /// <summary>
        /// Multiplies every element of this matrix by a scalar value.
        /// </summary>
        /// <param name="scalar">The value to scale each element by</param>
        /// <returns>The scaled matrix, wrapped as a <see cref="KeyedSquareMatrix{TKey}" /></returns>
        public new KeyedSquareMatrix<TKey> Multiply(double scalar)
        {
            return new KeyedSquareMatrix<TKey>(base.Multiply(scalar));
        }

        /// <summary>
        /// Adds another square keyed matrix to this matrix, element by element.
        /// </summary>
        /// <param name="other">The matrix to add to this matrix</param>
        /// <returns>The sum, wrapped as a <see cref="KeyedSquareMatrix{TKey}" /></returns>
        public KeyedSquareMatrix<TKey> Add(KeyedSquareMatrix<TKey> other)
        {
            return new KeyedSquareMatrix<TKey>(base.Add(other));
        }

        /// <summary>
        /// Clones this matrix
        /// </summary>
        /// <returns>A shallow clone of this matrix</returns>
        public new KeyedSquareMatrix<TKey> Clone()
        {
            return new KeyedSquareMatrix<TKey>(this);
        }

        /// <summary>
        /// Calculates the inverse of this matrix.
        /// </summary>
        /// <returns>The inverse, wrapped as a <see cref="KeyedSquareMatrix{TKey}" /></returns>
        public new KeyedSquareMatrix<TKey> Inverse()
        {
            return new KeyedSquareMatrix<TKey>(base.Inverse());
        }

        /// <summary>
        /// Calculates the transpose of this matrix.
        /// </summary>
        /// <returns>The transpose, wrapped as a <see cref="KeyedSquareMatrix{TKey}" /></returns>
        public new KeyedSquareMatrix<TKey> Transpose()
        {
            return new KeyedSquareMatrix<TKey>(base.Transpose());
        }

        /// <summary>
        /// Calculates (this^T * other) without materialising the intermediate transpose separately.
        /// </summary>
        /// <param name="other">The matrix to multiply the transpose of this matrix by</param>
        /// <returns>The product, wrapped as a <see cref="KeyedSquareMatrix{TKey}" /></returns>
        public KeyedSquareMatrix<TKey> TransposeThisAndMultiply(KeyedSquareMatrix<TKey> other)
        {
            return new KeyedSquareMatrix<TKey>(base.TransposeThisAndMultiply(other));
        }

        /// <summary>
        /// Normalizes each row of this matrix to the given p-norm.
        /// </summary>
        /// <param name="p">The norm to normalize each row to</param>
        /// <returns>The row-normalized matrix, wrapped as a <see cref="KeyedSquareMatrix{TKey}" /></returns>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "p", Justification = "Following Math.net library convention")]
        public new KeyedSquareMatrix<TKey> NormalizeRows(int p)
        {
            return new KeyedSquareMatrix<TKey>(base.NormalizeRows(p));
        }

        /// <summary>
        /// Creates a matrix which contains values from the requested sub-matrix
        /// </summary>
        /// <param name="keysToInclude">A list of the keys of the rows and columns to include in the new matrix</param>
        /// <returns>A KeyedSquareMatrix which contains values from the requested sub-matrix</returns>
        public KeyedSquareMatrix<TKey> SubMatrix(IList<TKey> keysToInclude)
        {
            KeyedSquareMatrix<TKey> result = new KeyedSquareMatrix<TKey>(keysToInclude);

            // Copy each requested (row, column) element across; keys not listed are dropped.
            foreach (TKey row in keysToInclude)
            {
                foreach (TKey column in keysToInclude)
                {
                    result.At(row, column, this.At(row, column));
                }
            }

            return result;
        }

        /// <summary>
        /// Returns the base class's string representation of this matrix.
        /// </summary>
        /// <returns>A string describing this matrix</returns>
        public override string ToString()
        {
            return base.ToString();
        }
    }
}
// Copyright (c) 2018 Alachisoft
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License
using System;

namespace Alachisoft.NCache.Web.Caching.APILogging
{
    /// <summary>
    /// Holds the client-side API-logging schedule read from the application
    /// configuration (app.config/web.config): when logging starts, how many
    /// iterations run, their duration, and the gaps between them.
    /// (The "Configuraions" misspelling in the type name is preserved for
    /// compatibility with existing callers.)
    /// </summary>
    internal class DebugAPIConfiguraions
    {
        // Schedule defaults; overridden from app settings in loadConfiguration().
        private static int s_timeBeforeLoggingStart = 0;
        private static int s_numberOfIterations = 1;
        private static int s_durationOfEachIteration = 0;
        private static int s_intervalBetweenIterations = 0;
        private static bool s_loggingEnabled = false;
        private static int s_loggerThreadLoggingInterval = 5;

        /// <summary>
        /// Indicates that all logging intervals has been passed.
        /// </summary>
        private bool _loggingExpired = false;

        // Absolute instant at which the first logging iteration begins.
        private DateTime _loggingStartTime;

        static DebugAPIConfiguraions()
        {
            loadConfiguration();
        }

        public DebugAPIConfiguraions()
        {
            _loggingStartTime = DateTime.Now.AddSeconds(s_timeBeforeLoggingStart);
        }

        /// <summary>
        /// Gets time in seconds after cache start, after which logging should be started
        /// </summary>
        public static int TimeBeforeLoggingStart
        {
            get { return s_timeBeforeLoggingStart; }
            set { s_timeBeforeLoggingStart = value; }
        }

        /// <summary>
        /// Gets number of logging iterations
        /// </summary>
        public static int NumberOfIterations
        {
            get { return s_numberOfIterations; }
            set { s_numberOfIterations = value; }
        }

        /// <summary>
        /// Gets duration of each logging iteration in seconds
        /// </summary>
        public static int DurationOfEachIteration
        {
            get { return s_durationOfEachIteration; }
            set { s_durationOfEachIteration = value; }
        }

        /// <summary>
        /// Gets time interval in seconds between two consecutive logging iterations
        /// </summary>
        public static int IntervalBetweenIterations
        {
            get { return s_intervalBetweenIterations; }
            set { s_intervalBetweenIterations = value; }
        }

        /// <summary>
        /// Gets if logging is enabled or not
        /// </summary>
        public static bool LoggingEnabled
        {
            get { return s_loggingEnabled; }
            set { s_loggingEnabled = value; }
        }

        /// <summary>
        /// Gets time interval in seconds after which logger thread should write logs to file
        /// </summary>
        public static int LoggerThreadLoggingInterval
        {
            get { return s_loggerThreadLoggingInterval; }
            set { s_loggerThreadLoggingInterval = value; }
        }

        public bool LoggingExpired
        {
            get { return _loggingExpired; }
        }

        /// <summary>
        /// Reads a boolean app setting, returning <paramref name="defaultValue"/> when the
        /// key is absent or unparsable. Configuration reading is best-effort by design.
        /// </summary>
        private static bool ReadBoolSetting(string key, bool defaultValue)
        {
            try
            {
                string raw = System.Configuration.ConfigurationManager.AppSettings[key];
                if (!string.IsNullOrEmpty(raw))
                    return bool.Parse(raw);
            }
            catch (Exception)
            {
            }
            return defaultValue;
        }

        /// <summary>
        /// Reads an integer app setting, returning <paramref name="defaultValue"/> when the
        /// key is absent or unparsable. Configuration reading is best-effort by design.
        /// </summary>
        private static int ReadIntSetting(string key, int defaultValue)
        {
            try
            {
                string raw = System.Configuration.ConfigurationManager.AppSettings[key];
                if (!string.IsNullOrEmpty(raw))
                    return int.Parse(raw);
            }
            catch (Exception)
            {
            }
            return defaultValue;
        }

        /// <summary>
        /// Loads configurations from application configuration (app.config/web.config).
        /// Any missing or malformed value silently keeps its default (best-effort, as
        /// in the original implementation).
        /// </summary>
        private static void loadConfiguration()
        {
            s_loggingEnabled = ReadBoolSetting("CacheClient.EnableAPILogging", s_loggingEnabled);

            // "CacheClient.TimeBeforeLoggingStart" is an "hh:mm:ss" string; convert to seconds.
            try
            {
                if (!string.IsNullOrEmpty(System.Configuration.ConfigurationManager.AppSettings["CacheClient.TimeBeforeLoggingStart"]))
                {
                    string time = System.Configuration.ConfigurationManager.AppSettings["CacheClient.TimeBeforeLoggingStart"];
                    string[] splitted = time.Split(':');
                    if (splitted.Length == 3)
                        s_timeBeforeLoggingStart = int.Parse(splitted[0]) * 3600 + int.Parse(splitted[1]) * 60 + int.Parse(splitted[2]);
                }
            }
            catch (Exception)
            {
            }

            s_numberOfIterations = ReadIntSetting("CacheClient.APILogIterations", s_numberOfIterations);
            s_durationOfEachIteration = ReadIntSetting("CacheClient.APILogIterationLength", s_durationOfEachIteration);
            s_intervalBetweenIterations = ReadIntSetting("CacheClient.APILogDelayBetweenIteration", s_intervalBetweenIterations);
            s_loggerThreadLoggingInterval = ReadIntSetting("CacheClient.LoggerThreadLoggingInterval", s_loggerThreadLoggingInterval);
        }

        /// <summary>
        /// Checks whether current time instant is in logging interval
        /// </summary>
        /// <returns>true if current time instant is in logging interval, else false</returns>
        public bool IsInLoggingInterval()
        {
            if ((!s_loggingEnabled) || _loggingExpired)
                return false;

            // Seconds elapsed since the scheduled start of logging (negative = not started yet).
            TimeSpan normalizedCurrentInstance = DateTime.Now.Subtract(_loggingStartTime);
            double normalizedSeconds = normalizedCurrentInstance.TotalSeconds;
            if (normalizedSeconds < 0)
                return false;

            // One "complete interval" = an active logging window plus the idle gap after it.
            int completeIntervalLength = s_durationOfEachIteration + s_intervalBetweenIterations;

            // Past the last scheduled iteration: mark logging as permanently expired.
            if (normalizedSeconds / (s_numberOfIterations * completeIntervalLength) >= 1)
            {
                _loggingExpired = true;
                return false;
            }

            // Position within the current iteration window, keeping the sub-second fraction.
            double fraction = normalizedSeconds - Convert.ToInt32(Math.Floor(normalizedSeconds));
            int normalizedToInterval = Convert.ToInt32(Math.Floor(normalizedSeconds)) % completeIntervalLength;
            double timePassedInCurrentIteration = (double)normalizedToInterval + fraction;

            // Inside the idle gap between iterations: not logging.
            if (timePassedInCurrentIteration >= s_durationOfEachIteration)
                return false;

            return true;
        }

        /// <summary>
        /// Gets logging iteration number for specified time
        /// </summary>
        /// <param name="loggingTime">Time instant for which logging iteration number is needed</param>
        /// <returns>Logging iteration number</returns>
        public int GetIterationNumber(DateTime loggingTime)
        {
            // BUG FIX: elapsed time must be measured FROM the logging start time TO the
            // supplied instant. The original computed _loggingStartTime - loggingTime,
            // which yields negative seconds (and a negative iteration number) for every
            // instant after logging has started.
            TimeSpan elapsed = loggingTime.Subtract(_loggingStartTime);
            double elapsedSeconds = elapsed.TotalSeconds;
            double loggingInterval = s_durationOfEachIteration + s_intervalBetweenIterations;
            // NOTE(review): Math.Round can report the *next* iteration for instants in the
            // second half of a window; Math.Floor may have been intended — confirm with callers.
            return (int)Math.Round(elapsedSeconds / loggingInterval);
        }
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!
// NOTE(review): this file is produced by the Google API code generator; any change
// must be made in the generator configuration, never by hand-editing this file.

using gagvr = Google.Ads.GoogleAds.V8.Resources;
using gaxgrpc = Google.Api.Gax.Grpc;
using gr = Google.Rpc;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using NUnit.Framework;
using Google.Ads.GoogleAds.V8.Services;

namespace Google.Ads.GoogleAds.Tests.V8.Services
{
    /// <summary>Generated unit tests.</summary>
    public sealed class GeneratedCampaignLabelServiceClientTest
    {
        // Verifies the synchronous GetCampaignLabel overload that takes a request object.
        [Category("Autogenerated")][Test]
        public void GetCampaignLabelRequestObject()
        {
            moq::Mock<CampaignLabelService.CampaignLabelServiceClient> mockGrpcClient = new moq::Mock<CampaignLabelService.CampaignLabelServiceClient>(moq::MockBehavior.Strict);
            GetCampaignLabelRequest request = new GetCampaignLabelRequest
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
            };
            gagvr::CampaignLabel expectedResponse = new gagvr::CampaignLabel
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
                CampaignAsCampaignName = gagvr::CampaignName.FromCustomerCampaign("[CUSTOMER_ID]", "[CAMPAIGN_ID]"),
                LabelAsLabelName = gagvr::LabelName.FromCustomerLabel("[CUSTOMER_ID]", "[LABEL_ID]"),
            };
            mockGrpcClient.Setup(x => x.GetCampaignLabel(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            CampaignLabelServiceClient client = new CampaignLabelServiceClientImpl(mockGrpcClient.Object, null);
            gagvr::CampaignLabel response = client.GetCampaignLabel(request);
            Assert.AreEqual(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Verifies the async GetCampaignLabelAsync overload that takes a request object,
        // via both a CallSettings and a raw CancellationToken.
        [Category("Autogenerated")][Test]
        public async stt::Task GetCampaignLabelRequestObjectAsync()
        {
            moq::Mock<CampaignLabelService.CampaignLabelServiceClient> mockGrpcClient = new moq::Mock<CampaignLabelService.CampaignLabelServiceClient>(moq::MockBehavior.Strict);
            GetCampaignLabelRequest request = new GetCampaignLabelRequest
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
            };
            gagvr::CampaignLabel expectedResponse = new gagvr::CampaignLabel
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
                CampaignAsCampaignName = gagvr::CampaignName.FromCustomerCampaign("[CUSTOMER_ID]", "[CAMPAIGN_ID]"),
                LabelAsLabelName = gagvr::LabelName.FromCustomerLabel("[CUSTOMER_ID]", "[LABEL_ID]"),
            };
            mockGrpcClient.Setup(x => x.GetCampaignLabelAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::CampaignLabel>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            CampaignLabelServiceClient client = new CampaignLabelServiceClientImpl(mockGrpcClient.Object, null);
            gagvr::CampaignLabel responseCallSettings = await client.GetCampaignLabelAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            Assert.AreEqual(expectedResponse, responseCallSettings);
            gagvr::CampaignLabel responseCancellationToken = await client.GetCampaignLabelAsync(request, st::CancellationToken.None);
            Assert.AreEqual(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Verifies the synchronous GetCampaignLabel overload that takes a resource-name string.
        [Category("Autogenerated")][Test]
        public void GetCampaignLabel()
        {
            moq::Mock<CampaignLabelService.CampaignLabelServiceClient> mockGrpcClient = new moq::Mock<CampaignLabelService.CampaignLabelServiceClient>(moq::MockBehavior.Strict);
            GetCampaignLabelRequest request = new GetCampaignLabelRequest
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
            };
            gagvr::CampaignLabel expectedResponse = new gagvr::CampaignLabel
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
                CampaignAsCampaignName = gagvr::CampaignName.FromCustomerCampaign("[CUSTOMER_ID]", "[CAMPAIGN_ID]"),
                LabelAsLabelName = gagvr::LabelName.FromCustomerLabel("[CUSTOMER_ID]", "[LABEL_ID]"),
            };
            mockGrpcClient.Setup(x => x.GetCampaignLabel(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            CampaignLabelServiceClient client = new CampaignLabelServiceClientImpl(mockGrpcClient.Object, null);
            gagvr::CampaignLabel response = client.GetCampaignLabel(request.ResourceName);
            Assert.AreEqual(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Verifies the async GetCampaignLabelAsync overload that takes a resource-name string.
        [Category("Autogenerated")][Test]
        public async stt::Task GetCampaignLabelAsync()
        {
            moq::Mock<CampaignLabelService.CampaignLabelServiceClient> mockGrpcClient = new moq::Mock<CampaignLabelService.CampaignLabelServiceClient>(moq::MockBehavior.Strict);
            GetCampaignLabelRequest request = new GetCampaignLabelRequest
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
            };
            gagvr::CampaignLabel expectedResponse = new gagvr::CampaignLabel
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
                CampaignAsCampaignName = gagvr::CampaignName.FromCustomerCampaign("[CUSTOMER_ID]", "[CAMPAIGN_ID]"),
                LabelAsLabelName = gagvr::LabelName.FromCustomerLabel("[CUSTOMER_ID]", "[LABEL_ID]"),
            };
            mockGrpcClient.Setup(x => x.GetCampaignLabelAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::CampaignLabel>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            CampaignLabelServiceClient client = new CampaignLabelServiceClientImpl(mockGrpcClient.Object, null);
            gagvr::CampaignLabel responseCallSettings = await client.GetCampaignLabelAsync(request.ResourceName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            Assert.AreEqual(expectedResponse, responseCallSettings);
            gagvr::CampaignLabel responseCancellationToken = await client.GetCampaignLabelAsync(request.ResourceName, st::CancellationToken.None);
            Assert.AreEqual(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Verifies the synchronous GetCampaignLabel overload that takes a typed resource name.
        [Category("Autogenerated")][Test]
        public void GetCampaignLabelResourceNames()
        {
            moq::Mock<CampaignLabelService.CampaignLabelServiceClient> mockGrpcClient = new moq::Mock<CampaignLabelService.CampaignLabelServiceClient>(moq::MockBehavior.Strict);
            GetCampaignLabelRequest request = new GetCampaignLabelRequest
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
            };
            gagvr::CampaignLabel expectedResponse = new gagvr::CampaignLabel
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
                CampaignAsCampaignName = gagvr::CampaignName.FromCustomerCampaign("[CUSTOMER_ID]", "[CAMPAIGN_ID]"),
                LabelAsLabelName = gagvr::LabelName.FromCustomerLabel("[CUSTOMER_ID]", "[LABEL_ID]"),
            };
            mockGrpcClient.Setup(x => x.GetCampaignLabel(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            CampaignLabelServiceClient client = new CampaignLabelServiceClientImpl(mockGrpcClient.Object, null);
            gagvr::CampaignLabel response = client.GetCampaignLabel(request.ResourceNameAsCampaignLabelName);
            Assert.AreEqual(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Verifies the async GetCampaignLabelAsync overload that takes a typed resource name.
        [Category("Autogenerated")][Test]
        public async stt::Task GetCampaignLabelResourceNamesAsync()
        {
            moq::Mock<CampaignLabelService.CampaignLabelServiceClient> mockGrpcClient = new moq::Mock<CampaignLabelService.CampaignLabelServiceClient>(moq::MockBehavior.Strict);
            GetCampaignLabelRequest request = new GetCampaignLabelRequest
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
            };
            gagvr::CampaignLabel expectedResponse = new gagvr::CampaignLabel
            {
                ResourceNameAsCampaignLabelName = gagvr::CampaignLabelName.FromCustomerCampaignLabel("[CUSTOMER_ID]", "[CAMPAIGN_ID]", "[LABEL_ID]"),
                CampaignAsCampaignName = gagvr::CampaignName.FromCustomerCampaign("[CUSTOMER_ID]", "[CAMPAIGN_ID]"),
                LabelAsLabelName = gagvr::LabelName.FromCustomerLabel("[CUSTOMER_ID]", "[LABEL_ID]"),
            };
            mockGrpcClient.Setup(x => x.GetCampaignLabelAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::CampaignLabel>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            CampaignLabelServiceClient client = new CampaignLabelServiceClientImpl(mockGrpcClient.Object, null);
            gagvr::CampaignLabel responseCallSettings = await client.GetCampaignLabelAsync(request.ResourceNameAsCampaignLabelName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            Assert.AreEqual(expectedResponse, responseCallSettings);
            gagvr::CampaignLabel responseCancellationToken = await client.GetCampaignLabelAsync(request.ResourceNameAsCampaignLabelName, st::CancellationToken.None);
            Assert.AreEqual(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Verifies the synchronous MutateCampaignLabels overload that takes a request object.
        [Category("Autogenerated")][Test]
        public void MutateCampaignLabelsRequestObject()
        {
            moq::Mock<CampaignLabelService.CampaignLabelServiceClient> mockGrpcClient = new moq::Mock<CampaignLabelService.CampaignLabelServiceClient>(moq::MockBehavior.Strict);
            MutateCampaignLabelsRequest request = new MutateCampaignLabelsRequest
            {
                CustomerId = "customer_id3b3724cb",
                Operations = { new CampaignLabelOperation(), },
                PartialFailure = false,
                ValidateOnly = true,
            };
            MutateCampaignLabelsResponse expectedResponse = new MutateCampaignLabelsResponse
            {
                Results = { new MutateCampaignLabelResult(), },
                PartialFailureError = new gr::Status(),
            };
            mockGrpcClient.Setup(x => x.MutateCampaignLabels(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            CampaignLabelServiceClient client = new CampaignLabelServiceClientImpl(mockGrpcClient.Object, null);
            MutateCampaignLabelsResponse response = client.MutateCampaignLabels(request);
            Assert.AreEqual(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Verifies the async MutateCampaignLabelsAsync overload that takes a request object.
        [Category("Autogenerated")][Test]
        public async stt::Task MutateCampaignLabelsRequestObjectAsync()
        {
            moq::Mock<CampaignLabelService.CampaignLabelServiceClient> mockGrpcClient = new moq::Mock<CampaignLabelService.CampaignLabelServiceClient>(moq::MockBehavior.Strict);
            MutateCampaignLabelsRequest request = new MutateCampaignLabelsRequest
            {
                CustomerId = "customer_id3b3724cb",
                Operations = { new CampaignLabelOperation(), },
                PartialFailure = false,
                ValidateOnly = true,
            };
            MutateCampaignLabelsResponse expectedResponse = new MutateCampaignLabelsResponse
            {
                Results = { new MutateCampaignLabelResult(), },
                PartialFailureError = new gr::Status(),
            };
            mockGrpcClient.Setup(x => x.MutateCampaignLabelsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MutateCampaignLabelsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            CampaignLabelServiceClient client = new CampaignLabelServiceClientImpl(mockGrpcClient.Object, null);
            MutateCampaignLabelsResponse responseCallSettings = await client.MutateCampaignLabelsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            Assert.AreEqual(expectedResponse, responseCallSettings);
            MutateCampaignLabelsResponse responseCancellationToken = await client.MutateCampaignLabelsAsync(request, st::CancellationToken.None);
            Assert.AreEqual(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Verifies the synchronous MutateCampaignLabels overload that takes (customerId, operations).
        [Category("Autogenerated")][Test]
        public void MutateCampaignLabels()
        {
            moq::Mock<CampaignLabelService.CampaignLabelServiceClient> mockGrpcClient = new moq::Mock<CampaignLabelService.CampaignLabelServiceClient>(moq::MockBehavior.Strict);
            MutateCampaignLabelsRequest request = new MutateCampaignLabelsRequest
            {
                CustomerId = "customer_id3b3724cb",
                Operations = { new CampaignLabelOperation(), },
            };
            MutateCampaignLabelsResponse expectedResponse = new MutateCampaignLabelsResponse
            {
                Results = { new MutateCampaignLabelResult(), },
                PartialFailureError = new gr::Status(),
            };
            mockGrpcClient.Setup(x => x.MutateCampaignLabels(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            CampaignLabelServiceClient client = new CampaignLabelServiceClientImpl(mockGrpcClient.Object, null);
            MutateCampaignLabelsResponse response = client.MutateCampaignLabels(request.CustomerId, request.Operations);
            Assert.AreEqual(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Verifies the async MutateCampaignLabelsAsync overload that takes (customerId, operations).
        [Category("Autogenerated")][Test]
        public async stt::Task MutateCampaignLabelsAsync()
        {
            moq::Mock<CampaignLabelService.CampaignLabelServiceClient> mockGrpcClient = new moq::Mock<CampaignLabelService.CampaignLabelServiceClient>(moq::MockBehavior.Strict);
            MutateCampaignLabelsRequest request = new MutateCampaignLabelsRequest
            {
                CustomerId = "customer_id3b3724cb",
                Operations = { new CampaignLabelOperation(), },
            };
            MutateCampaignLabelsResponse expectedResponse = new MutateCampaignLabelsResponse
            {
                Results = { new MutateCampaignLabelResult(), },
                PartialFailureError = new gr::Status(),
            };
            mockGrpcClient.Setup(x => x.MutateCampaignLabelsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MutateCampaignLabelsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            CampaignLabelServiceClient client = new CampaignLabelServiceClientImpl(mockGrpcClient.Object, null);
            MutateCampaignLabelsResponse responseCallSettings = await client.MutateCampaignLabelsAsync(request.CustomerId, request.Operations, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            Assert.AreEqual(expectedResponse, responseCallSettings);
            MutateCampaignLabelsResponse responseCancellationToken = await client.MutateCampaignLabelsAsync(request.CustomerId, request.Operations, st::CancellationToken.None);
            Assert.AreEqual(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;

namespace AngularWebApiDemo.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// Samples are resolved in three tiers: explicit <see cref="ActionSamples"/>,
    /// explicit <see cref="SampleObjects"/> serialized by the formatters, and finally
    /// objects created on the fly by <see cref="ObjectGenerator"/>.
    /// </summary>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        // Explicit action samples (added above) win over generated ones.
                        // NOTE(review): if two formatters advertise the same media type this
                        // ContainsKey guard is what prevents a duplicate-key Add — confirm
                        // MediaTypeHeaderValue equality semantics cover parameterized types.
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try get sample provided for a specific mediaType, controllerName, actionName and parameterNames.
            // If not found, try get the sample provided for a specific mediaType, controllerName and actionName regardless of the parameterNames
            // If still not found, try get the sample provided for a specific type and mediaType
            if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample))
            {
                return sample;
            }

            return null;
        }

        /// <summary>
        /// Gets the sample object that will be serialized by the formatters.
        /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create one using <see cref="ObjectGenerator"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>The sample object.</returns>
        public virtual object GetSampleObject(Type type)
        {
            object sampleObject;

            if (!SampleObjects.TryGetValue(type, out sampleObject))
            {
                // Try create a default sample object
                ObjectGenerator objectGenerator = new ObjectGenerator();
                sampleObject = objectGenerator.GenerateObject(type);
            }

            return sampleObject;
        }

        /// <summary>
        /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
        /// <param name="formatters">The formatters.</param>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
        public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
        {
            if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
            {
                throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
            }
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            Type type;
            if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
                ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
            {
                // Re-compute the supported formatters based on type
                Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
                foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
                {
                    if (IsFormatSupported(sampleDirection, formatter, type))
                    {
                        newFormatters.Add(formatter);
                    }
                }
                formatters = newFormatters;
            }
            else
            {
                switch (sampleDirection)
                {
                    case SampleDirection.Request:
                        // Only a FromBody parameter carries a request body.
                        ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                        type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                        formatters = api.SupportedRequestBodyFormatters;
                        break;
                    case SampleDirection.Response:
                    default:
                        type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                        formatters = api.SupportedResponseFormatters;
                        break;
                }
            }

            return type;
        }

        /// <summary>
        /// Writes the sample object using formatter.
        /// </summary>
        /// <param name="formatter">The formatter.</param>
        /// <param name="value">The value.</param>
        /// <param name="type">The type.</param>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns>A <see cref="TextSample"/> with the serialized payload, or an <see cref="InvalidSample"/> describing the failure.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
        public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
        {
            if (formatter == null)
            {
                throw new ArgumentNullException("formatter");
            }
            if (mediaType == null)
            {
                throw new ArgumentNullException("mediaType");
            }

            object sample = String.Empty;
            MemoryStream ms = null;
            HttpContent content = null;
            try
            {
                if (formatter.CanWriteType(type))
                {
                    ms = new MemoryStream();
                    content = new ObjectContent(type, value, formatter, mediaType);
                    // Synchronous wait is acceptable here: this runs at help-page
                    // render time against an in-memory stream.
                    formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                    ms.Position = 0;
                    // NOTE(review): the StreamReader is intentionally not disposed;
                    // disposing it would close ms, which the finally block disposes.
                    StreamReader reader = new StreamReader(ms);
                    string serializedSampleString = reader.ReadToEnd();
                    if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                    {
                        serializedSampleString = TryFormatXml(serializedSampleString);
                    }
                    else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                    {
                        serializedSampleString = TryFormatJson(serializedSampleString);
                    }

                    sample = new TextSample(serializedSampleString);
                }
                else
                {
                    sample = new InvalidSample(String.Format(
                        CultureInfo.CurrentCulture,
                        "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                        mediaType,
                        formatter.GetType().Name,
                        type.Name));
                }
            }
            catch (Exception e)
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                    formatter.GetType().Name,
                    mediaType.MediaType,
                    e.Message));
            }
            finally
            {
                if (ms != null)
                {
                    ms.Dispose();
                }
                if (content != null)
                {
                    content.Dispose();
                }
            }

            return sample;
        }

        // Pretty-prints a JSON payload; returns the input unchanged if it does not parse.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatJson(string str)
        {
            try
            {
                object parsedJson = JsonConvert.DeserializeObject(str);
                return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
            }
            catch
            {
                // can't parse JSON, return the original string
                return str;
            }
        }

        // Pretty-prints an XML payload; returns the input unchanged if it does not parse.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatXml(string str)
        {
            try
            {
                XDocument xml = XDocument.Parse(str);
                return xml.ToString();
            }
            catch
            {
                // can't parse XML, return the original string
                return str;
            }
        }

        // A formatter is relevant for requests when it can read the type, and for
        // responses when it can write it.
        private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    return formatter.CanReadType(type);
                case SampleDirection.Response:
                    return formatter.CanWriteType(type);
            }
            return false;
        }

        // Enumerates ActionSamples entries matching the controller/action/direction;
        // a sample keyed with parameter names { "*" } matches any parameter list.
        private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
        {
            HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
            foreach (var sample in ActionSamples)
            {
                HelpPageSampleKey sampleKey = sample.Key;
                if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                    String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                    (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                    sampleDirection == sampleKey.SampleDirection)
                {
                    yield return sample;
                }
            }
        }

        // Raw strings are wrapped in TextSample so the help page renders them as text.
        private static object WrapSampleIfString(object sample)
        {
            string stringSample = sample as string;
            if (stringSample != null)
            {
                return new TextSample(stringSample);
            }

            return sample;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using Mvc.JQuery.Datatables.DynamicLinq;
using Mvc.JQuery.Datatables.Reflection;

namespace Mvc.JQuery.Datatables
{
    // Translates DataTables request parameters (global search, per-column search,
    // multi-column sort) into Dynamic LINQ predicate/order strings applied to an
    // IQueryable<T>.
    internal class DataTablesFiltering
    {
        // Applies the global search, the per-column searches and the requested sort
        // order to data and returns the filtered/sorted queryable. The string-based
        // Where/OrderBy overloads come from the DynamicLinq extensions.
        public IQueryable<T> ApplyFiltersAndSort<T>(DataTablesParam dtParameters, IQueryable<T> data, DataTablesPropertyInfo[] columns)
        {
            if (!String.IsNullOrEmpty(dtParameters.sSearch))
            {
                // Global search: one clause per searchable column, OR-joined.
                var parts = new List<string>();
                var parameters = new List<object>();
                for (var i = 0; i < dtParameters.iColumns; i++)
                {
                    if (dtParameters.bSearchable[i])
                    {
                        try
                        {
                            parts.Add(GetFilterClause(dtParameters.sSearch, columns[i], parameters));
                        }
                        catch (Exception)
                        {
                            //if the clause doesn't work, skip it!
                        }
                    }
                }
                var values = parts.Where(p => p != null);
                data = data.Where(string.Join(" or ", values), parameters.ToArray());
            }
            // Per-column searches: each non-empty search value gets its own Where.
            for (int i = 0; i < dtParameters.sSearchValues.Count; i++)
            {
                if (dtParameters.bSearchable[i])
                {
                    var searchColumn = dtParameters.sSearchValues[i];
                    if (!string.IsNullOrWhiteSpace(searchColumn))
                    {
                        DataTablesPropertyInfo column = FindColumn(dtParameters, columns, i);
                        var parameters = new List<object>();
                        var filterClause = GetFilterClause(searchColumn, column, parameters);
                        if (string.IsNullOrWhiteSpace(filterClause) == false)
                        {
                            data = data.Where(filterClause, parameters.ToArray());
                        }
                    }
                }
            }

            // Sorting: build "Name dir, Name dir, ..." for the Dynamic LINQ OrderBy.
            string sortString = "";
            for (int i = 0; i < dtParameters.iSortingCols; i++)
            {
                int columnNumber = dtParameters.iSortCol[i];
                DataTablesPropertyInfo column = FindColumn(dtParameters, columns, columnNumber);
                string columnName = column.PropertyInfo.Name;
                string sortDir = dtParameters.sSortDir[i];
                if (i != 0)
                    sortString += ", ";
                sortString += columnName + " " + sortDir;
            }
            if (string.IsNullOrWhiteSpace(sortString))
            {
                // Always order by something so paging over the queryable is stable.
                sortString = columns[0].PropertyInfo.Name;
            }
            data = data.OrderBy(sortString);
            return data;
        }

        // Maps a DataTables column index to its property metadata; when the client
        // supplied explicit column names, resolve by name instead of position.
        private DataTablesPropertyInfo FindColumn(DataTablesParam dtParameters, DataTablesPropertyInfo[] columns, int i)
        {
            if (dtParameters.sColumnNames.Any())
            {
                return columns.First(x => x.PropertyInfo.Name == dtParameters.sColumnNames[i]);
            }
            else
            {
                return columns[i];
            }
        }

        // A filter that either produces a Dynamic LINQ clause for a column or
        // returns null when it does not apply to the column's type.
        public delegate string ReturnedFilteredQueryForType(string query, string columnName, DataTablesPropertyInfo columnType, List<object> parametersForLinqQuery);

        // Type-specific filters, tried in order; the FIRST filter whose guard
        // matches wins, so the ordering here is significant (e.g. enum before
        // string).
        private static readonly List<ReturnedFilteredQueryForType> Filters = new List<ReturnedFilteredQueryForType>()
        {
            Guard(IsBoolType, TypeFilters.BoolFilter),
            Guard(IsDateTimeType, TypeFilters.DateTimeFilter),
            Guard(IsDateTimeOffsetType, TypeFilters.DateTimeOffsetFilter),
            Guard(IsNumericType, TypeFilters.NumericFilter),
            Guard(IsEnumType, TypeFilters.EnumFilter),
            Guard(IsStringType, TypeFilters.StringFilter),
        };

        // Same shape as ReturnedFilteredQueryForType but only invoked after its
        // guard predicate has accepted the column.
        public delegate string GuardedFilter(string query, string columnName, DataTablesPropertyInfo columnType, List<object> parametersForLinqQuery);

        // Wraps a filter so it yields null (i.e. "does not apply") unless the
        // guard accepts the column's property metadata.
        private static ReturnedFilteredQueryForType Guard(Func<DataTablesPropertyInfo, bool> guard, GuardedFilter filter)
        {
            return (q, c, t, p) =>
            {
                if (!guard(t))
                {
                    return null;
                }
                return filter(q, c, t, p);
            };
        }

        // Extension point: appends a custom filter to the shared (static) list.
        // NOTE(review): the guard `arg is T` tests whether the
        // DataTablesPropertyInfo instance itself is a T, not whether the column's
        // CLR type is T — confirm that is the intended contract for callers.
        public static void RegisterFilter<T>(GuardedFilter filter)
        {
            Filters.Add(Guard(arg => arg is T, filter));
        }

        // Builds the clause for one column. "|" in the query separates alternative
        // values (DataTables multi-value search); each alternative is OR-joined.
        // For IEnumerable<> columns the clause is wrapped in .Any() and the element
        // is addressed as "it" inside the lambda.
        private static string GetFilterClause(string query, DataTablesPropertyInfo column, List<object> parametersForLinqQuery)
        {
            var isCollection = column.Type.IsGenericType && column.Type.GetGenericTypeDefinition() == typeof(IEnumerable<>);
            Func<string, string> filterClause = (queryPart) =>
                Filters.Select(f => f(queryPart, isCollection ? "it" : column.PropertyInfo.Name, column, parametersForLinqQuery))
                    .FirstOrDefault(filterPart => filterPart != null) ?? "";

            var queryParts = query.Split('|').Select(filterClause).Where(fc => fc != "").ToArray();
            if (queryParts.Any())
            {
                if (isCollection)
                {
                    return String.Format("{0}.Any(({1}))", column.PropertyInfo.Name, string.Join(") OR (", queryParts));
                }
                else
                {
                    return "(" + string.Join(") OR (", queryParts) + ")";
                }
            }
            return null;
        }

        // True for numeric columns, including IEnumerable<numeric> element types.
        public static bool IsNumericType(DataTablesPropertyInfo propertyInfo)
        {
            return IsNumericType(propertyInfo.Type)
                   || (propertyInfo.Type.IsGenericType
                       && propertyInfo.Type.GetGenericTypeDefinition() == typeof(IEnumerable<>)
                       && IsNumericType(propertyInfo.Type.GetGenericArguments()[0]));
        }

        // True for CLR numeric primitives and their Nullable<> forms; enums are
        // excluded even though they have a numeric TypeCode.
        private static bool IsNumericType(Type type)
        {
            if (type == null || type.IsEnum)
            {
                return false;
            }

            switch (Type.GetTypeCode(type))
            {
                case TypeCode.Byte:
                case TypeCode.Decimal:
                case TypeCode.Double:
                case TypeCode.Int16:
                case TypeCode.Int32:
                case TypeCode.Int64:
                case TypeCode.SByte:
                case TypeCode.Single:
                case TypeCode.UInt16:
                case TypeCode.UInt32:
                case TypeCode.UInt64:
                    return true;
                case TypeCode.Object:
                    // Nullable<T> has TypeCode.Object; unwrap and re-test.
                    if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof (Nullable<>))
                    {
                        return IsNumericType(Nullable.GetUnderlyingType(type));
                    }
                    return false;
            }
            return false;
        }

        // True for enum columns, including IEnumerable<enum>.
        public static bool IsEnumType(DataTablesPropertyInfo propertyInfo)
        {
            return propertyInfo.Type.IsEnum
                   || (propertyInfo.Type.IsGenericType
                       && propertyInfo.Type.GetGenericTypeDefinition() == typeof(IEnumerable<>)
                       && propertyInfo.Type.GetGenericArguments()[0].IsEnum);
        }

        // True for bool / bool? columns and their IEnumerable<> forms.
        public static bool IsBoolType(DataTablesPropertyInfo propertyInfo)
        {
            return propertyInfo.Type == typeof(bool) || propertyInfo.Type == typeof(bool?)
                   || propertyInfo.Type == typeof(IEnumerable<bool>) || propertyInfo.Type == typeof(IEnumerable<bool?>);
        }

        // True for DateTime / DateTime? columns and their IEnumerable<> forms.
        public static bool IsDateTimeType(DataTablesPropertyInfo propertyInfo)
        {
            return propertyInfo.Type == typeof(DateTime) || propertyInfo.Type == typeof(DateTime?)
                   || propertyInfo.Type == typeof(IEnumerable<DateTime>) || propertyInfo.Type == typeof(IEnumerable<DateTime?>);
        }

        // True for DateTimeOffset / DateTimeOffset? columns and their IEnumerable<> forms.
        public static bool IsDateTimeOffsetType(DataTablesPropertyInfo propertyInfo)
        {
            return propertyInfo.Type == typeof(DateTimeOffset) || propertyInfo.Type == typeof(DateTimeOffset?)
                   || propertyInfo.Type == typeof(IEnumerable<DateTimeOffset>) || propertyInfo.Type == typeof(IEnumerable<DateTimeOffset?>);
        }

        // True for string columns and IEnumerable<string>.
        public static bool IsStringType(DataTablesPropertyInfo propertyInfo)
        {
            return propertyInfo.Type == typeof(string) || propertyInfo.Type == typeof(IEnumerable<string>);
        }
    }
}
namespace MicrosoftPractices.Tests
{
    using System;

    using Microsoft.Practices.Unity;

    using NUnit.Framework;

    using PokerTell.Infrastructure.Interfaces;
    using PokerTell.Infrastructure.Services;

    // Exercises different ways of registering a factory for IModel with Unity and
    // verifies that each registration hands out a fresh instance per invocation.
    [TestFixture]
    internal class ThatUnity
    {
        IUnityContainer _container;

        [SetUp]
        public void Init()
        {
            _container = new UnityContainer();
        }

        [Test]
        public void ResolvedFuncInvoke_PointingAtContainerResolve_ReturnsNewInstances()
        {
            _container
                .RegisterType<IModel, Model>()
                .RegisterInstance<Func<IModel>>(() => _container.Resolve<IModel>())
                .RegisterType<IConsumer, Consumer>();

            var sut = _container.Resolve<IConsumer>();

            Assert.That(sut.ResolvedModelsAreUnique);
        }

        [Test]
        public void ResolvedFuncInvoke_PointingAtContainerResolveViaConstructorExtension_ReturnsNewInstances()
        {
            _container
                .RegisterConstructor<IModel, Model>()
                .RegisterType<IConsumer, Consumer2>();

            var sut = _container.Resolve<IConsumer>();

            Assert.That(sut.ResolvedModelsAreUnique);
        }

        [Test]
        public void ResolvedFuncInvoke_PointingAtContainerResolveViaFunctionWrapper_ReturnsNewInstances()
        {
            _container
                .RegisterType<IModel, Model>()
                .RegisterInstance<IConstructor<IModel>>(new Constructor<IModel>(() => _container.Resolve<IModel>()))
                .RegisterType<IConsumer, Consumer2>();

            var sut = _container.Resolve<IConsumer>();

            Assert.That(sut.ResolvedModelsAreUnique);
        }

        [Test]
        public void ResolvedFuncInvoke_PointingAtInstantiatingCodeBlock_ReturnsNewInstances()
        {
            _container
                .RegisterType<IModel, Model>()
                .RegisterInstance<Func<IModel>>(() => new Model())
                .RegisterType<IConsumer, Consumer>();

            var sut = _container.Resolve<IConsumer>();

            Assert.That(sut.ResolvedModelsAreUnique);
        }
    }

    // A consumer resolves two models via an injected factory; the tests check the
    // two instances are distinct via this flag.
    internal interface IConsumer
    {
        bool ResolvedModelsAreUnique { get; }

        string ToString();
    }

    // Consumer variant that takes a plain Func<IModel> factory.
    internal class Consumer : IConsumer
    {
        IModel _firstModel;

        IModel _secondModel;

        public Consumer(Func<IModel> newModel)
        {
            CreateModels(newModel);
        }

        // True only when both models exist and carry different ids.
        public bool ResolvedModelsAreUnique
        {
            get
            {
                if (_firstModel == null || _secondModel == null)
                {
                    return false;
                }

                return !_firstModel.Id.Equals(_secondModel.Id);
            }
        }

        public override string ToString()
        {
            return string.Format("Consumer holds models:\n{0}\n{1}", _firstModel, _secondModel);
        }

        void CreateModels(Func<IModel> newModel)
        {
            _firstModel = newModel().InitializeWith(1);
            _secondModel = newModel().InitializeWith(2);
        }
    }

    // Consumer variant that takes the IConstructor<T> wrapper instead of a Func.
    internal class Consumer2 : IConsumer
    {
        IModel _firstModel;

        IModel _secondModel;

        public Consumer2(IConstructor<IModel> model)
        {
            CreateModels(model);
        }

        // True only when both models exist and carry different ids.
        public bool ResolvedModelsAreUnique
        {
            get
            {
                if (_firstModel == null || _secondModel == null)
                {
                    return false;
                }

                return !_firstModel.Id.Equals(_secondModel.Id);
            }
        }

        public override string ToString()
        {
            return string.Format("Consumer holds models:\n{0}\n{1}", _firstModel, _secondModel);
        }

        void CreateModels(IConstructor<IModel> model)
        {
            _firstModel = model.New.InitializeWith(1);
            _secondModel = model.New.InitializeWith(2);
        }
    }

    // Minimal model with a settable id used to tell instances apart.
    internal interface IModel
    {
        int Id { get; set; }

        IModel InitializeWith(int id);

        string ToString();
    }

    internal class Model : IModel
    {
        public int Id { get; set; }

        // Fluent initializer: sets the id and returns this instance.
        public IModel InitializeWith(int id)
        {
            Id = id;
            return this;
        }

        public override string ToString()
        {
            return "Model #" + Id;
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.CSharp.Test.Utilities;
using Microsoft.CodeAnalysis.Test.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.CSharp.UnitTests
{
    // IOperation tests for sizeof(...) expressions. Each test binds the
    // /*<bind>*/.../*</bind>*/ span in the source and compares the produced
    // operation tree and diagnostics against the expected baselines.
    public partial class IOperationTests : SemanticModelTestBase
    {
        // sizeof on a primitive type: constant-folds to 4 for int, no diagnostics.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void TestSizeOfExpression()
        {
            string source = @"
using System;

class C
{
    void M(int i)
    {
        i = /*<bind>*/sizeof(int)/*</bind>*/;
    }
}
";
            string expectedOperationTree = @"
ISizeOfExpression (OperationKind.SizeOfExpression, Type: System.Int32, Constant: 4) (Syntax: 'sizeof(int)')
  TypeOperand: System.Int32
";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<SizeOfExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // sizeof on a managed (non-primitive) type: invalid outside unsafe context.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void TestSizeOfExpression_NonPrimitiveTypeArgument()
        {
            string source = @"
using System;

class C
{
    void M(int i)
    {
        i = /*<bind>*/sizeof(C)/*</bind>*/;
    }
}
";
            string expectedOperationTree = @"
ISizeOfExpression (OperationKind.SizeOfExpression, Type: System.Int32, IsInvalid) (Syntax: 'sizeof(C)')
  TypeOperand: C
";
            var expectedDiagnostics = new DiagnosticDescription[] {
                // CS0208: Cannot take the address of, get the size of, or declare a pointer to a managed type ('C')
                //         i = /*<bind>*/sizeof(C)/*</bind>*/;
                Diagnostic(ErrorCode.ERR_ManagedAddr, "sizeof(C)").WithArguments("C").WithLocation(8, 23),
                // CS0233: 'C' does not have a predefined size, therefore sizeof can only be used in an unsafe context (consider using System.Runtime.InteropServices.Marshal.SizeOf)
                //         i = /*<bind>*/sizeof(C)/*</bind>*/;
                Diagnostic(ErrorCode.ERR_SizeofUnsafe, "sizeof(C)").WithArguments("C").WithLocation(8, 23)
            };

            VerifyOperationTreeAndDiagnosticsForTest<SizeOfExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // sizeof on a pointer type inside an unsafe method: valid, no diagnostics.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void TestSizeOfExpression_PointerTypeArgument()
        {
            string source = @"
using System;

class C
{
    unsafe void M(int i)
    {
        i = /*<bind>*/sizeof(void**)/*</bind>*/;
    }
}
";
            string expectedOperationTree = @"
ISizeOfExpression (OperationKind.SizeOfExpression, Type: System.Int32) (Syntax: 'sizeof(void**)')
  TypeOperand: System.Void**
";
            var expectedDiagnostics = DiagnosticDescription.None;

            VerifyOperationTreeAndDiagnosticsForTest<SizeOfExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics, compilationOptions: TestOptions.UnsafeReleaseDll);
        }

        // sizeof on an unresolvable type name: error type flows into the operand.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void TestSizeOfExpression_ErrorTypeArgument()
        {
            string source = @"
using System;

class C
{
    void M(int i)
    {
        i = /*<bind>*/sizeof(UndefinedType)/*</bind>*/;
    }
}
";
            string expectedOperationTree = @"
ISizeOfExpression (OperationKind.SizeOfExpression, Type: System.Int32, IsInvalid) (Syntax: 'sizeof(UndefinedType)')
  TypeOperand: UndefinedType
";
            var expectedDiagnostics = new DiagnosticDescription[] {
                // CS0246: The type or namespace name 'UndefinedType' could not be found (are you missing a using directive or an assembly reference?)
                //         i = /*<bind>*/sizeof(UndefinedType)/*</bind>*/;
                Diagnostic(ErrorCode.ERR_SingleTypeNameNotFound, "UndefinedType").WithArguments("UndefinedType").WithLocation(8, 30),
                // CS0233: 'UndefinedType' does not have a predefined size, therefore sizeof can only be used in an unsafe context (consider using System.Runtime.InteropServices.Marshal.SizeOf)
                //         i = /*<bind>*/sizeof(UndefinedType)/*</bind>*/;
                Diagnostic(ErrorCode.ERR_SizeofUnsafe, "sizeof(UndefinedType)").WithArguments("UndefinedType").WithLocation(8, 23)
            };

            VerifyOperationTreeAndDiagnosticsForTest<SizeOfExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // sizeof given a variable where a type is required (CS0118).
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void TestSizeOfExpression_IdentifierArgument()
        {
            string source = @"
using System;

class C
{
    void M(int i)
    {
        i = /*<bind>*/sizeof(i)/*</bind>*/;
    }
}
";
            string expectedOperationTree = @"
ISizeOfExpression (OperationKind.SizeOfExpression, Type: System.Int32, IsInvalid) (Syntax: 'sizeof(i)')
  TypeOperand: i
";
            var expectedDiagnostics = new DiagnosticDescription[] {
                // CS0118: 'i' is a variable but is used like a type
                //         i = /*<bind>*/sizeof(i)/*</bind>*/;
                Diagnostic(ErrorCode.ERR_BadSKknown, "i").WithArguments("i", "variable", "type").WithLocation(8, 30),
                // CS0233: 'i' does not have a predefined size, therefore sizeof can only be used in an unsafe context (consider using System.Runtime.InteropServices.Marshal.SizeOf)
                //         i = /*<bind>*/sizeof(i)/*</bind>*/;
                Diagnostic(ErrorCode.ERR_SizeofUnsafe, "sizeof(i)").WithArguments("i").WithLocation(8, 23)
            };

            VerifyOperationTreeAndDiagnosticsForTest<SizeOfExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // sizeof given an invocation expression: parser recovery produces an
        // invalid expression wrapping the partially-bound sizeof; the bind span
        // is an InvocationExpressionSyntax here.
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void TestSizeOfExpression_ExpressionArgument()
        {
            string source = @"
using System;

class C
{
    void M(int i)
    {
        i = /*<bind>*/sizeof(M2()/*</bind>*/);
    }

    int M2() => 0;
}
";
            string expectedOperationTree = @"
IInvalidExpression (OperationKind.InvalidExpression, Type: ?, IsInvalid) (Syntax: 'sizeof(M2()')
  Children(1):
      ISizeOfExpression (OperationKind.SizeOfExpression, Type: System.Int32, IsInvalid) (Syntax: 'sizeof(M2')
        TypeOperand: M2
";
            var expectedDiagnostics = new DiagnosticDescription[] {
                // CS1026: ) expected
                //         i = /*<bind>*/sizeof(M2()/*</bind>*/);
                Diagnostic(ErrorCode.ERR_CloseParenExpected, "(").WithLocation(8, 32),
                // CS1002: ; expected
                //         i = /*<bind>*/sizeof(M2()/*</bind>*/);
                Diagnostic(ErrorCode.ERR_SemicolonExpected, ")").WithLocation(8, 45),
                // CS1513: } expected
                //         i = /*<bind>*/sizeof(M2()/*</bind>*/);
                Diagnostic(ErrorCode.ERR_RbraceExpected, ")").WithLocation(8, 45),
                // CS0246: The type or namespace name 'M2' could not be found (are you missing a using directive or an assembly reference?)
                //         i = /*<bind>*/sizeof(M2()/*</bind>*/);
                Diagnostic(ErrorCode.ERR_SingleTypeNameNotFound, "M2").WithArguments("M2").WithLocation(8, 30),
                // CS0233: 'M2' does not have a predefined size, therefore sizeof can only be used in an unsafe context (consider using System.Runtime.InteropServices.Marshal.SizeOf)
                //         i = /*<bind>*/sizeof(M2()/*</bind>*/;
                Diagnostic(ErrorCode.ERR_SizeofUnsafe, "sizeof(M2").WithArguments("M2").WithLocation(8, 23)
            };

            VerifyOperationTreeAndDiagnosticsForTest<InvocationExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }

        // sizeof with no argument: a missing-type error operand ('?').
        [CompilerTrait(CompilerFeature.IOperation)]
        [Fact]
        public void TestSizeOfExpression_MissingArgument()
        {
            string source = @"
using System;

class C
{
    void M(int i)
    {
        i = /*<bind>*/sizeof()/*</bind>*/;
    }
}
";
            string expectedOperationTree = @"
ISizeOfExpression (OperationKind.SizeOfExpression, Type: System.Int32, IsInvalid) (Syntax: 'sizeof()')
  TypeOperand: ?
";
            var expectedDiagnostics = new DiagnosticDescription[] {
                // CS1031: Type expected
                //         i = /*<bind>*/sizeof()/*</bind>*/;
                Diagnostic(ErrorCode.ERR_TypeExpected, ")").WithLocation(8, 30),
                // CS0233: '?' does not have a predefined size, therefore sizeof can only be used in an unsafe context (consider using System.Runtime.InteropServices.Marshal.SizeOf)
                //         i = /*<bind>*/sizeof()/*</bind>*/;
                Diagnostic(ErrorCode.ERR_SizeofUnsafe, "sizeof()").WithArguments("?").WithLocation(8, 23)
            };

            VerifyOperationTreeAndDiagnosticsForTest<SizeOfExpressionSyntax>(source, expectedOperationTree, expectedDiagnostics);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//

using System;
using System.Numerics;
using System.Runtime.CompilerServices;

// JIT regression test for Vector<T> element access (the get_Item indexer and
// get_Count). The unrolled constant-index accesses below are deliberate: the
// test exercises both variable-index and constant-index codegen, and the
// JitLog checks in Main() verify which accessors the JIT recognized, so the
// exact code shape must not be refactored into loops.
// NOTE(review): CheckValue(T, T) and JitLog are defined elsewhere (this is a
// partial class / test harness); they are not visible in this file.
internal partial class VectorTest
{
    private const int Pass = 100; // harness exit code for success
    private const int Fail = -1;  // harness exit code for failure

    private class VectorGetTest<T> where T : struct, IComparable<T>, IEquatable<T>
    {
        // Verifies that every element of new Vector<T>(value) reads back as
        // `value`: first via a variable loop index and the caller-supplied
        // `index`, then via explicit constant indices guarded by the vector
        // width. Returns Pass or Fail.
        public static int VectorGet(T value, int index)
        {
            int returnVal = Pass;
            Vector<T> A = new Vector<T>(value);

            // Test variable index.
            for (int i = 0; i < Vector<T>.Count; i++)
            {
                if (!CheckValue(A[i], value)) returnVal = Fail;
            }
            if (!CheckValue(A[index], value)) returnVal = Fail;

            // Test constant index.
            if (!CheckValue(A[0], value)) returnVal = Fail;
            if (Vector<T>.Count >= 2)
            {
                if (!CheckValue(A[1], value))
                {
                    Console.WriteLine("Failed for [1] for type " + typeof(T).ToString());
                    returnVal = Fail;
                }
            }
            if (Vector<T>.Count >= 4)
            {
                if (!CheckValue(A[2], value))
                {
                    Console.WriteLine("Failed for [2] for type " + typeof(T).ToString());
                    returnVal = Fail;
                }
                if (!CheckValue(A[3], value))
                {
                    Console.WriteLine("Failed for [3] for type " + typeof(T).ToString());
                    returnVal = Fail;
                }
            }
            if (Vector<T>.Count >= 8)
            {
                if (!CheckValue(A[4], value))
                {
                    Console.WriteLine("Failed for [4] for type " + typeof(T).ToString());
                    returnVal = Fail;
                }
                if (!CheckValue(A[5], value))
                {
                    Console.WriteLine("Failed for [5] for type " + typeof(T).ToString());
                    returnVal = Fail;
                }
                if (!CheckValue(A[6], value))
                {
                    Console.WriteLine("Failed for [6] for type " + typeof(T).ToString());
                    returnVal = Fail;
                }
                if (!CheckValue(A[7], value))
                {
                    Console.WriteLine("Failed for [7] for type " + typeof(T).ToString());
                    returnVal = Fail;
                }
            }
            if (Vector<T>.Count >= 16)
            {
                if (!CheckValue(A[8], value)) returnVal = Fail;
                if (!CheckValue(A[9], value)) returnVal = Fail;
                if (!CheckValue(A[10], value)) returnVal = Fail;
                if (!CheckValue(A[11], value)) returnVal = Fail;
                if (!CheckValue(A[12], value)) returnVal = Fail;
                if (!CheckValue(A[13], value)) returnVal = Fail;
                if (!CheckValue(A[14], value)) returnVal = Fail;
                if (!CheckValue(A[15], value)) returnVal = Fail;
            }
            if (Vector<T>.Count >= 32)
            {
                if (!CheckValue(A[16], value)) returnVal = Fail;
                if (!CheckValue(A[17], value)) returnVal = Fail;
                if (!CheckValue(A[18], value)) returnVal = Fail;
                if (!CheckValue(A[19], value)) returnVal = Fail;
                if (!CheckValue(A[20], value)) returnVal = Fail;
                if (!CheckValue(A[21], value)) returnVal = Fail;
                if (!CheckValue(A[22], value)) returnVal = Fail;
                if (!CheckValue(A[23], value)) returnVal = Fail;
                if (!CheckValue(A[24], value)) returnVal = Fail;
                if (!CheckValue(A[25], value)) returnVal = Fail;
                if (!CheckValue(A[26], value)) returnVal = Fail;
                if (!CheckValue(A[27], value)) returnVal = Fail;
                if (!CheckValue(A[28], value)) returnVal = Fail;
                if (!CheckValue(A[29], value)) returnVal = Fail;
                if (!CheckValue(A[30], value)) returnVal = Fail;
                if (!CheckValue(A[31], value)) returnVal = Fail;
            }
            return returnVal;
        }

        // Verifies that indexing one past the last valid element
        // (index == Vector<T>.Count) throws IndexOutOfRangeException. The
        // switch uses constant out-of-range indices, one per supported vector
        // width; NoOptimization keeps the JIT from eliding the unused load.
        // NOTE(review): if Vector<T>.Count is not one of 2/4/8/16/32, no access
        // is attempted, `caught` stays false, and the method reports Fail.
        [MethodImpl(MethodImplOptions.NoOptimization)]
        public static int VectorGetIndexerOutOfRange(T value, int index)
        {
            int returnVal = Pass;
            bool caught;
            Vector<T> A = new Vector<T>(value);
            T check; // result sink; the value is never used, only the throw matters
            caught = false;
            try
            {
                switch (Vector<T>.Count)
                {
                    case 2:
                        check = A[2];
                        break;
                    case 4:
                        check = A[4];
                        break;
                    case 8:
                        check = A[8];
                        break;
                    case 16:
                        check = A[16];
                        break;
                    case 32:
                        check = A[32];
                        break;
                }
            }
            catch (IndexOutOfRangeException)
            {
                // Expected path.
                caught = true;
            }
            catch (Exception e)
            {
                // Any other exception type is a failure; report it but fall
                // through to the !caught check below.
                Console.WriteLine("Caught exception: " + e.GetType());
            }
            if (!caught)
            {
                Console.WriteLine("Failed to throw IndexOutOfRangeException for index == Count of " + Vector<T>.Count);
                returnVal = Fail;
            }
            return returnVal;
        }
    }

    // Runs the element-access and out-of-range tests for every supported
    // element type, then inspects the JIT log to confirm that
    // Vector<T>.get_Item and Vector<T>.get_Count were compiled (recognized)
    // for each element type. Returns Pass (100) or Fail (-1).
    private static int Main()
    {
        int returnVal = Pass;

        if (VectorGetTest<Double>.VectorGet(101D, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<Double>.VectorGet(100D, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<Double>.VectorGetIndexerOutOfRange(100D, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<Single>.VectorGet(101F, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<Single>.VectorGet(100F, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<Single>.VectorGetIndexerOutOfRange(100F, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<int>.VectorGet(101, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<int>.VectorGet(100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<int>.VectorGetIndexerOutOfRange(100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<long>.VectorGet(101, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<long>.VectorGet(100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<long>.VectorGetIndexerOutOfRange(100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<ushort>.VectorGet(101, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<ushort>.VectorGet(100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<ushort>.VectorGetIndexerOutOfRange(100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<byte>.VectorGet(101, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<byte>.VectorGet(100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<byte>.VectorGetIndexerOutOfRange(100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<short>.VectorGet(101, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<short>.VectorGet(-100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<short>.VectorGetIndexerOutOfRange(-100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<sbyte>.VectorGet(101, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<sbyte>.VectorGet(-100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<sbyte>.VectorGetIndexerOutOfRange(-100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<uint>.VectorGet(101, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<uint>.VectorGet(100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<uint>.VectorGetIndexerOutOfRange(100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<ulong>.VectorGet(101, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<ulong>.VectorGet(100, 1) == Fail) returnVal = Fail;
        if (VectorGetTest<ulong>.VectorGetIndexerOutOfRange(100, 1) == Fail) returnVal = Fail;

        // Verify, via the JIT log, that both Vector<T> accessors were
        // successfully compiled for each element type exercised above.
        JitLog jitLog = new JitLog();
        if (!jitLog.Check("get_Item", "Double")) returnVal = Fail;
        if (!jitLog.Check("get_Count", "Double")) returnVal = Fail;
        if (!jitLog.Check("get_Item", "Single")) returnVal = Fail;
        if (!jitLog.Check("get_Count", "Single")) returnVal = Fail;
        if (!jitLog.Check("get_Item", "Int32")) returnVal = Fail;
        if (!jitLog.Check("get_Count", "Int32")) returnVal = Fail;
        if (!jitLog.Check("get_Item", "Int64")) returnVal = Fail;
        if (!jitLog.Check("get_Count", "Int64")) returnVal = Fail;
        if (!jitLog.Check("get_Item", "UInt16")) returnVal = Fail;
        if (!jitLog.Check("get_Count", "UInt16")) returnVal = Fail;
        if (!jitLog.Check("get_Item", "Byte")) returnVal = Fail;
        if (!jitLog.Check("get_Count", "Byte")) returnVal = Fail;
        if (!jitLog.Check("get_Item", "Int16")) returnVal = Fail;
        if (!jitLog.Check("get_Count", "Int16")) returnVal = Fail;
        if (!jitLog.Check("get_Item", "SByte")) returnVal = Fail;
        if (!jitLog.Check("get_Count", "SByte")) returnVal = Fail;
        if (!jitLog.Check("get_Item", "UInt32")) returnVal = Fail;
        if (!jitLog.Check("get_Count", "UInt32")) returnVal = Fail;
        if (!jitLog.Check("get_Item", "UInt64")) returnVal = Fail;
        if (!jitLog.Check("get_Count", "UInt64")) returnVal = Fail;
        jitLog.Dispose();

        return returnVal;
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Xunit;

namespace System.Collections.Tests
{
    public class ReadOnlyCollectionBaseTests
    {
        // Builds a 100-element strongly typed read-only collection holding
        // Foo(i, i.ToString()) at each index i.
        private static MyReadOnlyCollectionBase CreateCollection()
        {
            var fooArray = new Foo[100];
            for (int i = 0; i < 100; i++)
            {
                fooArray[i] = new Foo(i, i.ToString());
            }
            return new MyReadOnlyCollectionBase(fooArray);
        }

        [Fact]
        public static void SyncRoot()
        {
            MyReadOnlyCollectionBase collection = CreateCollection();

            // ReadOnlyCollectionBase delegates SyncRoot to its inner ArrayList,
            // and the same object must be returned on every call.
            Assert.True(collection.SyncRoot is ArrayList);
            Assert.Same(collection.SyncRoot, collection.SyncRoot);
        }

        [Fact]
        public static void AddRange_Count()
        {
            MyReadOnlyCollectionBase collection = CreateCollection();
            Assert.Equal(100, collection.Count);
        }

        [Fact]
        public static void CopyTo_ZeroIndex()
        {
            MyReadOnlyCollectionBase collection = CreateCollection();
            var copyArray = new Foo[100];
            collection.CopyTo(copyArray, 0);

            Assert.Equal(100, copyArray.Length);
            for (int i = 0; i < 100; i++)
            {
                Assert.Equal(i, copyArray[i].IntValue);
                Assert.Equal(i.ToString(), copyArray[i].StringValue);
            }
        }

        [Fact]
        public static void CopyTo_NonZeroIndex()
        {
            MyReadOnlyCollectionBase collection = CreateCollection();
            var copyArray = new Foo[200];
            collection.CopyTo(copyArray, 100);

            // Elements land at [100..199]; [0..99] are untouched.
            Assert.Equal(200, copyArray.Length);
            for (int i = 0; i < 100; i++)
            {
                Assert.Equal(i, copyArray[100 + i].IntValue);
                Assert.Equal(i.ToString(), copyArray[100 + i].StringValue);
            }
        }

        [Fact]
        public static void CopyTo_Invalid()
        {
            MyReadOnlyCollectionBase collection = CreateCollection();

            AssertExtensions.Throws<ArgumentNullException>("destinationArray", "dest", () => collection.CopyTo(null, 0)); // Array is null
            AssertExtensions.Throws<ArgumentException>("destinationArray", string.Empty, () => collection.CopyTo(new Foo[100], 50)); // Index + collection.Count > array.Length
            AssertExtensions.Throws<ArgumentOutOfRangeException>("destinationIndex", "dstIndex", () => collection.CopyTo(new Foo[100], -1)); // Index < 0
        }

        [Fact]
        public static void GetEnumerator()
        {
            MyReadOnlyCollectionBase collection = CreateCollection();
            IEnumerator enumerator = collection.GetEnumerator();

            // Calling current should throw when the enumerator has not started enumerating
            Assert.Throws<InvalidOperationException>(() => enumerator.Current);

            int counter = 0;
            while (enumerator.MoveNext())
            {
                Foo current = (Foo)enumerator.Current;
                Assert.Equal(counter, current.IntValue);
                Assert.Equal(counter.ToString(), current.StringValue);
                counter++;
            }
            Assert.Equal(collection.Count, counter);

            // Calling current should throw when the enumerator has finished enumerating
            Assert.Throws<InvalidOperationException>(() => (Foo)enumerator.Current);

            // Calling current should throw when the enumerator is reset
            enumerator.Reset();
            Assert.Throws<InvalidOperationException>(() => (Foo)enumerator.Current);
        }

        [Fact]
        public static void IsSynchronized()
        {
            MyReadOnlyCollectionBase collection = CreateCollection();
            Assert.False(((ICollection)collection).IsSynchronized);
        }

        [Fact]
        public static void IListMethods()
        {
            MyReadOnlyCollectionBase collection = CreateCollection();
            for (int i = 0; i < 100; i++)
            {
                Assert.Equal(i, collection[i].IntValue);
                Assert.Equal(i.ToString(), collection[i].StringValue);

                // IndexOf/Contains rely on Foo's value-based Equals override.
                Assert.Equal(i, collection.IndexOf(new Foo(i, i.ToString())));
                Assert.True(collection.Contains(new Foo(i, i.ToString())));
            }
        }

        [Fact]
        public static void IListProperties()
        {
            MyReadOnlyCollectionBase collection = CreateCollection();
            Assert.True(collection.IsFixedSize);
            Assert.True(collection.IsReadOnly);
        }

        [Fact]
        public static void VirtualMethods()
        {
            VirtualTestReadOnlyCollection collectionBase = new VirtualTestReadOnlyCollection();
            // BUGFIX: arguments were swapped (actual, expected); xUnit expects
            // the expected value first so failure messages label correctly.
            Assert.Equal(int.MinValue, collectionBase.Count);
            Assert.Null(collectionBase.GetEnumerator());
        }

        // ReadOnlyCollectionBase is provided to be used as the base class for strongly typed collections.
        // Let's use one of our own here for the type Foo.
        private class MyReadOnlyCollectionBase : ReadOnlyCollectionBase
        {
            public MyReadOnlyCollectionBase(Foo[] values)
            {
                InnerList.AddRange(values);
            }

            public Foo this[int indx]
            {
                get { return (Foo)InnerList[indx]; }
            }

            public void CopyTo(Array array, int index) => ((ICollection)this).CopyTo(array, index);

            public virtual object SyncRoot
            {
                get { return ((ICollection)this).SyncRoot; }
            }

            public int IndexOf(Foo f) => ((IList)InnerList).IndexOf(f);

            public bool Contains(Foo f) => ((IList)InnerList).Contains(f);

            public bool IsFixedSize
            {
                get { return true; }
            }

            public bool IsReadOnly
            {
                get { return true; }
            }
        }

        // Overrides Count/GetEnumerator with sentinel values so tests can
        // confirm the base class's members really are virtual.
        private class VirtualTestReadOnlyCollection : ReadOnlyCollectionBase
        {
            public override int Count
            {
                get { return int.MinValue; }
            }

            public override IEnumerator GetEnumerator() => null;
        }

        // Simple value-semantics element type used by the tests above.
        private class Foo
        {
            public Foo()
            {
            }

            public Foo(int intValue, string stringValue)
            {
                IntValue = intValue;
                StringValue = stringValue;
            }

            public int IntValue { get; set; }
            public string StringValue { get; set; }

            public override bool Equals(object obj)
            {
                // BUGFIX: previously this checked `obj == null` after the `as`
                // cast, so a non-null object of another type left `foo` null
                // and the dereference below threw NullReferenceException.
                // Equals must return false for mismatched types instead.
                Foo foo = obj as Foo;
                if (foo == null)
                    return false;
                return foo.IntValue == IntValue && foo.StringValue == StringValue;
            }

            public override int GetHashCode() => IntValue;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Xml;
using System.Collections;

namespace System.Data.Common
{
    // Column storage for DataColumn values of type UInt16. Values live in a
    // flat ushort[] indexed by record number; nulls are tracked by the base
    // class's null-bit machinery (HasValue / SetNullBit / GetBits / CompareBits),
    // because the default value ushort.MinValue (0) is also a legal stored value.
    internal sealed class UInt16Storage : DataStorage
    {
        private static readonly ushort s_defaultValue = ushort.MinValue;

        // Per-record values; (re)sized by SetCapacity, swapped in by SetStorage.
        private ushort[] _values;

        public UInt16Storage(DataColumn column)
            : base(column, typeof(ushort), s_defaultValue, StorageType.UInt16)
        {
        }

        // Computes the requested aggregate over the given record indices,
        // skipping null records. Returns _nullValue when no non-null data
        // participates (First returns null, Count returns 0 instead).
        // Sum/Mean use checked arithmetic; OverflowException is translated to
        // an expression overflow error for the ushort type.
        public override object Aggregate(int[] records, AggregateType kind)
        {
            bool hasData = false;
            try
            {
                switch (kind)
                {
                    case AggregateType.Sum:
                        // Accumulate in ulong so a checked overflow can only
                        // happen for truly huge sums.
                        ulong sum = s_defaultValue;
                        foreach (int record in records)
                        {
                            if (HasValue(record))
                            {
                                checked { sum += _values[record]; }
                                hasData = true;
                            }
                        }
                        if (hasData)
                        {
                            return sum;
                        }
                        return _nullValue;

                    case AggregateType.Mean:
                        long meanSum = s_defaultValue;
                        int meanCount = 0;
                        foreach (int record in records)
                        {
                            if (HasValue(record))
                            {
                                checked { meanSum += _values[record]; }
                                meanCount++;
                                hasData = true;
                            }
                        }
                        if (hasData)
                        {
                            ushort mean;
                            // Integer division; the narrowing cast is checked.
                            checked { mean = (ushort)(meanSum / meanCount); }
                            return mean;
                        }
                        return _nullValue;

                    case AggregateType.Var:
                    case AggregateType.StDev:
                        int count = 0;
                        double var = 0.0f;
                        double prec = 0.0f;
                        double dsum = 0.0f;
                        double sqrsum = 0.0f;

                        foreach (int record in records)
                        {
                            if (HasValue(record))
                            {
                                dsum += _values[record];
                                sqrsum += _values[record] * (double)_values[record];
                                count++;
                            }
                        }

                        // Variance needs at least two data points.
                        if (count > 1)
                        {
                            var = count * sqrsum - (dsum * dsum);
                            prec = var / (dsum * dsum);

                            // we are dealing with the risk of a cancellation error
                            // double is guaranteed only for 15 digits so a difference
                            // with a result less than 1e-15 should be considered as zero
                            if ((prec < 1e-15) || (var < 0))
                                var = 0;
                            else
                                var = var / (count * (count - 1));

                            if (kind == AggregateType.StDev)
                            {
                                return Math.Sqrt(var);
                            }
                            return var;
                        }
                        return _nullValue;

                    case AggregateType.Min:
                        ushort min = ushort.MaxValue;
                        for (int i = 0; i < records.Length; i++)
                        {
                            int record = records[i];
                            if (HasValue(record))
                            {
                                min = Math.Min(_values[record], min);
                                hasData = true;
                            }
                        }
                        if (hasData)
                        {
                            return min;
                        }
                        return _nullValue;

                    case AggregateType.Max:
                        ushort max = ushort.MinValue;
                        for (int i = 0; i < records.Length; i++)
                        {
                            int record = records[i];
                            if (HasValue(record))
                            {
                                max = Math.Max(_values[record], max);
                                hasData = true;
                            }
                        }
                        if (hasData)
                        {
                            return max;
                        }
                        return _nullValue;

                    case AggregateType.First:
                        // NOTE(review): First does not consult HasValue, so a
                        // null first record yields its raw stored value (0).
                        if (records.Length > 0)
                        {
                            return _values[records[0]];
                        }
                        return null;

                    case AggregateType.Count:
                        // Counts only non-null records.
                        count = 0;
                        for (int i = 0; i < records.Length; i++)
                        {
                            if (HasValue(records[i]))
                            {
                                count++;
                            }
                        }
                        return count;
                }
            }
            catch (OverflowException)
            {
                throw ExprException.Overflow(typeof(ushort));
            }
            // Unrecognized aggregate kind for this storage type.
            throw ExceptionBuilder.AggregateException(kind, _dataType);
        }

        // Compares two records; null ordering is resolved via the null bits
        // only when a stored value equals the default (0), since only then
        // could the record actually be null.
        public override int Compare(int recordNo1, int recordNo2)
        {
            ushort valueNo1 = _values[recordNo1];
            ushort valueNo2 = _values[recordNo2];

            if (valueNo1 == s_defaultValue || valueNo2 == s_defaultValue)
            {
                int bitCheck = CompareBits(recordNo1, recordNo2);
                if (0 != bitCheck)
                {
                    return bitCheck;
                }
            }
            //return valueNo1.CompareTo(valueNo2);
            return valueNo1 - valueNo2; // copied from UInt16.CompareTo(UInt16)
        }

        // Compares a record against a boxed value; nulls sort before values.
        public override int CompareValueTo(int recordNo, object value)
        {
            System.Diagnostics.Debug.Assert(0 <= recordNo, "Invalid record");
            System.Diagnostics.Debug.Assert(null != value, "null value");

            if (_nullValue == value)
            {
                // Non-null record > null value; null == null.
                return (HasValue(recordNo) ? 1 : 0);
            }

            ushort valueNo1 = _values[recordNo];
            if ((s_defaultValue == valueNo1) && !HasValue(recordNo))
            {
                // Null record < any non-null value.
                return -1;
            }
            return valueNo1.CompareTo((ushort)value);
            //return ((int)valueNo1 - (int)valueNo2); // copied from UInt16.CompareTo(UInt16)
        }

        // Coerces an arbitrary non-null value to ushort via IConvertible;
        // passes the storage's null sentinel through unchanged.
        public override object ConvertValue(object value)
        {
            if (_nullValue != value)
            {
                if (null != value)
                {
                    value = ((IConvertible)value).ToUInt16(FormatProvider);
                }
                else
                {
                    value = _nullValue;
                }
            }
            return value;
        }

        // Copies one record's value and null bit onto another record.
        public override void Copy(int recordNo1, int recordNo2)
        {
            CopyBits(recordNo1, recordNo2);
            _values[recordNo2] = _values[recordNo1];
        }

        // Returns the boxed value for a record, or the null representation
        // (via GetBits) when the stored value is the default and may be null.
        public override object Get(int record)
        {
            ushort value = _values[record];
            if (!value.Equals(s_defaultValue))
            {
                return value;
            }
            return GetBits(record);
        }

        // Stores a value (or null) into a record, updating the null bit.
        public override void Set(int record, object value)
        {
            System.Diagnostics.Debug.Assert(null != value, "null value");
            if (_nullValue == value)
            {
                _values[record] = s_defaultValue;
                SetNullBit(record, true);
            }
            else
            {
                _values[record] = ((IConvertible)value).ToUInt16(FormatProvider);
                SetNullBit(record, false);
            }
        }

        // Grows (or shrinks) the value array, preserving existing contents.
        public override void SetCapacity(int capacity)
        {
            ushort[] newValues = new ushort[capacity];
            if (null != _values)
            {
                Array.Copy(_values, 0, newValues, 0, Math.Min(capacity, _values.Length));
            }
            _values = newValues;
            base.SetCapacity(capacity);
        }

        // XML (de)serialization uses the schema-conformant XmlConvert forms.
        public override object ConvertXmlToObject(string s)
        {
            return XmlConvert.ToUInt16(s);
        }

        public override string ConvertObjectToXml(object value)
        {
            return XmlConvert.ToString((ushort)value);
        }

        protected override object GetEmptyStorage(int recordCount)
        {
            return new ushort[recordCount];
        }

        // Copies one record into an external store, recording its null bit.
        protected override void CopyValue(int record, object store, BitArray nullbits, int storeIndex)
        {
            ushort[] typedStore = (ushort[])store;
            typedStore[storeIndex] = _values[record];
            nullbits.Set(storeIndex, !HasValue(record));
        }

        // Adopts an external store and its null bits as this column's data.
        protected override void SetStorage(object store, BitArray nullbits)
        {
            _values = (ushort[])store;
            SetNullStorage(nullbits);
        }
    }
}
/*

   Copyright 2019 Esri

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

*/
using System;
using System.Drawing;
using System.Runtime.InteropServices;
using ESRI.ArcGIS.ADF.BaseClasses;
using ESRI.ArcGIS.ADF.CATIDs;
using ESRI.ArcGIS.Controls;
using ESRI.ArcGIS.Carto;
using ESRI.ArcGIS.Analyst3D;
using ESRI.ArcGIS.GlobeCore;

namespace RSSWeatherLayer3D
{
    /// <summary>
    /// Connects and disconnects from the RSS weather service.
    /// Acts as a toggle command: the first click adds the RSSWeatherLayer3DClass
    /// layer to the globe (if not already present) and connects it to the
    /// service; the next click disconnects, removes, and disposes the layer.
    /// </summary>
    [ClassInterface(ClassInterfaceType.None)]
    [Guid("4484FB2E-9E79-4642-8B14-32DA6AE2EAF3")]
    [ProgId("RSSWeatherLayer3D.AddWeatherLayerCmd")]
    [ComVisible(true)]
    public sealed class AddWeatherLayerCmd : BaseCommand
    {
        #region COM Registration Function(s)
        // Invoked by regasm when the assembly is registered for COM.
        [ComRegisterFunction()]
        [ComVisible(false)]
        static void RegisterFunction(Type registerType)
        {
            // Required for ArcGIS Component Category Registrar support
            ArcGISCategoryRegistration(registerType);

            //
            // TODO: Add any COM registration code here
            //
        }

        // Invoked by regasm when the assembly is unregistered from COM.
        [ComUnregisterFunction()]
        [ComVisible(false)]
        static void UnregisterFunction(Type registerType)
        {
            // Required for ArcGIS Component Category Registrar support
            ArcGISCategoryUnregistration(registerType);

            //
            // TODO: Add any COM unregistration code here
            //
        }

        #region ArcGIS Component Category Registrar generated code
        /// <summary>
        /// Required method for ArcGIS Component Category registration -
        /// Do not modify the contents of this method with the code editor.
        /// </summary>
        private static void ArcGISCategoryRegistration(Type registerType)
        {
            string regKey = string.Format("HKEY_CLASSES_ROOT\\CLSID\\{{{0}}}", registerType.GUID);
            GMxCommands.Register(regKey);
            ControlsCommands.Register(regKey);
        }
        /// <summary>
        /// Required method for ArcGIS Component Category unregistration -
        /// Do not modify the contents of this method with the code editor.
        /// </summary>
        private static void ArcGISCategoryUnregistration(Type registerType)
        {
            string regKey = string.Format("HKEY_CLASSES_ROOT\\CLSID\\{{{0}}}", registerType.GUID);
            GMxCommands.Unregister(regKey);
            ControlsCommands.Unregister(regKey);
        }

        #endregion
        #endregion

        //Class members
        private IGlobeHookHelper m_globeHookHelper = null; // bridge to the hosting globe application
        private RSSWeatherLayer3DClass m_weatherLayer = null; // the single weather layer instance this command manages
        private IScene m_scene = null; // globe cast to IScene, cached for layer removal
        //private bool m_bOnce = true;
        private bool m_bConnected = false; // toggle state; also drives the Checked property

        /// <summary>
        /// CTor. Sets the command's caption/tooltip metadata and loads its
        /// toolbar bitmap from the embedded resource.
        /// </summary>
        public AddWeatherLayerCmd()
        {
            base.m_category = "Weather3D";
            base.m_caption = "Load Layer";
            base.m_message = "Connect to the RSS weather service";
            base.m_toolTip = "Connect to weather service";
            base.m_name = base.m_category + "_" + base.m_caption;

            try
            {
                base.m_bitmap = new Bitmap(GetType().Assembly.GetManifestResourceStream(GetType(), "Bitmaps.AddWeatherLayerCmd.bmp"));
            }
            catch (Exception ex)
            {
                // A missing bitmap is non-fatal; the command still works.
                System.Diagnostics.Trace.WriteLine(ex.Message, "Invalid Bitmap");
            }
        }

        #region Overriden Class Methods

        /// <summary>
        /// Occurs when this command is created
        /// </summary>
        /// <param name="hook">Instance of the application</param>
        public override void OnCreate(object hook)
        {
            //Instantiate the hook helper
            if (null == m_globeHookHelper)
                m_globeHookHelper = new GlobeHookHelperClass();

            //set the hook
            m_globeHookHelper.Hook = hook;

            m_bConnected = false;
        }

        /// <summary>
        /// Occurs when this command is clicked.
        /// Toggles between connected and disconnected states: when connecting,
        /// ensures exactly one RSS weather layer exists on the globe and
        /// connects it; when disconnecting, removes and disposes the layer.
        /// </summary>
        public override void OnClick()
        {
            //test whether connected to the service
            if (!m_bConnected) //in case not connected.
            {
                IGlobe globe = m_globeHookHelper.Globe;
                m_scene = globe as IScene;

                //create the instance of the layer
                if (null == m_weatherLayer)
                {
                    m_weatherLayer = new RSSWeatherLayer3DClass();
                }

                //test whether the layer has been added to the globe (allow for only one instance of the layer)
                bool bLayerHasBeenAdded = false;
                IEnumLayer layers = m_scene.get_Layers(null, false);
                layers.Reset();
                ILayer layer = layers.Next();
                while (layer != null)
                {
                    if (layer is RSSWeatherLayer3DClass)
                    {
                        bLayerHasBeenAdded = true;
                        break;
                    }
                    layer = layers.Next();
                }

                //in case that the layer hasn't been added
                if (!bLayerHasBeenAdded)
                {
                    layer = (ILayer)m_weatherLayer;
                    layer.Name = "RSS Weather Layer";
                    try
                    {
                        //add the layer to the globe
                        globe.AddLayerType(layer, esriGlobeLayerType.esriGlobeLayerTypeDraped, false);
                    }
                    catch (Exception ex)
                    {
                        System.Diagnostics.Trace.WriteLine("Failed" + ex.Message);
                    }
                }

                //connect to the RSS weather service
                m_weatherLayer.Connect();
            }
            else
            {
                //disconnect from the service
                m_weatherLayer.Disconnect();

                //delete the layer from the globe
                m_scene.DeleteLayer(m_weatherLayer);

                //dispose the layer
                m_weatherLayer.Dispose();
                m_weatherLayer = null;
            }

            //set the connectionflag
            m_bConnected = !m_bConnected;
        }

        /// <summary>
        /// set the state of the button (acts like a check button):
        /// checked while connected to the weather service.
        /// </summary>
        public override bool Checked
        {
            get
            {
                return m_bConnected;
            }
        }
        #endregion
    }
}
using System; using System.Collections.Generic; using System.Text; using IL2CPU.Debug.Symbols; using Microsoft.VisualStudio; using Microsoft.VisualStudio.Debugger.Interop; //using Dapper; //using SQLinq.Dapper; //using SQLinq; using FIELD_INFO = IL2CPU.Debug.Symbols.FIELD_INFO; namespace Cosmos.VS.DebugEngine.AD7.Impl { // An implementation of IDebugProperty2 // This interface represents a stack frame property, a program document property, or some other property. // The property is usually the result of an expression evaluation. // // The sample engine only supports locals and parameters for functions that have symbols loaded. class AD7Property : IDebugProperty2 { private DebugLocalInfo m_variableInformation; private AD7Process mProcess; private AD7StackFrame mStackFrame; private LOCAL_ARGUMENT_INFO mDebugInfo; const uint mArrayLengthOffset = 8; const uint mArrayFirstElementOffset = 16; private const string NULL = "null"; protected int OFFSET => mDebugInfo.OFFSET; public AD7Property(DebugLocalInfo localInfo, AD7Process process, AD7StackFrame stackFrame) { m_variableInformation = localInfo; mProcess = process; mStackFrame = stackFrame; if (localInfo.IsLocal) { mDebugInfo = mStackFrame.mLocalInfos[m_variableInformation.Index]; } else if (localInfo.IsReference) { mDebugInfo = new LOCAL_ARGUMENT_INFO() { TYPENAME = localInfo.Type, NAME = localInfo.Name, OFFSET = localInfo.Offset }; } else { mDebugInfo = mStackFrame.mArgumentInfos[m_variableInformation.Index]; } } public void ReadData<T>(ref DEBUG_PROPERTY_INFO propertyInfo, Func<byte[], int, T> ByteToTypeAction) { byte[] xData; if (m_variableInformation.IsReference) { xData = mProcess.mDbgConnector.GetMemoryData(m_variableInformation.Pointer, (uint)System.Runtime.InteropServices.Marshal.SizeOf(typeof(T))); if (xData == null) { propertyInfo.bstrValue = String.Format("Error! 
Memory data received was null!"); return; } var xTypedIntValue = ByteToTypeAction(xData, 0); propertyInfo.bstrValue = String.Format("{0}", xTypedIntValue); } else { xData = mProcess.mDbgConnector.GetStackData(OFFSET, (uint)System.Runtime.InteropServices.Marshal.SizeOf(typeof(T))); if (xData == null) { propertyInfo.bstrValue = String.Format("Error! Stack data received was null!"); return; } var xTypedIntValue = ByteToTypeAction(xData, 0); propertyInfo.bstrValue = String.Format("{0}", xTypedIntValue); } } public void ReadDataArray<T>(ref DEBUG_PROPERTY_INFO propertyInfo, string typeAsString) { byte[] xData; // Get handle xData = mProcess.mDbgConnector.GetStackData(OFFSET, 4); // Get actual pointer xData = mProcess.mDbgConnector.GetMemoryData(BitConverter.ToUInt32(xData, 0), 4); if (xData == null) { propertyInfo.bstrValue = String.Format("Error! Stack data received was null!"); } else { uint xPointer = BitConverter.ToUInt32(xData, 0); if (xPointer == 0) { propertyInfo.bstrValue = NULL; } else { xData = mProcess.mDbgConnector.GetMemoryData(xPointer + mArrayLengthOffset, 4, 4); if (xData == null) { propertyInfo.bstrValue = String.Format("Error! 
Memory data received was null!"); } else { uint xDataLength = BitConverter.ToUInt32(xData, 0); bool xIsTooLong = xDataLength > 512; if (xIsTooLong) { xDataLength = 512; } if (xDataLength > 0) { if (m_variableInformation.Children.Count == 0) { for (int i = 0; i < xDataLength; i++) { var inf = new DebugLocalInfo(); inf.IsReference = true; inf.Type = typeof(T).FullName; inf.Offset = (int)(mArrayFirstElementOffset + (System.Runtime.InteropServices.Marshal.SizeOf(typeof(T)) * i)); inf.Pointer = (uint)(xPointer + mArrayFirstElementOffset + (System.Runtime.InteropServices.Marshal.SizeOf(typeof(T)) * i)); inf.Name = "[" + i.ToString() + "]"; m_variableInformation.Children.Add(new AD7Property(inf, mProcess, mStackFrame)); } } } propertyInfo.bstrValue = String.Format(typeAsString + "[{0}] at 0x{1} ", xDataLength, xPointer.ToString("X")); } } } } // Construct a DEBUG_PROPERTY_INFO representing this local or parameter. public DEBUG_PROPERTY_INFO ConstructDebugPropertyInfo(enum_DEBUGPROP_INFO_FLAGS dwFields) { var propertyInfo = new DEBUG_PROPERTY_INFO(); try { if (dwFields.HasFlag(enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_FULLNAME)) { propertyInfo.bstrFullName = m_variableInformation.Name; propertyInfo.dwFields |= enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_FULLNAME; } if (dwFields.HasFlag(enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_NAME)) { propertyInfo.bstrName = m_variableInformation.Name; propertyInfo.dwFields |= enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_NAME; } if (dwFields.HasFlag(enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_TYPE)) { propertyInfo.bstrType = mDebugInfo.TYPENAME; propertyInfo.dwFields |= enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_TYPE; } if (dwFields.HasFlag(enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_VALUE)) { byte[] xData; #region string if (mDebugInfo.TYPENAME == typeof(string).FullName) { const uint xStringLengthOffset = 12; const uint xStringFirstCharOffset = 16; // Get handle xData = mProcess.mDbgConnector.GetStackData(OFFSET, 4); // Get actual pointer xData = 
mProcess.mDbgConnector.GetMemoryData(BitConverter.ToUInt32(xData, 0), 4); if (xData == null) { propertyInfo.bstrValue = String.Format("Error! Stack data received was null!"); } else { uint xStrPointer = BitConverter.ToUInt32(xData, 0); if (xStrPointer == 0) { propertyInfo.bstrValue = NULL; } else { xData = mProcess.mDbgConnector.GetMemoryData(xStrPointer + xStringLengthOffset, 4, 4); if (xData == null) { propertyInfo.bstrValue = String.Format("Error! Memory data received was null!"); } else { uint xStringLength = BitConverter.ToUInt32(xData, 0); propertyInfo.bstrValue = "String of length: " + xStringLength; if (xStringLength > 100) { propertyInfo.bstrValue = "For now, strings larger than 100 chars are not supported.."; } else if (xStringLength == 0) { propertyInfo.bstrValue = "\"\""; } else { xData = mProcess.mDbgConnector.GetMemoryData(xStrPointer + xStringFirstCharOffset, xStringLength * 2, 2); if (xData == null) { propertyInfo.bstrValue = String.Format("Error! Memory data received was null!"); } else { propertyInfo.bstrValue = "\"" + Encoding.Unicode.GetString(xData) + "\""; } } } } } } #endregion #warning TODO: String[] #region byte // Byte else if (mDebugInfo.TYPENAME == typeof(byte).FullName) { ReadData<byte>(ref propertyInfo, new Func<byte[], int, byte>(delegate(byte[] barr, int ind) { return barr[ind]; })); } else if (mDebugInfo.TYPENAME == typeof(byte[]).FullName) { ReadDataArray<byte>(ref propertyInfo, "byte"); } #endregion #region sbyte // SByte else if (mDebugInfo.TYPENAME == typeof(sbyte).FullName) { ReadData<sbyte>(ref propertyInfo, new Func<byte[], int, sbyte>(delegate(byte[] barr, int ind) { return unchecked((sbyte)barr[ind]); })); } else if (mDebugInfo.TYPENAME == typeof(sbyte[]).FullName) { ReadDataArray<sbyte>(ref propertyInfo, "sbyte"); } #endregion #region char else if (mDebugInfo.TYPENAME == typeof(char).FullName) { xData = mProcess.mDbgConnector.GetStackData(OFFSET, 2); if (xData == null) { propertyInfo.bstrValue = String.Format("Error! 
Stack data received was null!"); } else { var xTypedCharValue = BitConverter.ToChar(xData, 0); propertyInfo.bstrValue = String.Format("{0} '{1}'", (ushort)xTypedCharValue, xTypedCharValue); } } else if (mDebugInfo.TYPENAME == typeof(char[]).FullName) { // Get handle xData = mProcess.mDbgConnector.GetStackData(OFFSET, 4); // Get actual pointer xData = mProcess.mDbgConnector.GetMemoryData(BitConverter.ToUInt32(xData, 0), 4); if (xData == null) { propertyInfo.bstrValue = String.Format("Error! Stack data received was null!"); } else { uint xArrayPointer = BitConverter.ToUInt32(xData, 0); if (xArrayPointer == 0) { propertyInfo.bstrValue = NULL; } else { xData = mProcess.mDbgConnector.GetMemoryData(xArrayPointer + mArrayLengthOffset, 4, 4); if (xData == null) { propertyInfo.bstrValue = String.Format("Error! Memory data received was null!"); } else { uint xDataLength = BitConverter.ToUInt32(xData, 0); bool xIsTooLong = xDataLength > 512; var xSB = new StringBuilder(); xSB.AppendFormat("Char[{0}] at 0x{1} {{ ", xDataLength, xArrayPointer.ToString("X")); if (xIsTooLong) { xDataLength = 512; } if (xDataLength > 0) { xData = mProcess.mDbgConnector.GetMemoryData(xArrayPointer + mArrayFirstElementOffset, xDataLength * 2); if (xData == null) { xSB.Append(String.Format("Error! 
Memory data received was null!")); } else { bool first = true; for (int i = 0; (i / 2) < xDataLength; i += 2) { if (!first) { xSB.Append(", "); } char c = BitConverter.ToChar(xData, i); xSB.Append('\''); if (c == '\0') { xSB.Append("\\0"); } else { xSB.Append(c); } xSB.Append('\''); first = false; } } } if (xIsTooLong) { xSB.Append(", .."); } xSB.Append(" }"); propertyInfo.bstrValue = xSB.ToString(); } } } } #endregion #region short // Short else if (mDebugInfo.TYPENAME == typeof(short).FullName) { ReadData<short>(ref propertyInfo, new Func<byte[], int, short>(BitConverter.ToInt16)); } else if (mDebugInfo.TYPENAME == typeof(short[]).FullName) { ReadDataArray<short>(ref propertyInfo, "short"); } #endregion #region ushort // UShort else if (mDebugInfo.TYPENAME == typeof(ushort).FullName) { ReadData<ushort>(ref propertyInfo, new Func<byte[], int, ushort>(BitConverter.ToUInt16)); } else if (mDebugInfo.TYPENAME == typeof(ushort[]).FullName) { ReadDataArray<ushort>(ref propertyInfo, "ushort"); } #endregion #region int // Int32 else if (mDebugInfo.TYPENAME == typeof(int).FullName) { ReadData<int>(ref propertyInfo, new Func<byte[], int, int>(BitConverter.ToInt32)); } else if (mDebugInfo.TYPENAME == typeof(int[]).FullName) { ReadDataArray<int>(ref propertyInfo, "int"); } #endregion #region uint // UInt32 else if (mDebugInfo.TYPENAME == typeof(uint).FullName) { ReadData<uint>(ref propertyInfo, new Func<byte[], int, uint>(BitConverter.ToUInt32)); } else if (mDebugInfo.TYPENAME == typeof(uint[]).FullName) { ReadDataArray<uint>(ref propertyInfo, "uint"); } #endregion #region long // Long else if (mDebugInfo.TYPENAME == typeof(long).FullName) { ReadData<long>(ref propertyInfo, new Func<byte[], int, long>(BitConverter.ToInt64)); } else if (mDebugInfo.TYPENAME == typeof(long[]).FullName) { ReadDataArray<long>(ref propertyInfo, "long"); } #endregion #region ulong // ULong else if (mDebugInfo.TYPENAME == typeof(ulong).FullName) { ReadData<ulong>(ref propertyInfo, new Func<byte[], 
int, ulong>(BitConverter.ToUInt64)); } else if (mDebugInfo.TYPENAME == typeof(ulong[]).FullName) { ReadDataArray<ulong>(ref propertyInfo, "ulong"); } #endregion #region float // Float else if (mDebugInfo.TYPENAME == typeof(float).FullName) { ReadData<float>(ref propertyInfo, new Func<byte[], int, float>(BitConverter.ToSingle)); } else if (mDebugInfo.TYPENAME == typeof(float[]).FullName) { ReadDataArray<float>(ref propertyInfo, "float"); } #endregion #region double // Double else if (mDebugInfo.TYPENAME == typeof(double).FullName) { ReadData<double>(ref propertyInfo, new Func<byte[], int, double>(BitConverter.ToDouble)); } else if (mDebugInfo.TYPENAME == typeof(double[]).FullName) { ReadDataArray<double>(ref propertyInfo, "double"); } #endregion #region bool // Bool else if (mDebugInfo.TYPENAME == typeof(bool).FullName) { ReadData<bool>(ref propertyInfo, new Func<byte[], int, bool>(BitConverter.ToBoolean)); } else if (mDebugInfo.TYPENAME == typeof(bool[]).FullName) { ReadDataArray<bool>(ref propertyInfo, "bool"); } #endregion else { if (m_variableInformation.IsReference) { xData = mProcess.mDbgConnector.GetMemoryData(m_variableInformation.Pointer, 4, 4); } else { xData = mProcess.mDbgConnector.GetStackData(OFFSET, 4); } if (xData == null) { propertyInfo.bstrValue = String.Format("Error! 
Stack data received was null!"); } else { var xPointer = BitConverter.ToUInt32(xData, 0); if (xPointer == 0) { propertyInfo.bstrValue = NULL; } else { try { var mp = mProcess.mDebugInfoDb.GetFieldMap(mDebugInfo.TYPENAME); foreach (string str in mp.FieldNames) { FIELD_INFO xFieldInfo; xFieldInfo = mProcess.mDebugInfoDb.GetFieldInfoByName(str); var inf = new DebugLocalInfo(); inf.IsReference = true; inf.Type = xFieldInfo.TYPE; inf.Offset = xFieldInfo.OFFSET; inf.Pointer = (uint)(xPointer + xFieldInfo.OFFSET + 12); inf.Name = GetFieldName(xFieldInfo); m_variableInformation.Children.Add(new AD7Property(inf, mProcess, mStackFrame)); } propertyInfo.bstrValue = String.Format("{0} (0x{1})", xPointer, xPointer.ToString("X").ToUpper()); } catch (Exception ex) { if (ex.GetType().Name == "SQLiteException") { //Ignore but warn user propertyInfo.bstrValue = "SQLiteException. Could not get type information for " + mDebugInfo.TYPENAME; } else { throw new Exception("Unexpected error in AD7Property.cs:459", ex); } } } } } propertyInfo.dwFields |= enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_VALUE; } if (dwFields.HasFlag(enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_ATTRIB)) { // The sample does not support writing of values displayed in the debugger, so mark them all as read-only. propertyInfo.dwAttrib = enum_DBG_ATTRIB_FLAGS.DBG_ATTRIB_VALUE_READONLY; if (m_variableInformation.Children.Count > 0) { propertyInfo.dwAttrib |= enum_DBG_ATTRIB_FLAGS.DBG_ATTRIB_OBJ_IS_EXPANDABLE; } } propertyInfo.pProperty = (IDebugProperty2)this; propertyInfo.dwFields |= (enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_PROP); // If the debugger has asked for the property, or the property has children (meaning it is a pointer in the sample) // then set the pProperty field so the debugger can call back when the children are enumerated. 
//if (((dwFields & (uint)enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_PROP) != 0) //|| (this.m_variableInformation.child != null)) //{ // propertyInfo.pProperty = (IDebugProperty2)this; // propertyInfo.dwFields |= (enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_PROP); //} } catch { } return propertyInfo; } private static string GetFieldName(FIELD_INFO fInf) { string s = fInf.NAME; int i = s.LastIndexOf('.'); if (i > 0) { s = s.Substring(i + 1, s.Length - i - 1); return s; } return s; } #region IDebugProperty2 Members // Enumerates the children of a property. This provides support for dereferencing pointers, displaying members of an array, or fields of a class or struct. // The sample debugger only supports pointer dereferencing as children. This means there is only ever one child. public int EnumChildren(enum_DEBUGPROP_INFO_FLAGS dwFields, uint dwRadix, ref System.Guid guidFilter, enum_DBG_ATTRIB_FLAGS dwAttribFilter, string pszNameFilter, uint dwTimeout, out IEnumDebugPropertyInfo2 ppEnum) { ppEnum = null; if (m_variableInformation.Children.Count > 0) { var infs = new List<DEBUG_PROPERTY_INFO>(); foreach (AD7Property dp in m_variableInformation.Children) { infs.Add(dp.ConstructDebugPropertyInfo(dwFields)); } ppEnum = new AD7PropertyEnum(infs.ToArray()); return VSConstants.S_OK; } //if (this.m_variableInformation.child != null) //{ // DEBUG_PROPERTY_INFO[] properties = new DEBUG_PROPERTY_INFO[1]; // properties[0] = (new AD7Property(this.m_variableInformation.child)).ConstructDebugPropertyInfo(dwFields); // ppEnum = new AD7PropertyEnum(properties); // return VSConstants.S_OK; //} return VSConstants.S_FALSE; } // Returns the property that describes the most-derived property of a property // This is called to support object oriented languages. It allows the debug engine to return an IDebugProperty2 for the most-derived // object in a hierarchy. This engine does not support this. 
// Returns the property that describes the most-derived property of a property.
// This is called to support object oriented languages. It allows the debug engine to return an IDebugProperty2 for the most-derived
// object in a hierarchy. This engine does not support this.
public int GetDerivedMostProperty(out IDebugProperty2 ppDerivedMost)
{
    throw new Exception("The method or operation is not implemented.");
}

// This method exists for the purpose of retrieving information that does not lend itself to being retrieved by calling the
// IDebugProperty2::GetPropertyInfo method. This includes information about custom viewers, managed type slots and other information.
// The sample engine does not support this.
public int GetExtendedInfo(ref System.Guid guidExtendedInfo, out object pExtendedInfo)
{
    throw new Exception("The method or operation is not implemented.");
}

// Returns the memory bytes for a property value. Not supported by this engine.
public int GetMemoryBytes(out IDebugMemoryBytes2 ppMemoryBytes)
{
    throw new Exception("The method or operation is not implemented.");
}

// Returns the memory context for a property value. Not supported by this engine.
public int GetMemoryContext(out IDebugMemoryContext2 ppMemory)
{
    throw new Exception("The method or operation is not implemented.");
}

// Returns the parent of a property.
// The sample engine does not support obtaining the parent of properties.
public int GetParent(out IDebugProperty2 ppParent)
{
    throw new Exception("The method or operation is not implemented.");
}

// Fills in a DEBUG_PROPERTY_INFO structure that describes a property.
// Delegates to ConstructDebugPropertyInfo, which builds the complete structure.
public int GetPropertyInfo(enum_DEBUGPROP_INFO_FLAGS dwFields, uint dwRadix, uint dwTimeout, IDebugReference2[] rgpArgs, uint dwArgCount, DEBUG_PROPERTY_INFO[] pPropertyInfo)
{
    // NOTE(review): the original assigned `pPropertyInfo[0] = new DEBUG_PROPERTY_INFO();`
    // and then immediately overwrote it with the constructed value, and also set the
    // by-value parameter `rgpArgs = null;` which can never be observed by the caller.
    // Both dead stores have been removed; observable behavior is unchanged.
    pPropertyInfo[0] = ConstructDebugPropertyInfo(dwFields);
    return VSConstants.S_OK;
}

// Return an IDebugReference2 for this property. An IDebugReference2 can be thought of as a type and an address.
// Not supported by this engine.
public int GetReference(out IDebugReference2 ppReference)
{
    throw new Exception("The method or operation is not implemented.");
}

// Returns the size, in bytes, of the property value.
// Returns the size, in bytes, of the property value. Not supported: always throws.
public int GetSize(out uint pdwSize)
{
    throw new Exception("The method or operation is not implemented.");
}

// The debugger will call this when the user tries to edit the property's values
// the sample has set the read-only flag on its properties, so this should not be called.
public int SetValueAsReference(IDebugReference2[] rgpArgs, uint dwArgCount, IDebugReference2 pValue, uint dwTimeout)
{
    throw new Exception("The method or operation is not implemented.");
}

// The debugger will call this when the user tries to edit the property's values in one of the debugger windows.
// the sample has set the read-only flag on its properties, so this should not be called.
public int SetValueAsString(string pszValue, uint dwRadix, uint dwTimeout)
{
    throw new Exception("The method or operation is not implemented.");
}

#endregion
} // end class (AD7Property)
} // end namespace
using System;
using System.IO;
using System.Text;

using NUnit.Framework;

using Org.BouncyCastle.Bcpg.Sig;
using Org.BouncyCastle.Utilities.Date;
using Org.BouncyCastle.Utilities.Encoders;
using Org.BouncyCastle.Utilities.IO;
using Org.BouncyCastle.Utilities.Test;

namespace Org.BouncyCastle.Bcpg.OpenPgp.Tests
{
    /// <summary>
    /// Exercises OpenPGP signature generation and verification: V3/V4 binary and
    /// canonical-text document signatures, key/user-id certifications, hashed and
    /// unhashed signature subpackets, and KeyFlags subpacket decoding.
    /// </summary>
    [TestFixture]
    public class PgpSignatureTest
        : SimpleTest
    {
        // null is the only legal constant value for a non-string reference type.
        private const int[] NO_PREFERENCES = null;

        private static readonly int[] PREFERRED_SYMMETRIC_ALGORITHMS
            = new int[] { (int)SymmetricKeyAlgorithmTag.Aes128, (int)SymmetricKeyAlgorithmTag.TripleDes };
        private static readonly int[] PREFERRED_HASH_ALGORITHMS
            = new int[] { (int)HashAlgorithmTag.Sha1, (int)HashAlgorithmTag.Sha256 };
        private static readonly int[] PREFERRED_COMPRESSION_ALGORITHMS
            = new int[] { (int)CompressionAlgorithmTag.ZLib };

        private const int TEST_EXPIRATION_TIME = 10000;
        private const string TEST_USER_ID = "test user id";
        private static readonly byte[] TEST_DATA = Encoding.ASCII.GetBytes("hello world!\nhello world!\n");
        private static readonly byte[] TEST_DATA_WITH_CRLF = Encoding.ASCII.GetBytes("hello world!\r\nhello world!\r\n");

        // Passphrase-protected DSA secret key ring used for the DSA tests.
        private static readonly byte[] dsaKeyRing = Base64.Decode(
            "lQHhBD9HBzURBACzkxRCVGJg5+Ld9DU4Xpnd4LCKgMq7YOY7Gi0EgK92gbaa6+zQ"
            + "oQFqz1tt3QUmpz3YVkm/zLESBBtC1ACIXGggUdFMUr5I87+1Cb6vzefAtGt8N5VV"
            + "1F/MXv1gJz4Bu6HyxL/ncfe71jsNhav0i4yAjf2etWFj53zK6R+Ojg5H6wCgpL9/"
            + "tXVfGP8SqFvyrN/437MlFSUEAIN3V6j/MUllyrZglrtr2+RWIwRrG/ACmrF6hTug"
            + "Ol4cQxaDYNcntXbhlTlJs9MxjTH3xxzylyirCyq7HzGJxZzSt6FTeh1DFYzhJ7Qu"
            + "YR1xrSdA6Y0mUv0ixD5A4nPHjupQ5QCqHGeRfFD/oHzD4zqBnJp/BJ3LvQ66bERJ"
            + "mKl5A/4uj3HoVxpb0vvyENfRqKMmGBISycY4MoH5uWfb23FffsT9r9KL6nJ4syLz"
            + "aRR0gvcbcjkc9Z3epI7gr3jTrb4d8WPxsDbT/W1tv9bG/EHawomLcihtuUU68Uej"
            + "6/wZot1XJqu2nQlku57+M/V2X1y26VKsipolPfja4uyBOOyvbP4DAwIDIBTxWjkC"
            + "GGAWQO2jy9CTvLHJEoTO7moHrp1FxOVpQ8iJHyRqZzLllO26OzgohbiPYz8u9qCu"
            + "lZ9Xn7QzRXJpYyBFY2hpZG5hIChEU0EgVGVzdCBLZXkpIDxlcmljQGJvdW5jeWNh"
            + "c3RsZS5vcmc+iFkEExECABkFAj9HBzUECwcDAgMVAgMDFgIBAh4BAheAAAoJEM0j"
            + "9enEyjRDAlwAnjTjjt57NKIgyym7OTCwzIU3xgFpAJ0VO5m5PfQKmGJRhaewLSZD"
            + "4nXkHg==");

        private static readonly char[] dsaPass = "hello world".ToCharArray();

        // Passphrase-protected RSA secret key ring used for the RSA tests.
        private static readonly byte[] rsaKeyRing = Base64.Decode(
            "lQIEBEBXUNMBBADScQczBibewnbCzCswc/9ut8R0fwlltBRxMW0NMdKJY2LF"
            + "7k2COeLOCIU95loJGV6ulbpDCXEO2Jyq8/qGw1qD3SCZNXxKs3GS8Iyh9Uwd"
            + "VL07nMMYl5NiQRsFB7wOb86+94tYWgvikVA5BRP5y3+O3GItnXnpWSJyREUy"
            + "6WI2QQAGKf4JAwIVmnRs4jtTX2DD05zy2mepEQ8bsqVAKIx7lEwvMVNcvg4Y"
            + "8vFLh9Mf/uNciwL4Se/ehfKQ/AT0JmBZduYMqRU2zhiBmxj4cXUQ0s36ysj7"
            + "fyDngGocDnM3cwPxaTF1ZRBQHSLewP7dqE7M73usFSz8vwD/0xNOHFRLKbsO"
            + "RqDlLA1Cg2Yd0wWPS0o7+qqk9ndqrjjSwMM8ftnzFGjShAdg4Ca7fFkcNePP"
            + "/rrwIH472FuRb7RbWzwXA4+4ZBdl8D4An0dwtfvAO+jCZSrLjmSpxEOveJxY"
            + "GduyR4IA4lemvAG51YHTHd4NXheuEqsIkn1yarwaaj47lFPnxNOElOREMdZb"
            + "nkWQb1jfgqO24imEZgrLMkK9bJfoDnlF4k6r6hZOp5FSFvc5kJB4cVo1QJl4"
            + "pwCSdoU6luwCggrlZhDnkGCSuQUUW45NE7Br22NGqn4/gHs0KCsWbAezApGj"
            + "qYUCfX1bcpPzUMzUlBaD5rz2vPeO58CDtBJ0ZXN0ZXIgPHRlc3RAdGVzdD6I"
            + "sgQTAQIAHAUCQFdQ0wIbAwQLBwMCAxUCAwMWAgECHgECF4AACgkQs8JyyQfH"
            + "97I1QgP8Cd+35maM2cbWV9iVRO+c5456KDi3oIUSNdPf1NQrCAtJqEUhmMSt"
            + "QbdiaFEkPrORISI/2htXruYn0aIpkCfbUheHOu0sef7s6pHmI2kOQPzR+C/j"
            + "8D9QvWsPOOso81KU2axUY8zIer64Uzqc4szMIlLw06c8vea27RfgjBpSCryw"
            + "AgAA");

        private static readonly char[] rsaPass = "2002 Buffalo Sabres".ToCharArray();

        // A V4 subkey-binding signature whose hashed/unhashed subpacket areas are empty.
        private static readonly byte[] nullPacketsSubKeyBinding = Base64.Decode(
            "iDYEGBECAAAAACp9AJ9PlJCrFpi+INwG7z61eku2Wg1HaQCgl33X5Egj+Kf7F9CXIWj2iFCvQDo=");

        public override void PerformTest()
        {
            //
            // RSA tests
            //
            PgpSecretKeyRing pgpPriv = new PgpSecretKeyRing(rsaKeyRing);
            PgpSecretKey secretKey = pgpPriv.GetSecretKey();
            PgpPrivateKey pgpPrivKey = secretKey.ExtractPrivateKey(rsaPass);

            // Deliberately mismatched algorithm tag: signing must fail with PgpException.
            try
            {
                doTestSig(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey);
                Fail("RSA wrong key test failed.");
            }
            catch (PgpException)
            {
                // expected
            }

            try
            {
                doTestSigV3(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey);
                Fail("RSA V3 wrong key test failed.");
            }
            catch (PgpException)
            {
                // expected
            }

            //
            // certifications
            //
            PgpSignatureGenerator sGen = new PgpSignatureGenerator(PublicKeyAlgorithmTag.RsaGeneral, HashAlgorithmTag.Sha1);
            sGen.InitSign(PgpSignature.KeyRevocation, pgpPrivKey);

            PgpSignature sig = sGen.GenerateCertification(secretKey.PublicKey);
            sig.InitVerify(secretKey.PublicKey);

            if (!sig.VerifyCertification(secretKey.PublicKey))
            {
                Fail("revocation verification failed.");
            }

            PgpSecretKeyRing pgpDSAPriv = new PgpSecretKeyRing(dsaKeyRing);
            PgpSecretKey secretDSAKey = pgpDSAPriv.GetSecretKey();
            PgpPrivateKey pgpPrivDSAKey = secretDSAKey.ExtractPrivateKey(dsaPass);

            sGen = new PgpSignatureGenerator(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1);
            sGen.InitSign(PgpSignature.SubkeyBinding, pgpPrivDSAKey);

            PgpSignatureSubpacketGenerator unhashedGen = new PgpSignatureSubpacketGenerator();
            PgpSignatureSubpacketGenerator hashedGen = new PgpSignatureSubpacketGenerator();

            hashedGen.SetSignatureExpirationTime(false, TEST_EXPIRATION_TIME);
            hashedGen.SetSignerUserId(true, TEST_USER_ID);
            hashedGen.SetPreferredCompressionAlgorithms(false, PREFERRED_COMPRESSION_ALGORITHMS);
            hashedGen.SetPreferredHashAlgorithms(false, PREFERRED_HASH_ALGORITHMS);
            hashedGen.SetPreferredSymmetricAlgorithms(false, PREFERRED_SYMMETRIC_ALGORITHMS);

            sGen.SetHashedSubpackets(hashedGen.Generate());
            sGen.SetUnhashedSubpackets(unhashedGen.Generate());

            sig = sGen.GenerateCertification(secretDSAKey.PublicKey, secretKey.PublicKey);

            // Round-trip the signature through its encoding before verifying.
            byte[] sigBytes = sig.GetEncoded();
            PgpObjectFactory f = new PgpObjectFactory(sigBytes);
            sig = ((PgpSignatureList) f.NextPgpObject())[0];

            sig.InitVerify(secretDSAKey.PublicKey);

            if (!sig.VerifyCertification(secretDSAKey.PublicKey, secretKey.PublicKey))
            {
                Fail("subkey binding verification failed.");
            }

            PgpSignatureSubpacketVector hashedPcks = sig.GetHashedSubPackets();
            PgpSignatureSubpacketVector unhashedPcks = sig.GetUnhashedSubPackets();

            // 5 explicitly-set packets + the implicit creation-time packet.
            if (hashedPcks.Count != 6)
            {
                Fail("wrong number of hashed packets found.");
            }

            if (unhashedPcks.Count != 1)
            {
                Fail("wrong number of unhashed packets found.");
            }

            if (!hashedPcks.GetSignerUserId().Equals(TEST_USER_ID))
            {
                Fail("test userid not matching");
            }

            if (hashedPcks.GetSignatureExpirationTime() != TEST_EXPIRATION_TIME)
            {
                Fail("test signature expiration time not matching");
            }

            if (unhashedPcks.GetIssuerKeyId() != secretDSAKey.KeyId)
            {
                Fail("wrong issuer key ID found in certification");
            }

            int[] prefAlgs = hashedPcks.GetPreferredCompressionAlgorithms();
            preferredAlgorithmCheck("compression", PREFERRED_COMPRESSION_ALGORITHMS, prefAlgs);

            prefAlgs = hashedPcks.GetPreferredHashAlgorithms();
            preferredAlgorithmCheck("hash", PREFERRED_HASH_ALGORITHMS, prefAlgs);

            prefAlgs = hashedPcks.GetPreferredSymmetricAlgorithms();
            preferredAlgorithmCheck("symmetric", PREFERRED_SYMMETRIC_ALGORITHMS, prefAlgs);

            SignatureSubpacketTag[] criticalHashed = hashedPcks.GetCriticalTags();

            if (criticalHashed.Length != 1)
            {
                Fail("wrong number of critical packets found.");
            }

            if (criticalHashed[0] != SignatureSubpacketTag.SignerUserId)
            {
                Fail("wrong critical packet found in tag list.");
            }

            //
            // no packets passed
            //
            sGen = new PgpSignatureGenerator(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1);
            sGen.InitSign(PgpSignature.SubkeyBinding, pgpPrivDSAKey);
            sGen.SetHashedSubpackets(null);
            sGen.SetUnhashedSubpackets(null);

            sig = sGen.GenerateCertification(TEST_USER_ID, secretKey.PublicKey);
            sig.InitVerify(secretDSAKey.PublicKey);

            if (!sig.VerifyCertification(TEST_USER_ID, secretKey.PublicKey))
            {
                Fail("subkey binding verification failed.");
            }

            hashedPcks = sig.GetHashedSubPackets();

            if (hashedPcks.Count != 1)
            {
                Fail("found wrong number of hashed packets");
            }

            unhashedPcks = sig.GetUnhashedSubPackets();

            if (unhashedPcks.Count != 1)
            {
                Fail("found wrong number of unhashed packets");
            }

            // Verifying a user-id certification against a lone key must be rejected.
            try
            {
                sig.VerifyCertification(secretKey.PublicKey);
                Fail("failed to detect non-key signature.");
            }
            catch (InvalidOperationException)
            {
                // expected
            }

            //
            // override hash packets
            //
            sGen = new PgpSignatureGenerator(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1);
            sGen.InitSign(PgpSignature.SubkeyBinding, pgpPrivDSAKey);

            hashedGen = new PgpSignatureSubpacketGenerator();
            DateTime creationTime = new DateTime(1973, 7, 27);
            hashedGen.SetSignatureCreationTime(false, creationTime);

            sGen.SetHashedSubpackets(hashedGen.Generate());
            sGen.SetUnhashedSubpackets(null);

            sig = sGen.GenerateCertification(TEST_USER_ID, secretKey.PublicKey);
            sig.InitVerify(secretDSAKey.PublicKey);

            if (!sig.VerifyCertification(TEST_USER_ID, secretKey.PublicKey))
            {
                Fail("subkey binding verification failed.");
            }

            hashedPcks = sig.GetHashedSubPackets();

            if (hashedPcks.Count != 1)
            {
                Fail("found wrong number of hashed packets in override test");
            }

            if (!hashedPcks.HasSubpacket(SignatureSubpacketTag.CreationTime))
            {
                Fail("hasSubpacket test for creation time failed");
            }

            DateTime sigCreationTime = hashedPcks.GetSignatureCreationTime();
            if (!sigCreationTime.Equals(creationTime))
            {
                Fail("creation of overridden date failed.");
            }

            prefAlgs = hashedPcks.GetPreferredCompressionAlgorithms();
            preferredAlgorithmCheck("compression", NO_PREFERENCES, prefAlgs);

            prefAlgs = hashedPcks.GetPreferredHashAlgorithms();
            preferredAlgorithmCheck("hash", NO_PREFERENCES, prefAlgs);

            prefAlgs = hashedPcks.GetPreferredSymmetricAlgorithms();
            preferredAlgorithmCheck("symmetric", NO_PREFERENCES, prefAlgs);

            if (hashedPcks.GetKeyExpirationTime() != 0)
            {
                Fail("unexpected key expiration time found");
            }

            if (hashedPcks.GetSignatureExpirationTime() != 0)
            {
                Fail("unexpected signature expiration time found");
            }

            if (hashedPcks.GetSignerUserId() != null)
            {
                Fail("unexpected signer user ID found");
            }

            criticalHashed = hashedPcks.GetCriticalTags();

            if (criticalHashed.Length != 0)
            {
                Fail("critical packets found when none expected");
            }

            unhashedPcks = sig.GetUnhashedSubPackets();

            if (unhashedPcks.Count != 1)
            {
                Fail("found wrong number of unhashed packets in override test");
            }

            //
            // general signatures
            //
            doTestSig(PublicKeyAlgorithmTag.RsaGeneral, HashAlgorithmTag.Sha256, secretKey.PublicKey, pgpPrivKey);
            doTestSig(PublicKeyAlgorithmTag.RsaGeneral, HashAlgorithmTag.Sha384, secretKey.PublicKey, pgpPrivKey);
            doTestSig(PublicKeyAlgorithmTag.RsaGeneral, HashAlgorithmTag.Sha512, secretKey.PublicKey, pgpPrivKey);
            doTestSigV3(PublicKeyAlgorithmTag.RsaGeneral, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey);
            doTestTextSig(PublicKeyAlgorithmTag.RsaGeneral, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey, TEST_DATA_WITH_CRLF, TEST_DATA_WITH_CRLF);
            doTestTextSig(PublicKeyAlgorithmTag.RsaGeneral, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey, TEST_DATA, TEST_DATA_WITH_CRLF);
            doTestTextSigV3(PublicKeyAlgorithmTag.RsaGeneral, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey, TEST_DATA_WITH_CRLF, TEST_DATA_WITH_CRLF);
            doTestTextSigV3(PublicKeyAlgorithmTag.RsaGeneral, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey, TEST_DATA, TEST_DATA_WITH_CRLF);

            //
            // DSA Tests
            //
            pgpPriv = new PgpSecretKeyRing(dsaKeyRing);
            secretKey = pgpPriv.GetSecretKey();
            pgpPrivKey = secretKey.ExtractPrivateKey(dsaPass);

            try
            {
                doTestSig(PublicKeyAlgorithmTag.RsaGeneral, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey);
                Fail("DSA wrong key test failed.");
            }
            catch (PgpException)
            {
                // expected
            }

            try
            {
                doTestSigV3(PublicKeyAlgorithmTag.RsaGeneral, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey);
                Fail("DSA V3 wrong key test failed.");
            }
            catch (PgpException)
            {
                // expected
            }

            doTestSig(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey);
            doTestSigV3(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey);
            doTestTextSig(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey, TEST_DATA_WITH_CRLF, TEST_DATA_WITH_CRLF);
            doTestTextSig(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey, TEST_DATA, TEST_DATA_WITH_CRLF);
            doTestTextSigV3(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey, TEST_DATA_WITH_CRLF, TEST_DATA_WITH_CRLF);
            doTestTextSigV3(PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1, secretKey.PublicKey, pgpPrivKey, TEST_DATA, TEST_DATA_WITH_CRLF);

            // special cases
            //
            doTestMissingSubpackets(nullPacketsSubKeyBinding);

            doTestMissingSubpackets(generateV3BinarySig(pgpPrivKey, PublicKeyAlgorithmTag.Dsa, HashAlgorithmTag.Sha1));

            // keyflags
            doTestKeyFlagsValues();
        }

        // Verifies the RFC 4880 KeyFlags bit values and short/padded encodings.
        private void doTestKeyFlagsValues()
        {
            checkValue(KeyFlags.CertifyOther, 0x01);
            checkValue(KeyFlags.SignData, 0x02);
            checkValue(KeyFlags.EncryptComms, 0x04);
            checkValue(KeyFlags.EncryptStorage, 0x08);
            checkValue(KeyFlags.Split, 0x10);
            checkValue(KeyFlags.Authentication, 0x20);
            checkValue(KeyFlags.Shared, 0x80);

            // yes this actually happens
            checkValue(new byte[] { 4, 0, 0, 0 }, 0x04);
            checkValue(new byte[] { 4, 0, 0 }, 0x04);
            checkValue(new byte[] { 4, 0 }, 0x04);
            checkValue(new byte[] { 4 }, 0x04);
        }

        private void checkValue(int flag, int val)
        {
            KeyFlags f = new KeyFlags(true, flag);
            if (f.Flags != val)
            {
                Fail("flag value mismatch");
            }
        }

        private void checkValue(byte[] flag, int val)
        {
            KeyFlags f = new KeyFlags(true, flag);
            if (f.Flags != val)
            {
                Fail("flag value mismatch");
            }
        }

        // Checks subpacket handling on a signature with empty subpacket areas.
        private void doTestMissingSubpackets(byte[] signature)
        {
            PgpObjectFactory f = new PgpObjectFactory(signature);
            object obj = f.NextPgpObject();

            // Skip forward (draining any literal data) until the signature list.
            while (!(obj is PgpSignatureList))
            {
                obj = f.NextPgpObject();
                if (obj is PgpLiteralData)
                {
                    Stream input = ((PgpLiteralData)obj).GetDataStream();
                    Streams.Drain(input);
                }
            }

            PgpSignature sig = ((PgpSignatureList)obj)[0];

            if (sig.Version > 3)
            {
                PgpSignatureSubpacketVector v = sig.GetHashedSubPackets();

                if (v.GetKeyExpirationTime() != 0)
                {
                    Fail("key expiration time not zero for missing subpackets");
                }

                if (!sig.HasSubpackets)
                {
                    Fail("HasSubpackets property was false with packets");
                }
            }
            else
            {
                if (sig.GetHashedSubPackets() != null)
                {
                    Fail("hashed sub packets found when none expected");
                }

                if (sig.GetUnhashedSubPackets() != null)
                {
                    Fail("unhashed sub packets found when none expected");
                }

                if (sig.HasSubpackets)
                {
                    Fail("HasSubpackets property was true with no packets");
                }
            }
        }

        // Asserts that the algorithm-preference subpacket matches `expected`
        // (or is absent when `expected` is null).
        private void preferredAlgorithmCheck(
            string type,
            int[] expected,
            int[] prefAlgs)
        {
            if (expected == null)
            {
                if (prefAlgs != null)
                {
                    Fail("preferences for " + type + " found when none expected");
                }
            }
            else
            {
                if (prefAlgs.Length != expected.Length)
                {
                    Fail("wrong number of preferred " + type + " algorithms found");
                }

                for (int i = 0; i != expected.Length; i++)
                {
                    if (expected[i] != prefAlgs[i])
                    {
                        // Bug fix: the original concatenated the array reference
                        // ("+ prefAlgs"), which prints "System.Int32[]" rather than
                        // the mismatching element value.
                        Fail("wrong algorithm found for " + type + ": expected " + expected[i] + " got " + prefAlgs[i]);
                    }
                }
            }
        }

        // Generates and verifies a V4 binary-document signature over TEST_DATA twice.
        private void doTestSig(
            PublicKeyAlgorithmTag encAlgorithm,
            HashAlgorithmTag hashAlgorithm,
            PgpPublicKey pubKey,
            PgpPrivateKey privKey)
        {
            MemoryStream bOut = new MemoryStream();
            MemoryStream testIn = new MemoryStream(TEST_DATA, false);

            PgpSignatureGenerator sGen = new PgpSignatureGenerator(encAlgorithm, hashAlgorithm);
            sGen.InitSign(PgpSignature.BinaryDocument, privKey);
            sGen.GenerateOnePassVersion(false).Encode(bOut);

            PgpLiteralDataGenerator lGen = new PgpLiteralDataGenerator();
            Stream lOut = lGen.Open(
                new UncloseableStream(bOut),
                PgpLiteralData.Binary,
                "_CONSOLE",
                TEST_DATA.Length * 2,
                DateTime.UtcNow);

            int ch;
            while ((ch = testIn.ReadByte()) >= 0)
            {
                lOut.WriteByte((byte)ch);
                sGen.Update((byte)ch);
            }

            lOut.Write(TEST_DATA, 0, TEST_DATA.Length);
            sGen.Update(TEST_DATA);

            lGen.Close();

            sGen.Generate().Encode(bOut);

            verifySignature(bOut.ToArray(), hashAlgorithm, pubKey, TEST_DATA);
        }

        // Generates and verifies a V4 canonical-text signature; `canonicalData` is
        // the CRLF-normalized form the verifier must hash.
        private void doTestTextSig(
            PublicKeyAlgorithmTag encAlgorithm,
            HashAlgorithmTag hashAlgorithm,
            PgpPublicKey pubKey,
            PgpPrivateKey privKey,
            byte[] data,
            byte[] canonicalData)
        {
            // Bug fix: the hash algorithm was hard-coded to Sha1, silently ignoring
            // the hashAlgorithm parameter. All existing callers pass Sha1, so this
            // change is behavior-preserving for current call sites.
            PgpSignatureGenerator sGen = new PgpSignatureGenerator(encAlgorithm, hashAlgorithm);
            MemoryStream bOut = new MemoryStream();
            MemoryStream testIn = new MemoryStream(data, false);
            DateTime creationTime = DateTime.UtcNow;

            sGen.InitSign(PgpSignature.CanonicalTextDocument, privKey);
            sGen.GenerateOnePassVersion(false).Encode(bOut);

            PgpLiteralDataGenerator lGen = new PgpLiteralDataGenerator();
            Stream lOut = lGen.Open(
                new UncloseableStream(bOut),
                PgpLiteralData.Text,
                "_CONSOLE",
                data.Length * 2,
                creationTime);

            int ch;
            while ((ch = testIn.ReadByte()) >= 0)
            {
                lOut.WriteByte((byte)ch);
                sGen.Update((byte)ch);
            }

            lOut.Write(data, 0, data.Length);
            sGen.Update(data);

            lGen.Close();

            PgpSignature sig = sGen.Generate();

            if (sig.CreationTime == DateTimeUtilities.UnixMsToDateTime(0))
            {
                Fail("creation time not set in v4 signature");
            }

            sig.Encode(bOut);

            verifySignature(bOut.ToArray(), hashAlgorithm, pubKey, canonicalData);
        }

        // Generates and verifies a V3 binary-document signature.
        private void doTestSigV3(
            PublicKeyAlgorithmTag encAlgorithm,
            HashAlgorithmTag hashAlgorithm,
            PgpPublicKey pubKey,
            PgpPrivateKey privKey)
        {
            byte[] bytes = generateV3BinarySig(privKey, encAlgorithm, hashAlgorithm);

            verifySignature(bytes, hashAlgorithm, pubKey, TEST_DATA);
        }

        // Builds an encoded one-pass V3 binary signature over TEST_DATA twice.
        private byte[] generateV3BinarySig(
            PgpPrivateKey privKey,
            PublicKeyAlgorithmTag encAlgorithm,
            HashAlgorithmTag hashAlgorithm)
        {
            MemoryStream bOut = new MemoryStream();
            MemoryStream testIn = new MemoryStream(TEST_DATA, false);

            PgpV3SignatureGenerator sGen = new PgpV3SignatureGenerator(encAlgorithm, hashAlgorithm);
            sGen.InitSign(PgpSignature.BinaryDocument, privKey);
            sGen.GenerateOnePassVersion(false).Encode(bOut);

            PgpLiteralDataGenerator lGen = new PgpLiteralDataGenerator();
            Stream lOut = lGen.Open(
                new UncloseableStream(bOut),
                PgpLiteralData.Binary,
                "_CONSOLE",
                TEST_DATA.Length * 2,
                DateTime.UtcNow);

            int ch;
            while ((ch = testIn.ReadByte()) >= 0)
            {
                lOut.WriteByte((byte)ch);
                sGen.Update((byte)ch);
            }

            lOut.Write(TEST_DATA, 0, TEST_DATA.Length);
            sGen.Update(TEST_DATA);

            lGen.Close();

            sGen.Generate().Encode(bOut);

            return bOut.ToArray();
        }

        // Generates and verifies a V3 canonical-text signature.
        private void doTestTextSigV3(
            PublicKeyAlgorithmTag encAlgorithm,
            HashAlgorithmTag hashAlgorithm,
            PgpPublicKey pubKey,
            PgpPrivateKey privKey,
            byte[] data,
            byte[] canonicalData)
        {
            // Bug fix: Sha1 was hard-coded here too; use the supplied parameter
            // (all current callers pass Sha1, so behavior is unchanged for them).
            PgpV3SignatureGenerator sGen = new PgpV3SignatureGenerator(encAlgorithm, hashAlgorithm);
            MemoryStream bOut = new MemoryStream();
            MemoryStream testIn = new MemoryStream(data, false);

            sGen.InitSign(PgpSignature.CanonicalTextDocument, privKey);
            sGen.GenerateOnePassVersion(false).Encode(bOut);

            PgpLiteralDataGenerator lGen = new PgpLiteralDataGenerator();
            Stream lOut = lGen.Open(
                new UncloseableStream(bOut),
                PgpLiteralData.Text,
                "_CONSOLE",
                data.Length * 2,
                DateTime.UtcNow);

            int ch;
            while ((ch = testIn.ReadByte()) >= 0)
            {
                lOut.WriteByte((byte)ch);
                sGen.Update((byte)ch);
            }

            lOut.Write(data, 0, data.Length);
            sGen.Update(data);

            lGen.Close();

            PgpSignature sig = sGen.Generate();

            if (sig.CreationTime == DateTimeUtilities.UnixMsToDateTime(0))
            {
                Fail("creation time not set in v3 signature");
            }

            sig.Encode(bOut);

            verifySignature(bOut.ToArray(), hashAlgorithm, pubKey, canonicalData);
        }

        // Decodes a one-pass signature message and verifies it both via the
        // one-pass path and directly against `original`.
        private void verifySignature(
            byte[] encodedSig,
            HashAlgorithmTag hashAlgorithm,
            PgpPublicKey pubKey,
            byte[] original)
        {
            PgpObjectFactory pgpFact = new PgpObjectFactory(encodedSig);
            PgpOnePassSignatureList p1 = (PgpOnePassSignatureList)pgpFact.NextPgpObject();
            PgpOnePassSignature ops = p1[0];
            PgpLiteralData p2 = (PgpLiteralData)pgpFact.NextPgpObject();
            Stream dIn = p2.GetInputStream();

            ops.InitVerify(pubKey);

            int ch;
            while ((ch = dIn.ReadByte()) >= 0)
            {
                ops.Update((byte)ch);
            }

            PgpSignatureList p3 = (PgpSignatureList)pgpFact.NextPgpObject();
            PgpSignature sig = p3[0];

            DateTime creationTime = sig.CreationTime;

            // Check creationTime is recent
            if (creationTime.CompareTo(DateTime.UtcNow) > 0
                || creationTime.CompareTo(DateTime.UtcNow.AddMinutes(-10)) < 0)
            {
                Fail("bad creation time in signature: " + creationTime);
            }

            if (sig.KeyId != pubKey.KeyId)
            {
                Fail("key id mismatch in signature");
            }

            if (!ops.Verify(sig))
            {
                Fail("Failed generated signature check - " + hashAlgorithm);
            }

            sig.InitVerify(pubKey);

            for (int i = 0; i != original.Length; i++)
            {
                sig.Update(original[i]);
            }

            sig.Update(original);

            if (!sig.Verify())
            {
                Fail("Failed generated signature check against original data");
            }
        }

        public override string Name
        {
            get { return "PGPSignatureTest"; }
        }

        public static void Main(
            string[] args)
        {
            RunTest(new PgpSignatureTest());
        }

        [Test]
        public void TestFunction()
        {
            string resultText = Perform().ToString();

            Assert.AreEqual(Name + ": Okay", resultText);
        }
    }
}
// // RangeHeaderValueTest.cs // // Authors: // Marek Safar <marek.safar@gmail.com> // // Copyright (C) 2011 Xamarin Inc (http://www.xamarin.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//
using System;
using System.Collections;
using System.Collections.Generic;
using System.Net.Http.Headers;
using System.Linq;

using Microsoft.VisualStudio.TestTools.UnitTesting;

namespace MonoTests.System.Net.Http.Headers
{
	// Unit tests for RangeHeaderValue: construction, equality, parsing and properties.
	[TestClass]
	public class RangeHeaderValueTest
	{
		[TestMethod]
		public void Ctor_InvalidArguments ()
		{
			// Both bounds missing.
			try {
				new RangeHeaderValue (null, null);
				Assert.Fail ("#1");
			} catch (ArgumentException) {
			}

			// Negative lower bound.
			try {
				new RangeHeaderValue (long.MinValue, null);
				Assert.Fail ("#2");
			} catch (ArgumentOutOfRangeException) {
			}

			// From greater than To.
			try {
				new RangeHeaderValue (10, 1);
				Assert.Fail ("#3");
			} catch (ArgumentOutOfRangeException) {
			}
		}

		[TestMethod]
		public void Equals ()
		{
			var open = new RangeHeaderValue (4, null);
			Assert.AreEqual (open, new RangeHeaderValue (4, null), "#1");
			Assert.AreNotEqual (open, new RangeHeaderValue (4, 5), "#2");
			Assert.AreNotEqual (open, new RangeHeaderValue (), "#3");

			var closed = new RangeHeaderValue (2, 4);
			Assert.AreEqual (closed, new RangeHeaderValue (2, 4), "#4");
			Assert.AreNotEqual (closed, new RangeHeaderValue (2, null), "#5");
			Assert.AreNotEqual (closed, new RangeHeaderValue (2, 3), "#6");
		}

		[TestMethod]
		public void Parse ()
		{
			// Simple closed range.
			var parsed = RangeHeaderValue.Parse ("bytes=2-40");
			Assert.AreEqual ("bytes", parsed.Unit, "#1");
			Assert.AreEqual (2, parsed.Ranges.First ().From, "#2");
			Assert.AreEqual (40, parsed.Ranges.First ().To, "#3");
			Assert.AreEqual ("bytes=2-40", parsed.ToString (), "#4");

			// Custom unit, whitespace, open-ended range.
			parsed = RangeHeaderValue.Parse ("d-dd = 2 - ");
			Assert.AreEqual ("d-dd", parsed.Unit, "#10");
			Assert.AreEqual (2, parsed.Ranges.First ().From, "#11");
			Assert.IsNull (parsed.Ranges.First ().To, "#12");
			Assert.AreEqual ("d-dd=2-", parsed.ToString (), "#13");

			// Multiple ranges including suffix ranges.
			parsed = RangeHeaderValue.Parse ("zz = - 6 , 5 - 9, -8");
			Assert.AreEqual ("zz", parsed.Unit, "#20");
			Assert.IsNull (parsed.Ranges.First ().From, "#21");
			Assert.AreEqual (6, parsed.Ranges.First ().To, "#22");
			Assert.AreEqual (5, parsed.Ranges.Skip (1).First ().From, "#21b");
			Assert.AreEqual (9, parsed.Ranges.Skip (1).First ().To, "#22b");
			Assert.AreEqual ("zz=-6, 5-9, -8", parsed.ToString (), "#23");

			// Open-ended range followed by closed range.
			parsed = RangeHeaderValue.Parse ("ddd = 2 -, 1-4");
			Assert.AreEqual ("ddd", parsed.Unit, "#30");
			Assert.AreEqual (2, parsed.Ranges.First ().From, "#31");
			Assert.IsNull (parsed.Ranges.First ().To, "#32");
			Assert.AreEqual ("ddd=2-, 1-4", parsed.ToString (), "#33");

			// Zero-based open-ended range.
			parsed = RangeHeaderValue.Parse ("bytes=0-");
			Assert.AreEqual ("bytes", parsed.Unit, "#40");
			Assert.AreEqual (0, parsed.Ranges.First ().From, "#41");
			Assert.IsNull (parsed.Ranges.First ().To, "#42");
			Assert.AreEqual ("bytes=0-", parsed.ToString (), "#43");

			// Open-ended plus suffix range.
			parsed = RangeHeaderValue.Parse ("bytes=0-,-9");
			Assert.AreEqual ("bytes", parsed.Unit, "#50");
			Assert.AreEqual (0, parsed.Ranges.First ().From, "#51");
			Assert.IsNull (parsed.Ranges.First ().To, "#52");
			Assert.IsNull (parsed.Ranges.Skip (1).First ().From, "#53");
			Assert.AreEqual (9, parsed.Ranges.Skip (1).First ().To, "#54");
			Assert.AreEqual ("bytes=0-, -9", parsed.ToString (), "#55");
		}

		// Asserts that Parse rejects `input` with FormatException; `label`
		// identifies the failing case when it does not.
		static void AssertParseThrowsFormat (string input, string label)
		{
			try {
				RangeHeaderValue.Parse (input);
				Assert.Fail (label);
			} catch (FormatException) {
			}
		}

		[TestMethod]
		public void Parse_Invalid ()
		{
			AssertParseThrowsFormat (null, "#1");
			AssertParseThrowsFormat (" ", "#2");
			AssertParseThrowsFormat ("5-6", "#3");
			AssertParseThrowsFormat ("bytes=", "#4");
			AssertParseThrowsFormat ("byte=1", "#5");
			AssertParseThrowsFormat ("byte=10-6", "#6");
		}

		[TestMethod]
		public void Properties ()
		{
			var withRange = new RangeHeaderValue (3, 9);
			Assert.AreEqual ("bytes", withRange.Unit, "#1");
			Assert.AreEqual (3, withRange.Ranges.First ().From, "#2");
			Assert.AreEqual (9, withRange.Ranges.First ().To, "#3");

			var empty = new RangeHeaderValue ();
			Assert.AreEqual ("bytes", empty.Unit, "#4");
			Assert.AreEqual (0, empty.Ranges.Count, "#5");
		}

		[TestMethod]
		public void Properties_Invalid ()
		{
			var header = new RangeHeaderValue ();

			// Unit may not be set to an empty string.
			try {
				header.Unit = "";
				Assert.Fail ("#1");
			} catch (ArgumentException) {
			}
		}

		[TestMethod]
		public void TryParse ()
		{
			RangeHeaderValue parsed;
			Assert.IsTrue (RangeHeaderValue.TryParse ("bytes=4-33", out parsed), "#1");
			Assert.AreEqual ("bytes", parsed.Unit, "#2");
			Assert.AreEqual (4, parsed.Ranges.First ().From, "#3");
			Assert.AreEqual (33, parsed.Ranges.First ().To, "#4");
		}

		[TestMethod]
		public void TryParse_Invalid ()
		{
			RangeHeaderValue parsed;
			Assert.IsFalse (RangeHeaderValue.TryParse ("bytes=4,33", out parsed), "#1");
			Assert.IsNull (parsed, "#2");
		}
	}
}
// ---------------------------------------------------------------------------
// Machine-generated C# emitted by the Haxe compiler from
// pony/git/pony/unity3d/Keyboard.hx (see the #line directives throughout).
// Do NOT hand-edit this file: change Keyboard.hx and regenerate instead.
//
// Keyboard implements pony.ui.IKeyboard<object> for Unity. It exposes two
// pony.events.Signal fields ('down' and 'up'); enable() attaches an "update"
// listener to the pony.DeltaTime.update signal so that update() runs once per
// frame, polling UnityEngine.Input for every KeyCode in 'keys' (populated from
// Type.allEnums<KeyCode>) and translating presses/releases into pony.ui.Key
// events via dispatchKey(). disable() detaches that listener again.
//
// The __hx_* members (setField/getField/invokeField/getFields, createEmpty,
// create) are the Haxe runtime's hash-based reflection plumbing; the integer
// hashes (e.g. 117802505 for "update") are generated and must stay in sync.
// ---------------------------------------------------------------------------
#pragma warning disable 109, 114, 219, 429, 168, 162 namespace pony.unity3d { public class Keyboard : global::haxe.lang.HxObject, global::pony.ui.IKeyboard<object> { public Keyboard(global::haxe.lang.EmptyObject empty) { unchecked { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" { } } #line default } public Keyboard() { unchecked { #line 49 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::pony.unity3d.Keyboard.__hx_ctor_pony_unity3d_Keyboard(this); } #line default } public static void __hx_ctor_pony_unity3d_Keyboard(global::pony.unity3d.Keyboard __temp_me129) { unchecked { #line 50 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" { #line 50 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::pony.events.Signal this1 = global::pony.events.Signal.create<object>(__temp_me129); #line 50 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" __temp_me129.down = ((global::pony.events.Signal) (this1) ); } { #line 51 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::pony.events.Signal this2 = global::pony.events.Signal.create<object>(__temp_me129); #line 51 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" __temp_me129.up = ((global::pony.events.Signal) (this2) ); } #line 53 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" __temp_me129.keys = global::Type.allEnums<global::UnityEngine.KeyCode>(typeof(global::UnityEngine.KeyCode)); } #line default } public static new object __hx_createEmpty() { unchecked { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return new global::pony.unity3d.Keyboard(((global::haxe.lang.EmptyObject) (global::haxe.lang.EmptyObject.EMPTY) )); } #line default } public static new object __hx_create(global::Array arr) { unchecked { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return new global::pony.unity3d.Keyboard(); } #line default } public virtual object 
pony_ui_IKeyboard_cast<T_c>() { unchecked { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return this; } #line default } public global::pony.events.Signal down; public global::pony.events.Signal up; public global::Array<global::UnityEngine.KeyCode> keys; public void enable() { unchecked { #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" object listener = default(object); #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" object __temp_stmt628 = default(object); #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" { #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" object l = default(object); #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" { #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" object f = global::pony._Function.Function_Impl_.@from(((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("update"), ((int) (117802505) ))) ), 0); #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" l = global::pony.events._Listener.Listener_Impl_._fromFunction(f, false); } #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" __temp_stmt628 = l; } #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" listener = ((object) (__temp_stmt628) ); #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::pony.events.Signal this1 = global::pony.DeltaTime.update; #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this1.@add(((object) (listener) ), new global::haxe.lang.Null<int>(-120, true)); #line 56 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" object __temp_expr629 = this1.target; } #line default } public void disable() { unchecked { #line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" object listener = default(object); 
// disable(): rebuilds the same "update" closure listener that enable() added
// and removes it from the pony.DeltaTime.update signal (see @remove below).
#line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" object __temp_stmt630 = default(object); #line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" { #line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" object l = default(object); #line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" { #line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" object f = global::pony._Function.Function_Impl_.@from(((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("update"), ((int) (117802505) ))) ), 0); #line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" l = global::pony.events._Listener.Listener_Impl_._fromFunction(f, false); } #line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" __temp_stmt630 = l; } #line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" listener = ((object) (__temp_stmt630) ); #line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::pony.events.Signal this1 = global::pony.DeltaTime.update; #line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this1.@remove(((object) (listener) )); #line 58 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" object __temp_expr631 = this1.target; } #line default } public virtual void update() { unchecked { #line 61 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" if (global::UnityEngine.Input.anyKeyDown) { int _g = 0; #line 62 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::Array<global::UnityEngine.KeyCode> _g1 = this.keys; #line 62 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" while (( _g < _g1.length )) { #line 62 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::UnityEngine.KeyCode k = _g1[_g]; #line 62 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" ++ _g; if 
// update(): while some key is down this frame (anyKeyDown), every KeyCode is
// checked for both presses (GetKeyDown -> 'down' signal) and releases
// (GetKeyUp -> 'up' signal); otherwise the else-branch below only checks
// releases, so no press can be missed but release-only frames stay cheap.
(global::UnityEngine.Input.GetKeyDown(((global::UnityEngine.KeyCode) (k) ))) { #line 63 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this.dispatchKey(this.down, k); } if (global::UnityEngine.Input.GetKeyUp(((global::UnityEngine.KeyCode) (k) ))) { #line 64 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this.dispatchKey(this.up, k); } } } else { #line 67 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" int _g2 = 0; #line 67 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::Array<global::UnityEngine.KeyCode> _g11 = this.keys; #line 67 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" while (( _g2 < _g11.length )) { #line 67 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::UnityEngine.KeyCode k1 = _g11[_g2]; #line 67 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" ++ _g2; if (global::UnityEngine.Input.GetKeyUp(((global::UnityEngine.KeyCode) (k1) ))) { #line 68 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this.dispatchKey(this.up, k1); } } } } #line default } public virtual void dispatchKey(global::pony.events.Signal s, global::UnityEngine.KeyCode k) { unchecked { #line 72 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::pony.ui.Key k1 = default(global::pony.ui.Key); #line 72 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" { #line 72 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::UnityEngine.KeyCode __temp_switch632 = (k); if (( __temp_switch632 == global::UnityEngine.KeyCode.A )) { #line 73 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.A; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.B )) { #line 74 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.B; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.C )) { #line 75 
"C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.C; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.D )) { #line 76 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.D; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.E )) { #line 77 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.E; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F )) { #line 78 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.G )) { #line 79 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.G; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.H )) { #line 80 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.H; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.I )) { #line 81 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.I; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.J )) { #line 82 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.J; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.K )) { #line 83 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.K; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.L )) { #line 84 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.L; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.M )) { #line 85 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.M; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.N )) { #line 86 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.N; } else { if (( 
__temp_switch632 == global::UnityEngine.KeyCode.O )) { #line 87 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.O; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.P )) { #line 88 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.P; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Q )) { #line 89 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Q; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.R )) { #line 90 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.R; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.S )) { #line 91 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.S; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.T )) { #line 92 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.T; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.U )) { #line 93 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.U; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.V )) { #line 94 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.V; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.W )) { #line 95 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.W; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.X )) { #line 96 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.X; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Y )) { #line 97 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Y; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Z )) { #line 98 
"C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Z; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Backspace )) { #line 99 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Backspace; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Tab )) { #line 100 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Tab; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Escape )) { #line 101 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Escape; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Space )) { #line 102 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Space; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Keypad0 )) { #line 103 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Keypad0; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Keypad1 )) { #line 104 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Keypad1; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Keypad2 )) { #line 105 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Keypad2; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Keypad3 )) { #line 106 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Keypad3; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Keypad4 )) { #line 107 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Keypad4; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Keypad5 )) { #line 108 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Keypad5; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Keypad6 )) { 
// (continuation of the generated UnityEngine.KeyCode -> pony.ui.Key mapping
// chain in dispatchKey(); one if/else-if branch per hx source line.)
#line 109 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Keypad6; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Keypad7 )) { #line 110 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Keypad7; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Keypad8 )) { #line 111 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Keypad8; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Keypad9 )) { #line 112 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Keypad9; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.KeypadDivide )) { #line 113 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.KeypadDivide; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.KeypadEquals )) { #line 114 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Equals; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.KeypadMultiply )) { #line 115 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.KeypadMultiply; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.KeypadMinus )) { #line 116 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.KeypadMinus; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.KeypadPlus )) { #line 117 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.KeypadPlus; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.KeypadEnter )) { #line 118 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.KeypadEnter; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.KeypadPeriod )) { #line 119 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = 
global::pony.ui.Key.KeypadDot; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.UpArrow )) { #line 120 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Up; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.DownArrow )) { #line 121 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Down; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.RightArrow )) { #line 122 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Right; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.LeftArrow )) { #line 123 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Left; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Insert )) { #line 124 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Insert; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Delete )) { #line 125 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Delete; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Home )) { #line 126 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Home; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.End )) { #line 127 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.End; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.PageUp )) { #line 128 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.PageUp; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.PageDown )) { #line 129 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.PageDown; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F1 )) { #line 130 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = 
global::pony.ui.Key.F1; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F2 )) { #line 131 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F2; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F3 )) { #line 132 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F3; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F4 )) { #line 133 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F4; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F5 )) { #line 134 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F5; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F6 )) { #line 135 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F6; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F7 )) { #line 136 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F7; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F8 )) { #line 137 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F8; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F9 )) { #line 138 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F9; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F10 )) { #line 139 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F10; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F11 )) { #line 140 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F11; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.F12 )) { #line 141 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.F12; } else { if (( __temp_switch632 == 
global::UnityEngine.KeyCode.Alpha1 )) { #line 142 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Number1; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Alpha2 )) { #line 143 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Number2; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Alpha3 )) { #line 144 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Number3; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Alpha4 )) { #line 145 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Number4; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Alpha5 )) { #line 146 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Number5; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Alpha6 )) { #line 147 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Number6; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Alpha7 )) { #line 148 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Number7; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Alpha8 )) { #line 149 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Number8; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Alpha9 )) { #line 150 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Number9; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Alpha0 )) { #line 151 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Number0; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Print )) { #line 152 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.PrintScreen; } else { if (( 
// NOTE(review): KeypadEquals is tested twice in this chain (hx lines 114 and
// 156); this later branch can never be reached. Harmless, but the duplicate
// case should be removed in Keyboard.hx and the file regenerated.
__temp_switch632 == global::UnityEngine.KeyCode.Pause )) { #line 153 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Pause; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.ScrollLock )) { #line 154 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.ScrollLock; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Numlock )) { #line 155 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.NumLock; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.KeypadEquals )) { #line 156 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Equals; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Minus )) { #line 157 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Minus; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.BackQuote )) { #line 158 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Tilde; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Period )) { #line 159 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Dot; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Slash )) { #line 160 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.RightSlash; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Backslash )) { #line 161 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.LeftSlash; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Quote )) { #line 162 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Quote; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.LeftShift )) { #line 163 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = 
global::pony.ui.Key.Shift; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.LeftControl )) { #line 164 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Ctrl; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.LeftAlt )) { #line 165 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Alt; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.CapsLock )) { #line 166 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.CapsLock; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.LeftWindows )) { #line 167 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.LeftWin; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.RightWindows )) { #line 168 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.RightWin; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Plus )) { #line 169 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Plus; } else { if (( __temp_switch632 == global::UnityEngine.KeyCode.Return )) { #line 170 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" k1 = global::pony.ui.Key.Enter; } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } } #line 172 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" if (( k1 != default(global::pony.ui.Key) )) { #line 172 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" { #line 172 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" s.dispatchEvent(new global::pony.events.Event(((global::Array) (new global::Array<global::pony.ui.Key>(new global::pony.ui.Key[]{k1})) ), ((object) (s.target) ), ((global::pony.events.Event) 
(default(global::pony.events.Event)) ))); #line 172 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" global::pony.events.Signal __temp_expr633 = s; } #line 172 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" object __temp_expr634 = s.target; } } #line default } public override object __hx_setField(string field, int hash, object @value, bool handleProperties) { unchecked { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" switch (hash) { case 1191633396: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this.keys = ((global::Array<global::UnityEngine.KeyCode>) (global::Array<object>.__hx_cast<global::UnityEngine.KeyCode>(((global::Array) (@value) ))) ); #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return @value; } case 26203: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this.up = ((global::pony.events.Signal) (@value) ); #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return @value; } case 1114503266: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this.down = ((global::pony.events.Signal) (@value) ); #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return @value; } default: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return base.__hx_setField(field, hash, @value, handleProperties); } } } #line default } public override object __hx_getField(string field, int hash, bool throwErrors, bool isCheck, bool handleProperties) { unchecked { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" switch (hash) { case 1721492229: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("dispatchKey"), ((int) (1721492229) ))) ); } case 117802505: { #line 42 
"C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("update"), ((int) (117802505) ))) ); } case 827877864: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("disable"), ((int) (827877864) ))) ); } case 2022157955: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return ((global::haxe.lang.Function) (new global::haxe.lang.Closure(((object) (this) ), global::haxe.lang.Runtime.toString("enable"), ((int) (2022157955) ))) ); } case 1191633396: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return this.keys; } case 26203: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return this.up; } case 1114503266: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return this.down; } default: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return base.__hx_getField(field, hash, throwErrors, isCheck, handleProperties); } } } #line default } public override object __hx_invokeField(string field, int hash, global::Array dynargs) { unchecked { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" switch (hash) { case 1721492229: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this.dispatchKey(((global::pony.events.Signal) (dynargs[0]) ), ((global::UnityEngine.KeyCode) (dynargs[1]) )); #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" break; } case 117802505: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this.update(); #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" break; } case 827877864: { #line 42 
"C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this.disable(); #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" break; } case 2022157955: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" this.enable(); #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" break; } default: { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return base.__hx_invokeField(field, hash, dynargs); } } #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" return default(object); } #line default } public override void __hx_getFields(global::Array<object> baseArr) { unchecked { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" baseArr.push("keys"); #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" baseArr.push("up"); #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" baseArr.push("down"); #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" { #line 42 "C:\\HaxeToolkit\\haxe\\lib\\pony/git/pony/unity3d/Keyboard.hx" base.__hx_getFields(baseArr); } } #line default } } }
// // AssetBundleWindow.cs // // Editor window that lets the user decide the settings for the // Asset Bundles. // // The MIT License (MIT) // // Copyright (c) 2013 Niklas Borglund // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using UnityEngine; using UnityEditor; using System.Collections.Generic; using System.IO;

// Editor window for configuring and triggering asset-bundle builds.
// Persists its settings via EditorPrefs (keys prefixed "cws_") and shows the
// versions/sizes of previously built bundles, read back from the control and
// contents files written by CreateAssetBundles.
public class AssetBundleWindow : EditorWindow
{
    // Child window that lists the assets inside one selected bundle (lazily created).
    private AssetBundleContent contentWindow;

    // Project-relative folder (under Application.dataPath) that holds the bundle source folders.
    public string assetBundleFolderLocation = "/Data/AssetBundles/";
    // Output folder for built bundles, relative to Application.dataPath ("/../" = project root sibling).
    public string exportLocation = "/../AssetBundles/";
    // File extension appended to every built bundle.
    public string bundleFileExtension = ".unity3d";

    public bool optionalSettings = false;
    public BuildTarget buildTarget = BuildTarget.WebPlayer;

    //BuildAssetBundleOptions
    public bool buildAssetBundleOptions = true;
    public bool collectDependencies = true;
    public bool completeAssets = true;
    public bool disableWriteTypeTree = false;
    public bool deterministicAssetBundle = false;
    public bool uncompressedAssetBundle = false;
    public bool setLowerCaseName = true;

    // bundle name -> version number, parsed from the bundle control file.
    public Dictionary<string, int> bundleVersions = new Dictionary<string, int>();
    // bundle name -> asset paths contained in that bundle, parsed from the contents file.
    public Dictionary<string, List<string>> bundleContents = new Dictionary<string, List<string>>();
    // bundle name -> file size in KB, measured from the exported files on disk.
    public Dictionary<string, float> bundleFileSizes = new Dictionary<string, float>();

    //The position of the scrollview
    private Vector2 scrollPosition = Vector2.zero;

    //The undo manager
    HOEditorUndoManager undoManager;

    private void OnEnable()
    {
        // Instantiate undoManager
        undoManager = new HOEditorUndoManager( this, "AssetBundleCreator" );
    }

    // Draws the whole window: export settings, build-option toggles,
    // Reset/Build buttons, and the scrollable list of built bundles.
    void OnGUI()
    {
        undoManager.CheckUndo();

        GUILayout.Label ("Export Settings", EditorStyles.boldLabel);
        assetBundleFolderLocation = EditorGUILayout.TextField("AssetBundles folder", assetBundleFolderLocation);
        GUILayout.Label ("Application.dataPath " + Application.dataPath, EditorStyles.label);
        exportLocation = EditorGUILayout.TextField("Export folder", exportLocation);
        bundleFileExtension = EditorGUILayout.TextField("Bundle file ext.", bundleFileExtension);
        setLowerCaseName = EditorGUILayout.Toggle("Names to lower case", setLowerCaseName);

        // Toggle group: individual BuildAssetBundleOptions flags.
        buildAssetBundleOptions = EditorGUILayout.BeginToggleGroup("BuildAssetBundleOptions", buildAssetBundleOptions);
        collectDependencies = EditorGUILayout.Toggle("CollectDependencies", collectDependencies);
        completeAssets = EditorGUILayout.Toggle("CompleteAssets", completeAssets);
        disableWriteTypeTree = EditorGUILayout.Toggle("DisableWriteTypeTree", disableWriteTypeTree);
        deterministicAssetBundle = EditorGUILayout.Toggle("DeterministicAssetBundle", deterministicAssetBundle);
        uncompressedAssetBundle = EditorGUILayout.Toggle("UncompressedAssetBundle", uncompressedAssetBundle);
        EditorGUILayout.EndToggleGroup();

        // Toggle group: optional explicit build target override.
        optionalSettings = EditorGUILayout.BeginToggleGroup("Optional Settings", optionalSettings);
        buildTarget = (BuildTarget)EditorGUILayout.EnumPopup("Build Target", buildTarget);
        EditorGUILayout.EndToggleGroup();

        undoManager.CheckDirty();

        GUILayout.Label("Reset Settings", EditorStyles.boldLabel);
        if (GUILayout.Button("Reset"))
        {
            // Restore defaults, persist them, then re-read the bundle metadata files.
            ClearPreferences(this);
            WriteEditorPrefs(this);
            CreateAssetBundles.ReadBundleControlFile(Application.dataPath + exportLocation + CreateAssetBundles.bundleControlFileName, bundleVersions);
            CreateAssetBundles.ReadBundleContentsFile(Application.dataPath + exportLocation + CreateAssetBundles.bundleContentsFileName, bundleContents);
            ReadBundleFileSizes();
        }

        GUILayout.Label("Build", EditorStyles.boldLabel);
        if (GUILayout.Button("Build Asset Bundles"))
        {
            if (!CreateAssetBundles.ExportAssetBundleFolders(this))
            {
                Debug.LogError("AssetBundle Build Failed! - Please check your settings in the Bundle Creator at Assets->Bundle Creator-> Asset Bundle Creator.");
            }
            else
            {
                //It worked, save the preferences and reload the control file
                WriteEditorPrefs(this);
                bundleVersions.Clear();
                bundleContents.Clear();
                bundleFileSizes.Clear();
                CreateAssetBundles.ReadBundleControlFile(Application.dataPath + exportLocation + CreateAssetBundles.bundleControlFileName, bundleVersions);
                CreateAssetBundles.ReadBundleContentsFile(Application.dataPath + exportLocation + CreateAssetBundles.bundleContentsFileName, bundleContents);
                ReadBundleFileSizes();
            }
        }

        GUILayout.Label("Bundle Versions", EditorStyles.boldLabel);
        scrollPosition = EditorGUILayout.BeginScrollView(scrollPosition);
        foreach (KeyValuePair<string, int> bundleVersion in bundleVersions)
        {
            float bundleFileSize = 0;
            bundleFileSizes.TryGetValue(bundleVersion.Key, out bundleFileSize);
            // Each bundle is a button; clicking it opens the contents window for that bundle.
            if (GUILayout.Button(bundleVersion.Key + ", Version:" + bundleVersion.Value + ", Size: " + bundleFileSize + "kb"))
            {
                List<string> assetsInBundle = null;
                bundleContents.TryGetValue(bundleVersion.Key, out assetsInBundle);
                if (assetsInBundle != null)
                {
                    CreateContentWindow();
                    contentWindow.SelectAssetBundle(bundleVersion.Key, assetsInBundle, Application.dataPath + exportLocation, bundleFileSize);
                    contentWindow.ShowTab();
                }
            }
        }
        EditorGUILayout.EndScrollView();
    }

    // Re-measures the on-disk size (KB, integer-truncated) of every bundle
    // listed in bundleVersions; bundles missing from disk are simply skipped.
    public void ReadBundleFileSizes()
    {
        bundleFileSizes.Clear();
        if (bundleVersions.Count > 0)
        {
            foreach (KeyValuePair<string, int> bundleVersion in bundleVersions)
            {
                if (File.Exists(Application.dataPath + exportLocation + bundleVersion.Key))
                {
                    FileInfo thisFileInfo = new FileInfo(Application.dataPath + exportLocation + bundleVersion.Key);
                    // NOTE: Length / 1024 is integer division, so sizes are truncated to whole KB.
                    bundleFileSizes.Add(bundleVersion.Key, (thisFileInfo.Length / 1024));
                }
            }
        }
    }

    // Lazily creates the bundle-contents window the first time it is needed.
    private void CreateContentWindow()
    {
        if (contentWindow == null)
        {
            contentWindow = AssetBundleContent.CreateContentWindow();
        }
    }

    // Loads every persisted setting into the window; keys absent from
    // EditorPrefs leave the corresponding field at its current value.
    private static void ReadEditorPrefs(AssetBundleWindow thisWindow)
    {
        //load editor prefs
        //cws is for "cry wolf studios"
        if (EditorPrefs.HasKey("cws_assetFolder"))
        {
            thisWindow.assetBundleFolderLocation = EditorPrefs.GetString("cws_assetFolder");
        }
        if (EditorPrefs.HasKey("cws_exportFolder"))
        {
            thisWindow.exportLocation = EditorPrefs.GetString("cws_exportFolder");
        }
        if (EditorPrefs.HasKey("cws_bundleExtension"))
        {
            thisWindow.bundleFileExtension = EditorPrefs.GetString("cws_bundleExtension");
        }
        if (EditorPrefs.HasKey("cws_optionalSettings"))
        {
            thisWindow.optionalSettings = EditorPrefs.GetBool("cws_optionalSettings");
        }
        if (EditorPrefs.HasKey("cws_buildTarget"))
        {
            thisWindow.buildTarget = (BuildTarget)EditorPrefs.GetInt("cws_buildTarget");
        }
        if (EditorPrefs.HasKey("cws_buildAssetBundleOptions"))
        {
            thisWindow.buildAssetBundleOptions = EditorPrefs.GetBool("cws_buildAssetBundleOptions");
        }
        if (EditorPrefs.HasKey("cws_collectDependencies"))
        {
            thisWindow.collectDependencies = EditorPrefs.GetBool("cws_collectDependencies");
        }
        if (EditorPrefs.HasKey("cws_completeAssets"))
        {
            thisWindow.completeAssets = EditorPrefs.GetBool("cws_completeAssets");
        }
        if (EditorPrefs.HasKey("cws_disableWriteTypeTree"))
        {
            thisWindow.disableWriteTypeTree = EditorPrefs.GetBool("cws_disableWriteTypeTree");
        }
        if (EditorPrefs.HasKey("cws_deterministicAssetBundle"))
        {
            thisWindow.deterministicAssetBundle = EditorPrefs.GetBool("cws_deterministicAssetBundle");
        }
        if (EditorPrefs.HasKey("cws_uncompressedAssetBundle"))
        {
            thisWindow.uncompressedAssetBundle = EditorPrefs.GetBool("cws_uncompressedAssetBundle");
        }
        if (EditorPrefs.HasKey("cws_setLowerCaseName"))
        {
            thisWindow.setLowerCaseName = EditorPrefs.GetBool("cws_setLowerCaseName");
        }
    }

    // Persists every setting to EditorPrefs, plus the export folder to
    // PlayerPrefs so runtime code can find bundles when playing in the editor.
    private static void WriteEditorPrefs(AssetBundleWindow thisWindow)
    {
        //save editor prefs
        //cws is for "cry wolf studios"
        EditorPrefs.SetString("cws_assetFolder", thisWindow.assetBundleFolderLocation);
        EditorPrefs.SetString("cws_exportFolder", thisWindow.exportLocation);
        EditorPrefs.SetString("cws_bundleExtension", thisWindow.bundleFileExtension);
        EditorPrefs.SetBool("cws_optionalSettings", thisWindow.optionalSettings);
        EditorPrefs.SetInt("cws_buildTarget", (int)thisWindow.buildTarget);
        EditorPrefs.SetBool("cws_buildAssetBundleOptions", thisWindow.buildAssetBundleOptions);
        EditorPrefs.SetBool("cws_collectDependencies", thisWindow.collectDependencies);
        EditorPrefs.SetBool("cws_completeAssets", thisWindow.completeAssets);
        EditorPrefs.SetBool("cws_disableWriteTypeTree", thisWindow.disableWriteTypeTree);
        EditorPrefs.SetBool("cws_deterministicAssetBundle", thisWindow.deterministicAssetBundle);
        EditorPrefs.SetBool("cws_uncompressedAssetBundle", thisWindow.uncompressedAssetBundle);
        EditorPrefs.SetBool("cws_setLowerCaseName", thisWindow.setLowerCaseName);
        //If you want the export folder at runtime (for asset bundle loading in editor mode)
        PlayerPrefs.SetString("cws_exportFolder", thisWindow.exportLocation);
    }

    // Restores built-in defaults and clears the cached bundle metadata.
    // NOTE(review): setLowerCaseName is never reset here even though every
    // other option is — looks like an omission; confirm intent.
    // NOTE(review): the reset folder "/BundleCreator/Data/AssetBundles/"
    // differs from the field initializer "/Data/AssetBundles/" — verify which
    // default is the intended one.
    private static void ClearPreferences(AssetBundleWindow thisWindow)
    {
        thisWindow.assetBundleFolderLocation = "/BundleCreator/Data/AssetBundles/";
        thisWindow.exportLocation = "/../AssetBundles/";
        thisWindow.bundleFileExtension = ".unity3d";
        thisWindow.optionalSettings = false;
        thisWindow.buildTarget = BuildTarget.WebPlayer;
        //BuildAssetBundleOptions
        thisWindow.buildAssetBundleOptions = true;
        thisWindow.collectDependencies = true;
        thisWindow.completeAssets = true;
        thisWindow.disableWriteTypeTree = false;
        thisWindow.deterministicAssetBundle = false;
        thisWindow.uncompressedAssetBundle = false;
        thisWindow.bundleVersions.Clear();
        thisWindow.bundleContents.Clear();
        thisWindow.bundleFileSizes.Clear();
    }

    //Show window
    // Menu entry point: opens the window, loads saved prefs, and populates
    // the bundle version/content/size caches from the export folder.
    [MenuItem("Assets/Bundle Creator/Asset Bundle Creator")]
    public static void ShowWindow()
    {
        AssetBundleWindow thisWindow = (AssetBundleWindow)EditorWindow.GetWindow(typeof(AssetBundleWindow));
        thisWindow.title = "Bundle Creator";
        ReadEditorPrefs(thisWindow);
        CreateAssetBundles.ReadBundleControlFile(Application.dataPath + thisWindow.exportLocation + CreateAssetBundles.bundleControlFileName, thisWindow.bundleVersions);
        CreateAssetBundles.ReadBundleContentsFile(Application.dataPath + thisWindow.exportLocation + CreateAssetBundles.bundleContentsFileName, thisWindow.bundleContents);
        thisWindow.ReadBundleFileSizes();
    }
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
using Microsoft.Xna.Framework;

namespace Cocos2D
{
    // Property-value type tags used in the CCBI binary stream.
    public enum CCBPropertyType
    {
        Position = 0,
        Size,
        Point,
        PointLock,
        ScaleLock,
        Degrees,
        Integer,
        Float,
        FloatVar,
        Check,
        SpriteFrame,
        Texture,
        Byte,
        Color3,
        Color4FVar,
        Flip,
        Blendmode,
        FntFile,
        Text,
        FontTTF,
        IntegerLabeled,
        Block,
        Animation,
        CCBFile,
        String,
        BlockCCControl,
        FloatScale,
        FloatXY
    }

    // Compact float encodings used by ReadFloat (common constants get 1 byte).
    internal enum CCBFloatType
    {
        Float0 = 0,
        Float1,
        Minus1,
        Float05,
        Integer,
        Full
    }

    internal enum PlatformType
    {
        All = 0,
        IOS,
        Mac
    }

    // Where a member-variable / selector assignment is resolved to.
    public enum CCBTargetType
    {
        None = 0,
        DocumentRoot = 1,
        Owner = 2,
    }

    public enum CCBEasingType
    {
        Instant,
        Linear,
        CubicIn,
        CubicOut,
        CubicInOut,
        ElasticIn,
        ElasticOut,
        ElasticInOut,
        BounceIn,
        BounceOut,
        BounceInOut,
        BackIn,
        BackOut,
        BackInOut,
    }

    public enum CCBPositionType
    {
        RelativeBottomLeft,
        RelativeTopLeft,
        RelativeTopRight,
        RelativeBottomRight,
        Percent,
        MultiplyResolution,
    }

    internal enum SizeType
    {
        Absolute,
        Percent,
        RelativeContainer,
        HorizontalPercent,
        VerticalPercent,
        MultiplyResolution,
    }

    public enum CCBScaleType
    {
        Absolute,
        MultiplyResolution
    }

    /** * @addtogroup cocosbuilder * @{ */

    // Placeholder node representing an embedded sub-.ccb file; ReadNodeGraph
    // replaces it with its FileNode so the wrapper never appears in the scene.
    public class CCBFile : CCNode
    {
        private CCNode _CCBFileNode;

        // The root node loaded from the referenced sub-ccb file.
        public CCNode FileNode
        {
            get { return _CCBFileNode; }
            set { _CCBFileNode = value; }
        }

        public CCBFile()
        {
        }
    }

    // Callback fired after a node (and its properties) has been loaded.
    public interface ICCNodeLoaderListener
    {
        void OnNodeLoaded(CCNode node, CCNodeLoader nodeLoader);
    }

    // Resolves selector names from the ccb file to concrete delegates.
    public interface ICCBSelectorResolver
    {
        Action<object> OnResolveCCBCCMenuItemSelector(object target, string pSelectorName);
        Action<CCNode> OnResolveCCBCCCallFuncSelector(Object pTarget, string pSelectorName);
        Action<object, CCControlEvent> OnResolveCCBCCControlSelector(object target, string pSelectorName);
    }

    public interface ICCBScriptOwnerProtocol
    {
        ICCBSelectorResolver CreateNew();
    }

    /** * @brief Parse CCBI file which is generated by CocosBuilder */
    public class CCBReader
    {
        private static float __ccbResolutionScale = 1.0f;
        // Only CCBI files of exactly this version are accepted (see ReadHeader).
        private const int CCBVersion = 5;

        // Property names currently animated on the node being parsed.
        private readonly List<string> _animatedProps = new List<string>();
        private readonly ICCBMemberVariableAssigner _CCBMemberVariableAssigner;
        private readonly ICCBSelectorResolver _CCBSelectorResolver;
        private readonly CCNodeLoaderLibrary _nodeLoaderLibrary;
        private readonly ICCNodeLoaderListener _nodeLoaderListener;
        // Sprite sheets already registered with the frame cache (avoid re-loading).
        private readonly List<string> _loadedSpriteSheets;
        // String table read from the file header; strings are referenced by index.
        private readonly List<string> _stringCache = new List<string>();
        public bool _hasScriptingOwner = false;
        private CCBAnimationManager _actionManager;

        // Raw file bytes plus the bit/byte read cursor.
        internal byte[] _bytes;
        internal int _currentBit;
        internal int _currentByte;
        internal object _owner;

        private Dictionary<CCNode, CCBAnimationManager> _actionManagers;
        private List<string> _ownerOutletNames;
        private List<CCNode> _ownerOutletNodes;
        private List<CCNode> _nodesWithAnimationManagers;
        private List<CCBAnimationManager> _animationManagersForNodes;
        private List<string> _ownerCallbackNames;
        private List<CCNode> _ownerCallbackNodes;
        private string _CCBRootPath;
        private bool _jsControlled;

        private static readonly UTF8Encoding utf8Encoder = new UTF8Encoding(false);

        public CCBReader(CCNodeLoaderLibrary nodeLoaderLibrary) : this(nodeLoaderLibrary, null, null, null)
        {
        }

        public CCBReader(CCNodeLoaderLibrary nodeLoaderLibrary, ICCBMemberVariableAssigner memberVariableAssigner)
            : this(nodeLoaderLibrary, memberVariableAssigner, null, null)
        {
        }

        public CCBReader(CCNodeLoaderLibrary nodeLoaderLibrary, ICCBMemberVariableAssigner memberVariableAssigner,
                         ICCBSelectorResolver selectorResolver)
            : this(nodeLoaderLibrary, memberVariableAssigner, selectorResolver, null)
        {
        }

        // Primary constructor: wires up the loader library and optional
        // assigner/resolver/listener collaborators, then initializes state.
        public CCBReader(CCNodeLoaderLibrary nodeLoaderLibrary, ICCBMemberVariableAssigner memberVariableAssigner,
                         ICCBSelectorResolver selectorResolver, ICCNodeLoaderListener nodeLoaderListener)
        {
            _currentByte = -1;
            _currentBit = -1;
            _loadedSpriteSheets = new List<string>();
            _nodeLoaderLibrary = nodeLoaderLibrary;
            _CCBMemberVariableAssigner = memberVariableAssigner;
            _CCBSelectorResolver = selectorResolver;
            _nodeLoaderListener = nodeLoaderListener;

            Init();
        }

        // Copy constructor used when recursing into sub-ccb files: shares the
        // caches and owner lists of the parent reader.
        public CCBReader(CCBReader reader)
        {
            _currentByte = -1;
            _currentBit = -1;
            _loadedSpriteSheets = reader._loadedSpriteSheets;
            _nodeLoaderLibrary = reader._nodeLoaderLibrary;
            _CCBMemberVariableAssigner = reader._CCBMemberVariableAssigner;
            _CCBSelectorResolver = reader._CCBSelectorResolver;
            _nodeLoaderListener = reader._nodeLoaderListener;
            _ownerCallbackNames = reader._ownerCallbackNames;
            _ownerCallbackNodes = reader._ownerCallbackNodes;
            _ownerOutletNames = reader._ownerOutletNames;
            _ownerOutletNodes = reader._ownerOutletNodes;
            _CCBRootPath = reader.CCBRootPath;

            Init();
        }

        public CCBReader()
        {
            _currentByte = -1;
            _currentBit = -1;

            Init();
        }

        public ICCBMemberVariableAssigner MemberVariableAssigner
        {
            get { return _CCBMemberVariableAssigner; }
        }

        public ICCBSelectorResolver SelectorResolver
        {
            get { return _CCBSelectorResolver; }
        }

        public CCBAnimationManager AnimationManager
        {
            get { return _actionManager; }
            set { _actionManager = value; }
        }

        // Used in CCNodeLoader.parseProperties()
        public List<string> AnimatedProperties
        {
            get { return _animatedProps; }
        }

        public List<string> LoadedSpriteSheet
        {
            get { return _loadedSpriteSheets; }
        }

        public object Owner
        {
            get { return _owner; }
        }

        public static float ResolutionScale
        {
            get { return __ccbResolutionScale; }
            set { __ccbResolutionScale = value; }
        }

        public string CCBRootPath
        {
            set
            {
                Debug.Assert(value != null, "");
                _CCBRootPath = value;
            }
            get { return _CCBRootPath; }
        }

        private bool Init()
        {
            // Setup action manager
            CCBAnimationManager pActionManager = new CCBAnimationManager();
            AnimationManager = pActionManager;

            // Setup resolution scale and container size
            _actionManager.RootContainerSize = CCDirector.SharedDirector.WinSize;

            return true;
        }

        public CCNode ReadNodeGraphFromFile(string fileName)
        {
            return ReadNodeGraphFromFile(fileName, null);
        }

        public CCNode ReadNodeGraphFromFile(string fileName, object owner)
        {
            return ReadNodeGraphFromFile(fileName, owner, CCDirector.SharedDirector.WinSize);
        }

        // Loads a .ccbi file from disk (appending the extension if missing)
        // and parses it into a node graph. Returns null for an empty file name.
        public CCNode ReadNodeGraphFromFile(string fileName, object owner, CCSize parentSize)
        {
            if (string.IsNullOrEmpty(fileName))
            {
                return null;
            }

            string strCCBFileName = fileName;
            string strSuffix = ".ccbi";
            // Add ccbi suffix
            if (!CCBReader.EndsWith(strCCBFileName, strSuffix))
            {
                strCCBFileName += strSuffix;
            }

            string strPath = CCFileUtils.FullPathFromRelativePath(strCCBFileName);
            var pBytes = CCFileUtils.GetFileBytes(strPath);
            byte[] data = pBytes;

            CCNode ret = ReadNodeGraphFromData(data, owner, parentSize);

            return ret;
        }

        // Parses a CCBI byte buffer: resets the cursor, reads the graph,
        // auto-plays the default animation sequence (non-JS-controlled files),
        // and stores each node's CCBAnimationManager in its UserObject.
        public CCNode ReadNodeGraphFromData(byte[] bytes, object owner, CCSize parentSize)
        {
            _bytes = bytes;
            _currentByte = 0;
            _currentBit = 0;
            _owner = owner;

            _actionManager.RootContainerSize = parentSize;
            _actionManager._owner = _owner;

            _ownerOutletNodes = new List<CCNode>();
            _ownerCallbackNodes = new List<CCNode>();

            Dictionary<CCNode, CCBAnimationManager> animationManagers = new Dictionary<CCNode, CCBAnimationManager>();

            CCNode pNodeGraph = ReadFileWithCleanUp(true, animationManagers);

            if (pNodeGraph != null && _actionManager.AutoPlaySequenceId != -1 && !_jsControlled)
            {
                // Auto play animations
                _actionManager.RunAnimationsForSequenceIdTweenDuration(_actionManager.AutoPlaySequenceId, 0);
            }

            // Assign actionManagers to userObject
            if (_jsControlled)
            {
                _nodesWithAnimationManagers = new List<CCNode>();
                _animationManagersForNodes = new List<CCBAnimationManager>();
            }

            foreach (var pElement in animationManagers)
            {
                CCNode pNode = pElement.Key;
                CCBAnimationManager manager = animationManagers[pNode];

                pNode.UserObject = manager;

                if (_jsControlled)
                {
                    _nodesWithAnimationManagers.Add(pNode);
                    _animationManagersForNodes.Add(manager);
                }
            }

            return pNodeGraph;
        }

        public CCScene CreateSceneWithNodeGraphFromFile(string fileName)
        {
            return CreateSceneWithNodeGraphFromFile(fileName, null);
        }

        public CCScene CreateSceneWithNodeGraphFromFile(string fileName, object owner)
        {
            return CreateSceneWithNodeGraphFromFile(fileName, owner, CCDirector.SharedDirector.WinSize);
        }

        // Convenience: wraps the loaded node graph in a fresh CCScene.
        public CCScene CreateSceneWithNodeGraphFromFile(string fileName, object owner, CCSize parentSize)
        {
            CCNode pNode = ReadNodeGraphFromFile(fileName, owner, parentSize);
            CCScene pScene = new CCScene();
            pScene.AddChild(pNode);
            return pScene;
        }

        /* Utility methods. */

        // Returns the part of the path after the first '/' (or the whole path).
        // NOTE(review): uses IndexOf, so "a/b/c" yields "b/c" — a true "last
        // path component" would use LastIndexOf; confirm which is intended.
        public static String LastPathComponent(String pPath)
        {
            int slashPos = pPath.IndexOf('/');
            if (slashPos != -1)
            {
                return pPath.Substring(slashPos + 1);
            }
            return pPath;
        }

        // Strips the extension after the last '.' (if any).
        public static String DeletePathExtension(String pPath)
        {
            int dotPos = pPath.LastIndexOf('.');
            if (dotPos != -1)
            {
                return pPath.Substring(0, dotPos);
            }
            return pPath;
        }

        public static String ToLowerCase(String pString)
        {
            return pString.ToLower();
        }

        public static bool EndsWith(String pString, String pEnding)
        {
            return pString.EndsWith(pEnding);
        }

        /* Parse methods. */

        // Reads a variable-length (Elias-gamma-style) integer from the bit
        // stream and byte-aligns the cursor afterwards.
        // NOTE(review): in the signed branch, odd values map to +current/2 and
        // even values to -current/2 — this looks inverted relative to typical
        // CCBI readers; verify against files containing negative ints.
        public int ReadInt(bool pSigned)
        {
            // Unary prefix: number of leading 0-bits = bit length - 1.
            int numBits = 0;
            while (!GetBit())
            {
                numBits++;
            }

            int current = 0;
            for (int a = numBits - 1; a >= 0; a--)
            {
                if (GetBit())
                {
                    current |= 1 << a;
                }
            }
            current |= 1 << numBits;

            int num;
            if (pSigned)
            {
                int s = current % 2;
                if (s != 0)
                {
                    num = (current / 2);
                }
                else
                {
                    num = (-current / 2);
                }
            }
            else
            {
                num = current - 1;
            }

            AlignBits();

            return num;
        }

        public byte ReadByte()
        {
            byte b = _bytes[_currentByte];
            _currentByte++;
            return b;
        }

        public bool ReadBool()
        {
            return 0 != ReadByte();
        }

        // Reads a big-endian 16-bit length followed by that many UTF-8 bytes.
        public string ReadUTF8()
        {
            int b0 = ReadByte();
            int b1 = ReadByte();

            int numBytes = b0 << 8 | b1;

            string result = utf8Encoder.GetString(_bytes, _currentByte, numBytes);

            _currentByte += numBytes;

            return result;
        }

        // Reads a float using the compact CCBFloatType encoding: common
        // constants are a single tag byte; otherwise 4 raw IEEE-754 bytes.
        public float ReadFloat()
        {
            var type = (CCBFloatType) ReadByte();

            switch (type)
            {
                case CCBFloatType.Float0:
                    return 0;
                case CCBFloatType.Float1:
                    return 1;
                case CCBFloatType.Minus1:
                    return -1;
                case CCBFloatType.Float05:
                    return 0.5f;
                case CCBFloatType.Integer:
                    return ReadInt(true);
                default:
                    var byteArray = new byte[4];
                    byteArray[0] = _bytes[_currentByte + 0];
                    byteArray[1] = _bytes[_currentByte + 1];
                    byteArray[2] = _bytes[_currentByte + 2];
                    byteArray[3] = _bytes[_currentByte + 3];
                    float f = BitConverter.ToSingle(byteArray, 0);
                    _currentByte += 4;
                    return f;
            }
        }

        // Reads an index into the string cache populated by ReadStringCache.
        public string ReadCachedString()
        {
            int i = ReadInt(false);
            return _stringCache[i];
        }

        public bool IsJSControlled()
        {
            return _jsControlled;
        }

        // Reads the callback-keyframe channel of a sequence; each keyframe
        // stores the callback name and target type as CCBValue strings.
        public bool ReadCallbackKeyframesForSeq(CCBSequence seq)
        {
            int numKeyframes = ReadInt(false);

            if (numKeyframes == 0)
                return true;

            CCBSequenceProperty channel = new CCBSequenceProperty();

            for (int i = 0; i < numKeyframes; ++i)
            {
                float time = ReadFloat();
                string callbackName = ReadCachedString();
                int callbackType = ReadInt(false);

                List<CCBValue> value = new List<CCBValue>();
                value.Add(new CCBValue(callbackName));
                value.Add(new CCBValue(callbackType.ToString()));

                CCBKeyframe keyframe = new CCBKeyframe();
                keyframe.Time = time;
                keyframe.Value = value;

                if (_jsControlled)
                {
                    //string callbackIdentifier;
                    _actionManager.GetKeyframeCallbacks().Add(String.Format("{0}:{1}", callbackType, callbackName));
                }

                channel.Keyframes.Add(keyframe);
            }

            seq.CallBackChannel = channel;

            return true;
        }

        // Reads the sound-keyframe channel of a sequence (file, pitch, pan, gain).
        public bool ReadSoundKeyframesForSeq(CCBSequence seq)
        {
            int numKeyframes = ReadInt(false);

            if (numKeyframes == 0)
                return true;

            CCBSequenceProperty channel = new CCBSequenceProperty();

            for (int i = 0; i < numKeyframes; ++i)
            {
                float time = ReadFloat();
                string soundFile = ReadCachedString();
                float pitch = ReadFloat();
                float pan = ReadFloat();
                float gain = ReadFloat();

                List<CCBValue> value = new List<CCBValue>();
                value.Add(new CCBValue(soundFile));
                value.Add(new CCBValue(pitch.ToString()));
                value.Add(new CCBValue(pan.ToString()));
                value.Add(new CCBValue(gain.ToString()));

                CCBKeyframe keyframe = new CCBKeyframe();
                keyframe.Time = time;
                keyframe.Value = value;

                channel.Keyframes.Add(keyframe);
            }

            seq.SoundChannel = channel;

            return true;
        }

        // Returns a defensive copy of the owner callback names.
        public List<string> OwnerCallbackNames
        {
            get { return new List<string>(_ownerCallbackNames); }
        }

        public List<CCNode> OwnerCallbackNodes
        {
            get { return _ownerCallbackNodes; }
        }

        // Returns a defensive copy of the owner outlet names.
        public List<string> OwnerOutletNames
        {
            get { return new List<string>(_ownerOutletNames); }
        }

        public List<CCNode> OwnerOutletNodes
        {
            get { return _ownerOutletNodes; }
        }

        public List<CCNode> NodesWithAnimationManagers
        {
            get { return _nodesWithAnimationManagers; }
        }

        public List<CCBAnimationManager> AnimationManagersForNodes
        {
            get { return _animationManagersForNodes; }
        }

        public Dictionary<CCNode, CCBAnimationManager> AnimationManagers
        {
            get { return _actionManagers; }
            set { _actionManagers = value; }
        }

        public void AddOwnerCallbackName(string name)
        {
            _ownerCallbackNames.Add(name);
        }

        public void AddOwnerCallbackNode(CCNode node)
        {
            _ownerCallbackNodes.Add(node);
        }

        public void AddDocumentCallbackName(string name)
        {
            _actionManager.AddDocumentCallbackName(name);
        }

        public void AddDocumentCallbackNode(CCNode node)
        {
            _actionManager.AddDocumentCallbackNode(node);
        }

        // Top-level parse: header -> string cache -> sequences -> node graph.
        // Registers the root node's animation manager in `am` and optionally
        // clears UserObject references afterwards.
        public CCNode ReadFileWithCleanUp(bool bCleanUp, Dictionary<CCNode, CCBAnimationManager> am)
        {
            if (!ReadHeader())
            {
                return null;
            }

            if (!ReadStringCache())
            {
                return null;
            }

            if (!ReadSequences())
            {
                return null;
            }

            AnimationManagers = am;

            CCNode node = ReadNodeGraph(null);

            _actionManagers[node] = _actionManager;

            if (bCleanUp)
            {
                CleanUpNodeGraph(node);
            }

            return node;
        }

        public void AddOwnerOutletName(string name)
        {
            _ownerOutletNames.Add(name);
        }

        public void AddOwnerOutletNode(CCNode node)
        {
            if (node == null)
                return;

            _ownerOutletNodes.Add(node);
        }

        // Recursively nulls out UserObject on the whole subtree.
        private void CleanUpNodeGraph(CCNode node)
        {
            node.UserObject = null;

            if (node.Children != null)
            {
                for (int i = 0; i < node.Children.Count; i++)
                {
                    CleanUpNodeGraph(node.Children[i]);
                }
            }
        }

        // Reads all animation sequences (duration, name, ids, callback and
        // sound channels) plus the auto-play sequence id.
        private bool ReadSequences()
        {
            List<CCBSequence> sequences = _actionManager.Sequences;
            int numSeqs = ReadInt(false);
            for (int i = 0; i < numSeqs; i++)
            {
                var seq = new CCBSequence();

                seq.Duration = ReadFloat();
                seq.Name = ReadCachedString();
                seq.SequenceId = ReadInt(false);
                seq.ChainedSequenceId = ReadInt(true);

                if (!ReadCallbackKeyframesForSeq(seq)) return false;
                if (!ReadSoundKeyframesForSeq(seq)) return false;

                sequences.Add(seq);
            }
            _actionManager.AutoPlaySequenceId = ReadInt(true);
            return true;
        }

        // Reads one keyframe: time, easing (with optional easing parameter),
        // then a value whose wire format depends on the property type.
        private CCBKeyframe ReadKeyframe(CCBPropertyType type)
        {
            var keyframe = new CCBKeyframe();
            keyframe.Time = ReadFloat();

            var easingType = (CCBEasingType) ReadInt(false);
            float easingOpt = 0;
            object value = null;

            // Only cubic/elastic easings carry an extra option float.
            if (easingType == CCBEasingType.CubicIn
                || easingType == CCBEasingType.CubicOut
                || easingType == CCBEasingType.CubicInOut
                || easingType == CCBEasingType.ElasticIn
                || easingType == CCBEasingType.ElasticOut
                || easingType == CCBEasingType.ElasticInOut)
            {
                easingOpt = ReadFloat();
            }
            keyframe.EasingType = easingType;
            keyframe.EasingOpt = easingOpt;

            if (type == CCBPropertyType.Check)
            {
                value = new CCBValue(ReadBool());
            }
            else if (type == CCBPropertyType.Byte)
            {
                value = new CCBValue(ReadByte());
            }
            else if (type == CCBPropertyType.Color3)
            {
                byte r = ReadByte();
                byte g = ReadByte();
                byte b = ReadByte();

                var c = new CCColor3B(r, g, b);
                value = new CCColor3BWapper(c);
            }
            else if (type == CCBPropertyType.Degrees)
            {
                value = new CCBValue(ReadFloat());
            }
            else if (type == CCBPropertyType.ScaleLock || type == CCBPropertyType.Position
                     || type == CCBPropertyType.FloatXY)
            {
                float a = ReadFloat();
                float b = ReadFloat();

                value = new List<CCBValue>
                    {
                        new CCBValue(a),
                        new CCBValue(b)
                    };
            }
            else if (type == CCBPropertyType.SpriteFrame)
            {
                string spriteSheet = ReadCachedString();
                string spriteFile = ReadCachedString();

                CCSpriteFrame spriteFrame;

                if (String.IsNullOrEmpty(spriteSheet))
                {
                    // No sheet: load the image directly and use its full bounds.
                    spriteFile = _CCBRootPath + spriteFile;

                    CCTexture2D texture = CCTextureCache.SharedTextureCache.AddImage(CCFileUtils.RemoveExtension(spriteFile));
                    var bounds = new CCRect(0, 0, texture.ContentSize.Width, texture.ContentSize.Height);
                    spriteFrame = new CCSpriteFrame(texture, bounds);
                }
                else
                {
                    spriteSheet = _CCBRootPath + spriteSheet;
                    CCSpriteFrameCache frameCache = CCSpriteFrameCache.SharedSpriteFrameCache;

                    // Load the sprite sheet only if it is not loaded
                    if (!_loadedSpriteSheets.Contains(spriteSheet))
                    {
                        frameCache.AddSpriteFramesWithFile(spriteSheet);
                        _loadedSpriteSheets.Add(spriteSheet);
                    }

                    spriteFrame = frameCache.SpriteFrameByName(spriteFile);
                }
                value = spriteFrame;
            }

            keyframe.Value = value;

            return keyframe;
        }

        // Validates magic bytes 'i','b','c','c', the file version, and reads
        // the JS-controlled flag.
        private bool ReadHeader()
        {
            /* If no bytes loaded, don't crash about it. */
            if (_bytes == null)
            {
                return false;
            }

            /* Read magic bytes */
            if (_bytes[_currentByte + 0] != 'i' || _bytes[_currentByte + 1] != 'b' || _bytes[_currentByte + 2] != 'c' ||
                _bytes[_currentByte + 3] != 'c')
            {
                return false;
            }
            _currentByte += 4;

            /* Read version. */
            int version = ReadInt(false);
            if (version != CCBVersion)
            {
                // NOTE(review): CCLog.Log is given printf-style "%d" placeholders
                // here; confirm CCLog supports them (other call sites use "{0}").
                CCLog.Log("WARNING! Incompatible ccbi file version (file: %d reader: %d)", version, CCBVersion);
                return false;
            }

            // Read JS check
            _jsControlled = ReadBool();
            _actionManager._jsControlled = _jsControlled;

            return true;
        }

        // Populates the string cache used by ReadCachedString.
        private bool ReadStringCache()
        {
            int numStrings = ReadInt(false);

            for (int i = 0; i < numStrings; i++)
            {
                _stringCache.Add(ReadUTF8());
            }

            return true;
        }

        /*private void ReadStringCacheEntry()
        {
            int b0 = ReadByte();
            int b1 = ReadByte();

            int numBytes = b0 << 8 | b1;

            string s = Encoding.UTF8.GetString(_bytes, _currentByte, numBytes);
            _currentByte += numBytes;

            _stringCache.Add(s);
        }*/

        private CCNode ReadNodeGraph()
        {
            return ReadNodeGraph(null);
        }

        // Recursively reads one node: class name, member-variable assignment,
        // animated properties, regular properties, then children. Sub-ccb
        // wrapper nodes (CCBFile) are replaced by their embedded root node.
        private CCNode ReadNodeGraph(CCNode parent)
        {
            /* Read class name. */
            string className = ReadCachedString();

            string _jsControlledName = null;

            if (_jsControlled)
            {
                _jsControlledName = ReadCachedString();
            }

            // Read assignment type and name
            var memberVarAssignmentType = (CCBTargetType) ReadInt(false);
            string memberVarAssignmentName = String.Empty;
            if (memberVarAssignmentType != CCBTargetType.None)
            {
                memberVarAssignmentName = ReadCachedString();
            }

            CCNodeLoader ccNodeLoader = _nodeLoaderLibrary.GetCCNodeLoader(className);
            if (ccNodeLoader == null)
            {
                CCLog.Log("no corresponding node loader for {0}", className);
                return null;
            }

            CCNode node = ccNodeLoader.LoadCCNode(parent, this);

            // Set root node
            if (_actionManager.RootNode == null)
            {
                _actionManager.RootNode = node;
            }

            // Assign controller
            if (_jsControlled && node == _actionManager.RootNode)
            {
                _actionManager.DocumentControllerName = _jsControlledName;
            }

            // Read animated properties
            var seqs = new Dictionary<int, Dictionary<string, CCBSequenceProperty>>();
            _animatedProps.Clear();

            int numSequence = ReadInt(false);
            for (int i = 0; i < numSequence; ++i)
            {
                int seqId = ReadInt(false);
                var seqNodeProps = new Dictionary<string, CCBSequenceProperty>();

                int numProps = ReadInt(false);

                for (int j = 0; j < numProps; ++j)
                {
                    var seqProp = new CCBSequenceProperty();

                    seqProp.Name = ReadCachedString();
                    seqProp.Type = (CCBPropertyType) ReadInt(false);
                    _animatedProps.Add(seqProp.Name);

                    int numKeyframes = ReadInt(false);

                    for (int k = 0; k < numKeyframes; ++k)
                    {
                        CCBKeyframe keyframe = ReadKeyframe(seqProp.Type);

                        seqProp.Keyframes.Add(keyframe);
                    }

                    seqNodeProps.Add(seqProp.Name, seqProp);
                }

                seqs.Add(seqId, seqNodeProps);
            }

            if (seqs.Count > 0)
            {
                _actionManager.AddNode(node, seqs);
            }

            // Read properties
            ccNodeLoader.ParseProperties(node, parent, this);

            bool isCCBFileNode = node is CCBFile;

            // Handle sub ccb files (remove middle node)
            if (isCCBFileNode)
            {
                var ccbFileNode = (CCBFile) node;

                CCNode embeddedNode = ccbFileNode.FileNode;
                embeddedNode.Position = ccbFileNode.Position;
                embeddedNode.Rotation = ccbFileNode.Rotation;
                embeddedNode.ScaleX = ccbFileNode.ScaleX;
                embeddedNode.ScaleY = ccbFileNode.ScaleY;
                embeddedNode.Tag = ccbFileNode.Tag;
                embeddedNode.Visible = true;
                //embeddedNode.IgnoreAnchorPointForPosition = ccbFileNode.IgnoreAnchorPointForPosition;

                _actionManager.MoveAnimationsFromNode(ccbFileNode, embeddedNode);

                ccbFileNode.FileNode = null;

                node = embeddedNode;
            }

#if CCB_ENABLE_JAVASCRIPT
    /*
     if (memberVarAssignmentType && memberVarAssignmentName && ![memberVarAssignmentName isEqualToString:@""]) {
     [[JSCocoa sharedController] setObject:node withName:memberVarAssignmentName];
     }*/
#else
            if (memberVarAssignmentType != CCBTargetType.None)
            {
                if (!_jsControlled)
                {
                    object target = null;

                    if (memberVarAssignmentType == CCBTargetType.DocumentRoot)
                    {
                        target = _actionManager.RootNode;
                    }
                    else if (memberVarAssignmentType == CCBTargetType.Owner)
                    {
                        target = _owner;
                    }

                    if (target != null)
                    {
                        var targetAsCCBMemberVariableAssigner = target as ICCBMemberVariableAssigner;

                        bool assigned = false;
                        if (memberVarAssignmentType != CCBTargetType.None)
                        {
                            if (targetAsCCBMemberVariableAssigner != null)
                            {
                                assigned = targetAsCCBMemberVariableAssigner.OnAssignCCBMemberVariable(target, memberVarAssignmentName, node);
                            }

                            // Fall back to the reader-level assigner if the target did not handle it.
                            if (!assigned && _CCBMemberVariableAssigner != null)
                            {
                                _CCBMemberVariableAssigner.OnAssignCCBMemberVariable(target, memberVarAssignmentName, node);
                            }
                        }
                    }
                }
                else
                {
                    // JS-controlled: record outlets instead of assigning them.
                    if (memberVarAssignmentType == CCBTargetType.DocumentRoot)
                    {
                        _actionManager.AddDocumentOutletName(memberVarAssignmentName);
                        _actionManager.AddDocumentOutletNode(node);
                    }
                    else
                    {
                        _ownerOutletNames.Add(memberVarAssignmentName);
                        _ownerOutletNodes.Add(node);
                    }
                }
            }

            // Assign custom properties.
            if (ccNodeLoader.CustomProperties.Count > 0)
            {
                bool customAssigned = false;

                if(!_jsControlled)
                {
                    Object target = node;
                    if(target != null)
                    {
                        ICCBMemberVariableAssigner targetAsCCBMemberVariableAssigner = target as ICCBMemberVariableAssigner;
                        if(targetAsCCBMemberVariableAssigner != null)
                        {
                            var pCustomPropeties = ccNodeLoader.CustomProperties;
                            foreach (var pElement in pCustomPropeties)
                            {
                                customAssigned = targetAsCCBMemberVariableAssigner.OnAssignCCBCustomProperty(target, pElement.Key, pElement.Value);

                                if(!customAssigned && _CCBMemberVariableAssigner != null)
                                {
                                    customAssigned = _CCBMemberVariableAssigner.OnAssignCCBCustomProperty(target, pElement.Key, pElement.Value);
                                }
                            }
                        }
                    }
                }
            }
#endif // CCB_ENABLE_JAVASCRIPT

            _animatedProps.Clear();

            /* Read and add children. */
            int numChildren = ReadInt(false);
            for (int i = 0; i < numChildren; i++)
            {
                CCNode child = ReadNodeGraph(node);
                node.AddChild(child);
            }

            // Skip the callbacks for sub-ccb wrappers; the embedded root
            // already received them when its own file was parsed.
            if (!isCCBFileNode)
            {
                // Call onNodeLoaded
                var nodeAsCCNodeLoaderListener = node as ICCNodeLoaderListener;
                if (nodeAsCCNodeLoaderListener != null)
                {
                    nodeAsCCNodeLoaderListener.OnNodeLoaded(node, ccNodeLoader);
                }
                else if (_nodeLoaderListener != null)
                {
                    _nodeLoaderListener.OnNodeLoaded(node, ccNodeLoader);
                }
            }
            return node;
        }

        // Reads one bit (LSB-first within each byte) and advances the cursor.
        private bool GetBit()
        {
            bool bit;
            byte b = _bytes[_currentByte];
            if ((b & (1 << _currentBit)) != 0)
            {
                bit = true;
            }
            else
            {
                bit = false;
            }

            _currentBit++;

            if (_currentBit >= 8)
            {
                _currentBit = 0;
                _currentByte++;
            }

            return bit;
        }

        // Advances the cursor to the next byte boundary if mid-byte.
        private void AlignBits()
        {
            if (_currentBit != 0)
            {
                _currentBit = 0;
                _currentByte++;
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) Under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for Additional information regarding copyright ownership.
 * The ASF licenses this file to You Under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed Under the License is distributed on an "AS Is" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations Under the License.
 */

namespace NPOI.SS.Formula.Functions
{
    using System;
    using System.Collections.Generic;
    using NPOI.SS.Formula.Eval;
    using NPOI.SS.Formula;

    /**
     * @author Amol S. Deshmukh &lt; amolweb at ya hoo dot com &gt;
     * This Is the base class for all excel function evaluator
     * classes that take variable number of operands, and
     * where the order of operands does not matter
     */
    internal abstract class MultiOperandNumericFunction : Function
    {
        // Shared empty result so ToArray() on an empty list never allocates.
        static double[] EMPTY_DOUBLE_ARRAY = { };

        // Whether a boolean reached *via a cell reference* contributes 1/0 to the
        // number array (Excel functions differ here, e.g. SUM vs. SUMPRODUCT).
        private bool _isReferenceBoolCounted;
        // Whether a blank cell contributes 0.0 (e.g. AVERAGE ignores blanks,
        // some other functions count them).
        private bool _isBlankCounted;

        /**
         * @param isReferenceBoolCounted whether booleans behind references are coerced to 1/0
         * @param isBlankCounted whether blank cells are counted as 0.0
         */
        protected MultiOperandNumericFunction(bool isReferenceBoolCounted, bool isBlankCounted)
        {
            _isReferenceBoolCounted = isReferenceBoolCounted;
            _isBlankCounted = isBlankCounted;
        }

        /**
         * Subclass hook: compute the numeric result (e.g. SUM, MAX, AVERAGE)
         * from the already-collected operand values.
         */
        protected internal abstract double Evaluate(double[] values);

        /**
         * Function entry point: flattens all operands to a double array,
         * delegates to the subclass, and maps errors/NaN/Infinity to the
         * appropriate Excel error value.
         */
        public ValueEval Evaluate(ValueEval[] args, int srcCellRow, int srcCellCol)
        {
            double d;
            try
            {
                double[] values = GetNumberArray(args);
                d = Evaluate(values);
            }
            catch (EvaluationException e)
            {
                // Error operands (or invalid strings) surface as their error eval.
                return e.GetErrorEval();
            }
            // Excel has no NaN/Infinity; both map to #NUM!.
            if (Double.IsNaN(d) || Double.IsInfinity(d))
                return ErrorEval.NUM_ERROR;
            return new NumberEval(d);
        }

        /**
         * Minimal growable double buffer (avoids boxing that List&lt;double&gt;
         * would not cause, but keeps the POI-ported API shape).
         */
        private class DoubleList
        {
            private double[] _array;
            private int _Count;

            public DoubleList()
            {
                _array = new double[8];
                _Count = 0;
            }

            /** Returns a right-sized copy of the collected values. */
            public double[] ToArray()
            {
                if (_Count < 1)
                {
                    return EMPTY_DOUBLE_ARRAY;
                }
                double[] result = new double[_Count];
                Array.Copy(_array, 0, result, 0, _Count);
                return result;
            }

            /** Appends all values from the given array. */
            public void Add(double[] values)
            {
                int AddLen = values.Length;
                EnsureCapacity(_Count + AddLen);
                Array.Copy(values, 0, _array, _Count, AddLen);
                _Count += AddLen;
            }

            private void EnsureCapacity(int reqSize)
            {
                if (reqSize > _array.Length)
                {
                    int newSize = reqSize * 3 / 2; // grow with 50% extra
                    double[] newArr = new double[newSize];
                    Array.Copy(_array, 0, newArr, 0, _Count);
                    _array = newArr;
                }
            }

            /** Appends a single value. */
            public void Add(double value)
            {
                EnsureCapacity(_Count + 1);
                _array[_Count] = value;
                _Count++;
            }
        }

        // Excel's classic limit on the number of arguments to these functions.
        private static int DEFAULT_MAX_NUM_OPERANDS = 30;

        /**
         * Maximum number of operands accepted by this function.
         * Subclasses may override to Change default value.
         */
        protected int MaxNumOperands
        {
            get { return DEFAULT_MAX_NUM_OPERANDS; }
        }

        /**
         * Whether to count nested subtotals.
         */
        public virtual bool IsSubtotalCounted
        {
            get { return true; }
        }

        /**
         * Collects values from a single argument: expands area (2-D) operands
         * cell by cell, dereferences single-cell references, and treats
         * anything else as a literal operand.
         */
        private void CollectValues(ValueEval operand, DoubleList temp)
        {
            if (operand is TwoDEval)
            {
                TwoDEval ae = (TwoDEval)operand;
                int width = ae.Width;
                int height = ae.Height;
                for (int rrIx = 0; rrIx < height; rrIx++)
                {
                    for (int rcIx = 0; rcIx < width; rcIx++)
                    {
                        ValueEval ve = ae.GetValue(rrIx, rcIx);
                        // Skip nested SUBTOTAL results when the function excludes them.
                        if (!IsSubtotalCounted && ae.IsSubTotal(rrIx, rcIx))
                            continue;
                        // Values inside an area count as "via reference".
                        CollectValue(ve, true, temp);
                    }
                }
                return;
            }
            if (operand is RefEval)
            {
                RefEval re = (RefEval)operand;
                CollectValue(re.InnerValueEval, true, temp);
                return;
            }
            // A direct (literal) operand, e.g. a typed-in number, string or bool.
            CollectValue((ValueEval)operand, false, temp);
        }

        /**
         * Coerces one value to double(s) per Excel rules.
         * isViaReference distinguishes cell-referenced values (strings ignored,
         * bools/blanks conditionally counted) from literal operands.
         * @exception EvaluationException for error cells and unparsable literal strings
         */
        private void CollectValue(ValueEval ve, bool isViaReference, DoubleList temp)
        {
            if (ve == null)
            {
                throw new ArgumentException("ve must not be null");
            }
            if (ve is NumberEval)
            {
                NumberEval ne = (NumberEval)ve;
                temp.Add(ne.NumberValue);
                return;
            }
            if (ve is ErrorEval)
            {
                // Propagate cell errors (#DIV/0!, #VALUE!, ...) to the caller.
                throw new EvaluationException((ErrorEval)ve);
            }
            if (ve is StringEval)
            {
                if (isViaReference)
                {
                    // ignore all ref strings
                    return;
                }
                // Literal strings must parse as numbers, otherwise #VALUE!.
                String s = ((StringEval)ve).StringValue;
                Double d = OperandResolver.ParseDouble(s);
                if (double.IsNaN(d))
                {
                    throw new EvaluationException(ErrorEval.VALUE_INVALID);
                }
                temp.Add(d);
                return;
            }
            if (ve is BoolEval)
            {
                // Literal bools always count; referenced bools only when configured.
                if (!isViaReference || _isReferenceBoolCounted)
                {
                    BoolEval boolEval = (BoolEval)ve;
                    temp.Add(boolEval.NumberValue);
                }
                return;
            }
            if (ve == BlankEval.instance)
            {
                if (_isBlankCounted)
                {
                    temp.Add(0.0);
                }
                return;
            }
            throw new InvalidOperationException("Invalid ValueEval type passed for conversion: ("
                    + ve.GetType() + ")");
        }

        /**
         * Returns a double array that contains values for the numeric cells
         * from among the list of operands. Blanks and Blank equivalent cells
         * are ignored. Error operands or cells containing operands of type
         * that are considered invalid and would result in #VALUE! error in
         * excel cause this function to return <c>null</c>.
         *
         * @return never <c>null</c>
         */
        protected double[] GetNumberArray(ValueEval[] operands)
        {
            if (operands.Length > MaxNumOperands)
            {
                throw EvaluationException.InvalidValue();
            }
            DoubleList retval = new DoubleList();
            for (int i = 0, iSize = operands.Length; i < iSize; i++)
            {
                CollectValues(operands[i], retval);
            }
            return retval.ToArray();
        }

        /**
         * Ensures that a two dimensional array has all sub-arrays present and the same Length
         * @return <c>false</c> if any sub-array Is missing, or Is of different Length
         */
        protected static bool AreSubArraysConsistent(double[][] values)
        {
            if (values == null || values.Length < 1)
            {
                // TODO this doesn't seem right. Fix or Add comment.
                return true;
            }
            if (values[0] == null)
            {
                return false;
            }
            int outerMax = values.Length;
            int innerMax = values[0].Length;
            for (int i = 1; i < outerMax; i++)
            { // note - 'i=1' start at second sub-array
                double[] subArr = values[i];
                if (subArr == null)
                {
                    return false;
                }
                if (innerMax != subArr.Length)
                {
                    return false;
                }
            }
            return true;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Data.Entity;
using Microsoft.Data.Entity.Metadata;
using Microsoft.Data.Entity.Metadata.Builders;
using Microsoft.Data.Entity.Relational.Migrations;
using Microsoft.Data.Entity.Relational.Migrations.Builders;
using Microsoft.Data.Entity.Relational.Migrations.Infrastructure;
using Microsoft.Data.Entity.Relational.Migrations.Operations;
using Viex.Models;

namespace Viex.Migrations
{
    // Scaffolded EF7 (beta4) migration that creates the standard ASP.NET
    // Identity tables (users, roles, claims, logins, user-role join table).
    // NOTE(review): generated code — avoid hand-editing the table/column
    // definitions; regenerate from the model instead.
    public partial class CreateIdentitySchema : Migration
    {
        // Applies the migration: creates all Identity tables and their
        // primary/foreign keys.
        public override void Up(MigrationBuilder migration)
        {
            // Users table (one row per application user).
            migration.CreateTable(
                name: "AspNetUsers",
                columns: table => new
                {
                    AccessFailedCount = table.Column(type: "int", nullable: false),
                    ConcurrencyStamp = table.Column(type: "nvarchar(max)", nullable: true),
                    Email = table.Column(type: "nvarchar(max)", nullable: true),
                    EmailConfirmed = table.Column(type: "bit", nullable: false),
                    Id = table.Column(type: "nvarchar(450)", nullable: true),
                    LockoutEnabled = table.Column(type: "bit", nullable: false),
                    LockoutEnd = table.Column(type: "datetimeoffset", nullable: true),
                    NormalizedEmail = table.Column(type: "nvarchar(max)", nullable: true),
                    NormalizedUserName = table.Column(type: "nvarchar(max)", nullable: true),
                    PasswordHash = table.Column(type: "nvarchar(max)", nullable: true),
                    PhoneNumber = table.Column(type: "nvarchar(max)", nullable: true),
                    PhoneNumberConfirmed = table.Column(type: "bit", nullable: false),
                    SecurityStamp = table.Column(type: "nvarchar(max)", nullable: true),
                    TwoFactorEnabled = table.Column(type: "bit", nullable: false),
                    UserName = table.Column(type: "nvarchar(max)", nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetUsers", x => x.Id);
                });
            // Roles table.
            migration.CreateTable(
                name: "AspNetRoles",
                columns: table => new
                {
                    ConcurrencyStamp = table.Column(type: "nvarchar(max)", nullable: true),
                    Id = table.Column(type: "nvarchar(450)", nullable: true),
                    Name = table.Column(type: "nvarchar(max)", nullable: true),
                    NormalizedName = table.Column(type: "nvarchar(max)", nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetRoles", x => x.Id);
                });
            // Per-user claims; Id is a SQL Server identity column.
            migration.CreateTable(
                name: "AspNetUserClaims",
                columns: table => new
                {
                    ClaimType = table.Column(type: "nvarchar(max)", nullable: true),
                    ClaimValue = table.Column(type: "nvarchar(max)", nullable: true),
                    Id = table.Column(type: "int", nullable: false)
                        .Annotation("SqlServer:ValueGeneration", "Identity"),
                    UserId = table.Column(type: "nvarchar(450)", nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetUserClaims", x => x.Id);
                    table.ForeignKey(
                        name: "FK_AspNetUserClaims_AspNetUsers_UserId",
                        columns: x => x.UserId,
                        referencedTable: "AspNetUsers",
                        referencedColumn: "Id");
                });
            // External login providers; composite key (provider, key).
            migration.CreateTable(
                name: "AspNetUserLogins",
                columns: table => new
                {
                    LoginProvider = table.Column(type: "nvarchar(450)", nullable: true),
                    ProviderDisplayName = table.Column(type: "nvarchar(max)", nullable: true),
                    ProviderKey = table.Column(type: "nvarchar(450)", nullable: true),
                    UserId = table.Column(type: "nvarchar(450)", nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetUserLogins", x => new { x.LoginProvider, x.ProviderKey });
                    table.ForeignKey(
                        name: "FK_AspNetUserLogins_AspNetUsers_UserId",
                        columns: x => x.UserId,
                        referencedTable: "AspNetUsers",
                        referencedColumn: "Id");
                });
            // Per-role claims; Id is a SQL Server identity column.
            migration.CreateTable(
                name: "AspNetRoleClaims",
                columns: table => new
                {
                    ClaimType = table.Column(type: "nvarchar(max)", nullable: true),
                    ClaimValue = table.Column(type: "nvarchar(max)", nullable: true),
                    Id = table.Column(type: "int", nullable: false)
                        .Annotation("SqlServer:ValueGeneration", "Identity"),
                    RoleId = table.Column(type: "nvarchar(450)", nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetRoleClaims", x => x.Id);
                    table.ForeignKey(
                        name: "FK_AspNetRoleClaims_AspNetRoles_RoleId",
                        columns: x => x.RoleId,
                        referencedTable: "AspNetRoles",
                        referencedColumn: "Id");
                });
            // User <-> role join table; composite key (user, role).
            migration.CreateTable(
                name: "AspNetUserRoles",
                columns: table => new
                {
                    RoleId = table.Column(type: "nvarchar(450)", nullable: true),
                    UserId = table.Column(type: "nvarchar(450)", nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_AspNetUserRoles", x => new { x.UserId, x.RoleId });
                    table.ForeignKey(
                        name: "FK_AspNetUserRoles_AspNetRoles_RoleId",
                        columns: x => x.RoleId,
                        referencedTable: "AspNetRoles",
                        referencedColumn: "Id");
                    table.ForeignKey(
                        name: "FK_AspNetUserRoles_AspNetUsers_UserId",
                        columns: x => x.UserId,
                        referencedTable: "AspNetUsers",
                        referencedColumn: "Id");
                });
        }

        // Reverts the migration: drops dependent (child) tables before the
        // tables they reference, so no FK violations occur.
        public override void Down(MigrationBuilder migration)
        {
            migration.DropTable("AspNetUserRoles");
            migration.DropTable("AspNetRoleClaims");
            migration.DropTable("AspNetUserLogins");
            migration.DropTable("AspNetUserClaims");
            migration.DropTable("AspNetRoles");
            migration.DropTable("AspNetUsers");
        }
    }

    // Migration metadata + target-model snapshot used by EF to diff future
    // model changes against this migration.
    [ContextType(typeof(ApplicationDbContext))]
    partial class CreateIdentitySchema
    {
        // Unique migration id (timestamp prefix + name).
        public override string Id
        {
            get { return "00000000000000_CreateIdentitySchema"; }
        }

        // EF version this migration was scaffolded with.
        public override string ProductVersion
        {
            get { return "7.0.0-beta4"; }
        }

        // Snapshot of the model as of this migration.
        public override IModel Target
        {
            get
            {
                var builder = new BasicModelBuilder()
                    .Annotation("SqlServer:ValueGeneration", "Identity");

                builder.Entity("Viex.Models.ApplicationUser", b =>
                    {
                        b.Property<int>("AccessFailedCount")
                            .Annotation("OriginalValueIndex", 0);
                        b.Property<string>("ConcurrencyStamp")
                            .ConcurrencyToken()
                            .Annotation("OriginalValueIndex", 1);
                        b.Property<string>("Email")
                            .Annotation("OriginalValueIndex", 2);
                        b.Property<bool>("EmailConfirmed")
                            .Annotation("OriginalValueIndex", 3);
                        b.Property<string>("Id")
                            .GenerateValueOnAdd()
                            .Annotation("OriginalValueIndex", 4);
                        b.Property<bool>("LockoutEnabled")
                            .Annotation("OriginalValueIndex", 5);
                        b.Property<DateTimeOffset?>("LockoutEnd")
                            .Annotation("OriginalValueIndex", 6);
                        b.Property<string>("NormalizedEmail")
                            .Annotation("OriginalValueIndex", 7);
                        b.Property<string>("NormalizedUserName")
                            .Annotation("OriginalValueIndex", 8);
                        b.Property<string>("PasswordHash")
                            .Annotation("OriginalValueIndex", 9);
                        b.Property<string>("PhoneNumber")
                            .Annotation("OriginalValueIndex", 10);
                        b.Property<bool>("PhoneNumberConfirmed")
                            .Annotation("OriginalValueIndex", 11);
                        b.Property<string>("SecurityStamp")
                            .Annotation("OriginalValueIndex", 12);
                        b.Property<bool>("TwoFactorEnabled")
                            .Annotation("OriginalValueIndex", 13);
                        b.Property<string>("UserName")
                            .Annotation("OriginalValueIndex", 14);
                        b.Key("Id");
                        b.Annotation("Relational:TableName", "AspNetUsers");
                    });

                builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRole", b =>
                    {
                        b.Property<string>("ConcurrencyStamp")
                            .ConcurrencyToken()
                            .Annotation("OriginalValueIndex", 0);
                        b.Property<string>("Id")
                            .GenerateValueOnAdd()
                            .Annotation("OriginalValueIndex", 1);
                        b.Property<string>("Name")
                            .Annotation("OriginalValueIndex", 2);
                        b.Property<string>("NormalizedName")
                            .Annotation("OriginalValueIndex", 3);
                        b.Key("Id");
                        b.Annotation("Relational:TableName", "AspNetRoles");
                    });

                builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRoleClaim`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]]", b =>
                    {
                        b.Property<string>("ClaimType")
                            .Annotation("OriginalValueIndex", 0);
                        b.Property<string>("ClaimValue")
                            .Annotation("OriginalValueIndex", 1);
                        b.Property<int>("Id")
                            .GenerateValueOnAdd()
                            .Annotation("OriginalValueIndex", 2)
                            .Annotation("SqlServer:ValueGeneration", "Default");
                        b.Property<string>("RoleId")
                            .Annotation("OriginalValueIndex", 3);
                        b.Key("Id");
                        b.Annotation("Relational:TableName", "AspNetRoleClaims");
                    });

                builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserClaim`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]]", b =>
                    {
                        b.Property<string>("ClaimType")
                            .Annotation("OriginalValueIndex", 0);
                        b.Property<string>("ClaimValue")
                            .Annotation("OriginalValueIndex", 1);
                        b.Property<int>("Id")
                            .GenerateValueOnAdd()
                            .Annotation("OriginalValueIndex", 2)
                            .Annotation("SqlServer:ValueGeneration", "Default");
                        b.Property<string>("UserId")
                            .Annotation("OriginalValueIndex", 3);
                        b.Key("Id");
                        b.Annotation("Relational:TableName", "AspNetUserClaims");
                    });

                builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserLogin`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]]", b =>
                    {
                        b.Property<string>("LoginProvider")
                            .GenerateValueOnAdd()
                            .Annotation("OriginalValueIndex", 0);
                        b.Property<string>("ProviderDisplayName")
                            .Annotation("OriginalValueIndex", 1);
                        b.Property<string>("ProviderKey")
                            .GenerateValueOnAdd()
                            .Annotation("OriginalValueIndex", 2);
                        b.Property<string>("UserId")
                            .Annotation("OriginalValueIndex", 3);
                        b.Key("LoginProvider", "ProviderKey");
                        b.Annotation("Relational:TableName", "AspNetUserLogins");
                    });

                builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserRole`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]]", b =>
                    {
                        b.Property<string>("RoleId")
                            .Annotation("OriginalValueIndex", 0);
                        b.Property<string>("UserId")
                            .Annotation("OriginalValueIndex", 1);
                        b.Key("UserId", "RoleId");
                        b.Annotation("Relational:TableName", "AspNetUserRoles");
                    });

                // Foreign-key relationships are declared after all entities exist.
                builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityRoleClaim`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]]", b =>
                    {
                        b.ForeignKey("Microsoft.AspNet.Identity.EntityFramework.IdentityRole", "RoleId");
                    });

                builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserClaim`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]]", b =>
                    {
                        b.ForeignKey("Viex.Models.ApplicationUser", "UserId");
                    });

                builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserLogin`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]]", b =>
                    {
                        b.ForeignKey("Viex.Models.ApplicationUser", "UserId");
                    });

                builder.Entity("Microsoft.AspNet.Identity.EntityFramework.IdentityUserRole`1[[System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]]", b =>
                    {
                        b.ForeignKey("Microsoft.AspNet.Identity.EntityFramework.IdentityRole", "RoleId");
                        b.ForeignKey("Viex.Models.ApplicationUser", "UserId");
                    });

                return builder.Model;
            }
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.CodeStyle;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Formatting;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Simplification;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.CSharp.IntroduceVariable
{
    // "Introduce local variable" refactoring: replaces an expression (and
    // optionally all its occurrences) with a new local, generating the
    // declaration in the appropriate scope (lambda body, expression-bodied
    // member, or enclosing block).
    internal partial class CSharpIntroduceVariableService
    {
        // Entry point. Builds the local declaration for <paramref name="expression"/>
        // and dispatches to the lambda / expression-bodied-member / block strategies
        // depending on where the expression lives.
        protected override Task<Document> IntroduceLocalAsync(
            SemanticDocument document,
            ExpressionSyntax expression,
            bool allOccurrences,
            bool isConstant,
            CancellationToken cancellationToken)
        {
            var options = document.Project.Solution.Workspace.Options;

            var newLocalNameToken = GenerateUniqueLocalName(document, expression, isConstant, cancellationToken);
            var newLocalName = SyntaxFactory.IdentifierName(newLocalNameToken);

            var modifiers = isConstant ?
                SyntaxFactory.TokenList(SyntaxFactory.Token(SyntaxKind.ConstKeyword)) :
                default(SyntaxTokenList);

            // "const? T name = <expression>;" — the new name carries a rename
            // annotation so the IDE can start an inline-rename session.
            var declarationStatement = SyntaxFactory.LocalDeclarationStatement(
                modifiers,
                SyntaxFactory.VariableDeclaration(
                    this.GetTypeSyntax(document, expression, isConstant, options, cancellationToken),
                    SyntaxFactory.SingletonSeparatedList(SyntaxFactory.VariableDeclarator(
                        newLocalNameToken.WithAdditionalAnnotations(RenameAnnotation.Create()),
                        null,
                        SyntaxFactory.EqualsValueClause(expression.WithoutTrailingTrivia().WithoutLeadingTrivia())))));

            // Find lambdas whose parameters the expression captures: the local
            // must be declared inside such a lambda, not outside it.
            var anonymousMethodParameters = GetAnonymousMethodParameters(document, expression, cancellationToken);
            var lambdas = anonymousMethodParameters.SelectMany(p => p.ContainingSymbol.DeclaringSyntaxReferences.Select(r => r.GetSyntax(cancellationToken)).AsEnumerable())
                                                   .Where(n => n is ParenthesizedLambdaExpressionSyntax || n is SimpleLambdaExpressionSyntax)
                                                   .ToSet();

            var parentLambda = GetParentLambda(expression, lambdas);

            if (parentLambda != null)
            {
                return Task.FromResult(IntroduceLocalDeclarationIntoLambda(
                    document, expression, newLocalName, declarationStatement,
                    parentLambda, allOccurrences, cancellationToken));
            }
            else if (IsInExpressionBodiedMember(expression))
            {
                return Task.FromResult(RewriteExpressionBodiedMemberAndIntroduceLocalDeclaration(
                    document, expression, newLocalName, declarationStatement,
                    allOccurrences, cancellationToken));
            }
            else
            {
                return IntroduceLocalDeclarationIntoBlockAsync(
                    document, expression, newLocalName, declarationStatement,
                    allOccurrences, cancellationToken);
            }
        }

        // Converts an expression-bodied lambda into a block-bodied one that
        // declares the new local; for non-void delegates, returns the rewritten
        // expression (for void delegates the declaration alone suffices, since
        // it carries the original expression as its initializer).
        private Document IntroduceLocalDeclarationIntoLambda(
            SemanticDocument document,
            ExpressionSyntax expression,
            IdentifierNameSyntax newLocalName,
            LocalDeclarationStatementSyntax declarationStatement,
            SyntaxNode oldLambda,
            bool allOccurrences,
            CancellationToken cancellationToken)
        {
            var oldBody = oldLambda is ParenthesizedLambdaExpressionSyntax ?
                (ExpressionSyntax)((ParenthesizedLambdaExpressionSyntax)oldLambda).Body :
                (ExpressionSyntax)((SimpleLambdaExpressionSyntax)oldLambda).Body;

            var rewrittenBody = Rewrite(
                document, expression, newLocalName, document, oldBody, allOccurrences, cancellationToken);

            var delegateType = document.SemanticModel.GetTypeInfo(oldLambda, cancellationToken).ConvertedType as INamedTypeSymbol;

            var newBody = delegateType != null && delegateType.DelegateInvokeMethod != null && delegateType.DelegateInvokeMethod.ReturnsVoid
                ? SyntaxFactory.Block(declarationStatement)
                : SyntaxFactory.Block(declarationStatement, SyntaxFactory.ReturnStatement(rewrittenBody));

            newBody = newBody.WithAdditionalAnnotations(Formatter.Annotation);

            var newLambda = oldLambda is ParenthesizedLambdaExpressionSyntax ?
                ((ParenthesizedLambdaExpressionSyntax)oldLambda).WithBody(newBody) :
                (SyntaxNode)((SimpleLambdaExpressionSyntax)oldLambda).WithBody(newBody);

            var newRoot = document.Root.ReplaceNode(oldLambda, newLambda);
            return document.Document.WithSyntaxRoot(newRoot);
        }

        // Walks up from the expression and returns the nearest enclosing lambda
        // that is in the given set, or null if none encloses it.
        private SyntaxNode GetParentLambda(ExpressionSyntax expression, ISet<SyntaxNode> lambdas)
        {
            var current = expression;
            while (current != null)
            {
                if (lambdas.Contains(current.Parent))
                {
                    return current.Parent;
                }

                current = current.Parent as ExpressionSyntax;
            }

            return null;
        }

        // Chooses the declared type for the new local: 'var' for anonymous
        // types, 'var' when the user's code style prefers it (and it is legal),
        // otherwise the explicit type.
        private TypeSyntax GetTypeSyntax(SemanticDocument document, ExpressionSyntax expression, bool isConstant, OptionSet options, CancellationToken cancellationToken)
        {
            var typeSymbol = GetTypeSymbol(document, expression, cancellationToken);
            if (typeSymbol.ContainsAnonymousType())
            {
                return SyntaxFactory.IdentifierName("var");
            }

            if (!isConstant &&
                options.GetOption(CSharpCodeStyleOptions.UseVarWhenDeclaringLocals) &&
                CanUseVar(typeSymbol))
            {
                return SyntaxFactory.IdentifierName("var");
            }

            return typeSymbol.GenerateTypeSyntax();
        }

        // 'var' would change semantics for delegates (natural type issues),
        // error types, and FormattableString (interpolated-string conversion).
        private bool CanUseVar(ITypeSymbol typeSymbol)
        {
            return typeSymbol.TypeKind != TypeKind.Delegate
                && !typeSymbol.IsErrorType()
                && !typeSymbol.IsFormattableString();
        }

        // Expands ("complexifies") the topmost expressions that parent each
        // match, so names are fully qualified before the matches are replaced;
        // the matches themselves are tracked so they survive the rewrites.
        private static async Task<Tuple<SemanticDocument, ISet<ExpressionSyntax>>> ComplexifyParentingStatements(
            SemanticDocument semanticDocument,
            ISet<ExpressionSyntax> matches,
            CancellationToken cancellationToken)
        {
            // First, track the matches so that we can get back to them later.
            var newRoot = semanticDocument.Root.TrackNodes(matches);
            var newDocument = semanticDocument.Document.WithSyntaxRoot(newRoot);
            var newSemanticDocument = await SemanticDocument.CreateAsync(newDocument, cancellationToken).ConfigureAwait(false);
            var newMatches = newSemanticDocument.Root.GetCurrentNodes(matches.AsEnumerable()).ToSet();

            // Next, expand the topmost parenting expression of each match, being careful
            // not to expand the matches themselves.
            var topMostExpressions = newMatches
                .Select(m => m.AncestorsAndSelf().OfType<ExpressionSyntax>().Last())
                .Distinct();

            newRoot = await newSemanticDocument.Root
                .ReplaceNodesAsync(
                    topMostExpressions,
                    computeReplacementAsync: async (oldNode, newNode, ct) =>
                    {
                        return await Simplifier
                            .ExpandAsync(
                                oldNode,
                                newSemanticDocument.Document,
                                expandInsideNode: node =>
                                {
                                    var expression = node as ExpressionSyntax;
                                    return expression == null
                                        || !newMatches.Contains(expression);
                                },
                                cancellationToken: ct)
                            .ConfigureAwait(false);
                    },
                    cancellationToken: cancellationToken)
                .ConfigureAwait(false);

            newDocument = newSemanticDocument.Document.WithSyntaxRoot(newRoot);
            newSemanticDocument = await SemanticDocument.CreateAsync(newDocument, cancellationToken).ConfigureAwait(false);
            newMatches = newSemanticDocument.Root.GetCurrentNodes(matches.AsEnumerable()).ToSet();

            return Tuple.Create(newSemanticDocument, newMatches);
        }

        // Rewrites an expression-bodied member (property/indexer/method/operator/
        // conversion) into a block body containing the new declaration plus a
        // return of the rewritten expression, preserving surrounding trivia.
        private Document RewriteExpressionBodiedMemberAndIntroduceLocalDeclaration(
            SemanticDocument document,
            ExpressionSyntax expression,
            NameSyntax newLocalName,
            LocalDeclarationStatementSyntax declarationStatement,
            bool allOccurrences,
            CancellationToken cancellationToken)
        {
            var oldBody = expression.GetAncestorOrThis<ArrowExpressionClauseSyntax>();
            var oldParentingNode = oldBody.Parent;
            // Keep both the clause's leading trivia and anything trailing '=>'.
            var leadingTrivia = oldBody.GetLeadingTrivia()
                .AddRange(oldBody.ArrowToken.TrailingTrivia);

            var newStatement = Rewrite(document, expression, newLocalName, document, oldBody.Expression, allOccurrences, cancellationToken);
            var newBody = SyntaxFactory.Block(declarationStatement, SyntaxFactory.ReturnStatement(newStatement))
                .WithLeadingTrivia(leadingTrivia)
                .WithTrailingTrivia(oldBody.GetTrailingTrivia())
                .WithAdditionalAnnotations(Formatter.Annotation);

            SyntaxNode newParentingNode = null;
            if (oldParentingNode is BasePropertyDeclarationSyntax)
            {
                // Property/indexer: the arrow clause becomes a get accessor.
                var getAccessor = SyntaxFactory.AccessorDeclaration(SyntaxKind.GetAccessorDeclaration, newBody);
                var accessorList = SyntaxFactory.AccessorList(SyntaxFactory.List(new[] { getAccessor }));

                newParentingNode = ((BasePropertyDeclarationSyntax)oldParentingNode).RemoveNode(oldBody, SyntaxRemoveOptions.KeepNoTrivia);

                if (newParentingNode.IsKind(SyntaxKind.PropertyDeclaration))
                {
                    var propertyDeclaration = ((PropertyDeclarationSyntax)newParentingNode);
                    newParentingNode = propertyDeclaration
                        .WithAccessorList(accessorList)
                        .WithSemicolonToken(SyntaxFactory.Token(SyntaxKind.None))
                        .WithTrailingTrivia(propertyDeclaration.SemicolonToken.TrailingTrivia);
                }
                else if (newParentingNode.IsKind(SyntaxKind.IndexerDeclaration))
                {
                    var indexerDeclaration = ((IndexerDeclarationSyntax)newParentingNode);
                    newParentingNode = indexerDeclaration
                        .WithAccessorList(accessorList)
                        .WithSemicolonToken(SyntaxFactory.Token(SyntaxKind.None))
                        .WithTrailingTrivia(indexerDeclaration.SemicolonToken.TrailingTrivia);
                }
            }
            else if (oldParentingNode is BaseMethodDeclarationSyntax)
            {
                // Method-like members: the arrow clause becomes the method body;
                // the trailing ';' must be dropped (its trivia is preserved).
                newParentingNode = ((BaseMethodDeclarationSyntax)oldParentingNode)
                    .RemoveNode(oldBody, SyntaxRemoveOptions.KeepNoTrivia)
                    .WithBody(newBody);

                if (newParentingNode.IsKind(SyntaxKind.MethodDeclaration))
                {
                    var methodDeclaration = ((MethodDeclarationSyntax)newParentingNode);
                    newParentingNode = methodDeclaration
                        .WithSemicolonToken(SyntaxFactory.Token(SyntaxKind.None))
                        .WithTrailingTrivia(methodDeclaration.SemicolonToken.TrailingTrivia);
                }
                else if (newParentingNode.IsKind(SyntaxKind.OperatorDeclaration))
                {
                    var operatorDeclaration = ((OperatorDeclarationSyntax)newParentingNode);
                    newParentingNode = operatorDeclaration
                        .WithSemicolonToken(SyntaxFactory.Token(SyntaxKind.None))
                        .WithTrailingTrivia(operatorDeclaration.SemicolonToken.TrailingTrivia);
                }
                else if (newParentingNode.IsKind(SyntaxKind.ConversionOperatorDeclaration))
                {
                    var conversionOperatorDeclaration = ((ConversionOperatorDeclarationSyntax)newParentingNode);
                    newParentingNode = conversionOperatorDeclaration
                        .WithSemicolonToken(SyntaxFactory.Token(SyntaxKind.None))
                        .WithTrailingTrivia(conversionOperatorDeclaration.SemicolonToken.TrailingTrivia);
                }
            }

            var newRoot = document.Root.ReplaceNode(oldParentingNode, newParentingNode);
            return document.Document.WithSyntaxRoot(newRoot);
        }

        // Block strategy: finds all matches in the outermost block, complexifies
        // their parenting statements, then inserts the declaration either right
        // above the single affected statement or at the top of the innermost
        // block common to all affected statements.
        private async Task<Document> IntroduceLocalDeclarationIntoBlockAsync(
            SemanticDocument document,
            ExpressionSyntax expression,
            NameSyntax newLocalName,
            LocalDeclarationStatementSyntax declarationStatement,
            bool allOccurrences,
            CancellationToken cancellationToken)
        {
            declarationStatement = declarationStatement.WithAdditionalAnnotations(Formatter.Annotation);

            var oldOutermostBlock = expression.GetAncestorsOrThis<BlockSyntax>().LastOrDefault();
            var matches = FindMatches(document, expression, document, oldOutermostBlock, allOccurrences, cancellationToken);
            Debug.Assert(matches.Contains(expression));

            var complexified = await ComplexifyParentingStatements(document, matches, cancellationToken).ConfigureAwait(false);
            document = complexified.Item1;
            matches = complexified.Item2;

            // Our original expression should have been one of the matches, which were tracked as part
            // of complexification, so we can retrieve the latest version of the expression here.
            expression = document.Root.GetCurrentNodes(expression).First();

            var innermostStatements = new HashSet<StatementSyntax>(
                matches.Select(expr => expr.GetAncestorOrThis<StatementSyntax>()));

            if (innermostStatements.Count == 1)
            {
                // If there was only one match, or all the matches came from the same
                // statement, then we want to place the declaration right above that
                // statement. Note: we special case this because the statement we are going
                // to go above might not be in a block and we may have to generate it
                return IntroduceLocalForSingleOccurrenceIntoBlock(
                    document, expression, newLocalName, declarationStatement, allOccurrences, cancellationToken);
            }

            var oldInnerMostCommonBlock = matches.FindInnermostCommonBlock();
            var allAffectedStatements = new HashSet<StatementSyntax>(matches.SelectMany(expr => expr.GetAncestorsOrThis<StatementSyntax>()));
            var firstStatementAffectedInBlock = oldInnerMostCommonBlock.Statements.First(allAffectedStatements.Contains);

            var firstStatementAffectedIndex = oldInnerMostCommonBlock.Statements.IndexOf(firstStatementAffectedInBlock);

            var newInnerMostBlock = Rewrite(
                document, expression, newLocalName, document, oldInnerMostCommonBlock, allOccurrences, cancellationToken);

            // Splice the declaration in just before the first affected statement.
            var statements = new List<StatementSyntax>();
            statements.AddRange(newInnerMostBlock.Statements.Take(firstStatementAffectedIndex));
            statements.Add(declarationStatement);
            statements.AddRange(newInnerMostBlock.Statements.Skip(firstStatementAffectedIndex));

            var finalInnerMostBlock = newInnerMostBlock.WithStatements(
                SyntaxFactory.List<StatementSyntax>(statements));

            var newRoot = document.Root.ReplaceNode(oldInnerMostCommonBlock, finalInnerMostBlock);
            return document.Document.WithSyntaxRoot(newRoot);
        }

        // Inserts the declaration directly above the single statement containing
        // all matches, generating a wrapping block when that statement is not
        // already inside a block or switch section (e.g. an unbraced 'if' body).
        private Document IntroduceLocalForSingleOccurrenceIntoBlock(
            SemanticDocument document,
            ExpressionSyntax expression,
            NameSyntax localName,
            LocalDeclarationStatementSyntax localDeclaration,
            bool allOccurrences,
            CancellationToken cancellationToken)
        {
            var oldStatement = expression.GetAncestorOrThis<StatementSyntax>();
            var newStatement = Rewrite(
                document, expression, localName, document, oldStatement, allOccurrences, cancellationToken);

            if (oldStatement.IsParentKind(SyntaxKind.Block))
            {
                var oldBlock = oldStatement.Parent as BlockSyntax;
                var statementIndex = oldBlock.Statements.IndexOf(oldStatement);

                var newBlock = oldBlock.WithStatements(CreateNewStatementList(
                    oldBlock.Statements, localDeclaration, newStatement, statementIndex));

                var newRoot = document.Root.ReplaceNode(oldBlock, newBlock);
                return document.Document.WithSyntaxRoot(newRoot);
            }
            else if (oldStatement.IsParentKind(SyntaxKind.SwitchSection))
            {
                var oldSwitchSection = oldStatement.Parent as SwitchSectionSyntax;
                var statementIndex = oldSwitchSection.Statements.IndexOf(oldStatement);

                var newSwitchSection = oldSwitchSection.WithStatements(CreateNewStatementList(
                    oldSwitchSection.Statements, localDeclaration, newStatement, statementIndex));

                var newRoot = document.Root.ReplaceNode(oldSwitchSection, newSwitchSection);
                return document.Document.WithSyntaxRoot(newRoot);
            }
            else
            {
                // we need to introduce a block to put the original statement, along with
                // the statement we're generating
                var newBlock = SyntaxFactory.Block(localDeclaration, newStatement).WithAdditionalAnnotations(Formatter.Annotation);

                var newRoot = document.Root.ReplaceNode(oldStatement, newBlock);
                return document.Document.WithSyntaxRoot(newRoot);
            }
        }

        // Builds the statement list with the declaration inserted before the
        // rewritten statement; the declaration takes over the original
        // statement's leading trivia so comments stay attached above it.
        private static SyntaxList<StatementSyntax> CreateNewStatementList(
            SyntaxList<StatementSyntax> oldStatements,
            LocalDeclarationStatementSyntax localDeclaration,
            StatementSyntax newStatement,
            int statementIndex)
        {
            return oldStatements.Take(statementIndex)
                                .Concat(localDeclaration.WithLeadingTrivia(oldStatements.Skip(statementIndex).First().GetLeadingTrivia()))
                                .Concat(newStatement.WithoutLeadingTrivia())
                                .Concat(oldStatements.Skip(statementIndex + 1))
                                .ToSyntaxList();
        }
    }
}
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using Commands.Storage.ScenarioTest.Common;
using Commands.Storage.ScenarioTest.Util;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Microsoft.WindowsAzure.Storage;
using MS.Test.Common.MsTestLib;
using StorageTestLib;
using StorageBlob = Microsoft.WindowsAzure.Storage.Blob;

namespace Commands.Storage.ScenarioTest.Functional.Blob
{
    /// <summary>
    /// Functional tests for the Get-AzureStorageBlobCopyState cmdlet.
    /// Each test drives the cmdlet through the shared PowerShell agent and
    /// inspects agent.Output (a list of property dictionaries) for results.
    /// </summary>
    [TestClass]
    class GetCopyState : TestBase
    {
        [ClassInitialize()]
        public static void GetBlobClassInit(TestContext testContext)
        {
            TestBase.TestClassInitialize(testContext);
        }

        [ClassCleanup()]
        public static void GetBlobClassCleanup()
        {
            TestBase.TestClassCleanup();
        }

        /// <summary>
        /// Monitor multiple copy progress.
        /// 8.21 Get-AzureStorageBlobCopyState Positive Functional Cases
        /// 3. Monitor a list of copying blobs.
        /// Pipes Get-AzureStorageBlob into Start-AzureStorageBlobCopy, then asserts
        /// one finished copy state per source blob.
        /// </summary>
        [TestMethod()]
        [TestCategory(Tag.Function)]
        [TestCategory(PsTag.Blob)]
        [TestCategory(PsTag.GetBlobCopyState)]
        public void GetCopyStateFromMultiBlobsTest()
        {
            StorageBlob.CloudBlobContainer srcContainer = blobUtil.CreateContainer();
            StorageBlob.CloudBlobContainer destContainer = blobUtil.CreateContainer();
            List<StorageBlob.ICloudBlob> blobs = blobUtil.CreateRandomBlob(srcContainer);

            try
            {
                // Build the pipeline: list all source blobs, start a copy for each.
                ((PowerShellAgent)agent).AddPipelineScript(String.Format("Get-AzureStorageBlob -Container {0}", srcContainer.Name));
                ((PowerShellAgent)agent).AddPipelineScript(String.Format("Start-AzureStorageBlobCopy -DestContainer {0}", destContainer.Name));
                Test.Assert(agent.GetAzureStorageBlobCopyState(string.Empty, string.Empty, true), "Get copy state for many blobs should be successed.");
                Test.Assert(agent.Output.Count == blobs.Count, String.Format("Expected get {0} copy state, and actually get {1} copy state", blobs.Count, agent.Output.Count));
                List<StorageBlob.IListBlobItem> destBlobs = destContainer.ListBlobs().ToList();
                Test.Assert(destBlobs.Count == blobs.Count, String.Format("Expected get {0} copied blobs, and actually get {1} copy state", blobs.Count, destBlobs.Count));

                // NOTE(review): assumes agent.Output order matches the order of `blobs`
                // as returned by CreateRandomBlob — confirm against the agent contract.
                for (int i = 0, count = agent.Output.Count; i < count; i++)
                {
                    AssertFinishedCopyState(blobs[i].Uri, i);
                }
            }
            finally
            {
                blobUtil.RemoveContainer(srcContainer.Name);
                blobUtil.RemoveContainer(destContainer.Name);
            }
        }

        /// <summary>
        /// Negative case: invalid container name and over-long (&gt; 1024 chars) blob
        /// name must both fail with a validation error message.
        /// </summary>
        [TestMethod()]
        [TestCategory(Tag.Function)]
        [TestCategory(PsTag.Blob)]
        [TestCategory(PsTag.GetBlobCopyState)]
        public void GetCopyStateWithInvalidNameTest()
        {
            string invalidContainerName = "Invalid";
            // Azure blob names are limited to 1024 characters; exceed by one.
            int maxBlobNameLength = 1024;
            string invalidBlobName = new string('a', maxBlobNameLength + 1);
            string invalidContainerErrorMessage = String.Format("Container name '{0}' is invalid.", invalidContainerName);
            string invalidBlobErrorMessage = String.Format("Blob name '{0}' is invalid.", invalidBlobName);
            Test.Assert(!agent.GetAzureStorageBlobCopyState(invalidContainerName, Utility.GenNameString("blob"), false), "get copy state should failed with invalid container name");
            ExpectedStartsWithErrorMessage(invalidContainerErrorMessage);
            Test.Assert(!agent.GetAzureStorageBlobCopyState(Utility.GenNameString("container"), invalidBlobName, false), "get copy state should failed with invalid blob name");
            ExpectedStartsWithErrorMessage(invalidBlobErrorMessage);
        }

        /// <summary>
        /// Negative case: querying copy state for a missing container, and then for a
        /// missing blob inside an existing container, must fail with "Can not find
        /// blob ..." in both situations.
        /// </summary>
        [TestMethod()]
        [TestCategory(Tag.Function)]
        [TestCategory(PsTag.Blob)]
        [TestCategory(PsTag.GetBlobCopyState)]
        public void GetCopyStateWithNotExistContainerAndBlobTest()
        {
            string srcContainerName = Utility.GenNameString("copy");
            string blobName = Utility.GenNameString("blob");
            string errorMessage = string.Empty;

            Test.Assert(!agent.GetAzureStorageBlobCopyState(srcContainerName, blobName, false), "Get copy state should fail with not existing container");
            errorMessage = string.Format("Can not find blob '{0}' in container '{1}'.", blobName, srcContainerName);
            ExpectedEqualErrorMessage(errorMessage);

            try
            {
                // Same error is expected when the container exists but the blob does not.
                StorageBlob.CloudBlobContainer srcContainer = blobUtil.CreateContainer(srcContainerName);
                Test.Assert(!agent.GetAzureStorageBlobCopyState(srcContainerName, blobName, false), "Get copy state should fail with not existing blob");
                ExpectedEqualErrorMessage(errorMessage);
            }
            finally
            {
                blobUtil.RemoveContainer(srcContainerName);
            }
        }

        /// <summary>
        /// 8.21 Get-AzureStorageBlobCopyState Positive Functional Cases
        /// 4. Monitor copying status of the blob in the root ($root) container.
        /// NOTE(review): no try/finally here — the $root container is not cleaned up
        /// on failure; confirm this is intentional (root container is shared).
        /// </summary>
        [TestMethod()]
        [TestCategory(Tag.Function)]
        [TestCategory(PsTag.Blob)]
        [TestCategory(PsTag.GetBlobCopyState)]
        public void GetCopyStateFromRootContainerTest()
        {
            StorageBlob.CloudBlobContainer rootContainer = blobUtil.CreateContainer("$root");
            string srcBlobName = Utility.GenNameString("src");
            StorageBlob.ICloudBlob srcBlob = blobUtil.CreateRandomBlob(rootContainer, srcBlobName);
            StorageBlob.ICloudBlob destBlob = blobUtil.CreateBlob(rootContainer, Utility.GenNameString("dest"), srcBlob.BlobType);

            // StartCopyFromBlob is type-specific; dispatch on the blob type.
            if (destBlob.BlobType == StorageBlob.BlobType.BlockBlob)
            {
                ((StorageBlob.CloudBlockBlob)destBlob).StartCopyFromBlob((StorageBlob.CloudBlockBlob)srcBlob);
            }
            else
            {
                ((StorageBlob.CloudPageBlob)destBlob).StartCopyFromBlob((StorageBlob.CloudPageBlob)srcBlob);
            }

            Test.Assert(agent.GetAzureStorageBlobCopyState("$root", destBlob.Name, true), "Get copy state in $root container should be successed.");
            AssertFinishedCopyState(srcBlob.Uri);
        }

        /// <summary>
        /// 8.21 Get-AzureStorageBlobCopyState Positive Functional Cases
        /// 5. Get the copy status (on-going) on specified blob for cross account copying.
        /// This test uses the start-copy pipeline, so it also validates the start-copy cmdlet.
        /// </summary>
        [TestMethod()]
        [TestCategory(Tag.Function)]
        [TestCategory(PsTag.Blob)]
        [TestCategory(PsTag.GetBlobCopyState)]
        public void GetCopyStateFromCrossAccountCopyTest()
        {
            CloudStorageAccount secondaryAccount = TestBase.GetCloudStorageAccountFromConfig("Secondary");
            object destContext = PowerShellAgent.GetStorageContext(secondaryAccount.ToString(true));
            CloudBlobUtil destBlobUtil = new CloudBlobUtil(secondaryAccount);
            string destContainerName = Utility.GenNameString("secondary");
            StorageBlob.CloudBlobContainer destContainer = destBlobUtil.CreateContainer(destContainerName);
            blobUtil.SetupTestContainerAndBlob();
            //remove the same name container in source storage account, so we could avoid some conflicts.
            blobUtil.RemoveContainer(destContainer.Name);

            try
            {
                Test.Assert(agent.StartAzureStorageBlobCopy(blobUtil.Blob, destContainer.Name, string.Empty, destContext), "Start cross account copy should successed");
                int expectedBlobCount = 1;
                Test.Assert(agent.Output.Count == expectedBlobCount, String.Format("Expected get {0} copy blob, and actually it's {1}", expectedBlobCount, agent.Output.Count));

                StorageBlob.ICloudBlob destBlob = (StorageBlob.ICloudBlob)agent.Output[0]["ICloudBlob"];
                //make sure this context is different from the PowerShell.Context
                object context = agent.Output[0]["Context"];
                Test.Assert(PowerShellAgent.Context != context, "make sure you are using different context for cross account copy");
                Test.Assert(agent.GetAzureStorageBlobCopyState(destBlob, context, true), "Get copy state in dest container should be successed.");
                AssertFinishedCopyState(blobUtil.Blob.Uri);
            }
            finally
            {
                blobUtil.CleanupTestContainerAndBlob();
                destBlobUtil.RemoveContainer(destContainer.Name);
            }
        }

        /// <summary>
        /// 8.21 Get-AzureStorageBlobCopyState Positive Functional Cases
        /// 5. 6. Get the copy status (on-going) on specified blob for URI copying.
        /// </summary>
        [TestMethod()]
        [TestCategory(Tag.Function)]
        [TestCategory(PsTag.Blob)]
        [TestCategory(PsTag.GetBlobCopyState)]
        public void GetCopyStateFromUriTest()
        {
            blobUtil.SetupTestContainerAndBlob();
            string copiedName = Utility.GenNameString("copied");
            //Set the blob permission, so the copy task could directly copy by uri
            StorageBlob.BlobContainerPermissions permission = new StorageBlob.BlobContainerPermissions();
            permission.PublicAccess = StorageBlob.BlobContainerPublicAccessType.Blob;
            blobUtil.Container.SetPermissions(permission);

            try
            {
                Test.Assert(agent.StartAzureStorageBlobCopy(blobUtil.Blob.Uri.ToString(), blobUtil.ContainerName, copiedName, PowerShellAgent.Context), Utility.GenComparisonData("Start copy blob using source uri", true));
                Test.Assert(agent.GetAzureStorageBlobCopyState(blobUtil.ContainerName, copiedName, true), "Get copy state in dest container should be successed.");
                AssertFinishedCopyState(blobUtil.Blob.Uri);
            }
            finally
            {
                blobUtil.CleanupTestContainerAndBlob();
            }
        }

        /// <summary>
        /// Poll the copy state of a large cross-service copy while it is Pending,
        /// then abort the copy and verify the reported status becomes Aborted.
        /// Skipped silently when the "BigFileUri" test datum is not configured.
        /// </summary>
        [TestMethod()]
        [TestCategory(Tag.Function)]
        [TestCategory(PsTag.Blob)]
        [TestCategory(PsTag.GetBlobCopyState)]
        public void GetCopyStateWhenCopyingTest()
        {
            StorageBlob.CloudBlobContainer Container = blobUtil.CreateContainer();
            string ContainerName = Container.Name;
            string BlobName = Utility.GenNameString("blockblob");
            StorageBlob.ICloudBlob Blob = blobUtil.CreateBlockBlob(Container, BlobName);
            string uri = Test.Data.Get("BigFileUri");
            Test.Assert(!String.IsNullOrEmpty(uri), string.Format("Big file uri should be not empty, actually it's {0}", uri));

            if (String.IsNullOrEmpty(uri))
            {
                return;
            }

            Blob.StartCopyFromBlob(new Uri(uri));

            int maxMonitorTime = 30; //seconds
            int checkCount = 0;
            int sleepInterval = 1000; //ms
            StorageBlob.CopyStatus status = StorageBlob.CopyStatus.Pending;

            try
            {
                int expectedCopyStateCount = 1;

                // Poll once per second (up to maxMonitorTime) while the copy is Pending.
                do
                {
                    Test.Info(String.Format("{0}th check current copy state", checkCount));
                    Test.Assert(agent.GetAzureStorageBlobCopyState(ContainerName, BlobName, false), "Get copy state in dest container should be successed.");
                    Test.Assert(agent.Output.Count == expectedCopyStateCount, String.Format("Should contain {0} copy state, and actually it's {1}", expectedCopyStateCount, agent.Output.Count));
                    status = (StorageBlob.CopyStatus)agent.Output[0]["Status"];
                    Test.Assert(status == StorageBlob.CopyStatus.Pending, String.Format("Copy status should be Pending, actually it's {0}", status));
                    checkCount++;
                    Thread.Sleep(sleepInterval);
                }
                while (status == StorageBlob.CopyStatus.Pending && checkCount < maxMonitorTime);

                Test.Info("Finish the monitor loop and try to abort copy");

                try
                {
                    Blob.AbortCopy(Blob.CopyState.CopyId);
                }
                catch (StorageException e)
                {
                    //TODO use extension method
                    // 409 means the copy already completed/aborted; treat as benign.
                    if (e.RequestInformation != null && e.RequestInformation.HttpStatusCode == 409)
                    {
                        Test.Info("Skip 409 abort conflict exception. Error:{0}", e.Message);
                        Test.Info("Detail Error Message: {0}", e.RequestInformation.HttpStatusMessage);
                    }
                    else
                    {
                        Test.AssertFail(String.Format("Can't abort copy. Error: {0}", e.Message));
                    }
                }

                Test.Assert(agent.GetAzureStorageBlobCopyState(ContainerName, BlobName, false), "Get copy state in dest container should be successed.");
                Test.Assert(agent.Output.Count == expectedCopyStateCount, String.Format("Should contain {0} copy state, and actually it's {1}", expectedCopyStateCount, agent.Output.Count));
                status = (StorageBlob.CopyStatus)agent.Output[0]["Status"];
                Test.Assert(status == StorageBlob.CopyStatus.Aborted, String.Format("Copy status should be Aborted, actually it's {0}", status));
            }
            finally
            {
                blobUtil.RemoveContainer(Container.Name);
            }
        }

        /// <summary>
        /// Asserts that agent.Output[startIndex] describes a completed (non-Pending)
        /// copy whose Source URI starts with the expected source and whose CopyId is set.
        /// </summary>
        private void AssertFinishedCopyState(Uri SourceUri, int startIndex = 0)
        {
            string expectedSourceUri = CloudBlobUtil.ConvertCopySourceUri(SourceUri.ToString());
            Test.Assert(agent.Output.Count > startIndex, String.Format("Should contain the great than {0} copy state, and actually it's {1}", startIndex, agent.Output.Count));
            string sourceUri = ((Uri)agent.Output[startIndex]["Source"]).ToString();
            // StartsWith (rather than equality) because the source URI may carry a SAS/query suffix.
            Test.Assert(sourceUri.StartsWith(expectedSourceUri), String.Format("source uri should start with {0}, and actualy it's {1}", expectedSourceUri, sourceUri));
            StorageBlob.CopyStatus status = (StorageBlob.CopyStatus)agent.Output[startIndex]["Status"];
            Test.Assert(status != StorageBlob.CopyStatus.Pending, String.Format("Copy status should not be Pending, actually it's {0}", status));
            string copyId = (string)agent.Output[startIndex]["CopyId"];
            Test.Assert(!String.IsNullOrEmpty(copyId), "Copy ID should be not empty");
        }
    }
}
//
// BlobStore.cs
//
// Author:
//  Zachary Gramana  <zack@xamarin.com>
//
// Copyright (c) 2014 Xamarin Inc
// Copyright (c) 2014 .NET Foundation
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//
// Copyright (c) 2014 Couchbase, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
//

using System;
using System.Collections.Generic;
using System.IO;
using Couchbase.Lite;
using Couchbase.Lite.Util;
using Sharpen;

namespace Couchbase.Lite
{
    /// <summary>A persistent content-addressable store for arbitrary-size data blobs.</summary>
    /// <remarks>
    /// A persistent content-addressable store for arbitrary-size data blobs.
    /// Each blob is stored as a file named by its SHA-1 digest.
    /// </remarks>
    internal class BlobStore
    {
        public static string FileExtension = ".blob";

        public static string TmpFileExtension = ".blobtmp";

        public static string TmpFilePrefix = "tmp";

        // Two-byte gzip magic number (RFC 1952: 0x1f, 0x8b) assembled
        // low-byte-first, matching how IsGZipped reads it below.
        private const int GZipMagic = 0x8b1f;

        // Absolute directory that holds all blob files for this store.
        private readonly string path;

        /// <summary>
        /// Creates (if necessary) the backing directory for the store.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the directory cannot be created.
        /// </exception>
        public BlobStore(string path)
        {
            this.path = path;
            FilePath directory = new FilePath(path);
            directory.Mkdirs();
            if (!directory.IsDirectory())
            {
                throw new InvalidOperationException(string.Format("Unable to create directory for: {0}", directory));
            }
        }

        /// <summary>Computes the content-address (SHA-1 digest) of an in-memory blob.</summary>
        /// <returns>The key, or null when SHA-1 is unavailable.</returns>
        public static BlobKey KeyForBlob(byte[] data)
        {
            MessageDigest md;
            try
            {
                md = MessageDigest.GetInstance("SHA-1");
            }
            catch (NoSuchAlgorithmException)
            {
                Log.E(Database.TAG, "Error, SHA-1 digest is unavailable.");
                return null;
            }
            byte[] sha1hash = new byte[40];
            md.Update(data, 0, data.Length);
            sha1hash = md.Digest();
            BlobKey result = new BlobKey(sha1hash);
            return result;
        }

        /// <summary>Computes the content-address (SHA-1 digest) of a file on disk.</summary>
        /// <returns>The key, or null when SHA-1 is unavailable.</returns>
        public static BlobKey KeyForBlobFromFile(FileInfo file)
        {
            MessageDigest md;
            try
            {
                md = MessageDigest.GetInstance("SHA-1");
            }
            catch (NoSuchAlgorithmException)
            {
                Log.E(Database.TAG, "Error, SHA-1 digest is unavailable.");
                return null;
            }
            byte[] sha1hash = new byte[40];
            try
            {
                // NOTE(review): fis is not disposed when Read throws — consider a
                // using block; left as-is to preserve existing error behavior.
                var fis = new FileStream(file.FullName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
                byte[] buffer = new byte[65536];
                int lenRead = fis.Read(buffer, 0, buffer.Length);
                while (lenRead > 0)
                {
                    md.Update(buffer, 0, lenRead);
                    lenRead = fis.Read(buffer, 0, buffer.Length);
                }
                fis.Close();
            }
            catch (IOException)
            {
                Log.E(Database.TAG, "Error readin tmp file to compute key");
            }
            sha1hash = md.Digest();
            BlobKey result = new BlobKey(sha1hash);
            return result;
        }

        /// <summary>Returns the on-disk path for the given key: {store}/{key}.blob.</summary>
        public string PathForKey(BlobKey key)
        {
            return path + FilePath.separator + key + FileExtension;
        }

        /// <summary>Returns the size in bytes of the stored blob for <paramref name="key"/>.</summary>
        public long GetSizeOfBlob(BlobKey key)
        {
            string path = PathForKey(key);
            FilePath file = new FilePath(path);
            return file.Length();
        }

        /// <summary>
        /// Parses a blob key out of a full ".blob" file path, writing it into
        /// <paramref name="outKey"/>.  Assumes <paramref name="filename"/> is a full
        /// path rooted at this store's directory.
        /// </summary>
        /// <returns>true when the filename has the blob extension; otherwise false.</returns>
        public bool GetKeyForFilename(BlobKey outKey, string filename)
        {
            if (!filename.EndsWith(FileExtension))
            {
                return false;
            }
            //trim off directory prefix and extension, leaving the hex digest
            string rest = filename.Substring(path.Length + 1, filename.Length - FileExtension.Length - (path.Length + 1));
            outKey.Bytes = BlobKey.ConvertFromHex(rest);
            return true;
        }

        /// <summary>Reads the full contents of the blob for <paramref name="key"/>.</summary>
        /// <returns>The blob bytes, or null when key is null or the read fails.</returns>
        public byte[] BlobForKey(BlobKey key)
        {
            if (key == null)
            {
                return null;
            }
            string keyPath = PathForKey(key);
            FilePath file = new FilePath(keyPath);
            byte[] result = null;
            try
            {
                result = GetBytesFromFile(file);
            }
            catch (IOException e)
            {
                Log.E(Database.TAG, "Error reading file", e);
            }
            return result;
        }

        /// <summary>Opens a read stream over the stored blob, or null when unreadable.</summary>
        public Stream BlobStreamForKey(BlobKey key)
        {
            var keyPath = PathForKey(key);
            Log.D(Database.TAG, "Blob Path : " + keyPath);
            var file = new FilePath(keyPath);
            if (file.CanRead())
            {
                try
                {
                    return new FileStream(file, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite);
                }
                catch (FileNotFoundException e)
                {
                    Log.E(Database.TAG, "Unexpected file not found in blob store", e);
                    return null;
                }
                catch (Exception e)
                {
                    Log.E(Database.TAG, "Cannot new FileStream", e);
                }
            }
            return null;
        }

        /// <summary>
        /// Spools <paramref name="inputStream"/> to a temp file, computes its key, and
        /// moves it into the store (or discards the temp file when the blob already exists).
        /// </summary>
        /// <returns>true on success; false when the temp file could not be written.</returns>
        public bool StoreBlobStream(Stream inputStream, out BlobKey outKey)
        {
            FilePath tmp = null;
            try
            {
                tmp = FilePath.CreateTempFile(TmpFilePrefix, TmpFileExtension, new FilePath(this.path));
                FileOutputStream fos = new FileOutputStream(tmp);
                byte[] buffer = new byte[65536];
                int lenRead = ((InputStream)inputStream).Read(buffer);
                while (lenRead > 0)
                {
                    fos.Write(buffer, 0, lenRead);
                    lenRead = ((InputStream)inputStream).Read(buffer);
                }
                inputStream.Close();
                fos.Close();
            }
            catch (IOException e)
            {
                Log.E(Database.TAG, "Error writing blog to tmp file", e);
                outKey = null;
                return false;
            }
            outKey = KeyForBlobFromFile(tmp);
            var keyPath = PathForKey(outKey);
            var file = new FilePath(keyPath);
            if (file.CanRead())
            {
                // object with this hash already exists, we should delete tmp file and return true
                tmp.Delete();
            }
            else
            {
                // does not exist, we should rename tmp file to this name
                tmp.RenameTo(file);
            }
            return true;
        }

        /// <summary>
        /// Stores <paramref name="data"/> under its SHA-1 key (written into
        /// <paramref name="outKey"/>).  A no-op when the blob already exists.
        /// </summary>
        /// <returns>true on success or duplicate; false on write failure.</returns>
        public bool StoreBlob(byte[] data, BlobKey outKey)
        {
            BlobKey newKey = KeyForBlob(data);
            outKey.Bytes = newKey.Bytes;
            string keyPath = PathForKey(outKey);
            FilePath file = new FilePath(keyPath);
            if (file.CanRead())
            {
                // Content-addressed: an existing readable file is by definition identical.
                return true;
            }
            FileOutputStream fos = null;
            try
            {
                fos = new FileOutputStream(file);
                fos.Write(data);
            }
            catch (FileNotFoundException e)
            {
                Log.E(Database.TAG, "Error opening file for output", e);
                return false;
            }
            catch (IOException ioe)
            {
                Log.E(Database.TAG, "Error writing to file", ioe);
                return false;
            }
            finally
            {
                if (fos != null)
                {
                    try
                    {
                        fos.Close();
                    }
                    catch (IOException)
                    {
                    }
                }
            }
            // ignore
            return true;
        }

        /// <summary>Reads an entire file into a byte array.</summary>
        /// <exception cref="System.IO.IOException">Thrown when the file cannot be read completely.</exception>
        private static byte[] GetBytesFromFile(FilePath file)
        {
            InputStream @is = new FileStream (file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
            // Get the size of the file
            long length = file.Length();
            // Create the byte array to hold the data
            byte[] bytes = new byte[(int)length];
            // Read in the bytes
            int offset = 0;
            int numRead = 0;
            while (offset < bytes.Length && (numRead = @is.Read(bytes, offset, bytes.Length - offset)) >= 0)
            {
                offset += numRead;
            }
            // Ensure all the bytes have been read in
            if (offset < bytes.Length)
            {
                throw new IOException("Could not completely read file " + file.GetName());
            }
            // Close the input stream and return bytes
            @is.Close();
            return bytes;
        }

        /// <summary>Enumerates the keys of every blob file currently in the store.</summary>
        public ICollection<BlobKey> AllKeys()
        {
            ICollection<BlobKey> result = new HashSet<BlobKey>();
            FilePath file = new FilePath(path);
            FilePath[] contents = file.ListFiles();
            foreach (FilePath attachment in contents)
            {
                if (attachment.IsDirectory())
                {
                    continue;
                }
                BlobKey attachmentKey = new BlobKey();
                GetKeyForFilename(attachmentKey, attachment.GetPath());
                result.AddItem(attachmentKey);
            }
            return result;
        }

        /// <summary>Number of entries (files and subdirectories) in the store directory.</summary>
        public int Count()
        {
            FilePath file = new FilePath(path);
            FilePath[] contents = file.ListFiles();
            return contents.Length;
        }

        /// <summary>Total size in bytes of all entries in the store directory.</summary>
        public long TotalDataSize()
        {
            long total = 0;
            FilePath file = new FilePath(path);
            FilePath[] contents = file.ListFiles();
            foreach (FilePath attachment in contents)
            {
                total += attachment.Length();
            }
            return total;
        }

        /// <summary>
        /// Deletes every blob whose key is not in <paramref name="keysToKeep"/>.
        /// </summary>
        /// <returns>The number of blobs deleted.</returns>
        public int DeleteBlobsExceptWithKeys(ICollection<BlobKey> keysToKeep)
        {
            int numDeleted = 0;
            FilePath file = new FilePath(path);
            FilePath[] contents = file.ListFiles();
            foreach (FilePath attachment in contents)
            {
                BlobKey attachmentKey = new BlobKey();
                if (GetKeyForFilename(attachmentKey, attachment.GetPath()) && !keysToKeep.Contains(attachmentKey))
                {
                    bool result = attachment.Delete();
                    if (result)
                    {
                        ++numDeleted;
                    }
                    else
                    {
                        Log.E(Database.TAG, "Error deleting attachment");
                    }
                }
            }
            return numDeleted;
        }

        /// <summary>Deletes every blob in the store.</summary>
        public int DeleteBlobs()
        {
            return DeleteBlobsExceptWithKeys(new List<BlobKey>());
        }

        /// <summary>
        /// Returns true when the stored blob begins with the gzip magic bytes.
        /// </summary>
        public bool IsGZipped(BlobKey key)
        {
            var magic = 0;
            var path = PathForKey(key);
            var file = new FilePath(path);
            if (file.CanRead())
            {
                try
                {
                    var raf = new FileStream (file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
                    // Assemble the first two bytes low-byte-first: 0x1f | (0x8b << 8) == 0x8b1f.
                    magic = raf.ReadByte() & unchecked((0xff)) | ((raf.ReadByte() << 8) & unchecked((0xff00)));
                    raf.Close();
                }
                catch (Exception e)
                {
                    Runtime.PrintStackTrace(e, Console.Error);
                }
            }
            // BUGFIX: this previously compared against 0, which misreported every real
            // gzip blob as plain (its magic is 0x8b1f) and every unreadable or empty
            // blob as gzipped.  Compare against the actual gzip magic instead.
            return magic == GZipMagic;
        }

        /// <summary>
        /// Returns (creating if necessary) the "temp_attachments" scratch directory
        /// inside the store.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the directory cannot be created.
        /// </exception>
        public FileInfo TempDir()
        {
            FilePath directory = new FilePath(path);
            FilePath tempDirectory = new FilePath(directory, "temp_attachments");
            tempDirectory.Mkdirs();
            if (!tempDirectory.IsDirectory())
            {
                throw new InvalidOperationException(string.Format("Unable to create directory for: {0}"
                    , tempDirectory));
            }
            return tempDirectory;
        }
    }
}
using System;
using System.Data;
using System.Drawing;
using System.Collections;
using System.Windows.Forms;
using System.ComponentModel;
using System.Drawing.Drawing2D;

namespace CloudBox.Controller
{
    /// <summary>
    /// A replacement for the Windows Button Control.
    /// Owner-drawn with rounded corners, a gradient highlight and an animated
    /// glow that fades in/out as the mouse enters and leaves the control.
    /// </summary>
    [DefaultEvent("Click")]
    public class VistaButton : System.Windows.Forms.UserControl
    {
        #region - Designer -

        private System.ComponentModel.Container components = null;

        /// <summary>
        /// Initialize the component with it's
        /// default settings.
        /// </summary>
        public VistaButton()
        {
            InitializeComponent();
            this.mFadeIn.Tick += new EventHandler(mFadeIn_Tick);
            this.mFadeOut.Tick += new EventHandler(mFadeOut_Tick);
            // Fully owner-drawn, double-buffered, with a transparent backcolor so
            // the rounded corners blend into the parent surface.
            this.SetStyle(ControlStyles.AllPaintingInWmPaint, true);
            this.SetStyle(ControlStyles.DoubleBuffer, true);
            this.SetStyle(ControlStyles.ResizeRedraw, true);
            this.SetStyle(ControlStyles.Selectable, true);
            this.SetStyle(ControlStyles.SupportsTransparentBackColor, true);
            this.SetStyle(ControlStyles.UserPaint, true);
            this.BackColor = Color.Transparent;
            // 30 ms tick interval for the glow fade animations.
            mFadeIn.Interval = 30;
            mFadeOut.Interval = 30;
        }

        /// <summary>
        /// Release resources used by the control.
        /// </summary>
        protected override void Dispose( bool disposing )
        {
            if( disposing )
            {
                if(components != null)
                {
                    components.Dispose();
                }
            }
            base.Dispose( disposing );
        }

        #region - Component Designer generated code -
        private void InitializeComponent()
        {
            //
            // VistaButton
            //
            this.Name = "VistaButton";
            this.ButtonText = string.Empty;
            this.Size = new System.Drawing.Size(100, 32);
            this.Paint += new System.Windows.Forms.PaintEventHandler(this.VistaButton_Paint);
            this.KeyUp += new System.Windows.Forms.KeyEventHandler(this.VistaButton_KeyUp);
            this.KeyDown += new System.Windows.Forms.KeyEventHandler(this.VistaButton_KeyDown);
            this.MouseEnter += new System.EventHandler(this.VistaButton_MouseEnter);
            this.MouseLeave += new System.EventHandler(this.VistaButton_MouseLeave);
            this.MouseUp +=new MouseEventHandler(VistaButton_MouseUp);
            this.MouseDown += new System.Windows.Forms.MouseEventHandler(this.VistaButton_MouseDown);
            // Focus changes reuse the mouse handlers so keyboard focus also glows.
            this.GotFocus +=new EventHandler(VistaButton_MouseEnter);
            this.LostFocus +=new EventHandler(VistaButton_MouseLeave);
            this.Resize +=new EventHandler(VistaButton_Resize);
        }
        #endregion

        #endregion

        #region - Enums -

        /// <summary>
        /// A private enumeration that determines
        /// the mouse state in relation to the
        /// current instance of the control.
        /// </summary>
        enum State {None, Hover, Pressed};

        /// <summary>
        /// A public enumeration that determines whether
        /// the button background is painted when the
        /// mouse is not inside the ClientArea.
        /// </summary>
        public enum Style
        {
            /// <summary>
            /// Draw the button as normal
            /// </summary>
            Default,
            /// <summary>
            /// Only draw the background on mouse over.
            /// </summary>
            Flat
        };

        #endregion

        #region - Properties -

        #region - Private Variables -
        // True when the click was triggered from the keyboard rather than the mouse.
        private bool calledbykey = false;
        // Current mouse/press state driving the paint routines below.
        private State mButtonState = State.None;
        // Timers animating the glow alpha up (fade in) and down (fade out).
        private Timer mFadeIn = new Timer();
        private Timer mFadeOut = new Timer();
        // Current alpha of the glow; updated by the fade timers.
        private int mGlowAlpha = 0;
        #endregion

        #region - Text -
        private string mText;
        /// <summary>
        /// The text that is displayed on the button.
        /// </summary>
        [Category("Text"),
        Description("The text that is displayed on the button.")]
        public string ButtonText
        {
            get { return mText; }
            set { mText = value; this.Invalidate(); }
        }

        private Color mForeColor = Color.White;
        /// <summary>
        /// The color with which the text is drawn.
        /// </summary>
        [Category("Text"),
        Browsable(true),
        DefaultValue(typeof(Color),"White"),
        Description("The color with which the text is drawn.")]
        public override Color ForeColor
        {
            get { return mForeColor; }
            set { mForeColor = value; this.Invalidate(); }
        }

        private ContentAlignment mTextAlign = ContentAlignment.MiddleCenter;
        /// <summary>
        /// The alignment of the button text
        /// that is displayed on the control.
        /// </summary>
        [Category("Text"),
        DefaultValue(typeof(ContentAlignment),"MiddleCenter"),
        Description("The alignment of the button text " +
            "that is displayed on the control.")]
        public ContentAlignment TextAlign
        {
            get { return mTextAlign; }
            set { mTextAlign = value; this.Invalidate(); }
        }
        #endregion

        #region - Image -
        private Image mImage;
        /// <summary>
        /// The image displayed on the button that
        /// is used to help the user identify
        /// it's function if the text is ambiguous.
        /// </summary>
        [Category("Image"),
        DefaultValue(null),
        Description("The image displayed on the button that " +
            "is used to help the user identify" +
            "it's function if the text is ambiguous.")]
        public Image Image
        {
            get { return mImage; }
            set { mImage = value; this.Invalidate(); }
        }

        private ContentAlignment mImageAlign = ContentAlignment.MiddleLeft;
        /// <summary>
        /// The alignment of the image
        /// in relation to the button.
        /// </summary>
        [Category("Image"),
        DefaultValue(typeof(ContentAlignment),"MiddleLeft"),
        Description("The alignment of the image " +
            "in relation to the button.")]
        public ContentAlignment ImageAlign
        {
            get { return mImageAlign; }
            set { mImageAlign = value; this.Invalidate(); }
        }

        private Size mImageSize = new Size(24,24);
        /// <summary>
        /// The size of the image to be displayed on the
        /// button. This property defaults to 24x24.
        /// </summary>
        [Category("Image"),
        DefaultValue(typeof(Size),"24, 24"),
        Description("The size of the image to be displayed on the" +
            "button. This property defaults to 24x24.")]
        public Size ImageSize
        {
            get { return mImageSize; }
            set { mImageSize = value; this.Invalidate(); }
        }
        #endregion

        #region - Appearance -
        private Style mButtonStyle = Style.Default;
        /// <summary>
        /// Sets whether the button background is drawn
        /// while the mouse is outside of the client area.
        /// </summary>
        [Category("Appearance"),
        DefaultValue(typeof(Style),"Default"),
        Description("Sets whether the button background is drawn " +
            "while the mouse is outside of the client area.")]
        public Style ButtonStyle
        {
            get { return mButtonStyle; }
            set { mButtonStyle = value; this.Invalidate(); }
        }

        private int mCornerRadius = 8;
        /// <summary>
        /// The radius for the button corners. The
        /// greater this value is, the more 'smooth'
        /// the corners are. This property should
        /// not be greater than half of the
        /// controls height.
        /// </summary>
        [Category("Appearance"),
        DefaultValue(8),
        Description("The radius for the button corners. The " +
            "greater this value is, the more 'smooth' " +
            "the corners are. This property should " +
            "not be greater than half of the " +
            "controls height.")]
        public int CornerRadius
        {
            get { return mCornerRadius; }
            set { mCornerRadius = value; this.Invalidate(); }
        }

        private Color mHighlightColor = Color.White;
        /// <summary>
        /// The colour of the highlight on the top of the button.
        /// </summary>
        [Category("Appearance"),
        DefaultValue(typeof(Color), "White"),
        Description("The colour of the highlight on the top of the button.")]
        public Color HighlightColor
        {
            get { return mHighlightColor; }
            set { mHighlightColor = value; this.Invalidate(); }
        }

        private Color mButtonColor = Color.Black;
        /// <summary>
        /// The bottom color of the button that
        /// will be drawn over the base color.
        /// </summary>
        [Category("Appearance"),
        DefaultValue(typeof(Color), "Black"),
        Description("The bottom color of the button that " +
            "will be drawn over the base color.")]
        public Color ButtonColor
        {
            get { return mButtonColor; }
            set { mButtonColor = value; this.Invalidate(); }
        }

        private Color mGlowColor = Color.FromArgb(141,189,255);
        /// <summary>
        /// The colour that the button glows when
        /// the mouse is inside the client area.
        /// </summary>
        [Category("Appearance"),
        DefaultValue(typeof(Color), "141,189,255"),
        Description("The colour that the button glows when " +
            "the mouse is inside the client area.")]
        public Color GlowColor
        {
            get { return mGlowColor; }
            set { mGlowColor = value; this.Invalidate(); }
        }

        private Image mBackImage;
        /// <summary>
        /// The background image for the button,
        /// this image is drawn over the base
        /// color of the button.
        /// </summary>
        [Category("Appearance"),
        DefaultValue(null),
        Description("The background image for the button, " +
            "this image is drawn over the base " +
            "color of the button.")]
        public Image BackImage
        {
            get { return mBackImage; }
            set { mBackImage = value; this.Invalidate(); }
        }

        private Color mBaseColor = Color.Black;
        /// <summary>
        /// The backing color that the rest of
        /// the button is drawn. For a glassier
        /// effect set this property to Transparent.
        /// </summary>
        [Category("Appearance"),
        DefaultValue(typeof(Color), "Black"),
        Description("The backing color that the rest of" +
            "the button is drawn. For a glassier " +
            "effect set this property to Transparent.")]
        public Color BaseColor
        {
            get { return mBaseColor; }
            set { mBaseColor = value; this.Invalidate(); }
        }
        #endregion

        #endregion

        #region - Functions -

        /// <summary>
        /// Builds a rounded-rectangle path for <paramref name="r"/> with an
        /// independent radius per corner (r1=TL, r2=TR, r3=BR, r4=BL).
        /// </summary>
        private GraphicsPath RoundRect(RectangleF r, float r1, float r2, float r3, float r4)
        {
            float x = r.X, y = r.Y, w = r.Width, h = r.Height;
            GraphicsPath rr = new GraphicsPath();
            rr.AddBezier(x, y + r1, x, y, x + r1, y, x + r1, y);
            rr.AddLine(x + r1, y, x + w - r2, y);
            rr.AddBezier(x + w - r2, y, x + w, y, x + w, y + r2, x + w, y + r2);
            rr.AddLine(x + w, y + r2, x + w, y + h - r3);
            rr.AddBezier(x + w, y + h - r3, x + w, y + h, x + w - r3, y + h, x + w - r3, y + h);
            rr.AddLine(x + w - r3, y + h, x + r4, y + h);
            rr.AddBezier(x + r4, y + h, x, y + h, x, y + h - r4, x, y + h - r4);
            rr.AddLine(x, y + h - r4, x, y + r1);
            return rr;
        }

        /// <summary>
        /// Maps a ContentAlignment value onto the equivalent StringFormat
        /// (vertical LineAlignment + horizontal Alignment) for DrawString.
        /// </summary>
        private StringFormat StringFormatAlignment(ContentAlignment textalign)
        {
            StringFormat sf = new StringFormat();
            switch (textalign)
            {
                case ContentAlignment.TopLeft:
                case ContentAlignment.TopCenter:
                case ContentAlignment.TopRight:
                    sf.LineAlignment = StringAlignment.Near;
                    break;
                case ContentAlignment.MiddleLeft:
                case ContentAlignment.MiddleCenter:
                case ContentAlignment.MiddleRight:
                    sf.LineAlignment = StringAlignment.Center;
                    break;
                case ContentAlignment.BottomLeft:
                case ContentAlignment.BottomCenter:
                case ContentAlignment.BottomRight:
                    sf.LineAlignment = StringAlignment.Far;
                    break;
            }
            switch (textalign)
            {
                case ContentAlignment.TopLeft:
                case ContentAlignment.MiddleLeft:
                case ContentAlignment.BottomLeft:
                    sf.Alignment = StringAlignment.Near;
                    break;
                case ContentAlignment.TopCenter:
                case ContentAlignment.MiddleCenter:
                case ContentAlignment.BottomCenter:
                    sf.Alignment = StringAlignment.Center;
                    break;
                case ContentAlignment.TopRight:
                case ContentAlignment.MiddleRight:
                case ContentAlignment.BottomRight:
                    sf.Alignment = StringAlignment.Far;
                    break;
            }
            return sf;
        }

        #endregion

        #region - Drawing -

        /// <summary>
        /// Draws the outer border for the control
        /// using the ButtonColor property.
        /// </summary>
        /// <param name="g">The graphics object used in the paint event.</param>
        private void DrawOuterStroke(Graphics g)
        {
            // Flat style skips all background chrome until the mouse interacts.
            if (this.ButtonStyle == Style.Flat && this.mButtonState == State.None){return;}
            Rectangle r = this.ClientRectangle;
            r.Width -= 1; r.Height -= 1;
            using (GraphicsPath rr = RoundRect(r, CornerRadius, CornerRadius, CornerRadius, CornerRadius))
            {
                using (Pen p = new Pen(this.ButtonColor))
                {
                    g.DrawPath(p, rr);
                }
            }
        }

        /// <summary>
        /// Draws the inner border for the control
        /// using the HighlightColor property.
        /// </summary>
        /// <param name="g">The graphics object used in the paint event.</param>
        private void DrawInnerStroke(Graphics g)
        {
            if (this.ButtonStyle == Style.Flat && this.mButtonState == State.None){return;}
            Rectangle r = this.ClientRectangle;
            // Inset one pixel inside the outer stroke.
            r.X++; r.Y++;
            r.Width -= 3; r.Height -= 3;
            using (GraphicsPath rr = RoundRect(r, CornerRadius, CornerRadius, CornerRadius, CornerRadius))
            {
                using (Pen p = new Pen(this.HighlightColor))
                {
                    g.DrawPath(p, rr);
                }
            }
        }

        /// <summary>
        /// Draws the background for the control
        /// using the background image and the
        /// BaseColor.
        /// </summary>
        /// <param name="g">The graphics object used in the paint event.</param>
        private void DrawBackground(Graphics g)
        {
            if (this.ButtonStyle == Style.Flat && this.mButtonState == State.None){return;}
            // Darker overlay while pressed (204/255) than at rest (127/255).
            int alpha = (mButtonState == State.Pressed) ? 204 : 127;
            Rectangle r = this.ClientRectangle;
            r.Width--; r.Height--;
            using (GraphicsPath rr = RoundRect(r, CornerRadius, CornerRadius, CornerRadius, CornerRadius))
            {
                using (SolidBrush sb = new SolidBrush(this.BaseColor))
                {
                    g.FillPath(sb, rr);
                }
                // Clip the optional back image to the rounded shape.
                SetClip(g);
                if (this.BackImage != null){g.DrawImage(this.BackImage, this.ClientRectangle);}
                g.ResetClip();
                using (SolidBrush sb = new SolidBrush(Color.FromArgb(alpha, this.ButtonColor)))
                {
                    g.FillPath(sb, rr);
                }
            }
        }

        /// <summary>
        /// Draws the Highlight over the top of the
        /// control using the HightlightColor.
        /// </summary>
        /// <param name="g">The graphics object used in the paint event.</param>
        private void DrawHighlight(Graphics g)
        {
            if (this.ButtonStyle == Style.Flat && this.mButtonState == State.None){return;}
            // Highlight dims while the button is pressed.
            int alpha = (mButtonState == State.Pressed) ? 60 : 150;
            // Gradient band covering the top half only, rounded at the top corners.
            Rectangle rect = new Rectangle(0, 0, this.Width, this.Height / 2);
            using (GraphicsPath r = RoundRect(rect, CornerRadius, CornerRadius, 0, 0))
            {
                using (LinearGradientBrush lg = new LinearGradientBrush(r.GetBounds(),
                    Color.FromArgb(alpha, this.HighlightColor),
                    Color.FromArgb(alpha / 3, this.HighlightColor),
                    LinearGradientMode.Vertical))
                {
                    g.FillPath(lg, r);
                }
            }
        }

        /// <summary>
        /// Draws the glow for the button when the
        /// mouse is inside the client area using
        /// the GlowColor property.
        /// </summary>
        /// <param name="g">The graphics object used in the paint event.</param>
        private void DrawGlow(Graphics g)
        {
            if (this.mButtonState == State.Pressed){return;}
            SetClip(g);
            using (GraphicsPath glow = new GraphicsPath())
            {
                // Ellipse centered in the lower half; mGlowAlpha animates the intensity.
                glow.AddEllipse(-5,this.Height / 2 - 10, this.Width + 11, this.Height + 11);
                using (PathGradientBrush gl = new PathGradientBrush(glow))
                {
                    gl.CenterColor = Color.FromArgb(mGlowAlpha, this.GlowColor);
                    gl.SurroundColors = new Color[] {Color.FromArgb(0, this.GlowColor)};
                    g.FillPath(gl, glow);
                }
            }
            g.ResetClip();
        }

        /// <summary>
        /// Draws the text for the button.
        /// </summary>
        /// <param name="g">The graphics object used in the paint event.</param>
        private void DrawText(Graphics g)
        {
            StringFormat sf = StringFormatAlignment(this.TextAlign);
            // 8 px padding on all sides of the text rectangle.
            Rectangle r = new Rectangle(8,8,this.Width - 17,this.Height - 17);
            g.DrawString(this.ButtonText,this.Font,new SolidBrush(this.ForeColor),r,sf);
        }

        /// <summary>
        /// Draws the image for the button
        /// </summary>
        /// <param name="g">The graphics object used in the paint event.</param>
        private void DrawImage(Graphics g)
        {
            if (this.Image == null) {return;}
            // Default placement (TopLeft); the switch below repositions per ImageAlign
            // with a consistent 8 px margin from the nearest edges.
            Rectangle r = new Rectangle(8,8,this.ImageSize.Width,this.ImageSize.Height);
            switch (this.ImageAlign)
            {
                case ContentAlignment.TopCenter:
                    r = new Rectangle(this.Width / 2 - this.ImageSize.Width / 2,8,this.ImageSize.Width,this.ImageSize.Height);
                    break;
                case ContentAlignment.TopRight:
                    r = new Rectangle(this.Width - 8 - this.ImageSize.Width,8,this.ImageSize.Width,this.ImageSize.Height);
                    break;
                case ContentAlignment.MiddleLeft:
                    r = new Rectangle(8,this.Height / 2 - this.ImageSize.Height / 2,this.ImageSize.Width,this.ImageSize.Height);
                    break;
                case ContentAlignment.MiddleCenter:
                    r = new Rectangle(this.Width / 2 - this.ImageSize.Width / 2,this.Height / 2 - this.ImageSize.Height / 2,this.ImageSize.Width,this.ImageSize.Height);
                    break;
                case ContentAlignment.MiddleRight:
                    r = new Rectangle(this.Width - 8 - this.ImageSize.Width,this.Height / 2 - this.ImageSize.Height / 2,this.ImageSize.Width,this.ImageSize.Height);
                    break;
                case ContentAlignment.BottomLeft:
                    r = new Rectangle(8,this.Height - 8 - this.ImageSize.Height,this.ImageSize.Width,this.ImageSize.Height);
                    break;
                case ContentAlignment.BottomCenter:
                    r = new Rectangle(this.Width / 2 - this.ImageSize.Width / 2,this.Height - 8 - this.ImageSize.Height,this.ImageSize.Width,this.ImageSize.Height);
                    break;
                case ContentAlignment.BottomRight:
                    r = new Rectangle(this.Width - 8 - this.ImageSize.Width,this.Height - 8 - this.ImageSize.Height,this.ImageSize.Width,this.ImageSize.Height);
                    break;
            }
g.DrawImage(this.Image,r); } private void SetClip(Graphics g) { Rectangle r = this.ClientRectangle; r.X++; r.Y++; r.Width-=3; r.Height-=3; using (GraphicsPath rr = RoundRect(r, CornerRadius, CornerRadius, CornerRadius, CornerRadius)) { g.SetClip(rr); } } #endregion #region - Private Subs - private void VistaButton_Paint(object sender, PaintEventArgs e) { e.Graphics.SmoothingMode = SmoothingMode.AntiAlias; e.Graphics.InterpolationMode = InterpolationMode.HighQualityBicubic; DrawBackground(e.Graphics); DrawHighlight(e.Graphics); DrawImage(e.Graphics); DrawText(e.Graphics); DrawGlow(e.Graphics); DrawOuterStroke(e.Graphics); DrawInnerStroke(e.Graphics); } private void VistaButton_Resize(object sender, EventArgs e) { Rectangle r = this.ClientRectangle; r.X -= 1; r.Y -= 1; r.Width += 2; r.Height += 2; using (GraphicsPath rr = RoundRect(r, CornerRadius, CornerRadius, CornerRadius, CornerRadius)) { this.Region = new Region(rr); } } #region - Mouse and Keyboard Events - private void VistaButton_MouseEnter(object sender, EventArgs e) { mButtonState = State.Hover; mFadeOut.Stop(); mFadeIn.Start(); } private void VistaButton_MouseLeave(object sender, EventArgs e) { mButtonState = State.None; if (this.mButtonStyle == Style.Flat) { mGlowAlpha = 0; } mFadeIn.Stop(); mFadeOut.Start(); } private void VistaButton_MouseDown(object sender, MouseEventArgs e) { if (e.Button == MouseButtons.Left) { mButtonState = State.Pressed; if (this.mButtonStyle != Style.Flat) { mGlowAlpha = 255; } mFadeIn.Stop(); mFadeOut.Stop(); this.Invalidate(); } } private void mFadeIn_Tick(object sender, EventArgs e) { if (this.ButtonStyle == Style.Flat) {mGlowAlpha = 0;} if (mGlowAlpha + 30 >= 255) { mGlowAlpha = 255; mFadeIn.Stop(); } else { mGlowAlpha += 30; } this.Invalidate(); } private void mFadeOut_Tick(object sender, EventArgs e) { if (this.ButtonStyle == Style.Flat) {mGlowAlpha = 0;} if (mGlowAlpha - 30 <= 0) { mGlowAlpha = 0; mFadeOut.Stop(); } else { mGlowAlpha -= 30; } this.Invalidate(); } private 
void VistaButton_KeyDown(object sender, KeyEventArgs e) { if (e.KeyCode == Keys.Space) { MouseEventArgs m = new MouseEventArgs(MouseButtons.Left,0,0,0,0); VistaButton_MouseDown(sender, m); } } private void VistaButton_KeyUp(object sender, KeyEventArgs e) { if (e.KeyCode == Keys.Space) { MouseEventArgs m = new MouseEventArgs(MouseButtons.Left,0,0,0,0); calledbykey = true; VistaButton_MouseUp(sender, m); } } private void VistaButton_MouseUp(object sender, MouseEventArgs e) { if (e.Button == MouseButtons.Left) { mButtonState = State.Hover; mFadeIn.Stop(); mFadeOut.Stop(); this.Invalidate(); if (calledbykey == true) {this.OnClick(EventArgs.Empty); calledbykey = false;} } } #endregion #endregion } }
//------------------------------------------------------------------------------
// <copyright file="PrintController.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------

namespace System.Drawing.Printing {
    using Microsoft.Win32;
    using System;
    using System.Diagnostics;
    using System.Drawing;
    using System.Runtime.InteropServices;
    using System.Security;
    using System.Security.Permissions;
    using System.Runtime.Versioning;

    /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController"]/*' />
    /// <devdoc>
    ///    <para>Controls how a document is printed: drives the Begin/Query/Page/End
    ///    event sequence on a <see cref='PrintDocument'/> and owns the cached DEVMODE handle.</para>
    /// </devdoc>
    public abstract class PrintController {
        // DEVMODEs are pretty expensive, so we cache one here and share it with the
        // Standard and Preview print controllers. If it weren't for all the rules about API changes,
        // I'd consider making this protected.

        #region SafeDeviceModeHandle Class
        /// <summary>
        /// Represents a SafeHandle for a printer's device-mode struct handle (DEVMODE),
        /// guaranteeing the HGLOBAL is freed exactly once even under thread aborts.
        /// </summary>
        /// <SecurityNote>
        ///     Critical : base class SafeHandle is critical
        /// </SecurityNote>
        [SecurityCritical]
        internal sealed class SafeDeviceModeHandle : SafeHandle {
            // This constructor is used by the P/Invoke marshaling layer
            // to allocate a SafeHandle instance.  P/Invoke then does the
            // appropriate method call, storing the handle in this class.
            private SafeDeviceModeHandle() : base(IntPtr.Zero, true) {
                return;
            }

            internal SafeDeviceModeHandle(IntPtr handle)
                : base(IntPtr.Zero, true)  // "true" means "owns the handle"
            {
                SetHandle(handle);
            }

            public override bool IsInvalid {
                get { return handle == IntPtr.Zero; }
            }

            // Specifies how to free the handle.
            // The boolean returned should be true for success and false if the runtime
            // should fire a SafeHandleCriticalFailure MDA (CustomerDebugProbe) if that
            // MDA is enabled.
            [SecurityCritical]
            protected override bool ReleaseHandle() {
                if (!IsInvalid) {
                    // DEVMODE handles are HGLOBALs; free via GlobalFree, not LocalFree.
                    SafeNativeMethods.GlobalFree(new HandleRef(this, handle));
                }
                handle = IntPtr.Zero;
                return true;
            }

            // Null-safe conversion: a null wrapper maps to IntPtr.Zero.
            public static implicit operator IntPtr(SafeDeviceModeHandle handle) {
                return (handle == null) ? IntPtr.Zero : handle.handle;
            }

            // NOTE(review): this takes ownership of the raw handle — the caller must not
            // free it separately, or it will be double-freed by ReleaseHandle.
            public static explicit operator SafeDeviceModeHandle(IntPtr handle) {
                return new SafeDeviceModeHandle(handle);
            }
        }
        #endregion

        // Cached DEVMODE handle; populated by OnStartPrint, released by OnEndPrint.
        internal SafeDeviceModeHandle modeHandle = null;

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.PrintController"]/*' />
        /// <devdoc>
        /// <para>
        /// Initializes a new instance of the <see cref='System.Drawing.Printing.PrintController'/> class.
        /// Demands SafePrinting so partially-trusted code cannot construct a controller.
        /// </para>
        /// </devdoc>
        protected PrintController() {
            IntSecurity.SafePrinting.Demand();
        }

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.IsPreview"]/*' />
        /// <devdoc>
        /// <para>
        /// This is new public property which notifies if this controller is used for PrintPreview.
        /// Base implementation always returns false; PreviewPrintController overrides it.
        /// </para>
        /// </devdoc>
        public virtual bool IsPreview {
            get {
                return false;
            }
        }

        // WARNING: if you have nested PrintControllers, this method won't get called on the inner one.
        // Add initialization code to StartPrint or StartPage instead.
        [ResourceExposure(ResourceScope.Process)]
        [ResourceConsumption(ResourceScope.Process)]
        internal void Print(PrintDocument document) {
            IntSecurity.SafePrinting.Demand();

            // Most of the printing security is left to the individual print controller

            // Get the PrintAction for this event
            PrintAction printAction;
            if (IsPreview) {
                printAction = PrintAction.PrintToPreview;
            }
            else {
                printAction = document.PrinterSettings.PrintToFile ? PrintAction.PrintToFile : PrintAction.PrintToPrinter;
            }

            // Check that user has permission to print to this particular printer
            PrintEventArgs printEvent = new PrintEventArgs(printAction);
            document._OnBeginPrint(printEvent);
            if (printEvent.Cancel) {
                // Canceled in BeginPrint: raise EndPrint so subscribers stay balanced,
                // but the controller's OnStartPrint was never called, so skip OnEndPrint.
                document._OnEndPrint(printEvent);
                return;
            }
            OnStartPrint(document, printEvent);
            if (printEvent.Cancel) {
                // Canceled in OnStartPrint: the controller was started, so both the
                // document event and the controller teardown must run.
                document._OnEndPrint(printEvent);
                OnEndPrint(document, printEvent);
                return;
            }

            bool canceled = true;
            try {
                canceled = PrintLoop(document);
            }
            finally {
                // Nested finallys guarantee ordering even when handlers throw:
                // document EndPrint first, then controller OnEndPrint, then the
                // permission bookkeeping below.
                try {
                    try {
                        document._OnEndPrint(printEvent);
                        printEvent.Cancel = canceled | printEvent.Cancel;
                    }
                    finally {
                        OnEndPrint(document, printEvent);
                    }
                }
                finally {
                    if (!IntSecurity.HasPermission(IntSecurity.AllPrinting)) {
                        // Ensure programs with SafePrinting only get to print once for each time they
                        // throw up the PrintDialog.
                        IntSecurity.AllPrinting.Assert();
                        document.PrinterSettings.PrintDialogDisplayed = false;
                    }
                }
            }
        }

        // Returns true if print was aborted.
        // WARNING: if you have nested PrintControllers, this method won't get called on the inner one
        // Add initialization code to StartPrint or StartPage instead.
        private bool PrintLoop(PrintDocument document) {
            // A single QueryPageSettingsEventArgs is reused across pages; it starts
            // from a clone so handlers cannot mutate the document's defaults.
            QueryPageSettingsEventArgs queryEvent = new QueryPageSettingsEventArgs((PageSettings)document.DefaultPageSettings.Clone());
            for (;;) {
                document._OnQueryPageSettings(queryEvent);
                if (queryEvent.Cancel) {
                    return true;
                }

                PrintPageEventArgs pageEvent = CreatePrintPageEvent(queryEvent.PageSettings);
                Graphics graphics = OnStartPage(document, pageEvent);
                pageEvent.SetGraphics(graphics);

                try {
                    document._OnPrintPage(pageEvent);
                    OnEndPage(document, pageEvent);
                }
                finally {
                    // Always dispose the per-page Graphics, even if the handler threw.
                    pageEvent.Dispose();
                }

                if (pageEvent.Cancel) {
                    return true;
                }
                else if (!pageEvent.HasMorePages) {
                    return false;
                }
                else {
                    // loop
                }
            }
        }

        // Builds the per-page event args, computing the printable page bounds from the
        // cached DEVMODE and the margin rectangle from the page settings.
        private PrintPageEventArgs CreatePrintPageEvent(PageSettings pageSettings) {
            IntSecurity.AllPrintingAndUnmanagedCode.Assert();

            Debug.Assert((modeHandle != null), "modeHandle is null. Someone must have forgot to call base.StartPrint");

            Rectangle pageBounds = pageSettings.GetBounds(modeHandle);
            Rectangle marginBounds = new Rectangle(pageSettings.Margins.Left,
                                                   pageSettings.Margins.Top,
                                                   pageBounds.Width - (pageSettings.Margins.Left + pageSettings.Margins.Right),
                                                   pageBounds.Height - (pageSettings.Margins.Top + pageSettings.Margins.Bottom));

            // Graphics is attached later by the controller in OnStartPage; null here.
            PrintPageEventArgs pageEvent = new PrintPageEventArgs(null, marginBounds, pageBounds, pageSettings);
            return pageEvent;
        }

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.OnStartPrint"]/*' />
        /// <devdoc>
        ///    <para>When overridden in a derived class, begins the control sequence of when and how to print a document.
        ///    The base implementation caches the printer's DEVMODE handle; overrides must call it.</para>
        /// </devdoc>
        [ResourceExposure(ResourceScope.Process)]
        [ResourceConsumption(ResourceScope.Process)]
        public virtual void OnStartPrint(PrintDocument document, PrintEventArgs e) {
            IntSecurity.AllPrintingAndUnmanagedCode.Assert();

            modeHandle = (SafeDeviceModeHandle)document.PrinterSettings.GetHdevmode(document.DefaultPageSettings);
        }

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.OnStartPage"]/*' />
        /// <devdoc>
        ///    <para>When overridden in a derived class, begins the control
        ///       sequence of when and how to print a page in a document.
        ///       Returns the Graphics the page should be drawn on; base returns null.</para>
        /// </devdoc>
        public virtual Graphics OnStartPage(PrintDocument document, PrintPageEventArgs e) {
            return null;
        }

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.OnEndPage"]/*' />
        /// <devdoc>
        ///    <para>When overridden in a derived class, completes the control sequence of when and how
        ///       to print a page in a document. Base implementation is a no-op.</para>
        /// </devdoc>
        public virtual void OnEndPage(PrintDocument document, PrintPageEventArgs e) {
        }

        /// <include file='doc\PrintController.uex' path='docs/doc[@for="PrintController.OnEndPrint"]/*' />
        /// <devdoc>
        ///    <para>When overridden in a derived class, completes the
        ///       control sequence of when and how to print a document.
        ///       The base implementation releases the cached DEVMODE handle.</para>
        /// </devdoc>
        public virtual void OnEndPrint(PrintDocument document, PrintEventArgs e) {
            IntSecurity.UnmanagedCode.Assert();

            Debug.Assert((modeHandle != null), "modeHandle is null. Someone must have forgot to call base.StartPrint");
            if (modeHandle != null) {
                modeHandle.Close();
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
//
// RuntimeHelpers
//    This class defines a set of static methods that provide support for compilers.
//

using Internal.Reflection.Augments;
using Internal.Reflection.Core.NonPortable;
using Internal.Runtime.Augments;

using System.Runtime;
using System.Runtime.Serialization;
using System.Threading;

namespace System.Runtime.CompilerServices
{
    public static class RuntimeHelpers
    {
        /// <summary>
        /// Compiler intrinsic used to initialize an array from embedded metadata.
        /// Only recognized inside the well-known IL sequence; any direct call throws.
        /// </summary>
        [Intrinsic]
        public static void InitializeArray(Array array, RuntimeFieldHandle fldHandle)
        {
            // We only support this intrinsic when it occurs within a well-defined IL sequence.
            // If a call to this method occurs within the recognized sequence, codegen must expand the IL sequence completely.
            // For any other purpose, the API is currently unsupported.
            // https://github.com/dotnet/corert/issues/364
            throw new PlatformNotSupportedException();
        }

        /// <summary>
        /// Runs the static class constructor for <paramref name="type"/> if it has
        /// one and it has not already run. No-op for types with no cctor context.
        /// </summary>
        /// <exception cref="ArgumentException">The handle is uninitialized.</exception>
        public static void RunClassConstructor(RuntimeTypeHandle type)
        {
            if (type.IsNull)
                throw new ArgumentException(SR.InvalidOperation_HandleIsNotInitialized);

            IntPtr pStaticClassConstructionContext = RuntimeAugments.Callbacks.TryGetStaticClassConstructionContext(type);
            if (pStaticClassConstructionContext == IntPtr.Zero)
                return;  // no cctor, or already run — nothing to do

            unsafe
            {
                ClassConstructorRunner.EnsureClassConstructorRun((StaticClassConstructionContext*)pStaticClassConstructionContext);
            }
        }

        /// <summary>Runs the module constructor for the given module handle.</summary>
        /// <exception cref="ArgumentException">The handle is uninitialized.</exception>
        public static void RunModuleConstructor(ModuleHandle module)
        {
            if (module.AssociatedModule == null)
                throw new ArgumentException(SR.InvalidOperation_HandleIsNotInitialized);

            ReflectionAugments.ReflectionCoreCallbacks.RunModuleConstructor(module.AssociatedModule);
        }

        /// <summary>
        /// Boxes value types defensively: returns a memberwise clone for boxed
        /// non-primitive value types so callers cannot mutate the original box;
        /// reference types and primitives are returned as-is.
        /// </summary>
        public static Object GetObjectValue(Object obj)
        {
            if (obj == null)
                return null;

            EETypePtr eeType = obj.EETypePtr;
            if ((!eeType.IsValueType) || eeType.IsPrimitive)
                return obj;

            return RuntimeImports.RhMemberwiseClone(obj);
        }

        /// <summary>
        /// Fast value-equality check used by compilers: reference equality first,
        /// then bitwise comparison of same-typed boxed value types (including padding).
        /// </summary>
        public new static bool Equals(Object obj1, Object obj2)
        {
            if (obj1 == obj2)
                return true;

            if ((obj1 == null) || (obj2 == null))
                return false;

            // If it's not a value class, don't compare by value
            if (!obj1.EETypePtr.IsValueType)
                return false;

            // Make sure they are the same type.
            if (obj1.EETypePtr != obj2.EETypePtr)
                return false;

            return RuntimeImports.RhCompareObjectContentsAndPadding(obj1, obj2);
        }

#if !FEATURE_SYNCTABLE
        // Hash codes are stored in the low 26 bits of the sync-block index word.
        private const int HASHCODE_BITS = 26;
        private const int MASK_HASHCODE = (1 << HASHCODE_BITS) - 1;
#endif

        // Per-thread hash seed so no cross-thread synchronization is needed.
        [ThreadStatic]
        private static int t_hashSeed;

        /// <summary>Generates the next pseudo-random hash code for this thread's LCG.</summary>
        internal static int GetNewHashCode()
        {
            int multiplier = Environment.CurrentManagedThreadId * 4 + 5;
            // Every thread has its own generator for hash codes so that we won't get into a situation
            // where two threads consistently give out the same hash codes.
            // Choice of multiplier guarantees period of 2**32 - see Knuth Vol 2 p16 (3.2.1.2 Theorem A).
            t_hashSeed = t_hashSeed * multiplier + 1;
            return t_hashSeed;
        }

        /// <summary>
        /// Returns the identity hash code for an object, lazily assigning and
        /// publishing one into the object header on first request.
        /// </summary>
        public static unsafe int GetHashCode(Object o)
        {
#if FEATURE_SYNCTABLE
            return ObjectHeader.GetHashCode(o);
#else
            if (o == null)
                return 0;

            fixed (IntPtr* pEEType = &o.m_pEEType)
            {
                // skipping exactly 4 bytes for the SyncTableEntry (exactly 4 bytes not a pointer size).
                int* pSyncBlockIndex = (int*)((byte*)pEEType - 4);
                int hash = *pSyncBlockIndex & MASK_HASHCODE;

                if (hash == 0)
                    return MakeHashCode(o, pSyncBlockIndex);
                else
                    return hash;
            }
#endif
        }

#if !FEATURE_SYNCTABLE
        // Lock-free publication: CAS the new hash into the header; if another
        // thread won the race, adopt its value so all callers agree.
        private static unsafe int MakeHashCode(Object o, int* pSyncBlockIndex)
        {
            int hash = GetNewHashCode() & MASK_HASHCODE;

            // 0 is reserved to mean "no hash assigned yet".
            if (hash == 0)
                hash = 1;

            while (true)
            {
                int oldIndex = Volatile.Read(ref *pSyncBlockIndex);

                int currentHash = oldIndex & MASK_HASHCODE;
                if (currentHash != 0)
                {
                    // Someone else set the hash code.
                    hash = currentHash;
                    break;
                }

                int newIndex = oldIndex | hash;

                if (Interlocked.CompareExchange(ref *pSyncBlockIndex, newIndex, oldIndex) == oldIndex)
                    break;

                // If we get here someone else modified the header. They may have set the hash code, or maybe some
                // other bits. Let's try again.
            }

            return hash;
        }
#endif

        public static int OffsetToStringData
        {
            // Workaround to allow WebAssembly to define a size here without a special CoreLib build
            // https://github.com/dotnet/corert/issues/4506 includes removing this.
            [Intrinsic]
            get
            {
                // Number of bytes from the address pointed to by a reference to
                // a String to the first 16-bit character in the String.
                // This property allows C#'s fixed statement to work on Strings.
                return String.FIRST_CHAR_OFFSET;
            }
        }

        // Cached per-thread stack limit used by the EnsureSufficientExecutionStack checks.
        [ThreadStatic]
        private static unsafe byte* t_sufficientStackLimit;

        /// <summary>
        /// Throws <see cref="InsufficientExecutionStackException"/> when the current
        /// stack pointer has descended below the cached safety limit.
        /// </summary>
        public static unsafe void EnsureSufficientExecutionStack()
        {
            byte* limit = t_sufficientStackLimit;
            if (limit == null)
                limit = GetSufficientStackLimit();

            // Address of a local approximates the current stack pointer.
            byte* currentStackPtr = (byte*)(&limit);
            if (currentStackPtr < limit)
                throw new InsufficientExecutionStackException();
        }

        /// <summary>Non-throwing variant of <see cref="EnsureSufficientExecutionStack"/>.</summary>
        public static unsafe bool TryEnsureSufficientExecutionStack()
        {
            byte* limit = t_sufficientStackLimit;
            if (limit == null)
                limit = GetSufficientStackLimit();

            byte* currentStackPtr = (byte*)(&limit);
            return (currentStackPtr >= limit);
        }

        [MethodImpl(MethodImplOptions.NoInlining)] // Only called once per thread, no point in inlining.
        private static unsafe byte* GetSufficientStackLimit()
        {
            IntPtr lower, upper;
            RuntimeImports.RhGetCurrentThreadStackBounds(out lower, out upper);

            // Compute the limit used by EnsureSufficientExecutionStack and cache it on the thread. This minimum
            // stack size should be sufficient to allow a typical non-recursive call chain to execute, including
            // potential exception handling and garbage collection.
#if BIT64
            const int MinExecutionStackSize = 128 * 1024;
#else
            const int MinExecutionStackSize = 64 * 1024;
#endif
            // If the whole stack is smaller than the minimum, fall back to the upper bound.
            byte* limit = (((byte*)upper - (byte*)lower > MinExecutionStackSize)) ?
                ((byte*)lower + MinExecutionStackSize) : ((byte*)upper);
            return (t_sufficientStackLimit = limit);
        }

        /// <summary>True when T is a reference type or a value type containing GC references.</summary>
        [Intrinsic]
        public static bool IsReferenceOrContainsReferences<T>()
        {
            var pEEType = EETypePtr.EETypePtrOf<T>();
            return !pEEType.IsValueType || pEEType.HasPointers;
        }

        /// <summary>True when T is a reference type.</summary>
        [Intrinsic]
        public static bool IsReference<T>()
        {
            var pEEType = EETypePtr.EETypePtrOf<T>();
            return !pEEType.IsValueType;
        }

        // Constrained Execution Regions APIs are NOP's because we do not support CERs in .NET Core at all.
        public static void ProbeForSufficientStack() { }
        public static void PrepareConstrainedRegions() { }
        public static void PrepareConstrainedRegionsNoOP() { }
        public static void PrepareMethod(RuntimeMethodHandle method) { }
        public static void PrepareMethod(RuntimeMethodHandle method, RuntimeTypeHandle[] instantiation) { }
        public static void PrepareContractedDelegate(Delegate d) { }

        // Preparation is a no-op here, but argument validation is preserved.
        public static void PrepareDelegate(Delegate d)
        {
            if (d == null)
                throw new ArgumentNullException(nameof(d));
        }

        /// <summary>
        /// Runs <paramref name="code"/> and guarantees <paramref name="backoutCode"/>
        /// executes afterwards, receiving whether an exception escaped the body.
        /// </summary>
        public static void ExecuteCodeWithGuaranteedCleanup(TryCode code, CleanupCode backoutCode, Object userData)
        {
            if (code == null)
                throw new ArgumentNullException(nameof(code));
            if (backoutCode == null)
                throw new ArgumentNullException(nameof(backoutCode));

            bool exceptionThrown = false;
            try
            {
                code(userData);
            }
            catch
            {
                // Flag for the cleanup delegate, then rethrow unchanged.
                exceptionThrown = true;
                throw;
            }
            finally
            {
                backoutCode(userData, exceptionThrown);
            }
        }

        public delegate void TryCode(Object userData);

        public delegate void CleanupCode(Object userData, bool exceptionThrown);

        /// <summary>
        /// Allocates a zero-filled instance of <paramref name="type"/> without
        /// running any instance constructor (serialization support).
        /// </summary>
        /// <exception cref="ArgumentNullException">type is null.</exception>
        /// <exception cref="SerializationException">type is not runtime-implemented.</exception>
        /// <exception cref="ArgumentException">type is an array/byref/pointer, a generic parameter, or string.</exception>
        /// <exception cref="MemberAccessException">type is open generic or abstract.</exception>
        /// <exception cref="NotSupportedException">type is a COM object or byref-like.</exception>
        public static object GetUninitializedObject(Type type)
        {
            if (type == null)
            {
                throw new ArgumentNullException(nameof(type), SR.ArgumentNull_Type);
            }

            if (!type.IsRuntimeImplemented())
            {
                throw new SerializationException(SR.Format(SR.Serialization_InvalidType, type.ToString()));
            }

            if (type.HasElementType || type.IsGenericParameter)
            {
                throw new ArgumentException(SR.Argument_InvalidValue);
            }

            if (type.ContainsGenericParameters)
            {
                throw new MemberAccessException(SR.Acc_CreateGeneric);
            }

            if (type.IsCOMObject)
            {
                throw new NotSupportedException(SR.NotSupported_ManagedActivation);
            }

            EETypePtr eeTypePtr = type.TypeHandle.ToEETypePtr();

            if (eeTypePtr == EETypePtr.EETypePtrOf<string>())
            {
                throw new ArgumentException(SR.Argument_NoUninitializedStrings);
            }

            if (eeTypePtr.IsAbstract)
            {
                throw new MemberAccessException(SR.Acc_CreateAbst);
            }

            if (eeTypePtr.IsByRefLike)
            {
                throw new NotSupportedException(SR.NotSupported_ByRefLike);
            }

            // Nullable<T> uninitialized means "no value": allocate the underlying T instead.
            if (eeTypePtr.IsNullable)
            {
                return GetUninitializedObject(ReflectionCoreNonPortable.GetRuntimeTypeForEEType(eeTypePtr.NullableType));
            }

            // Triggering the .cctor here is slightly different than desktop/CoreCLR, which
            // decide based on BeforeFieldInit, but we don't want to include BeforeFieldInit
            // in EEType just for this API to behave slightly differently.
            RunClassConstructor(type.TypeHandle);

            return RuntimeImports.RhNewObject(eeTypePtr);
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.IO;
using System.Collections.Generic;
using Microsoft.Build.Execution;
using Microsoft.Build.Framework;
using Microsoft.Build.Tasks;
using Microsoft.Build.Utilities;
using Xunit;
using Xunit.Abstractions;
using Shouldly;

namespace Microsoft.Build.UnitTests
{
    /// <summary>
    /// Unit tests for the <see cref="CreateItem"/> task: Include/Exclude set
    /// arithmetic, wildcard expansion with RecursiveDir, and additional metadata.
    /// </summary>
    sealed public class CreateItem_Tests
    {
        private readonly ITestOutputHelper _testOutput;

        public CreateItem_Tests(ITestOutputHelper output)
        {
            _testOutput = output;
        }

        /// <summary>
        /// CreateIteming identical lists results in empty list.
        /// </summary>
        [Fact]
        public void OneFromOneIsZero()
        {
            CreateItem t = new CreateItem();
            t.BuildEngine = new MockEngine();

            t.Include = new ITaskItem[] { new TaskItem("MyFile.txt") };
            t.Exclude = new ITaskItem[] { new TaskItem("MyFile.txt") };

            bool success = t.Execute();

            Assert.True(success);
            Assert.Empty(t.Include);
        }

        /// <summary>
        /// CreateIteming completely different lists results in left list.
        /// </summary>
        [Fact]
        public void OneFromOneMismatchIsOne()
        {
            CreateItem t = new CreateItem();
            t.BuildEngine = new MockEngine();

            t.Include = new ITaskItem[] { new TaskItem("MyFile.txt") };
            t.Exclude = new ITaskItem[] { new TaskItem("MyFileOther.txt") };

            bool success = t.Execute();

            Assert.True(success);
            Assert.Single(t.Include);
            Assert.Equal("MyFile.txt", t.Include[0].ItemSpec);
        }

        /// <summary>
        /// If 'Exclude' is unspecified, then 'Include' is the result.
        /// </summary>
        [Fact]
        public void UnspecifiedFromOneIsOne()
        {
            CreateItem t = new CreateItem();
            t.BuildEngine = new MockEngine();

            t.Include = new ITaskItem[] { new TaskItem("MyFile.txt") };

            bool success = t.Execute();

            Assert.True(success);
            Assert.Single(t.Include);
            // BUGFIX: previously compared t.Include[0].ItemSpec against itself,
            // which is a tautology and could never fail. Pin the expected value
            // instead, matching OneFromOneMismatchIsOne.
            Assert.Equal("MyFile.txt", t.Include[0].ItemSpec);
        }

        /// <summary>
        /// If 'Include' is unspecified, then empty is the result.
        /// </summary>
        [Fact]
        public void OneFromUnspecifiedIsEmpty()
        {
            CreateItem t = new CreateItem();
            t.BuildEngine = new MockEngine();

            t.Exclude = new ITaskItem[] { new TaskItem("MyFile.txt") };

            bool success = t.Execute();

            Assert.True(success);
            Assert.Empty(t.Include);
        }

        /// <summary>
        /// If 'Include' and 'Exclude' are unspecified, then empty is the result.
        /// </summary>
        [Fact]
        public void UnspecifiedFromUnspecifiedIsEmpty()
        {
            CreateItem t = new CreateItem();
            t.BuildEngine = new MockEngine();

            bool success = t.Execute();

            Assert.True(success);
            Assert.Empty(t.Include);
        }

        /// <summary>
        /// CreateItem is case insensitive: an Exclude differing only by case
        /// still removes the matching Include entry.
        /// </summary>
        [Fact]
        public void CaseDoesntMatter()
        {
            CreateItem t = new CreateItem();
            t.BuildEngine = new MockEngine();

            t.Include = new ITaskItem[] { new TaskItem("MyFile.txt") };
            t.Exclude = new ITaskItem[] { new TaskItem("myfile.tXt") };

            bool success = t.Execute();

            Assert.True(success);
            Assert.Empty(t.Include);
        }

        /// <summary>
        /// Using the CreateItem task to expand wildcards, and then try accessing the RecursiveDir
        /// metadata to force batching.
        /// </summary>
        [Fact]
        public void WildcardsWithRecursiveDir()
        {
            ObjectModelHelpers.DeleteTempProjectDirectory();

            ObjectModelHelpers.CreateFileInTempProjectDirectory("Myapp.proj", @"
                <Project ToolsVersion=`msbuilddefaulttoolsversion` xmlns=`msbuildnamespace`>
                  <Target Name =`Repro`>
                    <CreateItem Include=`**\*.txt`>
                      <Output TaskParameter=`Include` ItemName=`Text`/>
                    </CreateItem>
                    <Copy SourceFiles=`@(Text)` DestinationFiles=`Destination\%(RecursiveDir)%(Filename)%(Extension)`/>
                  </Target>
                </Project>
                ");

            ObjectModelHelpers.CreateFileInTempProjectDirectory("Foo.txt", "foo");
            ObjectModelHelpers.CreateFileInTempProjectDirectory(Path.Combine("Subdir", "Bar.txt"), "bar");

            MockLogger logger = new MockLogger(_testOutput);
            ObjectModelHelpers.BuildTempProjectFileExpectSuccess("Myapp.proj", logger);

            // Both the flat and the nested file must have been copied, with
            // RecursiveDir preserving the subdirectory structure.
            ObjectModelHelpers.AssertFileExistsInTempProjectDirectory(Path.Combine("Destination", "Foo.txt"));
            ObjectModelHelpers.AssertFileExistsInTempProjectDirectory(Path.Combine("Destination", "Subdir", "Bar.txt"));
        }

        /// <summary>
        /// Using the CreateItem task to expand wildcards and verifying that the RecursiveDir metadatum is successfully
        /// serialized/deserialized cross process.
        /// </summary>
        [Fact]
        public void RecursiveDirOutOfProc()
        {
            using var env = TestEnvironment.Create(_testOutput);

            ObjectModelHelpers.DeleteTempProjectDirectory();

            string projectFileFullPath = ObjectModelHelpers.CreateFileInTempProjectDirectory("Myapp.proj", @"
                <Project ToolsVersion=`msbuilddefaulttoolsversion` xmlns=`msbuildnamespace`>
                  <Target Name =`Repro` Returns=`@(Text)`>
                    <CreateItem Include=`**\*.txt`>
                      <Output TaskParameter=`Include` ItemName=`Text`/>
                    </CreateItem>
                  </Target>
                </Project>
                ");

            ObjectModelHelpers.CreateFileInTempProjectDirectory(Path.Combine("Subdir", "Bar.txt"), "bar");

            BuildRequestData data = new BuildRequestData(projectFileFullPath, new Dictionary<string, string>(), null, new string[] { "Repro" }, null);

            // Force the build onto an out-of-proc node so the metadata must round-trip
            // through serialization.
            BuildParameters parameters = new BuildParameters
            {
                DisableInProcNode = true,
                EnableNodeReuse = false,
                Loggers = new ILogger[] { new MockLogger(_testOutput) },
            };

            BuildResult result = BuildManager.DefaultBuildManager.Build(parameters, data);

            result.OverallResult.ShouldBe(BuildResultCode.Success);
            result.ResultsByTarget["Repro"].Items[0].GetMetadata("RecursiveDir").ShouldBe("Subdir" + Path.DirectorySeparatorChar);
        }

        /// <summary>
        /// CreateItem should add additional metadata when instructed
        /// </summary>
        [Fact]
        public void AdditionalMetaData()
        {
            CreateItem t = new CreateItem();
            t.BuildEngine = new MockEngine();

            t.Include = new ITaskItem[] { new TaskItem("MyFile.txt") };
            t.AdditionalMetadata = new string[] { "MyMetaData=SomeValue" };

            bool success = t.Execute();

            Assert.True(success);
            Assert.Equal("SomeValue", t.Include[0].GetMetadata("MyMetaData"));
        }

        /// <summary>
        /// We should be able to preserve the existing metadata on items
        /// </summary>
        [Fact]
        public void AdditionalMetaDataPreserveExisting()
        {
            CreateItem t = new CreateItem();
            t.BuildEngine = new MockEngine();

            TaskItem item = new TaskItem("MyFile.txt");
            item.SetMetadata("MyMetaData", "SomePreserveMeValue");

            t.Include = new ITaskItem[] { item };
            t.PreserveExistingMetadata = true;

            t.AdditionalMetadata = new string[] { "MyMetaData=SomeValue" };

            bool success = t.Execute();

            Assert.True(success);
            Assert.Equal("SomePreserveMeValue", t.Include[0].GetMetadata("MyMetaData"));
        }

        /// <summary>
        /// The default is to overwrite existing metadata on items
        /// </summary>
        [Fact]
        public void AdditionalMetaDataOverwriteExisting()
        {
            CreateItem t = new CreateItem();
            t.BuildEngine = new MockEngine();

            TaskItem item = new TaskItem("MyFile.txt");
            item.SetMetadata("MyMetaData", "SomePreserveMeValue");

            t.Include = new ITaskItem[] { item };

            // The default for CreateItem is to overwrite any existing metadata
            // t.PreserveExistingMetadata = false;

            t.AdditionalMetadata = new string[] { "MyMetaData=SomeOverwriteValue" };

            bool success = t.Execute();

            Assert.True(success);
            Assert.Equal("SomeOverwriteValue", t.Include[0].GetMetadata("MyMetaData"));
        }
    }
}
/* * Copyright (c) 2015, InWorldz Halcyon Developers * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of halcyon nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ using System; using System.Collections; using System.Collections.Generic; using System.Xml; using System.Net; using System.Reflection; using System.Timers; using System.Threading.Tasks; using log4net; using Nini.Config; using Nwc.XmlRpc; using OpenMetaverse; using OpenSim; using OpenSim.Framework; using OpenSim.Framework.Communications; using OpenSim.Framework.Communications.Cache; using OpenSim.Framework.Console; using OpenSim.Framework.Servers; using OpenSim.Framework.Servers.HttpServer; using OpenSim.Region.CoreModules.World.Terrain; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.Framework.Scenes; namespace InWorldz.RemoteAdmin { class RemoteAdminPlugin : IApplicationPlugin { #region Declares private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private string m_name = "RemoteAdmin"; private string m_version = "0.0"; private static Object rslock = new Object(); private RemoteAdmin m_admin = null; private OpenSimBase m_app = null; #endregion public string Version { get { return m_version; } } public string Name { get { return m_name; } } public void Initialize() { m_log.Info("[RADMIN]: " + Name + " cannot be default-initialized!"); throw new PluginNotInitializedException(Name); } public void Initialize(OpenSimBase openSim) { m_app = openSim; m_admin = new RemoteAdmin(); } public void PostInitialize() { m_admin.AddCommand("Region", "Restart", RegionRestartHandler); m_admin.AddCommand("Region", "SendAlert", RegionSendAlertHandler); m_admin.AddCommand("Region", "Shutdown", RegionShutdownHandler); m_admin.AddCommand("Region", "Backup", RegionBackupHandler); m_admin.AddCommand("Region", "Restore", RegionRestoreHandler); m_admin.AddCommand("Region", "LoadOAR", LoadOARHandler); m_admin.AddCommand("Region", "SaveOAR", SaveOARHandler); m_admin.AddCommand("Region", "ChangeParcelFlags", RegionChangeParcelFlagsHandler); m_admin.AddHandler(MainServer.Instance); } public void Dispose() { m_admin.Dispose(); } 
#region RemoteAdmin Region Handlers

        /// <summary>
        /// Restarts a region after a fixed 30 second warning period.
        /// </summary>
        /// <param name="args">[0] session id, [1] region UUID</param>
        /// <param name="remoteClient">Endpoint the request came from.</param>
        /// <returns>true on success</returns>
        private object RegionRestartHandler(IList args, IPEndPoint remoteClient)
        {
            m_admin.CheckSessionValid(new UUID((string)args[0]));

            UUID regionID = new UUID((string)args[1]);

            Scene rebootedScene;
            if (!m_app.SceneManager.TryGetScene(regionID, out rebootedScene))
                throw new Exception("region not found");

            rebootedScene.Restart(30);
            return true;
        }

        /// <summary>
        /// Shuts down a region (and then the whole simulator process) after an
        /// optional delay, alerting users roughly once a minute while counting down.
        /// </summary>
        /// <param name="args">[0] session id, [1] region UUID, [2] delay in seconds</param>
        /// <param name="remoteClient">Endpoint the request came from.</param>
        /// <returns>true on success</returns>
        public object RegionShutdownHandler(IList args, IPEndPoint remoteClient)
        {
            m_admin.CheckSessionValid(new UUID((string)args[0]));

            try
            {
                Scene rebootedScene;
                string message;

                UUID regionID = new UUID(Convert.ToString(args[1]));
                int delay = Convert.ToInt32(args[2]);

                if (!m_app.SceneManager.TryGetScene(regionID, out rebootedScene))
                    throw new Exception("Region not found");

                message = GenerateShutdownMessage(delay);

                m_log.DebugFormat("[RADMIN] Shutdown: {0}", message);

                IDialogModule dialogModule = rebootedScene.RequestModuleInterface<IDialogModule>();
                if (dialogModule != null)
                    dialogModule.SendGeneralAlert(message);

                ulong tcNow = Util.GetLongTickCount();
                ulong endTime = tcNow + (ulong)(delay * 1000);
                ulong nextReport = tcNow + (ulong)(60 * 1000);

                // Count down to shutdown, re-alerting users about once per minute.
                if (delay > 0)
                {
                    while (true)
                    {
                        System.Threading.Thread.Sleep(1000);

                        tcNow = Util.GetLongTickCount();
                        if (tcNow >= endTime)
                        {
                            break;
                        }

                        if (tcNow >= nextReport)
                        {
                            delay -= 60;

                            if (delay >= 0)
                            {
                                // BUGFIX: previously the countdown message was generated
                                // but never sent; deliver it to the region's users.
                                if (dialogModule != null)
                                    dialogModule.SendGeneralAlert(GenerateShutdownMessage(delay));

                                nextReport = tcNow + (ulong)(60 * 1000);
                            }
                        }
                    }
                }

                // Do this on a new thread so the actual shutdown call returns successfully.
                Task.Factory.StartNew(() => { m_app.Shutdown(); });
            }
            catch (Exception e)
            {
                m_log.ErrorFormat("[RADMIN] Shutdown: failed: {0}", e.Message);
                m_log.DebugFormat("[RADMIN] Shutdown: failed: {0}", e.ToString());
                throw;
            }

            m_log.Info("[RADMIN]: Shutdown Administrator Request complete");
            return true;
        }

        /// <summary>
        /// Builds a human readable "Region is going down in ..." message.
        /// </summary>
        /// <param name="delay">Seconds until shutdown; 0 or less means immediately.</param>
        /// <returns>The alert text to show users.</returns>
        private static string GenerateShutdownMessage(int delay)
        {
            string message;
            if (delay > 0)
            {
                if (delay <= 60)
                    message = "Region is going down in " + delay.ToString() + " second(s).";
                else
                    message = "Region is going down in " + (delay / 60).ToString() + " minute(s).";
            }
            else
            {
                message = "Region is going down now.";
            }

            return message;
        }

        /// <summary>
        /// Sends a general alert message to everyone in a region.
        /// </summary>
        /// <param name="args">[0] session id, [1] region name, [2] message text</param>
        /// <param name="remoteClient">Endpoint the request came from.</param>
        /// <returns>true on success, false if too few arguments were supplied</returns>
        public object RegionSendAlertHandler(IList args, IPEndPoint remoteClient)
        {
            m_admin.CheckSessionValid(new UUID((string)args[0]));

            if (args.Count < 3)
                return false;

            Scene scene;
            if (!m_app.SceneManager.TryGetScene((string)args[1], out scene))
                throw new Exception("region not found");

            String message = (string)args[2];

            IDialogModule dialogModule = scene.RequestModuleInterface<IDialogModule>();
            if (dialogModule != null)
                dialogModule.SendGeneralAlert(message);

            return true;
        }

        /// <summary>
        /// Load an OAR file into a region..
/// </summary>
        /// <param name="args">
        /// [0] an authenticated session id, [1] region UUID or name,
        /// [2] OAR file name, [3] allow user reassignment, [4] skip error groups
        /// </param>
        /// <param name="remoteClient">Endpoint the request came from.</param>
        /// <returns>true if the OAR was loaded, false on failure</returns>
        public object LoadOARHandler(IList args, IPEndPoint remoteClient)
        {
            m_admin.CheckSessionValid(new UUID((string)args[0]));

            Scene scene;
            if (!m_app.SceneManager.TryGetScene((string)args[1], out scene))
                throw new Exception("region not found");

            String filename = (string)args[2];
            bool allowUserReassignment = Convert.ToBoolean(args[3]);
            bool skipErrorGroups = Convert.ToBoolean(args[4]);

            m_log.Info("[RADMIN]: Received Load OAR Administrator Request");

            // Only one archive operation may run at a time.
            lock (rslock)
            {
                try
                {
                    IRegionArchiverModule archiver = scene.RequestModuleInterface<IRegionArchiverModule>();
                    if (archiver != null)
                        archiver.DearchiveRegion(filename, allowUserReassignment, skipErrorGroups);
                    else
                        throw new Exception("Archiver module not present for scene");

                    m_log.Info("[RADMIN]: Load OAR Administrator Request complete");
                    return true;
                }
                catch (Exception e)
                {
                    // Best effort: report failure to the caller as 'false'
                    // instead of propagating the exception.
                    m_log.InfoFormat("[RADMIN] LoadOAR: {0}", e.Message);
                    m_log.DebugFormat("[RADMIN] LoadOAR: {0}", e.ToString());
                }

                return false;
            }
        }

        /// <summary>
        /// Save a region's contents to an OAR file.
/// </summary>
        /// <param name="args">
        /// [0] an authenticated session id, [1] region UUID or name,
        /// [2] OAR file name, [3] whether to include assets in the archive
        /// </param>
        /// <param name="remoteClient">Endpoint the request came from.</param>
        /// <returns>true if the OAR was saved, false on failure</returns>
        public object SaveOARHandler(IList args, IPEndPoint remoteClient)
        {
            m_admin.CheckSessionValid(new UUID((string)args[0]));

            Scene scene;
            if (!m_app.SceneManager.TryGetScene((string)args[1], out scene))
                throw new Exception("region not found");

            String filename = (string)args[2];
            bool storeAssets = Convert.ToBoolean(args[3]);

            m_log.Info("[RADMIN]: Received Save OAR Administrator Request");

            // Only one archive operation may run at a time.
            lock (rslock)
            {
                try
                {
                    IRegionArchiverModule archiver = scene.RequestModuleInterface<IRegionArchiverModule>();
                    if (archiver != null)
                        archiver.ArchiveRegion(filename, storeAssets);
                    else
                        throw new Exception("Archiver module not present for scene");

                    m_log.Info("[RADMIN]: Save OAR Administrator Request complete");
                    return true;
                }
                catch (Exception e)
                {
                    // BUGFIX: these log lines were tagged "LoadOAR" (copy-paste);
                    // tag them with the operation that actually failed.
                    m_log.InfoFormat("[RADMIN] SaveOAR: {0}", e.Message);
                    m_log.DebugFormat("[RADMIN] SaveOAR: {0}", e.ToString());
                }

                return false;
            }
        }

        /// <summary>
        /// Back up a region to an OAR file by region name (explicit server-side backup).
/// </summary>
        /// <param name="args">
        /// [0] an authenticated session id, [1] region name,
        /// [2] OAR file name, [3] whether to include assets in the archive
        /// </param>
        /// <param name="remoteClient">Endpoint the request came from.</param>
        /// <returns>true if the backup was saved, false on failure</returns>
        public object RegionBackupHandler(IList args, IPEndPoint remoteClient)
        {
            m_admin.CheckSessionValid(new UUID((string)args[0]));

            String regionName = (string)args[1];
            String filename = (string)args[2];
            bool storeAssets = Convert.ToBoolean(args[3]);

            m_log.Info("[RADMIN]: Received Region Backup (SaveExplicitOAR) Administrator Request");

            // Only one archive operation may run at a time.
            lock (rslock)
            {
                try
                {
                    m_app.SceneManager.SaveExplicitOar(regionName, filename, storeAssets);

                    // BUGFIX: the completion/error logs said "Save OAR"/"SaveOAR"
                    // (copy-paste); tag them with the operation actually performed.
                    m_log.Info("[RADMIN]: Region Backup Administrator Request complete");
                    return true;
                }
                catch (Exception e)
                {
                    m_log.ErrorFormat("[RADMIN] RegionBackup: {0}", e.ToString());
                }

                return false;
            }
        }

        /// <summary>
        /// Restore a region's contents from an OAR file.
/// </summary>
        /// <param name="args">
        /// [0] an authenticated session id, [1] region UUID or name,
        /// [2] OAR file name, [3] allow user reassignment, [4] skip error groups
        /// </param>
        /// <param name="remoteClient">Endpoint the request came from.</param>
        /// <returns>true if the region was restored, false on failure</returns>
        public object RegionRestoreHandler(IList args, IPEndPoint remoteClient)
        {
            m_admin.CheckSessionValid(new UUID((string)args[0]));

            Scene scene;
            if (!m_app.SceneManager.TryGetScene((string)args[1], out scene))
                throw new Exception("region not found");

            String filename = (string)args[2];
            bool allowUserReassignment = Convert.ToBoolean(args[3]);
            bool skipErrorGroups = Convert.ToBoolean(args[4]);

            m_log.Info("[RADMIN]: Received Region Restore Administrator Request");

            // Only one archive operation may run at a time.
            lock (rslock)
            {
                try
                {
                    IRegionArchiverModule archiver = scene.RequestModuleInterface<IRegionArchiverModule>();
                    if (archiver != null)
                        archiver.DearchiveRegion(filename, allowUserReassignment, skipErrorGroups);
                    else
                        throw new Exception("Archiver module not present for scene");

                    // BUGFIX: the completion/error logs said "Load OAR"/"LoadOAR"
                    // (copy-paste); tag them with the operation actually performed.
                    m_log.Info("[RADMIN]: Region Restore Administrator Request complete");
                    return true;
                }
                catch (Exception e)
                {
                    m_log.InfoFormat("[RADMIN] RegionRestore: {0}", e.Message);
                    m_log.DebugFormat("[RADMIN] RegionRestore: {0}", e.ToString());
                }

                return false;
            }
        }

        /// <summary>
        /// Changes the flags for all parcels on a region.
        /// </summary>
        /// <param name="args">
        /// [0] an authenticated session id, [1] region UUID or name,
        /// [2] "enable" to set the mask bits / anything else to clear them,
        /// [3] parcel flag bitmask to apply
        /// </param>
        /// <param name="remoteClient">Endpoint the request came from.</param>
        /// <returns>true if all parcels were updated, false on failure</returns>
        public object RegionChangeParcelFlagsHandler(IList args, IPEndPoint remoteClient)
        {
            m_admin.CheckSessionValid(new UUID((string)args[0]));

            Scene scene;
            if (!m_app.SceneManager.TryGetScene((string)args[1], out scene))
                throw new Exception("region not found");

            // Culture-safe comparison (avoids e.g. the Turkish-I problem with ToLower).
            bool enable = String.Equals(args[2].ToString(), "enable", StringComparison.OrdinalIgnoreCase);
            uint mask = Convert.ToUInt32(args[3]);

            m_log.Info("[RADMIN]: Received Region Change Parcel Flags Request");

            lock (rslock)
            {
                try
                {
                    ILandChannel channel = scene.LandChannel;
                    List<ILandObject> parcels = channel.AllParcels();

                    foreach (var parcel in parcels)
                    {
                        // Work on a copy so the channel sees a single atomic update.
                        LandData data = parcel.landData.Copy();

                        if (enable)
                        {
                            data.Flags = data.Flags | mask;
                        }
                        else
                        {
                            data.Flags = data.Flags & ~mask;
                        }

                        channel.UpdateLandObject(parcel.landData.LocalID, data);
                    }

                    m_log.Info("[RADMIN]: Change Parcel Flags Request complete");
                    return true;
                }
                catch (Exception e)
                {
                    m_log.InfoFormat("[RADMIN] ChangeParcelFlags: {0}", e.Message);
                    m_log.DebugFormat("[RADMIN] ChangeParcelFlags: {0}", e.ToString());
                }

                return false;
            }
        }

        #endregion
    }
}
/* * Copyright (c) 2015, InWorldz Halcyon Developers * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of halcyon nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ using System; using System.Collections.Generic; using System.Linq; using System.Text; using OpenSim.Region.Physics.Manager; using log4net; using System.Reflection; namespace InWorldz.PhysxPhysics { class PhysxCharacter : PhysicsActor, IDisposable { //private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private const float MAX_WALKABLE_SLOPE = 65.0f; private const float TERMINAL_VELOCITY_GRAVITY = 55.0f; private const float CHARACTER_DENSITY = 60.0f; private const float CONTACT_OFFSET = 0.015f; private const float MIN_FORCE_MAG_BEFORE_ZEROING_SQUARED = 0.00625f; private const float STEP_OFFSET = 0.45f; private const float ACCELERATION_COMPARISON_TOLERANCE = 0.006f; private const float MIN_RIDEON_HALF_EXTENTS = 0.625f; private const float POSITION_COMPARISON_TOLERANCE = 0.1f; private const float GRAVITY_PUSHBACK_DIFF_TOLERANCE = 0.001f; private const int VELOCITY_RAMPUP_TIME = 600; /// <summary> /// Maximum amount of time that is allowed to be passed to the velocity /// and other computations. 
If more time than this has passed, the /// movements of the avatar will be dilated to prevent explosions /// </summary> private const float MAX_TIMESTEP = 0.5f; /// <summary> /// The minimum tick count on windows /// </summary> private const float MIN_TIMESTEP = 0.0156f; private PhysxScene _scene; private PhysX.CapsuleController _controller; private OpenMetaverse.Vector3 _position; private OpenMetaverse.Quaternion _rotation; private OpenSim.Framework.Geom.Box _OBBobject; private float _height; private float _radius; private volatile bool _flying; private float _mass; /// <summary> /// The total resultant velocity from all the applied forces /// </summary> private OpenMetaverse.Vector3 _velocity; /// <summary> /// Target velocity set by the user (walking, flying, etc) /// </summary> private OpenMetaverse.Vector3 _vTarget; /// <summary> /// The current velocity due to gravity clamped at terminal /// </summary> private OpenMetaverse.Vector3 _vGravity; /// <summary> /// Current self-decaying forces acting on an avatar /// </summary> private OpenMetaverse.Vector3 _vForces; /// <summary> /// Current constant forces acting on an avatar /// </summary> // TODO - these two variables have to be serialized. // [...] private bool _cForcesAreLocal = false; private OpenMetaverse.Vector3 _cForces; /// <summary> /// Our current acceleration /// </summary> private OpenMetaverse.Vector3 _acceleration; private volatile bool _colliding = false; private volatile bool _collidingGround = false; // private volatile bool _collidingGroundMesh = false; /// <summary> /// Whether or not this character is frozen in place with its state intact. /// Used when moving a character between regions. 
/// </summary> private bool _suspended = false; uint _lastSync; private UserControllerHitReportDelegator _hitReportDelegator; private HashSet<PhysX.Shape> _collisionsTwoFramesAgo = new HashSet<PhysX.Shape>(); private HashSet<PhysX.Shape> _collisionsLastFrame = new HashSet<PhysX.Shape>(); private Dictionary<PhysxPrim, int> _collidingPrims = new Dictionary<PhysxPrim, int>(); private class ExternalReport { public bool RemoveAfterReport; public bool Reported; } /// <summary> /// This keeps tract of shape collisions reported by PhysX directly that aren't the result /// of character controller scans /// </summary> private Dictionary<PhysX.Shape, ExternalReport> _externalCollisionReports = new Dictionary<PhysX.Shape, ExternalReport>(); /// <summary> /// This keeps tract of the owners of shapes we have collided with in case they are deleted /// so that we can remove tracked shapes in the event that the prim gets deleted /// </summary> private Dictionary<PhysxPrim, HashSet<PhysX.Shape>> _externalCollisionPrims = new Dictionary<PhysxPrim, HashSet<PhysX.Shape>>(); private uint _localId; private bool _brakes; private bool _running; private bool _disposed = false; private CharacterRideOnBehavior _rideOnBehavior = new CharacterRideOnBehavior(); private OpenMetaverse.Vector4 _collisionPlane = new OpenMetaverse.Vector4(0f, 0f, 0f, 1f); private ulong _lastVelocityNonZero = 0; private static readonly PhysX.ControllerFilters FILTERS = new PhysX.ControllerFilters { ActiveGroups = (int)(CollisionGroupFlag.Character | CollisionGroupFlag.Ground | CollisionGroupFlag.Normal), FilterFlags = PhysX.SceneQueryFilterFlag.Static | PhysX.SceneQueryFilterFlag.Dynamic | PhysX.SceneQueryFilterFlag.Prefilter, FilterData = CollisionGroup.GetFilterData((uint)(PhysX.PairFlag.NotifyTouchFound | PhysX.PairFlag.NotifyTouchLost), 0, CollisionGroupFlag.Character) }; public override bool Disposed { get { return _disposed; } } public PhysxCharacter(PhysxScene scene, float height, float radius, 
OpenMetaverse.Vector3 position, OpenMetaverse.Quaternion rotation, bool flying, OpenMetaverse.Vector3 initialVelocity) { _scene = scene; _radius = Math.Max(radius, 0.2f); /* * The capsule is defined as a position, a vertical height, and a radius. The height is the distance between the * two sphere centers at the end of the capsule. In other words: * * p = pos (returned by controller) * h = height * r = radius * * p = center of capsule * top sphere center = p.y + h*0.5 * bottom sphere center = p.y - h*0.5 * top capsule point = p.y + h*0.5 + r * bottom capsule point = p.y - h*0.5 - r */ _height = height; _flying = flying; float volume = (float)(Math.PI * Math.Pow(_radius, 2) * this.CapsuleHeight); _mass = CHARACTER_DENSITY * volume; _position = position; _rotation = rotation; _hitReportDelegator = new UserControllerHitReportDelegator(); _hitReportDelegator.OnShapeHitCallback += this.OnShapeHit; _hitReportDelegator.OnControllerHitCallback += this.OnControllerHit; PhysX.CapsuleControllerDesc controllerDesc = new PhysX.CapsuleControllerDesc { Height = this.CapsuleHeight, Radius = _radius, StepOffset = STEP_OFFSET, UpDirection = new PhysX.Math.Vector3(0.0f, 0.0f, 1.0f), Position = PhysUtil.OmvVectorToPhysx(position), Material = scene.DEFAULT_MATERIAL, InteractionMode = PhysX.CCTInteractionMode.Include, SlopeLimit = (float)Math.Cos(OpenMetaverse.Utils.DEG_TO_RAD * MAX_WALKABLE_SLOPE), ContactOffset = CONTACT_OFFSET, Callback = _hitReportDelegator, BehaviorCallback = _rideOnBehavior }; _controller = _scene.ControllerManager.CreateController<PhysX.CapsuleController>(controllerDesc); _controller.Actor.UserData = this; DoZDepenetration(); _controller.ShapeFilterData = CollisionGroup.GetFilterData((uint)(PhysX.PairFlag.NotifyTouchFound | PhysX.PairFlag.NotifyTouchLost), 0, CollisionGroupFlag.Character); _lastSync = (uint)Environment.TickCount; _vTarget = initialVelocity; _velocity = initialVelocity; if (_vTarget != OpenMetaverse.Vector3.Zero) { //hack to continue at velocity 
until the controller picks up _lastVelocityNonZero = OpenSim.Framework.Util.GetLongTickCount() - VELOCITY_RAMPUP_TIME; } } private void DoZDepenetration() { PhysX.CapsuleGeometry capsule = new PhysX.CapsuleGeometry(_radius, this.CapsuleHeight / 2.0f); float zdepen = CalculateDepenetrationZOffset(_position, capsule, _controller.Actor.Shapes.First()); if (zdepen > 0.0f) { _position.Z += zdepen; _controller.Position = PhysUtil.OmvVectorToPhysx(_position); } } private float CalculateDepenetrationZOffset(OpenMetaverse.Vector3 pos, PhysX.Geometry avaGeom, PhysX.Shape avaShape) { const int MAX_ITERATIONS = 8; const float PUSH_MULTIPLIER = 1.5F; float pushFactor = 0.1f; OpenMetaverse.Vector3 offset = OpenMetaverse.Vector3.Zero; bool foundOverlap = false; //constant from looking at the rot returned from the live avatar, //remember that capsules are always upright, and z rotations don't have an effect //on their geometry OpenMetaverse.Quaternion capsuleRot = new OpenMetaverse.Quaternion(0f, -0.7071069f, 0f, 0.7071067f); for (int i = 0; i < MAX_ITERATIONS; i++) { foundOverlap = false; OpenMetaverse.Vector3 translatedPose = pos + offset; PhysX.Shape[] overlap = _scene.SceneImpl.OverlapMultiple(avaGeom, PhysUtil.PositionToMatrix(translatedPose, capsuleRot)); if (overlap == null) { foundOverlap = true; } else { foreach (var shape in overlap) { if (shape != avaShape && !ShapeIsVolumeDetect(shape)) { foundOverlap = true; break; } } } if (foundOverlap && i + 1 < MAX_ITERATIONS) { offset += new OpenMetaverse.Vector3(0f, 0f, pushFactor); pushFactor *= PUSH_MULTIPLIER; } else { break; } } if (foundOverlap == false && offset != OpenMetaverse.Vector3.Zero) { return offset.Z; } return 0.0f; } private bool ShapeIsVolumeDetect(PhysX.Shape shape) { return (shape.Flags & PhysX.ShapeFlag.TriggerShape) != 0; } private float CapsuleHeight { get { return _height - (_radius * 2.0f); } } public override bool Stopped { get { return false; } } public override OpenMetaverse.Vector3 Size { get { 
return new OpenMetaverse.Vector3(_radius*2.0f, _radius*2.0f, _height); } set { _scene.QueueCommand(new Commands.GenericSyncCmd( (PhysxScene scene) => { if (_height != value.Z) { _height = value.Z; _controller.Height = this.CapsuleHeight; DoZDepenetration(); } } )); } } public override OpenSim.Framework.PrimitiveBaseShape Shape { set { } get { return null; } } public override uint LocalID { set { _localId = value; } get { return _localId; } } public override OpenMetaverse.UUID Uuid { get { return OpenMetaverse.UUID.Zero; } set { } } public override bool Grabbed { set { } } public override bool Selected { set { } get { return false; } } public override void CrossingFailure() { } public override void ForceAboveParcel(float height) { } public override void DelinkFromParent(OpenMetaverse.Vector3 newWorldPos, OpenMetaverse.Quaternion newWorldRot) { } public override OpenMetaverse.Vector3 GetLockedAngularMotion() { return OpenMetaverse.Vector3.Zero; } public override void LockAngularMotion(OpenMetaverse.Vector3 axis) { } public override OpenMetaverse.Vector3 Position { get { return _position; } set { _position = value; _scene.QueueCommand( new Commands.GenericSyncCmd((PhysxScene scene) => { _position = value; _controller.Position = PhysUtil.OmvVectorToPhysx(value); DoZDepenetration(); RequestPhysicsTerseUpdate(); } )); } } public override float Mass { get { return _mass; } } public override OpenMetaverse.Vector3 Force { get { return _velocity * _mass; } } public override OpenMetaverse.Vector3 ConstantForce { get { return _cForces; } } public override bool ConstantForceIsLocal { get { return _cForcesAreLocal; } } public override OpenSim.Framework.Geom.Box OBBobject { get { return _OBBobject; } set { _OBBobject = value; } } public override void SetVolumeDetect(bool param) { } public override OpenMetaverse.Vector3 GeometricCenter { get { return new OpenMetaverse.Vector3(); } } public override OpenMetaverse.Vector3 CenterOfMass { get { return new OpenMetaverse.Vector3(); } } 
public override OpenMetaverse.Vector3 Velocity { get { return _velocity; } set { if (_vTarget == OpenMetaverse.Vector3.Zero && value != OpenMetaverse.Vector3.Zero) { _lastVelocityNonZero = OpenSim.Framework.Util.GetLongTickCount() - 30; //dont begin stopped } _vTarget = value; } } public override OpenMetaverse.Vector3 Torque { get { return new OpenMetaverse.Vector3(); } set { } } public override float CollisionScore { get { return 0; } set { } } public override OpenMetaverse.Vector3 Acceleration { get { return _acceleration; } } public override OpenMetaverse.Quaternion Rotation { get { return _rotation; } set { _rotation = value; //m_log.DebugFormat("[PhysxCharacter] new rot={0}", value); } } public override ActorType PhysicsActorType { get { return ActorType.Agent; } } public override bool IsPhysical { get { return true; } } public override bool Flying { get { return _flying; } set { _flying = value; } } public override bool SetAirBrakes { get { return _brakes; } set { _brakes = value; } } public override bool SetAlwaysRun { get { return _running; } set { _running = value; } } public override bool ThrottleUpdates { get { return false; } set { } } public override bool IsColliding { get { return _colliding; } set { } } public override bool CollidingGround { get { return _collidingGround; } set { } } public override bool CollidingObj { get { return _colliding && !_collidingGround; } set { } } public override bool FloatOnWater { set { } } public override OpenMetaverse.Vector3 AngularVelocity { get { return new OpenMetaverse.Vector3(); } set { } } public override OpenMetaverse.Vector3 AngularVelocityTarget { get { return new OpenMetaverse.Vector3(); } set { } } public override float Buoyancy { get { return 0; } set { } } public override IPhysicsProperties Properties { get { return null; } } private OpenMetaverse.Vector3 VTargetWithRunAndRamp { get { if (_vTarget == OpenMetaverse.Vector3.Zero) return OpenMetaverse.Vector3.Zero; OpenMetaverse.Vector3 baseTarget = 
_vTarget; if (_running && !_flying) { baseTarget *= 2.0f; } return ComputeVelocityRamp(baseTarget); } } private OpenMetaverse.Vector3 ComputeVelocityRamp(OpenMetaverse.Vector3 baseTarget) { ulong accelerationTime = OpenSim.Framework.Util.GetLongTickCount() - _lastVelocityNonZero; if (accelerationTime >= VELOCITY_RAMPUP_TIME) return baseTarget; //fully accelerated /* float x = (float)Math.Pow(2.0, Math.Log(Math.Abs(baseTarget.X) * (((double)accelerationTime / VELOCITY_RAMPUP_TIME)))) * Math.Sign(baseTarget.X); float y = (float)Math.Pow(2.0, Math.Log(Math.Abs(baseTarget.Y) * (((double)accelerationTime / VELOCITY_RAMPUP_TIME)))) * Math.Sign(baseTarget.Y); float z = (float)Math.Pow(2.0, Math.Log(Math.Abs(baseTarget.Z) * (((double)accelerationTime / VELOCITY_RAMPUP_TIME)))) * Math.Sign(baseTarget.Z); */ //linear ramp OpenMetaverse.Vector3 result = baseTarget * ((float)accelerationTime / VELOCITY_RAMPUP_TIME); //m_log.DebugFormat("[CHAR]: {0}", result); return result; } public static readonly OpenMetaverse.Vector4 NoCollPlane = new OpenMetaverse.Vector4(0f, 0f, 0f, 1f); public override OpenMetaverse.Vector4 CollisionPlane { get { if (CollidingGround) { return _collisionPlane; } else { return NoCollPlane; } } } public override void AddForce(OpenMetaverse.Vector3 force, ForceType ftype) { _scene.QueueCommand(new Commands.AddForceCmd(this, force, OpenMetaverse.Vector3.Zero, ftype)); } public override void AddAngularForce(OpenMetaverse.Vector3 force, ForceType ftype) { } public override void SubscribeCollisionEvents(int ms) { } public override void UnSubscribeEvents() { } public override bool SubscribedEvents() { return false; } public override void SyncWithPhysics(float timeStep, uint ticksSinceLastSimulate, uint frameNum) { if (_suspended) { // character is in the middle of a crossing. 
we do not simulate _lastSync = (uint)Environment.TickCount; return; } float secondsSinceLastSync = Math.Min(((uint)Environment.TickCount - _lastSync) * 0.001f, MAX_TIMESTEP); //m_log.DebugFormat("[CHAR]: secondsSinceLastSync: {0}", secondsSinceLastSync); //sometimes a single quantum doesnt show up here, and the calculation returns a zero if (secondsSinceLastSync < MIN_TIMESTEP * 2) { secondsSinceLastSync = MIN_TIMESTEP * 2; } AccumulateGravity(secondsSinceLastSync); DecayForces(secondsSinceLastSync); OpenMetaverse.Vector3 cforces = _cForcesAreLocal ? _cForces * _rotation : _cForces; cforces.Z = 0; OpenMetaverse.Vector3 vCombined = ApplyAirBrakes(_vGravity + _vForces + cforces + this.VTargetWithRunAndRamp) * secondsSinceLastSync; //m_log.DebugFormat("[CHAR]: vGrav: {0}, vForces: {1}, cForces: {2}, vTarget {3}", _vGravity, _vForces, cforces, this.VTargetWithRunAndRamp); if (vCombined == OpenMetaverse.Vector3.Zero) { SetVelocityAndRequestTerseUpdate(secondsSinceLastSync, OpenMetaverse.Vector3.Zero); ReportCollisionsFromLastFrame(frameNum); return; } OpenMetaverse.Vector3 lastPosition = _position; PhysX.ControllerFlag flags = _controller.Move(PhysUtil.OmvVectorToPhysx(vCombined), TimeSpan.FromSeconds(secondsSinceLastSync), 0.001f, FILTERS); _position = PhysUtil.PhysxVectorToOmv(_controller.Position); _lastSync = (uint)Environment.TickCount; //take into account any movement not accounted for by the other calculations //this is due to collision OpenMetaverse.Vector3 vColl = (_position - lastPosition) - vCombined; //m_log.InfoFormat("vColl {0} {1} PosDiff: {2} Expected: {3}", vColl, flags, _position - lastPosition, vCombined); //m_log.DebugFormat("[CHAR]: vColl: {0}", vColl); bool collidingDown = (flags & PhysX.ControllerFlag.Down) != 0; if (!collidingDown) _rideOnBehavior.AvatarNotStandingOnPrim(); //negative z in vcoll while colliding down is due to gravity/ground collision, dont report it float gravityPushback = Math.Abs(_vGravity.Z) * secondsSinceLastSync; if 
(collidingDown && vColl.Z > 0 && Math.Abs(vColl.Z - gravityPushback) < GRAVITY_PUSHBACK_DIFF_TOLERANCE)
                vColl.Z = 0;

            //m_log.DebugFormat("[CHAR]: vColl: {0} gravityPushback {1} collidingDown:{2}", vColl, gravityPushback, collidingDown);

            if (flags != 0)
            {
                // At least one contact flag is set this frame
                _colliding = true;

                if (collidingDown)
                {
                    // Standing on something: kill flight, gravity and any vertical target/impulse motion
                    _collidingGround = true;
                    _flying = false;
                    _vGravity = OpenMetaverse.Vector3.Zero;
                    _vForces.Z = 0.0f;
                    _vTarget.Z = 0.0f;
                }
                else
                {
                    _collidingGround = false;
                    //if we're colliding with anything but the ground, zero out other forces
                    _vForces = OpenMetaverse.Vector3.Zero;
                }
            }
            else
            {
                _colliding = false;
                _collidingGround = false;
            }

            // Ground-penetration check is throttled to every third frame
            if (frameNum % 3 == 0)
            {
                CheckAvatarNotBelowGround();
            }

            SetVelocityAndRequestTerseUpdate(secondsSinceLastSync, vColl);
            ReportCollisionsFromLastFrame(frameNum);

            // Only push a position update upstream when the avatar actually moved
            if (!_position.ApproxEquals(lastPosition, POSITION_COMPARISON_TOLERANCE))
            {
                RequestPhysicsPositionUpdate();
            }
        }

        /// <summary>
        /// If the avatar has sunk below the terrain surface, zeroes all motion and
        /// snaps the controller back to a decimeter above the ground.
        /// </summary>
        private void CheckAvatarNotBelowGround()
        {
            float groundHeight = _scene.TerrainChannel.CalculateHeightAt(_position.X, _position.Y);
            if (_position.Z < groundHeight)
            {
                _vForces = OpenMetaverse.Vector3.Zero;
                _vGravity = OpenMetaverse.Vector3.Zero;
                _vTarget = OpenMetaverse.Vector3.Zero;

                //place the avatar a decimeter above the ground
                _position.Z = groundHeight + (_height / 2.0f) + 0.1f;
                _controller.Position = PhysUtil.OmvVectorToPhysx(_position);
            }
        }

        /// <summary>
        /// Diffs the collision set collected last frame against the one from two frames ago
        /// and emits began/continued/ended collision events. Also folds in externally reported
        /// (non-controller) collisions and swaps the two collision-set buffers when done.
        /// </summary>
        /// <param name="frameNum">Current physics frame number (used to throttle "continues" reports)</param>
        private void ReportCollisionsFromLastFrame(uint frameNum)
        {
            IList<PhysX.Shape> shapesThatNeedDelete = DumpContinuingExternalCollisionsToLastFrame();

            //try to optimize the common case where the collision set hasnt changed
            if (_collisionsLastFrame.Count == _collisionsTwoFramesAgo.Count && _collisionsLastFrame.SetEquals(_collisionsTwoFramesAgo) &&
                (shapesThatNeedDelete == null || shapesThatNeedDelete.Count == 0))
            {
                ReportContinuingCollisionList(frameNum, _collisionsLastFrame);
                _collisionsLastFrame.Clear();
                return;
            }

            // Set arithmetic between the two buffered frames classifies every contact
            IEnumerable<PhysX.Shape> continuingCollisions = _collisionsLastFrame.Intersect(_collisionsTwoFramesAgo);
            IEnumerable<PhysX.Shape> newCollisions = _collisionsLastFrame.Except(_collisionsTwoFramesAgo);
            IEnumerable<PhysX.Shape> endedCollisions = _collisionsTwoFramesAgo.Except(_collisionsLastFrame);

            // External one-shot collisions are reported as both began and ended
            ReportNewCollisions(newCollisions);
            ReportNewCollisions(shapesThatNeedDelete);
            ReportEndedCollisions(endedCollisions);
            ReportEndedCollisions(shapesThatNeedDelete);

            if (shapesThatNeedDelete != null)
            {
                foreach (PhysX.Shape shape in shapesThatNeedDelete)
                {
                    RemoveExternalCollidingPrimShape(shape, (PhysxPrim)shape.Actor.UserData);
                }
            }

            ReportContinuingCollisionList(frameNum, continuingCollisions);

            SendCollisionUpdate(new CollisionEventUpdate { Type = CollisionEventUpdateType.CharacterCollisionsChanged });

            // Swap the double-buffered collision sets: last frame becomes "two frames ago",
            // and the cleared old buffer is reused for the next frame's collection
            HashSet<PhysX.Shape> oldTwoFramesAgo = _collisionsTwoFramesAgo;
            oldTwoFramesAgo.Clear();
            _collisionsTwoFramesAgo = _collisionsLastFrame;
            _collisionsLastFrame = oldTwoFramesAgo;
        }

        /// <summary>
        /// Sends CollisionEnded-style events for every shape we are no longer touching,
        /// decrementing (and pruning) the per-prim contact refcount. Handles prim,
        /// terrain and character colliders.
        /// </summary>
        private void ReportEndedCollisions(IEnumerable<PhysX.Shape> endedCollisions)
        {
            if (endedCollisions == null) return;

            foreach (PhysX.Shape shape in endedCollisions)
            {
                PhysxPrim primActor = shape.Actor.UserData as PhysxPrim;
                if (primActor != null)
                {
                    if (primActor.Properties.WantsCollisionNotification || primActor.Properties.ChildrenWantCollisionNotification)
                    {
                        primActor.OnCharacterContactChangeSync(shape, this, CollisionEventUpdateType.CollisionEnded);
                    }

                    //send the collision update to the scene presence
                    SendCollisionUpdate(new CollisionEventUpdate { Type = CollisionEventUpdateType.CollisionEnded, OtherColliderLocalId = primActor.LocalID, OtherColliderUUID = primActor.Uuid });

                    // A prim may touch us through several shapes; only forget it when the
                    // last contacting shape has ended
                    int count;
                    if (_collidingPrims.TryGetValue(primActor, out count))
                    {
                        --count;
                        if (count == 0)
                        {
                            _collidingPrims.Remove(primActor);
                        }
                        else
                        {
                            _collidingPrims[primActor] = count;
                        }
                    }
                }
                else
                {
                    //terrain?
                    if (shape.Actor.UserData is TerrainManager)
                    {
                        //send the collision update to the scene presence
                        SendCollisionUpdate(new CollisionEventUpdate { Type = CollisionEventUpdateType.LandCollisionEnded, CollisionLocation = _position });
                    }
                    else
                    {
                        //character?
                        PhysxCharacter otherChar = shape.Actor.UserData as PhysxCharacter;
                        if (otherChar != null)
                        {
                            //send the collision update to the scene presence
                            SendCollisionUpdate(new CollisionEventUpdate { Type = CollisionEventUpdateType.CharacterCollisionEnded, OtherColliderLocalId = otherChar.LocalID });
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Sends CollisionBegan-style events for every newly contacted shape and
        /// increments the per-prim contact refcount. Handles prim, terrain and
        /// character colliders.
        /// </summary>
        private void ReportNewCollisions(IEnumerable<PhysX.Shape> newCollisions)
        {
            if (newCollisions == null) return;

            foreach (PhysX.Shape shape in newCollisions)
            {
                PhysxPrim primActor = shape.Actor.UserData as PhysxPrim;
                if (primActor != null)
                {
                    if (primActor.Properties.WantsCollisionNotification || primActor.Properties.ChildrenWantCollisionNotification)
                    {
                        primActor.OnCharacterContactChangeSync(shape, this, CollisionEventUpdateType.CollisionBegan);
                    }

                    //send the collision update to the scene presence
                    SendCollisionUpdate(new CollisionEventUpdate { Type = CollisionEventUpdateType.CollisionBegan, OtherColliderLocalId = primActor.LocalID, OtherColliderUUID = primActor.Uuid });

                    // Track how many of this prim's shapes are touching us
                    int count;
                    if (_collidingPrims.TryGetValue(primActor, out count))
                    {
                        _collidingPrims[primActor] = ++count;
                    }
                    else
                    {
                        _collidingPrims.Add(primActor, 1);
                    }
                }
                else
                {
                    if (shape.Actor.UserData is TerrainManager)
                    {
                        //send the collision update to the scene presence
                        SendCollisionUpdate(new CollisionEventUpdate { Type = CollisionEventUpdateType.LandCollisionBegan, CollisionLocation = _position });
                    }
                    else
                    {
                        //character?
                        PhysxCharacter otherChar = shape.Actor.UserData as PhysxCharacter;
                        if (otherChar != null)
                        {
                            //send the collision update to the scene presence
                            SendCollisionUpdate(new CollisionEventUpdate { Type = CollisionEventUpdateType.CharacterCollisionBegan, OtherColliderLocalId = otherChar.LocalID });
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Sends "collision continues" events for still-touching shapes, but only on frames
        /// divisible by 3 or 4 (roughly half of all frames) to throttle update traffic.
        /// </summary>
        private void ReportContinuingCollisionList(uint frameNum, IEnumerable<PhysX.Shape> continuingCollisions)
        {
            if (frameNum % 3 == 0 || frameNum % 4 == 0)
            {
                foreach (PhysX.Shape shape in continuingCollisions)
                {
                    PhysxPrim primActor = shape.Actor.UserData as PhysxPrim;
                    if (primActor != null)
                    {
                        //send the collision update to the scene presence
                        SendCollisionUpdate(new CollisionEventUpdate { Type = CollisionEventUpdateType.CollisionContinues, OtherColliderLocalId = primActor.LocalID, OtherColliderUUID = primActor.Uuid });
                    }
                    else
                    {
                        TerrainManager terrainMgr = shape.Actor.UserData as TerrainManager;
                        if (terrainMgr != null)
                        {
                            //send the ground collision update to the scene presence
                            SendCollisionUpdate(new CollisionEventUpdate { Type = CollisionEventUpdateType.LandCollisionContinues, CollisionLocation = _position });
                        }
                        else
                        {
                            PhysxCharacter otherChar = shape.Actor.UserData as PhysxCharacter;
                            if (otherChar != null)
                            {
                                //send the ground collision update to the scene presence
                                SendCollisionUpdate(new CollisionEventUpdate { Type = CollisionEventUpdateType.CharacterCollisionContinues, OtherColliderLocalId = otherChar.LocalID });
                            }
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Marks all externally reported collisions as reported and merges the ongoing ones
        /// into _collisionsLastFrame. Returns the shapes flagged RemoveAfterReport (which
        /// must be reported once more and then dropped), or null if there are none.
        /// The Lazy wrapper avoids allocating the list in the common no-removal case.
        /// </summary>
        private IList<PhysX.Shape> DumpContinuingExternalCollisionsToLastFrame()
        {
            Lazy<List<PhysX.Shape>> needsDeleteShapes = new Lazy<List<PhysX.Shape>>();

            foreach (var shapeReportKVP in _externalCollisionReports)
            {
                shapeReportKVP.Value.Reported = true;

                if (shapeReportKVP.Value.RemoveAfterReport)
                {
                    needsDeleteShapes.Value.Add(shapeReportKVP.Key);
                }
                else
                {
                    _collisionsLastFrame.Add(shapeReportKVP.Key);
                }
            }

            if (needsDeleteShapes.IsValueCreated)
            {
                return needsDeleteShapes.Value;
            }
            else
            {
                return null;
            }
        }

        /// <summary>
        /// Recomputes the avatar velocity from gravity, impulses, constant forces, the
        /// movement target and the collision correction, then requests a terse update
        /// when the velocity stops or the acceleration changes measurably.
        /// </summary>
        /// <param name="secondsSinceLastSync">Elapsed simulated time since the previous sync</param>
        /// <param name="vColl">Velocity correction derived from collisions this frame</param>
        private void SetVelocityAndRequestTerseUpdate(float secondsSinceLastSync, OpenMetaverse.Vector3 vColl)
        {
            // Constant forces may be expressed in avatar-local space; rotate them to world
            // space if so. Their Z component is ignored here (handled by gravity accumulation).
            OpenMetaverse.Vector3 cforces = _cForcesAreLocal ? _cForces * _rotation : _cForces;
            cforces.Z = 0;

            OpenMetaverse.Vector3 oldVelocity = _velocity;
            _velocity = ApplyAirBrakes(_vGravity + _vForces + cforces + this.VTargetWithRunAndRamp + vColl);

            if (_velocity == OpenMetaverse.Vector3.Zero && oldVelocity != OpenMetaverse.Vector3.Zero)
            {
                // Came to a full stop: report it exactly once
                _acceleration = OpenMetaverse.Vector3.Zero;
                RequestPhysicsTerseUpdate();
            }
            else
            {
                OpenMetaverse.Vector3 velDiff = _velocity - oldVelocity;
                OpenMetaverse.Vector3 accel = velDiff / secondsSinceLastSync;
                if (!accel.ApproxEquals(_acceleration, ACCELERATION_COMPARISON_TOLERANCE))
                {
                    _acceleration = accel;
                    RequestPhysicsTerseUpdate();
                    //m_log.DebugFormat("Avatar Terse Vel: {0} Accel: {1} Sync: {2}", _velocity, _acceleration, secondsSinceLastSync);
                    //m_log.DebugFormat("Vel Breakdown: vGravity {0} vForces {1} vTarget {2} vColl {3}", _vGravity, _vForces, this.VTargetWithRun, vColl);
                }
            }
        }

        /// <summary>
        /// Decays accumulated impulse forces over time. User movement cancels horizontal
        /// impulses immediately (all impulses when flying); otherwise forces decay
        /// proportionally, faster while on the ground, and are zeroed below a threshold.
        /// </summary>
        private void DecayForces(float secondsSinceLastSync)
        {
            if (_vForces != OpenMetaverse.Vector3.Zero)
            {
                if (_vTarget != OpenMetaverse.Vector3.Zero)
                {
                    if (!_flying)
                    {
                        //user movement instantly cancels any x or y axis movement, but
                        //it does not cancel z axis movement while jumping. This allows the user to have
                        //a nice jump while walking
                        _vForces.X = 0f;
                        _vForces.Y = 0f;
                    }
                    else
                    {
                        _vForces = OpenMetaverse.Vector3.Zero;
                    }
                }
                else
                {
                    //decay velocity in relation to velocity to badly mimic drag
                    OpenMetaverse.Vector3 decayForce;
                    if (_collidingGround)
                    {
                        decayForce = OpenMetaverse.Vector3.Multiply(_vForces, 2.0f * secondsSinceLastSync);
                    }
                    else
                    {
                        decayForce = OpenMetaverse.Vector3.Multiply(_vForces, 1.0f * secondsSinceLastSync);
                    }

                    _vForces -= decayForce;

                    if (_vForces.LengthSquared() < MIN_FORCE_MAG_BEFORE_ZEROING_SQUARED)
                    {
                        _vForces = OpenMetaverse.Vector3.Zero;
                    }
                }
            }
        }

        /// <summary>
        /// Integrates gravity (plus the Z component of any constant force) into the
        /// avatar's state. Flying clears gravity; an upward impulse is bled off first
        /// before downward velocity accumulates, capped at terminal velocity.
        /// </summary>
        private void AccumulateGravity(float secondsSinceLastSync)
        {
            if (_flying)
            {
                _vGravity.Z = 0.0f;
            }
            else
            {
                OpenMetaverse.Vector3 cforces = _cForcesAreLocal ? _cForces * _rotation : _cForces;

                //if we have an upward force, we need to start removing the energy from that before
                //adding negative force to vGravity
                if (_vForces.Z > 0.0f)
                {
                    _vForces.Z += (Settings.Instance.Gravity + cforces.Z) * secondsSinceLastSync;
                    if (_vForces.Z < 0.0f) _vForces.Z = 0.0f;
                }
                else if (Math.Abs(_vGravity.Z) < TERMINAL_VELOCITY_GRAVITY)
                {
                    _vGravity.Z += (Settings.Instance.Gravity + cforces.Z) * secondsSinceLastSync;
                }
            }
        }

        /// <summary>
        /// Applies the user's "stop" request to the computed velocity: a near-dead stop
        /// when flying/falling/standing, or a quartering of speed while walking/running.
        /// Returns the (possibly modified) velocity.
        /// </summary>
        private OpenMetaverse.Vector3 ApplyAirBrakes(OpenMetaverse.Vector3 velocity)
        {
            if (_brakes)
            {
                // The user has said to stop.
                if (_flying || !_colliding || _vTarget == OpenMetaverse.Vector3.Zero) // We are either flying or falling straight down. (Or standing still...)
                {
                    // Possible BUG: Could not be handling the case of falling down a steeply inclined surface - whether ground or prim. Cannot test because in IW you cannot fall down a steeply inclined plane!
                    // Dead stop. HACK: In SL a little bit of gravity sneaks in anyway. The constant comes from measuring that value.
                    _vForces = OpenMetaverse.Vector3.Zero;
                    _vGravity = OpenMetaverse.Vector3.Zero;
                    velocity = new OpenMetaverse.Vector3(0.0f, 0.0f, -0.217762f);
                }
                else // We are walking or running.
                {
                    // Slow down.
                    velocity *= 0.25f; // SL seems to just about quarter walk/run speeds according to tests run on 20151217.
                }
            }

            return velocity;
        }

        /// <summary>
        /// Applies a force or impulse to this character. The force is pre-divided by mass
        /// (so downstream math works in velocity units); constant forces replace the stored
        /// constant-force vector, impulses accumulate into _vForces. Local variants are
        /// interpreted in avatar-local space via _rotation.
        /// </summary>
        /// <param name="Force">Force/impulse vector (divided by mass on entry)</param>
        /// <param name="forceOffset">Offset of application (unused here)</param>
        /// <param name="type">Which kind of force to apply</param>
        public override void AddForceSync(OpenMetaverse.Vector3 Force, OpenMetaverse.Vector3 forceOffset, ForceType type)
        {
            Force /= _mass;

            switch (type)
            {
                case ForceType.ConstantLocalLinearForce:
                    _cForcesAreLocal = true;
                    _cForces = Force;
                    break;

                case ForceType.ConstantGlobalLinearForce:
                    _cForcesAreLocal = false;
                    _cForces = Force;
                    break;

                case ForceType.GlobalLinearImpulse:
                    _vForces += Force;
                    break;

                case ForceType.LocalLinearImpulse:
                    _vForces += Force * _rotation;
                    break;
            }
        }

        // Characters do not support linking; not part of this actor's feature set.
        public override void LinkToNewParent(PhysicsActor obj, OpenMetaverse.Vector3 localPos, OpenMetaverse.Quaternion localRot)
        {
            throw new NotImplementedException();
        }

        // Characters do not support offset repositioning; not part of this actor's feature set.
        public override void UpdateOffsetPosition(OpenMetaverse.Vector3 newOffset, OpenMetaverse.Quaternion rotOffset)
        {
            throw new NotImplementedException();
        }

        /// <summary>
        /// Tears down this character: notifies contacted prims, invalidates the PhysX
        /// controller cache, removes the character from the scene and disposes the
        /// native controller and hit-report delegator. Idempotent via _disposed.
        /// </summary>
        public void Dispose()
        {
            if (!_disposed)
            {
                InformCollidersOfRemoval();
                _controller.InvalidateCache();
                _scene.RemoveCharacterSync(this);
                _controller.Dispose();
                _hitReportDelegator.Dispose();
                _disposed = true;
            }
        }

        // Tells every prim currently in contact that this character is going away.
        private void InformCollidersOfRemoval()
        {
            foreach (var colliderKvp in _collidingPrims)
            {
                colliderKvp.Key.ContactedCharacterDeleted(this);
            }
        }

        /// <summary>
        /// PhysX controller callback: this character's controller touched a shape.
        /// Records the contact, updates the collision plane, and either kicks/rides
        /// physical prims or clears ride-on state.
        /// </summary>
        private void OnShapeHit(PhysX.ControllerShapeHit hit)
        {
            PhysicsActor otherActor = hit.Shape.Actor.UserData as PhysicsActor;

            UpdateCollisionPlane(hit);

            if (otherActor != null)
            {
                _collisionsLastFrame.Add(hit.Shape);

                if (otherActor.IsPhysical && otherActor.PhysicsActorType == ActorType.Prim && ((PhysxPrim)otherActor).CollisionGrp != CollisionGroupFlag.PhysicalPhantom)
                {
                    KickOrRideOnPrim(hit, otherActor);
                }
                else if (hit.Direction.Z != 0)
                {
                    _rideOnBehavior.AvatarNotStandingOnPrim();
                }
            }
            else if (hit.Shape.Actor.UserData is TerrainManager)
            {
                //ground
                _collisionsLastFrame.Add(hit.Shape);
                _rideOnBehavior.AvatarNotStandingOnPrim();
            }
        }

        // Minimum physics timestep expressed as a TimeSpan, used when moving other controllers.
        static readonly TimeSpan MIN_TIMESTEP_TS = TimeSpan.FromSeconds(MIN_TIMESTEP);

        /// <summary>
        /// Called by physx when this controller runs into another one
        /// </summary>
        /// <param name="hit"></param>
private void OnControllerHit(PhysX.ControllersHit hit)
        {
            // Push the other controller along the hit direction for one minimum timestep
            hit.Other.Move(hit.Direction * hit.Length, MIN_TIMESTEP_TS);
            _collisionsLastFrame.Add(hit.Other.Actor.Shapes.First());

            //let the other controller know we collided with them
            ((PhysxCharacter)hit.Other.Actor.UserData).HitByOtherCharacter(this, _controller.Actor.Shapes.First());
        }

        /// <summary>
        /// Called by another PhysxCharacter when it collides with this one
        /// </summary>
        /// <param name="physxCharacter"></param>
        /// <param name="shape"></param>
        private void HitByOtherCharacter(PhysxCharacter physxCharacter, PhysX.Shape shape)
        {
            _collisionsLastFrame.Add(shape);
        }

        /// <summary>
        /// Reacts to contact with a physical prim: either registers it as the prim we are
        /// standing on (if large enough) or pushes it away with an impulse proportional to
        /// our mass and the penetration length. Prims we are riding on are never kicked.
        /// </summary>
        private void KickOrRideOnPrim(PhysX.ControllerShapeHit hit, PhysicsActor otherActor)
        {
            PhysxPrim otherPrimActor = (PhysxPrim)otherActor;
            //m_log.InfoFormat("dir: {0} {1} {2}", hit.Direction, hit.WorldNormal, hit.WorldPosition);

            float coeff;
            if (hit.Direction.Z != 0)
            {
                // Vertical contact: big enough prims become a ride-on candidate
                if (otherPrimActor.PhysxExtents.X > MIN_RIDEON_HALF_EXTENTS || otherPrimActor.PhysxExtents.Y > MIN_RIDEON_HALF_EXTENTS ||
                    otherPrimActor.PhysxExtents.Z > MIN_RIDEON_HALF_EXTENTS)
                {
                    _rideOnBehavior.AvatarStandingOn(otherPrimActor);
                }
                else
                {
                    _rideOnBehavior.AvatarNotStandingOnPrim();
                }

                coeff = _mass * Math.Abs(Settings.Instance.Gravity) * MIN_TIMESTEP;
            }
            else
            {
                if (_rideOnBehavior.IsRideOnPrim(otherPrimActor))
                {
                    //dont kick the thing we're riding on
                    coeff = 0;
                }
                else
                {
                    //push the object out of the way at the appropriate rate
                    const float OTHER_PUSH_MULT = 100.0f;
                    coeff = _mass * hit.Length * OTHER_PUSH_MULT;
                }
            }

            //OpenMetaverse.Vector3.Negate(PhysUtil.PhysxVectorToOmv(hit.WorldNormal))
            if (coeff != 0.0f)
            {
                OpenMetaverse.Vector3 hitVector = PhysUtil.PhysxVectorToOmv(hit.WorldPosition);
                if (hit.Direction.Z == 0)
                {
                    //put the hit down the legs
                    hitVector.Z -= _height / 3.0f;
                }

                otherActor.AddForceSync(PhysUtil.PhysxVectorToOmv(hit.Direction) * coeff, hitVector, ForceType.GlobalLinearImpulse);
            }
        }

        /// <summary>
        /// For downward hits, stores the plane we are standing on (normal + distance form).
        /// A downward contact should never report a downward-facing normal; such normals
        /// are clamped to straight up.
        /// </summary>
        private void UpdateCollisionPlane(PhysX.ControllerShapeHit hit)
        {
            if (hit.Direction == -PhysX.Math.Vector3.UnitZ)
            {
                var omvNorm = PhysUtil.PhysxVectorToOmv(hit.WorldNormal);

                //we're colliding down, the collision normal should never have a negative Z
                if (omvNorm.Z < 0)
                {
                    omvNorm = OpenMetaverse.Vector3.UnitZ;
                }

                // Plane as (normal, d) where d = worldPosition . normal
                OpenMetaverse.Vector4 collPlane = new OpenMetaverse.Vector4(omvNorm,
                    OpenMetaverse.Vector3.Dot(PhysUtil.PhysxVectorToOmv(hit.WorldPosition), omvNorm));
                //m_log.InfoFormat("ColPlane: WorldNormal: {0}, WorldPosition: {1}", hit.WorldNormal, hit.WorldPosition);

                _collisionPlane = collPlane;
            }
        }

        /// <summary>
        /// Copies the current kinematic state out for a terse update. Characters have no
        /// angular velocity, so it is always zero.
        /// </summary>
        public override void GatherTerseUpdate(out OpenMetaverse.Vector3 position, out OpenMetaverse.Quaternion rotation,
            out OpenMetaverse.Vector3 velocity, out OpenMetaverse.Vector3 acceleration, out OpenMetaverse.Vector3 angularVelocity)
        {
            position = _position;
            rotation = _rotation;
            velocity = _velocity;
            acceleration = _acceleration;
            angularVelocity = OpenMetaverse.Vector3.Zero;
        }

        /// <summary>
        /// Called when a prim actor shape changes or an actor is deleted.
        /// This invalidates the controller cache to prevent PhysX crashes
        /// due to bugs
        /// </summary>
        /// <param name="prim">The prim that changed; only matters if we are in contact with it</param>
        internal void InvalidateControllerCacheIfContacting(PhysxPrim prim)
        {
            if (_collidingPrims.ContainsKey(prim))
            {
                _controller.InvalidateCache();
            }
        }

        /// <summary>
        /// Called by physx collision handling when this character has changed contact with a prim and this change was
        /// not triggered by the character controller (which keeps objects physically separated when reporting contacts)
        ///
        /// This means that when this method is called it is the result of a prim hitting a character, not the other way around
        /// </summary>
        /// <param name="contactPairHeader">PhysX pair header</param>
        /// <param name="pairs">PhysX contact pairs</param>
        /// <param name="ourActorIndex">The index number where this character appears in the pair</param>
        internal void OnContactChangeSync(PhysX.ContactPairHeader contactPairHeader, PhysX.ContactPair[] pairs, int ourActorIndex)
        {
            // Ignore notifications that involve an already-deleted actor
            if ((contactPairHeader.Flags & PhysX.ContactPairHeaderFlag.DeletedActor0) != 0 ||
                (contactPairHeader.Flags & PhysX.ContactPairHeaderFlag.DeletedActor1) != 0)
            {
                return;
            }

            foreach (var pair in pairs)
            {
                PhysX.Shape shape0 = pair.Shapes[0];
                PhysX.Shape shape1 = pair.Shapes[1];
                if ((shape0 == null) || (shape1 == null)) continue;

                // Pick whichever shape is NOT our own controller actor
                PhysX.Shape otherShape;
                if (shape0.Actor != _controller.Actor)
                {
                    otherShape = shape0;
                }
                else
                {
                    otherShape = shape1;
                }

                //m_log.DebugFormat("[CHAR]: Collision: {0}", pair.Events);

                PhysxPrim colPrim = otherShape.Actor.UserData as PhysxPrim;
                if (colPrim != null)
                {
                    if ((pair.Events & PhysX.PairFlag.NotifyTouchFound) != 0)
                    {
                        AddExternalCollidingPrimShape(otherShape, colPrim);
                    }
                    else if ((pair.Events & PhysX.PairFlag.NotifyTouchLost) != 0)
                    {
                        SetToRemoveAfterReport(otherShape, colPrim);
                    }
                }
            }
        }

        /// <summary>
        /// Handles loss of an externally tracked contact. If the contact was already
        /// reported, it can be removed now; otherwise it is flagged so the reporting
        /// pass removes it after emitting the (began+ended) pair.
        /// </summary>
        private void SetToRemoveAfterReport(PhysX.Shape otherShape, PhysxPrim colPrim)
        {
            ExternalReport report;
            if (_externalCollisionReports.TryGetValue(otherShape, out report))
            {
                if (report.Reported)
                {
                    //this collision was reported already. remove it
                    RemoveExternalCollidingPrimShape(otherShape, colPrim);
                }
                else
                {
                    //this collision hasn't been reported yet. make sure the
                    //collision processor knows to remove it after it is reported
                    report.RemoveAfterReport = true;
                }
            }
        }

        /// <summary>
        /// Drops an externally tracked shape. When a prim's last tracked shape goes away,
        /// the prim itself is forgotten and we unsubscribe from its OnDeleted event
        /// (paired with the subscription in AddExternalCollidingPrimShape).
        /// </summary>
        private void RemoveExternalCollidingPrimShape(PhysX.Shape otherShape, PhysxPrim colPrim)
        {
            _externalCollisionReports.Remove(otherShape);

            HashSet<PhysX.Shape> primShapes;
            if (_externalCollisionPrims.TryGetValue(colPrim, out primShapes))
            {
                primShapes.Remove(otherShape);
                if (primShapes.Count == 0)
                {
                    _externalCollisionPrims.Remove(colPrim);
                    colPrim.OnDeleted -= colPrim_OnDeleted;
                }
            }
        }

        /// <summary>
        /// Begins externally tracking a shape that contacted us. The first tracked shape
        /// of a prim also subscribes us to the prim's OnDeleted event so stale shapes are
        /// cleaned up if the prim vanishes before TouchLost arrives.
        /// </summary>
        private void AddExternalCollidingPrimShape(PhysX.Shape otherShape, PhysxPrim colPrim)
        {
            _externalCollisionReports[otherShape] = new ExternalReport { RemoveAfterReport = false, Reported = false };

            HashSet<PhysX.Shape> primShapes;
            if (!_externalCollisionPrims.TryGetValue(colPrim, out primShapes))
            {
                primShapes = new HashSet<PhysX.Shape>();
                _externalCollisionPrims.Add(colPrim, primShapes);
                colPrim.OnDeleted += colPrim_OnDeleted;
            }

            primShapes.Add(otherShape);
        }

        /// <summary>
        /// Called when one of the prims we're doing external tracking on is deleted or makes
        /// a shape/state change and our collision data becomes invalid
        /// </summary>
        /// <param name="obj"></param>
        void colPrim_OnDeleted(PhysxPrim obj)
        {
            HashSet<PhysX.Shape> shapes;
            if (_externalCollisionPrims.TryGetValue(obj, out shapes))
            {
                foreach (var shape in shapes)
                {
                    _externalCollisionReports.Remove(shape);
                }

                _externalCollisionPrims.Remove(obj);
            }
        }

        // Queues a command that marks this character suspended on the physics thread.
        public override void Suspend()
        {
            _scene.QueueCommand(new Commands.GenericSyncCmd(
                (PhysxScene scene) =>
                {
                    _suspended = true;
                }
            ));
        }

        // Queues a command that clears the suspended flag and then fires the callback.
        public override void Resume(bool interpolate, AfterResumeCallback callback)
        {
            _scene.QueueCommand(new Commands.GenericSyncCmd(
                (PhysxScene scene) =>
                {
                    _suspended = false;
                    if (callback != null) callback();
                }
            ));
        }
    }
}
#region Using Statements
using System;
using System.Collections.Generic;
using WaveEngine.Common;
using WaveEngine.Common.Graphics;
using WaveEngine.Common.Math;
using WaveEngine.Components.Animation;
using WaveEngine.Components.Cameras;
using WaveEngine.Components.Graphics2D;
using WaveEngine.Components.Graphics3D;
using WaveEngine.Framework;
using WaveEngine.Framework.Graphics;
using WaveEngine.Framework.Physics2D;
using WaveEngine.Framework.Resources;
using WaveEngine.Framework.Services;
#endregion

namespace PerPixelColliderProject
{
    /// <summary>
    /// Overall game state: the ship is flying (Playing) or the game is paused
    /// between rounds (Waiting).
    /// </summary>
    public enum SampleState
    {
        Playing,
        Waiting
    }

    /// <summary>
    /// Side-scrolling per-pixel-collision sample scene: a ship flies over scrolling
    /// ground while asteroids stream past; hitting anything triggers an explosion.
    /// </summary>
    public class MyScene : Scene
    {
        // Gameplay tuning constants
        public const int MAXOBSTACLES = 6;
        public const int WAITINGTIME = 2000;
        public const float SCROLLACCELERATION = -3;
        public const float SCROLLSPEED = -150;
        public const float SCROLLWIDTH = 1800;

        // Content asset paths
        private const string TEXTURESHIP = "Content/spaceShip.wpk";
        private const string TEXTURESHIPBURST = "Content/spaceShipBurst.wpk";
        private const string TEXTUREOBSTACLE = "Content/asteroid.wpk";
        private const string TEXTUREGROUND = "Content/landScape.wpk";
        private const string TEXTUREBACKGROUND = "Content/background.wpk";
        private const string TEXTUREEXPLOSION = "Content/explosionSpriteSheet.wpk";
        private const string EXPLOSIONSPRITESHEET = "Content/explosionSpriteSheet.xml";

        // Scene entities (public so sibling behaviors can reach them)
        public IList<Entity> obstacles;
        public Entity ship;
        public Entity ground, ground2, ground3;   // three tiles scrolled in a loop
        public Entity explosion;
        private SampleState state;
        public int countDown;

        /// <summary>
        /// Current game state. Setting it re-initializes the scene for that state:
        /// Playing respawns obstacles at randomized heights, re-enables the ship and
        /// ground scroll; Waiting deactivates everything and resets the countdown.
        /// </summary>
        public SampleState State
        {
            get
            {
                return this.state;
            }

            set
            {
                this.state = value;

                if (state == SampleState.Playing)
                {
                    // Sets playing initial state
                    int i = 0;
                    float step = (SCROLLWIDTH / MAXOBSTACLES);
                    foreach (var obstacle in this.obstacles)
                    {
                        obstacle.IsActive = true;
                        var transform = obstacle.FindComponent<Transform2D>();
                        // Spread obstacles evenly, starting 3 steps ahead of the ship
                        transform.X = step * (i + 3);
                        transform.Y = (float)WaveServices.Random.NextDouble() * WaveServices.Platform.ScreenHeight;
                        i++;
                    }

                    ScrollBehavior.ScrollSpeed = SCROLLSPEED;
                    this.ground.IsActive = true;
                    this.ground2.IsActive = true;
                    this.ground3.IsActive = true;

                    this.ship.Enabled = true;
                    this.ship.FindComponent<Transform2D>().Y = 0;
                    this.ship.FindComponent<ShipBehavior>().Reset();
                    this.explosion.Enabled = false;
                }
                else if (state == SampleState.Waiting)
                {
                    // Sets waiting initial state
                    foreach (var obstacles in this.obstacles)
                    {
                        obstacles.IsActive = false;
                    }

                    this.ship.Enabled = false;
                    this.ground.IsActive = false;
                    this.ground2.IsActive = false;
                    this.ground3.IsActive = false;
                    this.countDown = 0;
                }
            }
        }

        /// <summary>
        /// Builds the scene: camera, explosion, ship, three 1024px-spaced ground tiles,
        /// obstacles and background, then starts in the Playing state with the collision
        /// scene behavior running after each update.
        /// </summary>
        protected override void CreateScene()
        {
            FixedCamera2D camera2d = new FixedCamera2D("camera");
            EntityManager.Add(camera2d);

            this.CreateExplosion();
            this.CreateShip();

            Vector2 topLeftCorner = Vector2.Zero;
            WaveServices.ViewportManager.RecoverPosition(ref topLeftCorner);

            this.ground = this.CreateGround("ground1", topLeftCorner.X);
            this.ground2 = this.CreateGround("ground2", topLeftCorner.X + 1024);
            this.ground3 = this.CreateGround("ground3", topLeftCorner.X + 2048);
            this.CreateObstacles();
            this.CreateBackground();

            this.state = SampleState.Playing;
            ScrollBehavior.ScrollSpeed = SCROLLSPEED;

            this.AddSceneBehavior(new CollisionSceneBehavior(), SceneBehavior.Order.PostUpdate);
        }

        /// <summary>
        /// Adds the static background sprite, scaled from its native 256px texture to
        /// fill the whole viewport, drawn behind everything in the opaque layer.
        /// </summary>
        private void CreateBackground()
        {
            Vector2 corner = Vector2.Zero;
            WaveServices.ViewportManager.RecoverPosition(ref corner);

            var transform = new Transform2D()
            {
                X = corner.X,
                Y = corner.Y,
                DrawOrder = 1,
                XScale = WaveServices.ViewportManager.ScreenWidth / (WaveServices.ViewportManager.RatioX * (float)256),
                YScale = WaveServices.ViewportManager.ScreenHeight / (WaveServices.ViewportManager.RatioY * (float)256)
            };

            var background = new Entity("backGround")
                .AddComponent(transform)
                .AddComponent(new Sprite(TEXTUREBACKGROUND))
                .AddComponent(new SpriteRenderer(DefaultLayers.Opaque));

            this.EntityManager.Add(background);
        }

        /// <summary>
        /// Creates MAXOBSTACLES asteroid entities with per-pixel colliders, random
        /// heights and random scale in [0.5, 2.5), evenly spaced along the scroll width
        /// and wrapped around by their ScrollBehavior.
        /// </summary>
        private void CreateObstacles()
        {
            this.obstacles = new List<Entity>();

            float x;
            float step = SCROLLWIDTH / MAXOBSTACLES;
            for (int i = 0; i < MAXOBSTACLES; i++)
            {
                x = step * (i + 3);
                float y = (float)(WaveServices.Random.NextDouble() * WaveServices.ViewportManager.VirtualHeight / WaveServices.ViewportManager.RatioY);
                float scale = ((float)WaveServices.Random.NextDouble() * 2f) + 0.5f;

                var obstacle = new Entity("obstacle_" + i)
                    .AddComponent(new Transform2D()
                    {
                        X = x,
                        Y = y,
                        XScale = scale,
                        YScale = scale,
                        Origin = new WaveEngine.Common.Math.Vector2(0.5f, 0.5f)
                    })
                    .AddComponent(new PerPixelCollider(TEXTUREOBSTACLE, 0))
                    .AddComponent(new Sprite(TEXTUREOBSTACLE))
                    .AddComponent(new SpriteRenderer(DefaultLayers.Alpha))
                    .AddComponent(new ScrollBehavior(SCROLLWIDTH, true, true));

                this.obstacles.Add(obstacle);
                this.EntityManager.Add(obstacle);
            }
        }

        /// <summary>
        /// Creates one scrolling ground tile anchored 128px above the bottom edge of the
        /// viewport, with a per-pixel collider so the ship can crash into the landscape.
        /// </summary>
        /// <param name="name">Entity name (must be unique per tile)</param>
        /// <param name="x">Initial horizontal position of the tile</param>
        /// <returns>The ground entity, already added to the entity manager</returns>
        private Entity CreateGround(string name, float x)
        {
            Vector2 bottomLeft = new Vector2(0, WaveServices.Platform.ScreenHeight);
            WaveServices.ViewportManager.RecoverPosition(ref bottomLeft);

            var ground = new Entity(name)
                .AddComponent(new Transform2D()
                {
                    X = x,
                    Y = bottomLeft.Y - 128
                })
                .AddComponent(new PerPixelCollider(TEXTUREGROUND, 0))
                .AddComponent(new Sprite(TEXTUREGROUND))
                .AddComponent(new SpriteRenderer(DefaultLayers.Alpha))
                .AddComponent(new ScrollBehavior(3072, false, false));

            this.EntityManager.Add(ground);

            return ground;
        }

        /// <summary>
        /// Creates the player ship (per-pixel collider + ShipBehavior) with a disabled
        /// additive engine-burst child sprite that can be toggled by the behavior.
        /// </summary>
        private void CreateShip()
        {
            this.ship = new Entity("ship")
                .AddComponent(new Transform2D() { X = 100, Y = 32, Origin = new Vector2(0.5f) })
                .AddComponent(new PerPixelCollider(TEXTURESHIP, 0))
                .AddComponent(new Sprite(TEXTURESHIP))
                .AddComponent(new SpriteRenderer(DefaultLayers.Alpha))
                .AddComponent(new ShipBehavior());

            var shipBurst = new Entity("shipBurst")
                .AddComponent(new Transform2D() { Origin = new Vector2(0.5f) })
                .AddComponent(new Sprite(TEXTURESHIPBURST))
                .AddComponent(new SpriteRenderer(DefaultLayers.Additive));
            shipBurst.Enabled = false;

            this.ship.AddChild(shipBurst);

            this.EntityManager.Add(this.ship);
        }

        /// <summary>
        /// Creates the (initially disabled) explosion entity with its 16-frame,
        /// 16 fps sprite-sheet animation.
        /// </summary>
        private void CreateExplosion()
        {
            this.explosion = new Entity("boom")
                .AddComponent(new Transform2D() { XScale = 3, YScale = 2.5f, Origin = new Vector2(0.5f) })
                .AddComponent(new Sprite(TEXTUREEXPLOSION))
                .AddComponent(Animation2D.Create<TexturePackerGenericXml>(EXPLOSIONSPRITESHEET)
                    .Add("Explosion", new SpriteSheetAnimationSequence() { First = 1, Length = 16, FramesPerSecond = 16 }))
                .AddComponent(new AnimatedSpriteRenderer());
            this.explosion.Enabled = false;

            this.EntityManager.Add(this.explosion);
        }

        /// <summary>
        /// Positions the explosion on top of the ship and plays its animation once.
        /// </summary>
        public void Explosion()
        {
            // Creates the explosions and adjusts to the ship position.
            this.explosion.Enabled = true;
            var explosionTransform = this.explosion.FindComponent<Transform2D>();
            var shipTransform = this.ship.FindComponent<Transform2D>();

            explosionTransform.X = shipTransform.X;
            explosionTransform.Y = shipTransform.Y;

            var anim2D = this.explosion.FindComponent<Animation2D>();
            anim2D.CurrentAnimation = "Explosion";
            anim2D.Play(false);
        }
    }
}
using System;
using System.Web;
using System.Web.Security;
using System.Web.Services;
using System.ComponentModel;
using System.Web.Script.Services;
using System.Xml;
using System.Xml.Xsl;
using System.IO;
using System.Text.RegularExpressions;
using System.Net;
using System.Web.UI;
using Umbraco.Core;
using Umbraco.Core.IO;
using Umbraco.Web.UI;
using Umbraco.Web;
using Umbraco.Web.Cache;
using Umbraco.Web.WebServices;
using umbraco.BusinessLogic;
using umbraco.businesslogic.Exceptions;
using umbraco.cms.businesslogic.web;
using umbraco.cms.businesslogic.media;
using umbraco.BasePages;

namespace umbraco.presentation.webservices
{
    /// <summary>
    /// Summary description for legacyAjaxCalls
    /// </summary>
    [WebService(Namespace = "http://umbraco.org/webservices")]
    [WebServiceBinding(ConformsTo = WsiProfiles.BasicProfile1_1)]
    [ToolboxItem(false)]
    [ScriptService]
    public class legacyAjaxCalls : UmbracoAuthorizedWebService
    {
        // NOTE(review): this field is never read or assigned in this file — possibly dead.
        private User _currentUser;

        /// <summary>
        /// Validates the supplied credentials and, on success, logs the user in
        /// for the current backoffice session.
        /// </summary>
        [WebMethod]
        public bool ValidateUser(string username, string password)
        {
            if (ValidateCredentials(username, password))
            {
                var u = new BusinessLogic.User(username);
                BasePage.doLogin(u);
                return true;
            }
            return false;
        }

        /// <summary>
        /// method to accept a string value for the node id. Used for trees such as python
        /// and xslt since the file names are the node IDs
        /// </summary>
        /// <param name="nodeId">Either an integer node id or a file name, depending on the tree</param>
        /// <param name="alias"></param>
        /// <param name="nodeType"></param>
        [WebMethod]
        [ScriptMethod]
        public void Delete(string nodeId, string alias, string nodeType)
        {
            if (!AuthorizeRequest()) return;

            //check which parameters to pass depending on the types passed in
            int intNodeId;
            // Fix for #26965 - numeric member login gets parsed as nodeId
            if (int.TryParse(nodeId, out intNodeId) && nodeType != "member")
            {
                LegacyDialogHandler.Delete(
                    new HttpContextWrapper(HttpContext.Current),
                    UmbracoUser,
                    nodeType, intNodeId, alias);
            }
            else
            {
                // Non-numeric (or member) ids are passed through as the alias with id 0
                LegacyDialogHandler.Delete(
                    new HttpContextWrapper(HttpContext.Current),
                    UmbracoUser,
                    nodeType, 0, nodeId);
            }
        }

        /// <summary>
        /// Permanently deletes a document/media object.
        /// Used to remove an item from the recycle bin.
        /// </summary>
        /// <param name="nodeId">Node id; must parse as an integer</param>
        /// <param name="nodeType">"media"/"mediaRecycleBin" for media, anything else is treated as content</param>
        [WebMethod]
        [ScriptMethod]
        public void DeleteContentPermanently(string nodeId, string nodeType)
        {
            int intNodeId;
            if (int.TryParse(nodeId, out intNodeId))
            {
                switch (nodeType)
                {
                    case "media":
                    case "mediaRecycleBin":
                        //ensure user has access to media
                        AuthorizeRequest(DefaultApps.media.ToString(), true);
                        new Media(intNodeId).delete(true);
                        break;
                    case "content":
                    case "contentRecycleBin":
                    default:
                        //ensure user has access to content
                        AuthorizeRequest(DefaultApps.content.ToString(), true);
                        new Document(intNodeId).delete(true);
                        break;
                }
            }
            else
            {
                throw new ArgumentException("The nodeId argument could not be parsed to an integer");
            }
        }

        /// <summary>
        /// Disables the backoffice user with the given id (requires users-section access).
        /// </summary>
        [WebMethod]
        [ScriptMethod]
        public void DisableUser(int userId)
        {
            AuthorizeRequest(DefaultApps.users.ToString(), true);

            BusinessLogic.User.GetUser(userId).disable();
        }

        /// <summary>
        /// Returns the display name of the node with the given id.
        /// </summary>
        [WebMethod]
        [ScriptMethod]
        public string GetNodeName(int nodeId)
        {
            AuthorizeRequest(true);

            return new cms.businesslogic.CMSNode(nodeId).Text;
        }

        /// <summary>
        /// Returns the node's ancestor names from the root down to (and including) the node itself.
        /// </summary>
        [WebMethod]
        [ScriptMethod]
        public string[] GetNodeBreadcrumbs(int nodeId)
        {
            AuthorizeRequest(true);

            var node = new cms.businesslogic.CMSNode(nodeId);
            var crumbs = new System.Collections.Generic.List<string>() { node.Text };
            // Walk up the tree collecting names, then reverse into root-first order
            while (node != null && node.Level > 1)
            {
                node = node.Parent;
                crumbs.Add(node.Text);
            }
            crumbs.Reverse();
            return crumbs.ToArray();
        }

        /// <summary>
        /// Returns the friendly URL of the node with the given id.
        /// </summary>
        [WebMethod]
        [ScriptMethod]
        public string NiceUrl(int nodeId)
        {
            AuthorizeRequest(true);

            return library.NiceUrl(nodeId);
        }

        /// <summary>
        /// Returns the progress status stored in application state.
        /// NOTE(review): the <paramref name="Key"/> parameter is ignored — the lookup key is
        /// read from the request's "key" value instead. Looks like a legacy quirk; confirm
        /// against callers before changing.
        /// </summary>
        [WebMethod]
        [ScriptMethod]
        public string ProgressStatus(string Key)
        {
            AuthorizeRequest(true);

            return Application[helper.Request("key")].ToString();
        }

        /// <summary>
        /// Extends the current backoffice login timeout.
        /// </summary>
        [WebMethod]
        [ScriptMethod]
        public void RenewUmbracoSession()
        {
            AuthorizeRequest(true);

            BasePage.RenewLoginTimeout();
        }

        /// <summary>
        /// Returns the number of seconds remaining before the current user is logged out.
        /// </summary>
        [WebMethod]
        [ScriptMethod]
        public int GetSecondsBeforeUserLogout()
        {
            //TODO: Change this to not throw an exception otherwise we end up with JS errors all the time when recompiling!!
            AuthorizeRequest(true);

            var timeout = BasePage.GetTimeout(true);
            var timeoutDate = new DateTime(timeout);
            var currentDate = DateTime.Now;

            return (int)timeoutDate.Subtract(currentDate).TotalSeconds;
        }

        /// <summary>
        /// Returns the master-page content container element of a template relative to its master.
        /// </summary>
        [WebMethod]
        [ScriptMethod]
        public string TemplateMasterPageContentContainer(int templateId, int masterTemplateId)
        {
            AuthorizeRequest(DefaultApps.settings.ToString(), true);

            return new cms.businesslogic.template.Template(templateId).GetMasterContentElement(masterTemplateId);
        }

        /// <summary>
        /// Saves an editor file, dispatching on fileType ("xslt", "python", "css",
        /// "script" or "template") after authorizing the matching backoffice section.
        /// Returns "true" on success or an error description (for xslt).
        /// </summary>
        [WebMethod]
        [ScriptMethod]
        public string SaveFile(string fileName, string fileAlias, string fileContents, string fileType, int fileID, int masterID, bool ignoreDebug)
        {
            switch (fileType)
            {
                case "xslt":
                    AuthorizeRequest(DefaultApps.developer.ToString(), true);
                    return SaveXslt(fileName, fileContents, ignoreDebug);
                case "python":
                    // NOTE(review): python saving is a no-op here; it always reports success.
                    AuthorizeRequest(DefaultApps.developer.ToString(), true);
                    return "true";
                case "css":
                    AuthorizeRequest(DefaultApps.settings.ToString(), true);
                    return SaveCss(fileName, fileContents, fileID);
                case "script":
                    AuthorizeRequest(DefaultApps.settings.ToString(), true);
                    return SaveScript(fileName, fileContents);
                case "template":
                    AuthorizeRequest(DefaultApps.settings.ToString(), true);
                    return SaveTemplate(fileName, fileAlias, fileContents, fileID, masterID);
                default:
                    throw new ArgumentException(String.Format("Invalid fileType passed: '{0}'", fileType));
            }
        }

        /// <summary>
        /// Tidies an HTML fragment.
        /// NOTE(review): the <paramref name="textToTidy"/> parameter is ignored — the input is
        /// read from the request's "StringToTidy" value instead. Confirm before changing.
        /// </summary>
        public string Tidy(string textToTidy)
        {
            AuthorizeRequest(true);

            return library.Tidy(helper.Request("StringToTidy"), true);
        }

        /// <summary>
        /// Persists a stylesheet's content and name to its .css file.
        /// Returns "true" on success; wraps any failure in a generic exception.
        /// </summary>
        private static string SaveCss(string fileName, string fileContents, int fileId)
        {
            string returnValue;
            var stylesheet = new StyleSheet(fileId)
            {
                Content = fileContents,
                Text = fileName
            };

            try
            {
                stylesheet.saveCssToFile();
                returnValue = "true";
            }
            catch (Exception ee)
            {
                throw new Exception("Couldn't save file", ee);
            }

            return returnValue;
        }

        /// <summary>
        /// Saves an XSLT file. Unless debugging is skipped, the stylesheet is first written
        /// to a temp file and test-transformed against the site's first root document; any
        /// compile error is turned into an HTML-formatted message with the offending lines
        /// highlighted. On success (and a ".xslt" name inside the xslt directory) the real
        /// file is written and "true" is returned; otherwise the error text is returned.
        /// </summary>
        private string SaveXslt(string fileName, string fileContents, bool ignoreDebugging)
        {
            var tempFileName = IOHelper.MapPath(SystemDirectories.Xslt + "/" + System.DateTime.Now.Ticks + "_temp.xslt");
            using (var sw = File.CreateText(tempFileName))
            {
                sw.Write(fileContents);
                sw.Close();
            }

            // Test the xslt
            var errorMessage = "";

            if (!ignoreDebugging)
            {
                try
                {
                    // Check if there's any documents yet
                    if (content.Instance.XmlContent.SelectNodes("/root/node").Count > 0)
                    {
                        var macroXml = new XmlDocument();
                        macroXml.LoadXml("<macro/>");

                        var macroXslt = new XslCompiledTransform();
                        var umbPage = new page(content.Instance.XmlContent.SelectSingleNode("//node [@parentID = -1]"));

                        var xslArgs = macro.AddMacroXsltExtensions();
                        var lib = new library(umbPage);
                        xslArgs.AddExtensionObject("urn:umbraco.library", lib);
                        HttpContext.Current.Trace.Write("umbracoMacro", "After adding extensions");

                        // Add the current node
                        xslArgs.AddParam("currentPage", "", library.GetXmlNodeById(umbPage.PageID.ToString()));

                        HttpContext.Current.Trace.Write("umbracoMacro", "Before performing transformation");

                        // Create reader and load XSL file
                        // We need to allow custom DTD's, useful for defining an ENTITY
                        var readerSettings = new XmlReaderSettings();
                        readerSettings.ProhibitDtd = false;
                        using (var xmlReader = XmlReader.Create(tempFileName, readerSettings))
                        {
                            var xslResolver = new XmlUrlResolver
                            {
                                Credentials = CredentialCache.DefaultCredentials
                            };
                            macroXslt.Load(xmlReader, XsltSettings.TrustedXslt, xslResolver);
                            xmlReader.Close();
                            // Try to execute the transformation
                            var macroResult = new HtmlTextWriter(new StringWriter());
                            macroXslt.Transform(macroXml, xslArgs, macroResult);
                            macroResult.Close();
                        }
                    }
                    else
                    {
                        errorMessage = "stub";
                    }
                }
                catch (Exception errorXslt)
                {
                    errorMessage = (errorXslt.InnerException ?? errorXslt).ToString();

                    // Full error message
                    errorMessage = errorMessage.Replace("\n", "<br/>\n");

                    // Find error: the compiler message embeds "line,char" positions
                    var m = Regex.Matches(errorMessage, @"\d*[^,],\d[^\)]", RegexOptions.IgnoreCase | RegexOptions.IgnorePatternWhitespace);
                    foreach (Match mm in m)
                    {
                        var errorLine = mm.Value.Split(',');

                        if (errorLine.Length > 0)
                        {
                            var theErrorLine = int.Parse(errorLine[0]);
                            var theErrorChar = int.Parse(errorLine[1]);

                            // Render a two-lines-of-context excerpt with the error position underlined
                            errorMessage = "Error in XSLT at line " + errorLine[0] + ", char " + errorLine[1] + "<br/>";
                            errorMessage += "<span style=\"font-family: courier; font-size: 11px;\">";

                            var xsltText = fileContents.Split("\n".ToCharArray());
                            for (var i = 0; i < xsltText.Length; i++)
                            {
                                if (i >= theErrorLine - 3 && i <= theErrorLine + 1)
                                    if (i + 1 == theErrorLine)
                                    {
                                        errorMessage += "<b>" + (i + 1) + ": &gt;&gt;&gt;&nbsp;&nbsp;" + Server.HtmlEncode(xsltText[i].Substring(0, theErrorChar));
                                        errorMessage += "<span style=\"text-decoration: underline; border-bottom: 1px solid red\">" + Server.HtmlEncode(xsltText[i].Substring(theErrorChar, xsltText[i].Length - theErrorChar)).Trim() + "</span>";
                                        errorMessage += " &lt;&lt;&lt;</b><br/>";
                                    }
                                    else
                                        errorMessage += (i + 1) + ": &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;" + Server.HtmlEncode(xsltText[i]) + "<br/>";
                            }
                            errorMessage += "</span>";
                        }
                    }
                }
            }

            if (errorMessage == "" && fileName.ToLower().EndsWith(".xslt"))
            {
                //Hardcoded security-check... only allow saving files in xslt directory...
                var savePath = IOHelper.MapPath(SystemDirectories.Xslt + "/" + fileName);

                if (savePath.StartsWith(IOHelper.MapPath(SystemDirectories.Xslt)))
                {
                    using (var sw = File.CreateText(savePath))
                    {
                        sw.Write(fileContents);
                        sw.Close();
                    }
                    errorMessage = "true";
                }
                else
                {
                    errorMessage = "Illegal path";
                }
            }

            File.Delete(tempFileName);

            return errorMessage;
        }

        /// <summary>
        /// Saves a script file under the scripts directory. Paths that map outside the
        /// scripts directory are rejected. Returns "true" on success.
        /// </summary>
        private static string SaveScript(string filename, string contents)
        {
            var val = contents;
            string returnValue;
            try
            {
                var savePath = IOHelper.MapPath(SystemDirectories.Scripts + "/" + filename);

                //Directory check.. only allow files in script dir and below to be edited
                if (savePath.StartsWith(IOHelper.MapPath(SystemDirectories.Scripts + "/")))
                {
                    using (var sw = File.CreateText(IOHelper.MapPath(SystemDirectories.Scripts + "/" + filename)))
                    {
                        sw.Write(val);
                        sw.Close();
                        returnValue = "true";
                    }
                }
                else
                {
                    throw new ArgumentException("Couldnt save to file - Illegal path");
                }
            }
            catch (Exception ex)
            {
                throw new ArgumentException(String.Format("Couldnt save to file '{0}'", filename), ex);
            }

            return returnValue;
        }

        /// <summary>
        /// Updates and saves a template's name, alias, master template and design.
        /// Returns "true" (failures surface as exceptions from Save()).
        /// </summary>
        private static string SaveTemplate(string templateName, string templateAlias, string templateContents, int templateID, int masterTemplateID)
        {
            var tp = new cms.businesslogic.template.Template(templateID);
            var retVal = "false";

            tp.Text = templateName;
            tp.Alias = templateAlias;
            tp.MasterTemplate = masterTemplateID;
            tp.Design = templateContents;
            tp.Save();

            retVal = "true";

            return retVal;
        }

        [Obsolete("You should use the AuthorizeRequest methods on the base class of UmbracoAuthorizedWebService and ensure you inherit from that class for umbraco asmx web services")]
        public static void Authorize()
        {
            // check for secure connection
            if (GlobalSettings.UseSSL && !HttpContext.Current.Request.IsSecureConnection)
                throw new UserAuthorizationException("This installation requires a secure connection (via SSL). Please update the URL to include https://");

            if (!BasePage.ValidateUserContextID(BasePages.BasePage.umbracoUserContextID))
                throw new Exception("Client authorization failed. User is not logged in");
        }
    }
}
/***************************************************************************
 *  TrayLib.cs
 *
 *  Copyright (C) 2007 Novell, Inc.
 ****************************************************************************/

/*  THIS FILE IS LICENSED UNDER THE MIT LICENSE AS OUTLINED IMMEDIATELY BELOW:
 *
 *  Permission is hereby granted, free of charge, to any person obtaining a
 *  copy of this software and associated documentation files (the "Software"),
 *  to deal in the Software without restriction, including without limitation
 *  the rights to use, copy, modify, merge, publish, distribute, sublicense,
 *  and/or sell copies of the Software, and to permit persons to whom the
 *  Software is furnished to do so, subject to the following conditions:
 *
 *  The above copyright notice and this permission notice shall be included in
 *  all copies or substantial portions of the Software.
 *
 *  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 *  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 *  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 *  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 *  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 *  FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 *  DEALINGS IN THE SOFTWARE.
 */

using System;
using System.Runtime.InteropServices;
using Gtk;
using Gdk;

namespace Egg
{
	/// <summary>
	/// A freedesktop.org system-tray icon: a Gtk.Plug that locates the owner of
	/// the screen's _NET_SYSTEM_TRAY_S&lt;n&gt; selection and asks it to dock this
	/// window by sending a _NET_SYSTEM_TRAY_OPCODE client message.
	/// </summary>
	public class TrayIcon : Plug
	{
		// X atom for the per-screen tray selection (_NET_SYSTEM_TRAY_S<screen>).
		int selection_atom;
		// X atom for the tray protocol opcode messages.
		int system_tray_opcode_atom;
		// The current tray-manager window, IntPtr.Zero when no manager is running.
		IntPtr manager_window;

		public TrayIcon (string name)
		{
			Title = name;
			// Watch property changes so manager updates can be noticed.
			AddEvents ((int)EventMask.PropertyChangeMask);
		}

		protected override void OnRealized ()
		{
			base.OnRealized ();

			// Resolve the atoms on this screen's X display, then try to dock.
			Display display = Screen.Display;
			IntPtr xdisplay = gdk_x11_display_get_xdisplay (display.Handle);

			selection_atom = XInternAtom (xdisplay, "_NET_SYSTEM_TRAY_S" + Screen.Number.ToString (), false);
			system_tray_opcode_atom = XInternAtom (xdisplay, "_NET_SYSTEM_TRAY_OPCODE", false);

			UpdateManagerWindow ();
		}

		protected override void OnUnrealized ()
		{
			base.OnUnrealized ();
		}

		/// <summary>
		/// Looks up the current tray-manager window and, when one exists,
		/// subscribes to its events and requests that this plug be docked.
		/// </summary>
		private void UpdateManagerWindow ()
		{
			IntPtr xdisplay = gdk_x11_display_get_xdisplay (Display.Handle);

			// Grab the server so the selection owner cannot change between the
			// ownership query and our event-mask selection.
			XGrabServer (xdisplay);

			manager_window = XGetSelectionOwner (xdisplay, selection_atom);
			if (manager_window != IntPtr.Zero)
				XSelectInput (xdisplay, manager_window, EventMask.StructureNotifyMask | EventMask.PropertyChangeMask);

			XUngrabServer (xdisplay);
			XFlush (xdisplay);

			if (manager_window != IntPtr.Zero) {
				SendDockRequest ();
				GetOrientationProperty ();
			}
		}

		/// <summary>Asks the tray manager to dock this plug's X window.</summary>
		private void SendDockRequest ()
		{
			SendManagerMessage (SystemTrayMessage.RequestDock, manager_window, Id, 0, 0);
		}

		/// <summary>
		/// Sends a _NET_SYSTEM_TRAY_OPCODE client message to the given window.
		/// </summary>
		private void SendManagerMessage (SystemTrayMessage message, IntPtr window, uint data1, uint data2, uint data3)
		{
			XClientMessageEvent xev = new XClientMessageEvent ();
			xev.type = XEventName.ClientMessage;
			xev.window = window;
			xev.message_type = (IntPtr)system_tray_opcode_atom;
			xev.format = 32;
			xev.ptr1 = gdk_x11_get_server_time (GdkWindow.Handle);
			xev.ptr2 = (IntPtr)message;
			xev.ptr3 = (IntPtr)data1;
			xev.ptr4 = (IntPtr)data2;
			xev.ptr5 = (IntPtr)data3;

			IntPtr xdisplay = gdk_x11_display_get_xdisplay (Display.Handle);

			// Trap X errors: the manager window may have vanished by now.
			gdk_error_trap_push ();
			XSendEvent (xdisplay, manager_window, false, EventMask.NoEventMask, ref xev);
			gdk_error_trap_pop ();
		}

		private void GetOrientationProperty ()
		{
			//TODO: Implement;
		}

		[DllImport ("gdk-x11-2.0")]
		static extern IntPtr gdk_x11_display_get_xdisplay (IntPtr display);

		[DllImport ("gdk-x11-2.0")]
		static extern IntPtr gdk_x11_get_server_time (IntPtr window);

		[DllImport ("gdk-x11-2.0")]
		static extern void gdk_error_trap_push ();

		[DllImport ("gdk-x11-2.0")]
		static extern void gdk_error_trap_pop ();

		[DllImport ("libX11", EntryPoint="XInternAtom")]
		extern static int XInternAtom (IntPtr display, string atom_name, bool only_if_exists);

		[DllImport ("libX11")]
		extern static void XGrabServer (IntPtr display);

		[DllImport ("libX11")]
		extern static void XUngrabServer (IntPtr display);

		[DllImport ("libX11")]
		extern static int XFlush (IntPtr display);

		[DllImport ("libX11")]
		extern static IntPtr XGetSelectionOwner (IntPtr display, int atom);

		// NOTE: parameter names follow the Xlib signature (display first); the
		// call sites already pass the display as the first argument.
		[DllImport ("libX11")]
		extern static IntPtr XSelectInput (IntPtr display, IntPtr window, EventMask mask);

		[DllImport ("libX11", EntryPoint="XSendEvent")]
		extern static int XSendEvent (IntPtr display, IntPtr window, bool propagate, EventMask event_mask, ref XClientMessageEvent send_event);
	}

	[Flags]
	internal enum EventMask
	{
		NoEventMask             = 0,
		KeyPressMask            = 1<<0,
		KeyReleaseMask          = 1<<1,
		ButtonPressMask         = 1<<2,
		ButtonReleaseMask       = 1<<3,
		EnterWindowMask         = 1<<4,
		LeaveWindowMask         = 1<<5,
		PointerMotionMask       = 1<<6,
		PointerMotionHintMask   = 1<<7,
		Button1MotionMask       = 1<<8,
		Button2MotionMask       = 1<<9,
		Button3MotionMask       = 1<<10,
		Button4MotionMask       = 1<<11,
		Button5MotionMask       = 1<<12,
		ButtonMotionMask        = 1<<13,
		KeymapStateMask         = 1<<14,
		ExposureMask            = 1<<15,
		VisibilityChangeMask    = 1<<16,
		StructureNotifyMask     = 1<<17,
		ResizeRedirectMask      = 1<<18,
		SubstructureNotifyMask  = 1<<19,
		SubstructureRedirectMask= 1<<20,
		FocusChangeMask         = 1<<21,
		PropertyChangeMask      = 1<<22,
		ColormapChangeMask      = 1<<23,
		OwnerGrabButtonMask     = 1<<24
	}

	// Opcodes defined by the freedesktop.org system tray protocol.
	internal enum SystemTrayMessage
	{
		RequestDock,
		BeginMessage,
		CancelMessage
	}

	internal enum SystemTrayOrientation
	{
		Horz,
		Vert
	}

	// Mirrors Xlib's XClientMessageEvent; field order/layout must match the C struct.
	[StructLayout(LayoutKind.Sequential)]
	internal struct XClientMessageEvent
	{
		internal XEventName type;
		internal IntPtr serial;
		internal bool send_event;
		internal IntPtr display;
		internal IntPtr window;
		internal IntPtr message_type;
		internal int format;
		internal IntPtr ptr1;
		internal IntPtr ptr2;
		internal IntPtr ptr3;
		internal IntPtr ptr4;
		internal IntPtr ptr5;
	}

	// X11 event type codes.
	internal enum XEventName
	{
		KeyPress = 2,
		KeyRelease = 3,
		ButtonPress = 4,
		ButtonRelease = 5,
		MotionNotify = 6,
		EnterNotify = 7,
		LeaveNotify = 8,
		FocusIn = 9,
		FocusOut = 10,
		KeymapNotify = 11,
		Expose = 12,
		GraphicsExpose = 13,
		NoExpose = 14,
		VisibilityNotify = 15,
		CreateNotify = 16,
		DestroyNotify = 17,
		UnmapNotify = 18,
		MapNotify = 19,
		MapRequest = 20,
		ReparentNotify = 21,
		ConfigureNotify = 22,
		ConfigureRequest = 23,
		GravityNotify = 24,
		ResizeRequest = 25,
		CirculateNotify = 26,
		CirculateRequest = 27,
		PropertyNotify = 28,
		SelectionClear = 29,
		SelectionRequest = 30,
		SelectionNotify = 31,
		ColormapNotify = 32,
		ClientMessage = 33,
		MappingNotify = 34,
		TimerNotify = 100,
		LASTEvent
	}
}
// created on 3/16/2007 at 5:33 PM
using System;
using System.Collections.Concurrent;
using System.Configuration;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using System.Timers;
using Stardust.Interstellar.Rest.Client;

namespace Stardust.Continuum.Client
{
    /// <summary>
    /// Client for pushing <c>StreamItem</c> log entries to a Continuum log-stream
    /// endpoint, either buffered (drained every 500 ms by a background timer) or
    /// fire-and-forget per call. All send failures are swallowed by design:
    /// logging must never take the host application down.
    /// </summary>
    public static class ContinuumClient
    {
        /// <summary>
        /// If not null and greater than 1000 characters the streamed message element
        /// will be truncated for buffered transmissions.
        /// </summary>
        /// <value>
        /// The size of the limit message.
        /// </value>
        public static int? LimitMessageSize { get; set; }

        private static Timer _timer = new Timer() { Interval = 500, AutoReset = true, Enabled = true };
        private static string url;
        private static ConcurrentBag<StreamItem> _logBuffer = new ConcurrentBag<StreamItem>();

        // Serializes whole Flush operations so flushes never overlap.
        private static object triowing = new object();

        private static string _project;
        private static string _environment;
        private static ILogStream client;

        // Guards additions to and swaps of _logBuffer.
        private static object bufferLock = new object();
        private static bool _logPumpIsDisabled;

        static ContinuumClient()
        {
            _timer.Elapsed += _timer_Elapsed;
            _timer.Start();
        }

        /// <summary>
        /// Stops the background pump and synchronously flushes any buffered items.
        /// Buffered <see cref="AddStream(StreamItem, bool)"/> calls made afterwards throw.
        /// </summary>
        public static void Shutdown()
        {
            _timer.Enabled = false;
            _logPumpIsDisabled = true;
            Flush();
        }

        private static void _timer_Elapsed(object sender, ElapsedEventArgs e)
        {
            // Suspend the timer while flushing so timer-driven flushes never overlap;
            // re-enable even if Flush throws.
            _timer.Enabled = false;
            try
            {
                Flush();
            }
            finally
            {
                _timer.Enabled = true;
            }
        }

        /// <summary>
        /// Swaps the buffer for a fresh one and sends the drained items, blocking
        /// until the transmission attempt completes.
        /// </summary>
        private static void Flush()
        {
            StreamItem[] items;
            lock (triowing)
            {
                ConcurrentBag<StreamItem> tempBuffer;
                lock (bufferLock)
                {
                    // Swap in a fresh bag so producers can keep adding while we drain.
                    tempBuffer = _logBuffer;
                    _logBuffer = new ConcurrentBag<StreamItem>();
                }
                items = tempBuffer.ToArray();
            }
            switch (items.Length)
            {
                case 0:
                    return;
                case 1:
                    Task.Run(async () => await AddStreamInternal(items[0])).Wait();
                    break;
                default:
                    // ConcurrentBag enumerates newest-first; reverse to restore insertion order.
                    Task.Run(async () => await AddStreamInternal(items.Reverse().ToArray())).Wait();
                    break;
            }
        }

        /// <summary>
        /// The service endpoint; falls back to the "continuum:apiUrl" app setting.
        /// </summary>
        public static string BaseUrl
        {
            get
            {
                if (string.IsNullOrWhiteSpace(url))
                {
                    url = ConfigurationManager.AppSettings["continuum:apiUrl"];
                }
                return url;
            }
            set
            {
                url = value;
            }
        }

        /// <summary>
        /// Default project name; falls back to the "continuum:project" app setting.
        /// </summary>
        public static string Project
        {
            get
            {
                if (string.IsNullOrWhiteSpace(_project))
                {
                    _project = ConfigurationManager.AppSettings["continuum:project"];
                }
                return _project;
            }
            set
            {
                _project = value;
            }
        }

        /// <summary>
        /// Default environment name; falls back to the "continuum:environment" app setting.
        /// </summary>
        public static string Environment
        {
            get
            {
                if (string.IsNullOrWhiteSpace(_environment))
                {
                    _environment = ConfigurationManager.AppSettings["continuum:environment"];
                }
                return _environment;
            }
            set
            {
                _environment = value;
            }
        }

        /// <summary>Sets the api key used by the underlying log-stream proxy.</summary>
        public static void SetApiKey(string apiKey)
        {
            LogStreamConfig.ApiKey = apiKey;
        }

        /// <summary>
        /// Queues (buffered) or immediately sends (unbuffered) a single item using the
        /// default project/environment.
        /// </summary>
        /// <param name="item">The item to stream.</param>
        /// <param name="buffered">When true the item is batched and sent by the pump.</param>
        /// <exception cref="ObjectDisposedException">Buffered add after <see cref="Shutdown"/>.</exception>
        public static void AddStream(StreamItem item, bool buffered = true)
        {
            if (buffered)
            {
                if (_logPumpIsDisabled)
                    throw new ObjectDisposedException(nameof(ContinuumClient), "Batch message pump is closed");

                // BUGFIX: only truncate when the message is actually longer than the
                // configured limit. The previous unconditional Substring threw
                // ArgumentOutOfRangeException for any message shorter than the limit.
                if (LimitMessageSize.HasValue && LimitMessageSize.Value > 1000 &&
                    item.Message != null && item.Message.Length > LimitMessageSize.Value)
                {
                    item.Message = item.Message.Substring(0, LimitMessageSize.Value);
                }

                lock (bufferLock)
                {
                    _logBuffer.Add(item);
                }
            }
            else
            {
                Task.Run(async () => { await AddStreamInternal(item); });
            }
        }

        private static async Task AddStreamInternal(StreamItem item)
        {
            try
            {
                await LogStreamClient.AddStream(Project, Environment, item);
            }
            catch
            {
                // Best-effort: logging failures are deliberately ignored.
            }
        }

        /// <summary>Sends a batch using the default project/environment (fire-and-forget).</summary>
        public static void AddStream(StreamItem[] items)
        {
            Task.Run(async () => { await AddStreamInternal(items); });
        }

        private static async Task AddStreamInternal(StreamItem[] items)
        {
            try
            {
                await LogStreamClient.AddStreamBatch(Project, Environment, items);
            }
            catch
            {
                // Best-effort: logging failures are deliberately ignored.
            }
        }

        /// <summary>Sends one item to a specific environment (fire-and-forget).</summary>
        public static void AddStream(string environment, StreamItem item)
        {
            Task.Run(async () =>
            {
                try
                {
                    await LogStreamClient.AddStream(Project, environment, item);
                }
                catch
                {
                }
            });
        }

        /// <summary>Sends a batch to a specific environment (fire-and-forget).</summary>
        public static void AddStream(string environment, StreamItem[] items)
        {
            Task.Run(async () =>
            {
                try
                {
                    await LogStreamClient.AddStreamBatch(Project, environment, items);
                }
                catch
                {
                }
            });
        }

        /// <summary>Sends one item to a specific project/environment (fire-and-forget).</summary>
        public static void AddStream(string project, string environment, StreamItem item)
        {
            Task.Run(async () =>
            {
                try
                {
                    await LogStreamClient.AddStream(project, environment, item);
                }
                catch
                {
                }
            });
        }

        /// <summary>Sends a batch to a specific project/environment (fire-and-forget).</summary>
        public static void AddStream(string project, string environment, StreamItem[] items)
        {
            Task.Run(async () =>
            {
                try
                {
                    await LogStreamClient.AddStreamBatch(project, environment, items);
                }
                catch
                {
                }
            });
        }

        // Lazily-created proxy for the log-stream service.
        private static ILogStream LogStreamClient
        {
            get
            {
                if (client == null)
                    client = ProxyFactory.CreateInstance<ILogStream>(BaseUrl);
                var c = client;
                return c;
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information
//
// KeyInfoTest.cs - Test Cases for KeyInfo
//
// Author:
//  Sebastien Pouliot <sebastien@ximian.com>
//
// (C) 2002, 2003 Motus Technologies Inc. (http://www.motus.com)
// Copyright (C) 2004-2005 Novell, Inc (http://www.novell.com)
//
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography.X509Certificates;
using System.Xml;
using Xunit;

namespace System.Security.Cryptography.Xml.Tests
{
    // Tests for KeyInfo and its clause types (KeyInfoName, KeyValue, RetrievalMethod,
    // X509Data, KeyInfoNode), including XML round-tripping via GetXml/LoadXml.
    //
    // BUGFIX: removed a stray extra closing brace after Complex() that closed the
    // class prematurely, orphaning ImportKeyNode/NullClause/NullXml/InvalidXml and
    // leaving the file's braces unbalanced.
    public class KeyInfoTest
    {
        private KeyInfo info;

        public KeyInfoTest()
        {
            info = new KeyInfo();
        }

        [Fact]
        public void EmptyKeyInfo()
        {
            Assert.Equal("<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\" />", (info.GetXml().OuterXml));
            Assert.Equal(0, info.Count);
        }

        [Fact]
        public void KeyInfoName()
        {
            KeyInfoName name = new KeyInfoName();
            name.Value = "Mono::";
            info.AddClause(name);
            Assert.Equal("<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><KeyName>Mono::</KeyName></KeyInfo>", (info.GetXml().OuterXml));
            Assert.Equal(1, info.Count);
        }

        [Fact]
        public void KeyInfoNode()
        {
            string test = "<Test>KeyInfoNode</Test>";
            XmlDocument doc = new XmlDocument();
            doc.LoadXml(test);
            KeyInfoNode node = new KeyInfoNode(doc.DocumentElement);
            info.AddClause(node);
            Assert.Equal("<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><Test xmlns=\"\">KeyInfoNode</Test></KeyInfo>", (info.GetXml().OuterXml));
            Assert.Equal(1, info.Count);
        }

        private static string dsaP = "rjxsMU368YOCTQejWkiuO9e/vUVwkLtq1jKiU3TtJ53hBJqjFRuTa228vZe+BH2su9RPn/vYFWfQDv6zgBYe3eNdu4Afw+Ny0FatX6dl3E77Ra6Tsd3MmLXBiGSQ1mMNd5G2XQGpbt9zsGlUaexXekeMLxIufgfZLwYp67M+2WM=";
        private static string dsaQ = "tf0K9rMyvUrU4cIkwbCrDRhQAJk=";
        private static string dsaG = "S8Z+1pGCed00w6DtVcqZLKjfqlCJ7JsugEFIgSy/Vxtu9YGCMclV4ijGEbPo/jU8YOSMuD7E9M7UaopMRcmKQjoKZzoJjkgVFP48Ohxl1f08lERnButsxanx3+OstFwUGQ8XNaGg3KrIoZt1FUnfxN3RHHTvVhjzNSHxMGULGaU=";
        private static string dsaY = "LnrxxRGLYeV2XLtK3SYz8RQHlHFZYrtznDZyMotuRfO5uC5YODhSFyLXvb1qB3WeGtF4h3Eo4KzHgMgfN2ZMlffxFRhJgTtH3ctbL8lfQoDkjeiPPnYGhspdJxr0tyZmiy0gkjJG3vwHYrLnvZWx9Wm/unqiOlGBPNuxJ+hOeP8=";
        //private static string dsaJ = "9RhE5TycDtdEIXxS3HfxFyXYgpy81zY5lVjwD6E9JP37MWEi80BlX6ab1YPm6xYSEoqReMPP9RgGiW6DuACpgI7+8vgCr4i/7VhzModJAA56PwvTu6UMt9xxKU/fT672v8ucREkMWoc7lEey";
        //private static string dsaSeed = "HxW3N4RHWVgqDQKuGg7iJTUTiCs=";
        //private static string dsaPgenCounter = "Asw=";
        // private static string xmlDSA = "<DSAKeyValue><P>" + dsaP + "</P><Q>" + dsaQ + "</Q><G>" + dsaG + "</G><Y>" + dsaY + "</Y><J>" + dsaJ + "</J><Seed>" + dsaSeed + "</Seed><PgenCounter>" + dsaPgenCounter + "</PgenCounter></DSAKeyValue>";
        private static string xmlDSA = "<DSAKeyValue><P>" + dsaP + "</P><Q>" + dsaQ + "</Q><G>" + dsaG + "</G><Y>" + dsaY + "</Y></DSAKeyValue>";

        [Fact]
        public void DSAKeyValue()
        {
            using (DSA key = DSA.Create())
            {
                key.ImportParameters(new DSAParameters
                {
                    P = Convert.FromBase64String(dsaP),
                    Q = Convert.FromBase64String(dsaQ),
                    G = Convert.FromBase64String(dsaG),
                    Y = Convert.FromBase64String(dsaY),
                    //J = Convert.FromBase64String(dsaJ),
                    //Seed = Convert.FromBase64String(dsaSeed),
                    //Counter = BitConverter.ToUInt16(Convert.FromBase64String(dsaPgenCounter), 0)
                });
                DSAKeyValue dsa = new DSAKeyValue(key);
                info.AddClause(dsa);
                AssertCrypto.AssertXmlEquals("dsa", "<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><KeyValue xmlns=\"http://www.w3.org/2000/09/xmldsig#\">" + xmlDSA + "</KeyValue></KeyInfo>", (info.GetXml().OuterXml));
                Assert.Equal(1, info.Count);
            }
        }

        private static string rsaModulus = "9DC4XNdQJwMRnz5pP2a6U51MHCODRilaIoVXqUPhCUb0lJdGroeqVYT84ZyIVrcarzD7Tqs3aEOIa3rKox0N1bxQpZPqayVQeLAkjLLtzJW/ScRJx3uEDJdgT1JnM1FH0GZTinmEdCUXdLc7+Y/c/qqIkTfbwHbRZjW0bBJyExM=";
        private static string rsaExponent = "AQAB";
        private static string xmlRSA = "<RSAKeyValue><Modulus>" + rsaModulus + "</Modulus><Exponent>" + rsaExponent + "</Exponent></RSAKeyValue>";

        [Fact]
        public void RSAKeyValue()
        {
            using (RSA key = RSA.Create())
            {
                key.ImportParameters(new RSAParameters()
                {
                    Modulus = Convert.FromBase64String(rsaModulus),
                    Exponent = Convert.FromBase64String(rsaExponent)
                });
                RSAKeyValue rsa = new RSAKeyValue(key);
                info.AddClause(rsa);
                AssertCrypto.AssertXmlEquals("rsa", "<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><KeyValue xmlns=\"http://www.w3.org/2000/09/xmldsig#\">" + xmlRSA + "</KeyValue></KeyInfo>", (info.GetXml().OuterXml));
                Assert.Equal(1, info.Count);
            }
        }

        [Fact]
        public void RetrievalMethod()
        {
            KeyInfoRetrievalMethod retrieval = new KeyInfoRetrievalMethod();
            retrieval.Uri = "http://www.go-mono.org/";
            info.AddClause(retrieval);
            Assert.Equal("<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><RetrievalMethod URI=\"http://www.go-mono.org/\" /></KeyInfo>", (info.GetXml().OuterXml));
            Assert.Equal(1, info.Count);
        }

        // DER-encoded test certificate (Keywitness Canada Inc., 1996).
        static byte[] cert = { 0x30,0x82,0x02,0x1D,0x30,0x82,0x01,0x86,0x02,0x01,0x14,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x30,0x58,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x43,0x41,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x4B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,0x73,0x20,0x43,0x61,0x6E,0x61,0x64,0x61,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,0x0A,0x2B,0x06,0x01,0x04,0x01,0x2A,0x02,0x0B,0x02,0x01,0x13,0x18,0x6B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,
            0x73,0x40,0x6B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,0x73,0x2E,0x63,0x61,0x30,0x1E,0x17,0x0D,0x39,0x36,0x30,0x35,0x30,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x17,0x0D,0x39,0x39,0x30,0x35,0x30,0x37,0x30,0x30,0x30,0x30,0x30,0x30,0x5A,0x30,0x58,0x31,0x0B,0x30,0x09,0x06,0x03,0x55,0x04,0x06,0x13,0x02,0x43,0x41,0x31,0x1F,0x30,0x1D,0x06,0x03,0x55,0x04,0x03,0x13,0x16,0x4B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,0x73,0x20,0x43,0x61,0x6E,0x61,0x64,0x61,0x20,0x49,0x6E,0x63,0x2E,0x31,0x28,0x30,0x26,0x06,
            0x0A,0x2B,0x06,0x01,0x04,0x01,0x2A,0x02,0x0B,0x02,0x01,0x13,0x18,0x6B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,0x73,0x40,0x6B,0x65,0x79,0x77,0x69,0x74,0x6E,0x65,0x73,0x73,0x2E,0x63,0x61,0x30,0x81,0x9D,0x30,0x0D,0x06,0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x01,0x05,0x00,0x03,0x81,0x8B,0x00,0x30,0x81,0x87,0x02,0x81,0x81,0x00,0xCD,0x23,0xFA,0x2A,0xE1,0xED,0x98,0xF4,0xE9,0xD0,0x93,0x3E,0xD7,0x7A,0x80,0x02,0x4C,0xCC,0xC1,0x02,0xAF,0x5C,0xB6,0x1F,0x7F,0xFA,0x57,0x42,0x6F,0x30,0xD1,0x20,0xC5,0xB5,
            0x21,0x07,0x40,0x2C,0xA9,0x86,0xC2,0xF3,0x64,0x84,0xAE,0x3D,0x85,0x2E,0xED,0x85,0xBD,0x54,0xB0,0x18,0x28,0xEF,0x6A,0xF8,0x1B,0xE7,0x0B,0x16,0x1F,0x93,0x25,0x4F,0xC7,0xF8,0x8E,0xC3,0xB9,0xCA,0x98,0x84,0x0E,0x55,0xD0,0x2F,0xEF,0x78,0x77,0xC5,0x72,0x28,0x5F,0x60,0xBF,0x19,0x2B,0xD1,0x72,0xA2,0xB7,0xD8,0x3F,0xE0,0x97,0x34,0x5A,0x01,0xBD,0x04,0x9C,0xC8,0x78,0x45,0xCD,0x93,0x8D,0x15,0xF2,0x76,0x10,0x11,0xAB,0xB8,0x5B,0x2E,0x9E,0x52,0xDD,0x81,0x3E,0x9C,0x64,0xC8,0x29,0x93,0x02,0x01,0x03,0x30,0x0D,0x06,
            0x09,0x2A,0x86,0x48,0x86,0xF7,0x0D,0x01,0x01,0x04,0x05,0x00,0x03,0x81,0x81,0x00,0x32,0x1A,0x35,0xBA,0xBF,0x43,0x27,0xD6,0xB4,0xD4,0xB8,0x76,0xE5,0xE3,0x9B,0x4D,0x6C,0xC0,0x86,0xC9,0x77,0x35,0xBA,0x6B,0x16,0x2D,0x13,0x46,0x4A,0xB0,0x32,0x53,0xA1,0x5B,0x5A,0xE9,0x99,0xE2,0x0C,0x86,0x88,0x17,0x4E,0x0D,0xFE,0x82,0xAC,0x4E,0x47,0xEF,0xFB,0xFF,0x39,0xAC,0xEE,0x35,0xC8,0xFA,0x52,0x37,0x0A,0x49,0xAD,0x59,0xAD,0xE2,0x8A,0xA9,0x1C,0xC6,0x5F,0x1F,0xF8,0x6F,0x73,0x7E,0xCD,0xA0,0x31,0xE8,0x0C,0xBE,0xF5,0x4D,
            0xD9,0xB2,0xAB,0x8A,0x12,0xB6,0x30,0x78,0x68,0x11,0x7C,0x0D,0xF1,0x49,0x4D,0xA3,0xFD,0xB2,0xE9,0xFF,0x1D,0xF0,0x91,0xFA,0x54,0x85,0xFF,0x33,0x90,0xE8,0xC1,0xBF,0xA4,0x9B,0xA4,0x62,0x46,0xBD,0x61,0x12,0x59,0x98,0x41,0x89 };

        [Fact]
        public void X509Data()
        {
            using (X509Certificate x509 = new X509Certificate(cert))
            {
                KeyInfoX509Data x509data = new KeyInfoX509Data(x509);
                info.AddClause(x509data);
                AssertCrypto.AssertXmlEquals("X509Data", "<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><X509Data xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><X509Certificate>MIICHTCCAYYCARQwDQYJKoZIhvcNAQEEBQAwWDELMAkGA1UEBhMCQ0ExHzAdBgNVBAMTFktleXdpdG5lc3MgQ2FuYWRhIEluYy4xKDAmBgorBgEEASoCCwIBExhrZXl3aXRuZXNzQGtleXdpdG5lc3MuY2EwHhcNOTYwNTA3MDAwMDAwWhcNOTkwNTA3MDAwMDAwWjBYMQswCQYDVQQGEwJDQTEfMB0GA1UEAxMWS2V5d2l0bmVzcyBDYW5hZGEgSW5jLjEoMCYGCisGAQQBKgILAgETGGtleXdpdG5lc3NAa2V5d2l0bmVzcy5jYTCBnTANBgkqhkiG9w0BAQEFAAOBiwAwgYcCgYEAzSP6KuHtmPTp0JM+13qAAkzMwQKvXLYff/pXQm8w0SDFtSEHQCyphsLzZISuPYUu7YW9VLAYKO9q+BvnCxYfkyVPx/iOw7nKmIQOVdAv73h3xXIoX2C/GSvRcqK32D/glzRaAb0EnMh4Rc2TjRXydhARq7hbLp5S3YE+nGTIKZMCAQMwDQYJKoZIhvcNAQEEBQADgYEAMho1ur9DJ9a01Lh25eObTWzAhsl3NbprFi0TRkqwMlOhW1rpmeIMhogXTg3+gqxOR+/7/zms7jXI+lI3CkmtWa3iiqkcxl8f+G9zfs2gMegMvvVN2bKrihK2MHhoEXwN8UlNo/2y6f8d8JH6VIX/M5Dowb+km6RiRr1hElmYQYk=</X509Certificate></X509Data></KeyInfo>", (info.GetXml().OuterXml));
                Assert.Equal(1, info.Count);
            }
        }

        [Fact]
        public void Complex()
        {
            KeyInfoName name = new KeyInfoName();
            name.Value = "CoreFx::";
            info.AddClause(name);
            using (DSA keyDSA = DSA.Create())
            {
                keyDSA.ImportParameters(new DSAParameters
                {
                    P = Convert.FromBase64String(dsaP),
                    Q = Convert.FromBase64String(dsaQ),
                    G = Convert.FromBase64String(dsaG),
                    Y = Convert.FromBase64String(dsaY),
                });
                DSAKeyValue dsa = new DSAKeyValue(keyDSA);
                info.AddClause(dsa);
                using (RSA keyRSA = RSA.Create())
                {
                    keyRSA.ImportParameters(new RSAParameters()
                    {
                        Modulus = Convert.FromBase64String(rsaModulus),
                        Exponent = Convert.FromBase64String(rsaExponent)
                    });
                    RSAKeyValue rsa = new RSAKeyValue(keyRSA);
                    info.AddClause(rsa);
                    KeyInfoRetrievalMethod retrieval = new KeyInfoRetrievalMethod();
                    retrieval.Uri = "https://github.com/dotnet/corefx";
                    info.AddClause(retrieval);
                    using (X509Certificate x509 = new X509Certificate(cert))
                    {
                        KeyInfoX509Data x509data = new KeyInfoX509Data(x509);
                        info.AddClause(x509data);
                        string s = "<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><KeyName>CoreFx::</KeyName><KeyValue xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><DSAKeyValue><P>rjxsMU368YOCTQejWkiuO9e/vUVwkLtq1jKiU3TtJ53hBJqjFRuTa228vZe+BH2su9RPn/vYFWfQDv6zgBYe3eNdu4Afw+Ny0FatX6dl3E77Ra6Tsd3MmLXBiGSQ1mMNd5G2XQGpbt9zsGlUaexXekeMLxIufgfZLwYp67M+2WM=</P><Q>tf0K9rMyvUrU4cIkwbCrDRhQAJk=</Q><G>S8Z+1pGCed00w6DtVcqZLKjfqlCJ7JsugEFIgSy/Vxtu9YGCMclV4ijGEbPo/jU8YOSMuD7E9M7UaopMRcmKQjoKZzoJjkgVFP48Ohxl1f08lERnButsxanx3+OstFwUGQ8XNaGg3KrIoZt1FUnfxN3RHHTvVhjzNSHxMGULGaU=</G><Y>LnrxxRGLYeV2XLtK3SYz8RQHlHFZYrtznDZyMotuRfO5uC5YODhSFyLXvb1qB3WeGtF4h3Eo4KzHgMgfN2ZMlffxFRhJgTtH3ctbL8lfQoDkjeiPPnYGhspdJxr0tyZmiy0gkjJG3vwHYrLnvZWx9Wm/unqiOlGBPNuxJ+hOeP8=</Y></DSAKeyValue></KeyValue>";
                        s += "<KeyValue xmlns=\"http://www.w3.org/2000/09/xmldsig#\"><RSAKeyValue><Modulus>9DC4XNdQJwMRnz5pP2a6U51MHCODRilaIoVXqUPhCUb0lJdGroeqVYT84ZyIVrcarzD7Tqs3aEOIa3rKox0N1bxQpZPqayVQeLAkjLLtzJW/ScRJx3uEDJdgT1JnM1FH0GZTinmEdCUXdLc7+Y/c/qqIkTfbwHbRZjW0bBJyExM=</Modulus><Exponent>AQAB</Exponent></RSAKeyValue></KeyValue>";
                        s += "<RetrievalMethod URI=\"https://github.com/dotnet/corefx\" />";
                        s += "<X509Data xmlns=\"http://www.w3.org/2000/09/xmldsig#\">";
                        s += "<X509Certificate>MIICHTCCAYYCARQwDQYJKoZIhvcNAQEEBQAwWDELMAkGA1UEBhMCQ0ExHzAdBgNVBAMTFktleXdpdG5lc3MgQ2FuYWRhIEluYy4xKDAmBgorBgEEASoCCwIBExhrZXl3aXRuZXNzQGtleXdpdG5lc3MuY2EwHhcNOTYwNTA3MDAwMDAwWhcNOTkwNTA3MDAwMDAwWjBYMQswCQYDVQQGEwJDQTEfMB0GA1UEAxMWS2V5d2l0bmVzcyBDYW5hZGEgSW5jLjEoMCYGCisGAQQBKgILAgETGGtleXdpdG5lc3NAa2V5d2l0bmVzcy5jYTCBnTANBgkqhkiG9w0BAQEFAAOBiwAwgYcCgYEAzSP6KuHtmPTp0JM+13qAAkzMwQKvXLYff/pXQm8w0SDFtSEHQCyphsLzZISuPYUu7YW9VLAYKO9q+BvnCxYfkyVPx/iOw7nKmIQOVdAv73h3xXIoX2C/GSvRcqK32D/glzRaAb0EnMh4Rc2TjRXydhARq7hbLp5S3YE+nGTIKZMCAQMwDQYJKoZIhvcNAQEEBQADgYEAMho1ur9DJ9a01Lh25eObTWzAhsl3NbprFi0TRkqwMlOhW1rpmeIMhogXTg3+gqxOR+/7/zms7jXI+lI3CkmtWa3iiqkcxl8f+G9zfs2gMegMvvVN2bKrihK2MHhoEXwN8UlNo/2y6f8d8JH6VIX/M5Dowb+km6RiRr1hElmYQYk=</X509Certificate></X509Data></KeyInfo>";
                        AssertCrypto.AssertXmlEquals("Complex", s, (info.GetXml().OuterXml));
                        Assert.Equal(5, info.Count);
                    }
                }
            }
        }

        [Fact]
        public void ImportKeyNode()
        {
            string keyName = "Mono::";
            string dsaP = "rjxsMU368YOCTQejWkiuO9e/vUVwkLtq1jKiU3TtJ53hBJqjFRuTa228vZe+BH2su9RPn/vYFWfQDv6zgBYe3eNdu4Afw+Ny0FatX6dl3E77Ra6Tsd3MmLXBiGSQ1mMNd5G2XQGpbt9zsGlUaexXekeMLxIufgfZLwYp67M+2WM=";
            string dsaQ = "tf0K9rMyvUrU4cIkwbCrDRhQAJk=";
            string dsaG = "S8Z+1pGCed00w6DtVcqZLKjfqlCJ7JsugEFIgSy/Vxtu9YGCMclV4ijGEbPo/jU8YOSMuD7E9M7UaopMRcmKQjoKZzoJjkgVFP48Ohxl1f08lERnButsxanx3+OstFwUGQ8XNaGg3KrIoZt1FUnfxN3RHHTvVhjzNSHxMGULGaU=";
            string dsaY = "LnrxxRGLYeV2XLtK3SYz8RQHlHFZYrtznDZyMotuRfO5uC5YODhSFyLXvb1qB3WeGtF4h3Eo4KzHgMgfN2ZMlffxFRhJgTtH3ctbL8lfQoDkjeiPPnYGhspdJxr0tyZmiy0gkjJG3vwHYrLnvZWx9Wm/unqiOlGBPNuxJ+hOeP8=";
            string dsaJ = "9RhE5TycDtdEIXxS3HfxFyXYgpy81zY5lVjwD6E9JP37MWEi80BlX6ab1YPm6xYSEoqReMPP9RgGiW6DuACpgI7+8vgCr4i/7VhzModJAA56PwvTu6UMt9xxKU/fT672v8ucREkMWoc7lEey";
            string dsaSeed = "HxW3N4RHWVgqDQKuGg7iJTUTiCs=";
            string dsaPgenCounter = "Asw=";
            string rsaModulus = "9DC4XNdQJwMRnz5pP2a6U51MHCODRilaIoVXqUPhCUb0lJdGroeqVYT84ZyIVrcarzD7Tqs3aEOIa3rKox0N1bxQpZPqayVQeLAkjLLtzJW/ScRJx3uEDJdgT1JnM1FH0GZTinmEdCUXdLc7+Y/c/qqIkTfbwHbRZjW0bBJyExM=";
            string rsaExponent = "AQAB";
            string x509cert = "MIICHTCCAYYCARQwDQYJKoZIhvcNAQEEBQAwWDELMAkGA1UEBhMCQ0ExHzAdBgNVBAMTFktleXdpdG5lc3MgQ2FuYWRhIEluYy4xKDAmBgorBgEEASoCCwIBExhrZXl3aXRuZXNzQGtleXdpdG5lc3MuY2EwHhcNOTYwNTA3MDAwMDAwWhcNOTkwNTA3MDAwMDAwWjBYMQswCQYDVQQGEwJDQTEfMB0GA1UEAxMWS2V5d2l0bmVzcyBDYW5hZGEgSW5jLjEoMCYGCisGAQQBKgILAgETGGtleXdpdG5lc3NAa2V5d2l0bmVzcy5jYTCBnTANBgkqhkiG9w0BAQEFAAOBiwAwgYcCgYEAzSP6KuHtmPTp0JM+13qAAkzMwQKvXLYff/pXQm8w0SDFtSEHQCyphsLzZISuPYUu7YW9VLAYKO9q+BvnCxYfkyVPx/iOw7nKmIQOVdAv73h3xXIoX2C/GSvRcqK32D/glzRaAb0EnMh4Rc2TjRXydhARq7hbLp5S3YE+nGTIKZMCAQMwDQYJKoZIhvcNAQEEBQADgYEAMho1ur9DJ9a01Lh25eObTWzAhsl3NbprFi0TRkqwMlOhW1rpmeIMhogXTg3+gqxOR+/7/zms7jXI+lI3CkmtWa3iiqkcxl8f+G9zfs2gMegMvvVN2bKrihK2MHhoEXwN8UlNo/2y6f8d8JH6VIX/M5Dowb+km6RiRr1hElmYQYk=";
            string retrievalElementUri = @"http://www.go-mono.org/";
            string value = $@"<KeyInfo xmlns=""http://www.w3.org/2000/09/xmldsig#"">
<KeyName>{keyName}</KeyName>
<KeyValue xmlns=""http://www.w3.org/2000/09/xmldsig#"">
<DSAKeyValue>
<P>{dsaP}</P>
<Q>{dsaQ}</Q>
<G>{dsaG}</G>
<Y>{dsaY}</Y>
<J>{dsaJ}</J>
<Seed>{dsaSeed}</Seed>
<PgenCounter>{dsaPgenCounter}</PgenCounter>
</DSAKeyValue>
</KeyValue>
<KeyValue xmlns=""http://www.w3.org/2000/09/xmldsig#"">
<RSAKeyValue>
<Modulus>{rsaModulus}</Modulus>
<Exponent>{rsaExponent}</Exponent>
</RSAKeyValue>
</KeyValue>
<RetrievalElement URI=""{retrievalElementUri}"" />
<X509Data xmlns=""http://www.w3.org/2000/09/xmldsig#"">
<X509Certificate>{x509cert}</X509Certificate>
</X509Data>
</KeyInfo>";
            XmlDocument doc = new XmlDocument();
            doc.LoadXml(value);
            info.LoadXml(doc.DocumentElement);

            Assert.Equal(5, info.Count);
            int i = 0;
            int pathsCovered = 0;
            foreach (var clause in info)
            {
                i++;
                if (clause is KeyInfoName)
                {
                    pathsCovered |= 1 << 0;
                    var name = clause as KeyInfoName;
                    Assert.Equal(keyName, name.Value);
                }
                else if (clause is DSAKeyValue)
                {
                    pathsCovered |= 1 << 1;
                    var dsaKV = clause as DSAKeyValue;
                    DSA dsaKey = dsaKV.Key;
                    DSAParameters dsaParams = dsaKey.ExportParameters(false);
                    Assert.Equal(Convert.FromBase64String(dsaP), dsaParams.P);
                    Assert.Equal(Convert.FromBase64String(dsaQ), dsaParams.Q);
                    Assert.Equal(Convert.FromBase64String(dsaG), dsaParams.G);
                    Assert.Equal(Convert.FromBase64String(dsaY), dsaParams.Y);

                    // J is an optimization it should either be null or correct value
                    if (dsaParams.J != null)
                    {
                        Assert.Equal(Convert.FromBase64String(dsaJ), dsaParams.J);
                    }

                    // Seed and Counter are not guaranteed to roundtrip
                    // they should either both be non-null or both null
                    if (dsaParams.Seed != null)
                    {
                        Assert.Equal(Convert.FromBase64String(dsaSeed), dsaParams.Seed);
                        byte[] counter = Convert.FromBase64String(dsaPgenCounter);
                        Assert.InRange(counter.Length, 1, 4);
                        int counterVal = 0;
                        for (int j = 0; j < counter.Length; j++)
                        {
                            counterVal <<= 8;
                            counterVal |= counter[j];
                        }
                        Assert.Equal(counterVal, dsaParams.Counter);
                    }
                    else
                    {
                        Assert.Null(dsaParams.Seed);
                        Assert.Equal(default(int), dsaParams.Counter);
                    }
                }
                else if (clause is RSAKeyValue)
                {
                    pathsCovered |= 1 << 2;
                    var rsaKV = clause as RSAKeyValue;
                    RSA rsaKey = rsaKV.Key;
                    RSAParameters rsaParameters = rsaKey.ExportParameters(false);
                    Assert.Equal(Convert.FromBase64String(rsaModulus), rsaParameters.Modulus);
                    Assert.Equal(Convert.FromBase64String(rsaExponent), rsaParameters.Exponent);
                }
                else if (clause is KeyInfoNode)
                {
                    pathsCovered |= 1 << 3;
                    var keyInfo = clause as KeyInfoNode;
                    XmlElement keyInfoEl = keyInfo.GetXml();
                    Assert.Equal("RetrievalElement", keyInfoEl.LocalName);
                    Assert.Equal("http://www.w3.org/2000/09/xmldsig#", keyInfoEl.NamespaceURI);
                    Assert.Equal(1, keyInfoEl.Attributes.Count);
                    Assert.Equal("URI", keyInfoEl.Attributes[0].Name);
                    Assert.Equal(retrievalElementUri, keyInfoEl.GetAttribute("URI"));
                }
                else if (clause is KeyInfoX509Data)
                {
                    pathsCovered |= 1 << 4;
                    var x509data = clause as KeyInfoX509Data;
                    Assert.Equal(1, x509data.Certificates.Count);
                    X509Certificate cert = x509data.Certificates[0] as X509Certificate;
                    Assert.NotNull(cert);
                    Assert.Equal(Convert.FromBase64String(x509cert), cert.GetRawCertData());
                }
                else
                {
                    Assert.True(false, $"Unexpected clause type: {clause.GetType().FullName}");
                }
            }

            // 0x1f = b11111, number of ones = 5
            Assert.Equal(0x1f, pathsCovered);
            Assert.Equal(5, i);
        }

        [Fact]
        public void NullClause()
        {
            Assert.Equal(0, info.Count);
            // null is accepted...
            info.AddClause(null);
            Assert.Equal(1, info.Count);
            // but can't get XML out if it!
            Assert.Throws<NullReferenceException>(() => info.GetXml());
        }

        [Fact]
        public void NullXml()
        {
            Assert.Throws<ArgumentNullException>(() => info.LoadXml(null));
        }

        [Fact]
        public void InvalidXml()
        {
            string bad = "<Test></Test>";
            XmlDocument doc = new XmlDocument();
            doc.LoadXml(bad);
            info.LoadXml(doc.DocumentElement);
            // no exception but Xml isn't loaded
            Assert.Equal("<KeyInfo xmlns=\"http://www.w3.org/2000/09/xmldsig#\" />", (info.GetXml().OuterXml));
            Assert.Equal(0, info.Count);
        }
    }
}
using BellRichM.Helpers.Test;
using BellRichM.Logging;
using BellRichM.Weather.Api.Data;
using BellRichM.Weather.Api.Models;
using BellRichM.Weather.Api.Repositories;
using FluentAssertions;
using Machine.Specifications;
using Moq;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using It = Machine.Specifications.It;

namespace BellRichM.Weather.Api.Services.Test
{
    // FIX (applies to every spec below): the original assertions were written as
    // `x.Should().Equals(y)`. `Equals` is inherited from System.Object on the
    // FluentAssertions wrapper, returns a bool that was discarded, and asserts
    // NOTHING — the specs could never fail on those lines. They now use
    // `Should().Be(...)`, which actually asserts. Because these checks were never
    // executed before, some expected values may surface latent failures that were
    // previously swallowed — those would be real bugs in the expectations or the
    // service, not in this change.

    /// <summary>
    /// Shared fixture state and test-data builders for <see cref="ConditionService"/> specs.
    /// </summary>
    public class ConditionServiceSpecs
    {
        protected const int Offset = 0;
        protected const int Limit = 5;
        protected static LoggingData loggingData;
        protected static ConditionService conditionService;
        protected static Mock<ILoggerAdapter<ConditionService>> loggerMock;
        protected static Mock<IConditionRepository> conditionRepositoryMock;

        // NOTE(review): never assigned anywhere in this file, so specs that use it pass
        // null; the Moq setups still match because they capture the same null reference.
        // Confirm whether a real TimePeriodModel was intended.
        protected static TimePeriodModel timePeriodModel;

        protected static MinMaxConditionPage minMaxConditionPage;
        protected static IEnumerable<MinMaxCondition> minMaxConditions;
        protected static IEnumerable<Condition> conditions;
        protected static ConditionPage conditionPage;
        protected static MinMaxGroupPage minMaxGroupPage;
        protected static IEnumerable<MinMaxGroup> minMaxGroups;

        Establish context = () =>
        {
            // default to no logging
            loggingData = new LoggingData
            {
                EventLoggingData = new List<EventLoggingData>(),
                ErrorLoggingMessages = new List<string>()
            };

            loggerMock = new Mock<ILoggerAdapter<ConditionService>>();
            conditionRepositoryMock = new Mock<IConditionRepository>();
        };

        // Builds a one-element min/max fixture and stores it in the shared field.
        public static IEnumerable<MinMaxCondition> CreateMinMaxCondition()
        {
            minMaxConditions = new List<MinMaxCondition>
            {
                new MinMaxCondition
                {
                    Year = 2018,
                    Month = 9,
                    Day = 1,
                    Hour = 1,
                    MaxTemp = "67.2",
                    MinTemp = "65.6",
                    MaxHumidity = "83.0",
                    MinHumidity = "80.0",
                    MaxDewpoint = "60.8725771445071",
                    MinDewpoint = "60.0932637870109",
                    MaxHeatIndex = "67.2",
                    MinWindchill = "65.6",
                    MaxBarometer = "29.694",
                    MinBarometer = "29.687",
                    MaxET = "0.001",
                    MinET = "0.0",
                    MaxUV = "0.0",
                    MinUV = "0.0",
                    MaxRadiation = "0.0",
                    MinRadiation = "0.0",
                    MaxRainRate = "0.0",
                    MaxWindGust = "4.00000994196379"
                }
            };

            return minMaxConditions;
        }

        // Builds a one-element observation fixture (returned, not stored; the local
        // deliberately shadows the static field of the same name).
        public static IEnumerable<Condition> CreateCondition()
        {
            var conditions = new List<Condition>
            {
                new Condition
                {
                    Year = 2018,
                    Month = 9,
                    Day = 1,
                    Hour = 1,
                    WindGustDirection = 61.8725771445071,
                    WindGust = 4.00000994196379,
                    WindDirection = 59.8725771445071,
                    WindSpeed = 2.00000994196379,
                    OutsideTemperature = 67.2,
                    HeatIndex = 65.6,
                    Windchill = 83.0,
                    DewPoint = 60.8725771445071,
                    Barometer = 29.694,
                    RainRate = 0.0,
                    Rain = 4.00000994196379,
                    OutsideHumidity = 29.687
                }
            };

            return conditions.ToList();
        }
    }

    internal class When_creating_page_of_year_weather_conditions : ConditionServiceSpecs
    {
        Cleanup after = () =>
        {
        };

        Establish context = () =>
        {
            minMaxConditions = CreateMinMaxCondition();

            conditionRepositoryMock.Setup(x => x.GetYearCount()).Returns(Task.FromResult(minMaxConditions.Count()));
            conditionRepositoryMock.Setup(x => x.GetYear(Offset, Limit)).Returns(Task.FromResult(minMaxConditions));

            conditionService = new ConditionService(conditionRepositoryMock.Object);
        };

        Because of = () =>
        {
            minMaxConditionPage = conditionService.GetYearWeatherPage(Offset, Limit).Result;
        };

        Behaves_like<LoggingBehaviors<ConditionService>> correct_logging = () => { };

        It should_have_correct_total_count = () =>
        {
            minMaxConditionPage.Paging.TotalCount.Should().Be(minMaxConditions.Count());
        };

        It should_have_correct_offset = () =>
        {
            minMaxConditionPage.Paging.Offset.Should().Be(Offset);
        };

        It should_have_correct_limit = () =>
        {
            minMaxConditionPage.Paging.Limit.Should().Be(Limit);
        };

        It should_have_correct_condition_data = () =>
        {
            minMaxConditionPage.MinMaxConditions.Should().BeEquivalentTo(minMaxConditions);
        };
    }

    internal class When_GetMinMaxConditionsByMinute : ConditionServiceSpecs
    {
        Establish context = () =>
        {
            conditions = CreateCondition().ToList();
            var minMaxGroup = new MinMaxGroup
            {
                Month = conditions.First().Month,
                Day = conditions.First().Day
            };
            var minMaxGroupList = new List<MinMaxGroup>();
            minMaxGroupList.Add(minMaxGroup);
            minMaxGroups = minMaxGroupList;

            conditionRepositoryMock.Setup(x => x.GetMinMaxConditionsByMinute(1, 0, 0, Offset, Limit)).Returns(Task.FromResult(minMaxGroups));

            conditionService = new ConditionService(conditionRepositoryMock.Object);
        };

        Because of = () =>
            minMaxGroupPage = conditionService.GetMinMaxConditionsByMinute(1, 0, 0, Offset, Limit).Result;

        Behaves_like<LoggingBehaviors<ConditionService>> correct_logging = () => { };

        It should_have_correct_total_count = () =>
            minMaxGroupPage.Paging.TotalCount.Should().Be(conditions.Count());

        It should_have_correct_offset = () =>
            minMaxGroupPage.Paging.Offset.Should().Be(Offset);

        It should_have_correct_limit = () =>
            minMaxGroupPage.Paging.Limit.Should().Be(Limit);

        It should_have_correct_condition_data = () =>
            minMaxGroupPage.MinMaxGroups.Should().BeEquivalentTo(minMaxGroups);
    }

    internal class When_GetMinMaxConditionsByHour : ConditionServiceSpecs
    {
        Establish context = () =>
        {
            conditions = CreateCondition().ToList();
            var minMaxGroup = new MinMaxGroup
            {
                Month = conditions.First().Month,
                Day = conditions.First().Day
            };
            var minMaxGroupList = new List<MinMaxGroup>();
            minMaxGroupList.Add(minMaxGroup);
            minMaxGroups = minMaxGroupList;

            conditionRepositoryMock.Setup(x => x.GetMinMaxConditionsByHour(0, 0, Offset, Limit)).Returns(Task.FromResult(minMaxGroups));

            conditionService = new ConditionService(conditionRepositoryMock.Object);
        };

        Because of = () =>
            minMaxGroupPage = conditionService.GetMinMaxConditionsByHour(0, 0, Offset, Limit).Result;

        Behaves_like<LoggingBehaviors<ConditionService>> correct_logging = () => { };

        It should_have_correct_total_count = () =>
            minMaxGroupPage.Paging.TotalCount.Should().Be(conditions.Count());

        It should_have_correct_offset = () =>
            minMaxGroupPage.Paging.Offset.Should().Be(Offset);

        It should_have_correct_limit = () =>
            minMaxGroupPage.Paging.Limit.Should().Be(Limit);

        It should_have_correct_condition_data = () =>
            minMaxGroupPage.MinMaxGroups.Should().BeEquivalentTo(minMaxGroups);
    }

    internal class When_GetMinMaxConditionsByDay : ConditionServiceSpecs
    {
        Establish context = () =>
        {
            conditions = CreateCondition().ToList();
            var minMaxGroup = new MinMaxGroup
            {
                Month = conditions.First().Month,
                Day = conditions.First().Day
            };
            var minMaxGroupList = new List<MinMaxGroup>();
            minMaxGroupList.Add(minMaxGroup);
            minMaxGroups = minMaxGroupList;

            conditionRepositoryMock.Setup(x => x.GetMinMaxConditionsByDay(0, 0, Offset, Limit)).Returns(Task.FromResult(minMaxGroups));

            conditionService = new ConditionService(conditionRepositoryMock.Object);
        };

        Because of = () =>
            minMaxGroupPage = conditionService.GetMinMaxConditionsByDay(0, 0, Offset, Limit).Result;

        Behaves_like<LoggingBehaviors<ConditionService>> correct_logging = () => { };

        It should_have_correct_total_count = () =>
            minMaxGroupPage.Paging.TotalCount.Should().Be(conditions.Count());

        It should_have_correct_offset = () =>
            minMaxGroupPage.Paging.Offset.Should().Be(Offset);

        It should_have_correct_limit = () =>
            minMaxGroupPage.Paging.Limit.Should().Be(Limit);

        It should_have_correct_condition_data = () =>
            minMaxGroupPage.MinMaxGroups.Should().BeEquivalentTo(minMaxGroups);
    }

    internal class When_GetMinMaxConditionsByWeek : ConditionServiceSpecs
    {
        Establish context = () =>
        {
            conditions = CreateCondition().ToList();
            var minMaxGroup = new MinMaxGroup
            {
                Month = conditions.First().Month,
                Day = conditions.First().Day
            };
            var minMaxGroupList = new List<MinMaxGroup>();
            minMaxGroupList.Add(minMaxGroup);
            minMaxGroups = minMaxGroupList;

            conditionRepositoryMock.Setup(x => x.GetMinMaxConditionsByWeek(0, 0, Offset, Limit)).Returns(Task.FromResult(minMaxGroups));

            conditionService = new ConditionService(conditionRepositoryMock.Object);
        };

        Because of = () =>
            minMaxGroupPage = conditionService.GetMinMaxConditionsByWeek(0, 0, Offset, Limit).Result;

        Behaves_like<LoggingBehaviors<ConditionService>> correct_logging = () => { };

        It should_have_correct_total_count = () =>
            minMaxGroupPage.Paging.TotalCount.Should().Be(conditions.Count());

        It should_have_correct_offset = () =>
            minMaxGroupPage.Paging.Offset.Should().Be(Offset);

        It should_have_correct_limit = () =>
            minMaxGroupPage.Paging.Limit.Should().Be(Limit);

        It should_have_correct_condition_data = () =>
            minMaxGroupPage.MinMaxGroups.Should().BeEquivalentTo(minMaxGroups);
    }

    internal class When_GetConditionsByDay : ConditionServiceSpecs
    {
        Establish context = () =>
        {
            conditions = CreateCondition();

            conditionRepositoryMock.Setup(x => x.GetYearCount()).Returns(Task.FromResult(conditions.Count()));
            conditionRepositoryMock.Setup(x => x.GetConditionsByDay(Offset, Limit, timePeriodModel)).Returns(Task.FromResult(conditions));

            conditionService = new ConditionService(conditionRepositoryMock.Object);
        };

        Because of = () =>
            conditionPage = conditionService.GetConditionsByDay(Offset, Limit, timePeriodModel).Result;

        Behaves_like<LoggingBehaviors<ConditionService>> correct_logging = () => { };

        It should_have_correct_total_count = () =>
            conditionPage.Paging.TotalCount.Should().Be(conditions.Count());

        It should_have_correct_offset = () =>
            conditionPage.Paging.Offset.Should().Be(Offset);

        It should_have_correct_limit = () =>
            conditionPage.Paging.Limit.Should().Be(Limit);

        It should_have_correct_condition_data = () =>
            conditionPage.Conditions.Should().BeEquivalentTo(conditions);
    }
}
using System;
using System.Collections.Generic;
using Loon.Core;
using Loon.Core.Geom;

namespace Loon.Utils
{
    /// <summary>
    /// Math helpers: rotated-bounds computation, 16.16 fixed-point arithmetic,
    /// table-driven sin/cos/atan2, random helpers and interpolation functions.
    /// Fixes in this revision (each marked FIX below):
    ///   - Sqrt(double) returned Math.Round(n) instead of a square root.
    ///   - AddZeros(string,int) threw on inputs already longer than numDigits.
    ///   - RandomBoolean() always returned false (Next upper bound is exclusive).
    ///   - Mid(i,min,max) did not clamp against max.
    /// </summary>
    public class MathUtils
    {
        // Axis-aligned bounds of a rectangle rotated (degrees) about its center,
        // written into 'result' when supplied, otherwise freshly allocated.
        public static RectBox GetBounds(float x, float y, float width, float height, float rotate, RectBox result)
        {
            int[] rect = GetLimit(x, y, width, height, rotate);
            if (result == null)
            {
                result = new RectBox(rect[0], rect[1], rect[2], rect[3]);
            }
            else
            {
                result.SetBounds(rect[0], rect[1], rect[2], rect[3]);
            }
            return result;
        }

        public static RectBox GetBounds(float x, float y, float width, float height, float rotate)
        {
            return GetBounds(x, y, width, height, rotate, null);
        }

        // Returns {x, y, w, h} of the axis-aligned box enclosing the rotated rectangle.
        public static int[] GetLimit(float x, float y, float width, float height, float rotate)
        {
            float rotation = MathUtils.ToRadians(rotate);
            float angSin = MathUtils.Sin(rotation);
            float angCos = MathUtils.Cos(rotation);
            int newW = MathUtils.Floor((width * MathUtils.Abs(angCos)) + (height * MathUtils.Abs(angSin)));
            int newH = MathUtils.Floor((height * MathUtils.Abs(angCos)) + (width * MathUtils.Abs(angSin)));
            int centerX = (int)(x + (width / 2));
            int centerY = (int)(y + (height / 2));
            int newX = (int)(centerX - (newW / 2));
            int newY = (int)(centerY - (newH / 2));
            return new int[] { newX, newY, newW, newH };
        }

        static private readonly string[] zeros = { "", "0", "00", "000", "0000", "00000",
            "000000", "0000000", "00000000", "000000000", "0000000000" };

        public static string AddZeros(long number, int numDigits)
        {
            return AddZeros(number.ToString(), numDigits);
        }

        /// <summary>
        /// Left-pads <paramref name="number"/> with zeros to <paramref name="numDigits"/>
        /// digits (supports at most 10 pad characters, the size of the zeros table).
        /// FIX: the original guard was <c>length != 0</c>, which indexed <c>zeros</c>
        /// with a negative value (IndexOutOfRangeException) whenever the input was
        /// already longer than <paramref name="numDigits"/>; now such inputs are
        /// returned unchanged.
        /// </summary>
        public static string AddZeros(string number, int numDigits)
        {
            int length = numDigits - number.Length;
            if (length > 0)
            {
                number = zeros[length] + number;
            }
            return number;
        }

        // NOTE(review): despite the name, this returns true when 'str' parses as a
        // numeric literal (decimal, hex 0x..., exponent, type suffix) — i.e. it is an
        // "is a number" check, not "is NaN". Kept as-is; confirm callers expect this.
        public static bool IsNan(string str)
        {
            if (StringUtils.IsEmpty(str))
            {
                return false;
            }
            char[] chars = str.ToCharArray();
            int sz = chars.Length;
            bool hasExp = false;
            bool hasDecPoint = false;
            bool allowSigns = false;
            bool foundDigit = false;
            int start = (chars[0] == '-') ? 1 : 0;
            if (sz > start + 1)
            {
                // Hexadecimal form: 0x followed by at least one hex digit.
                if (chars[start] == '0' && chars[start + 1] == 'x')
                {
                    int i = start + 2;
                    if (i == sz)
                    {
                        return false;
                    }
                    for (; i < chars.Length; i++)
                    {
                        if ((chars[i] < '0' || chars[i] > '9')
                                && (chars[i] < 'a' || chars[i] > 'f')
                                && (chars[i] < 'A' || chars[i] > 'F'))
                        {
                            return false;
                        }
                    }
                    return true;
                }
            }
            sz--; // last character is handled separately (may be a type suffix)
            int i_0 = start;
            while (i_0 < sz || (i_0 < sz + 1 && allowSigns && !foundDigit))
            {
                if (chars[i_0] >= '0' && chars[i_0] <= '9')
                {
                    foundDigit = true;
                    allowSigns = false;
                }
                else if (chars[i_0] == '.')
                {
                    if (hasDecPoint || hasExp)
                    {
                        // two decimal points or a point inside the exponent
                        return false;
                    }
                    hasDecPoint = true;
                }
                else if (chars[i_0] == 'e' || chars[i_0] == 'E')
                {
                    if (hasExp)
                    {
                        return false;
                    }
                    if (!foundDigit)
                    {
                        return false;
                    }
                    hasExp = true;
                    allowSigns = true;
                }
                else if (chars[i_0] == '+' || chars[i_0] == '-')
                {
                    if (!allowSigns)
                    {
                        return false;
                    }
                    allowSigns = false;
                    foundDigit = false; // the exponent needs its own digits
                }
                else
                {
                    return false;
                }
                i_0++;
            }
            if (i_0 < chars.Length)
            {
                if (chars[i_0] >= '0' && chars[i_0] <= '9')
                {
                    return true;
                }
                if (chars[i_0] == 'e' || chars[i_0] == 'E')
                {
                    // can't end with an exponent marker
                    return false;
                }
                if (!allowSigns
                        && (chars[i_0] == 'd' || chars[i_0] == 'D' || chars[i_0] == 'f' || chars[i_0] == 'F'))
                {
                    return foundDigit;
                }
                if (chars[i_0] == 'l' || chars[i_0] == 'L')
                {
                    // integer type suffix: no exponent allowed
                    return foundDigit && !hasExp;
                }
                return false;
            }
            return !allowSigns && foundDigit;
        }

        public const float PI_OVER2 = 1.5708f;
        public const float PI_OVER4 = 0.785398f;

        private const int BIG_ENOUGH_INT = 16 * 1024;
        private const double BIG_ENOUGH_FLOOR = BIG_ENOUGH_INT;
        private const double CEIL = 0.9999999d;
        // Largest double strictly below BIG_ENOUGH_INT + 1 (next-lower bit pattern).
        static private readonly double BIG_ENOUGH_CEIL = BitConverter.Int64BitsToDouble(BitConverter.DoubleToInt64Bits(BIG_ENOUGH_INT + 1) - 1);
        private const double BIG_ENOUGH_ROUND = BIG_ENOUGH_INT + 0.5f;

        private const int ATAN2_BITS = 7;
        private const int ATAN2_BITS2 = ATAN2_BITS << 1;
        private const int ATAN2_MASK = ~(-1 << ATAN2_BITS2);
        private const int ATAN2_COUNT = ATAN2_MASK + 1;
        static private readonly int ATAN2_DIM = (int)Math.Sqrt(ATAN2_COUNT);
        static private readonly float INV_ATAN2_DIM_MINUS_1 = 1.0f / (ATAN2_DIM - 1);
        static private readonly float[] atan2 = new float[ATAN2_COUNT];

        public const float PI = 3.1415927f;
        public const float TWO_PI = 6.28319f;

        private const int SIN_BITS = 13;
        private const int SIN_MASK = ~(-1 << SIN_BITS);
        private const int SIN_COUNT = SIN_MASK + 1;
        private const float radFull = PI * 2;
        private const float degFull = 360;
        private const float radToIndex = SIN_COUNT / radFull;
        private const float degToIndex = SIN_COUNT / degFull;

        public const float RAD_TO_DEG = 180.0f / PI;
        public const float DEG_TO_RAD = PI / 180.0f;

        public static readonly float[] sin = new float[SIN_COUNT];
        public static readonly float[] cos = new float[SIN_COUNT];

        // 16.16 fixed-point constants.
        public const int ZERO_FIXED = 0;
        public const int ONE_FIXED = 1 << 16;
        public static readonly int ONE_HALF_FIXED = FromFloat(0.5f);
        public const double EPSILON = 2.220446049250313E-16d;
        public static readonly int EPSILON_FIXED = FromFloat(0.002f);
        public const int PI_FIXED = 205887;
        public const int PI_OVER_2_FIXED = PI_FIXED / 2;
        public const int E_FIXED = 178145;
        // NOTE(review): 2 << 15 == 1 << 16 == ONE_FIXED, so this is 1.0 in 16.16,
        // not 0.5 as the name suggests. Preserved as-is (Ln depends on its value);
        // confirm against the upstream port before renaming or changing it.
        public const int HALF_FIXED = 2 << 15;

        public static bool IsZero(float value_ren)
        {
            return Math.Abs(value_ren) <= 0.00000001d;
        }

        // --- 16.16 fixed-point conversions and arithmetic ---

        public static int ToInt(int x) { return x >> 16; }

        public static double ToDouble(int x) { return (double)x / ONE_FIXED; }

        public static float ToFloat(int x) { return (float)x / ONE_FIXED; }

        public static int FromInt(int x) { return x << 16; }

        public static int FromFloat(float x) { return (int)(x * ONE_FIXED); }

        public static int FromDouble(double x) { return (int)(x * ONE_FIXED); }

        public static int Mul(int x, int y)
        {
            long z = (long)x * (long)y;
            return ((int)(z >> 16));
        }

        /// <summary>
        /// Middle value of the three arguments, i.e. <paramref name="i"/> clamped to
        /// [min, max]. FIX: the original computed <c>Max(i, Min(min, max))</c>, which
        /// (for min &lt;= max) reduces to <c>Max(i, min)</c> and never clamps against
        /// <paramref name="max"/>; this matches <see cref="BringToBounds(int,int,int)"/> now.
        /// </summary>
        public static int Mid(int i, int min, int max)
        {
            return MathUtils.Max(min, MathUtils.Min(i, max));
        }

        public static int Div(int x, int y)
        {
            long z = (((long)x) << 32);
            return (int)((z / y) >> 16);
        }

        /// <summary>
        /// Square root. FIX: the original body was <c>return Math.Round(n);</c> — an
        /// apparent copy/paste of <see cref="Round(double)"/> — which is not a square root.
        /// </summary>
        public static double Sqrt(double n)
        {
            return Math.Sqrt(n);
        }

        // Fixed-point square root via 8 Newton iterations.
        public static int Sqrt(int n)
        {
            int s = (n + 65536) >> 1;
            for (int i = 0; i < 8; i++)
            {
                s = (s + Div(n, s)) >> 1;
            }
            return s;
        }

        public static double Round(double n)
        {
            return Math.Round(n);
        }

        // Rounds a 16.16 fixed-point value to the nearest whole fixed-point integer,
        // rounding magnitudes away from zero at exactly .5.
        public static int Round(int n)
        {
            if (n > 0)
            {
                if ((n & 0x8000) != 0)
                {
                    return (((n + 0x10000) >> 16) << 16);
                }
                else
                {
                    return (((n) >> 16) << 16);
                }
            }
            else
            {
                int k;
                n = -n;
                if ((n & 0x8000) != 0)
                {
                    k = (((n + 0x10000) >> 16) << 16);
                }
                else
                {
                    k = (((n) >> 16) << 16);
                }
                return -k;
            }
        }

        // Approximate fixed-point equality within EPSILON_FIXED.
        public static bool Equal(int a, int b)
        {
            if (a > b)
            {
                return a - b <= EPSILON_FIXED;
            }
            else
            {
                return b - a <= EPSILON_FIXED;
            }
        }

        internal const int SK1 = 498;
        internal const int SK2 = 10882;

        // Fixed-point sine (argument in fixed-point radians, [0, 2*PI_FIXED]),
        // polynomial approximation after folding into the first quadrant.
        public static int Sin(int f)
        {
            int sign = 1;
            if ((f > PI_OVER_2_FIXED) && (f <= PI_FIXED))
            {
                f = PI_FIXED - f;
            }
            else if ((f > PI_FIXED) && (f <= (PI_FIXED + PI_OVER_2_FIXED)))
            {
                f = f - PI_FIXED;
                sign = -1;
            }
            else if (f > (PI_FIXED + PI_OVER_2_FIXED))
            {
                f = (PI_FIXED << 1) - f;
                sign = -1;
            }
            int sqr = Mul(f, f);
            int result = SK1;
            result = Mul(result, sqr);
            result -= SK2;
            result = Mul(result, sqr);
            result += ONE_FIXED;
            result = Mul(result, f);
            return sign * result;
        }

        internal const int CK1 = 2328;
        internal const int CK2 = 32551;

        // Fold radians into [-PI/2, PI/2] where the sine polynomial is accurate.
        private static double ReduceSinAngle(double radians)
        {
            radians %= System.Math.PI * 2.0d;
            if (Math.Abs(radians) > System.Math.PI)
            {
                radians = radians - (System.Math.PI * 2.0d);
            }
            if (Math.Abs(radians) > System.Math.PI / 2)
            {
                radians = System.Math.PI - radians;
            }
            return radians;
        }

        public static double Sin(double radians)
        {
            radians = ReduceSinAngle(radians);
            if (Math.Abs(radians) <= System.Math.PI / 4)
            {
                return System.Math.Sin(radians);
            }
            else
            {
                return System.Math.Cos(System.Math.PI / 2 - radians);
            }
        }

        public static double Cos(double radians)
        {
            return Sin(radians + System.Math.PI / 2);
        }

        // Fixed-point cosine, same quadrant folding as Sin(int).
        public static int Cos(int f)
        {
            int sign = 1;
            if ((f > PI_OVER_2_FIXED) && (f <= PI_FIXED))
            {
                f = PI_FIXED - f;
                sign = -1;
            }
            // The original wrote this condition as (f > PI_OVER_2_FIXED); because the
            // preceding else-if already consumed (PI/2, PI], the reachable range is
            // identical — normalized to (f > PI_FIXED) to match Sin(int).
            else if ((f > PI_FIXED) && (f <= (PI_FIXED + PI_OVER_2_FIXED)))
            {
                f = f - PI_FIXED;
                sign = -1;
            }
            else if (f > (PI_FIXED + PI_OVER_2_FIXED))
            {
                f = (PI_FIXED << 1) - f;
            }
            int sqr = Mul(f, f);
            int result = CK1;
            result = Mul(result, sqr);
            result -= CK2;
            result = Mul(result, sqr);
            result += ONE_FIXED;
            return result * sign;
        }

        internal const int TK1 = 13323;
        internal const int TK2 = 20810;

        public static int Tan(int f)
        {
            int sqr = Mul(f, f);
            int result = TK1;
            result = Mul(result, sqr);
            result += TK2;
            result = Mul(result, sqr);
            result += ONE_FIXED;
            result = Mul(result, f);
            return result;
        }

        public static int Atan(int f)
        {
            int sqr = Mul(f, f);
            int result = 1365;
            result = Mul(result, sqr);
            result -= 5579;
            result = Mul(result, sqr);
            result += 11805;
            result = Mul(result, sqr);
            result -= 21646;
            result = Mul(result, sqr);
            result += 65527;
            result = Mul(result, f);
            return result;
        }

        internal const int AS1 = -1228;
        internal const int AS2 = 4866;
        internal const int AS3 = 13901;
        internal const int AS4 = 102939;

        public static int Asin(int f)
        {
            int fRoot = Sqrt(ONE_FIXED - f);
            int result = AS1;
            result = Mul(result, f);
            result += AS2;
            result = Mul(result, f);
            result -= AS3;
            result = Mul(result, f);
            result += AS4;
            result = PI_OVER_2_FIXED - (Mul(fRoot, result));
            return result;
        }

        public static int Acos(int f)
        {
            int fRoot = Sqrt(ONE_FIXED - f);
            int result = AS1;
            result = Mul(result, f);
            result += AS2;
            result = Mul(result, f);
            result -= AS3;
            result = Mul(result, f);
            result += AS4;
            result = Mul(fRoot, result);
            return result;
        }

        static internal int[] log2arr = { 26573, 14624, 7719, 3973, 2017, 1016, 510,
            256, 128, 64, 32, 16, 8, 4, 2, 1, 0, 0, 0 };

        static internal int[] lnscale = { 0, 45426, 90852, 136278, 181704, 227130,
            272557, 317983, 363409, 408835, 454261, 499687, 545113, 590539, 635965,
            681391, 726817 };

        // Fixed-point natural logarithm via binary-log decomposition tables.
        public static int Ln(int x)
        {
            int shift = 0;
            while (x > 1 << 17)
            {
                shift++;
                x >>= 1;
            }
            int g = 0;
            int d = HALF_FIXED;
            for (int i = 1; i < 16; i++)
            {
                if (x > (ONE_FIXED + d))
                {
                    x = Div(x, (ONE_FIXED + d));
                    g += log2arr[i - 1];
                }
                d >>= 1;
            }
            return g + lnscale[shift];
        }

        // --- float wrappers around System.Math ---

        static public float Tan(float angle) { return (float)System.Math.Tan(angle); }

        static public float Asin(float value_ren) { return (float)System.Math.Asin(value_ren); }

        static public float Acos(float value_ren) { return (float)System.Math.Acos(value_ren); }

        static public float Atan(float value_ren) { return (float)System.Math.Atan(value_ren); }

        static public float Mag(float a, float b)
        {
            return (float)Math.Sqrt(a * a + b * b);
        }

        static public float Mag(float a, float b, float c)
        {
            return (float)Math.Sqrt(a * a + b * b + c * c);
        }

        static public float Dist(float x1, float y1, float x2, float y2)
        {
            return Sqrt(Sq(x2 - x1) + Sq(y2 - y1));
        }

        static public float Dist(float x1, float y1, float z1, float x2, float y2, float z2)
        {
            return Sqrt(Sq(x2 - x1) + Sq(y2 - y1) + Sq(z2 - z1));
        }

        static public double Abs(double n) { return Math.Abs(n); }

        static public float Abs(float n) { return (n < 0) ? -n : n; }

        static public int Abs(int n) { return (n < 0) ? -n : n; }

        static public float Sq(float a) { return a * a; }

        static public float Sqrt(float a) { return (float)Math.Sqrt(a); }

        static public float Log(float a) { return (float)Math.Log(a); }

        static public float Exp(float a) { return (float)Math.Exp(a); }

        static public float Pow(float a, float b) { return (float)Math.Pow(a, b); }

        static public int Max(int a, int b) { return (a > b) ? a : b; }

        static public float Max(float a, float b) { return (a > b) ? a : b; }

        static public long Max(long a, long b) { return (a > b) ? a : b; }

        static public int Max(int a, int b, int c)
        {
            return (a > b) ? ((a > c) ? a : c) : ((b > c) ? b : c);
        }

        static public float Max(float a, float b, float c)
        {
            return (a > b) ? ((a > c) ? a : c) : ((b > c) ? b : c);
        }

        static public float Min(float a, float b) { return (a <= b) ? a : b; }

        public static int Min(int a, int b) { return (a <= b) ? a : b; }

        // Normalizes value_ren from [start, stop] to [0, 1].
        static public float Norm(float value_ren, float start, float stop)
        {
            return (value_ren - start) / (stop - start);
        }

        // Linearly remaps value_ren from [istart, istop] to [ostart, ostop].
        static public float Map(float value_ren, float istart, float istop, float ostart, float ostop)
        {
            return ostart + (ostop - ostart) * ((value_ren - istart) / (istop - istart));
        }

        static public double Distance(Point p1, Point p2)
        {
            float dx = p1.x - p2.x;
            float dy = p1.y - p2.y;
            return Math.Sqrt(dx * dx + dy * dy);
        }

        static public float Degrees(float radians)
        {
            return radians * MathUtils.RAD_TO_DEG;
        }

        static public float Radians(float degrees)
        {
            return degrees * MathUtils.DEG_TO_RAD;
        }

        // Table lookups: fast, low-precision sin/cos (argument in radians/degrees).
        public static float Sin(float rad)
        {
            return sin[(int)(rad * radToIndex) & SIN_MASK];
        }

        public static float Cos(float rad)
        {
            return cos[(int)(rad * radToIndex) & SIN_MASK];
        }

        public static float SinDeg(float deg)
        {
            return sin[(int)(deg * degToIndex) & SIN_MASK];
        }

        public static float CosDeg(float deg)
        {
            return cos[(int)(deg * degToIndex) & SIN_MASK];
        }

        public static double Atan2(double y, double x)
        {
            if (y == 0.0D && x == 0.0D)
            {
                return System.Math.Atan2(0.0D, 1.0D);
            }
            else
            {
                return System.Math.Atan2(y, x);
            }
        }

        // Table-driven atan2 approximation (see atan2 table built in the static ctor).
        public static float Atan2(float y, float x)
        {
            float add, mul;
            if (x < 0)
            {
                if (y < 0)
                {
                    y = -y;
                    mul = 1;
                }
                else
                    mul = -1;
                x = -x;
                add = -3.141592653f;
            }
            else
            {
                if (y < 0)
                {
                    y = -y;
                    mul = -1;
                }
                else
                    mul = 1;
                add = 0;
            }
            float invDiv = 1 / (((x < y) ? y : x) * INV_ATAN2_DIM_MINUS_1);
            int xi = (int)(x * invDiv);
            int yi = (int)(y * invDiv);
            return (atan2[yi * ATAN2_DIM + xi] + add) * mul;
        }

        public static float RadToDeg(float rad)
        {
            return RAD_TO_DEG * rad;
        }

        public static int BringToBounds(int minValue, int maxValue, int v)
        {
            return Math.Max(minValue, Math.Min(maxValue, v));
        }

        public static float BringToBounds(float minValue, float maxValue, float v)
        {
            return Math.Max(minValue, Math.Min(maxValue, v));
        }

        // --- random helpers (backed by LSystem.random) ---

        public static int NextInt(int range)
        {
            return (range <= 0) ? 0 : LSystem.random.Next(range);
        }

        public static int NextInt(int start, int end)
        {
            return (end <= 0) ? 0 : start + LSystem.random.Next(end - start);
        }

        public static int Random(int range)
        {
            return LSystem.random.Next(range + 1);
        }

        public static int Random(int start, int end)
        {
            return start + LSystem.random.Next(end - start + 1);
        }

        /// <summary>
        /// Random coin flip. FIX: the original used <c>Next(0, 1)</c>; the upper bound
        /// of <c>System.Random.Next(min, max)</c> is exclusive, so it always returned
        /// 0 and this method always returned false.
        /// </summary>
        public static bool RandomBoolean()
        {
            return LSystem.random.Next(0, 2) == 1;
        }

        public static float Random()
        {
            return (float)LSystem.random.NextDouble();
        }

        public static float Random(float range)
        {
            return (float)LSystem.random.NextDouble() * range;
        }

        public static float Random(float start, float end)
        {
            return start + (float)LSystem.random.NextDouble() * (end - start);
        }

        // --- branch-free float->int rounding (valid for |x| < BIG_ENOUGH_INT) ---

        public static int Floor(float x)
        {
            return (int)(x + BIG_ENOUGH_FLOOR) - BIG_ENOUGH_INT;
        }

        public static int FloorPositive(float x)
        {
            return (int)x;
        }

        public static int Ceil(float x)
        {
            return (int)(x + BIG_ENOUGH_CEIL) - BIG_ENOUGH_INT;
        }

        public static int CeilPositive(float x)
        {
            return (int)(x + CEIL);
        }

        public static int Round(float x)
        {
            return (int)(x + BIG_ENOUGH_ROUND) - BIG_ENOUGH_INT;
        }

        public static int RoundPositive(float x)
        {
            return (int)(x + 0.5f);
        }

        // --- interpolation ---

        public static float Barycentric(float value1, float value2, float value3, float amount1, float amount2)
        {
            return value1 + (value2 - value1) * amount1 + (value3 - value1) * amount2;
        }

        public static float CatmullRom(float value1, float value2, float value3, float value4, float amount)
        {
            double amountSquared = amount * amount;
            double amountCubed = amountSquared * amount;
            return (float)(0.5d * (2.0d * value2 + (value3 - value1) * amount
                    + (2.0d * value1 - 5.0d * value2 + 4.0d * value3 - value4) * amountSquared
                    + (3.0d * value2 - value1 - 3.0d * value3 + value4) * amountCubed));
        }

        public static float Clamp(float value_ren, float min, float max)
        {
            value_ren = (value_ren > max) ? max : value_ren;
            value_ren = (value_ren < min) ? min : value_ren;
            return value_ren;
        }

        public static float Distance(float value1, float value2)
        {
            return Math.Abs(value1 - value2);
        }

        // Cubic Hermite spline between value1/value2 with the given tangents.
        public static float Hermite(float value1, float tangent1, float value2, float tangent2, float amount)
        {
            double v1 = value1, v2 = value2, t1 = tangent1, t2 = tangent2, s = amount, result;
            double sCubed = s * s * s;
            double sSquared = s * s;
            if (amount == 0f)
            {
                result = value1;
            }
            else if (amount == 1f)
            {
                result = value2;
            }
            else
            {
                result = (2 * v1 - 2 * v2 + t2 + t1) * sCubed
                        + (3 * v2 - 3 * v1 - 2 * t1 - t2) * sSquared
                        + t1 * s
                        + v1;
            }
            return (float)result;
        }

        public static float Lerp(float value1, float value2, float amount)
        {
            return value1 + (value2 - value1) * amount;
        }

        public static float SmoothStep(float value1, float value2, float amount)
        {
            float result = Clamp(amount, 0f, 1f);
            result = Hermite(value1, 0f, value2, 0f, result);
            return result;
        }

        public static float ToDegrees(float radians)
        {
            return (float)(radians * 57.295779513082320876798154814105d);
        }

        public static float ToRadians(float degrees)
        {
            return (float)(degrees * 0.017453292519943295769236907684886d);
        }

        // Wraps an angle into (-PI, PI].
        public static float WrapAngle(float angle)
        {
            angle = (float)System.Math.IEEERemainder((double)angle, 6.2831854820251465d);
            if (angle <= -3.141593f)
            {
                angle += 6.283185f;
                return angle;
            }
            if (angle > 3.141593f)
            {
                angle -= 6.283185f;
            }
            return angle;
        }

        // Populates the sin/cos lookup tables (with exact values at the quadrant
        // boundaries) and the atan2 lookup grid.
        static MathUtils()
        {
            for (int i = 0; i < SIN_COUNT; i++)
            {
                float a = (i + 0.5f) / SIN_COUNT * radFull;
                sin[i] = (float)Math.Sin(a);
                cos[i] = (float)Math.Cos(a);
            }
            for (int i = 0; i < 360; i += 90)
            {
                sin[(int)(i * degToIndex) & SIN_MASK] = (float)Math.Sin(i * DEG_TO_RAD);
                cos[(int)(i * degToIndex) & SIN_MASK] = (float)Math.Cos(i * DEG_TO_RAD);
            }
            for (int i = 0; i < ATAN2_DIM; i++)
            {
                for (int j = 0; j < ATAN2_DIM; j++)
                {
                    float x0 = (float)i / ATAN2_DIM;
                    float y0 = (float)j / ATAN2_DIM;
                    atan2[j * ATAN2_DIM + i] = (float)System.Math.Atan2(y0, x0);
                }
            }
        }
    }
}
// Unit tests for the CA1816 "Call GC.SuppressFinalize correctly" analyzer.
// Each [Fact] embeds a small C# or VB source in a verbatim string and either
// expects no diagnostic or expects a specific rule (NotCalledRule,
// NotCalledWithFinalizerRule, OutsideDisposeRule, NotPassedThisRule) at a
// hard-coded line/column in that embedded source.
// NOTE(review): this source has been collapsed onto long physical lines; the
// embedded @"..." test sources and the expected line/column numbers depend on
// the exact whitespace inside those strings, so the code below is left
// byte-identical and only this header comment is added.
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.Diagnostics; using Test.Utilities; using Xunit; namespace Microsoft.NetCore.Analyzers.Runtime.UnitTests { public class CallGCSuppressFinalizeCorrectlyTests : DiagnosticAnalyzerTestBase { private const string GCSuppressFinalizeMethodSignature_CSharp = "GC.SuppressFinalize(object)"; private const string GCSuppressFinalizeMethodSignature_Basic = "GC.SuppressFinalize(Object)"; private static DiagnosticResult GetCA1816CSharpResultAt(int line, int column, DiagnosticDescriptor rule, string containingMethodName, string gcSuppressFinalizeMethodName) { return GetCSharpResultAt(line, column, rule, containingMethodName, gcSuppressFinalizeMethodName); } private static DiagnosticResult GetCA1816BasicResultAt(int line, int column, DiagnosticDescriptor rule, string containingMethodName, string gcSuppressFinalizeMethodName) { return GetBasicResultAt(line, column, rule, containingMethodName, gcSuppressFinalizeMethodName); } protected override DiagnosticAnalyzer GetBasicDiagnosticAnalyzer() { return new CallGCSuppressFinalizeCorrectlyAnalyzer(); } protected override DiagnosticAnalyzer GetCSharpDiagnosticAnalyzer() { return new CallGCSuppressFinalizeCorrectlyAnalyzer(); } #region NoDiagnosticCases [Fact] public void DisposableWithoutFinalizer_CSharp_NoDiagnostic() { this.PrintActualDiagnosticsOnFailure = true; var code = @" using System; using System.ComponentModel; public class DisposableWithoutFinalizer : IDisposable { public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } }"; VerifyCSharp(code); } [Fact] public void DisposableWithoutFinalizer_Basic_NoDiagnostic() { var code = @" Imports System Imports System.ComponentModel Public Class 
DisposableWithoutFinalizer Implements IDisposable Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) GC.SuppressFinalize(Me) End Sub Protected Overridable Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class"; VerifyBasic(code); } [Fact] public void DisposableWithFinalizer_CSharp_NoDiagnostic() { var code = @" using System; using System.ComponentModel; public class DisposableWithFinalizer : IDisposable { ~DisposableWithFinalizer() { Dispose(false); } public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } }"; VerifyCSharp(code); } [Fact] public void DisposableWithFinalizer_Basic_NoDiagnostic() { var code = @" Imports System Imports System.ComponentModel Public Class DisposableWithFinalizer Implements IDisposable Protected Overrides Sub Finalize() Try Dispose(False) Finally MyBase.Finalize() End Try End Sub Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) GC.SuppressFinalize(Me) End Sub Protected Overridable Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class"; VerifyBasic(code); } [Fact] public void SealedDisposableWithoutFinalizer_CSharp_NoDiagnostic() { var code = @" using System; using System.ComponentModel; public sealed class SealedDisposableWithoutFinalizer : IDisposable { public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } private void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } }"; VerifyCSharp(code); } [Fact] public void SealedDisposableWithoutFinalizer_Basic_NoDiagnostic() { var code = @" Imports System Imports System.ComponentModel Public NotInheritable Class SealedDisposableWithoutFinalizer Implements IDisposable Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) GC.SuppressFinalize(Me) End Sub Private Sub Dispose(disposing As Boolean) 
Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class"; VerifyBasic(code); } [Fact] public void SealedDisposableWithFinalizer_CSharp_NoDiagnostic() { var code = @" using System; using System.ComponentModel; public sealed class SealedDisposableWithFinalizer : IDisposable { ~SealedDisposableWithFinalizer() { Dispose(false); } public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } private void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } }"; VerifyCSharp(code); } [Fact] public void SealedDisposableWithFinalizer_Basic_NoDiagnostic() { var code = @" Imports System Imports System.ComponentModel Public NotInheritable Class SealedDisposableWithFinalizer Implements IDisposable Protected Overrides Sub Finalize() Try Dispose(False) Finally MyBase.Finalize() End Try End Sub Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) GC.SuppressFinalize(Me) End Sub Private Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class"; VerifyBasic(code); } [Fact] public void InternalDisposableWithoutFinalizer_CSharp_NoDiagnostic() { var code = @" using System; using System.ComponentModel; internal class InternalDisposableWithoutFinalizer : IDisposable { public void Dispose() { Dispose(true); // GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } }"; VerifyCSharp(code); } [Fact] public void InternalDisposableWithoutFinalizer_Basic_NoDiagnostic() { var code = @" Imports System Imports System.ComponentModel Friend Class InternalDisposableWithoutFinalizer Implements IDisposable Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) ' GC.SuppressFinalize(this); End Sub Protected Overridable Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class"; VerifyBasic(code); } [Fact] public void 
PrivateDisposableWithoutFinalizer_CSharp_NoDiagnostic() { var code = @" using System; using System.ComponentModel; public static class NestedClassHolder { private class PrivateDisposableWithoutFinalizer : IDisposable { public void Dispose() { Dispose(true); // GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } } }"; VerifyCSharp(code); } [Fact] public void PrivateDisposableWithoutFinalizer_Basic_NoDiagnostic() { var code = @" Imports System Imports System.ComponentModel Public NotInheritable Class NestedClassHolder Private Sub New() End Sub Private Class PrivateDisposableWithoutFinalizer Implements IDisposable Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) ' GC.SuppressFinalize(this); End Sub Protected Overridable Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class End Class"; VerifyBasic(code); } [Fact] public void SealedDisposableWithoutFinalizerAndWithoutCallingSuppressFinalize_CSharp_NoDiagnostic() { var code = @" using System; using System.ComponentModel; public sealed class SealedDisposableWithoutFinalizerAndWithoutCallingSuppressFinalize : IDisposable { public void Dispose() { Dispose(true); } private void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } }"; VerifyCSharp(code); } [Fact] public void SealedDisposableWithoutFinalizerAndWithoutCallingSuppressFinalize_Basic_NoDiagnostic() { var code = @" Imports System Imports System.ComponentModel Public NotInheritable Class SealedDisposableWithoutFinalizerAndWithoutCallingSuppressFinalize Implements IDisposable Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) End Sub Private Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class"; VerifyBasic(code); } [Fact] public void DisposableStruct_CSharp_NoDiagnostic() { var code = @" using System; using 
System.ComponentModel; public struct DisposableStruct : IDisposable { public void Dispose() { Dispose(true); } private void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } }"; VerifyCSharp(code); } [Fact] public void DisposableStruct_Basic_NoDiagnostic() { var code = @" Imports System Imports System.ComponentModel Public Structure DisposableStruct Implements IDisposable Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) End Sub Private Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Structure"; VerifyBasic(code); } [Fact] public void SealedDisposableCallingGCSuppressFinalizeInConstructor_CSharp_NoDiagnostic() { var code = @" using System; using System.ComponentModel; public sealed class SealedDisposableCallingGCSuppressFinalizeInConstructor : Component { public SealedDisposableCallingGCSuppressFinalizeInConstructor() { // We don't ever want our finalizer (that we inherit from Component) to run // (We are sealed and we don't own any unmanaged resources). GC.SuppressFinalize(this); } }"; VerifyCSharp(code); } [Fact] public void SealedDisposableCallingGCSuppressFinalizeInConstructor_Basic_NoDiagnostic() { var code = @" Imports System Imports System.ComponentModel Public NotInheritable Class SealedDisposableCallingGCSuppressFinalizeInConstructor Inherits Component Public Sub New() ' We don't ever want our finalizer (that we inherit from Component) to run ' (We are sealed and we don't own any unmanaged resources). 
GC.SuppressFinalize(Me) End Sub End Class"; VerifyBasic(code); } #endregion #region DiagnosticCases [Fact] public void SealedDisposableWithFinalizer_CSharp_Diagnostic() { var code = @" using System; using System.ComponentModel; public class SealedDisposableWithFinalizer : IDisposable { public static void Main(string[] args) { } ~SealedDisposableWithFinalizer() { Dispose(false); } public void Dispose() { Dispose(true); // GC.SuppressFinalize(this); } private void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } }"; var diagnosticResult = GetCA1816CSharpResultAt( line: 17, column: 21, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule, containingMethodName: "SealedDisposableWithFinalizer.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp); VerifyCSharp(code, diagnosticResult); } [Fact] public void SealedDisposableWithFinalizer_Basic_Diagnostic() { var code = @" Imports System Imports System.ComponentModel Public Class SealedDisposableWithFinalizer Implements IDisposable Public Shared Sub Main(args As String()) End Sub Protected Overrides Sub Finalize() Try Dispose(False) Finally MyBase.Finalize() End Try End Sub Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) ' GC.SuppressFinalize(this); End Sub Private Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class"; var diagnosticResult = GetCA1816BasicResultAt( line: 19, column: 13, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule, containingMethodName: "SealedDisposableWithFinalizer.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic); VerifyBasic(code, diagnosticResult); } [Fact] public void DisposableWithFinalizer_CSharp_Diagnostic() { var code = @" using System; using System.ComponentModel; public class DisposableWithFinalizer : IDisposable { ~DisposableWithFinalizer() { Dispose(false); } public void Dispose() { 
Dispose(true); // GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } }"; var diagnosticResult = GetCA1816CSharpResultAt( line: 12, column: 17, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule, containingMethodName: "DisposableWithFinalizer.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp); VerifyCSharp(code, diagnosticResult); } [Fact] public void DisposableWithFinalizer_Basic_Diagnostic() { var code = @" Imports System Imports System.ComponentModel Public Class DisposableWithFinalizer Implements IDisposable Protected Overrides Sub Finalize() Try Dispose(False) Finally MyBase.Finalize() End Try End Sub Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) ' GC.SuppressFinalize(this); End Sub Protected Overridable Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class"; var diagnosticResult = GetCA1816BasicResultAt( line: 15, column: 13, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule, containingMethodName: "DisposableWithFinalizer.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic); VerifyBasic(code, diagnosticResult); } [Fact] public void InternalDisposableWithFinalizer_CSharp_Diagnostic() { var code = @" using System; using System.ComponentModel; internal class InternalDisposableWithFinalizer : IDisposable { ~InternalDisposableWithFinalizer() { Dispose(false); } public void Dispose() { Dispose(true); // GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } }"; var diagnosticResult = GetCA1816CSharpResultAt( line: 12, column: 17, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule, containingMethodName: "InternalDisposableWithFinalizer.Dispose()", gcSuppressFinalizeMethodName: 
GCSuppressFinalizeMethodSignature_CSharp); VerifyCSharp(code, diagnosticResult); } [Fact] public void InternalDisposableWithFinalizer_Basic_Diagnostic() { var code = @" Imports System Imports System.ComponentModel Friend Class InternalDisposableWithFinalizer Implements IDisposable Protected Overrides Sub Finalize() Try Dispose(False) Finally MyBase.Finalize() End Try End Sub Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) ' GC.SuppressFinalize(this); End Sub Protected Overridable Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class"; var diagnosticResult = GetCA1816BasicResultAt( line: 15, column: 13, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule, containingMethodName: "InternalDisposableWithFinalizer.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic); VerifyBasic(code, diagnosticResult); } [Fact] public void PrivateDisposableWithFinalizer_CSharp_Diagnostic() { var code = @" using System; using System.ComponentModel; public static class NestedClassHolder { private class PrivateDisposableWithFinalizer : IDisposable { ~PrivateDisposableWithFinalizer() { Dispose(false); } public void Dispose() { Dispose(true); // GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } } }"; var diagnosticResult = GetCA1816CSharpResultAt( line: 14, column: 21, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule, containingMethodName: "NestedClassHolder.PrivateDisposableWithFinalizer.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp); VerifyCSharp(code, diagnosticResult); } [Fact] public void PrivateDisposableWithFinalizer_Basic_Diagnostic() { var code = @" Imports System Imports System.ComponentModel Public NotInheritable Class NestedClassHolder Private Sub New() End Sub Private Class PrivateDisposableWithFinalizer Implements 
IDisposable Protected Overrides Sub Finalize() Try Dispose(False) Finally MyBase.Finalize() End Try End Sub Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) ' GC.SuppressFinalize(this); End Sub Protected Overridable Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class End Class"; var diagnosticResult = GetCA1816BasicResultAt( line: 18, column: 14, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledWithFinalizerRule, containingMethodName: "NestedClassHolder.PrivateDisposableWithFinalizer.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic); VerifyBasic(code, diagnosticResult); } [Fact] public void DisposableWithoutFinalizer_CSharp_Diagnostic() { var code = @" using System; using System.ComponentModel; public class DisposableWithoutFinalizer : IDisposable { public void Dispose() { Dispose(true); // GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { Console.WriteLine(this); Console.WriteLine(disposing); } }"; var diagnosticResult = GetCA1816CSharpResultAt( line: 7, column: 17, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule, containingMethodName: "DisposableWithoutFinalizer.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp); VerifyCSharp(code, diagnosticResult); } [Fact] public void DisposableWithoutFinalizer_Basic_Diagnostic() { var code = @" Imports System Imports System.ComponentModel Public Class DisposableWithoutFinalizer Implements IDisposable Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) ' GC.SuppressFinalize(this); End Sub Protected Overridable Sub Dispose(disposing As Boolean) Console.WriteLine(Me) Console.WriteLine(disposing) End Sub End Class"; var diagnosticResult = GetCA1816BasicResultAt( line: 7, column: 13, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule, containingMethodName: "DisposableWithoutFinalizer.Dispose()", gcSuppressFinalizeMethodName: 
GCSuppressFinalizeMethodSignature_Basic); VerifyBasic(code, diagnosticResult); } [Fact] public void DisposableComponent_CSharp_Diagnostic() { var code = @" using System; using System.ComponentModel; public class DisposableComponent : Component, IDisposable { public void Dispose() { Dispose(true); // GC.SuppressFinalize(this); } }"; var diagnosticResult = GetCA1816CSharpResultAt( line: 7, column: 17, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule, containingMethodName: "DisposableComponent.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp); VerifyCSharp(code, diagnosticResult); } [Fact] public void DisposableComponent_Basic_Diagnostic() { var code = @" Imports System Imports System.ComponentModel Public Class DisposableComponent Inherits Component Implements IDisposable Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) ' GC.SuppressFinalize(this); End Sub Protected Overridable Sub Dispose(disposing As Boolean) End Sub End Class"; var diagnosticResult = GetCA1816BasicResultAt( line: 8, column: 13, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule, containingMethodName: "DisposableComponent.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic); VerifyBasic(code, diagnosticResult); } [Fact] public void NotADisposableClass_CSharp_Diagnostic() { var code = @" using System; using System.ComponentModel; public class NotADisposableClass { public NotADisposableClass() { GC.SuppressFinalize(this); } }"; var diagnosticResult = GetCA1816CSharpResultAt( line: 9, column: 9, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule, containingMethodName: "NotADisposableClass.NotADisposableClass()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp); VerifyCSharp(code, diagnosticResult); } [Fact] public void NotADisposableClass_Basic_Diagnostic() { var code = @" Imports System Imports System.ComponentModel Public Class NotADisposableClass Public Sub New() 
GC.SuppressFinalize(Me) End Sub End Class"; var diagnosticResult = GetCA1816BasicResultAt( line: 7, column: 3, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule, containingMethodName: "NotADisposableClass.New()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic); VerifyBasic(code, diagnosticResult); } [Fact] public void DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces_CSharp_Diagnostic() { var code = @" using System; using System.ComponentModel; public class DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces : IDisposable { public DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces() { GC.SuppressFinalize(this); } public void Dispose() { Dispose(true); CallGCSuppressFinalize(); } private void CallGCSuppressFinalize() { GC.SuppressFinalize(this); } protected virtual void Dispose(bool disposing) { if (disposing) { Console.WriteLine(this); GC.SuppressFinalize(this); } } }"; var diagnosticResult1 = GetCA1816CSharpResultAt( line: 9, column: 9, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule, containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp); var diagnosticResult2 = GetCA1816CSharpResultAt( line: 12, column: 17, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule, containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp); var diagnosticResult3 = GetCA1816CSharpResultAt( line: 20, column: 9, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule, containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.CallGCSuppressFinalize()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp); var diagnosticResult4 = GetCA1816CSharpResultAt( line: 28, column: 13, rule: 
CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule, containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.Dispose(bool)", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp); VerifyCSharp(code, diagnosticResult1, diagnosticResult2, diagnosticResult3, diagnosticResult4); } [Fact] public void DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces_Basic_Diagnostic() { var code = @" Imports System Imports System.ComponentModel Public Class DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces Implements IDisposable Public Sub New() GC.SuppressFinalize(Me) End Sub Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) CallGCSuppressFinalize() End Sub Private Sub CallGCSuppressFinalize() GC.SuppressFinalize(Me) End Sub Protected Overridable Sub Dispose(disposing As Boolean) If disposing Then Console.WriteLine(Me) GC.SuppressFinalize(Me) End If End Sub End Class"; var diagnosticResult1 = GetCA1816BasicResultAt( line: 8, column: 3, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule, containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.New()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic); var diagnosticResult2 = GetCA1816BasicResultAt( line: 11, column: 13, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotCalledRule, containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic); var diagnosticResult3 = GetCA1816BasicResultAt( line: 17, column: 3, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule, containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.CallGCSuppressFinalize()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic); var diagnosticResult4 = GetCA1816BasicResultAt( line: 23, column: 4, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.OutsideDisposeRule, 
containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeInTheWrongPlaces.Dispose(Boolean)", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic); VerifyBasic(code, diagnosticResult1, diagnosticResult2, diagnosticResult3, diagnosticResult4); } [Fact] public void DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments_CSharp_Diagnostic() { var code = @" using System; using System.ComponentModel; public class DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments : IDisposable { public DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments() { } public void Dispose() { Dispose(true); GC.SuppressFinalize(true); } protected virtual void Dispose(bool disposing) { if (disposing) { Console.WriteLine(this); } } }"; var diagnosticResult = GetCA1816CSharpResultAt( line: 14, column: 9, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotPassedThisRule, containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_CSharp); VerifyCSharp(code, diagnosticResult); } [Fact] public void DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments_Basic_Diagnostic() { var code = @" Imports System Imports System.ComponentModel Public Class DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments Implements IDisposable Public Sub New() End Sub Public Sub Dispose() Implements IDisposable.Dispose Dispose(True) GC.SuppressFinalize(True) End Sub Protected Overridable Sub Dispose(disposing As Boolean) If disposing Then Console.WriteLine(Me) End If End Sub End Class"; var diagnosticResult = GetCA1816BasicResultAt( line: 12, column: 3, rule: CallGCSuppressFinalizeCorrectlyAnalyzer.NotPassedThisRule, containingMethodName: "DisposableClassThatCallsGCSuppressFinalizeWithTheWrongArguments.Dispose()", gcSuppressFinalizeMethodName: GCSuppressFinalizeMethodSignature_Basic); VerifyBasic(code, diagnosticResult); } #endregion } }
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
using System.Web;
using System.Web.Mvc;
using System.Web.Mvc.Html;

namespace CheckBoxList.Mvc.Html
{
    /// <summary>
    /// HtmlHelper extensions that render a group of checkboxes, either from an
    /// explicit, model-bindable <c>CheckBoxListItem</c> collection or from a
    /// set of enum values.
    /// </summary>
    public static class CheckBoxListExtensions
    {
        /// <summary>Renders a checkbox list for the given items.</summary>
        public static MvcHtmlString CheckBoxList(this HtmlHelper htmlHelper, string name, IEnumerable<CheckBoxListItem> checkboxList)
        {
            return CheckBoxListHelper(htmlHelper, name, checkboxList, null);
        }

        /// <summary>Renders a checkbox list; HTML attributes given as an anonymous object.</summary>
        public static MvcHtmlString CheckBoxList(this HtmlHelper htmlHelper, string name, IEnumerable<CheckBoxListItem> checkboxList, object htmlAttributes)
        {
            return CheckBoxList(htmlHelper, name, checkboxList, HtmlHelper.AnonymousObjectToHtmlAttributes(htmlAttributes));
        }

        /// <summary>Renders a checkbox list; HTML attributes given as a dictionary.</summary>
        public static MvcHtmlString CheckBoxList(this HtmlHelper htmlHelper, string name, IEnumerable<CheckBoxListItem> checkboxList, IDictionary<string, object> htmlAttributes)
        {
            return CheckBoxListHelper(htmlHelper, name, checkboxList, htmlAttributes);
        }

        /// <summary>Strongly typed variant bound to a model property.</summary>
        public static MvcHtmlString CheckBoxListFor<TModel, TProperty>(this HtmlHelper<TModel> htmlHelper, Expression<Func<TModel, TProperty>> expression)
            where TProperty : IEnumerable<CheckBoxListItem>
        {
            return CheckBoxListFor(htmlHelper, expression, null);
        }

        /// <summary>Strongly typed variant; HTML attributes given as an anonymous object.</summary>
        public static MvcHtmlString CheckBoxListFor<TModel, TProperty>(this HtmlHelper<TModel> htmlHelper, Expression<Func<TModel, TProperty>> expression, object htmlAttributes)
            where TProperty : IEnumerable<CheckBoxListItem>
        {
            return CheckBoxListFor(htmlHelper, expression, HtmlHelper.AnonymousObjectToHtmlAttributes(htmlAttributes));
        }

        /// <summary>Strongly typed variant; HTML attributes given as a dictionary.</summary>
        /// <exception cref="ArgumentNullException">When <paramref name="expression"/> is null.</exception>
        public static MvcHtmlString CheckBoxListFor<TModel, TProperty>(this HtmlHelper<TModel> htmlHelper, Expression<Func<TModel, TProperty>> expression, IDictionary<string, object> htmlAttributes)
            where TProperty : IEnumerable<CheckBoxListItem>
        {
            if (expression == null)
                throw new ArgumentNullException("expression");

            var name = ExpressionHelper.GetExpressionText(expression);
            var func = expression.Compile();
            var checkboxList = func(htmlHelper.ViewData.Model) as IEnumerable<CheckBoxListItem>;

            return CheckBoxListHelper(htmlHelper, name, checkboxList, htmlAttributes);
        }

        /// <summary>Renders one checkbox per member of enum type T.</summary>
        public static MvcHtmlString EnumCheckBoxList<T>(this HtmlHelper htmlHelper, string name, IEnumerable<T> list) where T : struct
        {
            return EnumCheckBoxList(htmlHelper, name, list, null);
        }

        /// <summary>Renders one checkbox per member of enum type T; attributes as an anonymous object.</summary>
        public static MvcHtmlString EnumCheckBoxList<T>(this HtmlHelper htmlHelper, string name, IEnumerable<T> list, object htmlAttributes) where T : struct
        {
            return EnumCheckBoxList(htmlHelper, name, list, HtmlHelper.AnonymousObjectToHtmlAttributes(htmlAttributes));
        }

        /// <summary>
        /// Renders one checkbox per member of enum type T; a box is checked when
        /// its value appears in <paramref name="list"/>.
        /// </summary>
        /// <exception cref="ArgumentException">When T is not an enum type.</exception>
        public static MvcHtmlString EnumCheckBoxList<T>(this HtmlHelper htmlHelper, string name, IEnumerable<T> list, IDictionary<string, object> htmlAttributes) where T : struct
        {
            if (!typeof(T).IsEnum)
                throw new ArgumentException("T must be an enum type");

            // Each tuple carries (display text, numeric value, is-selected).
            var tupleList = new List<Tuple<string, int, bool>>();
            foreach (var value in Enum.GetValues(typeof(T)).Cast<T>())
            {
                var selected = list.Contains(value);
                tupleList.Add(new Tuple<string, int, bool>(GetDisplayName(value), Convert.ToInt32(value), selected));
            }

            return EnumCheckBoxListHelper(htmlHelper, name, tupleList, htmlAttributes);
        }

        /// <summary>Strongly typed enum variant bound to a model property.</summary>
        public static MvcHtmlString EnumCheckBoxListFor<TModel, TProperty>(this HtmlHelper<TModel> htmlHelper, Expression<Func<TModel, TProperty>> expression) where TProperty : IEnumerable
        {
            return EnumCheckBoxListFor(htmlHelper, expression, null);
        }

        /// <summary>Strongly typed enum variant; attributes as an anonymous object.</summary>
        public static MvcHtmlString EnumCheckBoxListFor<TModel, TProperty>(this HtmlHelper<TModel> htmlHelper, Expression<Func<TModel, TProperty>> expression, object htmlAttributes) where TProperty : IEnumerable
        {
            return EnumCheckBoxListFor(htmlHelper, expression, HtmlHelper.AnonymousObjectToHtmlAttributes(htmlAttributes));
        }

        /// <summary>Strongly typed enum variant; attributes as a dictionary.</summary>
        /// <exception cref="ArgumentNullException">When <paramref name="expression"/> is null.</exception>
        /// <exception cref="ArgumentException">When the bound property is not a list of an enum type.</exception>
        public static MvcHtmlString EnumCheckBoxListFor<TModel, TProperty>(this HtmlHelper<TModel> htmlHelper, Expression<Func<TModel, TProperty>> expression, IDictionary<string, object> htmlAttributes) where TProperty : IEnumerable
        {
            if (expression == null)
                throw new ArgumentNullException("expression");

            var name = ExpressionHelper.GetExpressionText(expression);
            var func = expression.Compile();
            var enumList = func(htmlHelper.ViewData.Model);

            // Element type: generic argument for List<T>/IEnumerable<T>, element type for arrays.
            var enumType = enumList.GetType().IsGenericType
                ? enumList.GetType().GetGenericArguments()[0]
                : enumList.GetType().GetElementType();

            if (!enumType.IsEnum)
                throw new ArgumentException("Must be a list of enum type");

            var tupleList = new List<Tuple<string, int, bool>>();
            foreach (var value in Enum.GetValues(enumType))
            {
                var selected = enumList.Cast<object>().Any(s => s.ToString() == value.ToString());
                // FIX: Convert.ToInt32 instead of a direct (int) unbox cast, which
                // throws InvalidCastException for enums whose underlying type is not
                // int (byte, long, ...). Also consistent with EnumCheckBoxList<T>.
                tupleList.Add(new Tuple<string, int, bool>(GetDisplayName(value), Convert.ToInt32(value), selected));
            }

            return EnumCheckBoxListHelper(htmlHelper, name, tupleList, htmlAttributes);
        }

        // Builds the outer <div> container for the CheckBoxListItem-based overloads.
        private static MvcHtmlString CheckBoxListHelper(HtmlHelper htmlHelper, string name, IEnumerable<CheckBoxListItem> checkboxList, IDictionary<string, object> htmlAttributes)
        {
            var fullName = htmlHelper.ViewContext.ViewData.TemplateInfo.GetFullHtmlFieldName(name);
            if (string.IsNullOrEmpty(fullName))
            {
                // FIX (CA2208): the parameter name used to be passed as the exception
                // *message*; supply a real message plus the parameter name.
                throw new ArgumentException("Value cannot be null or empty.", "name");
            }

            var listItemBuilder = BuildCheckBoxListItems(htmlHelper, name, checkboxList.ToList());

            var tagBuilder = new TagBuilder("div") { InnerHtml = listItemBuilder.ToString() };
            tagBuilder.MergeAttributes(htmlAttributes);
            tagBuilder.GenerateId(fullName);

            return new MvcHtmlString(tagBuilder.ToString(TagRenderMode.Normal));
        }

        // Emits one <div><label>checkbox + text</label> + hidden Text/Value fields</div>
        // per item, with indexed names so default MVC model binding round-trips the list.
        private static StringBuilder BuildCheckBoxListItems(this HtmlHelper htmlHelper, string name, IList<CheckBoxListItem> list)
        {
            var listItemBuilder = new StringBuilder();

            // FIX: use the IList Count property rather than the LINQ Count() extension.
            for (var i = 0; i < list.Count; i++)
            {
                var item = list[i];
                var checkbox = htmlHelper.CheckBox(GetChildControlName(name, i, "IsChecked"), item.IsChecked);
                var text = htmlHelper.Hidden(GetChildControlName(name, i, "Text"), item.Text);
                var value = htmlHelper.Hidden(GetChildControlName(name, i, "Value"), item.Value);

                var sb = new StringBuilder();
                sb.AppendLine("<div>");
                sb.AppendLine("<label>");
                sb.AppendLine(checkbox.ToHtmlString());
                sb.AppendLine(HttpUtility.HtmlEncode(item.Text));
                sb.AppendLine("</label>");
                sb.AppendLine(text.ToHtmlString());
                sb.AppendLine(value.ToHtmlString());
                sb.AppendLine("</div>");

                listItemBuilder.AppendLine(sb.ToString());
            }

            return listItemBuilder;
        }

        // "parent[i].Child" — the name shape expected by the default model binder.
        private static string GetChildControlName(string parentName, int index, string childName)
        {
            return string.Format("{0}[{1}].{2}", parentName, index, childName);
        }

        // Builds the outer <div> container for the enum-based overloads.
        private static MvcHtmlString EnumCheckBoxListHelper(HtmlHelper htmlHelper, string name, IEnumerable<Tuple<string, int, bool>> list, IDictionary<string, object> htmlAttributes)
        {
            var fullName = htmlHelper.ViewContext.ViewData.TemplateInfo.GetFullHtmlFieldName(name);
            if (string.IsNullOrEmpty(fullName))
            {
                // FIX (CA2208): see CheckBoxListHelper.
                throw new ArgumentException("Value cannot be null or empty.", "name");
            }

            var listItemBuilder = BuildEnumCheckBoxListItems(fullName, list);

            var tagBuilder = new TagBuilder("div") { InnerHtml = listItemBuilder.ToString() };
            tagBuilder.MergeAttributes(htmlAttributes);
            tagBuilder.GenerateId(fullName);

            return new MvcHtmlString(tagBuilder.ToString(TagRenderMode.Normal));
        }

        // Emits one raw <input type="checkbox"> per (text, value, selected) tuple.
        private static MvcHtmlString BuildEnumCheckBoxListItems(string name, IEnumerable<Tuple<string, int, bool>> list)
        {
            var listItemBuilder = new StringBuilder();
            foreach (var t in list)
            {
                listItemBuilder.AppendLine("<div>");
                var checkBox = string.Format(@"<input name=""{0}"" type=""checkbox"" value=""{1}"" {2} />",
                    name, t.Item2, t.Item3 ? @"checked=""checked""" : string.Empty);
                listItemBuilder.AppendLine(checkBox);
                // FIX: HTML-encode the label text, consistent with
                // BuildCheckBoxListItems (it was previously emitted raw).
                listItemBuilder.AppendLine(HttpUtility.HtmlEncode(t.Item1));
                listItemBuilder.AppendLine("</div>");
            }
            return new MvcHtmlString(listItemBuilder.ToString());
        }

        // Resolves the label for an enum value: [Display(Name=...)] first,
        // then [Description], then the enum member name.
        private static string GetDisplayName(object value)
        {
            var type = value.GetType();
            var member = type.GetMember(value.ToString());

            var displayAttributes = member[0].GetCustomAttributes(typeof(DisplayAttribute), false) as DisplayAttribute[];
            if (displayAttributes != null && displayAttributes.Any())
                return displayAttributes.First().Name;

            var descriptionAttributes = member[0].GetCustomAttributes(typeof(DescriptionAttribute), false) as DescriptionAttribute[];
            if (descriptionAttributes != null && descriptionAttributes.Any())
                return descriptionAttributes.First().Description;

            return value.ToString();
        }
    }
}
/* Copyright (c) 2006-2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/* Change history
* Oct 13 2008  Joe Feser       joseph.feser@gmail.com
* Converted ArrayLists and other .NET 1.1 collections to use Generics
* Combined IExtensionElement and IExtensionElementFactory interfaces
*
*/
using System;
using System.IO;
using System.Xml;
using System.Collections;
using System.Configuration;
using System.Net;
using NUnit.Framework;
using Google.GData.Client;
using Google.GData.GoogleBase;
using System.Collections.Generic;

namespace Google.GData.GoogleBase.UnitTests
{
    // Shared fixture base: gives every test a fresh, empty attribute collection
    // (with type conversion) backed by a non-version-aware extension list.
    public abstract class TypeConversionTestBase
    {
        protected ExtensionList list;
        protected GBaseAttributeCollectionWithTypeConversion attrs;

        [SetUp]
        public virtual void SetUp()
        {
            list = ExtensionList.NotVersionAware();
            attrs = new GBaseAttributeCollectionWithTypeConversion(list);
        }
    }

    // Tests typed extraction/formatting of int, float and number attributes.
    [TestFixture]
    [Category("GoogleBase")]
    public class NumberAttributesTest : TypeConversionTestBase
    {
        private GBaseAttribute aInt;
        private GBaseAttribute aFloat;
        private GBaseAttribute largeFloat;
        private GBaseAttribute aNumber;

        [SetUp]
        public override void SetUp()
        {
            base.SetUp();
            // Three attributes share the name "a" (int, float, number); "large" is a
            // separate float attribute used for large-value formatting tests.
            aInt = new GBaseAttribute("a", GBaseAttributeType.Int, "12");
            aFloat = new GBaseAttribute("a", GBaseAttributeType.Float, "3.14");
            largeFloat = new GBaseAttribute("large", GBaseAttributeType.Float, "10000000");
            aNumber = new GBaseAttribute("a", GBaseAttributeType.Number, "2.7");
            attrs.Add(aInt);
            attrs.Add(aFloat);
            attrs.Add(aNumber);
            attrs.Add(largeFloat);
        }

        [Test]
        public void ExtractIntAttribute()
        {
            int ivalue;
            Assert.IsTrue(attrs.ExtractIntAttribute("a", out ivalue));
            Assert.AreEqual(12, ivalue);
        }

        [Test]
        public void ExtractFloatAttribute()
        {
            float fvalue;
            Assert.IsTrue(attrs.ExtractFloatAttribute("a", out fvalue));
            Assert.AreEqual((float)3.14, fvalue);
        }

        [Test]
        public void AddLargeFloatAttribute()
        {
            // Large floats must be serialized in plain decimal notation (two decimals),
            // not scientific notation.
            attrs.AddFloatAttribute("largeB", 1e9f);
            Assert.AreEqual("1000000000.00",
                            attrs.GetAttribute("largeB", GBaseAttributeType.Float).Content);
        }

        [Test]
        public void ExtractLargeFloatAttribute()
        {
            float fvalue;
            Assert.IsTrue(attrs.ExtractFloatAttribute("large", out fvalue));
            Assert.AreEqual(1e7f, fvalue);
        }

        [Test]
        public void ExtractNumberAttribute()
        {
            // ExtractNumberAttribute returns whichever numeric attribute comes first;
            // removing the int then the float attribute steps through the remaining ones.
            float fvalue;
            Assert.IsTrue(attrs.ExtractNumberAttribute("a", out fvalue));
            Assert.AreEqual((float) 12, fvalue);
            attrs.Remove(aInt);
            Assert.IsTrue(attrs.ExtractNumberAttribute("a", out fvalue));
            Assert.AreEqual((float)3.14, fvalue);
            attrs.Remove(aFloat);
            Assert.IsTrue(attrs.ExtractNumberAttribute("a", out fvalue));
            Assert.AreEqual((float) 2.7, fvalue);
        }

        [Test]
        public void GetNumberAttributes()
        {
            // "Number" extraction covers int, float and number attributes,
            // in insertion order.
            List<float> values = attrs.GetNumberAttributes("a");
            Assert.AreEqual(3, values.Count);
            Assert.AreEqual(12, values[0]);
            Assert.AreEqual(3.14f, values[1]);
            Assert.AreEqual(2.7f, values[2]);
        }

        [Test]
        public void GetFloatAttributes()
        {
            // Float extraction is type-exact: only the float attribute qualifies.
            List<float> values = attrs.GetFloatAttributes("a");
            Assert.AreEqual(1, values.Count);
            Assert.AreEqual(3.14f, values[0]);
        }

        [Test]
        public void GetIntAttributes()
        {
            List<int> values = attrs.GetIntAttributes("a");
            Assert.AreEqual(1, values.Count);
            Assert.AreEqual(12, values[0]);
        }
    }

    // Tests typed extraction/formatting of intUnit, floatUnit and numberUnit attributes
    // (value + unit string pairs).
    [TestFixture]
    [Category("GoogleBase")]
    public class WithUnitAttributesTest : TypeConversionTestBase
    {
        private GBaseAttribute aIntUnit;
        private readonly IntUnit aIntUnitValue = new IntUnit(12, "minutes");
        private GBaseAttribute aFloatUnit;
        private readonly FloatUnit aFloatUnitValue = new FloatUnit(3.14f, "mm");
        private GBaseAttribute aNumberUnit;
        private readonly FloatUnit aNumberUnitValue = new FloatUnit(2.7f, "km");

        [SetUp]
        public override void SetUp()
        {
            base.SetUp();
            // All three attributes share the name "a"; insertion order is
            // float, int, number (GetNumberUnitAttributes relies on this).
            aFloatUnit = new GBaseAttribute("a", GBaseAttributeType.FloatUnit,
                                            aFloatUnitValue.ToString());
            aIntUnit = new GBaseAttribute("a", GBaseAttributeType.IntUnit,
                                          aIntUnitValue.ToString());
            aNumberUnit = new GBaseAttribute("a", GBaseAttributeType.NumberUnit,
                                             aNumberUnitValue.ToString());
            attrs.Add(aFloatUnit);
            attrs.Add(aIntUnit);
            attrs.Add(aNumberUnit);
        }

        [Test]
        public void GetIntUnitAttribute()
        {
            Assert.AreEqual(aIntUnitValue, attrs.GetIntUnitAttribute("a"));
        }

        [Test]
        public void GetFloatUnitAttribute()
        {
            Assert.AreEqual(aFloatUnitValue, attrs.GetFloatUnitAttribute("a"));
        }

        [Test]
        public void AddLargeFloatUnitAttribute()
        {
            // As with plain floats, large values must be serialized in plain
            // decimal notation followed by the unit.
            GBaseAttribute attribute = attrs.AddFloatUnitAttribute("large", 1e7f, "usd");
            Assert.AreEqual("10000000.00 usd", attribute.Content);
        }

        [Test]
        public void GetNumberUnitAttribute()
        {
            // numberUnit extraction accepts any of the unit types; the float
            // attribute was inserted first.
            Assert.AreEqual(aFloatUnitValue, attrs.GetNumberUnitAttribute("a"));
        }

        [Test]
        public void GetIntUnitAttributes()
        {
            List<IntUnit> values = attrs.GetIntUnitAttributes("a");
            Assert.AreEqual(1, values.Count);
            Assert.AreEqual(aIntUnitValue, values[0]);
        }

        [Test]
        public void GetFloatUnitAttributes()
        {
            List<FloatUnit> values = attrs.GetFloatUnitAttributes("a");
            Assert.AreEqual(1, values.Count);
            Assert.AreEqual(aFloatUnitValue, values[0]);
        }

        [Test]
        public void GetNumberUnitAttributes()
        {
            // Returns all three unit attributes, in insertion order.
            List<NumberUnit> values = attrs.GetNumberUnitAttributes("a");
            Assert.AreEqual(3, values.Count);
            Assert.AreEqual(aFloatUnitValue, values[0]);
            Assert.AreEqual(aIntUnitValue, values[1]);
            Assert.AreEqual(aNumberUnitValue, values[2]);
        }
    }

    // Tests typed extraction/formatting of date, dateTime and dateTimeRange attributes.
    [TestFixture]
    [Category("GoogleBase")]
    public class DateTimeAttributeTest : TypeConversionTestBase
    {
        private static readonly string ADateString = "2005-09-02";
        private static readonly string BDateString = "2005-10-20";
        private static readonly string ADateTimeString = ADateString + "T17:32:10Z";
        private static readonly string BDateTimeString = BDateString + "T13:24:08Z";
        private static readonly DateTime ADate = DateTime.Parse(ADateString);
        private static readonly DateTime ADateTime = DateTime.Parse(ADateTimeString);
        private static readonly DateTime BDateTime = DateTime.Parse(BDateTimeString);
        private static readonly GBaseAttribute dateAttr =
            new GBaseAttribute("a", GBaseAttributeType.Date, ADateString);
        private static readonly GBaseAttribute dateTimeAttr =
            new GBaseAttribute("a", GBaseAttributeType.DateTime, ADateTimeString);
        private static readonly GBaseAttribute dateTimeRangeAttr =
            new GBaseAttribute("a", GBaseAttributeType.DateTimeRange,
                               ADateTimeString + " " + BDateTimeString);

        [SetUp]
        public override void SetUp()
        {
            base.SetUp();
            // Insertion order matters: dateTime, date, then dateTimeRange — the
            // extraction tests below remove them one by one in that order.
            attrs.Add(dateTimeAttr);
            attrs.Add(dateAttr);
            attrs.Add(dateTimeRangeAttr);
        }

        [Test]
        public void GetDateAttributeTest()
        {
            DateTime value;
            Assert.IsTrue(attrs.ExtractDateAttribute("a", out value));
            Assert.AreEqual(ADateTime, value);
            attrs.Remove(dateTimeAttr);
            Assert.IsTrue(attrs.ExtractDateAttribute("a", out value));
            Assert.AreEqual(ADate, value);
            attrs.Remove(dateAttr);
            // With only the range attribute left, plain date extraction must fail.
            Assert.IsFalse(attrs.ExtractDateAttribute("a", out value));
        }

        [Test]
        public void GetDateTimeAttributeTest()
        {
            DateTime value;
            Assert.IsTrue(attrs.ExtractDateTimeAttribute("a", out value));
            Assert.AreEqual(ADateTime, value);
            attrs.Remove(dateTimeAttr);
            // dateTime extraction is type-exact; the date attribute does not qualify.
            Assert.IsFalse(attrs.ExtractDateTimeAttribute("a", out value));
        }

        [Test]
        public void GetDateTimeRangeAttributeTest()
        {
            // A single dateTime/date is exposed as a degenerate one-point range.
            Assert.AreEqual(new DateTimeRange(ADateTime),
                            attrs.GetDateTimeRangeAttribute("a"));
            attrs.Remove(dateTimeAttr);
            Assert.AreEqual(new DateTimeRange(ADate),
                            attrs.GetDateTimeRangeAttribute("a"));
            attrs.Remove(dateAttr);
            Assert.AreEqual(new DateTimeRange(ADateTime, BDateTime),
                            attrs.GetDateTimeRangeAttribute("a"));
            attrs.Remove(dateTimeRangeAttr);
            Assert.AreEqual(null, attrs.GetDateTimeRangeAttribute("a"));
        }

        [Test]
        public void AddDateTimeAttributeTest()
        {
            GBaseAttribute attr = attrs.AddDateTimeAttribute("x", ADateTime);
            // Content must be the UTC rendering of the local time.
            Assert.AreEqual(Utilities.LocalDateTimeInUTC(ADateTime), attr.Content);
            Assert.AreEqual(GBaseAttributeType.DateTime, attr.Type);
            Assert.IsTrue(attrs.Contains(attr));
        }

        [Test]
        public void AddDateAttributeTest()
        {
            GBaseAttribute attr = attrs.AddDateAttribute("x", ADateTime);
            Assert.AreEqual(Utilities.LocalDateInUTC(ADateTime), attr.Content);
            Assert.AreEqual(GBaseAttributeType.Date, attr.Type);
            Assert.IsTrue(attrs.Contains(attr));
        }

        [Test]
        public void AddDateTimeRangeAttributeTest()
        {
            DateTimeRange range = new DateTimeRange(ADateTime, BDateTime);
            GBaseAttribute attr = attrs.AddDateTimeRangeAttribute("x", range);
            Assert.AreEqual(range.ToString(), attr.Content);
            Assert.AreEqual(GBaseAttributeType.DateTimeRange, attr.Type);
            Assert.IsTrue(attrs.Contains(attr));
        }

        // NOTE(review): method name has a typo ("Empyt" for "Empty"); left as-is since
        // test names are referenced by test runners/reports.
        [Test]
        public void AddEmpytDateTimeRangeAttributeTest()
        {
            // Adding a degenerate (single-point) range must be rejected.
            try
            {
                attrs.AddDateTimeRangeAttribute("x", new DateTimeRange(ADateTime));
                Assert.Fail("expected exception");
            }
            catch(ArgumentException e)
            {
                Tracing.TraceInfo(e.ToString());
            }
        }
    }
}
#region File Description
//-----------------------------------------------------------------------------
// BackgroundMusicManager.cs
//
// Microsoft XNA Community Game Platform
// Copyright (C) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
#endregion

using System;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Media;

namespace WP7MusicManagement
{
    /// <summary>
    /// A game component that can be used to play background music, ensuring
    /// that the GameHasControl property is respected and that music playback
    /// will resume after any video playback.
    /// </summary>
    /// <remarks>
    /// The BackgroundMusicManager has two primary responsibilities:
    ///
    /// 1) If the game wishes to play a song, the BackgroundMusicManager will monitor the
    /// MediaPlayer.GameHasControl property to ensure that the game plays the music when
    /// it is allowed, without playing over the user's music.
    /// 2) If the game's music is paused for some reason (e.g. the headphones were unplugged)
    /// the BackgroundMusicManager will resume the playback automatically.
    ///
    /// The BackgroundMusicManager helps not only with the Windows Phone Certification
    /// Requirements for using GameHasControl, but also makes it easier to resume media after
    /// certain events, such as watching videos or unplugging headphones, both of which will
    /// pause music with no automatic resuming.
    /// </remarks>
    public class BackgroundMusicManager : GameComponent
    {
        // We don't want to poll constantly, so we set an amount of time (in seconds) of
        // how often we should poll. By default, we're going to poll every second.
        const float PollDelay = 1f;

        // A simple float for our polling timer.
        private float gameHasControlTimer;

        // We keep a member variable around to tell us if we have control of the music.
        private bool gameHasControl = false;

        // The song the game wants currently playing. Null means "no game music desired".
        private Song currentSong;

        /// <summary>
        /// Gets whether or not the game music is currently playing.
        /// </summary>
        /// <remarks>
        /// Updated once per poll interval in Update, so it may lag the real
        /// MediaPlayer state by up to PollDelay seconds.
        /// </remarks>
        public bool IsGameMusicPlaying { get; private set; }

        /// <summary>
        /// Invoked if the game tries to play a song and the game doesn't have control. This
        /// allows games the chance to prompt the user and turn off their music if they accept.
        /// </summary>
        public event EventHandler<EventArgs> PromptGameHasControl;

        /// <summary>
        /// Invoked if song playback fails to let the game prompt the user or respond
        /// as necessary.
        /// </summary>
        public event EventHandler<EventArgs> PlaybackFailed;

        /// <summary>
        /// Initializes a new BackgroundMusicManager.
        /// </summary>
        /// <param name="game">The Game that is using this manager.</param>
        public BackgroundMusicManager(Game game)
            : base(game)
        {
            // Grab the GameHasControl as our initial value
            gameHasControl = MediaPlayer.GameHasControl;

            // Hook the game's activated event so we can respond if the game gets backgrounded
            // such as when a video is played with MediaPlayerLauncher.
            // NOTE(review): the handler is never unsubscribed; acceptable while this
            // component lives as long as the Game itself.
            game.Activated += game_Activated;
        }

        /// <summary>
        /// Event handler that is invoked when the game is activated.
        /// </summary>
        void game_Activated(object sender, EventArgs e)
        {
            // See if we have control of the music
            gameHasControl = MediaPlayer.GameHasControl;

            // If we have control, a song we want to play, and the media player isn't playing,
            // play our song. This will happen when coming back from deactivation with certain
            // launchers (mainly the MediaPlayerLauncher) which don't automatically play/resume
            // the song for us. We can detect this case and restart the song ourselves, that way
            // the user doesn't end up with a game without background music.
            if (gameHasControl && currentSong != null && MediaPlayer.State != MediaState.Playing)
                PlaySongSafe();
        }

        /// <summary>
        /// Plays a given song as the background music.
        /// </summary>
        /// <param name="song">The song to play.</param>
        public void Play(Song song)
        {
            // Store the song in our member variable.
            currentSong = song;

            // If we have control, play the song immediately.
            if (gameHasControl)
                PlaySongSafe();

            // Otherwise invoke our event so the game can check with the player
            // to see if they want to stop the current song so the game's music
            // will play.
            else if (PromptGameHasControl != null)
                PromptGameHasControl(this, EventArgs.Empty);
        }

        /// <summary>
        /// Stops playing our background music.
        /// </summary>
        public void Stop()
        {
            // Null out our member variable
            currentSong = null;

            // If we have control, stop the media player.
            if (gameHasControl)
                MediaPlayer.Stop();
        }

        /// <summary>
        /// Allows the component to handle its update logic.
        /// </summary>
        /// <param name="gameTime">Provides a snapshot of timing values.</param>
        public override void Update(GameTime gameTime)
        {
            // Update our timer
            gameHasControlTimer += (float)gameTime.ElapsedGameTime.TotalSeconds;

            // If we've passed our poll delay, we want to handle our update
            if (gameHasControlTimer >= PollDelay)
            {
                // Reset the timer back to zero
                // (resets rather than subtracting PollDelay; a small drift per poll
                // is acceptable for this purpose)
                gameHasControlTimer = 0f;

                // Check to see if we have control of the media player
                gameHasControl = MediaPlayer.GameHasControl;

                // Get the current state and song from the MediaPlayer
                MediaState currentState = MediaPlayer.State;
                Song activeSong = MediaPlayer.Queue.ActiveSong;

                // If we have control of the music...
                if (gameHasControl)
                {
                    // If we have a song that we want playing...
                    if (currentSong != null)
                    {
                        // If the media player isn't playing anything...
                        if (currentState != MediaState.Playing)
                        {
                            // If the song is paused, for example because the headphones
                            // were removed, we call Resume() to continue playback.
                            if (currentState == MediaState.Paused)
                            {
                                ResumeSongSafe();
                            }

                            // Otherwise we play our desired song.
                            else
                            {
                                PlaySongSafe();
                            }
                        }
                    }

                    // If we don't have a song we want playing, we want to make sure we stop
                    // any music we may have previously had playing.
                    else
                    {
                        if (currentState != MediaState.Stopped)
                            MediaPlayer.Stop();
                    }
                }

                // Store a value indicating if the game music is playing
                // (based on the state captured at the top of this poll).
                IsGameMusicPlaying = (currentState == MediaState.Playing) && gameHasControl;
            }
        }

        /// <summary>
        /// Helper method to wrap MediaPlayer.Play to handle exceptions.
        /// </summary>
        private void PlaySongSafe()
        {
            // Make sure we have a song to play
            if (currentSong == null)
                return;

            try
            {
                MediaPlayer.Play(currentSong);
            }
            catch (InvalidOperationException)
            {
                // Media playback will fail if Zune is connected. We don't want the
                // game to crash, however, so we catch the exception.

                // Null out the song so we don't keep trying to play it. That would
                // cause us to keep catching exceptions and will likely cause the game
                // to hitch occassionally.
                currentSong = null;

                // Invoke our PlaybackFailed event in case the game wants to handle this
                // scenario in some custom way.
                if (PlaybackFailed != null)
                    PlaybackFailed(this, EventArgs.Empty);
            }
        }

        /// <summary>
        /// Helper method to wrap MediaPlayer.Resume to handle exceptions.
        /// </summary>
        private void ResumeSongSafe()
        {
            try
            {
                MediaPlayer.Resume();
            }
            catch (InvalidOperationException)
            {
                // Media playback will fail if Zune is connected. We don't want the
                // game to crash, however, so we catch the exception.

                // Null out the song so we don't keep trying to resume it. That would
                // cause us to keep catching exceptions and will likely cause the game
                // to hitch occassionally.
                currentSong = null;

                // Invoke our PlaybackFailed event in case the game wants to handle this
                // scenario in some custom way.
                if (PlaybackFailed != null)
                    PlaybackFailed(this, EventArgs.Empty);
            }
        }
    }
}
using System; using System.Collections.Generic; using System.Threading.Tasks; using FluentAssertions; using Orleans.Runtime; using Orleans.Transactions.Abstractions; namespace Orleans.Transactions.TestKit { public abstract class TransactionalStateStorageTestRunner<TState> : TransactionTestRunnerBase where TState : class, ITestState, new() { protected Func<Task<ITransactionalStateStorage<TState>>> stateStorageFactory; protected Func<TState> stateFactory; /// <summary> /// Constructor /// </summary> /// <param name="stateStorageFactory">factory to create ITransactionalStateStorage, the test runner are assuming the state /// in storage is empty when ITransactionalStateStorage was created </param> /// <param name="stateFactory">factory to create TState for test</param> /// <param name="grainFactory">grain Factory needed for test runner</param> /// <param name="testOutput">test output to helpful messages</param> protected TransactionalStateStorageTestRunner(Func<Task<ITransactionalStateStorage<TState>>> stateStorageFactory, Func<TState> stateFactory, IGrainFactory grainFactory, Action<string> testOutput) :base(grainFactory, testOutput) { this.stateStorageFactory = stateStorageFactory; this.stateFactory = stateFactory; } public virtual async Task FirstTime_Load_ShouldReturnEmptyLoadResponse() { var stateStorage = await this.stateStorageFactory(); var response = await stateStorage.Load(); var defaultStateValue = new TState().state; //Assertion response.Should().NotBeNull(); response.ETag.Should().BeNull(); response.CommittedSequenceId.Should().Be(0); response.CommittedState.state.Should().Be(defaultStateValue); response.PendingStates.Should().BeEmpty(); } private static List<PendingTransactionState<TState>> emptyPendingStates = new List<PendingTransactionState<TState>>(); public virtual async Task StoreWithoutChanges() { var stateStorage = await this.stateStorageFactory(); // load first time var loadresponse = await stateStorage.Load(); // store without any changes var 
etag1 = await stateStorage.Store(loadresponse.ETag, loadresponse.Metadata, emptyPendingStates, null, null); // load again loadresponse = await stateStorage.Load(); loadresponse.Should().NotBeNull(); loadresponse.Metadata.Should().NotBeNull(); loadresponse.Metadata.TimeStamp.Should().Be(default(DateTime)); loadresponse.Metadata.CommitRecords.Should().BeEmpty(); loadresponse.ETag.Should().Be(etag1); loadresponse.CommittedSequenceId.Should().Be(0); loadresponse.PendingStates.Should().BeEmpty(); // update metadata, then write back var now = DateTime.UtcNow; var cr = MakeCommitRecords(2, 2); var metadata = new TransactionalStateMetaData() { TimeStamp = now, CommitRecords = cr }; var etag2 = await stateStorage.Store(etag1, metadata, emptyPendingStates, null, null); // load again, check content loadresponse = await stateStorage.Load(); loadresponse.Should().NotBeNull(); loadresponse.Metadata.Should().NotBeNull(); loadresponse.Metadata.TimeStamp.Should().Be(now); loadresponse.Metadata.CommitRecords.Count.Should().Be(cr.Count); loadresponse.ETag.Should().Be(etag2); loadresponse.CommittedSequenceId.Should().Be(0); loadresponse.PendingStates.Should().BeEmpty(); } public virtual async Task WrongEtags() { var stateStorage = await this.stateStorageFactory(); // load first time var loadresponse = await stateStorage.Load(); // store with wrong e-tag, must fail try { var etag1 = await stateStorage.Store("wrong-etag", loadresponse.Metadata, emptyPendingStates, null, null); throw new Exception("storage did not catch e-tag mismatch"); } catch (Exception) { } // load again loadresponse = await stateStorage.Load(); loadresponse.Should().NotBeNull(); loadresponse.Metadata.TimeStamp.Should().Be(default(DateTime)); loadresponse.Metadata.CommitRecords.Should().BeEmpty(); loadresponse.ETag.Should().BeNull(); loadresponse.CommittedSequenceId.Should().Be(0); loadresponse.PendingStates.Should().BeEmpty(); // update timestamp in metadata, then write back with correct e-tag var now = 
DateTime.UtcNow; var cr = MakeCommitRecords(2,2); var metadata = new TransactionalStateMetaData() { TimeStamp = now, CommitRecords = cr }; var etag2 = await stateStorage.Store(null, metadata, emptyPendingStates, null, null); // update timestamp in metadata, then write back with wrong e-tag, must fail try { var now2 = DateTime.UtcNow; var metadata2 = new TransactionalStateMetaData() { TimeStamp = now2, CommitRecords = MakeCommitRecords(3,3) }; await stateStorage.Store(null, metadata, emptyPendingStates, null, null); throw new Exception("storage did not catch e-tag mismatch"); } catch (Exception) { } // load again, check content loadresponse = await stateStorage.Load(); loadresponse.Should().NotBeNull(); loadresponse.Metadata.Should().NotBeNull(); loadresponse.Metadata.TimeStamp.Should().Be(now); loadresponse.Metadata.CommitRecords.Count.Should().Be(cr.Count); loadresponse.ETag.Should().Be(etag2); loadresponse.CommittedSequenceId.Should().Be(0); loadresponse.PendingStates.Should().BeEmpty(); } private PendingTransactionState<TState> MakePendingState(long seqno, int val, bool tm) { var result = new PendingTransactionState<TState>() { SequenceId = seqno, TimeStamp = DateTime.UtcNow, TransactionId = Guid.NewGuid().ToString(), TransactionManager = tm ? 
default(ParticipantId) : MakeParticipantId(), State = new TState() }; result.State.state = val; return result; } private ParticipantId MakeParticipantId() { return new ParticipantId( "tm", null, // (GrainReference) grainFactory.GetGrain<ITransactionTestGrain>(Guid.NewGuid(), TransactionTestConstants.SingleStateTransactionalGrain), ParticipantId.Role.Resource | ParticipantId.Role.Manager); } private Dictionary<Guid, CommitRecord> MakeCommitRecords(int count, int size) { var result = new Dictionary<Guid, CommitRecord>(); for (int j = 0; j < size; j++) { var r = new CommitRecord() { Timestamp = DateTime.UtcNow, WriteParticipants = new List<ParticipantId>(), }; for (int i = 0; i < size; i++) { r.WriteParticipants.Add(MakeParticipantId()); } result.Add(Guid.NewGuid(), r); } return result; } private async Task PrepareOne() { var stateStorage = await this.stateStorageFactory(); var loadresponse = await stateStorage.Load(); var etag = loadresponse.ETag; var metadata = loadresponse.Metadata; var initialstate = loadresponse.CommittedState.state; var pendingstate = MakePendingState(1, 123, false); etag = await stateStorage.Store(etag, metadata, new List<PendingTransactionState<TState>>() { pendingstate }, null, null); loadresponse = await stateStorage.Load(); etag = loadresponse.ETag; metadata = loadresponse.Metadata; loadresponse.Should().NotBeNull(); loadresponse.Metadata.Should().NotBeNull(); loadresponse.CommittedSequenceId.Should().Be(0); loadresponse.PendingStates.Count.Should().Be(1); loadresponse.PendingStates[0].SequenceId.Should().Be(1); loadresponse.PendingStates[0].TimeStamp.Should().Be(pendingstate.TimeStamp); loadresponse.PendingStates[0].TransactionManager.Should().Be(pendingstate.TransactionManager); loadresponse.PendingStates[0].TransactionId.Should().Be(pendingstate.TransactionId); loadresponse.PendingStates[0].State.state.Should().Be(123); } public virtual async Task ConfirmOne(bool useTwoSteps) { var stateStorage = await this.stateStorageFactory(); var 
loadresponse = await stateStorage.Load(); var etag = loadresponse.ETag; var metadata = loadresponse.Metadata; var initialstate = loadresponse.CommittedState.state; var pendingstate = MakePendingState(1, 123, false); if (useTwoSteps) { etag = await stateStorage.Store(etag, metadata, new List<PendingTransactionState<TState>>() { pendingstate }, null, null); etag = await stateStorage.Store(etag, metadata, emptyPendingStates, 1, null); } else { etag = await stateStorage.Store(etag, metadata, new List<PendingTransactionState<TState>>() { pendingstate }, 1, null); } loadresponse = await stateStorage.Load(); etag = loadresponse.ETag; metadata = loadresponse.Metadata; loadresponse.Should().NotBeNull(); loadresponse.Metadata.Should().NotBeNull(); loadresponse.CommittedSequenceId.Should().Be(1); loadresponse.PendingStates.Count.Should().Be(0); loadresponse.CommittedState.state.Should().Be(123); loadresponse.Metadata.TimeStamp.Should().Be(default(DateTime)); loadresponse.Metadata.CommitRecords.Count.Should().Be(0); } public virtual async Task CancelOne() { var stateStorage = await this.stateStorageFactory(); var loadresponse = await stateStorage.Load(); var etag = loadresponse.ETag; var metadata = loadresponse.Metadata; var initialstate = loadresponse.CommittedState.state; var pendingstate = MakePendingState(1, 123, false); etag = await stateStorage.Store(etag, metadata, new List<PendingTransactionState<TState>>() { pendingstate }, null, null); etag = await stateStorage.Store(etag, metadata, emptyPendingStates, null, 0); loadresponse = await stateStorage.Load(); etag = loadresponse.ETag; metadata = loadresponse.Metadata; loadresponse.Should().NotBeNull(); loadresponse.Metadata.Should().NotBeNull(); loadresponse.CommittedSequenceId.Should().Be(0); loadresponse.PendingStates.Count.Should().Be(0); loadresponse.CommittedState.state.Should().Be(initialstate); loadresponse.Metadata.TimeStamp.Should().Be(default(DateTime)); loadresponse.Metadata.CommitRecords.Count.Should().Be(0); } 
// Prepares a pending state for sequence 1, then prepares a second state for the
// SAME sequence number; the second prepare must replace the first, and the load
// must report only the replacement's timestamp/manager/id/value.
public virtual async Task ReplaceOne()
{
    var stateStorage = await this.stateStorageFactory();
    var loadresponse = await stateStorage.Load();
    var etag = loadresponse.ETag;
    var metadata = loadresponse.Metadata;
    var initialstate = loadresponse.CommittedState.state;
    var pendingstate1 = MakePendingState(1, 123, false);
    var pendingstate2 = MakePendingState(1, 456, false);
    etag = await stateStorage.Store(etag, metadata, new List<PendingTransactionState<TState>>() { pendingstate1 }, null, null);
    etag = await stateStorage.Store(etag, metadata, new List<PendingTransactionState<TState>>() { pendingstate2 }, null, null);
    loadresponse = await stateStorage.Load();
    etag = loadresponse.ETag;
    metadata = loadresponse.Metadata;
    loadresponse.Should().NotBeNull();
    loadresponse.Metadata.Should().NotBeNull();
    loadresponse.CommittedSequenceId.Should().Be(0);
    loadresponse.PendingStates.Count.Should().Be(1);
    loadresponse.PendingStates[0].SequenceId.Should().Be(1);
    loadresponse.PendingStates[0].TimeStamp.Should().Be(pendingstate2.TimeStamp);
    loadresponse.PendingStates[0].TransactionManager.Should().Be(pendingstate2.TransactionManager);
    loadresponse.PendingStates[0].TransactionId.Should().Be(pendingstate2.TransactionId);
    loadresponse.PendingStates[0].State.state.Should().Be(456);
}

// Prepares two pending states, then confirms sequence 1 and cancels sequence 2 —
// in a single Store call, or in two calls, optionally with the cancel issued first.
// The final committed state (seq 1, value 123) must be the same in all variants.
public virtual async Task ConfirmOneAndCancelOne(bool useTwoSteps = false, bool reverseOrder = false)
{
    var stateStorage = await this.stateStorageFactory();
    var loadresponse = await stateStorage.Load();
    var etag = loadresponse.ETag;
    var metadata = loadresponse.Metadata;
    var initialstate = loadresponse.CommittedState.state;
    var pendingstate1 = MakePendingState(1, 123, false);
    var pendingstate2 = MakePendingState(2, 456, false);
    etag = await stateStorage.Store(etag, metadata, new List<PendingTransactionState<TState>>() { pendingstate1, pendingstate2 }, null, null);
    if (useTwoSteps)
    {
        if (reverseOrder)
        {
            // BUGFIX: this branch used to be byte-identical to the one below,
            // so reverseOrder had no effect. Cancel seq 2 first, then confirm seq 1.
            etag = await stateStorage.Store(etag, metadata, emptyPendingStates, null, 1);
            etag = await stateStorage.Store(etag, metadata, emptyPendingStates, 1, null);
        }
        else
        {
            etag = await stateStorage.Store(etag, metadata, emptyPendingStates, 1, null);
            etag = await stateStorage.Store(etag, metadata, emptyPendingStates, null, 1);
        }
    }
    else
    {
        etag = await stateStorage.Store(etag, metadata, emptyPendingStates, 1, 1);
    }
    loadresponse = await stateStorage.Load();
    etag = loadresponse.ETag;
    metadata = loadresponse.Metadata;
    loadresponse.Should().NotBeNull();
    loadresponse.Metadata.Should().NotBeNull();
    loadresponse.CommittedSequenceId.Should().Be(1);
    loadresponse.PendingStates.Count.Should().Be(0);
    loadresponse.CommittedState.state.Should().Be(123);
    loadresponse.Metadata.TimeStamp.Should().Be(default(DateTime));
    loadresponse.Metadata.CommitRecords.Count.Should().Be(0);
}

// Prepares `count` pending states (seq i+1, value i*1000) without confirming any;
// the load must return all of them, in order, with nothing committed.
public virtual async Task PrepareMany(int count)
{
    var stateStorage = await this.stateStorageFactory();
    var loadresponse = await stateStorage.Load();
    var etag = loadresponse.ETag;
    var metadata = loadresponse.Metadata;
    var initialstate = loadresponse.CommittedState.state;
    var pendingstates = new List<PendingTransactionState<TState>>();
    for (int i = 0; i < count; i++)
    {
        pendingstates.Add(MakePendingState(i + 1, i * 1000, false));
    }
    etag = await stateStorage.Store(etag, metadata, pendingstates, null, null);
    loadresponse = await stateStorage.Load();
    etag = loadresponse.ETag;
    metadata = loadresponse.Metadata;
    loadresponse.Should().NotBeNull();
    loadresponse.Metadata.Should().NotBeNull();
    loadresponse.CommittedSequenceId.Should().Be(0);
    loadresponse.PendingStates.Count.Should().Be(count);
    for (int i = 0; i < count; i++)
    {
        loadresponse.PendingStates[i].SequenceId.Should().Be(i + 1);
        loadresponse.PendingStates[i].TimeStamp.Should().Be(pendingstates[i].TimeStamp);
        loadresponse.PendingStates[i].TransactionManager.Should().Be(pendingstates[i].TransactionManager);
        loadresponse.PendingStates[i].TransactionId.Should().Be(pendingstates[i].TransactionId);
        loadresponse.PendingStates[i].State.state.Should().Be(i * 1000);
    }
}

public virtual async Task
// Continuation of the `public virtual async Task` fragment above: prepares `count`
// pending states and confirms all of them — either prepare-then-confirm in two
// Store calls, or both in a single call. Committed value must be (count-1)*1000.
ConfirmMany(int count, bool useTwoSteps) { var stateStorage = await this.stateStorageFactory(); var loadresponse = await stateStorage.Load(); var etag = loadresponse.ETag; var metadata = loadresponse.Metadata; var initialstate = loadresponse.CommittedState.state; var pendingstates = new List<PendingTransactionState<TState>>(); for (int i = 0; i < count; i++) { pendingstates.Add(MakePendingState(i + 1, i * 1000, false)); }
if (useTwoSteps) { etag = await stateStorage.Store(etag, metadata, pendingstates, null, null); etag = await stateStorage.Store(etag, metadata, emptyPendingStates, count, null); } else { etag = await stateStorage.Store(etag, metadata, pendingstates, count, null); }
loadresponse = await stateStorage.Load(); etag = loadresponse.ETag; metadata = loadresponse.Metadata; loadresponse.Should().NotBeNull(); loadresponse.Metadata.Should().NotBeNull(); loadresponse.CommittedSequenceId.Should().Be(count); loadresponse.PendingStates.Count.Should().Be(0); loadresponse.CommittedState.state.Should().Be((count - 1)*1000); loadresponse.Metadata.TimeStamp.Should().Be(default(DateTime)); loadresponse.Metadata.CommitRecords.Count.Should().Be(0); }
// Prepares `count` pending states and cancels them all back to sequence 0;
// the committed state must remain the initial value.
public virtual async Task CancelMany(int count) { var stateStorage = await this.stateStorageFactory(); var loadresponse = await stateStorage.Load(); var etag = loadresponse.ETag; var metadata = loadresponse.Metadata; var initialstate = loadresponse.CommittedState.state; var pendingstates = new List<PendingTransactionState<TState>>(); for (int i = 0; i < count; i++) { pendingstates.Add(MakePendingState(i + 1, i * 1000, false)); }
etag = await stateStorage.Store(etag, metadata, pendingstates, null, null); etag = await stateStorage.Store(etag, metadata, emptyPendingStates, null, 0);
loadresponse = await stateStorage.Load(); etag = loadresponse.ETag; metadata = loadresponse.Metadata; loadresponse.Should().NotBeNull(); loadresponse.Metadata.Should().NotBeNull(); loadresponse.CommittedSequenceId.Should().Be(0);
loadresponse.PendingStates.Count.Should().Be(0); loadresponse.CommittedState.state.Should().Be(initialstate); loadresponse.Metadata.TimeStamp.Should().Be(default(DateTime)); loadresponse.Metadata.CommitRecords.Count.Should().Be(0); }
// Prepares `count` states (values i*1000+1), then re-prepares the same sequence
// numbers with different values (i*1000); the second batch must fully replace the first.
public virtual async Task ReplaceMany(int count) { var stateStorage = await this.stateStorageFactory(); var loadresponse = await stateStorage.Load(); var etag = loadresponse.ETag; var metadata = loadresponse.Metadata; var initialstate = loadresponse.CommittedState.state; var pendingstates1 = new List<PendingTransactionState<TState>>(); for (int i = 0; i < count; i++) { pendingstates1.Add(MakePendingState(i + 1, i * 1000 + 1, false)); } var pendingstates2 = new List<PendingTransactionState<TState>>(); for (int i = 0; i < count; i++) { pendingstates2.Add(MakePendingState(i + 1, i * 1000, false)); }
etag = await stateStorage.Store(etag, metadata, pendingstates1, null, null); etag = await stateStorage.Store(etag, metadata, pendingstates2, null, null);
loadresponse = await stateStorage.Load(); etag = loadresponse.ETag; metadata = loadresponse.Metadata; loadresponse.Should().NotBeNull(); loadresponse.Metadata.Should().NotBeNull(); loadresponse.CommittedSequenceId.Should().Be(0); loadresponse.PendingStates.Count.Should().Be(count); for (int i = 0; i < count; i++) { loadresponse.PendingStates[i].SequenceId.Should().Be(i + 1); loadresponse.PendingStates[i].TimeStamp.Should().Be(pendingstates2[i].TimeStamp); loadresponse.PendingStates[i].TransactionManager.Should().Be(pendingstates2[i].TransactionManager); loadresponse.PendingStates[i].TransactionId.Should().Be(pendingstates2[i].TransactionId); loadresponse.PendingStates[i].State.state.Should().Be(i * 1000); } }
// Exercises a single Store call that simultaneously replaces, prepares and confirms
// (batch grows); body continues on the next chunk.
public virtual async Task GrowingBatch() { var stateStorage = await this.stateStorageFactory(); var loadresponse = await stateStorage.Load(); var etag = loadresponse.ETag; var metadata = loadresponse.Metadata; var initialstate = loadresponse.CommittedState.state; var pendingstate1 =
MakePendingState(1, 11, false);
// FIX: the `// prepare ...` / `// replace ...` comments below had lost their line
// breaks and were commenting out the Store calls that followed them on the same
// physical line; line structure restored.
var pendingstate2 = MakePendingState(2, 22, false); var pendingstate3a = MakePendingState(3, 333, false); var pendingstate4a = MakePendingState(4, 444, false); var pendingstate3b = MakePendingState(3, 33, false); var pendingstate4b = MakePendingState(4, 44, false); var pendingstate5 = MakePendingState(5, 55, false); var pendingstate6 = MakePendingState(6, 66, false); var pendingstate7 = MakePendingState(7, 77, false); var pendingstate8 = MakePendingState(8, 88, false);
// prepare 1,2,3a,4a
etag = await stateStorage.Store(etag, metadata, new List<PendingTransactionState<TState>>() { pendingstate1, pendingstate2, pendingstate3a, pendingstate4a}, null, null);
// replace 3b,4b, prepare 5, 6, 7, 8 confirm 1, 2, 3b, 4b, 5, 6
etag = await stateStorage.Store(etag, metadata, new List<PendingTransactionState<TState>>() { pendingstate3b, pendingstate4b, pendingstate5, pendingstate6, pendingstate7, pendingstate8 }, 6, 6);
// Expect: committed through seq 6 (value 66), only 7 and 8 still pending.
loadresponse = await stateStorage.Load(); etag = loadresponse.ETag; metadata = loadresponse.Metadata;
loadresponse.Should().NotBeNull(); loadresponse.Metadata.Should().NotBeNull(); loadresponse.CommittedSequenceId.Should().Be(6); loadresponse.CommittedState.state.Should().Be(66); loadresponse.Metadata.TimeStamp.Should().Be(default(DateTime)); loadresponse.Metadata.CommitRecords.Count.Should().Be(0);
loadresponse.PendingStates.Count.Should().Be(2);
loadresponse.PendingStates[0].SequenceId.Should().Be(7); loadresponse.PendingStates[0].TimeStamp.Should().Be(pendingstate7.TimeStamp); loadresponse.PendingStates[0].TransactionManager.Should().Be(pendingstate7.TransactionManager); loadresponse.PendingStates[0].TransactionId.Should().Be(pendingstate7.TransactionId); loadresponse.PendingStates[0].State.state.Should().Be(77);
loadresponse.PendingStates[1].SequenceId.Should().Be(8); loadresponse.PendingStates[1].TimeStamp.Should().Be(pendingstate8.TimeStamp);
loadresponse.PendingStates[1].TransactionManager.Should().Be(pendingstate8.TransactionManager); loadresponse.PendingStates[1].TransactionId.Should().Be(pendingstate8.TransactionId); loadresponse.PendingStates[1].State.state.Should().Be(88); }
// Exercises a single Store call that replaces, confirms AND cancels at once
// (batch shrinks): confirm through 3, keep 4, cancel 5-8.
public virtual async Task ShrinkingBatch() { var stateStorage = await this.stateStorageFactory(); var loadresponse = await stateStorage.Load(); var etag = loadresponse.ETag; var metadata = loadresponse.Metadata; var initialstate = loadresponse.CommittedState.state; var pendingstate1 = MakePendingState(1, 11, false); var pendingstate2 = MakePendingState(2, 22, false); var pendingstate3a = MakePendingState(3, 333, false); var pendingstate4a = MakePendingState(4, 444, false); var pendingstate5 = MakePendingState(5, 55, false); var pendingstate6 = MakePendingState(6, 66, false); var pendingstate7 = MakePendingState(7, 77, false); var pendingstate8 = MakePendingState(8, 88, false); var pendingstate3b = MakePendingState(3, 33, false); var pendingstate4b = MakePendingState(4, 44, false);
// prepare 1,2,3a,4a, 5, 6, 7, 8
etag = await stateStorage.Store(etag, metadata, new List<PendingTransactionState<TState>>() { pendingstate1, pendingstate2, pendingstate3a, pendingstate4a, pendingstate5, pendingstate6, pendingstate7, pendingstate8 }, null, null);
// replace 3b,4b, confirm 1, 2, 3b, cancel 5, 6, 7, 8
etag = await stateStorage.Store(etag, metadata, new List<PendingTransactionState<TState>>() { pendingstate3b, pendingstate4b }, 3, 4);
// Expect: committed through seq 3 (replacement value 33), only 4b still pending.
loadresponse = await stateStorage.Load(); etag = loadresponse.ETag; metadata = loadresponse.Metadata;
loadresponse.Should().NotBeNull(); loadresponse.Metadata.Should().NotBeNull(); loadresponse.CommittedSequenceId.Should().Be(3); loadresponse.CommittedState.state.Should().Be(33); loadresponse.Metadata.TimeStamp.Should().Be(default(DateTime)); loadresponse.Metadata.CommitRecords.Count.Should().Be(0);
loadresponse.PendingStates.Count.Should().Be(1);
loadresponse.PendingStates[0].SequenceId.Should().Be(4);
loadresponse.PendingStates[0].TimeStamp.Should().Be(pendingstate4b.TimeStamp); loadresponse.PendingStates[0].TransactionManager.Should().Be(pendingstate4b.TransactionManager); loadresponse.PendingStates[0].TransactionId.Should().Be(pendingstate4b.TransactionId); loadresponse.PendingStates[0].State.state.Should().Be(44); }
}
}
// FIX: the whole physical line was swallowed by the first `//` license comment
// after line breaks were lost; line structure restored.
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Reflection;
using System.Text;
using System.Xml;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Security;

#if !NET_NATIVE
using ExtensionDataObject = System.Object;
#endif

namespace System.Runtime.Serialization
{
#if USE_REFEMIT || NET_NATIVE
    public class XmlObjectSerializerWriteContext : XmlObjectSerializerContext
#else
    internal class XmlObjectSerializerWriteContext : XmlObjectSerializerContext
#endif
    {
        // Tracks by-value objects on the current serialization path for cycle detection.
        private ObjectReferenceStack _byValObjectsInScope = new ObjectReferenceStack();
        private XmlSerializableWriter _xmlSerializableWriter;
        // Cycle checks only start once the writer reaches this depth.
        private const int depthToCheckCyclicReference = 512;
        private ObjectToIdCache _serializedObjects;
        private bool _isGetOnlyCollection;
        private readonly bool _unsafeTypeForwardingEnabled;
        protected bool serializeReadOnlyTypes;
        protected bool preserveObjectReferences;

        // Chooses the "complex" context when reference preservation or a surrogate
        // provider requires the heavier write path.
        internal static XmlObjectSerializerWriteContext CreateContext(DataContractSerializer serializer, DataContract rootTypeDataContract, DataContractResolver dataContractResolver)
        {
            return (serializer.PreserveObjectReferences || serializer.SerializationSurrogateProvider != null) ?
new XmlObjectSerializerWriteContextComplex(serializer, rootTypeDataContract, dataContractResolver) : new XmlObjectSerializerWriteContext(serializer, rootTypeDataContract, dataContractResolver); } protected XmlObjectSerializerWriteContext(DataContractSerializer serializer, DataContract rootTypeDataContract, DataContractResolver resolver) : base(serializer, rootTypeDataContract, resolver) { this.serializeReadOnlyTypes = serializer.SerializeReadOnlyTypes; // Known types restricts the set of types that can be deserialized _unsafeTypeForwardingEnabled = true; } internal XmlObjectSerializerWriteContext(XmlObjectSerializer serializer, int maxItemsInObjectGraph, StreamingContext streamingContext, bool ignoreExtensionDataObject) : base(serializer, maxItemsInObjectGraph, streamingContext, ignoreExtensionDataObject) { // Known types restricts the set of types that can be deserialized _unsafeTypeForwardingEnabled = true; } #if USE_REFEMIT || NET_NATIVE internal ObjectToIdCache SerializedObjects #else protected ObjectToIdCache SerializedObjects #endif { get { if (_serializedObjects == null) _serializedObjects = new ObjectToIdCache(); return _serializedObjects; } } internal override bool IsGetOnlyCollection { get { return _isGetOnlyCollection; } set { _isGetOnlyCollection = value; } } internal bool SerializeReadOnlyTypes { get { return this.serializeReadOnlyTypes; } } internal bool UnsafeTypeForwardingEnabled { get { return _unsafeTypeForwardingEnabled; } } #if USE_REFEMIT public void StoreIsGetOnlyCollection() #else internal void StoreIsGetOnlyCollection() #endif { _isGetOnlyCollection = true; } #if USE_REFEMIT public void InternalSerializeReference(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle) #else internal void InternalSerializeReference(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle) #endif 
{ if (!OnHandleReference(xmlWriter, obj, true /*canContainCyclicReference*/)) InternalSerialize(xmlWriter, obj, isDeclaredType, writeXsiType, declaredTypeID, declaredTypeHandle); OnEndHandleReference(xmlWriter, obj, true /*canContainCyclicReference*/); } #if USE_REFEMIT public virtual void InternalSerialize(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle) #else internal virtual void InternalSerialize(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle) #endif { if (writeXsiType) { Type declaredType = Globals.TypeOfObject; SerializeWithXsiType(xmlWriter, obj, obj.GetType().TypeHandle, null/*type*/, -1, declaredType.TypeHandle, declaredType); } else if (isDeclaredType) { DataContract contract = GetDataContract(declaredTypeID, declaredTypeHandle); SerializeWithoutXsiType(contract, xmlWriter, obj, declaredTypeHandle); } else { RuntimeTypeHandle objTypeHandle = obj.GetType().TypeHandle; if (declaredTypeHandle.GetHashCode() == objTypeHandle.GetHashCode()) // semantically the same as Value == Value; Value is not available in SL { DataContract dataContract = (declaredTypeID >= 0) ? 
                        GetDataContract(declaredTypeID, declaredTypeHandle) : GetDataContract(declaredTypeHandle, null /*type*/);
                    SerializeWithoutXsiType(dataContract, xmlWriter, obj, declaredTypeHandle);
                }
                else
                {
                    /* Runtime type differs from declared type: emit xsi:type. */
                    SerializeWithXsiType(xmlWriter, obj, objTypeHandle, null /*type*/, declaredTypeID, declaredTypeHandle, Type.GetTypeFromHandle(declaredTypeHandle));
                }
            }
        }

        /* Writes the value without xsi:type, pushing the contract's known types
           for the duration of the write; skips the value entirely when a z:Ref
           was written for an already-seen IsReference object. */
        internal void SerializeWithoutXsiType(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle declaredTypeHandle)
        {
            if (OnHandleIsReference(xmlWriter, dataContract, obj))
                return;
            if (dataContract.KnownDataContracts != null)
            {
                scopedKnownTypes.Push(dataContract.KnownDataContracts);
                WriteDataContractValue(dataContract, xmlWriter, obj, declaredTypeHandle);
                scopedKnownTypes.Pop();
            }
            else
            {
                WriteDataContractValue(dataContract, xmlWriter, obj, declaredTypeHandle);
            }
        }

        /* Top-level variant: decides whether type info must be written for the root,
           resolving collection interfaces through the DataContractResolver. */
        internal virtual void SerializeWithXsiTypeAtTopLevel(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle originalDeclaredTypeHandle, Type graphType)
        {
            bool verifyKnownType = false;
            Type declaredType = rootTypeDataContract.UnderlyingType;
            if (declaredType.GetTypeInfo().IsInterface && CollectionDataContract.IsCollectionInterface(declaredType))
            {
                if (DataContractResolver != null)
                {
                    WriteResolvedTypeInfo(xmlWriter, graphType, declaredType);
                }
            }
            else if (!declaredType.IsArray) //Array covariance is not supported in XSD. If declared type is array do not write xsi:type.
Instead write xsi:type for each item { verifyKnownType = WriteTypeInfo(xmlWriter, dataContract, rootTypeDataContract); } SerializeAndVerifyType(dataContract, xmlWriter, obj, verifyKnownType, originalDeclaredTypeHandle, declaredType); } protected virtual void SerializeWithXsiType(XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle objectTypeHandle, Type objectType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle, Type declaredType) { bool verifyKnownType = false; #if !NET_NATIVE DataContract dataContract; if (declaredType.GetTypeInfo().IsInterface && CollectionDataContract.IsCollectionInterface(declaredType)) { dataContract = GetDataContractSkipValidation(DataContract.GetId(objectTypeHandle), objectTypeHandle, objectType); if (OnHandleIsReference(xmlWriter, dataContract, obj)) return; dataContract = GetDataContract(declaredTypeHandle, declaredType); #else DataContract dataContract = DataContract.GetDataContractFromGeneratedAssembly(declaredType); if (dataContract.TypeIsInterface && dataContract.TypeIsCollectionInterface) { if (OnHandleIsReference(xmlWriter, dataContract, obj)) return; if (this.Mode == SerializationMode.SharedType && dataContract.IsValidContract(this.Mode)) dataContract = dataContract.GetValidContract(this.Mode); else dataContract = GetDataContract(declaredTypeHandle, declaredType); #endif if (!WriteClrTypeInfo(xmlWriter, dataContract) && DataContractResolver != null) { if (objectType == null) { objectType = Type.GetTypeFromHandle(objectTypeHandle); } WriteResolvedTypeInfo(xmlWriter, objectType, declaredType); } } else if (declaredType.IsArray)//Array covariance is not supported in XSD. If declared type is array do not write xsi:type. 
Instead write xsi:type for each item { // A call to OnHandleIsReference is not necessary here -- arrays cannot be IsReference dataContract = GetDataContract(objectTypeHandle, objectType); WriteClrTypeInfo(xmlWriter, dataContract); dataContract = GetDataContract(declaredTypeHandle, declaredType); } else { dataContract = GetDataContract(objectTypeHandle, objectType); if (OnHandleIsReference(xmlWriter, dataContract, obj)) return; if (!WriteClrTypeInfo(xmlWriter, dataContract)) { DataContract declaredTypeContract = (declaredTypeID >= 0) ? GetDataContract(declaredTypeID, declaredTypeHandle) : GetDataContract(declaredTypeHandle, declaredType); verifyKnownType = WriteTypeInfo(xmlWriter, dataContract, declaredTypeContract); } } SerializeAndVerifyType(dataContract, xmlWriter, obj, verifyKnownType, declaredTypeHandle, declaredType); } internal bool OnHandleIsReference(XmlWriterDelegator xmlWriter, DataContract contract, object obj) { if (!contract.IsReference || _isGetOnlyCollection) { return false; } bool isNew = true; int objectId = SerializedObjects.GetId(obj, ref isNew); _byValObjectsInScope.EnsureSetAsIsReference(obj); if (isNew) { xmlWriter.WriteAttributeString(Globals.SerPrefix, DictionaryGlobals.IdLocalName, DictionaryGlobals.SerializationNamespace, string.Format(CultureInfo.InvariantCulture, "{0}{1}", "i", objectId)); return false; } else { xmlWriter.WriteAttributeString(Globals.SerPrefix, DictionaryGlobals.RefLocalName, DictionaryGlobals.SerializationNamespace, string.Format(CultureInfo.InvariantCulture, "{0}{1}", "i", objectId)); return true; } } protected void SerializeAndVerifyType(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, bool verifyKnownType, RuntimeTypeHandle declaredTypeHandle, Type declaredType) { bool knownTypesAddedInCurrentScope = false; if (dataContract.KnownDataContracts != null) { scopedKnownTypes.Push(dataContract.KnownDataContracts); knownTypesAddedInCurrentScope = true; } #if !NET_NATIVE if (verifyKnownType) { if 
(!IsKnownType(dataContract, declaredType))
                {
                    DataContract knownContract = ResolveDataContractFromKnownTypes(dataContract.StableName.Name, dataContract.StableName.Namespace, null /*memberTypeContract*/);
                    if (knownContract == null || knownContract.UnderlyingType != dataContract.UnderlyingType)
                    {
                        /* Resolved contract is not among the declared known types: fail the serialization. */
                        throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.DcTypeNotFoundOnSerialize, DataContract.GetClrTypeFullName(dataContract.UnderlyingType), dataContract.StableName.Name, dataContract.StableName.Namespace)));
                    }
                }
            }
#endif
            WriteDataContractValue(dataContract, xmlWriter, obj, declaredTypeHandle);
            if (knownTypesAddedInCurrentScope)
            {
                scopedKnownTypes.Pop();
            }
        }

        /* Hooks for derived contexts that emit CLR type information; the base context writes none. */
        internal virtual bool WriteClrTypeInfo(XmlWriterDelegator xmlWriter, DataContract dataContract) { return false; }
        internal virtual bool WriteClrTypeInfo(XmlWriterDelegator xmlWriter, string clrTypeName, string clrAssemblyName) { return false; }
        internal virtual bool WriteClrTypeInfo(XmlWriterDelegator xmlWriter, Type dataContractType, string clrTypeName, string clrAssemblyName) { return false; }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteAnyType(XmlWriterDelegator xmlWriter, object value)
#else
        internal virtual void WriteAnyType(XmlWriterDelegator xmlWriter, object value)
#endif
        { xmlWriter.WriteAnyType(value); }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteString(XmlWriterDelegator xmlWriter, string value)
#else
        internal virtual void WriteString(XmlWriterDelegator xmlWriter, string value)
#endif
        { xmlWriter.WriteString(value); }

        /* Element-wrapped variant: writes nil for null, otherwise <name>value</name>. */
#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteString(XmlWriterDelegator xmlWriter, string value, XmlDictionaryString name, XmlDictionaryString ns)
#else
        internal virtual void WriteString(XmlWriterDelegator xmlWriter, string value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
        {
            if (value == null)
                WriteNull(xmlWriter, typeof(string), true/*isMemberTypeSerializable*/, name, ns);
            else
            {
                xmlWriter.WriteStartElementPrimitive(name, ns);
                xmlWriter.WriteString(value);
                xmlWriter.WriteEndElementPrimitive();
            }
        }

        /* Primitive writers: raw value, plus an element-wrapped variant that writes
           nil for null. Same pattern for base64, Uri and QName below. */
#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value)
#else
        internal virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value)
#endif
        { xmlWriter.WriteBase64(value); }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value, XmlDictionaryString name, XmlDictionaryString ns)
#else
        internal virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
        {
            if (value == null)
                WriteNull(xmlWriter, typeof(byte[]), true/*isMemberTypeSerializable*/, name, ns);
            else
            {
                xmlWriter.WriteStartElementPrimitive(name, ns);
                xmlWriter.WriteBase64(value);
                xmlWriter.WriteEndElementPrimitive();
            }
        }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value)
#else
        internal virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value)
#endif
        { xmlWriter.WriteUri(value); }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value, XmlDictionaryString name, XmlDictionaryString ns)
#else
        internal virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
        {
            if (value == null)
                WriteNull(xmlWriter, typeof(Uri), true/*isMemberTypeSerializable*/, name, ns);
            else
            {
                xmlWriter.WriteStartElementPrimitive(name, ns);
                xmlWriter.WriteUri(value);
                xmlWriter.WriteEndElementPrimitive();
            }
        }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value)
#else
        internal virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value)
#endif
        { xmlWriter.WriteQName(value); }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value, XmlDictionaryString name,
XmlDictionaryString ns)
#else
        internal virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
        {
            if (value == null)
                WriteNull(xmlWriter, typeof(XmlQualifiedName), true/*isMemberTypeSerializable*/, name, ns);
            else
            {
                /* A non-empty namespace needs an explicit element prefix so the QName's prefix can bind. */
                if (ns != null && ns.Value != null && ns.Value.Length > 0)
                    xmlWriter.WriteStartElement(Globals.ElementPrefix, name, ns);
                else
                    xmlWriter.WriteStartElement(name, ns);
                xmlWriter.WriteQName(value);
                xmlWriter.WriteEndElement();
            }
        }

        /* Root-level setup: declares the xsi namespace and registers the root object for cycle tracking. */
        internal void HandleGraphAtTopLevel(XmlWriterDelegator writer, object obj, DataContract contract)
        {
            writer.WriteXmlnsAttribute(Globals.XsiPrefix, DictionaryGlobals.SchemaInstanceNamespace);
            OnHandleReference(writer, obj, true /*canContainReferences*/);
        }

        /* Cycle detection: once the writer is deeper than depthToCheckCyclicReference,
           re-encountering an object already on the by-value path throws; otherwise the
           object is pushed. Always returns false (serialization proceeds). */
        internal virtual bool OnHandleReference(XmlWriterDelegator xmlWriter, object obj, bool canContainCyclicReference)
        {
            if (xmlWriter.depth < depthToCheckCyclicReference)
                return false;
            if (canContainCyclicReference)
            {
                if (_byValObjectsInScope.Contains(obj))
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.CannotSerializeObjectWithCycles, DataContract.GetClrTypeFullName(obj.GetType()))));
                _byValObjectsInScope.Push(obj);
            }
            return false;
        }

        internal virtual void OnEndHandleReference(XmlWriterDelegator xmlWriter, object obj, bool canContainCyclicReference)
        {
            if (xmlWriter.depth < depthToCheckCyclicReference)
                return;
            if (canContainCyclicReference)
            {
                _byValObjectsInScope.Pop(obj);
            }
        }

#if USE_REFEMIT
        public void WriteNull(XmlWriterDelegator xmlWriter, Type memberType, bool isMemberTypeSerializable)
#else
        internal void WriteNull(XmlWriterDelegator xmlWriter, Type memberType, bool isMemberTypeSerializable)
#endif
        {
            CheckIfTypeSerializable(memberType, isMemberTypeSerializable);
            WriteNull(xmlWriter);
        }

        internal void WriteNull(XmlWriterDelegator xmlWriter, Type memberType, bool isMemberTypeSerializable, XmlDictionaryString name, XmlDictionaryString
ns)
        {
            // Element-wrapped null: <name xsi:nil="true"/>.
            xmlWriter.WriteStartElement(name, ns);
            WriteNull(xmlWriter, memberType, isMemberTypeSerializable);
            xmlWriter.WriteEndElement();
        }

        // Quota + optional size attribute for arrays/collections; all variants funnel
        // into the private IncrementCollectionCount(int).
#if USE_REFEMIT
        public void IncrementArrayCount(XmlWriterDelegator xmlWriter, Array array)
#else
        internal void IncrementArrayCount(XmlWriterDelegator xmlWriter, Array array)
#endif
        {
            IncrementCollectionCount(xmlWriter, array.GetLength(0));
        }

#if USE_REFEMIT
        public void IncrementCollectionCount(XmlWriterDelegator xmlWriter, ICollection collection)
#else
        internal void IncrementCollectionCount(XmlWriterDelegator xmlWriter, ICollection collection)
#endif
        {
            IncrementCollectionCount(xmlWriter, collection.Count);
        }

#if USE_REFEMIT
        public void IncrementCollectionCountGeneric<T>(XmlWriterDelegator xmlWriter, ICollection<T> collection)
#else
        internal void IncrementCollectionCountGeneric<T>(XmlWriterDelegator xmlWriter, ICollection<T> collection)
#endif
        {
            IncrementCollectionCount(xmlWriter, collection.Count);
        }

        private void IncrementCollectionCount(XmlWriterDelegator xmlWriter, int size)
        {
            IncrementItemCount(size);
            WriteArraySize(xmlWriter, size);
        }

        // Base context writes no size attribute; binary contexts override.
        internal virtual void WriteArraySize(XmlWriterDelegator xmlWriter, int size)
        {
        }

#if USE_REFEMIT
        public static bool IsMemberTypeSameAsMemberValue(object obj, Type memberType)
#else
        internal static bool IsMemberTypeSameAsMemberValue(object obj, Type memberType)
#endif
        {
            if (obj == null || memberType == null)
                return false;
            return obj.GetType().TypeHandle.Equals(memberType.TypeHandle);
        }

#if USE_REFEMIT
        public static T GetDefaultValue<T>()
#else
        internal static T GetDefaultValue<T>()
#endif
        {
            return default(T);
        }

#if USE_REFEMIT
        public static T GetNullableValue<T>(Nullable<T> value) where T : struct
#else
        internal static T GetNullableValue<T>(Nullable<T> value) where T : struct
#endif
        {
            // FIX: this comment previously swallowed the rest of the physical line
            // (the return statement and the next method's #if header).
            // value.Value will throw if hasValue is false
            return value.Value;
        }

#if USE_REFEMIT
        public static void ThrowRequiredMemberMustBeEmitted(string memberName, Type type)
#else
        internal static void
ThrowRequiredMemberMustBeEmitted(string memberName, Type type)
#endif
        {
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SerializationException(SR.Format(SR.RequiredMemberMustBeEmitted, memberName, type.FullName)));
        }

#if USE_REFEMIT
        public static bool GetHasValue<T>(Nullable<T> value) where T : struct
#else
        internal static bool GetHasValue<T>(Nullable<T> value) where T : struct
#endif
        {
            return value.HasValue;
        }

        /* Instance variant reuses a cached XmlSerializableWriter; the root variant allocates a fresh one. */
        internal void WriteIXmlSerializable(XmlWriterDelegator xmlWriter, object obj)
        {
            if (_xmlSerializableWriter == null)
                _xmlSerializableWriter = new XmlSerializableWriter();
            WriteIXmlSerializable(xmlWriter, obj, _xmlSerializableWriter);
        }

        internal static void WriteRootIXmlSerializable(XmlWriterDelegator xmlWriter, object obj)
        {
            WriteIXmlSerializable(xmlWriter, obj, new XmlSerializableWriter());
        }

        /* Dispatches on the runtime shape of obj: IXmlSerializable, XmlElement, or
           XmlNode[]; anything else is an unknown XML type and throws. */
        private static void WriteIXmlSerializable(XmlWriterDelegator xmlWriter, object obj, XmlSerializableWriter xmlSerializableWriter)
        {
            xmlSerializableWriter.BeginWrite(xmlWriter.Writer, obj);
            IXmlSerializable xmlSerializable = obj as IXmlSerializable;
            if (xmlSerializable != null)
                xmlSerializable.WriteXml(xmlSerializableWriter);
            else
            {
                XmlElement xmlElement = obj as XmlElement;
                if (xmlElement != null)
                    xmlElement.WriteTo(xmlSerializableWriter);
                else
                {
                    XmlNode[] xmlNodes = obj as XmlNode[];
                    if (xmlNodes != null)
                        foreach (XmlNode xmlNode in xmlNodes)
                            xmlNode.WriteTo(xmlSerializableWriter);
                    else
                        throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.UnknownXmlType, DataContract.GetClrTypeFullName(obj.GetType()))));
                }
            }
            xmlSerializableWriter.EndWrite();
        }

        protected virtual void WriteDataContractValue(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle declaredTypeHandle)
        {
            dataContract.WriteXmlValue(xmlWriter, obj, this);
        }

        protected virtual void WriteNull(XmlWriterDelegator xmlWriter)
        {
            XmlObjectSerializer.WriteNull(xmlWriter);
        }

        private void
WriteResolvedTypeInfo(XmlWriterDelegator writer, Type objectType, Type declaredType) { XmlDictionaryString typeName, typeNamespace; if (ResolveType(objectType, declaredType, out typeName, out typeNamespace)) { WriteTypeInfo(writer, typeName, typeNamespace); } } private bool ResolveType(Type objectType, Type declaredType, out XmlDictionaryString typeName, out XmlDictionaryString typeNamespace) { if (!DataContractResolver.TryResolveType(objectType, declaredType, KnownTypeResolver, out typeName, out typeNamespace)) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ResolveTypeReturnedFalse, DataContract.GetClrTypeFullName(DataContractResolver.GetType()), DataContract.GetClrTypeFullName(objectType)))); } if (typeName == null) { if (typeNamespace == null) { return false; } else { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ResolveTypeReturnedNull, DataContract.GetClrTypeFullName(DataContractResolver.GetType()), DataContract.GetClrTypeFullName(objectType)))); } } if (typeNamespace == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ResolveTypeReturnedNull, DataContract.GetClrTypeFullName(DataContractResolver.GetType()), DataContract.GetClrTypeFullName(objectType)))); } return true; } protected virtual bool WriteTypeInfo(XmlWriterDelegator writer, DataContract contract, DataContract declaredContract) { if (XmlObjectSerializer.IsContractDeclared(contract, declaredContract)) { return false; } bool hasResolver = DataContractResolver != null; if (hasResolver) { WriteResolvedTypeInfo(writer, contract.UnderlyingType, declaredContract.UnderlyingType); } else { WriteTypeInfo(writer, contract.Name, contract.Namespace); } return hasResolver; } protected virtual void WriteTypeInfo(XmlWriterDelegator writer, string dataContractName, string dataContractNamespace) 
{ writer.WriteAttributeQualifiedName(Globals.XsiPrefix, DictionaryGlobals.XsiTypeLocalName, DictionaryGlobals.SchemaInstanceNamespace, dataContractName, dataContractNamespace); } protected virtual void WriteTypeInfo(XmlWriterDelegator writer, XmlDictionaryString dataContractName, XmlDictionaryString dataContractNamespace) { writer.WriteAttributeQualifiedName(Globals.XsiPrefix, DictionaryGlobals.XsiTypeLocalName, DictionaryGlobals.SchemaInstanceNamespace, dataContractName, dataContractNamespace); } #if !NET_NATIVE public void WriteExtensionData(XmlWriterDelegator xmlWriter, ExtensionDataObject extensionData, int memberIndex) { // Needed by the code generator, but not called. } #endif } }
/*
 * Copyright (c) InWorldz Halcyon Developers
 * Copyright (c) Contributors, http://opensimulator.org/
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSim Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text.RegularExpressions;
using System.Threading;
using log4net;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Scenes.Serialization;

namespace OpenSim.Region.Framework.Scenes
{
    /// <summary>
    /// Gather uuids for a given entity.
    /// </summary>
    ///
    /// This does a deep inspection of the entity to retrieve all the assets it uses (whether as textures, as scripts
    /// contained in inventory, as scripts contained in objects contained in another object's inventory, etc. Assets
    /// are only retrieved when they are necessary to carry out the inspection (i.e. a serialized object needs to be
    /// retrieved to work out which assets it references).
    public class UuidGatherer
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        /// <summary>
        /// Asset cache used for gathering assets
        /// </summary>
        protected IAssetCache m_assetCache;

        /// <summary>
        /// Used as a temporary store of an asset which represents an object. This can be a null if no appropriate
        /// asset was found by the asset service.
        /// </summary>
        protected AssetBase m_requestedObjectAsset;

        /// <summary>
        /// Signal whether we are currently waiting for the asset service to deliver an asset.
        /// </summary>
        protected bool m_waitingForObjectAsset;

        public UuidGatherer(IAssetCache assetCache)
        {
            m_assetCache = assetCache;
        }

        /// <summary>
        /// Gather all the asset uuids associated with the asset referenced by a given uuid.
        /// </summary>
        ///
        /// This includes both those directly associated with it (e.g. face textures) and recursively, those of items
        /// within it's inventory (e.g. objects contained within this object).
        ///
        /// <param name="assetUuid">The uuid of the asset for which to gather referenced assets</param>
        /// <param name="assetType">The type of the asset for the uuid given</param>
        /// <param name="assetUuids">The assets gathered</param>
        public void GatherAssetUuids(UUID assetUuid, AssetType assetType, IDictionary<UUID, int> assetUuids)
        {
            // The asset itself is always recorded, even if we cannot inspect its contents below.
            assetUuids[assetUuid] = 1;

            if (AssetType.Bodypart == assetType || AssetType.Clothing == assetType)
            {
                GetWearableAssetUuids(assetUuid, assetUuids);
            }
            else if (AssetType.LSLText == assetType)
            {
                GetScriptAssetUuids(assetUuid, assetUuids);
            }
            else if (AssetType.Object == assetType)
            {
                GetSceneObjectAssetUuids(assetUuid, assetUuids);
            }
        }

        /// <summary>
        /// Gather all the asset uuids associated with a given object.
        /// </summary>
        ///
        /// This includes both those directly associated with it (e.g. face textures) and recursively, those of items
        /// within it's inventory (e.g. objects contained within this object).
        ///
        /// <param name="sceneObject">The scene object for which to gather assets</param>
        /// <param name="assetUuids">The assets gathered</param>
        public void GatherAssetUuids(SceneObjectGroup sceneObject, IDictionary<UUID, int> assetUuids)
        {
            foreach (SceneObjectPart part in sceneObject.GetParts())
            {
                try
                {
                    Primitive.TextureEntry textureEntry = part.Shape.Textures;

                    // Get the prim's default texture.  This will be used for faces which don't have their own texture
                    assetUuids[textureEntry.DefaultTexture.TextureID] = 1;

                    // XXX: Not a great way to iterate through face textures, but there's no
                    // other method available to tell how many faces there actually are
                    foreach (Primitive.TextureEntryFace texture in textureEntry.FaceTextures)
                    {
                        if (texture != null)
                        {
                            assetUuids[texture.TextureID] = 1;
                        }
                    }

                    // If the prim is a sculpt then preserve this information too
                    if (part.Shape.SculptTexture != UUID.Zero)
                        assetUuids[part.Shape.SculptTexture] = 1;

                    // scan through the rendermaterials of this part for any textures used as materials
                    if (part.Shape.RenderMaterials != null)
                    {
                        lock (part.Shape.RenderMaterials)
                        {
                            List<RenderMaterial> mats = part.Shape.RenderMaterials.GetMaterials();
                            foreach (var entry in mats)
                            {
                                if (entry.NormalID != UUID.Zero)
                                    assetUuids[entry.NormalID] = 1;
                                if (entry.SpecularID != UUID.Zero)
                                    assetUuids[entry.SpecularID] = 1;
                            }
                        }
                    }

                    // Clone so the inventory can't be mutated under us while we walk it.
                    TaskInventoryDictionary taskDictionary = (TaskInventoryDictionary)part.TaskInventory.Clone();

                    // Now analyze this prim's inventory items to preserve all the uuids that they reference
                    foreach (TaskInventoryItem tii in taskDictionary.Values)
                    {
                        if (!assetUuids.ContainsKey(tii.AssetID))
                            GatherAssetUuids(tii.AssetID, (AssetType)tii.Type, assetUuids);
                    }
                }
                catch (Exception e)
                {
                    // Best effort: a malformed part (e.g. truncated texture entry) must not abort the whole gather.
                    m_log.ErrorFormat("[ASSET GATHERER]: Failed to get part - {0}", e);
                    m_log.DebugFormat(
                        "[ASSET GATHERER]: Texture entry length for prim was {0} (min is 46)",
                        part.Shape.TextureEntryBytes.Length);
                }
            }
        }

        /// <summary>
        /// The callback made when we request the asset for an object from the asset service.
        /// </summary>
        protected void AssetRequestCallback(UUID assetID, AssetBase asset)
        {
            lock (this)
            {
                m_requestedObjectAsset = asset;
                m_waitingForObjectAsset = false;
                Monitor.Pulse(this);
            }
        }

        /// <summary>
        /// Get an asset synchronously, potentially using an asynchronous callback. If the
        /// asynchronous callback is used, we will wait for it to complete.
        /// </summary>
        /// <param name="uuid"></param>
        /// <returns>The asset, or null if the asset service could not supply one.</returns>
        protected AssetBase GetAsset(UUID uuid)
        {
            // NOTE(review): m_waitingForObjectAsset/m_requestedObjectAsset make this method
            // non-reentrant — presumably a gatherer instance is only ever used from one thread; confirm.
            m_waitingForObjectAsset = true;
            m_assetCache.GetAsset(uuid, AssetRequestCallback, AssetRequestInfo.InternalRequest());

            // The asset cache callback can either
            //
            // 1. Complete on the same thread (if the asset is already in the cache) or
            // 2. Come in via a different thread (if we need to go fetch it).
            //
            // The code below handles both these alternatives.
            lock (this)
            {
                if (m_waitingForObjectAsset)
                {
                    Monitor.Wait(this);
                    m_waitingForObjectAsset = false;
                }
            }

            return m_requestedObjectAsset;
        }

        /// <summary>
        /// Record the asset uuids embedded within the given script.
        /// </summary>
        /// <param name="scriptUuid"></param>
        /// <param name="assetUuids">Dictionary in which to record the references</param>
        protected void GetScriptAssetUuids(UUID scriptUuid, IDictionary<UUID, int> assetUuids)
        {
            AssetBase scriptAsset = GetAsset(scriptUuid);

            if (null != scriptAsset)
            {
                string script = Utils.BytesToString(scriptAsset.Data);
                // Any textual UUID in the script source is treated as a referenced asset.
                MatchCollection uuidMatches = Util.UUIDPattern.Matches(script);

                foreach (Match uuidMatch in uuidMatches)
                {
                    UUID uuid = new UUID(uuidMatch.Value);
                    assetUuids[uuid] = 1;
                }
            }
        }

        /// <summary>
        /// Record the uuids referenced by the given wearable asset.
        /// </summary>
        /// <param name="wearableAssetUuid"></param>
        /// <param name="assetUuids">Dictionary in which to record the references</param>
        protected void GetWearableAssetUuids(UUID wearableAssetUuid, IDictionary<UUID, int> assetUuids)
        {
            AssetBase assetBase = GetAsset(wearableAssetUuid);

            // FIX: GetAsset() returns null when the asset service cannot supply the asset
            // (the other Get*AssetUuids methods already guard against this); previously this
            // threw a NullReferenceException and aborted the gather.
            if (null == assetBase)
                return;

            OpenMetaverse.Assets.AssetWearable wearableAsset
                = new OpenMetaverse.Assets.AssetBodypart(wearableAssetUuid, assetBase.Data);
            wearableAsset.Decode();

            foreach (UUID uuid in wearableAsset.Textures.Values)
            {
                assetUuids[uuid] = 1;
            }
        }

        /// <summary>
        /// Get all the asset uuids associated with a given object. This includes both those directly associated with
        /// it (e.g. face textures) and recursively, those of items within it's inventory (e.g. objects contained
        /// within this object).
        /// </summary>
        /// <param name="sceneObjectUuid"></param>
        /// <param name="assetUuids"></param>
        protected void GetSceneObjectAssetUuids(UUID sceneObjectUuid, IDictionary<UUID, int> assetUuids)
        {
            AssetBase objectAsset = GetAsset(sceneObjectUuid);

            if (null != objectAsset)
            {
                string xml = Utils.BytesToString(objectAsset.Data);
                SceneObjectGroup sog = SceneObjectSerializer.FromOriginalXmlFormat(xml);
                if (sog != null)
                    GatherAssetUuids(sog, assetUuids);
            }
        }
    }
}
using System;
using System.IO;
using System.Collections.Generic;
using RestSharp;
using IO.Swagger.Client;
using IO.Swagger.Model;

namespace IO.Swagger.Api
{
    /// <summary>
    /// Operations on layers and their associated storage engines.
    /// </summary>
    public interface ILayerApi
    {
        /// <summary>
        /// Returns a list of all layers and their associated storage engine.
        /// </summary>
        /// <returns>Dictionary&lt;string, Layer&gt;</returns>
        Dictionary<string, Layer> AllLayers ();

        /// <summary>
        /// Returns a list of all layers and their associated storage engine.
        /// </summary>
        /// <returns>Dictionary&lt;string, Layer&gt;</returns>
        System.Threading.Tasks.Task<Dictionary<string, Layer>> AllLayersAsync ();

        /// <summary>
        /// Adds a new layer to the system.
        /// </summary>
        /// <param name="body">Layer object that needs to be added to the datastorage</param>
        /// <returns></returns>
        void AddLayer (Layer body);

        /// <summary>
        /// Adds a new layer to the system.
        /// </summary>
        /// <param name="body">Layer object that needs to be added to the datastorage</param>
        /// <returns></returns>
        System.Threading.Tasks.Task AddLayerAsync (Layer body);

        /// <summary>
        /// Find layer by ID. Returns a single layer.
        /// </summary>
        /// <param name="layerId">ID of the layer to be returned</param>
        /// <returns>Layer</returns>
        Layer GetLayer (string layerId);

        /// <summary>
        /// Find layer by ID. Returns a single layer.
        /// </summary>
        /// <param name="layerId">ID of the layer to be returned</param>
        /// <returns>Layer</returns>
        System.Threading.Tasks.Task<Layer> GetLayerAsync (string layerId);

        /// <summary>
        /// Update an existing layer.
        /// </summary>
        /// <param name="body">JSON that will be used to update the layer</param>
        /// <param name="layerId">ID or Name of the Layer to update</param>
        /// <returns></returns>
        void UpdateLayer (Feature body, string layerId);

        /// <summary>
        /// Update an existing layer.
        /// </summary>
        /// <param name="body">JSON that will be used to update the layer</param>
        /// <param name="layerId">ID or Name of the Layer to update</param>
        /// <returns></returns>
        System.Threading.Tasks.Task UpdateLayerAsync (Feature body, string layerId);

        /// <summary>
        /// Delete a layer by ID. Deletes an entire layer.
        /// </summary>
        /// <param name="layerId">ID of layer to be deleted</param>
        /// <returns></returns>
        void DeleteLayer (string layerId);

        /// <summary>
        /// Delete a layer by ID. Deletes an entire layer.
        /// </summary>
        /// <param name="layerId">ID of layer to be deleted</param>
        /// <returns></returns>
        System.Threading.Tasks.Task DeleteLayerAsync (string layerId);
    }

    /// <summary>
    /// Represents a collection of functions to interact with the API endpoints
    /// </summary>
    public class LayerApi : ILayerApi
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="LayerApi"/> class.
        /// </summary>
        /// <param name="apiClient"> an instance of ApiClient (optional)</param>
        /// <returns></returns>
        public LayerApi(ApiClient apiClient = null)
        {
            // Fall back to the globally configured default client when none is supplied.
            if (apiClient == null)
                this.ApiClient = Configuration.DefaultApiClient;
            else
                this.ApiClient = apiClient;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="LayerApi"/> class.
        /// </summary>
        /// <returns></returns>
        public LayerApi(String basePath)
        {
            this.ApiClient = new ApiClient(basePath);
        }

        /// <summary>
        /// Sets the base path of the API client.
        /// </summary>
        /// <param name="basePath">The base path</param>
        /// <value>The base path</value>
        public void SetBasePath(String basePath)
        {
            this.ApiClient.BasePath = basePath;
        }

        /// <summary>
        /// Gets the base path of the API client.
        /// </summary>
        /// <value>The base path</value>
        public String GetBasePath()
        {
            return this.ApiClient.BasePath;
        }

        /// <summary>
        /// Gets or sets the API client.
        /// </summary>
        /// <value>An instance of the ApiClient</value>
        public ApiClient ApiClient { get; set; }

        // Throws ApiException for HTTP errors (status >= 400) and for transport failures
        // (RestSharp reports those with StatusCode == 0 and the detail in ErrorMessage).
        // FIX: the async endpoints previously omitted the StatusCode == 0 check that the
        // sync endpoints performed, so transport failures slipped through to deserialization.
        private static void ThrowIfError(IRestResponse response, string method)
        {
            int status = (int)response.StatusCode;
            if (status >= 400)
                throw new ApiException(status, "Error calling " + method + ": " + response.Content, response.Content);
            if (status == 0)
                throw new ApiException(status, "Error calling " + method + ": " + response.ErrorMessage, response.ErrorMessage);
        }

        // Shared synchronous request plumbing: builds the empty parameter collections,
        // invokes the API client and validates the response.
        private IRestResponse Execute(String path, Method method, Dictionary<String, String> pathParams, String postBody, String methodName)
        {
            var queryParams = new Dictionary<String, String>();
            var headerParams = new Dictionary<String, String>();
            var formParams = new Dictionary<String, String>();
            var fileParams = new Dictionary<String, FileParameter>();

            // authentication setting, if any
            String[] authSettings = new String[] { };

            IRestResponse response = (IRestResponse) ApiClient.CallApi(path, method, queryParams, postBody, headerParams, formParams, fileParams, pathParams, authSettings);
            ThrowIfError(response, methodName);
            return response;
        }

        // Shared asynchronous request plumbing; mirrors Execute().
        private async System.Threading.Tasks.Task<IRestResponse> ExecuteAsync(String path, Method method, Dictionary<String, String> pathParams, String postBody, String methodName)
        {
            var queryParams = new Dictionary<String, String>();
            var headerParams = new Dictionary<String, String>();
            var formParams = new Dictionary<String, String>();
            var fileParams = new Dictionary<String, FileParameter>();

            // authentication setting, if any
            String[] authSettings = new String[] { };

            IRestResponse response = (IRestResponse) await ApiClient.CallApiAsync(path, method, queryParams, postBody, headerParams, formParams, fileParams, pathParams, authSettings);
            ThrowIfError(response, methodName);
            return response;
        }

        // Path parameters common to every request; optionally includes the layerId.
        private Dictionary<String, String> BuildPathParams(string layerId = null)
        {
            var pathParams = new Dictionary<String, String>();
            pathParams.Add("format", "json");
            if (layerId != null)
                pathParams.Add("layerId", ApiClient.ParameterToString(layerId)); // path parameter
            return pathParams;
        }

        /// <summary>
        /// Returns a list of all layers and their associated storage engine.
        /// </summary>
        /// <returns>Dictionary&lt;string, Layer&gt;</returns>
        public Dictionary<string, Layer> AllLayers ()
        {
            IRestResponse response = Execute("/layers/", Method.GET, BuildPathParams(), null, "AllLayers");
            return (Dictionary<string, Layer>) ApiClient.Deserialize(response.Content, typeof(Dictionary<string, Layer>), response.Headers);
        }

        /// <summary>
        /// Returns a list of all layers and their associated storage engine.
        /// </summary>
        /// <returns>Dictionary&lt;string, Layer&gt;</returns>
        public async System.Threading.Tasks.Task<Dictionary<string, Layer>> AllLayersAsync ()
        {
            IRestResponse response = await ExecuteAsync("/layers/", Method.GET, BuildPathParams(), null, "AllLayers");
            return (Dictionary<string, Layer>) ApiClient.Deserialize(response.Content, typeof(Dictionary<string, Layer>), response.Headers);
        }

        /// <summary>
        /// Adds a new layer to the system.
        /// </summary>
        /// <param name="body">Layer object that needs to be added to the datastorage</param>
        /// <returns></returns>
        public void AddLayer (Layer body)
        {
            // verify the required parameter 'body' is set
            if (body == null)
                throw new ApiException(400, "Missing required parameter 'body' when calling AddLayer");

            String postBody = ApiClient.Serialize(body); // http body (model) parameter
            Execute("/layers/", Method.POST, BuildPathParams(), postBody, "AddLayer");
        }

        /// <summary>
        /// Adds a new layer to the system.
        /// </summary>
        /// <param name="body">Layer object that needs to be added to the datastorage</param>
        /// <returns></returns>
        public async System.Threading.Tasks.Task AddLayerAsync (Layer body)
        {
            // verify the required parameter 'body' is set
            if (body == null)
                throw new ApiException(400, "Missing required parameter 'body' when calling AddLayer");

            String postBody = ApiClient.Serialize(body); // http body (model) parameter
            await ExecuteAsync("/layers/", Method.POST, BuildPathParams(), postBody, "AddLayer");
        }

        /// <summary>
        /// Find layer by ID. Returns a single layer.
        /// </summary>
        /// <param name="layerId">ID of the layer to be returned</param>
        /// <returns>Layer</returns>
        public Layer GetLayer (string layerId)
        {
            // verify the required parameter 'layerId' is set
            if (layerId == null)
                throw new ApiException(400, "Missing required parameter 'layerId' when calling GetLayer");

            IRestResponse response = Execute("/layers/{layerId}", Method.GET, BuildPathParams(layerId), null, "GetLayer");
            return (Layer) ApiClient.Deserialize(response.Content, typeof(Layer), response.Headers);
        }

        /// <summary>
        /// Find layer by ID. Returns a single layer.
        /// </summary>
        /// <param name="layerId">ID of the layer to be returned</param>
        /// <returns>Layer</returns>
        public async System.Threading.Tasks.Task<Layer> GetLayerAsync (string layerId)
        {
            // verify the required parameter 'layerId' is set
            if (layerId == null)
                throw new ApiException(400, "Missing required parameter 'layerId' when calling GetLayer");

            IRestResponse response = await ExecuteAsync("/layers/{layerId}", Method.GET, BuildPathParams(layerId), null, "GetLayer");
            return (Layer) ApiClient.Deserialize(response.Content, typeof(Layer), response.Headers);
        }

        /// <summary>
        /// Update an existing layer.
        /// </summary>
        /// <param name="body">JSON that will be used to update the layer</param>
        /// <param name="layerId">ID or Name of the Layer to update</param>
        /// <returns></returns>
        public void UpdateLayer (Feature body, string layerId)
        {
            // verify the required parameter 'body' is set
            if (body == null)
                throw new ApiException(400, "Missing required parameter 'body' when calling UpdateLayer");
            // verify the required parameter 'layerId' is set
            if (layerId == null)
                throw new ApiException(400, "Missing required parameter 'layerId' when calling UpdateLayer");

            String postBody = ApiClient.Serialize(body); // http body (model) parameter
            Execute("/layers/{layerId}", Method.PUT, BuildPathParams(layerId), postBody, "UpdateLayer");
        }

        /// <summary>
        /// Update an existing layer.
        /// </summary>
        /// <param name="body">JSON that will be used to update the layer</param>
        /// <param name="layerId">ID or Name of the Layer to update</param>
        /// <returns></returns>
        public async System.Threading.Tasks.Task UpdateLayerAsync (Feature body, string layerId)
        {
            // verify the required parameter 'body' is set
            if (body == null)
                throw new ApiException(400, "Missing required parameter 'body' when calling UpdateLayer");
            // verify the required parameter 'layerId' is set
            if (layerId == null)
                throw new ApiException(400, "Missing required parameter 'layerId' when calling UpdateLayer");

            String postBody = ApiClient.Serialize(body); // http body (model) parameter
            await ExecuteAsync("/layers/{layerId}", Method.PUT, BuildPathParams(layerId), postBody, "UpdateLayer");
        }

        /// <summary>
        /// Delete a layer by ID. Deletes an entire layer.
        /// </summary>
        /// <param name="layerId">ID of layer to be deleted</param>
        /// <returns></returns>
        public void DeleteLayer (string layerId)
        {
            // verify the required parameter 'layerId' is set
            if (layerId == null)
                throw new ApiException(400, "Missing required parameter 'layerId' when calling DeleteLayer");

            Execute("/layers/{layerId}", Method.DELETE, BuildPathParams(layerId), null, "DeleteLayer");
        }

        /// <summary>
        /// Delete a layer by ID. Deletes an entire layer.
        /// </summary>
        /// <param name="layerId">ID of layer to be deleted</param>
        /// <returns></returns>
        public async System.Threading.Tasks.Task DeleteLayerAsync (string layerId)
        {
            // verify the required parameter 'layerId' is set
            if (layerId == null)
                throw new ApiException(400, "Missing required parameter 'layerId' when calling DeleteLayer");

            await ExecuteAsync("/layers/{layerId}", Method.DELETE, BuildPathParams(layerId), null, "DeleteLayer");
        }
    }
}
/*
 * http://www.codeproject.com/Articles/15633/Manipulating-NTFS-Junction-Points-in-NET
 */
using System;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using Microsoft.Win32.SafeHandles;

namespace PrettyJunction
{
    /// <summary>
    /// Provides access to NTFS junction points in .Net.
    /// </summary>
    public static class JunctionPoint
    {
        /// <summary>
        /// The file or directory is not a reparse point.
        /// </summary>
        private const int ERROR_NOT_A_REPARSE_POINT = 4390;

        /// <summary>
        /// The reparse point attribute cannot be set because it conflicts with an existing attribute.
        /// </summary>
        private const int ERROR_REPARSE_ATTRIBUTE_CONFLICT = 4391;

        /// <summary>
        /// The data present in the reparse point buffer is invalid.
        /// </summary>
        private const int ERROR_INVALID_REPARSE_DATA = 4392;

        /// <summary>
        /// The tag present in the reparse point buffer is invalid.
        /// </summary>
        private const int ERROR_REPARSE_TAG_INVALID = 4393;

        /// <summary>
        /// There is a mismatch between the tag specified in the request and the tag present in the reparse point.
        /// </summary>
        private const int ERROR_REPARSE_TAG_MISMATCH = 4394;

        /// <summary>
        /// Command to set the reparse point data block.
        /// </summary>
        private const int FSCTL_SET_REPARSE_POINT = 0x000900A4;

        /// <summary>
        /// Command to get the reparse point data block.
        /// </summary>
        private const int FSCTL_GET_REPARSE_POINT = 0x000900A8;

        /// <summary>
        /// Command to delete the reparse point data base.
        /// </summary>
        private const int FSCTL_DELETE_REPARSE_POINT = 0x000900AC;

        /// <summary>
        /// Reparse point tag used to identify mount points and junction points.
        /// </summary>
        private const uint IO_REPARSE_TAG_MOUNT_POINT = 0xA0000003;

        /// <summary>
        /// This prefix indicates to NTFS that the path is to be treated as a non-interpreted
        /// path in the virtual file system.
        /// </summary>
        private const string NonInterpretedPathPrefix = @"\??\";

        [Flags]
        private enum EFileAccess : uint
        {
            GenericRead = 0x80000000,
            GenericWrite = 0x40000000,
            GenericExecute = 0x20000000,
            GenericAll = 0x10000000,
        }

        [Flags]
        private enum EFileShare : uint
        {
            None = 0x00000000,
            Read = 0x00000001,
            Write = 0x00000002,
            Delete = 0x00000004,
        }

        private enum ECreationDisposition : uint
        {
            New = 1,
            CreateAlways = 2,
            OpenExisting = 3,
            OpenAlways = 4,
            TruncateExisting = 5,
        }

        [Flags]
        private enum EFileAttributes : uint
        {
            Readonly = 0x00000001,
            Hidden = 0x00000002,
            System = 0x00000004,
            Directory = 0x00000010,
            Archive = 0x00000020,
            Device = 0x00000040,
            Normal = 0x00000080,
            Temporary = 0x00000100,
            SparseFile = 0x00000200,
            ReparsePoint = 0x00000400,
            Compressed = 0x00000800,
            Offline = 0x00001000,
            NotContentIndexed = 0x00002000,
            Encrypted = 0x00004000,
            Write_Through = 0x80000000,
            Overlapped = 0x40000000,
            NoBuffering = 0x20000000,
            RandomAccess = 0x10000000,
            SequentialScan = 0x08000000,
            DeleteOnClose = 0x04000000,
            BackupSemantics = 0x02000000,
            PosixSemantics = 0x01000000,
            OpenReparsePoint = 0x00200000,
            OpenNoRecall = 0x00100000,
            FirstPipeInstance = 0x00080000
        }

        [StructLayout(LayoutKind.Sequential)]
        private struct REPARSE_DATA_BUFFER
        {
            /// <summary>
            /// Reparse point tag. Must be a Microsoft reparse point tag.
            /// </summary>
            public uint ReparseTag;

            /// <summary>
            /// Size, in bytes, of the data after the Reserved member. This can be calculated by:
            /// (4 * sizeof(ushort)) + SubstituteNameLength + PrintNameLength +
            /// (namesAreNullTerminated ? 2 * sizeof(char) : 0);
            /// </summary>
            public ushort ReparseDataLength;

            /// <summary>
            /// Reserved; do not use.
            /// </summary>
            public ushort Reserved;

            /// <summary>
            /// Offset, in bytes, of the substitute name string in the PathBuffer array.
            /// </summary>
            public ushort SubstituteNameOffset;

            /// <summary>
            /// Length, in bytes, of the substitute name string. If this string is null-terminated,
            /// SubstituteNameLength does not include space for the null character.
            /// </summary>
            public ushort SubstituteNameLength;

            /// <summary>
            /// Offset, in bytes, of the print name string in the PathBuffer array.
            /// </summary>
            public ushort PrintNameOffset;

            /// <summary>
            /// Length, in bytes, of the print name string. If this string is null-terminated,
            /// PrintNameLength does not include space for the null character.
            /// </summary>
            public ushort PrintNameLength;

            /// <summary>
            /// A buffer containing the unicode-encoded path string. The path string contains
            /// the substitute name string and print name string.
            /// </summary>
            [MarshalAs(UnmanagedType.ByValArray, SizeConst = 0x3FF0)]
            public byte[] PathBuffer;
        }

        [DllImport("kernel32.dll", CharSet = CharSet.Auto, SetLastError = true)]
        private static extern bool DeviceIoControl(IntPtr hDevice, uint dwIoControlCode,
            IntPtr InBuffer, int nInBufferSize,
            IntPtr OutBuffer, int nOutBufferSize,
            out int pBytesReturned, IntPtr lpOverlapped);

        [DllImport("kernel32.dll", SetLastError = true)]
        private static extern IntPtr CreateFile(
            string lpFileName,
            EFileAccess dwDesiredAccess,
            EFileShare dwShareMode,
            IntPtr lpSecurityAttributes,
            ECreationDisposition dwCreationDisposition,
            EFileAttributes dwFlagsAndAttributes,
            IntPtr hTemplateFile);

        /// <summary>
        /// Creates a junction point from the specified directory to the specified target directory.
        /// </summary>
        /// <remarks>
        /// Only works on NTFS.
        /// </remarks>
        /// <param name="junctionPoint">The junction point path</param>
        /// <param name="targetDir">The target directory</param>
        /// <param name="overwrite">If true overwrites an existing reparse point or empty directory</param>
        /// <exception cref="IOException">Thrown when the junction point could not be created or when
        /// an existing directory was found and <paramref name="overwrite" /> if false</exception>
        public static void Create(string junctionPoint, string targetDir, bool overwrite)
        {
            targetDir = Path.GetFullPath(targetDir);

            // FIX: removed leftover Console.WriteLine debug output from this library method
            // (it leaked file-system paths to the console on every failure path).
            if (!Directory.Exists(targetDir))
                throw new IOException("Target path does not exist or is not a directory.");

            if (Directory.Exists(junctionPoint))
            {
                if (!overwrite)
                    throw new IOException("Directory already exists and overwrite parameter is false.");
            }
            else
            {
                // The junction is layered onto an (empty) directory, so one must exist first.
                Directory.CreateDirectory(junctionPoint);
            }

            using (SafeFileHandle handle = OpenReparsePoint(junctionPoint, EFileAccess.GenericWrite))
            {
                byte[] targetDirBytes = Encoding.Unicode.GetBytes(NonInterpretedPathPrefix + Path.GetFullPath(targetDir));

                REPARSE_DATA_BUFFER reparseDataBuffer = new REPARSE_DATA_BUFFER();

                reparseDataBuffer.ReparseTag = IO_REPARSE_TAG_MOUNT_POINT;
                // Per the REPARSE_DATA_BUFFER contract above: 4 ushort fields (8 bytes)
                // + substitute name length + two UTF-16 null terminators (4 bytes) = len + 12.
                reparseDataBuffer.ReparseDataLength = (ushort)(targetDirBytes.Length + 12);
                reparseDataBuffer.SubstituteNameOffset = 0;
                reparseDataBuffer.SubstituteNameLength = (ushort)targetDirBytes.Length;
                // Print name is empty; its offset points just past the substitute name's null char.
                reparseDataBuffer.PrintNameOffset = (ushort)(targetDirBytes.Length + 2);
                reparseDataBuffer.PrintNameLength = 0;
                reparseDataBuffer.PathBuffer = new byte[0x3ff0];
                Array.Copy(targetDirBytes, reparseDataBuffer.PathBuffer, targetDirBytes.Length);

                int inBufferSize = Marshal.SizeOf(reparseDataBuffer);
                IntPtr inBuffer = Marshal.AllocHGlobal(inBufferSize);
                try
                {
                    Marshal.StructureToPtr(reparseDataBuffer, inBuffer, false);

                    int bytesReturned;
                    // Only the used portion of the buffer is handed to the driver:
                    // 8-byte reparse header + 8 bytes of name offsets/lengths + name + terminators.
                    bool result = DeviceIoControl(handle.DangerousGetHandle(), FSCTL_SET_REPARSE_POINT,
                        inBuffer, targetDirBytes.Length + 20, IntPtr.Zero, 0, out bytesReturned, IntPtr.Zero);

                    if (!result)
                        ThrowLastWin32Error("Unable to create junction point.");
                }
                finally
                {
                    Marshal.FreeHGlobal(inBuffer);
                }
            }
        }

        /// <summary>
        /// Deletes a junction point at the specified source directory along with the directory itself.
        /// Does nothing if the junction point does not exist.
        /// </summary>
        /// <remarks>
        /// Only works on NTFS.
        /// </remarks>
        /// <param name="junctionPoint">The junction point path</param>
        public static void Delete(string junctionPoint)
        {
            if (!Directory.Exists(junctionPoint))
            {
                if (File.Exists(junctionPoint))
                    throw new IOException("Path is not a junction point.");

                return;
            }

            using (SafeFileHandle handle = OpenReparsePoint(junctionPoint, EFileAccess.GenericWrite))
            {
                // Deleting only needs the 8-byte reparse header (tag + zero data length).
                REPARSE_DATA_BUFFER reparseDataBuffer = new REPARSE_DATA_BUFFER();

                reparseDataBuffer.ReparseTag = IO_REPARSE_TAG_MOUNT_POINT;
                reparseDataBuffer.ReparseDataLength = 0;
                reparseDataBuffer.PathBuffer = new byte[0x3ff0];

                int inBufferSize = Marshal.SizeOf(reparseDataBuffer);
                IntPtr inBuffer = Marshal.AllocHGlobal(inBufferSize);
                try
                {
                    Marshal.StructureToPtr(reparseDataBuffer, inBuffer, false);

                    int bytesReturned;
                    bool result = DeviceIoControl(handle.DangerousGetHandle(), FSCTL_DELETE_REPARSE_POINT,
                        inBuffer, 8, IntPtr.Zero, 0, out bytesReturned, IntPtr.Zero);

                    if (!result)
                        ThrowLastWin32Error("Unable to delete junction point.");
                }
                finally
                {
                    Marshal.FreeHGlobal(inBuffer);
                }

                try
                {
                    // Once the reparse data is gone, the junction is an ordinary empty directory.
                    Directory.Delete(junctionPoint);
                }
                catch (IOException ex)
                {
                    throw new IOException("Unable to delete junction point.", ex);
                }
            }
        }

        /// <summary>
        /// Determines whether the specified path exists and refers to a junction point.
        /// </summary>
        /// <param name="path">The junction point path</param>
        /// <returns>True if the specified path represents a junction point</returns>
        /// <exception cref="IOException">Thrown if the specified path is invalid
        /// or some other error occurs</exception>
        public static bool Exists(string path)
        {
            if (!Directory.Exists(path))
                return false;

            using (SafeFileHandle handle = OpenReparsePoint(path, EFileAccess.GenericRead))
            {
                // InternalGetTarget returns null for a plain directory (not a reparse point).
                string target = InternalGetTarget(handle);
                return target != null;
            }
        }

        /// <summary>
        /// Gets the target of the specified junction point.
        /// </summary>
        /// <remarks>
        /// Only works on NTFS.
        /// </remarks>
        /// <param name="junctionPoint">The junction point path</param>
        /// <returns>The target of the junction point</returns>
        /// <exception cref="IOException">Thrown when the specified path does not
        /// exist, is invalid, is not a junction point, or some other error occurs</exception>
        public static string GetTarget(string junctionPoint)
        {
            using (SafeFileHandle handle = OpenReparsePoint(junctionPoint, EFileAccess.GenericRead))
            {
                string target = InternalGetTarget(handle);
                if (target == null)
                    throw new IOException("Path is not a junction point.");

                return target;
            }
        }

        /// <summary>
        /// Reads the reparse data of an open handle and extracts the substitute (target) path.
        /// Returns null when the handle does not refer to a mount-point/junction reparse point.
        /// </summary>
        private static string InternalGetTarget(SafeFileHandle handle)
        {
            int outBufferSize = Marshal.SizeOf(typeof(REPARSE_DATA_BUFFER));
            IntPtr outBuffer = Marshal.AllocHGlobal(outBufferSize);

            try
            {
                int bytesReturned;
                bool result = DeviceIoControl(handle.DangerousGetHandle(), FSCTL_GET_REPARSE_POINT,
                    IntPtr.Zero, 0, outBuffer, outBufferSize, out bytesReturned, IntPtr.Zero);

                if (!result)
                {
                    int error = Marshal.GetLastWin32Error();
                    // A plain directory/file is not an error condition for callers like Exists().
                    if (error == ERROR_NOT_A_REPARSE_POINT)
                        return null;

                    ThrowLastWin32Error("Unable to get information about junction point.");
                }

                REPARSE_DATA_BUFFER reparseDataBuffer = (REPARSE_DATA_BUFFER)
                    Marshal.PtrToStructure(outBuffer, typeof(REPARSE_DATA_BUFFER));

                // Other reparse tags (e.g. symlinks) are deliberately not reported as junctions.
                if (reparseDataBuffer.ReparseTag != IO_REPARSE_TAG_MOUNT_POINT)
                    return null;

                string targetDir = Encoding.Unicode.GetString(reparseDataBuffer.PathBuffer,
                    reparseDataBuffer.SubstituteNameOffset, reparseDataBuffer.SubstituteNameLength);

                if (targetDir.StartsWith(NonInterpretedPathPrefix))
                    targetDir = targetDir.Substring(NonInterpretedPathPrefix.Length);

                return targetDir;
            }
            finally
            {
                Marshal.FreeHGlobal(outBuffer);
            }
        }

        /// <summary>
        /// Opens the reparse point itself (not its target) with backup semantics.
        /// </summary>
        /// <exception cref="JunctionException">Thrown when the underlying CreateFile call fails.</exception>
        private static SafeFileHandle OpenReparsePoint(string reparsePoint, EFileAccess accessMode)
        {
            IntPtr handle = CreateFile(reparsePoint, accessMode,
                EFileShare.Read | EFileShare.Write | EFileShare.Delete,
                IntPtr.Zero, ECreationDisposition.OpenExisting,
                EFileAttributes.BackupSemantics | EFileAttributes.OpenReparsePoint, IntPtr.Zero);

            // FIX: the original tested Marshal.GetLastWin32Error() != 0 even when CreateFile
            // succeeded. The thread's last-error value is only meaningful after a failed call,
            // so a stale non-zero value could raise a spurious exception for a valid handle.
            // CreateFile signals failure by returning INVALID_HANDLE_VALUE (-1); test that instead.
            if (handle == new IntPtr(-1))
                ThrowLastWin32Error("Unable to open reparse point.");

            // ownsHandle: true — the SafeFileHandle closes the raw handle on dispose.
            return new SafeFileHandle(handle, true);
        }

        /// <summary>
        /// Wraps the current Win32 error into a <see cref="JunctionException"/> with a
        /// descriptive message and throws it.
        /// </summary>
        private static void ThrowLastWin32Error(string message)
        {
            throw new JunctionException(message, Marshal.GetExceptionForHR(Marshal.GetHRForLastWin32Error()));
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;

namespace RebbauReichenbach.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// </summary>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
            // Factories are probed in order; the default object-generator factory is the
            // only entry until the app registers overrides/fallbacks (see property remarks).
            SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
            {
                DefaultSampleObjectFactory,
            };
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
        /// stopping when the factory successfully returns a non-<see langref="null"/> object.
        /// </summary>
        /// <remarks>
        /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
        /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
        /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
        [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
            Justification = "This is an appropriate nesting of generic types")]
        public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            // NOTE(review): Dictionary.Add throws on duplicate keys, so two registered action
            // samples sharing a media type for the same action would fail here — registration
            // is expected to keep media types unique per action.
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        // Directly-registered action samples (added above) take precedence
                        // over anything the formatters would generate.
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
            // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
            // If still not found, try to get the sample provided for the specified mediaType and type.
            // Finally, try to get the sample provided for the specified mediaType.
            // The short-circuiting || chain implements this most-specific-first fallback order.
            if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
            {
                return sample;
            }

            return null;
        }

        /// <summary>
        /// Gets the sample object that will be serialized by the formatters.
        /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
        /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
        /// factories in <see cref="SampleObjectFactories"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>The sample object.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
        public virtual object GetSampleObject(Type type)
        {
            object sampleObject;

            if (!SampleObjects.TryGetValue(type, out sampleObject))
            {
                // No specific object available, try our factories.
                foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
                {
                    if (factory == null)
                    {
                        continue;
                    }

                    try
                    {
                        sampleObject = factory(this, type);
                        if (sampleObject != null)
                        {
                            break;
                        }
                    }
                    catch
                    {
                        // Ignore any problems encountered in the factory; go on to the next one (if any).
                    }
                }
            }

            return sampleObject;
        }

        /// <summary>
        /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The type.</returns>
        public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
        {
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            // The resolved formatter collection is discarded here; only the type is needed.
            return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
        }

        /// <summary>
        /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
        /// <param name="formatters">The formatters.</param>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
        public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
        {
            if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
            {
                throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
            }
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            Type type;
            // An explicitly registered actual message type (exact parameter match first, then
            // the "*" wildcard) overrides whatever the ApiDescription reports.
            if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
                ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
            {
                // Re-compute the supported formatters based on type
                Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
                foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
                {
                    if (IsFormatSupported(sampleDirection, formatter, type))
                    {
                        newFormatters.Add(formatter);
                    }
                }
                formatters = newFormatters;
            }
            else
            {
                switch (sampleDirection)
                {
                    case SampleDirection.Request:
                        ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                        // Actions with no body parameter yield a null type (no request sample).
                        type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                        formatters = api.SupportedRequestBodyFormatters;
                        break;
                    case SampleDirection.Response:
                    default:
                        type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                        formatters = api.SupportedResponseFormatters;
                        break;
                }
            }
            return type;
        }

        /// <summary>
        /// Writes the sample object using formatter.
        /// </summary>
        /// <param name="formatter">The formatter.</param>
        /// <param name="value">The value.</param>
        /// <param name="type">The type.</param>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns></returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "The exception is recorded as InvalidSample.")]
        public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
        {
            if (formatter == null)
            {
                throw new ArgumentNullException("formatter");
            }
            if (mediaType == null)
            {
                throw new ArgumentNullException("mediaType");
            }

            object sample = String.Empty;
            MemoryStream ms = null;
            HttpContent content = null;
            try
            {
                if (formatter.CanWriteType(type))
                {
                    ms = new MemoryStream();
                    content = new ObjectContent(type, value, formatter, mediaType);
                    // NOTE(review): sync-over-async (.Wait()) on WriteToStreamAsync. This is the
                    // stock Help Page template pattern; verify no sync-context deadlock risk if
                    // this is ever called outside sample generation.
                    formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                    ms.Position = 0;
                    StreamReader reader = new StreamReader(ms);
                    string serializedSampleString = reader.ReadToEnd();
                    // Pretty-print only when the media type name suggests XML or JSON;
                    // formatting failures fall back to the raw serialized string.
                    if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                    {
                        serializedSampleString = TryFormatXml(serializedSampleString);
                    }
                    else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                    {
                        serializedSampleString = TryFormatJson(serializedSampleString);
                    }

                    sample = new TextSample(serializedSampleString);
                }
                else
                {
                    sample = new InvalidSample(String.Format(
                        CultureInfo.CurrentCulture,
                        "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                        mediaType,
                        formatter.GetType().Name,
                        type.Name));
                }
            }
            catch (Exception e)
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                    formatter.GetType().Name,
                    mediaType.MediaType,
                    UnwrapException(e).Message));
            }
            finally
            {
                if (ms != null)
                {
                    ms.Dispose();
                }
                if (content != null)
                {
                    content.Dispose();
                }
            }

            return sample;
        }

        // Unwraps AggregateException (e.g. from the .Wait() above) to report the root cause.
        internal static Exception UnwrapException(Exception exception)
        {
            AggregateException aggregateException = exception as AggregateException;
            if (aggregateException != null)
            {
                return aggregateException.Flatten().InnerException;
            }
            return exception;
        }

        // Default factory for sample objects
        private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
        {
            // Try to create a default sample object
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type);
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatJson(string str)
        {
            try
            {
                // Round-trip through Json.NET purely to re-serialize with indentation.
                object parsedJson = JsonConvert.DeserializeObject(str);
                return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
            }
            catch
            {
                // can't parse JSON, return the original string
                return str;
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
            Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatXml(string str)
        {
            try
            {
                // XDocument.ToString() emits the document re-indented.
                XDocument xml = XDocument.Parse(str);
                return xml.ToString();
            }
            catch
            {
                // can't parse XML, return the original string
                return str;
            }
        }

        // A formatter is relevant for requests if it can read the type, for responses if it can write it.
        private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    return formatter.CanReadType(type);
                case SampleDirection.Response:
                    return formatter.CanWriteType(type);
            }
            return false;
        }

        // Yields every registered action sample whose key matches the given controller/action/
        // direction; a key with the "*" parameter wildcard matches any parameter set.
        private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
        {
            HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
            foreach (var sample in ActionSamples)
            {
                HelpPageSampleKey sampleKey = sample.Key;
                if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                    String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                    (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                    sampleDirection == sampleKey.SampleDirection)
                {
                    yield return sample;
                }
            }
        }

        // Raw strings are wrapped in TextSample so the help page renders them as text; all
        // other sample objects pass through unchanged.
        private static object WrapSampleIfString(object sample)
        {
            string stringSample = sample as string;
            if (stringSample != null)
            {
                return new TextSample(stringSample);
            }

            return sample;
        }
    }
}
#region -- License Terms -- // // MessagePack for CLI // // Copyright (C) 2010-2015 FUJIWARA, Yusuke // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #endregion -- License Terms -- #if UNITY_STANDALONE || UNITY_WEBPLAYER || UNITY_WII || UNITY_IPHONE || UNITY_ANDROID || UNITY_PS3 || UNITY_XBOX360 || UNITY_FLASH || UNITY_BKACKBERRY || UNITY_WINRT #define UNITY #endif using System; using System.Collections; using System.Collections.Generic; #if !CORLIB_ONLY using System.ComponentModel; #endif //!CORLIB_ONLY using System.Reflection; #if !UNITY #if XAMIOS || XAMDROID using Contract = MsgPack.MPContract; #else using System.Diagnostics.Contracts; #endif // XAMIOS || XAMDROID #endif // !UNITY using MsgPack.Serialization.DefaultSerializers; namespace MsgPack.Serialization { /// <summary> /// <strong>This is intened to MsgPack for CLI internal use. Do not use this type from application directly.</strong> /// Defines serialization helper APIs. /// </summary> #if !CORLIB_ONLY [EditorBrowsable( EditorBrowsableState.Never )] #endif // !CORLIB_ONLY public static partial class UnpackHelpers { private static readonly MessagePackSerializer<MessagePackObject> _messagePackObjectSerializer = new MsgPack_MessagePackObjectMessagePackSerializer( SerializationContext.Default ); /// <summary> /// Unpacks the array to the specified array. 
/// </summary> /// <typeparam name="T">The type of the array element.</typeparam> /// <param name="unpacker">The unpacker to unpack the underlying stream.</param> /// <param name="serializer">The serializer to deserialize array.</param> /// <param name="array">The array instance to be filled.</param> /// <exception cref="System.Runtime.Serialization.SerializationException"> /// Failed to deserialization. /// </exception> #if !CORLIB_ONLY [EditorBrowsable( EditorBrowsableState.Never )] #endif // !CORLIB_ONLY [Obsolete( "This API is not used at generated serializers in current release, so this API will be removed future." )] public static void UnpackArrayTo<T>( Unpacker unpacker, MessagePackSerializer<T> serializer, T[] array ) { if ( unpacker == null ) { throw new ArgumentNullException( "unpacker" ); } if ( serializer == null ) { throw new ArgumentNullException( "serializer" ); } if ( array == null ) { throw new ArgumentNullException( "array" ); } if ( !unpacker.IsArrayHeader ) { throw SerializationExceptions.NewIsNotArrayHeader(); } #if !UNITY Contract.EndContractBlock(); #endif // !UNITY int count = GetItemsCount( unpacker ); for ( int i = 0; i < count; i++ ) { if ( !unpacker.Read() ) { throw SerializationExceptions.NewMissingItem( i ); } T item; if ( !unpacker.IsArrayHeader && !unpacker.IsMapHeader ) { item = serializer.UnpackFrom( unpacker ); } else { using ( Unpacker subtreeUnpacker = unpacker.ReadSubtree() ) { item = serializer.UnpackFrom( subtreeUnpacker ); } } array[ i ] = item; } } /// <summary> /// Unpacks the collection with the specified method as colletion of <see cref="MessagePackObject"/>. /// </summary> /// <param name="unpacker">The unpacker to unpack the underlying stream.</param> /// <param name="collection">The non-generic collection instance to be added unpacked elements.</param> /// <param name="addition">The delegate which contains the instance method of the <paramref name="collection"/>. 
The parameter is unpacked object.</param> /// <exception cref="System.Runtime.Serialization.SerializationException"> /// Failed to deserialization. /// </exception> #if !CORLIB_ONLY [EditorBrowsable( EditorBrowsableState.Never )] #endif // !CORLIB_ONLY [Obsolete( "This API is not used at generated serializers in current release, so this API will be removed future." )] public static void UnpackCollectionTo( Unpacker unpacker, IEnumerable collection, Action<object> addition ) { if ( unpacker == null ) { throw new ArgumentNullException( "unpacker" ); } if ( collection == null ) { throw new ArgumentNullException( "collection" ); } if ( addition == null ) { throw new ArgumentNullException( "addition" ); } if ( !unpacker.IsArrayHeader ) { throw SerializationExceptions.NewIsNotArrayHeader(); } #if !UNITY Contract.EndContractBlock(); #endif // !UNITY int count = GetItemsCount( unpacker ); for ( int i = 0; i < count; i++ ) { if ( !unpacker.Read() ) { throw SerializationExceptions.NewMissingItem( i ); } MessagePackObject item; if ( !unpacker.IsArrayHeader && !unpacker.IsMapHeader ) { item = _messagePackObjectSerializer.UnpackFrom( unpacker ); } else { using ( Unpacker subtreeUnpacker = unpacker.ReadSubtree() ) { item = _messagePackObjectSerializer.UnpackFrom( subtreeUnpacker ); } } addition( item ); } } /// <summary> /// Unpacks the dictionary with the specified method as colletion of <see cref="MessagePackObject"/>. /// </summary> /// <typeparam name="T">The type of elements.</typeparam> /// <param name="unpacker">The unpacker to unpack the underlying stream.</param> /// <param name="serializer">The serializer to deserialize elements.</param> /// <param name="collection">The generic collection instance to be added unpacked elements.</param> /// <param name="addition">The delegate which contains the instance method of the <paramref name="collection"/>. 
The parameter is unpacked object.</param> /// <exception cref="System.Runtime.Serialization.SerializationException"> /// Failed to deserialization. /// </exception> #if !CORLIB_ONLY [EditorBrowsable( EditorBrowsableState.Never )] #endif // !CORLIB_ONLY [Obsolete( "This API is not used at generated serializers in current release, so this API will be removed future." )] public static void UnpackCollectionTo<T>( Unpacker unpacker, MessagePackSerializer<T> serializer, IEnumerable<T> collection, Action<T> addition ) { if ( unpacker == null ) { throw new ArgumentNullException( "unpacker" ); } if ( serializer == null ) { throw new ArgumentNullException( "serializer" ); } if ( collection == null ) { throw new ArgumentNullException( "collection" ); } if ( addition == null ) { throw new ArgumentNullException( "addition" ); } if ( !unpacker.IsArrayHeader ) { throw SerializationExceptions.NewIsNotArrayHeader(); } #if !UNITY Contract.EndContractBlock(); #endif // !UNITY int count = GetItemsCount( unpacker ); for ( int i = 0; i < count; i++ ) { if ( !unpacker.Read() ) { throw SerializationExceptions.NewMissingItem( i ); } T item; if ( !unpacker.IsArrayHeader && !unpacker.IsMapHeader ) { item = serializer.UnpackFrom( unpacker ); } else { using ( Unpacker subtreeUnpacker = unpacker.ReadSubtree() ) { item = serializer.UnpackFrom( subtreeUnpacker ); } } addition( item ); } } /// <summary> /// Unpacks the collection with the specified method as colletion of <see cref="MessagePackObject"/>. /// </summary> /// <typeparam name="TDiscarded">The return type of Add method.</typeparam> /// <param name="unpacker">The unpacker to unpack the underlying stream.</param> /// <param name="collection">The non-generic collection instance to be added unpacked elements.</param> /// <param name="addition">The delegate which contains the instance method of the <paramref name="collection"/>. 
/// The parameter is unpacked object.</param>
/// <exception cref="System.Runtime.Serialization.SerializationException">
/// Failed to deserialize.
/// </exception>
#if !CORLIB_ONLY
[EditorBrowsable( EditorBrowsableState.Never )]
#endif // !CORLIB_ONLY
[Obsolete( "This API is not used at generated serializers in current release, so this API will be removed future." )]
public static void UnpackCollectionTo<TDiscarded>( Unpacker unpacker, IEnumerable collection, Func<object, TDiscarded> addition )
{
    // Validate all arguments up front.
    if ( unpacker == null )
    {
        throw new ArgumentNullException( "unpacker" );
    }

    if ( collection == null )
    {
        throw new ArgumentNullException( "collection" );
    }

    if ( addition == null )
    {
        throw new ArgumentNullException( "addition" );
    }

    // The unpacker must already be positioned on an array header.
    if ( !unpacker.IsArrayHeader )
    {
        throw SerializationExceptions.NewIsNotArrayHeader();
    }

#if !UNITY
    Contract.EndContractBlock();
#endif // !UNITY

    var itemCount = GetItemsCount( unpacker );
    for ( var itemIndex = 0; itemIndex < itemCount; itemIndex++ )
    {
        if ( !unpacker.Read() )
        {
            // Stream ended before the declared number of items was read.
            throw SerializationExceptions.NewMissingItem( itemIndex );
        }

        MessagePackObject unpacked;
        if ( unpacker.IsArrayHeader || unpacker.IsMapHeader )
        {
            // Nested collection: read it through an isolated subtree reader.
            using ( Unpacker subReader = unpacker.ReadSubtree() )
            {
                unpacked = _messagePackObjectSerializer.UnpackFrom( subReader );
            }
        }
        else
        {
            // Scalar item: deserialize directly from the current position.
            unpacked = _messagePackObjectSerializer.UnpackFrom( unpacker );
        }

        // The Add method's return value is deliberately discarded.
        addition( unpacked );
    }
}

/// <summary>
/// Unpacks the collection with the specified method as collection of <see cref="MessagePackObject"/>.
/// </summary>
/// <typeparam name="T">The type of elements.</typeparam>
/// <typeparam name="TDiscarded">The return type of Add method.</typeparam>
/// <param name="unpacker">The unpacker to unpack the underlying stream.</param>
/// <param name="serializer">The serializer to deserialize elements.</param>
/// <param name="collection">The generic collection instance to be added unpacked elements.</param>
/// <param name="addition">The delegate which contains the instance method of the <paramref name="collection"/>.
/// The parameter is unpacked object.</param>
/// <exception cref="System.Runtime.Serialization.SerializationException">
/// Failed to deserialize.
/// </exception>
#if !CORLIB_ONLY
[EditorBrowsable( EditorBrowsableState.Never )]
#endif // !CORLIB_ONLY
[Obsolete( "This API is not used at generated serializers in current release, so this API will be removed future." )]
public static void UnpackCollectionTo<T, TDiscarded>( Unpacker unpacker, MessagePackSerializer<T> serializer, IEnumerable<T> collection, Func<T, TDiscarded> addition )
{
    // Validate all arguments up front.
    if ( unpacker == null )
    {
        throw new ArgumentNullException( "unpacker" );
    }

    if ( serializer == null )
    {
        throw new ArgumentNullException( "serializer" );
    }

    if ( collection == null )
    {
        throw new ArgumentNullException( "collection" );
    }

    if ( addition == null )
    {
        throw new ArgumentNullException( "addition" );
    }

    // The unpacker must already be positioned on an array header.
    if ( !unpacker.IsArrayHeader )
    {
        throw SerializationExceptions.NewIsNotArrayHeader();
    }

#if !UNITY
    Contract.EndContractBlock();
#endif // !UNITY

    var itemCount = GetItemsCount( unpacker );
    for ( var itemIndex = 0; itemIndex < itemCount; itemIndex++ )
    {
        if ( !unpacker.Read() )
        {
            // Stream ended before the declared number of items was read.
            throw SerializationExceptions.NewMissingItem( itemIndex );
        }

        T unpacked;
        if ( unpacker.IsArrayHeader || unpacker.IsMapHeader )
        {
            // Nested collection: read it through an isolated subtree reader.
            using ( Unpacker subReader = unpacker.ReadSubtree() )
            {
                unpacked = serializer.UnpackFrom( subReader );
            }
        }
        else
        {
            // Scalar item: deserialize directly from the current position.
            unpacked = serializer.UnpackFrom( unpacker );
        }

        // The Add method's return value is deliberately discarded.
        addition( unpacked );
    }
}

/// <summary>
/// Unpacks the dictionary with the specified method as collection of <see cref="MessagePackObject"/>.
/// </summary>
/// <typeparam name="TKey">The type of keys.</typeparam>
/// <typeparam name="TValue">The type of values.</typeparam>
/// <param name="unpacker">The unpacker to unpack the underlying stream.</param>
/// <param name="keySerializer">The serializer to deserialize key elements.</param>
/// <param name="valueSerializer">The serializer to deserialize value elements.</param>
/// <param name="dictionary">The generic dictionary instance to be added unpacked elements.</param>
/// <exception cref="System.Runtime.Serialization.SerializationException">
/// Failed to deserialize.
/// </exception>
#if !CORLIB_ONLY
[EditorBrowsable( EditorBrowsableState.Never )]
#endif // !CORLIB_ONLY
[Obsolete( "This API is not used at generated serializers in current release, so this API will be removed future." )]
public static void UnpackMapTo<TKey, TValue>( Unpacker unpacker, MessagePackSerializer<TKey> keySerializer, MessagePackSerializer<TValue> valueSerializer, IDictionary<TKey, TValue> dictionary )
{
    // Validate all arguments up front.
    if ( unpacker == null )
    {
        throw new ArgumentNullException( "unpacker" );
    }

    if ( keySerializer == null )
    {
        throw new ArgumentNullException( "keySerializer" );
    }

    if ( valueSerializer == null )
    {
        throw new ArgumentNullException( "valueSerializer" );
    }

    if ( dictionary == null )
    {
        throw new ArgumentNullException( "dictionary" );
    }

    // The unpacker must already be positioned on a map header.
    if ( !unpacker.IsMapHeader )
    {
        throw SerializationExceptions.NewIsNotMapHeader();
    }

#if !UNITY
    Contract.EndContractBlock();
#endif // !UNITY

    var entryCount = GetItemsCount( unpacker );
    for ( var entryIndex = 0; entryIndex < entryCount; entryIndex++ )
    {
        // Each map entry is encoded as a key followed by its value.
        if ( !unpacker.Read() )
        {
            throw SerializationExceptions.NewMissingItem( entryIndex );
        }

        TKey key;
        if ( unpacker.IsArrayHeader || unpacker.IsMapHeader )
        {
            using ( Unpacker subReader = unpacker.ReadSubtree() )
            {
                key = keySerializer.UnpackFrom( subReader );
            }
        }
        else
        {
            key = keySerializer.UnpackFrom( unpacker );
        }

        if ( !unpacker.Read() )
        {
            throw SerializationExceptions.NewMissingItem( entryIndex );
        }

        TValue value;
        if ( unpacker.IsArrayHeader || unpacker.IsMapHeader )
        {
            using ( Unpacker subReader = unpacker.ReadSubtree() )
            {
                value = valueSerializer.UnpackFrom( subReader );
            }
        }
        else
        {
            value = valueSerializer.UnpackFrom( unpacker );
        }

        dictionary.Add( key, value );
    }
}

/// <summary>
/// Unpacks the dictionary with the specified method as collection of <see cref="MessagePackObject"/>.
/// </summary>
/// <param name="unpacker">The unpacker to unpack the underlying stream.</param>
/// <param name="dictionary">The non-generic dictionary instance to be added unpacked elements.</param>
/// <exception cref="System.Runtime.Serialization.SerializationException">
/// Failed to deserialize.
/// </exception>
#if !CORLIB_ONLY
[EditorBrowsable( EditorBrowsableState.Never )]
#endif // !CORLIB_ONLY
[Obsolete( "This API is not used at generated serializers in current release, so this API will be removed future." )]
public static void UnpackMapTo( Unpacker unpacker, IDictionary dictionary )
{
    // Validate all arguments up front.
    if ( unpacker == null )
    {
        throw new ArgumentNullException( "unpacker" );
    }

    if ( dictionary == null )
    {
        throw new ArgumentNullException( "dictionary" );
    }

    // The unpacker must already be positioned on a map header.
    if ( !unpacker.IsMapHeader )
    {
        throw SerializationExceptions.NewIsNotMapHeader();
    }

#if !UNITY
    Contract.EndContractBlock();
#endif // !UNITY

    var entryCount = GetItemsCount( unpacker );
    for ( var entryIndex = 0; entryIndex < entryCount; entryIndex++ )
    {
        // Each map entry is encoded as a key followed by its value.
        if ( !unpacker.Read() )
        {
            throw SerializationExceptions.NewMissingItem( entryIndex );
        }

        MessagePackObject key;
        if ( unpacker.IsArrayHeader || unpacker.IsMapHeader )
        {
            using ( Unpacker subReader = unpacker.ReadSubtree() )
            {
                key = _messagePackObjectSerializer.UnpackFrom( subReader );
            }
        }
        else
        {
            key = _messagePackObjectSerializer.UnpackFrom( unpacker );
        }

        if ( !unpacker.Read() )
        {
            throw SerializationExceptions.NewMissingItem( entryIndex );
        }

        MessagePackObject value;
        if ( unpacker.IsArrayHeader || unpacker.IsMapHeader )
        {
            using ( Unpacker subReader = unpacker.ReadSubtree() )
            {
                value = _messagePackObjectSerializer.UnpackFrom( subReader );
            }
        }
        else
        {
            value = _messagePackObjectSerializer.UnpackFrom( unpacker );
        }

        dictionary.Add( key, value );
    }
}

/// <summary>
/// Gets the items count as <see cref="Int32"/>.
/// </summary>
/// <param name="unpacker">The unpacker.</param>
/// <returns>The items count as <see cref="Int32"/>.</returns>
/// <exception cref="System.ArgumentNullException"><paramref name="unpacker"/> is <c>null.</c></exception>
/// <exception cref="System.Runtime.Serialization.SerializationException">The items count is greater than <see cref="Int32.MaxValue"/>.</exception>
/// <remarks>
/// The items count of the collection can be between <see cref="Int32.MaxValue"/> and <see cref="UInt32.MaxValue"/>,
/// but most collections do not support so big count.
/// </remarks>
#if !CORLIB_ONLY
[EditorBrowsable( EditorBrowsableState.Never )]
#endif // !CORLIB_ONLY
public static int GetItemsCount( Unpacker unpacker )
{
    if ( unpacker == null )
    {
        throw new ArgumentNullException( "unpacker" );
    }

    long declaredCount;
    try
    {
        declaredCount = unpacker.ItemsCount;
    }
    catch ( InvalidOperationException ex )
    {
        // The unpacker is not positioned on a collection header.
        throw SerializationExceptions.NewIsIncorrectStream( ex );
    }

    if ( declaredCount > Int32.MaxValue )
    {
        throw SerializationExceptions.NewIsTooLargeCollection();
    }

    return unchecked( ( int )declaredCount );
}

/// <summary>
/// Ensures the boxed type is not null thus it cannot be unboxing.
/// </summary>
/// <typeparam name="T">The type of the member.</typeparam>
/// <param name="boxed">The boxed deserializing value.</param>
/// <param name="name">The name of the member.</param>
/// <param name="targetType">The type of the target.</param>
/// <returns>The unboxed value.</returns>
#if !CORLIB_ONLY
[EditorBrowsable( EditorBrowsableState.Never )]
#endif // !CORLIB_ONLY
[Obsolete( "This API is not used at generated serializers in current release, so this API will be removed future." )]
public static T ConvertWithEnsuringNotNull<T>( object boxed, string name, Type targetType )
{
    var memberType = typeof( T );

    // A null can only be unboxed into a reference type or Nullable<T>.
    if ( memberType.GetIsValueType() && boxed == null && Nullable.GetUnderlyingType( memberType ) == null )
    {
        throw SerializationExceptions.NewValueTypeCannotBeNull( name, memberType, targetType );
    }

    return ( T )boxed;
}

/// <summary>
/// Invokes <see cref="MessagePackSerializer{T}.UnpackFromCore"/> FAMANDASM method directly.
/// </summary>
/// <typeparam name="T">The type of deserializing object.</typeparam>
/// <param name="serializer">The invocation target <see cref="MessagePackSerializer{T}"/>.</param>
/// <param name="unpacker">The unpacker to be passed to the method.</param>
/// <returns>A deserialized value.</returns>
#if !CORLIB_ONLY
[EditorBrowsable( EditorBrowsableState.Never )]
#endif // !CORLIB_ONLY
[Obsolete( "This API is not used at generated serializers in current release, so this API will be removed future." )]
public static T InvokeUnpackFrom<T>( MessagePackSerializer<T> serializer, Unpacker unpacker )
{
    if ( serializer == null )
    {
        throw new ArgumentNullException( "serializer" );
    }

    return serializer.UnpackFromCore( unpacker );
}

/// <summary>
/// Retrieves a most appropriate constructor with <see cref="Int32"/> capacity parameter and <see cref="IEqualityComparer{T}"/> comparer parameter or both of them, or default constructor of the <paramref name="instanceType"/>.
/// </summary>
/// <param name="instanceType">The target collection type to be instantiated.</param>
/// <returns>A constructor of the <paramref name="instanceType"/>.</returns>
internal static ConstructorInfo GetCollectionConstructor( Type instanceType )
{
    // Scores encode the preference order of constructor shapes; a higher score wins.
    const int noParameters = 0;
    const int withCapacity = 10;
    const int withComparer = 11;
    const int withComparerAndCapacity = 20;
    const int withCapacityAndComparer = 21;

    ConstructorInfo constructor = null;
    var currentScore = -1;

    foreach ( var candidate in instanceType.GetConstructors() )
    {
        var parameters = candidate.GetParameters();
        switch ( parameters.Length )
        {
            case 0:
            {
                if ( currentScore < noParameters )
                {
                    constructor = candidate;
                    currentScore = noParameters;
                }

                break;
            }
            case 1:
            {
                if ( currentScore < withCapacity && parameters[ 0 ].ParameterType == typeof( int ) )
                {
                    constructor = candidate;
                    // FIX: record the score of the matched overload (was noParameters),
                    // so a weaker constructor found later cannot displace this one.
                    currentScore = withCapacity;
                }
                else if ( currentScore < withComparer && IsIEqualityComparer( parameters[ 0 ].ParameterType ) )
                {
                    constructor = candidate;
                    // FIX: likewise, record withComparer instead of noParameters.
                    currentScore = withComparer;
                }

                break;
            }
            case 2:
            {
                if ( currentScore < withCapacityAndComparer && parameters[ 0 ].ParameterType == typeof( int ) && IsIEqualityComparer( parameters[ 1 ].ParameterType ) )
                {
                    constructor = candidate;
                    currentScore = withCapacityAndComparer;
                }
                else if ( currentScore < withComparerAndCapacity && parameters[ 1 ].ParameterType == typeof( int ) && IsIEqualityComparer( parameters[ 0 ].ParameterType ) )
                {
                    constructor = candidate;
                    currentScore = withComparerAndCapacity;
                }

                break;
            }
        }
    }

    if ( constructor == null )
    {
        // No usable public constructor shape was found at all.
        throw SerializationExceptions.NewTargetDoesNotHavePublicDefaultConstructorNorInitialCapacity( instanceType );
    }

    return constructor;
}

/// <summary>
/// Determines the type is <see cref="IEqualityComparer{T}"/>.
/// </summary>
/// <param name="type">The type should be <see cref="IEqualityComparer{T}"/>.</param>
/// <returns>
/// <c>true</c>, if <paramref name="type"/> is open <see cref="IEqualityComparer{T}"/> generic type; <c>false</c>, otherwise.
/// </returns>
internal static bool IsIEqualityComparer( Type type )
{
#if DEBUG && !UNITY
    // Callers must pass a closed (constructed) type; open generic definitions are programming errors.
    Contract.Assert( !type.GetIsGenericTypeDefinition(), "!(" + type + ").GetIsGenericTypeDefinition()" );
#endif // DEBUG && !UNITY
    return type.GetIsGenericType() && type.GetGenericTypeDefinition() == typeof( IEqualityComparer<> );
}

#if UNITY
// Non-generic bridge for AOT (IL2CPP) environments where generic instantiation at runtime is restricted.
internal static object GetEqualityComparer( Type comparerType )
{
    return AotHelper.GetEqualityComparer( comparerType );
}
#endif // UNITY

/// <summary>
/// Gets an <see cref="IEqualityComparer{T}"/> with platform safe fashion.
/// </summary>
/// <typeparam name="T">The type to be compared.</typeparam>
/// <returns>
/// An <see cref="IEqualityComparer{T}"/> instance.
/// </returns>
#if !CORLIB_ONLY
[EditorBrowsable( EditorBrowsableState.Never )]
#endif // !CORLIB_ONLY
public static IEqualityComparer<T> GetEqualityComparer<T>()
{
#if !UNITY
    return EqualityComparer<T>.Default;
#else
    // AotHelper is internal because it should not be API -- it is subject to change when the Unity's Mono is updated or IL2CPP becomes stable.
    return AotHelper.GetEqualityComparer<T>();
#endif // !UNITY
}
}
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.Editor.Host;
using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.Extensions;
using Microsoft.CodeAnalysis.Text;
using Microsoft.VisualStudio.Imaging.Interop;
using Microsoft.VisualStudio.Language.Intellisense;
using Microsoft.VisualStudio.Text;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.Editor.Implementation.Suggestions
{
    /// <summary>
    /// Base class for all Roslyn light bulb menu items.
    /// </summary>
    internal partial class SuggestedAction : ForegroundThreadAffinitizedObject, ISuggestedAction, IEquatable<ISuggestedAction>
    {
        // Services and state shared by all derived light bulb actions.
        protected readonly Workspace Workspace;
        protected readonly ITextBuffer SubjectBuffer;
        protected readonly ICodeActionEditHandlerService EditHandler;
        protected readonly object Provider;
        protected readonly CodeAction CodeAction;

        protected SuggestedAction(
            Workspace workspace,
            ITextBuffer subjectBuffer,
            ICodeActionEditHandlerService editHandler,
            CodeAction codeAction,
            object provider)
        {
            Contract.ThrowIfTrue(provider == null);

            this.Workspace = workspace;
            this.SubjectBuffer = subjectBuffer;
            this.CodeAction = codeAction;
            this.EditHandler = editHandler;
            this.Provider = provider;
        }

        /// <summary>
        /// Produces a telemetry id derived from the concrete <see cref="CodeAction"/> type.
        /// </summary>
        public bool TryGetTelemetryId(out Guid telemetryId)
        {
            // TODO: this is temporary. Diagnostic team needs to figure out how to provide unique id per a fix.
            // for now, we will use type of CodeAction, but there are some predefined code actions that are used by multiple fixes
            // and this will not distinguish those

            // AssemblyQualifiedName will change across version numbers, FullName won't
            var type = CodeAction.GetType();
            // Collapse constructed generics to their definition so all instantiations share an id.
            type = type.IsConstructedGenericType ? type.GetGenericTypeDefinition() : type;
            telemetryId = new Guid(type.FullName.GetHashCode(), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
            return true;
        }

        // NOTE: We want to avoid computing the operations on the UI thread. So we use Task.Run() to do this work on the background thread.
        protected Task<ImmutableArray<CodeActionOperation>> GetOperationsAsync(CancellationToken cancellationToken)
        {
            return Task.Run(
                async () => await CodeAction.GetOperationsAsync(cancellationToken).ConfigureAwait(false), cancellationToken);
        }

        protected Task<IEnumerable<CodeActionOperation>> GetOperationsAsync(CodeActionWithOptions actionWithOptions, object options, CancellationToken cancellationToken)
        {
            return Task.Run(
                async () => await actionWithOptions.GetOperationsAsync(options, cancellationToken).ConfigureAwait(false), cancellationToken);
        }

        protected Task<ImmutableArray<CodeActionOperation>> GetPreviewOperationsAsync(CancellationToken cancellationToken)
        {
            return Task.Run(
                async () => await CodeAction.GetPreviewOperationsAsync(cancellationToken).ConfigureAwait(false), cancellationToken);
        }

        /// <summary>
        /// Computes the code action's operations and applies them to the workspace,
        /// restoring the caret position afterwards.
        /// </summary>
        public virtual void Invoke(CancellationToken cancellationToken)
        {
            var snapshot = this.SubjectBuffer.CurrentSnapshot;

            using (new CaretPositionRestorer(this.SubjectBuffer, this.EditHandler.AssociatedViewService))
            {
                var extensionManager = this.Workspace.Services.GetService<IExtensionManager>();
                extensionManager.PerformAction(Provider, () =>
                {
                    IEnumerable<CodeActionOperation> operations = null;

                    // NOTE: As mentioned above, we want to avoid computing the operations on the UI thread.
                    // However, for CodeActionWithOptions, GetOptions() might involve spinning up a dialog
                    // to compute the options and must be done on the UI thread.
                    var actionWithOptions = this.CodeAction as CodeActionWithOptions;
                    if (actionWithOptions != null)
                    {
                        var options = actionWithOptions.GetOptions(cancellationToken);
                        if (options != null)
                        {
                            operations = GetOperationsAsync(actionWithOptions, options, cancellationToken).WaitAndGetResult(cancellationToken);
                        }
                    }
                    else
                    {
                        operations = GetOperationsAsync(cancellationToken).WaitAndGetResult(cancellationToken);
                    }

                    if (operations != null)
                    {
                        var document = this.SubjectBuffer.CurrentSnapshot.GetOpenDocumentInCurrentContextWithChanges();
                        EditHandler.Apply(Workspace, document, operations, CodeAction.Title, cancellationToken);
                    }
                });
            }
        }

        /// <summary>
        /// The text shown in the light bulb menu for this action.
        /// </summary>
        public string DisplayText
        {
            get
            {
                // Underscores will become an accelerator in the VS smart tag. So we double all
                // underscores so they actually get represented as an underscore in the UI.
                var extensionManager = this.Workspace.Services.GetService<IExtensionManager>();
                var text = extensionManager.PerformFunction(Provider, () => CodeAction.Title, defaultValue: string.Empty);
                return text.Replace("_", "__");
            }
        }

        protected async Task<SolutionPreviewResult> GetPreviewResultAsync(CancellationToken cancellationToken)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // We will always invoke this from the UI thread.
            AssertIsForeground();

            // We use ConfigureAwait(true) to stay on the UI thread.
            var operations = await GetPreviewOperationsAsync(cancellationToken).ConfigureAwait(true);

            return EditHandler.GetPreviews(Workspace, operations, cancellationToken);
        }

        public virtual bool HasPreview
        {
            get
            {
                // HasPreview is called synchronously on the UI thread. In order to avoid blocking the UI thread,
                // we need to provide a 'quick' answer here as opposed to the 'right' answer. Providing the 'right'
                // answer is expensive (because we will need to call CodeAction.GetPreviewOperationsAsync() for this
                // and this will involve computing the changed solution for the ApplyChangesOperation for the fix /
                // refactoring). So we always return 'true' here (so that platform will call GetActionSetsAsync()
                // below). Platform guarantees that nothing bad will happen if we return 'true' here and later return
                // 'null' / empty collection from within GetPreviewAsync().
                return true;
            }
        }

        /// <summary>
        /// Builds the preview pane content for this action. Must run on the UI thread.
        /// </summary>
        public virtual async Task<object> GetPreviewAsync(CancellationToken cancellationToken)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Light bulb will always invoke this function on the UI thread.
            AssertIsForeground();

            var preferredDocumentId = Workspace.GetDocumentIdInCurrentContext(SubjectBuffer.AsTextContainer());
            var preferredProjectId = preferredDocumentId?.ProjectId;

            var extensionManager = this.Workspace.Services.GetService<IExtensionManager>();
            var previewContent = await extensionManager.PerformFunctionAsync(Provider, async () =>
            {
                // We need to stay on UI thread after GetPreviewResultAsync() so that TakeNextPreviewAsync()
                // below can execute on UI thread. We use ConfigureAwait(true) to stay on the UI thread.
                var previewResult = await GetPreviewResultAsync(cancellationToken).ConfigureAwait(true);
                if (previewResult == null)
                {
                    return null;
                }
                else
                {
                    // TakeNextPreviewAsync() needs to run on UI thread.
                    AssertIsForeground();
                    return await previewResult.TakeNextPreviewAsync(preferredDocumentId, preferredProjectId, cancellationToken).ConfigureAwait(true);
                }

                // GetPreviewPane() below needs to run on UI thread. We use ConfigureAwait(true) to stay on the UI thread.
            }, defaultValue: null).ConfigureAwait(true);

            var previewPaneService = Workspace.Services.GetService<IPreviewPaneService>();
            if (previewPaneService == null)
            {
                return null;
            }

            cancellationToken.ThrowIfCancellationRequested();

            // GetPreviewPane() needs to run on the UI thread.
            AssertIsForeground();

            string language;
            string projectType;
            Workspace.GetLanguageAndProjectType(preferredProjectId, out language, out projectType);

            return previewPaneService.GetPreviewPane(GetDiagnostic(), language, projectType, previewContent);
        }

        // Overridden by diagnostic-based actions to supply the diagnostic shown in the preview pane.
        protected virtual Diagnostic GetDiagnostic()
        {
            return null;
        }

        #region not supported

        void IDisposable.Dispose()
        {
            // do nothing
        }

        public virtual bool HasActionSets
        {
            get
            {
                return false;
            }
        }

        public virtual Task<IEnumerable<SuggestedActionSet>> GetActionSetsAsync(CancellationToken cancellationToken)
        {
            return SpecializedTasks.Default<IEnumerable<SuggestedActionSet>>();
        }

        string ISuggestedAction.IconAutomationText
        {
            get
            {
                // same as display text
                return DisplayText;
            }
        }

        ImageMoniker ISuggestedAction.IconMoniker
        {
            get
            {
                // no icon support
                return default(ImageMoniker);
            }
        }

        string ISuggestedAction.InputGestureText
        {
            get
            {
                // no shortcut support
                return null;
            }
        }

        #endregion

        #region IEquatable<ISuggestedAction>

        public bool Equals(ISuggestedAction other)
        {
            return Equals(other as SuggestedAction);
        }

        public override bool Equals(object obj)
        {
            return Equals(obj as SuggestedAction);
        }

        // Two suggested actions are equal when they come from the same provider and their
        // code actions share a non-null equivalence key.
        public bool Equals(SuggestedAction otherSuggestedAction)
        {
            if (otherSuggestedAction == null)
            {
                return false;
            }

            if (ReferenceEquals(this, otherSuggestedAction))
            {
                return true;
            }

            if (!ReferenceEquals(Provider, otherSuggestedAction.Provider))
            {
                return false;
            }

            var otherCodeAction = otherSuggestedAction.CodeAction;
            if (CodeAction.EquivalenceKey == null || otherCodeAction.EquivalenceKey == null)
            {
                return false;
            }

            return CodeAction.EquivalenceKey == otherCodeAction.EquivalenceKey;
        }

        public override int GetHashCode()
        {
            // Mirrors Equals: actions without an equivalence key fall back to reference identity.
            if (CodeAction.EquivalenceKey == null)
            {
                return base.GetHashCode();
            }

            return Hash.Combine(Provider.GetHashCode(), CodeAction.EquivalenceKey.GetHashCode());
        }

        #endregion
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// QueuedMap.cs
//
//
// A key-value pair queue, where pushing an existing key into the collection overwrites
// the existing value.
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-

using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Contracts;

namespace System.Threading.Tasks.Dataflow.Internal
{
    /// <summary>
    /// Provides a data structure that supports pushing and popping key/value pairs.
    /// Pushing a key/value pair for which the key already exists results in overwriting
    /// the existing key entry's value.
    /// </summary>
    /// <typeparam name="TKey">Specifies the type of keys in the map.</typeparam>
    /// <typeparam name="TValue">Specifies the type of values in the map.</typeparam>
    /// <remarks>This type is not thread-safe.</remarks>
    [DebuggerDisplay("Count = {Count}")]
    [DebuggerTypeProxy(typeof(EnumerableDebugView<,>))]
    internal sealed class QueuedMap<TKey, TValue>
    {
        /// <summary>
        /// A queue structure that uses an array-based list to store its items
        /// and that supports overwriting elements at specific indices.
        /// </summary>
        /// <typeparam name="T">The type of the items stored in the queue</typeparam>
        /// <remarks>This type is not thread-safe.</remarks>
        private sealed class ArrayBasedLinkedQueue<T>
        {
            /// <summary>Terminator index.</summary>
            private const int TERMINATOR_INDEX = -1;
            /// <summary>
            /// The queue where the items will be stored.
            /// The key of each entry is the index of the next entry in the queue.
            /// </summary>
            /// <remarks>
            /// Slots are never removed from this list; instead, dequeued slots are threaded
            /// onto an internal free list (headed by _freeIndex) and reused by Enqueue.
            /// </remarks>
            private readonly List<KeyValuePair<int, T>> _storage;
            /// <summary>Index of the first queue item.</summary>
            private int _headIndex = TERMINATOR_INDEX;
            /// <summary>Index of the last queue item.</summary>
            private int _tailIndex = TERMINATOR_INDEX;
            /// <summary>Index of the first free slot.</summary>
            private int _freeIndex = TERMINATOR_INDEX;

            /// <summary>Initializes the Queue instance.</summary>
            internal ArrayBasedLinkedQueue()
            {
                _storage = new List<KeyValuePair<int, T>>();
            }

            /// <summary>Initializes the Queue instance.</summary>
            /// <param name="capacity">The capacity of the internal storage.</param>
            internal ArrayBasedLinkedQueue(int capacity)
            {
                _storage = new List<KeyValuePair<int, T>>(capacity);
            }

            /// <summary>Enqueues an item.</summary>
            /// <param name="item">The item to be enqueued.</param>
            /// <returns>The index of the slot where item was stored.</returns>
            internal int Enqueue(T item)
            {
                int newIndex;

                // If there is a free slot, reuse it
                if (_freeIndex != TERMINATOR_INDEX)
                {
                    Debug.Assert(0 <= _freeIndex && _freeIndex < _storage.Count, "Index is out of range.");
                    newIndex = _freeIndex;
                    _freeIndex = _storage[_freeIndex].Key;
                    _storage[newIndex] = new KeyValuePair<int, T>(TERMINATOR_INDEX, item);
                }
                // If there is no free slot, add one
                else
                {
                    newIndex = _storage.Count;
                    _storage.Add(new KeyValuePair<int, T>(TERMINATOR_INDEX, item));
                }

                if (_headIndex == TERMINATOR_INDEX)
                {
                    // Point _headIndex to newIndex if the queue was empty
                    Debug.Assert(_tailIndex == TERMINATOR_INDEX, "If head indicates empty, so too should tail.");
                    _headIndex = newIndex;
                }
                else
                {
                    // Point the tail slot to newIndex if the queue was not empty
                    Debug.Assert(_tailIndex != TERMINATOR_INDEX, "If head does not indicate empty, neither should tail.");
                    _storage[_tailIndex] = new KeyValuePair<int, T>(newIndex, _storage[_tailIndex].Value);
                }

                // Point the tail slot newIndex
                _tailIndex = newIndex;

                return newIndex;
            }

            /// <summary>Tries to dequeue an item.</summary>
            /// <param name="item">The item that is dequeued.</param>
            internal bool TryDequeue(out T item)
            {
                // If the queue is empty, just initialize the output item and return false
                if (_headIndex == TERMINATOR_INDEX)
                {
                    Debug.Assert(_tailIndex == TERMINATOR_INDEX, "If head indicates empty, so too should tail.");
                    item = default(T);
                    return false;
                }

                // If there are items in the queue, start with populating the output item
                Debug.Assert(0 <= _headIndex && _headIndex < _storage.Count, "Head is out of range.");
                item = _storage[_headIndex].Value;

                // Move the popped slot to the head of the free list.
                // The slot's value is cleared (default(T)) so it doesn't keep the item alive.
                int newHeadIndex = _storage[_headIndex].Key;
                _storage[_headIndex] = new KeyValuePair<int, T>(_freeIndex, default(T));
                _freeIndex = _headIndex;
                _headIndex = newHeadIndex;
                if (_headIndex == TERMINATOR_INDEX) _tailIndex = TERMINATOR_INDEX;

                return true;
            }

            /// <summary>Replaces the item of a given slot.</summary>
            /// <param name="index">The index of the slot where the value should be replaced.</param>
            /// <param name="item">The item to be placed.</param>
            internal void Replace(int index, T item)
            {
                Debug.Assert(0 <= index && index < _storage.Count, "Index is out of range.");
#if DEBUG
                // Also assert that index does not belong to the list of free slots
                for (int idx = _freeIndex; idx != TERMINATOR_INDEX; idx = _storage[idx].Key)
                    Debug.Assert(idx != index, "Index should not belong to the list of free slots.");
#endif
                // Keep the slot's next-pointer; only the stored value changes.
                _storage[index] = new KeyValuePair<int, T>(_storage[index].Key, item);
            }

            // True when the queue contains no items.
            internal bool IsEmpty { get { return _headIndex == TERMINATOR_INDEX; } }
        }

        /// <summary>The queue of elements.</summary>
        private readonly ArrayBasedLinkedQueue<KeyValuePair<TKey, TValue>> _queue;
        /// <summary>A map from key to index into the list.</summary>
        /// <remarks>The correctness of this map relies on the list only having elements removed from its end.</remarks>
        private readonly Dictionary<TKey, int> _mapKeyToIndex;

        /// <summary>Initializes the QueuedMap.</summary>
        internal QueuedMap()
        {
            _queue = new ArrayBasedLinkedQueue<KeyValuePair<TKey, TValue>>();
            _mapKeyToIndex = new Dictionary<TKey, int>();
        }

        /// <summary>Initializes the QueuedMap.</summary>
        /// <param name="capacity">The initial capacity of the data structure.</param>
        internal QueuedMap(int capacity)
        {
            _queue = new ArrayBasedLinkedQueue<KeyValuePair<TKey, TValue>>(capacity);
            _mapKeyToIndex = new Dictionary<TKey, int>(capacity);
        }

        /// <summary>Pushes a key/value pair into the data structure.</summary>
        /// <param name="key">The key for the pair.</param>
        /// <param name="value">The value for the pair.</param>
        internal void Push(TKey key, TValue value)
        {
            // Try to get the index of the key in the queue. If it's there, replace the value.
            int indexOfKeyInQueue;
            if (!_queue.IsEmpty && _mapKeyToIndex.TryGetValue(key, out indexOfKeyInQueue))
            {
                _queue.Replace(indexOfKeyInQueue, new KeyValuePair<TKey, TValue>(key, value));
            }
            // If it's not there, add it to the queue and then add the mapping.
            else
            {
                indexOfKeyInQueue = _queue.Enqueue(new KeyValuePair<TKey, TValue>(key, value));
                _mapKeyToIndex.Add(key, indexOfKeyInQueue);
            }
        }

        /// <summary>Try to pop the next element from the data structure.</summary>
        /// <param name="item">The popped pair.</param>
        /// <returns>true if an item could be popped; otherwise, false.</returns>
        internal bool TryPop(out KeyValuePair<TKey, TValue> item)
        {
            bool popped = _queue.TryDequeue(out item);
            // Keep the key map consistent with the queue.
            if (popped) _mapKeyToIndex.Remove(item.Key);
            return popped;
        }

        /// <summary>Tries to pop one or more elements from the data structure.</summary>
        /// <param name="items">The items array into which the popped elements should be stored.</param>
        /// <param name="arrayOffset">The offset into the array at which to start storing popped items.</param>
        /// <param name="count">The number of items to be popped.</param>
        /// <returns>The number of items popped, which may be less than the requested number if fewer existed in the data structure.</returns>
        internal int PopRange(KeyValuePair<TKey, TValue>[] items, int arrayOffset, int count)
        {
            // As this data structure is internal, only assert incorrect usage.
            // If this were to ever be made public, these would need to be real argument checks.
            Contract.Requires(items != null, "Requires non-null array to store into.");
            Contract.Requires(count >= 0 && arrayOffset >= 0, "Count and offset must be non-negative");
            Contract.Requires(arrayOffset + count >= 0, "Offset plus count overflowed");
            Contract.Requires(arrayOffset + count <= items.Length, "Range must be within array size");

            int actualCount = 0;
            for (int i = arrayOffset; actualCount < count; i++, actualCount++)
            {
                KeyValuePair<TKey, TValue> item;
                if (TryPop(out item)) items[i] = item;
                else break;
            }

            return actualCount;
        }

        /// <summary>Gets the number of items in the data structure.</summary>
        internal int Count { get { return _mapKeyToIndex.Count; } }
    }
}
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion

using System;
using System.Collections.Generic;
using System.IO;
using System.Globalization;
using Newtonsoft.Json.Utilities;
#if NET20
using Newtonsoft.Json.Utilities.LinqBridge;
#else
using System.Linq;
#endif

namespace Newtonsoft.Json
{
    /// <summary>
    /// Represents a reader that provides fast, non-cached, forward-only access to serialized Json data.
    /// </summary>
    public abstract class JsonReader : IDisposable
    {
        /// <summary>
        /// Specifies the state of the reader.
        /// </summary>
        protected internal enum State
        {
            /// <summary>
            /// The Read method has not been called.
            /// </summary>
            Start,

            /// <summary>
            /// The end of the file has been reached successfully.
            /// </summary>
            Complete,

            /// <summary>
            /// Reader is at a property.
            /// </summary>
            Property,

            /// <summary>
            /// Reader is at the start of an object.
            /// </summary>
            ObjectStart,

            /// <summary>
            /// Reader is in an object.
            /// </summary>
            Object,

            /// <summary>
            /// Reader is at the start of an array.
            /// </summary>
            ArrayStart,

            /// <summary>
            /// Reader is in an array.
            /// </summary>
            Array,

            /// <summary>
            /// The Close method has been called.
            /// </summary>
            Closed,

            /// <summary>
            /// Reader has just read a value.
            /// </summary>
            PostValue,

            /// <summary>
            /// Reader is at the start of a constructor.
            /// </summary>
            ConstructorStart,

            /// <summary>
            /// Reader in a constructor.
            /// </summary>
            Constructor,

            /// <summary>
            /// An error occurred that prevents the read operation from continuing.
            /// </summary>
            Error,

            /// <summary>
            /// The end of the file has been reached successfully.
            /// </summary>
            Finished
        }

        // current Token data
        private JsonToken _tokenType;
        private object _value;
        internal char _quoteChar;
        internal State _currentState;
        internal ReadType _readType;
        // position within the current container; containers above it live in _stack
        private JsonPosition _currentPosition;
        private CultureInfo _culture;
        private DateTimeZoneHandling _dateTimeZoneHandling;
        private int? _maxDepth;
        // latch so the max-depth error is raised once per excursion past _maxDepth
        private bool _hasExceededMaxDepth;
        internal DateParseHandling _dateParseHandling;
        internal FloatParseHandling _floatParseHandling;
        private readonly List<JsonPosition> _stack;

        /// <summary>
        /// Gets the current reader state.
        /// </summary>
        /// <value>The current reader state.</value>
        protected State CurrentState
        {
            get { return _currentState; }
        }

        /// <summary>
        /// Gets or sets a value indicating whether the underlying stream or
        /// <see cref="TextReader"/> should be closed when the reader is closed.
        /// </summary>
        /// <value>
        /// true to close the underlying stream or <see cref="TextReader"/> when
        /// the reader is closed; otherwise false. The default is true.
        /// </value>
        public bool CloseInput { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether multiple pieces of JSON content can
        /// be read from a continuous stream without erroring.
        /// </summary>
        /// <value>
        /// true to support reading multiple pieces of JSON content; otherwise false. The default is false.
        /// </value>
        public bool SupportMultipleContent { get; set; }

        /// <summary>
        /// Gets the quotation mark character used to enclose the value of a string.
        /// </summary>
        public virtual char QuoteChar
        {
            get { return _quoteChar; }
            protected internal set { _quoteChar = value; }
        }

        /// <summary>
        /// Get or set how <see cref="DateTime"/> time zones are handling when reading JSON.
        /// </summary>
        public DateTimeZoneHandling DateTimeZoneHandling
        {
            get { return _dateTimeZoneHandling; }
            set { _dateTimeZoneHandling = value; }
        }

        /// <summary>
        /// Get or set how date formatted strings, e.g. "\/Date(1198908717056)\/" and "2012-03-21T05:40Z", are parsed when reading JSON.
        /// </summary>
        public DateParseHandling DateParseHandling
        {
            get { return _dateParseHandling; }
            set { _dateParseHandling = value; }
        }

        /// <summary>
        /// Get or set how floating point numbers, e.g. 1.0 and 9.9, are parsed when reading JSON text.
        /// </summary>
        public FloatParseHandling FloatParseHandling
        {
            get { return _floatParseHandling; }
            set { _floatParseHandling = value; }
        }

        /// <summary>
        /// Gets or sets the maximum depth allowed when reading JSON. Reading past this depth will throw a <see cref="JsonReaderException"/>.
        /// </summary>
        public int? MaxDepth
        {
            get { return _maxDepth; }
            set
            {
                if (value <= 0)
                    throw new ArgumentException("Value must be positive.", "value");

                _maxDepth = value;
            }
        }

        /// <summary>
        /// Gets the type of the current JSON token.
        /// </summary>
        public virtual JsonToken TokenType
        {
            get { return _tokenType; }
        }

        /// <summary>
        /// Gets the text value of the current JSON token.
        /// </summary>
        public virtual object Value
        {
            get { return _value; }
        }

        /// <summary>
        /// Gets The Common Language Runtime (CLR) type for the current JSON token.
        /// </summary>
        public virtual Type ValueType
        {
            get { return (_value != null) ? _value.GetType() : null; }
        }

        /// <summary>
        /// Gets the depth of the current token in the JSON document.
        /// </summary>
        /// <value>The depth of the current token in the JSON document.</value>
        public virtual int Depth
        {
            get
            {
                int depth = _stack.Count;
                // a start token (or no container at all) has not yet "entered" its
                // container, so _currentPosition does not add a level
                if (IsStartToken(TokenType) || _currentPosition.Type == JsonContainerType.None)
                    return depth;
                else
                    return depth + 1;
            }
        }

        /// <summary>
        /// Gets the path of the current JSON token.
        /// </summary>
        public virtual string Path
        {
            get
            {
                if (_currentPosition.Type == JsonContainerType.None)
                    return string.Empty;

                // at a *Start state the current container has no content yet,
                // so only the outer stack contributes to the path
                bool insideContainer = (_currentState != State.ArrayStart
                                        && _currentState != State.ConstructorStart
                                        && _currentState != State.ObjectStart);

                IEnumerable<JsonPosition> positions = (!insideContainer)
                    ? _stack
                    : _stack.Concat(new[] { _currentPosition });

                return JsonPosition.BuildPath(positions);
            }
        }

        /// <summary>
        /// Gets or sets the culture used when reading JSON. Defaults to <see cref="CultureInfo.InvariantCulture"/>.
        /// </summary>
        public CultureInfo Culture
        {
            get { return _culture ?? CultureInfo.InvariantCulture; }
            set { _culture = value; }
        }

        // Returns the position for the given depth: stack entries for outer
        // containers, _currentPosition for the innermost one.
        internal JsonPosition GetPosition(int depth)
        {
            if (depth < _stack.Count)
                return _stack[depth];

            return _currentPosition;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="JsonReader"/> class.
        /// </summary>
        protected JsonReader()
        {
            _currentState = State.Start;
            _stack = new List<JsonPosition>(4);
            _dateTimeZoneHandling = DateTimeZoneHandling.RoundtripKind;
            _dateParseHandling = DateParseHandling.DateTime;
            _floatParseHandling = FloatParseHandling.Double;

            CloseInput = true;
        }

        // Enters a new container scope, pushing the previous current position
        // onto the stack. Enforces MaxDepth on the way down.
        private void Push(JsonContainerType value)
        {
            UpdateScopeWithFinishedValue();

            if (_currentPosition.Type == JsonContainerType.None)
            {
                _currentPosition = new JsonPosition(value);
            }
            else
            {
                _stack.Add(_currentPosition);
                _currentPosition = new JsonPosition(value);

                // this is a little hacky because Depth increases when first property/value is written but only testing here is faster/simpler
                if (_maxDepth != null && Depth + 1 > _maxDepth && !_hasExceededMaxDepth)
                {
                    _hasExceededMaxDepth = true;
                    throw JsonReaderException.Create(this, "The reader's MaxDepth of {0} has been exceeded.".FormatWith(CultureInfo.InvariantCulture, _maxDepth));
                }
            }
        }

        // Leaves the current container scope and returns its type. Clears the
        // max-depth latch once the reader is back within the allowed depth.
        private JsonContainerType Pop()
        {
            JsonPosition oldPosition;
            if (_stack.Count > 0)
            {
                oldPosition = _currentPosition;
                _currentPosition = _stack[_stack.Count - 1];
                _stack.RemoveAt(_stack.Count - 1);
            }
            else
            {
                oldPosition = _currentPosition;
                _currentPosition = new JsonPosition();
            }

            if (_maxDepth != null && Depth <= _maxDepth)
                _hasExceededMaxDepth = false;

            return oldPosition.Type;
        }

        private JsonContainerType Peek()
        {
            return _currentPosition.Type;
        }

        /// <summary>
        /// Reads the next JSON token from the stream.
        /// </summary>
        /// <returns>true if the next token was read successfully; false if there are no more tokens to read.</returns>
        public abstract bool Read();

        /// <summary>
        /// Reads the next JSON token from the stream as a <see cref="Nullable{Int32}"/>.
        /// </summary>
        /// <returns>A <see cref="Nullable{Int32}"/>. This method will return <c>null</c> at the end of an array.</returns>
        public abstract int? ReadAsInt32();

        /// <summary>
        /// Reads the next JSON token from the stream as a <see cref="String"/>.
        /// </summary>
        /// <returns>A <see cref="String"/>. This method will return <c>null</c> at the end of an array.</returns>
        public abstract string ReadAsString();

        /// <summary>
        /// Reads the next JSON token from the stream as a <see cref="T:Byte[]"/>.
        /// </summary>
        /// <returns>A <see cref="T:Byte[]"/> or a null reference if the next JSON token is null. This method will return <c>null</c> at the end of an array.</returns>
        public abstract byte[] ReadAsBytes();

        /// <summary>
        /// Reads the next JSON token from the stream as a <see cref="Nullable{Decimal}"/>.
        /// </summary>
        /// <returns>A <see cref="Nullable{Decimal}"/>. This method will return <c>null</c> at the end of an array.</returns>
        public abstract decimal? ReadAsDecimal();

        /// <summary>
        /// Reads the next JSON token from the stream as a <see cref="Nullable{DateTime}"/>.
        /// </summary>
        /// <returns>A <see cref="Nullable{DateTime}"/>. This method will return <c>null</c> at the end of an array.</returns>
        public abstract DateTime? ReadAsDateTime();

#if !NET20
        /// <summary>
        /// Reads the next JSON token from the stream as a <see cref="Nullable{DateTimeOffset}"/>.
        /// </summary>
        /// <returns>A <see cref="Nullable{DateTimeOffset}"/>. This method will return <c>null</c> at the end of an array.</returns>
        public abstract DateTimeOffset? ReadAsDateTimeOffset();
#endif

        // Base implementation is abstract-in-spirit: concrete readers that use
        // the ReadAs*Internal helpers below must override this.
        internal virtual bool ReadInternal()
        {
            throw new NotImplementedException();
        }

#if !NET20
        // Shared implementation for ReadAsDateTimeOffset: skips comments, then
        // accepts a Date token, null, end-of-array, or a parseable string.
        internal DateTimeOffset? ReadAsDateTimeOffsetInternal()
        {
            _readType = ReadType.ReadAsDateTimeOffset;

            JsonToken t;

            do
            {
                if (!ReadInternal())
                {
                    SetToken(JsonToken.None);
                    return null;
                }
                else
                {
                    t = TokenType;
                }
            } while (t == JsonToken.Comment);

            if (t == JsonToken.Date)
            {
                // upgrade a DateTime value to DateTimeOffset in place
                if (Value is DateTime)
                    SetToken(JsonToken.Date, new DateTimeOffset((DateTime)Value));

                return (DateTimeOffset)Value;
            }

            if (t == JsonToken.Null)
                return null;

            DateTimeOffset dt;
            if (t == JsonToken.String)
            {
                string s = (string)Value;
                if (string.IsNullOrEmpty(s))
                {
                    // empty string is treated as null, not a parse failure
                    SetToken(JsonToken.Null);
                    return null;
                }

                if (DateTimeOffset.TryParse(s, Culture, DateTimeStyles.RoundtripKind, out dt))
                {
                    SetToken(JsonToken.Date, dt);
                    return dt;
                }
                else
                {
                    throw JsonReaderException.Create(this, "Could not convert string to DateTimeOffset: {0}.".FormatWith(CultureInfo.InvariantCulture, Value));
                }
            }

            if (t == JsonToken.EndArray)
                return null;

            throw JsonReaderException.Create(this, "Error reading date. Unexpected token: {0}.".FormatWith(CultureInfo.InvariantCulture, t));
        }
#endif

        // Shared implementation for ReadAsBytes: accepts a base64 string, null,
        // an existing Bytes token, a $type-wrapped byte array object, or an
        // array of integers.
        internal byte[] ReadAsBytesInternal()
        {
            _readType = ReadType.ReadAsBytes;

            JsonToken t;

            do
            {
                if (!ReadInternal())
                {
                    SetToken(JsonToken.None);
                    return null;
                }
                else
                {
                    t = TokenType;
                }
            } while (t == JsonToken.Comment);

            if (IsWrappedInTypeObject())
            {
                byte[] data = ReadAsBytes();
                ReadInternal();
                SetToken(JsonToken.Bytes, data);
                return data;
            }

            // attempt to convert possible base 64 string to bytes
            if (t == JsonToken.String)
            {
                string s = (string)Value;
                byte[] data = (s.Length == 0) ? new byte[0] : Convert.FromBase64String(s);
                SetToken(JsonToken.Bytes, data);
                return data;
            }

            if (t == JsonToken.Null)
                return null;

            if (t == JsonToken.Bytes)
                return (byte[])Value;

            if (t == JsonToken.StartArray)
            {
                // accumulate integer elements until the array ends
                List<byte> data = new List<byte>();

                while (ReadInternal())
                {
                    t = TokenType;
                    switch (t)
                    {
                        case JsonToken.Integer:
                            data.Add(Convert.ToByte(Value, CultureInfo.InvariantCulture));
                            break;
                        case JsonToken.EndArray:
                            byte[] d = data.ToArray();
                            SetToken(JsonToken.Bytes, d);
                            return d;
                        case JsonToken.Comment:
                            // skip
                            break;
                        default:
                            throw JsonReaderException.Create(this, "Unexpected token when reading bytes: {0}.".FormatWith(CultureInfo.InvariantCulture, t));
                    }
                }

                throw JsonReaderException.Create(this, "Unexpected end when reading bytes.");
            }

            if (t == JsonToken.EndArray)
                return null;

            throw JsonReaderException.Create(this, "Error reading bytes. Unexpected token: {0}.".FormatWith(CultureInfo.InvariantCulture, t));
        }

        // Shared implementation for ReadAsDecimal: coerces Integer/Float tokens
        // and parseable strings to decimal.
        internal decimal? ReadAsDecimalInternal()
        {
            _readType = ReadType.ReadAsDecimal;

            JsonToken t;

            do
            {
                if (!ReadInternal())
                {
                    SetToken(JsonToken.None);
                    return null;
                }
                else
                {
                    t = TokenType;
                }
            } while (t == JsonToken.Comment);

            if (t == JsonToken.Integer || t == JsonToken.Float)
            {
                // normalise the stored value so Value is always a decimal here
                if (!(Value is decimal))
                    SetToken(JsonToken.Float, Convert.ToDecimal(Value, CultureInfo.InvariantCulture));

                return (decimal)Value;
            }

            if (t == JsonToken.Null)
                return null;

            decimal d;
            if (t == JsonToken.String)
            {
                string s = (string)Value;
                if (string.IsNullOrEmpty(s))
                {
                    SetToken(JsonToken.Null);
                    return null;
                }

                if (decimal.TryParse(s, NumberStyles.Number, Culture, out d))
                {
                    SetToken(JsonToken.Float, d);
                    return d;
                }
                else
                {
                    throw JsonReaderException.Create(this, "Could not convert string to decimal: {0}.".FormatWith(CultureInfo.InvariantCulture, Value));
                }
            }

            if (t == JsonToken.EndArray)
                return null;

            throw JsonReaderException.Create(this, "Error reading decimal. Unexpected token: {0}.".FormatWith(CultureInfo.InvariantCulture, t));
        }

        // Shared implementation for ReadAsInt32: coerces Integer/Float tokens
        // and parseable strings to int.
        internal int? ReadAsInt32Internal()
        {
            _readType = ReadType.ReadAsInt32;

            JsonToken t;

            do
            {
                if (!ReadInternal())
                {
                    SetToken(JsonToken.None);
                    return null;
                }
                else
                {
                    t = TokenType;
                }
            } while (t == JsonToken.Comment);

            if (t == JsonToken.Integer || t == JsonToken.Float)
            {
                if (!(Value is int))
                    SetToken(JsonToken.Integer, Convert.ToInt32(Value, CultureInfo.InvariantCulture));

                return (int)Value;
            }

            if (t == JsonToken.Null)
                return null;

            int i;
            if (t == JsonToken.String)
            {
                string s = (string)Value;
                if (string.IsNullOrEmpty(s))
                {
                    SetToken(JsonToken.Null);
                    return null;
                }

                if (int.TryParse(s, NumberStyles.Integer, Culture, out i))
                {
                    SetToken(JsonToken.Integer, i);
                    return i;
                }
                else
                {
                    throw JsonReaderException.Create(this, "Could not convert string to integer: {0}.".FormatWith(CultureInfo.InvariantCulture, Value));
                }
            }

            if (t == JsonToken.EndArray)
                return null;

            // NOTE(review): sibling methods format the local 't' here; TokenType
            // holds the same value at this point, but the inconsistency is worth
            // confirming against upstream before changing.
            throw JsonReaderException.Create(this, "Error reading integer. Unexpected token: {0}.".FormatWith(CultureInfo.InvariantCulture, TokenType));
        }

        // Shared implementation for ReadAsString: returns strings directly and
        // stringifies other primitive tokens using the reader Culture.
        internal string ReadAsStringInternal()
        {
            _readType = ReadType.ReadAsString;

            JsonToken t;

            do
            {
                if (!ReadInternal())
                {
                    SetToken(JsonToken.None);
                    return null;
                }
                else
                {
                    t = TokenType;
                }
            } while (t == JsonToken.Comment);

            if (t == JsonToken.String)
                return (string)Value;

            if (t == JsonToken.Null)
                return null;

            if (IsPrimitiveToken(t))
            {
                if (Value != null)
                {
                    string s;
                    if (Value is IFormattable)
                        s = ((IFormattable)Value).ToString(null, Culture);
                    else
                        s = Value.ToString();

                    SetToken(JsonToken.String, s);
                    return s;
                }
            }

            if (t == JsonToken.EndArray)
                return null;

            throw JsonReaderException.Create(this, "Error reading string. Unexpected token: {0}.".FormatWith(CultureInfo.InvariantCulture, t));
        }

        // Shared implementation for ReadAsDateTime: accepts a Date token, null,
        // end-of-array, or a string parsed with RoundtripKind and then adjusted
        // per DateTimeZoneHandling.
        internal DateTime? ReadAsDateTimeInternal()
        {
            _readType = ReadType.ReadAsDateTime;

            do
            {
                if (!ReadInternal())
                {
                    SetToken(JsonToken.None);
                    return null;
                }
            } while (TokenType == JsonToken.Comment);

            if (TokenType == JsonToken.Date)
                return (DateTime)Value;

            if (TokenType == JsonToken.Null)
                return null;

            DateTime dt;
            if (TokenType == JsonToken.String)
            {
                string s = (string)Value;
                if (string.IsNullOrEmpty(s))
                {
                    SetToken(JsonToken.Null);
                    return null;
                }

                if (DateTime.TryParse(s, Culture, DateTimeStyles.RoundtripKind, out dt))
                {
                    dt = DateTimeUtils.EnsureDateTime(dt, DateTimeZoneHandling);
                    SetToken(JsonToken.Date, dt);
                    return dt;
                }
                else
                {
                    throw JsonReaderException.Create(this, "Could not convert string to DateTime: {0}.".FormatWith(CultureInfo.InvariantCulture, Value));
                }
            }

            if (TokenType == JsonToken.EndArray)
                return null;

            throw JsonReaderException.Create(this, "Error reading date. Unexpected token: {0}.".FormatWith(CultureInfo.InvariantCulture, TokenType));
        }

        // Detects the TypeNameHandling wrapper form
        // { "$type": "System.Byte[], ...", "$value": "..." } and positions the
        // reader on the $value property when found.
        private bool IsWrappedInTypeObject()
        {
            _readType = ReadType.Read;

            if (TokenType == JsonToken.StartObject)
            {
                if (!ReadInternal())
                    throw JsonReaderException.Create(this, "Unexpected end when reading bytes.");

                if (Value.ToString() == "$type")
                {
                    ReadInternal();
                    // NOTE(review): culture-sensitive StartsWith; an ordinal
                    // comparison would be the conventional choice for a type name.
                    if (Value != null && Value.ToString().StartsWith("System.Byte[]"))
                    {
                        ReadInternal();
                        if (Value.ToString() == "$value")
                        {
                            return true;
                        }
                    }
                }

                throw JsonReaderException.Create(this, "Error reading bytes. Unexpected token: {0}.".FormatWith(CultureInfo.InvariantCulture, JsonToken.StartObject));
            }

            return false;
        }

        /// <summary>
        /// Skips the children of the current token.
        /// </summary>
        public void Skip()
        {
            if (TokenType == JsonToken.PropertyName)
                Read();

            if (IsStartToken(TokenType))
            {
                int depth = Depth;

                // keep reading until the reader returns to the depth it started at
                while (Read() && (depth < Depth))
                {
                }
            }
        }

        /// <summary>
        /// Sets the current token.
        /// </summary>
        /// <param name="newToken">The new token.</param>
        protected void SetToken(JsonToken newToken)
        {
            SetToken(newToken, null);
        }

        /// <summary>
        /// Sets the current token and value.
        /// </summary>
        /// <param name="newToken">The new token.</param>
        /// <param name="value">The value.</param>
        protected void SetToken(JsonToken newToken, object value)
        {
            _tokenType = newToken;
            _value = value;

            switch (newToken)
            {
                case JsonToken.StartObject:
                    _currentState = State.ObjectStart;
                    Push(JsonContainerType.Object);
                    break;
                case JsonToken.StartArray:
                    _currentState = State.ArrayStart;
                    Push(JsonContainerType.Array);
                    break;
                case JsonToken.StartConstructor:
                    _currentState = State.ConstructorStart;
                    Push(JsonContainerType.Constructor);
                    break;
                case JsonToken.EndObject:
                    ValidateEnd(JsonToken.EndObject);
                    break;
                case JsonToken.EndArray:
                    ValidateEnd(JsonToken.EndArray);
                    break;
                case JsonToken.EndConstructor:
                    ValidateEnd(JsonToken.EndConstructor);
                    break;
                case JsonToken.PropertyName:
                    _currentState = State.Property;
                    _currentPosition.PropertyName = (string)value;
                    break;
                case JsonToken.Undefined:
                case JsonToken.Integer:
                case JsonToken.Float:
                case JsonToken.Boolean:
                case JsonToken.Null:
                case JsonToken.Date:
                case JsonToken.String:
                case JsonToken.Raw:
                case JsonToken.Bytes:
                    // a scalar value either completes a container entry or, at
                    // the top level, completes the document
                    if (Peek() != JsonContainerType.None)
                        _currentState = State.PostValue;
                    else
                        SetFinished();

                    UpdateScopeWithFinishedValue();
                    break;
            }
        }

        // Advances the array/constructor element index within the current scope.
        private void UpdateScopeWithFinishedValue()
        {
            if (_currentPosition.HasIndex)
                _currentPosition.Position++;
        }

        // Pops the current container and verifies the close token matches it.
        private void ValidateEnd(JsonToken endToken)
        {
            JsonContainerType currentObject = Pop();

            if (GetTypeForCloseToken(endToken) != currentObject)
                throw JsonReaderException.Create(this, "JsonToken {0} is not valid for closing JsonType {1}.".FormatWith(CultureInfo.InvariantCulture, endToken, currentObject));

            if (Peek() != JsonContainerType.None)
                _currentState = State.PostValue;
            else
                SetFinished();
        }

        /// <summary>
        /// Sets the state based on current token type.
        /// </summary>
        protected void SetStateBasedOnCurrent()
        {
            JsonContainerType currentObject = Peek();

            switch (currentObject)
            {
                case JsonContainerType.Object:
                    _currentState = State.Object;
                    break;
                case JsonContainerType.Array:
                    _currentState = State.Array;
                    break;
                case JsonContainerType.Constructor:
                    _currentState = State.Constructor;
                    break;
                case JsonContainerType.None:
                    SetFinished();
                    break;
                default:
                    throw JsonReaderException.Create(this, "While setting the reader state back to current object an unexpected JsonType was encountered: {0}".FormatWith(CultureInfo.InvariantCulture, currentObject));
            }
        }

        // After the last top-level token: either wait for more content
        // (SupportMultipleContent) or mark the reader finished.
        private void SetFinished()
        {
            if (SupportMultipleContent)
                _currentState = State.Start;
            else
                _currentState = State.Finished;
        }

        internal static bool IsPrimitiveToken(JsonToken token)
        {
            switch (token)
            {
                case JsonToken.Integer:
                case JsonToken.Float:
                case JsonToken.String:
                case JsonToken.Boolean:
                case JsonToken.Undefined:
                case JsonToken.Null:
                case JsonToken.Date:
                case JsonToken.Bytes:
                    return true;
                default:
                    return false;
            }
        }

        internal static bool IsStartToken(JsonToken token)
        {
            switch (token)
            {
                case JsonToken.StartObject:
                case JsonToken.StartArray:
                case JsonToken.StartConstructor:
                    return true;
                default:
                    return false;
            }
        }

        // Maps an End* token to the container type it closes.
        private JsonContainerType GetTypeForCloseToken(JsonToken token)
        {
            switch (token)
            {
                case JsonToken.EndObject:
                    return JsonContainerType.Object;
                case JsonToken.EndArray:
                    return JsonContainerType.Array;
                case JsonToken.EndConstructor:
                    return JsonContainerType.Constructor;
                default:
                    throw JsonReaderException.Create(this, "Not a valid close JsonToken: {0}".FormatWith(CultureInfo.InvariantCulture, token));
            }
        }

        /// <summary>
        /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
        /// </summary>
        void IDisposable.Dispose()
        {
            Dispose(true);
        }

        /// <summary>
        /// Releases unmanaged and - optionally - managed resources
        /// </summary>
        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
        protected virtual void Dispose(bool disposing)
        {
            if (_currentState != State.Closed && disposing)
                Close();
        }

        /// <summary>
        /// Changes the <see cref="State"/> to Closed.
        /// </summary>
        public virtual void Close()
        {
            _currentState = State.Closed;
            _tokenType = JsonToken.None;
            _value = null;
        }
    }
}
// Copyright (c) 2007-2017 ppy Pty Ltd <contact@ppy.sh>.
// Licensed under the MIT Licence - https://raw.githubusercontent.com/ppy/osu/master/LICENCE

using System;
using System.Collections.Generic;
using System.Linq;
using OpenTK;
using OpenTK.Graphics;
using osu.Framework.Allocation;
using osu.Framework.Extensions.Color4Extensions;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Colour;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Primitives;
using osu.Framework.Graphics.Sprites;
using osu.Framework.Graphics.Textures;
using osu.Framework.Localisation;
using osu.Game.Beatmaps;
using osu.Game.Database;
using osu.Game.Graphics;
using osu.Game.Graphics.Sprites;
using osu.Game.Graphics.UserInterface;
using osu.Game.Rulesets.Scoring;
using osu.Game.Screens.Play;
using osu.Game.Screens.Select.Leaderboards;
using osu.Game.Users;

namespace osu.Game.Screens.Ranking
{
    /// <summary>
    /// Results-screen page showing the score itself: user header, rank, total
    /// score with a progress graph, beatmap details, date and hit statistics.
    /// </summary>
    internal class ResultsPageScore : ResultsPage
    {
        // counter animated up to the total score once the page has loaded
        private ScoreCounter scoreCounter;

        public ResultsPageScore(Score score, WorkingBeatmap beatmap) : base(score, beatmap) { }

        // horizontally-flowing row of per-statistic displays (e.g. hit counts)
        private FillFlowContainer<DrawableScoreStatistic> statisticsContainer;

        [BackgroundDependencyLoader]
        private void load(OsuColour colours)
        {
            const float user_header_height = 120;

            // children are declared back-to-front: white page background first,
            // then the vertical flow of content on top
            Children = new Drawable[]
            {
                new Container
                {
                    RelativeSizeAxes = Axes.Both,
                    Padding = new MarginPadding { Top = user_header_height },
                    Children = new Drawable[]
                    {
                        new Box
                        {
                            RelativeSizeAxes = Axes.Both,
                            Colour = Color4.White,
                        },
                    }
                },
                new FillFlowContainer
                {
                    RelativeSizeAxes = Axes.X,
                    AutoSizeAxes = Axes.Y,
                    Direction = FillDirection.Vertical,
                    Children = new Drawable[]
                    {
                        new UserHeader(Score.User)
                        {
                            Anchor = Anchor.TopCentre,
                            Origin = Anchor.TopCentre,
                            RelativeSizeAxes = Axes.X,
                            Height = user_header_height,
                        },
                        new DrawableRank(Score.Rank)
                        {
                            Anchor = Anchor.TopCentre,
                            Origin = Anchor.TopCentre,
                            Size = new Vector2(150, 60),
                            Margin = new MarginPadding(20),
                        },
                        new Container
                        {
                            Anchor = Anchor.TopCentre,
                            Origin = Anchor.TopCentre,
                            RelativeSizeAxes = Axes.X,
                            Height = 60,
                            Children = new Drawable[]
                            {
                                // faint beatmap difficulty graph behind the score counter
                                new SongProgressGraph
                                {
                                    RelativeSizeAxes = Axes.Both,
                                    Alpha = 0.5f,
                                    Objects = Beatmap.Beatmap.HitObjects,
                                },
                                scoreCounter = new SlowScoreCounter(6)
                                {
                                    Anchor = Anchor.TopCentre,
                                    Origin = Anchor.TopCentre,
                                    Colour = colours.PinkDarker,
                                    Y = 10,
                                    TextSize = 56,
                                },
                            }
                        },
                        new OsuSpriteText
                        {
                            Anchor = Anchor.TopCentre,
                            Origin = Anchor.TopCentre,
                            Colour = colours.PinkDarker,
                            Shadow = false,
                            Font = @"Exo2.0-Bold",
                            TextSize = 16,
                            Text = "total score",
                            Margin = new MarginPadding { Bottom = 15 },
                        },
                        new BeatmapDetails(Beatmap.BeatmapInfo)
                        {
                            Anchor = Anchor.TopCentre,
                            Origin = Anchor.TopCentre,
                            Margin = new MarginPadding { Bottom = 10 },
                        },
                        new DateDisplay(Score.Date)
                        {
                            Anchor = Anchor.TopCentre,
                            Origin = Anchor.TopCentre,
                        },
                        // decorative horizontal separator built from two
                        // mirrored gradient boxes meeting in the middle
                        new Container
                        {
                            RelativeSizeAxes = Axes.X,
                            Size = new Vector2(0.75f, 1),
                            Anchor = Anchor.TopCentre,
                            Origin = Anchor.TopCentre,
                            Margin = new MarginPadding { Top = 10, Bottom = 10 },
                            Children = new Drawable[]
                            {
                                new Box
                                {
                                    ColourInfo = ColourInfo.GradientHorizontal(
                                        colours.GrayC.Opacity(0),
                                        colours.GrayC.Opacity(0.9f)),
                                    RelativeSizeAxes = Axes.Both,
                                    Size = new Vector2(0.5f, 1),
                                },
                                new Box
                                {
                                    Anchor = Anchor.TopRight,
                                    Origin = Anchor.TopRight,
                                    ColourInfo = ColourInfo.GradientHorizontal(
                                        colours.GrayC.Opacity(0.9f),
                                        colours.GrayC.Opacity(0)),
                                    RelativeSizeAxes = Axes.Both,
                                    Size = new Vector2(0.5f, 1),
                                },
                            }
                        },
                        statisticsContainer = new FillFlowContainer<DrawableScoreStatistic>
                        {
                            AutoSizeAxes = Axes.Both,
                            Anchor = Anchor.TopCentre,
                            Origin = Anchor.TopCentre,
                            Direction = FillDirection.Horizontal,
                            LayoutDuration = 200,
                            LayoutEasing = EasingTypes.OutQuint
                        }
                    }
                }
            };

            // one drawable per statistic entry on the score
            statisticsContainer.Children = Score.Statistics.Select(s => new DrawableScoreStatistic(s));
        }

        protected override void LoadComplete()
        {
            base.LoadComplete();

            // defer the entrance animations to the first update after load
            Schedule(() =>
            {
                scoreCounter.Increment(Score.TotalScore);

                // stagger the statistics' fade-in, 200ms apart
                int delay = 0;
                foreach (var s in statisticsContainer.Children)
                {
                    s.FadeOut();
                    s.Delay(delay += 200);
                    s.FadeIn(300 + delay, EasingTypes.Out);
                }
            });
        }

        /// <summary>
        /// A single statistic (value over its name) shown in the statistics row.
        /// </summary>
        private class DrawableScoreStatistic : Container
        {
            private readonly KeyValuePair<string, dynamic> statistic;

            public DrawableScoreStatistic(KeyValuePair<string, dynamic> statistic)
            {
                this.statistic = statistic;

                AutoSizeAxes = Axes.Both;
                Margin = new MarginPadding { Left = 5, Right = 5 };
            }

            [BackgroundDependencyLoader]
            private void load(OsuColour colours)
            {
                Children = new Drawable[]
                {
                    new SpriteText
                    {
                        // zero-padded to 4 characters, e.g. "0300"
                        Text = statistic.Value.ToString().PadLeft(4, '0'),
                        Colour = colours.Gray7,
                        TextSize = 30,
                        Anchor = Anchor.TopCentre,
                        Origin = Anchor.TopCentre,
                    },
                    new SpriteText
                    {
                        Text = statistic.Key,
                        Colour = colours.Gray7,
                        Font = @"Exo2.0-Bold",
                        Y = 26,
                        Anchor = Anchor.TopCentre,
                        Origin = Anchor.TopCentre,
                    },
                };
            }
        }

        /// <summary>
        /// Rounded pill showing the score's time (left) and date (right).
        /// </summary>
        private class DateDisplay : Container
        {
            private DateTime date;

            public DateDisplay(DateTime date)
            {
                this.date = date;

                AutoSizeAxes = Axes.Y;

                Width = 140;

                Masking = true;
                CornerRadius = 5;
            }

            [BackgroundDependencyLoader]
            private void load(OsuColour colours)
            {
                Children = new Drawable[]
                {
                    new Box
                    {
                        RelativeSizeAxes = Axes.Both,
                        Colour = colours.Gray6,
                    },
                    new OsuSpriteText
                    {
                        Origin = Anchor.CentreLeft,
                        Anchor = Anchor.CentreLeft,
                        Text = date.ToString("HH:mm"),
                        Padding = new MarginPadding { Left = 10, Right = 10, Top = 5, Bottom = 5 },
                        Colour = Color4.White,
                    },
                    new OsuSpriteText
                    {
                        Origin = Anchor.CentreRight,
                        Anchor = Anchor.CentreRight,
                        Text = date.ToString("yyyy/MM/dd"),
                        Padding = new MarginPadding { Left = 10, Right = 10, Top = 5, Bottom = 5 },
                        Colour = Color4.White,
                    }
                };
            }
        }

        /// <summary>
        /// Title / artist / version-and-mapper lines for the played beatmap,
        /// with unicode preference applied via localisation.
        /// </summary>
        private class BeatmapDetails : Container
        {
            private readonly BeatmapInfo beatmap;

            private readonly OsuSpriteText title;
            private readonly OsuSpriteText artist;
            private readonly OsuSpriteText versionMapper;

            public BeatmapDetails(BeatmapInfo beatmap)
            {
                this.beatmap = beatmap;

                AutoSizeAxes = Axes.Both;

                Children = new Drawable[]
                {
                    new FillFlowContainer
                    {
                        Direction = FillDirection.Vertical,
                        RelativeSizeAxes = Axes.X,
                        AutoSizeAxes = Axes.Y,
                        Children = new Drawable[]
                        {
                            title = new OsuSpriteText
                            {
                                Anchor = Anchor.TopCentre,
                                Origin = Anchor.TopCentre,
                                Shadow = false,
                                TextSize = 24,
                                Font = @"Exo2.0-BoldItalic",
                            },
                            artist = new OsuSpriteText
                            {
                                Anchor = Anchor.TopCentre,
                                Origin = Anchor.TopCentre,
                                Shadow = false,
                                TextSize = 20,
                                Font = @"Exo2.0-BoldItalic",
                            },
                            versionMapper = new OsuSpriteText
                            {
                                Anchor = Anchor.TopCentre,
                                Origin = Anchor.TopCentre,
                                Shadow = false,
                                TextSize = 16,
                                Font = @"Exo2.0-Bold",
                            },
                        }
                    }
                };
            }

            [BackgroundDependencyLoader]
            private void load(OsuColour colours, LocalisationEngine localisation)
            {
                title.Colour = artist.Colour = colours.BlueDarker;
                versionMapper.Colour = colours.Gray8;

                versionMapper.Text = $"{beatmap.Version} - mapped by {beatmap.Metadata.Author}";

                // bindables that switch between unicode and romanised metadata
                title.Current = localisation.GetUnicodePreference(beatmap.Metadata.TitleUnicode, beatmap.Metadata.Title);
                artist.Current = localisation.GetUnicodePreference(beatmap.Metadata.ArtistUnicode, beatmap.Metadata.Artist);
            }
        }

        /// <summary>
        /// Header band showing the player's cover image and username.
        /// </summary>
        private class UserHeader : Container
        {
            private readonly User user;
            private readonly Sprite cover;

            public UserHeader(User user)
            {
                this.user = user;
                Children = new Drawable[]
                {
                    cover = new Sprite
                    {
                        FillMode = FillMode.Fill,
                        Anchor = Anchor.Centre,
                        Origin = Anchor.Centre,
                    },
                    new OsuSpriteText
                    {
                        Font = @"Exo2.0-RegularItalic",
                        Text = user.Username,
                        Anchor = Anchor.BottomCentre,
                        Origin = Anchor.BottomCentre,
                        TextSize = 30,
                        Padding = new MarginPadding { Bottom = 10 },
                    }
                };
            }

            [BackgroundDependencyLoader]
            private void load(TextureStore textures)
            {
                // cover may be absent for some users; only fetch when a URL exists
                if (!string.IsNullOrEmpty(user.CoverUrl))
                    cover.Texture = textures.Get(user.CoverUrl);
            }
        }

        /// <summary>
        /// Score counter with a slow roll-up animation used for the total score.
        /// </summary>
        private class SlowScoreCounter : ScoreCounter
        {
            protected override double RollingDuration => 3000;

            protected override EasingTypes RollingEasing => EasingTypes.OutPow10;

            public SlowScoreCounter(uint leading = 0) : base(leading)
            {
                DisplayedCountSpriteText.Shadow = false;
                DisplayedCountSpriteText.Font = @"Venera-Light";
                UseCommaSeparator = true;
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.ServiceModel;
using System.ServiceModel.Channels;
using TestTypes;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Xunit;

// Scenario tests that exercise WCF typed proxies.
//
// ServiceContract typed proxy tests create a ChannelFactory using a provided [ServiceContract] Interface which...
//     returns a generated proxy based on that Interface.
// ChannelShape typed proxy tests create a ChannelFactory using a WCF understood channel shape which...
//     returns a generated proxy based on the channel shape used, such as...
//     IRequestChannel (for a request-reply message exchange pattern)
//     IDuplexChannel (for a two-way duplex message exchange pattern)
public static class TypedProxyTests
{
    // SOAP action used by the raw-Message (IRequestChannel) tests.
    private const string action = "http://tempuri.org/IWcfService/MessageRequestReply";

    // Body content sent in the raw-Message tests; the service echoes it back with a suffix.
    private const string clientMessage = "[client] This is my request.";

    // Maximum time the duplex-callback test waits for the service to call back.
    static TimeSpan maxTestWaitTime = TimeSpan.FromSeconds(10);

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_AsyncBeginEnd_Call()
    {
        // HTTP transport with text encoding; delegates to the shared Begin/End helper.
        CustomBinding customBinding = new CustomBinding();
        customBinding.Elements.Add(new TextMessageEncodingBindingElement());
        customBinding.Elements.Add(new HttpTransportBindingElement());
        ServiceContract_TypedProxy_AsyncBeginEnd_Call(customBinding, Endpoints.DefaultCustomHttp_Address, "ServiceContract_TypedProxy_AsyncBeginEnd_Call");
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_NetTcpBinding_AsyncBeginEnd_Call()
    {
        // Same Begin/End scenario over net.tcp without security.
        NetTcpBinding netTcpBinding = new NetTcpBinding(SecurityMode.None);
        ServiceContract_TypedProxy_AsyncBeginEnd_Call(netTcpBinding, Endpoints.Tcp_NoSecurity_Address, "ServiceContract_TypedProxy_NetTcpBinding_AsyncBeginEnd_Call");
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_AsyncBeginEnd_Call_WithNoCallback()
    {
        CustomBinding customBinding = new CustomBinding();
        customBinding.Elements.Add(new TextMessageEncodingBindingElement());
        customBinding.Elements.Add(new HttpTransportBindingElement());
        ServiceContract_TypedProxy_AsyncBeginEnd_Call_WithNoCallback(customBinding, Endpoints.DefaultCustomHttp_Address, "ServiceContract_TypedProxy_AsyncBeginEnd_Call_WithNoCallback");
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_NetTcpBinding_AsyncBeginEnd_Call_WithNoCallback()
    {
        NetTcpBinding netTcpBinding = new NetTcpBinding(SecurityMode.None);
        ServiceContract_TypedProxy_AsyncBeginEnd_Call_WithNoCallback(netTcpBinding, Endpoints.Tcp_NoSecurity_Address, "ServiceContract_TypedProxy_NetTcpBinding_AsyncBeginEnd_Call_WithNoCallback");
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_AsyncBeginEnd_Call_WithSingleThreadedSyncContext()
    {
        // Re-runs the Begin/End scenario on a single-threaded SynchronizationContext to
        // catch deadlocks caused by continuations requiring the captured context.
        bool success = Task.Run(() =>
        {
            SingleThreadSynchronizationContext.Run(() =>
            {
                Task.Factory.StartNew(() => TypedProxyTests.ServiceContract_TypedProxy_AsyncBeginEnd_Call(), CancellationToken.None, TaskCreationOptions.None, TaskScheduler.FromCurrentSynchronizationContext()).Wait();
            });
        }).Wait(ScenarioTestHelpers.TestTimeout);

        Assert.True(success, "Test Scenario: TypedProxy_AsyncBeginEnd_Call_WithSingleThreadedSyncContext timed out");
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_NetTcpBinding_AsyncBeginEnd_Call_WithSingleThreadedSyncContext()
    {
        bool success = Task.Run(() =>
        {
            SingleThreadSynchronizationContext.Run(() =>
            {
                Task.Factory.StartNew(() => TypedProxyTests.ServiceContract_TypedProxy_NetTcpBinding_AsyncBeginEnd_Call(), CancellationToken.None, TaskCreationOptions.None, TaskScheduler.FromCurrentSynchronizationContext()).Wait();
            });
        }).Wait(ScenarioTestHelpers.TestTimeout);

        Assert.True(success, "Test Scenario: ServiceContract_TypedProxy_NetTcpBinding_AsyncBeginEnd_Call_WithSingleThreadedSyncContext timed out");
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_AsyncTask_Call()
    {
        CustomBinding customBinding = new CustomBinding();
        customBinding.Elements.Add(new TextMessageEncodingBindingElement());
        customBinding.Elements.Add(new HttpTransportBindingElement());
        ServiceContract_TypedProxy_AsyncTask_Call(customBinding, Endpoints.DefaultCustomHttp_Address, "ServiceContract_TypedProxy_AsyncTask_Call");
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_NetTcpBinding_AsyncTask_Call()
    {
        NetTcpBinding netTcpBinding = new NetTcpBinding();
        netTcpBinding.Security.Mode = SecurityMode.None;
        ServiceContract_TypedProxy_AsyncTask_Call(netTcpBinding, Endpoints.Tcp_NoSecurity_Address, "ServiceContract_TypedProxy_NetTcpBinding_AsyncTask_Call");
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_AsyncTask_Call_WithSingleThreadedSyncContext()
    {
        bool success = Task.Run(() =>
        {
            SingleThreadSynchronizationContext.Run(() =>
            {
                Task.Factory.StartNew(() => TypedProxyTests.ServiceContract_TypedProxy_AsyncTask_Call(), CancellationToken.None, TaskCreationOptions.None, TaskScheduler.FromCurrentSynchronizationContext()).Wait();
            });
        }).Wait(ScenarioTestHelpers.TestTimeout);

        Assert.True(success, "Test Scenario: TypedProxy_AsyncTask_Call_WithSingleThreadedSyncContext timed out");
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy__NetTcpBinding_AsyncTask_Call_WithSingleThreadedSyncContext()
    {
        bool success = Task.Run(() =>
        {
            SingleThreadSynchronizationContext.Run(() =>
            {
                Task.Factory.StartNew(() => TypedProxyTests.ServiceContract_TypedProxy_NetTcpBinding_AsyncTask_Call(), CancellationToken.None, TaskCreationOptions.None, TaskScheduler.FromCurrentSynchronizationContext()).Wait();
            });
        }).Wait(ScenarioTestHelpers.TestTimeout);

        Assert.True(success, "Test Scenario: ServiceContract_TypedProxy__NetTcpBinding_AsyncTask_Call_WithSingleThreadedSyncContext timed out");
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_Synchronous_Call()
    {
        // This test verifies a typed proxy can call a service operation synchronously
        StringBuilder errorBuilder = new StringBuilder();
        try
        {
            CustomBinding customBinding = new CustomBinding();
            customBinding.Elements.Add(new TextMessageEncodingBindingElement());
            customBinding.Elements.Add(new HttpTransportBindingElement());

            // Note the service interface used.  It was manually generated with svcutil.
            ChannelFactory<IWcfServiceGenerated> factory = new ChannelFactory<IWcfServiceGenerated>(customBinding, new EndpointAddress(Endpoints.DefaultCustomHttp_Address));
            IWcfServiceGenerated serviceProxy = factory.CreateChannel();

            string result = serviceProxy.Echo("Hello");
            if (!string.Equals(result, "Hello"))
            {
                errorBuilder.AppendLine(String.Format("Expected response from Service: {0} Actual was: {1}", "Hello", result));
            }

            factory.Close();
        }
        catch (Exception ex)
        {
            errorBuilder.AppendLine(String.Format("Unexpected exception was caught: {0}", ex.ToString()));
        }

        Assert.True(errorBuilder.Length == 0, string.Format("Test Scenario: TypedProxySynchronousCall FAILED with the following errors: {0}", errorBuilder));
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_Synchronous_Call_WithSingleThreadedSyncContext()
    {
        bool success = Task.Run(() =>
        {
            TestTypes.SingleThreadSynchronizationContext.Run(() =>
            {
                Task.Factory.StartNew(() => TypedProxyTests.ServiceContract_TypedProxy_Synchronous_Call(), CancellationToken.None, TaskCreationOptions.None, TaskScheduler.FromCurrentSynchronizationContext()).Wait();
            });
        }).Wait(ScenarioTestHelpers.TestTimeout);

        Assert.True(success, "Test Scenario: TypedProxy_Synchronous_Call_WithSingleThreadedSyncContext timed out");
    }

    [Fact]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_Task_Call_WithSyncContext_ContinuesOnSameThread()
    {
        // This test verifies a task based call to a service operation continues on the same thread
        StringBuilder errorBuilder = new StringBuilder();
        try
        {
            CustomBinding customBinding = new CustomBinding();
            customBinding.Elements.Add(new TextMessageEncodingBindingElement());
            customBinding.Elements.Add(new HttpTransportBindingElement());

            ChannelFactory<IWcfServiceGenerated> factory = new ChannelFactory<IWcfServiceGenerated>(customBinding, new EndpointAddress(Endpoints.DefaultCustomHttp_Address));
            IWcfServiceGenerated serviceProxy = factory.CreateChannel();
            string result = String.Empty;

            bool success = Task.Run(() =>
            {
                SingleThreadSynchronizationContext.Run(async delegate
                {
                    // Record the thread before the await; the continuation must come back to it.
                    int startThread = Environment.CurrentManagedThreadId;
                    result = await serviceProxy.EchoAsync("Hello");
                    if (startThread != Environment.CurrentManagedThreadId)
                    {
                        errorBuilder.AppendLine(String.Format("Expected continuation to happen on thread {0} but actually continued on thread {1}", startThread, Environment.CurrentManagedThreadId));
                    }
                });
            }).Wait(ScenarioTestHelpers.TestTimeout);

            if (!success)
            {
                errorBuilder.AppendLine(String.Format("Test didn't complete within the expected time"));
            }

            if (!string.Equals(result, "Hello"))
            {
                errorBuilder.AppendLine(String.Format("Expected response from Service: {0} Actual was: {1}", "Hello", result));
            }

            factory.Close();
        }
        catch (Exception ex)
        {
            errorBuilder.AppendLine(String.Format("Unexpected exception was caught: {0}", ex.ToString()));
        }

        Assert.True(errorBuilder.Length == 0, string.Format("Test Scenario: TaskCallWithSynchContextContinuesOnSameThread FAILED with the following errors: {0}", errorBuilder));
    }

    [Fact]
    [OuterLoop]
    public static void ChannelShape_TypedProxy_InvokeIRequestChannel()
    {
        string address = Endpoints.DefaultCustomHttp_Address;
        StringBuilder errorBuilder = new StringBuilder();
        try
        {
            CustomBinding binding = new CustomBinding(new BindingElement[]
            {
                new TextMessageEncodingBindingElement(MessageVersion.Default, Encoding.UTF8),
                new HttpTransportBindingElement()
            });
            EndpointAddress endpointAddress = new EndpointAddress(address);

            // Create the channel factory for the request-reply message exchange pattern.
            var factory = new ChannelFactory<IRequestChannel>(binding, endpointAddress);

            // Create the channel.
            IRequestChannel channel = factory.CreateChannel();
            channel.Open();

            // Create the Message object to send to the service.
            Message requestMessage = Message.CreateMessage(
                binding.MessageVersion,
                action,
                new CustomBodyWriter(clientMessage));

            // Send the Message and receive the Response.
            Message replyMessage = channel.Request(requestMessage);

            string replyMessageAction = replyMessage.Headers.Action;
            if (!string.Equals(replyMessageAction, action + "Response"))
            {
                errorBuilder.AppendLine(String.Format("A response was received from the Service but it was not the expected Action, expected: {0} actual: {1}", action + "Response", replyMessageAction));
            }

            var replyReader = replyMessage.GetReaderAtBodyContents();
            string actualResponse = replyReader.ReadElementContentAsString();
            string expectedResponse = "[client] This is my request.[service] Request received, this is my Reply.";
            if (!string.Equals(actualResponse, expectedResponse))
            {
                errorBuilder.AppendLine(String.Format("Actual MessageBodyContent from service did not match the expected MessageBodyContent, expected: {0} actual: {1}", expectedResponse, actualResponse));
            }

            replyMessage.Close();
            channel.Close();
            factory.Close();
        }
        catch (Exception ex)
        {
            errorBuilder.AppendLine(String.Format("Unexpected exception was caught: {0}", ex.ToString()));
        }

        Assert.True(errorBuilder.Length == 0, string.Format("Test Scenario: InvokeRequestChannelViaProxy FAILED with the following errors: {0}", errorBuilder));
    }

    [Fact]
    [OuterLoop]
    public static void ChannelShape_TypedProxy_InvokeIRequestChannelTimeout()
    {
        string address = Endpoints.DefaultCustomHttp_Address;
        StringBuilder errorBuilder = new StringBuilder();
        try
        {
            CustomBinding binding = new CustomBinding(new BindingElement[]
            {
                new TextMessageEncodingBindingElement(MessageVersion.Default, Encoding.UTF8),
                new HttpTransportBindingElement()
            });
            EndpointAddress endpointAddress = new EndpointAddress(address);

            // Create the channel factory for the request-reply message exchange pattern.
            var factory = new ChannelFactory<IRequestChannel>(binding, endpointAddress);

            // Create the channel.
            IRequestChannel channel = factory.CreateChannel();
            channel.Open();

            // Create the Message object to send to the service.
            Message requestMessage = Message.CreateMessage(
                binding.MessageVersion,
                action,
                new CustomBodyWriter(clientMessage));

            // Send the Message and receive the Response, using the explicit-timeout overload.
            Message replyMessage = channel.Request(requestMessage, TimeSpan.FromSeconds(60));

            string replyMessageAction = replyMessage.Headers.Action;
            if (!string.Equals(replyMessageAction, action + "Response"))
            {
                errorBuilder.AppendLine(String.Format("A response was received from the Service but it was not the expected Action, expected: {0} actual: {1}", action + "Response", replyMessageAction));
            }

            var replyReader = replyMessage.GetReaderAtBodyContents();
            string actualResponse = replyReader.ReadElementContentAsString();
            string expectedResponse = "[client] This is my request.[service] Request received, this is my Reply.";
            if (!string.Equals(actualResponse, expectedResponse))
            {
                errorBuilder.AppendLine(String.Format("Actual MessageBodyContent from service did not match the expected MessageBodyContent, expected: {0} actual: {1}", expectedResponse, actualResponse));
            }

            replyMessage.Close();
            channel.Close();
            factory.Close();
        }
        catch (Exception ex)
        {
            errorBuilder.AppendLine(String.Format("Unexpected exception was caught: {0}", ex.ToString()));
        }

        Assert.True(errorBuilder.Length == 0, string.Format("Test Scenario: InvokeIRequestChannelViaProxyTimeout FAILED with the following errors: {0}", errorBuilder));
    }

    [Fact]
    [OuterLoop]
    public static void ChannelShape_TypedProxy_InvokeIRequestChannelAsync()
    {
        string address = Endpoints.DefaultCustomHttp_Address;
        StringBuilder errorBuilder = new StringBuilder();
        try
        {
            CustomBinding binding = new CustomBinding(new BindingElement[]
            {
                new TextMessageEncodingBindingElement(MessageVersion.Default, Encoding.UTF8),
                new HttpTransportBindingElement()
            });
            EndpointAddress endpointAddress = new EndpointAddress(address);

            // Create the channel factory for the request-reply message exchange pattern.
            var factory = new ChannelFactory<IRequestChannel>(binding, endpointAddress);

            // Create the channel.
            IRequestChannel channel = factory.CreateChannel();
            channel.Open();

            // Create the Message object to send to the service.
            Message requestMessage = Message.CreateMessage(
                binding.MessageVersion,
                action,
                new CustomBodyWriter(clientMessage));

            // Send the Message and receive the Response via the Begin/End async pattern.
            IAsyncResult ar = channel.BeginRequest(requestMessage, null, null);
            Message replyMessage = channel.EndRequest(ar);

            string replyMessageAction = replyMessage.Headers.Action;
            if (!string.Equals(replyMessageAction, action + "Response"))
            {
                errorBuilder.AppendLine(String.Format("A response was received from the Service but it was not the expected Action, expected: {0} actual: {1}", action + "Response", replyMessageAction));
            }

            var replyReader = replyMessage.GetReaderAtBodyContents();
            string actualResponse = replyReader.ReadElementContentAsString();
            string expectedResponse = "[client] This is my request.[service] Request received, this is my Reply.";
            if (!string.Equals(actualResponse, expectedResponse))
            {
                errorBuilder.AppendLine(String.Format("Actual MessageBodyContent from service did not match the expected MessageBodyContent, expected: {0} actual: {1}", expectedResponse, actualResponse));
            }

            replyMessage.Close();
            channel.Close();
            factory.Close();
        }
        catch (Exception ex)
        {
            errorBuilder.AppendLine(String.Format("Unexpected exception was caught: {0}", ex.ToString()));
        }

        Assert.True(errorBuilder.Length == 0, string.Format("Test Scenario: InvokeIRequestChannelViaProxyAsync FAILED with the following errors: {0}", errorBuilder));
    }

    [Fact]
    [ActiveIssue(157)]
    [OuterLoop]
    public static void ServiceContract_TypedProxy_DuplexCallback()
    {
        // Verifies a duplex contract: the client pings the service and the service
        // calls back with the same GUID on the callback contract.
        DuplexChannelFactory<IDuplexChannelService> factory = null;
        StringBuilder errorBuilder = new StringBuilder();
        Guid guid = Guid.NewGuid();

        try
        {
            NetTcpBinding binding = new NetTcpBinding();
            binding.Security.Mode = SecurityMode.None;

            DuplexChannelServiceCallback callbackService = new DuplexChannelServiceCallback();
            InstanceContext context = new InstanceContext(callbackService);
            factory = new DuplexChannelFactory<IDuplexChannelService>(context, binding, new EndpointAddress(Endpoints.Tcp_NoSecurity_DuplexCallback_Address));
            IDuplexChannelService serviceProxy = factory.CreateChannel();

            serviceProxy.Ping(guid);
            Guid returnedGuid = callbackService.CallbackGuid;

            if (guid != returnedGuid)
            {
                errorBuilder.AppendLine(String.Format("The sent GUID does not match the returned GUID. Sent: {0} Received: {1}", guid, returnedGuid));
            }

            factory.Close();
        }
        catch (Exception ex)
        {
            errorBuilder.AppendLine(String.Format("Unexpected exception was caught: {0}", ex.ToString()));
            for (Exception innerException = ex.InnerException; innerException != null; innerException = innerException.InnerException)
            {
                errorBuilder.AppendLine(String.Format("Inner exception: {0}", innerException.ToString()));
            }
        }
        finally
        {
            // Abort rather than Close if the factory faulted, so the test does not hang.
            if (factory != null && factory.State != CommunicationState.Closed)
            {
                factory.Abort();
            }
        }

        if (errorBuilder.Length != 0)
        {
            Assert.True(errorBuilder.Length == 0, string.Format("Test Scenario: ServiceContract_TypedProxy_DuplexCallback FAILED with the following errors: {0}", errorBuilder));
        }
    }

    // Client-side implementation of the duplex callback contract; captures the GUID
    // the service sends back so the test thread can observe it.
    public class DuplexChannelServiceCallback : IDuplexChannelCallback
    {
        private TaskCompletionSource<Guid> _tcs;

        public DuplexChannelServiceCallback()
        {
            _tcs = new TaskCompletionSource<Guid>();
        }

        // Blocks up to maxTestWaitTime for the service callback, then returns the GUID it delivered.
        public Guid CallbackGuid
        {
            get
            {
                if (_tcs.Task.Wait(maxTestWaitTime))
                {
                    return _tcs.Task.Result;
                }
                throw new TimeoutException(string.Format("Not completed within the alloted time of {0}", maxTestWaitTime));
            }
        }

        public void OnPingCallback(Guid guid)
        {
            _tcs.SetResult(guid);
        }
    }

    [ServiceContract(CallbackContract = typeof(IDuplexChannelCallback))]
    public interface IDuplexChannelService
    {
        [OperationContract(IsOneWay = true)]
        void Ping(Guid guid);
    }

    public interface IDuplexChannelCallback
    {
        [OperationContract(IsOneWay = true)]
        void OnPingCallback(Guid guid);
    }

    // Shared driver: verifies a typed proxy can call a service operation asynchronously
    // using Begin/End with an AsyncCallback delegate supplied to Begin.
    private static void ServiceContract_TypedProxy_AsyncBeginEnd_Call(Binding binding, string endpoint, string testName)
    {
        // Verifies a typed proxy can call a service operation asynchronously using Begin/End
        StringBuilder errorBuilder = new StringBuilder();
        try
        {
            ChannelFactory<IWcfServiceBeginEndGenerated> factory = new ChannelFactory<IWcfServiceBeginEndGenerated>(binding, new EndpointAddress(endpoint));
            IWcfServiceBeginEndGenerated serviceProxy = factory.CreateChannel();
            string result = null;
            ManualResetEvent waitEvent = new ManualResetEvent(false);

            // The callback is optional with this Begin call, but we want to test that it works.
            // This delegate should execute when the call has completed, and that is how it gets the result of the call.
            AsyncCallback callback = (iar) =>
            {
                result = serviceProxy.EndEcho(iar);
                waitEvent.Set();
            };

            IAsyncResult ar = serviceProxy.BeginEcho("Hello", callback, null);

            // This test requires the callback to be called.
            // An actual timeout should call the callback, but we still set
            // a maximum wait time in case that does not happen.
            bool success = waitEvent.WaitOne(ScenarioTestHelpers.TestTimeout);
            if (!success)
            {
                errorBuilder.AppendLine("AsyncCallback was not called.");
            }

            if (!string.Equals(result, "Hello"))
            {
                errorBuilder.AppendLine(String.Format("Expected response from Service: {0} Actual was: {1}", "Hello", result));
            }

            factory.Close();
        }
        catch (Exception ex)
        {
            errorBuilder.AppendLine(String.Format("Unexpected exception was caught: {0}", ex.ToString()));
        }

        Assert.True(errorBuilder.Length == 0, string.Format("Test Scenario: {0} FAILED with the following errors: {1}", testName, errorBuilder));
    }

    // Shared driver: same Begin/End scenario but with no AsyncCallback; the caller
    // waits on the IAsyncResult's wait handle instead.
    private static void ServiceContract_TypedProxy_AsyncBeginEnd_Call_WithNoCallback(Binding binding, string endpoint, string testName)
    {
        // This test verifies a typed proxy can call a service operation asynchronously using Begin/End
        StringBuilder errorBuilder = new StringBuilder();
        try
        {
            ChannelFactory<IWcfServiceBeginEndGenerated> factory = new ChannelFactory<IWcfServiceBeginEndGenerated>(binding, new EndpointAddress(endpoint));
            IWcfServiceBeginEndGenerated serviceProxy = factory.CreateChannel();
            string result = null;

            IAsyncResult ar = serviceProxy.BeginEcho("Hello", null, null);

            // An actual timeout should complete the ar, but we still set
            // a maximum wait time in case that does not happen.
            bool success = ar.AsyncWaitHandle.WaitOne(ScenarioTestHelpers.TestTimeout);
            if (success)
            {
                result = serviceProxy.EndEcho(ar);
            }
            else
            {
                errorBuilder.AppendLine("AsyncCallback was not called.");
            }

            if (!string.Equals(result, "Hello"))
            {
                errorBuilder.AppendLine(String.Format("Expected response from Service: {0} Actual was: {1}", "Hello", result));
            }

            factory.Close();
        }
        catch (Exception ex)
        {
            errorBuilder.AppendLine(String.Format("Unexpected exception was caught: {0}", ex.ToString()));
        }

        Assert.True(errorBuilder.Length == 0, string.Format("Test Scenario: {0} FAILED with the following errors: {1}", testName, errorBuilder));
    }

    // Shared driver: verifies a typed proxy can call a service operation asynchronously
    // using the Task-based async pattern.
    private static void ServiceContract_TypedProxy_AsyncTask_Call(Binding binding, string endpoint, string testName)
    {
        // This test verifies a typed proxy can call a service operation asynchronously using Task<string>
        StringBuilder errorBuilder = new StringBuilder();
        try
        {
            ChannelFactory<IWcfServiceGenerated> factory = new ChannelFactory<IWcfServiceGenerated>(binding, new EndpointAddress(endpoint));
            IWcfServiceGenerated serviceProxy = factory.CreateChannel();

            Task<string> task = serviceProxy.EchoAsync("Hello");
            string result = task.Result;
            if (!string.Equals(result, "Hello"))
            {
                errorBuilder.AppendLine(String.Format("Expected response from Service: {0} Actual was: {1}", "Hello", result));
            }

            factory.Close();
        }
        catch (Exception ex)
        {
            errorBuilder.AppendLine(String.Format("Unexpected exception was caught: {0}", ex.ToString()));
        }

        // Fix: report the caller-supplied testName in the failure message, consistent with the
        // other two shared drivers; the original hard-coded "TypedProxyAsyncTaskCall" and
        // silently ignored the testName parameter.
        Assert.True(errorBuilder.Length == 0, string.Format("Test Scenario: {0} FAILED with the following errors: {1}", testName, errorBuilder));
    }
}
// ******************************************************************************************************** // Product Name: DotSpatial.Positioning.dll // Description: A library for managing GPS connections. // ******************************************************************************************************** // // The Original Code is from http://geoframework.codeplex.com/ version 2.0 // // The Initial Developer of this original code is Jon Pearson. Submitted Oct. 21, 2010 by Ben Tombs (tidyup) // // Contributor(s): (Open source contributors should list themselves and their modifications here). // ------------------------------------------------------------------------------------------------------- // | Developer | Date | Comments // |--------------------------|------------|-------------------------------------------------------------- // | Tidyup (Ben Tombs) | 10/21/2010 | Original copy submitted from modified GeoFrameworks 2.0 // | Shade1974 (Ted Dunsford) | 10/21/2010 | Added file headers reviewed formatting with resharper. // ******************************************************************************************************** using System; using System.Collections.Generic; using System.Linq; using System.Xml; using System.Xml.Schema; using System.Xml.Serialization; #if !PocketPC || DesignTime using System.ComponentModel; #endif namespace DotSpatial.Positioning { #if !PocketPC || DesignTime /// <summary> /// Represents a flattened sphere which approximates Earth's size and shape. /// </summary> /// <remarks><para>Mathematics involving points on Earth's surface are difficult to perform with /// precision because the Earth's surface is rugged. In order to maximize precision, /// scientists developed "ellipsoids," smooth ellipsoidal shapes (known as "oblate /// spheriods" or flattened spheres) which attempt to approximate Earth's exact shape. 
/// Like datums, ellipsoids have been subject to frequent revisions thanks to advances /// in technology, yet countries cannot quickly abandon outdated ellipsoids because so /// much infrastructure is built upon them. As a result, multiple ellipsoids are /// tracked and utilized when converting coordinates from one locale to another. Today, /// there are approximately thirty known ellipsoids upon which an estimated 120 /// individual coordinate systems are built.</para> /// <para>This class is typically used during coordinate conversion to convert from one /// interpretation of Earth's shape to another. All known worldwide ellipsoids such as /// WGS84 and Clarke 1880 are provided as static (Shared in Visual Basic) fields. Most /// developers will not have to use this class until coordinates must be plotted on a /// map. For most purposes, using the default ellipsoid of WGS84 is sufficient.</para> /// <para>Instances of this class are guaranteed to be thread-safe because the class is /// immutable (its properties can only be set via constructors).</para></remarks> [TypeConverter(typeof(ExpandableObjectConverter))] #endif public sealed class Ellipsoid : IEquatable<Ellipsoid>, IXmlSerializable { /// <summary> /// /// </summary> private int _epsgNumber = 32767; /// <summary> /// /// </summary> private Distance _equatorialRadius; /// <summary> /// /// </summary> private double _equatorialRadiusMeters; // Cached for frequent use during calculations /// <summary> /// /// </summary> private Distance _polarRadius; /// <summary> /// /// </summary> private double _polarRadiusMeters; // Cached for frequent use during calculations /// <summary> /// /// </summary> private string _name; /// <summary> /// /// </summary> private double _flattening; /// <summary> /// /// </summary> private double _inverseFlattening; /// <summary> /// /// </summary> private double _eccentricity; /// <summary> /// /// </summary> private double _eccentricitySquared; /// <summary> /// /// </summary> 
private static readonly List<Ellipsoid> _ellipsoids = new List<Ellipsoid>(32); /// <summary> /// /// </summary> private static readonly List<Ellipsoid> _epsgEllipsoids = new List<Ellipsoid>(32); #region Fields #region EPSG Ellipsoids /// <summary> /// Represents the Airy ellipsoid of 1830. /// </summary> public static readonly Ellipsoid Airy1830 = new Ellipsoid(7001, 6377563.396, 299.3249646, 0, "Airy 1830"); /// <summary> /// Represents the Modified Airy ellipsoid. /// </summary> public static readonly Ellipsoid AiryModified1949 = new Ellipsoid(7002, 6377340.189, 299.3249646, 0, "Airy Modified 1849"); /// <summary> /// Represents the Australian National ellipsoid of 1965. /// </summary> public static readonly Ellipsoid AustralianNational1965 = new Ellipsoid(7003, 6378160, 298.25, 0, "Australian National Spheroid"); /// <summary> /// Represents the Bessel ellipsoid of 1841. /// </summary> public static readonly Ellipsoid Bessel1841 = new Ellipsoid(7004, 6377397.155, 299.1528128, 0, "Bessel 1841"); /// <summary> /// Represents the Bessel Modified ellipsoid of 1841. /// </summary> public static readonly Ellipsoid Bessel1841Mod = new Ellipsoid(7005, 6377492.018, 299.1528128, 0, "Bessel Modified"); /// <summary> /// Represents the Bessel (Namibia) ellipsoid of 1841. /// </summary> public static readonly Ellipsoid Bessel1841Namibia = new Ellipsoid(7006, 6377483.865, 299.1528128, 0, "Bessel Namibia"); /// <summary> /// Represents the Clarke ellipsoid of 1858. /// </summary> public static readonly Ellipsoid Clarke1858 = new Ellipsoid(7007, 20926348 * 0.3047972651151, 0, 20855233 * 0.3047972651151, "Clarke 1858"); /// <summary> /// Represents the Clarke ellipsoid of 1866. /// </summary> public static readonly Ellipsoid Clarke1866 = new Ellipsoid(7008, 6378206.4, 0, 6356583.8, "Clarke 1866"); /// <summary> /// Represents the Clarke (Michigan) ellipsoid of 1866. 
/// </summary> public static readonly Ellipsoid Clarke1866Michigan = new Ellipsoid(7009, 20926631.53 * 0.3048006096012, 0, 20855688.67 * 0.3048006096012, "Clarke 1866 Michigan"); /// <summary> /// Represents the Clarke (Benoit) ellipsoid of 1880. /// </summary> public static readonly Ellipsoid Clarke1880Benoit = new Ellipsoid(7010, 6378300.789, 0, 6356566.435, "Clarke 1880 (Benoit)"); /// <summary> /// Represents the Clarke (IGN) ellipsoid of 1880. /// </summary> public static readonly Ellipsoid Clarke1880IGN = new Ellipsoid(7011, 6378249.2, 0, 6356515, "Clarke 1880 (IGN)"); /// <summary> /// Represents the Clarke (RGS) ellipsoid of 1880. /// </summary> public static readonly Ellipsoid Clarke1880RGS = new Ellipsoid(7012, 6378249.145, 293.465, 0, "Clarke 1880 (RGS)"); /// <summary> /// Represents the Clarke (Arc) ellipsoid of 1880. /// </summary> public static readonly Ellipsoid Clarke1880Arc = new Ellipsoid(7013, 6378249.145, 293.4663077, 0, "Clarke 1880 (Arc)"); /// <summary> /// Represents the Clarke (SGA 1822) ellipsoid of 1880. /// </summary> public static readonly Ellipsoid Clarke1880SGA = new Ellipsoid(7014, 6378249.2, 293.46598, 0, "Clarke 1880 (SGA 1922)"); /// <summary> /// Represents the Everest (1937 Adjustment) ellipsoid of 1830. /// </summary> public static readonly Ellipsoid Everest1937 = new Ellipsoid(7015, 6377276.345, 300.8017, 0, "Everest 1830 (1937 Adjustment)"); /// <summary> /// Represents the Everest (1967 Definition) ellipsoid of 1830. /// </summary> public static readonly Ellipsoid Everest1967 = new Ellipsoid(7016, 6377298.556, 300.8017, 0, "Everest 1830 (1967 Definition)"); //No 7017 in EPSG /// <summary> /// Represents the Everest (Modified 1948) ellipsoid of 1880. /// </summary> public static readonly Ellipsoid Everest1830Modified = new Ellipsoid(7018, 6377304.063, 300.8017, 0, "Everest 1830 Modified"); /// <summary> /// Represents the Geodetic Reference System ellipsoid of 1980. 
/// </summary>
public static readonly Ellipsoid Grs80 = new Ellipsoid(7019, 6378137, 298.2572221, 0, "GRS 1980");

/// <summary>
/// Represents the Helmert ellipsoid of 1906.
/// </summary>
public static readonly Ellipsoid Helmert1906 = new Ellipsoid(7020, 6378200, 298.3, 0, "Helmert 1906");

/// <summary>
/// Represents the Indonesian ellipsoid of 1974.
/// </summary>
public static readonly Ellipsoid Indonesian1974 = new Ellipsoid(7021, 6378160, 298.247, 0, "Indonesian National Spheroid");

/// <summary>
/// Represents the International ellipsoid of 1909 (1924 alias).
/// </summary>
public static readonly Ellipsoid International1909 = new Ellipsoid(7022, 6378388, 297, 0, "International 1924");

/// <summary>
/// Represents the International ellipsoid of 1924.
/// </summary>
public static readonly Ellipsoid International1924 = new Ellipsoid(7022, 6378388, 297, 0, "International 1924");

// No 7023 in EPSG

/// <summary>
/// Represents the Krassovsky ellipsoid of 1940.
/// </summary>
public static readonly Ellipsoid Krassovsky1940 = new Ellipsoid(7024, 6378245, 298.3, 0, "Krassowsky 1940");

/// <summary>
/// Represents the Naval Weapons Lab ellipsoid of 1965.
/// </summary>
public static readonly Ellipsoid Nwl9D = new Ellipsoid(7025, 6378145, 298.25, 0, "NWL 9D");

// No 7026 in EPSG

/// <summary>
/// Represents the Plessis ellipsoid of 1817.
/// </summary>
public static readonly Ellipsoid Plessis1817 = new Ellipsoid(7027, 6376523, 308.64, 0, "Plessis 1817");

/// <summary>
/// Represents the Struve ellipsoid of 1860.
/// </summary>
public static readonly Ellipsoid Struve1860 = new Ellipsoid(7028, 6378298.3, 294.73, 0, "Struve 1860");

/// <summary>
/// Represents the War Office ellipsoid.
/// </summary>
public static readonly Ellipsoid WarOffice = new Ellipsoid(7029, 6378300, 296, 0, "War Office");

/// <summary>
/// Represents the World Geodetic System ellipsoid of 1984.
/// </summary>
public static readonly Ellipsoid Wgs1984 = new Ellipsoid(7030, 6378137, 298.2572236, 0, "WGS 84");

/// <summary>
/// Represents the GEM 10C Gravity Potential Model ellipsoid.
/// </summary>
public static readonly Ellipsoid Gem10C = new Ellipsoid(7031, 6378137, 298.2572236, 0, "GEM 10C");

/// <summary>
/// Represents the OSU86 gravity potential (geoidal) model ellipsoid.
/// </summary>
public static readonly Ellipsoid Osu86F = new Ellipsoid(7032, 6378136.2, 298.2572236, 0, "OSU86F");

/// <summary>
/// Represents the OSU91 gravity potential (geoidal) model ellipsoid.
/// </summary>
public static readonly Ellipsoid Osu91A = new Ellipsoid(7033, 6378136.3, 298.2572236, 0, "OSU91A");

/// <summary>
/// Represents the Clarke ellipsoid of 1880.
/// </summary>
// Axes are published in Clarke's feet; the factor converts them to meters.
public static readonly Ellipsoid Clarke1880 = new Ellipsoid(7034, 20926202 * 0.3047972651151, 293.465, 20854895 * 0.3047972651151, "Clarke 1880");

/// <summary>
/// Represents the Authalic Sphere (r=6371000).
/// </summary>
public static readonly Ellipsoid AuthalicSphere = new Ellipsoid(7035, 6371000, 0, 6371000, "Authalic Sphere");

/// <summary>
/// Represents the Geodetic Reference System ellipsoid of 1967.
/// </summary>
public static readonly Ellipsoid Grs67 = new Ellipsoid(7036, 6378160, 298.2471674, 0, "GRS 1967");

// No 7037 - 7040 in EPSG

/// <summary>
/// Represents the Average Terrestrial System ellipsoid of 1977.
/// </summary>
public static readonly Ellipsoid Ats1977 = new Ellipsoid(7041, 6378135, 298.257, 0, "Average Terrestrial System 1977");

/// <summary>
/// Represents the Everest (1830 Definition) ellipsoid.
/// </summary>
// Axes are published in Indian feet; the factor converts them to meters.
public static readonly Ellipsoid Everest1830 = new Ellipsoid(7042, 20922931.8 * 0.3047995102481, 300.8017, 20853374.58 * 0.3047995102481, "Everest (1830 Definition)");

/// <summary>
/// Represents the World Geodetic System ellipsoid of 1972.
/// </summary>
public static readonly Ellipsoid Wgs1972 = new Ellipsoid(7043, 6378135, 298.26, 0, "WGS 72");

/// <summary>
/// Represents the Everest (1962 Definition) ellipsoid of 1830.
/// </summary>
public static readonly Ellipsoid Everest1962 = new Ellipsoid(7044, 6377301.243, 300.8017255, 0, "Everest 1830 (1962 Definition)");

/// <summary>
/// Represents the Everest (1975 Definition) ellipsoid of 1830.
/// </summary>
public static readonly Ellipsoid Everest1975 = new Ellipsoid(7045, 6377299.151, 300.8017255, 0, "Everest 1830 (1975 Definition)");

/// <summary>
/// Represents the Bessel (Japan) ellipsoid of 1841.
/// </summary>
public static readonly Ellipsoid Bessel1841Japan = new Ellipsoid(7046, 6377397.155, 299.1528128, 0, "Bessel Namibia (GLM)");

// 7047 deprecated in EPSG

/// <summary>
/// Represents the GRS 1980 Authalic Sphere (r=6371007).
/// </summary>
public static readonly Ellipsoid Grs1980AuthalicSphere = new Ellipsoid(7048, 6371007, 0, 6371007, "GRS 1980 Authalic Sphere");

/// <summary>
/// Represents the Xian ellipsoid of 1980.
/// </summary>
public static readonly Ellipsoid Xian1980 = new Ellipsoid(7049, 6378140, 298.257, 0, "Xian 1980");

/// <summary>
/// Represents the IAU ellipsoid of 1976.
/// </summary>
public static readonly Ellipsoid Iau76 = Xian1980;

/// <summary>
/// Represents the Geodetic Reference System (SAD69) ellipsoid of 1967.
/// </summary>
public static readonly Ellipsoid Grs67Sad69 = new Ellipsoid(7050, 6378160, 298.25, 0, "GRS 1967 (SAD69)");

/// <summary>
/// Represents the Danish ellipsoid of 1876.
/// </summary>
public static readonly Ellipsoid Danish1876 = new Ellipsoid(7051, 6377019.27, 300, 0, "Danish 1876");

/// <summary>
/// Represents the Andrae (Danish 1876 alternate) ellipsoid of 1876.
/// </summary>
public static readonly Ellipsoid Andrae = Danish1876;

/// <summary>
/// Represents the Common Sphere (Clarke 1866 Authalic Sphere alias).
/// </summary>
public static readonly Ellipsoid NormalSphere = new Ellipsoid(7052, 6370997, 0, 6370997, "Clarke 1866 Authalic Sphere");

/// <summary>
/// Represents the Clarke 1866 Authalic Sphere (r=6370997).
/// </summary>
public static readonly Ellipsoid Clarke1866AuthalicSphere = NormalSphere;

/// <summary>
/// Represents the Hough ellipsoid of 1960.
/// </summary>
public static readonly Ellipsoid Hough1960 = new Ellipsoid(7053, 6378270, 297, 0, "Hough 1960");

/// <summary>
/// Represents the PZ90 ellipsoid.
/// </summary>
public static readonly Ellipsoid Pz1990 = new Ellipsoid(7054, 6378136, 298.2578393, 0, "PZ-90");

/// <summary>
/// Represents the Clarke (international foot) ellipsoid of 1880.
/// </summary>
public static readonly Ellipsoid Clarke1880InternationalFoot = new Ellipsoid(7055, 20926202 * 0.3048, 0, 20854895 * 0.3048, "Clarke 1880 (international foot)");

/// <summary>
/// Represents the Everest (RSO 1969) ellipsoid of 1880.
/// </summary>
public static readonly Ellipsoid Everest1880Rso = new Ellipsoid(7056, 6377295.664, 300.8017, 0, "Everest 1830 (RSO 1969)");

/// <summary>
/// Represents the International 1924 Authalic Sphere.
/// </summary>
public static readonly Ellipsoid International1924AuthalicSphere = new Ellipsoid(7057, 6371228, 0, 6371228, "International 1924 Authalic Sphere");

/// <summary>
/// Represents the Hughes ellipsoid of 1980.
/// </summary>
public static readonly Ellipsoid Hughes1980 = new Ellipsoid(7058, 6378273, 0, 6356889.449, "Hughes 1980");

#endregion EPSG Ellipsoids

#region Non-EPSG Ellipsoids

/// <summary>
/// Represents the Applied Physics ellipsoid of 1965.
/// </summary>
public static readonly Ellipsoid Apl49 = new Ellipsoid("Appl. Physics. 1965", new Distance(6378137.0, DistanceUnit.Meters), 298.25);

/// <summary>
/// Represents the Comm. des Poids et Mesures ellipsoid of 1799.
/// </summary>
public static readonly Ellipsoid Cpm = new Ellipsoid("Comm. des Poids et Mesures 1799", new Distance(6375738.7, DistanceUnit.Meters), 334.29);

/// <summary>
/// Represents the Delambre (Belgium) ellipsoid of 1810.
/// </summary>
public static readonly Ellipsoid Delmabre = new Ellipsoid("Delambre 1810 (Belgium)", new Distance(6376428, DistanceUnit.Meters), 311.5);

/// <summary>
/// Represents the Engelis ellipsoid of 1985.
/// </summary>
// NOTE(review): these values duplicate Delambre 1810 (Belgium) above; published
// Engelis 1985 values differ (a = 6378136.05, 1/f = 298.2566). Confirm before use.
public static readonly Ellipsoid Engelis = new Ellipsoid("Engelis 1985", new Distance(6376428, DistanceUnit.Meters), 311.5);

/// <summary>
/// Represents the Fisher ellipsoid of 1960.
/// </summary>
public static readonly Ellipsoid Fischer1960 = new Ellipsoid("Fisher (Mercury Datum) 1960", new Distance(6378166.0, DistanceUnit.Meters), new Distance(6356784.283666, DistanceUnit.Meters));

/// <summary>
/// Represents the Modified Fisher ellipsoid of 1960.
/// </summary>
public static readonly Ellipsoid ModifiedFischer1960 = new Ellipsoid("Modified Fisher 1960", new Distance(6378155.0, DistanceUnit.Meters), new Distance(6356773.3205, DistanceUnit.Meters));

/// <summary>
/// Represents the Fisher ellipsoid of 1968.
/// </summary>
public static readonly Ellipsoid Fischer1968 = new Ellipsoid("Fisher 1968", new Distance(6378150.0, DistanceUnit.Meters), new Distance(6356768.337303, DistanceUnit.Meters));

/// <summary>
/// Represents the New International ellipsoid of 1967.
/// </summary>
public static readonly Ellipsoid NewInternational1967 = new Ellipsoid("New International 1967", new Distance(6378157.5, DistanceUnit.Meters), new Distance(6356772.2, DistanceUnit.Meters));

/// <summary>
/// Represents the Kaula ellipsoid of 1961.
/// </summary>
public static readonly Ellipsoid Kaula = new Ellipsoid("Kaula 1961", new Distance(6378163.0, DistanceUnit.Meters), 298.24);

/// <summary>
/// Represents the Lerch ellipsoid of 1979.
/// </summary>
public static readonly Ellipsoid Lerch = new Ellipsoid("Lerch 1979", new Distance(6378139.0, DistanceUnit.Meters), 298.257);

/// <summary>
/// Represents the MERIT ellipsoid of 1983.
/// </summary>
public static readonly Ellipsoid Merit = new Ellipsoid("Merit 1983", new Distance(6378137.0, DistanceUnit.Meters), 298.257);

/// <summary>
/// Represents the Maupertius ellipsoid of 1738.
/// </summary>
// FIX: the semi-major axis was previously 639730.0 m (a dropped digit, ~10x too
// small for Earth); the accepted Maupertius 1738 value is a = 6397300 m, 1/f = 191.
public static readonly Ellipsoid Maupertius = new Ellipsoid("Maupertius 1738", new Distance(6397300.0, DistanceUnit.Meters), 191);

/// <summary>
/// Represents the Southeast Asia (Modified Fisher ellipsoid of 1960) ellipsoid.
/// </summary>
public static readonly Ellipsoid SoutheastAsia = new Ellipsoid("Southeast Asia", new Distance(6378155.0, DistanceUnit.Meters), new Distance(6356773.3205, DistanceUnit.Meters));

/// <summary>
/// Represents the SGS ellipsoid of 1985.
/// </summary>
public static readonly Ellipsoid Sgs1985 = new Ellipsoid("SGS 85", new Distance(6378136.0, DistanceUnit.Meters), new Distance(6356751.301569, DistanceUnit.Meters));

/// <summary>
/// Represents the South American ellipsoid of 1969.
/// </summary>
public static readonly Ellipsoid SouthAmerican1969 = new Ellipsoid("South American 1969", new Distance(6378160.0, DistanceUnit.Meters), new Distance(6356774.719, DistanceUnit.Meters));

/// <summary>
/// Represents the Walbeck ellipsoid.
/// </summary>
public static readonly Ellipsoid Walbeck = new Ellipsoid("Walbeck", new Distance(6376896.0, DistanceUnit.Meters), new Distance(6355834.8467, DistanceUnit.Meters));

/// <summary>
/// Represents the World Geodetic System ellipsoid of 1960.
/// </summary>
public static readonly Ellipsoid Wgs1960 = new Ellipsoid("WGS 60", new Distance(6378165.0, DistanceUnit.Meters), new Distance(6356783.286959, DistanceUnit.Meters));

/// <summary>
/// Represents the World Geodetic System ellipsoid of 1966.
/// </summary>
public static readonly Ellipsoid Wgs1966 = new Ellipsoid("WGS 1966", new Distance(6378145.0, DistanceUnit.Meters), new Distance(6356759.769356, DistanceUnit.Meters));

#endregion Non-EPSG Ellipsoids

/* IMPORTANT: The Default field must be after Ellipsoid.Wgs1984 is initialized, otherwise it will be null! */

/// <summary>
/// Represents the default ellipsoid, WGS1984.
/// </summary>
public static readonly Ellipsoid Default = Wgs1984;

#endregion Fields

#region Constructors

// ReSharper disable UnusedMember.Global
// Needed for xml serialization\deserialization
internal Ellipsoid()
{ }

// ReSharper restore UnusedMember.Global

/// <summary>
/// Creates a new instance with the specified name, equatorial radius and polar radius.
/// </summary>
/// <param name="name">The descriptive name of the ellipsoid.</param>
/// <param name="equatorialRadius">The equatorial radius (semi-major axis).</param>
/// <param name="polarRadius">The polar radius (semi-minor axis).</param>
/// <remarks>This constructor allows user-defined ellipsoids to be created for specialized applications.</remarks>
public Ellipsoid(string name, Distance equatorialRadius, Distance polarRadius)
{
    _name = name;
    _equatorialRadius = equatorialRadius;
    _polarRadius = polarRadius;

    // Derive flattening, eccentricity and cached metric radii, then validate.
    Calculate();
    SanityCheck();

    // And add it to the list of user-defined ellipsoids.
    _ellipsoids.Add(this);
}

/// <summary>
/// Creates a new instance with the specified name, equatorial radius and inverse flattening.
/// </summary>
/// <param name="name">The descriptive name of the ellipsoid.</param>
/// <param name="equatorialRadius">The equatorial radius (semi-major axis).</param>
/// <param name="inverseFlattening">The inverse flattening (1/f); the polar radius is derived from it.</param>
public Ellipsoid(string name, Distance equatorialRadius, double inverseFlattening)
{
    _name = name;
    _equatorialRadius = equatorialRadius;
    _inverseFlattening = inverseFlattening;

    // Derive the polar radius and remaining shape parameters, then validate.
    Calculate();
    SanityCheck();

    // And add it to the list of user-defined ellipsoids.
    _ellipsoids.Add(this);
}

/// <summary>
/// Internal constructor used to build the static EPSG ellipsoid list.
/// </summary>
/// <param name="epsgNumber">The EPSG code identifying the ellipsoid.</param>
/// <param name="a">The semi-major (equatorial) axis, in meters.</param>
/// <param name="invf">The inverse flattening (1/f); may be 0 when <paramref name="b"/> is supplied.</param>
/// <param name="b">The semi-minor (polar) axis, in meters; may be 0 when <paramref name="invf"/> is supplied.</param>
/// <param name="name">The descriptive name of the ellipsoid.</param>
internal Ellipsoid(int epsgNumber, double a, double invf, double b, string name)
{
    _name = name;
    _epsgNumber = epsgNumber;
    _equatorialRadius = Distance.FromMeters(a);
    _polarRadius = Distance.FromMeters(b);
    _inverseFlattening = invf;

    Calculate();
    SanityCheck();

    // EPSG-defined ellipsoids live in their own list, separate from user-defined ones.
    _epsgEllipsoids.Add(this);
}

/// <summary>
/// Creates a new instance from the specified XML.
/// </summary>
/// <param name="reader">The reader positioned at a GML Ellipsoid element.</param>
public Ellipsoid(XmlReader reader)
{
    ReadXml(reader);
}

#endregion Constructors

#region Private Methods

/// <summary>
/// Validates the ellipsoid definition. Called from the constructors.
/// </summary>
/// <exception cref="ArgumentException">Thrown when the equatorial radius is empty and
/// neither an inverse flattening nor a polar radius is available.</exception>
private void SanityCheck()
{
    // NOTE(review): both halves of this condition require an *empty equatorial*
    // radius, so a zero polar radius combined with a zero inverse flattening
    // (but a valid equatorial radius) is NOT rejected — confirm this is intended.
    if ((_equatorialRadius.IsEmpty && _inverseFlattening == 0) || (_equatorialRadius.IsEmpty && _polarRadius.IsEmpty))
        // FIX: corrected the "allipsoid" typo in this user-facing message.
        throw new ArgumentException("The radii and inverse flattening of an ellipsoid cannot be zero. Please specify either the equatorial and polar radius, or the equatorial radius and the inverse flattening for this ellipsoid.");
}

/// <summary>
/// Calculates the common derived ellipsoid properties (polar radius, flattening,
/// eccentricity and cached metric radii). Called from the constructors.
/// </summary>
private void Calculate()
{
    double a = _equatorialRadius.ToMeters().Value;
    double b = _polarRadius.ToMeters().Value;
    double invf = _inverseFlattening;

    // Check the input. If a minor axis wasn't supplied, derive it from the
    // inverse flattening: b = a - a/invf (kept in its original, equivalent form).
    if (b == 0)
        b = -(((1.0 / invf) * a) - a);
    _polarRadius = Distance.FromMeters(b);

    _flattening = (_equatorialRadius.ToMeters().Value - _polarRadius.ToMeters().Value) / _equatorialRadius.ToMeters().Value;
    _inverseFlattening = 1.0 / _flattening;

    // NOTE(review): the eccentricity uses .Value rather than .ToMeters().Value,
    // which assumes both radii are expressed in the same unit — confirm for
    // user-defined ellipsoids created with non-meter distances.
    _eccentricity = Math.Sqrt((Math.Pow(_equatorialRadius.Value, 2) - Math.Pow(_polarRadius.Value, 2)) / Math.Pow(_equatorialRadius.Value, 2));
    _eccentricitySquared = Math.Pow(Eccentricity, 2);

    // This is used very frequently by calculations. Since ellipsoids do not change, there's
    // no need to call .ToMeters() thousands of times.
    _equatorialRadiusMeters = _equatorialRadius.ToMeters().Value;
    _polarRadiusMeters = _polarRadius.ToMeters().Value;
}

#endregion Private Methods

#region Overrides

/// <summary>
/// Determines whether the specified <see cref="System.Object"/> is equal to this instance.
/// </summary>
/// <param name="obj">The <see cref="T:System.Object"/> to compare with the current <see cref="T:System.Object"/>.</param>
/// <returns><c>true</c> if the specified <see cref="System.Object"/> is equal to this instance; otherwise, <c>false</c>.</returns>
public override bool Equals(object obj)
{
    if (obj is Ellipsoid)
        return Equals((Ellipsoid)obj);
    return false;
}

/// <summary>
/// Returns a hash code for this instance.
/// </summary>
/// <returns>A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table.</returns>
public override int GetHashCode()
{
    // Consistent with Equals(Ellipsoid): only the two radii participate.
    return _equatorialRadius.GetHashCode() ^ _polarRadius.GetHashCode();
}

/// <summary>
/// Returns a <see cref="System.String"/> that represents this instance.
/// </summary>
/// <returns>The descriptive name of the ellipsoid.</returns>
public override string ToString()
{
    return _name;
}

#endregion Overrides

#region Public Properties

/// <summary>
/// European Petroleum Survey Group number for this ellipsoid. The EPSG standards are now maintained by OGP
/// (International Association of Oil and Gas Producers).
/// </summary>
public int EpsgNumber
{
    get { return _epsgNumber; }
}

/// <summary>
/// Indicates the descriptive name of the ellipsoid.
/// </summary>
/// <value>A <strong>String</strong> containing the name of the ellipsoid.</value>
/// <remarks>This property is typically used to display ellipsoid information on a user interface.</remarks>
public string Name
{
    get { return _name; }
}

/// <summary>
/// Represents the distance from Earth's center to the equator.
/// </summary>
/// <value>A <strong>Distance</strong> object.</value>
/// <seealso cref="PolarRadius">PolarRadius Property</seealso>
/// <remarks>This property defines the radius of the Earth from its center to the equator.
/// This property is used in conjunction with the <strong>PolarRadius</strong> property
/// to define an ellipsoidal shape. This property returns the same value as the
/// <strong>SemiMajorAxis</strong> property.</remarks>
public Distance EquatorialRadius
{
    get { return _equatorialRadius; }
}

/// <summary>
/// Represents the distance from Earth's center to the North or South pole.
/// </summary>
/// <value>A <strong>Distance</strong> object.</value>
/// <seealso cref="EquatorialRadius">EquatorialRadius Property</seealso>
/// <remarks>This property defines the radius of the Earth from its center to the North or South pole.
/// This property is used in conjunction with the <strong>EquatorialRadius</strong>
/// property to define an ellipsoidal shape. This property returns the same value as
/// the <strong>SemiMinorAxis</strong> property.</remarks>
public Distance PolarRadius
{
    get { return _polarRadius; }
}

/// <summary>
/// Represents the distance from Earth's center to the equator.
/// </summary>
/// <value>A <strong>Distance</strong> containing Earth's equatorial radius.</value>
/// <seealso cref="EquatorialRadius">EquatorialRadius Property</seealso>
/// <remarks>This property defines the radius of the Earth from its center to the equator.
/// This property is used in conjunction with the <strong>SemiMinorAxis</strong>
/// property to define an ellipsoidal shape. This property returns the same value as
/// the <strong>EquatorialRadius</strong> property.</remarks>
public Distance SemiMajorAxis
{
    get { return _equatorialRadius; }
}

/// <summary>
/// Represents the distance from Earth's center to the North or South pole.
/// </summary>
/// <value>A <strong>Distance</strong> containing Earth's polar radius.</value>
/// <seealso cref="EquatorialRadius">EquatorialRadius Property</seealso>
/// <remarks>This property defines the radius of the Earth from its center to the North or South pole.
/// This property is used in conjunction with the <strong>SemiMajorAxis</strong>
/// property to define an ellipsoidal shape. This property returns the same value as
/// the <strong>PolarRadius</strong> property.</remarks>
public Distance SemiMinorAxis
{
    get { return _polarRadius; }
}

/// <summary>
/// Indicates if the ellipsoid is describing a perfect sphere.
/// </summary>
/// <remarks>Mathematical formulas such as map projection and coordinate conversion can be
/// optimized if the ellipsoid they are working with is spherical. For more precise
/// results, however, spherical ellipsoids should not be used. This property, when used
/// correctly, can improve performance for mathematics when coordinate precision is less of
/// a concern, such as viewing a map from a high altitude.</remarks>
public bool IsSpherical
{
    // A sphere is simply an ellipsoid whose two radii are equal.
    get { return _equatorialRadius.Equals(_polarRadius); }
}

/// <summary>
/// Indicates the inverse of the shape of an ellipsoid relative to a sphere.
/// </summary>
/// <value>A <strong>Double</strong> containing the ellipsoid's inverse flattening.</value>
/// <seealso cref="EquatorialRadius">EquatorialRadius Property</seealso>
/// <remarks>This property is used frequently in equations. Inverse flattening is defined as
/// one divided by the <strong>Flattening</strong> property.</remarks>
public double InverseFlattening
{
    get { return _inverseFlattening; }
}

/// <summary>
/// Indicates the shape of the ellipsoid relative to a sphere.
/// </summary>
/// <value>A <strong>Double</strong> containing the ellipsoid's flattening.</value>
/// <seealso cref="EquatorialRadius">EquatorialRadius Property</seealso>
/// <remarks>This property compares the equatorial radius with the polar radius to measure the
/// amount that the ellipsoid is "squished" vertically.</remarks>
public double Flattening
{
    get { return _flattening; }
}

/// <summary>
/// Returns the eccentricity (degree of elongation) of the ellipsoid.
/// </summary>
/// <value>A <strong>Double</strong> measuring how elongated the ellipsoid is.</value>
/// <remarks>The eccentricity is a positive number less than 1, or 0 in the case of a circle.
/// The greater the eccentricity is, the larger the ratio of the equatorial radius to the
/// polar radius is, and therefore the more elongated the ellipse is.</remarks>
public double Eccentricity
{
    get { return _eccentricity; }
}

/// <summary>
/// Returns the square of the eccentricity.
/// </summary>
/// <remarks>This property returns the value of the <strong>Eccentricity</strong> property,
/// squared. It is used frequently during coordinate conversion formulas.</remarks>
public double EccentricitySquared
{
    get { return _eccentricitySquared; }
}

#endregion Public Properties

#region Internal Propertis

/// <summary>
/// Gets the polar radius in meters (cached by Calculate to avoid repeated unit conversion).
/// </summary>
internal double PolarRadiusMeters
{
    get { return _polarRadiusMeters; }
}

/// <summary>
/// Gets the equatorial radius in meters (cached by Calculate to avoid repeated unit conversion).
/// </summary>
internal double EquatorialRadiusMeters
{
    get { return _equatorialRadiusMeters; }
}

/// <summary>
/// Gets the semi-major axis in meters; alias for <see cref="EquatorialRadiusMeters"/>.
/// </summary>
internal double SemiMajorAxisMeters
{
    get { return _equatorialRadiusMeters; }
}

/// <summary>
/// Gets the semi-minor axis in meters; alias for <see cref="PolarRadiusMeters"/>.
/// </summary>
internal double SemiMinorMeters
{
    get { return _polarRadiusMeters; }
}

#endregion Internal Propertis

#region Static Methods

/// <summary>
/// Returns an Ellipsoid object matching the specified name.
/// </summary>
/// <param name="name">A <strong>String</strong> describing the name of an existing Ellipsoid.</param>
/// <returns>A <strong>Ellipsoid</strong> object matching the specified string, or null if no Ellipsoid was found.</returns>
public static Ellipsoid FromName(string name)
{
    // User-defined ellipsoids take precedence over the built-in EPSG set.
    Ellipsoid match = _ellipsoids.FirstOrDefault(candidate => candidate.Name == name);
    if (match != null)
        return match;

    // Fall back to the EPSG-defined ellipsoids.
    return _epsgEllipsoids.FirstOrDefault(candidate => candidate.Name == name);
}

/// <summary>
/// Returns the ellipsoid corresponding to the EPSG code.
/// </summary>
/// <param name="epsgNumber">The epsg number.</param>
/// <returns>The matching <strong>Ellipsoid</strong>, or null when the code is unknown.</returns>
public static Ellipsoid FromEpsgNumber(int epsgNumber)
{
    // Only EPSG-defined ellipsoids carry a code; user-defined ones are not searched.
    return _epsgEllipsoids.FirstOrDefault(candidate => candidate.EpsgNumber == epsgNumber);
}

#endregion Static Methods

#region IEquatable<Ellipsoid> Members

/// <summary>
/// Returns whether the current ellipsoid has the same value as the specified ellipsoid.
/// </summary>
/// <param name="other">An <strong>Ellipsoid</strong> object to compare against.</param>
/// <returns>A <strong>Boolean</strong>, <strong>True</strong> if the equatorial radius and polar radius
/// of both ellipsoids are equal. When both radii are equal, all other calculated properties will also
/// be equal. The name of the ellipsoid is not compared.</returns>
public bool Equals(Ellipsoid other)
{
    if (other == null)
        return false;

    // Two radii fully determine the shape; derived values follow from them.
    bool sameEquatorial = other.EquatorialRadius.Equals(_equatorialRadius);
    bool samePolar = other.PolarRadius.Equals(_polarRadius);
    return sameEquatorial && samePolar;
}

/// <summary>
/// Returns whether the current ellipsoid has the same value as the specified ellipsoid,
/// compared to the given number of decimal places.
/// </summary>
/// <param name="other">An <strong>Ellipsoid</strong> object to compare against.</param>
/// <param name="decimals">An <strong>integer</strong> specifies the precision for the comparison.</param>
/// <returns>A <strong>Boolean</strong>, <strong>True</strong> if the equatorial radius and polar radius
/// of both ellipsoids are equal to the requested precision. The name of the ellipsoid is not compared.</returns>
public bool Equals(Ellipsoid other, int decimals)
{
    if (other == null)
        return false;

    bool sameEquatorial = other.EquatorialRadius.Equals(_equatorialRadius, decimals);
    bool samePolar = other.PolarRadius.Equals(_polarRadius, decimals);
    return sameEquatorial && samePolar;
}

#endregion IEquatable<Ellipsoid> Members

#region IXmlSerializable Members

/// <summary>
/// This method is reserved and should not be used. When implementing the IXmlSerializable interface, you should return null (Nothing in Visual Basic) from this method, and instead, if specifying a custom schema is required, apply the <see cref="T:System.Xml.Serialization.XmlSchemaProviderAttribute"/> to the class.
/// </summary>
/// <returns>An <see cref="T:System.Xml.Schema.XmlSchema"/> that describes the XML representation of the object that is produced by the <see cref="M:System.Xml.Serialization.IXmlSerializable.WriteXml(System.Xml.XmlWriter)"/> method and consumed by the <see cref="M:System.Xml.Serialization.IXmlSerializable.ReadXml(System.Xml.XmlReader)"/> method.</returns>
XmlSchema IXmlSerializable.GetSchema()
{
    return null;
}

/// <summary>
/// Converts an object into its XML representation.
/// </summary>
/// <param name="writer">The <see cref="T:System.Xml.XmlWriter"/> stream to which the object is serialized.</param>
public void WriteXml(XmlWriter writer)
{
    /* The GML specification defines an ellipsoid as follows:
     *
     * <gml:Ellipsoid gml:id="ogrcrs44">
     *     <gml:ellipsoidName>GRS 1980</gml:ellipsoidName>
     *     <gml:ellipsoidID>
     *         <gml:name gml:codeSpace="urn:ogc:def:ellipsoid:EPSG::">7019</gml:name>
     *     </gml:ellipsoidID>
     *     <gml:semiMajorAxis gml:uom="urn:ogc:def:uom:EPSG::9001">6378137</gml:semiMajorAxis>
     *     <gml:secondDefiningParameter>
     *         <gml:inverseFlattening gml:uom="urn:ogc:def:uom:EPSG::9201">298.257222101</gml:inverseFlattening>
     *     </gml:secondDefiningParameter>
     * </gml:Ellipsoid>
     *
     */

    // <gml:Ellipsoid>
    writer.WriteStartElement(Xml.GML_XML_PREFIX, "Ellipsoid", Xml.GML_XML_NAMESPACE);

    // <gml:ellipsoidName>
    writer.WriteElementString(Xml.GML_XML_PREFIX, "ellipsoidName", Xml.GML_XML_NAMESPACE, _name);

    // <gml:ellipsoidID> — NOTE(review): the EPSG number is written as raw element
    // text rather than inside a <gml:name codeSpace="..."> child as in the sample
    // above; confirm consumers accept this shape.
    writer.WriteStartElement(Xml.GML_XML_PREFIX, "ellipsoidID", Xml.GML_XML_NAMESPACE);
    // FIX: all numeric content below is now formatted with the invariant culture.
    // The previous ToString("G17")/ToString() calls used the current culture, which
    // emits decimal commas in some locales and breaks round-tripping through
    // ReadElementContentAsDouble (XML numerics are culture-invariant).
    writer.WriteString(_epsgNumber.ToString(System.Globalization.CultureInfo.InvariantCulture));
    writer.WriteEndElement();

    // <gml:semiMajorAxis> ("G17" round-trips a double exactly)
    writer.WriteStartElement(Xml.GML_XML_PREFIX, "semiMajorAxis", Xml.GML_XML_NAMESPACE);
    writer.WriteString(SemiMajorAxis.ToMeters().Value.ToString("G17", System.Globalization.CultureInfo.InvariantCulture));
    writer.WriteEndElement();

    // <gml:semiMinorAxis>
    writer.WriteStartElement(Xml.GML_XML_PREFIX, "semiMinorAxis", Xml.GML_XML_NAMESPACE);
    writer.WriteString(SemiMinorAxis.ToMeters().Value.ToString("G17", System.Globalization.CultureInfo.InvariantCulture));
    writer.WriteEndElement();

    // <gml:secondDefiningParameter> wrapping <gml:inverseFlattening>
    writer.WriteStartElement(Xml.GML_XML_PREFIX, "secondDefiningParameter", Xml.GML_XML_NAMESPACE);
    writer.WriteElementString(Xml.GML_XML_PREFIX, "inverseFlattening", Xml.GML_XML_NAMESPACE, InverseFlattening.ToString("G17", System.Globalization.CultureInfo.InvariantCulture));
    writer.WriteEndElement();

    // </gml:Ellipsoid>
    writer.WriteEndElement();

    // NOTE(review): recalculating and validating while *writing* looks out of
    // place — these calls appear intended for deserialization, where derived
    // values otherwise remain uncomputed. Confirm before removing them.
    SanityCheck();
    Calculate();
}

/// <summary>
/// Generates an object from its XML representation.
/// </summary>
/// <param name="reader">The <see cref="T:System.Xml.XmlReader"/> stream from which the object is deserialized.</param>
public void ReadXml(XmlReader reader)
{
    // Read until we have an element
    while (!reader.EOF && reader.NodeType != XmlNodeType.Element)
        reader.Read();

    // If we're at EOF, exit
    if (reader.EOF)
        return;

    // Remember the current depth. We'll keep reading until we return to this depth
    int depth = reader.Depth;

    // Notify of the read
    OnReadXml(reader);

    // Have we returned to the original depth? If we're at a deeper depth,
    // keep reading.
    while (!reader.EOF && reader.Depth > depth)
    {
        // Is this an element? If not, keep reading deeper
        while (!reader.EOF && reader.Depth > depth && reader.NodeType != XmlNodeType.Element)
        {
            reader.Read();
        }

        // If this is an element, process it
        if (reader.NodeType == XmlNodeType.Element)
            OnReadXml(reader);
    }

    reader.Read();

    // FIX: derived values (flattening, eccentricity, cached metric radii) were
    // never computed after deserialization, leaving them at their defaults until
    // some other code happened to call Calculate(). Recompute and validate here
    // in the same order the constructors use.
    Calculate();
    SanityCheck();
}

/// <summary>
/// Handles a single element encountered during deserialization.
/// </summary>
/// <param name="reader">The reader positioned at the element to consume.</param>
private void OnReadXml(XmlReader reader)
{
    switch (reader.LocalName)
    {
        case "ellipsoidName":
            _name = reader.ReadElementContentAsString();
            break;
        case "ellipsoidID":
            _epsgNumber = reader.ReadElementContentAsInt();
            break;
        case "semiMajorAxis":
            // Axis values are serialized in meters (see WriteXml).
            _equatorialRadius = new Distance(reader.ReadElementContentAsDouble(), DistanceUnit.Meters);
            break;
        case "semiMinorAxis":
            _polarRadius = new Distance(reader.ReadElementContentAsDouble(), DistanceUnit.Meters);
            break;
        case "secondDefiningParameter":
            // Container element: descend into it to find inverseFlattening.
            reader.Read();
            break;
        case "inverseFlattening":
            _inverseFlattening = reader.ReadElementContentAsDouble();
            break;
        default:
            // Unrecognized element: step past it.
            reader.Read();
            break;
    }
}

#endregion IXmlSerializable Members
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Diagnostics;
using System.Globalization;
using System.Security.Cryptography;
using System.Runtime.InteropServices;
using Microsoft.Win32.SafeHandles;

using ErrorCode = Interop.NCrypt.ErrorCode;

namespace Internal.Cryptography
{
    /// <summary>
    /// Extension and utility helpers shared by the CNG (NCrypt) cryptography wrappers.
    /// </summary>
    internal static class Helpers
    {
        /// <summary>
        /// Returns a copy of the array, or null when the source is null.
        /// </summary>
        public static byte[] CloneByteArray(this byte[] src)
        {
            if (src == null)
            {
                return null;
            }

            return (byte[])(src.Clone());
        }

        //
        // The C# construct
        //
        //    fixed (byte* p = new byte[0])
        //
        // sets "p" to 0 rather than a valid address. Sometimes, we actually want a non-NULL pointer instead. (Some CNG apis actually care whether the buffer pointer is
        // NULL or not, even if the accompanying size argument is 0.)
        //
        // This helper enables the syntax:
        //
        //    fixed (byte* p = new byte[0].MapZeroLengthArrayToNonNullPointer())
        //
        // which always sets "p" to a non-NULL pointer for a non-null byte array.
        //
        public static byte[] MapZeroLengthArrayToNonNullPointer(this byte[] src)
        {
            if (src != null && src.Length == 0)
                return new byte[1];
            return src;
        }

        /// <summary>
        /// Opens the NCrypt key storage provider named by the given CngProvider.
        /// Throws a CryptographicException for any NCrypt failure.
        /// </summary>
        public static SafeNCryptProviderHandle OpenStorageProvider(this CngProvider provider)
        {
            string providerName = provider.Provider;
            SafeNCryptProviderHandle providerHandle;
            ErrorCode errorCode = Interop.NCrypt.NCryptOpenStorageProvider(out providerHandle, providerName, 0);
            if (errorCode != ErrorCode.ERROR_SUCCESS)
                throw errorCode.ToCryptographicException();
            return providerHandle;
        }

        /// <summary>
        /// Returns a CNG key property.
        /// </summary>
        /// <returns>
        ///     null - if property not defined on key.
        ///     throws - for any other type of error.
        /// </returns>
        public static byte[] GetProperty(this SafeNCryptHandle ncryptHandle, string propertyName, CngPropertyOptions options)
        {
            unsafe
            {
                // First call with a null buffer queries the required buffer size.
                int numBytesNeeded;
                ErrorCode errorCode = Interop.NCrypt.NCryptGetProperty(ncryptHandle, propertyName, null, 0, out numBytesNeeded, options);
                if (errorCode == ErrorCode.NTE_NOT_FOUND)
                    return null;
                if (errorCode != ErrorCode.ERROR_SUCCESS)
                    throw errorCode.ToCryptographicException();

                // Second call fetches the value into an appropriately sized buffer.
                byte[] propertyValue = new byte[numBytesNeeded];
                fixed (byte* pPropertyValue = propertyValue)
                {
                    errorCode = Interop.NCrypt.NCryptGetProperty(ncryptHandle, propertyName, pPropertyValue, propertyValue.Length, out numBytesNeeded, options);
                }
                if (errorCode == ErrorCode.NTE_NOT_FOUND)
                    return null;
                if (errorCode != ErrorCode.ERROR_SUCCESS)
                    throw errorCode.ToCryptographicException();

                // The reported size may have shrunk between the two calls; trim to fit.
                Array.Resize(ref propertyValue, numBytesNeeded);
                return propertyValue;
            }
        }

        /// <summary>
        /// Retrieve a well-known CNG string property. (Note: desktop compat: this helper likes to return special values rather than throw exceptions for missing
        /// or ill-formatted property values. Only use it for well-known properties that are unlikely to be ill-formatted.)
        /// </summary>
        public static string GetPropertyAsString(this SafeNCryptHandle ncryptHandle, string propertyName, CngPropertyOptions options)
        {
            byte[] value = ncryptHandle.GetProperty(propertyName, options);
            if (value == null)
                return null;   // Desktop compat: return null if key not present.
            if (value.Length == 0)
                return string.Empty; // Desktop compat: return empty if property value is 0-length.
            unsafe
            {
                fixed (byte* pValue = value)
                {
                    // Property bytes are a NUL-terminated UTF-16 string.
                    string valueAsString = Marshal.PtrToStringUni((IntPtr)pValue);
                    return valueAsString;
                }
            }
        }

        /// <summary>
        /// Retrieve a well-known CNG dword property. (Note: desktop compat: this helper likes to return special values rather than throw exceptions for missing
        /// or ill-formatted property values. Only use it for well-known properties that are unlikely to be ill-formatted.)
        /// </summary>
        public static int GetPropertyAsDword(this SafeNCryptHandle ncryptHandle, string propertyName, CngPropertyOptions options)
        {
            byte[] value = ncryptHandle.GetProperty(propertyName, options);
            if (value == null)
                return 0;   // Desktop compat: return 0 if key not present.
            return BitConverter.ToInt32(value, 0);
        }

        /// <summary>
        /// Retrieve a well-known CNG pointer property. (Note: desktop compat: this helper likes to return special values rather than throw exceptions for missing
        /// or ill-formatted property values. Only use it for well-known properties that are unlikely to be ill-formatted.)
        /// </summary>
        public static IntPtr GetPropertyAsIntPtr(this SafeNCryptHandle ncryptHandle, string propertyName, CngPropertyOptions options)
        {
            unsafe
            {
                // Pointer-sized values are read directly into a local rather than a byte[].
                int numBytesNeeded;
                IntPtr value;
                ErrorCode errorCode = Interop.NCrypt.NCryptGetProperty(ncryptHandle, propertyName, &value, IntPtr.Size, out numBytesNeeded, options);
                if (errorCode == ErrorCode.NTE_NOT_FOUND)
                    return IntPtr.Zero;
                if (errorCode != ErrorCode.ERROR_SUCCESS)
                    throw errorCode.ToCryptographicException();
                return value;
            }
        }

        /// <summary>
        /// Modify a CNG key's export policy. The change is persisted on the key.
        /// </summary>
        public static void SetExportPolicy(this SafeNCryptKeyHandle keyHandle, CngExportPolicies exportPolicy)
        {
            unsafe
            {
                ErrorCode errorCode = Interop.NCrypt.NCryptSetProperty(keyHandle, KeyPropertyName.ExportPolicy, &exportPolicy, sizeof(CngExportPolicies), CngPropertyOptions.Persist);
                if (errorCode != ErrorCode.ERROR_SUCCESS)
                    throw errorCode.ToCryptographicException();
            }
        }

        /// <summary>
        /// Returns true when <paramref name="size"/> falls within any of the given
        /// legal key-size ranges (MinSize..MaxSize stepped by SkipSize).
        /// </summary>
        public static bool IsLegalSize(this int size, KeySizes[] legalSizes)
        {
            for (int i = 0; i < legalSizes.Length; i++)
            {
                KeySizes currentSizes = legalSizes[i];

                // If a cipher has only one valid key size, MinSize == MaxSize and SkipSize will be 0
                if (currentSizes.SkipSize == 0)
                {
                    if (currentSizes.MinSize == size)
                        return true;
                }
                else if (size >= currentSizes.MinSize && size <= currentSizes.MaxSize)
                {
                    // If the number is in range, check to see if it's a legal increment above MinSize
                    int delta = size - currentSizes.MinSize;

                    // While it would be unusual to see KeySizes { 10, 20, 5 } and { 11, 14, 1 }, it could happen.
                    // So don't return false just because this one doesn't match.
                    if (delta % currentSizes.SkipSize == 0)
                    {
                        return true;
                    }
                }
            }

            return false;
        }

        /// <summary>
        /// Converts a bit count to the number of whole bytes needed to hold it (rounds up).
        /// </summary>
        public static int BitSizeToByteSize(this int bits)
        {
            return (bits + 7) / 8;
        }

        /// <summary>
        /// Returns <paramref name="count"/> cryptographically random bytes.
        /// </summary>
        public static byte[] GenerateRandom(int count)
        {
            byte[] buffer = new byte[count];
            using (RandomNumberGenerator rng = RandomNumberGenerator.Create())
            {
                rng.GetBytes(buffer);
            }
            return buffer;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Text;

namespace System.Net.NetworkInformation
{
    /// <summary>
    /// Represents a link-layer (MAC) address as an immutable sequence of bytes.
    /// </summary>
    public class PhysicalAddress
    {
        private readonly byte[] _addressBytes;

        // Hash code is computed lazily on first use and cached afterwards.
        private bool _hashComputed;
        private int _cachedHash;

        /// <summary>An address with no bytes, used as the "no address" sentinel.</summary>
        public static readonly PhysicalAddress None = new PhysicalAddress(Array.Empty<byte>());

        /// <summary>Wraps the given byte array as a physical address (no copy is made).</summary>
        public PhysicalAddress(byte[] address)
        {
            _addressBytes = address;
        }

        /// <summary>
        /// Hash of the address bytes: XOR of the address folded into little-endian
        /// 32-bit words, with any trailing 1-3 bytes folded in as a final partial word.
        /// </summary>
        public override int GetHashCode()
        {
            if (!_hashComputed)
            {
                _hashComputed = true;
                _cachedHash = 0;

                int i;
                int wholeWords = _addressBytes.Length & ~3;
                for (i = 0; i < wholeWords; i += 4)
                {
                    _cachedHash ^= _addressBytes[i]
                        | (_addressBytes[i + 1] << 8)
                        | (_addressBytes[i + 2] << 16)
                        | (_addressBytes[i + 3] << 24);
                }

                if ((_addressBytes.Length & 3) != 0)
                {
                    int tail = 0;
                    int shift = 0;
                    while (i < _addressBytes.Length)
                    {
                        tail |= _addressBytes[i] << shift;
                        shift += 8;
                        i++;
                    }
                    _cachedHash ^= tail;
                }
            }
            return _cachedHash;
        }

        /// <summary>
        /// Two addresses are equal when their byte sequences are identical.
        /// Hash codes are compared first as a cheap early-out.
        /// </summary>
        public override bool Equals(object comparand)
        {
            PhysicalAddress other = comparand as PhysicalAddress;
            if (other == null)
            {
                return false;
            }
            if (_addressBytes.Length != other._addressBytes.Length)
            {
                return false;
            }
            if (GetHashCode() != other.GetHashCode())
            {
                return false;
            }
            for (int i = 0; i < _addressBytes.Length; i++)
            {
                if (_addressBytes[i] != other._addressBytes[i])
                {
                    return false;
                }
            }
            return true;
        }

        /// <summary>
        /// Formats the address as uppercase hex with no separators, e.g. "001122AABBCC".
        /// </summary>
        public override string ToString()
        {
            const string HexDigits = "0123456789ABCDEF";
            StringBuilder formatted = new StringBuilder(_addressBytes.Length * 2);
            foreach (byte b in _addressBytes)
            {
                formatted.Append(HexDigits[(b >> 4) & 0x0F]);
                formatted.Append(HexDigits[b & 0x0F]);
            }
            return formatted.ToString();
        }

        /// <summary>Returns a defensive copy of the address bytes.</summary>
        public byte[] GetAddressBytes()
        {
            return (byte[])_addressBytes.Clone();
        }

        /// <summary>
        /// Parses an address of either "001122AABBCC" or "00-11-22-AA-BB-CC" form.
        /// Hex digits must be uppercase; a null input yields <see cref="None"/>
        /// (desktop compat). Malformed input throws <see cref="FormatException"/>.
        /// </summary>
        public static PhysicalAddress Parse(string address)
        {
            if (address == null)
            {
                return None;
            }

            bool hasDashes = address.IndexOf('-') >= 0;
            byte[] buffer;
            if (hasDashes)
            {
                // Each "XX-" group is three characters (the last group has no dash).
                buffer = new byte[(address.Length + 1) / 3];
            }
            else
            {
                // Without dashes, every byte must be exactly two hex digits.
                if (address.Length % 2 > 0)
                {
                    throw new FormatException(SR.net_bad_mac_address);
                }
                buffer = new byte[address.Length / 2];
            }

            int byteIndex = 0;
            int digitsInSegment = 0;
            foreach (char c in address)
            {
                int nibble;
                if (c >= '0' && c <= '9')
                {
                    nibble = c - '0';
                }
                else if (c >= 'A' && c <= 'F')
                {
                    nibble = c - 'A' + 10;
                }
                else if (c == '-')
                {
                    // A dash is only legal after a complete two-digit pair.
                    if (digitsInSegment == 2)
                    {
                        digitsInSegment = 0;
                        continue;
                    }
                    throw new FormatException(SR.net_bad_mac_address);
                }
                else
                {
                    throw new FormatException(SR.net_bad_mac_address);
                }

                // More than two hex digits between dashes is malformed.
                if (hasDashes && digitsInSegment >= 2)
                {
                    throw new FormatException(SR.net_bad_mac_address);
                }

                if (digitsInSegment % 2 == 0)
                {
                    buffer[byteIndex] = (byte)(nibble << 4);
                }
                else
                {
                    buffer[byteIndex++] |= (byte)nibble;
                }
                digitsInSegment++;
            }

            // The final segment must contain a complete pair of hex digits
            // (also rejects the empty string).
            if (digitsInSegment < 2)
            {
                throw new FormatException(SR.net_bad_mac_address);
            }

            return new PhysicalAddress(buffer);
        }
    }
}
namespace iControl.Dialogs
{
    // Designer-generated half of ConnectionDialog: control creation, layout and event
    // wiring. Runtime behavior lives in the companion ConnectionDialog.cs file.
    partial class ConnectionDialog
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Windows Form Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(ConnectionDialog));
            this.label1 = new System.Windows.Forms.Label();
            this.label2 = new System.Windows.Forms.Label();
            this.PortTextBox = new System.Windows.Forms.TextBox();
            this.UsernameTextBox = new System.Windows.Forms.TextBox();
            this.label3 = new System.Windows.Forms.Label();
            this.PasswordTextBox = new System.Windows.Forms.TextBox();
            this.label4 = new System.Windows.Forms.Label();
            this.EndpointTextBox = new System.Windows.Forms.TextBox();
            this.OKLinkLabel = new System.Windows.Forms.LinkLabel();
            this.CancelLinkLabel = new System.Windows.Forms.LinkLabel();
            this.label5 = new System.Windows.Forms.Label();
            this.HostnameComboBox = new System.Windows.Forms.ComboBox();
            this.SaveConfigCheckBox = new System.Windows.Forms.CheckBox();
            this.ClearLinkLabel = new System.Windows.Forms.LinkLabel();
            this.label6 = new System.Windows.Forms.Label();
            this.groupBox1 = new System.Windows.Forms.GroupBox();
            this.ProxyUserTextBox = new System.Windows.Forms.TextBox();
            this.label7 = new System.Windows.Forms.Label();
            this.label9 = new System.Windows.Forms.Label();
            this.ProxyPassTextBox = new System.Windows.Forms.TextBox();
            this.UseProxyCheckBox = new System.Windows.Forms.CheckBox();
            this.label8 = new System.Windows.Forms.Label();
            this.ProxyAddressTextBox = new System.Windows.Forms.TextBox();
            this.ProxyPortTextBox = new System.Windows.Forms.TextBox();
            this.UseHttpsCheckbox = new System.Windows.Forms.CheckBox();
            this.groupBox1.SuspendLayout();
            this.SuspendLayout();
            // 
            // label1 ("Hostname" caption)
            // 
            this.label1.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
            this.label1.Location = new System.Drawing.Point(6, 13);
            this.label1.Name = "label1";
            this.label1.Size = new System.Drawing.Size(80, 24);
            this.label1.TabIndex = 0;
            this.label1.Text = "Hostname";
            this.label1.TextAlign = System.Drawing.ContentAlignment.MiddleRight;
            // 
            // label2 ("Username" caption)
            // 
            this.label2.Location = new System.Drawing.Point(4, 63);
            this.label2.Name = "label2";
            this.label2.Size = new System.Drawing.Size(80, 24);
            this.label2.TabIndex = 3;
            this.label2.Text = "Username";
            this.label2.TextAlign = System.Drawing.ContentAlignment.MiddleRight;
            // 
            // PortTextBox (defaults to the HTTPS port)
            // 
            this.PortTextBox.BackColor = System.Drawing.SystemColors.Control;
            this.PortTextBox.Location = new System.Drawing.Point(272, 16);
            this.PortTextBox.Name = "PortTextBox";
            this.PortTextBox.Size = new System.Drawing.Size(40, 20);
            this.PortTextBox.TabIndex = 3;
            this.PortTextBox.Text = "443";
            // 
            // UsernameTextBox
            // 
            this.UsernameTextBox.BackColor = System.Drawing.SystemColors.Control;
            this.UsernameTextBox.Location = new System.Drawing.Point(100, 69);
            this.UsernameTextBox.Name = "UsernameTextBox";
            this.UsernameTextBox.Size = new System.Drawing.Size(144, 20);
            this.UsernameTextBox.TabIndex = 4;
            // 
            // label3 ("Password" caption)
            // 
            this.label3.Location = new System.Drawing.Point(4, 89);
            this.label3.Name = "label3";
            this.label3.Size = new System.Drawing.Size(80, 24);
            this.label3.TabIndex = 5;
            this.label3.Text = "Password";
            this.label3.TextAlign = System.Drawing.ContentAlignment.MiddleRight;
            // 
            // PasswordTextBox (masked input)
            // 
            this.PasswordTextBox.BackColor = System.Drawing.SystemColors.Control;
            this.PasswordTextBox.Location = new System.Drawing.Point(100, 93);
            this.PasswordTextBox.Name = "PasswordTextBox";
            this.PasswordTextBox.PasswordChar = '*';
            this.PasswordTextBox.Size = new System.Drawing.Size(144, 20);
            this.PasswordTextBox.TabIndex = 6;
            // 
            // label4 ("Endpoint" caption)
            // 
            this.label4.Location = new System.Drawing.Point(4, 39);
            this.label4.Name = "label4";
            this.label4.Size = new System.Drawing.Size(80, 24);
            this.label4.TabIndex = 1;
            this.label4.Text = "Endpoint";
            this.label4.TextAlign = System.Drawing.ContentAlignment.MiddleRight;
            // 
            // EndpointTextBox (defaults to the iControl portal CGI path)
            // 
            this.EndpointTextBox.BackColor = System.Drawing.SystemColors.Control;
            this.EndpointTextBox.Location = new System.Drawing.Point(100, 43);
            this.EndpointTextBox.Name = "EndpointTextBox";
            this.EndpointTextBox.Size = new System.Drawing.Size(144, 20);
            this.EndpointTextBox.TabIndex = 2;
            this.EndpointTextBox.Text = "/iControl/iControlPortal.cgi";
            // 
            // OKLinkLabel (acts as the dialog's accept button; see AcceptButton below)
            // 
            this.OKLinkLabel.LinkBehavior = System.Windows.Forms.LinkBehavior.HoverUnderline;
            this.OKLinkLabel.Location = new System.Drawing.Point(260, 72);
            this.OKLinkLabel.Name = "OKLinkLabel";
            this.OKLinkLabel.Size = new System.Drawing.Size(40, 16);
            this.OKLinkLabel.TabIndex = 7;
            this.OKLinkLabel.TabStop = true;
            this.OKLinkLabel.Text = "OK";
            this.OKLinkLabel.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
            this.OKLinkLabel.LinkClicked += new System.Windows.Forms.LinkLabelLinkClickedEventHandler(this.OKLinkLabel_LinkClicked);
            // 
            // CancelLinkLabel (acts as the dialog's cancel button; see CancelButton below)
            // 
            this.CancelLinkLabel.LinkBehavior = System.Windows.Forms.LinkBehavior.HoverUnderline;
            this.CancelLinkLabel.Location = new System.Drawing.Point(261, 94);
            this.CancelLinkLabel.Name = "CancelLinkLabel";
            this.CancelLinkLabel.Size = new System.Drawing.Size(40, 16);
            this.CancelLinkLabel.TabIndex = 8;
            this.CancelLinkLabel.TabStop = true;
            this.CancelLinkLabel.Text = "Cancel";
            this.CancelLinkLabel.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
            this.CancelLinkLabel.LinkClicked += new System.Windows.Forms.LinkLabelLinkClickedEventHandler(this.CancelLinkLabel_LinkClicked);
            // 
            // label5 (":" separator between hostname and port)
            // 
            this.label5.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
            this.label5.Location = new System.Drawing.Point(256, 8);
            this.label5.Name = "label5";
            this.label5.Size = new System.Drawing.Size(16, 32);
            this.label5.TabIndex = 2;
            this.label5.Text = ":";
            this.label5.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
            // 
            // HostnameComboBox (hostname entry with recent-host history)
            // 
            this.HostnameComboBox.FormattingEnabled = true;
            this.HostnameComboBox.Location = new System.Drawing.Point(112, 15);
            this.HostnameComboBox.Name = "HostnameComboBox";
            this.HostnameComboBox.Size = new System.Drawing.Size(144, 21);
            this.HostnameComboBox.TabIndex = 1;
            this.HostnameComboBox.SelectedIndexChanged += new System.EventHandler(this.HostnameComboBox_SelectedIndexChanged);
            // 
            // SaveConfigCheckBox (checked by default)
            // 
            this.SaveConfigCheckBox.AutoSize = true;
            this.SaveConfigCheckBox.Checked = true;
            this.SaveConfigCheckBox.CheckState = System.Windows.Forms.CheckState.Checked;
            this.SaveConfigCheckBox.Location = new System.Drawing.Point(100, 203);
            this.SaveConfigCheckBox.Name = "SaveConfigCheckBox";
            this.SaveConfigCheckBox.Size = new System.Drawing.Size(143, 17);
            this.SaveConfigCheckBox.TabIndex = 18;
            this.SaveConfigCheckBox.Text = "Save Connection Details";
            this.SaveConfigCheckBox.UseVisualStyleBackColor = true;
            // 
            // ClearLinkLabel
            // 
            this.ClearLinkLabel.LinkBehavior = System.Windows.Forms.LinkBehavior.HoverUnderline;
            this.ClearLinkLabel.Location = new System.Drawing.Point(249, 202);
            this.ClearLinkLabel.Name = "ClearLinkLabel";
            this.ClearLinkLabel.Size = new System.Drawing.Size(67, 16);
            this.ClearLinkLabel.TabIndex = 19;
            this.ClearLinkLabel.TabStop = true;
            this.ClearLinkLabel.Text = "Clear History";
            this.ClearLinkLabel.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
            this.ClearLinkLabel.LinkClicked += new System.Windows.Forms.LinkLabelLinkClickedEventHandler(this.ClearLinkLabel_LinkClicked);
            // 
            // label6 ("History" caption)
            // 
            this.label6.Location = new System.Drawing.Point(4, 196);
            this.label6.Name = "label6";
            this.label6.Size = new System.Drawing.Size(80, 24);
            this.label6.TabIndex = 17;
            this.label6.Text = "History";
            this.label6.TextAlign = System.Drawing.ContentAlignment.MiddleRight;
            this.label6.Click += new System.EventHandler(this.label6_Click);
            // 
            // groupBox1 (container for all connection/proxy fields)
            // 
            this.groupBox1.Controls.Add(this.UseHttpsCheckbox);
            this.groupBox1.Controls.Add(this.label6);
            this.groupBox1.Controls.Add(this.ProxyUserTextBox);
            this.groupBox1.Controls.Add(this.ClearLinkLabel);
            this.groupBox1.Controls.Add(this.UsernameTextBox);
            this.groupBox1.Controls.Add(this.SaveConfigCheckBox);
            this.groupBox1.Controls.Add(this.label7);
            this.groupBox1.Controls.Add(this.label2);
            this.groupBox1.Controls.Add(this.label9);
            this.groupBox1.Controls.Add(this.label3);
            this.groupBox1.Controls.Add(this.ProxyPassTextBox);
            this.groupBox1.Controls.Add(this.EndpointTextBox);
            this.groupBox1.Controls.Add(this.UseProxyCheckBox);
            this.groupBox1.Controls.Add(this.label8);
            this.groupBox1.Controls.Add(this.label1);
            this.groupBox1.Controls.Add(this.PasswordTextBox);
            this.groupBox1.Controls.Add(this.label4);
            this.groupBox1.Controls.Add(this.OKLinkLabel);
            this.groupBox1.Controls.Add(this.CancelLinkLabel);
            this.groupBox1.Controls.Add(this.ProxyAddressTextBox);
            this.groupBox1.Controls.Add(this.ProxyPortTextBox);
            this.groupBox1.Location = new System.Drawing.Point(12, -1);
            this.groupBox1.Name = "groupBox1";
            this.groupBox1.Size = new System.Drawing.Size(322, 231);
            this.groupBox1.TabIndex = 0;
            this.groupBox1.TabStop = false;
            // 
            // ProxyUserTextBox (disabled until UseProxyCheckBox is checked)
            // 
            this.ProxyUserTextBox.BackColor = System.Drawing.SystemColors.Control;
            this.ProxyUserTextBox.Enabled = false;
            this.ProxyUserTextBox.Location = new System.Drawing.Point(101, 142);
            this.ProxyUserTextBox.Name = "ProxyUserTextBox";
            this.ProxyUserTextBox.Size = new System.Drawing.Size(144, 20);
            this.ProxyUserTextBox.TabIndex = 14;
            // 
            // label7 (proxy "Username" caption)
            // 
            this.label7.Location = new System.Drawing.Point(5, 136);
            this.label7.Name = "label7";
            this.label7.Size = new System.Drawing.Size(80, 24);
            this.label7.TabIndex = 13;
            this.label7.Text = "Username";
            this.label7.TextAlign = System.Drawing.ContentAlignment.MiddleRight;
            // 
            // label9 (proxy "Password" caption)
            // 
            this.label9.Location = new System.Drawing.Point(5, 162);
            this.label9.Name = "label9";
            this.label9.Size = new System.Drawing.Size(80, 24);
            this.label9.TabIndex = 15;
            this.label9.Text = "Password";
            this.label9.TextAlign = System.Drawing.ContentAlignment.MiddleRight;
            // 
            // ProxyPassTextBox (masked; disabled until UseProxyCheckBox is checked)
            // 
            this.ProxyPassTextBox.BackColor = System.Drawing.SystemColors.Control;
            this.ProxyPassTextBox.Enabled = false;
            this.ProxyPassTextBox.Location = new System.Drawing.Point(101, 166);
            this.ProxyPassTextBox.Name = "ProxyPassTextBox";
            this.ProxyPassTextBox.PasswordChar = '*';
            this.ProxyPassTextBox.Size = new System.Drawing.Size(144, 20);
            this.ProxyPassTextBox.TabIndex = 16;
            // 
            // UseProxyCheckBox (toggles the proxy fields via UseProxyCheckBox_CheckedChanged)
            // 
            this.UseProxyCheckBox.AutoSize = true;
            this.UseProxyCheckBox.Location = new System.Drawing.Point(33, 120);
            this.UseProxyCheckBox.Name = "UseProxyCheckBox";
            this.UseProxyCheckBox.RightToLeft = System.Windows.Forms.RightToLeft.No;
            this.UseProxyCheckBox.Size = new System.Drawing.Size(52, 17);
            this.UseProxyCheckBox.TabIndex = 9;
            this.UseProxyCheckBox.Text = "Proxy";
            this.UseProxyCheckBox.UseVisualStyleBackColor = true;
            this.UseProxyCheckBox.CheckedChanged += new System.EventHandler(this.UseProxyCheckBox_CheckedChanged);
            // 
            // label8 (":" separator between proxy address and port)
            // 
            this.label8.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
            this.label8.Location = new System.Drawing.Point(243, 111);
            this.label8.Name = "label8";
            this.label8.Size = new System.Drawing.Size(16, 32);
            this.label8.TabIndex = 11;
            this.label8.Text = ":";
            this.label8.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
            // 
            // ProxyAddressTextBox (disabled until UseProxyCheckBox is checked)
            // 
            this.ProxyAddressTextBox.BackColor = System.Drawing.SystemColors.Control;
            this.ProxyAddressTextBox.Enabled = false;
            this.ProxyAddressTextBox.Location = new System.Drawing.Point(101, 118);
            this.ProxyAddressTextBox.Name = "ProxyAddressTextBox";
            this.ProxyAddressTextBox.Size = new System.Drawing.Size(144, 20);
            this.ProxyAddressTextBox.TabIndex = 10;
            // 
            // ProxyPortTextBox (defaults to 8080; disabled until UseProxyCheckBox is checked)
            // 
            this.ProxyPortTextBox.BackColor = System.Drawing.SystemColors.Control;
            this.ProxyPortTextBox.Enabled = false;
            this.ProxyPortTextBox.Location = new System.Drawing.Point(261, 118);
            this.ProxyPortTextBox.Name = "ProxyPortTextBox";
            this.ProxyPortTextBox.Size = new System.Drawing.Size(40, 20);
            this.ProxyPortTextBox.TabIndex = 12;
            this.ProxyPortTextBox.Text = "8080";
            // 
            // UseHttpsCheckbox (checked by default)
            // 
            this.UseHttpsCheckbox.AutoSize = true;
            this.UseHttpsCheckbox.Checked = true;
            this.UseHttpsCheckbox.CheckState = System.Windows.Forms.CheckState.Checked;
            this.UseHttpsCheckbox.Location = new System.Drawing.Point(260, 46);
            this.UseHttpsCheckbox.Name = "UseHttpsCheckbox";
            this.UseHttpsCheckbox.RightToLeft = System.Windows.Forms.RightToLeft.No;
            this.UseHttpsCheckbox.Size = new System.Drawing.Size(49, 17);
            this.UseHttpsCheckbox.TabIndex = 20;
            this.UseHttpsCheckbox.Text = "https";
            this.UseHttpsCheckbox.UseVisualStyleBackColor = true;
            // 
            // ConnectionDialog (fixed-size modal dialog; Enter = OK, Esc = Cancel)
            // 
            this.AcceptButton = this.OKLinkLabel;
            this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
            this.CancelButton = this.CancelLinkLabel;
            this.ClientSize = new System.Drawing.Size(350, 242);
            this.Controls.Add(this.HostnameComboBox);
            this.Controls.Add(this.label5);
            this.Controls.Add(this.PortTextBox);
            this.Controls.Add(this.groupBox1);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
            this.HelpButton = true;
            this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
            this.MaximizeBox = false;
            this.MinimizeBox = false;
            this.Name = "ConnectionDialog";
            this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
            this.Text = "Connect to BIG-IP";
            this.HelpButtonClicked += new System.ComponentModel.CancelEventHandler(this.ConnectionDialog_HelpButtonClicked);
            this.Load += new System.EventHandler(this.ConnectionDialog_Load);
            this.groupBox1.ResumeLayout(false);
            this.groupBox1.PerformLayout();
            this.ResumeLayout(false);
            this.PerformLayout();
        }

        #endregion

        // Designer-managed control fields; accessed from the companion partial class.
        private System.Windows.Forms.Label label1;
        private System.Windows.Forms.TextBox PortTextBox;
        private System.Windows.Forms.Label label2;
        private System.Windows.Forms.TextBox UsernameTextBox;
        private System.Windows.Forms.Label label3;
        private System.Windows.Forms.TextBox PasswordTextBox;
        private System.Windows.Forms.Label label4;
        private System.Windows.Forms.TextBox EndpointTextBox;
        private System.Windows.Forms.LinkLabel OKLinkLabel;
        private System.Windows.Forms.LinkLabel CancelLinkLabel;
        private System.Windows.Forms.Label label5;
        private System.Windows.Forms.ComboBox HostnameComboBox;
        private System.Windows.Forms.CheckBox SaveConfigCheckBox;
        private System.Windows.Forms.LinkLabel ClearLinkLabel;
        private System.Windows.Forms.Label label6;
        private System.Windows.Forms.GroupBox groupBox1;
        private System.Windows.Forms.CheckBox UseProxyCheckBox;
        private System.Windows.Forms.Label label8;
        private System.Windows.Forms.TextBox ProxyPortTextBox;
        private System.Windows.Forms.TextBox ProxyAddressTextBox;
        private System.Windows.Forms.TextBox ProxyUserTextBox;
        private System.Windows.Forms.Label label7;
        private System.Windows.Forms.Label label9;
        private System.Windows.Forms.TextBox ProxyPassTextBox;
        private System.Windows.Forms.CheckBox UseHttpsCheckbox;
    }
}
using System;
using System.Linq;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Threading.Tasks;
using Coevery.Caching;
using Coevery.Environment.Configuration;
using Coevery.Environment.Extensions;
using Coevery.Environment.ShellBuilders;
using Coevery.Environment.State;
using Coevery.Environment.Descriptor;
using Coevery.Environment.Descriptor.Models;
using Coevery.Localization;
using Coevery.Logging;
using Coevery.Utility.Extensions;

namespace Coevery.Environment {
    // All the event handlers that DefaultCoeveryHost implements have to be declared in CoeveryStarter
    /// <summary>
    /// Hosts the set of tenant shells: creates and activates them at startup, rebuilds
    /// them when extensions change on disk, and restarts individual tenants when their
    /// settings or shell descriptor change.
    /// NOTE(review): _shellContexts is read/written with double-checked locking but is
    /// not declared volatile — presumably safe enough in practice here, but worth
    /// confirming against the memory model if this is ever ported.
    /// </summary>
    public class DefaultCoeveryHost : ICoeveryHost, IShellSettingsManagerEventHandler, IShellDescriptorManagerEventHandler {
        private readonly IHostLocalRestart _hostLocalRestart;
        private readonly IShellSettingsManager _shellSettingsManager;
        private readonly IShellContextFactory _shellContextFactory;
        private readonly IRunningShellTable _runningShellTable;
        private readonly IProcessingEngine _processingEngine;
        private readonly IExtensionLoaderCoordinator _extensionLoaderCoordinator;
        private readonly IExtensionMonitoringCoordinator _extensionMonitoringCoordinator;
        private readonly ICacheManager _cacheManager;

        // Guards creation/disposal/activation of shell contexts.
        private readonly static object _syncLock = new object();

        // Null until BuildCurrent() runs; reset to null by DisposeShellContext() to force a rebuild.
        private IEnumerable<ShellContext> _shellContexts;

        // Per-request (context-scoped) list of tenants flagged for restart; drained by StartUpdatedShells().
        private readonly ContextState<IList<ShellSettings>> _tenantsToRestart;

        public DefaultCoeveryHost(
            IShellSettingsManager shellSettingsManager,
            IShellContextFactory shellContextFactory,
            IRunningShellTable runningShellTable,
            IProcessingEngine processingEngine,
            IExtensionLoaderCoordinator extensionLoaderCoordinator,
            IExtensionMonitoringCoordinator extensionMonitoringCoordinator,
            ICacheManager cacheManager,
            IHostLocalRestart hostLocalRestart) {
            _shellSettingsManager = shellSettingsManager;
            _shellContextFactory = shellContextFactory;
            _runningShellTable = runningShellTable;
            _processingEngine = processingEngine;
            _extensionLoaderCoordinator = extensionLoaderCoordinator;
            _extensionMonitoringCoordinator = extensionMonitoringCoordinator;
            _cacheManager = cacheManager;
            _hostLocalRestart = hostLocalRestart;
            _tenantsToRestart = new ContextState<IList<ShellSettings>>("DefaultCoeveryHost.TenantsToRestart", () => new List<ShellSettings>());

            T = NullLocalizer.Instance;
            Logger = NullLogger.Instance;
        }

        public Localizer T { get; set; }
        public ILogger Logger { get; set; }

        // Snapshot of all active shell contexts (builds them on first access).
        public IList<ShellContext> Current {
            get { return BuildCurrent().ToReadOnlyCollection(); }
        }

        // Finds the active context whose settings name matches, or null.
        public ShellContext GetShellContext(ShellSettings shellSettings) {
            return BuildCurrent().SingleOrDefault(shellContext => shellContext.Settings.Name.Equals(shellSettings.Name));
        }

        void ICoeveryHost.Initialize() {
            Logger.Information("Initializing");
            BuildCurrent();
            Logger.Information("Initialized");
        }

        // Drops all contexts so the next request rebuilds them with fresh extensions.
        void ICoeveryHost.ReloadExtensions() {
            DisposeShellContext();
        }

        void ICoeveryHost.BeginRequest() {
            Logger.Debug("BeginRequest");
            BeginRequest();
        }

        void ICoeveryHost.EndRequest() {
            Logger.Debug("EndRequest");
            EndRequest();
        }

        // Creates an isolated work-context scope for a tenant, outside the request pipeline.
        IWorkContextScope ICoeveryHost.CreateStandaloneEnvironment(ShellSettings shellSettings) {
            Logger.Debug("Creating standalone environment for tenant {0}", shellSettings.Name);

            MonitorExtensions();
            BuildCurrent();
            var shellContext = CreateShellContext(shellSettings);
            return shellContext.LifetimeScope.CreateWorkContextScope();
        }

        /// <summary>
        /// Ensures shells are activated, or re-activated if extensions have changed
        /// </summary>
        IEnumerable<ShellContext> BuildCurrent() {
            if (_shellContexts == null) {
                lock (_syncLock) {
                    if (_shellContexts == null) {
                        SetupExtensions();
                        MonitorExtensions();
                        CreateAndActivateShells();
                    }
                }
            }

            return _shellContexts;
        }

        // Drains the per-request restart queue, re-activating each flagged tenant.
        void StartUpdatedShells() {
            while (_tenantsToRestart.GetState().Any()) {
                var settings = _tenantsToRestart.GetState().First();
                _tenantsToRestart.GetState().Remove(settings);
                Logger.Debug("Updating shell: " + settings.Name);
                lock (_syncLock) {
                    ActivateShell(settings);
                }
            }
        }

        // Loads every tenant's settings and activates a shell for each in parallel;
        // with no tenants at all, activates the setup shell for the default tenant.
        void CreateAndActivateShells() {
            Logger.Information("Start creation of shells");

            // is there any tenant right now ?
            var allSettings = _shellSettingsManager.LoadSettings().ToArray();

            // load all tenants, and activate their shell
            if (allSettings.Any()) {
                Parallel.ForEach(allSettings, settings => {
                    try {
                        var context = CreateShellContext(settings);
                        ActivateShell(context);
                    }
                    catch (Exception e) {
                        // One tenant failing must not prevent the others from starting.
                        Logger.Error(e, "A tenant could not be started: " + settings.Name);
                    }
                });
            }
            // no settings, run the Setup
            else {
                var setupContext = CreateSetupContext();
                ActivateShell(setupContext);
            }

            Logger.Information("Done creating shells");
        }

        /// <summary>
        /// Starts a Shell and registers its settings in RunningShellTable
        /// </summary>
        private void ActivateShell(ShellContext context) {
            Logger.Debug("Activating context for tenant {0}", context.Settings.Name);
            context.Shell.Activate();

            // Replace any existing context with the same tenant name; ToArray() makes the
            // published list a materialized snapshot.
            _shellContexts = (_shellContexts ?? Enumerable.Empty<ShellContext>())
                            .Where(c => c.Settings.Name != context.Settings.Name)
                            .Concat(new[] { context })
                            .ToArray();

            _runningShellTable.Add(context.Settings);
        }

        /// <summary>
        /// Creates a transient shell for the default tenant's setup
        /// </summary>
        private ShellContext CreateSetupContext() {
            Logger.Debug("Creating shell context for root setup");
            return _shellContextFactory.CreateSetupContext(new ShellSettings { Name = ShellSettings.DefaultName });
        }

        /// <summary>
        /// Creates a shell context based on shell settings
        /// </summary>
        private ShellContext CreateShellContext(ShellSettings settings) {
            // Uninitialized tenants get a setup context instead of a full shell.
            if (settings.State == TenantState.Uninitialized) {
                Logger.Debug("Creating shell context for tenant {0} setup", settings.Name);
                return _shellContextFactory.CreateSetupContext(settings);
            }

            Logger.Debug("Creating shell context for tenant {0}", settings.Name);
            return _shellContextFactory.CreateShellContext(settings);
        }

        private void SetupExtensions() {
            _extensionLoaderCoordinator.SetupExtensions();
        }

        private void MonitorExtensions() {
            // This is a "fake" cache entry to allow the extension loader coordinator
            // notify us (by resetting _current to "null") when an extension has changed
            // on disk, and we need to reload new/updated extensions.
            _cacheManager.Get("CoeveryHost_Extensions",
                              ctx => {
                                  _extensionMonitoringCoordinator.MonitorExtensions(ctx.Monitor);
                                  _hostLocalRestart.Monitor(ctx.Monitor);
                                  DisposeShellContext();
                                  return "";
                              });
        }

        /// <summary>
        /// Terminates all active shell contexts, and dispose their scope, forcing
        /// them to be reloaded if necessary.
        /// </summary>
        private void DisposeShellContext() {
            Logger.Information("Disposing active shell contexts");

            if (_shellContexts != null) {
                lock (_syncLock) {
                    if (_shellContexts != null) {
                        foreach (var shellContext in _shellContexts) {
                            shellContext.Shell.Terminate();
                            shellContext.LifetimeScope.Dispose();
                        }
                    }
                }
                _shellContexts = null;
            }
        }

        protected virtual void BeginRequest() {
            // Ensure all shell contexts are loaded, or need to be reloaded if
            // extensions have changed
            MonitorExtensions();
            BuildCurrent();
            StartUpdatedShells();
        }

        protected virtual void EndRequest() {
            // Synchronously process all pending tasks. It's safe to do this at this point
            // of the pipeline, as the request transaction has been closed, so creating a new
            // environment and transaction for these tasks will behave as expected.)
            while (_processingEngine.AreTasksPending()) {
                Logger.Debug("Processing pending task");
                _processingEngine.ExecuteNextTask();
            }

            StartUpdatedShells();
        }

        // Settings were saved for a tenant: flag it for restart at the next opportunity.
        void IShellSettingsManagerEventHandler.Saved(ShellSettings settings) {
            Logger.Debug("Shell saved: " + settings.Name);

            // if a tenant has been created
            if (settings.State != TenantState.Invalid) {
                if (!_tenantsToRestart.GetState().Any(t => t.Name.Equals(settings.Name))) {
                    Logger.Debug("Adding tenant to restart: " + settings.Name + " " + settings.State);
                    _tenantsToRestart.GetState().Add(settings);
                }
            }
        }

        // Re-activates (or creates, or terminates) a tenant's shell based on its current state.
        // NOTE(review): the Where(...) reassignments below leave _shellContexts as a lazily
        // evaluated LINQ sequence rather than a materialized array — presumably intentional,
        // but it differs from the ToArray() snapshot taken in ActivateShell(ShellContext).
        public void ActivateShell(ShellSettings settings) {
            Logger.Debug("Activating shell: " + settings.Name);

            // look for the associated shell context
            var shellContext = _shellContexts.FirstOrDefault(c => c.Settings.Name == settings.Name);

            if (shellContext == null && settings.State == TenantState.Disabled) {
                return;
            }

            // is this is a new tenant ? or is it a tenant waiting for setup ?
            if (shellContext == null || settings.State == TenantState.Uninitialized) {
                // create the Shell
                var context = CreateShellContext(settings);

                // activate the Shell
                ActivateShell(context);
            }
            // terminate the shell if the tenant was disabled
            else if (settings.State == TenantState.Disabled) {
                shellContext.Shell.Terminate();
                shellContext.LifetimeScope.Dispose();
                _runningShellTable.Remove(settings);
                _shellContexts = _shellContexts.Where(shell => shell.Settings.Name != settings.Name);
            }
            // reload the shell as its settings have changed
            else {
                // dispose previous context
                shellContext.Shell.Terminate();
                shellContext.LifetimeScope.Dispose();

                var context = _shellContextFactory.CreateShellContext(settings);

                // activate and register modified context
                _shellContexts = _shellContexts.Where(shell => shell.Settings.Name != settings.Name).Union(new[] { context });

                context.Shell.Activate();
                _runningShellTable.Update(settings);
            }
        }

        /// <summary>
        /// A feature is enabled/disabled, the tenant needs to be restarted
        /// </summary>
        void IShellDescriptorManagerEventHandler.Changed(ShellDescriptor descriptor, string tenant) {
            if (_shellContexts == null) {
                return;
            }

            Logger.Debug("Shell changed: " + tenant);

            var context = _shellContexts.FirstOrDefault(x => x.Settings.Name == tenant);

            if (context == null) {
                return;
            }

            // don't restart when tenant is in setup
            if (context.Settings.State != TenantState.Running) {
                return;
            }

            // don't flag the tenant if already listed
            if (_tenantsToRestart.GetState().Any(x => x.Name == tenant)) {
                return;
            }

            Logger.Debug("Adding tenant to restart: " + tenant);
            _tenantsToRestart.GetState().Add(context.Settings);
        }
    }
}
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Orleans;
using Orleans.Configuration;
using Orleans.Hosting;
using Orleans.Providers.Streams.AzureQueue;
using Orleans.Streams;
using Orleans.TestingHost;
using TestExtensions;
using UnitTests.Streaming;
using UnitTests.StreamingTests;
using Xunit;

namespace Tester.AzureUtils.Streaming
{
    /// <summary>
    /// Runs the shared <see cref="SingleStreamTestRunner"/> (and
    /// <see cref="MultipleStreamsTestRunner"/>) streaming scenarios against the
    /// Azure Queue stream provider, using a fresh test cluster per test
    /// (<see cref="TestClusterPerTest"/>).
    /// </summary>
    [TestCategory("Streaming"), TestCategory("Azure"), TestCategory("AzureQueue")]
    public class AQStreamingTests : TestClusterPerTest
    {
        public const string AzureQueueStreamProviderName = StreamTestsConstants.AZURE_QUEUE_STREAM_PROVIDER_NAME;
        public const string SmsStreamProviderName = StreamTestsConstants.SMS_STREAM_PROVIDER_NAME;
        // Delegate that implements the individual streaming scenarios; created in InitializeAsync.
        private SingleStreamTestRunner runner;
        // Number of Azure queues the stream provider is partitioned across.
        private const int queueCount = 8;

        protected override void ConfigureTestCluster(TestClusterBuilder builder)
        {
            // Presumably skips the fixture when Azure storage is not configured — TODO confirm against TestUtils.
            TestUtils.CheckForAzureStorage();
            builder.AddSiloBuilderConfigurator<SiloBuilderConfigurator>();
            builder.AddClientBuilderConfigurator<MyClientBuilderConfigurator>();
        }

        // Client-side wiring: SMS provider plus Azure Queue streams whose queue
        // names are derived from the cluster id (so concurrent runs don't collide).
        private class MyClientBuilderConfigurator : IClientBuilderConfigurator
        {
            public void Configure(IConfiguration configuration, IClientBuilder clientBuilder)
            {
                clientBuilder
                    .AddSimpleMessageStreamProvider(SmsStreamProviderName)
                    .AddAzureQueueStreams(AzureQueueStreamProviderName, b=>
                        b.ConfigureAzureQueue(ob=>ob.Configure<IOptions<ClusterOptions>>(
                            (options, dep) =>
                            {
                                options.ConfigureTestDefaults();
                                options.QueueNames = AzureQueueUtilities.GenerateQueueNames(dep.Value.ClusterId, queueCount);
                            })));
            }
        }

        // Silo-side wiring: SMS provider, table-backed grain storage ("AzureStore",
        // "PubSubStore"), in-memory storage, and the Azure Queue stream provider,
        // mirroring the client configuration above.
        private class SiloBuilderConfigurator : ISiloConfigurator
        {
            public void Configure(ISiloBuilder hostBuilder)
            {
                hostBuilder
                    .AddSimpleMessageStreamProvider(SmsStreamProviderName)
                    .AddAzureTableGrainStorage("AzureStore", builder => builder.Configure<IOptions<ClusterOptions>>((options, silo) =>
                    {
                        options.ConfigureTestDefaults();
                        // Remove table rows on ClearState so state doesn't leak between tests.
                        options.DeleteStateOnClear = true;
                    }))
                    .AddAzureTableGrainStorage("PubSubStore", builder => builder.Configure<IOptions<ClusterOptions>>((options, silo) =>
                    {
                        options.ConfigureTestDefaults();
                        options.DeleteStateOnClear = true;
                    }))
                    .AddMemoryGrainStorage("MemoryStore")
                    .AddAzureQueueStreams(AzureQueueStreamProviderName, c=>
                        c.ConfigureAzureQueue(ob => ob.Configure<IOptions<ClusterOptions>>(
                            (options, dep) =>
                            {
                                options.ConfigureTestDefaults();
                                options.QueueNames = AzureQueueUtilities.GenerateQueueNames(dep.Value.ClusterId, queueCount);
                            })));
            }
        }

        public override async Task InitializeAsync()
        {
            // Start the cluster first, then build the runner against its internal client.
            await base.InitializeAsync();
            runner = new SingleStreamTestRunner(this.InternalClient, SingleStreamTestRunner.AQ_STREAM_PROVIDER_NAME);
        }

        public override async Task DisposeAsync()
        {
            await base.DisposeAsync();
            // Best-effort cleanup of the Azure queues this run created (only when a
            // real storage connection string is configured).
            if (!string.IsNullOrWhiteSpace(TestDefaultConfiguration.DataConnectionString))
            {
                await AzureQueueStreamProviderUtils.ClearAllUsedAzureQueues(
                    NullLoggerFactory.Instance,
                    AzureQueueUtilities.GenerateQueueNames(this.HostedCluster.Options.ClusterId, queueCount),
                    new AzureQueueOptions().ConfigureTestDefaults());
            }
        }

        // Each AQ_* test below simply delegates to the corresponding scenario on the runner.

        ////------------------------ One to One ----------------------//

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_01_OneProducerGrainOneConsumerGrain()
        {
            await runner.StreamTest_01_OneProducerGrainOneConsumerGrain();
        }

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_02_OneProducerGrainOneConsumerClient()
        {
            await runner.StreamTest_02_OneProducerGrainOneConsumerClient();
        }

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_03_OneProducerClientOneConsumerGrain()
        {
            await runner.StreamTest_03_OneProducerClientOneConsumerGrain();
        }

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_04_OneProducerClientOneConsumerClient()
        {
            await runner.StreamTest_04_OneProducerClientOneConsumerClient();
        }

        //------------------------ MANY to Many different grains ----------------------//

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_05_ManyDifferent_ManyProducerGrainsManyConsumerGrains()
        {
            await runner.StreamTest_05_ManyDifferent_ManyProducerGrainsManyConsumerGrains();
        }

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_06_ManyDifferent_ManyProducerGrainManyConsumerClients()
        {
            await runner.StreamTest_06_ManyDifferent_ManyProducerGrainManyConsumerClients();
        }

        [SkippableFact(Skip="https://github.com/dotnet/orleans/issues/5648"), TestCategory("Functional")]
        public async Task AQ_07_ManyDifferent_ManyProducerClientsManyConsumerGrains()
        {
            await runner.StreamTest_07_ManyDifferent_ManyProducerClientsManyConsumerGrains();
        }

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_08_ManyDifferent_ManyProducerClientsManyConsumerClients()
        {
            await runner.StreamTest_08_ManyDifferent_ManyProducerClientsManyConsumerClients();
        }

        //------------------------ MANY to Many Same grains ----------------------//

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_09_ManySame_ManyProducerGrainsManyConsumerGrains()
        {
            await runner.StreamTest_09_ManySame_ManyProducerGrainsManyConsumerGrains();
        }

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_10_ManySame_ManyConsumerGrainsManyProducerGrains()
        {
            await runner.StreamTest_10_ManySame_ManyConsumerGrainsManyProducerGrains();
        }

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_11_ManySame_ManyProducerGrainsManyConsumerClients()
        {
            await runner.StreamTest_11_ManySame_ManyProducerGrainsManyConsumerClients();
        }

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_12_ManySame_ManyProducerClientsManyConsumerGrains()
        {
            await runner.StreamTest_12_ManySame_ManyProducerClientsManyConsumerGrains();
        }

        //------------------------ MANY to Many producer consumer same grain ----------------------//

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_13_SameGrain_ConsumerFirstProducerLater()
        {
            await runner.StreamTest_13_SameGrain_ConsumerFirstProducerLater(false);
        }

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_14_SameGrain_ProducerFirstConsumerLater()
        {
            await runner.StreamTest_14_SameGrain_ProducerFirstConsumerLater(false);
        }

        //----------------------------------------------//

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_15_ConsumeAtProducersRequest()
        {
            await runner.StreamTest_15_ConsumeAtProducersRequest();
        }

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_16_MultipleStreams_ManyDifferent_ManyProducerGrainsManyConsumerGrains()
        {
            var multiRunner = new MultipleStreamsTestRunner(this.InternalClient, SingleStreamTestRunner.AQ_STREAM_PROVIDER_NAME, 16, false);
            await multiRunner.StreamTest_MultipleStreams_ManyDifferent_ManyProducerGrainsManyConsumerGrains();
        }

        [SkippableFact, TestCategory("Functional")]
        public async Task AQ_17_MultipleStreams_1J_ManyProducerGrainsManyConsumerGrains()
        {
            // Same scenario as AQ_16, but a silo joins the cluster mid-test.
            var multiRunner = new MultipleStreamsTestRunner(this.InternalClient, SingleStreamTestRunner.AQ_STREAM_PROVIDER_NAME, 17, false);
            await multiRunner.StreamTest_MultipleStreams_ManyDifferent_ManyProducerGrainsManyConsumerGrains(
                this.HostedCluster.StartAdditionalSilo);
        }

        //[SkippableFact, TestCategory("BVT")]
        /*public async Task AQ_18_MultipleStreams_1J_1F_ManyProducerGrainsManyConsumerGrains()
        {
            var multiRunner = new MultipleStreamsTestRunner(this.InternalClient, SingleStreamTestRunner.AQ_STREAM_PROVIDER_NAME, 18, false);
            await multiRunner.StreamTest_MultipleStreams_ManyDifferent_ManyProducerGrainsManyConsumerGrains(
                this.HostedCluster.StartAdditionalSilo,
                this.HostedCluster.StopSilo);
        }*/

        [SkippableFact]
        public async Task AQ_19_ConsumerImplicitlySubscribedToProducerClient()
        {
            // todo: currently, the Azure queue queue adaptor doesn't support namespaces, so this test will fail.
            await runner.StreamTest_19_ConsumerImplicitlySubscribedToProducerClient();
        }

        [SkippableFact]
        public async Task AQ_20_ConsumerImplicitlySubscribedToProducerGrain()
        {
            // todo: currently, the Azure queue queue adaptor doesn't support namespaces, so this test will fail.
            await runner.StreamTest_20_ConsumerImplicitlySubscribedToProducerGrain();
        }

        [SkippableFact(Skip = "Ignored"), TestCategory("Failures")]
        public async Task AQ_21_GenericConsumerImplicitlySubscribedToProducerGrain()
        {
            // todo: currently, the Azure queue queue adaptor doesn't support namespaces, so this test will fail.
            await runner.StreamTest_21_GenericConsumerImplicitlySubscribedToProducerGrain();
        }
    }
}
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Text;
using System.IO;
using System.Globalization;
using Newtonsoft.Json.Utilities;

namespace Newtonsoft.Json
{
    // Identifies which ReadAsXxx overload initiated the current read, so the
    // tokenizer can coerce the parsed value to the requested CLR type
    // (e.g. base64 bytes, Int32, Decimal, DateTime) instead of the default.
    internal enum ReadType
    {
        // Plain Read(): no coercion, the token keeps its natural type.
        Read,
        // ReadAsInt32(): number/string coerced to a nullable Int32.
        ReadAsInt32,
        // ReadAsBytes(): string content decoded as base64 into a byte array.
        ReadAsBytes,
        // ReadAsString(): value surfaced as a string token.
        ReadAsString,
        // ReadAsDecimal(): number/string coerced to a nullable Decimal.
        ReadAsDecimal,
        // ReadAsDateTime(): string parsed as a date where possible.
        ReadAsDateTime,
    }

    /// <summary>
    /// Represents a reader that provides fast, non-cached, forward-only access to JSON text data.
/// </summary> public class JsonTextReader : JsonReader, IJsonLineInfo { private const char UnicodeReplacementChar = '\uFFFD'; private const int MaximumJavascriptIntegerCharacterLength = 380; private readonly TextReader _reader; private char[] _chars; private int _charsUsed; private int _charPos; private int _lineStartPos; private int _lineNumber; private bool _isEndOfFile; private StringBuffer _buffer; private StringReference _stringReference; internal PropertyNameTable NameTable; /// <summary> /// Initializes a new instance of the <see cref="JsonReader"/> class with the specified <see cref="TextReader"/>. /// </summary> /// <param name="reader">The <c>TextReader</c> containing the XML data to read.</param> public JsonTextReader(TextReader reader) { if (reader == null) throw new ArgumentNullException("reader"); _reader = reader; _lineNumber = 1; _chars = new char[1025]; } #if DEBUG internal void SetCharBuffer(char[] chars) { _chars = chars; } #endif private StringBuffer GetBuffer() { if (_buffer == null) { _buffer = new StringBuffer(1025); } else { _buffer.Position = 0; } return _buffer; } private void OnNewLine(int pos) { _lineNumber++; _lineStartPos = pos - 1; } private void ParseString(char quote) { _charPos++; ShiftBufferIfNeeded(); ReadStringIntoBuffer(quote); SetPostValueState(true); if (_readType == ReadType.ReadAsBytes) { byte[] data; if (_stringReference.Length == 0) { data = new byte[0]; } else { data = Convert.FromBase64CharArray(_stringReference.Chars, _stringReference.StartIndex, _stringReference.Length); } SetToken(JsonToken.Bytes, data, false); } else if (_readType == ReadType.ReadAsString) { string text = _stringReference.ToString(); SetToken(JsonToken.String, text, false); _quoteChar = quote; } else { string text = _stringReference.ToString(); if (_dateParseHandling != DateParseHandling.None) { DateParseHandling dateParseHandling; if (_readType == ReadType.ReadAsDateTime) dateParseHandling = DateParseHandling.DateTime; else dateParseHandling = 
_dateParseHandling; object dt; if (DateTimeUtils.TryParseDateTime(text, dateParseHandling, DateTimeZoneHandling, DateFormatString, Culture, out dt)) { SetToken(JsonToken.Date, dt, false); return; } } SetToken(JsonToken.String, text, false); _quoteChar = quote; } } private static void BlockCopyChars(char[] src, int srcOffset, char[] dst, int dstOffset, int count) { const int charByteCount = 2; Buffer.BlockCopy(src, srcOffset * charByteCount, dst, dstOffset * charByteCount, count * charByteCount); } private void ShiftBufferIfNeeded() { // once in the last 10% of the buffer shift the remaining content to the start to avoid // unnessesarly increasing the buffer size when reading numbers/strings int length = _chars.Length; if (length - _charPos <= length * 0.1) { int count = _charsUsed - _charPos; if (count > 0) BlockCopyChars(_chars, _charPos, _chars, 0, count); _lineStartPos -= _charPos; _charPos = 0; _charsUsed = count; _chars[_charsUsed] = '\0'; } } private int ReadData(bool append) { return ReadData(append, 0); } private int ReadData(bool append, int charsRequired) { if (_isEndOfFile) return 0; // char buffer is full if (_charsUsed + charsRequired >= _chars.Length - 1) { if (append) { // copy to new array either double the size of the current or big enough to fit required content int newArrayLength = Math.Max(_chars.Length * 2, _charsUsed + charsRequired + 1); // increase the size of the buffer char[] dst = new char[newArrayLength]; BlockCopyChars(_chars, 0, dst, 0, _chars.Length); _chars = dst; } else { int remainingCharCount = _charsUsed - _charPos; if (remainingCharCount + charsRequired + 1 >= _chars.Length) { // the remaining count plus the required is bigger than the current buffer size char[] dst = new char[remainingCharCount + charsRequired + 1]; if (remainingCharCount > 0) BlockCopyChars(_chars, _charPos, dst, 0, remainingCharCount); _chars = dst; } else { // copy any remaining data to the beginning of the buffer if needed and reset positions if 
(remainingCharCount > 0) BlockCopyChars(_chars, _charPos, _chars, 0, remainingCharCount); } _lineStartPos -= _charPos; _charPos = 0; _charsUsed = remainingCharCount; } } int attemptCharReadCount = _chars.Length - _charsUsed - 1; int charsRead = _reader.Read(_chars, _charsUsed, attemptCharReadCount); _charsUsed += charsRead; if (charsRead == 0) _isEndOfFile = true; _chars[_charsUsed] = '\0'; return charsRead; } private bool EnsureChars(int relativePosition, bool append) { if (_charPos + relativePosition >= _charsUsed) return ReadChars(relativePosition, append); return true; } private bool ReadChars(int relativePosition, bool append) { if (_isEndOfFile) return false; int charsRequired = _charPos + relativePosition - _charsUsed + 1; int totalCharsRead = 0; // it is possible that the TextReader doesn't return all data at once // repeat read until the required text is returned or the reader is out of content do { int charsRead = ReadData(append, charsRequired - totalCharsRead); // no more content if (charsRead == 0) break; totalCharsRead += charsRead; } while (totalCharsRead < charsRequired); if (totalCharsRead < charsRequired) return false; return true; } /// <summary> /// Reads the next JSON token from the stream. /// </summary> /// <returns> /// true if the next token was read successfully; false if there are no more tokens to read. /// </returns> [DebuggerStepThrough] public override bool Read() { _readType = ReadType.Read; if (!ReadInternal()) { SetToken(JsonToken.None); return false; } return true; } /// <summary> /// Reads the next JSON token from the stream as a <see cref="Byte"/>[]. /// </summary> /// <returns> /// A <see cref="Byte"/>[] or a null reference if the next JSON token is null. This method will return <c>null</c> at the end of an array. /// </returns> public override byte[] ReadAsBytes() { return ReadAsBytesInternal(); } /// <summary> /// Reads the next JSON token from the stream as a <see cref="Nullable{Decimal}"/>. 
/// </summary> /// <returns>A <see cref="Nullable{Decimal}"/>. This method will return <c>null</c> at the end of an array.</returns> public override decimal? ReadAsDecimal() { return ReadAsDecimalInternal(); } /// <summary> /// Reads the next JSON token from the stream as a <see cref="Nullable{Int32}"/>. /// </summary> /// <returns>A <see cref="Nullable{Int32}"/>. This method will return <c>null</c> at the end of an array.</returns> public override int? ReadAsInt32() { return ReadAsInt32Internal(); } /// <summary> /// Reads the next JSON token from the stream as a <see cref="String"/>. /// </summary> /// <returns>A <see cref="String"/>. This method will return <c>null</c> at the end of an array.</returns> public override string ReadAsString() { return ReadAsStringInternal(); } /// <summary> /// Reads the next JSON token from the stream as a <see cref="Nullable{DateTime}"/>. /// </summary> /// <returns>A <see cref="String"/>. This method will return <c>null</c> at the end of an array.</returns> public override DateTime? 
ReadAsDateTime() { return ReadAsDateTimeInternal(); } internal override bool ReadInternal() { while (true) { switch (_currentState) { case State.Start: case State.Property: case State.Array: case State.ArrayStart: case State.Constructor: case State.ConstructorStart: return ParseValue(); case State.Object: case State.ObjectStart: return ParseObject(); case State.PostValue: // returns true if it hits // end of object or array if (ParsePostValue()) return true; break; case State.Finished: if (EnsureChars(0, false)) { EatWhitespace(false); if (_isEndOfFile) { return false; } if (_chars[_charPos] == '/') { ParseComment(); return true; } throw JsonReaderException.Create(this, "Additional text encountered after finished reading JSON content: {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); } return false; default: throw JsonReaderException.Create(this, "Unexpected state: {0}.".FormatWith(CultureInfo.InvariantCulture, CurrentState)); } } } private void ReadStringIntoBuffer(char quote) { int charPos = _charPos; int initialPosition = _charPos; int lastWritePosition = _charPos; StringBuffer buffer = null; while (true) { switch (_chars[charPos++]) { case '\0': if (_charsUsed == charPos - 1) { charPos--; if (ReadData(true) == 0) { _charPos = charPos; throw JsonReaderException.Create(this, "Unterminated string. Expected delimiter: {0}.".FormatWith(CultureInfo.InvariantCulture, quote)); } } break; case '\\': _charPos = charPos; if (!EnsureChars(0, true)) { _charPos = charPos; throw JsonReaderException.Create(this, "Unterminated string. 
Expected delimiter: {0}.".FormatWith(CultureInfo.InvariantCulture, quote)); } // start of escape sequence int escapeStartPos = charPos - 1; char currentChar = _chars[charPos]; char writeChar; switch (currentChar) { case 'b': charPos++; writeChar = '\b'; break; case 't': charPos++; writeChar = '\t'; break; case 'n': charPos++; writeChar = '\n'; break; case 'f': charPos++; writeChar = '\f'; break; case 'r': charPos++; writeChar = '\r'; break; case '\\': charPos++; writeChar = '\\'; break; case '"': case '\'': case '/': writeChar = currentChar; charPos++; break; case 'u': charPos++; _charPos = charPos; writeChar = ParseUnicode(); if (StringUtils.IsLowSurrogate(writeChar)) { // low surrogate with no preceding high surrogate; this char is replaced writeChar = UnicodeReplacementChar; } else if (StringUtils.IsHighSurrogate(writeChar)) { bool anotherHighSurrogate; // loop for handling situations where there are multiple consecutive high surrogates do { anotherHighSurrogate = false; // potential start of a surrogate pair if (EnsureChars(2, true) && _chars[_charPos] == '\\' && _chars[_charPos + 1] == 'u') { char highSurrogate = writeChar; _charPos += 2; writeChar = ParseUnicode(); if (StringUtils.IsLowSurrogate(writeChar)) { // a valid surrogate pair! 
} else if (StringUtils.IsHighSurrogate(writeChar)) { // another high surrogate; replace current and start check over highSurrogate = UnicodeReplacementChar; anotherHighSurrogate = true; } else { // high surrogate not followed by low surrogate; original char is replaced highSurrogate = UnicodeReplacementChar; } if (buffer == null) buffer = GetBuffer(); WriteCharToBuffer(buffer, highSurrogate, lastWritePosition, escapeStartPos); lastWritePosition = _charPos; } else { // there are not enough remaining chars for the low surrogate or is not follow by unicode sequence // replace high surrogate and continue on as usual writeChar = UnicodeReplacementChar; } } while (anotherHighSurrogate); } charPos = _charPos; break; default: charPos++; _charPos = charPos; throw JsonReaderException.Create(this, "Bad JSON escape sequence: {0}.".FormatWith(CultureInfo.InvariantCulture, @"\" + currentChar)); } if (buffer == null) buffer = GetBuffer(); WriteCharToBuffer(buffer, writeChar, lastWritePosition, escapeStartPos); lastWritePosition = charPos; break; case StringUtils.CarriageReturn: _charPos = charPos - 1; ProcessCarriageReturn(true); charPos = _charPos; break; case StringUtils.LineFeed: _charPos = charPos - 1; ProcessLineFeed(); charPos = _charPos; break; case '"': case '\'': if (_chars[charPos - 1] == quote) { charPos--; if (initialPosition == lastWritePosition) { _stringReference = new StringReference(_chars, initialPosition, charPos - initialPosition); } else { if (buffer == null) buffer = GetBuffer(); if (charPos > lastWritePosition) buffer.Append(_chars, lastWritePosition, charPos - lastWritePosition); _stringReference = new StringReference(buffer.GetInternalBuffer(), 0, buffer.Position); } charPos++; _charPos = charPos; return; } break; } } } private void WriteCharToBuffer(StringBuffer buffer, char writeChar, int lastWritePosition, int writeToPosition) { if (writeToPosition > lastWritePosition) { buffer.Append(_chars, lastWritePosition, writeToPosition - lastWritePosition); } 
buffer.Append(writeChar); } private char ParseUnicode() { char writeChar; if (EnsureChars(4, true)) { string hexValues = new string(_chars, _charPos, 4); char hexChar = Convert.ToChar(int.Parse(hexValues, NumberStyles.HexNumber, NumberFormatInfo.InvariantInfo)); writeChar = hexChar; _charPos += 4; } else { throw JsonReaderException.Create(this, "Unexpected end while parsing unicode character."); } return writeChar; } private void ReadNumberIntoBuffer() { int charPos = _charPos; while (true) { switch (_chars[charPos]) { case '\0': _charPos = charPos; if (_charsUsed == charPos) { if (ReadData(true) == 0) return; } else { return; } break; case '-': case '+': case 'a': case 'A': case 'b': case 'B': case 'c': case 'C': case 'd': case 'D': case 'e': case 'E': case 'f': case 'F': case 'x': case 'X': case '.': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': charPos++; break; default: _charPos = charPos; char currentChar = _chars[_charPos]; if (char.IsWhiteSpace(currentChar) || currentChar == ',' || currentChar == '}' || currentChar == ']' || currentChar == ')' || currentChar == '/') { return; } throw JsonReaderException.Create(this, "Unexpected character encountered while parsing number: {0}.".FormatWith(CultureInfo.InvariantCulture, currentChar)); } } } private void ClearRecentString() { if (_buffer != null) _buffer.Position = 0; _stringReference = new StringReference(); } private bool ParsePostValue() { while (true) { char currentChar = _chars[_charPos]; switch (currentChar) { case '\0': if (_charsUsed == _charPos) { if (ReadData(false) == 0) { _currentState = State.Finished; return false; } } else { _charPos++; } break; case '}': _charPos++; SetToken(JsonToken.EndObject); return true; case ']': _charPos++; SetToken(JsonToken.EndArray); return true; case ')': _charPos++; SetToken(JsonToken.EndConstructor); return true; case '/': ParseComment(); return true; case ',': _charPos++; // finished parsing 
SetStateBasedOnCurrent(); return false; case ' ': case StringUtils.Tab: // eat _charPos++; break; case StringUtils.CarriageReturn: ProcessCarriageReturn(false); break; case StringUtils.LineFeed: ProcessLineFeed(); break; default: if (char.IsWhiteSpace(currentChar)) { // eat _charPos++; } else { throw JsonReaderException.Create(this, "After parsing a value an unexpected character was encountered: {0}.".FormatWith(CultureInfo.InvariantCulture, currentChar)); } break; } } } private bool ParseObject() { while (true) { char currentChar = _chars[_charPos]; switch (currentChar) { case '\0': if (_charsUsed == _charPos) { if (ReadData(false) == 0) return false; } else { _charPos++; } break; case '}': SetToken(JsonToken.EndObject); _charPos++; return true; case '/': ParseComment(); return true; case StringUtils.CarriageReturn: ProcessCarriageReturn(false); break; case StringUtils.LineFeed: ProcessLineFeed(); break; case ' ': case StringUtils.Tab: // eat _charPos++; break; default: if (char.IsWhiteSpace(currentChar)) { // eat _charPos++; } else { return ParseProperty(); } break; } } } private bool ParseProperty() { char firstChar = _chars[_charPos]; char quoteChar; if (firstChar == '"' || firstChar == '\'') { _charPos++; quoteChar = firstChar; ShiftBufferIfNeeded(); ReadStringIntoBuffer(quoteChar); } else if (ValidIdentifierChar(firstChar)) { quoteChar = '\0'; ShiftBufferIfNeeded(); ParseUnquotedProperty(); } else { throw JsonReaderException.Create(this, "Invalid property identifier character: {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); } string propertyName; if (NameTable != null) { propertyName = NameTable.Get(_stringReference.Chars, _stringReference.StartIndex, _stringReference.Length); // no match in name table if (propertyName == null) propertyName = _stringReference.ToString(); } else { propertyName = _stringReference.ToString(); } EatWhitespace(false); if (_chars[_charPos] != ':') throw JsonReaderException.Create(this, "Invalid character after 
parsing property name. Expected ':' but got: {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); _charPos++; SetToken(JsonToken.PropertyName, propertyName); _quoteChar = quoteChar; ClearRecentString(); return true; } private bool ValidIdentifierChar(char value) { return (char.IsLetterOrDigit(value) || value == '_' || value == '$'); } private void ParseUnquotedProperty() { int initialPosition = _charPos; // parse unquoted property name until whitespace or colon while (true) { switch (_chars[_charPos]) { case '\0': if (_charsUsed == _charPos) { if (ReadData(true) == 0) throw JsonReaderException.Create(this, "Unexpected end while parsing unquoted property name."); break; } _stringReference = new StringReference(_chars, initialPosition, _charPos - initialPosition); return; default: char currentChar = _chars[_charPos]; if (ValidIdentifierChar(currentChar)) { _charPos++; break; } else if (char.IsWhiteSpace(currentChar) || currentChar == ':') { _stringReference = new StringReference(_chars, initialPosition, _charPos - initialPosition); return; } throw JsonReaderException.Create(this, "Invalid JavaScript property identifier character: {0}.".FormatWith(CultureInfo.InvariantCulture, currentChar)); } } } private bool ParseValue() { while (true) { char currentChar = _chars[_charPos]; switch (currentChar) { case '\0': if (_charsUsed == _charPos) { if (ReadData(false) == 0) return false; } else { _charPos++; } break; case '"': case '\'': ParseString(currentChar); return true; case 't': ParseTrue(); return true; case 'f': ParseFalse(); return true; case 'n': if (EnsureChars(1, true)) { char next = _chars[_charPos + 1]; if (next == 'u') ParseNull(); else if (next == 'e') ParseConstructor(); else throw JsonReaderException.Create(this, "Unexpected character encountered while parsing value: {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); } else { throw JsonReaderException.Create(this, "Unexpected end."); } return true; case 'N': ParseNumberNaN(); 
return true; case 'I': ParseNumberPositiveInfinity(); return true; case '-': if (EnsureChars(1, true) && _chars[_charPos + 1] == 'I') ParseNumberNegativeInfinity(); else ParseNumber(); return true; case '/': ParseComment(); return true; case 'u': ParseUndefined(); return true; case '{': _charPos++; SetToken(JsonToken.StartObject); return true; case '[': _charPos++; SetToken(JsonToken.StartArray); return true; case ']': _charPos++; SetToken(JsonToken.EndArray); return true; case ',': // don't increment position, the next call to read will handle comma // this is done to handle multiple empty comma values SetToken(JsonToken.Undefined); return true; case ')': _charPos++; SetToken(JsonToken.EndConstructor); return true; case StringUtils.CarriageReturn: ProcessCarriageReturn(false); break; case StringUtils.LineFeed: ProcessLineFeed(); break; case ' ': case StringUtils.Tab: // eat _charPos++; break; default: if (char.IsWhiteSpace(currentChar)) { // eat _charPos++; break; } if (char.IsNumber(currentChar) || currentChar == '-' || currentChar == '.') { ParseNumber(); return true; } throw JsonReaderException.Create(this, "Unexpected character encountered while parsing value: {0}.".FormatWith(CultureInfo.InvariantCulture, currentChar)); } } } private void ProcessLineFeed() { _charPos++; OnNewLine(_charPos); } private void ProcessCarriageReturn(bool append) { _charPos++; if (EnsureChars(1, append) && _chars[_charPos] == StringUtils.LineFeed) _charPos++; OnNewLine(_charPos); } private bool EatWhitespace(bool oneOrMore) { bool finished = false; bool ateWhitespace = false; while (!finished) { char currentChar = _chars[_charPos]; switch (currentChar) { case '\0': if (_charsUsed == _charPos) { if (ReadData(false) == 0) finished = true; } else { _charPos++; } break; case StringUtils.CarriageReturn: ProcessCarriageReturn(false); break; case StringUtils.LineFeed: ProcessLineFeed(); break; default: if (currentChar == ' ' || char.IsWhiteSpace(currentChar)) { ateWhitespace = true; 
_charPos++; } else { finished = true; } break; } } return (!oneOrMore || ateWhitespace); } private void ParseConstructor() { if (MatchValueWithTrailingSeparator("new")) { EatWhitespace(false); int initialPosition = _charPos; int endPosition; while (true) { char currentChar = _chars[_charPos]; if (currentChar == '\0') { if (_charsUsed == _charPos) { if (ReadData(true) == 0) throw JsonReaderException.Create(this, "Unexpected end while parsing constructor."); } else { endPosition = _charPos; _charPos++; break; } } else if (char.IsLetterOrDigit(currentChar)) { _charPos++; } else if (currentChar == StringUtils.CarriageReturn) { endPosition = _charPos; ProcessCarriageReturn(true); break; } else if (currentChar == StringUtils.LineFeed) { endPosition = _charPos; ProcessLineFeed(); break; } else if (char.IsWhiteSpace(currentChar)) { endPosition = _charPos; _charPos++; break; } else if (currentChar == '(') { endPosition = _charPos; break; } else { throw JsonReaderException.Create(this, "Unexpected character while parsing constructor: {0}.".FormatWith(CultureInfo.InvariantCulture, currentChar)); } } _stringReference = new StringReference(_chars, initialPosition, endPosition - initialPosition); string constructorName = _stringReference.ToString(); EatWhitespace(false); if (_chars[_charPos] != '(') throw JsonReaderException.Create(this, "Unexpected character while parsing constructor: {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); _charPos++; ClearRecentString(); SetToken(JsonToken.StartConstructor, constructorName); } else { throw JsonReaderException.Create(this, "Unexpected content while parsing JSON."); } } private void ParseNumber() { ShiftBufferIfNeeded(); char firstChar = _chars[_charPos]; int initialPosition = _charPos; ReadNumberIntoBuffer(); // set state to PostValue now so that if there is an error parsing the number then the reader can continue SetPostValueState(true); _stringReference = new StringReference(_chars, initialPosition, _charPos - 
initialPosition); object numberValue; JsonToken numberType; bool singleDigit = (char.IsDigit(firstChar) && _stringReference.Length == 1); bool nonBase10 = (firstChar == '0' && _stringReference.Length > 1 && _stringReference.Chars[_stringReference.StartIndex + 1] != '.' && _stringReference.Chars[_stringReference.StartIndex + 1] != 'e' && _stringReference.Chars[_stringReference.StartIndex + 1] != 'E'); if (_readType == ReadType.ReadAsInt32) { if (singleDigit) { // digit char values start at 48 numberValue = firstChar - 48; } else if (nonBase10) { string number = _stringReference.ToString(); try { int integer = number.StartsWith("0x", StringComparison.OrdinalIgnoreCase) ? Convert.ToInt32(number, 16) : Convert.ToInt32(number, 8); numberValue = integer; } catch (Exception ex) { throw JsonReaderException.Create(this, "Input string '{0}' is not a valid integer.".FormatWith(CultureInfo.InvariantCulture, number), ex); } } else { int value; ParseResult parseResult = ConvertUtils.Int32TryParse(_stringReference.Chars, _stringReference.StartIndex, _stringReference.Length, out value); if (parseResult == ParseResult.Success) numberValue = value; else if (parseResult == ParseResult.Overflow) throw JsonReaderException.Create(this, "JSON integer {0} is too large or small for an Int32.".FormatWith(CultureInfo.InvariantCulture, _stringReference.ToString())); else throw JsonReaderException.Create(this, "Input string '{0}' is not a valid integer.".FormatWith(CultureInfo.InvariantCulture, _stringReference.ToString())); } numberType = JsonToken.Integer; } else if (_readType == ReadType.ReadAsDecimal) { if (singleDigit) { // digit char values start at 48 numberValue = (decimal)firstChar - 48; } else if (nonBase10) { string number = _stringReference.ToString(); try { // decimal.Parse doesn't support parsing hexadecimal values long integer = number.StartsWith("0x", StringComparison.OrdinalIgnoreCase) ? 
Convert.ToInt64(number, 16) : Convert.ToInt64(number, 8); numberValue = Convert.ToDecimal(integer); } catch (Exception ex) { throw JsonReaderException.Create(this, "Input string '{0}' is not a valid decimal.".FormatWith(CultureInfo.InvariantCulture, number), ex); } } else { string number = _stringReference.ToString(); decimal value; if (decimal.TryParse(number, NumberStyles.Number | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out value)) numberValue = value; else throw JsonReaderException.Create(this, "Input string '{0}' is not a valid decimal.".FormatWith(CultureInfo.InvariantCulture, _stringReference.ToString())); } numberType = JsonToken.Float; } else { if (singleDigit) { // digit char values start at 48 numberValue = (long)firstChar - 48; numberType = JsonToken.Integer; } else if (nonBase10) { string number = _stringReference.ToString(); try { numberValue = number.StartsWith("0x", StringComparison.OrdinalIgnoreCase) ? Convert.ToInt64(number, 16) : Convert.ToInt64(number, 8); } catch (Exception ex) { throw JsonReaderException.Create(this, "Input string '{0}' is not a valid number.".FormatWith(CultureInfo.InvariantCulture, number), ex); } numberType = JsonToken.Integer; } else { long value; ParseResult parseResult = ConvertUtils.Int64TryParse(_stringReference.Chars, _stringReference.StartIndex, _stringReference.Length, out value); if (parseResult == ParseResult.Success) { numberValue = value; numberType = JsonToken.Integer; } else if (parseResult == ParseResult.Overflow) { throw JsonReaderException.Create(this, "JSON integer {0} is too large or small for an Int64.".FormatWith(CultureInfo.InvariantCulture, _stringReference.ToString())); } else { string number = _stringReference.ToString(); if (_floatParseHandling == FloatParseHandling.Decimal) { decimal d; if (decimal.TryParse(number, NumberStyles.Number | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out d)) numberValue = d; else throw JsonReaderException.Create(this, "Input string '{0}' 
is not a valid decimal.".FormatWith(CultureInfo.InvariantCulture, number)); } else { double d; if (double.TryParse(number, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out d)) numberValue = d; else throw JsonReaderException.Create(this, "Input string '{0}' is not a valid number.".FormatWith(CultureInfo.InvariantCulture, number)); } numberType = JsonToken.Float; } } } ClearRecentString(); // index has already been updated SetToken(numberType, numberValue, false); } private void ParseComment() { // should have already parsed / character before reaching this method _charPos++; if (!EnsureChars(1, false)) throw JsonReaderException.Create(this, "Unexpected end while parsing comment."); bool singlelineComment; if (_chars[_charPos] == '*') singlelineComment = false; else if (_chars[_charPos] == '/') singlelineComment = true; else throw JsonReaderException.Create(this, "Error parsing comment. Expected: *, got {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); _charPos++; int initialPosition = _charPos; bool commentFinished = false; while (!commentFinished) { switch (_chars[_charPos]) { case '\0': if (_charsUsed == _charPos) { if (ReadData(true) == 0) { if (!singlelineComment) throw JsonReaderException.Create(this, "Unexpected end while parsing comment."); _stringReference = new StringReference(_chars, initialPosition, _charPos - initialPosition); commentFinished = true; } } else { _charPos++; } break; case '*': _charPos++; if (!singlelineComment) { if (EnsureChars(0, true)) { if (_chars[_charPos] == '/') { _stringReference = new StringReference(_chars, initialPosition, _charPos - initialPosition - 1); _charPos++; commentFinished = true; } } } break; case StringUtils.CarriageReturn: if (singlelineComment) { _stringReference = new StringReference(_chars, initialPosition, _charPos - initialPosition); commentFinished = true; } ProcessCarriageReturn(true); break; case StringUtils.LineFeed: if (singlelineComment) { 
_stringReference = new StringReference(_chars, initialPosition, _charPos - initialPosition); commentFinished = true; } ProcessLineFeed(); break; default: _charPos++; break; } } SetToken(JsonToken.Comment, _stringReference.ToString()); ClearRecentString(); } private bool MatchValue(string value) { if (!EnsureChars(value.Length - 1, true)) return false; for (int i = 0; i < value.Length; i++) { if (_chars[_charPos + i] != value[i]) { return false; } } _charPos += value.Length; return true; } private bool MatchValueWithTrailingSeparator(string value) { // will match value and then move to the next character, checking that it is a separator character bool match = MatchValue(value); if (!match) return false; if (!EnsureChars(0, false)) return true; return IsSeparator(_chars[_charPos]) || _chars[_charPos] == '\0'; } private bool IsSeparator(char c) { switch (c) { case '}': case ']': case ',': return true; case '/': // check next character to see if start of a comment if (!EnsureChars(1, false)) return false; var nextChart = _chars[_charPos + 1]; return (nextChart == '*' || nextChart == '/'); case ')': if (CurrentState == State.Constructor || CurrentState == State.ConstructorStart) return true; break; case ' ': case StringUtils.Tab: case StringUtils.LineFeed: case StringUtils.CarriageReturn: return true; default: if (char.IsWhiteSpace(c)) return true; break; } return false; } private void ParseTrue() { // check characters equal 'true' // and that it is followed by either a separator character // or the text ends if (MatchValueWithTrailingSeparator(JsonConvert.True)) { SetToken(JsonToken.Boolean, true); } else { throw JsonReaderException.Create(this, "Error parsing boolean value."); } } private void ParseNull() { if (MatchValueWithTrailingSeparator(JsonConvert.Null)) { SetToken(JsonToken.Null); } else { throw JsonReaderException.Create(this, "Error parsing null value."); } } private void ParseUndefined() { if (MatchValueWithTrailingSeparator(JsonConvert.Undefined)) { 
SetToken(JsonToken.Undefined); } else { throw JsonReaderException.Create(this, "Error parsing undefined value."); } } private void ParseFalse() { if (MatchValueWithTrailingSeparator(JsonConvert.False)) { SetToken(JsonToken.Boolean, false); } else { throw JsonReaderException.Create(this, "Error parsing boolean value."); } } private void ParseNumberNegativeInfinity() { if (MatchValueWithTrailingSeparator(JsonConvert.NegativeInfinity)) { if (_floatParseHandling == FloatParseHandling.Decimal) throw new JsonReaderException("Cannot read -Infinity as a decimal."); SetToken(JsonToken.Float, double.NegativeInfinity); } else { throw JsonReaderException.Create(this, "Error parsing negative infinity value."); } } private void ParseNumberPositiveInfinity() { if (MatchValueWithTrailingSeparator(JsonConvert.PositiveInfinity)) { if (_floatParseHandling == FloatParseHandling.Decimal) throw new JsonReaderException("Cannot read Infinity as a decimal."); SetToken(JsonToken.Float, double.PositiveInfinity); } else { throw JsonReaderException.Create(this, "Error parsing positive infinity value."); } } private void ParseNumberNaN() { if (MatchValueWithTrailingSeparator(JsonConvert.NaN)) { if (_floatParseHandling == FloatParseHandling.Decimal) throw new JsonReaderException("Cannot read NaN as a decimal."); SetToken(JsonToken.Float, double.NaN); } else { throw JsonReaderException.Create(this, "Error parsing NaN value."); } } /// <summary> /// Changes the state to closed. /// </summary> public override void Close() { base.Close(); if (CloseInput && _reader != null) _reader.Close(); if (_buffer != null) _buffer.Clear(); } /// <summary> /// Gets a value indicating whether the class can return line information. /// </summary> /// <returns> /// <c>true</c> if LineNumber and LinePosition can be provided; otherwise, <c>false</c>. /// </returns> public bool HasLineInfo() { return true; } /// <summary> /// Gets the current line number. 
/// </summary> /// <value> /// The current line number or 0 if no line information is available (for example, HasLineInfo returns false). /// </value> public int LineNumber { get { if (CurrentState == State.Start && LinePosition == 0) return 0; return _lineNumber; } } /// <summary> /// Gets the current line position. /// </summary> /// <value> /// The current line position or 0 if no line information is available (for example, HasLineInfo returns false). /// </value> public int LinePosition { get { return _charPos - _lineStartPos; } } } }
// ------------------------------------------------------------------------------
// Copyright (c) 2014 Microsoft Corporation
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// ------------------------------------------------------------------------------

namespace Microsoft.Live.Phone.Operations
{
    using System;
    using System.Diagnostics;
    using System.Threading.Tasks;

    using Microsoft.Phone.BackgroundTransfer;

    /// <summary>
    /// This class creates a BackgroundTransferRequest for downloads, sends the request to the BackgroundTransferService
    /// and forwards any events.
    /// Lifecycle: construct via <see cref="Builder"/>, then call <see cref="ExecuteAsync"/> once;
    /// <see cref="Cancel"/> may be called at any point before completion.
    /// NOTE(review): the status field is read and written by both Cancel and ExecuteAsync with no
    /// synchronization — looks like this type assumes single-threaded (UI-thread) use; confirm with callers.
    /// </summary>
    internal class BackgroundDownloadOperation
    {
        #region Nested types

        /// <summary>
        /// Class used to build a BackgroundDownloadOperation because of its long list of parameters.
        /// RequestUri, AccessToken and DownloadLocationOnDevice are required (Debug.Assert-checked in Build);
        /// BackgroundTransferService defaults to the phone's singleton service;
        /// Progress and BackgroundTransferPreferences are optional.
        /// </summary>
        public class Builder
        {
            private Uri requestUri;
            private Uri downloadLocationOnDevice;
            private string accessToken;
            private IBackgroundTransferService backgroundTransferService;

            public Builder()
            {
                // Default to the real phone transfer service; tests can override via the property.
                this.backgroundTransferService = PhoneBackgroundTransferService.Instance;
            }

            /// <summary>Uri of the resource to download. Must not be null.</summary>
            public Uri RequestUri
            {
                get { return this.requestUri; }
                set
                {
                    Debug.Assert(value != null);
                    this.requestUri = value;
                }
            }

            /// <summary>OAuth access token attached to the download request. Must not be null.</summary>
            public string AccessToken
            {
                get { return this.accessToken; }
                set
                {
                    Debug.Assert(value != null);
                    this.accessToken = value;
                }
            }

            /// <summary>Local (on-device) destination for the downloaded file. Must not be null.</summary>
            public Uri DownloadLocationOnDevice
            {
                get { return this.downloadLocationOnDevice; }
                set
                {
                    Debug.Assert(value != null);
                    this.downloadLocationOnDevice = value;
                }
            }

            /// <summary>Transfer service the request is added to / removed from. Must not be null.</summary>
            public IBackgroundTransferService BackgroundTransferService
            {
                get { return this.backgroundTransferService; }
                set
                {
                    Debug.Assert(value != null);
                    this.backgroundTransferService = value;
                }
            }

            /// <summary>Optional sink for progress callbacks; null means no progress reporting.</summary>
            public IProgress<LiveOperationProgress> Progress { get; set; }

            /// <summary>Optional transfer preferences; converted via BackgroundTransferHelper in the operation ctor.</summary>
            public BackgroundTransferPreferences BackgroundTransferPreferences { get; set; }

            /// <summary>
            /// Creates the operation. Debug-asserts that all required properties were set.
            /// </summary>
            public BackgroundDownloadOperation Build()
            {
                Debug.Assert(this.requestUri != null);
                Debug.Assert(this.accessToken != null);
                Debug.Assert(this.downloadLocationOnDevice != null);
                Debug.Assert(this.backgroundTransferService != null);

                return new BackgroundDownloadOperation(this);
            }
        }

        #endregion

        #region Fields

        private readonly Uri requestUri;
        private readonly string accessToken;
        private readonly Uri downloadLocationOnDevice;
        private readonly IBackgroundTransferService backgroundTransferService;
        private readonly IProgress<LiveOperationProgress> progress;
        // Completion source shared with the event adapter; Cancel() cancels it directly.
        private readonly TaskCompletionSource<LiveOperationResult> tcs;
        private readonly TransferPreferences transferPreferences;

        // Created lazily in ExecuteAsync; null until then, so Cancel before Execute must not touch it.
        private BackgroundTransferRequest request;
        // NotStarted -> Started -> Completed, with Cancelled reachable from NotStarted/Started.
        private OperationStatus status;

        #endregion

        #region Constructors

        private BackgroundDownloadOperation(Builder builder)
        {
            this.requestUri = builder.RequestUri;
            this.accessToken = builder.AccessToken;
            this.downloadLocationOnDevice = builder.DownloadLocationOnDevice;
            this.backgroundTransferService = builder.BackgroundTransferService;
            this.progress = builder.Progress;
            this.tcs = new TaskCompletionSource<LiveOperationResult>();
            this.status = OperationStatus.NotStarted;
            this.transferPreferences = BackgroundTransferHelper.GetTransferPreferences(builder.BackgroundTransferPreferences);
        }

        #endregion

        #region Methods

        /// <summary>
        /// Cancels the given operation.
        /// No-op once the operation is already cancelled or completed; otherwise removes the
        /// request from the transfer service (if it was added) and cancels the pending task.
        /// </summary>
        public void Cancel()
        {
            // If we are already cancelled or completed we can just leave.
            if (this.status == OperationStatus.Cancelled || this.status == OperationStatus.Completed)
            {
                return;
            }

            // If we have started, the request must be removed from the service to cancel it.
            if (this.status == OperationStatus.Started)
            {
                this.backgroundTransferService.Remove(this.request);
            }

            // Whether started or not yet started, switch the state to cancelled and
            // notify the TaskCompletionSource to cancel, which faults the awaiter in ExecuteAsync.
            this.status = OperationStatus.Cancelled;
            this.tcs.TrySetCanceled();
        }

        /// <summary>
        /// Performs the BackgroundDownloadOperation.
        /// Builds the BackgroundTransferRequest, wires its status events to a task via
        /// BackgroundDownloadEventAdapter, adds the request to the transfer service (unless
        /// already cancelled) and awaits completion.
        /// NOTE(review): the Cancelled check before Add is check-then-act; a Cancel racing in
        /// between could leave the request added but never removed — presumably benign on a
        /// single thread, verify threading assumptions with callers.
        /// </summary>
        /// <returns>The result of the completed download.</returns>
        public async Task<LiveOperationResult> ExecuteAsync()
        {
            Debug.Assert(this.status != OperationStatus.Completed, "Cannot execute on a completed operation.");

            var builder = new BackgroundDownloadRequestBuilder
            {
                AccessToken = this.accessToken,
                DownloadLocationOnDevice = this.downloadLocationOnDevice,
                RequestUri = this.requestUri,
                TransferPreferences = this.transferPreferences
            };

            this.request = builder.Build();

            var eventAdapter = new BackgroundDownloadEventAdapter(this.backgroundTransferService, this.tcs);
            // With no progress sink, use the overload that skips progress forwarding.
            Task<LiveOperationResult> task = this.progress == null ?
                                             eventAdapter.ConvertTransferStatusChangedToTask(this.request) :
                                             eventAdapter.ConvertTransferStatusChangedToTask(this.request, this.progress);
            // Cancel() cancels this.tcs, so the adapter must hand back the very same task.
            Debug.Assert(this.tcs.Task == task, "EventAdapter returned a different task. This could affect cancel.");

            // If the request has already been cancelled do not add it to the service.
            if (this.status != OperationStatus.Cancelled)
            {
                this.backgroundTransferService.Add(this.request);
                this.status = OperationStatus.Started;
            }

            LiveOperationResult result = await task;
            this.status = OperationStatus.Completed;
            return result;
        }

        #endregion
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;

#if ES_BUILD_STANDALONE
namespace Microsoft.Diagnostics.Tracing
#else
namespace System.Diagnostics.Tracing
#endif
{
    /// <summary>
    /// TraceLogging: Used when implementing a custom TraceLoggingTypeInfo.
    /// These are passed to metadataCollector.Add to specify the low-level
    /// type of a field in the event payload. Note that a "formatted"
    /// TraceLoggingDataType consists of a core TraceLoggingDataType value
    /// (a TraceLoggingDataType with a value less than 32) plus an OutType.
    /// Any combination of TraceLoggingDataType + OutType is valid, but not
    /// all are useful. In particular, combinations not explicitly listed
    /// below are unlikely to be recognized by decoders, and will typically
    /// be decoded as the corresponding core type (i.e. the decoder will
    /// mask off any unrecognized OutType value).
    /// Layout: the low byte is the core (in)type; the OutType is shifted
    /// into the second byte, as seen in the "Formatted type" members below.
    /// NOTE(review): core values 16 and 19 are not defined here —
    /// presumably reserved by the wire format; confirm against the ETW
    /// TraceLogging metadata specification before reusing them.
    /// </summary>
    internal enum TraceLoggingDataType
    {
        /// <summary>
        /// Core type.
        /// Data type with no value (0-length payload).
        /// NOTE: arrays of Nil are illegal.
        /// NOTE: a fixed-length array of Nil is interpreted by the decoder as
        /// a struct (obsolete but retained for backwards-compatibility).
        /// </summary>
        Nil = 0,

        /// <summary>
        /// Core type.
        /// Encoding assumes null-terminated Char16 string.
        /// Decoding treats as UTF-16LE string.
        /// </summary>
        Utf16String = 1,

        /// <summary>
        /// Core type.
        /// Encoding assumes null-terminated Char8 string.
        /// Decoding treats as MBCS string.
        /// </summary>
        MbcsString = 2,

        /// <summary>
        /// Core type.
        /// Encoding assumes 8-bit value.
        /// Decoding treats as signed integer.
        /// </summary>
        Int8 = 3,

        /// <summary>
        /// Core type.
        /// Encoding assumes 8-bit value.
        /// Decoding treats as unsigned integer.
        /// </summary>
        UInt8 = 4,

        /// <summary>
        /// Core type.
        /// Encoding assumes 16-bit value.
        /// Decoding treats as signed integer.
        /// </summary>
        Int16 = 5,

        /// <summary>
        /// Core type.
        /// Encoding assumes 16-bit value.
        /// Decoding treats as unsigned integer.
        /// </summary>
        UInt16 = 6,

        /// <summary>
        /// Core type.
        /// Encoding assumes 32-bit value.
        /// Decoding treats as signed integer.
        /// </summary>
        Int32 = 7,

        /// <summary>
        /// Core type.
        /// Encoding assumes 32-bit value.
        /// Decoding treats as unsigned integer.
        /// </summary>
        UInt32 = 8,

        /// <summary>
        /// Core type.
        /// Encoding assumes 64-bit value.
        /// Decoding treats as signed integer.
        /// </summary>
        Int64 = 9,

        /// <summary>
        /// Core type.
        /// Encoding assumes 64-bit value.
        /// Decoding treats as unsigned integer.
        /// </summary>
        UInt64 = 10,

        /// <summary>
        /// Core type.
        /// Encoding assumes 32-bit value.
        /// Decoding treats as Float.
        /// </summary>
        Float = 11,

        /// <summary>
        /// Core type.
        /// Encoding assumes 64-bit value.
        /// Decoding treats as Double.
        /// </summary>
        Double = 12,

        /// <summary>
        /// Core type.
        /// Encoding assumes 32-bit value.
        /// Decoding treats as Boolean.
        /// </summary>
        Boolean32 = 13,

        /// <summary>
        /// Core type.
        /// Encoding assumes 16-bit bytecount followed by binary data.
        /// Decoding treats as binary data.
        /// </summary>
        Binary = 14,

        /// <summary>
        /// Core type.
        /// Encoding assumes 16-byte value.
        /// Decoding treats as GUID.
        /// </summary>
        Guid = 15,

        /// <summary>
        /// Core type.
        /// Encoding assumes 64-bit value.
        /// Decoding treats as FILETIME.
        /// </summary>
        FileTime = 17,

        /// <summary>
        /// Core type.
        /// Encoding assumes 16-byte value.
        /// Decoding treats as SYSTEMTIME.
        /// </summary>
        SystemTime = 18,

        /// <summary>
        /// Core type.
        /// Encoding assumes 32-bit value.
        /// Decoding treats as hexadecimal unsigned integer.
        /// </summary>
        HexInt32 = 20,

        /// <summary>
        /// Core type.
        /// Encoding assumes 64-bit value.
        /// Decoding treats as hexadecimal unsigned integer.
        /// </summary>
        HexInt64 = 21,

        /// <summary>
        /// Core type.
        /// Encoding assumes 16-bit bytecount followed by Char16 data.
        /// Decoding treats as UTF-16LE string.
        /// </summary>
        CountedUtf16String = 22,

        /// <summary>
        /// Core type.
        /// Encoding assumes 16-bit bytecount followed by Char8 data.
        /// Decoding treats as MBCS string.
        /// </summary>
        CountedMbcsString = 23,

        /// <summary>
        /// Core type.
        /// Special case: Struct indicates that this field plus the the
        /// subsequent N logical fields are to be considered as one logical
        /// field (i.e. a nested structure). The OutType is used to encode N.
        /// The maximum value for N is 127. This field has no payload by
        /// itself, but logically contains the payload of the following N
        /// fields. It is legal to have an array of Struct.
        /// </summary>
        Struct = 24,

        // --- Formatted types: core type in the low byte + EventFieldFormat in the second byte. ---

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 16-bit value.
        /// Decoding treats as UTF-16LE character.
        /// </summary>
        Char16 = UInt16 + (EventFieldFormat.String << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 8-bit value.
        /// Decoding treats as character.
        /// </summary>
        Char8 = UInt8 + (EventFieldFormat.String << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 8-bit value.
        /// Decoding treats as Boolean.
        /// </summary>
        Boolean8 = UInt8 + (EventFieldFormat.Boolean << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 8-bit value.
        /// Decoding treats as hexadecimal unsigned integer.
        /// </summary>
        HexInt8 = UInt8 + (EventFieldFormat.Hexadecimal << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 16-bit value.
        /// Decoding treats as hexadecimal unsigned integer.
        /// </summary>
        HexInt16 = UInt16 + (EventFieldFormat.Hexadecimal << 8),

#if false
        /// <summary>
        /// Formatted type.
        /// Encoding assumes 32-bit value.
        /// Decoding treats as process identifier.
        /// </summary>
        ProcessId = UInt32 + (EventSourceFieldFormat.ProcessId << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 32-bit value.
        /// Decoding treats as thread identifier.
        /// </summary>
        ThreadId = UInt32 + (EventSourceFieldFormat.ThreadId << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 16-bit value.
        /// Decoding treats as IP port.
        /// </summary>
        Port = UInt16 + (EventSourceFieldFormat.Port << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 32-bit value.
        /// Decoding treats as IPv4 address.
        /// </summary>
        Ipv4Address = UInt32 + (EventSourceFieldFormat.Ipv4Address << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 16-bit bytecount followed by binary data.
        /// Decoding treats as IPv6 address.
        /// </summary>
        Ipv6Address = Binary + (EventSourceFieldFormat.Ipv6Address << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 16-bit bytecount followed by binary data.
        /// Decoding treats as SOCKADDR.
        /// </summary>
        SocketAddress = Binary + (EventSourceFieldFormat.SocketAddress << 8),
#endif

        /// <summary>
        /// Formatted type.
        /// Encoding assumes null-terminated Char16 string.
        /// Decoding treats as UTF-16LE XML string.
        /// </summary>
        Utf16Xml = Utf16String + (EventFieldFormat.Xml << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes null-terminated Char8 string.
        /// Decoding treats as MBCS XML string.
        /// </summary>
        MbcsXml = MbcsString + (EventFieldFormat.Xml << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 16-bit bytecount followed by Char16 data.
        /// Decoding treats as UTF-16LE XML.
        /// </summary>
        CountedUtf16Xml = CountedUtf16String + (EventFieldFormat.Xml << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 16-bit bytecount followed by Char8 data.
        /// Decoding treats as MBCS XML.
        /// </summary>
        CountedMbcsXml = CountedMbcsString + (EventFieldFormat.Xml << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes null-terminated Char16 string.
        /// Decoding treats as UTF-16LE JSON string.
        /// </summary>
        Utf16Json = Utf16String + (EventFieldFormat.Json << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes null-terminated Char8 string.
        /// Decoding treats as MBCS JSON string.
        /// </summary>
        MbcsJson = MbcsString + (EventFieldFormat.Json << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 16-bit bytecount followed by Char16 data.
        /// Decoding treats as UTF-16LE JSON.
        /// </summary>
        CountedUtf16Json = CountedUtf16String + (EventFieldFormat.Json << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 16-bit bytecount followed by Char8 data.
        /// Decoding treats as MBCS JSON.
        /// </summary>
        CountedMbcsJson = CountedMbcsString + (EventFieldFormat.Json << 8),

#if false
        /// <summary>
        /// Formatted type.
        /// Encoding assumes 32-bit value.
        /// Decoding treats as Win32 error.
        /// </summary>
        Win32Error = UInt32 + (EventSourceFieldFormat.Win32Error << 8),

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 32-bit value.
        /// Decoding treats as NTSTATUS.
        /// </summary>
        NTStatus = UInt32 + (EventSourceFieldFormat.NTStatus << 8),
#endif

        /// <summary>
        /// Formatted type.
        /// Encoding assumes 32-bit value.
        /// Decoding treats as HRESULT.
        /// </summary>
        HResult = Int32 + (EventFieldFormat.HResult << 8)
    }
}
//
// (C) Copyright 2003-2011 by Autodesk, Inc.
//
// Permission to use, copy, modify, and distribute this software in
// object code form for any purpose and without fee is hereby granted,
// provided that the above copyright notice appears in all copies and
// that both that copyright notice and the limited warranty and
// restricted rights notice below appear in all supporting
// documentation.
//
// AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
// AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
// MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
// DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
// UNINTERRUPTED OR ERROR FREE.
//
// Use, duplication, or disclosure by the U.S. Government is subject to
// restrictions set forth in FAR 52.227-19 (Commercial Computer
// Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii)
// (Rights in Technical Data and Computer Software), as applicable.
//

using System;
using System.Collections.Generic;
using System.Text;
using System.ComponentModel;

using Autodesk.Revit.DB.Structure;

namespace Revit.SDK.Samples.NewRebar.CS
{
    /// <summary>
    /// Bend orientation enum.
    /// Values (+1 / -1) are cast to int and passed straight to the Revit
    /// RebarShapeDefinitionBySegments Add*Radius APIs as the turn direction.
    /// </summary>
    enum BendOrientation
    {
        /// <summary>
        /// Turn left.
        /// </summary>
        Left = 1,
        /// <summary>
        /// Turn right.
        /// </summary>
        Right = -1
    }

    /// <summary>
    /// Segment's ends reference enum.
    /// </summary>
    enum EndReference
    {
        /// <summary>
        /// Segment's start reference.
        /// </summary>
        Begin = 0,
        /// <summary>
        /// Segment's end reference.
        /// </summary>
        End = 1
    }

    /// <summary>
    /// Constraint to be added to RebarShapeDefBySegment.
    /// Base class for all segment-based constraints; provides access to the
    /// underlying RebarShapeDefinitionBySegments and keeps the property-grid
    /// type converter's segment count in sync.
    /// </summary>
    abstract class ConstraintOnSegmentShape : ConstraintOnRebarShape
    {
        public ConstraintOnSegmentShape(RebarShapeDefBySegment def)
            : base(def)
        {
        }

        /// <summary>
        /// Update list value for property grid.
        /// Refreshes TypeConverterSegmentId.SegmentCount so the drop-down in the
        /// property grid offers the current number of segments.
        /// </summary>
        protected void UpdateSegmentIdTypeConverter()
        {
            TypeConverterSegmentId.SegmentCount =
                GetRebarShapeDefinitionBySegments.NumberOfSegments;
        }

        /// <summary>
        /// Get RebarShapeDefinitionBySegments object.
        /// NOTE(review): the "as" cast returns null if m_shapeDef holds a
        /// different definition kind — callers dereference without a check;
        /// presumably segment constraints are only created for segment
        /// definitions, verify at the construction site.
        /// </summary>
        protected RebarShapeDefinitionBySegments GetRebarShapeDefinitionBySegments
        {
            get
            {
                return m_shapeDef.RebarshapeDefinition as RebarShapeDefinitionBySegments;
            }
        }
    }

    /// <summary>
    /// Default radius dimension of bend.
    /// </summary>
    class ConstraintBendDefaultRadius : ConstraintOnSegmentShape
    {
        /// <summary>
        /// Segment to be added constraint on.
        /// </summary>
        private int m_segment;

        /// <summary>
        /// Bend orientation field.
        /// </summary>
        private BendOrientation m_turn;

        /// <summary>
        /// Bend angle field.
        /// </summary>
        private RebarShapeBendAngle m_bendAngle;

        /// <summary>
        /// Constructor. Defaults: left turn, obtuse bend angle.
        /// </summary>
        /// <param name="def"></param>
        public ConstraintBendDefaultRadius(RebarShapeDefBySegment def)
            : base(def)
        {
            m_turn = BendOrientation.Left;
            m_bendAngle = RebarShapeBendAngle.Obtuse;
        }

        /// <summary>
        /// Segment to be added constraint on.
        /// </summary>
        [TypeConverter(typeof(TypeConverterSegmentId))]
        public int Segment
        {
            get
            {
                // Keep the property-grid segment list current before showing the value.
                UpdateSegmentIdTypeConverter();
                return m_segment;
            }
            set { m_segment = value; }
        }

        /// <summary>
        /// Bend orientation property.
        /// </summary>
        public BendOrientation Turn
        {
            get { return m_turn; }
            set { m_turn = value; }
        }

        /// <summary>
        /// Bend angle property.
        /// </summary>
        public RebarShapeBendAngle BendAngle
        {
            get { return m_bendAngle; }
            set { m_bendAngle = value; }
        }

        /// <summary>
        /// Add bend default radius constraint to RebarShapeDefinitionBySegments.
        /// </summary>
        public override void Commit()
        {
            GetRebarShapeDefinitionBySegments.AddBendDefaultRadius(
                m_segment, (int)m_turn, m_bendAngle);
        }
    }

    /// <summary>
    /// Variable radius dimension of bend.
    /// </summary>
    class ConstraintBendVariableRadius : ConstraintOnSegmentShape
    {
        /// <summary>
        /// Segment to be added constraint on.
        /// </summary>
        private int m_segment;

        /// <summary>
        /// Bend orientation field.
        /// </summary>
        private BendOrientation m_turn;

        /// <summary>
        /// Bend angle field.
        /// </summary>
        private RebarShapeBendAngle m_bendAngle;

        /// <summary>
        /// Radius dimension field.
        /// </summary>
        private RebarShapeParameter m_radiusParameter;

        /// <summary>
        /// Measure length including bar thickness or not.
        /// </summary>
        private bool m_measureIncludingBarThickness;

        /// <summary>
        /// Constructor. Defaults: obtuse angle, left turn, measurement includes bar thickness.
        /// </summary>
        public ConstraintBendVariableRadius(RebarShapeDefBySegment def)
            : base(def)
        {
            m_bendAngle = RebarShapeBendAngle.Obtuse;
            m_turn = BendOrientation.Left;
            m_measureIncludingBarThickness = true;
        }

        /// <summary>
        /// Segment to be added constraint on.
        /// </summary>
        [TypeConverter(typeof(TypeConverterSegmentId))]
        public int Segment
        {
            get
            {
                UpdateSegmentIdTypeConverter();
                return m_segment;
            }
            set { m_segment = value; }
        }

        /// <summary>
        /// Bend orientation property.
        /// </summary>
        public BendOrientation Turn
        {
            get { return m_turn; }
            set { m_turn = value; }
        }

        /// <summary>
        /// Bend angle property.
        /// </summary>
        public RebarShapeBendAngle BendAngle
        {
            get { return m_bendAngle; }
            set { m_bendAngle = value; }
        }

        /// <summary>
        /// Radius dimension property.
        /// </summary>
        [TypeConverter(typeof(TypeConverterRebarShapeParameter))]
        public RebarShapeParameter RadiusParameter
        {
            get
            {
                // Refresh the parameter list shown in the property grid (helper from base class).
                UpdateParameterTypeConverter();
                return m_radiusParameter;
            }
            set { m_radiusParameter = value; }
        }

        /// <summary>
        /// Measure including bar thickness or not.
        /// </summary>
        public bool MeasureIncludingBarThickness
        {
            get { return m_measureIncludingBarThickness; }
            set { m_measureIncludingBarThickness = value; }
        }

        /// <summary>
        /// Add Dimension to constrain the bend radius.
        /// </summary>
        public override void Commit()
        {
            GetRebarShapeDefinitionBySegments.AddBendVariableRadius(
                m_segment, (int)m_turn, m_bendAngle,
                m_radiusParameter.Parameter, m_measureIncludingBarThickness);
        }
    }

    /// <summary>
    /// Parallel dimension to segment.
    /// </summary>
    class ConstraintParallelToSegment : ConstraintOnSegmentShape
    {
        /// <summary>
        /// Segment to be added constraint on.
        /// </summary>
        private int m_segment;

        /// <summary>
        /// Dimension to constrain the length of segment.
        /// </summary>
        private RebarShapeParameter m_parameter;

        /// <summary>
        /// Measure segment's length to outside of bend 0 or not.
        /// </summary>
        private bool m_measureToOutsideOfBend0;

        /// <summary>
        /// Measure segment's length to outside of bend 1 or not.
        /// </summary>
        private bool m_measureToOutsideOfBend1;

        /// <summary>
        /// Constructor. Defaults: measure to the outside of both bends.
        /// </summary>
        /// <param name="def"></param>
        public ConstraintParallelToSegment(RebarShapeDefBySegment def)
            : base(def)
        {
            m_measureToOutsideOfBend0 = true;
            m_measureToOutsideOfBend1 = true;
        }

        /// <summary>
        /// Segment to be added constraint on.
        /// </summary>
        [TypeConverter(typeof(TypeConverterSegmentId))]
        public int Segment
        {
            get
            {
                UpdateSegmentIdTypeConverter();
                return m_segment;
            }
            set { m_segment = value; }
        }

        /// <summary>
        /// Dimension to constrain the length of segment.
        /// </summary>
        [TypeConverter(typeof(TypeConverterRebarShapeParameter))]
        public RebarShapeParameter Parameter
        {
            get
            {
                UpdateParameterTypeConverter();
                return m_parameter;
            }
            set { m_parameter = value; }
        }

        /// <summary>
        /// Measure segment's length to outside of bend 0 or not.
        /// </summary>
        public bool MeasureToOutsideOfBend0
        {
            get { return m_measureToOutsideOfBend0; }
            set { m_measureToOutsideOfBend0 = value; }
        }

        /// <summary>
        /// Measure segment's length to outside of bend 1 or not.
        /// </summary>
        public bool MeasureToOutsideOfBend1
        {
            get { return m_measureToOutsideOfBend1; }
            set { m_measureToOutsideOfBend1 = value; }
        }

        /// <summary>
        /// Add Dimension to constrain the segment length.
        /// </summary>
        public override void Commit()
        {
            GetRebarShapeDefinitionBySegments.AddConstraintParallelToSegment(
                m_segment, m_parameter.Parameter,
                m_measureToOutsideOfBend0, m_measureToOutsideOfBend1);
        }
    }

    /// <summary>
    /// Length dimension of segment in specified direction.
    /// </summary>
    class ConstraintToSegment : ConstraintOnSegmentShape
    {
        /// <summary>
        /// Segment to be added constraint on.
        /// </summary>
        private int m_segment;

        /// <summary>
        /// Dimension to constraint the length of segment in specified direction.
        /// </summary>
        private RebarShapeParameter m_parameter;

        /// <summary>
        /// X coordinate of constraint direction.
        /// </summary>
        private double m_constraintDirCoordX;

        /// <summary>
        /// Y coordinate of constraint direction.
        /// </summary>
        private double m_constraintDirCoordY;

        /// <summary>
        /// Sign of Z coordinate of cross product of constraint direction by segment direction.
        /// </summary>
        private int m_signOfZCoordOfCrossProductOfConstraintDirBySegmentDir;

        /// <summary>
        /// Measure segment's length to outside of bend 0 or not.
        /// </summary>
        private bool m_measureToOutsideOfBend0;

        /// <summary>
        /// Measure segment's length to outside of bend 1 or not.
        /// </summary>
        private bool m_measureToOutsideOfBend1;

        /// <summary>
        /// Constructor. Defaults: measure to outside of bend 0 only, cross-product sign -1.
        /// </summary>
        public ConstraintToSegment(RebarShapeDefBySegment def)
            : base(def)
        {
            m_measureToOutsideOfBend0 = true;
            m_measureToOutsideOfBend1 = false;
            m_signOfZCoordOfCrossProductOfConstraintDirBySegmentDir = -1;
        }

        /// <summary>
        /// Segment to be added constraint on.
        /// </summary>
        [TypeConverter(typeof(TypeConverterSegmentId))]
        public int Segment
        {
            get
            {
                UpdateSegmentIdTypeConverter();
                return m_segment;
            }
            set { m_segment = value; }
        }

        /// <summary>
        /// Dimension to constraint the length of segment in specified direction.
        /// </summary>
        [TypeConverter(typeof(TypeConverterRebarShapeParameter))]
        public RebarShapeParameter Parameter
        {
            get
            {
                UpdateParameterTypeConverter();
                return m_parameter;
            }
            set { m_parameter = value; }
        }

        /// <summary>
        /// X coordinate of constraint direction.
        /// </summary>
        public double ConstraintDirCoordX
        {
            get { return m_constraintDirCoordX; }
            set { m_constraintDirCoordX = value; }
        }

        /// <summary>
        /// Y coordinate of constraint direction.
        /// </summary>
        public double ConstraintDirCoordY
        {
            get { return m_constraintDirCoordY; }
            set { m_constraintDirCoordY = value; }
        }

        /// <summary>
        /// Sign of Z coordinate of cross product of constraint direction by segment direction.
        /// </summary>
        public int SignOfZCoordOfCrossProductOfConstraintDirBySegmentDir
        {
            get { return m_signOfZCoordOfCrossProductOfConstraintDirBySegmentDir; }
            set { m_signOfZCoordOfCrossProductOfConstraintDirBySegmentDir = value; }
        }

        /// <summary>
        /// Measure segment's length to outside of bend 0 or not.
        /// </summary>
        public bool MeasureToOutsideOfBend0
        {
            get { return m_measureToOutsideOfBend0; }
            set { m_measureToOutsideOfBend0 = value; }
        }

        /// <summary>
        /// Measure segment's length to outside of bend 1 or not.
        /// </summary>
        public bool MeasureToOutsideOfBend1
        {
            get { return m_measureToOutsideOfBend1; }
            set { m_measureToOutsideOfBend1 = value; }
        }

        /// <summary>
        /// Add dimension to constrain the length of segment in the specified direction.
        /// </summary>
        public override void Commit()
        {
            GetRebarShapeDefinitionBySegments.AddConstraintToSegment(
                m_segment, m_parameter.Parameter,
                m_constraintDirCoordX, m_constraintDirCoordY,
                m_signOfZCoordOfCrossProductOfConstraintDirBySegmentDir,
                m_measureToOutsideOfBend0, m_measureToOutsideOfBend1);
        }
    }

    /// <summary>
    /// Listening length dimension between two bends.
    /// </summary>
    class ListeningDimensionBendToBend : ConstraintOnSegmentShape
    {
        /// <summary>
        /// Dimension to constraint the length of two bends in the specified direction.
        /// </summary>
        private RebarShapeParameter m_parameter;

        /// <summary>
        /// X coordinate of constraint direction.
        /// </summary>
        private double m_constraintDirCoordX;

        /// <summary>
        /// Y coordinate of constraint direction.
        /// </summary>
        private double m_constraintDirCoordY;

        /// <summary>
        /// Reference of segment 0.
        /// </summary>
        private int m_segment;

        /// <summary>
        /// End reference of segment 0.
        /// </summary>
        private EndReference m_end;

        /// <summary>
        /// Reference of segment 1.
        /// </summary>
        private int m_segment1;

        /// <summary>
        /// End reference of segment 1.
        /// </summary>
        private EndReference m_end1;

        /// <summary>
        /// Constructor. Defaults: segment 0 measured from its begin, segment 1 from its end,
        /// direction coordinates zeroed.
        /// </summary>
        public ListeningDimensionBendToBend(RebarShapeDefBySegment def)
            : base(def)
        {
            m_end = EndReference.Begin;
            m_end1 = EndReference.End;
            m_constraintDirCoordX = 0;
            m_constraintDirCoordY = 0;
        }

        /// <summary>
        /// Dimension to constraint the length of two bends in the specified direction.
        /// </summary>
        [TypeConverter(typeof(TypeConverterRebarShapeParameter))]
        public RebarShapeParameter Parameter
        {
            get
            {
                UpdateParameterTypeConverter();
                return m_parameter;
            }
            set { m_parameter = value; }
        }

        /// <summary>
        /// X coordinate of constraint direction.
        /// </summary>
        public double ConstraintDirCoordX
        {
            get { return m_constraintDirCoordX; }
            set { m_constraintDirCoordX = value; }
        }

        /// <summary>
        /// Y coordinate of constraint direction.
        /// </summary>
        public double ConstraintDirCoordY
        {
            get { return m_constraintDirCoordY; }
            set { m_constraintDirCoordY = value; }
        }

        /// <summary>
        /// Reference of segment 0.
        /// </summary>
        [TypeConverter(typeof(TypeConverterSegmentId))]
        public int Segment0
        {
            get
            {
                UpdateSegmentIdTypeConverter();
                return m_segment;
            }
            set { m_segment = value; }
        }

        /// <summary>
        /// End reference of segment 0.
        /// </summary>
        public EndReference End0
        {
            get { return m_end; }
            set { m_end = value; }
        }

        /// <summary>
        /// Reference of segment 1.
/// </summary> [TypeConverter(typeof(TypeConverterSegmentId))] public int Segment1 { get { UpdateSegmentIdTypeConverter(); return m_segment1; } set { m_segment1 = value; } } /// <summary> /// End reference of segment 1. /// </summary> public EndReference End1 { get { return m_end1; } set { m_end1 = value; } } /// <summary> /// Add listening dimension to constrain the length of two bend in the specified direction. /// </summary> public override void Commit() { GetRebarShapeDefinitionBySegments.AddListeningDimensionBendToBend( m_parameter.Parameter, m_constraintDirCoordX, m_constraintDirCoordY, m_segment, (int)m_end, m_segment1, (int)m_end1); } } /// <summary> /// Listening length dimension between a segment and a bend. /// </summary> class ListeningDimensionSegmentToBend : ConstraintOnSegmentShape { /// <summary> /// Dimension to constrain the length between a segment and a bend /// in the specified direction. /// </summary> private RebarShapeParameter m_parameter; /// <summary> /// X coordinate of constraint direction. /// </summary> private double m_constraintDirCoordX; /// <summary> /// Y coordinate of constraint direction. /// </summary> private double m_constraintDirCoordY; /// <summary> /// Reference of segment 0. /// </summary> private int m_segment; /// <summary> /// Reference of segment 1. /// </summary> private int m_segment1; /// <summary> /// End reference of segment 1. /// </summary> private EndReference m_end1; public ListeningDimensionSegmentToBend(RebarShapeDefBySegment def) : base(def) { m_constraintDirCoordX = 0; m_constraintDirCoordY = 0; m_end1 = EndReference.End; } /// <summary> /// Dimension to constrain the length between a segment and a bend /// in the specified direction. /// </summary> [TypeConverter(typeof(TypeConverterRebarShapeParameter))] public RebarShapeParameter Parameter { get { UpdateParameterTypeConverter(); return m_parameter; } set { m_parameter = value; } } /// <summary> /// X coordinate of constraint direction. 
/// </summary> public double ConstraintDirCoordX { get { return m_constraintDirCoordX; } set { m_constraintDirCoordX = value; } } /// <summary> /// Y coordinate of constraint direction. /// </summary> public double ConstraintDirCoordY { get { return m_constraintDirCoordY; } set { m_constraintDirCoordY = value; } } /// <summary> /// Reference of segment 0. /// </summary> [TypeConverter(typeof(TypeConverterSegmentId))] public int Segment0 { get { UpdateSegmentIdTypeConverter(); return m_segment; } set { m_segment = value; } } /// <summary> /// Reference of segment 1. /// </summary> [TypeConverter(typeof(TypeConverterSegmentId))] public int Segment1 { get { UpdateSegmentIdTypeConverter(); return m_segment1; } set { m_segment1 = value; } } /// <summary> /// End reference of segment 1. /// </summary> public EndReference End1 { get { return m_end1; } set { m_end1 = value; } } /// <summary> /// Add listening dimension to constrain the length between a segment and a bend /// in the specified direction. /// </summary> public override void Commit() { GetRebarShapeDefinitionBySegments.AddListeningDimensionSegmentToBend( m_parameter.Parameter, m_constraintDirCoordX, m_constraintDirCoordY, m_segment, m_segment1, (int)m_end1); } } /// <summary> /// Listening length dimension between two segments. /// </summary> class ListeningDimensionSegmentToSegment : ConstraintOnSegmentShape { /// <summary> /// Dimension to constrain the perpendicular distance between two segment. /// The two segment should be parallel. /// </summary> private RebarShapeParameter m_parameter; /// <summary> /// X coordinate of constraint direction. /// </summary> private double m_constraintDirCoordX; /// <summary> /// Y coordinate of constraint direction. /// </summary> private double m_constraintDirCoordY; /// <summary> /// The first segment to be constrained. /// </summary> private int m_segment; /// <summary> /// The second segment to be constrained. 
/// </summary> private int m_segment1; public ListeningDimensionSegmentToSegment(RebarShapeDefBySegment def) : base(def) { m_constraintDirCoordX = 1; m_constraintDirCoordY = 0; } /// <summary> /// Dimension to constrain the perpendicular distance between two segment. /// </summary> [TypeConverter(typeof(TypeConverterRebarShapeParameter))] public RebarShapeParameter Parameter { get { UpdateParameterTypeConverter(); return m_parameter; } set { m_parameter = value; } } /// <summary> /// X coordinate of constraint direction. /// </summary> public double ConstraintDirCoordX { get { return m_constraintDirCoordX; } set { m_constraintDirCoordX = value; } } /// <summary> /// Y coordinate of constraint direction. /// </summary> public double ConstraintDirCoordY { get { return m_constraintDirCoordY; } set { m_constraintDirCoordY = value; } } /// <summary> /// The second segment to be constrained. /// </summary> [TypeConverter(typeof(TypeConverterSegmentId))] public int Segment0 { get { UpdateSegmentIdTypeConverter(); return m_segment; } set { m_segment = value; } } /// <summary> /// The second segment to be constrained. /// </summary> [TypeConverter(typeof(TypeConverterSegmentId))] public int Segment1 { get { UpdateSegmentIdTypeConverter(); return m_segment1; } set { m_segment1 = value; } } /// <summary> /// Add dimension to constrain the perpendicular distance between two segment. /// </summary> public override void Commit() { GetRebarShapeDefinitionBySegments.AddListeningDimensionSegmentToSegment( m_parameter.Parameter, m_constraintDirCoordX, m_constraintDirCoordY, m_segment, m_segment1); } } /// <summary> /// Remove a dimension from a segment. /// </summary> class RemoveParameterFromSegment : ConstraintOnSegmentShape { /// <summary> /// Reference of segment. /// </summary> private int m_segment; /// <summary> /// Dimension to be removed. 
/// </summary> private RebarShapeParameter m_radiusParameter; public RemoveParameterFromSegment(RebarShapeDefBySegment def) : base(def) { } /// <summary> /// Reference of segment. /// </summary> [TypeConverter(typeof(TypeConverterSegmentId))] public int Segment { get { UpdateSegmentIdTypeConverter(); return m_segment; } set { m_segment = value; } } /// <summary> /// Dimension to be removed. /// </summary> [TypeConverter(typeof(TypeConverterRebarShapeParameter))] public RebarShapeParameter RadiusParameter { get { UpdateParameterTypeConverter(); return m_radiusParameter; } set { m_radiusParameter = value; } } /// <summary> /// Remove dimension from Rebar shape. /// </summary> public override void Commit() { GetRebarShapeDefinitionBySegments.RemoveParameterFromSegment( m_segment, m_radiusParameter.Parameter); } } /// <summary> /// A 180 degree bend dimension. /// </summary> class SetSegmentAs180DegreeBend : ConstraintOnSegmentShape { /// <summary> /// Reference of segment. /// </summary> private int m_segment; /// <summary> /// Dimension to constrain the bend's radius. /// </summary> private RebarShapeParameter m_radiusParameter; /// <summary> /// If measure to outside of bend. /// </summary> private bool m_measureToOutsideOfBend; public SetSegmentAs180DegreeBend(RebarShapeDefBySegment def) : base(def) { m_measureToOutsideOfBend = true; } /// <summary> /// Reference of segment. /// </summary> [TypeConverter(typeof(TypeConverterSegmentId))] public int Segment { get { UpdateSegmentIdTypeConverter(); return m_segment; } set { m_segment = value; } } /// <summary> /// Dimension to constrain the bend's radius. /// </summary> [TypeConverter(typeof(TypeConverterRebarShapeParameter))] public RebarShapeParameter RadiusParameter { get { UpdateParameterTypeConverter(); return m_radiusParameter; } set { m_radiusParameter = value; } } /// <summary> /// If measure the length to outside of bend. 
/// </summary> public bool MeasureToOutsideOfBend { get { return m_measureToOutsideOfBend; } set { m_measureToOutsideOfBend = value; } } /// <summary> /// Add a dimension of 180 degree bend for a segment. /// </summary> public override void Commit() { GetRebarShapeDefinitionBySegments.SetSegmentAs180DegreeBend( m_segment, m_radiusParameter.Parameter, m_measureToOutsideOfBend); } } /// <summary> /// Length dimension of segment in its parallel direction. /// </summary> class SetSegmentFixedDirection : ConstraintOnSegmentShape { /// <summary> /// Reference of segment. /// </summary> private int m_segment; /// <summary> /// X coordinate of constraint direction. /// </summary> private double m_vecCoordX; /// <summary> /// Y coordinate of constraint direction. /// </summary> private double m_vecCoordY; public SetSegmentFixedDirection(RebarShapeDefBySegment def) : base(def) { m_vecCoordX = 1; m_vecCoordY = 0; } /// <summary> /// Reference of segment. /// </summary> [TypeConverter(typeof(TypeConverterSegmentId))] public int Segment { get { UpdateSegmentIdTypeConverter(); return m_segment; } set { m_segment = value; } } /// <summary> /// X coordinate of constraint direction. /// </summary> public double VecCoordX { get { return m_vecCoordX; } set { m_vecCoordX = value; } } /// <summary> /// Y coordinate of constraint direction. /// </summary> public double VecCoordY { get { return m_vecCoordY; } set { m_vecCoordY = value; } } /// <summary> /// Add dimension to constrain the direction of the segment. /// </summary> public override void Commit() { GetRebarShapeDefinitionBySegments.SetSegmentFixedDirection( m_segment, m_vecCoordX, m_vecCoordY); } } /// <summary> /// Remove a dimension from a segment. /// </summary> class SetSegmentVariableDirection : ConstraintOnSegmentShape { /// <summary> /// Reference of segment. /// </summary> private int m_segment; public SetSegmentVariableDirection(RebarShapeDefBySegment def) : base(def) { } /// <summary> /// Reference of segment. 
/// </summary> [TypeConverter(typeof(TypeConverterSegmentId))] public int Segment { get { UpdateSegmentIdTypeConverter(); return m_segment; } set { m_segment = value; } } /// <summary> /// Remove the direction dimension of segment. /// </summary> public override void Commit() { GetRebarShapeDefinitionBySegments.SetSegmentVariableDirection(m_segment); } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Microsoft.Win32;
using Microsoft.Win32.SafeHandles;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Globalization;
using System.Runtime.InteropServices;

namespace System.Security.Principal
{
    /// <summary>
    /// Represents a user or group account by name, optionally qualified by a
    /// domain ("DOMAIN\account"). Comparison and hashing of accounts is
    /// case-insensitive ordinal on the string form.
    /// </summary>
    public sealed class NTAccount : IdentityReference
    {
        #region Private members

        // The full account name; either "account" or "domain\account".
        private readonly string _name;

        //
        // Limit for nt account names for users is 20 while that for groups is 256
        //
        internal const int MaximumAccountNameLength = 256;

        //
        // Limit for dns domain names is 255
        //
        internal const int MaximumDomainNameLength = 255;

        #endregion

        #region Constructors

        /// <summary>
        /// Creates an NTAccount from separate domain and account names.
        /// </summary>
        /// <param name="domainName">Domain name; may be null or empty, in which case
        /// the resulting name is the bare account name.</param>
        /// <param name="accountName">Account name; must be non-null, non-empty and
        /// no longer than <see cref="MaximumAccountNameLength"/>.</param>
        /// <exception cref="ArgumentNullException">accountName is null.</exception>
        /// <exception cref="ArgumentException">accountName is empty or too long, or
        /// domainName exceeds <see cref="MaximumDomainNameLength"/>.</exception>
        public NTAccount(string domainName, string accountName)
        {
            if (accountName == null)
            {
                throw new ArgumentNullException(nameof(accountName));
            }

            if (accountName.Length == 0)
            {
                throw new ArgumentException(SR.Argument_StringZeroLength, nameof(accountName));
            }

            if (accountName.Length > MaximumAccountNameLength)
            {
                throw new ArgumentException(SR.IdentityReference_AccountNameTooLong, nameof(accountName));
            }

            if (domainName != null && domainName.Length > MaximumDomainNameLength)
            {
                throw new ArgumentException(SR.IdentityReference_DomainNameTooLong, nameof(domainName));
            }
            Contract.EndContractBlock();

            if (domainName == null || domainName.Length == 0)
            {
                _name = accountName;
            }
            else
            {
                _name = domainName + "\\" + accountName;
            }
        }

        /// <summary>
        /// Creates an NTAccount from a single name, which may already contain a
        /// "domain\" prefix.
        /// </summary>
        /// <param name="name">The account name; must be non-null, non-empty and no
        /// longer than domain + separator + account limits combined.</param>
        /// <exception cref="ArgumentNullException">name is null.</exception>
        /// <exception cref="ArgumentException">name is empty or too long.</exception>
        public NTAccount(string name)
        {
            if (name == null)
            {
                throw new ArgumentNullException(nameof(name));
            }

            if (name.Length == 0)
            {
                throw new ArgumentException(SR.Argument_StringZeroLength, nameof(name));
            }

            if (name.Length > (MaximumDomainNameLength + 1 /* '\' */ + MaximumAccountNameLength))
            {
                throw new ArgumentException(SR.IdentityReference_AccountNameTooLong, nameof(name));
            }
            Contract.EndContractBlock();

            _name = name;
        }

        #endregion

        #region Inherited properties and methods

        /// <summary>
        /// The string form of the account ("domain\account" or "account").
        /// </summary>
        public override string Value
        {
            get
            {
                return ToString();
            }
        }

        /// <summary>
        /// Returns true only for translation targets this type supports:
        /// <see cref="SecurityIdentifier"/> and <see cref="NTAccount"/> itself.
        /// </summary>
        public override bool IsValidTargetType(Type targetType)
        {
            if (targetType == typeof(SecurityIdentifier))
            {
                return true;
            }
            else if (targetType == typeof(NTAccount))
            {
                return true;
            }
            else
            {
                return false;
            }
        }

        /// <summary>
        /// Translates this account to the requested identity type. Translating to
        /// <see cref="SecurityIdentifier"/> performs an LSA name-to-SID lookup and
        /// throws <see cref="IdentityNotMappedException"/> if the name cannot be mapped.
        /// </summary>
        /// <exception cref="ArgumentNullException">targetType is null.</exception>
        /// <exception cref="ArgumentException">targetType is not a supported target.</exception>
        public override IdentityReference Translate(Type targetType)
        {
            if (targetType == null)
            {
                throw new ArgumentNullException(nameof(targetType));
            }
            Contract.EndContractBlock();

            if (targetType == typeof(NTAccount))
            {
                return this; // assumes that NTAccount objects are immutable
            }
            else if (targetType == typeof(SecurityIdentifier))
            {
                IdentityReferenceCollection irSource = new IdentityReferenceCollection(1);
                irSource.Add(this);
                IdentityReferenceCollection irTarget;

                // forceSuccess = true: an unmapped name surfaces as IdentityNotMappedException.
                irTarget = NTAccount.Translate(irSource, targetType, true);

                return irTarget[0];
            }
            else
            {
                throw new ArgumentException(SR.IdentityReference_MustBeIdentityReference, nameof(targetType));
            }
        }

        /// <summary>
        /// Case-insensitive equality on the account's string form.
        /// </summary>
        public override bool Equals(object o)
        {
            return (this == o as NTAccount); // invokes operator==
        }

        /// <summary>
        /// Hash code consistent with <see cref="Equals(object)"/>: uses the
        /// ordinal-ignore-case hash of the name.
        /// </summary>
        public override int GetHashCode()
        {
            return StringComparer.OrdinalIgnoreCase.GetHashCode(_name);
        }

        public override string ToString()
        {
            return _name;
        }

        /// <summary>
        /// Batch translation entry point. When forceSuccess is true and any source
        /// account fails to map, throws IdentityNotMappedException carrying the
        /// unmapped identities; otherwise returns the (possibly partial) result.
        /// </summary>
        internal static IdentityReferenceCollection Translate(IdentityReferenceCollection sourceAccounts, Type targetType, bool forceSuccess)
        {
            bool SomeFailed = false;
            IdentityReferenceCollection Result;

            Result = Translate(sourceAccounts, targetType, out SomeFailed);

            if (forceSuccess && SomeFailed)
            {
                // Collect the entries that are still in their source type — those
                // are the ones the lookup could not map.
                IdentityReferenceCollection UnmappedIdentities = new IdentityReferenceCollection();

                foreach (IdentityReference id in Result)
                {
                    if (id.GetType() != targetType)
                    {
                        UnmappedIdentities.Add(id);
                    }
                }

                throw new IdentityNotMappedException(SR.IdentityReference_IdentityNotMapped, UnmappedIdentities);
            }

            return Result;
        }

        /// <summary>
        /// Batch translation; someFailed reports whether any account failed to map.
        /// Only SecurityIdentifier is supported as a target here.
        /// </summary>
        internal static IdentityReferenceCollection Translate(IdentityReferenceCollection sourceAccounts, Type targetType, out bool someFailed)
        {
            if (sourceAccounts == null)
            {
                throw new ArgumentNullException(nameof(sourceAccounts));
            }
            Contract.EndContractBlock();

            if (targetType == typeof(SecurityIdentifier))
            {
                return TranslateToSids(sourceAccounts, out someFailed);
            }

            throw new ArgumentException(SR.IdentityReference_MustBeIdentityReference, nameof(targetType));
        }

        #endregion

        #region Operators

        // Null-safe, case-insensitive comparison of the string forms.
        public static bool operator ==(NTAccount left, NTAccount right)
        {
            object l = left;
            object r = right;

            if (l == r)
            {
                return true;
            }
            else if (l == null || r == null)
            {
                return false;
            }
            else
            {
                return (left.ToString().Equals(right.ToString(), StringComparison.OrdinalIgnoreCase));
            }
        }

        public static bool operator !=(NTAccount left, NTAccount right)
        {
            return !(left == right); // invoke operator==
        }

        #endregion

        #region Private methods

        /// <summary>
        /// Translates a batch of NTAccount names to SIDs in a single
        /// LsaLookupNames2 call. Names that cannot be mapped are passed through
        /// unchanged in the result and reported via someFailed.
        /// NOTE(review): Windows-only — relies on the LSA policy/advapi32 interop.
        /// </summary>
        private static IdentityReferenceCollection TranslateToSids(IdentityReferenceCollection sourceAccounts, out bool someFailed)
        {
            if (sourceAccounts == null)
            {
                throw new ArgumentNullException(nameof(sourceAccounts));
            }

            if (sourceAccounts.Count == 0)
            {
                throw new ArgumentException(SR.Arg_EmptyCollection, nameof(sourceAccounts));
            }
            Contract.EndContractBlock();

            // Handles are initialized to invalid so the finally block can
            // unconditionally Dispose them.
            SafeLsaPolicyHandle LsaHandle = SafeLsaPolicyHandle.InvalidHandle;
            SafeLsaMemoryHandle ReferencedDomainsPtr = SafeLsaMemoryHandle.InvalidHandle;
            SafeLsaMemoryHandle SidsPtr = SafeLsaMemoryHandle.InvalidHandle;

            try
            {
                //
                // Construct an array of unicode strings
                //

                Interop.UNICODE_STRING[] Names = new Interop.UNICODE_STRING[sourceAccounts.Count];

                int currentName = 0;
                foreach (IdentityReference id in sourceAccounts)
                {
                    NTAccount nta = id as NTAccount;

                    if (nta == null)
                    {
                        throw new ArgumentException(SR.Argument_ImproperType, nameof(sourceAccounts));
                    }

                    Names[currentName].Buffer = nta.ToString();

                    // UNICODE_STRING lengths are byte counts (UTF-16 => chars * 2),
                    // and MaximumLength includes the trailing null.
                    if (Names[currentName].Buffer.Length * 2 + 2 > ushort.MaxValue)
                    {
                        // this should never happen since we are already validating account name length in constructor and
                        // it is less than this limit
                        Debug.Assert(false, "NTAccount::TranslateToSids - source account name is too long.");
                        throw new InvalidOperationException();
                    }

                    Names[currentName].Length = (ushort)(Names[currentName].Buffer.Length * 2);
                    Names[currentName].MaximumLength = (ushort)(Names[currentName].Length + 2);
                    currentName++;
                }

                //
                // Open LSA policy (for lookup requires it)
                //

                LsaHandle = Win32.LsaOpenPolicy(null, PolicyRights.POLICY_LOOKUP_NAMES);

                //
                // Now perform the actual lookup
                //

                someFailed = false;
                uint ReturnCode;

                ReturnCode = Interop.Advapi32.LsaLookupNames2(LsaHandle, 0, sourceAccounts.Count, Names, ref ReferencedDomainsPtr, ref SidsPtr);

                //
                // Make a decision regarding whether it makes sense to proceed
                // based on the return code and the value of the forceSuccess argument
                //

                if (ReturnCode == Interop.StatusOptions.STATUS_NO_MEMORY ||
                    ReturnCode == Interop.StatusOptions.STATUS_INSUFFICIENT_RESOURCES)
                {
                    throw new OutOfMemoryException();
                }
                else if (ReturnCode == Interop.StatusOptions.STATUS_ACCESS_DENIED)
                {
                    throw new UnauthorizedAccessException();
                }
                else if (ReturnCode == Interop.StatusOptions.STATUS_NONE_MAPPED ||
                    ReturnCode == Interop.StatusOptions.STATUS_SOME_NOT_MAPPED)
                {
                    // Partial or total mapping failure is not fatal; it is reported
                    // to the caller through someFailed.
                    someFailed = true;
                }
                else if (ReturnCode != 0)
                {
                    int win32ErrorCode = Interop.NtDll.RtlNtStatusToDosError(unchecked((int)ReturnCode));

                    if (win32ErrorCode != Interop.Errors.ERROR_TRUSTED_RELATIONSHIP_FAILURE)
                    {
                        Debug.Assert(false, string.Format(CultureInfo.InvariantCulture, "Interop.LsaLookupNames(2) returned unrecognized error {0}", win32ErrorCode));
                    }

                    throw new Win32Exception(win32ErrorCode);
                }

                //
                // Interpret the results and generate SID objects
                //

                IdentityReferenceCollection Result = new IdentityReferenceCollection(sourceAccounts.Count);

                if (ReturnCode == 0 || ReturnCode == Interop.StatusOptions.STATUS_SOME_NOT_MAPPED)
                {
                    // Declare the native buffer's bounds before reading it back as
                    // an array of LSA_TRANSLATED_SID2, one entry per source name.
                    SidsPtr.Initialize((uint)sourceAccounts.Count, (uint)Marshal.SizeOf<Interop.LSA_TRANSLATED_SID2>());
                    Win32.InitializeReferencedDomainsPointer(ReferencedDomainsPtr);
                    Interop.LSA_TRANSLATED_SID2[] translatedSids = new Interop.LSA_TRANSLATED_SID2[sourceAccounts.Count];
                    SidsPtr.ReadArray(0, translatedSids, 0, translatedSids.Length);

                    for (int i = 0; i < sourceAccounts.Count; i++)
                    {
                        Interop.LSA_TRANSLATED_SID2 Lts = translatedSids[i];

                        //
                        // Only some names are recognized as NTAccount objects
                        //

                        switch ((SidNameUse)Lts.Use)
                        {
                            case SidNameUse.User:
                            case SidNameUse.Group:
                            case SidNameUse.Alias:
                            case SidNameUse.Computer:
                            case SidNameUse.WellKnownGroup:
                                Result.Add(new SecurityIdentifier(Lts.Sid, true));
                                break;

                            default:
                                // Unmappable entry: keep the original reference so
                                // callers can identify which names failed.
                                someFailed = true;
                                Result.Add(sourceAccounts[i]);
                                break;
                        }
                    }
                }
                else
                {
                    // STATUS_NONE_MAPPED: nothing translated, echo all sources back.
                    for (int i = 0; i < sourceAccounts.Count; i++)
                    {
                        Result.Add(sourceAccounts[i]);
                    }
                }

                return Result;
            }
            finally
            {
                LsaHandle.Dispose();
                ReferencedDomainsPtr.Dispose();
                SidsPtr.Dispose();
            }
        }

        #endregion
    }
}
// Deflater.cs // // Copyright (C) 2001 Mike Krueger // Copyright (C) 2004 John Reilly // // This file was translated from java, it was part of the GNU Classpath // Copyright (C) 2001 Free Software Foundation, Inc. // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // Linking this library statically or dynamically with other modules is // making a combined work based on this library. Thus, the terms and // conditions of the GNU General Public License cover the whole // combination. // // As a special exception, the copyright holders of this library give you // permission to link this library with independent modules to produce an // executable, regardless of the license terms of these independent // modules, and to copy and distribute the resulting executable under // terms of your choice, provided that you also meet, for each linked // independent module, the terms and conditions of the license of that // module. An independent module is a module which is not derived from // or based on this library. If you modify this library, you may extend // this exception to your version of the library, but you are not // obligated to do so. If you do not wish to do so, delete this // exception statement from your version. 
using System; namespace TvdbLib.SharpZipLib.Zip.Compression { /// <summary> /// This is the Deflater class. The deflater class compresses input /// with the deflate algorithm described in RFC 1951. It has several /// compression levels and three different strategies described below. /// /// This class is <i>not</i> thread safe. This is inherent in the API, due /// to the split of deflate and setInput. /// /// author of the original java version : Jochen Hoenicke /// </summary> public class Deflater { #region Deflater Documentation /* * The Deflater can do the following state transitions: * * (1) -> INIT_STATE ----> INIT_FINISHING_STATE ---. * / | (2) (5) | * / v (5) | * (3)| SETDICT_STATE ---> SETDICT_FINISHING_STATE |(3) * \ | (3) | ,--------' * | | | (3) / * v v (5) v v * (1) -> BUSY_STATE ----> FINISHING_STATE * | (6) * v * FINISHED_STATE * \_____________________________________/ * | (7) * v * CLOSED_STATE * * (1) If we should produce a header we start in INIT_STATE, otherwise * we start in BUSY_STATE. * (2) A dictionary may be set only when we are in INIT_STATE, then * we change the state as indicated. * (3) Whether a dictionary is set or not, on the first call of deflate * we change to BUSY_STATE. * (4) -- intentionally left blank -- :) * (5) FINISHING_STATE is entered, when flush() is called to indicate that * there is no more INPUT. There are also states indicating, that * the header wasn't written yet. * (6) FINISHED_STATE is entered, when everything has been flushed to the * internal pending output buffer. * (7) At any time (7) * */ #endregion #region Public Constants /// <summary> /// The best and slowest compression level. This tries to find very /// long and distant string repetitions. /// </summary> public const int BEST_COMPRESSION = 9; /// <summary> /// The worst but fastest compression level. /// </summary> public const int BEST_SPEED = 1; /// <summary> /// The default compression level. 
/// </summary> public const int DEFAULT_COMPRESSION = -1; /// <summary> /// This level won't compress at all but output uncompressed blocks. /// </summary> public const int NO_COMPRESSION = 0; /// <summary> /// The compression method. This is the only method supported so far. /// There is no need to use this constant at all. /// </summary> public const int DEFLATED = 8; #endregion #region Local Constants private const int IS_SETDICT = 0x01; private const int IS_FLUSHING = 0x04; private const int IS_FINISHING = 0x08; private const int INIT_STATE = 0x00; private const int SETDICT_STATE = 0x01; // private static int INIT_FINISHING_STATE = 0x08; // private static int SETDICT_FINISHING_STATE = 0x09; private const int BUSY_STATE = 0x10; private const int FLUSHING_STATE = 0x14; private const int FINISHING_STATE = 0x1c; private const int FINISHED_STATE = 0x1e; private const int CLOSED_STATE = 0x7f; #endregion #region Constructors /// <summary> /// Creates a new deflater with default compression level. /// </summary> public Deflater() : this(DEFAULT_COMPRESSION, false) { } /// <summary> /// Creates a new deflater with given compression level. /// </summary> /// <param name="level"> /// the compression level, a value between NO_COMPRESSION /// and BEST_COMPRESSION, or DEFAULT_COMPRESSION. /// </param> /// <exception cref="System.ArgumentOutOfRangeException">if lvl is out of range.</exception> public Deflater(int level) : this(level, false) { } /// <summary> /// Creates a new deflater with given compression level. /// </summary> /// <param name="level"> /// the compression level, a value between NO_COMPRESSION /// and BEST_COMPRESSION. /// </param> /// <param name="noZlibHeaderOrFooter"> /// true, if we should suppress the Zlib/RFC1950 header at the /// beginning and the adler checksum at the end of the output. This is /// useful for the GZIP/PKZIP formats. 
/// </param> /// <exception cref="System.ArgumentOutOfRangeException">if lvl is out of range.</exception> public Deflater(int level, bool noZlibHeaderOrFooter) { if (level == DEFAULT_COMPRESSION) { level = 6; } else if (level < NO_COMPRESSION || level > BEST_COMPRESSION) { throw new ArgumentOutOfRangeException("level"); } pending = new DeflaterPending(); engine = new DeflaterEngine(pending); this.noZlibHeaderOrFooter = noZlibHeaderOrFooter; SetStrategy(DeflateStrategy.Default); SetLevel(level); Reset(); } #endregion /// <summary> /// Resets the deflater. The deflater acts afterwards as if it was /// just created with the same compression level and strategy as it /// had before. /// </summary> public void Reset() { state = (noZlibHeaderOrFooter ? BUSY_STATE : INIT_STATE); totalOut = 0; pending.Reset(); engine.Reset(); } /// <summary> /// Gets the current adler checksum of the data that was processed so far. /// </summary> public int Adler { get { return engine.Adler; } } /// <summary> /// Gets the number of input bytes processed so far. /// </summary> public long TotalIn { get { return engine.TotalIn; } } /// <summary> /// Gets the number of output bytes so far. /// </summary> public long TotalOut { get { return totalOut; } } /// <summary> /// Flushes the current input block. Further calls to deflate() will /// produce enough output to inflate everything in the current input /// block. This is not part of Sun's JDK so I have made it package /// private. It is used by DeflaterOutputStream to implement /// flush(). /// </summary> public void Flush() { state |= IS_FLUSHING; } /// <summary> /// Finishes the deflater with the current input block. It is an error /// to give more input after this method was called. This method must /// be called to force all bytes to be flushed. /// </summary> public void Finish() { state |= (IS_FLUSHING | IS_FINISHING); } /// <summary> /// Returns true if the stream was finished and no more output bytes /// are available. 
/// </summary>
public bool IsFinished => state == FINISHED_STATE && pending.IsFlushed;

/// <summary>
/// Returns true, if the input buffer is empty.
/// You should then call SetInput().
/// NOTE: This method can also return true when the stream
/// was finished.
/// </summary>
public bool IsNeedingInput => engine.NeedsInput();

/// <summary>
/// Sets the data which should be compressed next. This should be only
/// called when IsNeedingInput indicates that more input is needed.
/// Calling it while input is still pending discards the previous input.
/// The given byte array must not be changed before IsNeedingInput
/// returns true again. Equivalent to <code>SetInput(input, 0, input.Length)</code>.
/// </summary>
/// <param name="input">
/// the buffer containing the input data.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// if the buffer was finished() or ended().
/// </exception>
public void SetInput(byte[] input) => SetInput(input, 0, input.Length);

/// <summary>
/// Sets the data which should be compressed next. This should be
/// only called when IsNeedingInput indicates that more input is needed.
/// The given byte array must not be changed before IsNeedingInput
/// returns true again.
/// </summary>
/// <param name="input">
/// the buffer containing the input data.
/// </param>
/// <param name="offset">
/// the start of the data.
/// </param>
/// <param name="count">
/// the number of data bytes of input.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// if the buffer was Finish()ed or if previous input is still pending.
/// </exception>
public void SetInput(byte[] input, int offset, int count)
{
	// Once Finish() was requested no further input may be supplied.
	if ((state & IS_FINISHING) != 0)
	{
		throw new InvalidOperationException("Finish() already called");
	}

	engine.SetInput(input, offset, count);
}

/// <summary>
/// Sets the compression level. There is no guarantee of the exact
/// position of the change, but if you call this when IsNeedingInput is
/// true the change of compression level will occur somewhere near
/// before the end of the so far given input.
/// </summary>
/// <param name="level">
/// the new compression level.
/// </param>
public void SetLevel(int level)
{
	if (level == DEFAULT_COMPRESSION)
	{
		level = 6;
	}
	else if (level < NO_COMPRESSION || level > BEST_COMPRESSION)
	{
		throw new ArgumentOutOfRangeException("level");
	}

	if (this.level == level)
	{
		return; // nothing to change
	}

	this.level = level;
	engine.SetLevel(level);
}

/// <summary>
/// Get current compression level
/// </summary>
/// <returns>Returns the current compression level</returns>
public int GetLevel() => level;

/// <summary>
/// Sets the compression strategy. Strategy is one of
/// DEFAULT_STRATEGY, HUFFMAN_ONLY and FILTERED. For the exact
/// position where the strategy is changed, the same as for
/// SetLevel() applies.
/// </summary>
/// <param name="strategy">
/// The new compression strategy.
/// </param>
public void SetStrategy(DeflateStrategy strategy)
{
	engine.Strategy = strategy;
}

/// <summary>
/// Deflates the current input block with to the given array.
/// </summary>
/// <param name="output">
/// The buffer where compressed data is stored
/// </param>
/// <returns>
/// The number of compressed bytes added to the output, or 0 if either
/// IsNeedingInput() or IsFinished returns true or length is zero.
/// </returns>
public int Deflate(byte[] output) => Deflate(output, 0, output.Length);

/// <summary>
/// Deflates the current input block to the given array.
/// </summary>
/// <param name="output">
/// Buffer to store the compressed data.
/// </param>
/// <param name="offset">
/// Offset into the output array.
/// </param>
/// <param name="length">
/// The maximum number of bytes that may be stored.
/// </param>
/// <returns>
/// The number of compressed bytes added to the output, or 0 if either
/// needsInput() or finished() returns true or length is zero.
/// </returns>
/// <exception cref="System.InvalidOperationException">
/// If Finish() was previously called.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// If offset or length don't match the array length.
/// </exception>
public int Deflate(byte[] output, int offset, int length)
{
	int origLength = length;

	if (state == CLOSED_STATE) {
		throw new InvalidOperationException("Deflater closed");
	}

	if (state < BUSY_STATE) {
		// Still in INIT/SETDICT state: emit the 2-byte zlib (RFC 1950)
		// stream header before any compressed data.
		// CMF byte: compression method (DEFLATED) plus window-size bits.
		int header = (DEFLATED + ((DeflaterConstants.MAX_WBITS - 8) << 4)) << 8;
		// FLEVEL: rough compression-level hint stored in the top 2 bits of FLG.
		int level_flags = (level - 1) >> 1;
		if (level_flags < 0 || level_flags > 3) {
			level_flags = 3;
		}
		header |= level_flags << 6;
		if ((state & IS_SETDICT) != 0) {
			// Dictionary was set; announce it via the FDICT bit.
			header |= DeflaterConstants.PRESET_DICT;
		}
		// FCHECK: adjust so the 16-bit header value is a multiple of 31.
		header += 31 - (header % 31);

		pending.WriteShortMSB(header);
		if ((state & IS_SETDICT) != 0) {
			// A preset dictionary is identified by its Adler32 checksum,
			// written right after the header; then restart the checksum
			// for the actual stream data.
			int chksum = engine.Adler;
			engine.ResetAdler();
			pending.WriteShortMSB(chksum >> 16);
			pending.WriteShortMSB(chksum & 0xffff);
		}

		// Header done; move to BUSY while keeping pending flush/finish requests.
		state = BUSY_STATE | (state & (IS_FLUSHING | IS_FINISHING));
	}

	for (;;) {
		// Drain already-compressed bytes into the caller's buffer.
		int count = pending.Flush(output, offset, length);
		offset += count;
		totalOut += count;
		length -= count;

		if (length == 0 || state == FINISHED_STATE) {
			break;
		}

		// Ask the engine for more compressed data; false means it made
		// no progress for the current flush/finish mode.
		if (!engine.Deflate((state & IS_FLUSHING) != 0, (state & IS_FINISHING) != 0)) {
			if (state == BUSY_STATE) {
				// We need more input now
				return origLength - length;
			} else if (state == FLUSHING_STATE) {
				if (level != NO_COMPRESSION) {
					/* We have to supply some lookahead. 8 bit lookahead
					 * is needed by the zlib inflater, and we must fill
					 * the next byte, so that all bits are flushed.
					 */
					int neededbits = 8 + ((-pending.BitCount) & 7);
					while (neededbits > 0) {
						/* write a static tree block consisting solely of
						 * an EOF:
						 */
						pending.WriteBits(2, 10);
						neededbits -= 10;
					}
				}
				// Flush satisfied; fall back to normal compression.
				state = BUSY_STATE;
			} else if (state == FINISHING_STATE) {
				pending.AlignToByte();

				// Compressed data is complete. Write footer information if required.
				if (!noZlibHeaderOrFooter) {
					// Zlib trailer: big-endian Adler32 of all input data.
					int adler = engine.Adler;
					pending.WriteShortMSB(adler >> 16);
					pending.WriteShortMSB(adler & 0xffff);
				}
				state = FINISHED_STATE;
			}
		}
	}
	return origLength - length;
}

/// <summary>
/// Sets the dictionary which should be used in the deflate process.
/// This call is equivalent to <code>setDictionary(dict, 0, dict.Length)</code>.
/// </summary>
/// <param name="dictionary">
/// the dictionary.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// if SetInput () or Deflate () were already called or another dictionary was already set.
/// </exception>
public void SetDictionary(byte[] dictionary)
{
	SetDictionary(dictionary, 0, dictionary.Length);
}

/// <summary>
/// Sets the dictionary which should be used in the deflate process.
/// The dictionary is a byte array containing strings that are
/// likely to occur in the data which should be compressed. The
/// dictionary is not stored in the compressed output, only a
/// checksum. To decompress the output you need to supply the same
/// dictionary again.
/// </summary>
/// <param name="dictionary">
/// The dictionary data
/// </param>
/// <param name="index">
/// The index where dictionary information commences.
/// </param>
/// <param name="count">
/// The number of bytes in the dictionary.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// If SetInput () or Deflate() were already called or another dictionary was already set.
/// </exception>
public void SetDictionary(byte[] dictionary, int index, int count)
{
	// A dictionary may only be supplied before anything has been
	// compressed, i.e. while still in INIT_STATE. Note that a deflater
	// created with noZlibHeaderOrFooter = true starts in BUSY_STATE (see
	// Reset), so it can never accept a dictionary through this method.
	if (state != INIT_STATE) {
		// BUGFIX: previously threw a message-less InvalidOperationException,
		// giving callers no hint about what went wrong.
		throw new InvalidOperationException("SetDictionary is only allowed before SetInput or Deflate");
	}

	// SETDICT_STATE carries the IS_SETDICT flag so Deflate() will emit
	// the FDICT bit and the dictionary's Adler checksum in the header.
	state = SETDICT_STATE;
	engine.SetDictionary(dictionary, index, count);
}

#region Instance Fields

/// <summary>
/// Compression level.
/// </summary>
int level;

/// <summary>
/// If true no Zlib/RFC1950 headers or footers are generated
/// </summary>
bool noZlibHeaderOrFooter;

/// <summary>
/// The current state.
/// </summary>
int state;

/// <summary>
/// The total bytes of output written.
/// </summary>
long totalOut;

/// <summary>
/// The pending output.
/// </summary>
DeflaterPending pending;

/// <summary>
/// The deflater engine.
/// </summary>
DeflaterEngine engine;
#endregion
}
}
/* * Copyright (c) Contributors, http://aurora-sim.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Aurora-Sim Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Nini.Config;
using Aurora.Simulation.Base;
using OpenSim.Services.Interfaces;
using Aurora.Framework;
using Aurora.Framework.Servers.HttpServer;
using OpenMetaverse;

namespace OpenSim.Services.CapsService
{
    /// <summary>
    /// Grid-wide registry of CAPS (capability) handlers: one
    /// IClientCapsService per agent and one IRegionCapsService per region
    /// handle. Also registers the "show presences" console command.
    /// </summary>
    public class CapsService : ICapsService, IService
    {
        #region Declares

        /// <summary>
        /// A list of all clients and their Client Caps Handlers, keyed by agent ID.
        /// </summary>
        protected Dictionary<UUID, IClientCapsService> m_ClientCapsServices = new Dictionary<UUID, IClientCapsService>();

        /// <summary>
        /// A list of all regions Caps Services, keyed by region handle.
        /// </summary>
        protected Dictionary<ulong, IRegionCapsService> m_RegionCapsServices = new Dictionary<ulong, IRegionCapsService>();

        protected IRegistryCore m_registry;

        public IRegistryCore Registry
        {
            get { return m_registry; }
        }

        protected IHttpServer m_server;

        public IHttpServer Server
        {
            get { return m_server; }
        }

        // URI of the HTTP server hosting the CAPS endpoints.
        public string HostUri
        {
            get { return m_server.ServerURI; }
        }

        #endregion

        #region IService members

        public string Name
        {
            get { return GetType().Name; }
        }

        /// <summary>
        /// Registers this instance as the ICapsService implementation, but
        /// only when the [Handlers] CapsHandler config entry selects it.
        /// </summary>
        public void Initialize(IConfigSource config, IRegistryCore registry)
        {
            IConfig handlerConfig = config.Configs["Handlers"];
            if (handlerConfig.GetString("CapsHandler", "") != Name)
                return;
            m_registry = registry;
            registry.RegisterModuleInterface<ICapsService>(this);
        }

        public void Start(IConfigSource config, IRegistryCore registry)
        {
            ISimulationBase simBase = registry.RequestModuleInterface<ISimulationBase>();
            // GetHttpServer(0) — presumably returns the default server; verify against ISimulationBase.
            m_server = simBase.GetHttpServer(0);
            if (MainConsole.Instance != null)
                MainConsole.Instance.Commands.AddCommand("show presences",
                                                         "show presences",
                                                         "Shows all presences in the grid",
                                                         ShowUsers);
        }

        public void FinishedStartup()
        {
        }

        #endregion

        #region Console Commands

        /// <summary>
        /// Console command handler for "show presences". Lists agents known
        /// to the region caps services; pass "all" or "full" as the third
        /// token to include child agents as well as root agents.
        /// </summary>
        protected void ShowUsers(string[] cmd)
        {
            //Check for all or full to show child agents
            bool showChildAgents = cmd.Length == 3 && (cmd[2] == "all" || (cmd[2] == "full"));
#if (!ISWIN)
            int count = 0;
            foreach (IRegionCapsService regionCaps in m_RegionCapsServices.Values)
                foreach (IRegionClientCapsService client in regionCaps.GetClients())
                {
                    if ((client.RootAgent || showChildAgents)) count++;
                }
#else
            int count = m_RegionCapsServices.Values.SelectMany(regionCaps => regionCaps.GetClients()).Count(clientCaps => (clientCaps.RootAgent || showChildAgents));
#endif
            MainConsole.Instance.WarnFormat("{0} agents found: ", count);
            foreach (IClientCapsService clientCaps in m_ClientCapsServices.Values)
            {
                foreach (IRegionClientCapsService caps in clientCaps.GetCapsServices())
                {
                    if ((caps.RootAgent || showChildAgents))
                    {
                        MainConsole.Instance.InfoFormat("Region - {0}, User {1}, {2}, {3}",
                                                        caps.Region.RegionName,
                                                        clientCaps.AccountInfo.Name,
                                                        caps.RootAgent ? "Root Agent" : "Child Agent",
                                                        caps.Disabled ? "Disabled" : "Not Disabled");
                    }
                }
            }
        }

        #endregion

        #region ICapsService members

        #region Client Caps

        /// <summary>
        /// Remove all of the user's CAPS from the system, closing the
        /// per-client service and firing the "UserLogout" event.
        /// </summary>
        /// <param name="AgentID">Agent whose CAPS are removed.</param>
        public void RemoveCAPS(UUID AgentID)
        {
            if (m_ClientCapsServices.ContainsKey(AgentID))
            {
                IClientCapsService perClient = m_ClientCapsServices[AgentID];
                perClient.Close();
                m_ClientCapsServices.Remove(AgentID);
                m_registry.RequestModuleInterface<ISimulationBase>().EventManager.FireGenericEventHandler("UserLogout", AgentID);
            }
        }

        /// <summary>
        /// Create a Caps URL for the given user/region. Called normally by
        /// the EventQueueService or the LLLoginService on login.
        /// </summary>
        /// <param name="AgentID">Agent the CAPS are created for.</param>
        /// <param name="CAPSBase">Base path of the CAPS URL.</param>
        /// <param name="regionHandle">Handle of the region the agent enters.</param>
        /// <param name="IsRootAgent">Will this child be a root agent.</param>
        /// <param name="circuitData">Circuit data for the agent's connection.</param>
        /// <param name="port">The port to use for the CAPS service.</param>
        /// <returns>The CAPS URL assigned to the client for this region.</returns>
        public string CreateCAPS(UUID AgentID, string CAPSBase, ulong regionHandle, bool IsRootAgent, AgentCircuitData circuitData, uint port)
        {
            //Now make sure we didn't use an old one or something
            IClientCapsService service = GetOrCreateClientCapsService(AgentID);
            IRegionClientCapsService clientService = service.GetOrCreateCapsService(regionHandle, CAPSBase, circuitData, port);

            //Fix the root agent status
            clientService.RootAgent = IsRootAgent;

            m_registry.RequestModuleInterface<ISimulationBase>().EventManager.FireGenericEventHandler("UserLogin", AgentID);
            MainConsole.Instance.Debug("[CapsService]: Adding Caps URL " + clientService.CapsUrl + " for agent " + AgentID);
            return clientService.CapsUrl;
        }

        /// <summary>
        /// Get or create a new Caps Service for the given client
        /// Note: This does not add them to a region if one is created.
/// </summary>
/// <param name="AgentID">Agent to look up or create a service for.</param>
/// <returns>The existing or newly created per-client caps service.</returns>
public IClientCapsService GetOrCreateClientCapsService(UUID AgentID)
{
    // Single TryGetValue lookup instead of ContainsKey + Add + indexer
    // (the original performed up to three hash lookups).
    IClientCapsService service;
    if (!m_ClientCapsServices.TryGetValue(AgentID, out service))
    {
        PerClientBasedCapsService client = new PerClientBasedCapsService();
        client.Initialise(this, AgentID);
        m_ClientCapsServices.Add(AgentID, client);
        service = client;
    }
    return service;
}

/// <summary>
/// Get a Caps Service for the given client.
/// </summary>
/// <param name="AgentID">Agent to look up.</param>
/// <returns>The client's caps service, or null if none is registered.</returns>
public IClientCapsService GetClientCapsService(UUID AgentID)
{
    // Single lookup instead of ContainsKey followed by the indexer.
    IClientCapsService service;
    return m_ClientCapsServices.TryGetValue(AgentID, out service) ? service : null;
}

/// <summary>
/// Returns a snapshot list of all known per-client caps services.
/// </summary>
public List<IClientCapsService> GetClientsCapsServices()
{
    return new List<IClientCapsService>(m_ClientCapsServices.Values);
}

#endregion

#region Region Caps

/// <summary>
/// Get a region handler for the given region.
/// </summary>
/// <param name="RegionHandle">Handle of the region.</param>
/// <returns>The region's caps service, or null if none is registered.</returns>
public IRegionCapsService GetCapsForRegion(ulong RegionHandle)
{
    IRegionCapsService service;
    if (m_RegionCapsServices.TryGetValue(RegionHandle, out service))
    {
        return service;
    }
    return null;
}

/// <summary>
/// Create a caps handler for the given region (no-op if one exists).
/// </summary>
/// <param name="RegionHandle">Handle of the region.</param>
public void AddCapsForRegion(ulong RegionHandle)
{
    if (!m_RegionCapsServices.ContainsKey(RegionHandle))
    {
        IRegionCapsService service = new PerRegionCapsService();
        service.Initialise(RegionHandle, Registry);
        m_RegionCapsServices.Add(RegionHandle, service);
    }
}

/// <summary>
/// Remove the handler for the given region.
/// </summary>
/// <param name="RegionHandle">Handle of the region.</param>
public void RemoveCapsForRegion(ulong RegionHandle)
{
    // Dictionary.Remove is already a no-op for missing keys, so the
    // original ContainsKey pre-check was a redundant second lookup.
    m_RegionCapsServices.Remove(RegionHandle);
}

/// <summary>
/// Returns a snapshot list of all known per-region caps services.
/// </summary>
public List<IRegionCapsService> GetRegionsCapsServices()
{
    return new List<IRegionCapsService>(m_RegionCapsServices.Values);
}

#endregion

#endregion
}
}
using Grpc.Core;
using MessagePack;
using System;
using System.IO;
using System.Threading.Tasks;
using MagicOnion.Utils;
using Microsoft.AspNetCore.Connections;

namespace MagicOnion.Server.Hubs
{
    /// <summary>
    /// Base class for server-side StreamingHub implementations. Owns the
    /// duplex-streaming connection lifecycle (Connect/disconnect hooks) and
    /// the MessagePack message-dispatch loop that routes client calls to the
    /// registered hub method handlers.
    /// </summary>
    public abstract class StreamingHubBase<THubInterface, TReceiver> : ServiceBase<THubInterface>, IStreamingHub<THubInterface, TReceiver>
        where THubInterface : IStreamingHub<THubInterface, TReceiver>
    {
        static protected readonly Task<Nil> NilTask = Task.FromResult(Nil.Default);
        static protected readonly ValueTask CompletedTask = new ValueTask();

        // Sent once per connection so clients can detect the hub protocol version.
        static readonly Metadata ResponseHeaders = new Metadata()
        {
            { "x-magiconion-streaminghub-version", "2" },
        };

        // Assigned in Connect() before any hub method runs.
        public HubGroupRepository Group { get; private set; } = default!; /* lateinit */

        protected Guid ConnectionId { get { return Context.ContextId; } }

        // Broadcast Commands

        [Ignore]
        protected TReceiver Broadcast(IGroup group)
        {
            var type = DynamicBroadcasterBuilder<TReceiver>.BroadcasterType;
            return (TReceiver)Activator.CreateInstance(type, group)!;
        }

        [Ignore]
        protected TReceiver BroadcastExceptSelf(IGroup group)
        {
            return BroadcastExcept(group, Context.ContextId);
        }

        [Ignore]
        protected TReceiver BroadcastExcept(IGroup group, Guid except)
        {
            var type = DynamicBroadcasterBuilder<TReceiver>.BroadcasterType_ExceptOne;
            return (TReceiver)Activator.CreateInstance(type, new object[] { group, except })!;
        }

        [Ignore]
        protected TReceiver BroadcastExcept(IGroup group, Guid[] excepts)
        {
            var type = DynamicBroadcasterBuilder<TReceiver>.BroadcasterType_ExceptMany;
            return (TReceiver)Activator.CreateInstance(type, new object[] { group, excepts })!;
        }

        [Ignore]
        protected TReceiver BroadcastToSelf(IGroup group)
        {
            return BroadcastTo(group, Context.ContextId);
        }

        [Ignore]
        protected TReceiver BroadcastTo(IGroup group, Guid toConnectionId)
        {
            var type = DynamicBroadcasterBuilder<TReceiver>.BroadcasterType_ToOne;
            return (TReceiver)Activator.CreateInstance(type, new object[] { group, toConnectionId })!;
        }

        [Ignore]
        protected TReceiver BroadcastTo(IGroup group, Guid[] toConnectionIds)
        {
            var type = DynamicBroadcasterBuilder<TReceiver>.BroadcasterType_ToMany;
            return (TReceiver)Activator.CreateInstance(type, new object[] { group, toConnectionIds })!;
        }

        /// <summary>
        /// Called before connect, instead of constructor.
        /// </summary>
        protected virtual ValueTask OnConnecting()
        {
            return CompletedTask;
        }

        /// <summary>
        /// Called after disconnect.
        /// </summary>
        protected virtual ValueTask OnDisconnected()
        {
            return CompletedTask;
        }

        /// <summary>
        /// Entry point of the duplex stream: runs the connect hook, then the
        /// message loop until the client disconnects, then always completes
        /// the hub, invokes the disconnect hook and disposes the group.
        /// </summary>
        public async Task<DuplexStreamingResult<byte[], byte[]>> Connect()
        {
            var streamingContext = GetDuplexStreamingContext<byte[], byte[]>();

            var group = StreamingHubHandlerRepository.GetGroupRepository(Context.MethodHandler);
            this.Group = new HubGroupRepository(this.Context, group);
            try
            {
                await OnConnecting();
                await HandleMessageAsync();
            }
            catch (OperationCanceledException)
            {
                // NOTE: If DuplexStreaming is disconnected by the client, OperationCanceledException will be thrown.
                //       However, such behavior is expected. the exception can be ignored.
            }
            catch (IOException ex) when (ex.InnerException is ConnectionAbortedException)
            {
                // NOTE: If DuplexStreaming is disconnected by the client, IOException will be thrown.
                //       However, such behavior is expected. the exception can be ignored.
            }
            finally
            {
                // Cleanup runs regardless of how the message loop ended.
                Context.CompleteStreamingHub();
                await OnDisconnected();
                await this.Group.DisposeAsync();
            }

            return streamingContext.Result();
        }

        /// <summary>
        /// Reads framed MessagePack messages from the request stream and
        /// dispatches them to the matching hub method handler until the
        /// stream ends. Messages without a messageId (-1) are fire-and-forget;
        /// messages with one expect a response or error to be written back.
        /// </summary>
        async Task HandleMessageAsync()
        {
            var ct = Context.CallContext.CancellationToken;
            var reader = Context.RequestStream!;
            var writer = Context.ResponseStream!;

            // Send a hint to the client to start sending messages.
            // The client can read the response headers before any StreamingHub's message.
            await Context.CallContext.WriteResponseHeadersAsync(ResponseHeaders);

            // Write a marker that is the beginning of the stream.
            // NOTE: To prevent buffering by AWS ALB or reverse-proxy.
            static byte[] BuildMarkerResponse()
            {
                using (var buffer = ArrayPoolBufferWriter.RentThreadStaticWriter())
                {
                    var writer = new MessagePackWriter(buffer);

                    // response:  [messageId, methodId, response]
                    // HACK: If the ID of the message is `-1`, the client will ignore the message.
                    writer.WriteArrayHeader(3);
                    writer.Write(-1);
                    writer.Write(0);
                    writer.WriteNil();
                    writer.Flush();
                    return buffer.WrittenSpan.ToArray();
                }
            }
            await writer.WriteAsync(BuildMarkerResponse());

            var handlers = StreamingHubHandlerRepository.GetHandlers(Context.MethodHandler);

            // Main loop of StreamingHub.
            // Be careful to allocation and performance.
            while (await reader.MoveNext(ct)) // must keep SyncContext.
            {
                // Decodes the message envelope: a 2-element array is a
                // fire-and-forget call, a 3-element array carries a messageId
                // for correlating the response. 'offset' is where the
                // serialized argument payload begins within msgData.
                (int methodId, int messageId, int offset) FetchHeader(byte[] msgData)
                {
                    var messagePackReader = new MessagePackReader(msgData);

                    var length = messagePackReader.ReadArrayHeader();
                    if (length == 2)
                    {
                        // void: [methodId, [argument]]
                        var mid = messagePackReader.ReadInt32();
                        var consumed = (int)messagePackReader.Consumed;

                        return (mid, -1, consumed);
                    }
                    else if (length == 3)
                    {
                        // T: [messageId, methodId, [argument]]
                        var msgId = messagePackReader.ReadInt32();
                        var metId = messagePackReader.ReadInt32();
                        var consumed = (int)messagePackReader.Consumed;
                        return (metId, msgId, consumed);
                    }
                    else
                    {
                        throw new InvalidOperationException("Invalid data format.");
                    }
                }

                var data = reader.Current;
                var (methodId, messageId, offset) = FetchHeader(data);

                if (messageId == -1)
                {
                    // Fire-and-forget invocation: exceptions are logged only,
                    // nothing is written back to the client.
                    if (handlers.TryGetValue(methodId, out var handler))
                    {
                        var context = new StreamingHubContext() // create per invoke.
                        {
                            SerializerOptions = handler.serializerOptions,
                            HubInstance = this,
                            ServiceContext = Context,
                            Request = new ArraySegment<byte>(data, offset, data.Length - offset),
                            Path = handler.ToString(),
                            MethodId = handler.MethodId,
                            MessageId = -1,
                            Timestamp = DateTime.UtcNow
                        };

                        var isErrorOrInterrupted = false;
                        Context.MethodHandler.logger.BeginInvokeHubMethod(context, context.Request, handler.RequestType);
                        try
                        {
                            await handler.MethodBody.Invoke(context);
                        }
                        catch (Exception ex)
                        {
                            isErrorOrInterrupted = true;
                            Context.MethodHandler.logger.Error(ex, context);
                        }
                        finally
                        {
                            Context.MethodHandler.logger.EndInvokeHubMethod(context, context.responseSize, context.responseType, (DateTime.UtcNow - context.Timestamp).TotalMilliseconds, isErrorOrInterrupted);
                        }
                    }
                    else
                    {
                        throw new InvalidOperationException("Handler not found in received methodId, methodId:" + methodId);
                    }
                }
                else
                {
                    // Request/response invocation: errors are reported back to
                    // the client via WriteErrorMessage.
                    if (handlers.TryGetValue(methodId, out var handler))
                    {
                        var context = new StreamingHubContext() // create per invoke.
                        {
                            SerializerOptions = handler.serializerOptions,
                            HubInstance = this,
                            ServiceContext = Context,
                            Request = new ArraySegment<byte>(data, offset, data.Length - offset),
                            Path = handler.ToString(),
                            MethodId = handler.MethodId,
                            MessageId = messageId,
                            Timestamp = DateTime.UtcNow
                        };

                        var isErrorOrInterrupted = false;
                        Context.MethodHandler.logger.BeginInvokeHubMethod(context, context.Request, handler.RequestType);
                        try
                        {
                            await handler.MethodBody.Invoke(context);
                        }
                        catch (ReturnStatusException ex)
                        {
                            // Deliberate status result, not an error: relay the
                            // status code and detail without a stack trace.
                            await context.WriteErrorMessage((int)ex.StatusCode, ex.Detail, null, false);
                        }
                        catch (Exception ex)
                        {
                            isErrorOrInterrupted = true;
                            Context.MethodHandler.logger.Error(ex, context);
                            await context.WriteErrorMessage((int)StatusCode.Internal, $"An error occurred while processing handler '{handler.ToString()}'.", ex, Context.MethodHandler.isReturnExceptionStackTraceInErrorDetail);
                        }
                        finally
                        {
                            Context.MethodHandler.logger.EndInvokeHubMethod(context, context.responseSize, context.responseType, (DateTime.UtcNow - context.Timestamp).TotalMilliseconds, isErrorOrInterrupted);
                        }
                    }
                    else
                    {
                        throw new InvalidOperationException("Handler not found in received methodId, methodId:" + methodId);
                    }
                }
            }
        }

        // Interface methods for Client — these only exist so the interface is
        // satisfied on the server; real invocation happens via the client proxy.

        THubInterface IStreamingHub<THubInterface, TReceiver>.FireAndForget()
        {
            throw new NotSupportedException("Invoke from client proxy only");
        }

        Task IStreamingHub<THubInterface, TReceiver>.DisposeAsync()
        {
            throw new NotSupportedException("Invoke from client proxy only");
        }

        Task IStreamingHub<THubInterface, TReceiver>.WaitForDisconnect()
        {
            throw new NotSupportedException("Invoke from client proxy only");
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;

namespace System.Globalization
{
    ////////////////////////////////////////////////////////////////////////
    //
    //  KoreanCalendar
    //
    //  The Korean calendar is the Gregorian calendar with a fixed offset:
    //      Korean year = Gregorian year + 2333
    //  so 2000/01/01 A.D. is Korean 4333/01/01, and 0001/01/01 A.D. is
    //  Korean year 2334.
    //
    //  Calendar support range:
    //      Calendar    Minimum     Maximum
    //      ==========  ==========  ==========
    //      Gregorian   0001/01/01  9999/12/31
    //      Korean      2334/01/01  12332/12/31
    //
    ////////////////////////////////////////////////////////////////////////

    [System.Runtime.InteropServices.ComVisible(true)]
    public class KoreanCalendar : Calendar
    {
        // The era value for the current (and only) era.
        public const int KoreanEra = 1;

        // Since Gregorian year = era year + yearOffset and Gregorian year 1
        // is Korean year 2334, yearOffset = -2333.
        internal static EraInfo[] koreanEraInfo = new EraInfo[]
        {
            // era #, start year/month/day, yearOffset, minEraYear, maxEraYear
            new EraInfo(1, 1, 1, 1, -2333, 2334, GregorianCalendar.MaxYear + 2333)
        };

        // All date arithmetic is delegated to this Gregorian helper, which
        // applies the Korean era table above.
        internal GregorianCalendarHelper helper;

        [System.Runtime.InteropServices.ComVisible(false)]
        public override DateTime MinSupportedDateTime => DateTime.MinValue;

        [System.Runtime.InteropServices.ComVisible(false)]
        public override DateTime MaxSupportedDateTime => DateTime.MaxValue;

        public KoreanCalendar()
        {
            // Probe for the required culture data; a missing "ko-KR" culture
            // is surfaced as a type-initialization failure.
            try
            {
                new CultureInfo("ko-KR");
            }
            catch (ArgumentException e)
            {
                throw new TypeInitializationException(this.GetType().ToString(), e);
            }
            helper = new GregorianCalendarHelper(this, koreanEraInfo);
        }

        internal override CalendarId ID => CalendarId.KOREA;

        // The members below forward directly to the Gregorian helper.

        public override DateTime AddMonths(DateTime time, int months) => helper.AddMonths(time, months);

        public override DateTime AddYears(DateTime time, int years) => helper.AddYears(time, years);

        /// <summary>
        /// Returns the number of days in the month given by the year, month
        /// and era arguments (year expressed in the Korean calendar).
        /// </summary>
        public override int GetDaysInMonth(int year, int month, int era) => helper.GetDaysInMonth(year, month, era);

        public override int GetDaysInYear(int year, int era) => helper.GetDaysInYear(year, era);

        public override int GetDayOfMonth(DateTime time) => helper.GetDayOfMonth(time);

        public override DayOfWeek GetDayOfWeek(DateTime time) => helper.GetDayOfWeek(time);

        public override int GetDayOfYear(DateTime time) => helper.GetDayOfYear(time);

        public override int GetMonthsInYear(int year, int era) => helper.GetMonthsInYear(year, era);

        [SuppressMessage("Microsoft.Contracts", "CC1055")]  // Skip extra error checking to avoid *potential* AppCompat problems.
        [System.Runtime.InteropServices.ComVisible(false)]
        public override int GetWeekOfYear(DateTime time, CalendarWeekRule rule, DayOfWeek firstDayOfWeek) => helper.GetWeekOfYear(time, rule, firstDayOfWeek);

        public override int GetEra(DateTime time) => helper.GetEra(time);

        public override int GetMonth(DateTime time) => helper.GetMonth(time);

        public override int GetYear(DateTime time) => helper.GetYear(time);

        public override bool IsLeapDay(int year, int month, int day, int era) => helper.IsLeapDay(year, month, day, era);

        public override bool IsLeapYear(int year, int era) => helper.IsLeapYear(year, era);

        // Returns the leap month in a calendar year of the specified era.  This method returns 0
        // if this calendar does not have leap month, or this year is not a leap year.
//
[System.Runtime.InteropServices.ComVisible(false)]
public override int GetLeapMonth(int year, int era)
{
    return (helper.GetLeapMonth(year, era));
}

public override bool IsLeapMonth(int year, int month, int era)
{
    return (helper.IsLeapMonth(year, month, era));
}

public override DateTime ToDateTime(int year, int month, int day, int hour, int minute, int second, int millisecond, int era)
{
    return (helper.ToDateTime(year, month, day, hour, minute, second, millisecond, era));
}

public override int[] Eras
{
    get
    {
        return (helper.Eras);
    }
}

private const int DEFAULT_TWO_DIGIT_YEAR_MAX = 4362;

public override int TwoDigitYearMax
{
    get
    {
        // Lazily read the system-configured setting on first access.
        if (twoDigitYearMax == -1)
        {
            twoDigitYearMax = GetSystemTwoDigitYearSetting(ID, DEFAULT_TWO_DIGIT_YEAR_MAX);
        }
        return (twoDigitYearMax);
    }

    set
    {
        VerifyWritable();
        if (value < 99 || value > helper.MaxYear)
        {
            // BUGFIX: the original passed "year" as the parameter name, but
            // a property setter's implicit parameter is 'value'.
            throw new ArgumentOutOfRangeException(
                        nameof(value),
                        String.Format(
                            CultureInfo.CurrentCulture,
                            SR.ArgumentOutOfRange_Range,
                            99,
                            helper.MaxYear));
        }
        twoDigitYearMax = value;
    }
}

/// <summary>
/// Expands a two-digit year into a full (Korean-era) year using
/// TwoDigitYearMax; years >= 100 pass through unchanged.
/// </summary>
public override int ToFourDigitYear(int year)
{
    if (year < 0)
    {
        throw new ArgumentOutOfRangeException("year",
            SR.ArgumentOutOfRange_NeedNonNegNum);
    }
    Contract.EndContractBlock();

    return (helper.ToFourDigitYear(year, this.TwoDigitYearMax));
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Xml;
using System.Xml.Linq;
using System.Runtime.Versioning;

namespace System.Xml.Schema
{
    // Drives an XmlSchemaValidator over an XLinq (LINQ to XML) tree.
    // Walks elements/attributes directly via XLinq internals (lastAttr/next/content)
    // instead of going through an XmlReader, optionally attaching PSVI
    // (XmlSchemaInfo annotations, default attributes/values) to the tree.
    internal class XNodeValidator
    {
        XmlSchemaSet schemas;
        ValidationEventHandler validationEventHandler;
        XObject source;                 // node currently being validated; reported as "sender" in callbacks
        bool addSchemaInfo;             // when true, PSVI annotations and defaults are written back into the tree
        XmlNamespaceManager namespaceManager;
        XmlSchemaValidator validator;

        // Interns XmlSchemaInfo instances so structurally-equal infos share one annotation object.
        Dictionary<XmlSchemaInfo, XmlSchemaInfo> schemaInfos;
        ArrayList defaultAttributes;    // reused scratch list for GetUnspecifiedDefaultAttributes
        XName xsiTypeName;              // xsi:type
        XName xsiNilName;               // xsi:nil

        public XNodeValidator(XmlSchemaSet schemas, ValidationEventHandler validationEventHandler) {
            this.schemas = schemas;
            this.validationEventHandler = validationEventHandler;
            XNamespace xsi = XNamespace.Get("http://www.w3.org/2001/XMLSchema-instance");
            xsiTypeName = xsi.GetName("type");
            xsiNilName = xsi.GetName("nil");
        }

        // Entry point: validates a document root, an element subtree, or a single
        // (non-namespace-declaration) attribute. Throws InvalidOperationException for
        // any other node kind, a rootless document, or a parentless attribute.
        public void Validate(XObject source, XmlSchemaObject partialValidationType, bool addSchemaInfo) {
            this.source = source;
            this.addSchemaInfo = addSchemaInfo;
            XmlSchemaValidationFlags validationFlags = XmlSchemaValidationFlags.AllowXmlAttributes;
            XmlNodeType nt = source.NodeType;
            switch (nt) {
                case XmlNodeType.Document:
                    // Validate the root element; identity constraints only apply document-wide.
                    source = ((XDocument)source).Root;
                    if (source == null) throw new InvalidOperationException(SR.Format(SR.InvalidOperation_MissingRoot));
                    validationFlags |= XmlSchemaValidationFlags.ProcessIdentityConstraints;
                    break;
                case XmlNodeType.Element:
                    break;
                case XmlNodeType.Attribute:
                    // Namespace declarations are not schema-validatable attributes.
                    if (((XAttribute)source).IsNamespaceDeclaration) goto default;
                    if (source.Parent == null) throw new InvalidOperationException(SR.Format(SR.InvalidOperation_MissingParent));
                    break;
                default:
                    throw new InvalidOperationException(SR.Format(SR.InvalidOperation_BadNodeType, nt));
            }
            namespaceManager = new XmlNamespaceManager(schemas.NameTable);
            // Seed in-scope namespaces from ancestors so prefixes resolve during validation.
            PushAncestorsAndSelf(source.Parent);

            validator = new XmlSchemaValidator(schemas.NameTable, schemas, namespaceManager, validationFlags);
            validator.ValidationEventHandler += new ValidationEventHandler(ValidationCallback);
            validator.XmlResolver = null;
            if (partialValidationType != null) {
                validator.Initialize(partialValidationType);
            }
            else {
                validator.Initialize();
            }

            IXmlLineInfo orginal = SaveLineInfo(source);
            if (nt == XmlNodeType.Attribute) {
                ValidateAttribute((XAttribute)source);
            }
            else {
                ValidateElement((XElement)source);
            }
            validator.EndValidation();
            RestoreLineInfo(orginal);
        }

        // Builds the PSVI for a schema-supplied default attribute. For union types,
        // probes member types in order to find which one the default value parses as.
        XmlSchemaInfo GetDefaultAttributeSchemaInfo(XmlSchemaAttribute sa) {
            XmlSchemaInfo si = new XmlSchemaInfo();
            si.IsDefault = true;
            si.IsNil = false;
            si.SchemaAttribute = sa;
            XmlSchemaSimpleType st = sa.AttributeSchemaType;
            si.SchemaType = st;

            // This works around a limitation of XmlSchemaInfo: determine the union member
            // type by re-parsing the default value against each member until one accepts it.
            if (st.Datatype.Variety == XmlSchemaDatatypeVariety.Union) {
                string value = GetDefaultValue(sa);
                foreach (XmlSchemaSimpleType mt in ((XmlSchemaSimpleTypeUnion)st.Content).BaseMemberTypes) {
                    object typedValue = null;
                    try {
                        typedValue = mt.Datatype.ParseValue(value, schemas.NameTable, namespaceManager);
                    }
                    catch (XmlSchemaException) {
                        // Deliberate best-effort: a member that rejects the value is simply skipped.
                    }
                    if (typedValue != null) {
                        si.MemberType = mt;
                        break;
                    }
                }
            }
            si.Validity = XmlSchemaValidity.Valid;
            return si;
        }

        // Resolves attribute refs to their global declaration; fixed value wins over default.
        string GetDefaultValue(XmlSchemaAttribute sa) {
            XmlQualifiedName name = sa.RefName;
            if (!name.IsEmpty) {
                sa = schemas.GlobalAttributes[name] as XmlSchemaAttribute;
                if (sa == null) return null;
            }
            string s = sa.FixedValue;
            if (s != null) return s;
            return sa.DefaultValue;
        }

        // Resolves element refs to their global declaration; fixed value wins over default.
        string GetDefaultValue(XmlSchemaElement se) {
            XmlQualifiedName name = se.RefName;
            if (!name.IsEmpty) {
                se = schemas.GlobalElements[name] as XmlSchemaElement;
                if (se == null) return null;
            }
            string s = se.FixedValue;
            if (s != null) return s;
            return se.DefaultValue;
        }

        // Replaces any existing XmlSchemaInfo annotation on o with an interned,
        // structurally-deduplicated instance (see XmlSchemaInfoEqualityComparer).
        void ReplaceSchemaInfo(XObject o, XmlSchemaInfo schemaInfo) {
            if (schemaInfos == null) {
                schemaInfos = new Dictionary<XmlSchemaInfo, XmlSchemaInfo>(new XmlSchemaInfoEqualityComparer());
            }
            XmlSchemaInfo si = o.Annotation<XmlSchemaInfo>();
            if (si != null) {
                if (!schemaInfos.ContainsKey(si)) {
                    schemaInfos.Add(si, si);
                }
                o.RemoveAnnotations<XmlSchemaInfo>();
            }
            if (!schemaInfos.TryGetValue(schemaInfo, out si)) {
                si = schemaInfo;
                schemaInfos.Add(si, si);
            }
            o.AddAnnotation(si);
        }

        // Collects namespace declarations from e and all its ancestors into
        // namespaceManager. Inner (closer) declarations win because HasNamespace
        // guards against overwriting an already-registered prefix.
        // NOTE: attributes form a circular singly-linked list; lastAttr.next is the first.
        void PushAncestorsAndSelf(XElement e) {
            while (e != null) {
                XAttribute a = e.lastAttr;
                if (a != null) {
                    do {
                        a = a.next;
                        if (a.IsNamespaceDeclaration) {
                            string localName = a.Name.LocalName;
                            if (localName == "xmlns") {
                                localName = string.Empty; // default namespace
                            }
                            if (!namespaceManager.HasNamespace(localName)) {
                                namespaceManager.AddNamespace(localName, a.Value);
                            }
                        }
                    } while (a != e.lastAttr);
                }
                e = e.parent as XElement;
            }
        }

        // Opens a namespace scope for e, registers its namespace declarations and
        // captures xsi:type / xsi:nil attribute values if present.
        void PushElement(XElement e, ref string xsiType, ref string xsiNil) {
            namespaceManager.PushScope();
            XAttribute a = e.lastAttr;
            if (a != null) {
                do {
                    a = a.next;
                    if (a.IsNamespaceDeclaration) {
                        string localName = a.Name.LocalName;
                        if (localName == "xmlns") {
                            localName = string.Empty;
                        }
                        namespaceManager.AddNamespace(localName, a.Value);
                    }
                    else {
                        XName name = a.Name;
                        if (name == xsiTypeName) {
                            xsiType = a.Value;
                        }
                        else if (name == xsiNilName) {
                            xsiNil = a.Value;
                        }
                    }
                } while (a != e.lastAttr);
            }
        }

        // Points the validator's line-info provider at the current node (if it carries
        // line info) and returns the previous provider for later restoration.
        IXmlLineInfo SaveLineInfo(XObject source) {
            IXmlLineInfo previousLineInfo = validator.LineInfoProvider;
            validator.LineInfoProvider = source as IXmlLineInfo;
            return previousLineInfo;
        }

        void RestoreLineInfo(IXmlLineInfo originalLineInfo) {
            validator.LineInfoProvider = originalLineInfo;
        }

        // Validates a single attribute value; attaches PSVI when addSchemaInfo is set.
        void ValidateAttribute(XAttribute a) {
            IXmlLineInfo original = SaveLineInfo(a);
            XmlSchemaInfo si = addSchemaInfo ? new XmlSchemaInfo() : null;
            source = a;
            validator.ValidateAttribute(a.Name.LocalName, a.Name.NamespaceName, a.Value, si);
            if (addSchemaInfo) {
                ReplaceSchemaInfo(a, si);
            }
            RestoreLineInfo(original);
        }

        // Validates all non-namespace attributes of e, then (optionally) materializes
        // schema-default attributes that were not specified and appends them to e.
        void ValidateAttributes(XElement e) {
            XAttribute a = e.lastAttr;
            IXmlLineInfo orginal = SaveLineInfo(a); // SaveLineInfo tolerates a == null
            if (a != null) {
                do {
                    a = a.next;
                    if (!a.IsNamespaceDeclaration) {
                        ValidateAttribute(a);
                    }
                } while (a != e.lastAttr);
                source = e;
            }
            if (addSchemaInfo) {
                if (defaultAttributes == null) {
                    defaultAttributes = new ArrayList();
                }
                else {
                    defaultAttributes.Clear();
                }
                validator.GetUnspecifiedDefaultAttributes(defaultAttributes);
                foreach (XmlSchemaAttribute sa in defaultAttributes) {
                    a = new XAttribute(XNamespace.Get(sa.QualifiedName.Namespace).GetName(sa.QualifiedName.Name), GetDefaultValue(sa));
                    ReplaceSchemaInfo(a, GetDefaultAttributeSchemaInfo(sa));
                    e.Add(a);
                }
            }
            RestoreLineInfo(orginal);
        }

        // Recursive element validation: start tag, attributes, content, end tag.
        // Order of validator calls mirrors the streaming XmlSchemaValidator contract.
        void ValidateElement(XElement e) {
            XmlSchemaInfo si = addSchemaInfo ? new XmlSchemaInfo() : null;
            string xsiType = null;
            string xsiNil = null;
            PushElement(e, ref xsiType, ref xsiNil);
            IXmlLineInfo original = SaveLineInfo(e);
            source = e;
            validator.ValidateElement(e.Name.LocalName, e.Name.NamespaceName, si, xsiType, xsiNil, null, null);
            ValidateAttributes(e);
            validator.ValidateEndOfAttributes(si);
            ValidateNodes(e);
            validator.ValidateEndElement(si);
            if (addSchemaInfo) {
                if (si.Validity == XmlSchemaValidity.Valid && si.IsDefault) {
                    // Element was empty but the schema supplies a default value; write it in.
                    e.Value = GetDefaultValue(si.SchemaElement);
                }
                ReplaceSchemaInfo(e, si);
            }
            RestoreLineInfo(original);
            namespaceManager.PopScope();
        }

        // Validates the content of e. XLinq stores content either as a circular
        // linked list of XNodes (mixed content) or as a single string (text-only).
        void ValidateNodes(XElement e) {
            XNode n = e.content as XNode;
            IXmlLineInfo orginal = SaveLineInfo(n);
            if (n != null) {
                do {
                    n = n.next; // content points at the LAST node; last.next is the first
                    XElement c = n as XElement;
                    if (c != null) {
                        ValidateElement(c);
                    }
                    else {
                        XText t = n as XText;
                        if (t != null) {
                            string s = t.Value;
                            if (s.Length > 0) {
                                validator.LineInfoProvider = t as IXmlLineInfo;
                                validator.ValidateText(s);
                            }
                        }
                    }
                } while (n != e.content);
                source = e;
            }
            else {
                // Text-only element stored compactly as a string.
                string s = e.content as string;
                if (s != null && s.Length > 0) {
                    validator.ValidateText(s);
                }
            }
            RestoreLineInfo(orginal);
        }

        // Forwards validation events to the user handler (substituting the current
        // XObject as sender); without a handler, errors are thrown, warnings ignored.
        void ValidationCallback(object sender, ValidationEventArgs e) {
            if (validationEventHandler != null) {
                validationEventHandler(source, e);
            }
            else if (e.Severity == XmlSeverityType.Error) {
                throw e.Exception;
            }
        }
    }

    // Structural equality for XmlSchemaInfo so ReplaceSchemaInfo can intern
    // annotations: equal PSVI content ==> one shared instance tree-wide.
    internal class XmlSchemaInfoEqualityComparer : IEqualityComparer<XmlSchemaInfo>
    {
        public bool Equals(XmlSchemaInfo si1, XmlSchemaInfo si2) {
            if (si1 == si2) return true;
            if (si1 == null || si2 == null) return false;
            return si1.ContentType == si2.ContentType &&
                si si1.IsDefault == si2.IsDefault &&
                si1.IsNil == si2.IsNil &&
                (object)si1.MemberType == (object)si2.MemberType &&
                (object)si1.SchemaAttribute == (object)si2.SchemaAttribute &&
                (object)si1.SchemaElement == (object)si2.SchemaElement &&
                (object)si1.SchemaType == (object)si2.SchemaType &&
                si1.Validity == si2.Validity;
        }

        public int GetHashCode(XmlSchemaInfo si) {
            if (si == null) return 0;
            int h = (int)si.ContentType;
            if (si.IsDefault) {
                h ^= 1;
            }
            if (si.IsNil) {
                h ^= 1;
            }
            XmlSchemaSimpleType memberType = si.MemberType;
            if (memberType != null) {
                h ^= memberType.GetHashCode();
            }
            XmlSchemaAttribute schemaAttribute = si.SchemaAttribute;
            if (schemaAttribute != null) {
                h ^= schemaAttribute.GetHashCode();
            }
            XmlSchemaElement schemaElement = si.SchemaElement;
            if (schemaElement != null) {
                h ^= schemaElement.GetHashCode();
            }
            XmlSchemaType schemaType = si.SchemaType;
            if (schemaType != null) {
                h ^= schemaType.GetHashCode();
            }
            h ^= (int)si.Validity;
            return h;
        }
    }

    /// <summary>
    /// Extension methods
    /// </summary>
    public static class Extensions
    {
        /// <summary>
        /// Gets the schema information that has been assigned to the <see cref="XElement"/> as a result of schema validation.
        /// </summary>
        /// <param name="source">Extension point</param>
        [SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Reviewed by the design group.")]
        public static IXmlSchemaInfo GetSchemaInfo(this XElement source) {
            if (source == null) throw new ArgumentNullException(nameof(source));
            return source.Annotation<IXmlSchemaInfo>();
        }

        /// <summary>
        /// Gets the schema information that has been assigned to the <see cref="XAttribute"/> as a result of schema validation.
        /// </summary>
        /// <param name="source">Extension point</param>
        [SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Reviewed by the design group.")]
        public static IXmlSchemaInfo GetSchemaInfo(this XAttribute source) {
            if (source == null) throw new ArgumentNullException(nameof(source));
            return source.Annotation<IXmlSchemaInfo>();
        }

        /// <summary>
        /// Validate a <see cref="XDocument"/>
        /// </summary>
        /// <param name="source">Extension point</param>
        /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
        /// <param name="validationEventHandler">The <see cref="ValidationEventHandler"/>
        /// that receives schema validation warnings and errors encountered during schema
        /// validation</param>
        public static void Validate(this XDocument source, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler) {
            source.Validate(schemas, validationEventHandler, false);
        }

        /// <summary>
        /// Validate a <see cref="XDocument"/>
        /// </summary>
        /// <param name="source">Extension point</param>
        /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
        /// <param name="validationEventHandler">The <see cref="ValidationEventHandler"/>
        /// that receives schema validation warnings and errors encountered during schema
        /// validation</param>
        /// <param name="addSchemaInfo">If enabled the <see cref="XDocument"/> and the corresponding
        /// subtree is augmented with PSVI in the form of <see cref="IXmlSchemaInfo"/> annotations,
        /// default attributes and default element values</param>
        [SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Reviewed by the design group.")]
        public static void Validate(this XDocument source, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler, bool addSchemaInfo) {
            if (source == null) throw new ArgumentNullException(nameof(source));
            if (schemas == null) throw new ArgumentNullException(nameof(schemas));
            new XNodeValidator(schemas, validationEventHandler).Validate(source, null, addSchemaInfo);
        }

        /// <summary>
        /// Validate a <see cref="XElement"/>
        /// </summary>
        /// <param name="source">Extension point</param>
        /// <param name="partialValidationType">An <see cref="XmlSchemaElement"/> or
        /// <see cref="XmlSchemaType"/> object used to initialize the partial validation
        /// context</param>
        /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
        /// <param name="validationEventHandler">The <see cref="ValidationEventHandler"/> that
        /// receives schema validation warnings and errors encountered during schema
        /// validation</param>
        public static void Validate(this XElement source, XmlSchemaObject partialValidationType, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler) {
            source.Validate(partialValidationType, schemas, validationEventHandler, false);
        }

        /// <summary>
        /// Validate a <see cref="XElement"/>
        /// </summary>
        /// <param name="source">Extension point</param>
        /// <param name="partialValidationType">An <see cref="XmlSchemaElement"/> or
        /// <see cref="XmlSchemaType"/> object used to initialize the partial validation
        /// context</param>
        /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
        /// <param name="validationEventHandler">The <see cref="ValidationEventHandler"/> that
        /// receives schema validation warnings and errors encountered during schema
        /// validation</param>
        /// <param name="addSchemaInfo">If enabled the <see cref="XElement"/> and the corresponding
        /// subtree is augmented with PSVI in the form of <see cref="IXmlSchemaInfo"/> annotations,
        /// default attributes and default element values</param>
        [SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Reviewed by the design group.")]
        public static void Validate(this XElement source, XmlSchemaObject partialValidationType, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler, bool addSchemaInfo) {
            if (source == null) throw new ArgumentNullException(nameof(source));
            if (partialValidationType == null) throw new ArgumentNullException(nameof(partialValidationType));
            if (schemas == null) throw new ArgumentNullException(nameof(schemas));
            new XNodeValidator(schemas, validationEventHandler).Validate(source, partialValidationType, addSchemaInfo);
        }

        /// <summary>
        /// Validate a <see cref="XAttribute"/>
        /// </summary>
        /// <param name="source">Extension point</param>
        /// <param name="partialValidationType">An <see cref="XmlSchemaAttribute"/> or
        /// <see cref="XmlSchemaType"/> object used to initialize the partial validation
        /// context</param>
        /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
        /// <param name="validationEventHandler">The <see cref="ValidationEventHandler"/> that
        /// receives schema validation warnings and errors encountered during schema
        /// validation</param>
        public static void Validate(this XAttribute source, XmlSchemaObject partialValidationType, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler) {
            source.Validate(partialValidationType, schemas, validationEventHandler, false);
        }

        /// <summary>
        /// Validate a <see cref="XAttribute"/>
        /// </summary>
        /// <param name="source">Extension point</param>
        /// <param name="partialValidationType">An <see cref="XmlSchemaAttribute"/> or
        /// <see cref="XmlSchemaType"/> object used to initialize the partial validation
        /// context</param>
        /// <param name="schemas">The <see cref="XmlSchemaSet"/> used for validation</param>
        /// <param name="validationEventHandler">The <see cref="ValidationEventHandler"/> that
        /// receives schema validation warnings and errors encountered during schema
        /// validation</param>
        /// <param name="addSchemaInfo">If enabled the <see cref="XAttribute"/> is augmented with PSVI
        /// in the form of <see cref="IXmlSchemaInfo"/> annotations, default attributes and
        /// default element values</param>
        [SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters", Justification = "Reviewed by the design group.")]
        public static void Validate(this XAttribute source, XmlSchemaObject partialValidationType, XmlSchemaSet schemas, ValidationEventHandler validationEventHandler, bool addSchemaInfo) {
            if (source == null) throw new ArgumentNullException(nameof(source));
            if (partialValidationType == null) throw new ArgumentNullException(nameof(partialValidationType));
            if (schemas == null) throw new ArgumentNullException(nameof(schemas));
            new XNodeValidator(schemas, validationEventHandler).Validate(source, partialValidationType, addSchemaInfo);
        }
    }
}
/* ****************************************************************************
 *
 * Copyright (c) Microsoft Corporation. 
 *
 * This source code is subject to terms and conditions of the Apache License, Version 2.0. A 
 * copy of the license can be found in the License.html file at the root of this distribution. If 
 * you cannot locate the  Apache License, Version 2.0, please send an email to 
 * dlr@microsoft.com. By using this source code in any fashion, you are agreeing to be bound 
 * by the terms of the Apache License, Version 2.0.
 *
 * You must not remove this notice, or any other, from this software.
 *
 *
 * ***************************************************************************/

#if FEATURE_CORE_DLR
using System.Linq.Expressions;
#else
using Microsoft.Scripting.Ast;
#endif

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Dynamic;
using System.Runtime.CompilerServices;
using System.Threading;

using Microsoft.Scripting.Runtime;
using Microsoft.Scripting.Utils;

using IronPython.Runtime.Types;
using IronPython.Runtime.Binding;

namespace IronPython.Runtime.Operations {
    // These operations get linked into all new-style classes. 
    public static class UserTypeOps {
        // Verifies that a user __str__ returned a string; raises Python TypeError otherwise.
        public static string ToStringReturnHelper(object o) {
            // NOTE(review): the `o != null` check is redundant — `o is string` is
            // already false for null. Left as-is (behavior-preserving doc pass).
            if (o is string && o != null) {
                return (string)o;
            }
            throw PythonOps.TypeError("__str__ returned non-string type ({0})", PythonTypeOps.GetName(o));
        }

        // Lazily publishes an instance __dict__; first writer wins (CAS against null),
        // subsequent callers get the already-published dictionary.
        public static PythonDictionary SetDictHelper(ref PythonDictionary dict, PythonDictionary value) {
            if (System.Threading.Interlocked.CompareExchange<PythonDictionary>(ref dict, value, null) == null)
                return value;
            return dict;
        }

        // Invokes the descriptor protocol (__get__) on a property slot; raises a
        // Python TypeError when the object is not a descriptor.
        public static object GetPropertyHelper(object prop, object instance, string name) {
            PythonTypeSlot desc = prop as PythonTypeSlot;
            if (desc == null) {
                throw PythonOps.TypeError("Expected property for {0}, but found {1}",
                    name.ToString(), DynamicHelpers.GetPythonType(prop).Name);
            }
            object value;
            desc.TryGetValue(DefaultContext.Default, instance, DynamicHelpers.GetPythonType(instance), out value);
            return value;
        }

        // Invokes the descriptor protocol (__set__) on a property slot; raises a
        // Python TypeError when the object is not a settable descriptor.
        public static void SetPropertyHelper(object prop, object instance, object newValue, string name) {
            PythonTypeSlot desc = prop as PythonTypeSlot;
            if (desc == null) {
                throw PythonOps.TypeError("Expected settable property for {0}, but found {1}",
                    name.ToString(), DynamicHelpers.GetPythonType(prop).Name);
            }
            desc.TrySetValue(DefaultContext.Default, instance, DynamicHelpers.GetPythonType(instance), newValue);
        }

        // Stores the weakref tracker in the reserved last slot; returns false when the
        // type does not support weak references.
        public static bool SetWeakRefHelper(IPythonObject obj, WeakRefTracker value) {
            if (!obj.PythonType.IsWeakReferencable) {
                return false;
            }

            object[] slots = obj.GetSlotsCreate();
            slots[slots.Length - 1] = value;
            return true;
        }

        // Reads the weakref tracker from the reserved last slot (null when no slots yet).
        public static WeakRefTracker GetWeakRefHelper(IPythonObject obj) {
            object[] slots = obj.GetSlots();
            if (slots == null) {
                return null;
            }
            return (WeakRefTracker)slots[slots.Length - 1];
        }

        // Installs a finalizer tracker; if another tracker was already present the new
        // one loses the race and its finalization is suppressed.
        public static void SetFinalizerHelper(IPythonObject obj, WeakRefTracker value) {
            object[] slots = obj.GetSlotsCreate();
            if (Interlocked.CompareExchange(ref slots[slots.Length - 1], value, null) != null) {
                GC.SuppressFinalize(value);
            }
        }

        // Lazily allocates the slot array (+1 entry reserved for the weakref tracker);
        // thread-safe via CAS, losers observe the winner's array.
        public static object[] GetSlotsCreate(IPythonObject obj, ref object[] slots) {
            if (slots != null) {
                return slots;
            }

            Interlocked.CompareExchange(
                ref slots,
                new object[obj.PythonType.SlotCount + 1],   // weakref is stored at the end
                null);

            return slots;
        }

        // Resolves an event-like member (descriptor or plain callable) and calls it
        // with the handler value — implements `obj.event += handler` / `-=` semantics.
        public static void AddRemoveEventHelper(object method, IPythonObject instance, object eventValue, string name) {
            object callable = method;

            // TODO: dt gives us a PythonContext which we should use
            PythonType dt = instance.PythonType;
            PythonTypeSlot dts = method as PythonTypeSlot;
            if (dts != null) {
                if (!dts.TryGetValue(DefaultContext.Default, instance, dt, out callable))
                    throw PythonOps.AttributeErrorForMissingAttribute(dt.Name, name);
            }

            if (!PythonOps.IsCallable(DefaultContext.Default, callable)) {
                throw PythonOps.TypeError("Expected callable value for {0}, but found {1}", name.ToString(),
                    PythonTypeOps.GetName(method));
            }

            PythonCalls.Call(callable, eventValue);
        }

        // DLR hook: wraps a user object in its meta-object for dynamic binding.
        public static DynamicMetaObject/*!*/ GetMetaObjectHelper(IPythonObject self, Expression/*!*/ parameter, DynamicMetaObject baseMetaObject) {
            return new Binding.MetaUserObject(parameter, BindingRestrictions.Empty, baseMetaObject, self);
        }

        // Attribute lookup for instances whose MRO mixes new-style types and old-style
        // classes: instance dict first, then each MRO entry via the appropriate protocol.
        public static bool TryGetMixedNewStyleOldStyleSlot(CodeContext context, object instance, string name, out object value) {
            IPythonObject sdo = instance as IPythonObject;
            if (sdo != null) {
                PythonDictionary dict = sdo.Dict;
                if (dict != null && dict.TryGetValue(name, out value)) {
                    return true;
                }
            }

            PythonType dt = DynamicHelpers.GetPythonType(instance);

            foreach (PythonType type in dt.ResolutionOrder) {
                PythonTypeSlot dts;
                if (type != TypeCache.Object && type.OldClass != null) {
                    // we're an old class, check the old-class way
                    OldClass oc = type.OldClass;

                    if (oc.TryGetBoundCustomMember(context, name, out value)) {
                        value = oc.GetOldStyleDescriptor(context, value, instance, oc);
                        return true;
                    }
                } else if (type.TryLookupSlot(context, name, out dts)) {
                    // we're a dynamic type, check the dynamic type way
                    return dts.TryGetValue(context, instance, dt, out value);
                }
            }

            value = null;
            return false;
        }

        // Optimized dict read: uses the precomputed key index when the dict storage
        // still matches the cached key version, else falls back to a name lookup.
        public static bool TryGetDictionaryValue(PythonDictionary dict, string name, int keyVersion, int keyIndex, out object res) {
            CustomInstanceDictionaryStorage dictStorage;
            if (dict != null) {
                if ((dictStorage = dict._storage as CustomInstanceDictionaryStorage) != null && dictStorage.KeyVersion == keyVersion) {
                    if (dictStorage.TryGetValue(keyIndex, out res)) {
                        return true;
                    }
                } else if (dict.TryGetValue(name, out res)) {
                    return true;
                }
            }
            res = null;
            return false;
        }

        // Plain dict write; creates the instance dict on demand.
        public static object SetDictionaryValue(IPythonObject self, string name, object value) {
            PythonDictionary dict = GetDictionary(self);

            return dict[name] = value;
        }

        // Optimized dict write via cached key index (see TryGetDictionaryValue).
        public static object SetDictionaryValueOptimized(IPythonObject ipo, string name, object value, int keysVersion, int index) {
            var dict = UserTypeOps.GetDictionary(ipo);
            CustomInstanceDictionaryStorage storage;

            if ((storage = dict._storage as CustomInstanceDictionaryStorage) != null && storage.KeyVersion == keysVersion) {
                storage.SetExtraValue(index, value);
            } else {
                dict[name] = value;
            }

            return value;
        }

        // Dict write that also lazily publishes the dict field itself (CAS).
        public static object FastSetDictionaryValue(ref PythonDictionary dict, string name, object value) {
            if (dict == null) {
                Interlocked.CompareExchange(ref dict, PythonDictionary.MakeSymbolDictionary(), null);
            }

            return dict[name] = value;
        }

        // Combination of FastSetDictionaryValue and SetDictionaryValueOptimized.
        public static object FastSetDictionaryValueOptimized(PythonType type, ref PythonDictionary dict, string name, object value, int keysVersion, int index) {
            if (dict == null) {
                Interlocked.CompareExchange(ref dict, type.MakeDictionary(), null);
            }

            CustomInstanceDictionaryStorage storage;

            if ((storage = dict._storage as CustomInstanceDictionaryStorage) != null && storage.KeyVersion == keysVersion) {
                storage.SetExtraValue(index, value);
                return value;
            } else {
                return dict[name] = value;
            }
        }

        // Implements `del obj.name` for dict-backed attributes; AttributeError when absent.
        public static object RemoveDictionaryValue(IPythonObject self, string name) {
            PythonDictionary dict = self.Dict;
            if (dict != null) {
                if (dict.Remove(name)) {
                    return null;
                }
            }

            throw PythonOps.AttributeErrorForMissingAttribute(self.PythonType, name);
        }

        // Returns the instance dict, creating it if the type allows one.
        internal static PythonDictionary GetDictionary(IPythonObject self) {
            PythonDictionary dict = self.Dict;
            if (dict == null && self.PythonType.HasDictionary) {
                dict = self.SetDict(self.PythonType.MakeDictionary());
            }
            return dict;
        }

        /// <summary>
        /// Object.ToString() displays the CLI type name.  But we want to display the class name (e.g.
        /// '&lt;foo object at 0x000000000000002C&gt;' unless we've overridden __repr__ but not __str__ in
        /// which case we'll display the result of __repr__.
        /// </summary>
        public static string ToStringHelper(IPythonObject o) {
            return ObjectOps.__str__(DefaultContext.Default, o);
        }

        // Finds a bound override of `name` defined by a user subtype (stops at the
        // first .NET type in the MRO), then falls back to the instance dict.
        public static bool TryGetNonInheritedMethodHelper(PythonType dt, object instance, string name, out object callTarget) {
            // search MRO for other user-types in the chain that are overriding the method
            foreach (PythonType type in dt.ResolutionOrder) {
                if (type.IsSystemType) break;           // hit the .NET types, we're done

                if (LookupValue(type, instance, name, out callTarget)) {
                    return true;
                }
            }

            // check instance
            IPythonObject isdo = instance as IPythonObject;
            PythonDictionary dict;
            if (isdo != null && (dict = isdo.Dict) != null) {
                if (dict.TryGetValue(name, out callTarget))
                    return true;
            }

            callTarget = null;
            return false;
        }

        // Looks up a slot on one type and binds it against the instance.
        private static bool LookupValue(PythonType dt, object instance, string name, out object value) {
            PythonTypeSlot dts;
            if (dt.TryLookupSlot(DefaultContext.Default, name, out dts) &&
                dts.TryGetValue(DefaultContext.Default, instance, dt, out value)) {
                return true;
            }
            value = null;
            return false;
        }

        // Like TryGetNonInheritedMethodHelper but returns the raw (unbound) slot.
        public static bool TryGetNonInheritedValueHelper(IPythonObject instance, string name, out object callTarget) {
            PythonType dt = instance.PythonType;
            PythonTypeSlot dts;
            // search MRO for other user-types in the chain that are overriding the method
            foreach (PythonType type in dt.ResolutionOrder) {
                if (type.IsSystemType) break;           // hit the .NET types, we're done

                if (type.TryLookupSlot(DefaultContext.Default, name, out dts)) {
                    callTarget = dts;
                    return true;
                }
            }

            // check instance
            IPythonObject isdo = instance as IPythonObject;
            PythonDictionary dict;
            if (isdo != null && (dict = isdo.Dict) != null) {
                if (dict.TryGetValue(name, out callTarget))
                    return true;
            }

            callTarget = null;
            return false;
        }

        // Implements __getattribute__ dispatch: tries the user __getattribute__ slot,
        // falling back to __getattr__ on MissingMemberException; raises AttributeError
        // when nothing resolves.
        public static object GetAttribute(CodeContext/*!*/ context, object self, string name, PythonTypeSlot getAttributeSlot, PythonTypeSlot getAttrSlot, SiteLocalStorage<CallSite<Func<CallSite, CodeContext, object, string, object>>>/*!*/ callSite) {
            object value;
            if (callSite.Data == null) {
                callSite.Data = MakeGetAttrSite(context);
            }

            try {
                if (getAttributeSlot.TryGetValue(context, self, ((IPythonObject)self).PythonType, out value)) {
                    return callSite.Data.Target(callSite.Data, context, value, name);
                }
            } catch (MissingMemberException) {
                if (getAttrSlot != null && getAttrSlot.TryGetValue(context, self, ((IPythonObject)self).PythonType, out value)) {
                    return callSite.Data.Target(callSite.Data, context, value, name);
                }

                throw;
            }

            if (getAttrSlot != null && getAttrSlot.TryGetValue(context, self, ((IPythonObject)self).PythonType, out value)) {
                return callSite.Data.Target(callSite.Data, context, value, name);
            }

            throw PythonOps.AttributeError(name);
        }

        // Non-throwing variant of GetAttribute: returns OperationFailed.Value instead
        // of raising when the attribute cannot be resolved.
        public static object GetAttributeNoThrow(CodeContext/*!*/ context, object self, string name, PythonTypeSlot getAttributeSlot, PythonTypeSlot getAttrSlot, SiteLocalStorage<CallSite<Func<CallSite, CodeContext, object, string, object>>>/*!*/ callSite) {
            object value;
            if (callSite.Data == null) {
                callSite.Data = MakeGetAttrSite(context);
            }

            try {
                if (getAttributeSlot.TryGetValue(context, self, ((IPythonObject)self).PythonType, out value)) {
                    return callSite.Data.Target(callSite.Data, context, value, name);
                }
            } catch (MissingMemberException) {
                try {
                    if (getAttrSlot != null && getAttrSlot.TryGetValue(context, self, ((IPythonObject)self).PythonType, out value)) {
                        return callSite.Data.Target(callSite.Data, context, value, name);
                    }

                    return OperationFailed.Value;
                } catch (MissingMemberException) {
                    return OperationFailed.Value;
                }
            }

            try {
                if (getAttrSlot != null && getAttrSlot.TryGetValue(context, self, ((IPythonObject)self).PythonType, out value)) {
                    return callSite.Data.Target(callSite.Data, context, value, name);
                }
            } catch (MissingMemberException) {
            }

            return OperationFailed.Value;
        }

        // Creates the invoke call site used to call __getattribute__/__getattr__ results.
        private static CallSite<Func<CallSite, CodeContext, object, string, object>> MakeGetAttrSite(CodeContext context) {
            return CallSite<Func<CallSite, CodeContext, object, string, object>>.Create(
                PythonContext.GetContext(context).InvokeOne
            );
        }

        #region IValueEquality Helpers
#if CLR2
        public static int GetValueHashCodeHelper(object self) {
            // new-style classes only lookup in slots, not in instance
            // members
            object func;
            if (DynamicHelpers.GetPythonType(self).TryGetBoundMember(DefaultContext.Default, self, "__hash__", out func)) {
                return Converter.ConvertToInt32(PythonCalls.Call(func));
            }

            return self.GetHashCode();
        }

        public static bool ValueEqualsHelper(object self, object other) {
            object res = RichEqualsHelper(self, other);
            if (res != NotImplementedType.Value && res != null && res.GetType() == typeof(bool))
                return (bool)res;

            return false;
        }

        private static object RichEqualsHelper(object self, object other) {
            object res;

            if (PythonTypeOps.TryInvokeBinaryOperator(DefaultContext.Default, self, other, "__eq__", out res))
                return res;

            return NotImplementedType.Value;
        }
#endif
        #endregion

        // Builds an optimized fast-path get binding; bails out (empty result) when the
        // user type inherits from an IDynamicMetaObjectProvider we can't pre-bind.
        internal static Binding.FastBindResult<T> MakeGetBinding<T>(CodeContext codeContext, CallSite<T> site, IPythonObject self, Binding.PythonGetMemberBinder getBinder) where T : class {
            Type finalType = self.PythonType.FinalSystemType;
            if (typeof(IDynamicMetaObjectProvider).IsAssignableFrom(finalType) &&
                !(self is IFastGettable)) {
                // very tricky, user is inheriting from a class which implements IDO, we
                // don't optimize this yet.
                return new Binding.FastBindResult<T>();
            }
            return (Binding.FastBindResult<T>)(object)new Binding.MetaUserObject.FastGetBinderHelper(
                codeContext,
                (CallSite<Func<CallSite, object, CodeContext, object>>)(object)site,
                self,
                getBinder).GetBinding(codeContext, getBinder.Name);
        }

        // Builds an optimized fast-path set binding, specialized per value type for the
        // literal types that commonly appear in code; bails out for IDO base classes.
        internal static FastBindResult<T> MakeSetBinding<T>(CodeContext codeContext, CallSite<T> site, IPythonObject self, object value, Binding.PythonSetMemberBinder setBinder) where T : class {
            if (typeof(IDynamicMetaObjectProvider).IsAssignableFrom(self.GetType().BaseType)) {
                // very tricky, user is inheriting from a class which implements IDO, we
                // don't optimize this yet.
                return new FastBindResult<T>();
            }

            // optimized versions for possible literals that can show up in code.
            Type setType = typeof(T);
            if (setType == typeof(Func<CallSite, object, object, object>)) {
                return (FastBindResult<T>)(object)new Binding.MetaUserObject.FastSetBinderHelper<object>(
                    codeContext,
                    self,
                    value,
                    setBinder).MakeSet();
            } else if (setType == typeof(Func<CallSite, object, string, object>)) {
                return (FastBindResult<T>)(object)new Binding.MetaUserObject.FastSetBinderHelper<string>(
                    codeContext,
                    self,
                    value,
                    setBinder).MakeSet();
            } else if (setType == typeof(Func<CallSite, object, int, object>)) {
                return (FastBindResult<T>)(object)new Binding.MetaUserObject.FastSetBinderHelper<int>(
                    codeContext,
                    self,
                    value,
                    setBinder).MakeSet();
            } else if (setType == typeof(Func<CallSite, object, double, object>)) {
                return (FastBindResult<T>)(object)new Binding.MetaUserObject.FastSetBinderHelper<double>(
                    codeContext,
                    self,
                    value,
                    setBinder).MakeSet();
            } else if (setType == typeof(Func<CallSite, object, List, object>)) {
                return (FastBindResult<T>)(object)new Binding.MetaUserObject.FastSetBinderHelper<List>(
                    codeContext,
                    self,
                    value,
                    setBinder).MakeSet();
            } else if (setType == typeof(Func<CallSite, object, PythonTuple, object>)) {
                return (FastBindResult<T>)(object)new Binding.MetaUserObject.FastSetBinderHelper<PythonTuple>(
                    codeContext,
                    self,
                    value,
                    setBinder).MakeSet();
            } else if (setType == typeof(Func<CallSite, object, PythonDictionary, object>)) {
                return (FastBindResult<T>)(object)new Binding.MetaUserObject.FastSetBinderHelper<PythonDictionary>(
                    codeContext,
                    self,
                    value,
                    setBinder).MakeSet();
            }

            return new FastBindResult<T>();
        }
    }

    /// <summary>
    /// Provides a debug view for user defined types.  This class is declared as public
    /// because it is referred to from generated code.  You should not use this class.
    /// </summary>
    public class UserTypeDebugView {
        private readonly IPythonObject _userObject;

        public UserTypeDebugView(IPythonObject userObject) {
            _userObject = userObject;
        }

        public PythonType __class__ {
            get {
                return _userObject.PythonType;
            }
        }

        // Flattens the instance dict plus initialized slot values into a debugger-
        // friendly list; slot names are gathered base-first from the MRO.
        [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
        internal List<ObjectDebugView> Members {
            get {
                var res = new List<ObjectDebugView>();
                if (_userObject.Dict != null) {
                    foreach (var v in _userObject.Dict) {
                        res.Add(new ObjectDebugView(v.Key, v.Value));
                    }
                }

                // collect any slots on the object
                object[] slots = _userObject.GetSlots();
                if (slots != null) {
                    var mro = _userObject.PythonType.ResolutionOrder;
                    List<string> slotNames = new List<string>();
                    for(int i = mro.Count - 1; i>= 0; i--) {
                        slotNames.AddRange(mro[i].GetTypeSlots());
                    }
                    for (int i = 0; i < slots.Length - 1; i++) {
                        if (slots[i] != Uninitialized.Instance) {
                            res.Add(new ObjectDebugView(slotNames[i], slots[i]));
                        }
                    }
                }
                return res;
            }
        }
    }
}
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Design;
using System.Web.UI.WebControls;
using GuruComponents.Netrix.ComInterop;
using GuruComponents.Netrix.UserInterface.TypeConverters;
using GuruComponents.Netrix.UserInterface.TypeEditors;
using DisplayNameAttribute=GuruComponents.Netrix.UserInterface.TypeEditors.DisplayNameAttribute;
using TE=GuruComponents.Netrix.UserInterface.TypeEditors;

namespace GuruComponents.Netrix.WebEditing.Elements
{

    /// <summary>
    /// This class represents a TD tag.
    /// </summary>
    /// <remarks>
    /// TD means table data cell. If the cell contains a header rather than data, th should be used instead.
    /// TD must be used inside a TR element.
    /// <seealso cref="GuruComponents.Netrix.WebEditing.Elements.TableRowElement">TableRowElement</seealso>
    /// <seealso cref="GuruComponents.Netrix.WebEditing.Elements.TableHeaderElement">TableHeaderElement</seealso>
    /// <seealso cref="GuruComponents.Netrix.WebEditing.Elements.TableElement">TableElement</seealso>
    /// </remarks>
    public class TableCellElement : StyledElement
    {

        // Position of this cell within its table; set from outside the class
        // (protected internal) — presumably by the table layout logic. TODO confirm.
        protected internal int cellsRow, cellsColumn;

        // Lazily-resolved reference to the containing table element (see Table).
        internal TableElement baseTable;

        /// <summary>
        /// A reference to the Table that contains this Cell.
        /// </summary>
        [Browsable(true), TypeConverter(typeof(ExpandableObjectConverter))]
        public TableElement Table
        {
            get
            {
                // Resolve once by walking up the DOM to the nearest <table>; cached thereafter.
                if (baseTable == null)
                {
                    baseTable = HtmlEditor.GenericElementFactory.CreateElement(((HtmlEditor) base.HtmlEditor).GetParentElement(this.GetBaseElement(), "table")) as TableElement;
                }
                return baseTable;
            }
        }

        // Lazily-resolved reference to the containing row element (see Row).
        internal TableRowElement baseRow;

        /// <summary>
        /// A reference to the Row that contains this Cell.
        /// </summary>
        [Browsable(true), TypeConverter(typeof(ExpandableObjectConverter))]
        public TableRowElement Row
        {
            get
            {
                // Resolve once by walking up the DOM to the nearest <tr>; cached thereafter.
                if (baseRow == null)
                {
                    baseRow = HtmlEditor.GenericElementFactory.CreateElement(((HtmlEditor) base.HtmlEditor).GetParentElement(this.GetBaseElement(), "tr")) as TableRowElement;
                }
                return baseRow;
            }
        }

        # region Public Properties

        /// <include file='DocumentorIncludes.xml' path='//WebEditing/Elements[@name="HorizontalAlign"]/*'/>
        [DescriptionAttribute("")]
        [DefaultValueAttribute(HorizontalAlign.Left)]
        [CategoryAttribute("Element Layout")]
        [TypeConverter(typeof(UITypeConverterDropList))]
        [DisplayNameAttribute()]
        public HorizontalAlign align
        {
            get
            {
                return (HorizontalAlign) this.GetEnumAttribute ("align", (HorizontalAlign) 0);
            }

            set
            {
                // Justify is deliberately mapped back to the default value —
                // presumably not supported for table cells; TODO confirm.
                if (value.Equals(HorizontalAlign.Justify))
                {
                    this.SetEnumAttribute ("align", (HorizontalAlign) 0, (HorizontalAlign) 0);
                }
                else
                {
                    this.SetEnumAttribute ("align", value, (HorizontalAlign) 0);
                }
                return;
            }
        }

        /// <include file='DocumentorIncludes.xml' path='//WebEditing/Elements[@name="VerticalAlign"]/*'/>
        [DescriptionAttribute("")]
        [DefaultValueAttribute(VerticalAlign.NotSet)]
        [CategoryAttribute("Element Layout")]
        [TypeConverter(typeof(UITypeConverterDropList))]
        [DisplayName()]
        public VerticalAlign valign
        {
            get
            {
                return (VerticalAlign) this.GetEnumAttribute ("valign", (VerticalAlign) 0);
            }

            set
            {
                this.SetEnumAttribute ("valign", value, (VerticalAlign) 0);
            }
        }

        /// <summary>
        /// A background image for the cell.
        /// </summary>
        /// <remarks>
        /// The value should be relative path to an image. Remember that absolute paths don't work on a webserver.
        /// </remarks>
        [DescriptionAttribute("")]
        [CategoryAttribute("Element Layout")]
        [EditorAttribute(
             typeof(UITypeEditorUrl),
             typeof(UITypeEditor))]
        [DisplayName()]
        public string background
        {
            set
            {
                // Stored and returned as a URL relative to the document.
                this.SetStringAttribute ("background", this.GetRelativeUrl(value));
            }

            get
            {
                return this.GetRelativeUrl (this.GetStringAttribute ("background"));
            }
        }

        /// <summary>
        /// The background color of the cell.
        /// </summary>
        [DescriptionAttribute("")]
        [DefaultValueAttribute("")]
        [CategoryAttribute("Element Layout")]
        [TypeConverterAttribute(typeof(UITypeConverterColor))]
        [EditorAttribute(
             typeof(UITypeEditorColor),
             typeof(UITypeEditor))]
        [DisplayName()]
        public Color bgColor
        {
            set
            {
                this.SetColorAttribute ("bgColor", value);
            }

            get
            {
                return this.GetColorAttribute ("bgColor");
            }
        }

        /// <summary>
        /// The color of the cells border.
        /// </summary>
        [DescriptionAttribute("")]
        [DefaultValueAttribute("")]
        [CategoryAttribute("Element Layout")]
        [TypeConverterAttribute(typeof(UITypeConverterColor))]
        [EditorAttribute(
             typeof(UITypeEditorColor),
             typeof(UITypeEditor))]
        [DisplayName()]
        public Color borderColor
        {
            set
            {
                this.SetColorAttribute ("borderColor", value);
            }

            get
            {
                return this.GetColorAttribute ("borderColor");
            }
        }

        /// <summary>
        /// The number of columns this cell spans.
        /// </summary>
        /// <remarks>
        /// Changing this value will destroy the table structure if the spanned
        /// column does contain cell on other rows with the wrong number of cells.
        /// </remarks>
        [DescriptionAttribute("")]
        [DefaultValueAttribute(1)]
        [CategoryAttribute("Element Layout")]
        [EditorAttribute(
             typeof(UITypeEditorInt),
             typeof(UITypeEditor))]
        [DisplayName()]
        public int colSpan
        {
            get
            {
                return this.GetIntegerAttribute ("colSpan", 1);
            }

            set
            {
                this.SetIntegerAttribute ("colSpan", value, 1);
            }
        }

        /// <summary>
        /// The HEIGHT of the element.
        /// </summary>
        [DescriptionAttribute("")]
        [DefaultValueAttribute("")]
        [CategoryAttribute("Element Layout")]
        [EditorAttribute(
             typeof(UITypeEditorUnit),
             typeof(UITypeEditor))]
        [DisplayName()]
        public Unit height
        {
            set
            {
                this.SetUnitAttribute ("height", value);
            }

            get
            {
                return this.GetUnitAttribute ("height");
            }
        }

        /// <summary>
        /// The WIDTH of the element.
        /// </summary>
        [DescriptionAttribute("")]
        [DefaultValueAttribute("")]
        [CategoryAttribute("Element Layout")]
        [EditorAttribute(
             typeof(UITypeEditorUnit),
             typeof(UITypeEditor))]
        [DisplayName()]
        public Unit width
        {
            set
            {
                this.SetUnitAttribute ("width", value);
            }

            get
            {
                return this.GetUnitAttribute ("width");
            }
        }

        /// <summary>
        /// Suppresses the text wrapping of the cell.
        /// </summary>
        /// <remarks>Setting this attribute can enhance the cell width.</remarks>
        [DescriptionAttribute("")]
        [DefaultValueAttribute(false)]
        [CategoryAttribute("Element Layout")]
        [TypeConverter(typeof(UITypeConverterDropList))]
        [DisplayName()]
        public bool noWrap
        {
            get
            {
                return this.GetBooleanAttribute ("noWrap");
            }

            set
            {
                this.SetBooleanAttribute ("noWrap", value);
            }
        }

        /// <summary>
        /// The script handler attached to the element's onResize event.
        /// </summary>
        [DescriptionAttribute("")]
        [DefaultValueAttribute("")]
        [CategoryAttribute("JavaScript Events")]
        [EditorAttribute(
             typeof(UITypeEditorString),
             typeof(UITypeEditor))]
        [DisplayName()]
        [ScriptingVisible()]
        public string ScriptOnResize
        {
            set
            {
                this.SetStringAttribute ("onResize", value);
            }

            get
            {
                return this.GetStringAttribute ("onResize");
            }
        }

        /// <summary>
        /// The number of rows this cell spans.
        /// </summary>
        [DescriptionAttribute("")]
        [DefaultValueAttribute(1)]
        [CategoryAttribute("Element Layout")]
        [EditorAttribute(
             typeof(UITypeEditorInt),
             typeof(UITypeEditor))]
        [DisplayName()]
        public int rowSpan
        {
            get
            {
                return this.GetIntegerAttribute ("rowSpan", 1);
            }

            set
            {
                this.SetIntegerAttribute ("rowSpan", value, 1);
            }
        }

        # endregion

        // Creates a detached <td> not bound to an editor instance.
        public TableCellElement() : base("td", null)
        {
        }

        /// <summary>
        /// Creates the specified element.
        /// </summary>
        /// <remarks>
        /// The element is being created and attached to the current document, but nevertheless not visible,
        /// until it's being placed anywhere within the DOM. To attach an element it's possible to either
        /// use the <see cref="ElementDom"/> property of any other already placed element and refer to this
        /// DOM or use the body element (<see cref="HtmlEditor.GetBodyElement"/>) and add the element there. Also, in
        /// case of user interactive solutions, it's possible to add an element near the current caret
        /// position, using <see cref="HtmlEditor.CreateElementAtCaret(string)"/> method.
        /// <para>
        /// Note: Invisible elements do neither appear in the DOM nor do they get saved.
        /// </para>
        /// </remarks>
        /// <param name="editor">The editor this element belongs to.</param>
        public TableCellElement(IHtmlEditor editor) : base("td", editor)
        {
        }

        /// <include file='DocumentorIncludes.xml' path='//WebEditing/Elements[@name="PublicElementConstructor"]'/>
        public TableCellElement(string tag) : base(tag, null)
        {
        }

        // Used by derived cell types (e.g. header cells) to supply their own tag name.
        protected TableCellElement(string tag, IHtmlEditor editor) : base (tag, editor)
        {
        }

        // Wraps an existing native MSHTML element peer.
        internal TableCellElement(Interop.IHTMLElement peer, IHtmlEditor editor) : base (peer, editor)
        {
        }

    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Web;
using Aspose.Barcode.Live.Demos.UI.Config;
using Aspose.Barcode.Live.Demos.UI.Controllers;

namespace Aspose.Barcode.Live.Demos.UI.Models
{
    /// <summary>
    /// View model shared by the demo-application pages; aggregates resource strings,
    /// page titles/metadata, upload settings and "save as" options for one app.
    /// </summary>
    public class ViewModel
    {
        // Maximum number of files the upload control accepts.
        public int MaximumUploadFiles { get; set; }

        /// <summary>
        /// Name of the product (e.g., words)
        /// </summary>
        public string Product { get; set; }

        // The controller that created this model; used to reach ViewBag and Request.
        public BaseController Controller;

        /// <summary>
        /// Product + AppName, e.g. wordsMerger
        /// </summary>
        public string ProductAppName { get; set; }

        private AsposeBarCodeContext _atcContext;

        // Lazily-created per-request context (backed by HttpContext.Current).
        public AsposeBarCodeContext AsposeBarCodeContext
        {
            get
            {
                if (_atcContext == null)
                    _atcContext = new AsposeBarCodeContext(HttpContext.Current);
                return _atcContext;
            }
        }

        /// <summary>
        /// Maps a file extension to a human-readable desktop application / format name
        /// (e.g. "docx" -> "Word"). Unknown extensions fall back to the upper-cased
        /// extension, or to <paramref name="defaultValue"/> when supplied.
        /// </summary>
        protected internal string DesktopAppNameByExtension(string extension, string defaultValue = null)
        {
            if (!string.IsNullOrEmpty(extension))
                switch (extension.ToLower())
                {
                    case "docx":
                    case "doc":
                    case "dot":
                    case "dotx":
                        return "Word";
                    case "odt":
                    case "ott":
                        return "OpenOffice";
                    case "rtf":
                        return "RTF";
                    case "txt":
                        return "Text";
                    case "md":
                        return "Markdown";
                    case "ps":
                        return "PostScript";
                    case "tex":
                        return "LaTeX";
                    case "acroform":
                        return Resources["pdfXfaToAcroform"];
                    case "pdfa1a":
                        return "PDF/A-1A";
                    case "pdfa1b":
                        return "PDF/A-1B";
                    case "pdfa2a":
                        return "PDF/A-2A";
                    case "pdfa3a":
                        return "PDF/A-3A";
                    default:
                        return string.IsNullOrEmpty(defaultValue) ? extension.ToUpper() : defaultValue;
                }
            return defaultValue;
        }

        private Dictionary<string, string> _resources;

        // Localized resource strings; lazily pulled from the request context.
        public Dictionary<string, string> Resources
        {
            get
            {
                if (_resources == null)
                    _resources = AsposeBarCodeContext.Resources;
                return _resources;
            }
            set
            {
                _resources = value;
            }
        }

        // Base path for static UI assets of the live demos.
        public string UIBasePath => Configuration.AsposeBarCodeLiveDemosPath;

        // Localized product title, keyed "Aspose" + TitleCase(Product).
        public string PageProductTitle => Resources["Aspose" + TitleCase(Product)];

        /// <summary>
        /// The name of the app (e.g., Conversion, Merger)
        /// </summary>
        public string AppName { get; set; }

        /// <summary>
        /// The full address of the application without query string (e.g., https://products.aspose.app/words/conversion)
        /// </summary>
        public string AppURL { get; set; }

        /// <summary>
        /// File extension without dot received by "fileformat" value in RouteData (e.g. docx)
        /// </summary>
        public string Extension { get; set; }

        /// <summary>
        /// Second file extension without dot (used by comparison-style apps, see SetExtensionsString).
        /// </summary>
        public string Extension2 { get; set; }

        /// <summary>
        /// Redirect to main app, if there is no ExtensionInfoModel for auto generated models
        /// </summary>
        public bool RedirectToMainApp { get; set; }

        /// <summary>
        /// Name of the partial View of controls (e.g. UnlockControls)
        /// </summary>
        public string ControlsView { get; set; }

        /// <summary>
        /// Is canonical page opened (/all)
        /// </summary>
        public bool IsCanonical;

        public string AnotherFileText { get; set; }

        public string UploadButtonText { get; set; }

        public string ViewerButtonText { get; set; }

        public bool ShowViewerButton { get; set; }

        public string SuccessMessage { get; set; }

        /// <summary>
        /// List of app features for ul-list. E.g. Resources[app + "LiFeature1"]
        /// </summary>
        public List<string> AppFeatures { get; set; }

        public string Title { get; set; }

        public string TitleSub { get; set; }

        // Titles/metadata are stored straight onto the controller's ViewBag so the
        // layout page can read them without referencing this model.
        public string PageTitle
        {
            get => Controller.ViewBag.PageTitle;
            set => Controller.ViewBag.PageTitle = value;
        }

        public string MetaDescription
        {
            get => Controller.ViewBag.MetaDescription;
            set => Controller.ViewBag.MetaDescription = value;
        }

        public string MetaKeywords
        {
            get => Controller.ViewBag.MetaKeywords;
            set => Controller.ViewBag.MetaKeywords = value;
        }

        /// <summary>
        /// If the application doesn't need to upload several files (e.g. Viewer, Editor)
        /// </summary>
        public bool UploadAndRedirect { get; set; }

        // Title-cases a string using en-US rules ("words" -> "Words").
        protected string TitleCase(string value) =>
            new System.Globalization.CultureInfo("en-US", false).TextInfo.ToTitleCase(value);

        /// <summary>
        /// e.g., .doc|.docx|.dot|.dotx|.rtf|.odt|.ott|.txt|.html|.xhtml|.mhtml
        /// </summary>
        public string ExtensionsString { get; set; }

        #region SaveAs

        private bool _saveAsComponent;

        // Enabling this populates SaveAsOptions from resources. For words/pdf
        // Conversion the list is rewritten so DOCX is moved next to DOC; errors
        // during that reorder are swallowed and the unmodified list is kept.
        public bool SaveAsComponent
        {
            get => _saveAsComponent;
            set
            {
                _saveAsComponent = value;
                Controller.ViewBag.SaveAsComponent = value;
                if (_saveAsComponent)
                {
                    // Prefer the app-specific option list; fall back to the product-wide one.
                    var sokey1 = $"{Product}{AppName}SaveAsOptions";
                    var sokey2 = $"{Product}SaveAsOptions";
                    if (Resources.ContainsKey(sokey1))
                        SaveAsOptions = Resources[sokey1].Split(',');
                    else if (Resources.ContainsKey(sokey2))
                    {
                        if (AppName == "Conversion" && Product == "words")
                        {
                            var lst = Resources[sokey2].Split(',').ToList();
                            try
                            {
                                // Move DOCX to sit directly before DOC.
                                var index = lst.FindIndex(x => x == "DOCX");
                                lst.RemoveAt(index);
                                var index2 = lst.FindIndex(x => x == "DOC");
                                lst.Insert(index2, "DOCX");
                            }
                            catch
                            {
                                // Reorder is best-effort; keep whatever state lst is in.
                            }
                            finally
                            {
                                SaveAsOptions = lst.ToArray();
                            }
                        }
                        else if (AppName == "Conversion" && Product == "pdf")
                        {
                            // pdf additionally normalizes entries to upper-case/trimmed.
                            var lst = Resources[sokey2].Split(',').ToList().Select(x => x.ToUpper().Trim()).ToList();
                            try
                            {
                                var index = lst.FindIndex(x => x == "DOCX");
                                lst.RemoveAt(index);
                                var index2 = lst.FindIndex(x => x == "DOC");
                                lst.Insert(index2, "DOCX");
                            }
                            catch
                            {
                                // Reorder is best-effort; keep whatever state lst is in.
                            }
                            finally
                            {
                                SaveAsOptions = lst.ToArray();
                            }
                        }
                        else if (AppName == "Conversion" && Product == "page")
                        {
                            var lst = Resources[sokey2].Split(',').ToList().Select(x => x.ToUpper().Trim()).ToList();
                            SaveAsOptions = lst.ToArray();
                        }
                        else
                            SaveAsOptions = Resources[sokey2].Split(',');
                    }

                    // Append the save-as feature bullet if the product defines one.
                    var lifeaturekey = Product + "SaveAsLiFeature";
                    if (AppFeatures != null && Resources.ContainsKey(lifeaturekey))
                        AppFeatures.Add(Resources[lifeaturekey]);
                }
            }
        }

        // Comma-separated, upper-cased rendering of SaveAsOptions for display.
        public string SaveAsOptionsList
        {
            get
            {
                string list = "";
                if (SaveAsOptions != null)
                {
                    foreach (var extensin in SaveAsOptions)
                    {
                        if (list == "")
                        {
                            list = extensin.ToUpper();
                        }
                        else
                        {
                            list = list + ", " + extensin.ToUpper();
                        }
                    }
                }
                return list;
            }
        }

        /// <summary>
        /// FileFormats in UpperCase
        /// </summary>
        public string[] SaveAsOptions { get; set; }

        /// <summary>
        /// Original file format SaveAs option for multiple files uploading
        /// </summary>
        public bool SaveAsOriginal { get; set; }

        #endregion

        /// <summary>
        /// The possibility of changing the order of uploaded files. It is actual for Merger App.
        /// </summary>
        public bool UseSorting { get; set; }

        public string DropOrUploadFileLabel { get; set; }

        #region ViewSections

        public bool ShowExtensionInfo => ExtensionInfoModel != null;

        public ExtensionInfoModel ExtensionInfoModel { get; set; }

        public bool HowTo => HowToModel != null;

        public HowToModel HowToModel { get; set; }

        #endregion

        // JSON-ish options blob consumed by client-side scripts.
        public string JSOptions => new JSOptions(this).ToString();

        /// <summary>
        /// Builds the model for the given controller/app: resolves resource strings,
        /// derives AppURL from the current request (query string stripped), then runs
        /// SetTitles, SetAppFeatures and SetExtensionsString.
        /// </summary>
        public ViewModel(BaseController controller, string app)
        {
            Controller = controller;
            Resources = controller.Resources;
            AppName = Resources.ContainsKey($"{app}APPName") ? Resources[$"{app}APPName"] : app;
            Product = controller.Product;
            var url = controller.Request.Url.AbsoluteUri;
            // Strip the query string, if any.
            AppURL = url.Substring(0, (url.IndexOf("?") > 0 ? url.IndexOf("?") : url.Length));
            ProductAppName = Product + app;
            UploadButtonText = GetFromResources(ProductAppName + "Button", app + "Button");
            ViewerButtonText = GetFromResources(app + "Viewer", "ViewDocument");
            SuccessMessage = GetFromResources(app + "SuccessMessage");
            AnotherFileText = GetFromResources(app + "AnotherFile");
            IsCanonical = true;
            HowToModel = new HowToModel(this);
            SetTitles();
            SetAppFeatures(app);
            ShowViewerButton = true;
            SaveAsOriginal = true;
            SaveAsComponent = false;
            SetExtensionsString();
        }

        // Populates page title/meta fields from the ProductAppName-keyed resources.
        private void SetTitles()
        {
            PageTitle = Resources[ProductAppName + "PageTitle"];
            MetaDescription = Resources[ProductAppName + "MetaDescription"];
            MetaKeywords = "";
            Title = Resources[ProductAppName + "Title"];
            TitleSub = Resources[ProductAppName + "SubTitle"];
            Controller.ViewBag.CanonicalTag = null;
        }

        // Collects the numbered "LiFeatureN" resource strings; product-specific
        // entries win, generic app entries are the fallback.
        private void SetAppFeatures(string app)
        {
            AppFeatures = new List<string>();
            var i = 1;
            while (Resources.ContainsKey($"{ProductAppName}LiFeature{i}"))
                AppFeatures.Add(Resources[$"{ProductAppName}LiFeature{i++}"]);

            // Stop other developers to add unnecessary features.
            if (AppFeatures.Count == 0)
            {
                i = 1;
                while (Resources.ContainsKey($"{app}LiFeature{i}"))
                {
                    if (!Resources[$"{app}LiFeature{i}"].Contains("Instantly download") || AppFeatures.Count == 0)
                        AppFeatures.Add(Resources[$"{app}LiFeature{i}"]);
                    i++;
                }
            }
        }

        // Looks up key, then defaultKey, returning "" when neither exists.
        private string GetFromResources(string key, string defaultKey = null)
        {
            if (Resources.ContainsKey(key))
                return Resources[key];
            if (!string.IsNullOrEmpty(defaultKey) && Resources.ContainsKey(defaultKey))
                return Resources[defaultKey];
            return "";
        }

        // Derives the accepted-extensions string either from validation-expression
        // resources or, for extension-specific pages, from Extension/Extension2.
        private void SetExtensionsString()
        {
            if (!ShowExtensionInfo)
            {
                var key1 = $"{Product}{AppName}ValidationExpression";
                var key2 = $"{Product}ValidationExpression";
                ExtensionsString = Resources.ContainsKey(key1) ? Resources[key1] : Resources[key2];
            }
            else
            {
                switch (Extension)
                {
                    case "doc":
                    case "docx":
                        ExtensionsString = ".docx|.doc";
                        break;
                    case "html":
                    case "htm":
                    case "mhtml":
                    case "mht":
                        ExtensionsString = ".htm|.html|.mht|.mhtml";
                        break;
                    default:
                        ExtensionsString = $".{Extension}";
                        break;
                }
                if (AppName == "Comparison" && !string.IsNullOrEmpty(Extension2))
                    ExtensionsString += $"|.{Extension2}";
            }
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using System.IO; using System.Security.Cryptography; using System.Text; using System.Text.RegularExpressions; using System.Xml; using OpenSim.Framework; using OpenSim.Framework.Servers; using OpenSim.Framework.Servers.HttpServer; namespace OpenSim.ApplicationPlugins.Rest.Inventory { /// <summary> /// This class represents the current REST request. 
It /// encapsulates the request/response state and takes care /// of response generation without exposing the REST handler /// to the actual mechanisms involved. /// /// This structure is created on entry to the Handler /// method and is disposed of upon return. It is part of /// the plug-in infrastructure, rather than the functionally /// specific REST handler, and fundamental changes to /// this should be reflected in the Rest HandlerVersion. The /// object is instantiated, and may be extended by, any /// given handler. See the inventory handler for an example /// of this. /// /// If possible, the underlying request/response state is not /// changed until the handler explicitly issues a Respond call. /// This ensures that the request/response pair can be safely /// processed by subsequent, unrelated, handlers even id the /// agent handler had completed much of its processing. Think /// of it as a transactional req/resp capability. /// </summary> public class RequestData { // HTTP Server interface data (Received values) internal OSHttpRequest request = null; internal OSHttpResponse response = null; internal string qprefix = null; // Request lifetime values // buffer is global because it is referenced by the handler // in supported of streamed requests. // If a service provider wants to construct the message // body explicitly it can use body to do this. The value // in body is used if the buffer is still null when a response // is generated. // Storing information in body will suppress the return of // statusBody which is only intended to report status on // requests which do not themselves ordinarily generate // an informational response. All of this is handled in // Respond(). internal byte[] buffer = null; internal string body = null; internal string bodyType = "text/html"; // The encoding in effect is set to a server default. It may // subsequently be overridden by a Content header. 
This // value is established during construction and is used // wherever encoding services are needed. internal Encoding encoding = Rest.Encoding; // These values are derived from the supplied URL. They // are initialized during construction. internal string path = null; internal string method = null; internal Uri uri = null; internal string query = null; internal string hostname = "localhost"; internal int port = 80; // The path part of the URI is decomposed. pathNodes // is an array of every element in the URI. Parameters // is an array that contains only those nodes that // are not a part of the authority prefix private string[] pathNodes = null; private string[] parameters = null; private static readonly string[] EmptyPath = { String.Empty }; // The status code gets set during the course of processing // and is the HTTP completion code. The status body is // initialized during construction, is appended to during the // course of execution, and is finalized during Respond // processing. // // Fail processing marks the request as failed and this is // then used to inhibit processing during Response processing. internal int statusCode = 0; internal string statusBody = String.Empty; internal bool fail = false; // This carries the URL to which the client should be redirected. // It is set by the service provider using the Redirect call. internal string redirectLocation = null; // These values influence response processing. They can be set by // service providers according to need. The defaults are generally // good. internal bool keepAlive = false; internal bool chunked = false; // XML related state internal XmlWriter writer = null; internal XmlReader reader = null; // Internal working state private StringBuilder sbuilder = new StringBuilder(1024); private MemoryStream xmldata = null; // This is used to make the response mechanism idempotent. 
internal bool handled = false; // Authentication related state // // Two supported authentication mechanisms are: // scheme = Rest.AS_BASIC; // scheme = Rest.AS_DIGEST; // Presented in that order (as required by spec) // A service provider can set the scheme variable to // force selection of a particular authentication model // (choosing from amongst those supported of course) // internal bool authenticated = false; internal string scheme = Rest.Scheme; internal string realm = Rest.Realm; internal string domain = null; internal string nonce = null; internal string cnonce = null; internal string qop = Rest.Qop_Auth; internal string opaque = null; internal string stale = null; internal string algorithm = Rest.Digest_MD5; internal string authParms = null; internal string authPrefix = null; internal string userName = String.Empty; internal string userPass = String.Empty; // Session related tables. These are only needed if QOP is set to "auth-sess" // and for now at least, it is not. Session related authentication is of // questionable merit in the context of REST anyway, but it is, arguably, more // secure. private static Dictionary<string,string> cntable = new Dictionary<string,string>(); private static Dictionary<string,string> sktable = new Dictionary<string,string>(); // This dictionary is used to keep track fo all of the parameters discovered // when the authorisation header is anaylsed. private Dictionary<string,string> authparms = new Dictionary<string,string>(); // These regular expressions are used to decipher the various header entries. 
private static Regex schema = new Regex("^\\s*(?<scheme>\\w+)\\s*.*", RegexOptions.Compiled | RegexOptions.IgnoreCase); private static Regex basicParms = new Regex("^\\s*(?:\\w+)\\s+(?<pval>\\S+)\\s*", RegexOptions.Compiled | RegexOptions.IgnoreCase); private static Regex digestParm1 = new Regex("\\s*(?<parm>\\w+)\\s*=\\s*\"(?<pval>[^\"]+)\"", RegexOptions.Compiled | RegexOptions.IgnoreCase); private static Regex digestParm2 = new Regex("\\s*(?<parm>\\w+)\\s*=\\s*(?<pval>[^\\p{P}\\s]+)", RegexOptions.Compiled | RegexOptions.IgnoreCase); private static Regex reuserPass = new Regex("(?<user>[^:]+):(?<pass>[\\S\\s]*)", RegexOptions.Compiled | RegexOptions.IgnoreCase); // For efficiency, we create static instances of these objects private static MD5 md5hash = MD5.Create(); private static StringComparer sc = StringComparer.OrdinalIgnoreCase; #region properties // Just for convenience... internal string MsgId { get { return Rest.MsgId; } } /// <summary> /// Return a boolean indication of whether or no an authenticated user is /// associated with this request. This could be wholly integrated, but /// that would make authentication mandatory. /// </summary> internal bool IsAuthenticated { get { if (Rest.Authenticate) { if (!authenticated) { authenticate(); } return authenticated; } else return true; } } /// <summary> /// Access to all 'nodes' in the supplied URI as an /// array of strings. /// </summary> internal string[] PathNodes { get { return pathNodes; } } /// <summary> /// Access to all non-prefix 'nodes' in the supplied URI as an /// array of strings. These identify a specific resource that /// is managed by the authority (the prefix). 
/// </summary> internal string[] Parameters { get { return parameters; } } #endregion properties #region constructors // Constructor internal RequestData(OSHttpRequest p_request, OSHttpResponse p_response, string p_qprefix) { request = p_request; response = p_response; qprefix = p_qprefix; sbuilder.Length = 0; encoding = request.ContentEncoding; if (encoding == null) { encoding = Rest.Encoding; } method = request.HttpMethod.ToLower(); initUrl(); initParameters(p_qprefix.Length); } #endregion constructors #region authentication_common /// <summary> /// The REST handler has requested authentication. Authentication /// is considered to be with respect to the current values for /// Realm, domain, etc. /// /// This method checks to see if the current request is already /// authenticated for this domain. If it is, then it returns /// true. If it is not, then it issues a challenge to the client /// and responds negatively to the request. /// /// As soon as authentication failure is detected the method calls /// DoChallenge() which terminates the request with REST exception /// for unauthroized access. /// </summary> private void authenticate() { string authdata = request.Headers.Get("Authorization"); string reqscheme = String.Empty; // If we don't have an authorization header, then this // user is certainly not authorized. This is the typical // pivot for the 1st request by a client. if (authdata == null) { Rest.Log.DebugFormat("{0} Challenge reason: No authorization data", MsgId); DoChallenge(); } // So, we have authentication data, now we have to check to // see what we got and whether or not it is valid for the // current domain. To do this we need to interpret the data // provided in the Authorization header. First we need to // identify the scheme being used and route accordingly. 
MatchCollection matches = schema.Matches(authdata); foreach (Match m in matches) { Rest.Log.DebugFormat("{0} Scheme matched : {1}", MsgId, m.Groups["scheme"].Value); reqscheme = m.Groups["scheme"].Value.ToLower(); } // If we want a specific authentication mechanism, make sure // we get it. null indicates we don't care. non-null indicates // a specific scheme requirement. if (scheme != null && scheme.ToLower() != reqscheme) { Rest.Log.DebugFormat("{0} Challenge reason: Requested scheme not acceptable", MsgId); DoChallenge(); } // In the future, these could be made into plug-ins... // But for now at least we have no reason to use anything other // then MD5. TLS/SSL are taken care of elsewhere. switch (reqscheme) { case "digest" : Rest.Log.DebugFormat("{0} Digest authentication offered", MsgId); DoDigest(authdata); break; case "basic" : Rest.Log.DebugFormat("{0} Basic authentication offered", MsgId); DoBasic(authdata); break; } // If the current header is invalid, then a challenge is still needed. if (!authenticated) { Rest.Log.DebugFormat("{0} Challenge reason: Authentication failed", MsgId); DoChallenge(); } } /// <summary> /// Construct the necessary WWW-Authenticate headers and fail the request /// with a NOT AUTHORIZED response. The parameters are the union of values /// required by the supported schemes. /// </summary> private void DoChallenge() { Flush(); nonce = Rest.NonceGenerator(); // should be unique per 401 (and it is) Challenge(scheme, realm, domain, nonce, opaque, stale, algorithm, qop, authParms); Fail(Rest.HttpStatusCodeNotAuthorized); } /// <summary> /// The Flush() call is here to support a problem encountered with the /// client where an authentication rejection was lost because the rejection /// may flow before the clienthas finished sending us the inbound data stream, /// in which case the client responds to the socket error on out put, and /// never sees the authentication challenge. 
The client should be fixed, /// because this solution leaves the server prone to DOS attacks. A message /// will be issued whenever flushing occurs. It can be enabled/disabled from /// the configuration file. /// </summary> private void Flush() { if (Rest.FlushEnabled) { byte[] dbuffer = new byte[8192]; Rest.Log.WarnFormat("{0} REST server is flushing the inbound data stream", MsgId); while (request.InputStream.Read(dbuffer,0,dbuffer.Length) != 0); } return; } // Indicate that authentication is required private void Challenge(string scheme, string realm, string domain, string nonce, string opaque, string stale, string alg, string qop, string auth) { sbuilder.Length = 0; // The service provider can force a particular scheme by // assigning a value to scheme. // Basic authentication is pretty simple. // Just specify the realm in question. if (scheme == null || scheme == Rest.AS_BASIC) { sbuilder.Append(Rest.AS_BASIC); if (realm != null) { sbuilder.Append(" realm="); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(realm); sbuilder.Append(Rest.CS_DQUOTE); } AddHeader(Rest.HttpHeaderWWWAuthenticate,sbuilder.ToString()); } sbuilder.Length = 0; // Digest authentication takes somewhat more // to express. if (scheme == null || scheme == Rest.AS_DIGEST) { sbuilder.Append(Rest.AS_DIGEST); sbuilder.Append(" "); // Specify the effective realm. This should // never be null if we are uthenticating, as it is required for all // authentication schemes. It defines, in conjunction with the // absolute URI information, the domain to which the authentication // applies. It is an arbitrary string. I *believe* this allows an // authentication to apply to disjoint resources within the same // server. if (realm != null) { sbuilder.Append("realm="); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(realm); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(Rest.CS_COMMA); } // Share our nonce. This is *uniquely* generated each time a 401 is // returned. 
We do not generate a very sophisticated nonce at the // moment (it's simply a base64 encoded UUID). if (nonce != null) { sbuilder.Append("nonce="); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(nonce); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(Rest.CS_COMMA); } // The opaque string should be returned by the client unchanged in all // subsequent requests. if (opaque != null) { sbuilder.Append("opaque="); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(opaque); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(Rest.CS_COMMA); } // This flag indicates that the authentication was rejected because the // included nonce was stale. The server might use timestamp information // in the nonce to determine this. We do not. if (stale != null) { sbuilder.Append("stale="); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(stale); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(Rest.CS_COMMA); } // Identifies the algorithm used to produce the digest and checksum. // The default is MD5. if (alg != null) { sbuilder.Append("algorithm="); sbuilder.Append(alg); sbuilder.Append(Rest.CS_COMMA); } // Theoretically QOP is optional, but it is required by a compliant // with current versions of the scheme. In fact IE requires that QOP // be specified and will refuse to authenticate otherwise. if (qop != String.Empty) { sbuilder.Append("qop="); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(qop); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(Rest.CS_COMMA); } // This parameter allows for arbitrary extensions to the protocol. // Unrecognized values should be simply ignored. if (auth != null) { sbuilder.Append(auth); sbuilder.Append(Rest.CS_COMMA); } // We don't know the userid that will be used // so we cannot make any authentication domain // assumptions. So the prefix will determine // this. sbuilder.Append("domain="); sbuilder.Append(Rest.CS_DQUOTE); sbuilder.Append(qprefix); sbuilder.Append(Rest.CS_DQUOTE); // Generate the authenticate header and we're basically // done. 
AddHeader(Rest.HttpHeaderWWWAuthenticate,sbuilder.ToString()); } } #endregion authentication_common #region authentication_basic /// <summary> /// Interpret a BASIC authorization claim. Some clients can only /// understand this and also expect it to be the first one /// offered. So we do. /// OpenSim also needs this, as it is the only scheme that allows /// authentication using the hashed passwords stored in the /// user database. /// </summary> private void DoBasic(string authdata) { string response = null; MatchCollection matches = basicParms.Matches(authdata); // In the case of basic authentication there is // only expected to be a single argument. foreach (Match m in matches) { authparms.Add("response",m.Groups["pval"].Value); Rest.Log.DebugFormat("{0} Parameter matched : {1} = {2}", MsgId, "response", m.Groups["pval"].Value); } // Did we get a valid response? if (authparms.TryGetValue("response", out response)) { // Decode response = Rest.Base64ToString(response); Rest.Log.DebugFormat("{0} Auth response is: <{1}>", MsgId, response); // Extract user & password Match m = reuserPass.Match(response); userName = m.Groups["user"].Value; userPass = m.Groups["pass"].Value; // Validate against user database authenticated = Validate(userName,userPass); } } /// <summary> /// This method provides validation in support of the BASIC /// authentication method. This is not normaly expected to be /// used, but is included for completeness (and because I tried /// it first). /// </summary> private bool Validate(string user, string pass) { Rest.Log.DebugFormat("{0} Simple User Validation", MsgId); // Both values are required if (user == null || pass == null) return false; // Eliminate any leading or trailing spaces user = user.Trim(); return vetPassword(user, pass); } /// <summary> /// This is used by the BASIC authentication scheme to calculate /// the double hash used by OpenSim to encode user's passwords. /// It returns true, if the supplied password is actually correct. 
        /// If the specified user-id is not recognized, but the password
        /// matches the God password, then this is accepted as an admin
        /// session.
        /// </summary>
        private bool vetPassword(string user, string pass)
        {
            int x;
            string HA1;
            string first;
            string last;

            // Distinguish the first/last name parts, if necessary.
            // A user id without a space is treated as first-name only.
            if ((x=user.IndexOf(Rest.C_SPACE)) != -1)
            {
                first = user.Substring(0,x);
                last = user.Substring(x+1);
            }
            else
            {
                first = user;
                last = String.Empty;
            }

            UserProfileData udata = Rest.UserServices.GetUserProfile(first, last);

            // If we don't recognize the user id, perhaps it is god?
            if (udata == null)
                return pass == Rest.GodKey;

            // Recreate OpenSim's stored double hash:
            // md5(md5(password) + ":" + salt), then compare
            // case-insensitively against the stored hash.
            HA1 = HashToString(pass);
            HA1 = HashToString(String.Format("{0}:{1}",HA1,udata.PasswordSalt));

            return (0 == sc.Compare(HA1, udata.PasswordHash));
        }

        #endregion authentication_basic

        #region authentication_digest

        /// <summary>
        /// This is an RFC2617 compliant HTTP MD5 Digest authentication
        /// implementation. It has been tested with Firefox, Java HTTP client,
        /// and Microsoft's Internet Explorer V7.
        /// Parses the Authorization header parameters, validates the
        /// protocol-level constraints (nonce, opaque, algorithm, qop,
        /// cnonce/nc), and finally verifies the request digest.
        /// </summary>
        private void DoDigest(string authdata)
        {
            string response = null;

            // Find all of the values of the form x = "y"
            // NOTE(review): authparms.Add throws on a repeated parameter
            // name — a malformed header could raise ArgumentException here;
            // confirm authparms is fresh for each request.
            MatchCollection matches = digestParm1.Matches(authdata);
            foreach (Match m in matches)
            {
                authparms.Add(m.Groups["parm"].Value,m.Groups["pval"].Value);
                Rest.Log.DebugFormat("{0} String Parameter matched : {1} = {2}", MsgId,
                                     m.Groups["parm"].Value,m.Groups["pval"].Value);
            }

            // Find all of the values of the form x = y (unquoted tokens)
            matches = digestParm2.Matches(authdata);
            foreach (Match m in matches)
            {
                authparms.Add(m.Groups["parm"].Value,m.Groups["pval"].Value);
                Rest.Log.DebugFormat("{0} Tokenized Parameter matched : {1} = {2}", MsgId,
                                     m.Groups["parm"].Value,m.Groups["pval"].Value);
            }

            // A response string MUST be returned, otherwise we are
            // NOT authenticated.
            Rest.Log.DebugFormat("{0} Validating authorization parameters", MsgId);

            if (authparms.TryGetValue("response", out response))
            {
                string temp = null;

                // do/while(false) is used as a structured "goto end" so each
                // failed check can simply 'break' out of the validation chain.
                do
                {
                    string nck = null;
                    string ncl = null;

                    // The userid is sent in clear text. Needed for the
                    // verification.
                    authparms.TryGetValue("username", out userName);

                    // All URI's of which this is a prefix are
                    // optimistically considered to be authenticated by the
                    // client. This is also needed to verify the response.
                    authparms.TryGetValue("uri", out authPrefix);

                    // There MUST be a nonce string present. We're not preserving any server
                    // side state and we can't validate the MD5 unless the client returns it
                    // to us, as it should.
                    if (!authparms.TryGetValue("nonce", out nonce) || nonce == null)
                    {
                        Rest.Log.WarnFormat("{0} Authentication failed: nonce missing", MsgId);
                        break;
                    }

                    // If there is an opaque string present, it had better
                    // match what we sent.
                    if (authparms.TryGetValue("opaque", out temp))
                    {
                        if (temp != opaque)
                        {
                            Rest.Log.WarnFormat("{0} Authentication failed: bad opaque value", MsgId);
                            break;
                        }
                    }

                    // If an algorithm string is present, it had better
                    // match what we sent.
                    if (authparms.TryGetValue("algorithm", out temp))
                    {
                        if (temp != algorithm)
                        {
                            Rest.Log.WarnFormat("{0} Authentication failed: bad algorithm value", MsgId);
                            break;
                        }
                    }

                    // Quality of protection considerations...
                    if (authparms.TryGetValue("qop", out temp))
                    {
                        qop = temp.ToLower(); // replace with actual value used

                        // if QOP was specified then
                        // these MUST be present.
                        if (!authparms.ContainsKey("cnonce"))
                        {
                            Rest.Log.WarnFormat("{0} Authentication failed: cnonce missing", MsgId);
                            Fail(Rest.HttpStatusCodeBadRequest);
                            break;
                        }

                        cnonce = authparms["cnonce"];

                        if (!authparms.TryGetValue("nc", out nck) || nck == null)
                        {
                            Rest.Log.WarnFormat("{0} Authentication failed: cnonce counter missing", MsgId);
                            Fail(Rest.HttpStatusCodeBadRequest);
                            break;
                        }

                        // Replay protection: the client's nonce-count must be
                        // strictly greater than the last one we saw for this
                        // nonce.
                        // NOTE(review): the TryGetValue/update path is not
                        // inside the lock that guards Add below — concurrent
                        // requests with the same nonce could race; confirm
                        // cntable's intended locking discipline.
                        Rest.Log.DebugFormat("{0} Comparing nonce indices", MsgId);

                        if (cntable.TryGetValue(nonce, out ncl))
                        {
                            Rest.Log.DebugFormat("{0} nonce values: Verify that request({1}) > Reference({2})",
                                                 MsgId, nck, ncl);

                            if (Rest.Hex2Int(ncl) >= Rest.Hex2Int(nck))
                            {
                                Rest.Log.WarnFormat("{0} Authentication failed: bad cnonce counter", MsgId);
                                Fail(Rest.HttpStatusCodeBadRequest);
                                break;
                            }
                            cntable[nonce] = nck;
                        }
                        else
                        {
                            lock (cntable)
                                cntable.Add(nonce, nck);
                        }
                    }
                    else
                    {
                        qop = String.Empty;

                        // if QOP was not specified then
                        // these MUST NOT be present.
                        if (authparms.ContainsKey("cnonce"))
                        {
                            Rest.Log.WarnFormat("{0} Authentication failed: invalid cnonce", MsgId);
                            Fail(Rest.HttpStatusCodeBadRequest);
                            break;
                        }

                        if (authparms.ContainsKey("nc"))
                        {
                            Rest.Log.WarnFormat("{0} Authentication failed: invalid cnonce counter[2]", MsgId);
                            Fail(Rest.HttpStatusCodeBadRequest);
                            break;
                        }
                    }

                    // Validate the supplied userid/password info
                    authenticated = ValidateDigest(userName, nonce, cnonce, nck, authPrefix, response);

                } while (false);
            }
            else
                Fail(Rest.HttpStatusCodeBadRequest);
        }

        /// <summary>
        /// This mechanism is used by the digest authentication mechanism
        /// to return the user's password. In fact, because the OpenSim
        /// user's passwords are already hashed, and the HTTP mechanism
        /// does not supply an open password, the hashed passwords cannot
        /// be used unless the client has used the same salting mechanism
        /// to hash the password before using it in the authentication
        /// algorithm. This is not inconceivable...
        /// </summary>
        private string getPassword(string user)
        {
            int x;
            string first;
            string last;

            // Distinguish the first/last name parts, if necessary.
            if ((x=user.IndexOf(Rest.C_SPACE)) != -1)
            {
                first = user.Substring(0,x);
                last = user.Substring(x+1);
            }
            else
            {
                first = user;
                last = String.Empty;
            }

            UserProfileData udata = Rest.UserServices.GetUserProfile(first, last);

            // If we don't recognize the user id, perhaps it is god?
            // In that case the God key acts as the shared secret.
            if (udata == null)
            {
                Rest.Log.DebugFormat("{0} Administrator", MsgId);
                return Rest.GodKey;
            }
            else
            {
                // Note: this is the stored (already hashed) password, not a
                // clear-text one — see the caveat in the summary above.
                Rest.Log.DebugFormat("{0} Normal User {1}", MsgId, user);
                return udata.PasswordHash;
            }
        }

        // Validate the request-digest per RFC 2617: compute H(A1), H(A2),
        // then the keyed digest KD, and compare against the client's
        // "response" parameter (case-insensitive hex comparison).
        private bool ValidateDigest(string user, string nonce, string cnonce, string nck, string uri, string response)
        {
            string patt = null;
            string payl = String.Empty;
            string KDS = null;
            string HA1 = null;
            string HA2 = null;
            string pass = getPassword(user);

            // Generate H(A1): for MD5-sess the session key is computed once
            // per cnonce and cached in sktable; otherwise it is recomputed
            // from user:realm:password each time.
            if (algorithm == Rest.Digest_MD5Sess)
            {
                if (!sktable.ContainsKey(cnonce))
                {
                    patt = String.Format("{0}:{1}:{2}:{3}:{4}", user, realm, pass, nonce, cnonce);
                    HA1 = HashToString(patt);
                    sktable.Add(cnonce, HA1);
                }
                else
                {
                    HA1 = sktable[cnonce];
                }
            }
            else
            {
                patt = String.Format("{0}:{1}:{2}", user, realm, pass);
                HA1 = HashToString(patt);
            }

            // Generate H(A2). For auth-int the entity body hash is included;
            // NOTE(review): payl is always String.Empty here, so auth-int
            // effectively hashes an empty body — confirm that is intended.
            if (qop == "auth-int")
            {
                patt = String.Format("{0}:{1}:{2}", request.HttpMethod, uri, HashToString(payl));
            }
            else
            {
                patt = String.Format("{0}:{1}", request.HttpMethod, uri);
            }

            HA2 = HashToString(patt);

            // Generate Digest: with qop the nonce-count and cnonce take part
            // in the key derivation, without it only the nonce does.
            if (qop != String.Empty)
            {
                patt = String.Format("{0}:{1}:{2}:{3}:{4}:{5}", HA1, nonce, nck, cnonce, qop, HA2);
            }
            else
            {
                patt = String.Format("{0}:{1}:{2}", HA1, nonce, HA2);
            }

            KDS = HashToString(patt);

            // Compare the generated sequence with the original
            return (0 == sc.Compare(KDS, response));
        }

        // MD5-hash the pattern and render it as lower-case hex.
        // NOTE(review): md5hash and sbuilder are shared instance/class state,
        // so this helper is not safe for concurrent callers — confirm each
        // request gets its own handler instance.
        private string HashToString(string pattern)
        {
            Rest.Log.DebugFormat("{0} Generate <{1}>", MsgId, pattern);

            byte[] hash = md5hash.ComputeHash(encoding.GetBytes(pattern));

            sbuilder.Length = 0;

            for (int i = 0; i < hash.Length; i++)
            {
                sbuilder.Append(hash[i].ToString("x2"));
            }

            Rest.Log.DebugFormat("{0} Hash = <{1}>", MsgId, sbuilder.ToString());

            return sbuilder.ToString();
        }

        #endregion authentication_digest

        #region service_interface

        /// <summary>
        /// Conditionally set a normal completion code. This allows a normal
        /// execution path to default.
        /// </summary>
        internal void Complete()
        {
            if (statusCode == 0)
            {
                statusCode = Rest.HttpStatusCodeOK;
            }
        }

        /// <summary>
        /// Indicate a functionally-dependent conclusion to the
        /// request. See Rest.cs for a list of possible values.
        /// </summary>
        internal void Complete(int code)
        {
            statusCode = code;
        }

        /// <summary>
        /// Indicate that a request should be redirected, using
        /// the HTTP completion codes. Permanent and temporary
        /// redirections may be indicated. The supplied URL is
        /// the new location of the resource.
        /// Note that Fail(..., true) is invoked, so this does not return
        /// normally — it terminates request processing.
        /// </summary>
        internal void Redirect(string Url, bool temp)
        {
            redirectLocation = Url;

            if (temp)
            {
                statusCode = Rest.HttpStatusCodeTemporaryRedirect;
            }
            else
            {
                statusCode = Rest.HttpStatusCodePermanentRedirect;
            }

            Fail(statusCode, String.Empty, true);
        }

        /// <summary>
        /// Fail for an arbitrary reason. Just a failure with
        /// headers. The supplied message will be returned in the
        /// message body.
        /// </summary>
        internal void Fail(int code)
        {
            Fail(code, String.Empty, false);
        }

        /// <summary>
        /// For the more adventurous. This failure also includes a
        /// specified entity to be appended to the code-related
        /// status string.
/// </summary> internal void Fail(int code, string addendum) { Fail(code, addendum, false); } internal void Fail(int code, string addendum, bool reset) { statusCode = code; appendStatus(String.Format("({0}) : {1}", code, Rest.HttpStatusDesc[code])); // Add any final addendum to the status information if (addendum != String.Empty) { appendStatus(String.Format(addendum)); } // Help us understand why the request is being rejected if (Rest.DEBUG) { Rest.Log.DebugFormat("{0} Request Failure State Dump", MsgId); Rest.Log.DebugFormat("{0} Scheme = {1}", MsgId, scheme); Rest.Log.DebugFormat("{0} Realm = {1}", MsgId, realm); Rest.Log.DebugFormat("{0} Domain = {1}", MsgId, domain); Rest.Log.DebugFormat("{0} Nonce = {1}", MsgId, nonce); Rest.Log.DebugFormat("{0} CNonce = {1}", MsgId, cnonce); Rest.Log.DebugFormat("{0} Opaque = {1}", MsgId, opaque); Rest.Log.DebugFormat("{0} Stale = {1}", MsgId, stale); Rest.Log.DebugFormat("{0} Algorithm = {1}", MsgId, algorithm); Rest.Log.DebugFormat("{0} QOP = {1}", MsgId, qop); Rest.Log.DebugFormat("{0} AuthPrefix = {1}", MsgId, authPrefix); Rest.Log.DebugFormat("{0} UserName = {1}", MsgId, userName); Rest.Log.DebugFormat("{0} UserPass = {1}", MsgId, userPass); } fail = true; // Respond to the client's request, tag the response (for the // benefit of trace) to indicate the reason. Respond(String.Format("Failure response: ({0}) : {1} ", code, Rest.HttpStatusDesc[code])); // Finally initialize and the throw a RestException. All of the // handler's infrastructure knows that this is a "normal" // completion from a code point-of-view. 
RestException re = new RestException(Rest.HttpStatusDesc[code]+" <"+code+">"); re.statusCode = code; re.statusDesc = Rest.HttpStatusDesc[code]; re.httpmethod = method; re.httppath = path; throw re; } // Reject this request internal void Reject() { Fail(Rest.HttpStatusCodeNotImplemented, "request rejected (not implemented)"); } // This MUST be called by an agent handler before it returns // control to Handle, otherwise the request will be ignored. // This is called implciitly for the REST stream handlers and // is harmless if it is called twice. internal virtual bool Respond(string reason) { Rest.Log.DebugFormat("{0} Respond ENTRY, handled = {1}, reason = {2}", MsgId, handled, reason); // We do this to try and make multiple Respond requests harmless, // as it is sometimes convenient to isse a response without // certain knowledge that it has not previously been done. if (!handled) { Rest.Log.DebugFormat("{0} Generating Response", MsgId); Rest.Log.DebugFormat("{0} Method is {1}", MsgId, method); // A Head request can NOT have a body! So don't waste time on // formatting if we're going to reject it anyway! if (method != Rest.HEAD) { Rest.Log.DebugFormat("{0} Response is not abbreviated", MsgId); // If the writer is non-null then we know that an XML // data component exists. Flush and close the writer and // then convert the result to the expected buffer format // unless the request has already been failed for some // reason. 
                    if (writer != null)
                    {
                        Rest.Log.DebugFormat("{0} XML Response handler extension ENTRY", MsgId);
                        Rest.Log.DebugFormat("{0} XML Response exists", MsgId);
                        writer.Flush();
                        writer.Close();

                        // Only adopt the XML stream as the entity body if the
                        // request has not already failed.
                        if (!fail)
                        {
                            buffer = xmldata.ToArray();
                            AddHeader("Content-Type","application/xml");
                        }

                        xmldata.Close();
                        Rest.Log.DebugFormat("{0} XML Response encoded", MsgId);
                        Rest.Log.DebugFormat("{0} XML Response handler extension EXIT", MsgId);
                    }

                    // Fall back to the plain string body, if one was supplied.
                    if (buffer == null && body != null)
                    {
                        buffer = encoding.GetBytes(body);
                        AddHeader("Content-Type",bodyType);
                    }

                    // OK, if the buffer contains something, regardless of how
                    // it got there, set various response headers accordingly.
                    if (buffer != null)
                    {
                        Rest.Log.DebugFormat("{0} Buffer-based entity", MsgId);
                    }
                    else
                    {
                        // No entity at all: synthesize an HTML status page
                        // from the accumulated status text.
                        if (statusBody != String.Empty)
                        {
                            statusBody += Rest.statusTail;
                            buffer = encoding.GetBytes(statusBody);
                            AddHeader("Content-Type","text/html");
                        }
                        else
                        {
                            statusBody = Rest.statusHead;
                            appendStatus(String.Format(": ({0}) {1}",
                                                       statusCode, Rest.HttpStatusDesc[statusCode]));
                            statusBody += Rest.statusTail;
                            buffer = encoding.GetBytes(statusBody);
                            AddHeader("Content-Type","text/html");
                        }
                    }

                    response.ContentLength64 = buffer.Length;

                    if (response.ContentEncoding == null)
                        response.ContentEncoding = encoding;

                    response.SendChunked = chunked;
                    response.KeepAlive = keepAlive;
                }

                // Set the status code & description. If nothing has been stored,
                // we consider that a success.
                if (statusCode == 0)
                {
                    Complete();
                }

                // Set the response code in the actual carrier
                response.StatusCode = statusCode;

                // For a redirect we need to set the relocation header accordingly
                if (response.StatusCode == (int) Rest.HttpStatusCodeTemporaryRedirect ||
                    response.StatusCode == (int) Rest.HttpStatusCodePermanentRedirect)
                {
                    Rest.Log.DebugFormat("{0} Re-direct location is {1}", MsgId, redirectLocation);
                    response.RedirectLocation = redirectLocation;
                }

                // And include the status description if provided.
                response.StatusDescription = Rest.HttpStatusDesc[response.StatusCode];

                // Finally we send back our response.

                // We've left the setting of 'handled' until the
                // last minute because the header settings included
                // above are pretty harmless. But everything from
                // here on down probably leaves the response
                // element unusable by anyone else.
                handled = true;

                // DumpHeaders();

                // if (request.InputStream != null)
                // {
                //     Rest.Log.DebugFormat("{0} Closing input stream", MsgId);
                //     request.InputStream.Close();
                // }

                if (buffer != null && buffer.Length != 0)
                {
                    Rest.Log.DebugFormat("{0} Entity buffer, length = {1}", MsgId, buffer.Length);
                    // Rest.Log.DebugFormat("{0} Entity buffer, length = {1} : <{2}>",
                    //                      MsgId, buffer.Length, encoding.GetString(buffer));
                    response.OutputStream.Write(buffer, 0, buffer.Length);
                }

                // Closing the outputstream should complete the transmission process
                Rest.Log.DebugFormat("{0} Sending response", MsgId);
                // response.OutputStream.Close();
                response.Send();
            }

            Rest.Log.DebugFormat("{0} Respond EXIT, handled = {1}, reason = {2}", MsgId, handled, reason);

            return handled;
        }

        /// <summary>
        /// These methods allow a service provider to manipulate the
        /// request/response headers. The DumpHeaders method is intended
        /// for problem diagnosis.
        /// </summary>
        internal void AddHeader(string hdr, string data)
        {
            if (Rest.DEBUG)
                Rest.Log.DebugFormat("{0} Adding header: <{1}: {2}>", MsgId, hdr, data);
            response.AddHeader(hdr, data);
        }

        // internal void RemoveHeader(string hdr)
        // {
        //     if (Rest.DEBUG)
        //     {
        //         Rest.Log.DebugFormat("{0} Removing header: <{1}>", MsgId, hdr);
        //         if (response.Headers.Get(hdr) == null)
        //         {
        //             Rest.Log.DebugFormat("{0} No such header existed",
        //                                  MsgId, hdr);
        //         }
        //     }
        //     response.Headers.Remove(hdr);
        // }

        // internal void DumpHeaders()
        // {
        //     if (Rest.DEBUG)
        //     {
        //         for (int i=0;i<response.Headers.Count;i++)
        //         {
        //             Rest.Log.DebugFormat("{0} Header[{1}] : {2}", MsgId, i,
        //                                  response.Headers.Get(i));
        //         }
        //     }
        // }

        // Setup the XML writer for output.
        // The underlying MemoryStream (xmldata) is kept open
        // (CloseOutput = false) so Respond can harvest it with ToArray.
        internal void initXmlWriter()
        {
            XmlWriterSettings settings = new XmlWriterSettings();
            xmldata = new MemoryStream();
            settings.Indent = true;
            settings.IndentChars = " ";
            settings.Encoding = encoding;
            settings.CloseOutput = false;
            settings.OmitXmlDeclaration = true;
            settings.ConformanceLevel = ConformanceLevel.Fragment;
            writer = XmlWriter.Create(xmldata, settings);
        }

        // Setup a forgiving XML reader over the request body: fragments
        // allowed, comments/whitespace/PIs ignored, no validation.
        internal void initXmlReader()
        {
            XmlReaderSettings settings = new XmlReaderSettings();
            settings.ConformanceLevel = ConformanceLevel.Fragment;
            settings.IgnoreComments = true;
            settings.IgnoreWhitespace = true;
            settings.IgnoreProcessingInstructions = true;
            settings.ValidationType = ValidationType.None;
            reader = XmlReader.Create(request.InputStream,settings);
        }

        // Append a message to the accumulated status body, lazily seeding
        // it with the standard HTML head on first use.
        internal void appendStatus(string msg)
        {
            if (statusBody == String.Empty)
            {
                statusBody = String.Format(Rest.statusHead, request.HttpMethod);
            }

            statusBody = String.Format("{0} {1}", statusBody, msg);
        }

        #endregion service_interface

        #region internal_methods

        /// <summary>
        /// Helper methods for deconstructing and reconstructing
        /// URI path data.
        /// </summary>
        private void initUrl()
        {
            uri = request.Url;

            if (query == null)
            {
                query = uri.Query;
            }

            // If the path has not been previously initialized,
            // do so now. A trailing path separator is stripped.
            if (path == null)
            {
                path = uri.AbsolutePath;
                if (path.EndsWith(Rest.UrlPathSeparator))
                    path = path.Substring(0,path.Length-1);
            }

            // If we succeeded in getting a path, perform any
            // additional pre-processing required.
            if (path != null)
            {
                if (Rest.ExtendedEscape)
                {
                    // Handle "+". Not a standard substitution, but
                    // common enough...
                    path = path.Replace(Rest.C_PLUS,Rest.C_SPACE);
                }
                pathNodes = path.Split(Rest.CA_PATHSEP);
            }
            else
            {
                pathNodes = EmptyPath;
            }

            // Eliminate any %-escaped values. This is left until here
            // so that escaped "+" are not mistakenly replaced.
            path = Uri.UnescapeDataString(path);

            // Request server context info
            hostname = uri.Host;
            port = uri.Port;
        }

        // Split everything after the supplied prefix length into path
        // parameters; returns the number of parameters found.
        private int initParameters(int prfxlen)
        {
            if (prfxlen < path.Length-1)
            {
                parameters = path.Substring(prfxlen+1).Split(Rest.CA_PATHSEP);
            }
            else
            {
                parameters = new string[0];
            }

            // Generate a debug list of the decoded parameters
            if (Rest.DEBUG && prfxlen < path.Length-1)
            {
                Rest.Log.DebugFormat("{0} URI: Parameters: {1}", MsgId, path.Substring(prfxlen));
                for (int i = 0; i < parameters.Length; i++)
                {
                    Rest.Log.DebugFormat("{0} Parameter[{1}]: {2}", MsgId, i, parameters[i]);
                }
            }

            return parameters.Length;
        }

        #endregion internal_methods
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. using System; using System.Collections.Generic; using System.IO; using System.Reflection; using System.Text; using System.Text.Json; using System.Threading.Tasks; using Azure.Core; using Azure.Core.TestFramework; using Azure.Storage.Blobs; using Azure.Storage.Blobs.ChangeFeed.Models; using Azure.Storage.Blobs.Models; using Moq; using NUnit.Framework; namespace Azure.Storage.Blobs.ChangeFeed.Tests { public class ChangeFeedTests : ChangeFeedTestBase { public ChangeFeedTests(bool async) : base(async, null /* RecordedTestMode.Record /* to re-record */) { } /// <summary> /// Tests building a ChangeFeed with a ChangeFeedCursor, and then calling ChangeFeed.GetCursor() /// and making sure the cursors match. /// </summary> [Test] public async Task GetCursor() { // Arrange Mock<BlobServiceClient> serviceClient = new Mock<BlobServiceClient>(MockBehavior.Strict); Mock<BlobContainerClient> containerClient = new Mock<BlobContainerClient>(MockBehavior.Strict); Mock<BlobClient> blobClient = new Mock<BlobClient>(MockBehavior.Strict); Mock<SegmentFactory> segmentFactory = new Mock<SegmentFactory>(MockBehavior.Strict); Mock<Segment> segment = new Mock<Segment>(MockBehavior.Strict); Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed"); serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny<string>())).Returns(containerClient.Object); containerClient.Setup(r => r.Uri).Returns(containerUri); if (IsAsync) { containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200))); } else { containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200))); } containerClient.Setup(r => r.GetBlobClient(It.IsAny<string>())).Returns(blobClient.Object); using FileStream stream = File.OpenRead( 
$"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); Response<BlobDownloadInfo> downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); if (IsAsync) { blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse); } else { blobClient.Setup(r => r.Download()).Returns(downloadResponse); } if (IsAsync) { AsyncPageable<BlobHierarchyItem> asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync); containerClient.Setup(r => r.GetBlobsByHierarchyAsync( default, default, "/", Constants.ChangeFeed.SegmentPrefix, default)).Returns(asyncPageable); } else { Pageable<BlobHierarchyItem> pageable = PageResponseEnumerator.CreateEnumerable(GetYearPathFunc); containerClient.Setup(r => r.GetBlobsByHierarchy( default, default, "/", Constants.ChangeFeed.SegmentPrefix, default)).Returns(pageable); } if (IsAsync) { AsyncPageable<BlobHierarchyItem> asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYearFuncAsync); containerClient.Setup(r => r.GetBlobsByHierarchyAsync( default, default, default, It.IsAny<string>(), default)).Returns(asyncPageable); } else { Pageable<BlobHierarchyItem> pageable = PageResponseEnumerator.CreateEnumerable(GetSegmentsInYearFunc); containerClient.Setup(r => r.GetBlobsByHierarchy( default, default, default, It.IsAny<string>(), default)).Returns(pageable); } segmentFactory.Setup(r => r.BuildSegment( It.IsAny<bool>(), It.IsAny<string>(), It.IsAny<SegmentCursor>())) .ReturnsAsync(segment.Object); long chunkIndex = 1; long blockOffset = 2; long eventIndex = 3; ShardCursor shardCursor = new ShardCursor( chunkIndex, blockOffset, eventIndex); DateTimeOffset segmentTime = new DateTimeOffset(2020, 1, 4, 17, 0, 0, TimeSpan.Zero); int shardIndex = 0; SegmentCursor segmentCursor = new 
SegmentCursor( segmentTime, new List<ShardCursor> { shardCursor }, shardIndex); segment.Setup(r => r.GetCursor()).Returns(segmentCursor); DateTimeOffset endDateTime = new DateTimeOffset(2020, 5, 6, 18, 0, 0, TimeSpan.Zero); ChangeFeedCursor expectedCursor = new ChangeFeedCursor( urlHash: containerUri.ToString().GetHashCode(), endDateTime: endDateTime, currentSegmentCursor: segmentCursor); ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory( containerClient.Object, segmentFactory.Object); // Act ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( IsAsync, continuation: JsonSerializer.Serialize<ChangeFeedCursor>(expectedCursor)); ChangeFeedCursor actualCursor = changeFeed.GetCursor(); // Assert Assert.AreEqual(expectedCursor.CursorVersion, actualCursor.CursorVersion); Assert.AreEqual(expectedCursor.EndTime, actualCursor.EndTime); Assert.AreEqual(expectedCursor.UrlHash, actualCursor.UrlHash); Assert.AreEqual(expectedCursor.CurrentSegmentCursor.SegmentTime, actualCursor.CurrentSegmentCursor.SegmentTime); Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardIndex, actualCursor.CurrentSegmentCursor.ShardIndex); Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors.Count, actualCursor.CurrentSegmentCursor.ShardCursors.Count); Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].BlockOffset, actualCursor.CurrentSegmentCursor.ShardCursors[0].BlockOffset); Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].ChunkIndex, actualCursor.CurrentSegmentCursor.ShardCursors[0].ChunkIndex); Assert.AreEqual(expectedCursor.CurrentSegmentCursor.ShardCursors[0].EventIndex, actualCursor.CurrentSegmentCursor.ShardCursors[0].EventIndex); containerClient.Verify(r => r.Uri); if (IsAsync) { containerClient.Verify(r => r.ExistsAsync(default)); } else { containerClient.Verify(r => r.Exists(default)); } containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath)); if (IsAsync) { blobClient.Verify(r => 
r.DownloadAsync()); } else { blobClient.Verify(r => r.Download()); } if (IsAsync) { containerClient.Verify(r => r.GetBlobsByHierarchyAsync( default, default, "/", Constants.ChangeFeed.SegmentPrefix, default)); } else { containerClient.Verify(r => r.GetBlobsByHierarchy( default, default, "/", Constants.ChangeFeed.SegmentPrefix, default)); } if (IsAsync) { containerClient.Verify(r => r.GetBlobsByHierarchyAsync( default, default, default, "idx/segments/2020/", default)); } else { containerClient.Verify(r => r.GetBlobsByHierarchy( default, default, default, "idx/segments/2020/", default)); } segmentFactory.Verify(r => r.BuildSegment( IsAsync, "idx/segments/2020/01/16/2300/meta.json", It.Is<SegmentCursor>( r => r.SegmentTime == segmentTime && r.ShardIndex == shardIndex && r.ShardCursors.Count == 1 && r.ShardCursors[0].BlockOffset == blockOffset && r.ShardCursors[0].ChunkIndex == chunkIndex && r.ShardCursors[0].EventIndex == eventIndex ))); segment.Verify(r => r.GetCursor()); } /// <summary> /// This test has 8 total events, 4 segments, and 2 years. /// We call ChangeFeed.GetPage() with a page size of 3, and then again with no page size, /// resulting in two pages with 3 and 5 Events. /// </summary> [Test] public async Task GetPage() { // Arrange int eventCount = 8; int segmentCount = 4; Mock<BlobServiceClient> serviceClient = new Mock<BlobServiceClient>(MockBehavior.Strict); Mock<BlobContainerClient> containerClient = new Mock<BlobContainerClient>(MockBehavior.Strict); Mock<BlobClient> blobClient = new Mock<BlobClient>(MockBehavior.Strict); Mock<SegmentFactory> segmentFactory = new Mock<SegmentFactory>(MockBehavior.Strict); Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed"); List<Mock<Segment>> segments = new List<Mock<Segment>>(); for (int i = 0; i < segmentCount; i++) { segments.Add(new Mock<Segment>(MockBehavior.Strict)); } // ChangeFeedFactory.BuildChangeFeed() setups. 
serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny<string>())).Returns(containerClient.Object); containerClient.SetupSequence(r => r.Uri) .Returns(containerUri) .Returns(containerUri); if (IsAsync) { containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200))); } else { containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200))); } containerClient.Setup(r => r.GetBlobClient(It.IsAny<string>())).Returns(blobClient.Object); using FileStream stream = File.OpenRead( $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}"); BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream); Response<BlobDownloadInfo> downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200)); if (IsAsync) { blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse); } else { blobClient.Setup(r => r.Download()).Returns(downloadResponse); } if (IsAsync) { AsyncPageable<BlobHierarchyItem> asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathShortFuncAsync); containerClient.Setup(r => r.GetBlobsByHierarchyAsync( default, default, "/", Constants.ChangeFeed.SegmentPrefix, default)).Returns(asyncPageable); } else { Pageable<BlobHierarchyItem> pageable = PageResponseEnumerator.CreateEnumerable(GetYearsPathShortFunc); containerClient.Setup(r => r.GetBlobsByHierarchy( default, default, "/", Constants.ChangeFeed.SegmentPrefix, default)).Returns(pageable); } if (IsAsync) { AsyncPageable<BlobHierarchyItem> asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2019FuncAsync); AsyncPageable<BlobHierarchyItem> asyncPageable2 = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2020FuncAsync); containerClient.SetupSequence(r => r.GetBlobsByHierarchyAsync( default, default, default, 
It.IsAny<string>(), default)) .Returns(asyncPageable) .Returns(asyncPageable2); } else { Pageable<BlobHierarchyItem> pageable = PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2019Func); Pageable<BlobHierarchyItem> pageable2 = PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2020Func); containerClient.SetupSequence(r => r.GetBlobsByHierarchy( default, default, default, It.IsAny<string>(), default)) .Returns(pageable) .Returns(pageable2); } segmentFactory.SetupSequence(r => r.BuildSegment( It.IsAny<bool>(), It.IsAny<string>(), default)) .Returns(Task.FromResult(segments[0].Object)) .Returns(Task.FromResult(segments[1].Object)) .Returns(Task.FromResult(segments[2].Object)) .Returns(Task.FromResult(segments[3].Object)); List<BlobChangeFeedEvent> events = new List<BlobChangeFeedEvent>(); for (int i = 0; i < eventCount; i++) { events.Add(new BlobChangeFeedEvent { Id = Guid.NewGuid() }); } segments[0].SetupSequence(r => r.HasNext()) .Returns(false); segments[1].SetupSequence(r => r.HasNext()) .Returns(true) .Returns(false); segments[2].SetupSequence(r => r.HasNext()) .Returns(false); segments[3].SetupSequence(r => r.HasNext()) .Returns(true) .Returns(false); segments[0].SetupSequence(r => r.GetPage( It.IsAny<bool>(), It.IsAny<int?>(), default)) .Returns(Task.FromResult(new List<BlobChangeFeedEvent> { events[0], events[1] })); segments[1].SetupSequence(r => r.GetPage( It.IsAny<bool>(), It.IsAny<int?>(), default)) .Returns(Task.FromResult(new List<BlobChangeFeedEvent> { events[2] })) .Returns(Task.FromResult(new List<BlobChangeFeedEvent> { events[3] })); segments[2].SetupSequence(r => r.GetPage( It.IsAny<bool>(), It.IsAny<int?>(), default)) .Returns(Task.FromResult(new List<BlobChangeFeedEvent> { events[4], events[5] })); segments[3].SetupSequence(r => r.GetPage( It.IsAny<bool>(), It.IsAny<int?>(), default)) .Returns(Task.FromResult(new List<BlobChangeFeedEvent> { events[6], events[7] })); for (int i = 0; i < segments.Count; i++) { segments[i].Setup(r => 
r.Finalized) .Returns(true); } long chunkIndex = 1; long blockOffset = 2; long eventIndex = 3; ShardCursor shardCursor = new ShardCursor( chunkIndex, blockOffset, eventIndex); DateTimeOffset segmentTime = new DateTimeOffset(2020, 1, 4, 17, 0, 0, TimeSpan.Zero); int shardIndex = 0; SegmentCursor segmentCursor = new SegmentCursor( segmentTime, new List<ShardCursor> { shardCursor }, shardIndex); ChangeFeedCursor changeFeedCursor = new ChangeFeedCursor( containerUri.ToString().GetHashCode(), null, segmentCursor); containerClient.SetupSequence(r => r.Uri) .Returns(containerUri) .Returns(containerUri); segments[1].Setup(r => r.GetCursor()).Returns(segmentCursor); segments[3].Setup(r => r.GetCursor()).Returns(segmentCursor); ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory( containerClient.Object, segmentFactory.Object); ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed( IsAsync); // Act Page<BlobChangeFeedEvent> page0 = await changeFeed.GetPage(IsAsync, 3); Page<BlobChangeFeedEvent> page1 = await changeFeed.GetPage(IsAsync); // Assert Assert.AreEqual(JsonSerializer.Serialize(changeFeedCursor), page0.ContinuationToken); for (int i = 0; i < 3; i++) { Assert.AreEqual(events[i].Id, page0.Values[i].Id); } Assert.AreEqual(JsonSerializer.Serialize(changeFeedCursor), page1.ContinuationToken); for (int i = 3; i < events.Count; i++) { Assert.AreEqual(events[i].Id, page1.Values[i - 3].Id); } // ChangeFeedFactory.BuildChangeFeed() verifies containerClient.Verify(r => r.Uri); if (IsAsync) { containerClient.Verify(r => r.ExistsAsync(default)); } else { containerClient.Verify(r => r.Exists(default)); } containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath)); if (IsAsync) { blobClient.Verify(r => r.DownloadAsync()); } else { blobClient.Verify(r => r.Download()); } if (IsAsync) { containerClient.Verify(r => r.GetBlobsByHierarchyAsync( default, default, "/", Constants.ChangeFeed.SegmentPrefix, default)); } else { 
containerClient.Verify(r => r.GetBlobsByHierarchy( default, default, "/", Constants.ChangeFeed.SegmentPrefix, default)); } if (IsAsync) { containerClient.Verify(r => r.GetBlobsByHierarchyAsync( default, default, default, "idx/segments/2019/", default)); containerClient.Verify(r => r.GetBlobsByHierarchyAsync( default, default, default, "idx/segments/2020/", default)); } else { containerClient.Verify(r => r.GetBlobsByHierarchy( default, default, default, "idx/segments/2019/", default)); containerClient.Verify(r => r.GetBlobsByHierarchy( default, default, default, "idx/segments/2020/", default)); } // ChangeFeeed.Next() verifies. segments[0].Verify(r => r.HasNext()); segments[1].Verify(r => r.HasNext(), Times.Exactly(2)); segments[2].Verify(r => r.HasNext()); segments[3].Verify(r => r.HasNext(), Times.Exactly(3)); segments[0].Verify(r => r.GetPage( IsAsync, 3, default)); segments[1].Verify(r => r.GetPage( IsAsync, 1, default)); segments[1].Verify(r => r.GetPage( IsAsync, Constants.ChangeFeed.DefaultPageSize, default)); segments[2].Verify(r => r.GetPage( IsAsync, Constants.ChangeFeed.DefaultPageSize - 1, default)); segments[3].Verify(r => r.GetPage( IsAsync, Constants.ChangeFeed.DefaultPageSize - 3, default)); segments[1].Verify(r => r.GetCursor()); segments[3].Verify(r => r.GetCursor()); segments[0].Verify(r => r.Finalized, Times.Exactly(3)); segments[1].Verify(r => r.Finalized, Times.Exactly(4)); segments[2].Verify(r => r.Finalized, Times.Exactly(1)); segments[3].Verify(r => r.Finalized, Times.Exactly(2)); containerClient.Verify(r => r.Uri, Times.Exactly(2)); } [Test] public async Task NoYearsAfterStartTime() { // Arrange Mock<BlobServiceClient> serviceClient = new Mock<BlobServiceClient>(MockBehavior.Strict); Mock<BlobContainerClient> containerClient = new Mock<BlobContainerClient>(MockBehavior.Strict); Mock<BlobClient> blobClient = new Mock<BlobClient>(MockBehavior.Strict); Mock<SegmentFactory> segmentFactory = new Mock<SegmentFactory>(MockBehavior.Strict); 
// NOTE(review): 'segment' and 'containerUri' appear unused in this test.
Mock<Segment> segment = new Mock<Segment>(MockBehavior.Strict);

Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed");

serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny<string>())).Returns(containerClient.Object);

// Container existence check performed by ChangeFeedFactory.BuildChangeFeed().
if (IsAsync)
{
    containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200)));
}
else
{
    containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200)));
}

containerClient.Setup(r => r.GetBlobClient(It.IsAny<string>())).Returns(blobClient.Object);

// Serve the change feed manifest from a local test resource file.
using FileStream stream = File.OpenRead(
    $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}");
BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream);
Response<BlobDownloadInfo> downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200));

if (IsAsync)
{
    blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse);
}
else
{
    blobClient.Setup(r => r.Download()).Returns(downloadResponse);
}

// Year-folder listing.
// NOTE(review): async path uses 'GetYearsPathFuncAsync' but sync uses
// 'GetYearPathFunc' (singular) — presumably the same listing; confirm naming.
if (IsAsync)
{
    AsyncPageable<BlobHierarchyItem> asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathFuncAsync);

    containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
        default,
        default,
        "/",
        Constants.ChangeFeed.SegmentPrefix,
        default)).Returns(asyncPageable);
}
else
{
    Pageable<BlobHierarchyItem> pageable = PageResponseEnumerator.CreateEnumerable(GetYearPathFunc);

    containerClient.Setup(r => r.GetBlobsByHierarchy(
        default,
        default,
        "/",
        Constants.ChangeFeed.SegmentPrefix,
        default)).Returns(pageable);
}

ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(
    containerClient.Object,
    segmentFactory.Object);
// Start time (2024) is later than any year the mock listing returns.
ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed(
    IsAsync,
    startTime: new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero));

// Act
bool hasNext = changeFeed.HasNext();

// Assert
// No years at/after the start time => nothing to iterate.
Assert.IsFalse(hasNext);

// The factory should still have performed the standard bootstrap calls.
if (IsAsync)
{
    containerClient.Verify(r => r.ExistsAsync(default));
}
else
{
    containerClient.Verify(r => r.Exists(default));
}

containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath));

if (IsAsync)
{
    blobClient.Verify(r => r.DownloadAsync());
}
else
{
    blobClient.Verify(r => r.Download());
}

if (IsAsync)
{
    containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
        default,
        default,
        "/",
        Constants.ChangeFeed.SegmentPrefix,
        default));
}
else
{
    containerClient.Verify(r => r.GetBlobsByHierarchy(
        default,
        default,
        "/",
        Constants.ChangeFeed.SegmentPrefix,
        default));
}
}

/// <summary>
/// When every segment in the start year precedes the requested start time
/// (here: 2019 segments are in March/April, start time is June), the change
/// feed should move on and return the following year's events.
/// </summary>
[Test]
public async Task NoSegmentsRemainingInStartYear()
{
// Arrange
int eventCount = 2;
int segmentCount = 2;

// Strict mocks: any unconfigured call fails the test.
Mock<BlobServiceClient> serviceClient = new Mock<BlobServiceClient>(MockBehavior.Strict);
Mock<BlobContainerClient> containerClient = new Mock<BlobContainerClient>(MockBehavior.Strict);
Mock<BlobClient> blobClient = new Mock<BlobClient>(MockBehavior.Strict);
Mock<SegmentFactory> segmentFactory = new Mock<SegmentFactory>(MockBehavior.Strict);
Uri containerUri = new Uri("https://account.blob.core.windows.net/$blobchangefeed");

List<Mock<Segment>> segments = new List<Mock<Segment>>();
for (int i = 0; i < segmentCount; i++)
{
    segments.Add(new Mock<Segment>(MockBehavior.Strict));
}

// ChangeFeedFactory.BuildChangeFeed() setups.
serviceClient.Setup(r => r.GetBlobContainerClient(It.IsAny<string>())).Returns(containerClient.Object);

// Uri is read twice in total (verified below with Times.Exactly).
containerClient.SetupSequence(r => r.Uri)
    .Returns(containerUri)
    .Returns(containerUri);

// Container existence check.
if (IsAsync)
{
    containerClient.Setup(r => r.ExistsAsync(default)).ReturnsAsync(Response.FromValue(true, new MockResponse(200)));
}
else
{
    containerClient.Setup(r => r.Exists(default)).Returns(Response.FromValue(true, new MockResponse(200)));
}

containerClient.Setup(r => r.GetBlobClient(It.IsAny<string>())).Returns(blobClient.Object);

// Serve the change feed manifest from a local test resource file.
using FileStream stream = File.OpenRead(
    $"{Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)}{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}{"ChangeFeedManifest.json"}");
BlobDownloadInfo blobDownloadInfo = BlobsModelFactory.BlobDownloadInfo(content: stream);
Response<BlobDownloadInfo> downloadResponse = Response.FromValue(blobDownloadInfo, new MockResponse(200));

if (IsAsync)
{
    blobClient.Setup(r => r.DownloadAsync()).ReturnsAsync(downloadResponse);
}
else
{
    blobClient.Setup(r => r.Download()).Returns(downloadResponse);
}

// Year listing: 1601, 2019, 2020 (see GetYearsPathShortFunc in this class).
if (IsAsync)
{
    AsyncPageable<BlobHierarchyItem> asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetYearsPathShortFuncAsync);

    containerClient.Setup(r => r.GetBlobsByHierarchyAsync(
        default,
        default,
        "/",
        Constants.ChangeFeed.SegmentPrefix,
        default)).Returns(asyncPageable);
}
else
{
    Pageable<BlobHierarchyItem> pageable = PageResponseEnumerator.CreateEnumerable(GetYearsPathShortFunc);

    containerClient.Setup(r => r.GetBlobsByHierarchy(
        default,
        default,
        "/",
        Constants.ChangeFeed.SegmentPrefix,
        default)).Returns(pageable);
}

// Segment listing per year: first call yields the 2019 segments, the
// second yields the 2020 segments.
if (IsAsync)
{
    AsyncPageable<BlobHierarchyItem> asyncPageable = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2019FuncAsync);
    AsyncPageable<BlobHierarchyItem> asyncPageable2 = PageResponseEnumerator.CreateAsyncEnumerable(GetSegmentsInYear2020FuncAsync);

    containerClient.SetupSequence(r => r.GetBlobsByHierarchyAsync(
        default,
        default,
        default,
It.IsAny<string>(),
        default))
        .Returns(asyncPageable)
        .Returns(asyncPageable2);
}
else
{
    Pageable<BlobHierarchyItem> pageable = PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2019Func);
    Pageable<BlobHierarchyItem> pageable2 = PageResponseEnumerator.CreateEnumerable(GetSegmentsInYear2020Func);

    containerClient.SetupSequence(r => r.GetBlobsByHierarchy(
        default,
        default,
        default,
        It.IsAny<string>(),
        default))
        .Returns(pageable)
        .Returns(pageable2);
}

// Two segments get built; each serves exactly one event.
segmentFactory.SetupSequence(r => r.BuildSegment(
    It.IsAny<bool>(),
    It.IsAny<string>(),
    default))
    .Returns(Task.FromResult(segments[0].Object))
    .Returns(Task.FromResult(segments[1].Object));

List<BlobChangeFeedEvent> events = new List<BlobChangeFeedEvent>();
for (int i = 0; i < eventCount; i++)
{
    events.Add(new BlobChangeFeedEvent
    {
        Id = Guid.NewGuid()
    });
}

segments[0].SetupSequence(r => r.GetPage(
    It.IsAny<bool>(),
    It.IsAny<int?>(),
    default))
    .Returns(Task.FromResult(new List<BlobChangeFeedEvent> { events[0] }));
segments[1].SetupSequence(r => r.GetPage(
    It.IsAny<bool>(),
    It.IsAny<int?>(),
    default))
    .Returns(Task.FromResult(new List<BlobChangeFeedEvent> { events[1] }));

// Segment 0 drains immediately; segment 1 has one more page, then drains.
segments[0].SetupSequence(r => r.HasNext())
    .Returns(false);
segments[1].SetupSequence(r => r.HasNext())
    .Returns(true)
    .Returns(false);
segments[1].Setup(r => r.GetCursor())
    .Returns(new SegmentCursor());

for (int i = 0; i < segments.Count; i++)
{
    segments[i].Setup(r => r.Finalized)
        .Returns(true);
}

ChangeFeedFactory changeFeedFactory = new ChangeFeedFactory(
    containerClient.Object,
    segmentFactory.Object);
// Start mid-2019: both 2019 segments (March 2, April 3) precede the start
// time, so the feed should continue into 2020.
ChangeFeed changeFeed = await changeFeedFactory.BuildChangeFeed(
    IsAsync,
    startTime: new DateTimeOffset(2019, 6, 1, 0, 0, 0, TimeSpan.Zero));

// Act
Page<BlobChangeFeedEvent> page = await changeFeed.GetPage(IsAsync);

// Assert
// Both events (presumably from the 2020 segments) come back in order.
Assert.AreEqual(2, page.Values.Count);
Assert.AreEqual(events[0].Id, page.Values[0].Id);
Assert.AreEqual(events[1].Id, page.Values[1].Id);

containerClient.Verify(r => r.Uri);

if (IsAsync)
{
    containerClient.Verify(r =>
r.ExistsAsync(default));
}
else
{
    containerClient.Verify(r =>
        r.Exists(default));
}

containerClient.Verify(r => r.GetBlobClient(Constants.ChangeFeed.MetaSegmentsPath));

if (IsAsync)
{
    blobClient.Verify(r => r.DownloadAsync());
}
else
{
    blobClient.Verify(r => r.Download());
}

if (IsAsync)
{
    containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
        default,
        default,
        "/",
        Constants.ChangeFeed.SegmentPrefix,
        default));
}
else
{
    containerClient.Verify(r => r.GetBlobsByHierarchy(
        default,
        default,
        "/",
        Constants.ChangeFeed.SegmentPrefix,
        default));
}

// Segment listings should have been requested for both years.
if (IsAsync)
{
    containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
        default,
        default,
        default,
        "idx/segments/2019/",
        default));
    containerClient.Verify(r => r.GetBlobsByHierarchyAsync(
        default,
        default,
        default,
        "idx/segments/2020/",
        default));
}
else
{
    containerClient.Verify(r => r.GetBlobsByHierarchy(
        default,
        default,
        default,
        "idx/segments/2019/",
        default));
    containerClient.Verify(r => r.GetBlobsByHierarchy(
        default,
        default,
        default,
        "idx/segments/2020/",
        default));
}

// ChangeFeed.Next() verifies.
segments[0].Verify(r => r.HasNext(), Times.Exactly(1));
segments[0].Verify(r => r.GetPage(
    IsAsync,
    Constants.ChangeFeed.DefaultPageSize,
    default));
segments[1].Verify(r => r.HasNext(), Times.Exactly(3));
segments[1].Verify(r => r.GetPage(
    IsAsync,
    Constants.ChangeFeed.DefaultPageSize - 1,
    default));
containerClient.Verify(r => r.Uri, Times.Exactly(1));
}

// Async wrapper over GetYearsPathShortFunc for AsyncPageable-based tests.
public static Task<Page<BlobHierarchyItem>> GetYearsPathShortFuncAsync(string continuation, int? pageSizeHint)
    => Task.FromResult(GetYearsPathShortFunc(continuation, pageSizeHint));

// Returns a single page listing the year folders 1601, 2019, and 2020.
public static Page<BlobHierarchyItem> GetYearsPathShortFunc(
    string continuation,
    int?
pageSizeHint)
    => new BlobHierarchyItemPage(new List<BlobHierarchyItem>
    {
        BlobsModelFactory.BlobHierarchyItem("idx/segments/1601/", null),
        BlobsModelFactory.BlobHierarchyItem("idx/segments/2019/", null),
        BlobsModelFactory.BlobHierarchyItem("idx/segments/2020/", null)
    });

// Async wrapper over GetSegmentsInYear2019Func.
public static Task<Page<BlobHierarchyItem>> GetSegmentsInYear2019FuncAsync(
    string continuation,
    int? pageSizeHint)
    => Task.FromResult(GetSegmentsInYear2019Func(continuation, pageSizeHint));

// Single page of 2019 segment manifests (March 2 and April 3).
public static Page<BlobHierarchyItem> GetSegmentsInYear2019Func(
    string continuation,
    int? pageSizeHint)
    => new BlobHierarchyItemPage(new List<BlobHierarchyItem>
    {
        BlobsModelFactory.BlobHierarchyItem(
            null,
            BlobsModelFactory.BlobItem("idx/segments/2019/03/02/2000/meta.json", false, null)),
        BlobsModelFactory.BlobHierarchyItem(
            null,
            BlobsModelFactory.BlobItem("idx/segments/2019/04/03/2200/meta.json", false, null))
    });

// Async wrapper over GetSegmentsInYear2020Func.
public static Task<Page<BlobHierarchyItem>> GetSegmentsInYear2020FuncAsync(
    string continuation,
    int? pageSizeHint)
    => Task.FromResult(GetSegmentsInYear2020Func(continuation, pageSizeHint));

// Single page of 2020 segment manifests (both on March 3).
public static Page<BlobHierarchyItem> GetSegmentsInYear2020Func(
    string continuation,
    int? pageSizeHint)
    => new BlobHierarchyItemPage(new List<BlobHierarchyItem>
    {
        BlobsModelFactory.BlobHierarchyItem(
            null,
            BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2000/meta.json", false, null)),
        BlobsModelFactory.BlobHierarchyItem(
            null,
            BlobsModelFactory.BlobItem("idx/segments/2020/03/03/2200/meta.json", false, null))
    });
}
}