context
stringlengths
2.52k
185k
gt
stringclasses
1 value
/*
 * MATLAB Compiler: 4.17 (R2012a)
 * Date: Fri Nov 15 15:25:35 2013
 * Arguments: "-B" "macro_default" "-W" "dotnet:VECMATLIB,Transformation,0.0,private" "-T"
 * "link:lib" "-d" "C:\Users\Gus\Desktop\vec\LIB\VECMATLIB\src" "-w"
 * "enable:specified_file_mismatch" "-w" "enable:repeated_file" "-w"
 * "enable:switch_ignored" "-w" "enable:missing_lib_sentinel" "-w" "enable:demo_license"
 * "-v"
 * "class{Transformation:C:\Users\Gus\Documents\MATLAB\fkin.m,C:\Users\Gus\Documents\MATLAB
 * \LTransform.m}"
 */
using System;
using System.Reflection;
using System.IO;
using MathWorks.MATLAB.NET.Arrays;
using MathWorks.MATLAB.NET.Utility;

#if SHARED
[assembly: System.Reflection.AssemblyKeyFile(@"")]
#endif

namespace VECMATLIB
{
    /// <summary>
    /// The Transformation class provides a CLS compliant, MWArray interface to the
    /// M-functions contained in the files:
    /// <newpara></newpara>
    /// C:\Users\Gus\Documents\MATLAB\fkin.m
    /// <newpara></newpara>
    /// C:\Users\Gus\Documents\MATLAB\LTransform.m
    /// <newpara></newpara>
    /// deployprint.m
    /// <newpara></newpara>
    /// printdlg.m
    /// </summary>
    /// <remarks>
    /// @Version 0.0
    /// </remarks>
    public class Transformation : IDisposable
    {
        #region Constructors

        /// <summary internal= "true">
        /// The static constructor instantiates and initializes the MATLAB Compiler Runtime
        /// instance. It locates the CTF archive (either embedded as a managed resource or
        /// on disk next to this assembly) and hands it to the MCR.
        /// </summary>
        static Transformation()
        {
            if (MWMCR.MCRAppInitialized)
            {
                Assembly assembly = Assembly.GetExecutingAssembly();

                // Directory containing this assembly; the MCR resolves the CTF
                // archive relative to this location. Path.GetDirectoryName handles
                // both '\' and '/' separators, unlike the generated
                // LastIndexOf(@"\")/Remove sequence, which threw
                // ArgumentOutOfRangeException when the path held no backslash.
                string ctfFilePath = Path.GetDirectoryName(assembly.Location);

                string ctfFileName = "VECMATLIB.ctf";
                Stream embeddedCtfStream = null;

                // Prefer a CTF archive embedded in the assembly's managed resources.
                String[] resourceStrings = assembly.GetManifestResourceNames();
                foreach (String name in resourceStrings)
                {
                    if (name.Contains(ctfFileName))
                    {
                        embeddedCtfStream = assembly.GetManifestResourceStream(name);
                        break;
                    }
                }
                mcr = new MWMCR("", ctfFilePath, embeddedCtfStream, true);
            }
            else
            {
                throw new ApplicationException("MWArray assembly could not be initialized");
            }
        }

        /// <summary>
        /// Constructs a new instance of the Transformation class.
        /// </summary>
        public Transformation()
        {
        }

        #endregion Constructors

        #region Finalize

        /// <summary internal= "true">
        /// Class destructor called by the CLR garbage collector.
        /// </summary>
        ~Transformation()
        {
            Dispose(false);
        }

        /// <summary>
        /// Frees the native resources associated with this object
        /// </summary>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <summary internal= "true">
        /// Internal dispose function
        /// </summary>
        protected virtual void Dispose(bool disposing)
        {
            if (!disposed)
            {
                disposed = true;

                if (disposing)
                {
                    // Free managed resources;
                }

                // Free native resources
            }
        }

        #endregion Finalize

        #region Methods

        /// <summary>
        /// Provides a single output, 0-input MWArray interface to the fkin M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// q list of numbers, commands 1-5
        /// toolLength = 8 or 12 for long short tool
        /// T1b is transformation from T.m
        /// </remarks>
        /// <returns>An MWArray containing the first output argument.</returns>
        ///
        public MWArray fkin()
        {
            return mcr.EvaluateFunction("fkin", new MWArray[]{});
        }

        /// <summary>
        /// Provides a single output, 1-input MWArray interface to the fkin M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// q list of numbers, commands 1-5
        /// toolLength = 8 or 12 for long short tool
        /// T1b is transformation from T.m
        /// </remarks>
        /// <param name="q">Input argument #1</param>
        /// <returns>An MWArray containing the first output argument.</returns>
        ///
        public MWArray fkin(MWArray q)
        {
            return mcr.EvaluateFunction("fkin", q);
        }

        /// <summary>
        /// Provides a single output, 2-input MWArray interface to the fkin M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// q list of numbers, commands 1-5
        /// toolLength = 8 or 12 for long short tool
        /// T1b is transformation from T.m
        /// </remarks>
        /// <param name="q">Input argument #1</param>
        /// <param name="toolLength">Input argument #2</param>
        /// <returns>An MWArray containing the first output argument.</returns>
        ///
        public MWArray fkin(MWArray q, MWArray toolLength)
        {
            return mcr.EvaluateFunction("fkin", q, toolLength);
        }

        /// <summary>
        /// Provides a single output, 3-input MWArray interface to the fkin M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// q list of numbers, commands 1-5
        /// toolLength = 8 or 12 for long short tool
        /// T1b is transformation from T.m
        /// </remarks>
        /// <param name="q">Input argument #1</param>
        /// <param name="toolLength">Input argument #2</param>
        /// <param name="T1b">Input argument #3</param>
        /// <returns>An MWArray containing the first output argument.</returns>
        ///
        public MWArray fkin(MWArray q, MWArray toolLength, MWArray T1b)
        {
            return mcr.EvaluateFunction("fkin", q, toolLength, T1b);
        }

        /// <summary>
        /// Provides the standard 0-input MWArray interface to the fkin M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// q list of numbers, commands 1-5
        /// toolLength = 8 or 12 for long short tool
        /// T1b is transformation from T.m
        /// </remarks>
        /// <param name="numArgsOut">The number of output arguments to return.</param>
        /// <returns>An Array of length "numArgsOut" containing the output
        /// arguments.</returns>
        ///
        public MWArray[] fkin(int numArgsOut)
        {
            return mcr.EvaluateFunction(numArgsOut, "fkin", new MWArray[]{});
        }

        /// <summary>
        /// Provides the standard 1-input MWArray interface to the fkin M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// q list of numbers, commands 1-5
        /// toolLength = 8 or 12 for long short tool
        /// T1b is transformation from T.m
        /// </remarks>
        /// <param name="numArgsOut">The number of output arguments to return.</param>
        /// <param name="q">Input argument #1</param>
        /// <returns>An Array of length "numArgsOut" containing the output
        /// arguments.</returns>
        ///
        public MWArray[] fkin(int numArgsOut, MWArray q)
        {
            return mcr.EvaluateFunction(numArgsOut, "fkin", q);
        }

        /// <summary>
        /// Provides the standard 2-input MWArray interface to the fkin M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// q list of numbers, commands 1-5
        /// toolLength = 8 or 12 for long short tool
        /// T1b is transformation from T.m
        /// </remarks>
        /// <param name="numArgsOut">The number of output arguments to return.</param>
        /// <param name="q">Input argument #1</param>
        /// <param name="toolLength">Input argument #2</param>
        /// <returns>An Array of length "numArgsOut" containing the output
        /// arguments.</returns>
        ///
        public MWArray[] fkin(int numArgsOut, MWArray q, MWArray toolLength)
        {
            return mcr.EvaluateFunction(numArgsOut, "fkin", q, toolLength);
        }

        /// <summary>
        /// Provides the standard 3-input MWArray interface to the fkin M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// q list of numbers, commands 1-5
        /// toolLength = 8 or 12 for long short tool
        /// T1b is transformation from T.m
        /// </remarks>
        /// <param name="numArgsOut">The number of output arguments to return.</param>
        /// <param name="q">Input argument #1</param>
        /// <param name="toolLength">Input argument #2</param>
        /// <param name="T1b">Input argument #3</param>
        /// <returns>An Array of length "numArgsOut" containing the output
        /// arguments.</returns>
        ///
        public MWArray[] fkin(int numArgsOut, MWArray q, MWArray toolLength, MWArray T1b)
        {
            return mcr.EvaluateFunction(numArgsOut, "fkin", q, toolLength, T1b);
        }

        /// <summary>
        /// Provides an interface for the fkin function in which the input and output
        /// arguments are specified as an array of MWArrays.
        /// </summary>
        /// <remarks>
        /// This method will allocate and return by reference the output argument
        /// array.<newpara></newpara>
        /// M-Documentation:
        /// q list of numbers, commands 1-5
        /// toolLength = 8 or 12 for long short tool
        /// T1b is transformation from T.m
        /// </remarks>
        /// <param name="numArgsOut">The number of output arguments to return</param>
        /// <param name= "argsOut">Array of MWArray output arguments</param>
        /// <param name= "argsIn">Array of MWArray input arguments</param>
        ///
        public void fkin(int numArgsOut, ref MWArray[] argsOut, MWArray[] argsIn)
        {
            mcr.EvaluateFunction("fkin", numArgsOut, ref argsOut, argsIn);
        }

        /// <summary>
        /// Provides a single output, 0-input MWArray interface to the LTransform M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// m1=COMP(1:20,:);
        /// m2=long(1:20,:);
        /// </remarks>
        /// <returns>An MWArray containing the first output argument.</returns>
        ///
        public MWArray LTransform()
        {
            return mcr.EvaluateFunction("LTransform", new MWArray[]{});
        }

        /// <summary>
        /// Provides a single output, 1-input MWArray interface to the LTransform M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// m1=COMP(1:20,:);
        /// m2=long(1:20,:);
        /// </remarks>
        /// <param name="m1">Input argument #1</param>
        /// <returns>An MWArray containing the first output argument.</returns>
        ///
        public MWArray LTransform(MWArray m1)
        {
            return mcr.EvaluateFunction("LTransform", m1);
        }

        /// <summary>
        /// Provides a single output, 2-input MWArray interface to the LTransform M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// m1=COMP(1:20,:);
        /// m2=long(1:20,:);
        /// </remarks>
        /// <param name="m1">Input argument #1</param>
        /// <param name="commands">Input argument #2</param>
        /// <returns>An MWArray containing the first output argument.</returns>
        ///
        public MWArray LTransform(MWArray m1, MWArray commands)
        {
            return mcr.EvaluateFunction("LTransform", m1, commands);
        }

        /// <summary>
        /// Provides a single output, 3-input MWArray interface to the LTransform M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// m1=COMP(1:20,:);
        /// m2=long(1:20,:);
        /// </remarks>
        /// <param name="m1">Input argument #1</param>
        /// <param name="commands">Input argument #2</param>
        /// <param name="ToolLength">Input argument #3</param>
        /// <returns>An MWArray containing the first output argument.</returns>
        ///
        public MWArray LTransform(MWArray m1, MWArray commands, MWArray ToolLength)
        {
            return mcr.EvaluateFunction("LTransform", m1, commands, ToolLength);
        }

        /// <summary>
        /// Provides the standard 0-input MWArray interface to the LTransform M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// m1=COMP(1:20,:);
        /// m2=long(1:20,:);
        /// </remarks>
        /// <param name="numArgsOut">The number of output arguments to return.</param>
        /// <returns>An Array of length "numArgsOut" containing the output
        /// arguments.</returns>
        ///
        public MWArray[] LTransform(int numArgsOut)
        {
            return mcr.EvaluateFunction(numArgsOut, "LTransform", new MWArray[]{});
        }

        /// <summary>
        /// Provides the standard 1-input MWArray interface to the LTransform M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// m1=COMP(1:20,:);
        /// m2=long(1:20,:);
        /// </remarks>
        /// <param name="numArgsOut">The number of output arguments to return.</param>
        /// <param name="m1">Input argument #1</param>
        /// <returns>An Array of length "numArgsOut" containing the output
        /// arguments.</returns>
        ///
        public MWArray[] LTransform(int numArgsOut, MWArray m1)
        {
            return mcr.EvaluateFunction(numArgsOut, "LTransform", m1);
        }

        /// <summary>
        /// Provides the standard 2-input MWArray interface to the LTransform M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// m1=COMP(1:20,:);
        /// m2=long(1:20,:);
        /// </remarks>
        /// <param name="numArgsOut">The number of output arguments to return.</param>
        /// <param name="m1">Input argument #1</param>
        /// <param name="commands">Input argument #2</param>
        /// <returns>An Array of length "numArgsOut" containing the output
        /// arguments.</returns>
        ///
        public MWArray[] LTransform(int numArgsOut, MWArray m1, MWArray commands)
        {
            return mcr.EvaluateFunction(numArgsOut, "LTransform", m1, commands);
        }

        /// <summary>
        /// Provides the standard 3-input MWArray interface to the LTransform M-function.
        /// </summary>
        /// <remarks>
        /// M-Documentation:
        /// m1=COMP(1:20,:);
        /// m2=long(1:20,:);
        /// </remarks>
        /// <param name="numArgsOut">The number of output arguments to return.</param>
        /// <param name="m1">Input argument #1</param>
        /// <param name="commands">Input argument #2</param>
        /// <param name="ToolLength">Input argument #3</param>
        /// <returns>An Array of length "numArgsOut" containing the output
        /// arguments.</returns>
        ///
        public MWArray[] LTransform(int numArgsOut, MWArray m1, MWArray commands, MWArray ToolLength)
        {
            return mcr.EvaluateFunction(numArgsOut, "LTransform", m1, commands, ToolLength);
        }

        /// <summary>
        /// Provides an interface for the LTransform function in which the input and output
        /// arguments are specified as an array of MWArrays.
        /// </summary>
        /// <remarks>
        /// This method will allocate and return by reference the output argument
        /// array.<newpara></newpara>
        /// M-Documentation:
        /// m1=COMP(1:20,:);
        /// m2=long(1:20,:);
        /// </remarks>
        /// <param name="numArgsOut">The number of output arguments to return</param>
        /// <param name= "argsOut">Array of MWArray output arguments</param>
        /// <param name= "argsIn">Array of MWArray input arguments</param>
        ///
        public void LTransform(int numArgsOut, ref MWArray[] argsOut, MWArray[] argsIn)
        {
            mcr.EvaluateFunction("LTransform", numArgsOut, ref argsOut, argsIn);
        }

        /// <summary>
        /// This method will cause a MATLAB figure window to behave as a modal dialog box.
        /// The method will not return until all the figure windows associated with this
        /// component have been closed.
        /// </summary>
        /// <remarks>
        /// An application should only call this method when required to keep the
        /// MATLAB figure window from disappearing. Other techniques, such as calling
        /// Console.ReadLine() from the application should be considered where
        /// possible.</remarks>
        ///
        public void WaitForFiguresToDie()
        {
            mcr.WaitForFiguresToDie();
        }

        #endregion Methods

        #region Class Members

        // Shared MATLAB Compiler Runtime handle, created once by the static constructor.
        private static MWMCR mcr = null;

        // Guards Dispose(bool) against double disposal.
        private bool disposed = false;

        #endregion Class Members
    }
}
/***************************************************************************** * Automatic import and advanced preview added by Mitch Thompson * Full irrevocable rights and permissions granted to Esoteric Software *****************************************************************************/ using System; using System.Collections.Generic; using UnityEditor; #if !UNITY_4_3 using UnityEditor.AnimatedValues; #endif using UnityEngine; using Spine; [CustomEditor(typeof(SkeletonDataAsset))] public class SkeletonDataAssetInspector : Editor { static bool showAnimationStateData = true; static bool showAnimationList = true; static bool showSlotList = false; static bool showAttachments = false; static bool showUnity = true; static bool bakeAnimations = true; static bool bakeIK = true; static SendMessageOptions bakeEventOptions = SendMessageOptions.DontRequireReceiver; private SerializedProperty atlasAssets, skeletonJSON, scale, fromAnimation, toAnimation, duration, defaultMix, controller; #if SPINE_TK2D private SerializedProperty spriteCollection; #endif private bool m_initialized = false; private SkeletonDataAsset m_skeletonDataAsset; private SkeletonData m_skeletonData; private string m_skeletonDataAssetGUID; private bool needToSerialize; List<string> warnings = new List<string>(); void OnEnable () { SpineEditorUtilities.ConfirmInitialization(); try { atlasAssets = serializedObject.FindProperty("atlasAssets"); skeletonJSON = serializedObject.FindProperty("skeletonJSON"); scale = serializedObject.FindProperty("scale"); fromAnimation = serializedObject.FindProperty("fromAnimation"); toAnimation = serializedObject.FindProperty("toAnimation"); duration = serializedObject.FindProperty("duration"); defaultMix = serializedObject.FindProperty("defaultMix"); controller = serializedObject.FindProperty("controller"); #if SPINE_TK2D spriteCollection = serializedObject.FindProperty("spriteCollection"); #endif m_skeletonDataAsset = (SkeletonDataAsset)target; m_skeletonDataAssetGUID = 
AssetDatabase.AssetPathToGUID(AssetDatabase.GetAssetPath(m_skeletonDataAsset)); EditorApplication.update += Update; } catch { // TODO: WARNING: empty catch block supresses errors. } m_skeletonData = m_skeletonDataAsset.GetSkeletonData(true); showUnity = EditorPrefs.GetBool("SkeletonDataAssetInspector_showUnity", true); RepopulateWarnings(); } void OnDestroy () { m_initialized = false; EditorApplication.update -= Update; this.DestroyPreviewInstances(); if (this.m_previewUtility != null) { this.m_previewUtility.Cleanup(); this.m_previewUtility = null; } } override public void OnInspectorGUI () { serializedObject.Update(); EditorGUI.BeginChangeCheck(); #if !SPINE_TK2D EditorGUILayout.PropertyField(atlasAssets, true); #else EditorGUI.BeginDisabledGroup(spriteCollection.objectReferenceValue != null); EditorGUILayout.PropertyField(atlasAssets, true); EditorGUI.EndDisabledGroup(); EditorGUILayout.PropertyField(spriteCollection, true); #endif EditorGUILayout.PropertyField(skeletonJSON); EditorGUILayout.PropertyField(scale); if (EditorGUI.EndChangeCheck()) { if (serializedObject.ApplyModifiedProperties()) { if (m_previewUtility != null) { m_previewUtility.Cleanup(); m_previewUtility = null; } RepopulateWarnings(); OnEnable(); return; } } if (m_skeletonData != null) { DrawAnimationStateInfo(); DrawAnimationList(); DrawSlotList(); DrawUnityTools(); } else { DrawReimportButton(); //Show Warnings foreach (var str in warnings) EditorGUILayout.LabelField(new GUIContent(str, SpineEditorUtilities.Icons.warning)); } if(!Application.isPlaying) serializedObject.ApplyModifiedProperties(); } void DrawMecanim () { EditorGUILayout.PropertyField(controller, new GUIContent("Controller", SpineEditorUtilities.Icons.controllerIcon)); if (controller.objectReferenceValue == null) { GUILayout.BeginHorizontal(); GUILayout.Space(32); if (GUILayout.Button(new GUIContent("Generate Mecanim Controller"), EditorStyles.toolbarButton, GUILayout.Width(195), GUILayout.Height(20))) 
SkeletonBaker.GenerateMecanimAnimationClips(m_skeletonDataAsset); //GUILayout.Label(new GUIContent("Alternative to SkeletonAnimation, not a requirement.", SpineEditorUtilities.Icons.warning)); GUILayout.EndHorizontal(); EditorGUILayout.LabelField("Alternative to SkeletonAnimation, not required", EditorStyles.miniLabel); } } void DrawUnityTools () { bool pre = showUnity; showUnity = EditorGUILayout.Foldout(showUnity, new GUIContent("Unity Tools", SpineEditorUtilities.Icons.unityIcon)); if (pre != showUnity) EditorPrefs.SetBool("SkeletonDataAssetInspector_showUnity", showUnity); if (showUnity) { EditorGUI.indentLevel++; EditorGUILayout.LabelField("SkeletonAnimator", EditorStyles.boldLabel); EditorGUI.indentLevel++; DrawMecanim(); EditorGUI.indentLevel--; GUILayout.Space(32); EditorGUILayout.LabelField("Baking", EditorStyles.boldLabel); EditorGUILayout.HelpBox("WARNING!\n\nBaking is NOT the same as SkeletonAnimator!\nDoes not support the following:\n\tFlipX or Y\n\tInheritScale\n\tColor Keys\n\tDraw Order Keys\n\tIK and Curves are sampled at 60fps and are not realtime.\n\tPlease read SkeletonBaker.cs comments for full details.\n\nThe main use of Baking is to export Spine projects to be used without the Spine Runtime (ie: for sale on the Asset Store, or background objects that are animated only with a wind noise generator)", MessageType.Warning, true); EditorGUI.indentLevel++; bakeAnimations = EditorGUILayout.Toggle("Bake Animations", bakeAnimations); EditorGUI.BeginDisabledGroup(!bakeAnimations); { EditorGUI.indentLevel++; bakeIK = EditorGUILayout.Toggle("Bake IK", bakeIK); bakeEventOptions = (SendMessageOptions)EditorGUILayout.EnumPopup("Event Options", bakeEventOptions); EditorGUI.indentLevel--; } EditorGUI.EndDisabledGroup(); EditorGUI.indentLevel++; GUILayout.BeginHorizontal(); { if (GUILayout.Button(new GUIContent("Bake All Skins", SpineEditorUtilities.Icons.unityIcon), GUILayout.Height(32), GUILayout.Width(150))) SkeletonBaker.BakeToPrefab(m_skeletonDataAsset, 
m_skeletonData.Skins, "", bakeAnimations, bakeIK, bakeEventOptions); string skinName = "<No Skin>"; if (m_skeletonAnimation != null && m_skeletonAnimation.skeleton != null) { Skin bakeSkin = m_skeletonAnimation.skeleton.Skin; if (bakeSkin == null) { skinName = "Default"; bakeSkin = m_skeletonData.Skins.Items[0]; } else skinName = m_skeletonAnimation.skeleton.Skin.Name; bool oops = false; try { GUILayout.BeginVertical(); if (GUILayout.Button(new GUIContent("Bake " + skinName, SpineEditorUtilities.Icons.unityIcon), GUILayout.Height(32), GUILayout.Width(250))) SkeletonBaker.BakeToPrefab(m_skeletonDataAsset, new ExposedList<Skin>(new [] { bakeSkin }), "", bakeAnimations, bakeIK, bakeEventOptions); GUILayout.BeginHorizontal(); GUILayout.Label(new GUIContent("Skins", SpineEditorUtilities.Icons.skinsRoot), GUILayout.Width(50)); if (GUILayout.Button(skinName, EditorStyles.popup, GUILayout.Width(196))) { SelectSkinContext(); } GUILayout.EndHorizontal(); } catch { oops = true; //GUILayout.BeginVertical(); } if (!oops) GUILayout.EndVertical(); } } GUILayout.EndHorizontal(); EditorGUI.indentLevel--; EditorGUI.indentLevel--; } } void DrawReimportButton () { EditorGUI.BeginDisabledGroup(skeletonJSON.objectReferenceValue == null); if (GUILayout.Button(new GUIContent("Attempt Reimport", SpineEditorUtilities.Icons.warning))) { DoReimport(); return; } EditorGUI.EndDisabledGroup(); } void DoReimport () { SpineEditorUtilities.ImportSpineContent(new string[] { AssetDatabase.GetAssetPath(skeletonJSON.objectReferenceValue) }, true); if (m_previewUtility != null) { m_previewUtility.Cleanup(); m_previewUtility = null; } RepopulateWarnings(); OnEnable(); EditorUtility.SetDirty(m_skeletonDataAsset); } void DrawAnimationStateInfo () { showAnimationStateData = EditorGUILayout.Foldout(showAnimationStateData, "Animation State Data"); if (!showAnimationStateData) return; EditorGUI.BeginChangeCheck(); EditorGUILayout.PropertyField(defaultMix); // Animation names var animations = new 
string[m_skeletonData.Animations.Count]; for (int i = 0; i < animations.Length; i++) animations[i] = m_skeletonData.Animations.Items[i].Name; for (int i = 0; i < fromAnimation.arraySize; i++) { SerializedProperty from = fromAnimation.GetArrayElementAtIndex(i); SerializedProperty to = toAnimation.GetArrayElementAtIndex(i); SerializedProperty durationProp = duration.GetArrayElementAtIndex(i); EditorGUILayout.BeginHorizontal(); from.stringValue = animations[EditorGUILayout.Popup(Math.Max(Array.IndexOf(animations, from.stringValue), 0), animations)]; to.stringValue = animations[EditorGUILayout.Popup(Math.Max(Array.IndexOf(animations, to.stringValue), 0), animations)]; durationProp.floatValue = EditorGUILayout.FloatField(durationProp.floatValue); if (GUILayout.Button("Delete")) { duration.DeleteArrayElementAtIndex(i); toAnimation.DeleteArrayElementAtIndex(i); fromAnimation.DeleteArrayElementAtIndex(i); } EditorGUILayout.EndHorizontal(); } EditorGUILayout.BeginHorizontal(); EditorGUILayout.Space(); if (GUILayout.Button("Add Mix")) { duration.arraySize++; toAnimation.arraySize++; fromAnimation.arraySize++; } EditorGUILayout.Space(); EditorGUILayout.EndHorizontal(); if (EditorGUI.EndChangeCheck()) { m_skeletonDataAsset.FillStateData(); EditorUtility.SetDirty(m_skeletonDataAsset); serializedObject.ApplyModifiedProperties(); needToSerialize = true; } } void DrawAnimationList () { showAnimationList = EditorGUILayout.Foldout(showAnimationList, new GUIContent("Animations", SpineEditorUtilities.Icons.animationRoot)); if (!showAnimationList) return; if (GUILayout.Button(new GUIContent("Setup Pose", SpineEditorUtilities.Icons.skeleton), GUILayout.Width(105), GUILayout.Height(18))) { StopAnimation(); m_skeletonAnimation.skeleton.SetToSetupPose(); m_requireRefresh = true; } EditorGUILayout.LabelField("Name", "Duration"); foreach (Spine.Animation a in m_skeletonData.Animations) { GUILayout.BeginHorizontal(); if (m_skeletonAnimation != null && m_skeletonAnimation.state != null) { if 
(m_skeletonAnimation.state.GetCurrent(0) != null && m_skeletonAnimation.state.GetCurrent(0).Animation == a) { GUI.contentColor = Color.red; if (GUILayout.Button("\u25BA", EditorStyles.toolbarButton, GUILayout.Width(24))) { StopAnimation(); } GUI.contentColor = Color.white; } else { if (GUILayout.Button("\u25BA", EditorStyles.toolbarButton, GUILayout.Width(24))) { PlayAnimation(a.Name, true); } } } else { GUILayout.Label("?", GUILayout.Width(24)); } EditorGUILayout.LabelField(new GUIContent(a.Name, SpineEditorUtilities.Icons.animation), new GUIContent(a.Duration.ToString("f3") + "s" + ("(" + (Mathf.RoundToInt(a.Duration * 30)) + ")").PadLeft(12, ' '))); GUILayout.EndHorizontal(); } } void DrawSlotList () { showSlotList = EditorGUILayout.Foldout(showSlotList, new GUIContent("Slots", SpineEditorUtilities.Icons.slotRoot)); if (!showSlotList) return; if (m_skeletonAnimation == null || m_skeletonAnimation.skeleton == null) return; EditorGUI.indentLevel++; try { showAttachments = EditorGUILayout.ToggleLeft("Show Attachments", showAttachments); } catch { return; } List<Attachment> slotAttachments = new List<Attachment>(); List<string> slotAttachmentNames = new List<string>(); List<string> defaultSkinAttachmentNames = new List<string>(); var defaultSkin = m_skeletonData.Skins.Items[0]; Skin skin = m_skeletonAnimation.skeleton.Skin; if (skin == null) { skin = defaultSkin; } for (int i = m_skeletonAnimation.skeleton.Slots.Count - 1; i >= 0; i--) { Slot slot = m_skeletonAnimation.skeleton.Slots.Items[i]; EditorGUILayout.LabelField(new GUIContent(slot.Data.Name, SpineEditorUtilities.Icons.slot)); if (showAttachments) { EditorGUI.indentLevel++; slotAttachments.Clear(); slotAttachmentNames.Clear(); defaultSkinAttachmentNames.Clear(); skin.FindNamesForSlot(i, slotAttachmentNames); skin.FindAttachmentsForSlot(i, slotAttachments); if (skin != defaultSkin) { defaultSkin.FindNamesForSlot(i, defaultSkinAttachmentNames); defaultSkin.FindNamesForSlot(i, slotAttachmentNames); 
defaultSkin.FindAttachmentsForSlot(i, slotAttachments); } else { defaultSkin.FindNamesForSlot(i, defaultSkinAttachmentNames); } for (int a = 0; a < slotAttachments.Count; a++) { Attachment attachment = slotAttachments[a]; string name = slotAttachmentNames[a]; Texture2D icon = null; var type = attachment.GetType(); if (type == typeof(RegionAttachment)) icon = SpineEditorUtilities.Icons.image; else if (type == typeof(MeshAttachment)) icon = SpineEditorUtilities.Icons.mesh; else if (type == typeof(BoundingBoxAttachment)) icon = SpineEditorUtilities.Icons.boundingBox; else if (type == typeof(SkinnedMeshAttachment)) icon = SpineEditorUtilities.Icons.weights; else icon = SpineEditorUtilities.Icons.warning; //TODO: Waterboard Nate //if (name != attachment.Name) //icon = SpineEditorUtilities.Icons.skinPlaceholder; bool initialState = slot.Attachment == attachment; bool toggled = EditorGUILayout.ToggleLeft(new GUIContent(name, icon), slot.Attachment == attachment); if (!defaultSkinAttachmentNames.Contains(name)) { Rect skinPlaceHolderIconRect = GUILayoutUtility.GetLastRect(); skinPlaceHolderIconRect.width = SpineEditorUtilities.Icons.skinPlaceholder.width; skinPlaceHolderIconRect.height = SpineEditorUtilities.Icons.skinPlaceholder.height; GUI.DrawTexture(skinPlaceHolderIconRect, SpineEditorUtilities.Icons.skinPlaceholder); } if (toggled != initialState) { if (toggled) { slot.Attachment = attachment; } else { slot.Attachment = null; } m_requireRefresh = true; } } EditorGUI.indentLevel--; } } EditorGUI.indentLevel--; } void RepopulateWarnings () { warnings.Clear(); if (skeletonJSON.objectReferenceValue == null) warnings.Add("Missing Skeleton JSON"); else { if (SpineEditorUtilities.IsValidSpineData((TextAsset)skeletonJSON.objectReferenceValue) == false) { warnings.Add("Skeleton data file is not a valid JSON or binary file."); } else { bool detectedNullAtlasEntry = false; var atlasList = new List<Atlas>(); for (int i = 0; i < atlasAssets.arraySize; i++) { if 
(atlasAssets.GetArrayElementAtIndex(i).objectReferenceValue == null) { detectedNullAtlasEntry = true; break; } else { atlasList.Add(((AtlasAsset)atlasAssets.GetArrayElementAtIndex(i).objectReferenceValue).GetAtlas()); } } if (detectedNullAtlasEntry) warnings.Add("AtlasAsset elements cannot be Null"); else { //get requirements var missingPaths = SpineEditorUtilities.GetRequiredAtlasRegions(AssetDatabase.GetAssetPath((TextAsset)skeletonJSON.objectReferenceValue)); foreach (var atlas in atlasList) { for (int i = 0; i < missingPaths.Count; i++) { if (atlas.FindRegion(missingPaths[i]) != null) { missingPaths.RemoveAt(i); i--; } } } foreach (var str in missingPaths) warnings.Add("Missing Region: '" + str + "'"); } } } } //preview window stuff private PreviewRenderUtility m_previewUtility; private GameObject m_previewInstance; private Vector2 previewDir; private SkeletonAnimation m_skeletonAnimation; //private SkeletonData m_skeletonData; private static int sliderHash = "Slider".GetHashCode(); private float m_lastTime; private bool m_playing; private bool m_requireRefresh; private Color m_originColor = new Color(0.3f, 0.3f, 0.3f, 1); private void StopAnimation () { m_skeletonAnimation.state.ClearTrack(0); m_playing = false; } List<Spine.Event> m_animEvents = new List<Spine.Event>(); List<float> m_animEventFrames = new List<float>(); private void PlayAnimation (string animName, bool loop) { m_animEvents.Clear(); m_animEventFrames.Clear(); m_skeletonAnimation.state.SetAnimation(0, animName, loop); Spine.Animation a = m_skeletonAnimation.state.GetCurrent(0).Animation; foreach (Timeline t in a.Timelines) { if (t.GetType() == typeof(EventTimeline)) { EventTimeline et = (EventTimeline)t; for (int i = 0; i < et.Events.Length; i++) { m_animEvents.Add(et.Events[i]); m_animEventFrames.Add(et.Frames[i]); } } } m_playing = true; } private void InitPreview () { if (this.m_previewUtility == null) { this.m_lastTime = Time.realtimeSinceStartup; this.m_previewUtility = new 
// NOTE(review): this chunk begins mid-method — the statements below are the tail of an
// initialization method (presumably InitPreview) whose header lies before this chunk.
// They configure the preview camera for a flat orthographic render and build the preview instance.
PreviewRenderUtility(true);
this.m_previewUtility.m_Camera.orthographic = true;
this.m_previewUtility.m_Camera.orthographicSize = 1;
// -2147483648 == 1 << 31: the camera renders ONLY objects on layer 31 (the preview layer).
this.m_previewUtility.m_Camera.cullingMask = -2147483648;
this.m_previewUtility.m_Camera.nearClipPlane = 0.01f;
this.m_previewUtility.m_Camera.farClipPlane = 1000f;
this.CreatePreviewInstances();
}
}

// Instantiates a hidden SkeletonAnimation GameObject used solely to render the inspector preview.
// The last-used skin is restored from EditorPrefs. Failures are swallowed (best-effort preview).
private void CreatePreviewInstances () {
    this.DestroyPreviewInstances();
    if (this.m_previewInstance == null) {
        try {
            // Restore the skin the user last previewed for this asset.
            string skinName = EditorPrefs.GetString(m_skeletonDataAssetGUID + "_lastSkin", "");
            m_previewInstance = SpineEditorUtilities.InstantiateSkeletonAnimation((SkeletonDataAsset)target, skinName).gameObject;
            // Keep the instance out of the scene hierarchy and out of saved data.
            m_previewInstance.hideFlags = HideFlags.HideAndDontSave;
            // 0x1f == layer 31, matching the preview camera's culling mask above.
            m_previewInstance.layer = 0x1f;
            m_skeletonAnimation = m_previewInstance.GetComponent<SkeletonAnimation>();
            m_skeletonAnimation.initialSkinName = skinName;
            m_skeletonAnimation.LateUpdate();
            m_skeletonData = m_skeletonAnimation.skeletonDataAsset.GetSkeletonData(true);
            // Renderer stays disabled except during the actual preview render (see DoRenderPreview).
            m_previewInstance.GetComponent<Renderer>().enabled = false;
            m_initialized = true;
            AdjustCameraGoals(true);
        } catch {
            // Best-effort: a broken asset simply yields no preview.
        }
    }
}

// Tears down the hidden preview GameObject and marks the preview uninitialized.
private void DestroyPreviewInstances () {
    if (this.m_previewInstance != null) {
        DestroyImmediate(this.m_previewInstance);
        m_previewInstance = null;
    }
    m_initialized = false;
}

// A preview is only offered when every atlas slot and the skeleton JSON are assigned.
public override bool HasPreviewGUI () {
    //TODO: validate json data
    for (int i = 0; i < atlasAssets.arraySize; i++) {
        var prop = atlasAssets.GetArrayElementAtIndex(i);
        if (prop.objectReferenceValue == null)
            return false;
    }
    return skeletonJSON.objectReferenceValue != null;
}

// Cached render target of the last preview pass; redrawn only when m_requireRefresh is set.
Texture m_previewTex = new Texture();

// Draws the interactive preview: renders (on Repaint, when dirty) then overlays the
// skin toolbar and the normalized-time bar, and handles scroll-wheel zoom.
public override void OnInteractivePreviewGUI (Rect r, GUIStyle background) {
    this.InitPreview();
    if (UnityEngine.Event.current.type == EventType.Repaint) {
        if (m_requireRefresh) {
            this.m_previewUtility.BeginPreview(r, background);
            this.DoRenderPreview(true);
            this.m_previewTex = this.m_previewUtility.EndPreview();
            m_requireRefresh = false;
        }
        if (this.m_previewTex != null)
            GUI.DrawTexture(r, m_previewTex, ScaleMode.StretchToFill, false);
    }
    DrawSkinToolbar(r);
    NormalizedTimeBar(r);
    //TODO: implement panning
    // this.previewDir = Drag2D(this.previewDir, r);
    MouseScroll(r);
}

// Camera animation targets: the camera eases toward these in AdjustCamera each Update.
float m_orthoGoal = 1;
Vector3 m_posGoal = new Vector3(0, 0, -10);
// Until this editor timestamp the goals keep being re-computed (covers animation mix-in).
double m_adjustFrameEndTime = 0;

// Recomputes the camera framing goals from the preview instance's render bounds.
// When calculateMixTime is set, framing keeps updating for the duration of the current mix.
private void AdjustCameraGoals (bool calculateMixTime) {
    if (this.m_previewInstance == null)
        return;
    if (calculateMixTime) {
        if (m_skeletonAnimation.state.GetCurrent(0) != null) {
            m_adjustFrameEndTime = EditorApplication.timeSinceStartup + m_skeletonAnimation.state.GetCurrent(0).Mix;
        }
    }
    GameObject go = this.m_previewInstance;
    Bounds bounds = go.GetComponent<Renderer>().bounds;
    m_orthoGoal = bounds.size.y;
    // Keep the camera 10 units in front of the skeleton (orthographic, so distance is cosmetic).
    m_posGoal = bounds.center + new Vector3(0, 0, -10);
}

// Convenience overload: recompute goals without touching the mix-time window.
private void AdjustCameraGoals () {
    AdjustCameraGoals(false);
}

// Eases the preview camera's size and position toward the current goals (10% per call).
private void AdjustCamera () {
    if (m_previewUtility == null)
        return;
    // While inside the mix window, keep re-deriving goals from the (changing) bounds.
    if (EditorApplication.timeSinceStartup < m_adjustFrameEndTime) {
        AdjustCameraGoals();
    }
    float orthoSet = Mathf.Lerp(this.m_previewUtility.m_Camera.orthographicSize, m_orthoGoal, 0.1f);
    this.m_previewUtility.m_Camera.orthographicSize = orthoSet;
    float dist = Vector3.Distance(m_previewUtility.m_Camera.transform.position, m_posGoal);
    if(dist > 0f) {
        Vector3 pos = Vector3.Lerp(this.m_previewUtility.m_Camera.transform.position, m_posGoal, 0.1f);
        pos.x = 0; // preview camera stays horizontally centered
        this.m_previewUtility.m_Camera.transform.position = pos;
        this.m_previewUtility.m_Camera.transform.rotation = Quaternion.identity;
        m_requireRefresh = true;
    }
}

// Advances the skeleton (edit mode only), renders it through the preview camera, and
// optionally draws origin axes and bounding-box gizmos. The renderer is enabled only
// for the duration of the render so the hidden instance never shows elsewhere.
private void DoRenderPreview (bool drawHandles) {
    GameObject go = this.m_previewInstance;
    if (m_requireRefresh && go != null) {
        go.GetComponent<Renderer>().enabled = true;
        if (EditorApplication.isPlaying) {
            // do nothing — play mode drives the animation itself
        } else {
            // Edit mode: step the animation by real elapsed time since the last render.
            m_skeletonAnimation.Update((Time.realtimeSinceStartup - m_lastTime));
        }
        m_lastTime = Time.realtimeSinceStartup;
        if (!EditorApplication.isPlaying)
            m_skeletonAnimation.LateUpdate();
        if (drawHandles) {
            // Origin cross, scaled to the asset so it spans the whole preview.
            Handles.SetCamera(m_previewUtility.m_Camera);
            Handles.color = m_originColor;
            Handles.DrawLine(new Vector3(-1000 * m_skeletonDataAsset.scale, 0, 0), new Vector3(1000 * m_skeletonDataAsset.scale, 0, 0));
            Handles.DrawLine(new Vector3(0, 1000 * m_skeletonDataAsset.scale, 0), new Vector3(0, -1000 * m_skeletonDataAsset.scale, 0));
        }
        this.m_previewUtility.m_Camera.Render();
        if (drawHandles) {
            // Overlay bounding-box attachments on top of the rendered skeleton.
            Handles.SetCamera(m_previewUtility.m_Camera);
            foreach (var slot in m_skeletonAnimation.skeleton.Slots) {
                var boundingBoxAttachment = slot.Attachment as BoundingBoxAttachment;
                if (boundingBoxAttachment != null) {
                    DrawBoundingBox (slot.Bone, boundingBoxAttachment);
                }
            }
        }
        go.GetComponent<Renderer>().enabled = false;
    }
}

// Draws a closed green polygon for one bounding-box attachment in world space.
// Vertices come in (x, y) pairs; the polygon is closed back to the first vertex at the end.
static void DrawBoundingBox (Bone bone, BoundingBoxAttachment box) {
    if (box.Vertices.Length <= 0) return; // Handle cases where user creates a BoundingBoxAttachment but doesn't actually define it.
    var worldVerts = new float[box.Vertices.Length];
    box.ComputeWorldVertices(bone, worldVerts);
    Handles.color = Color.green;
    Vector3 lastVert = Vector3.back;
    Vector3 vert = Vector3.back;
    Vector3 firstVert = new Vector3(worldVerts[0], worldVerts[1], -1);
    for (int i = 0; i < worldVerts.Length; i += 2) {
        vert.x = worldVerts[i];
        vert.y = worldVerts[i + 1];
        if (i > 0) {
            Handles.DrawLine(lastVert, vert);
        }
        lastVert = vert;
    }
    Handles.DrawLine(lastVert, firstVert);
}

// Editor tick: eases the camera, repaints while playing or dirty, and flushes any
// pending serialized-property changes.
void Update () {
    AdjustCamera();
    if (m_playing) {
        m_requireRefresh = true;
        Repaint();
    } else if (m_requireRefresh) {
        Repaint();
    } else {
        // only needed if using smooth menus
    }
    if (needToSerialize) {
        needToSerialize = false;
        serializedObject.ApplyModifiedProperties();
    }
}

// Draws the "Skin" label + popup button over the preview; clicking opens the skin menu.
void DrawSkinToolbar (Rect r) {
    if (m_skeletonAnimation == null)
        return;
    if (m_skeletonAnimation.skeleton != null) {
        string label = (m_skeletonAnimation.skeleton != null && m_skeletonAnimation.skeleton.Skin != null) ? m_skeletonAnimation.skeleton.Skin.Name : "default";
        Rect popRect = new Rect(r);
        popRect.y += 32;
        popRect.x += 4;
        popRect.height = 24;
        popRect.width = 40;
        EditorGUI.DropShadowLabel(popRect, new GUIContent("Skin", SpineEditorUtilities.Icons.skinsRoot));
        popRect.y += 11;
        popRect.width = 150;
        popRect.x += 44;
        if (GUI.Button(popRect, label, EditorStyles.popup)) {
            SelectSkinContext();
        }
    }
}

// Builds and shows the context menu listing every skin in the skeleton data.
void SelectSkinContext () {
    GenericMenu menu = new GenericMenu();
    foreach (Skin s in m_skeletonData.Skins) {
        menu.AddItem(new GUIContent(s.Name), this.m_skeletonAnimation.skeleton.Skin == s, SetSkin, (object)s);
    }
    menu.ShowAsContext();
}

// Menu callback: applies the chosen skin, refreshes the preview, and remembers the
// choice in EditorPrefs so CreatePreviewInstances can restore it next time.
void SetSkin (object o) {
    Skin skin = (Skin)o;
    m_skeletonAnimation.initialSkinName = skin.Name;
    m_skeletonAnimation.Reset();
    m_requireRefresh = true;
    EditorPrefs.SetString(m_skeletonDataAssetGUID + "_lastSkin", skin.Name);
}

// Draws the playback bar: a red playhead line at the current normalized time plus an
// event icon for each animation event frame.
void NormalizedTimeBar (Rect r) {
    if (m_skeletonAnimation == null)
        return;
    Rect barRect = new Rect(r);
    barRect.height = 32;
    barRect.x += 4;
    barRect.width -= 4;
    GUI.Box(barRect, "");
    Rect lineRect = new Rect(barRect);
    float width = lineRect.width;
    TrackEntry t = m_skeletonAnimation.state.GetCurrent(0);
    if (t != null) {
        // Fold looping playback back into [0, duration) before normalizing.
        int loopCount = (int)(t.Time / t.EndTime);
        float currentTime = t.Time - (t.EndTime * loopCount);
        float normalizedTime = currentTime / t.Animation.Duration;
        lineRect.x = barRect.x + (width * normalizedTime) - 0.5f;
        lineRect.width = 2;
        GUI.color = Color.red;
        GUI.DrawTexture(lineRect, EditorGUIUtility.whiteTexture);
        GUI.color = Color.white;
        for (int i = 0; i < m_animEvents.Count; i++) {
            //TODO: Tooltip
            //Spine.Event spev = animEvents[i];
            float fr = m_animEventFrames[i];
            var evRect = new Rect(barRect);
            // Center the icon on the event frame, clamped so it never leaves the bar on the left.
            evRect.x = Mathf.Clamp(((fr / t.Animation.Duration) * width) - (SpineEditorUtilities.Icons._event.width / 2), barRect.x, float.MaxValue);
            evRect.width = SpineEditorUtilities.Icons._event.width;
            evRect.height = SpineEditorUtilities.Icons._event.height;
            evRect.y += SpineEditorUtilities.Icons._event.height;
            GUI.DrawTexture(evRect, SpineEditorUtilities.Icons._event);
            //TODO: Tooltip
            /*
            UnityEngine.Event ev = UnityEngine.Event.current;
            if(ev.isMouse){
                if(evRect.Contains(ev.mousePosition)){
                    Rect tooltipRect = new Rect(evRect);
                    tooltipRect.width = 500;
                    tooltipRect.y -= 4;
                    tooltipRect.x += 4;
                    GUI.Label(tooltipRect, spev.Data.Name);
                }
            }
            */
        }
    }
}

// Scroll wheel over the preview rect adjusts the zoom goal (larger ortho size = zoom out).
void MouseScroll (Rect position) {
    UnityEngine.Event current = UnityEngine.Event.current;
    int controlID = GUIUtility.GetControlID(sliderHash, FocusType.Passive);
    switch (current.GetTypeForControl(controlID)) {
        case EventType.ScrollWheel:
            if (position.Contains(current.mousePosition)) {
                m_orthoGoal += current.delta.y;
                GUIUtility.hotControl = controlID;
                current.Use();
            }
            break;
    }
}

//TODO: Implement preview panning
/*
static Vector2 Drag2D(Vector2 scrollPosition, Rect position)
{
    int controlID = GUIUtility.GetControlID(sliderHash, FocusType.Passive);
    UnityEngine.Event current = UnityEngine.Event.current;
    switch (current.GetTypeForControl(controlID))
    {
        case EventType.MouseDown:
            if (position.Contains(current.mousePosition) && (position.width > 50f))
            {
                GUIUtility.hotControl = controlID;
                current.Use();
                EditorGUIUtility.SetWantsMouseJumping(1);
            }
            return scrollPosition;
        case EventType.MouseUp:
            if (GUIUtility.hotControl == controlID)
            {
                GUIUtility.hotControl = 0;
            }
            EditorGUIUtility.SetWantsMouseJumping(0);
            return scrollPosition;
        case EventType.MouseMove:
            return scrollPosition;
        case EventType.MouseDrag:
            if (GUIUtility.hotControl == controlID)
            {
                scrollPosition -= (Vector2) (((current.delta * (!current.shift ? ((float) 1) : ((float) 3))) / Mathf.Min(position.width, position.height)) * 140f);
                scrollPosition.y = Mathf.Clamp(scrollPosition.y, -90f, 90f);
                current.Use();
                GUI.changed = true;
            }
            return scrollPosition;
    }
    return scrollPosition;
}
*/

// Tab title for the preview area.
public override GUIContent GetPreviewTitle () {
    return new GUIContent("Preview");
}

// Preview toolbar: a speed slider (0..2) snapped to 0.25 increments, wired to timeScale.
public override void OnPreviewSettings () {
    if (!m_initialized) {
        // Not ready yet: draw a disabled-looking placeholder slider.
        GUILayout.HorizontalSlider(0, 0, 2, GUILayout.MaxWidth(64));
    } else {
        float speed = GUILayout.HorizontalSlider(m_skeletonAnimation.timeScale, 0, 2, GUILayout.MaxWidth(64));
        //snap to nearest 0.25
        float y = speed / 0.25f;
        int q = Mathf.RoundToInt(y);
        speed = q * 0.25f;
        m_skeletonAnimation.timeScale = speed;
    }
}

//TODO: Fix first-import error
//TODO: Update preview without thumbnail
// Renders the asset thumbnail shown in the project browser: frames the skeleton, then
// does an off-screen static preview render (no handle overlays).
public override Texture2D RenderStaticPreview (string assetPath, UnityEngine.Object[] subAssets, int width, int height) {
    var tex = new Texture2D(width, height, TextureFormat.ARGB32, false);
    this.InitPreview();
    if (this.m_previewUtility.m_Camera == null)
        return null;
    m_requireRefresh = true;
    this.DoRenderPreview(false);
    AdjustCameraGoals(false);
    // Jump the camera straight to the goals — no easing for a one-shot thumbnail.
    this.m_previewUtility.m_Camera.orthographicSize = m_orthoGoal / 2;
    this.m_previewUtility.m_Camera.transform.position = m_posGoal;
    this.m_previewUtility.BeginStaticPreview(new Rect(0, 0, width, height));
    this.DoRenderPreview(false);
    //TODO: Figure out why this is throwing errors on first attempt
    //		if(m_previewUtility != null){
    //			Handles.SetCamera(this.m_previewUtility.m_Camera);
    //			Handles.BeginGUI();
    //			GUI.DrawTexture(new Rect(40,60,width,height), SpineEditorUtilities.Icons.spine, ScaleMode.StretchToFill);
    //			Handles.EndGUI();
    //		}
    tex = this.m_previewUtility.EndStaticPreview();
    return tex;
}
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.WindowsAzure.Storage.Table;
using Orleans.Runtime;

namespace Orleans.AzureUtils
{
    /// <summary>
    /// One row of the OrleansSiloInstances Azure table: the membership/liveness record
    /// for a single silo. PartitionKey = DeploymentId; RowKey = "address-port-generation"
    /// (see <see cref="ConstructRowKey"/>), except for the special version row.
    /// </summary>
    internal class SiloInstanceTableEntry : TableEntity
    {
        public string DeploymentId { get; set; }    // PartitionKey
        public string Address { get; set; }         // RowKey
        public string Port { get; set; }            // RowKey
        public string Generation { get; set; }      // RowKey

        public string HostName { get; set; }        // Mandatory
        public string Status { get; set; }          // Mandatory

        public string ProxyPort { get; set; }       // Optional

        public string RoleName { get; set; }        // Optional - only for Azure role
        public string SiloName { get; set; }
        public string InstanceName { get; set; }    // For backward compatibility we leave the old column, until all clients update the code to new version.
        public string UpdateZone { get; set; }      // Optional - only for Azure role
        public string FaultZone { get; set; }       // Optional - only for Azure role

        public string SuspectingSilos { get; set; } // For liveness
        public string SuspectingTimes { get; set; } // For liveness

        public string StartTime { get; set; }       // Time this silo was started. For diagnostics.
        public string IAmAliveTime { get; set; }    // Time this silo updated it was alive. For diagnostics.

        public string MembershipVersion { get; set; } // Special version row (for serializing table updates).

        // We'll have a designated row with only MembershipVersion column.
        internal const string TABLE_VERSION_ROW = "VersionRow"; // Row key for version row.
        internal const char Seperator = '-'; // (sic) historical misspelling kept — it is part of the internal API surface

        // Builds the RowKey for a silo: "<ip>-<port>-<generation>".
        public static string ConstructRowKey(SiloAddress silo)
        {
            return String.Format("{0}-{1}-{2}", silo.Endpoint.Address, silo.Endpoint.Port, silo.Generation);
        }

        // Inverse of ConstructRowKey: parses "<ip>-<port>-<generation>" back into a SiloAddress.
        // Splits on the FIRST and LAST separator so an address containing '-' would still
        // leave port/generation intact. Any parse failure is wrapped with diagnostic context.
        internal static SiloAddress UnpackRowKey(string rowKey)
        {
            var debugInfo = "UnpackRowKey";
            try
            {
#if DEBUG
                debugInfo = String.Format("UnpackRowKey: RowKey={0}", rowKey);
                Trace.TraceInformation(debugInfo);
#endif
                int idx1 = rowKey.IndexOf(Seperator);
                int idx2 = rowKey.LastIndexOf(Seperator);
#if DEBUG
                debugInfo = String.Format("UnpackRowKey: RowKey={0} Idx1={1} Idx2={2}", rowKey, idx1, idx2);
#endif
                var addressStr = rowKey.Substring(0, idx1);
                var portStr = rowKey.Substring(idx1 + 1, idx2 - idx1 - 1);
                var genStr = rowKey.Substring(idx2 + 1);
#if DEBUG
                debugInfo = String.Format("UnpackRowKey: RowKey={0} -> Address={1} Port={2} Generation={3}", rowKey, addressStr, portStr, genStr);
                Trace.TraceInformation(debugInfo);
#endif
                IPAddress address = IPAddress.Parse(addressStr);
                int port = Int32.Parse(portStr);
                int generation = Int32.Parse(genStr);
                return SiloAddress.New(new IPEndPoint(address, port), generation);
            }
            catch (Exception exc)
            {
                throw new AggregateException("Error from " + debugInfo, exc);
            }
        }

        // Human-readable dump for logs; format differs for the version row vs. a silo row.
        public override string ToString()
        {
            var sb = new StringBuilder();
            if (RowKey.Equals(TABLE_VERSION_ROW))
            {
                // NOTE(review): DeploymentId is appended twice here (once bare, once labeled);
                // kept byte-identical for log-format compatibility.
                sb.Append("VersionRow [").Append(DeploymentId);
                sb.Append(" Deployment=").Append(DeploymentId);
                sb.Append(" MembershipVersion=").Append(MembershipVersion);
                sb.Append("]");
            }
            else
            {
                sb.Append("OrleansSilo [");
                sb.Append(" Deployment=").Append(DeploymentId);
                sb.Append(" LocalEndpoint=").Append(Address);
                sb.Append(" LocalPort=").Append(Port);
                sb.Append(" Generation=").Append(Generation);
                sb.Append(" Host=").Append(HostName);
                sb.Append(" Status=").Append(Status);
                sb.Append(" ProxyPort=").Append(ProxyPort);
                if (!string.IsNullOrEmpty(RoleName)) sb.Append(" RoleName=").Append(RoleName);
                sb.Append(" SiloName=").Append(SiloName);
                sb.Append(" UpgradeZone=").Append(UpdateZone);
                sb.Append(" FaultZone=").Append(FaultZone);
                if (!string.IsNullOrEmpty(SuspectingSilos)) sb.Append(" SuspectingSilos=").Append(SuspectingSilos);
                if (!string.IsNullOrEmpty(SuspectingTimes)) sb.Append(" SuspectingTimes=").Append(SuspectingTimes);
                sb.Append(" StartTime=").Append(StartTime);
                sb.Append(" IAmAliveTime=").Append(IAmAliveTime);
                sb.Append("]");
            }
            return sb.ToString();
        }
    }

    /// <summary>
    /// Manager for the OrleansSiloInstances Azure table: registration/activation/removal of
    /// silo rows, gateway discovery, and the conditional two-row (silo + version) updates
    /// that serialize membership-table changes.
    /// </summary>
    internal class OrleansSiloInstanceManager
    {
        public string TableName { get { return INSTANCE_TABLE_NAME; } }

        private const string INSTANCE_TABLE_NAME = "OrleansSiloInstances";

        // Status strings stored in the table; derived from SiloStatus enum names.
        private readonly string INSTANCE_STATUS_CREATED = SiloStatus.Created.ToString();  //"Created"
        private readonly string INSTANCE_STATUS_ACTIVE = SiloStatus.Active.ToString();    //"Active"
        private readonly string INSTANCE_STATUS_DEAD = SiloStatus.Dead.ToString();        //"Dead"

        private readonly AzureTableDataManager<SiloInstanceTableEntry> storage;
        private readonly ILogger logger;

        internal static TimeSpan initTimeout = AzureTableDefaultPolicies.TableCreationTimeout;

        public string DeploymentId { get; private set; }

        // Private: construction goes through GetManager so table init is always awaited.
        private OrleansSiloInstanceManager(string deploymentId, string storageConnectionString, ILoggerFactory loggerFactory)
        {
            DeploymentId = deploymentId;
            logger = loggerFactory.CreateLogger<OrleansSiloInstanceManager>();
            storage = new AzureTableDataManager<SiloInstanceTableEntry>(
                INSTANCE_TABLE_NAME, storageConnectionString, loggerFactory);
        }

        /// <summary>
        /// Factory: creates the manager and ensures the Azure table exists (bounded by
        /// initTimeout). Failures are logged and rethrown as OrleansException.
        /// </summary>
        public static async Task<OrleansSiloInstanceManager> GetManager(string deploymentId, string storageConnectionString, ILoggerFactory loggerFactory)
        {
            var instance = new OrleansSiloInstanceManager(deploymentId, storageConnectionString, loggerFactory);
            try
            {
                await instance.storage.InitTableAsync()
                    .WithTimeout(initTimeout);
            }
            catch (TimeoutException te)
            {
                string errorMsg = String.Format("Unable to create or connect to the Azure table in {0}", initTimeout);
                instance.logger.Error(ErrorCode.AzureTable_32, errorMsg, te);
                throw new OrleansException(errorMsg, te);
            }
            catch (Exception ex)
            {
                string errorMsg = String.Format("Exception trying to create or connect to the Azure table: {0}", ex.Message);
                instance.logger.Error(ErrorCode.AzureTable_33, errorMsg, ex);
                throw new OrleansException(errorMsg, ex);
            }
            return instance;
        }

        // Builds the designated version row for this deployment (RowKey = TABLE_VERSION_ROW).
        public SiloInstanceTableEntry CreateTableVersionEntry(int tableVersion)
        {
            return new SiloInstanceTableEntry
            {
                DeploymentId = DeploymentId,
                PartitionKey = DeploymentId,
                RowKey = SiloInstanceTableEntry.TABLE_VERSION_ROW,
                MembershipVersion = tableVersion.ToString(CultureInfo.InvariantCulture)
            };
        }

        // Upserts the silo row with status Created. Blocks up to TableOperationTimeout.
        public void RegisterSiloInstance(SiloInstanceTableEntry entry)
        {
            entry.Status = INSTANCE_STATUS_CREATED;
            logger.Info(ErrorCode.Runtime_Error_100270, "Registering silo instance: {0}", entry.ToString());
            storage.UpsertTableEntryAsync(entry)
                .WaitWithThrow(AzureTableDefaultPolicies.TableOperationTimeout);
        }

        // Upserts the silo row with status Dead. Blocks up to TableOperationTimeout.
        public void UnregisterSiloInstance(SiloInstanceTableEntry entry)
        {
            entry.Status = INSTANCE_STATUS_DEAD;
            logger.Info(ErrorCode.Runtime_Error_100271, "Unregistering silo instance: {0}", entry.ToString());
            storage.UpsertTableEntryAsync(entry)
                .WaitWithThrow(AzureTableDefaultPolicies.TableOperationTimeout);
        }

        // Upserts the silo row with status Active. Blocks up to TableOperationTimeout.
        public void ActivateSiloInstance(SiloInstanceTableEntry entry)
        {
            logger.Info(ErrorCode.Runtime_Error_100272, "Activating silo instance: {0}", entry.ToString());
            entry.Status = INSTANCE_STATUS_ACTIVE;
            storage.UpsertTableEntryAsync(entry)
                .WaitWithThrow(AzureTableDefaultPolicies.TableOperationTimeout);
        }

        // Returns gateway URIs for all active silos in this deployment that expose a proxy port.
        public async Task<IList<Uri>> FindAllGatewayProxyEndpoints()
        {
            IEnumerable<SiloInstanceTableEntry> gatewaySiloInstances = await FindAllGatewaySilos();
            return gatewaySiloInstances.Select(ConvertToGatewayUri).ToList();
        }

        /// <summary>
        /// Represent a silo instance entry in the gateway URI format.
        /// </summary>
        /// <param name="gateway">The input silo instance</param>
        /// <returns></returns>
        private static Uri ConvertToGatewayUri(SiloInstanceTableEntry gateway)
        {
            // Missing/unparseable ProxyPort or Generation silently fall back to 0.
            int proxyPort = 0;
            if (!string.IsNullOrEmpty(gateway.ProxyPort))
                int.TryParse(gateway.ProxyPort, out proxyPort);

            int gen = 0;
            if (!string.IsNullOrEmpty(gateway.Generation))
                int.TryParse(gateway.Generation, out gen);

            SiloAddress address = SiloAddress.New(new IPEndPoint(IPAddress.Parse(gateway.Address), proxyPort), gen);
            return address.ToGatewayUri();
        }

        // Queries the table for rows in this deployment with Status == Active and a
        // non-zero ProxyPort (i.e. silos acting as client gateways).
        private async Task<IEnumerable<SiloInstanceTableEntry>> FindAllGatewaySilos()
        {
            if (logger.IsEnabled(LogLevel.Debug)) logger.Debug(ErrorCode.Runtime_Error_100277, "Searching for active gateway silos for deployment {0}.", this.DeploymentId);
            const string zeroPort = "0";

            try
            {
                string filterOnPartitionKey = TableQuery.GenerateFilterCondition(nameof(SiloInstanceTableEntry.PartitionKey), QueryComparisons.Equal,
                    this.DeploymentId);
                string filterOnStatus = TableQuery.GenerateFilterCondition(nameof(SiloInstanceTableEntry.Status), QueryComparisons.Equal,
                    INSTANCE_STATUS_ACTIVE);
                string filterOnProxyPort = TableQuery.GenerateFilterCondition(nameof(SiloInstanceTableEntry.ProxyPort), QueryComparisons.NotEqual,
                    zeroPort);
                string query = TableQuery.CombineFilters(filterOnPartitionKey, TableOperators.And,
                    TableQuery.CombineFilters(filterOnStatus, TableOperators.And, filterOnProxyPort));
                var queryResults = await storage.ReadTableEntriesAndEtagsAsync(query)
                    .WithTimeout(AzureTableDefaultPolicies.TableOperationTimeout);

                List<SiloInstanceTableEntry> gatewaySiloInstances = queryResults.Select(entity => entity.Item1).ToList();

                logger.Info(ErrorCode.Runtime_Error_100278, "Found {0} active Gateway Silos for deployment {1}.", gatewaySiloInstances.Count, this.DeploymentId);
                return gatewaySiloInstances;
            }
            catch (Exception exc)
            {
                logger.Error(ErrorCode.Runtime_Error_100331, string.Format("Error searching for active gateway silos for deployment {0} ", this.DeploymentId), exc);
                throw;
            }
        }

        // Diagnostic dump of all silo rows in this deployment, sorted by SiloName
        // (null entries/names sort first).
        public async Task<string> DumpSiloInstanceTable()
        {
            var queryResults = await storage.ReadAllTableEntriesForPartitionAsync(this.DeploymentId);

            SiloInstanceTableEntry[] entries = queryResults.Select(entry => entry.Item1).ToArray();

            var sb = new StringBuilder();
            sb.Append(String.Format("Deployment {0}. Silos: ", DeploymentId));

            // Loop through the results, displaying information about the entity
            Array.Sort(entries,
                (e1, e2) =>
                {
                    if (e1 == null) return (e2 == null) ? 0 : -1;
                    if (e2 == null) return (e1 == null) ? 0 : 1;
                    if (e1.SiloName == null) return (e2.SiloName == null) ? 0 : -1;
                    if (e2.SiloName == null) return (e1.SiloName == null) ? 0 : 1;
                    return String.CompareOrdinal(e1.SiloName, e2.SiloName);
                });
            foreach (SiloInstanceTableEntry entry in entries)
            {
                sb.AppendLine(String.Format("[IP {0}:{1}:{2}, {3}, Instance={4}, Status={5}]", entry.Address, entry.Port, entry.Generation,
                    entry.HostName, entry.SiloName, entry.Status));
            }
            return sb.ToString();
        }

        #region Silo instance table storage operations

        // Merges (partial update) the given row unconditionally (ANY_ETAG: no optimistic check).
        internal Task<string> MergeTableEntryAsync(SiloInstanceTableEntry data)
        {
            return storage.MergeTableEntryAsync(data, AzureStorageUtils.ANY_ETAG); // we merge this without checking eTags.
        }

        // Point-read of a single row; returns (entry, etag).
        internal Task<Tuple<SiloInstanceTableEntry, string>> ReadSingleTableEntryAsync(string partitionKey, string rowKey)
        {
            return storage.ReadSingleTableEntryAsync(partitionKey, rowKey);
        }

        // Deletes every row in the deployment's partition, batching to respect the
        // MAX_BULK_UPDATE_ROWS limit. Returns the number of rows deleted.
        internal async Task<int> DeleteTableEntries(string deploymentId)
        {
            if (deploymentId == null) throw new ArgumentNullException("deploymentId");

            var entries = await storage.ReadAllTableEntriesForPartitionAsync(deploymentId);
            var entriesList = new List<Tuple<SiloInstanceTableEntry, string>>(entries);
            if (entriesList.Count <= AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS)
            {
                await storage.DeleteTableEntriesAsync(entriesList);
            }
            else
            {
                List<Task> tasks = new List<Task>();
                foreach (var batch in entriesList.BatchIEnumerable(AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS))
                {
                    tasks.Add(storage.DeleteTableEntriesAsync(batch));
                }
                await Task.WhenAll(tasks);
            }
            return entriesList.Count();
        }

        // Reads exactly the silo's own row plus the version row (1 or 2 results) and
        // validates that exactly one version row came back; throws KeyNotFoundException otherwise.
        internal async Task<List<Tuple<SiloInstanceTableEntry, string>>> FindSiloEntryAndTableVersionRow(SiloAddress siloAddress)
        {
            string rowKey = SiloInstanceTableEntry.ConstructRowKey(siloAddress);

            string filterOnPartitionKey = TableQuery.GenerateFilterCondition(nameof(SiloInstanceTableEntry.PartitionKey), QueryComparisons.Equal,
                this.DeploymentId);
            string filterOnRowKey1 = TableQuery.GenerateFilterCondition(nameof(SiloInstanceTableEntry.RowKey), QueryComparisons.Equal,
                rowKey);
            string filterOnRowKey2 = TableQuery.GenerateFilterCondition(nameof(SiloInstanceTableEntry.RowKey), QueryComparisons.Equal,
                SiloInstanceTableEntry.TABLE_VERSION_ROW);
            string query = TableQuery.CombineFilters(filterOnPartitionKey, TableOperators.And,
                TableQuery.CombineFilters(filterOnRowKey1, TableOperators.Or, filterOnRowKey2));

            var queryResults = await storage.ReadTableEntriesAndEtagsAsync(query);

            var asList = queryResults.ToList();
            if (asList.Count < 1 || asList.Count > 2)
                throw new KeyNotFoundException(string.Format("Could not find table version row or found too many entries. Was looking for key {0}, found = {1}", siloAddress.ToLongString(), Utils.EnumerableToString(asList)));

            int numTableVersionRows = asList.Count(tuple => tuple.Item1.RowKey == SiloInstanceTableEntry.TABLE_VERSION_ROW);
            if (numTableVersionRows < 1)
                throw new KeyNotFoundException(string.Format("Did not read table version row. Read = {0}", Utils.EnumerableToString(asList)));
            if (numTableVersionRows > 1)
                throw new KeyNotFoundException(string.Format("Read {0} table version rows, while was expecting only 1. Read = {1}", numTableVersionRows, Utils.EnumerableToString(asList)));

            return asList;
        }

        // Reads all rows in the deployment partition and validates that exactly one
        // version row is present; throws KeyNotFoundException otherwise.
        internal async Task<List<Tuple<SiloInstanceTableEntry, string>>> FindAllSiloEntries()
        {
            var queryResults = await storage.ReadAllTableEntriesForPartitionAsync(this.DeploymentId);

            var asList = queryResults.ToList();
            if (asList.Count < 1)
                throw new KeyNotFoundException(string.Format("Could not find enough rows in the FindAllSiloEntries call. Found = {0}", Utils.EnumerableToString(asList)));

            int numTableVersionRows = asList.Count(tuple => tuple.Item1.RowKey == SiloInstanceTableEntry.TABLE_VERSION_ROW);
            if (numTableVersionRows < 1)
                throw new KeyNotFoundException(string.Format("Did not find table version row. Read = {0}", Utils.EnumerableToString(asList)));
            if (numTableVersionRows > 1)
                throw new KeyNotFoundException(string.Format("Read {0} table version rows, while was expecting only 1. Read = {1}", numTableVersionRows, Utils.EnumerableToString(asList)));

            return asList;
        }

        /// <summary>
        /// Insert (create new) row entry.
        /// Creates the version row (version 0) iff it does not already exist.
        /// Returns false when the row exists or a storage contention error occurred.
        /// </summary>
        internal async Task<bool> TryCreateTableVersionEntryAsync()
        {
            try
            {
                var versionRow = await storage.ReadSingleTableEntryAsync(DeploymentId, SiloInstanceTableEntry.TABLE_VERSION_ROW);
                if (versionRow != null && versionRow.Item1 != null)
                {
                    return false;
                }

                SiloInstanceTableEntry entry = CreateTableVersionEntry(0);
                await storage.CreateTableEntryAsync(entry);
                return true;
            }
            catch (Exception exc)
            {
                HttpStatusCode httpStatusCode;
                string restStatus;
                if (!AzureStorageUtils.EvaluateException(exc, out httpStatusCode, out restStatus)) throw;

                // NOTE(review): message says "InsertSiloEntryConditionally" — looks copy-pasted
                // from the method below; runtime string kept as-is.
                if (logger.IsEnabled(LogLevel.Trace)) logger.Trace("InsertSiloEntryConditionally failed with httpStatusCode={0}, restStatus={1}", httpStatusCode, restStatus);
                if (AzureStorageUtils.IsContentionError(httpStatusCode)) return false;

                throw;
            }
        }

        /// <summary>
        /// Insert (create new) row entry
        /// </summary>
        /// <param name="siloEntry">Silo Entry to be written</param>
        /// <param name="tableVersionEntry">Version row to update</param>
        /// <param name="tableVersionEtag">Version row eTag</param>
        internal async Task<bool> InsertSiloEntryConditionally(SiloInstanceTableEntry siloEntry, SiloInstanceTableEntry tableVersionEntry, string tableVersionEtag)
        {
            try
            {
                await storage.InsertTwoTableEntriesConditionallyAsync(siloEntry, tableVersionEntry, tableVersionEtag);
                return true;
            }
            catch (Exception exc)
            {
                HttpStatusCode httpStatusCode;
                string restStatus;
                if (!AzureStorageUtils.EvaluateException(exc, out httpStatusCode, out restStatus)) throw;

                if (logger.IsEnabled(LogLevel.Trace)) logger.Trace("InsertSiloEntryConditionally failed with httpStatusCode={0}, restStatus={1}", httpStatusCode, restStatus);
                if (AzureStorageUtils.IsContentionError(httpStatusCode)) return false;

                throw;
            }
        }

        /// <summary>
        /// Conditionally update the row for this entry, but only if the eTag matches with the current record in data store
        /// </summary>
        /// <param name="siloEntry">Silo Entry to be written</param>
        /// <param name="entryEtag">ETag value for the entry being updated</param>
        /// <param name="tableVersionEntry">Version row to update</param>
        /// <param name="versionEtag">ETag value for the version row</param>
        /// <returns></returns>
        internal async Task<bool> UpdateSiloEntryConditionally(SiloInstanceTableEntry siloEntry, string entryEtag, SiloInstanceTableEntry tableVersionEntry, string versionEtag)
        {
            try
            {
                await storage.UpdateTwoTableEntriesConditionallyAsync(siloEntry, entryEtag, tableVersionEntry, versionEtag);
                return true;
            }
            catch (Exception exc)
            {
                HttpStatusCode httpStatusCode;
                string restStatus;
                if (!AzureStorageUtils.EvaluateException(exc, out httpStatusCode, out restStatus)) throw;

                if (logger.IsEnabled(LogLevel.Trace)) logger.Trace("UpdateSiloEntryConditionally failed with httpStatusCode={0}, restStatus={1}", httpStatusCode, restStatus);
                if (AzureStorageUtils.IsContentionError(httpStatusCode)) return false;

                throw;
            }
        }

        #endregion
    }
}
using System;
using System.Collections.Generic;
using NLog;
using NBagOfTricks;
using Nebulator.Common;


namespace Nebulator.OSC
{
    /// <summary>
    /// Abstraction layer between OSC comm and Nebulator steps. aka OSC client.
    /// Translates Step objects into OSC messages and tracks pending note-offs.
    /// </summary>
    public class OscOutput : IOutputDevice
    {
        #region Fields
        /// <summary>My logger.</summary>
        readonly Logger _logger = LogManager.GetLogger("OscOutput");

        /// <summary>OSC output device. Null until Init() succeeds.</summary>
        NebOsc.Output? _oscOutput;

        /// <summary>Access synchronizer.</summary>
        readonly object _lock = new();

        /// <summary>Notes to stop later (scheduled by StepNoteOn with a duration).</summary>
        readonly List<StepNoteOff> _stops = new();
        #endregion

        #region Properties
        /// <inheritdoc />
        public string DeviceName { get; private set; } = Definitions.UNKNOWN_STRING;

        /// <inheritdoc />
        public DeviceType DeviceType => DeviceType.OscOut;
        #endregion

        #region Lifecycle
        /// <summary>
        /// Constructor. Does no work; call Init() to open the device.
        /// </summary>
        public OscOutput()
        {
        }

        /// <summary>
        /// Opens the OSC output using the "ip:port" value from user settings.
        /// Any previously opened output is disposed first.
        /// </summary>
        /// <returns>True if the device was opened successfully.</returns>
        public bool Init()
        {
            bool inited = false;

            _oscOutput?.Dispose();
            _oscOutput = null;

            // Check for properly formed url:port.
            List<string> parts = UserSettings.TheSettings.OscOut.SplitByToken(":");
            if (parts.Count == 2)
            {
                if (int.TryParse(parts[1], out int port))
                {
                    string ip = parts[0];
                    _oscOutput = new NebOsc.Output() { RemoteIP = ip, RemotePort = port };
                    if (_oscOutput.Init())
                    {
                        inited = true;
                        DeviceName = _oscOutput.DeviceName;
                        _oscOutput.LogEvent += OscOutput_LogEvent;
                    }
                    else
                    {
                        _logger.Error($"Init OSC out failed");
                        inited = false;
                    }
                }
            }

            return inited;
        }

        /// <summary>
        /// Resource clean up.
        /// </summary>
        public void Dispose()
        {
            _oscOutput?.Dispose();
            _oscOutput = null;
        }
        #endregion

        #region Public functions
        /// <inheritdoc />
        public void Housekeep()
        {
            // Send any stops due.
            _stops.ForEach(s => { s.Expiry--; if (s.Expiry < 0) Send(s); });

            // Reset.
            _stops.RemoveAll(s => s.Expiry < 0);
        }

        /// <inheritdoc />
        public bool Send(Step step)
        {
            bool ret = true;

            // Critical code section.
            lock (_lock)
            {
                if (_oscOutput is not null)
                {
                    NebOsc.Message? msg = null;
                    bool ignored = false; // step types deliberately not mapped to OSC

                    switch (step)
                    {
                        case StepNoteOn non:
                            // /noteon/ channel notenum vel
                            msg = new NebOsc.Message() { Address = "/noteon" };
                            msg.Data.Add(non.ChannelNumber);
                            msg.Data.Add(non.NoteNumber);
                            msg.Data.Add(non.VelocityToPlay);

                            if (non.Duration.TotalSubdivs > 0) // specific duration
                            {
                                // Remove any lingering note offs and add a fresh one.
                                _stops.RemoveAll(s => s.NoteNumber == non.NoteNumber && s.ChannelNumber == non.ChannelNumber);

                                _stops.Add(new StepNoteOff()
                                {
                                    Device = non.Device,
                                    ChannelNumber = non.ChannelNumber,
                                    NoteNumber = MathUtils.Constrain(non.NoteNumber, 0, Definitions.MAX_MIDI),
                                    Expiry = non.Duration.TotalSubdivs
                                });
                            }
                            break;

                        case StepNoteOff noff:
                            // /noteoff/ channel notenum
                            msg = new NebOsc.Message() { Address = "/noteoff" };
                            msg.Data.Add(noff.ChannelNumber);
                            msg.Data.Add(noff.NoteNumber);
                            break;

                        case StepControllerChange ctl:
                            // /controller/ channel ctlnum val
                            msg = new NebOsc.Message() { Address = "/controller" };
                            msg.Data.Add(ctl.ChannelNumber);
                            msg.Data.Add(ctl.ControllerId);
                            msg.Data.Add(ctl.Value);
                            break;

                        case StepPatch:
                            // ignore n/a - patch changes have no OSC mapping.
                            ignored = true;
                            break;

                        default:
                            break;
                    }

                    if (msg is not null)
                    {
                        if (_oscOutput.Send(msg))
                        {
                            if (UserSettings.TheSettings.MonitorOutput)
                            {
                                _logger.Trace($"{TraceCat.SND} OscOut:{step}");
                            }
                        }
                        else
                        {
                            _logger.Error($"Send failed");
                        }
                    }
                    else if (!ignored)
                    {
                        // BUGFIX: previously this error also fired for deliberately ignored
                        // StepPatch steps, producing spurious "Send failed" log entries.
                        // Now only truly unhandled step types are reported.
                        _logger.Error($"Send failed");
                    }
                }
            }

            return ret;
        }

        /// <inheritdoc />
        public void Kill(int channel)
        {
        }

        /// <inheritdoc />
        public void Start()
        {
        }

        /// <inheritdoc />
        public void Stop()
        {
        }
        #endregion

        #region Private functions
        /// <summary>
        /// OSC has something to say. Forwards device log events to our logger.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void OscOutput_LogEvent(object? sender, NebOsc.LogEventArgs e)
        {
            if (e.IsError)
            {
                _logger.Error(e.Message);
            }
            else
            {
                _logger.Info(e.Message);
            }
        }
        #endregion
    }
}
// // X509Chain.cs: X.509 Certificate Path // This is a VERY simplified and minimal version // used for // Authenticode support // TLS/SSL support // // Author: // Sebastien Pouliot <sebastien@ximian.com> // // (C) 2003 Motus Technologies Inc. (http://www.motus.com) // Copyright (C) 2004 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using System;
using System.Security;
using System.Security.Permissions;
#if !INSIDE_CORLIB
using System.Net;
#endif

using Mono.Security.X509.Extensions;

namespace Mono.Security.X509 {

#if INSIDE_CORLIB
	internal
#else
	public
#endif
	// Builds and validates an X.509 certificate chain from a leaf up to a (possibly
	// untrusted) root. Deliberately minimal: see the file header for scope caveats.
	class X509Chain {

		private X509CertificateCollection roots;   // Mono-format trust anchors (lazy)
		private System.Security.Cryptography.X509Certificates.X509Certificate2Collection rootsMS; // machine Root store anchors (lazy)
		private X509CertificateCollection certs;   // candidate intermediates supplied by caller
		private X509Certificate _root;             // resolved root of the last Build()
		private X509CertificateCollection _chain;  // built or caller-supplied chain
		private X509ChainStatusFlags _status;      // outcome of the last Build()

		// constructors

		public X509Chain ()
		{
			certs = new X509CertificateCollection ();
		}

		// get a pre-built chain; Build() will then only verify it rather than construct it
		public X509Chain (X509CertificateCollection chain) : this ()
		{
			_chain = new X509CertificateCollection ();
			_chain.AddRange (chain);
		}

		// properties

		public X509CertificateCollection Chain {
			get { return _chain; }
		}

		// the root of the specified certificate (may not be trusted!)
		public X509Certificate Root {
			get { return _root; }
		}

		public X509ChainStatusFlags Status {
			get { return _status; }
		}

		// Trust anchors from the Windows LocalMachine Root store, loaded lazily.
		public System.Security.Cryptography.X509Certificates.X509Certificate2Collection TrustAnchorsMS {
			get {
				if (rootsMS == null) {
					rootsMS = new System.Security.Cryptography.X509Certificates.X509Certificate2Collection();
					var storeLocalMachine = new System.Security.Cryptography.X509Certificates.X509Store(System.Security.Cryptography.X509Certificates.StoreName.Root, System.Security.Cryptography.X509Certificates.StoreLocation.LocalMachine);
					storeLocalMachine.Open(System.Security.Cryptography.X509Certificates.OpenFlags.ReadOnly);
					rootsMS.AddRange(storeLocalMachine.Certificates);
					storeLocalMachine.Close();
					// NOTE(review): redundant early return (the fall-through below returns the
					// same field) — kept byte-identical.
					return rootsMS;
				}
				return rootsMS;
			}
			[SecurityPermission(SecurityAction.Demand, Flags = SecurityPermissionFlag.ControlPolicy)]
			set { rootsMS = value; }
		}

		// Trust anchors from the Mono X509 trusted-root store, loaded lazily.
		public X509CertificateCollection TrustAnchors {
			get {
				if (roots == null) {
					roots = new X509CertificateCollection ();
					roots.AddRange (X509StoreManager.TrustedRootCertificates);
					return roots;
				}
				return roots;
			}
			[SecurityPermission (SecurityAction.Demand, Flags=SecurityPermissionFlag.ControlPolicy)]
			set { roots = value; }
		}

		// methods

		// Adds one candidate certificate for chain building (e.g. an intermediate CA).
		public void LoadCertificate (X509Certificate x509)
		{
			certs.Add (x509);
		}

		// Adds a batch of candidate certificates for chain building.
		public void LoadCertificates (X509CertificateCollection collection)
		{
			certs.AddRange (collection);
		}

		// Linear scan of the loaded certificates for an exact issuer-name match; null if absent.
		public X509Certificate FindByIssuerName (string issuerName)
		{
			foreach (X509Certificate x in certs) {
				if (x.IssuerName == issuerName)
					return x;
			}
			return null;
		}

		// Builds (or verifies a supplied) chain for the leaf, resolves the root, and
		// validates dates. Returns true only when _status ends as NoError; the specific
		// failure is left in Status.
		public bool Build (X509Certificate leaf)
		{
			_status = X509ChainStatusFlags.NoError;
			if (_chain == null) {
				// chain not supplied - we must build it ourselves
				_chain = new X509CertificateCollection ();
				X509Certificate x = leaf;
				X509Certificate tmp = x;
				// Walk issuer links until we hit a self-signed cert or run out of parents.
				while ((x != null) && (!x.IsSelfSigned)) {
					tmp = x; // last valid
					_chain.Add (x);
					x = FindCertificateParent (x);
				}

				// find a trusted root
				_root = FindCertificateRoot (tmp);
			}
			else {
				// chain supplied - still have to check signatures!
				int last = _chain.Count;
				if (last > 0) {
					if (IsParent (leaf, _chain [0])) {
						int i = 1;
						for (; i < last; i++) {
							if (!IsParent (_chain [i-1], _chain [i]))
								break;
						}
						if (i == last)
							_root = FindCertificateRoot (_chain [last - 1]);
					}
				}
				else {
					// is the leaf a root ? (trusted or untrusted)
					_root = FindCertificateRoot (leaf);
				}
			}

			// validate the chain
			if ((_chain != null) && (_status == X509ChainStatusFlags.NoError)) {
				foreach (X509Certificate x in _chain) {
					// validate dates for each certificate in the chain
					// note: we DO NOT check for nested date/time
					if (!IsValid (x)) {
						return false;
					}
				}
				// check leaf
				if (!IsValid (leaf)) {
					// switch status code if the failure is expiration
					if (_status == X509ChainStatusFlags.NotTimeNested)
						_status = X509ChainStatusFlags.NotTimeValid;
					return false;
				}
				// check root
				if ((_root != null) && !IsValid (_root)) {
					return false;
				}
			}
			return (_status == X509ChainStatusFlags.NoError);
		}

		// Clears all per-build state and forces trust anchors to reload on next access.
		public void Reset ()
		{
			_status = X509ChainStatusFlags.NoError;
			roots = null; // this force a reload
			rootsMS = null;
			certs.Clear ();
			if (_chain != null)
				_chain.Clear ();
		}

		// private stuff

		// Date validity check for one certificate; sets _status on failure.
		private bool IsValid (X509Certificate cert)
		{
			if (!cert.IsCurrent) {
				// FIXME: nesting isn't very well implemented
				_status = X509ChainStatusFlags.NotTimeNested;
				return false;
			}

			// TODO - we should check for CRITICAL but unknown extensions
			// X509ChainStatusFlags.InvalidExtension
#if !INSIDE_CORLIB
			if (ServicePointManager.CheckCertificateRevocationList) {
				// TODO - check revocation (CRL, OCSP ...)
				// X509ChainStatusFlags.RevocationStatusUnknown
				// X509ChainStatusFlags.Revoked
			}
#endif
			return true;
		}

		// Finds, among the loaded certificates, one that is the issuer of (and signed) child.
		private X509Certificate FindCertificateParent (X509Certificate child)
		{
			foreach (X509Certificate potentialParent in certs) {
				if (IsParent (child, potentialParent))
					return potentialParent;
			}
			return null;
		}

		// Resolves the chain's root: the candidate itself if trusted (Mono or MS store),
		// a trust-store cert that signed it, or the candidate as an UntrustedRoot if
		// self-signed. Sets PartialChain when no root can be determined.
		private X509Certificate FindCertificateRoot (X509Certificate potentialRoot)
		{
			if (potentialRoot == null) {
				_status = X509ChainStatusFlags.PartialChain;
				return null;
			}

			// if the trusted root is in the chain
			if (IsTrusted (potentialRoot)) {
				return potentialRoot;
			}

			if (IsTrustedMS(new System.Security.Cryptography.X509Certificates.X509Certificate2(potentialRoot.RawData))) {
				return potentialRoot;
			}

			// if the root isn't in the chain - try the machine Root store anchors
			foreach (System.Security.Cryptography.X509Certificates.X509Certificate2 root in TrustAnchorsMS)
			{
				var rootNate = new X509Certificate(root.RawData);
				if (IsParent(potentialRoot, rootNate))
				{
					return rootNate;
				}
			}

			// if the root isn't in the chain - try the Mono store anchors
			foreach (X509Certificate root in TrustAnchors) {
				if (IsParent (potentialRoot, root)) {
					return root;
				}
			}

			// is it a (untrusted) root ?
			if (potentialRoot.IsSelfSigned) {
				_status = X509ChainStatusFlags.UntrustedRoot;
				return potentialRoot;
			}

			_status = X509ChainStatusFlags.PartialChain;
			return null;
		}

		// Membership test against the machine Root store anchors.
		private bool IsTrustedMS(System.Security.Cryptography.X509Certificates.X509Certificate2 potentialTrusted)
		{
			return TrustAnchorsMS.Contains(potentialTrusted);
		}

		// Membership test against the Mono trusted-root anchors.
		private bool IsTrusted (X509Certificate potentialTrusted)
		{
			return TrustAnchors.Contains (potentialTrusted);
		}

		// True when parent issued AND signed child. Non-trusted v3 parents must carry
		// BasicConstraints CA=true (MS02-050); a violation sets _status but — note — does
		// not by itself return false here; only a bad signature does.
		private bool IsParent (X509Certificate child, X509Certificate parent)
		{
			if (child.IssuerName != parent.SubjectName)
				return false;

			// parent MUST have the Basic Constraint CA=true (except for trusted roots)
			// see why at http://www.microsoft.com/technet/security/bulletin/MS02-050.asp
			if ((parent.Version > 2) && (!IsTrusted (parent))) {
				// TODO: we do not support pathLenConstraint
				X509Extension ext = parent.Extensions ["2.5.29.19"];
				if (ext != null) {
					BasicConstraintsExtension bc = new BasicConstraintsExtension (ext);
					if (!bc.CertificateAuthority)
						_status = X509ChainStatusFlags.InvalidBasicConstraints;
				}
				else
					_status = X509ChainStatusFlags.InvalidBasicConstraints;
			}

			if (!child.VerifySignature (parent.RSA)) {
				_status = X509ChainStatusFlags.NotSignatureValid;
				return false;
			}
			return true;
		}
	}
}
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
using static MessagePack.MessagePackSerializer;

namespace MinChain
{
    /// <summary>
    /// Maintains the set of peer TCP connections: accepts inbound peers,
    /// dials outbound peers (with automatic 30s retry), and exchanges
    /// MessagePack-serialized <see cref="Message"/>s over length-prefixed
    /// chunks (ReadChunkAsync / WriteChunkAsync).
    /// A peer's id is its index in <see cref="peers"/>; disconnected slots
    /// are nulled out (never removed) so ids stay stable.
    /// </summary>
    public class ConnectionManager : IDisposable
    {
        static readonly ILogger logger = Logging.Logger<ConnectionManager>();

        public const int ListenBacklog = 20;

        /// <summary>Raised with the new peer id once a connection is registered.</summary>
        public event Action<int> NewConnectionEstablished;

        /// <summary>Raised for every message received from a peer.</summary>
        public event Func<Message, int, Task> MessageReceived;

        // Guarded by lock(peers) for all reads/writes of the list structure.
        readonly List<ConnectionInfo> peers = new List<ConnectionInfo>();

        Task listenTask;
        CancellationTokenSource tokenSource;
        CancellationToken token;
        SemaphoreSlim sendLock;

        class ConnectionInfo
        {
            public ConnectionInfo(TcpClient tcpClient)
            {
                Client = tcpClient;
                Stream = tcpClient.GetStream();
            }

            public TcpClient Client { get; }
            public NetworkStream Stream { get; }

            // NOTE(review): not used anywhere in this class; kept for
            // interface stability - confirm before removing.
            public Task LastWrite { get; set; } = Task.CompletedTask;
        }

        /// <summary>
        /// Starts the manager; when <paramref name="localEndpoint"/> is
        /// provided, also begins accepting inbound connections on it.
        /// </summary>
        public void Start(IPEndPoint localEndpoint = null)
        {
            tokenSource = new CancellationTokenSource();
            token = tokenSource.Token;
            if (localEndpoint != null) listenTask = Listen(localEndpoint);
            sendLock = new SemaphoreSlim(1);
        }

        /// <summary>Stops listening and closes every peer connection.</summary>
        public void Dispose()
        {
            if (!tokenSource.IsNull())
            {
                logger.LogInformation("Stop listening.");
                tokenSource.Cancel();
                tokenSource.Dispose();
                tokenSource = null;
            }

            lock (peers)
            {
                peers.ForEach(x => x?.Client.Dispose());
                peers.Clear();
            }
        }

        /// <summary>
        /// Accept loop: runs until cancellation, registering every accepted
        /// client via <see cref="AddPeer"/>.
        /// </summary>
        async Task Listen(IPEndPoint localEndpoint)
        {
            var listener = new TcpListener(
                localEndpoint.Address, localEndpoint.Port);

            logger.LogInformation($"Start listening on {localEndpoint}");
            try { listener.Start(ListenBacklog); }
            catch (SocketException exp)
            {
                logger.LogError("Error listening server port", exp);
                return;
            }

            // tcs completes (cancelled) when the manager is disposed, so the
            // WhenAny below wakes up even with no pending client.
            var tcs = new TaskCompletionSource<int>();
            using (token.Register(tcs.SetCanceled))
            {
                while (!token.IsCancellationRequested)
                {
                    var acceptTask = listener.AcceptTcpClientAsync();
                    if ((await Task.WhenAny(acceptTask, tcs.Task)).IsCanceled)
                        break;

                    TcpClient peer;
                    try
                    {
                        // BUGFIX: 'acceptTask.Result' wraps a failure in
                        // AggregateException, which the SocketException
                        // handler below could never catch. Awaiting the
                        // (already completed) task unwraps the exception.
                        peer = await acceptTask;
                    }
                    catch (SocketException exp)
                    {
                        logger.LogInformation(
                            "Failed to accept new client.", exp);
                        continue;
                    }

                    AddPeer(peer);
                }
            }

            listener.Stop();
        }

        /// <summary>
        /// Connects to <paramref name="endpoint"/>; on failure schedules a
        /// retry after 30 seconds (fire-and-forget) instead of throwing.
        /// </summary>
        public async Task ConnectToAsync(IPEndPoint endpoint)
        {
            var cl = new TcpClient(AddressFamily.InterNetwork);
            try { await cl.ConnectAsync(endpoint.Address, endpoint.Port); }
            catch (SocketException exp)
            {
                logger.LogInformation(
                    $"Failed to connect to {endpoint}. Retry in 30 seconds.",
                    exp);

                // Create another task to retry.
                var ignored = Task.Delay(TimeSpan.FromSeconds(30))
                    .ContinueWith(_ => ConnectToAsync(endpoint));
                return;
            }

            AddPeer(cl);
        }

        /// <summary>
        /// Registers a connected client, assigns it the next peer id, and
        /// starts its read loop on the thread pool.
        /// </summary>
        void AddPeer(TcpClient peer)
        {
            var connectionInfo = new ConnectionInfo(peer);

            int id;
            lock (peers)
            {
                id = peers.Count;
                peers.Add(connectionInfo);
            }

            Task.Run(async () =>
            {
                // BUGFIX: use ?.Invoke so a missing subscriber does not throw
                // NullReferenceException inside this fire-and-forget task.
                NewConnectionEstablished?.Invoke(id);
                await ReadLoop(connectionInfo, id);
            });
        }

        /// <summary>
        /// Per-peer receive loop: reads chunks, deserializes them and raises
        /// <see cref="MessageReceived"/> until cancellation or stream error;
        /// always clears the peer slot and disposes the client on exit.
        /// </summary>
        async Task ReadLoop(ConnectionInfo connection, int peerId)
        {
            logger.LogInformation($"Peer #{peerId} connected to {connection.Client.Client.RemoteEndPoint}.");

            try
            {
                while (!token.IsCancellationRequested)
                {
                    var chunk = await connection.Stream.ReadChunkAsync(token);
                    var message = Deserialize<Message>(chunk);

                    // BUGFIX: null-safe event raise (no subscriber => no-op).
                    await (MessageReceived?.Invoke(message, peerId)
                        ?? Task.CompletedTask);
                }
            }
            finally
            {
                logger.LogInformation($"Peer #{peerId} disconnected.");
                peers[peerId] = null;
                connection.Client.Dispose();
            }
        }

        /// <summary>
        /// Sends a message to the given peer id; silently completes when the
        /// id is out of range or the peer has disconnected.
        /// </summary>
        public Task SendAsync(Message message, int peerId)
        {
            ConnectionInfo peer;
            lock (peers)
            {
                peer = peerId < 0 || peerId >= peers.Count ?
                    null : peers[peerId];
            }

            return peer.IsNull() ?
                Task.CompletedTask : SendAsync(message, peer);
        }

        /// <summary>
        /// Sends a message to every connected peer, optionally skipping one.
        /// </summary>
        public Task BroadcastAsync(Message message, int? exceptPeerId = null)
        {
            // BUGFIX: snapshot under the lock so a concurrent AddPeer cannot
            // mutate the list while LINQ enumerates it.
            ConnectionInfo[] snapshot;
            lock (peers) snapshot = peers.ToArray();

            return Task.WhenAll(
                from peer in snapshot.Where((_, i) => i != exceptPeerId)
                where !peer.IsNull()
                select SendAsync(message, peer));
        }

        /// <summary>
        /// Serializes and writes one message. May be called concurrently;
        /// sendLock serializes the chunk writes so frames from different
        /// messages cannot interleave on the wire.
        /// </summary>
        async Task SendAsync(Message message, ConnectionInfo connection)
        {
            var bytes = Serialize(message);

            // BUGFIX: acquire OUTSIDE the try block. The original released
            // in 'finally' even when WaitAsync was cancelled before
            // acquiring, over-releasing the semaphore and silently breaking
            // mutual exclusion.
            await sendLock.WaitAsync(token);
            try
            {
                await connection.Stream.WriteChunkAsync(bytes, token);
            }
            finally
            {
                sendLock.Release();
            }
        }

        /// <summary>Remote endpoints of all currently connected peers.</summary>
        public IEnumerable<EndPoint> GetPeers()
        {
            lock (peers)
            {
                // Materialize inside the lock so callers never enumerate a
                // list that another thread is mutating.
                return peers
                    .Select(x => x?.Client.Client.RemoteEndPoint as IPEndPoint)
                    .Where(x => !x.IsNull())
                    .ToList();
            }
        }

        /// <summary>Closes and forgets the peer with the given id.</summary>
        public void Close(int peerId)
        {
            ConnectionInfo peer;
            lock (peers)
            {
                peer = peers[peerId];
                peers[peerId] = null;
            }

            // BUGFIX: the original tested 'if (peer.IsNull())' and then
            // dereferenced peer - NRE when already disconnected, no-op when
            // actually connected. The condition must be negated.
            if (!peer.IsNull())
            {
                peer.Client.Dispose();
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Diagnostics.CodeAnalysis;
using Content.Shared.Physics;
using Content.Shared.Random.Helpers;
using Robust.Shared.GameObjects;
using Robust.Shared.IoC;
using Robust.Shared.Map;
using Robust.Shared.Maths;
using Robust.Shared.Physics;
using Robust.Shared.Physics.Broadphase;
using Robust.Shared.Random;

namespace Content.Shared.Maps
{
    /// <summary>
    /// Extension helpers for working with "turfs" (map tiles): lookup,
    /// space checks, prying tiles loose, and entity queries over a tile.
    /// Optional manager parameters default to IoC resolution when null.
    /// </summary>
    public static class TurfHelpers
    {
        /// <summary>
        /// Attempts to get the turf at map indices with grid id, or the
        /// default (empty) TileRef if no such turf is found.
        /// Note: despite older docs saying "null", this overload returns
        /// <c>default</c>, not null - TileRef is a struct here.
        /// </summary>
        public static TileRef GetTileRef(this Vector2i vector2i, GridId gridId, IMapManager? mapManager = null)
        {
            if (!gridId.IsValid())
                return default;

            mapManager ??= IoCManager.Resolve<IMapManager>();

            if (!mapManager.TryGetGrid(gridId, out var grid))
                return default;

            if (!grid.TryGetTileRef(vector2i, out var tile))
                return default;

            return tile;
        }

        /// <summary>
        /// Attempts to get the turf at certain coordinates, or null if the
        /// coordinates are invalid or no grid/tile exists there.
        /// </summary>
        public static TileRef? GetTileRef(this EntityCoordinates coordinates, IEntityManager? entityManager = null, IMapManager? mapManager = null)
        {
            entityManager ??= IoCManager.Resolve<IEntityManager>();

            if (!coordinates.IsValid(entityManager))
                return null;

            mapManager ??= IoCManager.Resolve<IMapManager>();

            if (!mapManager.TryGetGrid(coordinates.GetGridId(entityManager), out var grid))
                return null;

            if (!grid.TryGetTileRef(coordinates, out var tile))
                return null;

            return tile;
        }

        /// <summary>
        /// Try-pattern wrapper around <see cref="GetTileRef(EntityCoordinates, IEntityManager?, IMapManager?)"/>.
        /// </summary>
        public static bool TryGetTileRef(this EntityCoordinates coordinates, [NotNullWhen(true)] out TileRef? turf, IEntityManager? entityManager = null, IMapManager? mapManager = null)
        {
            return (turf = coordinates.GetTileRef(entityManager, mapManager)) != null;
        }

        /// <summary>
        /// Returns the content tile definition for a tile.
        /// Assumes every registered tile definition is a ContentTileDefinition
        /// (the cast throws otherwise).
        /// </summary>
        public static ContentTileDefinition GetContentTileDefinition(this Tile tile, ITileDefinitionManager? tileDefinitionManager = null)
        {
            tileDefinitionManager ??= IoCManager.Resolve<ITileDefinitionManager>();

            return (ContentTileDefinition)tileDefinitionManager[tile.TypeId];
        }

        /// <summary>
        /// Returns whether a tile is considered space.
        /// </summary>
        public static bool IsSpace(this Tile tile, ITileDefinitionManager? tileDefinitionManager = null)
        {
            return tile.GetContentTileDefinition(tileDefinitionManager).IsSpace;
        }

        /// <summary>
        /// Returns the content tile definition for a tile ref.
        /// </summary>
        public static ContentTileDefinition GetContentTileDefinition(this TileRef tile, ITileDefinitionManager? tileDefinitionManager = null)
        {
            return tile.Tile.GetContentTileDefinition(tileDefinitionManager);
        }

        /// <summary>
        /// Returns whether a tile ref is considered space.
        /// </summary>
        public static bool IsSpace(this TileRef tile, ITileDefinitionManager? tileDefinitionManager = null)
        {
            return tile.Tile.IsSpace(tileDefinitionManager);
        }

        /// <summary>
        /// Pries up the tile at the given coordinates. Convenience overload
        /// that converts coordinates to grid indices first.
        /// </summary>
        public static bool PryTile(this EntityCoordinates coordinates, IEntityManager? entityManager = null, IMapManager? mapManager = null)
        {
            entityManager ??= IoCManager.Resolve<IEntityManager>();
            mapManager ??= IoCManager.Resolve<IMapManager>();
            return coordinates.ToVector2i(entityManager, mapManager).PryTile(coordinates.GetGridId(entityManager));
        }

        /// <summary>
        /// Pries up the tile at the given grid indices.
        /// NOTE(review): uses GetGrid / GetTileRef (throwing variants), so an
        /// invalid grid id will throw rather than return false - confirm
        /// callers guarantee validity.
        /// </summary>
        public static bool PryTile(this Vector2i indices, GridId gridId, IMapManager? mapManager = null, ITileDefinitionManager? tileDefinitionManager = null, IEntityManager? entityManager = null)
        {
            mapManager ??= IoCManager.Resolve<IMapManager>();
            var grid = mapManager.GetGrid(gridId);
            var tileRef = grid.GetTileRef(indices);
            return tileRef.PryTile(mapManager, tileDefinitionManager, entityManager);
        }

        /// <summary>
        /// Pries up a tile: replaces it with its base "plating" turf and
        /// spawns the tile's item-drop entity at a random offset within the
        /// tile. Returns false when the tile is empty or not pryable.
        /// </summary>
        public static bool PryTile(this TileRef tileRef, IMapManager? mapManager = null, ITileDefinitionManager? tileDefinitionManager = null, IEntityManager? entityManager = null, IRobustRandom? robustRandom = null)
        {
            var tile = tileRef.Tile;
            var indices = tileRef.GridIndices;

            // If the arguments are null, resolve the needed dependencies.
            mapManager ??= IoCManager.Resolve<IMapManager>();
            tileDefinitionManager ??= IoCManager.Resolve<ITileDefinitionManager>();
            entityManager ??= IoCManager.Resolve<IEntityManager>();
            robustRandom ??= IoCManager.Resolve<IRobustRandom>();

            if (tile.IsEmpty) return false;

            var tileDef = (ContentTileDefinition) tileDefinitionManager[tile.TypeId];

            if (!tileDef.CanCrowbar) return false;

            var mapGrid = mapManager.GetGrid(tileRef.GridIndex);

            // The last entry in BaseTurfs is treated as the plating to expose.
            var plating = tileDefinitionManager[tileDef.BaseTurfs[^1]];

            mapGrid.SetTile(tileRef.GridIndices, new Tile(plating.TileId));

            // Random offset inside the tile, keeping a small margin from the
            // edges. The two NextFloat() calls are intentionally separate
            // draws (x then y).
            const float margin = 0.1f;
            var (x, y) = ((mapGrid.TileSize - 2 * margin) * robustRandom.NextFloat() + margin,
                (mapGrid.TileSize - 2 * margin) * robustRandom.NextFloat() + margin);

            //Actually spawn the relevant tile item at the right position and give it some random offset.
            var tileItem = entityManager.SpawnEntity(tileDef.ItemDropPrototypeName, indices.ToEntityCoordinates(tileRef.GridIndex, mapManager).Offset(new Vector2(x, y)));
            entityManager.GetComponent<TransformComponent>(tileItem).LocalRotation = robustRandom.NextDouble() * Math.Tau;
            return true;
        }

        /// <summary>
        /// Helper that returns all entities in a turf.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static IEnumerable<EntityUid> GetEntitiesInTile(this TileRef turf, LookupFlags flags = LookupFlags.IncludeAnchored, EntityLookupSystem? lookupSystem = null)
        {
            lookupSystem ??= EntitySystem.Get<EntityLookupSystem>();

            if (!GetWorldTileBox(turf, out var worldBox))
                return Enumerable.Empty<EntityUid>();

            return lookupSystem.GetEntitiesIntersecting(turf.MapIndex, worldBox, flags);
        }

        /// <summary>
        /// Helper that returns all entities in a turf; empty when the
        /// coordinates resolve to no turf.
        /// </summary>
        public static IEnumerable<EntityUid> GetEntitiesInTile(this EntityCoordinates coordinates, LookupFlags flags = LookupFlags.IncludeAnchored, EntityLookupSystem? lookupSystem = null)
        {
            var turf = coordinates.GetTileRef();

            if (turf == null)
                return Enumerable.Empty<EntityUid>();

            return GetEntitiesInTile(turf.Value, flags, lookupSystem);
        }

        /// <summary>
        /// Helper that returns all entities in a turf, addressed by indices.
        /// </summary>
        public static IEnumerable<EntityUid> GetEntitiesInTile(this Vector2i indices, GridId gridId, LookupFlags flags = LookupFlags.IncludeAnchored, EntityLookupSystem? lookupSystem = null)
        {
            return GetEntitiesInTile(indices.GetTileRef(gridId), flags, lookupSystem);
        }

        /// <summary>
        /// Checks if a turf has something dense on it: any hard, collidable
        /// body on the Impassable layer, or (optionally) anything on the
        /// mob mask when <paramref name="filterMobs"/> is true.
        /// </summary>
        public static bool IsBlockedTurf(this TileRef turf, bool filterMobs)
        {
            var physics = EntitySystem.Get<SharedPhysicsSystem>();

            if (!GetWorldTileBox(turf, out var worldBox))
                return false;

            var query = physics.GetCollidingEntities(turf.MapIndex, in worldBox);

            foreach (var body in query)
            {
                if (body.CanCollide && body.Hard && (body.CollisionLayer & (int) CollisionGroup.Impassable) != 0)
                    return true;

                if (filterMobs && (body.CollisionLayer & (int) CollisionGroup.MobMask) != 0)
                    return true;
            }

            return false;
        }

        /// <summary>
        /// Converts a turf's grid indices back into entity coordinates on
        /// its grid.
        /// </summary>
        public static EntityCoordinates GridPosition(this TileRef turf, IMapManager? mapManager = null)
        {
            mapManager ??= IoCManager.Resolve<IMapManager>();

            return turf.GridIndices.ToEntityCoordinates(turf.GridIndex, mapManager);
        }

        /// <summary>
        /// Creates a box the size of a tile, at the same position in the world as the tile.
        /// Returns false (with a unit box) when the turf's grid no longer exists.
        /// </summary>
        private static bool GetWorldTileBox(TileRef turf, out Box2Rotated res)
        {
            var map = IoCManager.Resolve<IMapManager>();

            if (map.TryGetGrid(turf.GridIndex, out var tileGrid))
            {
                // This is scaled to 90 % so it doesn't encompass walls on other tiles.
                var tileBox = Box2.UnitCentered.Scale(0.9f);
                tileBox = tileBox.Scale(tileGrid.TileSize);
                var worldPos = tileGrid.GridTileToWorldPos(turf.GridIndices);
                tileBox = tileBox.Translated(worldPos);

                // Now tileBox needs to be rotated to match grid rotation
                res = new Box2Rotated(tileBox, tileGrid.WorldRotation, worldPos);
                return true;
            }

            // Have to "return something"
            res = Box2Rotated.UnitCentered;
            return false;
        }
    }
}
/*
 * MindTouch Core - open source enterprise collaborative networking
 * Copyright (c) 2006-2010 MindTouch Inc.
 * www.mindtouch.com oss@mindtouch.com
 *
 * For community documentation and downloads visit www.opengarden.org;
 * please review the licensing section.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 * http://www.gnu.org/copyleft/gpl.html
 */
using System;
using System.Collections.Generic;
using System.Text;
using System.Data;
using MindTouch.Data;

namespace MindTouch.Deki.Data.MySql {

    // Group-related persistence for the MySQL backend. Numeric ids are
    // inlined into SQL (safe: uint), string values go through
    // DataCommand.MakeSqlSafe or ?PARAM placeholders.
    public partial class MySqlDekiDataSession {

        // Maps a GroupsSortField enum value to the SQL column/expression
        // used in ORDER BY clauses built by Groups_GetByQuery.
        private static readonly IDictionary<GroupsSortField, string> GROUPS_SORT_FIELD_MAPPING = new Dictionary<GroupsSortField, string>() {
            { GroupsSortField.ID, "group_id" },
            { GroupsSortField.NAME, "group_name" },
            { GroupsSortField.ROLE, "roles.role_name" },
            { GroupsSortField.SERVICE, "services.service_description" }
        };

        // Replaces the full set of group memberships for one user:
        // deletes all existing rows, then bulk-inserts the new ones
        // (no-op insert when groupIds is empty).
        public void GroupMembers_UpdateGroupsForUser(uint userId, IList<uint> groupIds) {
            StringBuilder userGroupQuery = null;
            if (!ArrayUtil.IsNullOrEmpty(groupIds)) {
                userGroupQuery = new StringBuilder("insert ignore into user_groups (user_id, group_id) values ");
                for (int i = 0; i < groupIds.Count; i++) {
                    if (i > 0) {
                        userGroupQuery.Append(",");
                    }
                    userGroupQuery.AppendFormat("({0},{1})", userId, groupIds[i]);
                }
            } else {
                userGroupQuery = new StringBuilder();
            }
            Catalog.NewQuery(string.Format(@" /* GroupMembers_UpdateGroupsForUser */ delete from user_groups where user_id = ?USERID; {0}", userGroupQuery.ToString()))
                .With("USERID", userId)
                .Execute();
        }

        // Replaces the member list of one group: removes members not in
        // userIds, then insert-ignores the given members with the timestamp.
        public void GroupMembers_UpdateUsersInGroup(uint groupid, IList<uint> userIds, DateTime timestamp) {
            // NOTE(review): ConvertArrayToDelimittedString presumably already
            // returns a comma-delimited string, making the outer string.Join
            // redundant (a single-string Join returns it unchanged) - confirm.
            string userIdsText = string.Join(",", DbUtils.ConvertArrayToDelimittedString<uint>(',', userIds));
            string deleteQuery = "delete from user_groups where group_id = ?GROUPID";
            StringBuilder insertQuery = new StringBuilder();
            if (userIds.Count > 0) {
                deleteQuery = string.Format("{0} and user_id not in ({1})", deleteQuery, userIdsText);
                insertQuery.Append("insert ignore into user_groups (user_id, group_id, last_edit) values ");
                for (int i = 0; i < userIds.Count; i++) {
                    insertQuery.AppendFormat("{0}({1}, ?GROUPID, ?TIMESTAMP)", i > 0 ? "," : "", userIds[i]);
                }
            }
            Catalog.NewQuery(string.Format(@" /* GroupMembers_UpdateUsersInGroup */ {0}; {1};", deleteQuery, insertQuery.ToString()))
                .With("GROUPID", groupid)
                .With("TIMESTAMP", timestamp)
                .Execute();
        }

        // Fetches groups by id; empty list for an empty id list.
        public IList<GroupBE> Groups_GetByIds(IList<uint> groupIds) {
            if (groupIds.Count == 0)
                return new List<GroupBE>();
            string groupIdsText = string.Join(",", DbUtils.ConvertArrayToDelimittedString<uint>(',', groupIds));
            return Groups_GetInternal(string.Format("where groups.group_id in ({0})", groupIdsText), "Groups_GetByIds");
        }

        // Fetches groups by exact name; names are SQL-escaped and quoted.
        public IList<GroupBE> Groups_GetByNames(IList<string> groupNames) {
            if (ArrayUtil.IsNullOrEmpty(groupNames)) {
                return new List<GroupBE>();
            }
            StringBuilder groupNamesStr = new StringBuilder();
            for (int i = 0; i < groupNames.Count; i++) {
                if (i > 0) {
                    groupNamesStr.Append(",");
                }
                groupNamesStr.AppendFormat("'{0}'", DataCommand.MakeSqlSafe(groupNames[i]));
            }
            return Groups_GetInternal(string.Format("where groups.group_name in ({0})", groupNamesStr), "Groups_GetByNames");
        }

        // Fetches all groups a user belongs to.
        // NOTE(review): returns null (not an empty list) for userId == 0,
        // unlike the other Groups_GetBy* methods - callers must null-check.
        public IList<GroupBE> Groups_GetByUser(uint userId) {
            if (userId == 0)
                return null;
            return Groups_GetInternal(string.Format(
                @"join user_groups on 
groups.group_id = user_groups.group_id where user_groups.user_id = {0};", userId), "Groups_GetByUser");
        }

        // Filtered/sorted/paged group listing. Joins roles/services only
        // when the chosen sort field needs them. Also returns the total row
        // count and the filtered row count via out parameters.
        public IList<GroupBE> Groups_GetByQuery(string groupNameFilter, uint? serviceIdFilter, SortDirection sortDir, GroupsSortField sortField, uint? offset, uint? limit, out uint totalCount, out uint queryCount) {
            // NOTE(review): 'result' is never used in this method.
            List<GroupBE> result = new List<GroupBE>();
            StringBuilder query = new StringBuilder();
            if (groupNameFilter != null) {
                // Escaped, then wrapped for a LIKE substring match.
                groupNameFilter = "%" + DataCommand.MakeSqlSafe(groupNameFilter) + "%";
            }
            string sortFieldString = null;
            GROUPS_SORT_FIELD_MAPPING.TryGetValue(sortField, out sortFieldString);
            if ((sortFieldString ?? string.Empty).StartsWith("roles.")) {
                query.Append(@" left join roles on groups.group_role_id = roles.role_id");
            }
            if ((sortFieldString ?? string.Empty).StartsWith("services.")) {
                query.AppendFormat(@" left join services on groups.group_service_id = services.service_id");
            }
            if (!string.IsNullOrEmpty(groupNameFilter) || serviceIdFilter != null) {
                // '(1=1)' anchors the WHERE so the AND clauses can be
                // appended unconditionally.
                query.Append(" where (1=1)");
                if (serviceIdFilter != null) {
                    query.AppendFormat(" AND group_service_id = {0}", serviceIdFilter.Value);
                }
                if (!string.IsNullOrEmpty(groupNameFilter)) {
                    query.AppendFormat(" AND group_name like '{0}'", groupNameFilter);
                }
            }
            if (!string.IsNullOrEmpty(sortFieldString)) {
                query.AppendFormat(" order by {0} ", sortFieldString);
                if (sortDir != SortDirection.UNDEFINED) {
                    query.Append(sortDir.ToString());
                }
            }
            return Groups_GetInternal(query.ToString(), "Groups_GetByQuery", true, limit, offset, out totalCount, out queryCount);
        }

        // Inserts a group (insert IGNORE: duplicate names return id 0
        // because LAST_INSERT_ID() is not updated).
        public uint Groups_Insert(GroupBE group) {
            uint groupId = Catalog.NewQuery(@" /* Groups_Insert */ insert IGNORE into `groups` (`group_name`, `group_role_id`, `group_service_id`, `group_creator_user_id`, `group_last_edit`) values (?NAME, ?ROLEID, ?SERVICEID, ?CREATORUSERID, ?TIMESTAMP); select LAST_INSERT_ID();")
                .With("NAME", group.Name)
                .With("ROLEID", group.RoleId)
                .With("SERVICEID", group.ServiceId)
                .With("CREATORUSERID", group.CreatorUserId)
                .With("TIMESTAMP", group.TimeStamp)
                .ReadAsUInt() ?? 0;
            return groupId;
        }

        // Deletes a group together with its membership rows.
        public void Groups_Delete(uint groupId) {
            Catalog.NewQuery(@" /* Groups_Delete */ delete from user_groups where group_id = ?GROUPID; delete from groups where group_id = ?GROUPID;")
                .With("GROUPID", groupId)
                .Execute();
        }

        // Updates a group's role and name (other fields untouched).
        public void Groups_Update(GroupBE group) {
            Catalog.NewQuery(@" /* Groups_Update */ UPDATE groups set group_role_id = ?ROLEID, group_name = ?NAME where group_id = ?GROUPID;")
                .With("GROUPID", group.Id)
                .With("ROLEID", group.RoleId)
                .With("NAME", group.Name)
                .Execute();
        }

        // Re-homes all groups of a service onto the local service (id 1) and
        // returns the ids of the affected groups (selected before update).
        public IList<uint> Groups_UpdateServicesToLocal(uint oldServiceId) {
            List<uint> groupIds = new List<uint>();
            // NOTE(review): string.Format here has no format items - it is a
            // no-op wrapper around the literal.
            string query = string.Format(@"/* Groups_UpdateServicesToLocal */ SELECT group_id FROM groups WHERE group_service_id = ?OLDSERVICEID; UPDATE groups SET group_service_id = 1 WHERE group_service_id = ?OLDSERVICEID; ");
            Catalog.NewQuery(query)
                .With("OLDSERVICEID", oldServiceId)
                .Execute(delegate(IDataReader dr) {
                    while(dr.Read()) {
                        groupIds.Add((uint) dr.GetInt32(0));
                    }
                });
            return groupIds;
        }

        // Convenience overload: fetch without paging, discarding the counts.
        private IList<GroupBE> Groups_GetInternal(string where, string functionDescription) {
            uint totalCount, queryCount;
            return Groups_GetInternal(where, functionDescription, true, null, null, out totalCount, out queryCount);
        }

        // Core fetch: selects groups (plus an aggregated member-id column)
        // with optional limit/offset, and optionally total/filtered counts
        // as extra result sets.
        private IList<GroupBE> Groups_GetInternal(string where, string functionDescription, bool lookupCount, uint? limit, uint? offset, out uint totalCount, out uint queryCount) {
            totalCount = queryCount = 0;
            // Temps are needed because out parameters cannot be captured by
            // the anonymous delegate below.
            uint totalCountTemp = 0, queryCountTemp = 0;
            List<GroupBE> groups = new List<GroupBE>();
            string totalCountQuery = lookupCount ? "select count(*) as totalcount from groups" : string.Empty;
            string queryCountQuery = lookupCount ? "select count(*) as querycount from groups " + where : string.Empty;
            string limitOffsetQuery = string.Empty;
            if (limit != null || offset != null) {
                limitOffsetQuery = string.Format("limit {0} offset {1}", limit ?? int.MaxValue, offset ?? 0);
            }
            // NOTE(review): group_concat separator is '' - member ids are
            // concatenated with no delimiter; confirm GroupBE.UserIds
            // consumers expect that format.
            string query = string.Format(@" /* GroupDA::{0} */ SET group_concat_max_len = @@max_allowed_packet; select groups.*, ( select cast(group_concat( user_groups.user_id, '') as char) from user_groups join users on users.user_id = user_groups.user_id where user_groups.group_id = groups.group_id group by user_groups.group_id ) as group_userids from groups {1} {2}; {3}; {4}; ", functionDescription, where.TrimEnd(new char[] { ';' }), limitOffsetQuery, totalCountQuery, queryCountQuery);
            Catalog.NewQuery(query)
                .Execute(delegate(IDataReader dr) {
                    while (dr.Read()) {
                        GroupBE group = Groups_Populate(dr);
                        groups.Add(group);
                    }
                    if (dr.NextResult() && dr.Read()) {
                        totalCountTemp = DbUtils.Convert.To<uint>(dr["totalcount"], 0);
                    }
                    if (dr.NextResult() && dr.Read()) {
                        queryCountTemp = DbUtils.Convert.To<uint>(dr["querycount"], 0);
                    }
                });
            totalCount = totalCountTemp;
            queryCount = queryCountTemp;
            return groups;
        }

        // Maps the current reader row onto a GroupBE entity.
        private GroupBE Groups_Populate(IDataReader dr) {
            GroupBE group = new GroupBE();
            group.CreatorUserId = dr.Read<uint>("group_creator_user_id");
            group.Id = dr.Read<uint>("group_id");
            group.Name = dr.Read<string>("group_name");
            group.RoleId = dr.Read<uint>("group_role_id");
            group.ServiceId = dr.Read<uint>("group_service_id");
            group.TimeStamp = dr.Read<DateTime>("group_last_edit");
            group.UserIds = dr.Read<string>("group_userids");
            return group;
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Threading.Tasks;
using Roslyn.Test.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.AddUsing
{
    // Add-import code-fix tests for extension methods: the span between
    // [| and |] marks the cursor/diagnostic location, and each test checks
    // that the expected 'using' directive is added (or that no fix is
    // offered, for the TestMissing cases).
    public partial class AddUsingTests
    {
        // 'Where' on string[] should add 'using System.Linq;'.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestWhereExtension()
        {
            await TestInRegularAndScriptAsync(
@"using System; using System.Collections.Generic; class Program { static void Main(string[] args) { var q = args.[|Where|] } }",
@"using System; using System.Collections.Generic; using System.Linq; class Program { static void Main(string[] args) { var q = args.Where } }");
        }

        // 'Select' should add 'using System.Linq;'.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestSelectExtension()
        {
            await TestInRegularAndScriptAsync(
@"using System; using System.Collections.Generic; class Program { static void Main(string[] args) { var q = args.[|Select|] } }",
@"using System; using System.Collections.Generic; using System.Linq; class Program { static void Main(string[] args) { var q = args.Select } }");
        }

        // 'GroupBy' should add 'using System.Linq;'.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestGroupByExtension()
        {
            await TestInRegularAndScriptAsync(
@"using System; using System.Collections.Generic; class Program { static void Main(string[] args) { var q = args.[|GroupBy|] } }",
@"using System; using System.Collections.Generic; using System.Linq; class Program { static void Main(string[] args) { var q = args.GroupBy } }");
        }

        // 'Join' should add 'using System.Linq;'.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestJoinExtension()
        {
            await TestInRegularAndScriptAsync(
@"using System; using System.Collections.Generic; class Program { static void Main(string[] args) { var q = args.[|Join|] } }",
@"using System; using System.Collections.Generic; using System.Linq; class Program { static void Main(string[] args) { var q 
= args.Join } }");
        }

        // No fix should be offered for an unknown member on an unresolved
        // 'Math' receiver.
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task RegressionFor8455()
        {
            await TestMissingInRegularAndScriptAsync(
@"class C { void M() { int dim = (int)Math.[|Min|](); } }");
        }

        // An inapplicable instance method with the same name must not block
        // importing the namespace of a matching extension method.
        [WorkItem(772321, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/772321")]
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestExtensionWithThePresenceOfTheSameNameNonExtensionMethod()
        {
            await TestInRegularAndScriptAsync(
@"namespace NS1 { class Program { void Main() { [|new C().Foo(4);|] } } class C { public void Foo(string y) { } } } namespace NS2 { static class CExt { public static void Foo(this NS1.C c, int x) { } } }",
@"using NS2; namespace NS1 { class Program { void Main() { new C().Foo(4); } } class C { public void Foo(string y) { } } } namespace NS2 { static class CExt { public static void Foo(this NS1.C c, int x) { } } }");
        }

        // Same as above, but the conflicting instance method is private
        // (inaccessible), which must also not block the fix.
        [WorkItem(772321, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/772321")]
        [WorkItem(920398, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/920398")]
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestExtensionWithThePresenceOfTheSameNameNonExtensionPrivateMethod()
        {
            await TestInRegularAndScriptAsync(
@"namespace NS1 { class Program { void Main() { [|new C().Foo(4);|] } } class C { private void Foo(int x) { } } } namespace NS2 { static class CExt { public static void Foo(this NS1.C c, int x) { } } }",
@"using NS2; namespace NS1 { class Program { void Main() { new C().Foo(4); } } class C { private void Foo(int x) { } } } namespace NS2 { static class CExt { public static void Foo(this NS1.C c, int x) { } } }");
        }

        // An already-imported namespace with a private (unusable) extension
        // must not prevent importing another namespace whose extension is
        // accessible.
        [WorkItem(772321, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/772321")]
        [WorkItem(920398, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/920398")]
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestExtensionWithThePresenceOfTheSameNameExtensionPrivateMethod()
        {
            await TestInRegularAndScriptAsync(
@"using NS2; namespace NS1 { class Program { void Main() { [|new C().Foo(4);|] } } class C { } } namespace NS2 { static class CExt { private static void Foo(this NS1.C c, int x) { } } } namespace NS3 { static class CExt { public static void Foo(this NS1.C c, int x) { } } }",
@"using NS2; using NS3; namespace NS1 { class Program { void Main() { new C().Foo(4); } } class C { } } namespace NS2 { static class CExt { private static void Foo(this NS1.C c, int x) { } } } namespace NS3 { static class CExt { public static void Foo(this NS1.C c, int x) { } } }");
        }

        // Collection-initializer Add: a single element should trigger the
        // import of the namespace containing the 'Add' extension.
        [WorkItem(269, "https://github.com/dotnet/roslyn/issues/269")]
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestAddUsingForAddExtentionMethod()
        {
            await TestAsync(
@"using System; using System.Collections; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { [|1|] }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }",
@"using System; using System.Collections; using Ext; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { 1 }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }",
parseOptions: null);
        }

        // Collection-initializer Add: diagnostic on the last element.
        [WorkItem(269, "https://github.com/dotnet/roslyn/issues/269")]
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestAddUsingForAddExtentionMethod2()
        {
            await TestAsync(
@"using System; using System.Collections; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { 1, 2, [|3|] }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }",
@"using System; using System.Collections; using Ext; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { 1, 2, 3 }; return null; } } namespace Ext { static class Extensions { public 
static void Add(this X x, int i) { } } }",
parseOptions: null);
        }

        // Collection-initializer Add: diagnostic on a middle element.
        [WorkItem(269, "https://github.com/dotnet/roslyn/issues/269")]
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestAddUsingForAddExtentionMethod3()
        {
            await TestAsync(
@"using System; using System.Collections; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { 1, [|2|], 3 }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }",
@"using System; using System.Collections; using Ext; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { 1, 2, 3 }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }",
parseOptions: null);
        }

        // Collection-initializer Add: diagnostic on a nested element list.
        [WorkItem(269, "https://github.com/dotnet/roslyn/issues/269")]
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestAddUsingForAddExtentionMethod4()
        {
            await TestAsync(
@"using System; using System.Collections; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { { 1, 2, 3 }, [|{ 4, 5, 6 }|], { 7, 8, 9 } }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }",
@"using System; using System.Collections; using Ext; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { { 1, 2, 3 }, { 4, 5, 6 }, { 7, 8, 9 } }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }",
parseOptions: null);
        }

        // Collection-initializer Add: diagnostic on the last nested list.
        // (This method continues beyond the end of this chunk.)
        [WorkItem(269, "https://github.com/dotnet/roslyn/issues/269")]
        [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)]
        public async Task TestAddUsingForAddExtentionMethod5()
        {
            await TestAsync(
@"using System; using System.Collections; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { { 1, 2, 3 }, { 4, 5, 6 }, [|{ 7, 8, 9 }|] }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } 
} }", @"using System; using System.Collections; using Ext; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { { 1, 2, 3 }, { 4, 5, 6 }, { 7, 8, 9 } }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }", parseOptions: null); } [WorkItem(269, "https://github.com/dotnet/roslyn/issues/269")] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)] public async Task TestAddUsingForAddExtentionMethod6() { await TestAsync( @"using System; using System.Collections; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { { 1, 2, 3 }, { ""Four"", ""Five"", ""Six"" }, [|{ '7', '8', '9' }|] }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }", @"using System; using System.Collections; using Ext; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { { 1, 2, 3 }, { ""Four"", ""Five"", ""Six"" }, { '7', '8', '9' } }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }", parseOptions: null); } [WorkItem(269, "https://github.com/dotnet/roslyn/issues/269")] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)] public async Task TestAddUsingForAddExtentionMethod7() { await TestAsync( @"using System; using System.Collections; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { { 1, 2, 3 }, [|{ ""Four"", ""Five"", ""Six"" }|], { '7', '8', '9' } }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }", @"using System; using System.Collections; using Ext; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { { 1, 2, 3 }, { ""Four"", ""Five"", ""Six"" }, { '7', '8', '9' } }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }", parseOptions: null); } [WorkItem(269, "https://github.com/dotnet/roslyn/issues/269")] 
[Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)] public async Task TestAddUsingForAddExtentionMethod8() { await TestAsync( @"using System; using System.Collections; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { [|{ 1, 2, 3 }|] }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }", @"using System; using System.Collections; using Ext; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { { 1, 2, 3 } }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }", parseOptions: null); } [WorkItem(269, "https://github.com/dotnet/roslyn/issues/269")] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)] public async Task TestAddUsingForAddExtentionMethod9() { await TestAsync( @"using System; using System.Collections; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { [|""This""|] }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }", @"using System; using System.Collections; using Ext; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { ""This"" }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } }", parseOptions: null); } [WorkItem(269, "https://github.com/dotnet/roslyn/issues/269")] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)] public async Task TestAddUsingForAddExtentionMethod10() { await TestAsync( @"using System; using System.Collections; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { [|{ 1, 2, 3 }|], { ""Four"", ""Five"", ""Six"" }, { '7', '8', '9' } }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } } namespace Ext2 { static class Extensions { public static void Add(this X x, object[] i) { } } }", @"using System; using System.Collections; using 
Ext; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { { 1, 2, 3 }, { ""Four"", ""Five"", ""Six"" }, { '7', '8', '9' } }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } } namespace Ext2 { static class Extensions { public static void Add(this X x, object[] i) { } } }", parseOptions: null); } [WorkItem(269, "https://github.com/dotnet/roslyn/issues/269")] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)] public async Task TestAddUsingForAddExtentionMethod11() { await TestAsync( @"using System; using System.Collections; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { [|{ 1, 2, 3 }|], { ""Four"", ""Five"", ""Six"" }, { '7', '8', '9' } }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } } namespace Ext2 { static class Extensions { public static void Add(this X x, object[] i) { } } }", @"using System; using System.Collections; using Ext2; class X : IEnumerable { public IEnumerator GetEnumerator() { new X { { 1, 2, 3 }, { ""Four"", ""Five"", ""Six"" }, { '7', '8', '9' } }; return null; } } namespace Ext { static class Extensions { public static void Add(this X x, int i) { } } } namespace Ext2 { static class Extensions { public static void Add(this X x, object[] i) { } } }", index: 1, parseOptions: null); } [WorkItem(3818, "https://github.com/dotnet/roslyn/issues/3818")] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)] public async Task InExtensionMethodUnderConditionalAccessExpression() { var initialText = @"<Workspace> <Project Language=""C#"" AssemblyName=""CSAssembly"" CommonReferences=""true""> <Document FilePath = ""Program""> namespace Sample { class Program { static void Main(string[] args) { string myString = ""Sample""; var other = myString?[|.StringExtension()|].Substring(0); } } } </Document> <Document FilePath = ""Extensions""> namespace Sample.Extensions { public static 
class StringExtensions { public static string StringExtension(this string s) { return ""Ok""; } } } </Document> </Project> </Workspace>"; var expectedText = @"using Sample.Extensions; namespace Sample { class Program { static void Main(string[] args) { string myString = ""Sample""; var other = myString?.StringExtension().Substring(0); } } }"; await TestInRegularAndScriptAsync(initialText, expectedText); } [WorkItem(3818, "https://github.com/dotnet/roslyn/issues/3818")] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)] public async Task InExtensionMethodUnderMultipleConditionalAccessExpressions() { var initialText = @"<Workspace> <Project Language=""C#"" AssemblyName=""CSAssembly"" CommonReferences=""true""> <Document FilePath = ""Program""> public class C { public T F&lt;T&gt;(T x) { return F(new C())?.F(new C())?[|.Extn()|]; } } </Document> <Document FilePath = ""Extensions""> namespace Sample.Extensions { public static class Extensions { public static C Extn(this C obj) { return obj.F(new C()); } } } </Document> </Project> </Workspace>"; var expectedText = @"using Sample.Extensions; public class C { public T F<T>(T x) { return F(new C())?.F(new C())?.Extn(); } }"; await TestInRegularAndScriptAsync(initialText, expectedText); } [WorkItem(3818, "https://github.com/dotnet/roslyn/issues/3818")] [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)] public async Task InExtensionMethodUnderMultipleConditionalAccessExpressions2() { var initialText = @"<Workspace> <Project Language=""C#"" AssemblyName=""CSAssembly"" CommonReferences=""true""> <Document FilePath = ""Program""> public class C { public T F&lt;T&gt;(T x) { return F(new C())?.F(new C())[|.Extn()|]?.F(newC()); } } </Document> <Document FilePath = ""Extensions""> namespace Sample.Extensions { public static class Extensions { public static C Extn(this C obj) { return obj.F(new C()); } } } </Document> </Project> </Workspace>"; var expectedText = @"using Sample.Extensions; public 
class C { public T F<T>(T x) { return F(new C())?.F(new C()).Extn()?.F(newC()); } }"; await TestInRegularAndScriptAsync(initialText, expectedText); } [Fact, Trait(Traits.Feature, Traits.Features.CodeActionsAddImport)] public async Task TestDeconstructExtension() { await TestAsync( @" class Program { void M(Program p) { var (x, y) = [|p|]; } } namespace N { static class E { public static void Deconstruct(this Program p, out int x, out int y) { } } }", @" using N; class Program { void M(Program p) { var (x, y) = [|p|]; } } namespace N { static class E { public static void Deconstruct(this Program p, out int x, out int y) { } } }", parseOptions: null); } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.IO;
using System.Linq;
using Xunit;

namespace System.Security.Cryptography.X509Certificates.Tests
{
    /// <summary>
    /// Tests for <see cref="X509Certificate2Collection.Import"/> across the supported
    /// input formats (X.509 DER/PEM, PKCS#7 DER/PEM, PKCS#12), sourced from both
    /// byte arrays and files.
    /// </summary>
    public static class CollectionImportTests
    {
        [Fact]
        public static void ImportNull()
        {
            X509Certificate2Collection collection = new X509Certificate2Collection();

            // Null input is rejected by both the byte[] and the file-path overloads.
            Assert.Throws<ArgumentNullException>(() => collection.Import((byte[])null));
            Assert.Throws<ArgumentNullException>(() => collection.Import((string)null));
        }

        [Fact]
        public static void ImportEmpty_Pkcs12()
        {
            AssertImportedCount(0, Cert.Import(TestData.EmptyPfx));
        }

        [Fact]
        public static void ImportX509DerBytes()
        {
            AssertImportedCount(1, Cert.Import(TestData.MsCertificate));
        }

        [Fact]
        public static void ImportX509PemBytes()
        {
            AssertImportedCount(1, Cert.Import(TestData.MsCertificatePemBytes));
        }

        [Fact]
        public static void ImportX509DerFile()
        {
            AssertImportedCount(1, Cert.Import(Path.Combine("TestData", "MS.cer")));
        }

        [Fact]
        public static void ImportX509PemFile()
        {
            AssertImportedCount(1, Cert.Import(Path.Combine("TestData", "MS.pem")));
        }

        [Fact]
        public static void ImportPkcs7DerBytes_Empty()
        {
            AssertImportedCount(0, Cert.Import(TestData.Pkcs7EmptyDerBytes));
        }

        [Fact]
        public static void ImportPkcs7PemBytes_Empty()
        {
            AssertImportedCount(0, Cert.Import(TestData.Pkcs7EmptyPemBytes));
        }

        [Fact]
        public static void ImportPkcs7DerFile_Empty()
        {
            AssertImportedCount(0, Cert.Import(Path.Combine("TestData", "empty.p7b")));
        }

        [Fact]
        public static void ImportPkcs7PemFile_Empty()
        {
            AssertImportedCount(0, Cert.Import(Path.Combine("TestData", "empty.p7c")));
        }

        [Fact]
        public static void ImportPkcs7DerBytes_Single()
        {
            AssertImportedCount(1, Cert.Import(TestData.Pkcs7SingleDerBytes));
        }

        [Fact]
        public static void ImportPkcs7PemBytes_Single()
        {
            AssertImportedCount(1, Cert.Import(TestData.Pkcs7SinglePemBytes));
        }

        [Fact]
        public static void ImportPkcs7DerFile_Single()
        {
            AssertImportedCount(1, Cert.Import(Path.Combine("TestData", "singlecert.p7b")));
        }

        [Fact]
        public static void ImportPkcs7PemFile_Single()
        {
            AssertImportedCount(1, Cert.Import(Path.Combine("TestData", "singlecert.p7c")));
        }

        [Fact]
        public static void ImportPkcs7DerBytes_Chain()
        {
            AssertImportedCount(3, Cert.Import(TestData.Pkcs7ChainDerBytes));
        }

        [Fact]
        public static void ImportPkcs7PemBytes_Chain()
        {
            AssertImportedCount(3, Cert.Import(TestData.Pkcs7ChainPemBytes));
        }

        [Fact]
        public static void ImportPkcs7DerFile_Chain()
        {
            AssertImportedCount(3, Cert.Import(Path.Combine("TestData", "certchain.p7b")));
        }

        [Fact]
        public static void ImportPkcs7PemFile_Chain()
        {
            AssertImportedCount(3, Cert.Import(Path.Combine("TestData", "certchain.p7c")));
        }

        [Theory]
        [MemberData(nameof(StorageFlags))]
        public static void ImportPkcs12Bytes_Single(X509KeyStorageFlags keyStorageFlags)
        {
            AssertImportedCount(1, Cert.Import(TestData.PfxData, TestData.PfxDataPassword, keyStorageFlags));
        }

        [Theory]
        [MemberData(nameof(StorageFlags))]
        public static void ImportPkcs12Bytes_Single_VerifyContents(X509KeyStorageFlags keyStorageFlags)
        {
            using (var pfxCer = new X509Certificate2(TestData.PfxData, TestData.PfxDataPassword, Cert.EphemeralIfPossible))
            using (ImportedCollection imported = Cert.Import(TestData.PfxData, TestData.PfxDataPassword, keyStorageFlags))
            {
                X509Certificate2Collection certs = imported.Collection;
                Assert.Equal(1, certs.Count);

                using (X509Certificate2 fromCollection = certs[0])
                {
                    // pfxCer was loaded directly and certs[0] came through Import,
                    // so they are two distinct instances with equal content.
                    Assert.NotSame(pfxCer, fromCollection);
                    Assert.Equal(pfxCer, fromCollection);
                    Assert.Equal(pfxCer.Thumbprint, fromCollection.Thumbprint);
                }
            }
        }

        [Theory]
        [MemberData(nameof(StorageFlags))]
        public static void ImportPkcs12File_Single(X509KeyStorageFlags keyStorageFlags)
        {
            AssertImportedCount(
                1,
                Cert.Import(Path.Combine("TestData", "My.pfx"), TestData.PfxDataPassword, keyStorageFlags));
        }

        [Theory]
        [MemberData(nameof(StorageFlags))]
        public static void ImportPkcs12Bytes_Chain(X509KeyStorageFlags keyStorageFlags)
        {
            AssertImportedCount(
                3,
                Cert.Import(TestData.ChainPfxBytes, TestData.ChainPfxPassword, keyStorageFlags));
        }

        [Theory]
        [MemberData(nameof(StorageFlags))]
        public static void ImportPkcs12File_Chain(X509KeyStorageFlags keyStorageFlags)
        {
            AssertImportedCount(
                3,
                Cert.Import(Path.Combine("TestData", "test.pfx"), TestData.ChainPfxPassword, keyStorageFlags));
        }

        [Theory]
        [MemberData(nameof(StorageFlags))]
        public static void ImportPkcs12File_Chain_VerifyContents(X509KeyStorageFlags keyStorageFlags)
        {
            using (ImportedCollection imported = Cert.Import(Path.Combine("TestData", "test.pfx"), TestData.ChainPfxPassword, keyStorageFlags))
            {
                X509Certificate2Collection certs = imported.Collection;
                Assert.Equal(3, certs.Count);

                // The read ordering must be consistent across the platforms.
                string[] expectedSubjects =
                {
                    "MS Passport Test Sub CA",
                    "MS Passport Test Root CA",
                    "test.local",
                };

                string[] actualSubjects = certs.OfType<X509Certificate2>()
                    .Select(c => c.GetNameInfo(X509NameType.SimpleName, false))
                    .ToArray();

                Assert.Equal(expectedSubjects, actualSubjects);

                // And only the certificates expected to carry a private key do so.
                bool[] expectedHasPrivateKeys =
                {
                    false,
                    false,
                    true,
                };

                bool[] actualHasPrivateKeys = certs.OfType<X509Certificate2>()
                    .Select(c => c.HasPrivateKey)
                    .ToArray();

                Assert.Equal(expectedHasPrivateKeys, actualHasPrivateKeys);
            }
        }

        [Fact]
        public static void InvalidStorageFlags()
        {
            X509Certificate2Collection collection = new X509Certificate2Collection();
            byte[] oneByte = new byte[1];

            // 0xFF is not a defined combination of X509KeyStorageFlags values.
            Assert.Throws<ArgumentException>(
                "keyStorageFlags",
                () => collection.Import(oneByte, string.Empty, (X509KeyStorageFlags)0xFF));

            Assert.Throws<ArgumentException>(
                "keyStorageFlags",
                () => collection.Import(string.Empty, string.Empty, (X509KeyStorageFlags)0xFF));

            // No test is performed here for the ephemeral flag failing downlevel, because the live
            // binary is always used by default, meaning it doesn't know EphemeralKeySet doesn't exist.
        }

#if netcoreapp
        [Fact]
        public static void InvalidStorageFlags_PersistedEphemeral()
        {
            // EphemeralKeySet and PersistKeySet are mutually exclusive.
            const X509KeyStorageFlags PersistedEphemeral =
                X509KeyStorageFlags.EphemeralKeySet | X509KeyStorageFlags.PersistKeySet;

            byte[] oneByte = new byte[1];
            X509Certificate2Collection collection = new X509Certificate2Collection();

            Assert.Throws<ArgumentException>(
                "keyStorageFlags",
                () => collection.Import(oneByte, string.Empty, PersistedEphemeral));

            Assert.Throws<ArgumentException>(
                "keyStorageFlags",
                () => collection.Import(string.Empty, string.Empty, PersistedEphemeral));
        }
#endif

        // Key-storage flag combinations exercised by the PKCS#12 [Theory] tests.
        public static IEnumerable<object[]> StorageFlags
        {
            get
            {
                yield return new object[] { X509KeyStorageFlags.DefaultKeySet };

#if netcoreapp
                yield return new object[] { X509KeyStorageFlags.EphemeralKeySet };
#endif
            }
        }

        // Asserts how many certificates the import produced, then disposes the collection.
        private static void AssertImportedCount(int expectedCount, ImportedCollection imported)
        {
            using (imported)
            {
                Assert.Equal(expectedCount, imported.Collection.Count);
            }
        }
    }
}
Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
/// </summary> /// <param name="sheet">the sheet this patriarch is stored in.</param> /// <param name="boundAggregate">The bound aggregate.</param> public HSSFPatriarch(HSSFSheet sheet, EscherAggregate boundAggregate) { _boundAggregate = boundAggregate; _sheet = sheet; _mainSpgrContainer = _boundAggregate.GetEscherContainer().ChildContainers[0]; EscherContainerRecord spContainer = (EscherContainerRecord)_boundAggregate.GetEscherContainer() .ChildContainers[0].GetChild(0); _spgrRecord = (EscherSpgrRecord)spContainer.GetChildById(EscherSpgrRecord.RECORD_ID); BuildShapeTree(); } public static HSSFPatriarch CreatePatriarch(HSSFPatriarch patriarch, HSSFSheet sheet) { HSSFPatriarch newPatriarch = new HSSFPatriarch(sheet, new EscherAggregate(true)); newPatriarch.AfterCreate(); foreach (HSSFShape shape in patriarch.Children) { HSSFShape newShape; if (shape is HSSFShapeGroup) { newShape = ((HSSFShapeGroup)shape).CloneShape(newPatriarch); } else { newShape = shape.CloneShape(); } newPatriarch.OnCreate(newShape); newPatriarch.AddShape(newShape); } return newPatriarch; } /** * check if any shapes contain wrong data * At now(13.08.2010) check if patriarch contains 2 or more comments with same coordinates */ protected internal void PreSerialize() { Dictionary<int, NoteRecord> tailRecords = _boundAggregate.TailRecords; /* * contains coordinates of comments we iterate over */ Hashtable coordinates = new Hashtable(tailRecords.Count); foreach (NoteRecord rec in tailRecords.Values) { String noteRef = new CellReference(rec.Row, rec.Column).FormatAsString(); // A1-style notation if (coordinates.Contains(noteRef)) { throw new InvalidOperationException("found multiple cell comments for cell " + noteRef); } else { coordinates.Add(noteRef, null); } } } /** * @param shape to be removed * @return true of shape is removed */ public bool RemoveShape(HSSFShape shape) { bool isRemoved = _mainSpgrContainer.RemoveChildRecord(shape.GetEscherContainer()); if (isRemoved) { shape.AfterRemove(this); 
_shapes.Remove(shape); } return isRemoved; } internal void AfterCreate() { DrawingManager2 drawingManager = ((HSSFWorkbook)_sheet.Workbook).Workbook.DrawingManager; short dgId = drawingManager.FindNewDrawingGroupId(); _boundAggregate.SetDgId(dgId); _boundAggregate.SetMainSpRecordId(NewShapeId()); drawingManager.IncrementDrawingsSaved(); } /// <summary> /// Creates a new Group record stored Under this patriarch. /// </summary> /// <param name="anchor">the client anchor describes how this Group is attached /// to the sheet.</param> /// <returns>the newly created Group.</returns> public HSSFShapeGroup CreateGroup(HSSFClientAnchor anchor) { HSSFShapeGroup group = new HSSFShapeGroup(null, anchor); AddShape(group); OnCreate(group); return group; } /// <summary> /// Creates a simple shape. This includes such shapes as lines, rectangles, /// and ovals. /// Note: Microsoft Excel seems to sometimes disallow /// higher y1 than y2 or higher x1 than x2 in the anchor, you might need to /// reverse them and draw shapes vertically or horizontally flipped! /// </summary> /// <param name="anchor">the client anchor describes how this Group is attached /// to the sheet.</param> /// <returns>the newly created shape.</returns> public HSSFSimpleShape CreateSimpleShape(HSSFClientAnchor anchor) { HSSFSimpleShape shape = new HSSFSimpleShape(null, anchor); AddShape(shape); //open existing file OnCreate(shape); return shape; } /// <summary> /// Creates a picture. 
/// </summary> /// <param name="anchor">the client anchor describes how this Group is attached /// to the sheet.</param> /// <param name="pictureIndex">Index of the picture.</param> /// <returns>the newly created shape.</returns> public IPicture CreatePicture(HSSFClientAnchor anchor, int pictureIndex) { HSSFPicture shape = new HSSFPicture(null, (HSSFClientAnchor)anchor); shape.PictureIndex = pictureIndex; AddShape(shape); //open existing file OnCreate(shape); return shape; } /// <summary> /// CreatePicture /// </summary> /// <param name="anchor">the client anchor describes how this picture is attached to the sheet.</param> /// <param name="pictureIndex">the index of the picture in the workbook collection of pictures.</param> /// <returns>return newly created shape</returns> public IPicture CreatePicture(IClientAnchor anchor, int pictureIndex) { return CreatePicture((HSSFClientAnchor)anchor, pictureIndex); } /** * Adds a new OLE Package Shape * * @param anchor the client anchor describes how this picture is * attached to the sheet. * @param storageId the storageId returned by {@Link HSSFWorkbook.AddOlePackage} * @param pictureIndex the index of the picture (used as preview image) in the * workbook collection of pictures. * * @return newly Created shape */ public HSSFObjectData CreateObjectData(HSSFClientAnchor anchor, int storageId, int pictureIndex) { ObjRecord obj = new ObjRecord(); CommonObjectDataSubRecord ftCmo = new CommonObjectDataSubRecord(); ftCmo.ObjectType = (/*setter*/CommonObjectType.Picture); // ftCmo.ObjectId=(/*setter*/oleShape.ShapeId); ... will be Set by onCreate(...) 
ftCmo.IsLocked = (/*setter*/true); ftCmo.IsPrintable = (/*setter*/true); ftCmo.IsAutoFill = (/*setter*/true); ftCmo.IsAutoline = (/*setter*/true); ftCmo.Reserved1 = (/*setter*/0); ftCmo.Reserved2 = (/*setter*/0); ftCmo.Reserved3 = (/*setter*/0); obj.AddSubRecord(ftCmo); // FtCf (pictFormat) FtCfSubRecord ftCf = new FtCfSubRecord(); HSSFPictureData pictData = Sheet.Workbook.GetAllPictures()[(pictureIndex - 1)] as HSSFPictureData; switch ((PictureType)pictData.Format) { case PictureType.WMF: case PictureType.EMF: // this needs patch #49658 to be applied to actually work ftCf.Flags = (/*setter*/FtCfSubRecord.METAFILE_BIT); break; case PictureType.DIB: case PictureType.PNG: case PictureType.JPEG: case PictureType.PICT: ftCf.Flags = (/*setter*/FtCfSubRecord.BITMAP_BIT); break; default: throw new InvalidOperationException("Invalid picture type: " + pictData.Format); } obj.AddSubRecord(ftCf); // FtPioGrbit (pictFlags) FtPioGrbitSubRecord ftPioGrbit = new FtPioGrbitSubRecord(); ftPioGrbit.SetFlagByBit(FtPioGrbitSubRecord.AUTO_PICT_BIT, true); obj.AddSubRecord(ftPioGrbit); EmbeddedObjectRefSubRecord ftPictFmla = new EmbeddedObjectRefSubRecord(); ftPictFmla.SetUnknownFormulaData(new byte[] { 2, 0, 0, 0, 0 }); ftPictFmla.OLEClassName = (/*setter*/"Paket"); ftPictFmla.SetStorageId(storageId); obj.AddSubRecord(ftPictFmla); obj.AddSubRecord(new EndSubRecord()); String entryName = "MBD" + HexDump.ToHex(storageId); DirectoryEntry oleRoot; try { DirectoryNode dn = (_sheet.Workbook as HSSFWorkbook).RootDirectory; if (dn == null) throw new FileNotFoundException(); oleRoot = (DirectoryEntry)dn.GetEntry(entryName); } catch (FileNotFoundException e) { throw new InvalidOperationException("trying to add ole shape without actually Adding data first - use HSSFWorkbook.AddOlePackage first", e); } // create picture shape, which need to be minimal modified for oleshapes HSSFPicture shape = new HSSFPicture(null, anchor); shape.PictureIndex = (/*setter*/pictureIndex); EscherContainerRecord 
// NOTE(review): this chunk begins inside a method whose signature lies above the
// visible region (it finishes building an embedded OLE-object shape and returns
// it); only the tail of that method is shown here.
            spContainer = shape.GetEscherContainer();
            EscherSpRecord spRecord = spContainer.GetChildById(EscherSpRecord.RECORD_ID) as EscherSpRecord;
            // Mark the low-level escher shape record as hosting an OLE object.
            spRecord.Flags = (/*setter*/spRecord.Flags | EscherSpRecord.FLAG_OLESHAPE);
            HSSFObjectData oleShape = new HSSFObjectData(spContainer, obj, oleRoot);
            AddShape(oleShape);
            OnCreate(oleShape);
            return oleShape;
        }

        /// <summary>
        /// Creates a polygon
        /// </summary>
        /// <param name="anchor">the client anchor describes how this Group is attached
        /// to the sheet.</param>
        /// <returns>the newly Created shape.</returns>
        public HSSFPolygon CreatePolygon(IClientAnchor anchor)
        {
            HSSFPolygon shape = new HSSFPolygon(null, (HSSFAnchor)anchor);
            AddShape(shape);
            OnCreate(shape);
            return shape;
        }

        /// <summary>
        /// Constructs a textbox Under the patriarch.
        /// </summary>
        /// <param name="anchor">the client anchor describes how this Group is attached
        /// to the sheet.</param>
        /// <returns>the newly Created textbox.</returns>
        public HSSFSimpleShape CreateTextbox(IClientAnchor anchor)
        {
            HSSFTextbox shape = new HSSFTextbox(null, (HSSFAnchor)anchor);
            AddShape(shape);
            OnCreate(shape);
            return shape;
        }

        /**
         * Constructs a cell comment.
         *
         * @param anchor the client anchor describes how this comment is attached
         *               to the sheet.
         * @return the newly created comment.
         */
        public HSSFComment CreateComment(HSSFAnchor anchor)
        {
            HSSFComment shape = new HSSFComment(null, anchor);
            AddShape(shape);
            OnCreate(shape);
            return shape;
        }

        /**
         * YK: used to create autofilters
         *
         * @see org.apache.poi.hssf.usermodel.HSSFSheet#setAutoFilter(int, int, int, int)
         */
        public HSSFSimpleShape CreateComboBox(HSSFAnchor anchor)
        {
            HSSFCombobox shape = new HSSFCombobox(null, anchor);
            AddShape(shape);
            OnCreate(shape);
            return shape;
        }

        /// <summary>
        /// Constructs a cell comment.
        /// </summary>
        /// <param name="anchor">the client anchor describes how this comment is attached
        /// to the sheet.</param>
        /// <returns>the newly created comment.</returns>
        public IComment CreateCellComment(IClientAnchor anchor)
        {
            return CreateComment((HSSFAnchor)anchor);
        }

        // Copies the anchor's flip state onto the escher shape record so mirrored
        // shapes are rendered correctly by the drawing layer.
        private void SetFlipFlags(HSSFShape shape)
        {
            EscherSpRecord sp = (EscherSpRecord)shape.GetEscherContainer().GetChildById(EscherSpRecord.RECORD_ID);
            if (shape.Anchor.IsHorizontallyFlipped)
            {
                sp.Flags = (sp.Flags | EscherSpRecord.FLAG_FLIPHORIZ);
            }
            if (shape.Anchor.IsVerticallyFlipped)
            {
                sp.Flags = (sp.Flags | EscherSpRecord.FLAG_FLIPVERT);
            }
        }

        /// <summary>
        /// Returns a list of all shapes contained by the patriarch.
        /// </summary>
        /// <value>The children.</value>
        public IList<HSSFShape> Children
        {
            get { return _shapes; }
        }

        /**
         * add a shape to this drawing
         */
        public void AddShape(HSSFShape shape)
        {
            shape.Patriarch = this;
            _shapes.Add(shape);
        }

        // Wires a freshly created shape into the escher record tree: allocates a
        // shape id, appends the shape's sp-container to the spgr container and
        // lets the shape finish its own record bookkeeping (AfterInsert).
        private void OnCreate(HSSFShape shape)
        {
            EscherContainerRecord spgrContainer = _boundAggregate.GetEscherContainer().ChildContainers[0];

            EscherContainerRecord spContainer = shape.GetEscherContainer();
            int shapeId = NewShapeId();
            shape.ShapeId = shapeId;

            spgrContainer.AddChildRecord(spContainer);
            shape.AfterInsert(this);
            SetFlipFlags(shape);
        }

        /// <summary>
        /// Total count of all children and their children's children.
        /// </summary>
        /// <value>The count of all children.</value>
        public int CountOfAllChildren
        {
            get
            {
                int count = _shapes.Count;
                // Recurses through group shapes via CountOfAllChildren.
                for (IEnumerator iterator = _shapes.GetEnumerator(); iterator.MoveNext(); )
                {
                    HSSFShape shape = (HSSFShape)iterator.Current;
                    count += shape.CountOfAllChildren;
                }
                return count;
            }
        }

        /// <summary>
        /// Sets the coordinate space of this Group. All children are constrained
        /// to these coordinates.
        /// </summary>
        /// <param name="x1">The x1.</param>
        /// <param name="y1">The y1.</param>
        /// <param name="x2">The x2.</param>
        /// <param name="y2">The y2.</param>
        public void SetCoordinates(int x1, int y1, int x2, int y2)
        {
            _spgrRecord.RectY1 = (y1);
            _spgrRecord.RectY2 = (y2);
            _spgrRecord.RectX1 = (x1);
            _spgrRecord.RectX2 = (x2);
        }

        // Removes every shape from this patriarch; iterates over a copy because
        // RemoveShape mutates the underlying _shapes list.
        public void Clear()
        {
            List<HSSFShape> copy = new List<HSSFShape>(_shapes);
            foreach (HSSFShape shape in copy)
            {
                RemoveShape(shape);
            }
        }

        // Allocates a fresh escher shape id from the workbook-level drawing
        // manager, for the drawing group this patriarch's records belong to.
        internal int NewShapeId()
        {
            DrawingManager2 dm = ((HSSFWorkbook)_sheet.Workbook).Workbook.DrawingManager;
            EscherDgRecord dg = (EscherDgRecord)_boundAggregate.GetEscherContainer().GetChildById(EscherDgRecord.RECORD_ID);
            short drawingGroupId = dg.DrawingGroupId;
            return dm.AllocateShapeId(drawingGroupId, dg);
        }

        /// <summary>
        /// Does this HSSFPatriarch contain a chart?
        /// (Technically a reference to a chart, since they
        /// Get stored in a different block of records)
        /// FIXME - detect chart in all cases (only seems
        /// to work on some charts so far)
        /// </summary>
        /// <returns>
        /// <c>true</c> if this instance contains chart; otherwise, <c>false</c>.
        /// </returns>
        public bool ContainsChart()
        {
            // TODO - support charts properly in usermodel

            // We're looking for a EscherOptRecord
            EscherOptRecord optRecord = (EscherOptRecord)_boundAggregate.FindFirstWithId(EscherOptRecord.RECORD_ID);
            if (optRecord == null)
            {
                // No opt record, can't have chart
                return false;
            }

            // NOTE(review): complex property 896 appears to carry the shape's
            // name; the check below only matches the literal "Chart 1\0", hence
            // the FIXME above — confirm against other chart names.
            for (IEnumerator it = optRecord.EscherProperties.GetEnumerator(); it.MoveNext(); )
            {
                EscherProperty prop = (EscherProperty)it.Current;
                if (prop.PropertyNumber == 896 && prop.IsComplex)
                {
                    EscherComplexProperty cp = (EscherComplexProperty)prop;
                    String str = StringUtil.GetFromUnicodeLE(cp.ComplexData);
                    //Console.Error.WriteLine(str);
                    if (str.Equals("Chart 1\0"))
                    {
                        return true;
                    }
                }
            }
            return false;
        }

        /// <summary>
        /// The top left x coordinate of this Group.
        /// </summary>
        /// <value>The x1.</value>
        public int X1
        {
            get { return _spgrRecord.RectX1; }
        }

        /// <summary>
        /// The top left y coordinate of this Group.
        /// </summary>
        /// <value>The y1.</value>
        public int Y1
        {
            get { return _spgrRecord.RectY1; }
        }

        /// <summary>
        /// The bottom right x coordinate of this Group.
        /// </summary>
        /// <value>The x2.</value>
        public int X2
        {
            get { return _spgrRecord.RectX2; }
        }

        /// <summary>
        /// The bottom right y coordinate of this Group.
        /// </summary>
        /// <value>The y2.</value>
        public int Y2
        {
            get { return _spgrRecord.RectY2; }
        }

        /// <summary>
        /// Returns the aggregate escher record we're bound to
        /// </summary>
        /// <returns></returns>
        internal EscherAggregate GetBoundAggregate()
        {
            return _boundAggregate;
        }

        /**
         * Creates a new client anchor and sets the top-left and bottom-right
         * coordinates of the anchor.
         *
         * @param dx1  the x coordinate in EMU within the first cell.
         * @param dy1  the y coordinate in EMU within the first cell.
         * @param dx2  the x coordinate in EMU within the second cell.
         * @param dy2  the y coordinate in EMU within the second cell.
         * @param col1 the column (0 based) of the first cell.
         * @param row1 the row (0 based) of the first cell.
         * @param col2 the column (0 based) of the second cell.
         * @param row2 the row (0 based) of the second cell.
         * @return the newly created client anchor
         */
        public IClientAnchor CreateAnchor(int dx1, int dy1, int dx2, int dy2, int col1, int row1, int col2, int row2)
        {
            return new HSSFClientAnchor(dx1, dy1, dx2, dy2, (short)col1, row1, (short)col2, row2);
        }

        // Charts are not supported by this drawing implementation.
        public IChart CreateChart(IClientAnchor anchor)
        {
            throw new RuntimeException("NotImplemented");
        }

        /**
         * create shape tree from existing escher records tree
         */
        public void BuildShapeTree()
        {
            EscherContainerRecord dgContainer = _boundAggregate.GetEscherContainer();
            if (dgContainer == null)
            {
                return;
            }
            EscherContainerRecord spgrConrainer = dgContainer.ChildContainers[0];
            IList<EscherContainerRecord> spgrChildren = spgrConrainer.ChildContainers;

            // Child 0 is the group's own sp-container, so it is skipped.
            for (int i = 0; i < spgrChildren.Count; i++)
            {
                EscherContainerRecord spContainer = spgrChildren[i];
                if (i != 0)
                {
                    HSSFShapeFactory.CreateShapeTree(spContainer, _boundAggregate, this, ((HSSFWorkbook)_sheet.Workbook).RootDirectory);
                }
            }
        }

        // Direct accessor for the backing shape list (not a copy).
        public List<HSSFShape> GetShapes()
        {
            return _shapes;
        }

        public IEnumerator<HSSFShape> GetEnumerator()
        {
            return _shapes.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return _shapes.GetEnumerator();
        }

        // The sheet this patriarch draws on.
        protected internal HSSFSheet Sheet
        {
            get { return _sheet; }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Globalization;
using System.Runtime.InteropServices;

namespace System
{
    // Search-related members of String: Contains, IndexOf, IndexOfAny,
    // LastIndexOf, LastIndexOfAny. The char-based overloads use unsafe pointer
    // scans; the string-based overloads delegate to CompareInfo/TextInfo.
    public partial class String
    {
        // Ordinal containment check (no culture-sensitive matching).
        public bool Contains(string value)
        {
            return (IndexOf(value, StringComparison.Ordinal) >= 0);
        }

        public bool Contains(string value, StringComparison comparisonType)
        {
            return (IndexOf(value, comparisonType) >= 0);
        }

        // Returns the index of the first occurrence of a specified character in the current instance.
        // The search starts at startIndex and runs through the next count characters.
        //
        public int IndexOf(char value)
        {
            return IndexOf(value, 0, this.Length);
        }

        public int IndexOf(char value, int startIndex)
        {
            return IndexOf(value, startIndex, this.Length - startIndex);
        }

        public unsafe int IndexOf(char value, int startIndex, int count)
        {
            if (startIndex < 0 || startIndex > Length)
                throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);

            if (count < 0 || count > Length - startIndex)
                throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);

            fixed (char* pChars = &_firstChar)
            {
                char* pCh = pChars + startIndex;

                // Main loop is unrolled by 4; the goto labels below adjust pCh by
                // how far into the unrolled block the match was found.
                while (count >= 4)
                {
                    if (*pCh == value) goto ReturnIndex;
                    if (*(pCh + 1) == value) goto ReturnIndex1;
                    if (*(pCh + 2) == value) goto ReturnIndex2;
                    if (*(pCh + 3) == value) goto ReturnIndex3;

                    count -= 4;
                    pCh += 4;
                }

                // Remaining 0-3 characters.
                while (count > 0)
                {
                    if (*pCh == value)
                        goto ReturnIndex;

                    count--;
                    pCh++;
                }

                return -1;

                ReturnIndex3: pCh++;
                ReturnIndex2: pCh++;
                ReturnIndex1: pCh++;
                ReturnIndex:
                return (int)(pCh - pChars);
            }
        }

        // Returns the index of the first occurrence of any specified character in the current instance.
        // The search starts at startIndex and runs to startIndex + count - 1.
        //
        public int IndexOfAny(char[] anyOf)
        {
            return IndexOfAny(anyOf, 0, this.Length);
        }

        public int IndexOfAny(char[] anyOf, int startIndex)
        {
            return IndexOfAny(anyOf, startIndex, this.Length - startIndex);
        }

        public int IndexOfAny(char[] anyOf, int startIndex, int count)
        {
            if (anyOf == null)
                throw new ArgumentNullException(nameof(anyOf));

            if ((uint)startIndex > (uint)Length)
                throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);

            if ((uint)count > (uint)(Length - startIndex))
                throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);

            // Dispatch on the needle count: 2 and 3 get dedicated scans, larger
            // sets use the probabilistic-map scan, 1 falls back to IndexOf(char).
            if (anyOf.Length == 2)
            {
                // Very common optimization for directory separators (/, \), quotes (", '), brackets, etc
                return IndexOfAny(anyOf[0], anyOf[1], startIndex, count);
            }
            else if (anyOf.Length == 3)
            {
                return IndexOfAny(anyOf[0], anyOf[1], anyOf[2], startIndex, count);
            }
            else if (anyOf.Length > 3)
            {
                return IndexOfCharArray(anyOf, startIndex, count);
            }
            else if (anyOf.Length == 1)
            {
                return IndexOf(anyOf[0], startIndex, count);
            }
            else // anyOf.Length == 0
            {
                return -1;
            }
        }

        private unsafe int IndexOfAny(char value1, char value2, int startIndex, int count)
        {
            fixed (char* pChars = &_firstChar)
            {
                char* pCh = pChars + startIndex;

                // Processes two characters per iteration.
                while (count > 0)
                {
                    char c = *pCh;

                    if (c == value1 || c == value2)
                        return (int)(pCh - pChars);

                    // Possibly reads outside of count and can include null terminator
                    // Handled in the return logic
                    c = *(pCh + 1);

                    if (c == value1 || c == value2)
                        return (count == 1 ? -1 : (int)(pCh - pChars) + 1);

                    pCh += 2;
                    count -= 2;
                }

                return -1;
            }
        }

        private unsafe int IndexOfAny(char value1, char value2, char value3, int startIndex, int count)
        {
            fixed (char* pChars = &_firstChar)
            {
                char* pCh = pChars + startIndex;

                while (count > 0)
                {
                    char c = *pCh;

                    if (c == value1 || c == value2 || c == value3)
                        return (int)(pCh - pChars);

                    pCh++;
                    count--;
                }

                return -1;
            }
        }

        private unsafe int IndexOfCharArray(char[] anyOf, int startIndex, int count)
        {
            // use probabilistic map, see InitializeProbabilisticMap
            ProbabilisticMap map = default(ProbabilisticMap);
            uint* charMap = (uint*)&map;

            InitializeProbabilisticMap(charMap, anyOf);

            fixed (char* pChars = &_firstChar)
            {
                char* pCh = pChars + startIndex;

                while (count > 0)
                {
                    int thisChar = *pCh;

                    // Both the low byte's bit and the high byte's bit must be set
                    // before the (slower) exact membership check is performed.
                    if (IsCharBitSet(charMap, (byte)thisChar) &&
                        IsCharBitSet(charMap, (byte)(thisChar >> 8)) &&
                        ArrayContains((char)thisChar, anyOf))
                    {
                        return (int)(pCh - pChars);
                    }

                    count--;
                    pCh++;
                }

                return -1;
            }
        }

        private const int PROBABILISTICMAP_BLOCK_INDEX_MASK = 0x7;
        private const int PROBABILISTICMAP_BLOCK_INDEX_SHIFT = 0x3;
        private const int PROBABILISTICMAP_SIZE = 0x8;

        // A probabilistic map is an optimization that is used in IndexOfAny/
        // LastIndexOfAny methods. The idea is to create a bit map of the characters we
        // are searching for and use this map as a "cheap" check to decide if the
        // current character in the string exists in the array of input characters.
        // There are 256 bits in the map, with each character mapped to 2 bits. Every
        // character is divided into 2 bytes, and then every byte is mapped to 1 bit.
        // The character map is an array of 8 integers acting as map blocks. The 3 lsb
        // in each byte in the character is used to index into this map to get the
        // right block, the value of the remaining 5 msb are used as the bit position
        // inside this block.
        private static unsafe void InitializeProbabilisticMap(uint* charMap, char[] anyOf)
        {
            bool hasAscii = false;
            uint* charMapLocal = charMap; // https://github.com/dotnet/coreclr/issues/14264

            for (int i = 0; i < anyOf.Length; ++i)
            {
                int c = anyOf[i];

                // Map low bit
                SetCharBit(charMapLocal, (byte)c);

                // Map high bit
                c >>= 8;

                if (c == 0)
                {
                    hasAscii = true;
                }
                else
                {
                    SetCharBit(charMapLocal, (byte)c);
                }
            }

            if (hasAscii)
            {
                // Common to search for ASCII symbols. Just set the high value once.
                // (An ASCII char's high byte is 0, so IsCharBitSet(map, 0) reads
                // bit 0 of block 0 — exactly the bit set here.)
                charMapLocal[0] |= 1u;
            }
        }

        private static bool ArrayContains(char searchChar, char[] anyOf)
        {
            for (int i = 0; i < anyOf.Length; i++)
            {
                if (anyOf[i] == searchChar)
                    return true;
            }
            return false;
        }

        private unsafe static bool IsCharBitSet(uint* charMap, byte value)
        {
            return (charMap[value & PROBABILISTICMAP_BLOCK_INDEX_MASK] & (1u << (value >> PROBABILISTICMAP_BLOCK_INDEX_SHIFT))) != 0;
        }

        private unsafe static void SetCharBit(uint* charMap, byte value)
        {
            charMap[value & PROBABILISTICMAP_BLOCK_INDEX_MASK] |= 1u << (value >> PROBABILISTICMAP_BLOCK_INDEX_SHIFT);
        }

        // String-needle IndexOf overloads; these default to the current culture,
        // unlike Contains(string) above which is ordinal.
        public int IndexOf(String value)
        {
            return IndexOf(value, StringComparison.CurrentCulture);
        }

        public int IndexOf(String value, int startIndex)
        {
            return IndexOf(value, startIndex, StringComparison.CurrentCulture);
        }

        public int IndexOf(String value, int startIndex, int count)
        {
            if (startIndex < 0 || startIndex > this.Length)
            {
                throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);
            }

            if (count < 0 || count > this.Length - startIndex)
            {
                throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);
            }

            return IndexOf(value, startIndex, count, StringComparison.CurrentCulture);
        }

        public int IndexOf(String value, StringComparison comparisonType)
        {
            return IndexOf(value, 0, this.Length, comparisonType);
        }

        public int IndexOf(String value, int startIndex, StringComparison comparisonType)
        {
            return IndexOf(value, startIndex, this.Length - startIndex, comparisonType);
        }

        public int IndexOf(String value, int startIndex, int count, StringComparison comparisonType)
        {
            // Validate inputs
            if (value == null)
                throw new ArgumentNullException(nameof(value));

            if (startIndex < 0 || startIndex > this.Length)
                throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);

            if (count < 0 || startIndex > this.Length - count)
                throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);

            // Route to the culture/ordinal implementation for the comparison type.
            switch (comparisonType)
            {
                case StringComparison.CurrentCulture:
                    return CultureInfo.CurrentCulture.CompareInfo.IndexOf(this, value, startIndex, count, CompareOptions.None);

                case StringComparison.CurrentCultureIgnoreCase:
                    return CultureInfo.CurrentCulture.CompareInfo.IndexOf(this, value, startIndex, count, CompareOptions.IgnoreCase);

                case StringComparison.InvariantCulture:
                    return CultureInfo.InvariantCulture.CompareInfo.IndexOf(this, value, startIndex, count, CompareOptions.None);

                case StringComparison.InvariantCultureIgnoreCase:
                    return CultureInfo.InvariantCulture.CompareInfo.IndexOf(this, value, startIndex, count, CompareOptions.IgnoreCase);

                case StringComparison.Ordinal:
                    return CultureInfo.InvariantCulture.CompareInfo.IndexOf(this, value, startIndex, count, CompareOptions.Ordinal);

                case StringComparison.OrdinalIgnoreCase:
                    return TextInfo.IndexOfStringOrdinalIgnoreCase(this, value, startIndex, count);

                default:
                    throw new ArgumentException(SR.NotSupported_StringComparison, nameof(comparisonType));
            }
        }

        // Returns the index of the last occurrence of a specified character in the current instance.
        // The search starts at startIndex and runs backwards to startIndex - count + 1.
        // The character at position startIndex is included in the search. startIndex is the larger
        // index within the string.
        //
        public int LastIndexOf(char value)
        {
            return LastIndexOf(value, this.Length - 1, this.Length);
        }

        public int LastIndexOf(char value, int startIndex)
        {
            return LastIndexOf(value, startIndex, startIndex + 1);
        }

        public unsafe int LastIndexOf(char value, int startIndex, int count)
        {
            if (Length == 0)
                return -1;

            if (startIndex < 0 || startIndex >= Length)
                throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);

            if (count < 0 || count - 1 > startIndex)
                throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);

            fixed (char* pChars = &_firstChar)
            {
                char* pCh = pChars + startIndex;

                //We search [startIndex..EndIndex]
                // Backward scan, unrolled by 4 — mirror image of IndexOf(char).
                while (count >= 4)
                {
                    if (*pCh == value) goto ReturnIndex;
                    if (*(pCh - 1) == value) goto ReturnIndex1;
                    if (*(pCh - 2) == value) goto ReturnIndex2;
                    if (*(pCh - 3) == value) goto ReturnIndex3;

                    count -= 4;
                    pCh -= 4;
                }

                while (count > 0)
                {
                    if (*pCh == value)
                        goto ReturnIndex;

                    count--;
                    pCh--;
                }

                return -1;

                ReturnIndex3: pCh--;
                ReturnIndex2: pCh--;
                ReturnIndex1: pCh--;
                ReturnIndex:
                return (int)(pCh - pChars);
            }
        }

        // Returns the index of the last occurrence of any specified character in the current instance.
        // The search starts at startIndex and runs backwards to startIndex - count + 1.
        // The character at position startIndex is included in the search. startIndex is the larger
        // index within the string.
        //
        public int LastIndexOfAny(char[] anyOf)
        {
            return LastIndexOfAny(anyOf, this.Length - 1, this.Length);
        }

        public int LastIndexOfAny(char[] anyOf, int startIndex)
        {
            return LastIndexOfAny(anyOf, startIndex, startIndex + 1);
        }

        public unsafe int LastIndexOfAny(char[] anyOf, int startIndex, int count)
        {
            if (anyOf == null)
                throw new ArgumentNullException(nameof(anyOf));

            if (Length == 0)
                return -1;

            if ((uint)startIndex >= (uint)Length)
            {
                throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);
            }

            if ((count < 0) || ((count - 1) > startIndex))
            {
                throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);
            }

            if (anyOf.Length > 1)
            {
                return LastIndexOfCharArray(anyOf, startIndex, count);
            }
            else if (anyOf.Length == 1)
            {
                return LastIndexOf(anyOf[0], startIndex, count);
            }
            else // anyOf.Length == 0
            {
                return -1;
            }
        }

        private unsafe int LastIndexOfCharArray(char[] anyOf, int startIndex, int count)
        {
            // use probabilistic map, see InitializeProbabilisticMap
            ProbabilisticMap map = default(ProbabilisticMap);
            uint* charMap = (uint*)&map;

            InitializeProbabilisticMap(charMap, anyOf);

            fixed (char* pChars = &_firstChar)
            {
                char* pCh = pChars + startIndex;

                while (count > 0)
                {
                    int thisChar = *pCh;

                    if (IsCharBitSet(charMap, (byte)thisChar) &&
                        IsCharBitSet(charMap, (byte)(thisChar >> 8)) &&
                        ArrayContains((char)thisChar, anyOf))
                    {
                        return (int)(pCh - pChars);
                    }

                    count--;
                    pCh--;
                }

                return -1;
            }
        }

        // Returns the index of the last occurrence of any character in value in the current instance.
        // The search starts at startIndex and runs backwards to startIndex - count + 1.
        // The character at position startIndex is included in the search. startIndex is the larger
        // index within the string.
        //
        public int LastIndexOf(String value)
        {
            return LastIndexOf(value, this.Length - 1, this.Length, StringComparison.CurrentCulture);
        }

        public int LastIndexOf(String value, int startIndex)
        {
            return LastIndexOf(value, startIndex, startIndex + 1, StringComparison.CurrentCulture);
        }

        public int LastIndexOf(String value, int startIndex, int count)
        {
            if (count < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);
            }

            return LastIndexOf(value, startIndex, count, StringComparison.CurrentCulture);
        }

        public int LastIndexOf(String value, StringComparison comparisonType)
        {
            return LastIndexOf(value, this.Length - 1, this.Length, comparisonType);
        }

        public int LastIndexOf(String value, int startIndex, StringComparison comparisonType)
        {
            return LastIndexOf(value, startIndex, startIndex + 1, comparisonType);
        }

        public int LastIndexOf(String value, int startIndex, int count, StringComparison comparisonType)
        {
            if (value == null)
                throw new ArgumentNullException(nameof(value));

            // Special case for 0 length input strings
            if (this.Length == 0 && (startIndex == -1 || startIndex == 0))
                return (value.Length == 0) ? 0 : -1;

            // Now after handling empty strings, make sure we're not out of range
            if (startIndex < 0 || startIndex > this.Length)
                throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);

            // Make sure that we allow startIndex == this.Length
            if (startIndex == this.Length)
            {
                startIndex--;
                if (count > 0)
                    count--;
            }

            // 2nd half of this also catches when startIndex == MAXINT, so MAXINT - 0 + 1 == -1, which is < 0.
            if (count < 0 || startIndex - count + 1 < 0)
                throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);

            // If we are looking for nothing, just return startIndex
            if (value.Length == 0)
                return startIndex;

            switch (comparisonType)
            {
                case StringComparison.CurrentCulture:
                    return CultureInfo.CurrentCulture.CompareInfo.LastIndexOf(this, value, startIndex, count, CompareOptions.None);

                case StringComparison.CurrentCultureIgnoreCase:
                    return CultureInfo.CurrentCulture.CompareInfo.LastIndexOf(this, value, startIndex, count, CompareOptions.IgnoreCase);

                case StringComparison.InvariantCulture:
                    return CultureInfo.InvariantCulture.CompareInfo.LastIndexOf(this, value, startIndex, count, CompareOptions.None);

                case StringComparison.InvariantCultureIgnoreCase:
                    return CultureInfo.InvariantCulture.CompareInfo.LastIndexOf(this, value, startIndex, count, CompareOptions.IgnoreCase);

                case StringComparison.Ordinal:
                    return CultureInfo.InvariantCulture.CompareInfo.LastIndexOf(this, value, startIndex, count, CompareOptions.Ordinal);

                case StringComparison.OrdinalIgnoreCase:
                    return TextInfo.LastIndexOfStringOrdinalIgnoreCase(this, value, startIndex, count);

                default:
                    throw new ArgumentException(SR.NotSupported_StringComparison, nameof(comparisonType));
            }
        }

        // 256-bit scratch area reinterpreted as uint[8] by the map helpers above.
        [StructLayout(LayoutKind.Explicit, Size = PROBABILISTICMAP_SIZE * sizeof(uint))]
        private struct ProbabilisticMap { }
    }
}
// Python Tools for Visual Studio // Copyright(c) Microsoft Corporation // All rights reserved. // // Licensed under the Apache License, Version 2.0 (the License); you may not use // this file except in compliance with the License. You may obtain a copy of the // License at http://www.apache.org/licenses/LICENSE-2.0 // // THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS // OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY // IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, // MERCHANTABLITY OR NON-INFRINGEMENT. // // See the Apache Version 2.0 License for specific language governing // permissions and limitations under the License. using System; using Microsoft.PythonTools.DkmDebugger; using Microsoft.VisualStudio; using Microsoft.VisualStudio.Debugger.Interop; // This file contains the various event objects that are sent to the debugger from the sample engine via IDebugEventCallback2::Event. // These are used in EngineCallback.cs. // The events are how the engine tells the debugger about what is happening in the debuggee process. // There are three base classe the other events derive from: AD7AsynchronousEvent, AD7StoppingEvent, and AD7SynchronousEvent. These // each implement the IDebugEvent2.GetAttributes method for the type of event they represent. // Most events sent the debugger are asynchronous events. 
namespace Microsoft.PythonTools.Debugger.DebugEngine
{
    // ---- Event base classes -------------------------------------------------
    // Each base class fixes the IDebugEvent2 attribute word for one delivery
    // style; the concrete event classes below choose their semantics simply by
    // inheriting from the appropriate base.

    // Delivered asynchronously; the debuggee keeps running.
    class AD7AsynchronousEvent : IDebugEvent2
    {
        public const uint Attributes = (uint)enum_EVENTATTRIBUTES.EVENT_ASYNCHRONOUS;

        int IDebugEvent2.GetAttributes(out uint eventAttributes)
        {
            eventAttributes = Attributes;
            return VSConstants.S_OK;
        }
    }

    // Delivered asynchronously and stops the debuggee.
    class AD7StoppingEvent : IDebugEvent2
    {
        public const uint Attributes = (uint)enum_EVENTATTRIBUTES.EVENT_ASYNC_STOP;

        int IDebugEvent2.GetAttributes(out uint eventAttributes)
        {
            eventAttributes = Attributes;
            return VSConstants.S_OK;
        }
    }

    // Delivered synchronously; the sender waits for the SDM to continue.
    class AD7SynchronousEvent : IDebugEvent2
    {
        public const uint Attributes = (uint)enum_EVENTATTRIBUTES.EVENT_SYNCHRONOUS;

        int IDebugEvent2.GetAttributes(out uint eventAttributes)
        {
            eventAttributes = Attributes;
            return VSConstants.S_OK;
        }
    }

    // Sent to the session debug manager (SDM) when an instance of the debug
    // engine is created.
    sealed class AD7EngineCreateEvent : AD7AsynchronousEvent, IDebugEngineCreateEvent2
    {
        public const string IID = "FE5B734C-759D-4E59-AB04-F103343BDD06";

        private readonly IDebugEngine2 _engine;

        private AD7EngineCreateEvent(AD7Engine engine)
        {
            _engine = engine;
        }

        public static void Send(AD7Engine engine)
        {
            engine.Send(new AD7EngineCreateEvent(engine), IID, null, null);
        }

        int IDebugEngineCreateEvent2.GetEngine(out IDebugEngine2 engine)
        {
            engine = _engine;
            return VSConstants.S_OK;
        }
    }

    // Sent to the SDM when a program is attached to.
    sealed class AD7ProgramCreateEvent : AD7AsynchronousEvent, IDebugProgramCreateEvent2
    {
        public const string IID = "96CD11EE-ECD4-4E89-957E-B5D496FC4139";

        internal static void Send(AD7Engine engine)
        {
            engine.Send(new AD7ProgramCreateEvent(), IID, null);
        }
    }

    // Sent to the SDM when an expression evaluation has finished.
    sealed class AD7ExpressionEvaluationCompleteEvent : AD7AsynchronousEvent, IDebugExpressionEvaluationCompleteEvent2
    {
        public const string IID = "C0E13A85-238A-4800-8315-D947C960A843";

        private readonly IDebugExpression2 _expr;
        private readonly IDebugProperty2 _result;

        public AD7ExpressionEvaluationCompleteEvent(IDebugExpression2 expression, IDebugProperty2 property)
        {
            _expr = expression;
            _result = property;
        }

        public int GetExpression(out IDebugExpression2 ppExpr)
        {
            ppExpr = _expr;
            return VSConstants.S_OK;
        }

        public int GetResult(out IDebugProperty2 ppResult)
        {
            ppResult = _result;
            return VSConstants.S_OK;
        }
    }

    // Sent to the SDM when a module is loaded or unloaded.
    sealed class AD7ModuleLoadEvent : AD7AsynchronousEvent, IDebugModuleLoadEvent2
    {
        public const string IID = "989DB083-0D7C-40D1-A9D9-921BF611A4B2";

        private readonly AD7Module _module;
        private readonly bool _isLoad;

        public AD7ModuleLoadEvent(AD7Module module, bool fLoad)
        {
            _module = module;
            _isLoad = fLoad;
        }

        int IDebugModuleLoadEvent2.GetModule(out IDebugModule2 module, ref string debugMessage, ref int fIsLoad)
        {
            module = _module;
            // No textual message is surfaced in either direction.
            debugMessage = null;
            fIsLoad = _isLoad ? 1 : 0;
            return VSConstants.S_OK;
        }
    }

    // Sent to the SDM when a program has run to completion or is otherwise
    // destroyed.
    sealed class AD7ProgramDestroyEvent : AD7SynchronousEvent, IDebugProgramDestroyEvent2
    {
        public const string IID = "E147E9E3-6440-4073-A7B7-A65592C714B5";

        private readonly uint _exitCode;

        public AD7ProgramDestroyEvent(uint exitCode)
        {
            _exitCode = exitCode;
        }

        int IDebugProgramDestroyEvent2.GetExitCode(out uint exitCode)
        {
            exitCode = _exitCode;
            return VSConstants.S_OK;
        }
    }

    // Sent to the SDM when a thread is created in the program being debugged.
    sealed class AD7ThreadCreateEvent : AD7AsynchronousEvent, IDebugThreadCreateEvent2
    {
        public const string IID = "2090CCFC-70C5-491D-A5E8-BAD2DD9EE3EA";
    }

    // Sent to the SDM when a thread has exited.
    sealed class AD7ThreadDestroyEvent : AD7AsynchronousEvent, IDebugThreadDestroyEvent2
    {
        public const string IID = "2C3B7532-A36F-4A6E-9072-49BE649B8541";

        private readonly uint _exitCode;

        public AD7ThreadDestroyEvent(uint exitCode)
        {
            _exitCode = exitCode;
        }

        int IDebugThreadDestroyEvent2.GetExitCode(out uint exitCode)
        {
            exitCode = _exitCode;
            return VSConstants.S_OK;
        }
    }

    // Sent to the SDM when a program is loaded, before any code executes.
    sealed class AD7LoadCompleteEvent : AD7StoppingEvent, IDebugLoadCompleteEvent2
    {
        public const string IID = "B1844850-1349-45D4-9F12-495212F5EB0B";

        public AD7LoadCompleteEvent()
        {
        }

        internal static void Send(AD7Engine engine)
        {
            engine.Send(new AD7LoadCompleteEvent(), IID, null);
        }
    }

    // Tells the SDM that an asynchronous break completed successfully.
    sealed class AD7AsyncBreakCompleteEvent : AD7StoppingEvent, IDebugBreakEvent2
    {
        public const string IID = "c7405d1d-e24b-44e0-b707-d8a5a4e1641b";
    }

    // Tells the SDM that a step operation completed.
    sealed class AD7SteppingCompleteEvent : AD7StoppingEvent, IDebugStepCompleteEvent2
    {
        public const string IID = "0F7F24C1-74D9-4EA6-A3EA-7EDB2D81441D";
    }

    // Sent when a pending breakpoint has been bound in the debuggee.
    sealed class AD7BreakpointBoundEvent : AD7AsynchronousEvent, IDebugBreakpointBoundEvent2
    {
        public const string IID = "1dddb704-cf99-4b8a-b746-dabb01dd13a0";

        private readonly AD7PendingBreakpoint _pending;
        private readonly AD7BoundBreakpoint _bound;

        public AD7BreakpointBoundEvent(AD7PendingBreakpoint pendingBreakpoint, AD7BoundBreakpoint boundBreakpoint)
        {
            _pending = pendingBreakpoint;
            _bound = boundBreakpoint;
        }

        int IDebugBreakpointBoundEvent2.EnumBoundBreakpoints(out IEnumDebugBoundBreakpoints2 ppEnum)
        {
            // Exactly one breakpoint was bound; wrap it in a single-element enum.
            IDebugBoundBreakpoint2[] boundBreakpoints = new IDebugBoundBreakpoint2[] { _bound };
            ppEnum = new AD7BoundBreakpointsEnum(boundBreakpoints);
            return VSConstants.S_OK;
        }

        int IDebugBreakpointBoundEvent2.GetPendingBreakpoint(out IDebugPendingBreakpoint2 ppPendingBP)
        {
            ppPendingBP = _pending;
            return VSConstants.S_OK;
        }
    }

    // Sent when the program's entry point has been hit.
    sealed class AD7EntryPointEvent : AD7StoppingEvent, IDebugEntryPointEvent2
    {
        public const string IID = "e8414a3e-1642-48ec-829e-5f4040e16da9";
    }

    // Sent when a breakpoint is hit in the debuggee.
    sealed class AD7BreakpointEvent : AD7StoppingEvent, IDebugBreakpointEvent2
    {
        public const string IID = "501C1E21-C557-48B8-BA30-A1EAB0BC4A74";

        private readonly IEnumDebugBoundBreakpoints2 _boundBreakpoints;

        public AD7BreakpointEvent(IEnumDebugBoundBreakpoints2 boundBreakpoints)
        {
            _boundBreakpoints = boundBreakpoints;
        }

        int IDebugBreakpointEvent2.EnumBreakpoints(out IEnumDebugBoundBreakpoints2 ppEnum)
        {
            ppEnum = _boundBreakpoints;
            return VSConstants.S_OK;
        }
    }

    // Sent when an exception is raised (first chance or unhandled) in the
    // debuggee.
    sealed class AD7DebugExceptionEvent : AD7StoppingEvent, IDebugExceptionEvent2
    {
        public const string IID = "51A94113-8788-4A54-AE15-08B74FF922D0";

        private readonly string _typeName;
        private readonly string _description;
        private readonly bool _isUnhandled;

        public AD7DebugExceptionEvent(string typeName, string description, bool isUnhandled)
        {
            _typeName = typeName;
            _description = description;
            _isUnhandled = isUnhandled;
        }

        public int CanPassToDebuggee()
        {
            return VSConstants.S_FALSE;
        }

        public int GetException(EXCEPTION_INFO[] pExceptionInfo)
        {
            pExceptionInfo[0].guidType = AD7Engine.DebugEngineGuid;
            pExceptionInfo[0].bstrExceptionName = _typeName;
            pExceptionInfo[0].dwState = _isUnhandled
                ? enum_EXCEPTION_STATE.EXCEPTION_STOP_USER_UNCAUGHT
                : enum_EXCEPTION_STATE.EXCEPTION_STOP_FIRST_CHANCE;
            return VSConstants.S_OK;
        }

        public int GetExceptionDescription(out string pbstrDescription)
        {
            pbstrDescription = _description;
            return VSConstants.S_OK;
        }

        public int PassToDebuggee(int fPass)
        {
            return fPass != 0 ? VSConstants.S_OK : VSConstants.E_FAIL;
        }
    }

    // Carries a line of debuggee output to the SDM.
    sealed class AD7DebugOutputStringEvent2 : AD7AsynchronousEvent, IDebugOutputStringEvent2
    {
        public const string IID = "569C4BB1-7B82-46FC-AE28-4536DDAD753E";

        private readonly string _text;

        public AD7DebugOutputStringEvent2(string output)
        {
            _text = output;
        }

        public int GetString(out string pbstrString)
        {
            pbstrString = _text;
            return VSConstants.S_OK;
        }
    }

    // Engine-specific custom event routed to the package's event handler
    // service; delivered synchronously and immediately.
    sealed class AD7CustomEvent : IDebugEvent2, IDebugCustomEvent110
    {
        public const string IID = "2615D9BC-1948-4D21-81EE-7A963F20CF59";

        private readonly VsComponentMessage _message;

        public AD7CustomEvent(VsComponentMessage message)
        {
            _message = message;
        }

        public AD7CustomEvent(VsPackageMessage message, object param1 = null, object param2 = null)
            : this(new VsComponentMessage { MessageCode = (uint)message, Parameter1 = param1, Parameter2 = param2 })
        {
        }

        int IDebugEvent2.GetAttributes(out uint eventAttributes)
        {
            eventAttributes = (uint)(enum_EVENTATTRIBUTES.EVENT_SYNCHRONOUS | enum_EVENTATTRIBUTES.EVENT_IMMEDIATE);
            return VSConstants.S_OK;
        }

        int IDebugCustomEvent110.GetCustomEventInfo(out Guid guidVSService, VsComponentMessage[] message)
        {
            guidVSService = Guids.CustomDebuggerEventHandlerGuid;
            message[0] = _message;
            return VSConstants.S_OK;
        }
    }
}
/*
  Copyright (c) Microsoft Corporation. All rights reserved.
  Licensed under the MIT License. See License.txt in the project root for license information.
*/

using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Linq.Expressions;
using System.Xml.Linq;
using Lucene.Net.Search;
using Microsoft.Xrm.Client.Messages;
using Microsoft.Xrm.Sdk;
using Microsoft.Xrm.Sdk.Client;
using Microsoft.Xrm.Sdk.Metadata;
using Adxstudio.Xrm.Globalization;

namespace Adxstudio.Xrm.Data
{
	/// <summary>
	/// Extension methods for converting entity sequences to <see cref="DataTable"/>s
	/// and for composing "value in set" filters on LINQ queries.
	/// </summary>
	public static class EnumerableExtensions
	{
		/// <summary>
		/// Converts a sequence of <see cref="Entity"/> instances to a <see cref="DataTable"/>.
		/// </summary>
		/// <param name="entities">The entities to convert; must not be null.</param>
		/// <param name="serviceContext">Context used to retrieve attribute metadata for column captions; must not be null.</param>
		/// <param name="savedQuery">Optional saved query whose layoutxml supplies the initial column set.</param>
		/// <param name="onlyGenerateSavedQueryColumns">When true, columns are not auto-generated from entity attributes.</param>
		/// <param name="dateTimeFormat">Optional format string applied to DateTime attribute values.</param>
		/// <param name="dateTimeFormatProvider">Optional provider used with <paramref name="dateTimeFormat"/>.</param>
		/// <returns>The populated table; an empty table when <paramref name="entities"/> is empty.</returns>
		/// <exception cref="ArgumentNullException">entities or serviceContext is null.</exception>
		public static DataTable ToDataTable(this IEnumerable<Entity> entities, OrganizationServiceContext serviceContext, Entity savedQuery = null, bool onlyGenerateSavedQueryColumns = false, string dateTimeFormat = null, IFormatProvider dateTimeFormatProvider = null)
		{
			if (entities == null)
			{
				throw new ArgumentNullException("entities");
			}

			if (serviceContext == null)
			{
				throw new ArgumentNullException("serviceContext");
			}

			var table = new DataTable();
			// Materialize once: the sequence is consumed by several passes below.
			var entityArray = entities.ToArray();

			if (!entityArray.Any())
			{
				return table;
			}

			AddColumnsBasedOnSavedQuery(table, savedQuery);

			AddDataToTable(entityArray, table, !onlyGenerateSavedQueryColumns, dateTimeFormat, dateTimeFormatProvider);

			// Caption lookup only needs the logical entity name, taken from the first row.
			AddDisplayNamesToColumnCaptions(entityArray.First(), table, serviceContext);

			return table;
		}

		// Adds one column per cell listed in the saved query's layoutxml row, in
		// layout order. No-op when there is no saved query or no layout row.
		private static void AddColumnsBasedOnSavedQuery(DataTable table, Entity savedQuery)
		{
			if (savedQuery == null)
			{
				return;
			}

			var layoutXml = XElement.Parse(savedQuery.GetAttributeValue<string>("layoutxml"));
			var layoutRow = layoutXml.Element("row");

			if (layoutRow == null)
			{
				return;
			}

			var cellNames = layoutRow.Elements("cell").Select(cell => cell.Attribute("name")).Where(name => name != null);

			foreach (var name in cellNames)
			{
				table.Columns.Add(name.Value);
			}
		}

		// Copies attribute values into table rows. When autogenerateColumns is
		// true, a column is created on first sight of each attribute key.
		// Value precedence per cell: entity-reference name (falling back to its
		// id), then a formatted DateTime when a format was supplied, then the
		// platform's formatted value, then the raw value (or DBNull).
		private static void AddDataToTable(IEnumerable<Entity> entities, DataTable table, bool autogenerateColumns, string dateTimeFormat = null, IFormatProvider dateTimeFormatProvider = null)
		{
			foreach (var entity in entities)
			{
				var row = table.NewRow();

				foreach (var attribute in entity.Attributes)
				{
					if (!table.Columns.Contains(attribute.Key) && autogenerateColumns)
					{
						table.Columns.Add(attribute.Key);
					}

					if (table.Columns.Contains(attribute.Key))
					{
						// Unwrap aliased values (e.g. from linked-entity columns).
						var aliasedValue = attribute.Value as AliasedValue;
						object value = aliasedValue != null ? aliasedValue.Value : attribute.Value;

						var entityReference = value as EntityReference;

						if (entityReference != null)
						{
							row[attribute.Key] = entityReference.Name ?? entityReference.Id.ToString();
						}
						else
						{
							var dateTime = value as DateTime?;

							if (dateTimeFormat != null && dateTime != null)
							{
								row[attribute.Key] = dateTimeFormatProvider == null
									? dateTime.Value.ToString(dateTimeFormat)
									: dateTime.Value.ToString(dateTimeFormat, dateTimeFormatProvider);
							}
							else
							{
								row[attribute.Key] = entity.FormattedValues.Contains(attribute.Key)
									? entity.FormattedValues[attribute.Key]
									: value ?? DBNull.Value;
							}
						}
					}
				}

				table.Rows.Add(row);
			}
		}

		// Replaces column captions with the attributes' localized display names,
		// where metadata provides one.
		private static void AddDisplayNamesToColumnCaptions(Entity entity, DataTable table, OrganizationServiceContext serviceContext)
		{
			var attributeMetadatas = serviceContext.RetrieveEntity(entity.LogicalName, EntityFilters.Attributes).Attributes;

			foreach (DataColumn column in table.Columns)
			{
				var attributeMetadata = attributeMetadatas.FirstOrDefault(metadata => metadata.LogicalName == column.ColumnName);

				if (attributeMetadata != null && attributeMetadata.DisplayName.UserLocalizedLabel != null)
				{
					column.Caption = attributeMetadata.DisplayName.GetLocalizedLabelString();
				}
			}
		}

		/// <summary>
		/// Loads a DataTable from a sequence of objects.
		/// </summary>
		/// <param name="source">The sequence of objects to load into the DataTable.</param>
		public static DataTable CopyToDataTable<T>(this IEnumerable<T> source)
		{
			return new ObjectShredder<T>().Shred(source, null, null);
		}

		/// <summary>
		/// Loads a DataTable from a sequence of objects.
		/// </summary>
		/// <param name="source">The sequence of objects to load into the DataTable.</param>
		/// <param name="table">The input table.</param>
		/// <param name="options">Specifies how values from the source sequence will be applied to
		/// existing rows in the table.</param>
		public static DataTable CopyToDataTable<T>(this IEnumerable<T> source, DataTable table, LoadOption? options)
		{
			return new ObjectShredder<T>().Shred(source, table, options);
		}

		/// <summary>
		/// Filters a query based on whether a selected value is equal to one of the values in a given collection.
		/// </summary>
		/// <typeparam name="T">The query type.</typeparam>
		/// <typeparam name="TValue">The type of value to be compared.</typeparam>
		/// <param name="queryable">The query.</param>
		/// <param name="selector">A lambda expression that will select the value to be compared.</param>
		/// <param name="values">The collection of values to be compared against.</param>
		/// <returns>The query, with filter appended; an empty query when <paramref name="values"/> is empty.</returns>
		/// <example>
		/// <![CDATA[
		/// var query = serviceContext.CreateQuery("adx_webpage").WhereIn(e => e.GetAttributeValue<Guid>("adx_webpageid"), pageIds);
		/// ]]>
		/// </example>
		public static IQueryable<T> WhereIn<T, TValue>(this IQueryable<T> queryable, Expression<Func<T, TValue>> selector, IEnumerable<TValue> values)
		{
			// Materialize once. The previous implementation re-enumerated
			// `values` for every element (Any/First/Skip chains), which is
			// O(n^2) enumerations and incorrect for one-shot sequences.
			var valueList = values as ICollection<TValue> ?? values.ToList();

			return valueList.Count > 0
				? queryable.Where(In(selector, valueList))
				: Enumerable.Empty<T>().AsQueryable();
		}

		// Builds `x => x.sel == v0 || x.sel == v1 || ...` left-associatively.
		// Iterative on purpose: the previous recursive version used one stack
		// frame per value and could overflow for large value sets.
		// Precondition: values is non-empty (guaranteed by WhereIn).
		private static Expression<Func<T, bool>> In<T, TValue>(Expression<Func<T, TValue>> selector, IEnumerable<TValue> values)
		{
			Expression body = null;

			foreach (var value in values)
			{
				var comparison = Expression.Equal(selector.Body, Expression.Constant(value));

				body = body == null ? comparison : Expression.OrElse(body, comparison);
			}

			return Expression.Lambda<Func<T, bool>>(body, selector.Parameters.First());
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Globalization;

/// <summary>
/// Tests for Parse(System.String, System.IFormatProvider) on System.Decimal:
/// round-trips of random/boundary values plus the documented failure modes
/// (ArgumentNullException, FormatException, OverflowException).
/// </summary>
public class DecimalParse4
{
    #region Public Methods
    // Runs all scenarios; returns true only if every scenario passed.
    public bool RunTests()
    {
        bool retVal = true;
        TestLibrary.TestFramework.LogInformation("[Positive]");
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;
        retVal = PosTest3() && retVal;
        TestLibrary.TestFramework.LogInformation("[Negitive]");
        retVal = NegTest1() && retVal;
        retVal = NegTest2() && retVal;
        retVal = NegTest3() && retVal;
        retVal = NegTest4() && retVal;
        return retVal;
    }

    #region Positive Test Cases
    // ToString/Parse round-trip for decimals built from random double, int, and float.
    public bool PosTest1()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest1: Calling Parse method.");
        try
        {
            Decimal m1 = new decimal(TestLibrary.Generator.GetDouble(-55));
            CultureInfo myCulture = CultureInfo.CurrentCulture;
            string m1ToString = m1.ToString(myCulture);
            Decimal expectValue = m1;
            Decimal actualValue = Decimal.Parse(m1ToString, myCulture);
            if (actualValue != expectValue)
            {
                TestLibrary.TestFramework.LogError("001.1", "Parse method should return " + expectValue);
                retVal = false;
            }

            m1 = new decimal(TestLibrary.Generator.GetInt32(-55));
            m1ToString = m1.ToString(myCulture);
            expectValue = m1;
            actualValue = Decimal.Parse(m1ToString, myCulture);
            if (actualValue != expectValue)
            {
                TestLibrary.TestFramework.LogError("001.2", "Parse method should return " + expectValue);
                retVal = false;
            }

            m1 = new decimal(TestLibrary.Generator.GetSingle(-55));
            m1ToString = m1.ToString(myCulture);
            expectValue = m1;
            actualValue = Decimal.Parse(m1ToString, myCulture);
            if (actualValue != expectValue)
            {
                TestLibrary.TestFramework.LogError("001.3", "Parse method should return " + expectValue);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("001.0", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // Round-trip at the extremes: Decimal.MaxValue and Decimal.MinValue.
    public bool PosTest2()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest2: Calling Parse method and the decimal is MaxValue and MinValue.");
        try
        {
            Decimal m1 = Decimal.MaxValue;
            CultureInfo myCulture = CultureInfo.CurrentCulture;
            string m1ToString = m1.ToString(myCulture);
            Decimal expectValue = m1;
            Decimal actualValue = Decimal.Parse(m1ToString, myCulture);
            if (actualValue != expectValue)
            {
                TestLibrary.TestFramework.LogError("002.1", "Parse method should return " + expectValue);
                TestLibrary.TestFramework.LogInformation("Actual value: " + actualValue.ToString());
                retVal = false;
            }

            m1 = Decimal.MinValue;
            m1ToString = m1.ToString(myCulture);
            expectValue = m1;
            actualValue = Decimal.Parse(m1ToString, myCulture);
            if (actualValue != expectValue)
            {
                TestLibrary.TestFramework.LogError("002.2", "Parse method should return " + expectValue);
                TestLibrary.TestFramework.LogInformation("Actual value: " + actualValue.ToString());
                retVal = false;
            }
        }
        catch (Exception e)
        {
            // Fixed copy-paste bug: this scenario previously logged error id "001.0".
            TestLibrary.TestFramework.LogError("002.0", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // Round-trip for a high-precision negative literal using the invariant culture.
    public bool PosTest3()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest3: Calling Parse method and the decimal is Especial value.");
        try
        {
            Decimal m1 = -9876543210.9876543210m;
            CultureInfo myCulture = CultureInfo.InvariantCulture;
            string m1ToString = m1.ToString(myCulture);
            Decimal expectValue = m1;
            Decimal actualValue = Decimal.Parse(m1ToString, myCulture);
            if (actualValue != expectValue)
            {
                TestLibrary.TestFramework.LogError("003.1", "Parse method should return " + expectValue);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            // Fixed copy-paste bug: this scenario previously logged error id "001.0".
            TestLibrary.TestFramework.LogError("003.0", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }
    #endregion

    #region Negitive test
    // Null input must throw ArgumentNullException.
    public bool NegTest1()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("NegTest1: s is a null reference.");
        try
        {
            string m1ToString = null;
            CultureInfo myCulture = CultureInfo.InvariantCulture;
            Decimal actualValue = Decimal.Parse(m1ToString, myCulture);
            TestLibrary.TestFramework.LogError("101.1", "ArgumentNullException should be caught.");
            retVal = false;
        }
        catch (ArgumentNullException)
        {
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("101.0", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // Non-numeric input must throw FormatException.
    public bool NegTest2()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("NegTest2: s is not in the correct format.");
        try
        {
            string m1ToString = "ADAAAW";
            CultureInfo myCulture = CultureInfo.InvariantCulture;
            Decimal actualValue = Decimal.Parse(m1ToString, myCulture);
            TestLibrary.TestFramework.LogError("102.1", "FormatException should be caught.");
            retVal = false;
        }
        catch (FormatException)
        {
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("102.0", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // A value above MaxValue (MaxValue's digits doubled) must throw OverflowException.
    public bool NegTest3()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("NegTest3: s represents a number greater than MaxValue.");
        try
        {
            Decimal myDecimal = decimal.MaxValue;
            CultureInfo myCulture = CultureInfo.CurrentCulture;
            string m1ToString = myDecimal.ToString(myCulture);
            m1ToString = m1ToString + m1ToString;
            Decimal actualValue = Decimal.Parse(m1ToString, myCulture);
            TestLibrary.TestFramework.LogError("103.1", "OverflowException should be caught.");
            retVal = false;
        }
        catch (OverflowException)
        {
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("103.0", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // A value below MinValue (MinValue followed by its negation's digits) must throw OverflowException.
    public bool NegTest4()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("NegTest4: s represents a number less than MinValue.");
        try
        {
            Decimal myDecimal = decimal.MinValue;
            CultureInfo myCulture = CultureInfo.CurrentCulture;
            string m1ToString = myDecimal.ToString(myCulture);
            m1ToString = m1ToString + Decimal.Negate(myDecimal).ToString(myCulture);
            Decimal actualValue = Decimal.Parse(m1ToString, myCulture);
            TestLibrary.TestFramework.LogError("104.1", "OverflowException should be caught.");
            retVal = false;
        }
        catch (OverflowException)
        {
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("104.0", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }
    #endregion
    #endregion

    // Test harness entry point: 100 signals pass, 0 signals failure.
    public static int Main()
    {
        DecimalParse4 test = new DecimalParse4();
        TestLibrary.TestFramework.BeginTestCase("DecimalParse4");
        if (test.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }

    #region private method
    // Helper retained for template parity with sibling tests; always reports Decimal.
    public TypeCode GetExpectValue(Decimal myValue)
    {
        return TypeCode.Decimal;
    }
    #endregion
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.IO;
using System.Xml.Schema;
using Xunit;
using Xunit.Abstractions;

namespace System.Xml.Tests
{
    // ===================== Constructor =====================
    // Exercises the XmlSchemaValidator constructor: null arguments, name table usage,
    // schema set states, schema content errors, and custom namespace resolvers.
    public class TCConstructor : CXmlSchemaValidatorTestCase
    {
        private ITestOutputHelper _output;

        public TCConstructor(ITestOutputHelper output) : base(output)
        {
            _output = output;
        }

        [Fact]
        public void SetXmlNameTableToNull()
        {
            XmlSchemaValidator val;
            try
            {
                val = new XmlSchemaValidator(null, new XmlSchemaSet(), new XmlNamespaceManager(new NameTable()), AllFlags);
            }
            catch (ArgumentNullException)
            {
                return;
            }
            _output.WriteLine("ArgumentNullException was not thrown!");
            Assert.True(false);
        }

        [Theory]
        [InlineData("empty")]
        [InlineData("full")]
        public void SetXmlNameTableTo_Empty_Full(String nameTableStatus)
        {
            XmlSchemaValidator val;
            ObservedNameTable nt = new ObservedNameTable();
            XmlSchemaInfo info = new XmlSchemaInfo();
            XmlSchemaSet sch = CreateSchemaSetFromXml("<root />");

            if (nameTableStatus == "full")
            {
                nt.Add("root");
                nt.Add("foo");
                nt.IsAddCalled = false;
                nt.IsGetCalled = false;
            }

            val = new XmlSchemaValidator(nt, sch, new XmlNamespaceManager(new NameTable()), AllFlags);
            // xUnit idiom: Assert.NotNull instead of Assert.NotEqual(actual, null),
            // which also had (actual, expected) in the wrong order.
            Assert.NotNull(val);

            val.Initialize();
            val.ValidateElement("root", "", info);

            // Validation must populate the name table (Add) without probing it (Get).
            Assert.True(nt.IsAddCalled);
            Assert.False(nt.IsGetCalled);

            return;
        }

        [Fact]
        public void SetSchemaSetToNull()
        {
            XmlSchemaValidator val;
            try
            {
                val = new XmlSchemaValidator(new NameTable(), null, new XmlNamespaceManager(new NameTable()), AllFlags);
            }
            catch (ArgumentNullException)
            {
                return;
            }
            _output.WriteLine("ArgumentNullException was not thrown!");
            Assert.True(false);
        }

        [Theory]
        [InlineData("empty")]
        [InlineData("notcompiled")]
        [InlineData("compiled")]
        public void SetSchemaSetTo_Empty_NotCompiled_Compiled(String schemaSetStatus)
        {
            XmlSchemaValidator val;
            XmlSchemaSet sch = new XmlSchemaSet();

            if (schemaSetStatus != "empty")
            {
                sch.Add("", Path.Combine(TestData, XSDFILE_NO_TARGET_NAMESPACE));
                if (schemaSetStatus == "compiled")
                    sch.Compile();
            }

            val = new XmlSchemaValidator(new NameTable(), sch, new XmlNamespaceManager(new NameTable()), AllFlags);
            // xUnit idiom: Assert.NotNull instead of Assert.NotEqual(actual, null).
            Assert.NotNull(val);

            val.Initialize();
            val.ValidateElement("elem1", "", null);
            val.SkipToEndElement(null);
            val.EndValidation();

            return;
        }

        // BUG 304774 - resolved
        [Fact]
        public void SetSchemaSetWithInvalidContent_TypeCollision()
        {
            XmlSchemaValidator val;
            XmlSchemaInfo info = new XmlSchemaInfo();
            XmlSchemaSet sch = new XmlSchemaSet();

            sch.Add("", XmlReader.Create(new StringReader("<?xml version=\"1.0\" ?>\n" +
                                                          "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">\n" +
                                                          "    <xs:element name=\"root\" type=\"xs:int\" />\n" +
                                                          "</xs:schema>")));
            sch.Add("", XmlReader.Create(new StringReader("<?xml version=\"1.0\" ?>\n" +
                                                          "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">\n" +
                                                          "    <xs:element name=\"root\" type=\"xs:string\" />\n" +
                                                          "</xs:schema>")));

            try
            {
                val = new XmlSchemaValidator(new NameTable(), sch, new XmlNamespaceManager(new NameTable()), AllFlags);
            }
            catch (XmlSchemaValidationException)
            {
                return;
            }
            // Per resolved bug 304774 the constructor is allowed to succeed here.
            return;
        }

        [Fact]
        public void CustomXmlNameSpaceResolverImplementation()
        {
            XmlSchemaValidator val;
            XmlSchemaInfo info = new XmlSchemaInfo();
            XmlSchemaSet sch = new XmlSchemaSet();
            ObservedNamespaceManager nsManager = new ObservedNamespaceManager(new NameTable());
            nsManager.AddNamespace("n1", "uri:tempuri");

            val = new XmlSchemaValidator(new NameTable(), sch, nsManager, AllFlags);
            val.AddSchema(XmlSchema.Read(XmlReader.Create(new StringReader("<?xml version=\"1.0\" ?>\n" +
                                                                           "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\"\n" +
                                                                           "           xmlns:n1=\"uri:tempuri\"\n" +
                                                                           "           targetNamespace=\"uri:tempuri1\">\n" +
                                                                           "    <xs:complexType name=\"foo\">\n" +
                                                                           "        <xs:sequence>\n" +
                                                                           "            <xs:element name=\"bar\" />\n" +
                                                                           "        </xs:sequence>\n" +
                                                                           "    </xs:complexType>\n" +
                                                                           "</xs:schema>")), null));
            val.Initialize();
            val.ValidateElement("root", "", info, "n1:foo", null, null, null);

            // The validator must resolve the "n1" prefix through the supplied resolver.
            Assert.True(nsManager.IsLookupNamespaceCalled);

            return;
        }
    }

    // ===================== AddSchema =====================
    // Exercises XmlSchemaValidator.AddSchema: null input, deep-copy semantics, duplicate
    // adds, type collisions, incremental schema addition, imports, and inline-schema flags.
    public class TCAddSchema : CXmlSchemaValidatorTestCase
    {
        private ITestOutputHelper _output;

        public TCAddSchema(ITestOutputHelper output) : base(output)
        {
            _output = output;
        }

        [Fact]
        public void PassNull()
        {
            XmlSchemaValidator val = CreateValidator(new XmlSchemaSet());
            try
            {
                val.AddSchema(null);
            }
            catch (ArgumentNullException)
            {
                return;
            }
            Assert.True(false);
        }

        [Fact]
        public void CheckDeepCopyOfXmlSchema()
        {
            XmlSchemaValidator val = CreateValidator(new XmlSchemaSet());
            XmlSchemaInfo info = new XmlSchemaInfo();
            XmlSchema s = new XmlSchema();
            XmlSchemaElement e1 = new XmlSchemaElement();
            XmlSchemaElement e2 = new XmlSchemaElement();

            e1.Name = "foo";
            e2.Name = "bar";
            s.Items.Add(e1);
            val.AddSchema(s);
            // Mutating the schema after AddSchema must not affect the validator's copy,
            // so "bar" should still be unknown to it.
            s.Items.Add(e2);

            val.Initialize();
            try
            {
                val.ValidateElement("bar", "", info);
            }
            catch (XmlSchemaValidationException)
            {
                return;
            }
            Assert.True(false);
        }

        [Fact]
        public void AddSameXmlSchemaTwice()
        {
            XmlSchemaValidator val = CreateValidator(new XmlSchemaSet());
            XmlSchemaInfo info = new XmlSchemaInfo();
            XmlSchema s;

            s = XmlSchema.Read(XmlReader.Create(new StringReader("<?xml version=\"1.0\" ?>\n" +
                                                                 "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">\n" +
                                                                 "    <xs:element name=\"root\" />\n" +
                                                                 "</xs:schema>")), null);
            val.AddSchema(s);
            val.AddSchema(s);

            val.Initialize();
            val.ValidateElement("root", "", info);

            return;
        }

        [Fact]
        public void AddSameXmlSchemaWithTargetNamespaceTwice()
        {
            XmlSchemaValidator val = CreateValidator(new XmlSchemaSet());
            XmlSchemaInfo info = new XmlSchemaInfo();
            XmlSchema s;

            s = XmlSchema.Read(XmlReader.Create(new StringReader("<?xml version=\"1.0\" ?>\n" +
                                                                 "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\"\n" +
                                                                 "           xmlns:n1=\"uri:tempuri\"\n" +
                                                                 "           targetNamespace=\"uri:tempuri\">\n" +
                                                                 "    <xs:element name=\"root\" />\n" +
                                                                 "</xs:schema>")), null);
            val.AddSchema(s);
            val.AddSchema(s);

            val.Initialize();
            val.ValidateElement("root", "uri:tempuri", info);

            return;
        }

        [Fact]
        public void AddSchemasWithTypeCollision()
        {
            XmlSchemaValidator val = CreateValidator(new XmlSchemaSet());
            XmlSchemaInfo info = new XmlSchemaInfo();

            val.AddSchema(XmlSchema.Read(XmlReader.Create(new StringReader("<?xml version=\"1.0\" ?>\n" +
                                                                           "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">\n" +
                                                                           "    <xs:element name=\"root\" type=\"xs:string\" />\n" +
                                                                           "</xs:schema>")), null));
            try
            {
                val.AddSchema(XmlSchema.Read(XmlReader.Create(new StringReader("<?xml version=\"1.0\" ?>\n" +
                                                                               "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">\n" +
                                                                               "    <xs:element name=\"root\" type=\"xs:boolean\" />\n" +
                                                                               "</xs:schema>")), null));
            }
            catch (XmlSchemaValidationException)
            {
                return;
            }
            Assert.True(false);
        }

        [Fact]
        public void ValidateThenAddAdditionalSchemas()
        {
            XmlSchemaValidator val;
            XmlSchemaInfo info = new XmlSchemaInfo();

            val = CreateValidator(new XmlSchemaSet());
            val.AddSchema(XmlSchema.Read(XmlReader.Create(new StringReader("<?xml version=\"1.0\" ?>\n" +
                                                                           "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\"\n" +
                                                                           "           targetNamespace=\"uri:tempuri1\">\n" +
                                                                           "    <xs:element name=\"foo\" type=\"xs:string\" />\n" +
                                                                           "</xs:schema>")), null));
            val.Initialize();
            val.ValidateElement("foo", "uri:tempuri1", info);
            val.SkipToEndElement(info);

            // A schema added mid-validation must be usable for subsequent elements.
            val.AddSchema(XmlSchema.Read(XmlReader.Create(new StringReader("<?xml version=\"1.0\" ?>\n" +
                                                                           "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\"\n" +
                                                                           "           targetNamespace=\"uri:tempuri2\">\n" +
                                                                           "    <xs:element name=\"bar\" type=\"xs:string\" />\n" +
                                                                           "</xs:schema>")), null));
            val.ValidateElement("bar", "uri:tempuri2", info);
            val.SkipToEndElement(info);
            val.EndValidation();

            return;
        }

        [Theory]
        [InlineData(true)]
        [InlineData(false)]
        public void ImportAnotherSchemaThat_Is_IsNot_InSchemaSet(bool importTwice)
        {
            XmlSchemaValidator val;
            XmlSchemaInfo info = new XmlSchemaInfo();
            Uri u = new Uri(Uri.UriSchemeFile + Uri.SchemeDelimiter + Path.Combine(Path.GetFullPath(TestData), XSDFILE_TARGET_NAMESPACE));
            XmlSchema s1;
            XmlSchemaSet schemas = new XmlSchemaSet();
            schemas.XmlResolver = new XmlUrlResolver();

            val = CreateValidator(new XmlSchemaSet());
            s1 = XmlSchema.Read(new StringReader("<?xml version=\"1.0\"?>\n" +
                                                 "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\"\n" +
                                                 "           xmlns:temp=\"uri:tempuri\">\n" +
                                                 "    <xs:import namespace=\"uri:tempuri\"\n" +
                                                 "               schemaLocation=\"" + u.AbsoluteUri + "\" />\n" +
                                                 "    <xs:element name=\"root\">\n" +
                                                 "        <xs:complexType>\n" +
                                                 "            <xs:sequence>\n" +
                                                 "                <xs:element ref=\"temp:elem1\" />\n" +
                                                 "            </xs:sequence>\n" +
                                                 "        </xs:complexType>\n" +
                                                 "    </xs:element>\n" +
                                                 "</xs:schema>"), null);
            schemas.Add(s1);

            if (importTwice)
            {
                // Also add the imported schema(s) directly, so the validator sees them twice.
                foreach (XmlSchema s in schemas.Schemas("uri:tempuri"))
                    val.AddSchema(s);
            }

            val.AddSchema(s1);
            val.Initialize();
            val.ValidateElement("root", "", info);
            val.ValidateEndOfAttributes(null);
            val.ValidateElement("elem1", "uri:tempuri", info);
            val.ValidateEndOfAttributes(null);
            val.ValidateEndElement(info);
            val.ValidateEndElement(info);
            val.EndValidation();

            return;
        }

        //(BUG #306858)
        [Fact]
        public void SetIgnoreInlineSchemaFlag_AddSchemaShouldDoNothing()
        {
            XmlSchemaValidator val;
            XmlSchemaInfo info = new XmlSchemaInfo();

            val = CreateValidator(CreateSchemaSetFromXml("<root />"), XmlSchemaValidationFlags.ReportValidationWarnings | XmlSchemaValidationFlags.ProcessSchemaLocation);
            val.AddSchema(XmlSchema.Read(XmlReader.Create(new StringReader("<?xml version=\"1.0\" ?>\n" +
                                                                           "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">\n" +
                                                                           "    <xs:element name=\"foo\" type=\"xs:string\" />\n" +
                                                                           "</xs:schema>")), null));
            val.Initialize();
            try
            {
                val.ValidateElement("foo", "", info);
                throw new Exception("Additional schema was loaded!");
            }
            catch (XmlSchemaValidationException)
            {
                return;
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using Windows.Foundation.Collections;
using Windows.Security.Authentication.Web;
using Newtonsoft.Json.Linq;

namespace Auth0.LoginClient
{
    /// <summary>
    /// A simple client to Authenticate Users with Auth0.
    /// </summary>
    public partial class Auth0Client
    {
        private const string AuthorizeUrl = "https://{0}/authorize?client_id={1}&redirect_uri={2}&response_type=token&connection={3}&scope={4}";
        private const string LoginWidgetUrl = "https://{0}/login/?client={1}&redirect_uri={2}&response_type=token&scope={3}";
        private const string ParamQueryString = "&{0}={1}";
        private const string ResourceOwnerEndpoint = "https://{0}/oauth/ro";
        private const string DelegationEndpoint = "https://{0}/delegation";
        private const string UserInfoEndpoint = "https://{0}/userinfo?access_token={1}";
        private const string DefaultCallback = "https://{0}/mobile";

        private readonly string domain;
        private readonly string clientId;

        // Parameter names managed by this client; custom authParams must not override them.
        private static readonly string[] ReservedAuthParams = { "state", "access_token", "scope", "protocol", "device", "request_id", "connection_scopes", "nonce", "offline_mode" };

        private DiagnosticsHeader diagnostics;

        /// <summary>
        /// Creates a new instances on the Auth0 login client
        /// </summary>
        /// <param name="domain">The domain your want to log in to</param>
        /// <param name="clientId">The client ID</param>
        public Auth0Client(string domain, string clientId)
            : this(domain, clientId, null)
        {
        }

        /// <summary>
        /// Creates a new instances on the Auth0 login client
        /// </summary>
        /// <param name="domain">The domain your want to log in to</param>
        /// <param name="clientId">The client ID</param>
        /// <param name="diagnostics">The <see cref="DiagnosticsHeader"/> which is sent along with the request. This is used for telemetry by Auth0.
        /// To opt out of this, specify a value of DiagnosticsHeader.Suppress</param>
        public Auth0Client(string domain, string clientId, DiagnosticsHeader diagnostics)
        {
            // If no diagnostics header structure was specified, then revert to the default one
            if (diagnostics == null)
            {
                diagnostics = DiagnosticsHeader.Default;
            }

            this.diagnostics = diagnostics;
            this.domain = domain;
            this.clientId = clientId;
            this.DeviceIdProvider = new Device();
        }

        public Auth0User CurrentUser { get; private set; }

        public string CallbackUrl
        {
            get { return string.Format(DefaultCallback, this.domain); }
        }

        /// <summary>
        /// The component used to generate the device's unique id
        /// </summary>
        public IDeviceIdProvider DeviceIdProvider { get; set; }

        /// <summary>
        /// Login a user into an Auth0 application. Attempts to do a background login, but if unsuccessful shows an embedded browser window either showing the widget or skipping it by passing a connection name
        /// </summary>
        /// <param name="connection">Optional connection name to bypass the login widget</param>
        /// <param name="withRefreshToken">true to include the refresh_token in the response, false (default) otherwise.
        /// The refresh_token allows you to renew the id_token indefinitely (does not expire) unless specifically revoked.</param>
        /// <param name="scope">Optional scope, either 'openid' or 'openid profile'</param>
        /// <param name="authParams">Additional parameters to forward to Auth0 or to the IdP (like login_hint).</param>
        /// <returns>Returns a Task of Auth0User</returns>
        public async Task<Auth0User> LoginAsync(string connection = "", bool withRefreshToken = false, string scope = "openid", IDictionary<string, string> authParams = null)
        {
            scope = IncreaseScopeWithOfflineAccess(withRefreshToken, scope);

            var auth = await this.GetAuthenticatorAsync(connection, scope, authParams);

            if (auth.ResponseStatus == WebAuthenticationStatus.Success)
            {
                Dictionary<string, string> tokens = ParseResult(auth.ResponseData);
                if (tokens != null)
                {
                    if (tokens.ContainsKey("error"))
                    {
                        var failureMessage = new StringBuilder();
                        failureMessage.Append("Error=").Append(tokens["error"]);
                        if (tokens.ContainsKey("error_description"))
                        {
                            failureMessage.Append(";Description=").Append(tokens["error_description"]);
                        }
                        if (tokens.ContainsKey("error_uri"))
                        {
                            failureMessage.Append(";Uri=").Append(tokens["error_uri"]);
                        }
                        throw new AuthenticationErrorException(failureMessage.ToString());
                    }

                    // Note: a dead TaskCompletionSource<Auth0User> that was never awaited
                    // has been removed here; the result is returned directly below.
                    this.SetupCurrentUser(tokens);
                }
                else
                {
                    throw new AuthenticationErrorException();
                }
            }
            else if (auth.ResponseStatus == WebAuthenticationStatus.UserCancel)
            {
                throw new AuthenticationCanceledException();
            }
            // NOTE(review): other statuses (e.g. ErrorHttp) fall through and return the
            // previous CurrentUser unchanged — confirm this is the intended behavior.

            return this.CurrentUser;
        }

        /// <summary>
        /// Log a user into an Auth0 application given an user name and password.
        /// </summary>
        /// <returns>Task that will complete when the user has finished authentication.</returns>
        /// <param name="connection" type="string">The name of the connection to use in Auth0. Connection defines an Identity Provider.</param>
        /// <param name="userName" type="string">User name.</param>
        /// <param name="password" type="string">User password.</param>
        /// <param name="withRefreshToken">true to include the refresh_token in the response, false otherwise.
        /// The refresh_token allows you to renew the id_token indefinitely (does not expire) unless specifically revoked.</param>
        /// <param name="scope">Scope.</param>
        public async Task<Auth0User> LoginAsync(string connection, string userName, string password, bool withRefreshToken = false, string scope = "openid")
        {
            scope = IncreaseScopeWithOfflineAccess(withRefreshToken, scope);

            var endpoint = string.Format(ResourceOwnerEndpoint, this.domain);
            var parameters = new Dictionary<string, string>
            {
                {"client_id", this.clientId},
                {"connection", connection},
                {"username", userName},
                {"password", password},
                {"grant_type", "password"},
                {"scope", scope}
            };

            // Add the diagnostics query string, unless user explicitly opted out of it
            if (!object.ReferenceEquals(diagnostics, DiagnosticsHeader.Suppress))
                parameters.Add("auth0client", diagnostics.ToString());

            if (scope.Contains("offline_access"))
            {
                var deviceId = Uri.EscapeDataString(await this.DeviceIdProvider.GetDeviceId());
                parameters.Add("device", deviceId);
            }

            var request = new HttpClient();
            var result = await request.PostAsync(new Uri(endpoint), new FormUrlEncodedContent(parameters));

            // The previous catch (Exception ex) { throw ex; } wrapper destroyed the stack
            // trace and added nothing; exceptions now propagate naturally. The blocking
            // .Result read is replaced by await to avoid deadlocks on a UI context.
            result.EnsureSuccessStatusCode();
            var text = await result.Content.ReadAsStringAsync();
            var data = JObject.Parse(text).ToObject<Dictionary<string, string>>();

            if (data.ContainsKey("error"))
            {
                throw new UnauthorizedAccessException("Error authenticating: " + data["error"]);
            }
            else if (data.ContainsKey("access_token"))
            {
                this.SetupCurrentUser(data);
            }
            else
            {
                throw new UnauthorizedAccessException(
                    "Expected access_token in access token response, but did not receive one.");
            }

            return this.CurrentUser;
        }

        /// <summary>
        /// Renews the idToken (JWT)
        /// </summary>
        /// <returns>The refreshed token.</returns>
        /// <param name="refreshToken">The refresh token</param>
        /// <param name="options">Additional parameters.</param>
        public async Task<JObject> RefreshToken(string refreshToken = "", Dictionary<string, string> options = null)
        {
            var emptyToken = string.IsNullOrEmpty(refreshToken);
            if (emptyToken && (this.CurrentUser == null || string.IsNullOrEmpty(this.CurrentUser.RefreshToken)))
            {
                throw new InvalidOperationException(
                    "The current user's refresh_token could not be retrieved and no refresh_token was provided as parameter");
            }

            return await this.GetDelegationToken(
                api: "app",
                refreshToken: emptyToken ? this.CurrentUser.RefreshToken : refreshToken,
                options: options);
        }

        /// <summary>
        /// Verifies if the jwt for the current user has expired.
        /// </summary>
        /// <returns>true if the token has expired, false otherwise.</returns>
        /// <remarks>Must be logged in before invoking.</remarks>
        public bool HasTokenExpired()
        {
            if (string.IsNullOrEmpty(this.CurrentUser.IdToken))
            {
                throw new InvalidOperationException("You need to login first.");
            }

            return TokenValidator.HasExpired(this.CurrentUser.IdToken);
        }

        /// <summary>
        /// Renews the idToken (JWT)
        /// </summary>
        /// <returns>The refreshed token.</returns>
        /// <remarks>The JWT must not have expired.</remarks>
        /// <param name="options">Additional parameters.</param>
        public Task<JObject> RenewIdToken(Dictionary<string, string> options = null)
        {
            if (string.IsNullOrEmpty(this.CurrentUser.IdToken))
            {
                throw new InvalidOperationException("You need to login first.");
            }

            options = options ?? new Dictionary<string, string>();

            if (!options.ContainsKey("scope"))
            {
                options["scope"] = "passthrough";
            }

            return this.GetDelegationToken(
                api: "app",
                idToken: this.CurrentUser.IdToken,
                options: options);
        }

        /// <summary>
        /// Get a delegation token
        /// </summary>
        /// <returns>Delegation token result.</returns>
        /// <param name="api">The type of the API to be used.</param>
        /// <param name="idToken">The string representing the JWT. Useful only if not expired.</param>
        /// <param name="refreshToken">The refresh token.</param>
        /// <param name="targetClientId">The clientId of the target application for which to obtain a delegation token.</param>
        /// <param name="options">Additional parameters.</param>
        public Task<JObject> GetDelegationToken(string api = "", string idToken = "", string refreshToken = "", string targetClientId = "", Dictionary<string, string> options = null)
        {
            if (!(string.IsNullOrEmpty(idToken) || string.IsNullOrEmpty(refreshToken)))
            {
                throw new InvalidOperationException(
                    "You must provide either the idToken parameter or the refreshToken parameter, not both.");
            }

            if (string.IsNullOrEmpty(idToken) && string.IsNullOrEmpty(refreshToken))
            {
                if (this.CurrentUser == null || string.IsNullOrEmpty(this.CurrentUser.IdToken))
                {
                    throw new InvalidOperationException(
                        "You need to login first or specify a value for idToken or refreshToken parameter.");
                }

                idToken = this.CurrentUser.IdToken;
            }

            options = options ?? new Dictionary<string, string>();
            options["id_token"] = idToken;
            options["api_type"] = api;
            options["refresh_token"] = refreshToken;

            var endpoint = string.Format(DelegationEndpoint, this.domain);
            var parameters = new Dictionary<string, string>
            {
                {"grant_type", "urn:ietf:params:oauth:grant-type:jwt-bearer"},
                {"target", targetClientId},
                {"client_id", this.clientId}
            };

            // custom parameters
            foreach (var option in options)
            {
                if (!parameters.ContainsKey(option.Key))
                    parameters.Add(option.Key, option.Value);
            }

            var request = new HttpClient();
            return request.PostAsync(new Uri(endpoint), new FormUrlEncodedContent(parameters)).ContinueWith(t =>
            {
                // The previous no-op catch { throw; } has been removed; failures of the
                // POST surface through t.Result as before.
                var text = t.Result.Content.ReadAsStringAsync().Result;
                return JObject.Parse(text);
            });
        }

        /// <summary>
        /// Log a user out of a Auth0 application.
        /// </summary>
        public void Logout()
        {
            this.CurrentUser = null;
        }

        // Appends "offline_access" to the scope when a refresh token was requested.
        private static string IncreaseScopeWithOfflineAccess(bool withRefreshToken, string scope)
        {
            if (withRefreshToken && !scope.Contains("offline_access"))
            {
                scope += " offline_access";
            }

            return scope;
        }

        // Fetches the user profile for the given access token and publishes CurrentUser.
        private void SetupCurrentUser(IDictionary<string, string> accountProperties)
        {
            var endpoint = string.Format(UserInfoEndpoint, this.domain, accountProperties["access_token"]);

            var request = new HttpClient();
            request.GetAsync(new Uri(endpoint)).ContinueWith(t =>
            {
                try
                {
                    t.Result.EnsureSuccessStatusCode();
                    var profileString = t.Result.Content.ReadAsStringAsync().Result;
                    accountProperties.Add("profile", profileString);
                }
                finally
                {
                    // Even if the profile request fails, surface the tokens we already have.
                    // (The former catch (Exception ex) { throw ex; } reset the stack trace;
                    // with try/finally the original exception propagates intact.)
                    this.CurrentUser = new Auth0User(accountProperties);
                }
            })
            .Wait();
        }

        private async Task<WebAuthenticationResult> GetAuthenticatorAsync(string connection, string scope, IDictionary<string, string> authParams)
        {
            // Generate state to include in startUri.
            // NOTE(review): System.Random is predictable; consider a cryptographic RNG
            // for the OAuth state value — confirm against the threat model.
            var chars = new char[16];
            var rand = new Random();
            for (var i = 0; i < chars.Length; i++)
            {
                chars[i] = (char)rand.Next((int)'a', (int)'z' + 1);
            }

            // Encode scope value
            scope = WebUtility.UrlEncode(scope);

            var redirectUri = this.CallbackUrl;
            var authorizeUri = string.Format(AuthorizeUrl, this.domain, this.clientId, Uri.EscapeDataString(redirectUri), connection, scope);

            // Add the diagnostics query string, unless user explicitly opted out of it
            if (!object.ReferenceEquals(diagnostics, DiagnosticsHeader.Suppress))
                authorizeUri += string.Format("&auth0client={0}", diagnostics.ToString());

            if (scope.Contains("offline_access"))
            {
                var deviceId = Uri.EscapeDataString(await this.DeviceIdProvider.GetDeviceId());
                authorizeUri += string.Format("&device={0}", deviceId);
            }

            // Add custom auth params to the request.
            if (authParams != null)
            {
                foreach (var authParam in authParams.Where(a => !ReservedAuthParams.Contains(a.Key)))
                    authorizeUri += String.Format(ParamQueryString, authParam.Key, authParam.Value);
            }

            var state = new string(chars);
            var startUri = new Uri(authorizeUri + "&state=" + state);
            var endUri = new Uri(redirectUri);

            return await WebAuthenticationBroker.AuthenticateAsync(WebAuthenticationOptions.None, startUri, endUri);
        }

        // True when the scope requires a device identifier (offline access requested).
        private static bool RequireDevice(string scope)
        {
            return !String.IsNullOrEmpty(scope) && scope.Contains("offline_access");
        }

        /// <summary>
        /// After authenticating the result will be: https://callback#id_token=1234&access_token=12345&...
        /// </summary>
        /// <param name="result"></param>
        /// <returns></returns>
        private static Dictionary<string, string> ParseResult(string result)
        {
            if (String.IsNullOrEmpty(result) || !result.Contains("#"))
                return null;

            var tokens = new Dictionary<string, string>();

            foreach (var tokenPart in result.Split('#')[1].Split('&'))
            {
                var tokenKeyValue = tokenPart.Split('=');
                tokens.Add(tokenKeyValue[0], tokenKeyValue[1]);
            }

            return tokens;
        }
    }
}
/*
 * Farseer Physics Engine based on Box2D.XNA port:
 * Copyright (c) 2010 Ian Qvist
 *
 * Box2D.XNA port of Box2D:
 * Copyright (c) 2009 Brandon Furtwangler, Nathan Furtwangler
 *
 * Original source Box2D:
 * Copyright (c) 2006-2009 Erin Catto http://www.gphysics.com
 *
 * This software is provided 'as-is', without any express or implied
 * warranty. In no event will the authors be held liable for any damages
 * arising from the use of this software.
 * Permission is granted to anyone to use this software for any purpose,
 * including commercial applications, and to alter it and redistribute it
 * freely, subject to the following restrictions:
 * 1. The origin of this software must not be misrepresented; you must not
 * claim that you wrote the original software. If you use this software
 * in a product, an acknowledgment in the product documentation would be
 * appreciated but is not required.
 * 2. Altered source versions must be plainly marked as such, and must not be
 * misrepresented as being the original software.
 * 3. This notice may not be removed or altered from any source distribution.
 */

using System;
using System.Collections.Generic;
using System.Diagnostics;
using FarseerPhysics.Collision;
using FarseerPhysics.Collision.Shapes;
using FarseerPhysics.Common;
using Microsoft.Xna.Framework;

namespace FarseerPhysics.Dynamics.Contacts
{
    /// <summary>
    /// A contact edge is used to connect bodies and contacts together
    /// in a contact graph where each body is a node and each contact
    /// is an edge. A contact edge belongs to a doubly linked list
    /// maintained in each attached body. Each contact has two contact
    /// nodes, one for each attached body.
    /// </summary>
    public sealed class ContactEdge
    {
        /// <summary>
        /// The contact this edge belongs to.
        /// </summary>
        public Contact Contact;

        /// <summary>
        /// The next contact edge in the body's contact list.
        /// </summary>
        public ContactEdge Next;

        /// <summary>
        /// Provides quick access to the other body attached.
        /// </summary>
        public Body Other;

        /// <summary>
        /// The previous contact edge in the body's contact list.
        /// </summary>
        public ContactEdge Prev;
    }

    [Flags]
    public enum ContactFlags
    {
        None = 0,

        /// <summary>
        /// Used when crawling contact graph when forming islands.
        /// </summary>
        Island = 0x0001,

        /// <summary>
        /// Set when the shapes are touching.
        /// </summary>
        Touching = 0x0002,

        /// <summary>
        /// This contact can be disabled (by user)
        /// </summary>
        Enabled = 0x0004,

        /// <summary>
        /// This contact needs filtering because a fixture filter was changed.
        /// </summary>
        Filter = 0x0008,

        /// <summary>
        /// This bullet contact had a TOI event
        /// </summary>
        BulletHit = 0x0010,

        /// <summary>
        /// This contact has a valid TOI in the field TOI
        /// </summary>
        TOI = 0x0020
    }

    /// <summary>
    /// The class manages contact between two shapes. A contact exists for each overlapping
    /// AABB in the broad-phase (except if filtered). Therefore a contact object may exist
    /// that has no contact points.
    /// </summary>
    public class Contact
    {
        // NOTE(review): shared static scratch edge reused by Evaluate for loop shapes —
        // presumably the solver updates contacts on a single thread; confirm before
        // running contact updates concurrently.
        private static EdgeShape _edge = new EdgeShape();

        // Dispatch table: [shape type of fixture A, shape type of fixture B] -> narrow-phase
        // collision routine selector. Indexed in Create() with the raw ShapeType values.
        private static ContactType[,] _registers = new[,]
        {
            {
                ContactType.Circle,
                ContactType.EdgeAndCircle,
                ContactType.PolygonAndCircle,
                ContactType.LoopAndCircle,
            },
            {
                ContactType.EdgeAndCircle,
                ContactType.NotSupported, // 1,1 is invalid (no ContactType.Edge)
                ContactType.EdgeAndPolygon,
                ContactType.NotSupported, // 1,3 is invalid (no ContactType.EdgeAndLoop)
            },
            {
                ContactType.PolygonAndCircle,
                ContactType.EdgeAndPolygon,
                ContactType.Polygon,
                ContactType.LoopAndPolygon,
            },
            {
                ContactType.LoopAndCircle,
                ContactType.NotSupported, // 3,1 is invalid (no ContactType.EdgeAndLoop)
                ContactType.LoopAndPolygon,
                ContactType.NotSupported, // 3,3 is invalid (no ContactType.Loop)
            },
        };

        public Fixture FixtureA;
        public Fixture FixtureB;
        internal ContactFlags Flags;
        public Manifold Manifold;

        // Nodes for connecting bodies.
        internal ContactEdge NodeA = new ContactEdge();
        internal ContactEdge NodeB = new ContactEdge();
        public float TOI;
        internal int TOICount;
        private ContactType _type;

        // Contacts are created via the static Create() factory (which pools instances),
        // never directly.
        private Contact(Fixture fA, int indexA, Fixture fB, int indexB)
        {
            Reset(fA, indexA, fB, indexB);
        }

        /// Enable/disable this contact. This can be used inside the pre-solve
        /// contact listener. The contact is only disabled for the current
        /// time step (or sub-step in continuous collisions).
        public bool Enabled
        {
            set
            {
                if (value)
                {
                    Flags |= ContactFlags.Enabled;
                }
                else
                {
                    Flags &= ~ContactFlags.Enabled;
                }
            }

            get { return (Flags & ContactFlags.Enabled) == ContactFlags.Enabled; }
        }

        /// <summary>
        /// Get the child primitive index for fixture A.
        /// </summary>
        /// <value>The child index A.</value>
        public int ChildIndexA { get; internal set; }

        /// <summary>
        /// Get the child primitive index for fixture B.
        /// </summary>
        /// <value>The child index B.</value>
        public int ChildIndexB { get; internal set; }

        /// <summary>
        /// Get the next contact in the world's contact list.
        /// </summary>
        /// <value>The next.</value>
        public Contact Next { get; internal set; }

        /// <summary>
        /// Get the previous contact in the world's contact list.
        /// </summary>
        /// <value>The prev.</value>
        public Contact Prev { get; internal set; }

        /// <summary>
        /// Get the contact manifold. Do not modify the manifold unless you understand the
        /// internals of Box2D.
        /// </summary>
        /// <param name="manifold">The manifold.</param>
        public void GetManifold(out Manifold manifold)
        {
            manifold = Manifold;
        }

        /// <summary>
        /// Gets the world manifold.
        /// </summary>
        public void GetWorldManifold(out Vector2 normal, out FixedArray2<Vector2> points)
        {
            Body bodyA = FixtureA.Body;
            Body bodyB = FixtureB.Body;
            Shape shapeA = FixtureA.Shape;
            Shape shapeB = FixtureB.Shape;

            Collision.Collision.GetWorldManifold(ref Manifold, ref bodyA.Xf, shapeA.Radius, ref bodyB.Xf,
                                                 shapeB.Radius, out normal, out points);
        }

        /// <summary>
        /// Determines whether this contact is touching.
        /// </summary>
        /// <returns>
        /// 	<c>true</c> if this instance is touching; otherwise, <c>false</c>.
        /// </returns>
        public bool IsTouching()
        {
            return (Flags & ContactFlags.Touching) == ContactFlags.Touching;
        }

        /// <summary>
        /// Flag this contact for filtering. Filtering will occur the next time step.
        /// </summary>
        public void FlagForFiltering()
        {
            Flags |= ContactFlags.Filter;
        }

        // Re-initializes a (possibly pooled) contact for a new fixture pair. Clears the
        // manifold, both body-list nodes, and the TOI counter.
        private void Reset(Fixture fA, int indexA, Fixture fB, int indexB)
        {
            Flags = ContactFlags.Enabled;

            FixtureA = fA;
            FixtureB = fB;

            ChildIndexA = indexA;
            ChildIndexB = indexB;

            Manifold.PointCount = 0;

            Prev = null;
            Next = null;

            NodeA.Contact = null;
            NodeA.Prev = null;
            NodeA.Next = null;
            NodeA.Other = null;

            NodeB.Contact = null;
            NodeB.Prev = null;
            NodeB.Next = null;
            NodeB.Other = null;

            TOICount = 0;
        }

        /// <summary>
        /// Update the contact manifold and touching status.
        /// Note: do not assume the fixture AABBs are overlapping or are valid.
        /// </summary>
        /// <param name="contactManager">The contact manager.</param>
        internal void Update(ContactManager contactManager)
        {
            Manifold oldManifold = Manifold;

            // Re-enable this contact.
            Flags |= ContactFlags.Enabled;

            bool touching;
            bool wasTouching = (Flags & ContactFlags.Touching) == ContactFlags.Touching;

            bool sensor = FixtureA.IsSensor || FixtureB.IsSensor;

            Body bodyA = FixtureA.Body;
            Body bodyB = FixtureB.Body;

            // Is this contact a sensor?
            if (sensor)
            {
                Shape shapeA = FixtureA.Shape;
                Shape shapeB = FixtureB.Shape;
                touching = AABB.TestOverlap(shapeA, ChildIndexA, shapeB, ChildIndexB, ref bodyA.Xf, ref bodyB.Xf);

                // Sensors don't generate manifolds.
                Manifold.PointCount = 0;
            }
            else
            {
                Evaluate(ref Manifold, ref bodyA.Xf, ref bodyB.Xf);
                touching = Manifold.PointCount > 0;

                // Match old contact ids to new contact ids and copy the
                // stored impulses to warm start the solver.
                for (int i = 0; i < Manifold.PointCount; ++i)
                {
                    ManifoldPoint mp2 = Manifold.Points[i];
                    mp2.NormalImpulse = 0.0f;
                    mp2.TangentImpulse = 0.0f;
                    ContactID id2 = mp2.Id;
                    bool found = false;

                    for (int j = 0; j < oldManifold.PointCount; ++j)
                    {
                        ManifoldPoint mp1 = oldManifold.Points[j];

                        if (mp1.Id.Key == id2.Key)
                        {
                            mp2.NormalImpulse = mp1.NormalImpulse;
                            mp2.TangentImpulse = mp1.TangentImpulse;
                            found = true;
                            break;
                        }
                    }

                    if (found == false)
                    {
                        // NOTE(review): redundant — the impulses were already zeroed above.
                        mp2.NormalImpulse = 0.0f;
                        mp2.TangentImpulse = 0.0f;
                    }

                    // ManifoldPoint is a value copy; write it back.
                    Manifold.Points[i] = mp2;
                }

                if (touching != wasTouching)
                {
                    bodyA.Awake = true;
                    bodyB.Awake = true;
                }
            }

            if (touching)
            {
                Flags |= ContactFlags.Touching;
            }
            else
            {
                Flags &= ~ContactFlags.Touching;
            }

            if (wasTouching == false && touching)
            {
                //Report the collision to both participants:
                if (FixtureA.OnCollision != null)
                    Enabled = FixtureA.OnCollision(FixtureA, FixtureB, this);

                //Reverse the order of the reported fixtures. The first fixture is always the one that the
                //user subscribed to.
                if (FixtureB.OnCollision != null)
                    Enabled = FixtureB.OnCollision(FixtureB, FixtureA, this);

                //BeginContact can also return false and disable the contact
                if (contactManager.BeginContact != null)
                    Enabled = contactManager.BeginContact(this);

                //if the user disabled the contact (needed to exclude it in TOI solver), we also need to mark
                //it as not touching.
                if (Enabled == false)
                    Flags &= ~ContactFlags.Touching;
            }

            if (wasTouching && touching == false)
            {
                //Report the separation to both participants:
                if (FixtureA.OnSeparation != null)
                    FixtureA.OnSeparation(FixtureA, FixtureB);

                //Reverse the order of the reported fixtures. The first fixture is always the one that the
                //user subscribed to.
                if (FixtureB.OnSeparation != null)
                    FixtureB.OnSeparation(FixtureB, FixtureA);

                if (contactManager.EndContact != null)
                    contactManager.EndContact(this);
            }

            // Sensors never reach the solver, so PreSolve is not raised for them.
            if (sensor)
                return;

            if (contactManager.PreSolve != null)
                contactManager.PreSolve(this, ref oldManifold);
        }

        /// <summary>
        /// Evaluate this contact with your own manifold and transforms.
        /// </summary>
        /// <param name="manifold">The manifold.</param>
        /// <param name="transformA">The first transform.</param>
        /// <param name="transformB">The second transform.</param>
        private void Evaluate(ref Manifold manifold, ref Transform transformA, ref Transform transformB)
        {
            // _type was chosen in Create() from the _registers table; fixtures were ordered
            // there so that the casts below are valid for each case.
            switch (_type)
            {
                case ContactType.Polygon:
                    Collision.Collision.CollidePolygons(ref manifold,
                                                        (PolygonShape) FixtureA.Shape, ref transformA,
                                                        (PolygonShape) FixtureB.Shape, ref transformB);
                    break;
                case ContactType.PolygonAndCircle:
                    Collision.Collision.CollidePolygonAndCircle(ref manifold,
                                                                (PolygonShape) FixtureA.Shape, ref transformA,
                                                                (CircleShape) FixtureB.Shape, ref transformB);
                    break;
                case ContactType.EdgeAndCircle:
                    Collision.Collision.CollideEdgeAndCircle(ref manifold,
                                                             (EdgeShape) FixtureA.Shape, ref transformA,
                                                             (CircleShape) FixtureB.Shape, ref transformB);
                    break;
                case ContactType.EdgeAndPolygon:
                    Collision.Collision.CollideEdgeAndPolygon(ref manifold,
                                                              (EdgeShape) FixtureA.Shape, ref transformA,
                                                              (PolygonShape) FixtureB.Shape, ref transformB);
                    break;
                case ContactType.LoopAndCircle:
                    // Extract the relevant child edge of the loop into the shared scratch edge.
                    LoopShape loop = (LoopShape) FixtureA.Shape;
                    loop.GetChildEdge(ref _edge, ChildIndexA);
                    Collision.Collision.CollideEdgeAndCircle(ref manifold, _edge, ref transformA,
                                                             (CircleShape) FixtureB.Shape, ref transformB);
                    break;
                case ContactType.LoopAndPolygon:
                    LoopShape loop2 = (LoopShape) FixtureA.Shape;
                    loop2.GetChildEdge(ref _edge, ChildIndexA);
                    Collision.Collision.CollideEdgeAndPolygon(ref manifold, _edge, ref transformA,
                                                              (PolygonShape) FixtureB.Shape, ref transformB);
                    break;
                case ContactType.Circle:
                    Collision.Collision.CollideCircles(ref manifold,
                                                       (CircleShape) FixtureA.Shape, ref transformA,
                                                       (CircleShape) FixtureB.Shape, ref transformB);
                    break;
            }
        }

        // Factory: takes a contact from the world's pool when available, otherwise
        // allocates. Orders the fixture pair so the narrow-phase casts in Evaluate() hold.
        internal static Contact Create(Fixture fixtureA, int indexA, Fixture fixtureB, int indexB)
        {
            ShapeType type1 = fixtureA.ShapeType;
            ShapeType type2 = fixtureB.ShapeType;

            Debug.Assert(ShapeType.Unknown < type1 && type1 < ShapeType.TypeCount);
            Debug.Assert(ShapeType.Unknown < type2 && type2 < ShapeType.TypeCount);

            Contact c;
            Queue<Contact> pool = fixtureA.Body.World.ContactPool;
            if (pool.Count > 0)
            {
                c = pool.Dequeue();
                if ((type1 >= type2 || (type1 == ShapeType.Edge && type2 == ShapeType.Polygon)) &&
                    !(type2 == ShapeType.Edge && type1 == ShapeType.Polygon))
                {
                    c.Reset(fixtureA, indexA, fixtureB, indexB);
                }
                else
                {
                    c.Reset(fixtureB, indexB, fixtureA, indexA);
                }
            }
            else
            {
                // Edge+Polygon is non-symmetrical due to the way Erin handles collision type registration.
                if ((type1 >= type2 || (type1 == ShapeType.Edge && type2 == ShapeType.Polygon)) &&
                    !(type2 == ShapeType.Edge && type1 == ShapeType.Polygon))
                {
                    c = new Contact(fixtureA, indexA, fixtureB, indexB);
                }
                else
                {
                    c = new Contact(fixtureB, indexB, fixtureA, indexA);
                }
            }

            // NOTE(review): the register lookup uses the ORIGINAL type1/type2 order even when
            // the fixtures were swapped above — the table is symmetric for supported pairs,
            // so the same ContactType results either way.
            c._type = _registers[(int) type1, (int) type2];

            return c;
        }

        // Returns this contact to the world's pool and clears its references so pooled
        // instances don't pin fixtures/bodies.
        internal void Destroy()
        {
            FixtureA.Body.World.ContactPool.Enqueue(this);
            Reset(null, 0, null, 0);
        }

        #region Nested type: ContactType

        private enum ContactType
        {
            NotSupported,
            Polygon,
            PolygonAndCircle,
            Circle,
            EdgeAndPolygon,
            EdgeAndCircle,
            LoopAndPolygon,
            LoopAndCircle,
        }

        #endregion
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;

namespace PointNet.Web.API.Areas.HelpPage.ModelDescriptions
{
    /// <summary>
    /// Generates model descriptions for given types.
    /// </summary>
    public class ModelDescriptionGenerator
    {
        // Maps a data-annotation attribute type to a function that renders its
        // human-readable description for the help page.
        // Modify this to support more data annotation attributes.
        private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
        {
            { typeof(RequiredAttribute), a => "Required" },
            { typeof(RangeAttribute), a =>
                {
                    RangeAttribute range = (RangeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
                }
            },
            { typeof(MaxLengthAttribute), a =>
                {
                    MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
                }
            },
            { typeof(MinLengthAttribute), a =>
                {
                    MinLengthAttribute minLength = (MinLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
                }
            },
            { typeof(StringLengthAttribute), a =>
                {
                    StringLengthAttribute strLength = (StringLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
                }
            },
            { typeof(DataTypeAttribute), a =>
                {
                    DataTypeAttribute dataType = (DataTypeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
                }
            },
            { typeof(RegularExpressionAttribute), a =>
                {
                    RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
                }
            },
        };

        // Friendly display names for simple CLR types.
        // Modify this to add more default documentations.
        private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
        {
            { typeof(Int16), "integer" },
            { typeof(Int32), "integer" },
            { typeof(Int64), "integer" },
            { typeof(UInt16), "unsigned integer" },
            { typeof(UInt32), "unsigned integer" },
            { typeof(UInt64), "unsigned integer" },
            { typeof(Byte), "byte" },
            { typeof(Char), "character" },
            { typeof(SByte), "signed byte" },
            { typeof(Uri), "URI" },
            { typeof(Single), "decimal number" },
            { typeof(Double), "decimal number" },
            { typeof(Decimal), "decimal number" },
            { typeof(String), "string" },
            { typeof(Guid), "globally unique identifier" },
            { typeof(TimeSpan), "time interval" },
            { typeof(DateTime), "date" },
            { typeof(DateTimeOffset), "date" },
            { typeof(Boolean), "boolean" },
        };

        // Lazily resolved so the documentation provider is looked up only on first use.
        private Lazy<IModelDocumentationProvider> _documentationProvider;

        public ModelDescriptionGenerator(HttpConfiguration config)
        {
            if (config == null)
            {
                throw new ArgumentNullException("config");
            }

            _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
            GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
        }

        // Cache of descriptions already generated, keyed by model name (case-insensitive).
        public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }

        private IModelDocumentationProvider DocumentationProvider
        {
            get
            {
                return _documentationProvider.Value;
            }
        }

        // Returns the cached description for a type or builds one, dispatching on the
        // type's shape: simple type, enum, collection, dictionary, key/value pair, array,
        // or complex type (the fallback).
        public ModelDescription GetOrCreateModelDescription(Type modelType)
        {
            if (modelType == null)
            {
                throw new ArgumentNullException("modelType");
            }

            // Describe Nullable<T> as its underlying type T.
            Type underlyingType = Nullable.GetUnderlyingType(modelType);
            if (underlyingType != null)
            {
                modelType = underlyingType;
            }

            ModelDescription modelDescription;
            string modelName = ModelNameHelper.GetModelName(modelType);
            if (GeneratedModels.TryGetValue(modelName, out modelDescription))
            {
                if (modelType != modelDescription.ModelType)
                {
                    // Two distinct types mapped to the same model name; the help page cannot
                    // disambiguate them, so fail loudly.
                    throw new InvalidOperationException(
                        String.Format(
                            CultureInfo.CurrentCulture,
                            "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
                            "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
                            modelName,
                            modelDescription.ModelType.FullName,
                            modelType.FullName));
                }

                return modelDescription;
            }

            if (DefaultTypeDocumentation.ContainsKey(modelType))
            {
                return GenerateSimpleTypeModelDescription(modelType);
            }

            if (modelType.IsEnum)
            {
                return GenerateEnumTypeModelDescription(modelType);
            }

            if (modelType.IsGenericType)
            {
                Type[] genericArguments = modelType.GetGenericArguments();

                if (genericArguments.Length == 1)
                {
                    Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
                    if (enumerableType.IsAssignableFrom(modelType))
                    {
                        return GenerateCollectionModelDescription(modelType, genericArguments[0]);
                    }
                }
                if (genericArguments.Length == 2)
                {
                    Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
                    if (dictionaryType.IsAssignableFrom(modelType))
                    {
                        return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }

                    Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
                    if (keyValuePairType.IsAssignableFrom(modelType))
                    {
                        return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }
                }
            }

            if (modelType.IsArray)
            {
                Type elementType = modelType.GetElementType();
                return GenerateCollectionModelDescription(modelType, elementType);
            }

            if (modelType == typeof(NameValueCollection))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
            }

            // Non-generic collection interfaces: element/key types are unknown, use object.
            if (typeof(IDictionary).IsAssignableFrom(modelType))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
            }

            if (typeof(IEnumerable).IsAssignableFrom(modelType))
            {
                return GenerateCollectionModelDescription(modelType, typeof(object));
            }

            return GenerateComplexTypeModelDescription(modelType);
        }

        // Resolves the display name of a member, honoring [JsonProperty] first, then
        // [DataMember] (only when the declaring type is a data contract), then the CLR name.
        // Change this to provide different name for the member.
        private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
            if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
            {
                return jsonProperty.PropertyName;
            }

            if (hasDataContractAttribute)
            {
                DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
                if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
                {
                    return dataMember.Name;
                }
            }

            return member.Name;
        }

        private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
            XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
            IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
            NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
            ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();

            bool hasMemberAttribute = member.DeclaringType.IsEnum ?
                member.GetCustomAttribute<EnumMemberAttribute>() != null :
                member.GetCustomAttribute<DataMemberAttribute>() != null;

            // Display member only if all the followings are true:
            // no JsonIgnoreAttribute
            // no XmlIgnoreAttribute
            // no IgnoreDataMemberAttribute
            // no NonSerializedAttribute
            // no ApiExplorerSettingsAttribute with IgnoreApi set to true
            // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
            return jsonIgnore == null &&
                xmlIgnore == null &&
                ignoreDataMember == null &&
                nonSerialized == null &&
                (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
                (!hasDataContractAttribute || hasMemberAttribute);
        }

        // Default documentation: the friendly name for known simple types, otherwise
        // whatever the registered documentation provider returns (may be null).
        private string CreateDefaultDocumentation(Type type)
        {
            string documentation;
            if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
            {
                return documentation;
            }
            if (DocumentationProvider != null)
            {
                documentation = DocumentationProvider.GetDocumentation(type);
            }

            return documentation;
        }

        // Collects annotation texts for the member and appends them to the parameter
        // description, with RequiredAttribute sorted first.
        private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
        {
            List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();

            IEnumerable<Attribute> attributes = property.GetCustomAttributes();
            foreach (Attribute attribute in attributes)
            {
                Func<object, string> textGenerator;
                if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
                {
                    annotations.Add(
                        new ParameterAnnotation
                        {
                            AnnotationAttribute = attribute,
                            Documentation = textGenerator(attribute)
                        });
                }
            }

            // Rearrange the annotations
            annotations.Sort((x, y) =>
            {
                // Special-case RequiredAttribute so that it shows up on top
                if (x.AnnotationAttribute is RequiredAttribute)
                {
                    return -1;
                }
                if (y.AnnotationAttribute is RequiredAttribute)
                {
                    return 1;
                }

                // Sort the rest based on alphabetic order of the documentation
                return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
            });

            foreach (ParameterAnnotation annotation in annotations)
            {
                propertyModel.Annotations.Add(annotation);
            }
        }

        private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
        {
            ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
            if (collectionModelDescription != null)
            {
                return new CollectionModelDescription
                {
                    Name = ModelNameHelper.GetModelName(modelType),
                    ModelType = modelType,
                    ElementDescription = collectionModelDescription
                };
            }

            return null;
        }

        private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
        {
            ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };

            // Register before recursing into member types so self-referencing models
            // terminate instead of recursing forever.
            GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            foreach (PropertyInfo property in properties)
            {
                if (ShouldDisplayMember(property, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(property, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
                    }

                    GenerateAnnotations(property, propertyModel);
                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
                }
            }

            // NOTE(review): unlike properties, fields never get GenerateAnnotations applied —
            // presumably intentional in the stock HelpPage template; confirm if field
            // annotations should be surfaced.
            FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
            foreach (FieldInfo field in fields)
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(field, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
                    }

                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
                }
            }

            return complexModelDescription;
        }

        private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
            return new DictionaryModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
        {
            EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    EnumValueDescription enumValue = new EnumValueDescription
                    {
                        Name = field.Name,
                        Value = field.GetRawConstantValue().ToString()
                    };
                    if (DocumentationProvider != null)
                    {
                        enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
                    }
                    enumDescription.Values.Add(enumValue);
                }
            }
            GeneratedModels.Add(enumDescription.Name, enumDescription);

            return enumDescription;
        }

        private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
            return new KeyValuePairModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
        {
            SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);

            return simpleModelDescription;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics; using System.ComponentModel; using System.Runtime.InteropServices; using System.Drawing.Internal; namespace System.Drawing.Drawing2D { public sealed class LinearGradientBrush : Brush { private bool _interpolationColorsWasSet; public LinearGradientBrush(PointF point1, PointF point2, Color color1, Color color2) { IntPtr nativeBrush; int status = SafeNativeMethods.Gdip.GdipCreateLineBrush(new GPPOINTF(point1), new GPPOINTF(point2), color1.ToArgb(), color2.ToArgb(), (int)WrapMode.Tile, out nativeBrush); SafeNativeMethods.Gdip.CheckStatus(status); SetNativeBrushInternal(nativeBrush); } public LinearGradientBrush(Point point1, Point point2, Color color1, Color color2) { IntPtr nativeBrush; int status = SafeNativeMethods.Gdip.GdipCreateLineBrushI(new GPPOINT(point1), new GPPOINT(point2), color1.ToArgb(), color2.ToArgb(), (int)WrapMode.Tile, out nativeBrush); SafeNativeMethods.Gdip.CheckStatus(status); SetNativeBrushInternal(nativeBrush); } public LinearGradientBrush(RectangleF rect, Color color1, Color color2, LinearGradientMode linearGradientMode) { if (linearGradientMode < LinearGradientMode.Horizontal || linearGradientMode > LinearGradientMode.BackwardDiagonal) { throw new InvalidEnumArgumentException(nameof(linearGradientMode), unchecked((int)linearGradientMode), typeof(LinearGradientMode)); } if (rect.Width == 0.0 || rect.Height == 0.0) { throw new ArgumentException(SR.Format(SR.GdiplusInvalidRectangle, rect.ToString())); } var gprectf = new GPRECTF(rect); IntPtr nativeBrush; int status = SafeNativeMethods.Gdip.GdipCreateLineBrushFromRect(ref gprectf, color1.ToArgb(), color2.ToArgb(), unchecked((int)linearGradientMode), (int)WrapMode.Tile, out nativeBrush); SafeNativeMethods.Gdip.CheckStatus(status); SetNativeBrushInternal(nativeBrush); } 
public LinearGradientBrush(Rectangle rect, Color color1, Color color2, LinearGradientMode linearGradientMode) { if (linearGradientMode < LinearGradientMode.Horizontal || linearGradientMode > LinearGradientMode.BackwardDiagonal) { throw new InvalidEnumArgumentException(nameof(linearGradientMode), unchecked((int)linearGradientMode), typeof(LinearGradientMode)); } if (rect.Width == 0 || rect.Height == 0) { throw new ArgumentException(SR.Format(SR.GdiplusInvalidRectangle, rect.ToString())); } var gpRect = new GPRECT(rect); IntPtr nativeBrush; int status = SafeNativeMethods.Gdip.GdipCreateLineBrushFromRectI(ref gpRect, color1.ToArgb(), color2.ToArgb(), unchecked((int)linearGradientMode), (int)WrapMode.Tile, out nativeBrush); SafeNativeMethods.Gdip.CheckStatus(status); SetNativeBrushInternal(nativeBrush); } public LinearGradientBrush(RectangleF rect, Color color1, Color color2, float angle) : this(rect, color1, color2, angle, false) { } public LinearGradientBrush(RectangleF rect, Color color1, Color color2, float angle, bool isAngleScaleable) { if (rect.Width == 0.0 || rect.Height == 0.0) { throw new ArgumentException(SR.Format(SR.GdiplusInvalidRectangle, rect.ToString())); } var gprectf = new GPRECTF(rect); IntPtr nativeBrush; int status = SafeNativeMethods.Gdip.GdipCreateLineBrushFromRectWithAngle(ref gprectf, color1.ToArgb(), color2.ToArgb(), angle, isAngleScaleable, (int)WrapMode.Tile, out nativeBrush); SafeNativeMethods.Gdip.CheckStatus(status); SetNativeBrushInternal(nativeBrush); } public LinearGradientBrush(Rectangle rect, Color color1, Color color2, float angle) : this(rect, color1, color2, angle, false) { } public LinearGradientBrush(Rectangle rect, Color color1, Color color2, float angle, bool isAngleScaleable) { if (rect.Width == 0 || rect.Height == 0) { throw new ArgumentException(SR.Format(SR.GdiplusInvalidRectangle, rect.ToString())); } var gprect = new GPRECT(rect); IntPtr nativeBrush; int status = 
// NOTE(review): this chunk begins inside a LinearGradientBrush constructor whose opening
// lines are outside the visible range. The GDI+ call below does not assign its return
// value, yet CheckStatus(status) follows immediately — presumably `int status =` sits on
// the truncated preceding line; confirm against the full file before editing.
SafeNativeMethods.Gdip.GdipCreateLineBrushFromRectWithAngleI(ref gprect, color1.ToArgb(), color2.ToArgb(), angle, isAngleScaleable, (int)WrapMode.Tile, out nativeBrush);
SafeNativeMethods.Gdip.CheckStatus(status);
SetNativeBrushInternal(nativeBrush);
}

/// <summary>
/// Wraps an already-created native GDI+ line-gradient brush handle (used by Clone()).
/// Takes ownership of the handle; the caller must not free it.
/// </summary>
internal LinearGradientBrush(IntPtr nativeBrush)
{
    Debug.Assert(nativeBrush != IntPtr.Zero, "Initializing native brush with null.");
    SetNativeBrushInternal(nativeBrush);
}

/// <summary>Creates an exact copy of this brush by cloning the native GDI+ brush.</summary>
public override object Clone()
{
    IntPtr clonedBrush;
    int status = SafeNativeMethods.Gdip.GdipCloneBrush(new HandleRef(this, NativeBrush), out clonedBrush);
    SafeNativeMethods.Gdip.CheckStatus(status);
    return new LinearGradientBrush(clonedBrush);
}

/// <summary>
/// Gets or sets the starting and ending colors of the gradient.
/// The getter and setter each marshal exactly two ARGB values to/from GDI+.
/// </summary>
public Color[] LinearColors
{
    get
    {
        // Two slots: [0] = start color, [1] = end color, as raw ARGB ints.
        int[] colors = new int[] { 0, 0 };
        int status = SafeNativeMethods.Gdip.GdipGetLineColors(new HandleRef(this, NativeBrush), colors);
        SafeNativeMethods.Gdip.CheckStatus(status);
        return new Color[]
        {
            Color.FromArgb(colors[0]),
            Color.FromArgb(colors[1])
        };
    }
    set
    {
        // NOTE(review): no null/length validation — value must have at least two
        // elements or this throws NullReferenceException/IndexOutOfRangeException.
        int status = SafeNativeMethods.Gdip.GdipSetLineColors(new HandleRef(this, NativeBrush), value[0].ToArgb(), value[1].ToArgb());
        SafeNativeMethods.Gdip.CheckStatus(status);
    }
}

/// <summary>Gets the rectangle that defines the starting and ending points of the gradient.</summary>
public RectangleF Rectangle
{
    get
    {
        var rect = new GPRECTF();
        int status = SafeNativeMethods.Gdip.GdipGetLineRect(new HandleRef(this, NativeBrush), ref rect);
        SafeNativeMethods.Gdip.CheckStatus(status);
        return rect.ToRectangleF();
    }
}

/// <summary>Gets or sets whether gamma correction is applied to the gradient.</summary>
public bool GammaCorrection
{
    get
    {
        int status = SafeNativeMethods.Gdip.GdipGetLineGammaCorrection(new HandleRef(this, NativeBrush), out bool useGammaCorrection);
        SafeNativeMethods.Gdip.CheckStatus(status);
        return useGammaCorrection;
    }
    set
    {
        int status = SafeNativeMethods.Gdip.GdipSetLineGammaCorrection(new HandleRef(this, NativeBrush), value);
        SafeNativeMethods.Gdip.CheckStatus(status);
    }
}

/// <summary>
/// Gets or sets the blend factors/positions that define a custom falloff for the gradient.
/// Returns null when InterpolationColors has been set (the two features are mutually exclusive)
/// or when GDI+ reports a non-positive blend count.
/// </summary>
public Blend Blend
{
    get
    {
        // Interpolation colors and blends don't work together very well. Getting the Blend
        // when InterpolationColors is set puts the Brush into an unusable state afterwards.
        // Bail out here to avoid that.
        if (_interpolationColorsWasSet)
        {
            return null;
        }
        // Figure out the size of blend factor array.
        int status = SafeNativeMethods.Gdip.GdipGetLineBlendCount(new HandleRef(this, NativeBrush), out int retval);
        SafeNativeMethods.Gdip.CheckStatus(status);
        if (retval <= 0)
        {
            return null;
        }
        // Allocate a temporary native memory buffer.
        int count = retval;
        IntPtr factors = IntPtr.Zero;
        IntPtr positions = IntPtr.Zero;
        try
        {
            // 4 bytes per float; checked guards against int overflow for huge counts.
            int size = checked(4 * count);
            factors = Marshal.AllocHGlobal(size);
            positions = Marshal.AllocHGlobal(size);
            // Retrieve horizontal blend factors.
            status = SafeNativeMethods.Gdip.GdipGetLineBlend(new HandleRef(this, NativeBrush), factors, positions, count);
            SafeNativeMethods.Gdip.CheckStatus(status);
            // Return the result in a managed array.
            var blend = new Blend(count);
            Marshal.Copy(factors, blend.Factors, 0, count);
            Marshal.Copy(positions, blend.Positions, 0, count);
            return blend;
        }
        finally
        {
            // Always release the unmanaged scratch buffers, even if GDI+ failed.
            if (factors != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(factors);
            }
            if (positions != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(positions);
            }
        }
    }
    set
    {
        // Allocate temporary native memory buffer and copy input blend factors into it.
        // NOTE(review): no null check on value — a null Blend throws NullReferenceException here.
        int count = value.Factors.Length;
        IntPtr factors = IntPtr.Zero;
        IntPtr positions = IntPtr.Zero;
        try
        {
            int size = checked(4 * count);
            factors = Marshal.AllocHGlobal(size);
            positions = Marshal.AllocHGlobal(size);
            // NOTE(review): only Factors.Length elements of Positions are copied; if
            // Positions is shorter than Factors this throws, if longer it is truncated.
            Marshal.Copy(value.Factors, 0, factors, count);
            Marshal.Copy(value.Positions, 0, positions, count);
            // Set blend factors.
            int status = SafeNativeMethods.Gdip.GdipSetLineBlend(new HandleRef(this, NativeBrush), new HandleRef(null, factors), new HandleRef(null, positions), count);
            SafeNativeMethods.Gdip.CheckStatus(status);
        }
        finally
        {
            if (factors != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(factors);
            }
            if (positions != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(positions);
            }
        }
    }
}

/// <summary>Creates a bell-shaped (sigma) falloff centered at <paramref name="focus"/> with full scale.</summary>
public void SetSigmaBellShape(float focus) => SetSigmaBellShape(focus, (float)1.0);

/// <summary>
/// Creates a bell-shaped (sigma) falloff for the gradient.
/// Both <paramref name="focus"/> and <paramref name="scale"/> must be in [0, 1].
/// </summary>
public void SetSigmaBellShape(float focus, float scale)
{
    if (focus < 0 || focus > 1 || scale < 0 || scale > 1)
    {
        throw new ArgumentException(SR.Format(SR.GdiplusInvalidParameter));
    }
    int status = SafeNativeMethods.Gdip.GdipSetLineSigmaBlend(new HandleRef(this, NativeBrush), focus, scale);
    SafeNativeMethods.Gdip.CheckStatus(status);
}

/// <summary>Creates a triangular falloff centered at <paramref name="focus"/> with full scale.</summary>
public void SetBlendTriangularShape(float focus) => SetBlendTriangularShape(focus, (float)1.0);

/// <summary>
/// Creates a triangular (linear up/down) falloff for the gradient.
/// Both <paramref name="focus"/> and <paramref name="scale"/> must be in [0, 1].
/// </summary>
public void SetBlendTriangularShape(float focus, float scale)
{
    if (focus < 0 || focus > 1 || scale < 0 || scale > 1)
    {
        throw new ArgumentException(SR.Format(SR.GdiplusInvalidParameter));
    }
    int status = SafeNativeMethods.Gdip.GdipSetLineLinearBlend(new HandleRef(this, NativeBrush), focus, scale);
    SafeNativeMethods.Gdip.CheckStatus(status);
}

/// <summary>
/// Gets or sets a multi-color gradient (preset blend). The getter throws unless the setter
/// ran first (_interpolationColorsWasSet); the setter validates the ColorBlend shape:
/// at least 2 colors, Colors/Positions same length, first position 0.0, last position 1.0.
/// </summary>
public ColorBlend InterpolationColors
{
    get
    {
        if (!_interpolationColorsWasSet)
        {
            throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                        SR.Format(SR.InterpolationColorsColorBlendNotSet), string.Empty));
        }
        // Figure out the size of blend factor array.
        int status = SafeNativeMethods.Gdip.GdipGetLinePresetBlendCount(new HandleRef(this, NativeBrush), out int retval);
        SafeNativeMethods.Gdip.CheckStatus(status);
        // Allocate temporary native memory buffer.
        int count = retval;
        IntPtr colors = IntPtr.Zero;
        IntPtr positions = IntPtr.Zero;
        try
        {
            // 4 bytes per entry (ARGB int / float position).
            int size = checked(4 * count);
            colors = Marshal.AllocHGlobal(size);
            positions = Marshal.AllocHGlobal(size);
            // Retrieve the preset blend colors and positions.
            status = SafeNativeMethods.Gdip.GdipGetLinePresetBlend(new HandleRef(this, NativeBrush), colors, positions, count);
            SafeNativeMethods.Gdip.CheckStatus(status);
            // Return the result in a managed array.
            var blend = new ColorBlend(count);
            int[] argb = new int[count];
            Marshal.Copy(colors, argb, 0, count);
            Marshal.Copy(positions, blend.Positions, 0, count);
            // Copy ARGB values into Color array of ColorBlend.
            blend.Colors = new Color[argb.Length];
            for (int i = 0; i < argb.Length; i++)
            {
                blend.Colors[i] = Color.FromArgb(argb[i]);
            }
            return blend;
        }
        finally
        {
            if (colors != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(colors);
            }
            if (positions != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(positions);
            }
        }
    }
    set
    {
        // NOTE(review): the flag is set before validation, so a rejected value still
        // flips the brush into "interpolation colors" mode (Blend getter returns null).
        _interpolationColorsWasSet = true;
        if (value == null)
        {
            throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                        SR.Format(SR.InterpolationColorsInvalidColorBlendObject), string.Empty));
        }
        else if (value.Colors.Length < 2)
        {
            throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                        SR.Format(SR.InterpolationColorsInvalidColorBlendObject),
                                        SR.Format(SR.InterpolationColorsLength)));
        }
        else if (value.Colors.Length != value.Positions.Length)
        {
            throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                        SR.Format(SR.InterpolationColorsInvalidColorBlendObject),
                                        SR.Format(SR.InterpolationColorsLengthsDiffer)));
        }
        else if (value.Positions[0] != 0.0f)
        {
            throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                        SR.Format(SR.InterpolationColorsInvalidColorBlendObject),
                                        SR.Format(SR.InterpolationColorsInvalidStartPosition)));
        }
        else if (value.Positions[value.Positions.Length - 1] != 1.0f)
        {
            throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                        SR.Format(SR.InterpolationColorsInvalidColorBlendObject),
                                        SR.Format(SR.InterpolationColorsInvalidEndPosition)));
        }
        // Allocate a temporary native memory buffer and copy input blend factors into it.
        int count = value.Colors.Length;
        IntPtr colors = IntPtr.Zero;
        IntPtr positions = IntPtr.Zero;
        try
        {
            int size = checked(4 * count);
            colors = Marshal.AllocHGlobal(size);
            positions = Marshal.AllocHGlobal(size);
            // Convert Color structs to raw ARGB ints before marshaling.
            int[] argbs = new int[count];
            for (int i = 0; i < count; i++)
            {
                argbs[i] = value.Colors[i].ToArgb();
            }
            Marshal.Copy(argbs, 0, colors, count);
            Marshal.Copy(value.Positions, 0, positions, count);
            // Set blend factors.
            int status = SafeNativeMethods.Gdip.GdipSetLinePresetBlend(new HandleRef(this, NativeBrush), new HandleRef(null, colors), new HandleRef(null, positions), count);
            SafeNativeMethods.Gdip.CheckStatus(status);
        }
        finally
        {
            if (colors != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(colors);
            }
            if (positions != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(positions);
            }
        }
    }
}

/// <summary>Gets or sets how the gradient tiles areas larger than its rectangle.</summary>
public WrapMode WrapMode
{
    get
    {
        int status = SafeNativeMethods.Gdip.GdipGetLineWrapMode(new HandleRef(this, NativeBrush), out int mode);
        SafeNativeMethods.Gdip.CheckStatus(status);
        return (WrapMode)mode;
    }
    set
    {
        // Validate the enum range before handing the raw int to GDI+.
        if (value < WrapMode.Tile || value > WrapMode.Clamp)
        {
            throw new InvalidEnumArgumentException(nameof(value), unchecked((int)value), typeof(WrapMode));
        }
        int status = SafeNativeMethods.Gdip.GdipSetLineWrapMode(new HandleRef(this, NativeBrush), unchecked((int)value));
        SafeNativeMethods.Gdip.CheckStatus(status);
    }
}

/// <summary>Gets or sets the local geometric transform applied to the gradient.</summary>
public Matrix Transform
{
    get
    {
        // Copy the brush transform into a freshly allocated managed Matrix.
        var matrix = new Matrix();
        int status = SafeNativeMethods.Gdip.GdipGetLineTransform(new HandleRef(this, NativeBrush), new HandleRef(matrix, matrix.nativeMatrix));
        SafeNativeMethods.Gdip.CheckStatus(status);
        return matrix;
    }
    set
    {
        if (value == null)
        {
            // NOTE(review): parameter name "matrix" does not match the setter's
            // `value` parameter — conventionally this would be nameof(value).
            throw new ArgumentNullException("matrix");
        }
        int status = SafeNativeMethods.Gdip.GdipSetLineTransform(new HandleRef(this, NativeBrush), new HandleRef(value, value.nativeMatrix));
        SafeNativeMethods.Gdip.CheckStatus(status);
    }
}

/// <summary>Resets the brush transform to the identity matrix.</summary>
public void ResetTransform()
{
    int status = SafeNativeMethods.Gdip.GdipResetLineTransform(new HandleRef(this, NativeBrush));
    SafeNativeMethods.Gdip.CheckStatus(status);
}

/// <summary>Multiplies the brush transform by <paramref name="matrix"/>, prepending by default.</summary>
public void MultiplyTransform(Matrix matrix) => MultiplyTransform(matrix, MatrixOrder.Prepend);

/// <summary>Multiplies the brush transform by <paramref name="matrix"/> in the given order.</summary>
public void MultiplyTransform(Matrix matrix, MatrixOrder order)
{
    if (matrix == null)
    {
        throw new ArgumentNullException(nameof(matrix));
    }
    int status = SafeNativeMethods.Gdip.GdipMultiplyLineTransform(new HandleRef(this, NativeBrush), new HandleRef(matrix, matrix.nativeMatrix), order);
    SafeNativeMethods.Gdip.CheckStatus(status);
}

/// <summary>Translates the brush transform, prepending by default.</summary>
public void TranslateTransform(float dx, float dy) => TranslateTransform(dx, dy, MatrixOrder.Prepend);

/// <summary>Translates the brush transform by (dx, dy) in the given order.</summary>
public void TranslateTransform(float dx, float dy, MatrixOrder order)
{
    int status = SafeNativeMethods.Gdip.GdipTranslateLineTransform(new HandleRef(this, NativeBrush), dx, dy, order);
    SafeNativeMethods.Gdip.CheckStatus(status);
}

/// <summary>Scales the brush transform, prepending by default.</summary>
public void ScaleTransform(float sx, float sy) => ScaleTransform(sx, sy, MatrixOrder.Prepend);

/// <summary>Scales the brush transform by (sx, sy) in the given order.</summary>
public void ScaleTransform(float sx, float sy, MatrixOrder order)
{
    int status = SafeNativeMethods.Gdip.GdipScaleLineTransform(new HandleRef(this, NativeBrush), sx, sy, order);
    SafeNativeMethods.Gdip.CheckStatus(status);
}

/// <summary>Rotates the brush transform, prepending by default.</summary>
public void RotateTransform(float angle) => RotateTransform(angle, MatrixOrder.Prepend);

/// <summary>Rotates the brush transform by <paramref name="angle"/> degrees in the given order.</summary>
public void RotateTransform(float angle, MatrixOrder order)
{
    int status = SafeNativeMethods.Gdip.GdipRotateLineTransform(new HandleRef(this, NativeBrush), angle, order);
    SafeNativeMethods.Gdip.CheckStatus(status);
}
}
}
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Google.Api.Gax;
using Google.Cloud.Spanner.V1.Internal.Logging;
using Grpc.Core;

namespace Google.Cloud.Spanner.Data.Ycsb
{
    /// <summary>
    /// Base driver for a YCSB-style benchmark run. Subclasses supply the list of
    /// weighted <see cref="Operation"/>s; this class parses command-line/template-file
    /// options, runs either a target-QPS or a target-worker-count workload, and prints
    /// YCSB-format results to stdout.
    /// </summary>
    internal abstract class YcsbRunner
    {
        // Printed alongside results for verification purposes if this app gets updated.
        protected const string AppVersion = "v1.0";
        // Option keys (all option lookups are lower-cased, see LoadConfigFile/TryParseArguments).
        protected const string ClientType = "client_type";
        protected const string Operationcount = "operationcount";
        protected const string QpstestTime = "qpstesttime";
        protected const string DebugMode = "debugmode";
        protected const string NumWorker = "num_worker";
        protected const string Prewarm = "prewarm";
        protected const string TargetQps = "qpstarget";
        protected const string GrpcInline = "grpcinline";
        // Merged options from the template file and -p overrides; values start as strings
        // and are lazily converted (and cached) by GetOption<T>.
        protected static readonly ConcurrentDictionary<string, object> Options = new ConcurrentDictionary<string, object>();
        // Seeds the per-thread Randoms; locked because Random is not thread-safe.
        private static readonly Random s_globalRandom = new Random(Environment.TickCount);
        protected static readonly ThreadLocal<Random> Rand = new ThreadLocal<Random>(
            () =>
            {
                lock (s_globalRandom)
                {
                    return new Random(s_globalRandom.Next());
                }
            });
        // Per-thread scratch buffer for GenerateRandomString (75 random bytes -> base64).
        private static readonly ThreadLocal<byte[]> s_byteBuffer = new ThreadLocal<byte[]>(() => new byte[75]);
        // Sum of all operation proportions; used by ChooseOperation to normalize weights.
        private static double s_total;
        private long _totalOperations;
        private string _templateFile;
        protected static bool IsDebugMode { get; private set; }

        // The weighted set of operations this benchmark executes; supplied by subclasses.
        protected abstract List<Operation> Operations { get; }

        /// <summary>
        /// Entry point: parses arguments/config, optionally prewarms, runs the workload,
        /// and prints results. Returns 0 on success, -1 on bad arguments.
        /// </summary>
        protected int Run(string[] args)
        {
            if (!TryParseArguments(args))
            {
                return -1;
            }
            LoadConfigFile();
            ValidateArguments();
            InitializeSettings();
            DebugMessage("prewarm...");
            // Run `prewarm` instances of PreWarmOneInstance in parallel (0 => no-op).
            Task.WhenAll(Enumerable.Range(1, GetOptionWithDefault(Prewarm, 0)).Select(x => PreWarmOneInstance()))
                .WaitWithUnwrappedExceptions();
            DebugMessage("running...");
            var overallTime = Stopwatch.StartNew();
            if (Options.ContainsKey(TargetQps))
            {
                //target qps version
                RunWorkerWithQps(GetOption<double>(TargetQps), TimeSpan.FromSeconds(GetOption<int>(QpstestTime)))
                    .WaitWithUnwrappedExceptions();
            }
            else
            {
                //target worker version
                RunWithTargetWorkers(GetOption<int>(NumWorker)).WaitWithUnwrappedExceptions();
            }
            PrintResults(overallTime.ElapsedMilliseconds);
            return 0;
        }

        /// <summary>
        /// Configures gRPC threading to match the worker count, logs unhandled exceptions,
        /// and echoes all effective options in debug mode.
        /// </summary>
        protected virtual void InitializeSettings()
        {
            GrpcEnvironment.SetCompletionQueueCount(GetOption<int>(NumWorker));
            GrpcEnvironment.SetThreadPoolSize(GetOption<int>(NumWorker));
            GrpcEnvironment.SetHandlerInlining(GetOptionWithDefault(GrpcInline, false));
            AppDomain.CurrentDomain.UnhandledException += (sender, eventArgs) =>
            {
                Console.Error.WriteLine($"ERROR {eventArgs.ExceptionObject.ToString()}");
            };
            Trace.Listeners.Add(new TextWriterTraceListener(Console.Out));
            foreach (var kvp in Options)
            {
                DebugMessage($"key:{kvp.Key} = {kvp.Value}");
            }
        }

        // Hook for subclasses to warm up one client instance; default is a no-op.
        protected virtual Task PreWarmOneInstance() => Task.FromResult(0);

        /// <summary>
        /// Targets a specific QPS by executing multiple subtasks.
        /// (this is from C# spanner stress tests)
        /// Launches operations whenever the launched count falls behind
        /// elapsedSeconds * queriesPerSecond; after testTime, waits up to 10 seconds
        /// for outstanding operations to drain. Overwrites _totalOperations with the
        /// number actually launched so PrintResults reports the real throughput.
        /// </summary>
        private async Task RunWorkerWithQps(double queriesPerSecond, TimeSpan testTime)
        {
            var sw = Stopwatch.StartNew();
            var all = new List<Task>();
            while (sw.Elapsed < testTime)
            {
                if (sw.Elapsed.TotalSeconds * queriesPerSecond > all.Count)
                {
                    var operation = ChooseOperation();
                    try
                    {
                        all.Add(operation.ExecuteAsync());
                    }
                    catch (Exception e)
                    {
                        // Synchronous failure in starting the operation; keep the test running.
                        DebugMessage($"This error is being ignored and we are continuing the test: {e.Message}");
                    }
                }
                else
                {
                    await Task.Yield();
                }
            }
            // Give stragglers a bounded grace period rather than waiting indefinitely.
            var timeout = Task.Delay(TimeSpan.FromSeconds(10));
            await Task.WhenAny(Task.WhenAll(all), timeout);
            _totalOperations = all.Count;
        }

        /// <summary>
        /// Runs the test with multiple parallel subtasks equal to at most "numworkers".
        /// Executes _totalOperations (the `operationcount` option) operations total,
        /// keeping up to numWorkers in flight at once.
        /// NOTE(review): when any in-flight operation faults, parallelWorkers.Clear()
        /// abandons tracking of the remaining in-flight tasks ("start over"), so their
        /// completion is no longer awaited — confirm this is intentional best-effort.
        /// </summary>
        private async Task RunWithTargetWorkers(int numWorkers)
        {
            var parallelWorkers = new List<Task>();
            var operation = ChooseOperation();
            long operationsRemaining = _totalOperations;
            while (operationsRemaining > 0)
            {
                await Task.Yield();
                parallelWorkers.Add(operation.ExecuteAsync());
                if (operationsRemaining % 1000 == 0)
                {
                    DebugMessage($"{operationsRemaining} operations remaining.");
                }
                operation = ChooseOperation();
                if (parallelWorkers.Count == numWorkers)
                {
                    try
                    {
                        // At capacity: wait for any one worker to finish before launching more.
                        parallelWorkers.Remove(await Task.WhenAny(parallelWorkers).ConfigureAwait(false));
                    }
                    catch (Exception e)
                    {
                        DebugMessage($"This error is being ignored and we are continuing the test: {e.Message}");
                        parallelWorkers.Clear(); //start over.
                    }
                }
                operationsRemaining--;
            }
        }

        /// <summary>
        /// Picks an operation at random, weighted by each operation's Proportion
        /// (normalized against s_total). The Math.Min guard protects against
        /// floating-point rounding pushing the index past the end.
        /// </summary>
        private Operation ChooseOperation()
        {
            double operationTarget = Rand.Value.NextDouble() * s_total;
            var operation = 0;
            operationTarget -= Operations[operation].Proportion;
            while (operationTarget > 0 && operation < Operations.Count)
            {
                operation++;
                if (operation < Operations.Count)
                {
                    operationTarget -= Operations[operation].Proportion;
                }
            }
            return Operations[Math.Min(operation, Operations.Count - 1)];
        }

        /// <summary>
        /// Prints overall runtime/throughput plus per-operation latency stats in YCSB format.
        /// NOTE(review): throughput uses integer division (ops are long, millis are long),
        /// and divides by totalElapsedMillis without a zero guard.
        /// </summary>
        private void PrintResults(long totalElapsedMillis)
        {
            Console.WriteLine($"[OVERALL], RunTime(ms), {totalElapsedMillis}");
            Console.WriteLine(
                $"[OVERALL], Throughput(ops/sec), {1000 * _totalOperations / totalElapsedMillis}");
            foreach (var operation in Operations)
            {
                operation.PrintResults();
            }
            Logger.DefaultLogger.LogPerformanceData();
        }

        // Returns a base64 encoding of 75 random bytes (100 characters), using the
        // calling thread's buffer and Random instance.
        internal static string GenerateRandomString()
        {
            Rand.Value.NextBytes(s_byteBuffer.Value);
            return Convert.ToBase64String(s_byteBuffer.Value);
        }

        /// <summary>
        /// Loads `key=value` lines from the template file into Options (keys lower-cased);
        /// `#` lines are comments. Lines whose value itself contains '=' split into more
        /// than two parts and are silently skipped.
        /// </summary>
        private void LoadConfigFile()
        {
            foreach (string line in File.ReadLines(_templateFile))
            {
                if (line.StartsWith("#"))
                {
                    continue;
                }
                var kvp = line.Split('=');
                if (kvp.Length == 2)
                {
                    Options[kvp[0].ToLowerInvariant()] = kvp[1];
                }
            }
            Options["AppVersion"] = AppVersion; //this is just for verification purposes if this app gets updated.
        }

        /// <summary>
        /// Computes the proportion normalizer and total operation count, and rejects
        /// configs not targeted at the dotnet client.
        /// </summary>
        protected virtual void ValidateArguments()
        {
            s_total = Operations.Select(x => x.Proportion).Sum();
            _totalOperations = GetOption<long>(Operationcount);
            if (GetOption<string>(ClientType) != "dotnet")
            {
                Console.Error.WriteLine("Expected client_type=dotnet");
                throw new InvalidOperationException();
            }
        }

        // Returns the option if present, otherwise the supplied default.
        protected static T GetOptionWithDefault<T>(string optionName, T defaultValue) => !Options.ContainsKey(optionName) ?
            defaultValue : GetOption<T>(optionName);

        /// <summary>
        /// Returns a required option, converting from its stored string form to T on
        /// first access and caching the converted value back into Options.
        /// </summary>
        protected static T GetOption<T>(string optionName)
        {
            if (!Options.ContainsKey(optionName))
            {
                throw new InvalidOperationException($"Argument {optionName} was not specified, but required.");
            }
            var value = Options[optionName];
            if (value is T)
            {
                return (T) value;
            }
            var parsedValue = (T) Convert.ChangeType(value, typeof(T));
            Options[optionName] = parsedValue;
            return parsedValue;
        }

        /// <summary>
        /// Parses `run <target> -P <template> [-p key=value]...` into Options.
        /// NOTE(review): args[2] is read before args.Count is verified to be >= 3, so
        /// invoking with just "run" (or "run spanner") throws instead of printing usage;
        /// args[1] (the "spanner" target) is never validated.
        /// </summary>
        private bool TryParseArguments(IReadOnlyList<string> args)
        {
            if (args == null || args.Count == 0 || args[0] != "run")
            {
                PrintUsage();
                return false;
            }
            if (args[2] != "-P" || args.Count < 4)
            {
                PrintUsage();
                return false;
            }
            _templateFile = args[3];
            if (!File.Exists(_templateFile))
            {
                Console.Error.WriteLine($"File {_templateFile} does not exist!");
                PrintUsage();
                return false;
            }
            if (args.Count % 2 != 0)
            {
                //weird error, user had a -p without another value or had an extra space in a value causing it
                //to be misread.
                Console.Error.WriteLine("Error parsing argument values.");
                PrintUsage();
                return false;
            }
            // Remaining arguments come in (-p, key=value) pairs; keys are lower-cased.
            for (var i = 4; i < args.Count; i += 2)
            {
                if (args[i] != "-p")
                {
                    Console.Error.WriteLine($"Expected a '-p' at argument {(i - 1) / 2}");
                    PrintUsage();
                    return false;
                }
                var keyValue = args[i + 1].Split('=');
                if (keyValue.Length != 2)
                {
                    Console.Error.WriteLine($"Unable to parse key/value for arg {(i - 1) / 2}");
                    PrintUsage();
                    return false;
                }
                Options[keyValue[0].ToLowerInvariant()] = keyValue[1];
            }
            IsDebugMode = Options.ContainsKey(DebugMode);
            return true;
        }

        // Writes to stdout only when the debugmode option was supplied.
        protected void DebugMessage(string message)
        {
            if (IsDebugMode)
            {
                Console.WriteLine(message);
            }
        }

        protected void PrintUsage() => Console.Error.WriteLine(
            "Usage:\n dotnet Google.Cloud.Spanner.Data.Ycsb.dll run spanner -P <relative_path_to_template_file> [-p property=value]...");

        // Population variance of the samples (0 for an empty sequence).
        // The suppression acknowledges the deliberate multiple enumeration of `source`.
        [SuppressMessage("ReSharper", "PossibleMultipleEnumeration")]
        private static double Variance(IEnumerable<double> source)
        {
            if (!source.Any())
            {
                return 0;
            }
            double mean = source.Average();
            return source.Select(d => (d - mean) * (d - mean)).Average();
        }

        /// <summary>
        /// One weighted benchmark operation: its latency samples, display name, config-driven
        /// proportion, and the async action that performs a single execution.
        /// </summary>
        protected class Operation
        {
            // Each entry is in milliseconds for one operation.
            public ConcurrentBag<double> Latencies { get; } = new ConcurrentBag<double>();
            // Name corresponds to the name printed out in results for this operation.
            public string Name { get; }
            // The fractional share of total executions that run this operation.
            // If the total of all proportions does not equal '1.0', then it will be 'normalized'
            public double Proportion => GetOption<double>(ProportionField);
            // The actual operation to execute asynchronously.
            public Func<ConcurrentBag<double>, Task> Action { get; }
            // The config property name for calculating "proportion"
            public string ProportionField { get; }

            public Operation(string proportionField, Func<ConcurrentBag<double>, Task> action)
            {
                ProportionField = proportionField;
                // e.g. "readproportion" -> "READ".
                Name = proportionField.Replace("proportion", "").ToUpperInvariant();
                Action = action;
            }

            // Runs one execution; the action records its own latency into Latencies.
            public Task ExecuteAsync() => Action(Latencies);

            /// <summary>
            /// Prints YCSB-format stats. Percentiles are taken from the descending-sorted
            /// list: skipping 5% from the top yields the 95th percentile, etc. The bucket
            /// histogram (ascending sort, integer-millisecond buckets) is emitted only when
            /// the `histogram.buckets` option is set.
            /// </summary>
            public void PrintResults()
            {
                var sortedlatencies = Latencies.DefaultIfEmpty(0).OrderByDescending(x => x).ToList();
                Console.WriteLine($"[{Name}], Operations, {Latencies.Count}");
                Console.WriteLine(
                    $"[{Name}], AverageLatency(us), {Latencies.DefaultIfEmpty(0).Average() * 1000}");
                Console.WriteLine($"[{Name}], LatencyVariance(us), {Variance(sortedlatencies) * 1000}");
                Console.WriteLine(
                    $"[{Name}], MinLatency(us), {sortedlatencies.LastOrDefault() * 1000}");
                Console.WriteLine(
                    $"[{Name}], MaxLatency(us), {sortedlatencies.FirstOrDefault() * 1000}");
                Console.WriteLine(
                    $"[{Name}], 50thPercentileLatency(us), {sortedlatencies.Skip((int) Math.Floor(Latencies.Count * .5)).First() * 1000}");
                Console.WriteLine(
                    $"[{Name}], 95thPercentileLatency(us), {sortedlatencies.Skip((int) Math.Floor(Latencies.Count * .05)).First() * 1000}");
                Console.WriteLine(
                    $"[{Name}], 99thPercentileLatency(us), {sortedlatencies.Skip((int) Math.Floor(Latencies.Count * .01)).First() * 1000}");
                Console.WriteLine(
                    $"[{Name}], 99.9thPercentileLatency(us), {sortedlatencies.Skip((int) Math.Floor(Latencies.Count * .001)).First() * 1000}");
                Console.WriteLine($"[{Name}], Return=OK, {Latencies.Count}");
                //bucket.
                var sortedLatencies = Latencies.OrderBy(x => x).ToList();
                var latencyIndex = 0;
                var bucket = 0;
                for (; bucket < GetOptionWithDefault("histogram.buckets", 0L); bucket++)
                {
                    var count = 0;
                    while (latencyIndex < sortedLatencies.Count && sortedLatencies[latencyIndex] <= bucket)
                    {
                        count++;
                        latencyIndex++;
                    }
                    Console.WriteLine($"[{Name}], {bucket}, {count}");
                }
                // Everything above the last bucket boundary.
                Console.WriteLine($"[{Name}], >{bucket}, {sortedLatencies.Count - latencyIndex}");
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.Serialization;
using System.Text;
using System.Threading.Tasks;
using System.Xml;
using Microsoft.AspNetCore.Mvc.Formatters.Xml;
using Microsoft.AspNetCore.Mvc.Infrastructure;
using Microsoft.AspNetCore.WebUtilities;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;

namespace Microsoft.AspNetCore.Mvc.Formatters
{
    /// <summary>
    /// This class handles serialization of objects
    /// to XML using <see cref="DataContractSerializer"/>
    /// </summary>
    public class XmlDataContractSerializerOutputFormatter : TextOutputFormatter
    {
        // Per-type serializer cache; values are DataContractSerializer instances.
        private readonly ConcurrentDictionary<Type, object> _serializerCache = new ConcurrentDictionary<Type, object>();
        private readonly ILogger _logger;
        private DataContractSerializerSettings _serializerSettings;
        // Resolved lazily on first write from the request's service provider.
        private MvcOptions? _mvcOptions;
        private AsyncEnumerableReader? _asyncEnumerableReaderFactory;

        /// <summary>
        /// Initializes a new instance of <see cref="XmlDataContractSerializerOutputFormatter"/>
        /// with default <see cref="XmlWriterSettings"/>.
        /// </summary>
        public XmlDataContractSerializerOutputFormatter()
            : this(FormattingUtilities.GetDefaultXmlWriterSettings())
        {
        }

        /// <summary>
        /// Initializes a new instance of <see cref="XmlDataContractSerializerOutputFormatter"/>
        /// with default <see cref="XmlWriterSettings"/>.
        /// </summary>
        /// <param name="loggerFactory">The <see cref="ILoggerFactory"/>.</param>
        public XmlDataContractSerializerOutputFormatter(ILoggerFactory loggerFactory)
            : this(FormattingUtilities.GetDefaultXmlWriterSettings(), loggerFactory)
        {
        }

        /// <summary>
        /// Initializes a new instance of <see cref="XmlDataContractSerializerOutputFormatter"/>.
        /// </summary>
        /// <param name="writerSettings">The settings to be used by the <see cref="DataContractSerializer"/>.</param>
        public XmlDataContractSerializerOutputFormatter(XmlWriterSettings writerSettings)
            : this(writerSettings, loggerFactory: NullLoggerFactory.Instance)
        {
        }

        /// <summary>
        /// Initializes a new instance of <see cref="XmlDataContractSerializerOutputFormatter"/>.
        /// </summary>
        /// <param name="writerSettings">The settings to be used by the <see cref="DataContractSerializer"/>.</param>
        /// <param name="loggerFactory">The <see cref="ILoggerFactory"/>.</param>
        public XmlDataContractSerializerOutputFormatter(XmlWriterSettings writerSettings, ILoggerFactory loggerFactory)
        {
            if (writerSettings == null)
            {
                throw new ArgumentNullException(nameof(writerSettings));
            }

            SupportedEncodings.Add(Encoding.UTF8);
            SupportedEncodings.Add(Encoding.Unicode);
            SupportedMediaTypes.Add(MediaTypeHeaderValues.ApplicationXml);
            SupportedMediaTypes.Add(MediaTypeHeaderValues.TextXml);
            SupportedMediaTypes.Add(MediaTypeHeaderValues.ApplicationAnyXmlSyntax);

            WriterSettings = writerSettings;
            _serializerSettings = new DataContractSerializerSettings();

            // The enumerable wrapper factory is added second and receives the list itself,
            // so it can consult the other factories when wrapping element types.
            WrapperProviderFactories = new List<IWrapperProviderFactory>()
            {
                new SerializableErrorWrapperProviderFactory(),
            };
            WrapperProviderFactories.Add(new EnumerableWrapperProviderFactory(WrapperProviderFactories));

            _logger = loggerFactory.CreateLogger(GetType());
        }

        /// <summary>
        /// Gets the list of <see cref="IWrapperProviderFactory"/> to
        /// provide the wrapping type for serialization.
        /// </summary>
        public IList<IWrapperProviderFactory> WrapperProviderFactories { get; }

        /// <summary>
        /// Gets the settings to be used by the XmlWriter.
        /// </summary>
        public XmlWriterSettings WriterSettings { get; }

        /// <summary>
        /// Gets or sets the <see cref="DataContractSerializerSettings"/> used to configure the
        /// <see cref="DataContractSerializer"/>.
        /// </summary>
        public DataContractSerializerSettings SerializerSettings
        {
            get => _serializerSettings;
            set
            {
                if (value == null)
                {
                    throw new ArgumentNullException(nameof(value));
                }
                // NOTE(review): serializers already cached in _serializerCache were built
                // with the previous settings; changing settings does not invalidate them.
                _serializerSettings = value;
            }
        }

        /// <summary>
        /// Gets the type to be serialized.
        /// </summary>
        /// <param name="type">The original type to be serialized</param>
        /// <returns>The original or wrapped type provided by any <see cref="IWrapperProvider"/>s.</returns>
        protected virtual Type GetSerializableType(Type type)
        {
            if (type == null)
            {
                throw new ArgumentNullException(nameof(type));
            }

            var wrapperProvider = WrapperProviderFactories.GetWrapperProvider(new WrapperProviderContext(
                type,
                isSerialization: true));

            return wrapperProvider?.WrappingType ?? type;
        }

        /// <inheritdoc />
        protected override bool CanWriteType(Type? type)
        {
            if (type == null)
            {
                return false;
            }

            // Writable iff a DataContractSerializer can be created for the (wrapped) type.
            return GetCachedSerializer(GetSerializableType(type)) != null;
        }

        /// <summary>
        /// Create a new instance of <see cref="DataContractSerializer"/> for the given object type.
        /// </summary>
        /// <param name="type">The type of object for which the serializer should be created.</param>
        /// <returns>A new instance of <see cref="DataContractSerializer"/></returns>
        protected virtual DataContractSerializer? CreateSerializer(Type type)
        {
            if (type == null)
            {
                throw new ArgumentNullException(nameof(type));
            }

            try
            {
                // Verify that type is a valid data contract by forcing the serializer to try to create a data contract
                FormattingUtilities.XsdDataContractExporter.GetRootElementName(type);

                // If the serializer does not support this type it will throw an exception.
                return new DataContractSerializer(type, _serializerSettings);
            }
            catch (Exception ex)
            {
                _logger.FailedToCreateDataContractSerializer(type.FullName!, ex);

                // We do not surface the caught exception because if CanWriteResult returns
                // false, then this Formatter is not picked up at all.
                return null;
            }
        }

        /// <summary>
        /// Creates a new instance of <see cref="XmlWriter"/> using the given <see cref="TextWriter"/> and
        /// <see cref="XmlWriterSettings"/>.
        /// </summary>
        /// <param name="writer">
        /// The underlying <see cref="TextWriter"/> which the <see cref="XmlWriter"/> should write to.
        /// </param>
        /// <param name="xmlWriterSettings">
        /// The <see cref="XmlWriterSettings"/>.
        /// </param>
        /// <returns>A new instance of <see cref="XmlWriter"/></returns>
        public virtual XmlWriter CreateXmlWriter(
            TextWriter writer,
            XmlWriterSettings xmlWriterSettings)
        {
            if (writer == null)
            {
                throw new ArgumentNullException(nameof(writer));
            }

            if (xmlWriterSettings == null)
            {
                throw new ArgumentNullException(nameof(xmlWriterSettings));
            }

            // We always close the TextWriter, so the XmlWriter shouldn't.
            xmlWriterSettings.CloseOutput = false;

            return XmlWriter.Create(writer, xmlWriterSettings);
        }

        /// <summary>
        /// Creates a new instance of <see cref="XmlWriter"/> using the given <see cref="TextWriter"/> and
        /// <see cref="XmlWriterSettings"/>.
        /// </summary>
        /// <param name="context">The formatter context associated with the call.</param>
        /// <param name="writer">
        /// The underlying <see cref="TextWriter"/> which the <see cref="XmlWriter"/> should write to.
        /// </param>
        /// <param name="xmlWriterSettings">
        /// The <see cref="XmlWriterSettings"/>.
        /// </param>
        /// <returns>A new instance of <see cref="XmlWriter"/>.</returns>
        public virtual XmlWriter CreateXmlWriter(
            OutputFormatterWriteContext context,
            TextWriter writer,
            XmlWriterSettings xmlWriterSettings)
        {
            // Default implementation ignores the context; overridable for per-request tweaks.
            return CreateXmlWriter(writer, xmlWriterSettings);
        }

        /// <inheritdoc />
        public override async Task WriteResponseBodyAsync(OutputFormatterWriteContext context, Encoding selectedEncoding)
        {
            if (context == null)
            {
                throw new ArgumentNullException(nameof(context));
            }

            if (selectedEncoding == null)
            {
                throw new ArgumentNullException(nameof(selectedEncoding));
            }

            // Clone so per-request encoding changes don't mutate the shared settings.
            var writerSettings = WriterSettings.Clone();
            writerSettings.Encoding = selectedEncoding;

            var httpContext = context.HttpContext;
            var response = httpContext.Response;

            _mvcOptions ??= httpContext.RequestServices.GetRequiredService<IOptions<MvcOptions>>().Value;
            _asyncEnumerableReaderFactory ??= new AsyncEnumerableReader(_mvcOptions);

            var value = context.Object;
            var valueType = context.ObjectType;
            // IAsyncEnumerable values must be fully buffered into a synchronous collection
            // before DataContractSerializer (which is synchronous) can serialize them.
            if (value is not null && _asyncEnumerableReaderFactory.TryGetReader(value.GetType(), out var reader))
            {
                Log.BufferingAsyncEnumerable(_logger, value);
                value = await reader(value, context.HttpContext.RequestAborted);
                valueType = value.GetType();
                if (context.HttpContext.RequestAborted.IsCancellationRequested)
                {
                    return;
                }
            }

            Debug.Assert(valueType is not null);

            // Wrap the object only if there is a wrapping type.
            var wrappingType = GetSerializableType(valueType);
            if (wrappingType != null && wrappingType != valueType)
            {
                var wrapperProvider = WrapperProviderFactories.GetWrapperProvider(new WrapperProviderContext(
                    declaredType: valueType,
                    isSerialization: true));

                Debug.Assert(wrapperProvider is not null);
                value = wrapperProvider.Wrap(value);
            }

            // Non-null by contract: CanWriteType already verified a serializer exists.
            var dataContractSerializer = GetCachedSerializer(wrappingType!);

            // Unless buffering is suppressed, serialize to a temp stream first so the
            // Content-Length can be set and serialization failures don't corrupt the body.
            var responseStream = response.Body;
            FileBufferingWriteStream? fileBufferingWriteStream = null;
            if (!_mvcOptions.SuppressOutputFormatterBuffering)
            {
                fileBufferingWriteStream = new FileBufferingWriteStream();
                responseStream = fileBufferingWriteStream;
            }

            try
            {
                await using (var textWriter = context.WriterFactory(responseStream, writerSettings.Encoding))
                {
                    using var xmlWriter = CreateXmlWriter(context, textWriter, writerSettings);
                    dataContractSerializer.WriteObject(xmlWriter, value);
                }

                if (fileBufferingWriteStream != null)
                {
                    response.ContentLength = fileBufferingWriteStream.Length;
                    await fileBufferingWriteStream.DrainBufferAsync(response.Body);
                }
            }
            finally
            {
                if (fileBufferingWriteStream != null)
                {
                    await fileBufferingWriteStream.DisposeAsync();
                }
            }
        }

        /// <summary>
        /// Gets the cached serializer or creates and caches the serializer for the given type.
        /// Failed creations (null) are not cached, so unsupported types retry each call;
        /// the null-forgiving cast returns null in that case despite the non-null signature —
        /// callers are expected to have gated on CanWriteType.
        /// </summary>
        /// <returns>The <see cref="DataContractSerializer"/> instance.</returns>
        protected virtual DataContractSerializer GetCachedSerializer(Type type)
        {
            if (!_serializerCache.TryGetValue(type, out var serializer))
            {
                serializer = CreateSerializer(type);
                if (serializer != null)
                {
                    // Benign race: concurrent creators may each build one; TryAdd keeps the first.
                    _serializerCache.TryAdd(type, serializer);
                }
            }

            return (DataContractSerializer)serializer!;
        }

        // Pre-compiled log message delegates (LoggerMessage pattern avoids per-call allocation).
        private static class Log
        {
            private static readonly LogDefineOptions SkipEnabledCheckLogOptions = new() { SkipEnabledCheck = true };

            private static readonly Action<ILogger, string, Exception?> _bufferingAsyncEnumerable = LoggerMessage.Define<string>(
                LogLevel.Debug,
                new EventId(1, "BufferingAsyncEnumerable"),
                "Buffering IAsyncEnumerable instance of type '{Type}'.",
                SkipEnabledCheckLogOptions);

            public static void BufferingAsyncEnumerable(ILogger logger, object asyncEnumerable)
            {
                // Manual IsEnabled guard pairs with SkipEnabledCheck above.
                if (logger.IsEnabled(LogLevel.Debug))
                {
                    _bufferingAsyncEnumerable(logger, asyncEnumerable.GetType().FullName!, null);
                }
            }
        }
    }
}
/* 
 * Infoplus API
 *
 * Infoplus API.
 *
 * OpenAPI spec version: v1.0
 * Contact: api@infopluscommerce.com
 * Generated by: https://github.com/swagger-api/swagger-codegen.git
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using RestSharp;
using Infoplus.Client;
using Infoplus.Model;

namespace Infoplus.Api
{
    /// <summary>
    /// Represents a collection of functions to interact with the API endpoints
    /// </summary>
    public interface IVendorComplianceSurveyApi : IApiAccessor
    {
        #region Synchronous Operations
        /// <summary>
        /// Create a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Inserts a new vendorComplianceSurvey using the specified data.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be inserted.</param>
        /// <returns>VendorComplianceSurvey</returns>
        VendorComplianceSurvey AddVendorComplianceSurvey (VendorComplianceSurvey body);

        /// <summary>
        /// Create a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Inserts a new vendorComplianceSurvey using the specified data.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be inserted.</param>
        /// <returns>ApiResponse of VendorComplianceSurvey</returns>
        ApiResponse<VendorComplianceSurvey> AddVendorComplianceSurveyWithHttpInfo (VendorComplianceSurvey body);

        /// <summary>
        /// Delete a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Deletes the vendorComplianceSurvey identified by the specified id.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be deleted.</param>
        /// <returns></returns>
        void DeleteVendorComplianceSurvey (int? vendorComplianceSurveyId);

        /// <summary>
        /// Delete a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Deletes the vendorComplianceSurvey identified by the specified id.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be deleted.</param>
        /// <returns>ApiResponse of Object(void)</returns>
        ApiResponse<Object> DeleteVendorComplianceSurveyWithHttpInfo (int? vendorComplianceSurveyId);

        /// <summary>
        /// Search vendorComplianceSurveys by filter
        /// </summary>
        /// <remarks>
        /// Returns the list of vendorComplianceSurveys that match the given filter.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="filter">Query string, used to filter results. (optional)</param>
        /// <param name="page">Result page number. Defaults to 1. (optional)</param>
        /// <param name="limit">Maximum results per page. Defaults to 20. Max allowed value is 250. (optional)</param>
        /// <param name="sort">Sort results by specified field. (optional)</param>
        /// <returns>List&lt;VendorComplianceSurvey&gt;</returns>
        List<VendorComplianceSurvey> GetVendorComplianceSurveyByFilter (string filter = null, int? page = null, int? limit = null, string sort = null);

        /// <summary>
        /// Search vendorComplianceSurveys by filter
        /// </summary>
        /// <remarks>
        /// Returns the list of vendorComplianceSurveys that match the given filter.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="filter">Query string, used to filter results. (optional)</param>
        /// <param name="page">Result page number. Defaults to 1. (optional)</param>
        /// <param name="limit">Maximum results per page. Defaults to 20. Max allowed value is 250. (optional)</param>
        /// <param name="sort">Sort results by specified field. (optional)</param>
        /// <returns>ApiResponse of List&lt;VendorComplianceSurvey&gt;</returns>
        ApiResponse<List<VendorComplianceSurvey>> GetVendorComplianceSurveyByFilterWithHttpInfo (string filter = null, int? page = null, int? limit = null, string sort = null);

        /// <summary>
        /// Get a vendorComplianceSurvey by id
        /// </summary>
        /// <remarks>
        /// Returns the vendorComplianceSurvey identified by the specified id.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be returned.</param>
        /// <returns>VendorComplianceSurvey</returns>
        VendorComplianceSurvey GetVendorComplianceSurveyById (int? vendorComplianceSurveyId);

        /// <summary>
        /// Get a vendorComplianceSurvey by id
        /// </summary>
        /// <remarks>
        /// Returns the vendorComplianceSurvey identified by the specified id.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be returned.</param>
        /// <returns>ApiResponse of VendorComplianceSurvey</returns>
        ApiResponse<VendorComplianceSurvey> GetVendorComplianceSurveyByIdWithHttpInfo (int? vendorComplianceSurveyId);

        /// <summary>
        /// Update a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Updates an existing vendorComplianceSurvey using the specified data.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be updated.</param>
        /// <returns></returns>
        void UpdateVendorComplianceSurvey (VendorComplianceSurvey body);

        /// <summary>
        /// Update a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Updates an existing vendorComplianceSurvey using the specified data.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be updated.</param>
        /// <returns>ApiResponse of Object(void)</returns>
        ApiResponse<Object> UpdateVendorComplianceSurveyWithHttpInfo (VendorComplianceSurvey body);

        #endregion Synchronous Operations
        #region Asynchronous Operations
        /// <summary>
        /// Create a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Inserts a new vendorComplianceSurvey using the specified data.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be inserted.</param>
        /// <returns>Task of VendorComplianceSurvey</returns>
        System.Threading.Tasks.Task<VendorComplianceSurvey> AddVendorComplianceSurveyAsync (VendorComplianceSurvey body);

        /// <summary>
        /// Create a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Inserts a new vendorComplianceSurvey using the specified data.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be inserted.</param>
        /// <returns>Task of ApiResponse (VendorComplianceSurvey)</returns>
        System.Threading.Tasks.Task<ApiResponse<VendorComplianceSurvey>> AddVendorComplianceSurveyAsyncWithHttpInfo (VendorComplianceSurvey body);

        /// <summary>
        /// Delete a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Deletes the vendorComplianceSurvey identified by the specified id.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be deleted.</param>
        /// <returns>Task of void</returns>
        System.Threading.Tasks.Task DeleteVendorComplianceSurveyAsync (int? vendorComplianceSurveyId);

        /// <summary>
        /// Delete a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Deletes the vendorComplianceSurvey identified by the specified id.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be deleted.</param>
        /// <returns>Task of ApiResponse</returns>
        System.Threading.Tasks.Task<ApiResponse<Object>> DeleteVendorComplianceSurveyAsyncWithHttpInfo (int? vendorComplianceSurveyId);

        /// <summary>
        /// Search vendorComplianceSurveys by filter
        /// </summary>
        /// <remarks>
        /// Returns the list of vendorComplianceSurveys that match the given filter.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="filter">Query string, used to filter results. (optional)</param>
        /// <param name="page">Result page number. Defaults to 1. (optional)</param>
        /// <param name="limit">Maximum results per page. Defaults to 20. Max allowed value is 250. (optional)</param>
        /// <param name="sort">Sort results by specified field. (optional)</param>
        /// <returns>Task of List&lt;VendorComplianceSurvey&gt;</returns>
        System.Threading.Tasks.Task<List<VendorComplianceSurvey>> GetVendorComplianceSurveyByFilterAsync (string filter = null, int? page = null, int? limit = null, string sort = null);

        /// <summary>
        /// Search vendorComplianceSurveys by filter
        /// </summary>
        /// <remarks>
        /// Returns the list of vendorComplianceSurveys that match the given filter.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="filter">Query string, used to filter results. (optional)</param>
        /// <param name="page">Result page number. Defaults to 1. (optional)</param>
        /// <param name="limit">Maximum results per page. Defaults to 20. Max allowed value is 250. (optional)</param>
        /// <param name="sort">Sort results by specified field. (optional)</param>
        /// <returns>Task of ApiResponse (List&lt;VendorComplianceSurvey&gt;)</returns>
        System.Threading.Tasks.Task<ApiResponse<List<VendorComplianceSurvey>>> GetVendorComplianceSurveyByFilterAsyncWithHttpInfo (string filter = null, int? page = null, int? limit = null, string sort = null);

        /// <summary>
        /// Get a vendorComplianceSurvey by id
        /// </summary>
        /// <remarks>
        /// Returns the vendorComplianceSurvey identified by the specified id.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be returned.</param>
        /// <returns>Task of VendorComplianceSurvey</returns>
        System.Threading.Tasks.Task<VendorComplianceSurvey> GetVendorComplianceSurveyByIdAsync (int? vendorComplianceSurveyId);

        /// <summary>
        /// Get a vendorComplianceSurvey by id
        /// </summary>
        /// <remarks>
        /// Returns the vendorComplianceSurvey identified by the specified id.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be returned.</param>
        /// <returns>Task of ApiResponse (VendorComplianceSurvey)</returns>
        System.Threading.Tasks.Task<ApiResponse<VendorComplianceSurvey>> GetVendorComplianceSurveyByIdAsyncWithHttpInfo (int? vendorComplianceSurveyId);

        /// <summary>
        /// Update a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Updates an existing vendorComplianceSurvey using the specified data.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be updated.</param>
        /// <returns>Task of void</returns>
        System.Threading.Tasks.Task UpdateVendorComplianceSurveyAsync (VendorComplianceSurvey body);

        /// <summary>
        /// Update a vendorComplianceSurvey
        /// </summary>
        /// <remarks>
        /// Updates an existing vendorComplianceSurvey using the specified data.
        /// </remarks>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be updated.</param>
        /// <returns>Task of ApiResponse</returns>
        System.Threading.Tasks.Task<ApiResponse<Object>> UpdateVendorComplianceSurveyAsyncWithHttpInfo (VendorComplianceSurvey body);

        #endregion Asynchronous Operations
    }

    /// <summary>
    /// Represents a collection of functions to interact with the API endpoints
    /// </summary>
    public partial class VendorComplianceSurveyApi : IVendorComplianceSurveyApi
    {
        // Default factory maps every response to "no exception" until configured.
        private Infoplus.Client.ExceptionFactory _exceptionFactory = (name, response) => null;

        /// <summary>
        /// Initializes a new instance of the <see cref="VendorComplianceSurveyApi"/> class.
        /// </summary>
        /// <returns></returns>
        public VendorComplianceSurveyApi(String basePath)
        {
            this.Configuration = new Configuration(new ApiClient(basePath));

            ExceptionFactory = Infoplus.Client.Configuration.DefaultExceptionFactory;

            // ensure API client has configuration ready
            if (Configuration.ApiClient.Configuration == null)
            {
                this.Configuration.ApiClient.Configuration = this.Configuration;
            }
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="VendorComplianceSurveyApi"/> class
        /// using Configuration object
        /// </summary>
        /// <param name="configuration">An instance of Configuration</param>
        /// <returns></returns>
        public VendorComplianceSurveyApi(Configuration configuration = null)
        {
            if (configuration == null) // use the default one in Configuration
                this.Configuration = Configuration.Default;
            else
                this.Configuration = configuration;

            ExceptionFactory = Infoplus.Client.Configuration.DefaultExceptionFactory;

            // ensure API client has configuration ready
            if (Configuration.ApiClient.Configuration == null)
            {
                this.Configuration.ApiClient.Configuration = this.Configuration;
            }
        }

        /// <summary>
        /// Gets the base path of the API client.
        /// </summary>
        /// <value>The base path</value>
        public String GetBasePath()
        {
            return this.Configuration.ApiClient.RestClient.BaseUrl.ToString();
        }

        /// <summary>
        /// Sets the base path of the API client.
        /// </summary>
        /// <value>The base path</value>
        [Obsolete("SetBasePath is deprecated, please do 'Configuration.ApiClient = new ApiClient(\"http://new-path\")' instead.")]
        public void SetBasePath(String basePath)
        {
            // do nothing (kept only for backward compatibility with older generated clients)
        }

        /// <summary>
        /// Gets or sets the configuration object
        /// </summary>
        /// <value>An instance of the Configuration</value>
        public Configuration Configuration {get; set;}

        /// <summary>
        /// Provides a factory method hook for the creation of exceptions.
        /// </summary>
        public Infoplus.Client.ExceptionFactory ExceptionFactory
        {
            get
            {
                // A multicast delegate would make the response-to-exception mapping
                // ambiguous (only the last invocation's result would be used), so reject it.
                if (_exceptionFactory != null && _exceptionFactory.GetInvocationList().Length > 1)
                {
                    throw new InvalidOperationException("Multicast delegate for ExceptionFactory is unsupported.");
                }
                return _exceptionFactory;
            }
            set { _exceptionFactory = value; }
        }

        /// <summary>
        /// Gets the default header.
        /// </summary>
        /// <returns>Dictionary of HTTP header</returns>
        [Obsolete("DefaultHeader is deprecated, please use Configuration.DefaultHeader instead.")]
        public Dictionary<String, String> DefaultHeader()
        {
            return this.Configuration.DefaultHeader;
        }

        /// <summary>
        /// Add default header.
        /// </summary>
        /// <param name="key">Header field name.</param>
        /// <param name="value">Header field value.</param>
        /// <returns></returns>
        [Obsolete("AddDefaultHeader is deprecated, please use Configuration.AddDefaultHeader instead.")]
        public void AddDefaultHeader(string key, string value)
        {
            this.Configuration.AddDefaultHeader(key, value);
        }

        /// <summary>
        /// Create a vendorComplianceSurvey Inserts a new vendorComplianceSurvey using the specified data.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be inserted.</param>
        /// <returns>VendorComplianceSurvey</returns>
        public VendorComplianceSurvey AddVendorComplianceSurvey (VendorComplianceSurvey body)
        {
             // Thin wrapper: delegates to the WithHttpInfo variant and unwraps the payload.
             ApiResponse<VendorComplianceSurvey> localVarResponse = AddVendorComplianceSurveyWithHttpInfo(body);
             return localVarResponse.Data;
        }

        /// <summary>
        /// Create a vendorComplianceSurvey Inserts a new vendorComplianceSurvey using the specified data.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be inserted.</param>
        /// <returns>ApiResponse of VendorComplianceSurvey</returns>
        public ApiResponse< VendorComplianceSurvey > AddVendorComplianceSurveyWithHttpInfo (VendorComplianceSurvey body)
        {
            // verify the required parameter 'body' is set
            if (body == null)
                throw new ApiException(400, "Missing required parameter 'body' when calling VendorComplianceSurveyApi->AddVendorComplianceSurvey");

            var localVarPath = "/v1.0/vendorComplianceSurvey";
            var localVarPathParams = new Dictionary<String, String>();
            var localVarQueryParams = new Dictionary<String, String>();
            var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader);
            var localVarFormParams = new Dictionary<String, String>();
            var localVarFileParams = new Dictionary<String, FileParameter>();
            Object localVarPostBody = null;

            // to determine the Content-Type header
            String[] localVarHttpContentTypes = new String[] {
                "application/json"
            };
            String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes);

            // to determine the Accept header
            String[] localVarHttpHeaderAccepts = new String[] {
                "application/json"
            };
            String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts);
            if (localVarHttpHeaderAccept != null)
                localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept);

            // set "format" to json by default
            // e.g. /pet/{petId}.{format} becomes /pet/{petId}.json
            localVarPathParams.Add("format", "json");
            // NOTE(review): null check is redundant here (body was validated above);
            // it is kept as emitted by the code generator.
            if (body != null && body.GetType() != typeof(byte[]))
            {
                localVarPostBody = Configuration.ApiClient.Serialize(body); // http body (model) parameter
            }
            else
            {
                localVarPostBody = body; // byte array
            }

            // authentication (api_key) required
            if (!String.IsNullOrEmpty(Configuration.GetApiKeyWithPrefix("API-Key")))
            {
                localVarHeaderParams["API-Key"] = Configuration.GetApiKeyWithPrefix("API-Key");
            }

            // make the HTTP request
            IRestResponse localVarResponse = (IRestResponse) Configuration.ApiClient.CallApi(localVarPath,
                Method.POST, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams,
                localVarPathParams, localVarHttpContentType);

            int localVarStatusCode = (int) localVarResponse.StatusCode;

            if (ExceptionFactory != null)
            {
                Exception exception = ExceptionFactory("AddVendorComplianceSurvey", localVarResponse);
                if (exception != null) throw exception;
            }

            // NOTE(review): ToDictionary throws on duplicate response header names — TODO confirm
            // the Infoplus API never repeats a header, or this line can fail at runtime.
            return new ApiResponse<VendorComplianceSurvey>(localVarStatusCode,
                localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()),
                (VendorComplianceSurvey) Configuration.ApiClient.Deserialize(localVarResponse, typeof(VendorComplianceSurvey)));
        }

        /// <summary>
        /// Create a vendorComplianceSurvey Inserts a new vendorComplianceSurvey using the specified data.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be inserted.</param>
        /// <returns>Task of VendorComplianceSurvey</returns>
        public async System.Threading.Tasks.Task<VendorComplianceSurvey> AddVendorComplianceSurveyAsync (VendorComplianceSurvey body)
        {
             // Thin async wrapper: delegates to the WithHttpInfo variant and unwraps the payload.
             ApiResponse<VendorComplianceSurvey> localVarResponse = await AddVendorComplianceSurveyAsyncWithHttpInfo(body);
             return localVarResponse.Data;
        }

        /// <summary>
        /// Create a vendorComplianceSurvey Inserts a new vendorComplianceSurvey using the specified data.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="body">VendorComplianceSurvey to be inserted.</param>
        /// <returns>Task of ApiResponse (VendorComplianceSurvey)</returns>
        public async System.Threading.Tasks.Task<ApiResponse<VendorComplianceSurvey>> AddVendorComplianceSurveyAsyncWithHttpInfo (VendorComplianceSurvey body)
        {
            // verify the required parameter 'body' is set
            if (body == null)
                throw new ApiException(400, "Missing required parameter 'body' when calling VendorComplianceSurveyApi->AddVendorComplianceSurvey");

            var localVarPath = "/v1.0/vendorComplianceSurvey";
            var localVarPathParams = new Dictionary<String, String>();
            var localVarQueryParams = new Dictionary<String, String>();
            var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader);
            var localVarFormParams = new Dictionary<String, String>();
            var localVarFileParams = new Dictionary<String, FileParameter>();
            Object localVarPostBody = null;

            // to determine the Content-Type header
            String[] localVarHttpContentTypes = new String[] {
                "application/json"
            };
            String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes);

            // to determine the Accept header
            String[] localVarHttpHeaderAccepts = new String[] {
                "application/json"
            };
            String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts);
            if (localVarHttpHeaderAccept != null)
                localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept);

            // set "format" to json by default
            // e.g. /pet/{petId}.{format} becomes /pet/{petId}.json
            localVarPathParams.Add("format", "json");
            if (body != null && body.GetType() != typeof(byte[]))
            {
                localVarPostBody = Configuration.ApiClient.Serialize(body); // http body (model) parameter
            }
            else
            {
                localVarPostBody = body; // byte array
            }

            // authentication (api_key) required
            if (!String.IsNullOrEmpty(Configuration.GetApiKeyWithPrefix("API-Key")))
            {
                localVarHeaderParams["API-Key"] = Configuration.GetApiKeyWithPrefix("API-Key");
            }

            // make the HTTP request
            IRestResponse localVarResponse = (IRestResponse) await Configuration.ApiClient.CallApiAsync(localVarPath,
                Method.POST, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams,
                localVarPathParams, localVarHttpContentType);

            int localVarStatusCode = (int) localVarResponse.StatusCode;

            if (ExceptionFactory != null)
            {
                Exception exception = ExceptionFactory("AddVendorComplianceSurvey", localVarResponse);
                if (exception != null) throw exception;
            }

            return new ApiResponse<VendorComplianceSurvey>(localVarStatusCode,
                localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()),
                (VendorComplianceSurvey) Configuration.ApiClient.Deserialize(localVarResponse, typeof(VendorComplianceSurvey)));
        }

        /// <summary>
        /// Delete a vendorComplianceSurvey Deletes the vendorComplianceSurvey identified by the specified id.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be deleted.</param>
        /// <returns></returns>
        public void DeleteVendorComplianceSurvey (int? vendorComplianceSurveyId)
        {
             // Thin wrapper: delegates to the WithHttpInfo variant, discards the response envelope.
             DeleteVendorComplianceSurveyWithHttpInfo(vendorComplianceSurveyId);
        }

        /// <summary>
        /// Delete a vendorComplianceSurvey Deletes the vendorComplianceSurvey identified by the specified id.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be deleted.</param>
        /// <returns>ApiResponse of Object(void)</returns>
        public ApiResponse<Object> DeleteVendorComplianceSurveyWithHttpInfo (int? vendorComplianceSurveyId)
        {
            // verify the required parameter 'vendorComplianceSurveyId' is set
            if (vendorComplianceSurveyId == null)
                throw new ApiException(400, "Missing required parameter 'vendorComplianceSurveyId' when calling VendorComplianceSurveyApi->DeleteVendorComplianceSurvey");

            var localVarPath = "/v1.0/vendorComplianceSurvey/{vendorComplianceSurveyId}";
            var localVarPathParams = new Dictionary<String, String>();
            var localVarQueryParams = new Dictionary<String, String>();
            var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader);
            var localVarFormParams = new Dictionary<String, String>();
            var localVarFileParams = new Dictionary<String, FileParameter>();
            Object localVarPostBody = null;

            // to determine the Content-Type header (DELETE sends no body, so none listed)
            String[] localVarHttpContentTypes = new String[] {
            };
            String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes);

            // to determine the Accept header
            String[] localVarHttpHeaderAccepts = new String[] {
                "application/json"
            };
            String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts);
            if (localVarHttpHeaderAccept != null)
                localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept);

            // set "format" to json by default
            // e.g. /pet/{petId}.{format} becomes /pet/{petId}.json
            localVarPathParams.Add("format", "json");
            if (vendorComplianceSurveyId != null)
                localVarPathParams.Add("vendorComplianceSurveyId", Configuration.ApiClient.ParameterToString(vendorComplianceSurveyId)); // path parameter

            // authentication (api_key) required
            if (!String.IsNullOrEmpty(Configuration.GetApiKeyWithPrefix("API-Key")))
            {
                localVarHeaderParams["API-Key"] = Configuration.GetApiKeyWithPrefix("API-Key");
            }

            // make the HTTP request
            IRestResponse localVarResponse = (IRestResponse) Configuration.ApiClient.CallApi(localVarPath,
                Method.DELETE, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams,
                localVarPathParams, localVarHttpContentType);

            int localVarStatusCode = (int) localVarResponse.StatusCode;

            if (ExceptionFactory != null)
            {
                Exception exception = ExceptionFactory("DeleteVendorComplianceSurvey", localVarResponse);
                if (exception != null) throw exception;
            }

            // void endpoint: Data slot is null, only status/headers are returned.
            return new ApiResponse<Object>(localVarStatusCode,
                localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()),
                null);
        }

        /// <summary>
        /// Delete a vendorComplianceSurvey Deletes the vendorComplianceSurvey identified by the specified id.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be deleted.</param>
        /// <returns>Task of void</returns>
        public async System.Threading.Tasks.Task DeleteVendorComplianceSurveyAsync (int? vendorComplianceSurveyId)
        {
             // Thin async wrapper: delegates to the WithHttpInfo variant, discards the envelope.
             await DeleteVendorComplianceSurveyAsyncWithHttpInfo(vendorComplianceSurveyId);
        }

        /// <summary>
        /// Delete a vendorComplianceSurvey Deletes the vendorComplianceSurvey identified by the specified id.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be deleted.</param>
        /// <returns>Task of ApiResponse</returns>
        public async System.Threading.Tasks.Task<ApiResponse<Object>> DeleteVendorComplianceSurveyAsyncWithHttpInfo (int? vendorComplianceSurveyId)
        {
            // verify the required parameter 'vendorComplianceSurveyId' is set
            if (vendorComplianceSurveyId == null)
                throw new ApiException(400, "Missing required parameter 'vendorComplianceSurveyId' when calling VendorComplianceSurveyApi->DeleteVendorComplianceSurvey");

            var localVarPath = "/v1.0/vendorComplianceSurvey/{vendorComplianceSurveyId}";
            var localVarPathParams = new Dictionary<String, String>();
            var localVarQueryParams = new Dictionary<String, String>();
            var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader);
            var localVarFormParams = new Dictionary<String, String>();
            var localVarFileParams = new Dictionary<String, FileParameter>();
            Object localVarPostBody = null;

            // to determine the Content-Type header (DELETE sends no body, so none listed)
            String[] localVarHttpContentTypes = new String[] {
            };
            String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes);

            // to determine the Accept header
            String[] localVarHttpHeaderAccepts = new String[] {
                "application/json"
            };
            String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts);
            if (localVarHttpHeaderAccept != null)
                localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept);

            // set "format" to json by default
            // e.g. /pet/{petId}.{format} becomes /pet/{petId}.json
            localVarPathParams.Add("format", "json");
            if (vendorComplianceSurveyId != null)
                localVarPathParams.Add("vendorComplianceSurveyId", Configuration.ApiClient.ParameterToString(vendorComplianceSurveyId)); // path parameter

            // authentication (api_key) required
            if (!String.IsNullOrEmpty(Configuration.GetApiKeyWithPrefix("API-Key")))
            {
                localVarHeaderParams["API-Key"] = Configuration.GetApiKeyWithPrefix("API-Key");
            }

            // make the HTTP request
            IRestResponse localVarResponse = (IRestResponse) await Configuration.ApiClient.CallApiAsync(localVarPath,
                Method.DELETE, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams,
                localVarPathParams, localVarHttpContentType);

            int localVarStatusCode = (int) localVarResponse.StatusCode;

            if (ExceptionFactory != null)
            {
                Exception exception = ExceptionFactory("DeleteVendorComplianceSurvey", localVarResponse);
                if (exception != null) throw exception;
            }

            return new ApiResponse<Object>(localVarStatusCode,
                localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()),
                null);
        }

        /// <summary>
        /// Search vendorComplianceSurveys by filter Returns the list of vendorComplianceSurveys that match the given filter.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="filter">Query string, used to filter results. (optional)</param>
        /// <param name="page">Result page number. Defaults to 1. (optional)</param>
        /// <param name="limit">Maximum results per page. Defaults to 20. Max allowed value is 250. (optional)</param>
        /// <param name="sort">Sort results by specified field. (optional)</param>
        /// <returns>List&lt;VendorComplianceSurvey&gt;</returns>
        public List<VendorComplianceSurvey> GetVendorComplianceSurveyByFilter (string filter = null, int? page = null, int?
limit = null, string sort = null)
        {
             // Thin wrapper: delegates to the WithHttpInfo variant and unwraps the payload.
             ApiResponse<List<VendorComplianceSurvey>> localVarResponse = GetVendorComplianceSurveyByFilterWithHttpInfo(filter, page, limit, sort);
             return localVarResponse.Data;
        }

        /// <summary>
        /// Search vendorComplianceSurveys by filter Returns the list of vendorComplianceSurveys that match the given filter.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="filter">Query string, used to filter results. (optional)</param>
        /// <param name="page">Result page number. Defaults to 1. (optional)</param>
        /// <param name="limit">Maximum results per page. Defaults to 20. Max allowed value is 250. (optional)</param>
        /// <param name="sort">Sort results by specified field. (optional)</param>
        /// <returns>ApiResponse of List&lt;VendorComplianceSurvey&gt;</returns>
        public ApiResponse< List<VendorComplianceSurvey> > GetVendorComplianceSurveyByFilterWithHttpInfo (string filter = null, int? page = null, int?
limit = null, string sort = null)
        {
            // All parameters are optional, so no required-parameter validation here.
            var localVarPath = "/v1.0/vendorComplianceSurvey/search";
            var localVarPathParams = new Dictionary<String, String>();
            var localVarQueryParams = new Dictionary<String, String>();
            var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader);
            var localVarFormParams = new Dictionary<String, String>();
            var localVarFileParams = new Dictionary<String, FileParameter>();
            Object localVarPostBody = null;

            // to determine the Content-Type header (GET sends no body, so none listed)
            String[] localVarHttpContentTypes = new String[] {
            };
            String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes);

            // to determine the Accept header
            String[] localVarHttpHeaderAccepts = new String[] {
                "application/json"
            };
            String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts);
            if (localVarHttpHeaderAccept != null)
                localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept);

            // set "format" to json by default
            // e.g. /pet/{petId}.{format} becomes /pet/{petId}.json
            localVarPathParams.Add("format", "json");
            if (filter != null) localVarQueryParams.Add("filter", Configuration.ApiClient.ParameterToString(filter)); // query parameter
            if (page != null) localVarQueryParams.Add("page", Configuration.ApiClient.ParameterToString(page)); // query parameter
            if (limit != null) localVarQueryParams.Add("limit", Configuration.ApiClient.ParameterToString(limit)); // query parameter
            if (sort != null) localVarQueryParams.Add("sort", Configuration.ApiClient.ParameterToString(sort)); // query parameter

            // authentication (api_key) required
            if (!String.IsNullOrEmpty(Configuration.GetApiKeyWithPrefix("API-Key")))
            {
                localVarHeaderParams["API-Key"] = Configuration.GetApiKeyWithPrefix("API-Key");
            }

            // make the HTTP request
            IRestResponse localVarResponse = (IRestResponse) Configuration.ApiClient.CallApi(localVarPath,
                Method.GET, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams,
                localVarPathParams, localVarHttpContentType);

            int localVarStatusCode = (int) localVarResponse.StatusCode;

            if (ExceptionFactory != null)
            {
                Exception exception = ExceptionFactory("GetVendorComplianceSurveyByFilter", localVarResponse);
                if (exception != null) throw exception;
            }

            return new ApiResponse<List<VendorComplianceSurvey>>(localVarStatusCode,
                localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()),
                (List<VendorComplianceSurvey>) Configuration.ApiClient.Deserialize(localVarResponse, typeof(List<VendorComplianceSurvey>)));
        }

        /// <summary>
        /// Search vendorComplianceSurveys by filter Returns the list of vendorComplianceSurveys that match the given filter.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="filter">Query string, used to filter results. (optional)</param>
        /// <param name="page">Result page number. Defaults to 1. (optional)</param>
        /// <param name="limit">Maximum results per page. Defaults to 20. Max allowed value is 250. (optional)</param>
        /// <param name="sort">Sort results by specified field. (optional)</param>
        /// <returns>Task of List&lt;VendorComplianceSurvey&gt;</returns>
        public async System.Threading.Tasks.Task<List<VendorComplianceSurvey>> GetVendorComplianceSurveyByFilterAsync (string filter = null, int? page = null, int? limit = null, string sort = null)
        {
             // Thin async wrapper: delegates to the WithHttpInfo variant and unwraps the payload.
             ApiResponse<List<VendorComplianceSurvey>> localVarResponse = await GetVendorComplianceSurveyByFilterAsyncWithHttpInfo(filter, page, limit, sort);
             return localVarResponse.Data;
        }

        /// <summary>
        /// Search vendorComplianceSurveys by filter Returns the list of vendorComplianceSurveys that match the given filter.
        /// </summary>
        /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception>
        /// <param name="filter">Query string, used to filter results. (optional)</param>
        /// <param name="page">Result page number. Defaults to 1. (optional)</param>
        /// <param name="limit">Maximum results per page. Defaults to 20. Max allowed value is 250. (optional)</param>
        /// <param name="sort">Sort results by specified field. (optional)</param>
        /// <returns>Task of ApiResponse (List&lt;VendorComplianceSurvey&gt;)</returns>
        public async System.Threading.Tasks.Task<ApiResponse<List<VendorComplianceSurvey>>> GetVendorComplianceSurveyByFilterAsyncWithHttpInfo (string filter = null, int? page = null, int?
limit = null, string sort = null)
        {
            // All parameters are optional, so no required-parameter validation here.
            var localVarPath = "/v1.0/vendorComplianceSurvey/search";
            var localVarPathParams = new Dictionary<String, String>();
            var localVarQueryParams = new Dictionary<String, String>();
            var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader);
            var localVarFormParams = new Dictionary<String, String>();
            var localVarFileParams = new Dictionary<String, FileParameter>();
            Object localVarPostBody = null;

            // to determine the Content-Type header (GET sends no body, so none listed)
            String[] localVarHttpContentTypes = new String[] {
            };
            String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes);

            // to determine the Accept header
            String[] localVarHttpHeaderAccepts = new String[] {
                "application/json"
            };
            String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts);
            if (localVarHttpHeaderAccept != null)
                localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept);

            // set "format" to json by default
            // e.g.
/pet/{petId}.{format} becomes /pet/{petId}.json localVarPathParams.Add("format", "json"); if (filter != null) localVarQueryParams.Add("filter", Configuration.ApiClient.ParameterToString(filter)); // query parameter if (page != null) localVarQueryParams.Add("page", Configuration.ApiClient.ParameterToString(page)); // query parameter if (limit != null) localVarQueryParams.Add("limit", Configuration.ApiClient.ParameterToString(limit)); // query parameter if (sort != null) localVarQueryParams.Add("sort", Configuration.ApiClient.ParameterToString(sort)); // query parameter // authentication (api_key) required if (!String.IsNullOrEmpty(Configuration.GetApiKeyWithPrefix("API-Key"))) { localVarHeaderParams["API-Key"] = Configuration.GetApiKeyWithPrefix("API-Key"); } // make the HTTP request IRestResponse localVarResponse = (IRestResponse) await Configuration.ApiClient.CallApiAsync(localVarPath, Method.GET, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams, localVarPathParams, localVarHttpContentType); int localVarStatusCode = (int) localVarResponse.StatusCode; if (ExceptionFactory != null) { Exception exception = ExceptionFactory("GetVendorComplianceSurveyByFilter", localVarResponse); if (exception != null) throw exception; } return new ApiResponse<List<VendorComplianceSurvey>>(localVarStatusCode, localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()), (List<VendorComplianceSurvey>) Configuration.ApiClient.Deserialize(localVarResponse, typeof(List<VendorComplianceSurvey>))); } /// <summary> /// Get a vendorComplianceSurvey by id Returns the vendorComplianceSurvey identified by the specified id. 
/// </summary> /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception> /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be returned.</param> /// <returns>VendorComplianceSurvey</returns> public VendorComplianceSurvey GetVendorComplianceSurveyById (int? vendorComplianceSurveyId) { ApiResponse<VendorComplianceSurvey> localVarResponse = GetVendorComplianceSurveyByIdWithHttpInfo(vendorComplianceSurveyId); return localVarResponse.Data; } /// <summary> /// Get a vendorComplianceSurvey by id Returns the vendorComplianceSurvey identified by the specified id. /// </summary> /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception> /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be returned.</param> /// <returns>ApiResponse of VendorComplianceSurvey</returns> public ApiResponse< VendorComplianceSurvey > GetVendorComplianceSurveyByIdWithHttpInfo (int? vendorComplianceSurveyId) { // verify the required parameter 'vendorComplianceSurveyId' is set if (vendorComplianceSurveyId == null) throw new ApiException(400, "Missing required parameter 'vendorComplianceSurveyId' when calling VendorComplianceSurveyApi->GetVendorComplianceSurveyById"); var localVarPath = "/v1.0/vendorComplianceSurvey/{vendorComplianceSurveyId}"; var localVarPathParams = new Dictionary<String, String>(); var localVarQueryParams = new Dictionary<String, String>(); var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader); var localVarFormParams = new Dictionary<String, String>(); var localVarFileParams = new Dictionary<String, FileParameter>(); Object localVarPostBody = null; // to determine the Content-Type header String[] localVarHttpContentTypes = new String[] { }; String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes); // to determine the Accept header String[] localVarHttpHeaderAccepts = 
new String[] { "application/json" }; String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts); if (localVarHttpHeaderAccept != null) localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept); // set "format" to json by default // e.g. /pet/{petId}.{format} becomes /pet/{petId}.json localVarPathParams.Add("format", "json"); if (vendorComplianceSurveyId != null) localVarPathParams.Add("vendorComplianceSurveyId", Configuration.ApiClient.ParameterToString(vendorComplianceSurveyId)); // path parameter // authentication (api_key) required if (!String.IsNullOrEmpty(Configuration.GetApiKeyWithPrefix("API-Key"))) { localVarHeaderParams["API-Key"] = Configuration.GetApiKeyWithPrefix("API-Key"); } // make the HTTP request IRestResponse localVarResponse = (IRestResponse) Configuration.ApiClient.CallApi(localVarPath, Method.GET, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams, localVarPathParams, localVarHttpContentType); int localVarStatusCode = (int) localVarResponse.StatusCode; if (ExceptionFactory != null) { Exception exception = ExceptionFactory("GetVendorComplianceSurveyById", localVarResponse); if (exception != null) throw exception; } return new ApiResponse<VendorComplianceSurvey>(localVarStatusCode, localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()), (VendorComplianceSurvey) Configuration.ApiClient.Deserialize(localVarResponse, typeof(VendorComplianceSurvey))); } /// <summary> /// Get a vendorComplianceSurvey by id Returns the vendorComplianceSurvey identified by the specified id. 
/// </summary> /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception> /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be returned.</param> /// <returns>Task of VendorComplianceSurvey</returns> public async System.Threading.Tasks.Task<VendorComplianceSurvey> GetVendorComplianceSurveyByIdAsync (int? vendorComplianceSurveyId) { ApiResponse<VendorComplianceSurvey> localVarResponse = await GetVendorComplianceSurveyByIdAsyncWithHttpInfo(vendorComplianceSurveyId); return localVarResponse.Data; } /// <summary> /// Get a vendorComplianceSurvey by id Returns the vendorComplianceSurvey identified by the specified id. /// </summary> /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception> /// <param name="vendorComplianceSurveyId">Id of the vendorComplianceSurvey to be returned.</param> /// <returns>Task of ApiResponse (VendorComplianceSurvey)</returns> public async System.Threading.Tasks.Task<ApiResponse<VendorComplianceSurvey>> GetVendorComplianceSurveyByIdAsyncWithHttpInfo (int? 
vendorComplianceSurveyId) { // verify the required parameter 'vendorComplianceSurveyId' is set if (vendorComplianceSurveyId == null) throw new ApiException(400, "Missing required parameter 'vendorComplianceSurveyId' when calling VendorComplianceSurveyApi->GetVendorComplianceSurveyById"); var localVarPath = "/v1.0/vendorComplianceSurvey/{vendorComplianceSurveyId}"; var localVarPathParams = new Dictionary<String, String>(); var localVarQueryParams = new Dictionary<String, String>(); var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader); var localVarFormParams = new Dictionary<String, String>(); var localVarFileParams = new Dictionary<String, FileParameter>(); Object localVarPostBody = null; // to determine the Content-Type header String[] localVarHttpContentTypes = new String[] { }; String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes); // to determine the Accept header String[] localVarHttpHeaderAccepts = new String[] { "application/json" }; String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts); if (localVarHttpHeaderAccept != null) localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept); // set "format" to json by default // e.g. 
/pet/{petId}.{format} becomes /pet/{petId}.json localVarPathParams.Add("format", "json"); if (vendorComplianceSurveyId != null) localVarPathParams.Add("vendorComplianceSurveyId", Configuration.ApiClient.ParameterToString(vendorComplianceSurveyId)); // path parameter // authentication (api_key) required if (!String.IsNullOrEmpty(Configuration.GetApiKeyWithPrefix("API-Key"))) { localVarHeaderParams["API-Key"] = Configuration.GetApiKeyWithPrefix("API-Key"); } // make the HTTP request IRestResponse localVarResponse = (IRestResponse) await Configuration.ApiClient.CallApiAsync(localVarPath, Method.GET, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams, localVarPathParams, localVarHttpContentType); int localVarStatusCode = (int) localVarResponse.StatusCode; if (ExceptionFactory != null) { Exception exception = ExceptionFactory("GetVendorComplianceSurveyById", localVarResponse); if (exception != null) throw exception; } return new ApiResponse<VendorComplianceSurvey>(localVarStatusCode, localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()), (VendorComplianceSurvey) Configuration.ApiClient.Deserialize(localVarResponse, typeof(VendorComplianceSurvey))); } /// <summary> /// Update a vendorComplianceSurvey Updates an existing vendorComplianceSurvey using the specified data. /// </summary> /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception> /// <param name="body">VendorComplianceSurvey to be updated.</param> /// <returns></returns> public void UpdateVendorComplianceSurvey (VendorComplianceSurvey body) { UpdateVendorComplianceSurveyWithHttpInfo(body); } /// <summary> /// Update a vendorComplianceSurvey Updates an existing vendorComplianceSurvey using the specified data. 
/// </summary> /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception> /// <param name="body">VendorComplianceSurvey to be updated.</param> /// <returns>ApiResponse of Object(void)</returns> public ApiResponse<Object> UpdateVendorComplianceSurveyWithHttpInfo (VendorComplianceSurvey body) { // verify the required parameter 'body' is set if (body == null) throw new ApiException(400, "Missing required parameter 'body' when calling VendorComplianceSurveyApi->UpdateVendorComplianceSurvey"); var localVarPath = "/v1.0/vendorComplianceSurvey"; var localVarPathParams = new Dictionary<String, String>(); var localVarQueryParams = new Dictionary<String, String>(); var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader); var localVarFormParams = new Dictionary<String, String>(); var localVarFileParams = new Dictionary<String, FileParameter>(); Object localVarPostBody = null; // to determine the Content-Type header String[] localVarHttpContentTypes = new String[] { "application/json" }; String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes); // to determine the Accept header String[] localVarHttpHeaderAccepts = new String[] { "application/json" }; String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts); if (localVarHttpHeaderAccept != null) localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept); // set "format" to json by default // e.g. 
/pet/{petId}.{format} becomes /pet/{petId}.json localVarPathParams.Add("format", "json"); if (body != null && body.GetType() != typeof(byte[])) { localVarPostBody = Configuration.ApiClient.Serialize(body); // http body (model) parameter } else { localVarPostBody = body; // byte array } // authentication (api_key) required if (!String.IsNullOrEmpty(Configuration.GetApiKeyWithPrefix("API-Key"))) { localVarHeaderParams["API-Key"] = Configuration.GetApiKeyWithPrefix("API-Key"); } // make the HTTP request IRestResponse localVarResponse = (IRestResponse) Configuration.ApiClient.CallApi(localVarPath, Method.PUT, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams, localVarPathParams, localVarHttpContentType); int localVarStatusCode = (int) localVarResponse.StatusCode; if (ExceptionFactory != null) { Exception exception = ExceptionFactory("UpdateVendorComplianceSurvey", localVarResponse); if (exception != null) throw exception; } return new ApiResponse<Object>(localVarStatusCode, localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()), null); } /// <summary> /// Update a vendorComplianceSurvey Updates an existing vendorComplianceSurvey using the specified data. /// </summary> /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception> /// <param name="body">VendorComplianceSurvey to be updated.</param> /// <returns>Task of void</returns> public async System.Threading.Tasks.Task UpdateVendorComplianceSurveyAsync (VendorComplianceSurvey body) { await UpdateVendorComplianceSurveyAsyncWithHttpInfo(body); } /// <summary> /// Update a vendorComplianceSurvey Updates an existing vendorComplianceSurvey using the specified data. 
/// </summary> /// <exception cref="Infoplus.Client.ApiException">Thrown when fails to make API call</exception> /// <param name="body">VendorComplianceSurvey to be updated.</param> /// <returns>Task of ApiResponse</returns> public async System.Threading.Tasks.Task<ApiResponse<Object>> UpdateVendorComplianceSurveyAsyncWithHttpInfo (VendorComplianceSurvey body) { // verify the required parameter 'body' is set if (body == null) throw new ApiException(400, "Missing required parameter 'body' when calling VendorComplianceSurveyApi->UpdateVendorComplianceSurvey"); var localVarPath = "/v1.0/vendorComplianceSurvey"; var localVarPathParams = new Dictionary<String, String>(); var localVarQueryParams = new Dictionary<String, String>(); var localVarHeaderParams = new Dictionary<String, String>(Configuration.DefaultHeader); var localVarFormParams = new Dictionary<String, String>(); var localVarFileParams = new Dictionary<String, FileParameter>(); Object localVarPostBody = null; // to determine the Content-Type header String[] localVarHttpContentTypes = new String[] { "application/json" }; String localVarHttpContentType = Configuration.ApiClient.SelectHeaderContentType(localVarHttpContentTypes); // to determine the Accept header String[] localVarHttpHeaderAccepts = new String[] { "application/json" }; String localVarHttpHeaderAccept = Configuration.ApiClient.SelectHeaderAccept(localVarHttpHeaderAccepts); if (localVarHttpHeaderAccept != null) localVarHeaderParams.Add("Accept", localVarHttpHeaderAccept); // set "format" to json by default // e.g. 
/pet/{petId}.{format} becomes /pet/{petId}.json localVarPathParams.Add("format", "json"); if (body != null && body.GetType() != typeof(byte[])) { localVarPostBody = Configuration.ApiClient.Serialize(body); // http body (model) parameter } else { localVarPostBody = body; // byte array } // authentication (api_key) required if (!String.IsNullOrEmpty(Configuration.GetApiKeyWithPrefix("API-Key"))) { localVarHeaderParams["API-Key"] = Configuration.GetApiKeyWithPrefix("API-Key"); } // make the HTTP request IRestResponse localVarResponse = (IRestResponse) await Configuration.ApiClient.CallApiAsync(localVarPath, Method.PUT, localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarFileParams, localVarPathParams, localVarHttpContentType); int localVarStatusCode = (int) localVarResponse.StatusCode; if (ExceptionFactory != null) { Exception exception = ExceptionFactory("UpdateVendorComplianceSurvey", localVarResponse); if (exception != null) throw exception; } return new ApiResponse<Object>(localVarStatusCode, localVarResponse.Headers.ToDictionary(x => x.Name, x => x.Value.ToString()), null); } } }
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Flurl;
using Flurl.Http;
using OAuth;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json;
using System.Net.Http;
using QuickBooks.Net.Exceptions;
using QuickBooks.Net.Data.Error_Responses;
using System.Text;
using QuickBooks.Net.Data.Models;
using QuickBooks.Net.Data.Models.Batch;
using QuickBooks.Net.Utilities;
using QuickBooks.Net.Data.Models.Query;
using System.Linq;
using MoreLinq;

namespace QuickBooks.Net.Controllers
{
    /// <summary>
    /// Base class for QuickBooks entity controllers. Builds the v3 REST URL for the
    /// configured realm, signs each request with OAuth 1.0a, and provides shared
    /// query / get / post / batch / email / download plumbing for derived controllers.
    /// </summary>
    public abstract class BaseController
    {
        #region Url Constants
        private const string QbSandboxUrlV3 = "https://sandbox-quickbooks.api.intuit.com/v3";
        private const string QbUrlV3 = "https://quickbooks.api.intuit.com/v3";
        #endregion

        /// <summary>QuickBooks entity name (e.g. "Invoice") supplied by each derived controller.</summary>
        protected abstract string ObjectName { get; }

        private readonly string _oAuthVersion;

        // Base company URL; switches host depending on Client.SandboxMode.
        private string Url
        {
            get
            {
                var baseUrl = Client.SandboxMode ? QbSandboxUrlV3 : QbUrlV3;
                return $"{baseUrl}/company/{Client.RealmId}/";
            }
        }

        protected QuickBooksClient Client;

        protected BaseController(QuickBooksClient client, string oAuthVersion)
        {
            Client = client;
            _oAuthVersion = oAuthVersion;
        }

        /// <summary>Returns the total number of entities of type <see cref="ObjectName"/> via a Count(*) query.</summary>
        protected async Task<int> GetObjectCount<T>()
        {
            var content = new StringContent($"Select Count(*) From {ObjectName}", Encoding.UTF8, "application/text");
            var result = await MakeRequest<QueryResponse<T>>("query", HttpMethod.Post, content, isQuery: true);
            return result.Count;
        }

        /// <summary>
        /// Runs a QuickBooks SQL-like query. Unless <paramref name="overrideOptions"/> is true,
        /// " startposition {n} maxresults {m}" paging is appended to the query text.
        /// </summary>
        protected async Task<IEnumerable<T>> QueryRequest<T>(string query, int startPosition = 1, int maxResult = 100, bool overrideOptions = false)
            where T : QuickBooksBaseModel
        {
            var additionalQuery = overrideOptions ? "" : $" startposition {startPosition} maxresults {maxResult}";
            var content = new StringContent(query + additionalQuery, Encoding.UTF8, "application/text");
            var result = await MakeRequest<QueryResponse<T>>("query", HttpMethod.Post, content, isQuery: true);
            return result.Data;
        }

        /// <summary>String-id model variant of <see cref="QueryRequest{T}"/>.</summary>
        protected async Task<IEnumerable<T>> QueryRequestString<T>(string query, int startPosition = 1, int maxResult = 100, bool overrideOptions = false)
            where T : QuickBooksBaseModelString
        {
            var additionalQuery = overrideOptions ? "" : $" startposition {startPosition} maxresults {maxResult}";
            var content = new StringContent(query + additionalQuery, Encoding.UTF8, "application/text");
            var result = await MakeRequest<QueryResponse<T>>("query", HttpMethod.Post, content, isQuery: true);
            return result.Data;
        }

        /// <summary>Fetches a single entity by id.</summary>
        protected async Task<T> GetRequest<T>(object entityId)
            where T : QuickBooksBaseModel
        {
            return await MakeRequest<T>(ObjectName.ToLower() + $"/{entityId}", HttpMethod.Get);
        }

        /// <summary>String-id model variant of <see cref="GetRequest{T}"/>.</summary>
        protected async Task<T> GetRequestString<T>(object entityId)
            where T : QuickBooksBaseModelString
        {
            return await MakeRequest<T>(ObjectName.ToLower() + $"/{entityId}", HttpMethod.Get);
        }

        // Projects an item into the wire shape appropriate for the batch operation.
        internal QuickBooksBaseModel GetReturnObject<T>(T item, BatchOperation operation)
            where T : QuickBooksBaseModel
        {
            switch (operation)
            {
                case BatchOperation.Create:
                    return item.CreateReturnObject();
                case BatchOperation.Update:
                    return item.UpdateReturnObject();
                default:
                    return item.DeleteReturnObject();
            }
        }

        // String-id model variant of GetReturnObject.
        internal QuickBooksBaseModelString GetReturnObjectString<T>(T item, BatchOperation operation)
            where T : QuickBooksBaseModelString
        {
            switch (operation)
            {
                case BatchOperation.Create:
                    return item.CreateReturnObject();
                case BatchOperation.Update:
                    return item.UpdateReturnObject();
                default:
                    return item.DeleteReturnObject();
            }
        }

        /// <summary>
        /// Sends a batch operation. QuickBooks limits a batch to 30 items; larger inputs are
        /// split into sequential 30-item batches and aggregated into a MultiBatchResponse.
        /// </summary>
        protected async Task<IBatchResponse<T>> BatchRequest<T>(IEnumerable<T> items, BatchOperation operation)
            where T : QuickBooksBaseModel
        {
            // Materialize once: 'items' was previously enumerated by both Count() and Batch().
            var itemList = items.ToList();
            if (itemList.Count <= 30)
            {
                var request = new BatchItemRequest(
                    itemList.Select(x => GetReturnObject(x, operation)).ToList(),
                    operation);
                return await MakeRequest<BatchResponse<T>>("batch", HttpMethod.Post, content: request, batchRequest: true);
            }

            var multiBatchResponse = new MultiBatchResponse<T>();
            foreach (var batch in itemList.Batch(30))
            {
                var request = new BatchItemRequest(
                    batch.Select(x => GetReturnObject(x, operation)).ToList(),
                    operation);
                // FIX: the original blocked on MakeRequest(...).Result inside this async method,
                // risking deadlocks and thread-pool starvation. Awaiting keeps the same
                // sequential batch ordering without blocking a thread.
                var response = await MakeRequest<BatchResponse<T>>("batch", HttpMethod.Post, content: request, batchRequest: true);
                multiBatchResponse.responses.Add(response);
            }
            return multiBatchResponse;
        }

        /// <summary>String-id model variant of <see cref="BatchRequest{T}"/>.</summary>
        protected async Task<IBatchResponse<T>> BatchRequestString<T>(IEnumerable<T> items, BatchOperation operation)
            where T : QuickBooksBaseModelString
        {
            var itemList = items.ToList();
            if (itemList.Count <= 30)
            {
                var request = new BatchItemRequest(
                    itemList.Select(x => GetReturnObjectString(x, operation)).ToList(),
                    operation);
                return await MakeRequest<BatchResponse<T>>("batch", HttpMethod.Post, content: request, batchRequest: true);
            }

            var multiBatchResponse = new MultiBatchResponse<T>();
            foreach (var batch in itemList.Batch(30))
            {
                var request = new BatchItemRequest(
                    batch.Select(x => GetReturnObjectString(x, operation)).ToList(),
                    operation);
                // FIX: await instead of .Result (see BatchRequest above).
                var response = await MakeRequest<BatchResponse<T>>("batch", HttpMethod.Post, content: request, batchRequest: true);
                multiBatchResponse.responses.Add(response);
            }
            return multiBatchResponse;
        }

        /// <summary>Creates, updates (update=true), or deletes (delete=true) an entity.</summary>
        protected async Task<T> PostRequest<T>(T content, bool update = false, bool delete = false, Dictionary<string, string> additionalParams = null)
            where T : QuickBooksBaseModel
        {
            return await MakeRequest<T>(ObjectName.ToLower(), HttpMethod.Post, content,
                update: update, delete: delete, additionalParams: additionalParams);
        }

        /// <summary>String-id model variant of <see cref="PostRequest{T}"/>.</summary>
        protected async Task<T> PostRequestString<T>(T content, bool update = false, bool delete = false, Dictionary<string, string> additionalParams = null)
            where T : QuickBooksBaseModelString
        {
            return await MakeRequest<T>(ObjectName.ToLower(), HttpMethod.Post, content,
                update: update, delete: delete, additionalParams: additionalParams);
        }

        /// <summary>POSTs an empty body to an entity sub-resource (used for "send email" endpoints).</summary>
        protected async Task<T> SendEmailRequest<T>(string resourceUrl, Dictionary<string, string> additionalParams = null)
            where T : QuickBooksBaseModel
        {
            return await MakeRequest<T>($"{ObjectName.ToLower()}/{resourceUrl}", HttpMethod.Post,
                new StringContent("", Encoding.UTF8, "application/octet-stream"),
                additionalParams: additionalParams, emailRequest: true);
        }

        /// <summary>String-id model variant of <see cref="SendEmailRequest{T}"/>.</summary>
        protected async Task<T> SendEmailRequestString<T>(string resourceUrl, Dictionary<string, string> additionalParams = null)
            where T : QuickBooksBaseModelString
        {
            return await MakeRequest<T>($"{ObjectName.ToLower()}/{resourceUrl}", HttpMethod.Post,
                new StringContent("", Encoding.UTF8, "application/octet-stream"),
                additionalParams: additionalParams, emailRequest: true);
        }

        /// <summary>Downloads raw bytes (e.g. a PDF) from the given resource, requesting the given content type.</summary>
        protected async Task<byte[]> DownloadFile(string resourceUrl, string contentType)
        {
            return await MakeRequest<byte[]>(resourceUrl, HttpMethod.Get, acceptType: contentType, fileDownload: true);
        }

        /// <summary>
        /// Central request pipeline: builds the URL and query string, attaches Accept and
        /// OAuth Authorization headers, dispatches GET/POST via Flurl, and translates
        /// Flurl failures into QuickBooks-specific exceptions.
        /// </summary>
        private async Task<T> MakeRequest<T>(string resourceUrl, HttpMethod requestMethod,
            object content = null, string acceptType = null, Dictionary<string, string> additionalParams = null,
            bool isQuery = false, bool update = false, bool delete = false, bool fileDownload = false,
            bool emailRequest = false, bool batchRequest = false)
        {
            var queryParams = additionalParams ?? new Dictionary<string, string>();
            var url = Url + resourceUrl;
            if (update)
            {
                queryParams.Add("operation", "update");
            }
            else if (delete)
            {
                queryParams.Add("operation", "delete");
            }
            queryParams.Add("minorversion", Client.MinorVersion);
            url = url.SetQueryParams(queryParams);

            var accept = Client.AcceptType;
            if (!string.IsNullOrEmpty(acceptType))
            {
                accept += $", {acceptType}";
            }

            var client = url.WithHeaders(new
            {
                Accept = accept,
                Authorization = GetAuthHeader(url, requestMethod, queryParams)
            });

            try
            {
                if (requestMethod == HttpMethod.Get)
                {
                    if (fileDownload)
                    {
                        return (T)Convert.ChangeType(await client.GetBytesAsync(), typeof(T));
                    }
                    // Entity responses are wrapped in an object keyed by the entity name.
                    var objectResponse = await client.GetJsonAsync<JObject>();
                    return objectResponse[ObjectName].ToObject<T>();
                }
                if (requestMethod == HttpMethod.Post)
                {
                    // Queries and email requests carry pre-built HttpContent; everything else is serialized as JSON.
                    var response = !emailRequest && !isQuery
                        ? await client.PostJsonAsync(content)
                        : await client.PostAsync((HttpContent)content);
                    if (isQuery)
                    {
                        var queryContent = await response.Content.ReadAsStringAsync();
                        return JsonConvert.DeserializeObject<T>(queryContent);
                    }
                    if (batchRequest)
                    {
                        var batchContent = await response.Content.ReadAsStringAsync();
                        return JsonConvert.DeserializeObject<T>(batchContent);
                    }
                    var responseContentString = await response.Content.ReadAsStringAsync();
                    var responseContent = JsonConvert.DeserializeObject<JObject>(responseContentString);
                    return responseContent[ObjectName].ToObject<T>();
                }
                return default(T);
            }
            catch (FlurlHttpTimeoutException ex)
            {
                throw new QuickBooksTimeoutException("The QuickBooks request timed out.", ex.Message);
            }
            catch (FlurlHttpException ex)
            {
                if (ex.Call.Response == null)
                {
                    throw new Exception(
                        "The request failed to get a response. You may need to check your internet connection.");
                }
                //The XML parsing is because QuickBooks only returns XML on unauthorized exceptions
                if (ex.Call.Response.Content.Headers.ContentType.MediaType == "text/xml")
                {
                    // FIX: await instead of .Result — blocking here had the same deadlock risk
                    // as the batch loops (await-in-catch is valid since C# 6).
                    var xml = XmlHelper.ParseXmlString(await ex.Call.Response.Content.ReadAsStringAsync());
                    var errorCode = xml["Message"].Split(';')[1].Split('=')[1];
                    throw new QuickbooksAuthorizationException($"QuickBooks application authentication failed. Message: {xml["Message"]}", xml["Detail"], errorCode);
                }
                var response = JsonConvert.DeserializeObject<QuickBooksErrorResponse>(
                    await ex.Call.Response.Content.ReadAsStringAsync());
                throw new QuickBooksException("A Quickbooks exception occurred.", response);
            }
        }

        // Builds the OAuth 1.0a HMAC-SHA1 Authorization header for the signed request.
        private string GetAuthHeader(string url, HttpMethod method, IDictionary<string, string> queryParams)
        {
            return new OAuthRequest
            {
                Version = _oAuthVersion,
                SignatureMethod = OAuthSignatureMethod.HmacSha1,
                ConsumerKey = Client.ConsumerKey,
                ConsumerSecret = Client.ConsumerSecret,
                Token = Client.AccessToken,
                TokenSecret = Client.AccessTokenSecret,
                Method = method.ToString(),
                RequestUrl = url
            }
            .GetAuthorizationHeader(queryParams);
        }
    }
}
using System.Xml;
using System.Xml.Xsl;
using System.Xml.XPath;
using System.Collections;
using System.IO;

namespace MonoTests.oasis_xslt
{
	// Reads test-harness switches from environment variables exactly once
	// (in the static constructor) and exposes them as read-only flags.
	// XSLTTEST_DOM / _DOMXSL / _DOMINSTANCE select XmlDocument-based loading
	// of the stylesheet and/or input; XSLTTEST_WS / _WSXSL / _WSSRC control
	// whitespace preservation; XSLTTEST_INVERSE_RESULTS flips pass/fail reporting
	// (presumably consumed by the test runner — not used in this chunk).
	class EnvOptions
	{
		static readonly bool useDomStyle;
		static readonly bool useDomInstance;
		static readonly string outputDir;
		static readonly bool whitespaceStyle;
		static readonly bool whitespaceInstance;
		static readonly bool inverseResults;

		public static bool UseDomStyle {
			get {return useDomStyle;}
		}

		public static bool UseDomInstance {
			get {return useDomInstance;}
		}

		public static string OutputDir {
			get {return outputDir;}
		}

		public static bool WhitespaceStyle {
			get {return whitespaceStyle;}
		}

		public static bool WhitespaceInstance {
			get {return whitespaceInstance;}
		}

		public static bool InverseResults {
			get {return inverseResults;}
		}

		static EnvOptions ()
		{
			IDictionary env = System.Environment.GetEnvironmentVariables();
			// XSLTTEST_DOM is shorthand for both DOMXSL and DOMINSTANCE.
			if (env.Contains ("XSLTTEST_DOM")) {
				useDomStyle = true;
				useDomInstance = true;
			}
			if (env.Contains ("XSLTTEST_DOMXSL"))
				useDomStyle = true;
			if (env.Contains ("XSLTTEST_DOMINSTANCE"))
				useDomInstance = true;
			// XSLTTEST_WS is shorthand for both WSXSL and WSSRC.
			if (env.Contains ("XSLTTEST_WS")) {
				whitespaceStyle = true;
				whitespaceInstance = true;
			}
			if (env.Contains ("XSLTTEST_WSXSL"))
				whitespaceStyle = true;
			if (env.Contains ("XSLTTEST_WSSRC"))
				whitespaceInstance = true;
			if (env.Contains ("XSLTTEST_INVERSE_RESULTS"))
				inverseResults = true;

			// DOM-based runs write to a separate directory so results can be compared.
			if (useDomStyle || useDomInstance)
				outputDir = "domresults";
			else
				outputDir = "results";
		}
	}

	class Helpers
	{
		// Appends each non-empty, trimmed line of 'filename' to 'array'.
		// Silently does nothing when the file does not exist.
		public static void ReadStrings (ArrayList array, string filename)
		{
			if (!File.Exists (filename))
				return;

			using (StreamReader reader = new StreamReader (filename)) {
				foreach (string s_ in reader.ReadToEnd ().Split ("\n".ToCharArray ())) {
					string s = s_.Trim ();
					if (s.Length > 0)
						array.Add (s);
				}
			}
		}
	}

	// Wraps one <test-case> element from the OASIS XSLT catalog: resolves the
	// stylesheet, input document, and expected-output paths for the
	// operation='standard' scenario.
	class CatalogTestCase
	{
		string _stylesheet;
		string _srcxml;
		string _outfile;
		public enum CompareType {
			Text,
			HTML,
			XML
		}
		CompareType _compare;
		XmlElement _testCase;
		string _outputDir;

		public CatalogTestCase (string outputDir, XmlElement testCase)
		{
			_testCase = testCase;
			_outputDir = outputDir;
		}

		// Resolves all paths for this test case; creates the output directory.
		// Returns false when there is no 'standard' scenario to run.
		public bool Process ()
		{
			string relPath = GetRelPath ();
			string path = Path.Combine (Path.Combine ("testsuite", "TESTS"), relPath);
			string outputPath = Path.Combine (_outputDir, relPath);
			if (!Directory.Exists (outputPath))
				Directory.CreateDirectory (outputPath);
			//FIXME: this ignores negative tests. Read README if you want to fix it
			XmlNode scenario = _testCase.SelectSingleNode ("scenario[@operation='standard']");
			if (scenario == null)
				return false;
			ProcessScenario (path, outputPath, scenario);
			return true;
		}

		// Maps the catalog submitter to the on-disk conformance-suite directory.
		string GetRelPath ()
		{
			string filePath = _testCase.SelectSingleNode ("file-path").InnerText;
			string submitter = _testCase.SelectSingleNode ("./parent::test-catalog/@submitter").InnerText;
			if (submitter == "Lotus")
				return Path.Combine ("Xalan_Conformance_Tests", filePath);
			else if (submitter == "Microsoft")
				return Path.Combine ("MSFT_Conformance_Tests", filePath);
			else
				throw new System.Exception ("unknown submitter in the catalog");
		}

		// Extracts stylesheet, source-xml, and expected-output info from the scenario.
		// A missing stylesheet is logged (appended) to missing.lst rather than thrown.
		void ProcessScenario (string path, string outputPath, XmlNode scenario)
		{
			string stylesheetBase = scenario.SelectSingleNode ("input-file[@role='principal-stylesheet']").InnerText;
			_stylesheet = Path.Combine (path, stylesheetBase);
			if (!File.Exists (_stylesheet)) {
				using (StreamWriter wr = new StreamWriter ("missing.lst", true))
					wr.WriteLine (_stylesheet);
			}
			_srcxml = Path.Combine (path, scenario.SelectSingleNode ("input-file[@role='principal-data']").InnerText);
			XmlNode outputNode = scenario.SelectSingleNode ("output-file[@role='principal']");
			if (outputNode != null) {
				_outfile = Path.Combine (outputPath, outputNode.InnerText);
				switch (outputNode.Attributes ["compare"].Value) {
				case "XML":
					_compare = CompareType.XML;
					break;
				case "HTML":
					_compare = CompareType.HTML;
					break;
				default:
					// Any other compare attribute value falls back to plain-text comparison.
					_compare = CompareType.Text;
					break;
				}
			} else {
				// No expected output file: nothing to write, compare as text by default.
				_outfile = null;
				_compare = CompareType.Text;
			}
		}

		public CompareType Compare {
			get {return _compare;}
		}

		public string StyleSheet {
			get {return _stylesheet;}
		}

		public string SrcXml {
			get {return _srcxml;}
		}

		public string OutFile {
			get {return _outfile;}
		}
	}

	// Executes one catalog test case through the (obsolete, intentionally-tested)
	// XslTransform API and captures either the transformed output or the exception.
	class SingleTestTransform
	{
		CatalogTestCase _testCase;

		public SingleTestTransform (CatalogTestCase testCase)
		{
			_testCase = testCase;
		}

		string _result;
		public string Result {
			get {return _result;}
		}

		System.Exception _exception;
		public System.Exception Exception {
			get {return _exception;}
		}

		public bool Succeeded {
			get {return this.Exception == null;}
		}

		public CatalogTestCase TestCase {
			get {return _testCase;}
		}

		// Loads the stylesheet either as a DOM (XmlDocument) or as an XPathDocument,
		// honoring the whitespace-preservation option in both paths.
		XslTransform LoadTransform ()
		{
			XslTransform trans = new XslTransform ();
			if (EnvOptions.UseDomStyle) {
				XmlDocument styledoc = new XmlDocument ();
				if (EnvOptions.WhitespaceStyle)
					styledoc.PreserveWhitespace = true;
				styledoc.Load (_testCase.StyleSheet);
				trans.Load (styledoc, null, null);
			} else
				trans.Load (new XPathDocument (
					_testCase.StyleSheet,
					EnvOptions.WhitespaceStyle ? XmlSpace.Preserve : XmlSpace.Default),
					null, null);
			return trans;
		}

		// Loads the source document with validation disabled; reader is always closed.
		IXPathNavigable LoadInput ()
		{
			XmlTextReader xtr=null;
			try {
				xtr = new XmlTextReader (_testCase.SrcXml);
				// Validation is explicitly turned off: the suite contains documents
				// with DTDs that should not be enforced here.
				XmlValidatingReader xvr = new XmlValidatingReader (xtr);
				xvr.ValidationType = ValidationType.None;
				IXPathNavigable input = null;
				if (EnvOptions.UseDomInstance) {
					XmlDocument dom = new XmlDocument ();
					if (EnvOptions.WhitespaceInstance)
						dom.PreserveWhitespace = true;
					dom.Load (xvr);
					input = dom;
				} else {
					input = new XPathDocument (xvr,
						EnvOptions.WhitespaceInstance ?
						XmlSpace.Preserve :
						XmlSpace.Default);
				}
				return input;
			} finally {
				if (xtr!=null)
					xtr.Close ();
			}
		}

		// Runs the transform; the result string is newline-normalized so output
		// comparison is platform-independent. Any exception is captured, not thrown.
		public void RunTest ()
		{
			try {
				XslTransform trans = LoadTransform ();
				IXPathNavigable input = LoadInput ();
				using (StringWriter sw = new StringWriter ()) {
					trans.Transform (input, null, sw, null);
					_result = sw.ToString ().Replace ("\r\n", "\n");
				}
			} catch (System.Exception e) {
				_exception = e;
			}
		}
	}
}
using UnityEngine;
using System.Collections;
using System;
using System.IO;
using System.Net.Sockets;
using System.Threading;
using State;
using System.Collections.Generic;

namespace Networking
{
	// Unity MonoBehaviour that wires scene-configurable connection settings into
	// the NetworkManager singleton and offers a "Connect Server" button.
	public class NetworkManagerObj : MonoBehaviour
	{
		public string Host = "192.168.1.199";
		public int Port = 50505;
		public string channelName = "channel-1";
		public int maxConnectingRetries = 3;
		NetworkManager manager;
		float btnX, btnY, btnW, btnH;
		Rect btnStart;
		// Rect btnRefresh;

		void Start ()
		{
			// Copy inspector settings into the singleton and start its reader thread.
			manager = NetworkManager.getInstance ();
			manager.Host = Host;
			manager.Port = Port;
			manager.channelName = channelName;
			manager.maxConnectingRetries = maxConnectingRetries;
			manager.StartThread ();
			// Button geometry is derived from screen width only (height included).
			btnX = Screen.width * 0.05f;
			btnY = Screen.width * 0.05f;
			btnW = Screen.width * 0.1f;
			btnH = Screen.width * 0.05f;
			btnStart = new Rect (btnX, btnY, btnW, btnH);
		}

		void OnGUI ()
		{
			if (GUI.Button (btnStart, "Connect Server")) {
				Debug.Log ("Connect server");
				Console.WriteLine ("Console");
				manager.startServer ();
			}
		}

		// NOTE(review): Unity's physics callback is named FixedUpdate; "FixUpdate"
		// is presumably a typo, so manager.FixedUpdate() (which drains the action
		// queue) likely never runs — confirm and rename.
		void FixUpdate ()
		{
			manager.FixedUpdate ();
		}
	}

	// Singleton
	// Owns a TCP connection to the control server: a background thread reads
	// line-delimited messages, handshake messages are handled inline, and
	// control messages are queued as actions for the main thread to execute.
	public class NetworkManager
	{
		// Connection state constants (compared against the status field).
		public static readonly string kStatusNotInit = "NotInit";
		public static readonly string kStatusConnecting = "Connecting";
		// NOTE(review): kStatusReady is never assigned anywhere in this file —
		// even a successful handshake accept leaves status at "Connecting". Verify.
		public static readonly string kStatusReady = "Ready";
		public static readonly string kStatusDisconnected = "Disconnected";
		public static readonly string kStatusDrop = "Drop";
		public string Host;
		public int Port;
		public string channelName;
		public int maxConnectingRetries;
		ControlInterface ctrl;
		// Peer id assigned by the server on handshake accept; 0 means "never connected".
		int id;
		string status = kStatusNotInit;
		bool socketReady = false;
		bool threadRunning = false;
		int connectingRetries = 0;
		TcpClient mySocket;
		Thread theThread;
		NetworkStream theStream;
		StreamWriter theWriter;
		StreamReader theReader;
		static NetworkManager instance = new NetworkManager ();

		public NetworkManager ()
		{
		}

		public static NetworkManager getInstance ()
		{
			return instance;
		}

		public static void setController (ControlInterface ctrl)
		{
			instance.ctrl = ctrl;
		}

		// Starts the socket-reader thread once; subsequent calls are no-ops
		// while threadRunning is true.
		public void StartThread ()
		{
			// Create new thread
			if (!threadRunning) {
				threadRunning = true;
				ThreadStart ts = new ThreadStart (socketReader);
				theThread = new Thread (ts);
				theThread.Start ();
				Debug.Log ("Thread created");
			}
		}

		// Actions produced on the reader thread, consumed on the main thread.
		// NOTE(review): Queue<T> is not thread-safe and this queue is touched from
		// both socketReader (via handleMessage) and FixedUpdate without a lock —
		// consider locking or a concurrent queue; confirm the threading model.
		Queue<CtrlAction> queue = new Queue<CtrlAction> ();

		// Drains the queued control actions; intended to run on the main thread.
		public void FixedUpdate ()
		{
			while (queue.Count > 0) {
				var action = queue.Dequeue ();
				if (action != null) {
					action ();
				}
			}
		}

		public void startServer ()
		{
			if (socketReady) {
				return;
			}
			status = kStatusConnecting;
			setupSocket ();
		}

		// Opens the TCP connection and immediately sends either a connect
		// (first time, id == 0) or reconnect (id already assigned) handshake.
		// Failures are logged and swallowed; socketReady stays false.
		public void setupSocket ()
		{
			try {
				mySocket = new TcpClient (Host, Port);
				theStream = mySocket.GetStream ();
				theWriter = new StreamWriter (theStream);
				theReader = new StreamReader (theStream);
				socketReady = true;
				status = kStatusConnecting;
				// connect or reconnect
				if (id == 0) {
					Debug.Log ("Connecting...");
					Message msg = new Message (Const.kHandshakeConnect, Const.kRoleApp);
					msg.channel_name = channelName;
					sendMessage (msg);
				} else {
					Debug.Log ("Reconnecting...");
					Message msg = new Message (Const.kHandshakeReconnect, Const.kRoleApp);
					msg.peer_id = id;
					msg.channel_name = channelName;
					sendMessage (msg);
				}
			} catch (Exception e) {
				Debug.Log ("Socket error:" + e);
			}
		}

		// Writes one line to the socket and flushes; silently drops the message
		// when the socket is not ready.
		public void writeSocket (string theLine)
		{
			if (!socketReady) {
				return;
			}
			String tmpString = theLine;
			theWriter.Write (tmpString);
			theWriter.WriteLine ();
			theWriter.Flush ();
		}

		// This function blocks until it reads something to return
		public String readSocket ()
		{
			if (!socketReady) {
				return "";
			}
			var line = theReader.ReadLine ();
			return line;
		}

		public void closeSocket ()
		{
			if (!socketReady) {
				return;
			}
			theWriter.Close ();
			theReader.Close ();
			mySocket.Close ();
			socketReady = false;
		}

		// Attempts a reconnect when the stream is no longer readable.
		public void maintainConnection ()
		{
			if (socketReady && !theStream.CanRead) {
				Debug.Log ("Connection dropped. Reconnecting...");
				status = kStatusDrop;
				connectingRetries = 0;
				// TODO: wait 500ms
				setupSocket();
			}
		}

		public void sendMessage (Message msg)
		{
			string s = msg.Marshal ();
			Debug.Log ("Send message: " + s);
			writeSocket (s);
		}

		// Signals the reader thread's loop to exit after its current iteration.
		public void stopListening ()
		{
			threadRunning = false;
		}

		// Dispatches an incoming message: handshake types are handled here,
		// acks are ignored, anything else is treated as a control message.
		// Runs on the reader thread.
		public void handleMessage (Message msg)
		{
			Debug.Log ("Receive: " + msg.Marshal ());
			if (msg.type == Const.kHandshakeAccept) {
				if (status == kStatusConnecting) {
					id = msg.peer_id;
				} else {
					Debug.LogError ("Unexpected handshake accept: " + msg.type);
				}
			} else if (msg.type == Const.kHandshakeError) {
				if (status == kStatusConnecting && connectingRetries < maxConnectingRetries) {
					connectingRetries += 1;
					Debug.LogError ("Handshake error. Retry " + connectingRetries + "/" + maxConnectingRetries + ": " + msg.type);
					// TODO: wait 500ms
					// NOTE(review): this retry path opens a new socket and then
					// immediately closes it and marks the status Disconnected —
					// the statement order (setupSocket before closeSocket) looks
					// inverted; confirm intent against the original file.
					setupSocket();
					closeSocket ();
					status = kStatusDisconnected;
				} else {
					Debug.LogError ("Handshake error. Disconnected: " + msg.type);
				}
			} else if (msg.type == Const.kHandshakeClose) {
				Debug.Log ("Receive handshake close");
				status = kStatusDisconnected;
				closeSocket ();
			} else if (msg.type.StartsWith (Const.kHandshake)) {
				Debug.LogWarning ("Unknown handshake message: " + msg.type);
			} else if (msg.type == Const.kAck) {
				// Do nothing
			} else {
				handleControlMessage (msg);
			}
		}

		// Translates a control message into an action via the injected controller
		// and queues it for the main thread; controller exceptions are logged.
		void handleControlMessage (Message msg)
		{
			try {
				var action = ctrl.handleCtrlMsg (msg);
				if (action != null) {
					queue.Enqueue (action);
				}
			} catch (Exception e) {
				Debug.LogError (e);
			}
		}

		// This function runs on another thread for reading socket
		void socketReader ()
		{
			while (threadRunning) {
				try {
					var line = readSocket ();
					if (line != "") {
						Debug.Log ("Read: " + line);
						var msg = Message.Unmarshal (line);
						handleMessage (msg);
					} else {
						// Socket not ready (readSocket returned "") — back off briefly.
						Thread.Sleep (50);
					}
				} catch (Exception e) {
					Debug.Log (e);
				}
			}
		}

		// NOTE(review): OnApplicationQuit is a Unity callback, but this class is
		// not a MonoBehaviour, so Unity will not invoke it here — verify who calls it.
		void OnApplicationQuit ()
		{
			stopListening ();
			// wait for listening thread to terminate (max. 500ms)
			if (theThread != null) {
				theThread.Join (500);
			}
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using Xunit;

namespace System.Collections.Tests
{
    // xUnit tests covering every Hashtable constructor overload:
    // default, (IDictionary), (IDictionary, float), (int), and (int, float),
    // including the argument-validation (exception-throwing) paths.
    public class Hashtable_CtorTests
    {
        // Default constructor yields a non-null, empty table.
        [Fact]
        public void TestCtorDefault()
        {
            Hashtable hash = null;
            // NOTE(review): nAttempts/iIntArray are never used in this test — dead locals.
            int nAttempts = 100;
            int[] iIntArray = new int[nAttempts];

            //
            // [] Constructor: Create Hashtable using default settings.
            //
            hash = new Hashtable();
            Assert.NotNull(hash);

            // Verify that the hash tabe is empty.
            // NOTE(review): xUnit convention is Assert.Equal(expected, actual) —
            // the arguments here are reversed (harmless for the check itself).
            Assert.Equal(hash.Count, 0);
        }

        // (IDictionary, float) constructor: accepts nested tables, copies all
        // entries, and leaves the new table independent of the source.
        [Fact]
        public void TestCtorDictionarySingle()
        {
            // No exception
            var hash = new Hashtable(new Hashtable(), 1f);

            // No exception
            hash = new Hashtable(new Hashtable(new Hashtable(new Hashtable(new Hashtable(new Hashtable()), 1f), 1f), 1f), 1f);

            // []test to see if elements really get copied from old dictionary to new hashtable
            Hashtable tempHash = new Hashtable();
            // this for assumes that MinValue is a negative!
            for (long i = long.MinValue; i < long.MinValue + 100; i++)
            {
                tempHash.Add(i, i);
            }

            hash = new Hashtable(tempHash, 1f);

            // make sure that new hashtable has the elements in it that old hashtable had
            for (long i = long.MinValue; i < long.MinValue + 100; i++)
            {
                Assert.True(hash.ContainsKey(i));
                Assert.True(hash.ContainsValue(i));
            }

            //[]make sure that there are no connections with the old and the new hashtable
            tempHash.Clear();
            for (long i = long.MinValue; i < long.MinValue + 100; i++)
            {
                Assert.True(hash.ContainsKey(i));
                Assert.True(hash.ContainsValue(i));
            }
        }

        // (IDictionary, float) constructor argument validation: null dictionary
        // and out-of-range load factors must throw.
        [Fact]
        public void TestCtorDictionarySingleNegative()
        {
            // variables used for tests
            Hashtable hash = null;

            Assert.Throws<ArgumentNullException>(() =>
                             {
                                 hash = new Hashtable(null, 1);
                             }
            );

            Assert.Throws<ArgumentOutOfRangeException>(() =>
                             {
                                 hash = new Hashtable(new Hashtable(), Int32.MinValue);
                             }
            );

            Assert.Throws<ArgumentOutOfRangeException>(() =>
                             {
                                 hash = new Hashtable(new Hashtable(), Single.NaN);
                             }
            );

            Assert.Throws<ArgumentOutOfRangeException>(() =>
                             {
                                 hash = new Hashtable(new Hashtable(), 100.1f);
                             }
            );
        }

        // (IDictionary) constructor: null throws, empty and populated
        // dictionaries are copied with keys/values preserved.
        [Fact]
        public void TestCtorDictionary()
        {
            Hashtable hash = null;
            Hashtable hash2 = null;
            Int32 i4a;

            //
            // [] Constructor: null
            //
            Assert.Throws<ArgumentNullException>(() =>
                             {
                                 hash = new Hashtable((IDictionary)null);
                             }
            );

            //
            // []Constructor: empty
            //
            hash2 = new Hashtable(); //empty dictionary
            // No exception
            hash = new Hashtable(hash2);

            //
            // []Constructor: dictionary with 100 entries...
            //
            hash2 = new Hashtable();
            for (int i = 0; i < 100; i++)
            {
                hash2.Add("key_" + i, "val_" + i);
            }
            // No exception
            hash = new Hashtable(hash2);

            //Confirming the values
            Hashtable hash3 = new Hashtable(200);
            for (int ii = 0; ii < 100; ii++)
            {
                i4a = ii;
                hash3.Add("key_" + ii, i4a);
            }
            hash = new Hashtable(hash3);
            Assert.Equal(100, hash.Count);
            for (int ii = 0; ii < 100; ii++)
            {
                // NOTE(review): these assertions inspect the SOURCE table (hash3),
                // not the copy (hash) — probably intended to verify the copy.
                Assert.Equal(ii, (int)hash3["key_" + ii]);
                Assert.True(hash3.ContainsKey("key_" + ii));
            }
            Assert.False(hash3.ContainsKey("key_100"));
        }

        // (int, float) constructor happy path: a small load factor works and the
        // table round-trips 100 entries through Contains/ContainsKey/ContainsValue/CopyTo.
        [Fact]
        public void TestCtorIntSingle()
        {
            // variables used for tests
            Hashtable hash = null;

            // [] should get ArgumentException if trying to have large num of entries
            Assert.Throws<ArgumentException>(() =>
                             {
                                 hash = new Hashtable(int.MaxValue, .1f);
                             }
            );

            // []should not get any exceptions for valid values - we also check that the HT works here
            hash = new Hashtable(100, .1f);
            int iNumberOfElements = 100;
            for (int i = 0; i < iNumberOfElements; i++)
            {
                hash.Add("Key_" + i, "Value_" + i);
            }

            //Count
            Assert.Equal(hash.Count, iNumberOfElements);

            DictionaryEntry[] strValueArr = new DictionaryEntry[hash.Count];
            hash.CopyTo(strValueArr, 0);

            // hsh3 collects copied values; Add would throw on a duplicate,
            // so this doubles as a uniqueness check.
            Hashtable hsh3 = new Hashtable();
            for (int i = 0; i < iNumberOfElements; i++)
            {
                Assert.True(hash.Contains("Key_" + i), "Error, Expected value not returned, " + hash.Contains("Key_" + i));
                Assert.True(hash.ContainsKey("Key_" + i), "Error, Expected value not returned, " + hash.ContainsKey("Key_" + i));
                Assert.True(hash.ContainsValue("Value_" + i), "Error, Expected value not returned, " + hash.ContainsValue("Value_" + i));
                //we still need a way to make sure that there are all these unique values here -see below code for that
                Assert.True(hash.ContainsValue(((DictionaryEntry)strValueArr[i]).Value), "Error, Expected value not returned, " + ((DictionaryEntry)strValueArr[i]).Value);
                hsh3.Add(((DictionaryEntry)strValueArr[i]).Value, null);
            }
        }

        // (int, float) constructor argument validation.
        [Fact]
        public void TestCtorIntSingleNegative()
        {
            Hashtable hash = null;

            // []should get ArgumentOutOfRangeException if capacity range is not correct
            Assert.Throws<ArgumentOutOfRangeException>(() =>
                             {
                                 hash = new Hashtable(5, .01f);
                             }
            );

            // should get ArgumentOutOfRangeException if range is not correct
            Assert.Throws<ArgumentOutOfRangeException>(() =>
                             {
                                 hash = new Hashtable(5, 100.1f);
                             }
            );

            // should get OutOfMemoryException if Dictionary is null
            // NOTE(review): the comment above does not match the assertion — the
            // load factor 100.1f is rejected (ArgumentOutOfRangeException) before
            // capacity matters.
            Assert.Throws<ArgumentOutOfRangeException>(() =>
                             {
                                 hash = new Hashtable(int.MaxValue, 100.1f);
                             }
            );

            // []ArgumentOutOfRangeException if capacity is less than zero.
            Assert.Throws<ArgumentOutOfRangeException>(() =>
                             {
                                 hash = new Hashtable(int.MinValue, 10.1f);
                             }
            );
        }

        // (int) constructor: positive and zero capacities are valid; negative
        // throws ArgumentOutOfRangeException and Int32.MaxValue ArgumentException.
        [Fact]
        public void TestCtorIntCapacity()
        {
            //--------------------------------------------------------------------------
            // Variable definitions.
            //--------------------------------------------------------------------------
            Hashtable hash = null;
            int nCapacity = 100;

            //
            // [] Constructor: Create Hashtable using a capacity value.
            //
            hash = new Hashtable(nCapacity);
            Assert.NotNull(hash);

            // Verify that the hash tabe is empty.
            Assert.Equal(0, hash.Count);

            //
            // [] Constructor: Create Hashtablewith zero capacity value - valid.
            //
            hash = new Hashtable(0);

            //
            // []Constructor: Create Hashtable using a invalid capacity value.
            //
            Assert.Throws<ArgumentOutOfRangeException>(() =>
                             {
                                 hash = new Hashtable(-1);
                             }
            );

            Assert.Throws<ArgumentException>(() => { hash = new Hashtable(Int32.MaxValue); });
        }

        // (int, float) constructor: valid pair succeeds; invalid capacity or
        // load factor throws; an enormous capacity exhausts memory.
        [Fact]
        public void TestCtorIntFloat()
        {
            //--------------------------------------------------------------------------
            // Variable definitions.
            //--------------------------------------------------------------------------
            Hashtable hash = null;
            int nCapacity = 100;
            float fltLoadFactor = (float).5; // Note: default load factor is .73

            //
            // []Constructor: Create Hashtable using a capacity and load factor.
            //
            hash = new Hashtable(nCapacity, fltLoadFactor);
            Assert.NotNull(hash);

            // Verify that the hash tabe is empty.
            Assert.Equal(0, hash.Count);

            //
            // [] Constructor: Create Hashtable using a zero capacity and some load factor.
            //
            Assert.Throws<ArgumentOutOfRangeException>(() =>
            {
                hash = new Hashtable(-1, fltLoadFactor);
            });

            //
            // [] Constructor: Create Hashtable using a invalid capacity and valid load factor.
            //
            Assert.Throws<ArgumentOutOfRangeException>(() =>
            {
                hash = new Hashtable(nCapacity, .09f); // min lf allowed is .01
            });

            Assert.Throws<ArgumentOutOfRangeException>(() =>
            {
                hash = new Hashtable(nCapacity, 1.1f);
            });

            Assert.Throws<ArgumentOutOfRangeException>(() =>
            {
                hash = new Hashtable(-1, -1f);
            });

            Assert.Throws<OutOfMemoryException>(() =>
            {
                hash = new Hashtable((int)100000000, .5f);
            });
        }

        // Debugger display/type-proxy attributes are wired up correctly.
        [Fact]
        public void DebuggerAttributeTests()
        {
            DebuggerAttributes.ValidateDebuggerDisplayReferences(new Hashtable());
            DebuggerAttributes.ValidateDebuggerTypeProxyProperties(new Hashtable() { { "a", 1 }, { "b", 2 } });
        }
    }
}
// Copyright (c) 2012, Event Store LLP // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // Neither the name of the Event Store LLP nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
// using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; using EventStore.Common.Options; using EventStore.Common.Utils; using EventStore.Core.Data; using Newtonsoft.Json; using Newtonsoft.Json.Linq; namespace EventStore.Projections.Core.Services.Processing { public class CheckpointTag : IComparable<CheckpointTag> { public readonly int Phase; public readonly TFPos Position; //TODO: rename to StreamsOrEventTypes or just Positions public readonly Dictionary<string, int> Streams; public readonly string CatalogStream; public readonly string DataStream; public readonly int CatalogPosition; public readonly int DataPosition; internal enum Mode { Phase, Position, Stream, MultiStream, EventTypeIndex, PreparePosition, ByStream } private CheckpointTag(int phase, bool completed) { Phase = phase; Position = completed ? new TFPos(long.MaxValue, long.MaxValue) : new TFPos(long.MinValue, long.MinValue); Streams = null; Mode_ = CalculateMode(); } private CheckpointTag(int phase, TFPos position, Dictionary<string, int> streams) { Phase = phase; Position = position; Streams = streams; Mode_ = CalculateMode(); } private CheckpointTag(int phase, long preparePosition) { Phase = phase; Position = new TFPos(long.MinValue, preparePosition); Mode_ = CalculateMode(); } private CheckpointTag(int phase, TFPos position) { Phase = phase; Position = position; Mode_ = CalculateMode(); } private CheckpointTag(int phase, IDictionary<string, int> streams) { Phase = phase; foreach (var stream in streams) { if (stream.Key == "") throw new ArgumentException("Empty stream name", "streams"); if (stream.Value < 0 && stream.Value != ExpectedVersion.NoStream) throw new ArgumentException("Invalid sequence number", "streams"); } Streams = new Dictionary<string, int>(streams); // clone Position = new TFPos(Int64.MinValue, Int64.MinValue); Mode_ = CalculateMode(); } private CheckpointTag(int phase, IDictionary<string, int> eventTypes, TFPos position) { Phase = 
phase; Position = position; foreach (var stream in eventTypes) { if (stream.Key == "") throw new ArgumentException("Empty stream name", "eventTypes"); if (stream.Value < 0 && stream.Value != ExpectedVersion.NoStream) throw new ArgumentException("Invalid sequence number", "eventTypes"); } Streams = new Dictionary<string, int>(eventTypes); // clone Mode_ = CalculateMode(); } private CheckpointTag(int phase, string stream, int sequenceNumber) { Phase = phase; if (stream == null) throw new ArgumentNullException("stream"); if (stream == "") throw new ArgumentException("stream"); if (sequenceNumber < 0 && sequenceNumber != ExpectedVersion.NoStream) throw new ArgumentException("sequenceNumber"); Position = new TFPos(Int64.MinValue, Int64.MinValue); Streams = new Dictionary<string, int> {{stream, sequenceNumber}}; Mode_ = CalculateMode(); } private CheckpointTag( int phase, string catalogStream, int catalogPosition, string dataStream, int dataPosition, long commitPosition) { Phase = phase; CatalogStream = catalogStream; CatalogPosition = catalogPosition; DataStream = dataStream; DataPosition = dataPosition; Position = new TFPos(commitPosition, Int64.MinValue); Mode_ = Mode.ByStream; } private Mode CalculateMode() { if (Streams == null || Streams.Count == 0) if (Position.CommitPosition == Int64.MinValue && Position.PreparePosition == Int64.MinValue) return Mode.Phase; else if (Position.CommitPosition == Int64.MaxValue && Position.PreparePosition == Int64.MaxValue) return Mode.Phase; else if (Position.CommitPosition == Int64.MinValue && Position.PreparePosition != Int64.MinValue) return Mode.PreparePosition; else return Mode.Position; if (Position != new TFPos(Int64.MinValue, Int64.MinValue)) return Mode.EventTypeIndex; if (Streams.Count == 1) return Mode.Stream; return Mode.MultiStream; } public static bool operator >(CheckpointTag left, CheckpointTag right) { if (ReferenceEquals(left, right)) return false; if (!ReferenceEquals(left, null) && ReferenceEquals(right, null)) 
return true; if (ReferenceEquals(left, null) && !ReferenceEquals(right, null)) return false; if (left.Phase > right.Phase) return true; if (left.Phase < right.Phase) return false; var leftMode = left.Mode_; var rightMode = right.Mode_; UpgradeModes(ref leftMode, ref rightMode); if (leftMode != rightMode) throw new NotSupportedException("Cannot compare checkpoint tags in different modes"); switch (leftMode) { case Mode.ByStream: CheckCatalogCompatibility(left, right); return left.CatalogPosition > right.CatalogPosition || (left.CatalogPosition == right.CatalogPosition && left.DataPosition > right.DataPosition); case Mode.Phase: return left.Position > right.Position; case Mode.Position: case Mode.EventTypeIndex: return left.Position > right.Position; case Mode.PreparePosition: return left.PreparePosition > right.PreparePosition; case Mode.Stream: if (left.Streams.Keys.First() != right.Streams.Keys.First()) throw new InvalidOperationException("Cannot compare checkpoint tags across different streams"); var result = left.Streams.Values.First() > right.Streams.Values.First(); return result; case Mode.MultiStream: int rvalue; bool anyLeftGreater = left.Streams.Any(l => !right.Streams.TryGetValue(l.Key, out rvalue) || l.Value > rvalue); int lvalue; bool anyRightGreater = right.Streams.Any(r => !left.Streams.TryGetValue(r.Key, out lvalue) || r.Value > lvalue); if (anyLeftGreater && anyRightGreater) ThrowIncomparable(left, right); return anyLeftGreater; default: throw new NotSupportedException("Checkpoint tag mode is not supported in comparison"); } } private static void CheckCatalogCompatibility(CheckpointTag left, CheckpointTag right) { if (left.CatalogStream != right.CatalogStream) throw new Exception("Cannot compare tags with different catalog streams"); } private static void ThrowIncomparable(CheckpointTag left, CheckpointTag right) { throw new InvalidOperationException( string.Format("Incomparable multi-stream checkpoint tags. 
'{0}' and '{1}'", left, right)); } public static bool operator >=(CheckpointTag left, CheckpointTag right) { if (ReferenceEquals(left, right)) return true; if (!ReferenceEquals(left, null) && ReferenceEquals(right, null)) return true; if (ReferenceEquals(left, null) && !ReferenceEquals(right, null)) return false; if (left.Phase > right.Phase) return true; if (left.Phase < right.Phase) return false; var leftMode = left.Mode_; var rightMode = right.Mode_; UpgradeModes(ref leftMode, ref rightMode); if (leftMode != rightMode) throw new NotSupportedException("Cannot compare checkpoint tags in different modes"); switch (leftMode) { case Mode.ByStream: CheckCatalogCompatibility(left, right); return left.CatalogPosition > right.CatalogPosition || (left.CatalogPosition == right.CatalogPosition && left.DataPosition >= right.DataPosition); case Mode.Phase: return left.Position >= right.Position; case Mode.Position: case Mode.EventTypeIndex: return left.Position >= right.Position; case Mode.PreparePosition: return left.PreparePosition >= right.PreparePosition; case Mode.Stream: if (left.Streams.Keys.First() != right.Streams.Keys.First()) throw new InvalidOperationException("Cannot compare checkpoint tags across different streams"); var result = left.Streams.Values.First() >= right.Streams.Values.First(); return result; case Mode.MultiStream: int rvalue; bool anyLeftGreater = left.Streams.Any(l => !right.Streams.TryGetValue(l.Key, out rvalue) || l.Value > rvalue); int lvalue; bool anyRightGreater = right.Streams.Any(r => !left.Streams.TryGetValue(r.Key, out lvalue) || r.Value > lvalue); if (anyLeftGreater && anyRightGreater) ThrowIncomparable(left, right); return !anyRightGreater; default: throw new NotSupportedException("Checkpoint tag mode is not supported in comparison"); } } public static bool operator <(CheckpointTag left, CheckpointTag right) { return !(left >= right); } public static bool operator <=(CheckpointTag left, CheckpointTag right) { return !(left > right); } 
public static bool operator ==(CheckpointTag left, CheckpointTag right) { return Equals(left, right); } public static bool operator !=(CheckpointTag left, CheckpointTag right) { return !(left == right); } protected bool Equals(CheckpointTag other) { if (Phase != other.Phase) return false; var leftMode = Mode_; var rightMode = other.Mode_; if (leftMode != rightMode) return false; UpgradeModes(ref leftMode, ref rightMode); switch (leftMode) { case Mode.ByStream: return CatalogStream == other.CatalogStream && CatalogPosition == other.CatalogPosition && DataStream == other.DataStream && DataPosition == other.DataPosition && CommitPosition == other.CommitPosition; case Mode.Phase: return Position == other.Position; case Mode.EventTypeIndex: // NOTE: we ignore stream positions as they are only suggestion on // where to start to gain better performance goto case Mode.Position; case Mode.Position: return Position == other.Position; case Mode.PreparePosition: return PreparePosition == other.PreparePosition; case Mode.Stream: if (Streams.Keys.First() != other.Streams.Keys.First()) return false; var result = Streams.Values.First() == other.Streams.Values.First(); return result; case Mode.MultiStream: int rvalue = 0; return Streams.Count == other.Streams.Count && Streams.All(l => other.Streams.TryGetValue(l.Key, out rvalue) && l.Value == rvalue); default: throw new NotSupportedException("Checkpoint tag mode is not supported in comparison"); } } public override bool Equals(object obj) { if (ReferenceEquals(null, obj)) return false; if (ReferenceEquals(this, obj)) return true; if (obj.GetType() != GetType()) return false; return Equals((CheckpointTag) obj); } public override int GetHashCode() { return Position.GetHashCode(); } public long? CommitPosition { get { var commitPosition = Position.CommitPosition; switch (Mode_) { case Mode.ByStream: return commitPosition == long.MinValue ? (long?) 
null : commitPosition; case Mode.Position: case Mode.EventTypeIndex: return commitPosition; default: return null; } } } public long? PreparePosition { get { switch (Mode_) { case Mode.Position: case Mode.PreparePosition: case Mode.EventTypeIndex: return Position.PreparePosition; default: return null; } } } public static CheckpointTag Empty { get { return _empty; } } internal readonly Mode Mode_; private static readonly CheckpointTag _empty = new CheckpointTag(-1, false); public static CheckpointTag FromPhase(int phase, bool completed) { return new CheckpointTag(phase, completed); } public static CheckpointTag FromPosition(int phase, long commitPosition, long preparePosition) { return new CheckpointTag(phase, new TFPos(commitPosition, preparePosition)); } public static CheckpointTag FromPosition(int phase, TFPos position) { return new CheckpointTag(phase, position); } public static CheckpointTag FromPreparePosition(int phase, long preparePosition) { return new CheckpointTag(phase, preparePosition); } public static CheckpointTag FromStreamPosition(int phase, string stream, int sequenceNumber) { return new CheckpointTag(phase, stream, sequenceNumber); } public static CheckpointTag FromStreamPositions(int phase, IDictionary<string, int> streams) { // streams cloned inside return new CheckpointTag(phase, streams); } public static CheckpointTag FromEventTypeIndexPositions(int phase, TFPos position, IDictionary<string, int> streams) { // streams cloned inside return new CheckpointTag(phase, streams, position); } public static CheckpointTag FromByStreamPosition( int phase, string catalogStream, int catalogPosition, string dataStream, int dataPosition, long commitPosition) { return new CheckpointTag(phase, catalogStream, catalogPosition, dataStream, dataPosition, commitPosition); } public int CompareTo(CheckpointTag other) { return this < other ? -1 : (this > other ? 
1 : 0); } public override string ToString() { string result; switch (Mode_) { case Mode.Phase: return "Phase: " + Phase + (Completed ? " (completed)" : ""); case Mode.Position: result = Position.ToString(); break; case Mode.PreparePosition: result = PreparePosition.ToString(); break; case Mode.Stream: result = Streams.Keys.First() + ": " + Streams.Values.First(); break; case Mode.MultiStream: case Mode.EventTypeIndex: var sb = new StringBuilder(); if (Mode_ == Mode.EventTypeIndex) { sb.Append(Position.ToString()); sb.Append("; "); } foreach (var stream in Streams) { sb.AppendFormat("{0}: {1}; ", stream.Key, stream.Value); } result = sb.ToString(); break; case Mode.ByStream: result = string.Format( "{0}:{1}/{2}:{3}/{4}", CatalogStream, CatalogPosition, DataStream, DataPosition, CommitPosition); break; default: return "Unsupported mode: " + Mode_.ToString(); } if (Phase == 0) return result; else { return "(" + Phase + ") " + result; } } public bool Completed { get { return Position.CommitPosition == Int64.MaxValue; } } private static void UpgradeModes(ref Mode leftMode, ref Mode rightMode) { if (leftMode == Mode.Stream && rightMode == Mode.MultiStream) { leftMode = Mode.MultiStream; return; } if (leftMode == Mode.MultiStream && rightMode == Mode.Stream) { rightMode = Mode.MultiStream; return; } if (leftMode == Mode.Position && rightMode == Mode.EventTypeIndex) { leftMode = Mode.EventTypeIndex; return; } if (leftMode == Mode.EventTypeIndex && rightMode == Mode.Position) { rightMode = Mode.EventTypeIndex; return; } } public CheckpointTag UpdateStreamPosition(string streamId, int eventSequenceNumber) { if (Mode_ != Mode.MultiStream) throw new ArgumentException("Invalid tag mode", "tag"); var resultDictionary = PatchStreamsDictionary(streamId, eventSequenceNumber); return FromStreamPositions(Phase, resultDictionary); } public CheckpointTag UpdateEventTypeIndexPosition(TFPos position, string eventType, int eventSequenceNumber) { if (Mode_ != Mode.EventTypeIndex) throw new 
ArgumentException("Invalid tag mode", "tag"); var resultDictionary = PatchStreamsDictionary(eventType, eventSequenceNumber); return FromEventTypeIndexPositions(Phase, position, resultDictionary); } public CheckpointTag UpdateEventTypeIndexPosition(TFPos position) { if (Mode_ != Mode.EventTypeIndex) throw new ArgumentException("Invalid tag mode", "tag"); return FromEventTypeIndexPositions(Phase, position, Streams); } private Dictionary<string, int> PatchStreamsDictionary(string streamId, int eventSequenceNumber) { var resultDictionary = new Dictionary<string, int>(); var was = false; foreach (var stream in Streams) { if (stream.Key == streamId) { was = true; if (eventSequenceNumber < stream.Value) resultDictionary.Add(stream.Key, stream.Value); else resultDictionary.Add(stream.Key, eventSequenceNumber); } else { resultDictionary.Add(stream.Key, stream.Value); } } if (!was) throw new ArgumentException("Key not found: " + streamId, "streamId"); if (resultDictionary.Count < Streams.Count) resultDictionary.Add(streamId, eventSequenceNumber); return resultDictionary; } public byte[] ToJsonBytes(ProjectionVersion projectionVersion, IEnumerable<KeyValuePair<string, JToken>> extraMetaData = null) { if (projectionVersion.ProjectionId == -1) throw new ArgumentException("projectionId is required", "projectionVersion"); using (var memoryStream = new MemoryStream()) { using (var textWriter = new StreamWriter(memoryStream, Helper.UTF8NoBom)) using (var jsonWriter = new JsonTextWriter(textWriter)) { WriteTo(projectionVersion, extraMetaData, jsonWriter); } return memoryStream.ToArray(); } } public string ToJsonString(ProjectionVersion projectionVersion, IEnumerable<KeyValuePair<string, JToken>> extraMetaData = null) { if (projectionVersion.ProjectionId == -1) throw new ArgumentException("projectionId is required", "projectionVersion"); using (var textWriter = new StringWriter()) { using (var jsonWriter = new JsonTextWriter(textWriter)) { WriteTo(projectionVersion, extraMetaData, 
jsonWriter); } return textWriter.ToString(); } } public string ToJsonString(IEnumerable<KeyValuePair<string, JToken>> extraMetaData = null) { using (var textWriter = new StringWriter()) { using (var jsonWriter = new JsonTextWriter(textWriter)) { WriteTo(default(ProjectionVersion), extraMetaData, jsonWriter); } return textWriter.ToString(); } } public JRaw ToJsonRaw(IEnumerable<KeyValuePair<string, JToken>> extraMetaData = null) { using (var textWriter = new StringWriter()) { using (var jsonWriter = new JsonTextWriter(textWriter)) { WriteTo(default(ProjectionVersion), extraMetaData, jsonWriter); } return new JRaw(textWriter.ToString()); } } private void WriteTo(ProjectionVersion projectionVersion, IEnumerable<KeyValuePair<string, JToken>> extraMetaData, JsonTextWriter jsonWriter) { jsonWriter.WriteStartObject(); if (projectionVersion.ProjectionId > 0) { jsonWriter.WritePropertyName("$v"); WriteVersion(projectionVersion, jsonWriter); } if (Phase != 0) { jsonWriter.WritePropertyName("$ph"); jsonWriter.WriteValue(Phase); } switch (Mode_) { case Mode.Phase: jsonWriter.WritePropertyName("$cp"); jsonWriter.WriteValue(Completed); break; case Mode.Position: case Mode.EventTypeIndex: jsonWriter.WritePropertyName("$c"); jsonWriter.WriteValue(CommitPosition.GetValueOrDefault()); jsonWriter.WritePropertyName("$p"); jsonWriter.WriteValue(PreparePosition.GetValueOrDefault()); if (Mode_ == Mode.EventTypeIndex) goto case Mode.MultiStream; break; case Mode.PreparePosition: jsonWriter.WritePropertyName("$p"); jsonWriter.WriteValue(PreparePosition.GetValueOrDefault()); break; case Mode.Stream: case Mode.MultiStream: jsonWriter.WritePropertyName("$s"); jsonWriter.WriteStartObject(); foreach (var stream in Streams) { jsonWriter.WritePropertyName(stream.Key); jsonWriter.WriteValue(stream.Value); } jsonWriter.WriteEndObject(); break; case Mode.ByStream: jsonWriter.WritePropertyName("$m"); jsonWriter.WriteValue("bs"); jsonWriter.WritePropertyName("$c"); 
jsonWriter.WriteValue(CommitPosition.GetValueOrDefault()); jsonWriter.WritePropertyName("$s"); jsonWriter.WriteStartArray(); jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName(CatalogStream); jsonWriter.WriteValue(CatalogPosition); jsonWriter.WriteEndObject(); if (!string.IsNullOrEmpty(DataStream)) { jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName(DataStream); jsonWriter.WriteValue(DataPosition); jsonWriter.WriteEndObject(); } jsonWriter.WriteEndArray(); break; } if (extraMetaData != null) { foreach (var pair in extraMetaData) { jsonWriter.WritePropertyName(pair.Key); pair.Value.WriteTo(jsonWriter); } } jsonWriter.WriteEndObject(); } private static void WriteVersion(ProjectionVersion projectionVersion, JsonTextWriter jsonWriter) { jsonWriter.WriteValue( projectionVersion.ProjectionId + ":" + projectionVersion.Epoch + ":" + projectionVersion.Version + ":" + Projections.VERSION); } public static CheckpointTagVersion FromJson(JsonReader reader, ProjectionVersion current) { Check(reader.Read(), reader); Check(JsonToken.StartObject, reader); long? commitPosition = null; long? preparePosition = null; string catalogStream = null; string dataStream = null; int? catalogPosition = null; int? dataPosition = null; bool byStreamMode = false; Dictionary<string, int> streams = null; Dictionary<string, JToken> extra = null; var projectionId = current.ProjectionId; var projectionEpoch = 0; var projectionVersion = 0; var projectionSystemVersion = 0; var projectionPhase = 0; while (true) { Check(reader.Read(), reader); if (reader.TokenType == JsonToken.EndObject) break; Check(JsonToken.PropertyName, reader); var name = (string) reader.Value; switch (name) { case "$cp": Check(reader.Read(), reader); var completed = (bool)reader.Value; commitPosition = completed ? Int64.MaxValue : Int64.MinValue; preparePosition = completed ? 
Int64.MaxValue : Int64.MinValue; break; case "$v": case "v": Check(reader.Read(), reader); if (reader.ValueType == typeof (long)) { var v = (int)(long)reader.Value; if (v > 0) // TODO: remove this if with time projectionVersion = v; } else { //TODO: better handle errors var v = (string) reader.Value; string[] parts = v.Split(':'); if (parts.Length == 2) { projectionVersion = Int32.Parse(parts[1]); } else { projectionId = Int32.Parse(parts[0]); projectionEpoch = Int32.Parse(parts[1]); projectionVersion = Int32.Parse(parts[2]); if (parts.Length >= 4) projectionSystemVersion = Int32.Parse(parts[3]); } } break; case "$c": case "c": case "commitPosition": Check(reader.Read(), reader); commitPosition = (long) reader.Value; break; case "$p": case "p": case "preparePosition": Check(reader.Read(), reader); preparePosition = (long) reader.Value; break; case "$s": case "s": case "streams": Check(reader.Read(), reader); if (reader.TokenType == JsonToken.StartArray) { Check(reader.Read(), reader); Check(JsonToken.StartObject, reader); Check(reader.Read(), reader); Check(JsonToken.PropertyName, reader); catalogStream = (string)reader.Value; Check(reader.Read(), reader); catalogPosition = (int) (long) reader.Value; Check(reader.Read(), reader); Check(JsonToken.EndObject, reader); Check(reader.Read(), reader); if (reader.TokenType == JsonToken.StartObject) { Check(reader.Read(), reader); Check(JsonToken.PropertyName, reader); dataStream = (string) reader.Value; Check(reader.Read(), reader); dataPosition = (int) (long) reader.Value; Check(reader.Read(), reader); Check(JsonToken.EndObject, reader); Check(reader.Read(), reader); } Check(JsonToken.EndArray, reader); } else { Check(JsonToken.StartObject, reader); streams = new Dictionary<string, int>(); while (true) { Check(reader.Read(), reader); if (reader.TokenType == JsonToken.EndObject) break; Check(JsonToken.PropertyName, reader); var streamName = (string) reader.Value; Check(reader.Read(), reader); var position = (int) (long) 
reader.Value; streams.Add(streamName, position); } } break; case "$ph": Check(reader.Read(), reader); projectionPhase = (int)(long) reader.Value; break; case "$m": Check(reader.Read(), reader); var readMode = (string) reader.Value; if (readMode != "bs") throw new ApplicationException("Unknown checkpoint tag mode: " + readMode); byStreamMode = true; break; default: if (extra == null) extra = new Dictionary<string, JToken>(); Check(reader.Read(), reader); var jToken = JToken.ReadFrom(reader); extra.Add(name, jToken); break; } } return new CheckpointTagVersion { Tag = byStreamMode ? new CheckpointTag( projectionPhase, catalogStream, catalogPosition.GetValueOrDefault(), dataStream, dataPosition ?? -1, commitPosition.GetValueOrDefault()) : new CheckpointTag( projectionPhase, new TFPos(commitPosition ?? Int64.MinValue, preparePosition ?? Int64.MinValue), streams), Version = new ProjectionVersion(projectionId, projectionEpoch, projectionVersion), SystemVersion = projectionSystemVersion, ExtraMetadata = extra, }; } public static void Check(JsonToken type, JsonReader reader) { if (reader.TokenType != type) throw new Exception("Invalid JSON"); } public static void Check(bool read, JsonReader reader) { if (!read) throw new Exception("Invalid JSON"); } } }
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System.Collections.Specialized;
using System.Linq;
using osu.Framework.Allocation;
using osu.Framework.Bindables;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Graphics.Sprites;
using osu.Game.Graphics;
using osu.Game.Graphics.Containers;
using osu.Game.Graphics.Sprites;
using osu.Game.Online.API.Requests.Responses;
using osu.Game.Overlays;
using osu.Game.Users.Drawables;
using osuTK;

namespace osu.Game.Screens.OnlinePlay.Lounge.Components
{
    /// <summary>
    /// Displays the host of an online-play room alongside a capped row of recent-participant
    /// avatars, a total participant count, and a "+N" circle for users that do not fit.
    /// Room state (RecentParticipants, ParticipantCount, Host) is presumably supplied by
    /// bindables on the <see cref="OnlinePlayComposite"/> base class — declared elsewhere; verify there.
    /// </summary>
    public class DrawableRoomParticipantsList : OnlinePlayComposite
    {
        // Diameter (in drawable units) shared by every circular element in the list.
        private const float avatar_size = 36;

        private FillFlowContainer<CircularAvatar> avatarFlow;

        private CircularAvatar hostAvatar;
        private LinkFlowContainer hostText;
        private HiddenUserCount hiddenUsers;
        private OsuSpriteText totalCount;

        public DrawableRoomParticipantsList()
        {
            // Width grows with content; height is fixed to match the lounge row layout.
            AutoSizeAxes = Axes.X;
            Height = 60;
        }

        [BackgroundDependencyLoader]
        private void load(OverlayColourProvider colours)
        {
            InternalChildren = new Drawable[]
            {
                // Outer sheared pill background spanning the whole component.
                new Container
                {
                    RelativeSizeAxes = Axes.Both,
                    Masking = true,
                    CornerRadius = 10,
                    Shear = new Vector2(0.2f, 0),
                    Child = new Box
                    {
                        RelativeSizeAxes = Axes.Both,
                        Colour = colours.Background4,
                    }
                },
                new FillFlowContainer
                {
                    RelativeSizeAxes = Axes.Y,
                    AutoSizeAxes = Axes.X,
                    Children = new Drawable[]
                    {
                        // Left section: host avatar + "hosted by <user>" text.
                        new FillFlowContainer
                        {
                            RelativeSizeAxes = Axes.Y,
                            AutoSizeAxes = Axes.X,
                            Spacing = new Vector2(8),
                            Padding = new MarginPadding { Left = 8, Right = 16 },
                            Children = new Drawable[]
                            {
                                hostAvatar = new CircularAvatar
                                {
                                    Anchor = Anchor.CentreLeft,
                                    Origin = Anchor.CentreLeft,
                                },
                                hostText = new LinkFlowContainer
                                {
                                    Anchor = Anchor.CentreLeft,
                                    Origin = Anchor.CentreLeft,
                                    AutoSizeAxes = Axes.Both
                                }
                            }
                        },
                        // Right section: darker sheared pill containing the count and avatar row.
                        new Container
                        {
                            RelativeSizeAxes = Axes.Y,
                            AutoSizeAxes = Axes.X,
                            Children = new Drawable[]
                            {
                                new Container
                                {
                                    RelativeSizeAxes = Axes.Both,
                                    Masking = true,
                                    CornerRadius = 10,
                                    Shear = new Vector2(0.2f, 0),
                                    Child = new Box
                                    {
                                        RelativeSizeAxes = Axes.Both,
                                        Colour = colours.Background3,
                                    }
                                },
                                new FillFlowContainer
                                {
                                    Anchor = Anchor.Centre,
                                    Origin = Anchor.Centre,
                                    AutoSizeAxes = Axes.Both,
                                    Direction = FillDirection.Horizontal,
                                    Spacing = new Vector2(4),
                                    Padding = new MarginPadding { Left = 8, Right = 16 },
                                    Children = new Drawable[]
                                    {
                                        new SpriteIcon
                                        {
                                            Anchor = Anchor.CentreLeft,
                                            Origin = Anchor.CentreLeft,
                                            Size = new Vector2(16),
                                            Icon = FontAwesome.Solid.User,
                                        },
                                        // Numeric total of all participants (not just those shown).
                                        totalCount = new OsuSpriteText
                                        {
                                            Font = OsuFont.Default.With(weight: FontWeight.Bold),
                                            Anchor = Anchor.CentreLeft,
                                            Origin = Anchor.CentreLeft,
                                        },
                                        avatarFlow = new FillFlowContainer<CircularAvatar>
                                        {
                                            Anchor = Anchor.CentreLeft,
                                            Origin = Anchor.CentreLeft,
                                            AutoSizeAxes = Axes.Both,
                                            Direction = FillDirection.Horizontal,
                                            Spacing = new Vector2(4),
                                            Margin = new MarginPadding { Left = 4 },
                                        },
                                        // "+N" overflow circle; hides itself when N == 0.
                                        hiddenUsers = new HiddenUserCount
                                        {
                                            Anchor = Anchor.CentreLeft,
                                            Origin = Anchor.CentreLeft,
                                        }
                                    }
                                }
                            }
                        },
                    }
                }
            };
        }

        protected override void LoadComplete()
        {
            base.LoadComplete();

            // Initial `true` argument requests an immediate callback so the UI
            // reflects current state without waiting for the first change.
            RecentParticipants.BindCollectionChanged(onParticipantsChanged, true);
            ParticipantCount.BindValueChanged(_ =>
            {
                updateHiddenUsers();
                totalCount.Text = ParticipantCount.Value.ToString();
            }, true);
            Host.BindValueChanged(onHostChanged, true);
        }

        private int numberOfCircles = 4;

        /// <summary>
        /// The maximum number of circles visible (including the "hidden count" circle in the overflow case).
        /// </summary>
        public int NumberOfCircles
        {
            get => numberOfCircles;
            set
            {
                numberOfCircles = value;

                // Setting before load is fine: load/LoadComplete will build the list with the new cap.
                if (LoadState < LoadState.Loaded)
                    return;

                // Reinitialising the list looks janky, but this is unlikely to be used in a setting where it's visible.
                clearUsers();
                foreach (var u in RecentParticipants)
                    addUser(u);

                updateHiddenUsers();
            }
        }

        // Incrementally mirrors RecentParticipants changes into the avatar flow.
        private void onParticipantsChanged(object sender, NotifyCollectionChangedEventArgs e)
        {
            switch (e.Action)
            {
                case NotifyCollectionChangedAction.Add:
                    foreach (var added in e.NewItems.OfType<APIUser>())
                        addUser(added);
                    break;

                case NotifyCollectionChangedAction.Remove:
                    foreach (var removed in e.OldItems.OfType<APIUser>())
                        removeUser(removed);
                    break;

                case NotifyCollectionChangedAction.Reset:
                    clearUsers();
                    break;

                case NotifyCollectionChangedAction.Replace:
                case NotifyCollectionChangedAction.Move:
                    // Easiest is to just reinitialise the whole list. These are unlikely to ever be use cases.
                    clearUsers();
                    foreach (var u in RecentParticipants)
                        addUser(u);
                    break;
            }

            updateHiddenUsers();
        }

        // Number of circles currently on screen: shown avatars plus the overflow circle when visible.
        private int displayedCircles => avatarFlow.Count + (hiddenUsers.Count > 0 ? 1 : 0);

        // Adds an avatar only while there is room under the NumberOfCircles cap.
        private void addUser(APIUser user)
        {
            if (displayedCircles < NumberOfCircles)
                avatarFlow.Add(new CircularAvatar { User = user });
        }

        private void removeUser(APIUser user)
        {
            avatarFlow.RemoveAll(a => a.User == user);
        }

        private void clearUsers()
        {
            avatarFlow.Clear();
            updateHiddenUsers();
        }

        // Recomputes the "+N" count and rebalances the avatar row against the cap:
        // trims one avatar when over (overflow circle takes a slot), or back-fills
        // from RecentParticipants when under.
        private void updateHiddenUsers()
        {
            int hiddenCount = 0;
            if (RecentParticipants.Count > NumberOfCircles)
                // +1 because the overflow circle itself consumes one of the visible slots.
                hiddenCount = ParticipantCount.Value - NumberOfCircles + 1;

            hiddenUsers.Count = hiddenCount;

            if (displayedCircles > NumberOfCircles)
                avatarFlow.Remove(avatarFlow.Last());
            else if (displayedCircles < NumberOfCircles)
            {
                // Pick any recent participant not already displayed.
                var nextUser = RecentParticipants.FirstOrDefault(u => avatarFlow.All(a => a.User != u));
                if (nextUser != null) addUser(nextUser);
            }
        }

        // Rebuilds the "hosted by <user>" section; host may be null (no host).
        private void onHostChanged(ValueChangedEvent<APIUser> host)
        {
            hostAvatar.User = host.NewValue;
            hostText.Clear();

            if (host.NewValue != null)
            {
                hostText.AddText("hosted by ");
                hostText.AddUserLink(host.NewValue);
            }
        }

        /// <summary>
        /// A single circular user avatar with a solid backing disc.
        /// </summary>
        private class CircularAvatar : CompositeDrawable
        {
            public APIUser User
            {
                get => avatar.User;
                set => avatar.User = value;
            }

            private readonly UpdateableAvatar avatar = new UpdateableAvatar(showUsernameTooltip: true) { RelativeSizeAxes = Axes.Both };

            [BackgroundDependencyLoader]
            private void load(OverlayColourProvider colours)
            {
                Size = new Vector2(avatar_size);

                InternalChild = new CircularContainer
                {
                    RelativeSizeAxes = Axes.Both,
                    Masking = true,
                    Children = new Drawable[]
                    {
                        new Box
                        {
                            Colour = colours.Background5,
                            RelativeSizeAxes = Axes.Both,
                        },
                        avatar
                    }
                };
            }
        }

        /// <summary>
        /// The "+N" circle shown when more participants exist than can be displayed.
        /// Automatically shows/hides itself as <see cref="Count"/> changes.
        /// </summary>
        public class HiddenUserCount : CompositeDrawable
        {
            public int Count
            {
                get => count;
                set
                {
                    count = value;
                    countText.Text = $"+{count}";

                    if (count > 0)
                        Show();
                    else
                        Hide();
                }
            }

            private int count;

            private readonly SpriteText countText = new OsuSpriteText
            {
                Anchor = Anchor.Centre,
                Origin = Anchor.Centre,
                Font = OsuFont.Default.With(weight: FontWeight.Bold),
            };

            [BackgroundDependencyLoader]
            private void load(OverlayColourProvider colours)
            {
                Size = new Vector2(avatar_size);
                // Starts hidden; Count setter reveals it when non-zero.
                Alpha = 0;

                InternalChild = new CircularContainer
                {
                    RelativeSizeAxes = Axes.Both,
                    Masking = true,
                    Children = new Drawable[]
                    {
                        new Box
                        {
                            RelativeSizeAxes = Axes.Both,
                            Colour = colours.Background5,
                        },
                        countText
                    }
                };
            }
        }
    }
}
/*
Copyright 2006 - 2010 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

using System;
using System.Collections;
using System.Runtime.Serialization;

namespace OpenSource.UPnP.AV.CdsMetadata
{
    /// <summary>
    /// Implements a Hashtable-like dictionary on top of an ArrayList of
    /// <see cref="DictionaryEntry"/> items. Hashtables are usually about
    /// 2-3 times bigger than ArrayLists with thousands of items; an
    /// ArrayList is even better by a factor of 3 against a HybridDictionary
    /// with a collection size of 10.
    /// <para>
    /// All lookups are O(n) linear scans, so this trades speed for memory —
    /// intended for small collections.
    /// </para>
    /// </summary>
    public class _Hashtable //: IDictionary, ICollection, IEnumerable, ICloneable
    {
        /// <summary>
        /// Adds an element with the provided key and value to the list.
        /// </summary>
        /// <param name="AL">list of <see cref="DictionaryEntry"/> items</param>
        /// <param name="key">key to add; must not already be present</param>
        /// <param name="val">value to associate with the key</param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when key or val is null.
        /// </exception>
        /// <exception cref="ArgumentException">
        /// Thrown when the key is already present.
        /// </exception>
        public static void Add(ArrayList AL, object key, object val)
        {
            if ((key == null) || (val == null))
            {
                // Use the (paramName, message) overload so the message is not
                // misreported as the parameter name.
                throw new ArgumentNullException(key == null ? "key" : "val", "Cannot have null key or val.");
            }

            //lock (AL.SyncRoot)
            {
                foreach (DictionaryEntry de in AL)
                {
                    if (de.Key.Equals(key))
                    {
                        throw new ArgumentException("Duplicate key");
                    }
                }

                DictionaryEntry newEntry = new DictionaryEntry();
                newEntry.Key = key;
                newEntry.Value = val;
                AL.Add(newEntry);
            }
        }

        /// <summary>
        /// Removes the entry with the given key from the ArrayList of
        /// DictionaryEntry items. No-op when the key is absent.
        /// </summary>
        /// <param name="AL">list of <see cref="DictionaryEntry"/> items</param>
        /// <param name="key">key of the entry to remove</param>
        public static void Remove(ArrayList AL, object key)
        {
            //lock (AL.SyncRoot)
            {
                int i = 0;
                bool remove = false;

                foreach (DictionaryEntry de in AL)
                {
                    if (de.Key.Equals(key))
                    {
                        remove = true;
                        break;
                    }
                    i++;
                }

                if (remove)
                {
                    AL.RemoveAt(i);
                }
            }
        }

        /// <summary>
        /// Determines whether the list contains an element with the specified key.
        /// </summary>
        /// <param name="AL">list of <see cref="DictionaryEntry"/> items</param>
        /// <param name="key">The key to locate.</param>
        /// <returns>true when an entry with an equal key exists</returns>
        public static bool Contains(ArrayList AL, object key)
        {
            //lock (AL.SyncRoot)
            {
                foreach (DictionaryEntry de in AL)
                {
                    if (de.Key.Equals(key))
                    {
                        return true;
                    }
                }
            }
            return false;
        }

        /// <summary>
        /// Returns Contains(ArrayList, object).
        /// </summary>
        /// <param name="AL">list of <see cref="DictionaryEntry"/> items</param>
        /// <param name="key">key to look up</param>
        /// <returns>true when the key is present</returns>
        public static bool ContainsKey(ArrayList AL, object key)
        {
            return Contains(AL, key);
        }

        /// <summary>
        /// Returns an IDictionaryEnumerator over the list's entries.
        /// </summary>
        /// <param name="AL">list of <see cref="DictionaryEntry"/> items</param>
        /// <returns>an enumerator yielding DictionaryEntry items</returns>
        public static IDictionaryEnumerator GetEnumerator(ArrayList AL)
        {
            return new _HashtableEnumerator(AL);
        }

        /// <summary>
        /// Returns the values found in an ArrayList that has DictionaryEntry elements.
        /// </summary>
        /// <param name="AL">list of <see cref="DictionaryEntry"/> items</param>
        /// <returns>a live (non-snapshot) view over the values</returns>
        public static ICollection Values(ArrayList AL)
        {
            return new _HashtableValueCollection(AL);
        }

        /// <summary>
        /// Returns the keys found in an ArrayList that has DictionaryEntry elements.
        /// </summary>
        /// <param name="AL">list of <see cref="DictionaryEntry"/> items</param>
        /// <returns>a live (non-snapshot) view over the keys</returns>
        public static ICollection Keys(ArrayList AL)
        {
            return new _HashtableKeyCollection(AL);
        }

        /// <summary>
        /// Returns the value of an item in an ArrayList that has DictionaryEntry
        /// elements, using the specified key.
        /// </summary>
        /// <param name="AL">list of <see cref="DictionaryEntry"/> items</param>
        /// <param name="key">key to look up</param>
        /// <returns>the associated value, or null when the key is absent</returns>
        public static object Get(ArrayList AL, object key)
        {
            //lock (AL.SyncRoot)
            {
                foreach (DictionaryEntry de in AL)
                {
                    if (de.Key.Equals(key))
                    {
                        return de.Value;
                    }
                }
            }
            return null;
        }

        /// <summary>
        /// Given a list of keys, copies matching key/value pairs into the
        /// supplied Hashtable. Keys with no matching entry are skipped.
        /// </summary>
        /// <param name="results">destination table that receives the matches</param>
        /// <param name="AL">list of <see cref="DictionaryEntry"/> items</param>
        /// <param name="keys">keys to look up</param>
        /// <returns>the same <paramref name="results"/> instance</returns>
        public static Hashtable Get(Hashtable results, ArrayList AL, object[] keys)
        {
            //lock (AL.SyncRoot)
            {
                foreach (object k in keys)
                {
                    foreach (DictionaryEntry de in AL)
                    {
                        if (de.Key.Equals(k))
                        {
                            results.Add(k, de.Value);
                            break;
                        }
                    }
                }
            }
            return results;
        }

        /// <summary>
        /// Sets the value for a key: updates an existing entry, removes it when
        /// val is null, or adds a new entry when the key is absent (and val is
        /// non-null).
        /// </summary>
        /// <param name="AL">list of <see cref="DictionaryEntry"/> items</param>
        /// <param name="key">key to set</param>
        /// <param name="val">new value; null requests removal</param>
        public static void Set(ArrayList AL, object key, object val)
        {
            //lock (AL.SyncRoot)
            {
                for (int i = 0; i < AL.Count; i++)
                {
                    DictionaryEntry de = (DictionaryEntry)AL[i];
                    if (de.Key.Equals(key))
                    {
                        if (val == null)
                        {
                            AL.RemoveAt(i);
                        }
                        else
                        {
                            // DictionaryEntry is a struct: write the modified copy back.
                            de.Value = val;
                            AL[i] = de;
                        }
                        return;
                    }
                }

                if (val != null)
                {
                    Add(AL, key, val);
                }
            }
        }

        /// <summary>
        /// Analogous to HashtableValueCollection: a live ICollection view over
        /// the entry values.
        /// </summary>
        [Serializable()]
        private class _HashtableValueCollection : ICollection
        {
            public _HashtableValueCollection(ArrayList al)
            {
                this.AL = al;
            }

            public int Count { get { return this.AL.Count; } }
            public bool IsSynchronized { get { return this.AL.IsSynchronized; } }
            public object SyncRoot { get { return this.AL.SyncRoot; } }

            public void CopyTo(Array array, int index)
            {
                foreach (DictionaryEntry de in this.AL)
                {
                    array.SetValue(de.Value, index);
                    index++;
                }
            }

            public IEnumerator GetEnumerator()
            {
                return new _HashtableKeyValueEnumerator(this.AL, false);
            }

            private ArrayList AL;
        }

        /// <summary>
        /// Analogous to HashtableKeyCollection: a live ICollection view over
        /// the entry keys.
        /// </summary>
        [Serializable()]
        private class _HashtableKeyCollection : ICollection
        {
            public _HashtableKeyCollection(ArrayList al)
            {
                this.AL = al;
            }

            public int Count { get { return this.AL.Count; } }
            public bool IsSynchronized { get { return this.AL.IsSynchronized; } }
            public object SyncRoot { get { return this.AL.SyncRoot; } }

            public void CopyTo(Array array, int index)
            {
                foreach (DictionaryEntry de in this.AL)
                {
                    // FIX: this is the KEY collection, so copy the keys.
                    // Previously copied de.Value (copy-paste from the value collection).
                    array.SetValue(de.Key, index);
                    index++;
                }
            }

            public IEnumerator GetEnumerator()
            {
                return new _HashtableKeyValueEnumerator(this.AL, true);
            }

            private ArrayList AL;
        }

        /// <summary>
        /// Enumerator for _Hashtable key or value sequences; yields keys when
        /// constructed with keys == true, otherwise values.
        /// </summary>
        [Serializable()]
        private class _HashtableKeyValueEnumerator : IEnumerator
        {
            public _HashtableKeyValueEnumerator(ArrayList al, bool keys)
            {
                IE = al.GetEnumerator();
                keyEnumerator = keys;
            }

            public bool MoveNext()
            {
                return IE.MoveNext();
            }

            public void Reset()
            {
                IE.Reset();
            }

            public object Current
            {
                get
                {
                    DictionaryEntry de = (DictionaryEntry)IE.Current;

                    if (keyEnumerator)
                    {
                        return de.Key;
                    }
                    else
                    {
                        return de.Value;
                    }
                }
            }

            IEnumerator IE;
            bool keyEnumerator;
        }

        /// <summary>
        /// Enumerator for _Hashtable: yields DictionaryEntry items and exposes
        /// Key/Value/Entry per IDictionaryEnumerator.
        /// </summary>
        [Serializable()]
        private class _HashtableEnumerator : IDictionaryEnumerator, IEnumerator
        {
            public _HashtableEnumerator(ArrayList al)
            {
                IE = al.GetEnumerator();
            }

            public DictionaryEntry Entry { get { return (DictionaryEntry)this.Current; } }
            public object Key { get { return this.Entry.Key; } }
            public object Value { get { return this.Entry.Value; } }
            public object Current { get { return IE.Current; } }

            public bool MoveNext()
            {
                return IE.MoveNext();
            }

            public void Reset()
            {
                IE.Reset();
            }

            private IEnumerator IE;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//

using System;
using System.Globalization;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Security;
using System.Text;

namespace TestLibrary
{
    /// <summary>
    /// Deterministic-when-seeded random data generator used by test code.
    /// All overloads taking a seed forward to <see cref="Seed"/>, which only
    /// accepts the first seed it is ever given (later attempts are logged and
    /// rejected) so a test run stays reproducible.
    /// </summary>
    public static class Generator
    {
        internal static Random m_rand = new Random();
        internal static int? seed = null;

        /// <summary>
        /// First-write-wins seed for the shared Random instance. Setting a
        /// value after a seed has been accepted is logged and ignored.
        /// </summary>
        public static int? Seed
        {
            get
            {
                if (seed.HasValue)
                {
                    return seed.Value;
                }
                else
                {
                    return null;
                }
            }
            set
            {
                if (!(seed.HasValue))
                {
                    seed = value;
                    if (seed.HasValue)
                    {
                        TestFramework.LogVerbose("Seeding Random with: " + seed.Value.ToString());
                        m_rand = new Random(seed.Value);
                    }
                }
                else
                {
                    TestFramework.LogVerbose("Attempt to seed Random to " + value.ToString() + " rejected it was already seeded to: " + seed.Value.ToString());
                }
            }
        }

        // Fills the buffer with random data, seeding first.
        public static void GetBytes(int new_seed, byte[] buffer)
        {
            Seed = new_seed;
            GetBytes(buffer);
        }

        // Fills the buffer with random data.
        public static void GetBytes(byte[] buffer)
        {
            m_rand.NextBytes(buffer);
            TestFramework.LogVerbose("Random Byte[] produced: " + Utilities.ByteArrayToString(buffer));
        }

        // returns a non-negative Int64 between 0 and Int64.MaxValue
        public static Int64 GetInt64(Int32 new_seed)
        {
            Seed = new_seed;
            return GetInt64();
        }

        // returns a non-negative Int64 between 0 and Int64.MaxValue
        public static Int64 GetInt64()
        {
            byte[] buffer = new byte[8];
            Int64 iVal;

            GetBytes(buffer);

            // Assemble the 8 random bytes little-endian into an Int64.
            iVal = 0;
            for (int i = 0; i < buffer.Length; i++)
            {
                iVal |= ((Int64)buffer[i] << (i * 8));
            }

            // FIX: clear the sign bit instead of negating. The previous
            // `if (0 > iVal) iVal *= -1;` overflowed for Int64.MinValue
            // (whose negation is itself), so a negative value could escape.
            iVal &= Int64.MaxValue;

            TestFramework.LogVerbose("Random Int64 produced: " + iVal.ToString());

            return iVal;
        }

        // returns a non-negative Int32 between 0 and Int32.MaxValue
        public static Int32 GetInt32(Int32 new_seed)
        {
            Seed = new_seed;
            return GetInt32();
        }

        // returns a non-negative Int32 between 0 and Int32.MaxValue
        public static Int32 GetInt32()
        {
            Int32 i = m_rand.Next();
            TestFramework.LogVerbose("Random Int32 produced: " + i.ToString());
            return i;
        }

        // returns a non-negative Int16 between 0 and Int16.MaxValue
        public static Int16 GetInt16(Int32 new_seed)
        {
            Seed = new_seed;
            return GetInt16();
        }

        // returns a non-negative Int16 between 0 and Int16.MaxValue
        public static Int16 GetInt16()
        {
            Int16 i = Convert.ToInt16(m_rand.Next() % (1 + Int16.MaxValue));
            TestFramework.LogVerbose("Random Int16 produced: " + i.ToString());
            return i;
        }

        // returns a non-negative Byte between 0 and Byte.MaxValue
        public static Byte GetByte(Int32 new_seed)
        {
            Seed = new_seed;
            return GetByte();
        }

        // returns a non-negative Byte between 0 and Byte.MaxValue
        public static Byte GetByte()
        {
            Byte i = Convert.ToByte(m_rand.Next() % (1 + Byte.MaxValue));
            TestFramework.LogVerbose("Random Byte produced: " + i.ToString());
            return i;
        }

        // returns a non-negative Double between 0.0 and 1.0
        public static Double GetDouble(Int32 new_seed)
        {
            Seed = new_seed;
            return GetDouble();
        }

        // returns a non-negative Double between 0.0 and 1.0
        public static Double GetDouble()
        {
            Double i = m_rand.NextDouble();
            TestFramework.LogVerbose("Random Double produced: " + i.ToString());
            return i;
        }

        // returns a non-negative Single between 0.0 and 1.0
        public static Single GetSingle(Int32 new_seed)
        {
            Seed = new_seed;
            return GetSingle();
        }

        // returns a non-negative Single between 0.0 and 1.0
        public static Single GetSingle()
        {
            Single i = Convert.ToSingle(m_rand.NextDouble());
            TestFramework.LogVerbose("Random Single produced: " + i.ToString());
            return i;
        }

        // returns a valid char that is a letter
        public static Char GetCharLetter(Int32 new_seed)
        {
            Seed = new_seed;
            return GetCharLetter();
        }

        // returns a valid char that is a letter
        public static Char GetCharLetter()
        {
            return GetCharLetter(true);
        }

        // returns a valid char that is a letter
        // if allowsurrogate is true then surrogates are valid return values
        public static Char GetCharLetter(Int32 new_seed, bool allowsurrogate)
        {
            Seed = new_seed;
            return GetCharLetter(allowsurrogate);
        }

        public static Char GetCharLetter(bool allowsurrogate)
        {
            return GetCharLetter(allowsurrogate, true);
        }

        // returns a valid char that is a letter
        // if allowsurrogate is true then surrogates are valid return values
        // if allownoweight is true, then no-weight characters are valid return values
        public static Char GetCharLetter(Int32 new_seed, bool allowsurrogate, bool allownoweight)
        {
            Seed = new_seed;
            return GetCharLetter(allowsurrogate, allownoweight);
        }

        public static Char GetCharLetter(bool allowsurrogate, bool allownoweight)
        {
            Int16 iVal;
            Char c = 'a'; // this value is never used (overwritten before the loop exits)
            Int32 counter;
            bool loopCondition = true;

            // attempt to randomly find a letter (bounded so we never spin forever)
            counter = 100;
            do
            {
                counter--;
                iVal = GetInt16();
                TestFramework.LogVerbose("Random CharLetter produced: " + Convert.ToChar(iVal).ToString());

                // allownoweight = false would require weight tables that need
                // P/Invoke; not available in this build configuration.
                if (false == allownoweight)
                {
                    throw new NotSupportedException("allownoweight = false is not supported in TestLibrary with FEATURE_NOPINVOKES");
                }

                c = Convert.ToChar(iVal);
                loopCondition = allowsurrogate ? (!Char.IsLetter(c)) : (!Char.IsLetter(c) || Char.IsSurrogate(c));
            }
            while (loopCondition && 0 < counter);

            if (!Char.IsLetter(c))
            {
                // we tried and failed to get a letter
                // Grab an ASCII letter
                c = Convert.ToChar(GetInt16() % 26 + 'A');
            }

            TestFramework.LogVerbose("Random Char produced: " + c.ToString());
            return c;
        }

        // returns a valid char that is a number
        public static char GetCharNumber(Int32 new_seed)
        {
            Seed = new_seed;
            return GetCharNumber();
        }

        public static char GetCharNumber()
        {
            return GetCharNumber(true);
        }

        // returns a valid char that is a number
        // if allownoweight is true, then no-weight characters are valid return values
        public static char GetCharNumber(Int32 new_seed, bool allownoweight)
        {
            Seed = new_seed;
            return GetCharNumber(allownoweight);
        }

        public static char GetCharNumber(bool allownoweight)
        {
            Char c = '0'; // this value is never used (overwritten before the loop exits)
            Int32 counter;
            Int16 iVal;
            bool loopCondition = true;

            // attempt to randomly find a number (bounded so we never spin forever)
            counter = 100;
            do
            {
                counter--;
                iVal = GetInt16();
                TestFramework.LogVerbose("Random Char produced: " + Convert.ToChar(iVal).ToString());

                if (false == allownoweight)
                {
                    throw new InvalidOperationException("allownoweight = false is not supported in TestLibrary with FEATURE_NOPINVOKES");
                }

                c = Convert.ToChar(iVal);
                loopCondition = !Char.IsNumber(c);
            }
            while (loopCondition && 0 < counter);

            if (!Char.IsNumber(c))
            {
                // we tried and failed to get a number
                // Grab an ASCII digit
                c = Convert.ToChar(GetInt16() % 10 + '0');
            }

            TestFramework.LogVerbose("Random Char produced: " + c.ToString());
            return c;
        }

        // returns a valid char
        public static Char GetChar(Int32 new_seed)
        {
            Seed = new_seed;
            return GetChar();
        }

        public static Char GetChar()
        {
            return GetChar(true);
        }

        // returns a valid char
        // if allowsurrogate is true then surrogates are valid return values
        public static Char GetChar(Int32 new_seed, bool allowsurrogate)
        {
            Seed = new_seed;
            return GetChar(allowsurrogate);
        }

        public static Char GetChar(bool allowsurrogate)
        {
            return GetChar(allowsurrogate, true);
        }

        // returns a valid char
        // if allowsurrogate is true then surrogates are valid return values
        // if allownoweight characters then noweight characters are valid return values
        public static Char GetChar(Int32 new_seed, bool allowsurrogate, bool allownoweight)
        {
            Seed = new_seed;
            return GetChar(allowsurrogate, allownoweight);
        }

        // NOTE(review): allowsurrogate/allownoweight are currently ignored and any
        // non-letter result is coerced to an ASCII letter (acknowledged hack below).
        // Behavior intentionally preserved — callers may depend on letter-only output.
        public static Char GetChar(bool allowsurrogate, bool allownoweight)
        {
            Int16 iVal = GetInt16();
            Char c = (char)(iVal);

            if (!Char.IsLetter(c))
            {
                // we tried and failed to get a letter
                // Just grab an ASCII letter
                // This is a hack but will work for now
                c = (char)(GetInt16() % 26 + 'A');
            }

            return c;
        }

        // returns a string. If "validPath" is set, only valid path characters
        // will be included
        public static string GetString(Int32 new_seed, Boolean validPath, Int32 minLength, Int32 maxLength)
        {
            Seed = new_seed;
            return GetString(validPath, minLength, maxLength);
        }

        public static string GetString(Boolean validPath, Int32 minLength, Int32 maxLength)
        {
            return GetString(validPath, true, true, minLength, maxLength);
        }

        // several string APIs don't like nulls in them, so this generates a string without nulls
        public static string GetString(Int32 new_seed, Boolean validPath, Boolean allowNulls, Int32 minLength, Int32 maxLength)
        {
            Seed = new_seed;
            return GetString(validPath, allowNulls, minLength, maxLength);
        }

        public static string GetString(Boolean validPath, Boolean allowNulls, Int32 minLength, Int32 maxLength)
        {
            return GetString(validPath, allowNulls, true, minLength, maxLength);
        }

        // some string operations don't like no-weight characters
        public static string GetString(Int32 new_seed, Boolean validPath, Boolean allowNulls, Boolean allowNoWeight, Int32 minLength, Int32 maxLength)
        {
            Seed = new_seed;
            return GetString(validPath, allowNulls, allowNoWeight, minLength, maxLength);
        }

        // Builds a random string with length in [minLength, maxLength).
        // Returns String.Empty when both bounds are 0, and null when minLength > maxLength.
        public static string GetString(Boolean validPath, Boolean allowNulls, Boolean allowNoWeight, Int32 minLength, Int32 maxLength)
        {
            StringBuilder sVal = new StringBuilder();
            Char c;
            Int32 length;

            if (0 == minLength && 0 == maxLength) return String.Empty;
            if (minLength > maxLength) return null;

            length = minLength;
            if (minLength != maxLength)
            {
                length = (GetInt32() % (maxLength - minLength)) + minLength;
            }

            for (int i = 0; length > i; i++)
            {
                if (validPath)
                {
                    // TODO: Make this smarter — alternate letters and digits 50/50 for now.
                    if (0 == (GetByte() % 2))
                    {
                        c = GetCharLetter(true, allowNoWeight);
                    }
                    else
                    {
                        c = GetCharNumber(allowNoWeight);
                    }
                }
                else if (!allowNulls)
                {
                    // resample until we get something other than U+0000
                    do
                    {
                        c = GetChar(true, allowNoWeight);
                    }
                    while (c == '\u0000');
                }
                else
                {
                    c = GetChar(true, allowNoWeight);
                }

                sVal.Append(c);
            }

            string s = sVal.ToString();
            TestFramework.LogVerbose("Random String produced: " + s);
            return s;
        }

        public static string[] GetStrings(Int32 new_seed, Boolean validPath, Int32 minLength, Int32 maxLength)
        {
            Seed = new_seed;
            return GetStrings(validPath, minLength, maxLength);
        }

        // Returns a fixed battery of 12 strings built from one random base string
        // plus interesting Unicode suffixes (surrogates, sigmas, zero-weight, etc.).
        // Returns null when the bounds are too small (both <= 2) or inverted.
        public static string[] GetStrings(Boolean validPath, Int32 minLength, Int32 maxLength)
        {
            string validString;
            const char c_LATIN_A = '\u0100';
            const char c_LOWER_A = 'a';
            const char c_UPPER_A = 'A';
            const char c_ZERO_WEIGHT = '\uFEFF';
            const char c_DOUBLE_WIDE_A = '\uFF21';
            const string c_SURROGATE_UPPER = "\uD801\uDC00";
            const string c_SURROGATE_LOWER = "\uD801\uDC28";
            const char c_LOWER_SIGMA1 = (char)0x03C2;
            const char c_LOWER_SIGMA2 = (char)0x03C3;
            const char c_UPPER_SIGMA = (char)0x03A3;
            const char c_SPACE = ' ';
            int numConsts = 12;
            string[] retStrings;

            if (2 >= minLength && 2 >= maxLength || minLength > maxLength) return null;

            retStrings = new string[numConsts];

            validString = TestLibrary.Generator.GetString(validPath, minLength - 1, maxLength - 1);

            retStrings[0] = TestLibrary.Generator.GetString(validPath, minLength, maxLength);
            retStrings[1] = validString + c_LATIN_A;
            retStrings[2] = validString + c_LOWER_A;
            retStrings[3] = validString + c_UPPER_A;
            retStrings[4] = validString + c_ZERO_WEIGHT;
            retStrings[5] = validString + c_DOUBLE_WIDE_A;
            // surrogate pairs take two chars, hence the -2 bounds here
            retStrings[6] = TestLibrary.Generator.GetString(validPath, minLength - 2, maxLength - 2) + c_SURROGATE_UPPER;
            retStrings[7] = TestLibrary.Generator.GetString(validPath, minLength - 2, maxLength - 2) + c_SURROGATE_LOWER;
            retStrings[8] = validString + c_LOWER_SIGMA1;
            retStrings[9] = validString + c_LOWER_SIGMA2;
            retStrings[10] = validString + c_UPPER_SIGMA;
            retStrings[11] = validString + c_SPACE;

            return retStrings;
        }

        // Creates a default-constructed instance of the given type.
        [SecuritySafeCritical]
        public static object GetType(Type t)
        {
            return Activator.CreateInstance(t);
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.Batch
{
    using Azure;
    using Management;
    using Rest;
    using Rest.Azure;
    using Models;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// BatchAccountOperations operations.
    /// </summary>
    public partial interface IBatchAccountOperations
    {
        /// <summary>
        /// Creates a new Batch account with the specified parameters. Existing
        /// accounts cannot be updated with this API and should instead be
        /// updated with the Update Batch Account API.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group that contains the Batch account.
        /// </param>
        /// <param name='accountName'>
        /// A name for the Batch account which must be unique within the
        /// region. Batch account names must be between 3 and 24 characters in
        /// length and must use only numbers and lowercase letters. This name
        /// is used as part of the DNS name that is used to access the Batch
        /// service in the region in which the account is created. For example:
        /// http://accountname.region.batch.azure.com/.
        /// </param>
        /// <param name='parameters'>
        /// Additional parameters for account creation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response body and response
        /// headers.
        /// </returns>
        Task<AzureOperationResponse<BatchAccount,BatchAccountCreateHeaders>> CreateWithHttpMessagesAsync(string resourceGroupName, string accountName, BatchAccountCreateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Updates the properties of an existing Batch account.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group that contains the Batch account.
        /// </param>
        /// <param name='accountName'>
        /// The name of the Batch account.
        /// </param>
        /// <param name='parameters'>
        /// Additional parameters for account update.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response body and response
        /// headers.
        /// </returns>
        Task<AzureOperationResponse<BatchAccount>> UpdateWithHttpMessagesAsync(string resourceGroupName, string accountName, BatchAccountUpdateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Deletes the specified Batch account.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group that contains the Batch account.
        /// </param>
        /// <param name='accountName'>
        /// The name of the Batch account.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response headers.
        /// </returns>
        Task<AzureOperationHeaderResponse<BatchAccountDeleteHeaders>> DeleteWithHttpMessagesAsync(string resourceGroupName, string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets information about the specified Batch account.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group that contains the Batch account.
        /// </param>
        /// <param name='accountName'>
        /// The name of the Batch account.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response body and response
        /// headers.
        /// </returns>
        Task<AzureOperationResponse<BatchAccount>> GetWithHttpMessagesAsync(string resourceGroupName, string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets information about the Batch accounts associated with the
        /// subscription.
        /// </summary>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response body (the first page of
        /// results) and response headers.
        /// </returns>
        Task<AzureOperationResponse<IPage<BatchAccount>>> ListWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets information about the Batch accounts associated with the
        /// specified resource group.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group that contains the Batch account.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response body (the first page of
        /// results) and response headers.
        /// </returns>
        Task<AzureOperationResponse<IPage<BatchAccount>>> ListByResourceGroupWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Synchronizes access keys for the auto-storage account configured
        /// for the specified Batch account.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group that contains the Batch account.
        /// </param>
        /// <param name='accountName'>
        /// The name of the Batch account.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response headers.
        /// </returns>
        Task<AzureOperationResponse> SynchronizeAutoStorageKeysWithHttpMessagesAsync(string resourceGroupName, string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Regenerates the specified account key for the Batch account.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group that contains the Batch account.
        /// </param>
        /// <param name='accountName'>
        /// The name of the Batch account.
        /// </param>
        /// <param name='keyName'>
        /// The type of account key to regenerate. Possible values include:
        /// 'Primary', 'Secondary'
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response body and response
        /// headers.
        /// </returns>
        Task<AzureOperationResponse<BatchAccountKeys>> RegenerateKeyWithHttpMessagesAsync(string resourceGroupName, string accountName, AccountKeyType keyName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets the account keys for the specified Batch account.
        /// </summary>
        /// <remarks>
        /// This operation applies only to Batch accounts created with a
        /// poolAllocationMode of 'BatchService'. If the Batch account was
        /// created with a poolAllocationMode of 'UserSubscription', clients
        /// cannot use access to keys to authenticate, and must use Azure
        /// Active Directory instead. In this case, getting the keys will fail.
        /// </remarks>
        /// <param name='resourceGroupName'>
        /// The name of the resource group that contains the Batch account.
        /// </param>
        /// <param name='accountName'>
        /// The name of the Batch account.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response body and response
        /// headers.
        /// </returns>
        Task<AzureOperationResponse<BatchAccountKeys>> GetKeysWithHttpMessagesAsync(string resourceGroupName, string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Creates a new Batch account with the specified parameters. Existing
        /// accounts cannot be updated with this API and should instead be
        /// updated with the Update Batch Account API.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group that contains the Batch account.
        /// </param>
        /// <param name='accountName'>
        /// A name for the Batch account which must be unique within the
        /// region. Batch account names must be between 3 and 24 characters in
        /// length and must use only numbers and lowercase letters. This name
        /// is used as part of the DNS name that is used to access the Batch
        /// service in the region in which the account is created. For example:
        /// http://accountname.region.batch.azure.com/.
        /// </param>
        /// <param name='parameters'>
        /// Additional parameters for account creation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response body and response
        /// headers.
        /// </returns>
        Task<AzureOperationResponse<BatchAccount,BatchAccountCreateHeaders>> BeginCreateWithHttpMessagesAsync(string resourceGroupName, string accountName, BatchAccountCreateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Deletes the specified Batch account.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group that contains the Batch account.
        /// </param>
        /// <param name='accountName'>
        /// The name of the Batch account.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response headers.
        /// </returns>
        Task<AzureOperationHeaderResponse<BatchAccountDeleteHeaders>> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets information about the Batch accounts associated with the
        /// subscription.
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response body (the next page of
        /// results) and response headers.
        /// </returns>
        Task<AzureOperationResponse<IPage<BatchAccount>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets information about the Batch accounts associated with the
        /// specified resource group.
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <returns>
        /// A response object containing the response body (the next page of
        /// results) and response headers.
        /// </returns>
        Task<AzureOperationResponse<IPage<BatchAccount>>> ListByResourceGroupNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Runtime.CompilerServices;
using System.IO;
using System.Collections;
using System.Globalization;
using System.Text;
using System.Threading;
using Xunit;

/// <summary>
/// Legacy-style test suite for File.Open(String, FileMode): for each FileMode
/// value, exercises the call both when the target file does not exist and when
/// it already exists, accumulating failures in static counters and asserting
/// zero errors at the end.
/// </summary>
public class File_Open_str_fm
{
    // Legacy test-harness metadata carried over from the original test framework.
    public static String s_strActiveBugNums = "";
    public static String s_strDtTmVer = "2001/02/02 10;00";
    public static String s_strClassMethod = "File.Open(String,FileMode)";
    public static String s_strTFName = "Open_str_fm.cs";
    public static String s_strTFPath = Directory.GetCurrentDirectory();
    // Tracks the most recently entered test location, for failure diagnostics.
    private static String s_strLoc = "Loc_0001";
    // Running totals; runTest asserts s_iCountErrors == 0 at the end.
    private static int s_iCountErrors = 0;
    private static int s_iCountTestcases = 0;

    /// <summary>
    /// Entry point: runs TestMethod once per FileMode value and fails the
    /// test if any scenario incremented the shared error counter.
    /// </summary>
    [Fact]
    public static void runTest()
    {
        try
        {
            // CreateNew
            // Create
            // Open
            // OpenOrCreate
            // Truncate
            // Append
            // Simple call throughs to FileStream, just test functionality
            // [] FileMode.CreateNew
            // [][] File Exists
            // [][] File does not exist
            // [] FileMode.Create
            // [][] File Exists
            // [][] File does not exist
            // [] FileMode.Open
            // [][] File Exists
            // [][] File does not exist
            // [] FileMode.OpenOrCreate
            // [][] File Exists
            // [][] File does not exist
            // [] FileMode.Truncate
            // [][] File Exists
            // [][] File does not exist
            // [] FileMode.Append
            // [][] File Exists
            // [][] File does not exist

            TestMethod(FileMode.CreateNew);
            TestMethod(FileMode.Create);
            TestMethod(FileMode.Open);
            TestMethod(FileMode.OpenOrCreate);
            TestMethod(FileMode.Truncate);
            TestMethod(FileMode.Append);
        }
        catch (Exception exc_general)
        {
            // Any unexpected escape from a scenario is recorded as a failure
            // with the last known location marker.
            ++s_iCountErrors;
            Console.WriteLine("Error Err_8888yyy! strLoc==" + s_strLoc + ", exc_general==" + exc_general.ToString());
        }
        //// Finish Diagnostics
        if (s_iCountErrors != 0)
        {
            Console.WriteLine("FAiL! " + s_strTFName + " ,iCountErrors==" + s_iCountErrors.ToString());
        }
        Assert.Equal(0, s_iCountErrors);
    }

    /// <summary>
    /// Exercises File.Open for a single FileMode, first against a missing
    /// file (plus null/empty path argument validation for the creating
    /// modes), then against an existing 12-character file. Cleans up the
    /// temp file in both phases.
    /// </summary>
    public static void TestMethod(FileMode fm)
    {
        // TestInfo is a project-local helper; presumably supplies a per-run
        // scratch directory — TODO confirm against the test infrastructure.
        String fileName = Path.Combine(TestInfo.CurrentDirectory, Path.GetRandomFileName());
        FileInfo fil2;
        StreamWriter sw2;
        Stream fs2 = null;
        String str2;
        if (File.Exists(fileName))
            File.Delete(fileName);

        // [] File does not exist
        //------------------------------------------------------------------
        s_strLoc = "Loc_0001";
        // NOTE(review): fil2 is captured before the file exists and is never
        // Refresh()ed; the error messages below that print fil2.Length look
        // like they were meant to print fs2.Length — only hit on failure.
        fil2 = new FileInfo(fileName);
        switch (fm)
        {
            case FileMode.CreateNew:
            case FileMode.Create:
            case FileMode.OpenOrCreate:
                //With a null string
                s_iCountTestcases++;
                try
                {
                    // Expected to throw ArgumentNullException before the
                    // File.Exists check runs.
                    fs2 = File.Open(null, fm);
                    if (!File.Exists(fileName))
                    {
                        s_iCountErrors++;
                        printerr("Error_0002! File not created, FileMode==" + fm.ToString());
                    }
                }
                catch (ArgumentNullException)
                {
                }
                catch (Exception ex)
                {
                    s_iCountErrors++;
                    printerr("Error_0003! Unexpected exception thrown :: " + ex.ToString());
                }
                //with anempty string
                s_iCountTestcases++;
                try
                {
                    // Expected to throw ArgumentException for an empty path.
                    fs2 = File.Open("", fm);
                    if (!File.Exists(fileName))
                    {
                        s_iCountErrors++;
                        printerr("Error_0004! File not created, FileMode==" + fm.ToString());
                    }
                }
                catch (ArgumentException)
                {
                }
                catch (Exception ex)
                {
                    s_iCountErrors++;
                    printerr("Error_0005! Unexpected exception thrown :: " + ex.ToString());
                }
                // Creating modes must succeed and materialize the file.
                fs2 = File.Open(fileName, fm);
                s_iCountTestcases++;
                if (!File.Exists(fileName))
                {
                    s_iCountErrors++;
                    printerr("Error_0006! File not created, FileMode==" + fm.ToString());
                }
                fs2.Dispose();
                break;
            case FileMode.Open:
            case FileMode.Truncate:
                // Open/Truncate on a missing file must throw FileNotFoundException.
                s_iCountTestcases++;
                try
                {
                    fs2 = File.Open(fileName, fm);
                    s_iCountErrors++;
                    printerr("Error_0007! Expected exception not thrown");
                    fs2.Dispose();
                }
                catch (FileNotFoundException)
                {
                }
                catch (Exception exc)
                {
                    s_iCountErrors++;
                    printerr("Error_0009! Incorrect exception thrown, exc==" + exc.ToString());
                }
                break;
            case FileMode.Append:
                // Append on a missing file creates it; four written bytes give length 4.
                s_iCountTestcases++;
                try
                {
                    fs2 = File.Open(fileName, fm);
                    fs2.Write(new Byte[] { 54, 65, 54, 90 }, 0, 4);
                    if (fs2.Length != 4)
                    {
                        s_iCountErrors++;
                        Console.WriteLine("Unexpected file length .... " + fs2.Length);
                    }
                    fs2.Dispose();
                }
                catch (Exception exc)
                {
                    s_iCountErrors++;
                    printerr("Error_0012! Incorrect exception thrown, exc==" + exc.ToString());
                }
                break;
            default:
                s_iCountErrors++;
                printerr("Error_27tbv! This should not be....");
                break;
        }
        if (File.Exists(fileName))
            File.Delete(fileName);
        //------------------------------------------------------------------

        // [] File already exists
        //------------------------------------------------------------------
        s_strLoc = "Loc_4yg7b";

        // Seed the file with a known 12-character payload.
        FileStream stream = new FileStream(fileName, FileMode.Create);
        sw2 = new StreamWriter(stream);
        str2 = "Du er en ape";
        sw2.Write(str2);
        sw2.Dispose();
        stream.Dispose();
        switch (fm)
        {
            case FileMode.CreateNew:
                // CreateNew over an existing file must throw IOException.
                s_iCountTestcases++;
                try
                {
                    fs2 = File.Open(fileName, fm);
                    s_iCountErrors++;
                    printerr("Error_27b98! Expected exception not thrown");
                    fs2.Dispose();
                }
                catch (IOException)
                {
                }
                catch (Exception exc)
                {
                    s_iCountErrors++;
                    printerr("Error_g8782! Incorrect exception thrown, exc==" + exc.ToString());
                }
                break;
            case FileMode.Create:
                // Create overwrites: the existing content is discarded.
                fs2 = File.Open(fileName, fm);
                if (fs2.Length != 0)
                {
                    s_iCountErrors++;
                    printerr("Error_287vb! Incorrect length of file==" + fil2.Length);
                }
                fs2.Dispose();
                break;
            case FileMode.OpenOrCreate:
            case FileMode.Open:
                // Open/OpenOrCreate preserve the existing 12 bytes.
                fs2 = File.Open(fileName, fm);
                if (fs2.Length != str2.Length)
                {
                    s_iCountErrors++;
                    printerr("Error_2gy78! Incorrect length on file==" + fil2.Length);
                }
                fs2.Dispose();
                break;
            case FileMode.Truncate:
                // Truncate keeps the file but resets its length to zero.
                fs2 = File.Open(fileName, fm);
                if (fs2.Length != 0)
                {
                    s_iCountErrors++;
                    printerr("Error_29gv9! Incorrect length on file==" + fil2.Length);
                }
                fs2.Dispose();
                break;
            case FileMode.Append:
                // Append keeps the 12 existing bytes and adds 4 more.
                s_iCountTestcases++;
                try
                {
                    fs2 = File.Open(fileName, fm);
                    fs2.Write(new Byte[] { 54, 65, 54, 90 }, 0, 4);
                    if (fs2.Length != 16)
                    { // already 12 characters are written to the file.
                        s_iCountErrors++;
                        Console.WriteLine("Unexpected file length .... " + fs2.Length);
                    }
                    fs2.Dispose();
                }
                catch (Exception exc)
                {
                    s_iCountErrors++;
                    printerr("Error_27878! Incorrect exception thrown, exc==" + exc.ToString());
                }
                break;
            default:
                s_iCountErrors++;
                printerr("Error_587yb! This should not be...");
                break;
        }
        if (File.Exists(fileName))
            File.Delete(fileName);
    }

    /// <summary>
    /// Prints an error message prefixed with the caller's member name, file
    /// path, and line number (supplied automatically by the compiler).
    /// </summary>
    public static void printerr(String err, [CallerMemberName] string memberName = "", [CallerFilePath] string filePath = "", [CallerLineNumber] int lineNumber = 0)
    {
        Console.WriteLine("ERROR: ({0}, {1}, {2}) {3}", memberName, filePath, lineNumber, err);
    }
}
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="PlotView.cs" company="OxyPlot">
//   Copyright (c) 2014 OxyPlot contributors
// </copyright>
// <summary>
//   Provides a view that can show a <see cref="PlotModel" />.
// </summary>
// --------------------------------------------------------------------------------------------------------------------

namespace OxyPlot.Xamarin.Mac
{
    using System;
    using Foundation;
    using AppKit;
    using OxyPlot;

    /// <summary>
    /// Provides a view that can show a <see cref="PlotModel" />.
    /// </summary>
    [Register("PlotView")]
    public class PlotView : NSView, IPlotView
    {
        /// <summary>
        /// The current plot model.
        /// </summary>
        private PlotModel model;

        /// <summary>
        /// The default plot controller, created lazily when no explicit
        /// <see cref="Controller"/> has been assigned.
        /// </summary>
        private IPlotController defaultController;

        /// <summary>
        /// Initializes a new instance of the <see cref="OxyPlot.Xamarin.Mac.PlotView"/> class.
        /// </summary>
        public PlotView()
        {
            this.Initialize ();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="OxyPlot.Xamarin.Mac.PlotView"/> class.
        /// </summary>
        /// <param name="frame">The initial frame.</param>
        public PlotView(CoreGraphics.CGRect frame) : base(frame)
        {
            this.Initialize ();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="OxyPlot.Xamarin.Mac.PlotView"/> class.
        /// </summary>
        /// <param name="coder">Coder.</param>
        [Export ("initWithCoder:")]
        public PlotView(NSCoder coder) : base (coder)
        {
            this.Initialize ();
        }

        /// <summary>
        /// Uses the new layout.
        /// </summary>
        /// <returns><c>true</c>, if new layout was used, <c>false</c> otherwise.</returns>
        [Export ("requiresConstraintBasedLayout")]
        bool UseNewLayout ()
        {
            return true;
        }

        /// <summary>
        /// Initialize the view. Enables touch events so gesture overrides fire.
        /// </summary>
        private void Initialize()
        {
            this.AcceptsTouchEvents = true;
        }

        /// <summary>
        /// Gets or sets the <see cref="PlotModel"/> to show in the view.
        /// Detaches the previous model, attaches the new one, and invalidates the plot.
        /// </summary>
        /// <value>The <see cref="PlotModel"/>.</value>
        public PlotModel Model
        {
            get
            {
                return this.model;
            }

            set
            {
                if (this.model != value)
                {
                    if (this.model != null)
                    {
                        ((IPlotModel)this.model).AttachPlotView(null);
                        this.model = null;
                    }

                    if (value != null)
                    {
                        ((IPlotModel)value).AttachPlotView(this);
                        this.model = value;
                    }

                    this.InvalidatePlot();
                }
            }
        }

        /// <summary>
        /// Gets or sets the <see cref="IPlotController"/> that handles input events.
        /// </summary>
        /// <value>The <see cref="IPlotController"/>.</value>
        public IPlotController Controller { get; set; }

        /// <summary>
        /// Gets the actual model in the view.
        /// </summary>
        /// <value>
        /// The actual model.
        /// </value>
        Model IView.ActualModel
        {
            get
            {
                return this.Model;
            }
        }

        /// <summary>
        /// Gets the actual <see cref="PlotModel"/> to show.
        /// </summary>
        /// <value>The actual model.</value>
        public PlotModel ActualModel
        {
            get
            {
                return this.Model;
            }
        }

        /// <summary>
        /// Gets the actual controller.
        /// </summary>
        /// <value>
        /// The actual <see cref="IController" />.
        /// </value>
        IController IView.ActualController
        {
            get
            {
                return this.ActualController;
            }
        }

        /// <summary>
        /// Gets the coordinates of the client area of the view.
        /// </summary>
        public OxyRect ClientArea
        {
            get
            {
                // TODO: return the real client area instead of this placeholder
                return new OxyRect(0, 0, 100, 100);
            }
        }

        /// <summary>
        /// Gets the actual <see cref="IPlotController"/>: the explicitly assigned
        /// <see cref="Controller"/> if any, otherwise a lazily created default.
        /// </summary>
        /// <value>The actual plot controller.</value>
        public IPlotController ActualController
        {
            get
            {
                return this.Controller ?? (this.defaultController ?? (this.defaultController = CreateDefaultController()));
            }
        }

        /// <summary>
        /// Creates the default controller: left mouse button pans instead of tracking.
        /// </summary>
        private PlotController CreateDefaultController(){
            var c = new PlotController ();
            c.UnbindMouseDown (OxyMouseButton.Left);
            c.BindMouseDown (OxyMouseButton.Left, PlotCommands.PanAt);
            return c;
        }

        /// <summary>
        /// Hides the tracker.
        /// </summary>
        public void HideTracker()
        {
        }

        /// <summary>
        /// Hides the zoom rectangle.
        /// </summary>
        public void HideZoomRectangle()
        {
        }

        /// <summary>
        /// Invalidates the plot (not blocking the UI thread)
        /// </summary>
        /// <param name="updateData">If set to <c>true</c> update data.</param>
        public void InvalidatePlot(bool updateData = true)
        {
            var actualModel = this.model;
            if (actualModel != null)
            {
                // TODO: update the model on a background thread
                ((IPlotModel)actualModel).Update(updateData);
            }

            if (actualModel != null && !actualModel.Background.IsUndefined())
            {
                // this.BackgroundColor = actualModel.Background.ToUIColor();
            }
            else
            {
                // Use white as default background color
                // this.BackgroundColor = UIColor.White;
            }

            this.NeedsDisplay = true;
            // this.SetNeedsDisplay();
        }

        /// <summary>
        /// Sets the cursor type.
        /// </summary>
        /// <param name="cursorType">The cursor type.</param>
        public void SetCursorType(CursorType cursorType)
        {
            this.ResetCursorRects ();
            var cursor = Convert (cursorType);
            if (cursor!=null)
                this.AddCursorRect (this.Bounds, cursor);
        }

        /// <summary>
        /// Converts a <see cref="CursorType"/> to the corresponding <see cref="NSCursor"/>,
        /// or <c>null</c> for the default cursor.
        /// </summary>
        /// <param name="cursorType">The cursor type to convert.</param>
        /// <returns>The matching <see cref="NSCursor"/>, or <c>null</c>.</returns>
        public static NSCursor Convert(CursorType cursorType){
            switch (cursorType) {
            case CursorType.Default:
                return null;
            case CursorType.Pan:
                return NSCursor.PointingHandCursor;
            case CursorType.ZoomHorizontal:
                // Fixed: horizontal zoom moves along the x-axis, so show the
                // left-right resize cursor (these two cases were swapped).
                return NSCursor.ResizeLeftRightCursor;
            case CursorType.ZoomVertical:
                return NSCursor.ResizeUpDownCursor;
            case CursorType.ZoomRectangle:
                return NSCursor.CrosshairCursor;
            default:
                return null;
            }
        }

        /// <summary>
        /// Shows the tracker.
        /// </summary>
        /// <param name="trackerHitResult">The tracker data.</param>
        public void ShowTracker(TrackerHitResult trackerHitResult)
        {
            // TODO
        }

        /// <summary>
        /// Shows the zoom rectangle.
        /// </summary>
        /// <param name="rectangle">The rectangle.</param>
        public void ShowZoomRectangle(OxyRect rectangle)
        {
            // TODO
        }

        /// <summary>
        /// Stores text on the clipboard.
        /// </summary>
        /// <param name="text">The text.</param>
        public void SetClipboardText(string text)
        {
            // TODO
            // UIPasteboard.General.SetValue(new NSString(text), "public.utf8-plain-text");
        }

        /// <summary>
        /// Draws the content of the view. Flips the CG coordinate system so the
        /// model renders top-down.
        /// </summary>
        /// <param name="dirtyRect">The rectangle to draw.</param>
        public override void DrawRect(CoreGraphics.CGRect dirtyRect)
        {
            if (this.model != null)
            {
                var context = NSGraphicsContext.CurrentContext.GraphicsPort;
                context.TranslateCTM(0f, dirtyRect.Height);
                context.ScaleCTM(1f, -1f);
                // TODO: scale font matrix??
                using (var renderer = new CoreGraphicsRenderContext(context))
                {
                    ((IPlotModel)this.model).Render(renderer, dirtyRect.Width, dirtyRect.Height);
                }
            }
        }

        public override void MouseDown (NSEvent theEvent)
        {
            base.MouseDown (theEvent);
            this.ActualController.HandleMouseDown (this, theEvent.ToMouseDownEventArgs(this.Bounds));
        }

        public override void MouseDragged (NSEvent theEvent)
        {
            base.MouseDragged (theEvent);
            this.ActualController.HandleMouseMove (this, theEvent.ToMouseEventArgs (this.Bounds));
        }

        public override void MouseMoved (NSEvent theEvent)
        {
            base.MouseMoved (theEvent);
            this.ActualController.HandleMouseMove (this, theEvent.ToMouseEventArgs (this.Bounds));
        }

        public override void MouseUp (NSEvent theEvent)
        {
            base.MouseUp (theEvent);
            this.ActualController.HandleMouseUp (this, theEvent.ToMouseEventArgs (this.Bounds));
        }

        public override void MouseEntered (NSEvent theEvent)
        {
            base.MouseEntered (theEvent);
            this.ActualController.HandleMouseEnter (this, theEvent.ToMouseEventArgs (this.Bounds));
        }

        public override void MouseExited (NSEvent theEvent)
        {
            base.MouseExited (theEvent);
            this.ActualController.HandleMouseLeave (this, theEvent.ToMouseEventArgs (this.Bounds));
        }

        public override void ScrollWheel (NSEvent theEvent)
        {
            // TODO: use scroll events to pan?
            base.ScrollWheel (theEvent);
            this.ActualController.HandleMouseWheel (this, theEvent.ToMouseWheelEventArgs (this.Bounds));
        }

        public override void OtherMouseDown (NSEvent theEvent)
        {
            base.OtherMouseDown (theEvent);
        }

        public override void RightMouseDown (NSEvent theEvent)
        {
            base.RightMouseDown (theEvent);
        }

        public override void KeyDown (NSEvent theEvent)
        {
            base.KeyDown (theEvent);
            this.ActualController.HandleKeyDown (this, theEvent.ToKeyEventArgs ());
        }

        public override void TouchesBeganWithEvent (NSEvent theEvent)
        {
            base.TouchesBeganWithEvent (theEvent);
        }

        public override void MagnifyWithEvent (NSEvent theEvent)
        {
            base.MagnifyWithEvent (theEvent);
            // TODO: handle pinch event
            // https://developer.apple.com/library/mac/documentation/cocoa/conceptual/eventoverview/HandlingTouchEvents/HandlingTouchEvents.html
        }

        public override void SmartMagnify (NSEvent withEvent)
        {
            base.SmartMagnify (withEvent);
        }

        public override void SwipeWithEvent (NSEvent theEvent)
        {
            base.SwipeWithEvent (theEvent);
        }
    }
}
using J2N.Threading; using Lucene.Net.Attributes; using Lucene.Net.Codecs; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Support; using NUnit.Framework; using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; namespace Lucene.Net.Index { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ using BaseDirectoryWrapper = Lucene.Net.Store.BaseDirectoryWrapper; using Codec = Lucene.Net.Codecs.Codec; using Directory = Lucene.Net.Store.Directory; using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator; using Document = Documents.Document; using Field = Field; using FieldType = FieldType; using FilterCodec = Lucene.Net.Codecs.FilterCodec; using IOUtils = Lucene.Net.Util.IOUtils; using LockObtainFailedException = Lucene.Net.Store.LockObtainFailedException; using Lucene46Codec = Lucene.Net.Codecs.Lucene46.Lucene46Codec; using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer; using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper; using PhraseQuery = Lucene.Net.Search.PhraseQuery; using PostingsFormat = Lucene.Net.Codecs.PostingsFormat; using Pulsing41PostingsFormat = Lucene.Net.Codecs.Pulsing.Pulsing41PostingsFormat; using RAMDirectory = Lucene.Net.Store.RAMDirectory; using StringField = StringField; using TestUtil = Lucene.Net.Util.TestUtil; using TextField = TextField; [TestFixture] public class TestAddIndexes : LuceneTestCase { [Test] public virtual void TestSimpleCase() { // main directory Directory dir = NewDirectory(); // two auxiliary directories Directory aux = NewDirectory(); Directory aux2 = NewDirectory(); IndexWriter writer = null; writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE)); // add 100 documents AddDocs(writer, 100); Assert.AreEqual(100, writer.MaxDoc); writer.Dispose(); TestUtil.CheckIndex(dir); writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy(false))); // add 40 documents in separate files AddDocs(writer, 40); Assert.AreEqual(40, writer.MaxDoc); writer.Dispose(); writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE)); // add 
50 documents in compound files AddDocs2(writer, 50); Assert.AreEqual(50, writer.MaxDoc); writer.Dispose(); // test doc count before segments are merged writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND)); Assert.AreEqual(100, writer.MaxDoc); writer.AddIndexes(aux, aux2); Assert.AreEqual(190, writer.MaxDoc); writer.Dispose(); TestUtil.CheckIndex(dir); // make sure the old index is correct VerifyNumDocs(aux, 40); // make sure the new index is correct VerifyNumDocs(dir, 190); // now add another set in. Directory aux3 = NewDirectory(); writer = NewWriter(aux3, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); // add 40 documents AddDocs(writer, 40); Assert.AreEqual(40, writer.MaxDoc); writer.Dispose(); // test doc count before segments are merged writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND)); Assert.AreEqual(190, writer.MaxDoc); writer.AddIndexes(aux3); Assert.AreEqual(230, writer.MaxDoc); writer.Dispose(); // make sure the new index is correct VerifyNumDocs(dir, 230); VerifyTermDocs(dir, new Term("content", "aaa"), 180); VerifyTermDocs(dir, new Term("content", "bbb"), 50); // now fully merge it. 
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND)); writer.ForceMerge(1); writer.Dispose(); // make sure the new index is correct VerifyNumDocs(dir, 230); VerifyTermDocs(dir, new Term("content", "aaa"), 180); VerifyTermDocs(dir, new Term("content", "bbb"), 50); // now add a single document Directory aux4 = NewDirectory(); writer = NewWriter(aux4, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); AddDocs2(writer, 1); writer.Dispose(); writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND)); Assert.AreEqual(230, writer.MaxDoc); writer.AddIndexes(aux4); Assert.AreEqual(231, writer.MaxDoc); writer.Dispose(); VerifyNumDocs(dir, 231); VerifyTermDocs(dir, new Term("content", "bbb"), 51); dir.Dispose(); aux.Dispose(); aux2.Dispose(); aux3.Dispose(); aux4.Dispose(); } [Test] public virtual void TestWithPendingDeletes() { // main directory Directory dir = NewDirectory(); // auxiliary directory Directory aux = NewDirectory(); SetUpDirs(dir, aux); IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND)); writer.AddIndexes(aux); // Adds 10 docs, then replaces them with another 10 // docs, so 10 pending deletes: for (int i = 0; i < 20; i++) { Document doc = new Document(); doc.Add(NewStringField("id", "" + (i % 10), Field.Store.NO)); doc.Add(NewTextField("content", "bbb " + i, Field.Store.NO)); writer.UpdateDocument(new Term("id", "" + (i % 10)), doc); } // Deletes one of the 10 added docs, leaving 9: PhraseQuery q = new PhraseQuery(); q.Add(new Term("content", "bbb")); q.Add(new Term("content", "14")); writer.DeleteDocuments(q); writer.ForceMerge(1); writer.Commit(); VerifyNumDocs(dir, 1039); VerifyTermDocs(dir, new Term("content", "aaa"), 1030); VerifyTermDocs(dir, new Term("content", "bbb"), 9); writer.Dispose(); dir.Dispose(); aux.Dispose(); 
} [Test] public virtual void TestWithPendingDeletes2() { // main directory Directory dir = NewDirectory(); // auxiliary directory Directory aux = NewDirectory(); SetUpDirs(dir, aux); IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND)); // Adds 10 docs, then replaces them with another 10 // docs, so 10 pending deletes: for (int i = 0; i < 20; i++) { Document doc = new Document(); doc.Add(NewStringField("id", "" + (i % 10), Field.Store.NO)); doc.Add(NewTextField("content", "bbb " + i, Field.Store.NO)); writer.UpdateDocument(new Term("id", "" + (i % 10)), doc); } writer.AddIndexes(aux); // Deletes one of the 10 added docs, leaving 9: PhraseQuery q = new PhraseQuery(); q.Add(new Term("content", "bbb")); q.Add(new Term("content", "14")); writer.DeleteDocuments(q); writer.ForceMerge(1); writer.Commit(); VerifyNumDocs(dir, 1039); VerifyTermDocs(dir, new Term("content", "aaa"), 1030); VerifyTermDocs(dir, new Term("content", "bbb"), 9); writer.Dispose(); dir.Dispose(); aux.Dispose(); } [Test] public virtual void TestWithPendingDeletes3() { // main directory Directory dir = NewDirectory(); // auxiliary directory Directory aux = NewDirectory(); SetUpDirs(dir, aux); IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND)); // Adds 10 docs, then replaces them with another 10 // docs, so 10 pending deletes: for (int i = 0; i < 20; i++) { Document doc = new Document(); doc.Add(NewStringField("id", "" + (i % 10), Field.Store.NO)); doc.Add(NewTextField("content", "bbb " + i, Field.Store.NO)); writer.UpdateDocument(new Term("id", "" + (i % 10)), doc); } // Deletes one of the 10 added docs, leaving 9: PhraseQuery q = new PhraseQuery(); q.Add(new Term("content", "bbb")); q.Add(new Term("content", "14")); writer.DeleteDocuments(q); writer.AddIndexes(aux); writer.ForceMerge(1); writer.Commit(); VerifyNumDocs(dir, 1039); 
VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
VerifyTermDocs(dir, new Term("content", "bbb"), 9);
writer.Dispose();
dir.Dispose();
aux.Dispose();
}

// case 0: add self or exceed maxMergeDocs, expect exception
/// <summary>
/// Verifies that AddIndexes rejects the writer's own directory: the call must
/// throw ArgumentException and leave the original 100-doc index intact.
/// </summary>
[Test]
public virtual void TestAddSelf()
{
    // main directory
    Directory dir = NewDirectory();
    // auxiliary directory
    Directory aux = NewDirectory();
    IndexWriter writer = null;
    writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
    // add 100 documents
    AddDocs(writer, 100);
    Assert.AreEqual(100, writer.MaxDoc);
    writer.Dispose();
    writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false)));
    // add 40 documents in separate files (aux is then recreated below with 100 docs;
    // the old comment said 140, which did not match this call)
    AddDocs(writer, 40);
    writer.Dispose();
    // OpenMode.CREATE wipes aux and replaces its contents with 100 fresh docs
    writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false)));
    AddDocs(writer, 100);
    writer.Dispose();
    writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND));
    try
    {
        // cannot add self
        writer.AddIndexes(aux, dir);
        Assert.IsTrue(false);
    }
#pragma warning disable 168
    catch (ArgumentException e)
#pragma warning restore 168
    {
        // expected; the writer must be unchanged by the failed call
        Assert.AreEqual(100, writer.MaxDoc);
    }
    writer.Dispose();
    // make sure the index is correct
    VerifyNumDocs(dir, 100);
    dir.Dispose();
    aux.Dispose();
}

// in all the remaining tests, make the doc count of the oldest segment
// in dir large so that it is never merged in addIndexes()

// case 1: no tail segments
[Test]
public virtual void TestNoTailSegments()
{
    // main directory
    Directory dir = NewDirectory();
    // auxiliary directory
    Directory aux = NewDirectory();
    SetUpDirs(dir, aux);
    IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new
MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(4))); AddDocs(writer, 10); writer.AddIndexes(aux); Assert.AreEqual(1040, writer.MaxDoc); Assert.AreEqual(1000, writer.GetDocCount(0)); writer.Dispose(); // make sure the index is correct VerifyNumDocs(dir, 1040); dir.Dispose(); aux.Dispose(); } // case 2: tail segments, invariants hold, no copy [Test] public virtual void TestNoCopySegments() { // main directory Directory dir = NewDirectory(); // auxiliary directory Directory aux = NewDirectory(); SetUpDirs(dir, aux); IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(9).SetMergePolicy(NewLogMergePolicy(4))); AddDocs(writer, 2); writer.AddIndexes(aux); Assert.AreEqual(1032, writer.MaxDoc); Assert.AreEqual(1000, writer.GetDocCount(0)); writer.Dispose(); // make sure the index is correct VerifyNumDocs(dir, 1032); dir.Dispose(); aux.Dispose(); } // case 3: tail segments, invariants hold, copy, invariants hold [Test] public virtual void TestNoMergeAfterCopy() { // main directory Directory dir = NewDirectory(); // auxiliary directory Directory aux = NewDirectory(); SetUpDirs(dir, aux); IndexWriter writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(4))); writer.AddIndexes(aux, new MockDirectoryWrapper(Random, new RAMDirectory(aux, NewIOContext(Random)))); Assert.AreEqual(1060, writer.MaxDoc); Assert.AreEqual(1000, writer.GetDocCount(0)); writer.Dispose(); // make sure the index is correct VerifyNumDocs(dir, 1060); dir.Dispose(); aux.Dispose(); } // case 4: tail segments, invariants hold, copy, invariants not hold [Test] public virtual void TestMergeAfterCopy() { // main directory Directory dir = NewDirectory(); // auxiliary directory Directory aux = NewDirectory(); SetUpDirs(dir, aux, true); 
IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES); IndexWriter writer = new IndexWriter(aux, dontMergeConfig); for (int i = 0; i < 20; i++) { writer.DeleteDocuments(new Term("id", "" + i)); } writer.Dispose(); IndexReader reader = DirectoryReader.Open(aux); Assert.AreEqual(10, reader.NumDocs); reader.Dispose(); writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(4).SetMergePolicy(NewLogMergePolicy(4))); if (Verbose) { Console.WriteLine("\nTEST: now addIndexes"); } writer.AddIndexes(aux, new MockDirectoryWrapper(Random, new RAMDirectory(aux, NewIOContext(Random)))); Assert.AreEqual(1020, writer.MaxDoc); Assert.AreEqual(1000, writer.GetDocCount(0)); writer.Dispose(); dir.Dispose(); aux.Dispose(); } // case 5: tail segments, invariants not hold [Test] public virtual void TestMoreMerges() { // main directory Directory dir = NewDirectory(); // auxiliary directory Directory aux = NewDirectory(); Directory aux2 = NewDirectory(); SetUpDirs(dir, aux, true); IndexWriter writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(100).SetMergePolicy(NewLogMergePolicy(10))); writer.AddIndexes(aux); Assert.AreEqual(30, writer.MaxDoc); Assert.AreEqual(3, writer.SegmentCount); writer.Dispose(); IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES); writer = new IndexWriter(aux, dontMergeConfig); for (int i = 0; i < 27; i++) { writer.DeleteDocuments(new Term("id", "" + i)); } writer.Dispose(); IndexReader reader = DirectoryReader.Open(aux); Assert.AreEqual(3, reader.NumDocs); reader.Dispose(); dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new 
MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES);
writer = new IndexWriter(aux2, dontMergeConfig);
// delete ids 0..7 from aux2 (30 docs copied earlier), leaving 22 live docs
for (int i = 0; i < 8; i++)
{
    writer.DeleteDocuments(new Term("id", "" + i));
}
writer.Dispose();
reader = DirectoryReader.Open(aux2);
Assert.AreEqual(22, reader.NumDocs);
reader.Dispose();
writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(6).SetMergePolicy(NewLogMergePolicy(4)));
// copy both auxiliary indexes into dir
writer.AddIndexes(aux, aux2);
Assert.AreEqual(1040, writer.MaxDoc);
Assert.AreEqual(1000, writer.GetDocCount(0));
writer.Dispose();
dir.Dispose();
aux.Dispose();
aux2.Dispose();
}

/// <summary>
/// Creates an IndexWriter over <paramref name="dir"/> after forcing the config's
/// merge policy to LogDocMergePolicy, so merge decisions in these tests are driven
/// by document counts rather than byte sizes.
/// </summary>
private IndexWriter NewWriter(Directory dir, IndexWriterConfig conf)
{
    conf.SetMergePolicy(new LogDocMergePolicy());
    IndexWriter writer = new IndexWriter(dir, conf);
    return writer;
}

/// <summary>
/// Adds <paramref name="numDocs"/> documents, each with a single unstored
/// text field content="aaa".
/// </summary>
private void AddDocs(IndexWriter writer, int numDocs)
{
    for (int i = 0; i < numDocs; i++)
    {
        Document doc = new Document();
        doc.Add(NewTextField("content", "aaa", Field.Store.NO));
        writer.AddDocument(doc);
    }
}

/// <summary>
/// Adds <paramref name="numDocs"/> documents, each with a single unstored
/// text field content="bbb" (distinguishable from AddDocs output).
/// </summary>
private void AddDocs2(IndexWriter writer, int numDocs)
{
    for (int i = 0; i < numDocs; i++)
    {
        Document doc = new Document();
        doc.Add(NewTextField("content", "bbb", Field.Store.NO));
        writer.AddDocument(doc);
    }
}

/// <summary>
/// Asserts that the index in <paramref name="dir"/> has exactly
/// <paramref name="numDocs"/> documents and no deletions
/// (MaxDoc == NumDocs == numDocs).
/// </summary>
private void VerifyNumDocs(Directory dir, int numDocs)
{
    IndexReader reader = DirectoryReader.Open(dir);
    Assert.AreEqual(numDocs, reader.MaxDoc);
    Assert.AreEqual(numDocs, reader.NumDocs);
    reader.Dispose();
}

/// <summary>
/// Asserts that exactly <paramref name="numDocs"/> documents in
/// <paramref name="dir"/> contain <paramref name="term"/>, by walking the
/// term's postings to exhaustion.
/// </summary>
private void VerifyTermDocs(Directory dir, Term term, int numDocs)
{
    IndexReader reader = DirectoryReader.Open(dir);
    DocsEnum docsEnum = TestUtil.Docs(Random, reader, term.Field, term.Bytes, null, null, DocsFlags.NONE);
    int count = 0;
    while (docsEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS)
    {
        count++;
    }
    Assert.AreEqual(numDocs, count);
    reader.Dispose();
}

/// <summary>
/// Convenience overload: populates dir/aux without "id" fields.
/// </summary>
private void SetUpDirs(Directory dir, Directory aux)
{
    SetUpDirs(dir, aux, false);
}

/// <summary>
/// Populates <paramref name="dir"/> with 1000 docs in a single segment and
/// <paramref name="aux"/> with 30 docs in 3 segments; when
/// <paramref name="withID"/> is true, docs also carry sequential "id" fields.
/// </summary>
private void SetUpDirs(Directory dir, Directory aux, bool withID)
{
IndexWriter writer = null; writer = NewWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000)); // add 1000 documents in 1 segment if (withID) { AddDocsWithID(writer, 1000, 0); } else { AddDocs(writer, 1000); } Assert.AreEqual(1000, writer.MaxDoc); Assert.AreEqual(1, writer.SegmentCount); writer.Dispose(); writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false, 10))); // add 30 documents in 3 segments for (int i = 0; i < 3; i++) { if (withID) { AddDocsWithID(writer, 10, 10 * i); } else { AddDocs(writer, 10); } writer.Dispose(); writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(1000).SetMergePolicy(NewLogMergePolicy(false, 10))); } Assert.AreEqual(30, writer.MaxDoc); Assert.AreEqual(3, writer.SegmentCount); writer.Dispose(); } // LUCENE-1270 [Test] public virtual void TestHangOnClose() { Directory dir = NewDirectory(); LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(); lmp.NoCFSRatio = 0.0; lmp.MergeFactor = 100; IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(5).SetMergePolicy(lmp)); Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_STORED); customType.StoreTermVectors = true; customType.StoreTermVectorPositions = true; customType.StoreTermVectorOffsets = true; doc.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType)); for (int i = 0; i < 60; i++) { writer.AddDocument(doc); } Document doc2 = new Document(); FieldType customType2 = new FieldType(); customType2.IsStored = true; doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2)); doc2.Add(NewField("content", "aaa bbb ccc ddd eee 
fff ggg hhh iii", customType2)); doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2)); doc2.Add(NewField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2)); for (int i = 0; i < 10; i++) { writer.AddDocument(doc2); } writer.Dispose(); Directory dir2 = NewDirectory(); lmp = new LogByteSizeMergePolicy(); lmp.MinMergeMB = 0.0001; lmp.NoCFSRatio = 0.0; lmp.MergeFactor = 4; writer = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(lmp)); writer.AddIndexes(dir); writer.Dispose(); dir.Dispose(); dir2.Dispose(); } // TODO: these are also in TestIndexWriter... add a simple doc-writing method // like this to LuceneTestCase? private void AddDoc(IndexWriter writer) { Document doc = new Document(); doc.Add(NewTextField("content", "aaa", Field.Store.NO)); writer.AddDocument(doc); } private abstract class RunAddIndexesThreads { internal Directory dir, dir2; internal const int NUM_INIT_DOCS = 17; internal IndexWriter writer2; internal readonly IList<Exception> failures = new List<Exception>(); internal volatile bool didClose; internal readonly IndexReader[] readers; internal readonly int NUM_COPY; internal const int NUM_THREADS = 5; internal readonly ThreadJob[] threads = new ThreadJob[NUM_THREADS]; public RunAddIndexesThreads(TestAddIndexes outerInstance, int numCopy) { NUM_COPY = numCopy; dir = new MockDirectoryWrapper(Random, new RAMDirectory()); IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2)); for (int i = 0; i < NUM_INIT_DOCS; i++) { outerInstance.AddDoc(writer); } writer.Dispose(); dir2 = NewDirectory(); writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); writer2.Commit(); readers = new IndexReader[NUM_COPY]; for (int i = 0; i < NUM_COPY; i++) { readers[i] = 
DirectoryReader.Open(dir); } } internal virtual void LaunchThreads(int numIter) { for (int i = 0; i < NUM_THREADS; i++) { threads[i] = new ThreadAnonymousInnerClassHelper(this, numIter); } for (int i = 0; i < NUM_THREADS; i++) { threads[i].Start(); } } private class ThreadAnonymousInnerClassHelper : ThreadJob { private readonly RunAddIndexesThreads outerInstance; private readonly int numIter; public ThreadAnonymousInnerClassHelper(RunAddIndexesThreads outerInstance, int numIter) { this.outerInstance = outerInstance; this.numIter = numIter; } public override void Run() { try { Directory[] dirs = new Directory[outerInstance.NUM_COPY]; for (int k = 0; k < outerInstance.NUM_COPY; k++) { dirs[k] = new MockDirectoryWrapper(Random, new RAMDirectory(outerInstance.dir, NewIOContext(Random))); } int j = 0; while (true) { // System.out.println(Thread.currentThread().getName() + ": iter j=" + j); if (numIter > 0 && j == numIter) { break; } outerInstance.DoBody(j++, dirs); } } catch (Exception t) { outerInstance.Handle(t); } } } internal virtual void JoinThreads() { for (int i = 0; i < NUM_THREADS; i++) { threads[i].Join(); } } internal virtual void Close(bool doWait) { didClose = true; writer2.Dispose(doWait); } internal virtual void CloseDir() { for (int i = 0; i < NUM_COPY; i++) { readers[i].Dispose(); } dir2.Dispose(); } internal abstract void DoBody(int j, Directory[] dirs); internal abstract void Handle(Exception t); } private class CommitAndAddIndexes : RunAddIndexesThreads { public CommitAndAddIndexes(TestAddIndexes outerInstance, int numCopy) : base(outerInstance, numCopy) { } internal override void Handle(Exception t) { Console.Error.WriteLine(t.StackTrace); lock (failures) { failures.Add(t); } } internal override void DoBody(int j, Directory[] dirs) { switch (j % 5) { case 0: if (Verbose) { Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(Dir[]) then full merge"); } writer2.AddIndexes(dirs); writer2.ForceMerge(1); break; case 1: if (Verbose) { 
Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(Dir[])"); } writer2.AddIndexes(dirs); break; case 2: if (Verbose) { Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(IndexReader[])"); } writer2.AddIndexes(readers); break; case 3: if (Verbose) { Console.WriteLine(Thread.CurrentThread.Name + ": TEST: addIndexes(Dir[]) then maybeMerge"); } writer2.AddIndexes(dirs); writer2.MaybeMerge(); break; case 4: if (Verbose) { Console.WriteLine(Thread.CurrentThread.Name + ": TEST: commit"); } writer2.Commit(); break; } } } // LUCENE-1335: test simultaneous addIndexes & commits // from multiple threads [Test] [Timeout(300000)] public virtual void TestAddIndexesWithThreads() { int NUM_ITER = TestNightly ? 15 : 5; const int NUM_COPY = 3; CommitAndAddIndexes c = new CommitAndAddIndexes(this, NUM_COPY); c.LaunchThreads(NUM_ITER); for (int i = 0; i < 100; i++) { AddDoc(c.writer2); } c.JoinThreads(); int expectedNumDocs = 100 + NUM_COPY * (4 * NUM_ITER / 5) * RunAddIndexesThreads.NUM_THREADS * RunAddIndexesThreads.NUM_INIT_DOCS; Assert.AreEqual(expectedNumDocs, c.writer2.NumDocs, "expected num docs don't match - failures: " + Environment.NewLine + string.Join(Environment.NewLine, c.failures.Select(x => x.ToString()))); c.Close(true); Assert.IsTrue(c.failures.Count == 0, "found unexpected failures: " + c.failures); IndexReader reader = DirectoryReader.Open(c.dir2); Assert.AreEqual(expectedNumDocs, reader.NumDocs); reader.Dispose(); c.CloseDir(); } private class CommitAndAddIndexes2 : CommitAndAddIndexes { public CommitAndAddIndexes2(TestAddIndexes outerInstance, int numCopy) : base(outerInstance, numCopy) { } internal override void Handle(Exception t) { if (!(t is ObjectDisposedException) && !(t is NullReferenceException)) { Console.Error.WriteLine(t.StackTrace); lock (failures) { failures.Add(t); } } } } // LUCENE-1335: test simultaneous addIndexes & close [Test] public virtual void TestAddIndexesWithClose() { const int NUM_COPY = 3; 
CommitAndAddIndexes2 c = new CommitAndAddIndexes2(this, NUM_COPY); //c.writer2.setInfoStream(System.out); c.LaunchThreads(-1); // Close w/o first stopping/joining the threads c.Close(true); //c.writer2.Dispose(); c.JoinThreads(); c.CloseDir(); Assert.IsTrue(c.failures.Count == 0); } private class CommitAndAddIndexes3 : RunAddIndexesThreads { public CommitAndAddIndexes3(TestAddIndexes outerInstance, int numCopy) : base(outerInstance, numCopy) { } internal override void DoBody(int j, Directory[] dirs) { switch (j % 5) { case 0: if (Verbose) { Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": addIndexes + full merge"); } writer2.AddIndexes(dirs); writer2.ForceMerge(1); break; case 1: if (Verbose) { Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": addIndexes"); } writer2.AddIndexes(dirs); break; case 2: if (Verbose) { Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": addIndexes(IR[])"); } writer2.AddIndexes(readers); break; case 3: if (Verbose) { Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": full merge"); } writer2.ForceMerge(1); break; case 4: if (Verbose) { Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": commit"); } writer2.Commit(); break; } } internal override void Handle(Exception t) { bool report = true; if (t is ObjectDisposedException || t is MergePolicy.MergeAbortedException || t is NullReferenceException) { report = !didClose; } // LUCENENET specific - since NoSuchDirectoryException subclasses FileNotFoundException // in Lucene, we need to handle it here to be on the safe side. 
else if (t is FileNotFoundException/* || t is NoSuchFileException*/ || t is DirectoryNotFoundException) { report = !didClose; } else if (t is IOException) { Exception t2 = t.InnerException; if (t2 is MergePolicy.MergeAbortedException) { report = !didClose; } } if (report) { Console.Out.WriteLine(t.StackTrace); lock (failures) { failures.Add(t); } } } } // LUCENE-1335: test simultaneous addIndexes & close [Test] [Slow] [Deadlock][Timeout(600000)] public virtual void TestAddIndexesWithCloseNoWait() { const int NUM_COPY = 50; CommitAndAddIndexes3 c = new CommitAndAddIndexes3(this, NUM_COPY); c.LaunchThreads(-1); Thread.Sleep(TestUtil.NextInt32(Random, 10, 500)); // Close w/o first stopping/joining the threads if (Verbose) { Console.WriteLine("TEST: now close(false)"); } c.Close(false); c.JoinThreads(); if (Verbose) { Console.WriteLine("TEST: done join threads"); } c.CloseDir(); Assert.IsTrue(c.failures.Count == 0); } // LUCENE-1335: test simultaneous addIndexes & close [Test] [Timeout(300000)] public virtual void TestAddIndexesWithRollback() { int NUM_COPY = TestNightly ? 50 : 5; CommitAndAddIndexes3 c = new CommitAndAddIndexes3(this, NUM_COPY); c.LaunchThreads(-1); Thread.Sleep(TestUtil.NextInt32(Random, 10, 500)); // Close w/o first stopping/joining the threads if (Verbose) { Console.WriteLine("TEST: now force rollback"); } c.didClose = true; c.writer2.Rollback(); c.JoinThreads(); c.CloseDir(); Assert.IsTrue(c.failures.Count == 0); } // LUCENE-2996: tests that addIndexes(IndexReader) applies existing deletes correctly. 
[Test] public virtual void TestExistingDeletes() { Directory[] dirs = new Directory[2]; for (int i = 0; i < dirs.Length; i++) { dirs[i] = NewDirectory(); IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); IndexWriter writer = new IndexWriter(dirs[i], conf); Document doc = new Document(); doc.Add(new StringField("id", "myid", Field.Store.NO)); writer.AddDocument(doc); writer.Dispose(); } IndexWriterConfig conf_ = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); IndexWriter writer_ = new IndexWriter(dirs[0], conf_); // Now delete the document writer_.DeleteDocuments(new Term("id", "myid")); IndexReader r = DirectoryReader.Open(dirs[1]); try { writer_.AddIndexes(r); } finally { r.Dispose(); } writer_.Commit(); Assert.AreEqual(1, writer_.NumDocs, "Documents from the incoming index should not have been deleted"); writer_.Dispose(); foreach (Directory dir in dirs) { dir.Dispose(); } } // just like addDocs but with ID, starting from docStart private void AddDocsWithID(IndexWriter writer, int numDocs, int docStart) { for (int i = 0; i < numDocs; i++) { Document doc = new Document(); doc.Add(NewTextField("content", "aaa", Field.Store.NO)); doc.Add(NewTextField("id", "" + (docStart + i), Field.Store.YES)); writer.AddDocument(doc); } } [Test] public virtual void TestSimpleCaseCustomCodec() { // main directory Directory dir = NewDirectory(); // two auxiliary directories Directory aux = NewDirectory(); Directory aux2 = NewDirectory(); Codec codec = new CustomPerFieldCodec(); IndexWriter writer = null; writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetCodec(codec)); // add 100 documents AddDocsWithID(writer, 100, 0); Assert.AreEqual(100, writer.MaxDoc); writer.Commit(); writer.Dispose(); TestUtil.CheckIndex(dir); writer = NewWriter(aux, NewIndexWriterConfig(TEST_VERSION_CURRENT, new 
MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetCodec(codec).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(false))); // add 40 documents in separate files AddDocs(writer, 40); Assert.AreEqual(40, writer.MaxDoc); writer.Commit(); writer.Dispose(); writer = NewWriter(aux2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetCodec(codec)); // add 40 documents in compound files AddDocs2(writer, 50); Assert.AreEqual(50, writer.MaxDoc); writer.Commit(); writer.Dispose(); // test doc count before segments are merged writer = NewWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetCodec(codec)); Assert.AreEqual(100, writer.MaxDoc); writer.AddIndexes(aux, aux2); Assert.AreEqual(190, writer.MaxDoc); writer.Dispose(); dir.Dispose(); aux.Dispose(); aux2.Dispose(); } private sealed class CustomPerFieldCodec : Lucene46Codec { internal readonly PostingsFormat simpleTextFormat; internal readonly PostingsFormat defaultFormat; internal readonly PostingsFormat mockSepFormat; public CustomPerFieldCodec() { simpleTextFormat = Codecs.PostingsFormat.ForName("SimpleText"); defaultFormat = Codecs.PostingsFormat.ForName("Lucene41"); mockSepFormat = Codecs.PostingsFormat.ForName("MockSep"); } public override PostingsFormat GetPostingsFormatForField(string field) { if (field.Equals("id", StringComparison.Ordinal)) { return simpleTextFormat; } else if (field.Equals("content", StringComparison.Ordinal)) { return mockSepFormat; } else { return defaultFormat; } } } // LUCENE-2790: tests that the non CFS files were deleted by addIndexes [Test] public virtual void TestNonCFSLeftovers() { Directory[] dirs = new Directory[2]; for (int i = 0; i < dirs.Length; i++) { dirs[i] = new RAMDirectory(); IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); Document d = new Document(); FieldType customType = new 
FieldType(TextField.TYPE_STORED); customType.StoreTermVectors = true; d.Add(new Field("c", "v", customType)); w.AddDocument(d); w.Dispose(); } IndexReader[] readers = new IndexReader[] { DirectoryReader.Open(dirs[0]), DirectoryReader.Open(dirs[1]) }; Directory dir = new MockDirectoryWrapper(Random, new RAMDirectory()); IndexWriterConfig conf = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NewLogMergePolicy(true)); MergePolicy lmp = conf.MergePolicy; // Force creation of CFS: lmp.NoCFSRatio = 1.0; lmp.MaxCFSSegmentSizeMB = double.PositiveInfinity; IndexWriter w3 = new IndexWriter(dir, conf); w3.AddIndexes(readers); w3.Dispose(); // we should now see segments_X, // segments.gen,_Y.cfs,_Y.cfe, _Z.si Assert.AreEqual(5, dir.ListAll().Length, "Only one compound segment should exist, but got: " + Arrays.ToString(dir.ListAll())); dir.Dispose(); } [CodecName("NotRegistered")] private sealed class UnRegisteredCodec : FilterCodec { public UnRegisteredCodec() : base(new Lucene46Codec()) { } } /* * simple test that ensures we getting expected exceptions */ [Test] public virtual void TestAddIndexMissingCodec() { BaseDirectoryWrapper toAdd = NewDirectory(); // Disable checkIndex, else we get an exception because // of the unregistered codec: toAdd.CheckIndexOnDispose = false; { IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); conf.SetCodec(new UnRegisteredCodec()); using (var w = new IndexWriter(toAdd, conf)) { Document doc = new Document(); FieldType customType = new FieldType(); customType.IsIndexed = true; doc.Add(NewField("foo", "bar", customType)); w.AddDocument(doc); } } { using (Directory dir = NewDirectory()) { IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); conf.SetCodec(TestUtil.AlwaysPostingsFormat(new Pulsing41PostingsFormat(1 + Random.Next(20)))); IndexWriter w = new IndexWriter(dir, conf); try { w.AddIndexes(toAdd); Assert.Fail("no 
such codec"); } #pragma warning disable 168 catch (ArgumentException ex) #pragma warning restore 168 { // expected } finally { w.Dispose(); } using (IndexReader open = DirectoryReader.Open(dir)) { Assert.AreEqual(0, open.NumDocs); } } } try { DirectoryReader.Open(toAdd); Assert.Fail("no such codec"); } #pragma warning disable 168 catch (ArgumentException ex) #pragma warning restore 168 { // expected } toAdd.Dispose(); } // LUCENE-3575 [Test] public virtual void TestFieldNamesChanged() { Directory d1 = NewDirectory(); RandomIndexWriter w = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, d1); Document doc = new Document(); doc.Add(NewStringField("f1", "doc1 field1", Field.Store.YES)); doc.Add(NewStringField("id", "1", Field.Store.YES)); w.AddDocument(doc); IndexReader r1 = w.GetReader(); w.Dispose(); Directory d2 = NewDirectory(); w = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, d2); doc = new Document(); doc.Add(NewStringField("f2", "doc2 field2", Field.Store.YES)); doc.Add(NewStringField("id", "2", Field.Store.YES)); w.AddDocument(doc); IndexReader r2 = w.GetReader(); w.Dispose(); Directory d3 = NewDirectory(); w = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, d3); w.AddIndexes(r1, r2); r1.Dispose(); d1.Dispose(); r2.Dispose(); d2.Dispose(); IndexReader r3 = w.GetReader(); w.Dispose(); Assert.AreEqual(2, r3.NumDocs); for (int docID = 0; docID < 2; docID++) { Document d = r3.Document(docID); if (d.Get("id").Equals("1", StringComparison.Ordinal)) { Assert.AreEqual("doc1 field1", d.Get("f1")); } else { Assert.AreEqual("doc2 field2", d.Get("f2")); } } r3.Dispose(); d3.Dispose(); } [Test] public virtual void TestAddEmpty() { Directory d1 = NewDirectory(); RandomIndexWriter w = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, d1); MultiReader empty = new MultiReader(); w.AddIndexes(empty); w.Dispose(); 
DirectoryReader dr = DirectoryReader.Open(d1); foreach (AtomicReaderContext ctx in dr.Leaves) { Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes"); } dr.Dispose(); d1.Dispose(); } // Currently it's impossible to end up with a segment with all documents // deleted, as such segments are dropped. Still, to validate that addIndexes // works with such segments, or readers that end up in such state, we fake an // all deleted segment. [Test] public virtual void TestFakeAllDeleted() { Directory src = NewDirectory(), dest = NewDirectory(); RandomIndexWriter w = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, src); w.AddDocument(new Document()); IndexReader allDeletedReader = new AllDeletedFilterReader((AtomicReader)w.GetReader().Leaves[0].Reader); w.Dispose(); w = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, dest); w.AddIndexes(allDeletedReader); w.Dispose(); DirectoryReader dr = DirectoryReader.Open(src); foreach (AtomicReaderContext ctx in dr.Leaves) { Assert.IsTrue(ctx.Reader.MaxDoc > 0, "empty segments should be dropped by addIndexes"); } dr.Dispose(); allDeletedReader.Dispose(); src.Dispose(); dest.Dispose(); } /// <summary> /// Make sure an open IndexWriter on an incoming Directory /// causes a LockObtainFailedException /// </summary> [Test] public virtual void TestLocksBlock() { Directory src = NewDirectory(); RandomIndexWriter w1 = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, src); w1.AddDocument(new Document()); w1.Commit(); Directory dest = NewDirectory(); IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); iwc.SetWriteLockTimeout(1); RandomIndexWriter w2 = new RandomIndexWriter(Random, dest, iwc); try { w2.AddIndexes(src); Assert.Fail("did not hit expected exception"); } #pragma warning disable 168 catch (LockObtainFailedException lofe) #pragma warning restore 
168 { // expected } IOUtils.Dispose(w1, w2, src, dest); } } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.ComponentModel;
using Microsoft.Win32;
using Microsoft.Win32.SafeHandles;
using System.IO;

namespace System.Management.Automation
{
    /// <summary>
    /// These are platform abstractions and platform specific implementations.
    /// </summary>
    public static class Platform
    {
        // Lazily created per-process temp directory; see GetTemporaryDirectory().
        private static string _tempDirectory = null;

        /// <summary>
        /// True if the current platform is Linux.
        /// </summary>
        public static bool IsLinux
        {
            get
            {
                return RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
            }
        }

        /// <summary>
        /// True if the current platform is macOS.
        /// </summary>
        public static bool IsMacOS
        {
            get
            {
                return RuntimeInformation.IsOSPlatform(OSPlatform.OSX);
            }
        }

        /// <summary>
        /// True if the current platform is Windows.
        /// </summary>
        public static bool IsWindows
        {
            get
            {
                return RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
            }
        }

        /// <summary>
        /// True if PowerShell was built targeting .NET Core.
        /// </summary>
        public static bool IsCoreCLR
        {
            get
            {
                return true;
            }
        }

        /// <summary>
        /// True if the underlying system is NanoServer.
        /// Cached after the first (registry) lookup.
        /// </summary>
        public static bool IsNanoServer
        {
            get
            {
#if UNIX
                return false;
#else
                if (_isNanoServer.HasValue)
                {
                    return _isNanoServer.Value;
                }

                _isNanoServer = false;
                // The ServerLevels key only exists on server SKUs; a DWORD
                // "NanoServer" value of 1 identifies Nano Server.
                using (RegistryKey regKey = Registry.LocalMachine.OpenSubKey(@"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Server\ServerLevels"))
                {
                    if (regKey != null)
                    {
                        object value = regKey.GetValue("NanoServer");
                        if (value != null && regKey.GetValueKind("NanoServer") == RegistryValueKind.DWord)
                        {
                            _isNanoServer = (int)value == 1;
                        }
                    }
                }

                return _isNanoServer.Value;
#endif
            }
        }

        /// <summary>
        /// True if the underlying system is IoT.
        /// Cached after the first (registry) lookup.
        /// </summary>
        public static bool IsIoT
        {
            get
            {
#if UNIX
                return false;
#else
                if (_isIoT.HasValue)
                {
                    return _isIoT.Value;
                }

                _isIoT = false;
                using (RegistryKey regKey = Registry.LocalMachine.OpenSubKey(@"SOFTWARE\Microsoft\Windows NT\CurrentVersion"))
                {
                    if (regKey != null)
                    {
                        object value = regKey.GetValue("ProductName");
                        if (value != null && regKey.GetValueKind("ProductName") == RegistryValueKind.String)
                        {
                            _isIoT = string.Equals("IoTUAP", (string)value, StringComparison.OrdinalIgnoreCase);
                        }
                    }
                }

                return _isIoT.Value;
#endif
            }
        }

        /// <summary>
        /// True if underlying system is Windows Desktop (i.e. neither NanoServer nor IoT).
        /// </summary>
        public static bool IsWindowsDesktop
        {
            get
            {
#if UNIX
                return false;
#else
                if (_isWindowsDesktop.HasValue)
                {
                    return _isWindowsDesktop.Value;
                }

                _isWindowsDesktop = !IsNanoServer && !IsIoT;
                return _isWindowsDesktop.Value;
#endif
            }
        }

#if !UNIX
        // Backing caches for the SKU checks above.
        private static bool? _isNanoServer = null;
        private static bool? _isIoT = null;
        private static bool? _isWindowsDesktop = null;
#endif

        // format files shipped with PowerShell
        internal static List<string> FormatFileNames = new List<string>
        {
            "Certificate.format.ps1xml",
            "Diagnostics.format.ps1xml",
            "DotNetTypes.format.ps1xml",
            "Event.format.ps1xml",
            "FileSystem.format.ps1xml",
            "Help.format.ps1xml",
            "HelpV3.format.ps1xml",
            "PowerShellCore.format.ps1xml",
            "PowerShellTrace.format.ps1xml",
            "Registry.format.ps1xml",
            "WSMan.format.ps1xml"
        };

        /// <summary>
        /// Some common environment variables used in PS have different
        /// names in different OS platforms.
        /// </summary>
        internal static class CommonEnvVariableNames
        {
#if UNIX
            internal const string Home = "HOME";
#else
            internal const string Home = "USERPROFILE";
#endif
        }

        /// <summary>
        /// Remove the temporary directory created for the current process.
        /// Best-effort: failures to delete are deliberately ignored.
        /// </summary>
        internal static void RemoveTemporaryDirectory()
        {
            if (_tempDirectory == null)
            {
                return;
            }

            try
            {
                Directory.Delete(_tempDirectory, true);
            }
            catch
            {
                // ignore if there is a failure
            }

            _tempDirectory = null;
        }

        /// <summary>
        /// Get a temporary directory to use for the current process.
        /// The path is created once and cached for the process lifetime.
        /// </summary>
        internal static string GetTemporaryDirectory()
        {
            if (_tempDirectory != null)
            {
                return _tempDirectory;
            }

            _tempDirectory = PsUtils.GetTemporaryDirectory();
            return _tempDirectory;
        }

#if UNIX
        /// <summary>
        /// X Desktop Group configuration type enum.
        /// </summary>
        public enum XDG_Type
        {
            /// <summary> XDG_CONFIG_HOME/powershell </summary>
            CONFIG,
            /// <summary> XDG_CACHE_HOME/powershell </summary>
            CACHE,
            /// <summary> XDG_DATA_HOME/powershell </summary>
            DATA,
            /// <summary> XDG_DATA_HOME/powershell/Modules </summary>
            USER_MODULES,
            /// <summary> /usr/local/share/powershell/Modules </summary>
            SHARED_MODULES,
            /// <summary> XDG_CONFIG_HOME/powershell </summary>
            DEFAULT
        }

        /// <summary>
        /// Function for choosing directory location of PowerShell for profile loading.
        /// Honors the XDG_* environment variables when set; otherwise falls back to
        /// conventional paths under the user's home directory. When a directory
        /// cannot be created (e.g. service accounts), falls back to the process
        /// temporary directory.
        /// </summary>
        /// <param name="dirpath">Which XDG location to resolve.</param>
        /// <returns>The resolved absolute directory path.</returns>
        public static string SelectProductNameForDirectory(Platform.XDG_Type dirpath)
        {
            // TODO: XDG_DATA_DIRS implementation as per GitHub issue #1060

            string xdgconfighome = System.Environment.GetEnvironmentVariable("XDG_CONFIG_HOME");
            string xdgdatahome = System.Environment.GetEnvironmentVariable("XDG_DATA_HOME");
            string xdgcachehome = System.Environment.GetEnvironmentVariable("XDG_CACHE_HOME");
            string envHome = System.Environment.GetEnvironmentVariable(CommonEnvVariableNames.Home);
            if (envHome == null)
            {
                envHome = GetTemporaryDirectory();
            }

            string xdgConfigHomeDefault = Path.Combine(envHome, ".config", "powershell");
            string xdgDataHomeDefault = Path.Combine(envHome, ".local", "share", "powershell");
            string xdgModuleDefault = Path.Combine(xdgDataHomeDefault, "Modules");
            string xdgCacheDefault = Path.Combine(envHome, ".cache", "powershell");

            switch (dirpath)
            {
                case Platform.XDG_Type.CONFIG:
                    // the user has set XDG_CONFIG_HOME corresponding to profile path
                    if (string.IsNullOrEmpty(xdgconfighome))
                    {
                        // xdg values have not been set
                        return xdgConfigHomeDefault;
                    }
                    else
                    {
                        return Path.Combine(xdgconfighome, "powershell");
                    }

                case Platform.XDG_Type.DATA:
                    // the user has set XDG_DATA_HOME corresponding to module path
                    if (string.IsNullOrEmpty(xdgdatahome))
                    {
                        // create the xdg folder if needed
                        if (!Directory.Exists(xdgDataHomeDefault))
                        {
                            try
                            {
                                Directory.CreateDirectory(xdgDataHomeDefault);
                            }
                            catch (UnauthorizedAccessException)
                            {
                                // service accounts won't have permission to create user folder
                                return GetTemporaryDirectory();
                            }
                        }

                        return xdgDataHomeDefault;
                    }
                    else
                    {
                        return Path.Combine(xdgdatahome, "powershell");
                    }

                case Platform.XDG_Type.USER_MODULES:
                    // the user has set XDG_DATA_HOME corresponding to module path
                    if (string.IsNullOrEmpty(xdgdatahome))
                    {
                        // xdg values have not been set
                        if (!Directory.Exists(xdgModuleDefault)) // module folder not always guaranteed to exist
                        {
                            try
                            {
                                Directory.CreateDirectory(xdgModuleDefault);
                            }
                            catch (UnauthorizedAccessException)
                            {
                                // service accounts won't have permission to create user folder
                                return GetTemporaryDirectory();
                            }
                        }

                        return xdgModuleDefault;
                    }
                    else
                    {
                        return Path.Combine(xdgdatahome, "powershell", "Modules");
                    }

                case Platform.XDG_Type.SHARED_MODULES:
                    return "/usr/local/share/powershell/Modules";

                case Platform.XDG_Type.CACHE:
                    // the user has set XDG_CACHE_HOME
                    if (string.IsNullOrEmpty(xdgcachehome))
                    {
                        // xdg values have not been set
                        if (!Directory.Exists(xdgCacheDefault)) // module folder not always guaranteed to exist
                        {
                            try
                            {
                                Directory.CreateDirectory(xdgCacheDefault);
                            }
                            catch (UnauthorizedAccessException)
                            {
                                // service accounts won't have permission to create user folder
                                return GetTemporaryDirectory();
                            }
                        }

                        return xdgCacheDefault;
                    }
                    else
                    {
                        if (!Directory.Exists(Path.Combine(xdgcachehome, "powershell")))
                        {
                            try
                            {
                                Directory.CreateDirectory(Path.Combine(xdgcachehome, "powershell"));
                            }
                            catch (UnauthorizedAccessException)
                            {
                                // service accounts won't have permission to create user folder
                                return GetTemporaryDirectory();
                            }
                        }

                        return Path.Combine(xdgcachehome, "powershell");
                    }

                case Platform.XDG_Type.DEFAULT:
                    // default for profile location
                    return xdgConfigHomeDefault;

                default:
                    // xdgConfigHomeDefault needs to be created in the edge case that we do not have the folder or it was deleted
                    // This folder is the default in the event of all other failures for data storage
                    if (!Directory.Exists(xdgConfigHomeDefault))
                    {
                        try
                        {
                            Directory.CreateDirectory(xdgConfigHomeDefault);
                        }
                        catch
                        {
                            Console.Error.WriteLine("Failed to create default data directory: " + xdgConfigHomeDefault);
                        }
                    }

                    return xdgConfigHomeDefault;
            }
        }
#endif

        /// <summary>
        /// The code is copied from the .NET implementation.
        /// </summary>
        internal static string GetFolderPath(System.Environment.SpecialFolder folder)
        {
            return InternalGetFolderPath(folder);
        }

        /// <summary>
        /// The API set 'api-ms-win-shell-shellfolders-l1-1-0.dll' was removed from NanoServer, so we cannot depend on 'SHGetFolderPathW'
        /// to get the special folder paths. Instead, we need to rely on the basic environment variables to get the special folder paths.
        /// </summary>
        /// <returns>
        /// The path to the specified system special folder, if that folder physically exists on your computer.
        /// Otherwise, an empty string (string.Empty).
        /// </returns>
        private static string InternalGetFolderPath(System.Environment.SpecialFolder folder)
        {
            string folderPath = null;
#if UNIX
            string envHome = System.Environment.GetEnvironmentVariable(Platform.CommonEnvVariableNames.Home);
            if (envHome == null)
            {
                envHome = Platform.GetTemporaryDirectory();
            }

            switch (folder)
            {
                case System.Environment.SpecialFolder.ProgramFiles:
                    folderPath = "/bin";
                    if (!System.IO.Directory.Exists(folderPath)) { folderPath = null; }

                    break;
                case System.Environment.SpecialFolder.ProgramFilesX86:
                    folderPath = "/usr/bin";
                    if (!System.IO.Directory.Exists(folderPath)) { folderPath = null; }

                    break;
                case System.Environment.SpecialFolder.System:
                case System.Environment.SpecialFolder.SystemX86:
                    folderPath = "/sbin";
                    if (!System.IO.Directory.Exists(folderPath)) { folderPath = null; }

                    break;
                case System.Environment.SpecialFolder.Personal:
                    folderPath = envHome;
                    break;
                case System.Environment.SpecialFolder.LocalApplicationData:
                    folderPath = System.IO.Path.Combine(envHome, ".config");
                    if (!System.IO.Directory.Exists(folderPath))
                    {
                        try
                        {
                            System.IO.Directory.CreateDirectory(folderPath);
                        }
                        catch (UnauthorizedAccessException)
                        {
                            // directory creation may fail if the account doesn't have filesystem permission such as some service accounts
                            folderPath = string.Empty;
                        }
                    }

                    break;
                default:
                    throw new NotSupportedException();
            }
#else
            folderPath = System.Environment.GetFolderPath(folder);
#endif
            return folderPath ?? string.Empty;
        }

        // Platform methods prefixed NonWindows are:
        // - non-windows by the definition of the IsWindows method above
        // - here, because porting to Linux and other operating systems
        //   should not move the original Windows code out of the module
        //   it belongs to, so this way the windows code can remain in it's
        //   original source file and only the non-windows code has been moved
        //   out here
        // - only to be used with the IsWindows feature query, and only if
        //   no other more specific feature query makes sense
        internal static bool NonWindowsIsHardLink(ref IntPtr handle)
        {
            return Unix.IsHardLink(ref handle);
        }

        internal static bool NonWindowsIsHardLink(FileSystemInfo fileInfo)
        {
            return Unix.IsHardLink(fileInfo);
        }

        internal static bool NonWindowsIsSymLink(FileSystemInfo fileInfo)
        {
            return Unix.NativeMethods.IsSymLink(fileInfo.FullName);
        }

        internal static string NonWindowsInternalGetTarget(string path)
        {
            return Unix.NativeMethods.FollowSymLink(path);
        }

        // NOTE: the parameter is a process id despite its name.
        internal static string NonWindowsGetUserFromPid(int path)
        {
            return Unix.NativeMethods.GetUserFromPid(path);
        }

        /// <summary>
        /// Classify a filesystem item as "SymbolicLink", "HardLink" or null (regular).
        /// </summary>
        internal static string NonWindowsInternalGetLinkType(FileSystemInfo fileInfo)
        {
            if (NonWindowsIsSymLink(fileInfo))
            {
                return "SymbolicLink";
            }

            if (NonWindowsIsHardLink(fileInfo))
            {
                return "HardLink";
            }

            return null;
        }

        internal static bool NonWindowsCreateSymbolicLink(string path, string target)
        {
            // Linux doesn't care if target is a directory or not
            return Unix.NativeMethods.CreateSymLink(path, target) == 0;
        }

        internal static bool NonWindowsCreateHardLink(string path, string strTargetPath)
        {
            return Unix.NativeMethods.CreateHardLink(path, strTargetPath) == 0;
        }

        internal static unsafe bool NonWindowsSetDate(DateTime dateToUse)
        {
            Unix.NativeMethods.UnixTm tm = Unix.NativeMethods.DateTimeToUnixTm(dateToUse);
            return Unix.NativeMethods.SetDate(&tm) == 0;
        }

        // Hostname in this context seems to be the FQDN
        internal static string NonWindowsGetHostName()
        {
            return Unix.NativeMethods.GetFullyQualifiedName() ?? string.Empty;
        }

        internal static bool NonWindowsIsSameFileSystemItem(string pathOne, string pathTwo)
        {
            return Unix.NativeMethods.IsSameFileSystemItem(pathOne, pathTwo);
        }

        internal static bool NonWindowsGetInodeData(string path, out System.ValueTuple<UInt64, UInt64> inodeData)
        {
            UInt64 device = 0UL;
            UInt64 inode = 0UL;
            var result = Unix.NativeMethods.GetInodeData(path, out device, out inode);

            inodeData = (device, inode);
            return result == 0;
        }

        internal static bool NonWindowsIsExecutable(string path)
        {
            return Unix.NativeMethods.IsExecutable(path);
        }

        internal static uint NonWindowsGetThreadId()
        {
            return Unix.NativeMethods.GetCurrentThreadId();
        }

        internal static int NonWindowsGetProcessParentPid(int pid)
        {
            return IsMacOS ? Unix.NativeMethods.GetPPid(pid) : Unix.GetProcFSParentPid(pid);
        }

        // Unix specific implementations of required functionality
        //
        // Please note that `Win32Exception(Marshal.GetLastWin32Error())`
        // works *correctly* on Linux in that it creates an exception with
        // the string perror would give you for the last set value of errno.
        // No manual mapping is required. .NET Core maps the Linux errno
        // to a PAL value and calls strerror_r underneath to generate the message.
        internal static class Unix
        {
            // This is a helper that attempts to map errno into a PowerShell ErrorCategory
            internal static ErrorCategory GetErrorCategory(int errno)
            {
                return (ErrorCategory)Unix.NativeMethods.GetErrorCategory(errno);
            }

            private static string s_userName;

            /// <summary>
            /// The current user's name, looked up once via native code and cached.
            /// </summary>
            public static string UserName
            {
                get
                {
                    if (string.IsNullOrEmpty(s_userName))
                    {
                        s_userName = NativeMethods.GetUserName();
                    }

                    return s_userName ?? string.Empty;
                }
            }

            /// <summary>
            /// First non-empty POSIX temp-dir environment variable, or "/tmp".
            /// </summary>
            public static string TemporaryDirectory
            {
                get
                {
                    // POSIX temporary directory environment variables
                    string[] environmentVariables = { "TMPDIR", "TMP", "TEMP", "TEMPDIR" };
                    string dir = string.Empty;
                    foreach (string s in environmentVariables)
                    {
                        dir = System.Environment.GetEnvironmentVariable(s);
                        if (!string.IsNullOrEmpty(dir))
                        {
                            return dir;
                        }
                    }

                    return "/tmp";
                }
            }

            public static bool IsHardLink(ref IntPtr handle)
            {
                // TODO:PSL implement using fstat to query inode refcount to see if it is a hard link
                return false;
            }

            /// <summary>
            /// True if the item is a regular file with more than one link to its inode.
            /// Directories never report as hard links.
            /// </summary>
            public static bool IsHardLink(FileSystemInfo fs)
            {
                if (!fs.Exists || (fs.Attributes & FileAttributes.Directory) == FileAttributes.Directory)
                {
                    return false;
                }

                int count;
                string filePath = fs.FullName;
                int ret = NativeMethods.GetLinkCount(filePath, out count);
                if (ret == 0)
                {
                    return count > 1;
                }
                else
                {
                    throw new Win32Exception(Marshal.GetLastWin32Error());
                }
            }

            /// <summary>
            /// Read the parent pid of <paramref name="pid"/> from /proc/&lt;pid&gt;/stat.
            /// Returns -1 if the pid cannot be resolved.
            /// </summary>
            public static int GetProcFSParentPid(int pid)
            {
                const int invalidPid = -1;
                // read /proc/<pid>/stat
                // The format (see proc(5)) is:
                //   pid (comm) state ppid ...
                // ex: 93 (bash) S 92 93 2 4294967295 ...
                // ppid is the 4th field, but comm is parenthesized and may itself
                // contain spaces and ')' (e.g. "(tmux: server)"), so a plain
                // space-split mis-parses such names. Parse relative to the LAST
                // ')' instead: after it come "state ppid ..." separated by spaces.
                var path = $"/proc/{pid}/stat";
                try
                {
                    var stat = System.IO.File.ReadAllText(path);
                    int commEnd = stat.LastIndexOf(')');
                    if (commEnd < 0)
                    {
                        return invalidPid;
                    }

                    var fields = stat.Substring(commEnd + 1).Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
                    // fields[0] = state, fields[1] = ppid
                    if (fields.Length < 2)
                    {
                        return invalidPid;
                    }

                    return Int32.Parse(fields[1]);
                }
                catch (Exception)
                {
                    return invalidPid;
                }
            }

            internal static class NativeMethods
            {
                private const string psLib = "libpsl-native";

                // Ansi is a misnomer, it is hardcoded to UTF-8 on Linux and macOS

                // C bools are 1 byte and so must be marshaled as I1

                [DllImport(psLib, CharSet = CharSet.Ansi)]
                internal static extern int GetErrorCategory(int errno);

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                [return: MarshalAs(UnmanagedType.LPStr)]
                internal static extern string GetUserName();

                [DllImport(psLib)]
                internal static extern int GetPPid(int pid);

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                internal static extern int GetLinkCount([MarshalAs(UnmanagedType.LPStr)]string filePath, out int linkCount);

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                [return: MarshalAs(UnmanagedType.I1)]
                internal static extern bool IsSymLink([MarshalAs(UnmanagedType.LPStr)]string filePath);

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                [return: MarshalAs(UnmanagedType.I1)]
                internal static extern bool IsExecutable([MarshalAs(UnmanagedType.LPStr)]string filePath);

                [DllImport(psLib, CharSet = CharSet.Ansi)]
                internal static extern uint GetCurrentThreadId();

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                [return: MarshalAs(UnmanagedType.LPStr)]
                internal static extern string GetFullyQualifiedName();

                // This is a struct tm from <time.h>
                [StructLayout(LayoutKind.Sequential)]
                internal unsafe struct UnixTm
                {
                    public int tm_sec;    /* Seconds (0-60) */
                    public int tm_min;    /* Minutes (0-59) */
                    public int tm_hour;   /* Hours (0-23) */
                    public int tm_mday;   /* Day of the month (1-31) */
                    public int tm_mon;    /* Month (0-11) */
                    public int tm_year;   /* Year - 1900 */
                    public int tm_wday;   /* Day of the week (0-6, Sunday = 0) */
                    public int tm_yday;   /* Day in the year (0-365, 1 Jan = 0) */
                    public int tm_isdst;  /* Daylight saving time */
                }

                internal static UnixTm DateTimeToUnixTm(DateTime date)
                {
                    UnixTm tm;
                    tm.tm_sec = date.Second;
                    tm.tm_min = date.Minute;
                    tm.tm_hour = date.Hour;
                    tm.tm_mday = date.Day;
                    tm.tm_mon = date.Month - 1; // needs to be 0 indexed
                    tm.tm_year = date.Year - 1900; // years since 1900
                    tm.tm_wday = 0; // this is ignored by mktime
                    tm.tm_yday = 0; // this is also ignored
                    tm.tm_isdst = date.IsDaylightSavingTime() ? 1 : 0;
                    return tm;
                }

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                internal static extern unsafe int SetDate(UnixTm* tm);

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                internal static extern int CreateSymLink([MarshalAs(UnmanagedType.LPStr)]string filePath, [MarshalAs(UnmanagedType.LPStr)]string target);

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                internal static extern int CreateHardLink([MarshalAs(UnmanagedType.LPStr)]string filePath, [MarshalAs(UnmanagedType.LPStr)]string target);

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                [return: MarshalAs(UnmanagedType.LPStr)]
                internal static extern string FollowSymLink([MarshalAs(UnmanagedType.LPStr)]string filePath);

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                [return: MarshalAs(UnmanagedType.LPStr)]
                internal static extern string GetUserFromPid(int pid);

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                [return: MarshalAs(UnmanagedType.I1)]
                internal static extern bool IsSameFileSystemItem([MarshalAs(UnmanagedType.LPStr)]string filePathOne, [MarshalAs(UnmanagedType.LPStr)]string filePathTwo);

                [DllImport(psLib, CharSet = CharSet.Ansi, SetLastError = true)]
                internal static extern int GetInodeData([MarshalAs(UnmanagedType.LPStr)]string path, out UInt64 device, out UInt64 inode);
            }
        }
    }
}
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.

using Avalonia.Utilities;
using System;
using System.Globalization;
using System.Linq;

namespace Avalonia
{
    /// <summary>
    /// Defines a size.
    /// </summary>
    public struct Size : IEquatable<Size>
    {
        /// <summary>
        /// A size representing infinity.
        /// </summary>
        public static readonly Size Infinity = new Size(double.PositiveInfinity, double.PositiveInfinity);

        /// <summary>
        /// A size representing zero
        /// </summary>
        public static readonly Size Empty = new Size(0, 0);

        /// <summary>
        /// The width.
        /// </summary>
        private readonly double _width;

        /// <summary>
        /// The height.
        /// </summary>
        private readonly double _height;

        /// <summary>
        /// Initializes a new instance of the <see cref="Size"/> structure.
        /// </summary>
        /// <param name="width">The width.</param>
        /// <param name="height">The height.</param>
        public Size(double width, double height)
        {
            _width = width;
            _height = height;
        }

        /// <summary>
        /// Gets the aspect ratio of the size (width divided by height).
        /// </summary>
        public double AspectRatio => _width / _height;

        /// <summary>
        /// Gets the width.
        /// </summary>
        public double Width => _width;

        /// <summary>
        /// Gets the height.
        /// </summary>
        public double Height => _height;

        /// <summary>
        /// Checks for equality between two <see cref="Size"/>s.
        /// </summary>
        /// <param name="left">The first size.</param>
        /// <param name="right">The second size.</param>
        /// <returns>True if the sizes are equal; otherwise false.</returns>
        public static bool operator ==(Size left, Size right)
        {
            return left._width == right._width && left._height == right._height;
        }

        /// <summary>
        /// Checks for inequality between two <see cref="Size"/>s.
        /// </summary>
        /// <param name="left">The first size.</param>
        /// <param name="right">The second size.</param>
        /// <returns>True if the sizes are unequal; otherwise false.</returns>
        public static bool operator !=(Size left, Size right)
        {
            return !(left == right);
        }

        /// <summary>
        /// Scales a size.
        /// </summary>
        /// <param name="size">The size</param>
        /// <param name="scale">The scaling factor.</param>
        /// <returns>The scaled size.</returns>
        public static Size operator *(Size size, Vector scale)
        {
            return new Size(size._width * scale.X, size._height * scale.Y);
        }

        /// <summary>
        /// Scales a size.
        /// </summary>
        /// <param name="size">The size</param>
        /// <param name="scale">The scaling factor.</param>
        /// <returns>The scaled size.</returns>
        public static Size operator /(Size size, Vector scale)
        {
            return new Size(size._width / scale.X, size._height / scale.Y);
        }

        /// <summary>
        /// Divides a size by another size to produce a scaling factor.
        /// </summary>
        /// <param name="left">The first size</param>
        /// <param name="right">The second size.</param>
        /// <returns>The scaled size.</returns>
        public static Vector operator /(Size left, Size right)
        {
            return new Vector(left._width / right._width, left._height / right._height);
        }

        /// <summary>
        /// Scales a size.
        /// </summary>
        /// <param name="size">The size</param>
        /// <param name="scale">The scaling factor.</param>
        /// <returns>The scaled size.</returns>
        public static Size operator *(Size size, double scale)
        {
            return new Size(size._width * scale, size._height * scale);
        }

        /// <summary>
        /// Scales a size.
        /// </summary>
        /// <param name="size">The size</param>
        /// <param name="scale">The scaling factor.</param>
        /// <returns>The scaled size.</returns>
        public static Size operator /(Size size, double scale)
        {
            return new Size(size._width / scale, size._height / scale);
        }

        /// <summary>
        /// Adds two sizes component-wise.
        /// </summary>
        /// <param name="size">The first size.</param>
        /// <param name="toAdd">The size to add.</param>
        /// <returns>The summed size.</returns>
        public static Size operator +(Size size, Size toAdd)
        {
            return new Size(size._width + toAdd._width, size._height + toAdd._height);
        }

        /// <summary>
        /// Subtracts one size from another component-wise.
        /// </summary>
        /// <param name="size">The first size.</param>
        /// <param name="toSubstract">The size to subtract.</param>
        /// <returns>The difference size.</returns>
        public static Size operator -(Size size, Size toSubstract)
        {
            return new Size(size._width - toSubstract._width, size._height - toSubstract._height);
        }

        /// <summary>
        /// Parses a <see cref="Size"/> string.
        /// </summary>
        /// <param name="s">The string.</param>
        /// <returns>The <see cref="Size"/>.</returns>
        public static Size Parse(string s)
        {
            using (var tokenizer = new StringTokenizer(s, CultureInfo.InvariantCulture, exceptionMessage: "Invalid Size"))
            {
                return new Size(
                    tokenizer.ReadDouble(),
                    tokenizer.ReadDouble());
            }
        }

        /// <summary>
        /// Constrains the size.
        /// </summary>
        /// <param name="constraint">The size to constrain to.</param>
        /// <returns>The constrained size.</returns>
        public Size Constrain(Size constraint)
        {
            return new Size(
                Math.Min(_width, constraint._width),
                Math.Min(_height, constraint._height));
        }

        /// <summary>
        /// Deflates the size by a <see cref="Thickness"/>.
        /// </summary>
        /// <param name="thickness">The thickness.</param>
        /// <returns>The deflated size.</returns>
        /// <remarks>The deflated size cannot be less than 0.</remarks>
        public Size Deflate(Thickness thickness)
        {
            return new Size(
                Math.Max(0, _width - thickness.Left - thickness.Right),
                Math.Max(0, _height - thickness.Top - thickness.Bottom));
        }

        /// <summary>
        /// Checks for equality between this size and another <see cref="Size"/>
        /// without boxing.
        /// </summary>
        /// <param name="other">The other size.</param>
        /// <returns>True if the sizes are equal; otherwise false.</returns>
        public bool Equals(Size other)
        {
            return _width == other._width && _height == other._height;
        }

        /// <summary>
        /// Checks for equality between a size and an object.
        /// </summary>
        /// <param name="obj">The object.</param>
        /// <returns>
        /// True if <paramref name="obj"/> is a size that equals the current size.
        /// </returns>
        public override bool Equals(object obj)
        {
            return obj is Size && Equals((Size)obj);
        }

        /// <summary>
        /// Returns a hash code for a <see cref="Size"/>.
        /// </summary>
        /// <returns>The hash code.</returns>
        public override int GetHashCode()
        {
            unchecked
            {
                int hash = 17;
                hash = (hash * 23) + Width.GetHashCode();
                hash = (hash * 23) + Height.GetHashCode();
                return hash;
            }
        }

        /// <summary>
        /// Inflates the size by a <see cref="Thickness"/>.
        /// </summary>
        /// <param name="thickness">The thickness.</param>
        /// <returns>The inflated size.</returns>
        public Size Inflate(Thickness thickness)
        {
            return new Size(
                _width + thickness.Left + thickness.Right,
                _height + thickness.Top + thickness.Bottom);
        }

        /// <summary>
        /// Returns a new <see cref="Size"/> with the same height and the specified width.
        /// </summary>
        /// <param name="width">The width.</param>
        /// <returns>The new <see cref="Size"/>.</returns>
        public Size WithWidth(double width)
        {
            return new Size(width, _height);
        }

        /// <summary>
        /// Returns a new <see cref="Size"/> with the same width and the specified height.
        /// </summary>
        /// <param name="height">The height.</param>
        /// <returns>The new <see cref="Size"/>.</returns>
        public Size WithHeight(double height)
        {
            return new Size(_width, height);
        }

        /// <summary>
        /// Returns the string representation of the size.
        /// </summary>
        /// <returns>The string representation of the size.</returns>
        public override string ToString()
        {
            return string.Format(CultureInfo.InvariantCulture, "{0}, {1}", _width, _height);
        }
    }
}
using System;
using System.Net;
using Ascon.Pilot.Core;
using Ascon.Pilot.Server.Api.Contracts;
using Ascon.Pilot.Transport;

namespace Ascon.Pilot.Server.Api
{
    /// <summary>
    /// Client for the Pilot server HTTP transport. Wraps a <see cref="TransportClient"/>
    /// and hands out marshalled proxies for the server APIs. Implements
    /// <see cref="IImplementationFactory"/> so the unmarshaller can resolve the
    /// callback objects registered via the GetServer*Api methods.
    /// </summary>
    public class HttpPilotClient : IImplementationFactory, IDisposable
    {
        private readonly TransportClient _client;
        private readonly CallbackReceiverAdapter _transportCallback;
        private readonly IGetService _marshaller;
        private readonly ICallService _unmarshaller;
        // Callback implementations registered by GetServerApi / GetServerAdminApi /
        // GetServerUpdateApi; resolved later by GetImplementation().
        private IServerCallback _serverCallback;
        private IServerAdminCallback _adminCallback;
        private IServerUpdateCallback _updateCallback;
        // When true, Connect() refuses to run (see BreakConnection/RestoreConnection).
        private bool _isBroken;
        private IConnectionLostListener _connectionLostListener;

        /// <summary>
        /// Creates a client using the default <see cref="MarshallingFactory"/>.
        /// </summary>
        public HttpPilotClient() : this(new MarshallingFactory())
        {
        }

        /// <summary>
        /// Creates a client using the given marshalling factory; does not connect.
        /// </summary>
        public HttpPilotClient(IMarshallingFactory factory)
        {
            _client = new TransportClient();
            _marshaller = factory.GetMarshaller(new CallServiceAdapter(_client));
            _unmarshaller = factory.GetUnmarshaller(this);
            _transportCallback = new CallbackReceiverAdapter(_unmarshaller, CallbackError);
        }

        /// <summary>
        /// Init new HttpPilotClient instance with Marshalling factory as default.
        /// Connect with specified credentials to server
        /// </summary>
        /// <param name="credentials"></param>
        public HttpPilotClient(ConnectionCredentials credentials) : this(credentials, new MarshallingFactory())
        {
        }

        /// <summary>
        /// Creates a client with the given factory and immediately connects
        /// using the supplied credentials.
        /// </summary>
        public HttpPilotClient(ConnectionCredentials credentials, IMarshallingFactory factory) : this(factory)
        {
            Connect(credentials);
        }

        /// <summary>
        /// Registers the listener notified when the server drops the connection.
        /// </summary>
        public void SetConnectionLostListener(IConnectionLostListener connectionLostListener)
        {
            _connectionLostListener = connectionLostListener;
        }

        // Invoked by the transport callback adapter when the callback channel errors.
        private void CallbackError()
        {
            if (_connectionLostListener != null)
                _connectionLostListener.ConnectionLost(new TransportException("Client connection is disconnected by server."));
        }

        /// <summary>
        /// Disposes the underlying transport client.
        /// </summary>
        public void Dispose()
        {
            _client.Dispose();
        }

        /// <summary>
        /// Disconnects and marks the connection broken; subsequent Connect calls
        /// throw until <see cref="RestoreConnection"/> is called. Presumably used
        /// to simulate connection loss in tests — confirm with callers.
        /// </summary>
        public void BreakConnection()
        {
            _isBroken = true;
            _client.Disconnect();
        }

        /// <summary>
        /// Clears the broken flag set by <see cref="BreakConnection"/>.
        /// </summary>
        public void RestoreConnection()
        {
            _isBroken = false;
        }

        // Guard used by Connect(); throws while the connection is marked broken.
        private void CheckBroken()
        {
            if (_isBroken)
                throw new TransportException("Connection is not available");
        }

        /// <summary>
        /// True if the underlying transport client is active.
        /// </summary>
        public bool IsClientActive()
        {
            return _client.Active;
        }

        /// <summary>
        /// Connects to the server at <paramref name="url"/> (optionally through a
        /// proxy) and opens the callback channel.
        /// </summary>
        public void Connect(string url, IWebProxy proxy = null)
        {
            CheckBroken();
            _client.SetProxy(proxy);
            _client.Connect(url);
            _client.OpenCallback(_transportCallback);
        }

        /// <summary>
        /// Connects using the connection string derived from the credentials.
        /// </summary>
        public void Connect(ConnectionCredentials credentials)
        {
            Connect(credentials.GetConnectionString());
        }

        /// <summary>
        /// Disconnects the underlying transport client.
        /// </summary>
        public void Disconnect()
        {
            _client.Disconnect();
        }

        /// <summary>
        /// Registers the server callback and returns a marshalled IServerApi proxy.
        /// </summary>
        public IServerApi GetServerApi(IServerCallback callback)
        {
            _serverCallback = callback;
            return _marshaller.Get<IServerApi>();
        }

        /// <summary>
        /// Registers the admin callback and returns a marshalled IServerAdminApi proxy.
        /// </summary>
        public IServerAdminApi GetServerAdminApi(IServerAdminCallback callback)
        {
            _adminCallback = callback;
            return _marshaller.Get<IServerAdminApi>();
        }

        /// <summary>
        /// Registers the update callback and returns a marshalled IServerUpdateApi proxy.
        /// </summary>
        public IServerUpdateApi GetServerUpdateApi(IServerUpdateCallback callback)
        {
            _updateCallback = callback;
            return _marshaller.Get<IServerUpdateApi>();
        }

        /// <summary>
        /// Returns a marshalled IImportApi proxy.
        /// </summary>
        public IImportApi GetImportApi()
        {
            return _marshaller.Get<IImportApi>();
        }

        /// <summary>
        /// Returns a file-archive proxy that forces every call to use
        /// <paramref name="database"/> regardless of the database name passed
        /// by the caller.
        /// </summary>
        public IFileArchiveApi GetFileArchiveApi(string database)
        {
            return new DataBaseNameSubstitutor(_marshaller.Get<IFileArchiveApi>(), database);
        }

        // Adapts TransportClient.Call to the ICallService interface expected
        // by the marshaller.
        private class CallServiceAdapter : ICallService
        {
            private readonly TransportClient _client;

            public CallServiceAdapter(TransportClient client)
            {
                _client = client;
            }

            public byte[] Call(byte[] data)
            {
                return _client.Call(data);
            }
        }

        /// <summary>
        /// Resolves a callback implementation by interface name for the
        /// unmarshaller. Throws <see cref="NotImplementedException"/> for
        /// unknown names. May return null if the corresponding GetServer*Api
        /// method was never called.
        /// </summary>
        public object GetImplementation(string interfaceName)
        {
            if (interfaceName == "IServerCallback")
                return _serverCallback;
            if (interfaceName == "IServerAdminCallback")
                return _adminCallback;
            if (interfaceName == "IServerUpdateCallback")
                return _updateCallback;
            throw new NotImplementedException(interfaceName);
        }

        // Routes incoming callback payloads to the unmarshaller and transport
        // errors to the supplied error action (CallbackError above).
        private class CallbackReceiverAdapter : ICallbackReceiver
        {
            private readonly ICallService _unmarshaller;
            private readonly Action _action;

            public CallbackReceiverAdapter(ICallService unmarshaller, Action action)
            {
                _unmarshaller = unmarshaller;
                _action = action;
            }

            public void Receive(byte[] data)
            {
                _unmarshaller.Call(data);
            }

            public void Error()
            {
                _action.Invoke();
            }
        }

        // Decorator over IFileArchiveApi that ignores the databaseName argument
        // of every call and substitutes the database fixed at construction time.
        private class DataBaseNameSubstitutor : IFileArchiveApi
        {
            private readonly IFileArchiveApi _api;
            private readonly string _database;

            public DataBaseNameSubstitutor(IFileArchiveApi api, string database)
            {
                _database = database;
                _api = api;
            }

            public byte[] GetFileChunk(string databaseName, Guid id, long pos, int count)
            {
                return _api.GetFileChunk(_database, id, pos, count);
            }

            public void PutFileChunk(string databaseName, Guid id, byte[] buffer, long pos)
            {
                _api.PutFileChunk(_database, id, buffer, pos);
            }

            public long GetFilePosition(string databaseName, Guid id)
            {
                return _api.GetFilePosition(_database, id);
            }

            public void PutFileInArchive(string databaseName, DFileBody fileBody)
            {
                _api.PutFileInArchive(_database, fileBody);
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using FlatRedBall.Graphics;
using Microsoft.Xna.Framework.Graphics;
using FlatRedBall;
using Microsoft.Xna.Framework;
using FlatRedBall.Content;
using FlatRedBall.Content.Scene;
using FlatRedBall.IO;
using FlatRedBall.Input;
using FlatRedBall.Debugging;
using FlatRedBall.Math;
using TMXGlueLib.DataTypes;

// NOTE(review): both branches alias the same type, so TILEMAPS_ALPHA_AND_COLOR
// currently has no effect on the vertex type - presumably the first branch was
// meant to alias a color-capable vertex type. TODO confirm.
#if TILEMAPS_ALPHA_AND_COLOR
using VertexType = Microsoft.Xna.Framework.Graphics.VertexPositionTexture;
#else
using VertexType = Microsoft.Xna.Framework.Graphics.VertexPositionTexture;
#endif

namespace FlatRedBall.TileGraphics
{
    /// <summary>
    /// The axis along which a MapDrawableBatch's tiles are sorted, used to cull
    /// off-screen tiles when drawing. None disables culling.
    /// </summary>
    public enum SortAxis
    {
        None,
        X,
        Y
    }

    /// <summary>
    /// A drawable batch which renders a tile map layer as a set of textured
    /// quads that all share a single texture.
    /// </summary>
    public class MapDrawableBatch : PositionedObject, IVisible, IDrawableBatch
    {
        #region Fields

        // Tileset used to resolve texture coordinates when painting tiles.
        protected Tileset mTileset;

        #region XML Docs
        /// <summary>
        /// The effect used to draw.  Shared by all instances for performance reasons
        /// </summary>
        #endregion
        private static BasicEffect mBasicEffect;
        private static AlphaTestEffect mAlphaTestEffect;

        /// <summary>
        /// The vertices used to draw the map (four per tile).
        /// </summary>
        /// <remarks>
        /// Coordinate order is:
        /// 3   2
        ///
        /// 0   1
        /// </remarks>
        protected VertexType[] mVertices;

        // The single texture shared by every quad in this batch.
        protected Texture2D mTexture;

        #region XML Docs
        /// <summary>
        /// The indices to draw the shape
        /// </summary>
        #endregion
        protected int[] mIndices;

        // Maps a tile name to the ordered indexes of every quad registered under that name.
        Dictionary<string, List<int>> mNamedTileOrderedIndexes = new Dictionary<string, List<int>>();

        // Number of tiles added so far; also the index that will be assigned to the next tile.
        private int mCurrentNumberOfTiles = 0;

        // Color modulation applied at draw time (1 = unmodified).
        public float Red = 1;
        public float Green = 1;
        public float Blue = 1;
        public float Alpha = 1;

        private SortAxis mSortAxis;

        #endregion

        #region Properties

        public List<TMXGlueLib.DataTypes.NamedValue> Properties
        {
            get;
            private set;
        } = new List<TMXGlueLib.DataTypes.NamedValue>();

        /// <summary>
        /// The axis on which tiles are sorted. This is used to perform tile culling for performance.
        /// Setting this to SortAxis.None will turn off culling.
        /// </summary>
        public SortAxis SortAxis
        {
            get { return mSortAxis; }
            set { mSortAxis = value; }
        }

        #region XML Docs
        /// <summary>
        /// Tells the engine that this batch wants to be updated every frame
        /// (see Update, which applies parallax and time-based activity).
        /// </summary>
        #endregion
        public bool UpdateEveryFrame
        {
            get { return true; }
        }

        public float RenderingScale { get; set; }

        public Dictionary<string, List<int>> NamedTileOrderedIndexes
        {
            get { return mNamedTileOrderedIndexes; }
        }

        public bool Visible { get; set; }

        public bool ZBuffered { get; set; }

        /// <summary>
        /// The number of quads allocated (4 vertices each); not necessarily the
        /// number of tiles added so far.
        /// </summary>
        public int QuadCount
        {
            get { return mVertices.Length / 4; }
        }

        public VertexPositionTexture[] Vertices
        {
            get { return mVertices; }
        }

        public Texture2D Texture
        {
            get { return mTexture; }
            set
            {
                if (value == null)
                {
                    throw new Exception("Texture can't be null.");
                }

                // The replacement must match the old dimensions because the
                // existing vertex texture coordinates are normalized to them.
                if (mTexture != null && (mTexture.Width != value.Width || mTexture.Height != value.Height))
                {
                    throw new Exception("New texture must match previous texture dimensions.");
                }

                mTexture = value;
            }
        }

        // Doing these properties this way lets me avoid a computational step of 1 - ParallaxMultiplier in the Update() function
        // To explain the get & set values, algebra:
        // if _parallaxMultiplier = 1 - value (set)
        // then _parallaxMultiplier - 1 = -value
        // so -(_parallaxMultiplier - 1) = value
        // thus -_parallaxMultiplier + 1 = value (get)
        private float _parallaxMultiplierX;
        public float ParallaxMultiplierX
        {
            get { return -_parallaxMultiplierX + 1; }
            set { _parallaxMultiplierX = 1 - value; }
        }

        private float _parallaxMultiplierY;
        public float ParallaxMultiplierY
        {
            get { return -_parallaxMultiplierY + 1; }
            set { _parallaxMultiplierY = 1 - value; }
        }

        // Optional per-batch texture filter override; null means use the global setting.
        public TextureFilter? TextureFilter { get; set; } = null;

        #endregion

        #region Constructor / Initialization

        // this exists purely for Clone
        public MapDrawableBatch()
        {
        }

        public MapDrawableBatch(int numberOfTiles, Texture2D texture)
            : base()
        {
            if (texture == null)
                throw new ArgumentNullException("texture");

            Visible = true;
            InternalInitialize();

            mTexture = texture;
            mVertices = new VertexPositionTexture[4 * numberOfTiles];
            mIndices = new int[6 * numberOfTiles];
        }

        #region XML Docs
        /// <summary>
        /// Create and initialize all assets
        /// </summary>
        #endregion
        public MapDrawableBatch(int numberOfTiles, int textureTileDimensionWidth, int textureTileDimensionHeight, Texture2D texture)
            : base()
        {
            if (texture == null)
                throw new ArgumentNullException("texture");

            Visible = true;
            InternalInitialize();

            mTexture = texture;
            mVertices = new VertexPositionTexture[4 * numberOfTiles];
            mIndices = new int[6 * numberOfTiles];

            mTileset = new Tileset(texture, textureTileDimensionWidth, textureTileDimensionHeight);
        }

        //public MapDrawableBatch(int mapWidth, int mapHeight, float mapTileDimension, int textureTileDimension, string tileSetFilename)
        //    : base()
        //{
        //    InternalInitialize();
        //    mTileset = new Tileset(tileSetFilename, textureTileDimension);
        //    mMapWidth = mapWidth;
        //    mMapHeight = mapHeight;
        //    int numberOfTiles = mapWidth * mapHeight;
        //    // the number of vertices is 4 times the number of tiles (each tile gets 4 vertices)
        //    mVertices = new VertexPositionTexture[4 * numberOfTiles];
        //    // the number of indices is 6 times the number of tiles
        //    mIndices = new short[6 * numberOfTiles];
        //    for(int i = 0; i < mapHeight; i++)
        //    {
        //        for (int j = 0; j < mapWidth; j++)
        //        {
        //            int currentTile = mapHeight * i + j;
        //            int currentVertex = currentTile * 4;
        //            float xOffset = j * mapTileDimension;
        //            float yOffset = i * mapTileDimension;
        //            int currentIndex = currentTile * 6; // 6 indices per tile
        //            // TEMP
        //            Vector2[] coords = mTileset.GetTextureCoordinateVectorsOfTextureIndex(new Random().Next()%4);
        //            // END TEMP
        //            // create vertices
        //            mVertices[currentVertex + 0] = new VertexPositionTexture(new Vector3(xOffset + 0f, yOffset + 0f, 0f), coords[0]);
        //            mVertices[currentVertex + 1] = new VertexPositionTexture(new Vector3(xOffset + mapTileDimension, yOffset + 0f, 0f), coords[1]);
        //            mVertices[currentVertex + 2] = new VertexPositionTexture(new Vector3(xOffset + mapTileDimension, yOffset + mapTileDimension, 0f), coords[2]);
        //            mVertices[currentVertex + 3] = new VertexPositionTexture(new Vector3(xOffset + 0f, yOffset + mapTileDimension, 0f), coords[3]);
        //            // create indices
        //            mIndices[currentIndex + 0] = (short)(currentVertex + 0);
        //            mIndices[currentIndex + 1] = (short)(currentVertex + 1);
        //            mIndices[currentIndex + 2] = (short)(currentVertex + 2);
        //            mIndices[currentIndex + 3] = (short)(currentVertex + 0);
        //            mIndices[currentIndex + 4] = (short)(currentVertex + 2);
        //            mIndices[currentIndex + 5] = (short)(currentVertex + 3);
        //            mCurrentNumberOfTiles++;
        //        }
        //    }
        //    mTexture = FlatRedBallServices.Load<Texture2D>(@"content/tiles");
        //}

        // Lazily creates the effects shared by every instance.
        void InternalInitialize()
        {
            // We're going to share these because creating effects is slow...
            // But is this okay if we tombstone?
            if (mBasicEffect == null)
            {
                mBasicEffect = new BasicEffect(FlatRedBallServices.GraphicsDevice);
                mBasicEffect.VertexColorEnabled = false;
                mBasicEffect.TextureEnabled = true;
            }

            if (mAlphaTestEffect == null)
            {
                mAlphaTestEffect = new AlphaTestEffect(FlatRedBallServices.GraphicsDevice);
                mAlphaTestEffect.Alpha = 1;
                mAlphaTestEffect.VertexColorEnabled = false;
            }

            RenderingScale = 1;
        }

        #endregion

        #region Methods

        /// <summary>
        /// Adds this batch to the SpriteManager so it gets drawn.
        /// </summary>
        public void AddToManagers()
        {
            SpriteManager.AddDrawableBatch(this);
            //SpriteManager.AddPositionedObject(mMapBatch);
        }

        /// <summary>
        /// Adds this batch to the given Layer.
        /// </summary>
        public void AddToManagers(Layer layer)
        {
            SpriteManager.AddToLayer(this, layer);
        }

        /// <summary>
        /// Builds a MapDrawableBatch from a FlatRedBall Scene (.scnx) file,
        /// using the Scene's Sprites as the tiles.
        /// </summary>
        public static MapDrawableBatch FromScnx(string sceneFileName, string contentManagerName, bool verifySameTexturePerLayer)
        {
            // TODO: This line crashes when the path is already absolute!
            string absoluteFileName = FileManager.MakeAbsolute(sceneFileName);

            // TODO: The exception doesn't make sense when the file type is wrong.
            SceneSave saveInstance = SceneSave.FromFile(absoluteFileName);

            int startingIndex = 0;

            string oldRelativeDirectory = FileManager.RelativeDirectory;
            // Texture paths in the scene are relative to the scene file's directory.
            FileManager.RelativeDirectory = FileManager.GetDirectory(absoluteFileName);

            // get the list of sprites from our map file
            List<SpriteSave> spriteSaveList = saveInstance.SpriteList;

            // we use the sprites as defined in the scnx file to create and draw the map.
            MapDrawableBatch mMapBatch = FromSpriteSaves(spriteSaveList, startingIndex, spriteSaveList.Count, contentManagerName, verifySameTexturePerLayer);

            FileManager.RelativeDirectory = oldRelativeDirectory;
            // temp
            //mMapBatch = new MapDrawableBatch(32, 32, 32f, 64, @"content/tiles");
            return mMapBatch;
        }

        /* This creates a MapDrawableBatch (MDB) from the list of sprites
         * provided to us by the FlatRedBall (FRB) Scene XML (scnx) file.
         */
        public static MapDrawableBatch FromSpriteSaves(List<SpriteSave> spriteSaveList, int startingIndex, int count, string contentManagerName, bool verifySameTexturesPerLayer)
        {
#if DEBUG
            if (verifySameTexturesPerLayer)
            {
                VerifySingleTexture(spriteSaveList, startingIndex, count);
            }
#endif

            // We got it!  We are going to make some assumptions:
            // First we need the texture.  We'll assume all Sprites
            // use the same texture:

            // TODO: I (Bryan) really HATE this assumption. But it will work for now.
            SpriteSave firstSprite = spriteSaveList[startingIndex];

            // This is the file name of the texture, but the file name is relative to the .scnx location
            string textureRelativeToScene = firstSprite.Texture;
            // so we load the texture
            Texture2D texture = FlatRedBallServices.Load<Texture2D>(textureRelativeToScene, contentManagerName);

            if (!MathFunctions.IsPowerOfTwo(texture.Width) || !MathFunctions.IsPowerOfTwo(texture.Height))
            {
                throw new Exception("The dimensions of the texture file " + texture.Name + " are not power of 2!");
            }

            // Assume all the dimensions of the textures are the same. I.e. all tiles use the same texture width and height.
            // This assumption is safe for Iso and Ortho tile maps.
            int tileFileDimensionsWidth = 0;
            int tileFileDimensionsHeight = 0;
            if (spriteSaveList.Count > startingIndex)
            {
                SpriteSave s = spriteSaveList[startingIndex];

                // deduce the dimensionality of the tile from the texture coordinates
                tileFileDimensionsWidth = (int)System.Math.Round((double)((s.RightTextureCoordinate - s.LeftTextureCoordinate) * texture.Width));
                tileFileDimensionsHeight = (int)System.Math.Round((double)((s.BottomTextureCoordinate - s.TopTextureCoordinate) * texture.Height));
            }

            // alas, we create the MDB
            MapDrawableBatch mMapBatch = new MapDrawableBatch(count, tileFileDimensionsWidth, tileFileDimensionsHeight, texture);

            int lastIndexExclusive = startingIndex + count;
            for (int i = startingIndex; i < lastIndexExclusive; i++)
            {
                SpriteSave spriteSave = spriteSaveList[i];

                // We don't want objects within the IDB to have a different Z than the IDB itself
                // (if possible) because that makes the IDB behave differently when using sorting vs.
                // the zbuffer.
                const bool setZTo0 = true;
                mMapBatch.Paste(spriteSave, setZTo0);
            }

            return mMapBatch;
        }

        public MapDrawableBatch Clone()
        {
            return base.Clone<MapDrawableBatch>();
        }

        // Bring the texture coordinates in to adjust for rendering issues on dx9/ogl
        public const float CoordinateAdjustment = .00002f;

        /// <summary>
        /// Builds a MapDrawableBatch from a reduced (TMX-converted) layer,
        /// choosing a sort axis along the map's longer dimension so culling
        /// can discard the most quads.
        /// </summary>
        internal static MapDrawableBatch FromReducedLayer(TMXGlueLib.DataTypes.ReducedLayerInfo reducedLayerInfo, LayeredTileMap owner, TMXGlueLib.DataTypes.ReducedTileMapInfo rtmi, string contentManagerName)
        {
            int tileDimensionWidth = reducedLayerInfo.TileWidth;
            int tileDimensionHeight = reducedLayerInfo.TileHeight;
            float quadWidth = reducedLayerInfo.TileWidth;
            float quadHeight = reducedLayerInfo.TileHeight;
            string textureName = reducedLayerInfo.Texture;

#if IOS || ANDROID
            textureName = textureName.ToLowerInvariant();
#endif

            Texture2D texture = FlatRedBallServices.Load<Texture2D>(textureName, contentManagerName);

            MapDrawableBatch toReturn = new MapDrawableBatch(reducedLayerInfo.Quads.Count, tileDimensionWidth, tileDimensionHeight, texture);
            toReturn.Name = reducedLayerInfo.Name;

            Vector3 position = new Vector3();
            IEnumerable<TMXGlueLib.DataTypes.ReducedQuadInfo> quads = null;

            // Sort along the longer map dimension so a SortAxis cull removes the most quads.
            if (rtmi.NumberCellsWide > rtmi.NumberCellsTall)
            {
                quads = reducedLayerInfo.Quads.OrderBy(item => item.LeftQuadCoordinate).ToList();
                toReturn.mSortAxis = SortAxis.X;
            }
            else
            {
                quads = reducedLayerInfo.Quads.OrderBy(item => item.BottomQuadCoordinate).ToList();
                toReturn.mSortAxis = SortAxis.Y;
            }

            foreach (var quad in quads)
            {
                Vector2 tileDimensions = new Vector2(quadWidth, quadHeight);
                if (quad.OverridingWidth != null)
                {
                    tileDimensions.X = quad.OverridingWidth.Value;
                }
                if (quad.OverridingHeight != null)
                {
                    tileDimensions.Y = quad.OverridingHeight.Value;
                }

                position.X = quad.LeftQuadCoordinate;
                position.Y = quad.BottomQuadCoordinate;

                // The Z of the quad should be relative to this layer, not absolute Z values.
                // A multi-layer map will offset the individual layer Z values, the quads should have a Z of 0.
                // position.Z = reducedLayerInfo.Z;

                var textureValues = new Vector4();

                // The purpose of CoordinateAdjustment is to bring the texture values "in", to reduce the chance of adjacent
                // tiles drawing on a given tile quad. If we don't do this, we can get slivers of adjacent colors appearing, causing
                // lines or grid patterns.
                // To bring the values "in" we have to consider rotated quads.
                textureValues.X = CoordinateAdjustment + (float)quad.LeftTexturePixel / (float)texture.Width; // Left
                textureValues.Y = -CoordinateAdjustment + (float)(quad.LeftTexturePixel + tileDimensionWidth) / (float)texture.Width; // Right
                textureValues.Z = CoordinateAdjustment + (float)quad.TopTexturePixel / (float)texture.Height; // Top
                textureValues.W = -CoordinateAdjustment + (float)(quad.TopTexturePixel + tileDimensionHeight) / (float)texture.Height; // Bottom

                // pad before doing any rotations/flipping
                const bool pad = true;
                if (pad)
                {
                    const float amountToAdd = .0000001f;
                    textureValues.X += amountToAdd; // Left
                    textureValues.Y -= amountToAdd; // Right
                    textureValues.Z += amountToAdd; // Top
                    textureValues.W -= amountToAdd; // Bottom
                }

                // Horizontal flip: swap left/right texture coordinates.
                if ((quad.FlipFlags & TMXGlueLib.DataTypes.ReducedQuadInfo.FlippedHorizontallyFlag) == TMXGlueLib.DataTypes.ReducedQuadInfo.FlippedHorizontallyFlag)
                {
                    var temp = textureValues.Y;
                    textureValues.Y = textureValues.X;
                    textureValues.X = temp;
                }

                // Vertical flip: swap top/bottom texture coordinates.
                if ((quad.FlipFlags & TMXGlueLib.DataTypes.ReducedQuadInfo.FlippedVerticallyFlag) == TMXGlueLib.DataTypes.ReducedQuadInfo.FlippedVerticallyFlag)
                {
                    var temp = textureValues.Z;
                    textureValues.Z = textureValues.W;
                    textureValues.W = temp;
                }

                int tileIndex = toReturn.AddTile(position, tileDimensions,
                    //quad.LeftTexturePixel, quad.TopTexturePixel, quad.LeftTexturePixel + tileDimensionWidth, quad.TopTexturePixel + tileDimensionHeight);
                    textureValues);

                if ((quad.FlipFlags & TMXGlueLib.DataTypes.ReducedQuadInfo.FlippedDiagonallyFlag) == TMXGlueLib.DataTypes.ReducedQuadInfo.FlippedDiagonallyFlag)
                {
                    toReturn.ApplyDiagonalFlip(tileIndex);
                }

                // This was moved to outside of this conversion, to support shapes
                //if (quad.QuadSpecificProperties != null)
                //{
                //    var listToAdd = quad.QuadSpecificProperties.ToList();
                //    listToAdd.Add(new NamedValue { Name = "Name", Value = quad.Name });
                //    owner.Properties.Add(quad.Name, listToAdd);
                //}
                if (quad.RotationDegrees != 0)
                {
                    // Tiled rotates clockwise :(
                    var rotationRadians = -MathHelper.ToRadians(quad.RotationDegrees);

                    // Rotate the other three verts around the bottom-left vert.
                    Vector3 bottomLeftPos = toReturn.Vertices[tileIndex * 4].Position;

                    Vector3 vertPos = toReturn.Vertices[tileIndex * 4 + 1].Position;
                    MathFunctions.RotatePointAroundPoint(bottomLeftPos, ref vertPos, rotationRadians);
                    toReturn.Vertices[tileIndex * 4 + 1].Position = vertPos;

                    vertPos = toReturn.Vertices[tileIndex * 4 + 2].Position;
                    MathFunctions.RotatePointAroundPoint(bottomLeftPos, ref vertPos, rotationRadians);
                    toReturn.Vertices[tileIndex * 4 + 2].Position = vertPos;

                    vertPos = toReturn.Vertices[tileIndex * 4 + 3].Position;
                    MathFunctions.RotatePointAroundPoint(bottomLeftPos, ref vertPos, rotationRadians);
                    toReturn.Vertices[tileIndex * 4 + 3].Position = vertPos;
                }

                toReturn.RegisterName(quad.Name, tileIndex);
            }

            return toReturn;
        }

        public void Paste(Sprite sprite)
        {
            Paste(sprite, false);
        }

        /// <summary>
        /// Adds a tile matching the given Sprite's position, size, and texture
        /// coordinates, returning the new tile's index.
        /// </summary>
        public int Paste(Sprite sprite, bool setZTo0)
        {
            // here we have the Sprite's X and Y in absolute coords as well as its texture coords
            // NOTE: I appended the Z coordinate for the sake of iso maps. This SHOULDN'T have an effect on the ortho maps since I believe the
            // TMX->SCNX tool sets all z to zero.

            // The AddTile method expects the bottom-left corner
            float x = sprite.X - sprite.ScaleX;
            float y = sprite.Y - sprite.ScaleY;
            float z = sprite.Z;

            if (setZTo0)
            {
                z = 0;
            }

            float width = 2f * sprite.ScaleX; // w
            float height = 2f * sprite.ScaleY; // z

            float topTextureCoordinate = sprite.TopTextureCoordinate;
            float bottomTextureCoordinate = sprite.BottomTextureCoordinate;
            float leftTextureCoordinate = sprite.LeftTextureCoordinate;
            float rightTextureCoordinate = sprite.RightTextureCoordinate;

            int tileIndex = mCurrentNumberOfTiles;

            RegisterName(sprite.Name, tileIndex);

            // add the textured tile to our map so that we may draw it.
            return AddTile(new Vector3(x, y, z), new Vector2(width, height), new Vector4(leftTextureCoordinate, rightTextureCoordinate, topTextureCoordinate, bottomTextureCoordinate));
        }

        public void Paste(SpriteSave spriteSave)
        {
            Paste(spriteSave, false);
        }

        /// <summary>
        /// Adds a tile matching the given SpriteSave's position, size, and
        /// texture coordinates, returning the new tile's index.
        /// </summary>
        public int Paste(SpriteSave spriteSave, bool setZTo0)
        {
            // here we have the Sprite's X and Y in absolute coords as well as its texture coords
            // NOTE: I appended the Z coordinate for the sake of iso maps. This SHOULDN'T have an effect on the ortho maps since I believe the
            // TMX->SCNX tool sets all z to zero.

            // The AddTile method expects the bottom-left corner
            float x = spriteSave.X - spriteSave.ScaleX;
            float y = spriteSave.Y - spriteSave.ScaleY;
            float z = spriteSave.Z;

            if (setZTo0)
            {
                z = 0;
            }

            float width = 2f * spriteSave.ScaleX; // w
            float height = 2f * spriteSave.ScaleY; // z

            float topTextureCoordinate = spriteSave.TopTextureCoordinate;
            float bottomTextureCoordinate = spriteSave.BottomTextureCoordinate;
            float leftTextureCoordinate = spriteSave.LeftTextureCoordinate;
            float rightTextureCoordinate = spriteSave.RightTextureCoordinate;

            int tileIndex = mCurrentNumberOfTiles;

            RegisterName(spriteSave.Name, tileIndex);

            // add the textured tile to our map so that we may draw it.
            return AddTile(new Vector3(x, y, z), new Vector2(width, height), new Vector4(leftTextureCoordinate, rightTextureCoordinate, topTextureCoordinate, bottomTextureCoordinate));
        }

        // Debug-only check that every SpriteSave in the range shares one texture.
        private static void VerifySingleTexture(List<SpriteSave> spriteSaveList, int startingIndex, int count)
        {
            // Every Sprite should either have the same texture
            if (spriteSaveList.Count != 0)
            {
                string texture = spriteSaveList[startingIndex].Texture;

                for (int i = startingIndex + 1; i < startingIndex + count; i++)
                {
                    SpriteSave ss = spriteSaveList[i];
                    if (ss.Texture != texture)
                    {
                        // NOTE(review): these values are computed but never used -
                        // presumably intended for a more descriptive message. TODO confirm.
                        float leftOfSprite = ss.X - ss.ScaleX;
                        float indexX = leftOfSprite / (ss.ScaleX * 2);

                        float topOfSprite = ss.Y + ss.ScaleY;
                        float indexY = (0 - topOfSprite) / (ss.ScaleY * 2);

                        throw new Exception("All Sprites do not have the same texture");
                    }
                }
            }
        }

        /// <summary>
        /// Records the given tile index under the given name, unless the name
        /// is empty or purely numeric (auto-generated names are numbers).
        /// </summary>
        public void RegisterName(string name, int tileIndex)
        {
            int throwaway;
            if (!string.IsNullOrEmpty(name) && !int.TryParse(name, out throwaway))
            {
                // TEMPORARY:
                // The tmx converter
                // names all Sprites with
                // a number if their name is
                // not explicitly set.  Therefore
                // we have to ignore those and look
                // for explicit names (names not numbers).
                // Will talk to Domenic about this to fix it.
                if (!mNamedTileOrderedIndexes.ContainsKey(name))
                {
                    mNamedTileOrderedIndexes.Add(name, new List<int>());
                }

                mNamedTileOrderedIndexes[name].Add(tileIndex);
            }
        }

        // Scratch buffer reused by the PaintTile* methods to avoid per-call allocation.
        Vector2[] coords = new Vector2[4];

        /// <summary>
        /// Paints a texture on a tile.  This method takes the index of the Sprite in the order it was added
        /// to the MapDrawableBatch, so it supports any configuration including non-rectangular maps and maps with
        /// gaps.
        /// </summary>
        /// <param name="orderedTileIndex">The index of the tile to paint - this matches the index of the tile as it was added.</param>
        /// <param name="newTextureId">The index within the tileset of the texture to paint onto the quad.</param>
        public void PaintTile(int orderedTileIndex, int newTextureId)
        {
            int currentVertex = orderedTileIndex * 4; // 4 vertices per tile

            // Reusing the coords array saves us on allocation
            mTileset.GetTextureCoordinateVectorsOfTextureIndex(newTextureId, coords);

            // Coords are
            // 3   2
            //
            // 0   1

            mVertices[currentVertex + 0].TextureCoordinate = coords[0];
            mVertices[currentVertex + 1].TextureCoordinate = coords[1];
            mVertices[currentVertex + 2].TextureCoordinate = coords[2];
            mVertices[currentVertex + 3].TextureCoordinate = coords[3];
        }

        /// <summary>
        /// Sets the left and top texture coordinates of the tile represented by orderedTileIndex.  The right and bottom texture coordinates
        /// are set automatically according to the tileset dimensions.
        /// </summary>
        /// <param name="orderedTileIndex">The ordered tile index.</param>
        /// <param name="textureXCoordinate">The left texture coordinate (in UV coordinates)</param>
        /// <param name="textureYCoordinate">The top texture coordinate (in UV coordinates)</param>
        public void PaintTileTextureCoordinates(int orderedTileIndex, float textureXCoordinate, float textureYCoordinate)
        {
            int currentVertex = orderedTileIndex * 4; // 4 vertices per tile

            mTileset.GetCoordinatesForTile(coords, textureXCoordinate, textureYCoordinate);

            mVertices[currentVertex + 0].TextureCoordinate = coords[0];
            mVertices[currentVertex + 1].TextureCoordinate = coords[1];
            mVertices[currentVertex + 2].TextureCoordinate = coords[2];
            mVertices[currentVertex + 3].TextureCoordinate = coords[3];
        }

        /// <summary>
        /// Sets all four texture coordinates of the tile explicitly (in UV coordinates).
        /// </summary>
        public void PaintTileTextureCoordinates(int orderedTileIndex, float leftCoordinate, float topCoordinate, float rightCoordinate, float bottomCoordinate)
        {
            int currentVertex = orderedTileIndex * 4; // 4 vertices per tile

            // Coords are
            // 3   2
            //
            // 0   1

            mVertices[currentVertex + 0].TextureCoordinate.X = leftCoordinate;
            mVertices[currentVertex + 0].TextureCoordinate.Y = bottomCoordinate;

            mVertices[currentVertex + 1].TextureCoordinate.X = rightCoordinate;
            mVertices[currentVertex + 1].TextureCoordinate.Y = bottomCoordinate;

            mVertices[currentVertex + 2].TextureCoordinate.X = rightCoordinate;
            mVertices[currentVertex + 2].TextureCoordinate.Y = topCoordinate;

            mVertices[currentVertex + 3].TextureCoordinate.X = leftCoordinate;
            mVertices[currentVertex + 3].TextureCoordinate.Y = topCoordinate;
        }

        // Swaps the top-right for the bottom-left verts
        public void ApplyDiagonalFlip(int orderedTileIndex)
        {
            int currentVertex = orderedTileIndex * 4; // 4 vertices per tile

            // Coords are
            // 3   2
            //
            // 0   1

            var old0 = mVertices[currentVertex + 0].TextureCoordinate;

            mVertices[currentVertex + 0].TextureCoordinate = mVertices[currentVertex + 2].TextureCoordinate;
            mVertices[currentVertex + 2].TextureCoordinate = old0;
        }

        // Shifts each vertex's texture coordinate one position counterclockwise.
        public void RotateTextureCoordinatesCounterclockwise(int orderedTileIndex)
        {
            int currentVertex = orderedTileIndex * 4; // 4 vertices per tile

            // Coords are
            // 3   2
            //
            // 0   1

            var old3 = mVertices[currentVertex + 3].TextureCoordinate;

            mVertices[currentVertex + 3].TextureCoordinate = mVertices[currentVertex + 2].TextureCoordinate;
            mVertices[currentVertex + 2].TextureCoordinate = mVertices[currentVertex + 1].TextureCoordinate;
            mVertices[currentVertex + 1].TextureCoordinate = mVertices[currentVertex + 0].TextureCoordinate;
            mVertices[currentVertex + 0].TextureCoordinate = old3;
        }

        /// <summary>
        /// Gets the texture coordinates of the top-left vertex of the given tile.
        /// (Note: method name typo is kept because it is part of the public API.)
        /// </summary>
        public void GetTextureCoordiantesForOrderedTile(int orderedTileIndex, out float textureX, out float textureY)
        {
            // The order is:
            // 3   2
            //
            // 0   1
            // So we want to add 3 to the index to get the top-left vert, then use
            // the texture coordinates there to get the texture values.
            Vector2 vector = mVertices[(orderedTileIndex * 4) + 3].TextureCoordinate;
            textureX = vector.X;
            textureY = vector.Y;
        }

        /// <summary>
        /// Gets the world X/Y of the bottom-left vertex of the given tile.
        /// </summary>
        public void GetBottomLeftWorldCoordinateForOrderedTile(int orderedTileIndex, out float x, out float y)
        {
            // The order is:
            // 3   2
            //
            // 0   1
            // So we just need to multiply by 4 and not add anything
            Vector3 vector = mVertices[(orderedTileIndex * 4)].Position;
            x = vector.X;
            y = vector.Y;
        }

        /// <summary>
        /// Adds a tile to the tile map
        /// </summary>
        /// <param name="bottomLeftPosition">World position of the tile's bottom-left corner.</param>
        /// <param name="dimensions">Width (X) and height (Y) of the quad.</param>
        /// <param name="texture">
        /// 4 points defining the boundaries in the texture for the tile.
        /// (X = left, Y = right, Z = top, W = bottom)
        /// </param>
        /// <returns>The index of the tile in the tile map, which can be used to modify the painted tile at a later time.</returns>
        public int AddTile(Vector3 bottomLeftPosition, Vector2 dimensions, Vector4 texture)
        {
            int toReturn = mCurrentNumberOfTiles;
            int currentVertex = mCurrentNumberOfTiles * 4;

            int currentIndex = mCurrentNumberOfTiles * 6; // 6 indices per tile (there are mVertices.Length/4 tiles)

            float xOffset = bottomLeftPosition.X;
            float yOffset = bottomLeftPosition.Y;
            float zOffset = bottomLeftPosition.Z;

            float width = dimensions.X;
            float height = dimensions.Y;

            // create vertices
            mVertices[currentVertex + 0] = new VertexPositionTexture(new Vector3(xOffset + 0f, yOffset + 0f, zOffset), new Vector2(texture.X, texture.W));
            mVertices[currentVertex + 1] = new VertexPositionTexture(new Vector3(xOffset + width, yOffset + 0f, zOffset), new Vector2(texture.Y, texture.W));
            mVertices[currentVertex + 2] = new VertexPositionTexture(new Vector3(xOffset + width, yOffset + height, zOffset), new Vector2(texture.Y, texture.Z));
            mVertices[currentVertex + 3] = new VertexPositionTexture(new Vector3(xOffset + 0f, yOffset + height, zOffset), new Vector2(texture.X, texture.Z));

            // create indices (two triangles: 0-1-2 and 0-2-3)
            mIndices[currentIndex + 0] = currentVertex + 0;
            mIndices[currentIndex + 1] = currentVertex + 1;
            mIndices[currentIndex + 2] = currentVertex + 2;
            mIndices[currentIndex + 3] = currentVertex + 0;
            mIndices[currentIndex + 4] = currentVertex + 2;
            mIndices[currentIndex + 5] = currentVertex + 3;

            mCurrentNumberOfTiles++;

            return toReturn;
        }

        /// <summary>
        /// Add a
        /// tile to the map
        /// </summary>
        /// <param name="bottomLeftPosition">World position of the tile's bottom-left corner.</param>
        /// <param name="tileDimensions">Width (X) and height (Y) of the quad.</param>
        /// <param name="textureTopLeftX">Top left pixel X coordinate in the core texture</param>
        /// <param name="textureTopLeftY">Top left pixel Y coordinate in the core texture</param>
        /// <param name="textureBottomRightX">Bottom right pixel X coordinate in the core texture</param>
        /// <param name="textureBottomRightY">Bottom right pixel Y coordinate in the core texture</param>
        public int AddTile(Vector3 bottomLeftPosition, Vector2 tileDimensions, int textureTopLeftX, int textureTopLeftY, int textureBottomRightX, int textureBottomRightY)
        {
            // Form vector4 for AddTile overload (pixel coordinates normalized to UV)
            var textureValues = new Vector4();
            textureValues.X = (float)textureTopLeftX / (float)mTexture.Width; // Left
            textureValues.Y = (float)textureBottomRightX / (float)mTexture.Width; // Right
            textureValues.Z = (float)textureTopLeftY / (float)mTexture.Height; // Top
            textureValues.W = (float)textureBottomRightY / (float)mTexture.Height; // Bottom

            return AddTile(bottomLeftPosition, tileDimensions, textureValues);
        }

        /// <summary>
        /// Renders the MapDrawableBatch
        /// </summary>
        /// <param name="camera">The currently drawing camera</param>
        public void Draw(Camera camera)
        {
            ////////////////////Early Out///////////////////

            if (!AbsoluteVisible)
            {
                return;
            }

            if (mVertices.Length == 0)
            {
                return;
            }

            //////////////////End Early Out/////////////////

            int firstVertIndex;
            int lastVertIndex;
            int indexStart;
            int numberOfTriangles;
            // Cull to the camera-visible range (when a SortAxis is set).
            GetRenderingIndexValues(camera, out firstVertIndex, out lastVertIndex, out indexStart, out numberOfTriangles);

            if (numberOfTriangles != 0)
            {
                // Temporarily apply this batch's texture filter override, restoring it afterward.
                TextureFilter? oldTextureFilter = null;

                if (this.TextureFilter != null && this.TextureFilter != FlatRedBallServices.GraphicsOptions.TextureFilter)
                {
                    oldTextureFilter = FlatRedBallServices.GraphicsOptions.TextureFilter;
                    FlatRedBallServices.GraphicsOptions.TextureFilter = this.TextureFilter.Value;
                }

                TextureAddressMode oldTextureAddressMode;
                FlatRedBall.Graphics.BlendOperation oldBlendOp;
                Effect effectTouse = PrepareRenderingStates(camera, out oldTextureAddressMode, out oldBlendOp);

                foreach (EffectPass pass in effectTouse.CurrentTechnique.Passes)
                {
                    // Start each pass
                    pass.Apply();

                    int numberVertsToDraw = lastVertIndex - firstVertIndex;

                    // Right now this uses the (slower) DrawUserIndexedPrimitives
                    // It could use DrawIndexedPrimitives instead for much faster performance,
                    // but to do that we'd have to keep VB's around and make sure to re-create them
                    // whenever the graphics device is lost.
                    FlatRedBallServices.GraphicsDevice.DrawUserIndexedPrimitives<VertexPositionTexture>(
                        PrimitiveType.TriangleList,
                        mVertices,
                        firstVertIndex,
                        numberVertsToDraw,
                        mIndices,
                        indexStart,
                        numberOfTriangles);
                }

                // Restore the render state modified by PrepareRenderingStates.
                Renderer.TextureAddressMode = oldTextureAddressMode;
                FlatRedBall.Graphics.Renderer.BlendOperation = oldBlendOp;

                if (ZBuffered)
                {
                    FlatRedBallServices.GraphicsDevice.DepthStencilState = DepthStencilState.DepthRead;
                }

                if (oldTextureFilter != null)
                {
                    FlatRedBallServices.GraphicsOptions.TextureFilter = oldTextureFilter.Value;
                }
            }
        }

        // Sets up device state and configures the effect to draw with, returning it.
        // The previous address mode and blend operation are output so the caller
        // can restore them after drawing.
        private Effect PrepareRenderingStates(Camera camera, out TextureAddressMode oldTextureAddressMode, out FlatRedBall.Graphics.BlendOperation oldBlendOperation)
        {
            // Set graphics states
            FlatRedBallServices.GraphicsDevice.RasterizerState = RasterizerState.CullNone;

            oldBlendOperation = FlatRedBall.Graphics.Renderer.BlendOperation;

#if TILEMAPS_ALPHA_AND_COLOR
            FlatRedBall.Graphics.Renderer.BlendOperation = BlendOperation.Regular;
            FlatRedBall.Graphics.Renderer.ColorOperation = ColorOperation.Modulate;
#else
            FlatRedBall.Graphics.Renderer.BlendOperation = BlendOperation.Regular;
#endif

            Effect effectTouse = null;

            if (ZBuffered)
            {
                // Z-buffered path uses the alpha-test effect with full depth writes.
                FlatRedBallServices.GraphicsDevice.DepthStencilState = DepthStencilState.Default;
                camera.SetDeviceViewAndProjection(mAlphaTestEffect, false);

                mAlphaTestEffect.World = Matrix.CreateScale(RenderingScale) * base.TransformationMatrix;
                mAlphaTestEffect.Texture = mTexture;
                effectTouse = mAlphaTestEffect;
            }
            else
            {
                camera.SetDeviceViewAndProjection(mBasicEffect, false);

                mBasicEffect.World = Matrix.CreateScale(RenderingScale) * base.TransformationMatrix;
                mBasicEffect.Texture = mTexture;

                mBasicEffect.DiffuseColor = new Vector3(Red, Green, Blue);
                mBasicEffect.Alpha = Alpha;

#if TILEMAPS_ALPHA_AND_COLOR
                mBasicEffect.VertexColorEnabled = true;
#endif

                effectTouse = mBasicEffect;
            }

            // We won't need to use any other kind of texture
            // address mode besides clamp, and clamp is required
            // on the "Reach" profile when the texture is not power
            // of two.  Let's set it to clamp here so that we don't crash
            // on non-power-of-two textures.
            oldTextureAddressMode = Renderer.TextureAddressMode;
            Renderer.TextureAddressMode = TextureAddressMode.Clamp;

            return effectTouse;
        }

        // Computes the vertex/index range to submit for the given camera, using
        // the sort axis (if any) to binary-search the first/last possibly-visible tiles.
        private void GetRenderingIndexValues(Camera camera, out int firstVertIndex, out int lastVertIndex, out int indexStart, out int numberOfTriangles)
        {
            firstVertIndex = 0;
            lastVertIndex = mVertices.Length;

            // NOTE(review): assumes the first quad's width is representative of all
            // tiles when padding the culling range - TODO confirm.
            float tileWidth = mVertices[1].Position.X - mVertices[0].Position.X;

            if (mSortAxis == SortAxis.X)
            {
                float minX = camera.AbsoluteLeftXEdgeAt(this.Z);
                float maxX = camera.AbsoluteRightXEdgeAt(this.Z);

                // Convert camera edges into this batch's local space.
                minX -= this.X;
                maxX -= this.X;

                firstVertIndex = GetFirstAfterX(mVertices, minX - tileWidth);
                lastVertIndex = GetFirstAfterX(mVertices, maxX) + 4;
            }
            else if (mSortAxis == SortAxis.Y)
            {
                float minY = camera.AbsoluteBottomYEdgeAt(this.Z);
                float maxY = camera.AbsoluteTopYEdgeAt(this.Z);

                minY -= this.Y;
                maxY -= this.Y;

                firstVertIndex = GetFirstAfterY(mVertices, minY - tileWidth);
                lastVertIndex = GetFirstAfterY(mVertices, maxY) + 4;
            }

            lastVertIndex = System.Math.Min(lastVertIndex,
mVertices.Length);

            indexStart = 0;// (firstVertIndex * 3) / 2;

            // 6 indices per 4 vertices -> multiply the vertex count by 3/2.
            int indexEndExclusive = ((lastVertIndex - firstVertIndex) * 3) / 2;

            numberOfTriangles = (indexEndExclusive - indexStart) / 3;
        }

        /// <summary>
        /// Binary-searches the vertex array (4 entries per quad, sorted ascending
        /// by each quad's first vertex X) and returns the vertex index of the
        /// first quad whose X is greater than xGreaterThan. Returns 0 when every
        /// quad is after the value and list.Length when none is.
        /// </summary>
        public static int GetFirstAfterX(VertexPositionTexture[] list, float xGreaterThan)
        {
            int min = 0;
            int originalMax = list.Length / 4;
            int max = list.Length / 4;

            int mid = (max + min) / 2;

            while (min < max)
            {
                mid = (max + min) / 2;

                float midItem = list[mid * 4].Position.X;

                if (midItem > xGreaterThan)
                {
                    // Is this the last one?
                    // Not sure why this is here, because if we have just 2 items,
                    // this will always return a value of 1 instead
                    //if (mid * 4 + 4 >= list.Length)
                    //{
                    //    return mid * 4;
                    //}

                    // did we find it?
                    if (mid > 0 && list[(mid - 1) * 4].Position.X <= xGreaterThan)
                    {
                        return mid * 4;
                    }
                    else
                    {
                        max = mid - 1;
                    }
                }
                else if (midItem <= xGreaterThan)
                {
                    if (mid == 0)
                    {
                        return mid * 4;
                    }
                    else if (mid < originalMax - 1 && list[(mid + 1) * 4].Position.X > xGreaterThan)
                    {
                        return (mid + 1) * 4;
                    }
                    else
                    {
                        min = mid + 1;
                    }
                }
            }

            if (min == 0)
            {
                return 0;
            }
            else
            {
                return list.Length;
            }
        }

        /// <summary>
        /// Y-axis counterpart of GetFirstAfterX; identical search over Position.Y.
        /// </summary>
        public static int GetFirstAfterY(VertexPositionTexture[] list, float yGreaterThan)
        {
            int min = 0;
            int originalMax = list.Length / 4;
            int max = list.Length / 4;

            int mid = (max + min) / 2;

            while (min < max)
            {
                mid = (max + min) / 2;

                float midItem = list[mid * 4].Position.Y;

                if (midItem > yGreaterThan)
                {
                    // Is this the last one?
                    // See comment in GetFirstAfterX
                    //if (mid * 4 + 4 >= list.Length)
                    //{
                    //    return mid * 4;
                    //}

                    // did we find it?
                    if (mid > 0 && list[(mid - 1) * 4].Position.Y <= yGreaterThan)
                    {
                        return mid * 4;
                    }
                    else
                    {
                        max = mid - 1;
                    }
                }
                else if (midItem <= yGreaterThan)
                {
                    if (mid == 0)
                    {
                        return mid * 4;
                    }
                    else if (mid < originalMax - 1 && list[(mid + 1) * 4].Position.Y > yGreaterThan)
                    {
                        return (mid + 1) * 4;
                    }
                    else
                    {
                        min = mid + 1;
                    }
                }
            }

            if (min == 0)
            {
                return 0;
            }
            else
            {
                return list.Length;
            }
        }

        #region XML Docs
        /// <summary>
        /// Per-frame update: applies parallax scrolling relative to the main
        /// camera's position and runs time-based activity.
        /// </summary>
        #endregion
        public void Update()
        {
            float leftView = Camera.Main.AbsoluteLeftXEdgeAt(0);
            float topView = Camera.Main.AbsoluteTopYEdgeAt(0);

            float cameraOffsetX = leftView - CameraOriginX;
            float cameraOffsetY = topView - CameraOriginY;

            this.RelativeX = cameraOffsetX * _parallaxMultiplierX;
            this.RelativeY = cameraOffsetY * _parallaxMultiplierY;

            this.TimedActivity(TimeManager.SecondDifference, TimeManager.SecondDifferenceSquaredDividedByTwo, TimeManager.LastSecondDifference);

            // The MapDrawableBatch may be attached to a LayeredTileMap (the container of all layers)
            // If so, the player may move the LayeredTileMap and expect all contained layers to move along
            // with it. To allow this, we need to have dependencies updated. We'll do this by simply updating
            // dependencies here, although I don't know at this point if there's a better way - like if we should
            // be adding this to the SpriteManager's PositionedObjectList. This is an improvement so we'll do it for
            // now and revisit this in case there's a problem in the future.
this.UpdateDependencies(TimeManager.CurrentTime); } // TODO: I would like to somehow make this a property on the LayeredTileMap, but right now it is easier to put them here public float CameraOriginY { get; set; } public float CameraOriginX { get; set; } IVisible IVisible.Parent { get { return this.Parent as IVisible; } } public bool AbsoluteVisible { get { if (this.Visible) { var parentAsIVisible = this.Parent as IVisible; if (parentAsIVisible == null || IgnoresParentVisibility) { return true; } else { // this is true, so return if the parent is visible: return parentAsIVisible.AbsoluteVisible; } } else { return false; } } } public bool IgnoresParentVisibility { get; set; } #region XML Docs /// <summary> /// Don't call this, instead call SpriteManager.RemoveDrawableBatch /// </summary> #endregion public void Destroy() { this.RemoveSelfFromListsBelongingTo(); } public void MergeOntoThis(IEnumerable<MapDrawableBatch> mapDrawableBatches) { int quadsToAdd = 0; int quadsOnThis = QuadCount; foreach (var mdb in mapDrawableBatches) { quadsToAdd += mdb.QuadCount; } int totalNumberOfVerts = 4 * (this.QuadCount + quadsToAdd); int totalNumberOfIndexes = 6 * (this.QuadCount + quadsToAdd); var oldVerts = mVertices; var oldIndexes = mIndices; mVertices = new VertexType[totalNumberOfVerts]; mIndices = new int[totalNumberOfIndexes]; oldVerts.CopyTo(mVertices, 0); oldIndexes.CopyTo(mIndices, 0); int currentQuadIndex = quadsOnThis; int index = 0; foreach (var mdb in mapDrawableBatches) { int startVert = currentQuadIndex * 4; int startIndex = currentQuadIndex * 6; int numberOfIndices = mdb.mIndices.Length; int numberOfNewVertices = mdb.mVertices.Length; mdb.mVertices.CopyTo(mVertices, startVert); mdb.mIndices.CopyTo(mIndices, startIndex); for (int i = startIndex; i < startIndex + numberOfIndices; i++) { mIndices[i] += startVert; } for (int i = startVert; i < startVert + numberOfNewVertices; i++) { mVertices[i].Position.Z += index + 1; } foreach (var kvp in 
mdb.mNamedTileOrderedIndexes) { string key = kvp.Key; List<int> toAddTo; if (mNamedTileOrderedIndexes.ContainsKey(key)) { toAddTo = mNamedTileOrderedIndexes[key]; } else { toAddTo = new List<int>(); mNamedTileOrderedIndexes[key] = toAddTo; } foreach (var namedIndex in kvp.Value) { toAddTo.Add(namedIndex + currentQuadIndex); } } currentQuadIndex += mdb.QuadCount; index++; } } public void RemoveQuads(IEnumerable<int> quadIndexes) { var vertList = mVertices.ToList(); // Reverse - go from biggest to smallest foreach (var indexToRemove in quadIndexes.Distinct().OrderBy(item => -item)) { // and go from biggest to smallest here too vertList.RemoveAt(indexToRemove * 4 + 3); vertList.RemoveAt(indexToRemove * 4 + 2); vertList.RemoveAt(indexToRemove * 4 + 1); vertList.RemoveAt(indexToRemove * 4 + 0); } mVertices = vertList.ToArray(); // The mNamedTileOrderedIndexes is a dictionary that stores which indexes are stored // with which tiles. For example, the key in the dictionary may be "Lava", in which case // the value is the indexes of the tiles that use the Lava tile. // If we do end up removing any quads, then all following quads will shift, so we need to // adjust the indexes so the naming works correctly List<int> orderedInts = quadIndexes.OrderBy(item => item).Distinct().ToList(); int numberOfRemovals = 0; foreach (var kvp in mNamedTileOrderedIndexes) { var ints = kvp.Value; numberOfRemovals = 0; for (int i = 0; i < ints.Count; i++) { // Nothing left to test, so subtract and move on.... 
if (numberOfRemovals == orderedInts.Count) { ints[i] -= numberOfRemovals; } else if (ints[i] == orderedInts[numberOfRemovals]) { ints.Clear(); break; } else if (ints[i] < orderedInts[numberOfRemovals]) { ints[i] -= numberOfRemovals; } else { while (numberOfRemovals < orderedInts.Count && ints[i] > orderedInts[numberOfRemovals]) { numberOfRemovals++; } if (numberOfRemovals < orderedInts.Count && ints[i] == orderedInts[numberOfRemovals]) { ints.Clear(); break; } ints[i] -= numberOfRemovals; } } } } #endregion } public static class MapDrawableBatchExtensionMethods { } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Text;

namespace System
{
    // Helpers for IRI (RFC 3987) character classification and escape/unescape normalization.
    internal static class IriHelper
    {
        //
        // Checks if provided non surrogate char lies in iri range
        // (the RFC 3987 "ucschar" ranges within the BMP; the U+E000..U+F8FF private-use
        // range is additionally allowed only for the query component).
        //
        internal static bool CheckIriUnicodeRange(char unicode, bool isQuery)
        {
            return ((unicode >= '\u00A0' && unicode <= '\uD7FF') ||
                (unicode >= '\uF900' && unicode <= '\uFDCF') ||
                (unicode >= '\uFDF0' && unicode <= '\uFFEF') ||
                (isQuery && unicode >= '\uE000' && unicode <= '\uF8FF'));
        }

        //
        // Check if highSurr and lowSurr are a surrogate pair then
        // it checks if the combined char is in the range
        // Takes in isQuery because iri restrictions for query are different
        // (supplementary private-use planes F and 10 are only valid in queries).
        // Returns false and leaves surrogatePair == false when the two chars do not
        // form a valid pair.
        //
        internal static bool CheckIriUnicodeRange(char highSurr, char lowSurr, ref bool surrogatePair, bool isQuery)
        {
            bool inRange = false;
            surrogatePair = false;

            Debug.Assert(char.IsHighSurrogate(highSurr));

            if (char.IsSurrogatePair(highSurr, lowSurr))
            {
                surrogatePair = true;
                // Ordinal string comparison of the combined pair against the plane
                // boundaries; each plane's valid range ends at xxFFFD.
                char[] chars = new char[2] { highSurr, lowSurr };
                string surrPair = new string(chars);
                if (((string.CompareOrdinal(surrPair, "\U00010000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U0001FFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U00020000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U0002FFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U00030000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U0003FFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U00040000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U0004FFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U00050000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U0005FFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U00060000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U0006FFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U00070000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U0007FFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U00080000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U0008FFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U00090000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U0009FFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U000A0000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U000AFFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U000B0000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U000BFFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U000C0000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U000CFFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U000D0000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U000DFFFD") <= 0)) ||
                    ((string.CompareOrdinal(surrPair, "\U000E1000") >= 0) &&
                    (string.CompareOrdinal(surrPair, "\U000EFFFD") <= 0)) ||
                    (isQuery &&
                        (((string.CompareOrdinal(surrPair, "\U000F0000") >= 0) &&
                        (string.CompareOrdinal(surrPair, "\U000FFFFD") <= 0)) ||
                        ((string.CompareOrdinal(surrPair, "\U00100000") >= 0) &&
                        (string.CompareOrdinal(surrPair, "\U0010FFFD") <= 0)))))
                {
                    inRange = true;
                }
            }

            return inRange;
        }

        //
        // Check reserved chars according to RFC 3987 in a specific component.
        // For component values outside the known set, falls back to the gen-delim
        // check only when component == 0.
        //
        internal static bool CheckIsReserved(char ch, UriComponents component)
        {
            if ((component != UriComponents.Scheme) &&
                (component != UriComponents.UserInfo) &&
                (component != UriComponents.Host) &&
                (component != UriComponents.Port) &&
                (component != UriComponents.Path) &&
                (component != UriComponents.Query) &&
                (component != UriComponents.Fragment)
                )
            {
                return (component == (UriComponents)0) ? UriHelper.IsGenDelim(ch) : false;
            }
            else
            {
                switch (component)
                {
                    // Reserved chars according to RFC 3987
                    case UriComponents.UserInfo:
                        if (ch == '/' || ch == '?' || ch == '#' || ch == '[' || ch == ']' || ch == '@')
                            return true;
                        break;
                    case UriComponents.Host:
                        if (ch == ':' || ch == '/' || ch == '?' || ch == '#' || ch == '[' || ch == ']' || ch == '@')
                            return true;
                        break;
                    case UriComponents.Path:
                        if (ch == '/' || ch == '?' || ch == '#' || ch == '[' || ch == ']')
                            return true;
                        break;
                    case UriComponents.Query:
                        if (ch == '#' || ch == '[' || ch == ']')
                            return true;
                        break;
                    case UriComponents.Fragment:
                        if (ch == '#' || ch == '[' || ch == ']')
                            return true;
                        break;
                    default:
                        break;
                }
                return false;
            }
        }

        //
        // IRI normalization for strings containing characters that are not allowed or
        // escaped characters that should be unescaped in the context of the specified Uri component.
        // Works over the [start, end) range of pInput; the output buffer is pinned so it can be
        // written through a raw pointer, and is grown (re-pinned) on demand when escaping expands
        // the text.
        //
        internal static unsafe string EscapeUnescapeIri(char* pInput, int start, int end, UriComponents component)
        {
            char[] dest = new char[end - start];
            byte[] bytes = null;

            // Pin the array to do pointer accesses
            GCHandle destHandle = GCHandle.Alloc(dest, GCHandleType.Pinned);
            char* pDest = (char*)destHandle.AddrOfPinnedObject();

            const int percentEncodingLen = 3; // Escaped UTF-8 will take 3 chars: %AB.
            const int bufferCapacityIncrease = 30 * percentEncodingLen;
            int bufferRemaining = 0;

            int next = start;
            int destOffset = 0;
            char ch;
            bool escape = false;
            bool surrogatePair = false;

            for (; next < end; ++next)
            {
                escape = false;
                surrogatePair = false;

                if ((ch = pInput[next]) == '%')
                {
                    if (next + 2 < end)
                    {
                        ch = UriHelper.EscapedAscii(pInput[next + 1], pInput[next + 2]);

                        // Do not unescape a reserved char
                        if (ch == Uri.c_DummyChar || ch == '%' || CheckIsReserved(ch, component) || UriHelper.IsNotSafeForUnescape(ch))
                        {
                            // keep as is
                            Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                            pDest[destOffset++] = pInput[next++];
                            Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                            pDest[destOffset++] = pInput[next++];
                            Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                            pDest[destOffset++] = pInput[next];
                            continue;
                        }
                        else if (ch <= '\x7F')
                        {
                            Debug.Assert(ch < 0xFF, "Expecting ASCII character.");
                            Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                            //ASCII
                            pDest[destOffset++] = ch;
                            next += 2;
                            continue;
                        }
                        else
                        {
                            // possibly utf8 encoded sequence of unicode
                            // check if safe to unescape according to Iri rules

                            Debug.Assert(ch < 0xFF, "Expecting ASCII character.");

                            int startSeq = next;
                            int byteCount = 1;
                            // lazy initialization of max size, will reuse the array for next sequences
                            if ((object)bytes == null)
                                bytes = new byte[end - next];

                            bytes[0] = (byte)ch;
                            next += 3;
                            while (next < end)
                            {
                                // Check on exit criterion
                                if ((ch = pInput[next]) != '%' || next + 2 >= end)
                                    break;

                                // already made sure we have 3 characters in str
                                ch = UriHelper.EscapedAscii(pInput[next + 1], pInput[next + 2]);

                                //invalid hex sequence ?
                                if (ch == Uri.c_DummyChar)
                                    break;
                                // character is not part of a UTF-8 sequence ?
                                else if (ch < '\x80')
                                    break;
                                else
                                {
                                    //a UTF-8 sequence
                                    bytes[byteCount++] = (byte)ch;
                                    next += 3;
                                }

                                Debug.Assert(ch < 0xFF, "Expecting ASCII character.");
                            }
                            next--; // for loop will increment

                            // Using encoder with no replacement fall-back will skip all invalid UTF-8 sequences.
                            Encoding noFallbackCharUTF8 = Encoding.GetEncoding(
                                Encoding.UTF8.CodePage,
                                new EncoderReplacementFallback(""),
                                new DecoderReplacementFallback(""));

                            char[] unescapedChars = new char[bytes.Length];
                            int charCount = noFallbackCharUTF8.GetChars(bytes, 0, byteCount, unescapedChars, 0);

                            if (charCount != 0)
                            {
                                // If invalid sequences were present in the original escaped string, we need to
                                // copy the escaped versions of those sequences.
                                // Decoded Unicode values will be kept only when they are allowed by the URI/IRI RFC
                                // rules.
                                UriHelper.MatchUTF8Sequence(pDest, dest, ref destOffset, unescapedChars, charCount, bytes, byteCount, component == UriComponents.Query, true);
                            }
                            else
                            {
                                // copy escaped sequence as is
                                for (int i = startSeq; i <= next; ++i)
                                {
                                    Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                                    pDest[destOffset++] = pInput[i];
                                }
                            }
                        }
                    }
                    else
                    {
                        // '%' too close to the end to carry two hex digits: copy it through.
                        Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                        pDest[destOffset++] = pInput[next];
                    }
                }
                else if (ch > '\x7f')
                {
                    // unicode

                    char ch2;

                    if ((char.IsHighSurrogate(ch)) && (next + 1 < end))
                    {
                        ch2 = pInput[next + 1];
                        escape = !CheckIriUnicodeRange(ch, ch2, ref surrogatePair, component == UriComponents.Query);
                        if (!escape)
                        {
                            // copy the two chars
                            Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                            pDest[destOffset++] = pInput[next++];
                            Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                            pDest[destOffset++] = pInput[next];
                        }
                    }
                    else
                    {
                        if (CheckIriUnicodeRange(ch, component == UriComponents.Query))
                        {
                            if (!UriHelper.IsBidiControlCharacter(ch))
                            {
                                // copy it
                                Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                                pDest[destOffset++] = pInput[next];
                            }
                            // NOTE(review): bidi control characters are silently dropped here.
                        }
                        else
                        {
                            // escape it
                            escape = true;
                        }
                    }
                }
                else
                {
                    // just copy the character
                    Debug.Assert(dest.Length > destOffset, "Destination length exceeded destination offset.");
                    pDest[destOffset++] = pInput[next];
                }

                if (escape)
                {
                    const int MaxNumberOfBytesEncoded = 4;

                    if (bufferRemaining < MaxNumberOfBytesEncoded * percentEncodingLen)
                    {
                        int newBufferLength = 0;

                        checked
                        {
                            // may need more memory since we didn't anticipate escaping
                            newBufferLength = dest.Length + bufferCapacityIncrease;
                            bufferRemaining += bufferCapacityIncrease;
                        }

                        char[] newDest = new char[newBufferLength];

                        fixed (char* pNewDest = newDest)
                        {
                            Buffer.MemoryCopy((byte*)pDest, (byte*)pNewDest, newBufferLength * sizeof(char), destOffset * sizeof(char));
                        }

                        if (destHandle.IsAllocated)
                        {
                            destHandle.Free();
                        }

                        dest = newDest;

                        // re-pin new dest[] array
                        destHandle = GCHandle.Alloc(dest, GCHandleType.Pinned);
                        pDest = (char*)destHandle.AddrOfPinnedObject();
                    }

                    byte[] encodedBytes = new byte[MaxNumberOfBytesEncoded];

                    fixed (byte* pEncodedBytes = &encodedBytes[0])
                    {
                        // Encode 1 char (or 2 for a surrogate pair) to UTF-8, then percent-encode
                        // each byte into dest.
                        int encodedBytesCount = Encoding.UTF8.GetBytes(pInput + next, surrogatePair ? 2 : 1, pEncodedBytes, MaxNumberOfBytesEncoded);
                        Debug.Assert(encodedBytesCount <= MaxNumberOfBytesEncoded, "UTF8 encoder should not exceed specified byteCount");

                        bufferRemaining -= encodedBytesCount * percentEncodingLen;

                        for (int count = 0; count < encodedBytesCount; ++count)
                        {
                            UriHelper.EscapeAsciiChar((char)encodedBytes[count], dest, ref destOffset);
                        }
                    }
                }
            }

            if (destHandle.IsAllocated)
                destHandle.Free();

            Debug.Assert(destOffset <= dest.Length, "Destination length met or exceeded destination offset.");
            return new string(dest, 0, destOffset);
        }
    }
}
using System.Data;
using System.IO;
using Signum.Engine.Maps;

namespace Signum.Engine.CodeGeneration;

/// <summary>
/// Generates "*Logic.cs" source files (one per module) for the entity types of the
/// current solution: Include/query registration, simple operations, and
/// AutoExpressionField navigation expressions. Virtual methods allow subclasses to
/// customize every step of the generation.
/// </summary>
public class LogicCodeGenerator
{
    public string SolutionName = null!;
    public string SolutionFolder = null!;

    public Schema CurrentSchema = null!;

    // Tri-state answer cache for the "Overwrite?" console prompt (see SafeConsole.Ask).
    protected bool? overwriteFiles = null;

    /// <summary>
    /// Entry point: resolves solution info, generates one logic file per module,
    /// and writes it to disk (prompting before overwriting existing files).
    /// </summary>
    public virtual void GenerateLogicFromEntities()
    {
        CurrentSchema = Schema.Current;

        GetSolutionInfo(out SolutionFolder, out SolutionName);

        string projectFolder = GetProjectFolder();

        if (!Directory.Exists(projectFolder))
            throw new InvalidOperationException("{0} not found. Override GetProjectFolder".FormatWith(projectFolder));

        foreach (var mod in GetModules())
        {
            string str = WriteFile(mod);

            string fileName = Path.Combine(projectFolder, GetFileName(mod));

            FileTools.CreateParentDirectory(fileName);

            if (!File.Exists(fileName) || SafeConsole.Ask(ref overwriteFiles, "Overwrite {0}?".FormatWith(fileName)))
            {
                File.WriteAllText(fileName, str);
            }
        }
    }

    /// <summary>Target project folder; defaults to "{Solution}.Logic" next to the solution.</summary>
    protected virtual string GetProjectFolder()
    {
        return Path.Combine(SolutionFolder, SolutionName + ".Logic");
    }

    protected virtual void GetSolutionInfo(out string solutionFolder, out string solutionName)
    {
        CodeGenerator.GetSolutionInfo(out solutionFolder, out solutionName);
    }

    /// <summary>Relative output path: "{Module}\{Module}Logic.cs".</summary>
    protected virtual string GetFileName(Module t)
    {
        return t.ModuleName + "\\" + t.ModuleName + "Logic.cs";
    }

    /// <summary>Groups candidate entity types into modules (value = whether the type is already in the schema).</summary>
    protected virtual IEnumerable<Module> GetModules()
    {
        Dictionary<Type, bool> types = CandidateTypes().ToDictionary(a => a, Schema.Current.Tables.ContainsKey);

        return CodeGenerator.GetModules(types, this.SolutionName);
    }

    /// <summary>Concrete entity types found in the "{Solution}.Entities" referenced assembly.</summary>
    protected virtual List<Type> CandidateTypes()
    {
        var assembly = Assembly.Load(Assembly.GetEntryAssembly()!.GetReferencedAssemblies().Single(a => a.Name == this.SolutionName + ".Entities"));

        return assembly.GetTypes().Where(t => t.IsEntity() && !t.IsAbstract).ToList();
    }

    /// <summary>Renders the full source text of one module's logic file (usings + namespace + class).</summary>
    protected virtual string WriteFile(Module mod)
    {
        var expression = mod.Types.SelectMany(t => GetExpressions(t)).ToList();

        StringBuilder sb = new StringBuilder();
        foreach (var item in GetUsingNamespaces(mod, expression))
            sb.AppendLine("using {0};".FormatWith(item));

        sb.AppendLine();
        sb.AppendLine("namespace " + GetNamespace(mod) + ";");
        sb.AppendLine();
        sb.Append(WriteLogicClass(mod, expression));

        return sb.ToString();
    }

    /// <summary>Renders the static "{Module}Logic" class: expression methods followed by Start().</summary>
    protected virtual string WriteLogicClass(Module mod, List<ExpressionInfo> expressions)
    {
        StringBuilder sb = new StringBuilder();
        sb.AppendLine("public static class " + mod.ModuleName + "Logic");
        sb.AppendLine("{");
        foreach (var ei in expressions)
        {
            string info = WriteExpressionMethod(ei);
            if (info != null)
            {
                sb.Append(info.Indent(4));
                sb.AppendLine();
            }
        }
        sb.Append(WriteStartMethod(mod, expressions).Indent(4));
        sb.AppendLine("}");
        return sb.ToString();
    }

    protected virtual string GetNamespace(Module mod)
    {
        return SolutionName + ".Logic." + mod.ModuleName;
    }

    /// <summary>Distinct namespaces of the module's types and all expression source types.</summary>
    protected virtual List<string> GetUsingNamespaces(Module mod, List<ExpressionInfo> expressions)
    {
        var result = new List<string>() { };

        result.AddRange(mod.Types.Concat(expressions.Select(e => e.FromType)).Select(t => t.Namespace!).Distinct());

        return result;
    }

    /// <summary>
    /// Renders the Start(SchemaBuilder) method body: Include/query/operation registration
    /// per type, then expression registrations. Note: simple expressions are extracted
    /// out of <paramref name="expressions"/> by WriteInclude (the list is mutated).
    /// </summary>
    protected virtual string WriteStartMethod(Module mod, List<ExpressionInfo> expressions)
    {
        var allExpressions = expressions.ToList();

        StringBuilder sb = new StringBuilder();
        sb.AppendLine("public static void Start(SchemaBuilder sb)");
        sb.AppendLine("{");
        sb.AppendLine(" if (sb.NotDefined(MethodInfo.GetCurrentMethod()))");
        sb.AppendLine(" {");

        foreach (var item in mod.Types)
        {
            string include = WriteInclude(item, allExpressions);
            if (include != null)
            {
                sb.Append(include.Indent(8));
                sb.AppendLine();
            }

            string? query = WriteQuery(item);
            if (query != null)
            {
                sb.Append(query.Indent(8));
                sb.AppendLine();
            }

            string opers = WriteOperations(item);
            if (opers != null)
            {
                sb.Append(opers.Indent(8));
                sb.AppendLine();
            }
        }

        if (allExpressions.Any())
        {
            foreach (var ei in allExpressions)
            {
                string register = GetRegisterExpression(ei);
                if (register != null)
                    sb.AppendLine(register.Indent(8));
            }
            sb.AppendLine();
        }

        sb.AppendLine(" }");
        sb.AppendLine("}");

        return sb.ToString();
    }

    /// <summary>Renders a QueryLogic.Expressions.Register(...) call for the given expression.</summary>
    protected virtual string GetRegisterExpression(ExpressionInfo ei)
    {
        return "QueryLogic.Expressions.Register(({from} {f}) => {f}.{name}(), () => typeof({to}).{NiceName}());"
            .Replace("{from}", ei.FromType.Name)
            .Replace("{to}", ei.ToType.Name)
            .Replace("{f}", GetVariableName(ei.FromType))
            .Replace("{name}", ei.Name)
            .Replace("{NiceName}", ei.IsUnique ? "NiceName" : "NicePluralName");
    }

    /// <summary>
    /// Renders the fluent sb.Include&lt;T&gt;() chain (WithSave/WithDelete/WithExpressionFrom/WithQuery).
    /// Simple expressions handled here are removed from <paramref name="expression"/> via Extract.
    /// </summary>
    protected virtual string WriteInclude(Type type, List<ExpressionInfo> expression)
    {
        var ops = GetOperationsSymbols(type);
        var save = ops.SingleOrDefaultEx(o => GetOperationType(o) == OperationType.Execute && IsSave(o));
        var delete = ops.SingleOrDefaultEx(o => GetOperationType(o) == OperationType.Delete);
        var p = ShouldWriteSimpleQuery(type) ? GetVariableName(type) : null;

        var simpleExpressions = expression.Extract(exp => IsSimpleExpression(exp, type));

        return new[]
        {
            "sb.Include<" + type.TypeName() + ">()",
            GetWithVirtualMLists(type),
            save != null && ShouldWriteSimpleOperations(save) ? (" .WithSave(" + save.Symbol.ToString() + ")") : null,
            delete != null && ShouldWriteSimpleOperations(delete) ? (" .WithDelete(" + delete.Symbol.ToString() + ")") : null,
            simpleExpressions.HasItems() ? simpleExpressions.ToString(e => $" .WithExpressionFrom(({e.FromType.Name} {GetVariableName(e.FromType)}) => {GetVariableName(e.FromType)}.{e.Name}())", "\r\n") : null,
            p == null ? null : $" .WithQuery(() => {p} => {WriteQueryConstructor(type, p)})"
        }.NotNull().ToString("\r\n") + ";";
    }

    /// <summary>A non-unique expression targeting the type itself can be inlined in the Include chain.</summary>
    protected virtual bool IsSimpleExpression(ExpressionInfo exp, Type type)
    {
        return !exp.IsUnique && type == exp.ToType;
    }

    /// <summary>Renders a standalone query registration; null when the simple WithQuery form is used instead.</summary>
    protected virtual string? WriteQuery(Type type)
    {
        if (ShouldWriteSimpleQuery(type))
            return null;

        string typeName = type.TypeName();

        var v = GetVariableName(type);

        StringBuilder sb = new StringBuilder();
        sb.AppendLine("QueryLogic.Queries.Register(typeof({0}), () =>".FormatWith(typeName));
        sb.AppendLine(" from {0} in Database.Query<{1}>()".FormatWith(v, typeName));
        sb.AppendLine(" select " + WriteQueryConstructor(type, v) + ");");
        return sb.ToString();
    }

    /// <summary>Renders the anonymous-type projection (Entity, Id, then up to 10 simple properties).</summary>
    private string WriteQueryConstructor(Type type, string v)
    {
        StringBuilder sb = new StringBuilder();
        sb.AppendLine("new");
        sb.AppendLine(" {");
        sb.AppendLine(" Entity = {0},".FormatWith(v));
        sb.AppendLine(" {0}.Id,".FormatWith(v));
        foreach (var prop in GetQueryProperties(type))
        {
            sb.AppendLine(" {0}.{1},".FormatWith(v, prop.Name));
        }
        sb.Append(" }");
        return sb.ToString();
    }

    protected virtual bool ShouldWriteSimpleQuery(Type type)
    {
        return true;
    }

    /// <summary>
    /// Describes a generated navigation expression from FromType to ToType over a
    /// back-referencing property (unique index =&gt; single result, else queryable).
    /// </summary>
    protected internal class ExpressionInfo
    {
        public Type FromType;
        public Type ToType;
        public PropertyInfo Property;
        public bool IsUnique;

        public string Name = null!;

        public ExpressionInfo(Type fromType, Type toType, PropertyInfo property, bool isUnique)
        {
            FromType = fromType;
            ToType = toType;
            Property = property;
            IsUnique = isUnique;
        }
    }

    /// <summary>
    /// Finds entity-typed properties of <paramref name="toType"/> that back-reference another
    /// entity, names them, and drops ambiguous pairs (more than one expression between the
    /// same FromType/ToType) and unwanted kinds.
    /// </summary>
    protected virtual List<ExpressionInfo> GetExpressions(Type toType)
    {
        var result = (from pi in Reflector.PublicInstanceDeclaredPropertiesInOrder(toType)
                      let fromType = pi.PropertyType.CleanType()
                      where fromType.IsEntity() && !fromType.IsAbstract
                      let fi = Reflector.TryFindFieldInfo(toType, pi)
                      where fi != null
                      let isUnique = fi.GetCustomAttribute<UniqueIndexAttribute>() != null
                      select new ExpressionInfo(fromType, toType, pi, isUnique))
                      .ToList();

        foreach (var ei in result)
        {
            ei.Name = GetExpressionName(ei);
        }

        result = result.GroupBy(ei => new { ei.FromType, ei.ToType }).Where(g => g.Count() == 1).SelectMany(g => g).ToList();

        result = result.Where(ShouldWriteExpression).ToList();

        return result;
    }

    /// <summary>"Parent" becomes "Children"; unique expressions use the singular type name, others the plural.</summary>
    protected virtual string GetExpressionName(ExpressionInfo ei)
    {
        if (ei.Property.Name == "Parent")
            return "Children";

        if(ei.IsUnique)
            return Reflector.CleanTypeName(ei.ToType);

        return NaturalLanguageTools.Pluralize(Reflector.CleanTypeName(ei.ToType).SpacePascal()).ToPascal();
    }

    /// <summary>Skips expressions whose source entity kind is Part/String/SystemString.</summary>
    protected virtual bool ShouldWriteExpression(ExpressionInfo ei)
    {
        return EntityKindCache.GetEntityKind(ei.FromType) switch
        {
            EntityKind.Part or EntityKind.String or EntityKind.SystemString => false,
            _ => true,
        };
    }

    /// <summary>Renders the [AutoExpressionField] extension method for one expression.</summary>
    protected virtual string WriteExpressionMethod(ExpressionInfo info)
    {
        Type from = info.Property.PropertyType.CleanType();

        string varFrom = GetVariableName(from);
        string varTo = GetVariableName(info.ToType);
        if (varTo == varFrom)
            varTo += "2";

        // Lite references compare with .Is(); direct references with ==.
        string filter = info.Property.PropertyType.IsLite() ? "{t} => {t}.{prop}.Is({f})" : "{t} => {t}.{prop} == {f}";

        string str = info.IsUnique? @"[AutoExpressionField] public static {to} {Method}(this {from} {f}) => As.Expression(() => Database.Query<{to}>().SingleOrDefaultEx({filter})); " : @"[AutoExpressionField] public static IQueryable<{to}> {Method}(this {from} {f}) => As.Expression(() => Database.Query<{to}>().Where({filter})); ";

        return str.Replace("{filter}", filter)
            .Replace("{from}", from.Name)
            .Replace("{to}", info.ToType.Name)
            .Replace("{t}", varTo)
            .Replace("{f}", varFrom)
            .Replace("{prop}", info.Property.Name)
            .Replace("{Method}", info.Name);
    }

    /// <summary>Up to 10 queryable simple/entity/lite properties, "Name"-like ones first.</summary>
    protected virtual IEnumerable<PropertyInfo> GetQueryProperties(Type type)
    {
        return (from p in Reflector.PublicInstancePropertiesInOrder(type)
                where Reflector.QueryableProperty(type, p)
                where IsSimpleValueType(p.PropertyType) || p.PropertyType.IsEntity() || p.PropertyType.IsLite()
                orderby p.Name.Contains("Name") ? 1 : 2
                select p).Take(10);
    }

    /// <summary>Renders .WithVirtualMList(...) lines for every virtual MList property, or null when none.</summary>
    protected virtual string? GetWithVirtualMLists(Type type)
    {
        return (from p in Reflector.PublicInstancePropertiesInOrder(type)
                let bp = GetVirtualMListBackReference(p)
                where bp != null
                select GetWithVirtualMList(type, p, bp)).ToString("\r\n").DefaultText(null!);
    }

    protected virtual string GetWithVirtualMList(Type type, PropertyInfo p, PropertyInfo bp)
    {
        var p1 = GetVariableName(type);
        var p2 = GetVariableName(p.PropertyType.ElementType()!);
        if (p1 == p2)
            p2 += "2";

        // Cast is only needed when the back reference is polymorphic (ImplementedBy).
        var cast = p.DeclaringType == bp.PropertyType.CleanType() ? "" : $"(Lite<{p.DeclaringType!.Name}>)";

        return $" .WithVirtualMList({p1} => {p1}.{p.Name}, {p2} => {cast}{p2}.{bp.Name})";
    }

    /// <summary>
    /// For an [Ignore]-marked MList-of-entities property, finds the element property that
    /// references back to the declaring type; null when the property is not a virtual MList.
    /// </summary>
    protected virtual PropertyInfo? GetVirtualMListBackReference(PropertyInfo pi)
    {
        if (!pi.PropertyType.IsMList())
            return null;

        if (!pi.PropertyType.ElementType()!.IsEntity())
            return null;

        if (!pi.HasAttribute<IgnoreAttribute>())
            return null;

        var t = pi.PropertyType.ElementType()!;

        var backProperty = Reflector.PublicInstancePropertiesInOrder(t).SingleOrDefaultEx(bp => IsVirtualMListBackReference(bp, pi.DeclaringType!));

        return backProperty;
    }

    /// <summary>A Lite property pointing at (or ImplementedBy-including) the target type.</summary>
    protected virtual bool IsVirtualMListBackReference(PropertyInfo pi, Type targetType)
    {
        if (!pi.PropertyType.IsLite())
            return false;

        if (pi.PropertyType.CleanType() == targetType)
            return true;

        if (pi.GetCustomAttribute<ImplementedByAttribute>()?.ImplementedTypes.Contains(targetType) == true)
            return true;

        return false;
    }

    /// <summary>True for types mapped to plain number/string/date SQL columns.</summary>
    protected virtual bool IsSimpleValueType(Type type)
    {
        var t = CurrentSchema.Settings.TryGetSqlDbTypePair(type.UnNullify());

        return t != null && t.UserDefinedTypeName == null && (t.DbType.IsNumber() || t.DbType.IsString() || t.DbType.IsDate());
    }

    /// <summary>Renders the operation registrations for a type (skipped ones return null).</summary>
    protected virtual string WriteOperations(Type type)
    {
        StringBuilder sb = new StringBuilder();
        foreach (var oper in GetOperationsSymbols(type))
        {
            string? operation = WriteOperation(oper);
            if (operation != null)
            {
                sb.Append(operation);
                sb.AppendLine();
            }
        }
        return sb.ToString();
    }

    /// <summary>Dispatches to the writer matching the operation kind; Save handled by WithSave instead.</summary>
    protected virtual string? WriteOperation(IOperationSymbolContainer oper)
    {
        switch (GetOperationType(oper))
        {
            case OperationType.Execute:
                if (IsSave(oper) && ShouldWriteSimpleOperations(oper))
                    return null;
                return WriteExecuteOperation(oper);
            case OperationType.Delete:
                return WriteDeleteOperation(oper);
            case OperationType.Constructor:
                return WriteConstructSimple(oper);
            case OperationType.ConstructorFrom:
                return WriteConstructFrom(oper);
            case OperationType.ConstructorFromMany:
                return WriteConstructFromMany(oper);
            default:
                throw new InvalidOperationException();
        }
    }

    /// <summary>Infers the operation kind from the symbol container's implementation type name.</summary>
    static OperationType GetOperationType(IOperationSymbolContainer oper)
    {
        string type = oper.GetType().TypeName();

        if (type.Contains("ExecuteSymbolImp"))
            return OperationType.Execute;

        if (type.Contains("DeleteSymbolImp"))
            return OperationType.Delete;

        if (type.Contains("SimpleImp"))
            return OperationType.Constructor;

        if (type.Contains("FromImp"))
            return OperationType.ConstructorFrom;

        if (type.Contains("FromManyImp"))
            return OperationType.ConstructorFromMany;

        ; // NOTE(review): stray empty statement — harmless, left as-is.

        throw new InvalidOperationException();
    }

    /// <summary>Renders a Graph&lt;T&gt;.Execute registration (Save gets CanBeNew/CanBeModified).</summary>
    protected virtual string WriteExecuteOperation(IOperationSymbolContainer oper)
    {
        Type type = oper.GetType().GetGenericArguments().Single();

        var v = GetVariableName(type);

        StringBuilder sb = new StringBuilder();
        sb.AppendLine("new Graph<{0}>.Execute({1})".FormatWith(type.TypeName(), oper.Symbol.ToString()));
        sb.AppendLine("{");
        if (IsSave(oper))
        {
            sb.AppendLine(" CanBeNew = true,");
            sb.AppendLine(" CanBeModified = true,");
        }
        sb.AppendLine(" Execute = ({0}, _) => {{ }}".FormatWith(v));
        sb.AppendLine("}.Register();");

        return sb.ToString();
    }

    protected virtual bool ShouldWriteSimpleOperations(IOperationSymbolContainer oper)
    {
        return true;
    }

    /// <summary>Heuristic: treats any symbol whose text contains "Save" as the save operation.</summary>
    protected virtual bool IsSave(IOperationSymbolContainer oper)
    {
        return oper.ToString()!.Contains("Save");
    }

    /// <summary>Renders a Graph&lt;T&gt;.Delete registration; null when WithDelete is used instead.</summary>
    protected virtual string? WriteDeleteOperation(IOperationSymbolContainer oper)
    {
        if (ShouldWriteSimpleOperations(oper))
            return null;

        Type type = oper.GetType().GetGenericArguments().Single();

        string v = GetVariableName(type);

        StringBuilder sb = new StringBuilder();
        sb.AppendLine("new Graph<{0}>.Delete({1})".FormatWith(type.TypeName(), oper.Symbol.ToString()));
        sb.AppendLine("{");
        sb.AppendLine(" Delete = ({0}, _) => {0}.Delete()".FormatWith(v));
        sb.AppendLine("}.Register();");

        return sb.ToString();
    }

    /// <summary>Lambda variable name: lowercase first letter of the type name.</summary>
    protected virtual string GetVariableName(Type type)
    {
        return type.Name.Substring(0, 1).ToLower();
    }

    /// <summary>Renders a Graph&lt;T&gt;.Construct registration with an empty initializer.</summary>
    protected virtual string WriteConstructSimple(IOperationSymbolContainer oper)
    {
        Type type = oper.GetType().GetGenericArguments().Single();

        StringBuilder sb = new StringBuilder();
        sb.AppendLine("new Graph<{0}>.Construct({1})".FormatWith(type.TypeName(), oper.Symbol.ToString()));
        sb.AppendLine("{");
        sb.AppendLine(" Construct = (_) => new {0}".FormatWith(type.TypeName()));
        sb.AppendLine(" {");
        sb.AppendLine(" }");
        sb.AppendLine("}.Register();");

        return sb.ToString();
    }

    /// <summary>Renders a Graph&lt;T&gt;.ConstructFrom&lt;F&gt; registration with an empty initializer.</summary>
    protected virtual string WriteConstructFrom(IOperationSymbolContainer oper)
    {
        List<Type> type = oper.GetType().GetGenericArguments().ToList();

        StringBuilder sb = new StringBuilder();
        sb.AppendLine("new Graph<{0}>.ConstructFrom<{1}>({2})".FormatWith(type[0].TypeName(), type[1].TypeName(), oper.Symbol.ToString()));
        sb.AppendLine("{");
        sb.AppendLine(" Construct = ({0}, _) => new {1}".FormatWith(GetVariableName(type[1]), type[0].TypeName()));
        sb.AppendLine(" {");
        sb.AppendLine(" }");
        sb.AppendLine("}.Register();");

        return sb.ToString();
    }

    /// <summary>Renders a Graph&lt;T&gt;.ConstructFromMany&lt;F&gt; registration with an empty initializer.</summary>
    protected virtual string WriteConstructFromMany(IOperationSymbolContainer oper)
    {
        List<Type> type = oper.GetType().GetGenericArguments().ToList();

        StringBuilder sb = new StringBuilder();
        sb.AppendLine("new Graph<{0}>.ConstructFromMany<{1}>({2})".FormatWith(type[0].TypeName(), type[1].TypeName(), oper.Symbol.ToString()));
        sb.AppendLine("{");
        sb.AppendLine(" Construct = ({0}s, _) => new {1}".FormatWith(GetVariableName(type[1]), type[0].TypeName()));
        sb.AppendLine(" {");
        sb.AppendLine(" }");
        sb.AppendLine("}.Register();");

        return sb.ToString();
    }

    /// <summary>
    /// Finds the companion "{Type}Operation" static class (Entity suffix stripped) in the
    /// entity's assembly and returns its public static operation symbol fields; empty when
    /// no such class exists.
    /// </summary>
    protected virtual IEnumerable<IOperationSymbolContainer> GetOperationsSymbols(Type type)
    {
        string name = type.FullName!.RemoveSuffix("Entity") + "Operation";

        var operType = type.Assembly.GetType(name);

        if (operType == null)
            return Enumerable.Empty<IOperationSymbolContainer>();

        return (from fi in operType.GetFields(BindingFlags.Static | BindingFlags.Public | BindingFlags.DeclaredOnly)
                select (IOperationSymbolContainer)fi.GetValue(null)!).ToList();
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Xunit;

namespace System.IO.Tests
{
    /// <summary>
    /// Tests for File.Exists. The virtual Exists(...) indirection lets derived suites
    /// reuse these cases against a different existence check.
    /// </summary>
    public class File_Exists : FileSystemTest
    {
        #region Utilities

        // Override point for subclasses; defaults to File.Exists.
        public virtual bool Exists(string path)
        {
            return File.Exists(path);
        }

        #endregion

        #region UniversalTests

        [Fact]
        public void NullAsPath_ReturnsFalse()
        {
            // Exists never throws on bad input; null simply yields false.
            Assert.False(Exists(null));
        }

        [Fact]
        public void EmptyAsPath_ReturnsFalse()
        {
            Assert.False(Exists(string.Empty));
        }

        [Fact]
        public void NonExistentValidPath_ReturnsFalse()
        {
            Assert.All((IOInputs.GetValidPathComponentNames()), (path) =>
            {
                Assert.False(Exists(path), path);
            });
        }

        [Fact]
        public void ValidPathExists_ReturnsTrue()
        {
            Assert.All((IOInputs.GetValidPathComponentNames()), (component) =>
            {
                string path = Path.Combine(TestDirectory, component);
                FileInfo testFile = new FileInfo(path);
                testFile.Create().Dispose();
                Assert.True(Exists(path));
            });
        }

        [Theory, MemberData(nameof(PathsWithInvalidCharacters))]
        public void PathWithInvalidCharactersAsPath_ReturnsFalse(string invalidPath)
        {
            // Checks that errors aren't thrown when calling Exists() on paths with impossible to create characters
            Assert.False(Exists(invalidPath));

            // "." and ".." resolve to directories, so File.Exists is false for them too.
            Assert.False(Exists(".."));
            Assert.False(Exists("."));
        }

        [Fact]
        public void PathAlreadyExistsAsFile()
        {
            string path = GetTestFilePath();
            File.Create(path).Dispose();
            Assert.True(Exists(IOServices.RemoveTrailingSlash(path)));
            Assert.True(Exists(IOServices.RemoveTrailingSlash(IOServices.RemoveTrailingSlash(path))));
            Assert.True(Exists(IOServices.RemoveTrailingSlash(IOServices.AddTrailingSlashIfNeeded(path))));
        }

        [Fact]
        public void PathEndsInTrailingSlash()
        {
            // A trailing separator makes the path name a directory, never a file.
            string path = GetTestFilePath() + Path.DirectorySeparatorChar;
            Assert.False(Exists(path));
        }

        [Fact]
        public void PathAlreadyExistsAsDirectory()
        {
            string path = GetTestFilePath();
            // testDir is only needed to materialize the directory on disk.
            DirectoryInfo testDir = Directory.CreateDirectory(path);

            // A directory is not a file: File.Exists must be false regardless of slash handling.
            Assert.False(Exists(IOServices.RemoveTrailingSlash(path)));
            Assert.False(Exists(IOServices.RemoveTrailingSlash(IOServices.RemoveTrailingSlash(path))));
            Assert.False(Exists(IOServices.RemoveTrailingSlash(IOServices.AddTrailingSlashIfNeeded(path))));
        }

        [Fact]
        public void DirectoryLongerThanMaxDirectoryAsPath_DoesntThrow()
        {
            Assert.All((IOInputs.GetPathsLongerThanMaxDirectory(GetTestFilePath())), (path) =>
            {
                Assert.False(Exists(path));
            });
        }

        [Fact]
        public void DirectoryLongerThanMaxPathAsPath_DoesntThrow()
        {
            Assert.All((IOInputs.GetPathsLongerThanMaxPath(GetTestFilePath())), (path) =>
            {
                Assert.False(Exists(path), path);
            });
        }

        [ConditionalFact(nameof(CanCreateSymbolicLinks))]
        public void SymLinksMayExistIndependentlyOfTarget()
        {
            var path = GetTestFilePath();
            var linkPath = GetTestFilePath();

            File.Create(path).Dispose();
            Assert.True(MountHelper.CreateSymbolicLink(linkPath, path, isDirectory: false));

            // Both the symlink and the target exist
            Assert.True(File.Exists(path), "path should exist");
            Assert.True(File.Exists(linkPath), "linkPath should exist");

            // Delete the target. The symlink should still exist
            File.Delete(path);
            Assert.False(File.Exists(path), "path should now not exist");
            Assert.True(File.Exists(linkPath), "linkPath should still exist");

            // Now delete the symlink.
            File.Delete(linkPath);
            Assert.False(File.Exists(linkPath), "linkPath should no longer exist");
        }

        #endregion

        #region PlatformSpecific

        [Fact]
        [PlatformSpecific(TestPlatforms.Windows)] // Unix equivalent tested already in CreateDirectory
        public void WindowsNonSignificantWhiteSpaceAsPath_ReturnsFalse()
        {
            // Checks that errors aren't thrown when calling Exists() on impossible paths
            Assert.All((IOInputs.GetWhiteSpace()), (component) =>
            {
                Assert.False(Exists(component));
            });
        }

        [Fact]
        [PlatformSpecific(CaseInsensitivePlatforms)]
        public void DoesCaseInsensitiveInvariantComparions()
        {
            FileInfo testFile = new FileInfo(GetTestFilePath());
            testFile.Create().Dispose();
            Assert.True(Exists(testFile.FullName));
            Assert.True(Exists(testFile.FullName.ToUpperInvariant()));
            Assert.True(Exists(testFile.FullName.ToLowerInvariant()));
        }

        [Fact]
        [PlatformSpecific(CaseSensitivePlatforms)]
        public void DoesCaseSensitiveComparions()
        {
            FileInfo testFile = new FileInfo(GetTestFilePath());
            testFile.Create().Dispose();
            Assert.True(Exists(testFile.FullName));
            Assert.False(Exists(testFile.FullName.ToUpperInvariant()));
            Assert.False(Exists(testFile.FullName.ToLowerInvariant()));
        }

        [Fact]
        [PlatformSpecific(TestPlatforms.Windows)] // In Windows, trailing whitespace in a path is trimmed
        public void TrimTrailingWhitespacePath()
        {
            FileInfo testFile = new FileInfo(GetTestFilePath());
            testFile.Create().Dispose();
            Assert.All((IOInputs.GetWhiteSpace()), (component) =>
            {
                Assert.True(Exists(testFile.FullName + component)); // string concat in case Path.Combine() trims whitespace before Exists gets to it
            });
        }

        [Fact]
        [PlatformSpecific(TestPlatforms.Windows)] // alternate data stream
        public void PathWithAlternateDataStreams_ReturnsFalse()
        {
            Assert.All((IOInputs.GetPathsWithAlternativeDataStreams()), (component) =>
            {
                Assert.False(Exists(component));
            });
        }

        [Fact]
        [OuterLoop]
        [PlatformSpecific(TestPlatforms.Windows)] // device names
        public void PathWithReservedDeviceNameAsPath_ReturnsFalse()
        {
            Assert.All((IOInputs.GetPathsWithReservedDeviceNames()), (component) =>
            {
                Assert.False(Exists(component));
            });
        }

        [Fact]
        [PlatformSpecific(TestPlatforms.Windows)] // UNC paths
        public void UncPathWithoutShareNameAsPath_ReturnsFalse()
        {
            Assert.All((IOInputs.GetUncPathsWithoutShareName()), (component) =>
            {
                Assert.False(Exists(component));
            });
        }

        [Fact]
        [PlatformSpecific(TestPlatforms.Windows)] // max directory length not fixed on Unix
        public void DirectoryWithComponentLongerThanMaxComponentAsPath_ReturnsFalse()
        {
            Assert.All((IOInputs.GetPathsWithComponentLongerThanMaxComponent()), (component) =>
            {
                Assert.False(Exists(component));
            });
        }

        [Fact]
        [PlatformSpecific(TestPlatforms.AnyUnix)] // Uses P/Invokes
        public void FalseForNonRegularFile()
        {
            string fileName = GetTestFilePath();
            Assert.Equal(0, mkfifo(fileName, 0));
            // NOTE(review): despite the test name, this asserts True for a FIFO —
            // confirm whether the name or the assertion reflects the intended contract.
            Assert.True(File.Exists(fileName));
        }

        #endregion
    }
}
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//

// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Microsoft.Azure.Management.RecoveryServices;
using Microsoft.Azure.Management.RecoveryServices.Models;
using Microsoft.Azure.Management.SiteRecovery.Models;
using Newtonsoft.Json.Linq;

namespace Microsoft.Azure.Management.RecoveryServices
{
    /// <summary>
    /// Definition of vault operations for the Site Recovery extension.
    /// </summary>
    internal partial class VaultOperations : IServiceOperations<RecoveryServicesManagementClient>, IVaultOperations
    {
        /// <summary>
        /// Initializes a new instance of the VaultOperations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        internal VaultOperations(RecoveryServicesManagementClient client)
        {
            this._client = client;
        }

        private RecoveryServicesManagementClient _client;

        /// <summary>
        /// Gets a reference to the
        /// Microsoft.Azure.Management.RecoveryServices.RecoveryServicesManagementClient.
        /// </summary>
        public RecoveryServicesManagementClient Client
        {
            get { return this._client; }
        }

        /// <summary>
        /// Creates a vault
        /// </summary>
        /// <param name='resourceGroupName'>
        /// Required. The name of the (resource group?) cloud service
        /// containing the job collection.
        /// </param>
        /// <param name='vaultName'>
        /// Required. The name of the vault to create.
        /// </param>
        /// <param name='vaultCreationInput'>
        /// Required. Vault object to be created
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response model for the Vm group object.
        /// </returns>
        public async Task<VaultCreateResponse> BeginCreatingAsync(string resourceGroupName, string vaultName, VaultCreateArgs vaultCreationInput, CancellationToken cancellationToken)
        {
            // Validate
            if (resourceGroupName == null)
            {
                throw new ArgumentNullException("resourceGroupName");
            }
            if (vaultName == null)
            {
                throw new ArgumentNullException("vaultName");
            }
            if (vaultCreationInput == null)
            {
                throw new ArgumentNullException("vaultCreationInput");
            }

            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("vaultName", vaultName);
                tracingParameters.Add("vaultCreationInput", vaultCreationInput);
                TracingAdapter.Enter(invocationId, this, "BeginCreatingAsync", tracingParameters);
            }

            // Construct URL:
            // /Subscriptions/{sub}/resourceGroups/{rg}/providers/{ns}/SiteRecoveryVault/{vault}?api-version=...
            string url = "";
            url = url + "/Subscriptions/";
            if (this.Client.Credentials.SubscriptionId != null)
            {
                url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
            }
            url = url + "/resourceGroups/";
            url = url + Uri.EscapeDataString(resourceGroupName);
            url = url + "/providers/";
            url = url + Uri.EscapeDataString(this.Client.ResourceNamespace);
            url = url + "/";
            url = url + "SiteRecoveryVault";
            url = url + "/";
            url = url + Uri.EscapeDataString(vaultName);
            List<string> queryParameters = new List<string>();
            queryParameters.Add("api-version=2015-03-15");
            if (queryParameters.Count > 0)
            {
                url = url + "?" + string.Join("&", queryParameters);
            }
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Put;
                httpRequest.RequestUri = new Uri(url);

                // Set Headers
                httpRequest.Headers.Add("Accept", "application/json; charset=utf-8");
                httpRequest.Headers.Add("x-ms-version", "2015-01-01");

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

                // Serialize Request: only non-null members are emitted into the JSON body.
                string requestContent = null;
                JToken requestDoc = null;

                JObject resourceValue = new JObject();
                requestDoc = resourceValue;

                if (vaultCreationInput.Properties != null)
                {
                    JObject propertiesValue = new JObject();
                    resourceValue["properties"] = propertiesValue;

                    if (vaultCreationInput.Properties.Sku != null)
                    {
                        JObject skuValue = new JObject();
                        propertiesValue["sku"] = skuValue;

                        if (vaultCreationInput.Properties.Sku.Name != null)
                        {
                            skuValue["name"] = vaultCreationInput.Properties.Sku.Name;
                        }
                    }

                    if (vaultCreationInput.Properties.ProvisioningState != null)
                    {
                        propertiesValue["provisioningState"] = vaultCreationInput.Properties.ProvisioningState;
                    }
                }

                if (vaultCreationInput.Location != null)
                {
                    resourceValue["location"] = vaultCreationInput.Location;
                }

                requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
                httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
                httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");

                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    // 200/201/202 are all acceptable for a PUT of a long-running create.
                    if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Created && statusCode != HttpStatusCode.Accepted)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    VaultCreateResponse result = null;
                    // Deserialize Response: every field is optional; missing/null JSON
                    // tokens simply leave the corresponding model property unset.
                    if (statusCode == HttpStatusCode.OK || statusCode == HttpStatusCode.Created || statusCode == HttpStatusCode.Accepted)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new VaultCreateResponse();
                        JToken responseDoc = null;
                        if (string.IsNullOrEmpty(responseContent) == false)
                        {
                            responseDoc = JToken.Parse(responseContent);
                        }

                        if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                        {
                            JToken idValue = responseDoc["id"];
                            if (idValue != null && idValue.Type != JTokenType.Null)
                            {
                                string idInstance = ((string)idValue);
                                result.Id = idInstance;
                            }

                            JToken nameValue = responseDoc["name"];
                            if (nameValue != null && nameValue.Type != JTokenType.Null)
                            {
                                string nameInstance = ((string)nameValue);
                                result.Name = nameInstance;
                            }

                            JToken typeValue = responseDoc["type"];
                            if (typeValue != null && typeValue.Type != JTokenType.Null)
                            {
                                string typeInstance = ((string)typeValue);
                                result.Type = typeInstance;
                            }

                            JToken locationValue = responseDoc["location"];
                            if (locationValue != null && locationValue.Type != JTokenType.Null)
                            {
                                string locationInstance = ((string)locationValue);
                                result.Location = locationInstance;
                            }

                            JToken tagsSequenceElement = ((JToken)responseDoc["tags"]);
                            if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
                            {
                                foreach (JProperty property in tagsSequenceElement)
                                {
                                    string tagsKey = ((string)property.Name);
                                    string tagsValue = ((string)property.Value);
                                    result.Tags.Add(tagsKey, tagsValue);
                                }
                            }

                            JToken propertiesValue2 = responseDoc["properties"];
                            if (propertiesValue2 != null && propertiesValue2.Type != JTokenType.Null)
                            {
                                VaultProperties propertiesInstance = new VaultProperties();
                                result.Properties = propertiesInstance;

                                JToken skuValue2 = propertiesValue2["sku"];
                                if (skuValue2 != null && skuValue2.Type != JTokenType.Null)
                                {
                                    VaultSku skuInstance = new VaultSku();
                                    propertiesInstance.Sku = skuInstance;

                                    JToken nameValue2 = skuValue2["name"];
                                    if (nameValue2 != null && nameValue2.Type != JTokenType.Null)
                                    {
                                        string nameInstance2 = ((string)nameValue2);
                                        skuInstance.Name = nameInstance2;
                                    }
                                }

                                JToken provisioningStateValue = propertiesValue2["provisioningState"];
                                if (provisioningStateValue != null && provisioningStateValue.Type != JTokenType.Null)
                                {
                                    string provisioningStateInstance = ((string)provisioningStateValue);
                                    propertiesInstance.ProvisioningState = provisioningStateInstance;
                                }
                            }

                            JToken etagValue = responseDoc["etag"];
                            if (etagValue != null && etagValue.Type != JTokenType.Null)
                            {
                                string etagInstance = ((string)etagValue);
                                result.ETag = etagInstance;
                            }
                        }
                    }
                    result.StatusCode = statusCode;
                    // An ETag response header overrides any etag found in the body.
                    if (httpResponse.Headers.Contains("ETag"))
                    {
                        result.ETag = httpResponse.Headers.GetValues("ETag").FirstOrDefault();
                    }
                    if (httpResponse.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }

                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }

        /// <summary>
        /// Deletes a vault
        /// </summary>
        /// <param name='resourceGroupName'>
        /// Required. The name of the (Resource Group) cloud service containing
        /// the job collection.
        /// </param>
        /// <param name='vaultName'>
        /// Required. The name of the vault to delete.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response body contains the status of the specified asynchronous
        /// operation, indicating whether it has succeeded, is inprogress, or
        /// has failed. Note that this status is distinct from the HTTP status
        /// code returned for the Get Operation Status operation itself. If
        /// the asynchronous operation succeeded, the response body includes
        /// the HTTP status code for the successful request. If the
        /// asynchronous operation failed, the response body includes the HTTP
        /// status code for the failed request, and also includes error
        /// information regarding the failure.
        /// </returns>
        public async Task<RecoveryServicesOperationStatusResponse> BeginDeletingAsync(string resourceGroupName, string vaultName, CancellationToken cancellationToken)
        {
            // Validate
            if (resourceGroupName == null)
            {
                throw new ArgumentNullException("resourceGroupName");
            }
            if (vaultName == null)
            {
                throw new ArgumentNullException("vaultName");
            }

            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("vaultName", vaultName);
                TracingAdapter.Enter(invocationId, this, "BeginDeletingAsync", tracingParameters);
            }

            // Construct URL.
            // NOTE(review): unlike BeginCreatingAsync/GetAsync, no api-version query
            // parameter is appended here — confirm this matches the service contract.
            string url = "";
            url = url + "/Subscriptions/";
            if (this.Client.Credentials.SubscriptionId != null)
            {
                url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
            }
            url = url + "/resourceGroups/";
            url = url + Uri.EscapeDataString(resourceGroupName);
            url = url + "/providers/";
            url = url + Uri.EscapeDataString(this.Client.ResourceNamespace);
            url = url + "/";
            url = url + "SiteRecoveryVault";
            url = url + "/";
            url = url + Uri.EscapeDataString(vaultName);
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Delete;
                httpRequest.RequestUri = new Uri(url);

                // Set Headers
                httpRequest.Headers.Add("Accept", "application/json; charset=utf-8");
                httpRequest.Headers.Add("x-ms-version", "2015-01-01");

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    // Only 202 Accepted indicates the delete was accepted as a long-running operation.
                    if (statusCode != HttpStatusCode.Accepted)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    RecoveryServicesOperationStatusResponse result = null;
                    // Deserialize Response
                    result = new RecoveryServicesOperationStatusResponse();
                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }

                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }

        /// <summary>
        /// Creates a vault
        /// </summary>
        /// <param name='resourceGroupName'>
        /// Required. The name of the (resource group) cloud service containing
        /// the job collection.
        /// </param>
        /// <param name='vaultName'>
        /// Optional. The name of the vault to create.
        /// </param>
        /// <param name='vaultCreationInput'>
        /// Required. Vault object to be created
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response body contains the status of the specified asynchronous
        /// operation, indicating whether it has succeeded, is inprogress, or
        /// has failed. Note that this status is distinct from the HTTP status
        /// code returned for the Get Operation Status operation itself. If
        /// the asynchronous operation succeeded, the response body includes
        /// the HTTP status code for the successful request. If the
        /// asynchronous operation failed, the response body includes the HTTP
        /// status code for the failed request, and also includes error
        /// information regarding the failure.
        /// </returns>
        public async Task<RecoveryServicesOperationStatusResponse> CreateAsync(string resourceGroupName, string vaultName, VaultCreateArgs vaultCreationInput, CancellationToken cancellationToken)
        {
            RecoveryServicesManagementClient client = this.Client;
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("vaultName", vaultName);
                tracingParameters.Add("vaultCreationInput", vaultCreationInput);
                TracingAdapter.Enter(invocationId, this, "CreateAsync", tracingParameters);
            }

            cancellationToken.ThrowIfCancellationRequested();
            VaultCreateResponse response = await client.Vaults.BeginCreatingAsync(resourceGroupName, vaultName, vaultCreationInput, cancellationToken).ConfigureAwait(false);
            cancellationToken.ThrowIfCancellationRequested();
            RecoveryServicesOperationStatusResponse result = await client.GetOperationStatusAsync(response.RequestId, cancellationToken).ConfigureAwait(false);
            int delayInSeconds = 15;
            if (client.LongRunningOperationInitialTimeout >= 0)
            {
                delayInSeconds = client.LongRunningOperationInitialTimeout;
            }
            // Poll while the long-running operation is still in progress:
            // ((x != InProgress) == false) is equivalent to (x == InProgress).
            while ((result.Status != RecoveryServicesOperationStatus.InProgress) == false)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await TaskEx.Delay(delayInSeconds * 1000, cancellationToken).ConfigureAwait(false);
                cancellationToken.ThrowIfCancellationRequested();
                result = await client.GetOperationStatusAsync(response.RequestId, cancellationToken).ConfigureAwait(false);
                delayInSeconds = 10;
                if (client.LongRunningOperationRetryTimeout >= 0)
                {
                    delayInSeconds = client.LongRunningOperationRetryTimeout;
                }
            }

            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }

            if (result.Status != RecoveryServicesOperationStatus.Succeeded)
            {
                if (result.Error != null)
                {
                    CloudException ex = new CloudException(result.Error.Code + " : " + result.Error.Message);
                    ex.Error = new CloudError();
                    ex.Error.Code = result.Error.Code;
                    ex.Error.Message = result.Error.Message;
                    if (shouldTrace)
                    {
                        TracingAdapter.Error(invocationId, ex);
                    }
                    throw ex;
                }
                else
                {
                    CloudException ex = new CloudException("");
                    if (shouldTrace)
                    {
                        TracingAdapter.Error(invocationId, ex);
                    }
                    throw ex;
                }
            }
            // Propagate the ETag from the initial create response onto the final status.
            result.ETag = response.ETag;
            return result;
        }

        /// <summary>
        /// Deletes a vault
        /// </summary>
        /// <param name='resourceGroupName'>
        /// Required. The name of the (Resource Group) cloud service containing
        /// the job collection.
        /// </param>
        /// <param name='vaultName'>
        /// Required. The name of the vault to delete.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response body contains the status of the specified asynchronous
        /// operation, indicating whether it has succeeded, is inprogress, or
        /// has failed. Note that this status is distinct from the HTTP status
        /// code returned for the Get Operation Status operation itself. If
        /// the asynchronous operation succeeded, the response body includes
        /// the HTTP status code for the successful request. If the
        /// asynchronous operation failed, the response body includes the HTTP
        /// status code for the failed request, and also includes error
        /// information regarding the failure.
        /// </returns>
        public async Task<RecoveryServicesOperationStatusResponse> DeleteAsync(string resourceGroupName, string vaultName, CancellationToken cancellationToken)
        {
            RecoveryServicesManagementClient client = this.Client;
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("vaultName", vaultName);
                TracingAdapter.Enter(invocationId, this, "DeleteAsync", tracingParameters);
            }

            cancellationToken.ThrowIfCancellationRequested();
            RecoveryServicesOperationStatusResponse response = await client.Vaults.BeginDeletingAsync(resourceGroupName, vaultName, cancellationToken).ConfigureAwait(false);
            // Fast path: the delete completed synchronously.
            if (response.Status == RecoveryServicesOperationStatus.Succeeded)
            {
                return response;
            }
            cancellationToken.ThrowIfCancellationRequested();
            RecoveryServicesOperationStatusResponse result = await client.GetOperationStatusAsync(response.RequestId, cancellationToken).ConfigureAwait(false);
            int delayInSeconds = 15;
            if (client.LongRunningOperationInitialTimeout >= 0)
            {
                delayInSeconds = client.LongRunningOperationInitialTimeout;
            }
            // Poll while the long-running operation is still in progress (see CreateAsync).
            while ((result.Status != RecoveryServicesOperationStatus.InProgress) == false)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await TaskEx.Delay(delayInSeconds * 1000, cancellationToken).ConfigureAwait(false);
                cancellationToken.ThrowIfCancellationRequested();
                result = await client.GetOperationStatusAsync(response.RequestId, cancellationToken).ConfigureAwait(false);
                delayInSeconds = 10;
                if (client.LongRunningOperationRetryTimeout >= 0)
                {
                    delayInSeconds = client.LongRunningOperationRetryTimeout;
                }
            }

            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }

            if (result.Status != RecoveryServicesOperationStatus.Succeeded)
            {
                if (result.Error != null)
                {
                    CloudException ex = new CloudException(result.Error.Code + " : " + result.Error.Message);
                    ex.Error = new CloudError();
                    ex.Error.Code = result.Error.Code;
                    ex.Error.Message = result.Error.Message;
                    if (shouldTrace)
                    {
                        TracingAdapter.Error(invocationId, ex);
                    }
                    throw ex;
                }
                else
                {
                    CloudException ex = new CloudException("");
                    if (shouldTrace)
                    {
                        TracingAdapter.Error(invocationId, ex);
                    }
                    throw ex;
                }
            }
            return result;
        }

        /// <summary>
        /// Get the Vaults.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// Required. The name of the (resource group?) cloud service
        /// containing the vault collection.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response model for Vault.
        /// </returns>
        public async Task<VaultListResponse> GetAsync(string resourceGroupName, CustomRequestHeaders customRequestHeaders, CancellationToken cancellationToken)
        {
            // Validate
            if (resourceGroupName == null)
            {
                throw new ArgumentNullException("resourceGroupName");
            }

            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("customRequestHeaders", customRequestHeaders);
                TracingAdapter.Enter(invocationId, this, "GetAsync", tracingParameters);
            }

            // Construct URL (collection-level, no vault name segment).
            string url = "";
            url = url + "/Subscriptions/";
            if (this.Client.Credentials.SubscriptionId != null)
            {
                url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
            }
            url = url + "/resourceGroups/";
            url = url + Uri.EscapeDataString(resourceGroupName);
            url = url + "/providers/";
            url = url + Uri.EscapeDataString(this.Client.ResourceNamespace);
            url = url + "/SiteRecoveryVault";
            List<string> queryParameters = new List<string>();
            queryParameters.Add("api-version=2015-03-15");
            if (queryParameters.Count > 0)
            {
                url = url + "?" + string.Join("&", queryParameters);
            }
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Get;
                httpRequest.RequestUri = new Uri(url);

                // Set Headers
                // NOTE(review): customRequestHeaders is documented as Optional but is
                // dereferenced unconditionally here — null would throw. Confirm callers.
                httpRequest.Headers.Add("x-ms-client-request-id", customRequestHeaders.ClientRequestId);
                httpRequest.Headers.Add("x-ms-version", "2015-01-01");

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    if (statusCode != HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    VaultListResponse result = null;
                    // Deserialize Response: vaults come back under a "Value" array;
                    // each element mirrors the single-vault shape parsed in BeginCreatingAsync.
                    if (statusCode == HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new VaultListResponse();
                        JToken responseDoc = null;
                        if (string.IsNullOrEmpty(responseContent) == false)
                        {
                            responseDoc = JToken.Parse(responseContent);
                        }

                        if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                        {
                            JToken valueArray = responseDoc["Value"];
                            if (valueArray != null && valueArray.Type != JTokenType.Null)
                            {
                                foreach (JToken valueValue in ((JArray)valueArray))
                                {
                                    Vault vaultInstance = new Vault();
                                    result.Vaults.Add(vaultInstance);

                                    JToken idValue = valueValue["id"];
                                    if (idValue != null && idValue.Type != JTokenType.Null)
                                    {
                                        string idInstance = ((string)idValue);
                                        vaultInstance.Id = idInstance;
                                    }

                                    JToken nameValue = valueValue["name"];
                                    if (nameValue != null && nameValue.Type != JTokenType.Null)
                                    {
                                        string nameInstance = ((string)nameValue);
                                        vaultInstance.Name = nameInstance;
                                    }

                                    JToken typeValue = valueValue["type"];
                                    if (typeValue != null && typeValue.Type != JTokenType.Null)
                                    {
                                        string typeInstance = ((string)typeValue);
                                        vaultInstance.Type = typeInstance;
                                    }

                                    JToken locationValue = valueValue["location"];
                                    if (locationValue != null && locationValue.Type != JTokenType.Null)
                                    {
                                        string locationInstance = ((string)locationValue);
                                        vaultInstance.Location = locationInstance;
                                    }

                                    JToken tagsSequenceElement = ((JToken)valueValue["tags"]);
                                    if (tagsSequenceElement != null && tagsSequenceElement.Type != JTokenType.Null)
                                    {
                                        foreach (JProperty property in tagsSequenceElement)
                                        {
                                            string tagsKey = ((string)property.Name);
                                            string tagsValue = ((string)property.Value);
                                            vaultInstance.Tags.Add(tagsKey, tagsValue);
                                        }
                                    }

                                    JToken propertiesValue = valueValue["properties"];
                                    if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
                                    {
                                        VaultProperties propertiesInstance = new VaultProperties();
                                        vaultInstance.Properties = propertiesInstance;

                                        JToken skuValue = propertiesValue["sku"];
                                        if (skuValue != null && skuValue.Type != JTokenType.Null)
                                        {
                                            VaultSku skuInstance = new VaultSku();
                                            propertiesInstance.Sku = skuInstance;

                                            JToken nameValue2 = skuValue["name"];
                                            if (nameValue2 != null && nameValue2.Type != JTokenType.Null)
                                            {
                                                string nameInstance2 = ((string)nameValue2);
                                                skuInstance.Name = nameInstance2;
                                            }
                                        }

                                        JToken provisioningStateValue = propertiesValue["provisioningState"];
                                        if (provisioningStateValue != null && provisioningStateValue.Type != JTokenType.Null)
                                        {
                                            string provisioningStateInstance = ((string)provisioningStateValue);
                                            propertiesInstance.ProvisioningState = provisioningStateInstance;
                                        }
                                    }

                                    // NOTE(review): key is "eTag" here vs "etag" in BeginCreatingAsync —
                                    // generated as-is; confirm against the wire format before unifying.
                                    JToken eTagValue = valueValue["eTag"];
                                    if (eTagValue != null && eTagValue.Type != JTokenType.Null)
                                    {
                                        string eTagInstance = ((string)eTagValue);
                                        vaultInstance.ETag = eTagInstance;
                                    }
                                }
                            }
                        }
                    }
                    result.StatusCode = statusCode;

                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }
    }
}
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;

using HtmlHelp;
using HtmlHelp.UIComponents;

namespace HtmlHelpViewer
{
    /// <summary>
    /// Modal dialog which lets the user customize the help-content filter:
    /// either display all help contents (filter disabled), or check a set of
    /// information types and categories to include in the filter.
    /// Exclusive information types cannot be combined with any other entry.
    /// </summary>
    public class CustomizeContent : System.Windows.Forms.Form
    {
        private System.Windows.Forms.PictureBox pictureBox1;
        private System.Windows.Forms.PictureBox pictureBox2;
        private System.Windows.Forms.CheckBox chkAll;
        private System.Windows.Forms.GroupBox groupBox1;
        private System.Windows.Forms.Label label1;
        private System.Windows.Forms.ListView lvTypes;
        private System.Windows.Forms.Label lblDescription;
        private System.Windows.Forms.ColumnHeader chName;
        private System.Windows.Forms.ColumnHeader chType;
        private System.Windows.Forms.Button btnCancel;
        private System.Windows.Forms.Button btnOK;
        private System.ComponentModel.IContainer components = null;

        // The filter being edited; supplied by the caller via the Filter property
        // before the dialog is shown.
        private InfoTypeCategoryFilter _filter = null;

        // The currently-checked exclusive info type item, if any.
        private ListViewItem _liExclusive = null;

        private HtmlHelp.UIComponents.HelpProviderEx helpProviderEx1;

        // Re-entrancy guard: true while CheckExclusive() is programmatically
        // toggling check states, so lvTypes_ItemCheck ignores those changes.
        private bool _exclusiveUpdate = false;

        public CustomizeContent()
        {
            //
            // Required for Windows Form Designer support
            //
            InitializeComponent();

            helpProviderEx1.Viewer = Viewer.Current; // set the active viewer
        }

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (components != null)
                {
                    components.Dispose();
                }
            }
            base.Dispose(disposing);
        }

        #region Windows Form Designer generated code
        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(CustomizeContent));
            this.pictureBox1 = new System.Windows.Forms.PictureBox();
            this.pictureBox2 = new System.Windows.Forms.PictureBox();
            this.chkAll = new System.Windows.Forms.CheckBox();
            this.groupBox1 = new System.Windows.Forms.GroupBox();
            this.lblDescription = new System.Windows.Forms.Label();
            this.lvTypes = new System.Windows.Forms.ListView();
            this.chName = new System.Windows.Forms.ColumnHeader();
            this.chType = new System.Windows.Forms.ColumnHeader();
            this.label1 = new System.Windows.Forms.Label();
            this.btnCancel = new System.Windows.Forms.Button();
            this.btnOK = new System.Windows.Forms.Button();
            this.helpProviderEx1 = new HtmlHelp.UIComponents.HelpProviderEx();
            this.groupBox1.SuspendLayout();
            this.SuspendLayout();
            //
            // pictureBox1
            //
            this.pictureBox1.Image = ((System.Drawing.Image)(resources.GetObject("pictureBox1.Image")));
            this.pictureBox1.Location = new System.Drawing.Point(-1, 0);
            this.pictureBox1.Name = "pictureBox1";
            this.pictureBox1.Size = new System.Drawing.Size(120, 226);
            this.pictureBox1.SizeMode = System.Windows.Forms.PictureBoxSizeMode.AutoSize;
            this.pictureBox1.TabIndex = 0;
            this.pictureBox1.TabStop = false;
            //
            // pictureBox2
            //
            this.pictureBox2.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right)));
            this.pictureBox2.Image = ((System.Drawing.Image)(resources.GetObject("pictureBox2.Image")));
            this.pictureBox2.Location = new System.Drawing.Point(0, 226);
            this.pictureBox2.Name = "pictureBox2";
            this.pictureBox2.Size = new System.Drawing.Size(446, 2);
            this.pictureBox2.SizeMode = System.Windows.Forms.PictureBoxSizeMode.StretchImage;
            this.pictureBox2.TabIndex = 1;
            this.pictureBox2.TabStop = false;
            //
            // chkAll
            //
            this.helpProviderEx1.SetHelpString(this.chkAll, "Check this if you want to view ALL help contents");
            this.chkAll.Location = new System.Drawing.Point(128, 3);
            this.chkAll.Name = "chkAll";
            this.chkAll.Size = new System.Drawing.Size(231, 24);
            this.chkAll.TabIndex = 2;
            this.chkAll.Text = "Display &all help contents";
            this.chkAll.CheckedChanged += new System.EventHandler(this.chkAll_CheckedChanged);
            //
            // groupBox1
            //
            this.groupBox1.Controls.Add(this.lblDescription);
            this.groupBox1.Controls.Add(this.lvTypes);
            this.groupBox1.Controls.Add(this.label1);
            this.groupBox1.Location = new System.Drawing.Point(127, 29);
            this.groupBox1.Name = "groupBox1";
            this.groupBox1.Size = new System.Drawing.Size(306, 193);
            this.groupBox1.TabIndex = 3;
            this.groupBox1.TabStop = false;
            this.groupBox1.Text = "Information types and categories";
            //
            // lblDescription
            //
            this.lblDescription.Location = new System.Drawing.Point(8, 158);
            this.lblDescription.Name = "lblDescription";
            this.lblDescription.Size = new System.Drawing.Size(288, 32);
            this.lblDescription.TabIndex = 2;
            //
            // lvTypes
            //
            this.lvTypes.CheckBoxes = true;
            this.lvTypes.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
                this.chName,
                this.chType});
            this.lvTypes.FullRowSelect = true;
            this.lvTypes.HeaderStyle = System.Windows.Forms.ColumnHeaderStyle.Nonclickable;
            this.helpProviderEx1.SetHelpString(this.lvTypes, "Select one or more categories/information types which should be included in your " +
                "content filter. NOTE: Exclusive information types can not be combined with other" +
                " information types or categories !");
            this.lvTypes.HideSelection = false;
            this.lvTypes.LabelWrap = false;
            this.lvTypes.Location = new System.Drawing.Point(6, 35);
            this.lvTypes.MultiSelect = false;
            this.lvTypes.Name = "lvTypes";
            this.lvTypes.Size = new System.Drawing.Size(293, 119);
            this.lvTypes.TabIndex = 1;
            this.lvTypes.View = System.Windows.Forms.View.Details;
            this.lvTypes.SelectedIndexChanged += new System.EventHandler(this.lvTypes_SelectedIndexChanged);
            this.lvTypes.ItemCheck += new System.Windows.Forms.ItemCheckEventHandler(this.lvTypes_ItemCheck);
            //
            // chName
            //
            this.chName.Text = "Name";
            this.chName.Width = 188;
            //
            // chType
            //
            this.chType.Text = "Type";
            this.chType.Width = 100;
            //
            // label1
            //
            this.label1.Location = new System.Drawing.Point(8, 19);
            this.label1.Name = "label1";
            this.label1.Size = new System.Drawing.Size(291, 19);
            this.label1.TabIndex = 0;
            this.label1.Text = "Check all info types and categories you want to view";
            //
            // btnCancel
            //
            this.btnCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
            this.btnCancel.Location = new System.Drawing.Point(357, 234);
            this.btnCancel.Name = "btnCancel";
            this.btnCancel.TabIndex = 4;
            this.btnCancel.Text = "&Cancel";
            //
            // btnOK
            //
            this.btnOK.Location = new System.Drawing.Point(272, 234);
            this.btnOK.Name = "btnOK";
            this.btnOK.TabIndex = 5;
            this.btnOK.Text = "&OK";
            this.btnOK.Click += new System.EventHandler(this.btnOK_Click);
            //
            // helpProviderEx1
            //
            this.helpProviderEx1.Viewer = null;
            //
            // CustomizeContent
            //
            this.AcceptButton = this.btnOK;
            this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
            this.ClientSize = new System.Drawing.Size(440, 267);
            this.Controls.Add(this.btnOK);
            this.Controls.Add(this.btnCancel);
            this.Controls.Add(this.groupBox1);
            this.Controls.Add(this.chkAll);
            this.Controls.Add(this.pictureBox2);
            this.Controls.Add(this.pictureBox1);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
            this.HelpButton = true;
            this.MaximizeBox = false;
            this.MinimizeBox = false;
            this.Name = "CustomizeContent";
            this.ShowInTaskbar = false;
            this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
            this.Text = "Customize content";
            this.Load += new System.EventHandler(this.CustomizeContent_Load);
            this.groupBox1.ResumeLayout(false);
            this.ResumeLayout(false);
        }
        #endregion

        /// <summary>
        /// Gets/Sets the filter edited by this dialog. Must be assigned before
        /// the dialog is shown; a null filter cancels the dialog on load.
        /// </summary>
        public InfoTypeCategoryFilter Filter
        {
            get { return _filter; }
            set { _filter = value; }
        }

        /// <summary>
        /// Checks an exclusive info type item (unchecks all others).
        /// </summary>
        /// <param name="li">listview item which is of infotype mode exclusive,
        /// or null to uncheck every item</param>
        private void CheckExclusive(ListViewItem li)
        {
            foreach (ListViewItem curItem in lvTypes.Items)
            {
                curItem.Checked = (curItem == li);
            }

            _liExclusive = li;
        }

        /// <summary>
        /// Called if the form loads. Populates the list view from the help
        /// system's information types and categories and applies the current
        /// filter state.
        /// </summary>
        /// <param name="sender">sender of the event</param>
        /// <param name="e">event parameter</param>
        private void CustomizeContent_Load(object sender, System.EventArgs e)
        {
            if (_filter == null)
            {
                DialogResult = DialogResult.Cancel;
                this.Close();
                // FIX: previously execution fell through after Close() and
                // dereferenced the null _filter below, throwing NullReferenceException.
                return;
            }

            chkAll.Checked = !_filter.FilterEnabled;
            groupBox1.Enabled = _filter.FilterEnabled;

            ArrayList arrInfotypes = HtmlHelpSystem.Current.InformationTypes;
            ArrayList arrCategories = HtmlHelpSystem.Current.Categories;

            int i = 0;

            for (i = 0; i < arrInfotypes.Count; i++)
            {
                InformationType curType = arrInfotypes[i] as InformationType;

                // hidden types are only for API called
                if (curType.Mode != InformationTypeMode.Hidden)
                {
                    if (!curType.IsInCategory)
                    {
                        ListViewItem liIT = new ListViewItem(curType.Name);
                        liIT.SubItems.Add(curType.Mode.ToString());
                        liIT.Tag = curType;

                        bool bCheck = _filter.ContainsInformationType(curType);

                        if ((bCheck) && (curType.Mode == InformationTypeMode.Exclusive))
                        {
                            _liExclusive = liIT;
                        }

                        liIT.Checked = bCheck;
                        lvTypes.Items.Add(liIT);
                    }
                }
            }

            for (i = 0; i < arrCategories.Count; i++)
            {
                Category curCat = arrCategories[i] as Category;

                ListViewItem liC = new ListViewItem(curCat.Name);
                liC.SubItems.Add("Category");
                liC.Tag = curCat;

                bool bCheck = _filter.ContainsCategory(curCat);
                liC.Checked = bCheck;
                lvTypes.Items.Add(liC);
            }

            // If an exclusive type was already active in the filter, enforce it
            // now that all items exist (unchecks everything else).
            if (_liExclusive != null)
                CheckExclusive(_liExclusive);
        }

        /// <summary>
        /// Called if the user clicks the "Display all" checkbox.
        /// </summary>
        /// <param name="sender">sender of the event</param>
        /// <param name="e">event parameter</param>
        private void chkAll_CheckedChanged(object sender, System.EventArgs e)
        {
            groupBox1.Enabled = !chkAll.Checked;
        }

        /// <summary>
        /// Called if the user changes the selection of the list view.
        /// Shows the description of the selected info type or category.
        /// </summary>
        /// <param name="sender">sender of the event</param>
        /// <param name="e">event parameter</param>
        private void lvTypes_SelectedIndexChanged(object sender, System.EventArgs e)
        {
            if (lvTypes.SelectedItems.Count > 0)
            {
                ListViewItem selItem = lvTypes.SelectedItems[0];

                InformationType iT = selItem.Tag as InformationType;
                Category cat = selItem.Tag as Category;

                if (iT != null)
                {
                    lblDescription.Text = iT.Description;
                }

                if (cat != null)
                {
                    lblDescription.Text = cat.Description;
                }
            }
            else
            {
                lblDescription.Text = "";
            }
        }

        /// <summary>
        /// Called if the user changes the check state of an item.
        /// Enforces the "exclusive" rule: checking an exclusive info type
        /// unchecks everything else, and checking any other item clears a
        /// previously-checked exclusive type.
        /// </summary>
        /// <param name="sender">sender of the event</param>
        /// <param name="e">event parameter</param>
        private void lvTypes_ItemCheck(object sender, System.Windows.Forms.ItemCheckEventArgs e)
        {
            if (!_exclusiveUpdate)
            {
                ListViewItem lvCheck = lvTypes.Items[e.Index];

                InformationType iT = lvCheck.Tag as InformationType;

                if (iT != null)
                {
                    if ((iT.Mode == InformationTypeMode.Exclusive) && (e.NewValue == CheckState.Checked))
                    {
                        _exclusiveUpdate = true;
                        CheckExclusive(lvCheck);
                        _exclusiveUpdate = false;
                        return;
                    }
                    else if ((iT.Mode == InformationTypeMode.Exclusive) && (e.NewValue == CheckState.Unchecked))
                    {
                        _liExclusive = null;
                        return;
                    }
                }

                if (_liExclusive != null)
                {
                    // A non-exclusive item is being checked while an exclusive
                    // type was active: clear all check marks (the item being
                    // checked receives its new state after this handler runs).
                    _exclusiveUpdate = true;
                    CheckExclusive(null);
                    _exclusiveUpdate = false;
                }
            }
        }

        /// <summary>
        /// Called if the user clicks the OK button.
        /// Rebuilds the filter from the checked items and closes the dialog.
        /// </summary>
        /// <param name="sender">sender of the event</param>
        /// <param name="e">event parameter</param>
        private void btnOK_Click(object sender, System.EventArgs e)
        {
            _filter.ResetFilter();

            foreach (ListViewItem curItem in lvTypes.Items)
            {
                InformationType iT = curItem.Tag as InformationType;
                Category cat = curItem.Tag as Category;

                if (iT != null)
                    if (curItem.Checked)
                        _filter.AddInformationType(iT);

                if (cat != null)
                    if (curItem.Checked)
                        _filter.AddCategory(cat);
            }

            DialogResult = DialogResult.OK;
            this.Close();
        }
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

#nullable enable

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using osu.Framework.Allocation;
using osu.Framework.Bindables;
using osu.Framework.Graphics;
using osu.Framework.Logging;
using osu.Game.Database;
using osu.Game.Online.API;
using osu.Game.Online.API.Requests.Responses;
using osu.Game.Online.Rooms;
using osu.Game.Online.Rooms.RoomStatuses;
using osu.Game.Rulesets;
using osu.Game.Rulesets.Mods;
using osu.Game.Utils;
using APIUser = osu.Game.Online.API.Requests.Responses.APIUser;

namespace osu.Game.Online.Multiplayer
{
    public abstract class MultiplayerClient : Component, IMultiplayerClient, IMultiplayerRoomServer
    {
        /// <summary>
        /// Invoked when any change occurs to the multiplayer room.
        /// </summary>
        public event Action? RoomUpdated;

        /// <summary>
        /// Invoked when a user joins the joined room.
        /// </summary>
        public event Action<MultiplayerRoomUser>? UserJoined;

        /// <summary>
        /// Invoked when a user leaves the joined room of their own accord.
        /// </summary>
        public event Action<MultiplayerRoomUser>? UserLeft;

        /// <summary>
        /// Invoked when a user was kicked from the joined room.
        /// </summary>
        public event Action<MultiplayerRoomUser>? UserKicked;

        /// <summary>
        /// Invoked when the multiplayer server requests the current beatmap to be loaded into play.
        /// </summary>
        public event Action? LoadRequested;

        /// <summary>
        /// Invoked when the multiplayer server requests gameplay to be started.
        /// </summary>
        public event Action? MatchStarted;

        /// <summary>
        /// Invoked when the multiplayer server has finished collating results.
        /// </summary>
        public event Action? ResultsReady;

        /// <summary>
        /// Whether the <see cref="MultiplayerClient"/> is currently connected.
        /// This is NOT thread safe and usage should be scheduled.
        /// </summary>
        public abstract IBindable<bool> IsConnected { get; }

        /// <summary>
        /// The joined <see cref="MultiplayerRoom"/>.
        /// </summary>
        public MultiplayerRoom? Room { get; private set; }

        /// <summary>
        /// The users in the joined <see cref="Room"/> which are participating in the current gameplay loop.
        /// </summary>
        public IBindableList<int> CurrentMatchPlayingUserIds => PlayingUserIds;

        // Backing (mutable) list for CurrentMatchPlayingUserIds; updated via updateUserPlayingState.
        protected readonly BindableList<int> PlayingUserIds = new BindableList<int>();

        // The playlist item currently selected for play, mirrored from the room settings.
        public readonly Bindable<PlaylistItem?> CurrentMatchPlayingItem = new Bindable<PlaylistItem?>();

        /// <summary>
        /// The <see cref="MultiplayerRoomUser"/> corresponding to the local player, if available.
        /// </summary>
        public MultiplayerRoomUser? LocalUser => Room?.Users.SingleOrDefault(u => u.User?.Id == API.LocalUser.Value.Id);

        /// <summary>
        /// Whether the <see cref="LocalUser"/> is the host in <see cref="Room"/>.
        /// </summary>
        public bool IsHost
        {
            get
            {
                var localUser = LocalUser;
                return localUser != null && Room?.Host != null && localUser.Equals(Room.Host);
            }
        }

        [Resolved]
        protected IAPIProvider API { get; private set; } = null!;

        [Resolved]
        protected RulesetStore Rulesets { get; private set; } = null!;

        [Resolved]
        private UserLookupCache userLookupCache { get; set; } = null!;

        // The API-side representation of the joined room; kept in sync with Room.
        protected Room? APIRoom { get; private set; }

        [BackgroundDependencyLoader]
        private void load()
        {
            IsConnected.BindValueChanged(connected =>
            {
                // clean up local room state on server disconnect.
                if (!connected.NewValue && Room != null)
                {
                    Logger.Log("Connection to multiplayer server was lost.", LoggingTarget.Runtime, LogLevel.Important);
                    LeaveRoom();
                }
            });
        }

        // Serialises join/leave operations so they can never interleave.
        private readonly TaskChain joinOrLeaveTaskChain = new TaskChain();

        // Cancelled by LeaveRoom() to abort an in-flight join.
        private CancellationTokenSource? joinCancellationSource;

        /// <summary>
        /// Joins the <see cref="MultiplayerRoom"/> for a given API <see cref="Room"/>.
        /// </summary>
        /// <param name="room">The API <see cref="Room"/>.</param>
        /// <param name="password">An optional password to use for the join operation.</param>
        public async Task JoinRoom(Room room, string? password = null)
        {
            var cancellationSource = joinCancellationSource = new CancellationTokenSource();

            await joinOrLeaveTaskChain.Add(async () =>
            {
                if (Room != null)
                    throw new InvalidOperationException("Cannot join a multiplayer room while already in one.");

                Debug.Assert(room.RoomID.Value != null);

                // Join the server-side room.
                var joinedRoom = await JoinRoom(room.RoomID.Value.Value, password ?? room.Password.Value).ConfigureAwait(false);
                Debug.Assert(joinedRoom != null);

                // Populate playlist items.
                var playlistItems = await Task.WhenAll(joinedRoom.Playlist.Select(createPlaylistItem)).ConfigureAwait(false);

                // Populate users.
                Debug.Assert(joinedRoom.Users != null);
                await Task.WhenAll(joinedRoom.Users.Select(PopulateUser)).ConfigureAwait(false);

                // Update the stored room (must be done on update thread for thread-safety).
                await scheduleAsync(() =>
                {
                    Room = joinedRoom;
                    APIRoom = room;
                    APIRoom.Playlist.Clear();
                    APIRoom.Playlist.AddRange(playlistItems);

                    Debug.Assert(LocalUser != null);
                    addUserToAPIRoom(LocalUser);

                    foreach (var user in joinedRoom.Users)
                        updateUserPlayingState(user.UserID, user.State);

                    updateLocalRoomSettings(joinedRoom.Settings);

                    OnRoomJoined();
                }, cancellationSource.Token).ConfigureAwait(false);
            }, cancellationSource.Token).ConfigureAwait(false);
        }

        /// <summary>
        /// Fired when the room join sequence is complete
        /// </summary>
        protected virtual void OnRoomJoined()
        {
        }

        /// <summary>
        /// Joins the <see cref="MultiplayerRoom"/> with a given ID.
        /// </summary>
        /// <param name="roomId">The room ID.</param>
        /// <param name="password">An optional password to use when joining the room.</param>
        /// <returns>The joined <see cref="MultiplayerRoom"/>.</returns>
        protected abstract Task<MultiplayerRoom> JoinRoom(long roomId, string? password = null);

        public Task LeaveRoom()
        {
            // The join may have not completed yet, so certain tasks that either update the room or reference the room should be cancelled.
            // This includes the setting of Room itself along with the initial update of the room settings on join.
            joinCancellationSource?.Cancel();

            // Leaving rooms is expected to occur instantaneously whilst the operation is finalised in the background.
            // However a few members need to be reset immediately to prevent other components from entering invalid states whilst the operation hasn't yet completed.
            // For example, if a room was left and the user immediately pressed the "create room" button, then the user could be taken into the lobby if the value of Room is not reset in time.
            var scheduledReset = scheduleAsync(() =>
            {
                APIRoom = null;
                Room = null;
                CurrentMatchPlayingItem.Value = null;
                PlayingUserIds.Clear();

                RoomUpdated?.Invoke();
            });

            return joinOrLeaveTaskChain.Add(async () =>
            {
                await scheduledReset.ConfigureAwait(false);
                await LeaveRoomInternal().ConfigureAwait(false);
            });
        }

        // Performs the server-side leave operation.
        protected abstract Task LeaveRoomInternal();

        /// <summary>
        /// Change the current <see cref="MultiplayerRoom"/> settings.
        /// </summary>
        /// <remarks>
        /// A room must be joined for this to have any effect.
        /// </remarks>
        /// <param name="name">The new room name, if any.</param>
        /// <param name="password">The new password, if any.</param>
        /// <param name="matchType">The type of the match, if any.</param>
        /// <param name="queueMode">The new queue mode, if any.</param>
        public Task ChangeSettings(Optional<string> name = default, Optional<string> password = default, Optional<MatchType> matchType = default, Optional<QueueMode> queueMode = default)
        {
            if (Room == null)
                throw new InvalidOperationException("Must be joined to a match to change settings.");

            return ChangeSettings(new MultiplayerRoomSettings
            {
                // Unspecified optionals fall back to the room's current values.
                Name = name.GetOr(Room.Settings.Name),
                Password = password.GetOr(Room.Settings.Password),
                MatchType = matchType.GetOr(Room.Settings.MatchType),
                QueueMode = queueMode.GetOr(Room.Settings.QueueMode),
            });
        }

        /// <summary>
        /// Toggles the <see cref="LocalUser"/>'s ready state.
        /// </summary>
        /// <exception cref="InvalidOperationException">If a toggle of ready state is not valid at this time.</exception>
        public async Task ToggleReady()
        {
            var localUser = LocalUser;

            if (localUser == null)
                return;

            switch (localUser.State)
            {
                case MultiplayerUserState.Idle:
                    await ChangeState(MultiplayerUserState.Ready).ConfigureAwait(false);
                    return;

                case MultiplayerUserState.Ready:
                    await ChangeState(MultiplayerUserState.Idle).ConfigureAwait(false);
                    return;

                default:
                    throw new InvalidOperationException($"Cannot toggle ready when in {localUser.State}");
            }
        }

        /// <summary>
        /// Toggles the <see cref="LocalUser"/>'s spectating state.
        /// </summary>
        /// <exception cref="InvalidOperationException">If a toggle of the spectating state is not valid at this time.</exception>
        public async Task ToggleSpectate()
        {
            var localUser = LocalUser;

            if (localUser == null)
                return;

            switch (localUser.State)
            {
                case MultiplayerUserState.Idle:
                case MultiplayerUserState.Ready:
                    await ChangeState(MultiplayerUserState.Spectating).ConfigureAwait(false);
                    return;

                case MultiplayerUserState.Spectating:
                    await ChangeState(MultiplayerUserState.Idle).ConfigureAwait(false);
                    return;

                default:
                    throw new InvalidOperationException($"Cannot toggle spectate when in {localUser.State}");
            }
        }

        public abstract Task TransferHost(int userId);

        public abstract Task KickUser(int userId);

        public abstract Task ChangeSettings(MultiplayerRoomSettings settings);

        public abstract Task ChangeState(MultiplayerUserState newState);

        public abstract Task ChangeBeatmapAvailability(BeatmapAvailability newBeatmapAvailability);

        /// <summary>
        /// Change the local user's mods in the currently joined room.
        /// </summary>
        /// <param name="newMods">The proposed new mods, excluding any required by the room itself.</param>
        public Task ChangeUserMods(IEnumerable<Mod> newMods) => ChangeUserMods(newMods.Select(m => new APIMod(m)).ToList());

        public abstract Task ChangeUserMods(IEnumerable<APIMod> newMods);

        public abstract Task SendMatchRequest(MatchUserRequest request);

        public abstract Task StartMatch();

        public abstract Task AddPlaylistItem(MultiplayerPlaylistItem item);

        Task IMultiplayerClient.RoomStateChanged(MultiplayerRoomState state)
        {
            if (Room == null)
                return Task.CompletedTask;

            // All local state mutation is marshalled to the update thread via the scheduler.
            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                Debug.Assert(APIRoom != null);

                Room.State = state;

                switch (state)
                {
                    case MultiplayerRoomState.Open:
                        APIRoom.Status.Value = new RoomStatusOpen();
                        break;

                    case MultiplayerRoomState.Playing:
                        APIRoom.Status.Value = new RoomStatusPlaying();
                        break;

                    case MultiplayerRoomState.Closed:
                        APIRoom.Status.Value = new RoomStatusEnded();
                        break;
                }

                RoomUpdated?.Invoke();
            }, false);

            return Task.CompletedTask;
        }

        async Task IMultiplayerClient.UserJoined(MultiplayerRoomUser user)
        {
            if (Room == null)
                return;

            await PopulateUser(user).ConfigureAwait(false);

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                // for sanity, ensure that there can be no duplicate users in the room user list.
                if (Room.Users.Any(existing => existing.UserID == user.UserID))
                    return;

                Room.Users.Add(user);

                addUserToAPIRoom(user);

                UserJoined?.Invoke(user);
                RoomUpdated?.Invoke();
            });
        }

        Task IMultiplayerClient.UserLeft(MultiplayerRoomUser user) =>
            handleUserLeft(user, UserLeft);

        Task IMultiplayerClient.UserKicked(MultiplayerRoomUser user)
        {
            if (LocalUser == null)
                return Task.CompletedTask;

            // If the local user was the one kicked, also leave the room entirely.
            if (user.Equals(LocalUser))
                LeaveRoom();

            return handleUserLeft(user, UserKicked);
        }

        // Adds a user to the API room's participant list, using a placeholder
        // APIUser when the user's profile could not be resolved.
        private void addUserToAPIRoom(MultiplayerRoomUser user)
        {
            Debug.Assert(APIRoom != null);

            APIRoom.RecentParticipants.Add(user.User ?? new APIUser
            {
                Id = user.UserID,
                Username = "[Unresolved]"
            });
            APIRoom.ParticipantCount.Value++;
        }

        // Common removal path for both voluntary leaves and kicks; the callback
        // distinguishes which public event (UserLeft / UserKicked) is raised.
        private Task handleUserLeft(MultiplayerRoomUser user, Action<MultiplayerRoomUser>? callback)
        {
            if (Room == null)
                return Task.CompletedTask;

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                Room.Users.Remove(user);
                PlayingUserIds.Remove(user.UserID);

                Debug.Assert(APIRoom != null);
                APIRoom.RecentParticipants.RemoveAll(u => u.Id == user.UserID);
                APIRoom.ParticipantCount.Value--;

                callback?.Invoke(user);
                RoomUpdated?.Invoke();
            }, false);

            return Task.CompletedTask;
        }

        Task IMultiplayerClient.HostChanged(int userId)
        {
            if (Room == null)
                return Task.CompletedTask;

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                Debug.Assert(APIRoom != null);

                var user = Room.Users.FirstOrDefault(u => u.UserID == userId);

                Room.Host = user;
                APIRoom.Host.Value = user?.User;

                RoomUpdated?.Invoke();
            }, false);

            return Task.CompletedTask;
        }

        Task IMultiplayerClient.SettingsChanged(MultiplayerRoomSettings newSettings)
        {
            Scheduler.Add(() => updateLocalRoomSettings(newSettings));
            return Task.CompletedTask;
        }

        Task IMultiplayerClient.UserStateChanged(int userId, MultiplayerUserState state)
        {
            if (Room == null)
                return Task.CompletedTask;

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                Room.Users.Single(u => u.UserID == userId).State = state;

                updateUserPlayingState(userId, state);

                RoomUpdated?.Invoke();
            }, false);

            return Task.CompletedTask;
        }

        Task IMultiplayerClient.MatchUserStateChanged(int userId, MatchUserState state)
        {
            if (Room == null)
                return Task.CompletedTask;

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                Room.Users.Single(u => u.UserID == userId).MatchState = state;
                RoomUpdated?.Invoke();
            }, false);

            return Task.CompletedTask;
        }

        Task IMultiplayerClient.MatchRoomStateChanged(MatchRoomState state)
        {
            if (Room == null)
                return Task.CompletedTask;

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                Room.MatchState = state;
                RoomUpdated?.Invoke();
            }, false);

            return Task.CompletedTask;
        }

        public Task MatchEvent(MatchServerEvent e)
        {
            // not used by any match types just yet.
            return Task.CompletedTask;
        }

        Task IMultiplayerClient.UserBeatmapAvailabilityChanged(int userId, BeatmapAvailability beatmapAvailability)
        {
            if (Room == null)
                return Task.CompletedTask;

            Scheduler.Add(() =>
            {
                var user = Room?.Users.SingleOrDefault(u => u.UserID == userId);

                // errors here are not critical - beatmap availability state is mostly for display.
                if (user == null)
                    return;

                user.BeatmapAvailability = beatmapAvailability;

                RoomUpdated?.Invoke();
            }, false);

            return Task.CompletedTask;
        }

        public Task UserModsChanged(int userId, IEnumerable<APIMod> mods)
        {
            if (Room == null)
                return Task.CompletedTask;

            Scheduler.Add(() =>
            {
                var user = Room?.Users.SingleOrDefault(u => u.UserID == userId);

                // errors here are not critical - user mods are mostly for display.
                if (user == null)
                    return;

                user.Mods = mods;

                RoomUpdated?.Invoke();
            }, false);

            return Task.CompletedTask;
        }

        Task IMultiplayerClient.LoadRequested()
        {
            if (Room == null)
                return Task.CompletedTask;

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                LoadRequested?.Invoke();
            }, false);

            return Task.CompletedTask;
        }

        Task IMultiplayerClient.MatchStarted()
        {
            if (Room == null)
                return Task.CompletedTask;

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                MatchStarted?.Invoke();
            }, false);

            return Task.CompletedTask;
        }

        Task IMultiplayerClient.ResultsReady()
        {
            if (Room == null)
                return Task.CompletedTask;

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                ResultsReady?.Invoke();
            }, false);

            return Task.CompletedTask;
        }

        public async Task PlaylistItemAdded(MultiplayerPlaylistItem item)
        {
            if (Room == null)
                return;

            var playlistItem = await createPlaylistItem(item).ConfigureAwait(false);

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                Debug.Assert(APIRoom != null);

                Room.Playlist.Add(item);
                APIRoom.Playlist.Add(playlistItem);

                RoomUpdated?.Invoke();
            });
        }

        public Task PlaylistItemRemoved(long playlistItemId)
        {
            if (Room == null)
                return Task.CompletedTask;

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                Debug.Assert(APIRoom != null);

                Room.Playlist.Remove(Room.Playlist.Single(existing => existing.ID == playlistItemId));
                APIRoom.Playlist.RemoveAll(existing => existing.ID == playlistItemId);

                RoomUpdated?.Invoke();
            });

            return Task.CompletedTask;
        }

        public async Task PlaylistItemChanged(MultiplayerPlaylistItem item)
        {
            if (Room == null)
                return;

            var playlistItem = await createPlaylistItem(item).ConfigureAwait(false);

            Scheduler.Add(() =>
            {
                if (Room == null)
                    return;

                Debug.Assert(APIRoom != null);

                Room.Playlist[Room.Playlist.IndexOf(Room.Playlist.Single(existing => existing.ID == item.ID))] = item;

                // Replace the API playlist item in-place (preserving its position).
                int existingIndex = APIRoom.Playlist.IndexOf(APIRoom.Playlist.Single(existing => existing.ID == item.ID));
                APIRoom.Playlist.RemoveAt(existingIndex);
                APIRoom.Playlist.Insert(existingIndex, playlistItem);

                // If the currently-selected item was the one that got replaced, update the selected item to the new one.
                if (CurrentMatchPlayingItem.Value?.ID == playlistItem.ID)
                    CurrentMatchPlayingItem.Value = playlistItem;

                RoomUpdated?.Invoke();
            });
        }

        /// <summary>
        /// Populates the <see cref="APIUser"/> for a given <see cref="MultiplayerRoomUser"/>.
        /// </summary>
        /// <param name="multiplayerUser">The <see cref="MultiplayerRoomUser"/> to populate.</param>
        protected async Task PopulateUser(MultiplayerRoomUser multiplayerUser) =>
            multiplayerUser.User ??= await userLookupCache.GetUserAsync(multiplayerUser.UserID).ConfigureAwait(false);

        /// <summary>
        /// Updates the local room settings with the given <see cref="MultiplayerRoomSettings"/>.
        /// </summary>
        /// <remarks>
        /// This updates both the joined <see cref="MultiplayerRoom"/> and the respective API <see cref="Room"/>.
        /// </remarks>
        /// <param name="settings">The new <see cref="MultiplayerRoomSettings"/> to update from.</param>
        private void updateLocalRoomSettings(MultiplayerRoomSettings settings)
        {
            if (Room == null)
                return;

            Debug.Assert(APIRoom != null);

            // Update a few properties of the room instantaneously.
            Room.Settings = settings;
            APIRoom.Name.Value = Room.Settings.Name;
            APIRoom.Password.Value = Room.Settings.Password;
            APIRoom.QueueMode.Value = Room.Settings.QueueMode;

            RoomUpdated?.Invoke();

            CurrentMatchPlayingItem.Value = APIRoom.Playlist.SingleOrDefault(p => p.ID == settings.PlaylistItemId);
        }

        // Converts a server-side MultiplayerPlaylistItem into a local PlaylistItem,
        // resolving the beatmap set online and instantiating ruleset mods.
        private async Task<PlaylistItem> createPlaylistItem(MultiplayerPlaylistItem item)
        {
            var set = await GetOnlineBeatmapSet(item.BeatmapID).ConfigureAwait(false);

            // The incoming response is deserialised without circular reference handling currently.
            // Because we require using metadata from this instance, populate the nested beatmaps' sets manually here.
            foreach (var b in set.Beatmaps)
                b.BeatmapSet = set;

            var beatmap = set.Beatmaps.Single(b => b.OnlineID == item.BeatmapID);
            beatmap.Checksum = item.BeatmapChecksum;

            var ruleset = Rulesets.GetRuleset(item.RulesetID);
            var rulesetInstance = ruleset.CreateInstance();

            var playlistItem = new PlaylistItem
            {
                ID = item.ID,
                Beatmap = { Value = beatmap },
                Ruleset = { Value = ruleset },
                Expired = item.Expired
            };

            playlistItem.RequiredMods.AddRange(item.RequiredMods.Select(m => m.ToMod(rulesetInstance)));
            playlistItem.AllowedMods.AddRange(item.AllowedMods.Select(m => m.ToMod(rulesetInstance)));

            return playlistItem;
        }

        /// <summary>
        /// Retrieves a <see cref="APIBeatmapSet"/> from an online source.
        /// </summary>
        /// <param name="beatmapId">The beatmap set ID.</param>
        /// <param name="cancellationToken">A token to cancel the request.</param>
        /// <returns>The <see cref="APIBeatmapSet"/> retrieval task.</returns>
        protected abstract Task<APIBeatmapSet> GetOnlineBeatmapSet(int beatmapId, CancellationToken cancellationToken = default);

        /// <summary>
        /// For the provided user ID, update whether the user is included in <see cref="CurrentMatchPlayingUserIds"/>.
        /// </summary>
        /// <param name="userId">The user's ID.</param>
        /// <param name="state">The new state of the user.</param>
        private void updateUserPlayingState(int userId, MultiplayerUserState state)
        {
            bool wasPlaying = PlayingUserIds.Contains(userId);
            // "Playing" covers every state from load start through play completion.
            bool isPlaying = state >= MultiplayerUserState.WaitingForLoad && state <= MultiplayerUserState.FinishedPlay;

            if (isPlaying == wasPlaying)
                return;

            if (isPlaying)
                PlayingUserIds.Add(userId);
            else
                PlayingUserIds.Remove(userId);
        }

        // Runs the given action on the scheduler (update thread) and returns a
        // task which completes (or faults/cancels) when the action has run.
        private Task scheduleAsync(Action action, CancellationToken cancellationToken = default)
        {
            var tcs = new TaskCompletionSource<bool>();

            Scheduler.Add(() =>
            {
                if (cancellationToken.IsCancellationRequested)
                {
                    tcs.SetCanceled();
                    return;
                }

                try
                {
                    action();
                    tcs.SetResult(true);
                }
                catch (Exception ex)
                {
                    tcs.SetException(ex);
                }
            });

            return tcs.Task;
        }
    }
}
/*******************************************************************************
 * Copyright 2008-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use
 * this file except in compliance with the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 * *****************************************************************************
 *    __  _    _  ___
 *   (  )( \/\/ )/ __)
 *   /__\ \    / \__ \
 *  (_)(_) \/\/  (___/
 *
 *  AWS SDK for .NET
 */

using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;

namespace Amazon.EC2.Model
{
    /// <summary>
    /// Associates an elastic IP address with an instance. If the IP address is
    /// currently assigned to another instance, the IP address is assigned
    /// to the new instance.
    /// </summary>
    /// <remarks>
    /// This is an idempotent operation. If you call it more than once, Amazon EC2 does not return an error.
    /// </remarks>
    [XmlRootAttribute(IsNullable = false)]
    public class AssociateAddressRequest : EC2Request
    {
        private string _instanceId;
        private string _publicIp;
        private string _allocationId;
        private string _networkInterfaceId;
        private string _privateIpAddress;
        // Nullable so IsSetAllowReassociation can distinguish "never assigned"
        // from an explicit false.
        private bool? _allowReassociation;

        /// <summary>
        /// The instance to associate with the IP address.
        /// </summary>
        [XmlElementAttribute(ElementName = "InstanceId")]
        public string InstanceId
        {
            get { return _instanceId; }
            set { _instanceId = value; }
        }

        /// <summary>
        /// Sets the instance to associate with the IP address.
        /// </summary>
        /// <param name="instanceId">The instance to associate with the IP address.</param>
        /// <returns>this instance</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public AssociateAddressRequest WithInstanceId(string instanceId)
        {
            InstanceId = instanceId;
            return this;
        }

        /// <summary>
        /// Checks if InstanceId property is set
        /// </summary>
        /// <returns>true if InstanceId property is set</returns>
        public bool IsSetInstanceId()
        {
            return InstanceId != null;
        }

        /// <summary>
        /// The Elastic IP address to assign to the instance.
        /// </summary>
        [XmlElementAttribute(ElementName = "PublicIp")]
        public string PublicIp
        {
            get { return _publicIp; }
            set { _publicIp = value; }
        }

        /// <summary>
        /// Sets the Elastic IP address to assign to the instance.
        /// </summary>
        /// <param name="publicIp">IP address that you are assigning to the instance.</param>
        /// <returns>this instance</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public AssociateAddressRequest WithPublicIp(string publicIp)
        {
            PublicIp = publicIp;
            return this;
        }

        /// <summary>
        /// Checks if PublicIp property is set
        /// </summary>
        /// <returns>true if PublicIp property is set</returns>
        public bool IsSetPublicIp()
        {
            return PublicIp != null;
        }

        /// <summary>
        /// The allocation ID that AWS returned when you allocated
        /// the elastic IP address for use with Amazon VPC.
        ///
        /// Condition: Required for VPC elastic IP addresses
        /// </summary>
        [XmlElementAttribute(ElementName = "AllocationId")]
        public string AllocationId
        {
            get { return _allocationId; }
            set { _allocationId = value; }
        }

        /// <summary>
        /// Sets the allocation ID for use with Amazon VPC.
        /// </summary>
        /// <param name="allocationId">The allocation ID that AWS returned when you allocated
        /// the elastic IP address for use with Amazon VPC.</param>
        /// <returns>this instance</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public AssociateAddressRequest WithAllocationId(string allocationId)
        {
            AllocationId = allocationId;
            return this;
        }

        /// <summary>
        /// Checks if AllocationId property is set
        /// </summary>
        /// <returns>true if AllocationId property is set</returns>
        public bool IsSetAllocationId()
        {
            return AllocationId != null;
        }

        /// <summary>
        /// The network interface ID to associate with an instance.
        /// Association fails when specifying an instance ID unless exactly one interface is attached.
        /// </summary>
        [XmlElementAttribute(ElementName = "NetworkInterfaceId")]
        public string NetworkInterfaceId
        {
            get { return _networkInterfaceId; }
            set { _networkInterfaceId = value; }
        }

        /// <summary>
        /// Sets the network interface ID to associate with an instance.
        /// </summary>
        /// <param name="networkInterfaceId">Network interface ID</param>
        /// <returns>this instance</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public AssociateAddressRequest WithNetworkInterfaceId(string networkInterfaceId)
        {
            NetworkInterfaceId = networkInterfaceId;
            return this;
        }

        /// <summary>
        /// Checks if the NetworkInterfaceId property is set
        /// </summary>
        /// <returns>true if the NetworkInterfaceId property is set</returns>
        public bool IsSetNetworkInterfaceId()
        {
            // Unlike the ID/IP members above, an empty string counts as "not set" here.
            return !string.IsNullOrEmpty(NetworkInterfaceId);
        }

        /// <summary>
        /// The primary or secondary private IP address to associate with the Elastic IP address.
        /// If no private IP is specified, the Elastic IP address is associated with the primary
        /// private IP address. This is only available in Amazon VPC.
        /// </summary>
        [XmlElementAttribute(ElementName = "PrivateIpAddress")]
        public string PrivateIpAddress
        {
            get { return _privateIpAddress; }
            set { _privateIpAddress = value; }
        }

        /// <summary>
        /// Sets the primary or secondary private IP address to associate with the Elastic IP address.
        /// </summary>
        /// <param name="privateIpAddress">Private IP address.</param>
        /// <returns>this instance</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public AssociateAddressRequest WithPrivateIpAddress(string privateIpAddress)
        {
            PrivateIpAddress = privateIpAddress;
            return this;
        }

        /// <summary>
        /// Checks if the PrivateIpAddress property is set
        /// </summary>
        /// <returns>true if the PrivateIpAddress property is set</returns>
        public bool IsSetPrivateIpAddress()
        {
            return !string.IsNullOrEmpty(PrivateIpAddress);
        }

        /// <summary>
        /// Specify whether to allow an Elastic IP address that is already associated with another
        /// network interface or instance to be re-associated with the specified instance or interface.
        /// If the Elastic IP address is associated, and this option is not specified, the operation will
        /// fail. This is only available in Amazon VPC.
        /// </summary>
        [XmlElementAttribute(ElementName = "AllowReassociation")]
        public bool AllowReassociation
        {
            get { return _allowReassociation ?? false; }
            set { _allowReassociation = value; }
        }

        /// <summary>
        /// Sets whether to allow an Elastic IP address that is already associated with another
        /// network interface or instance to be re-associated with the specified instance or interface.
        /// </summary>
        /// <param name="allowReassociation">Whether reassociation is allowed.</param>
        /// <returns>this instance</returns>
        [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
        public AssociateAddressRequest WithAllowReassociation(bool allowReassociation)
        {
            AllowReassociation = allowReassociation;
            return this;
        }

        /// <summary>
        /// Checks if the AllowReassociation property is set
        /// </summary>
        /// <returns>true if the AllowReassociation property is set</returns>
        public bool IsSetAllowReassociation()
        {
            return _allowReassociation.HasValue;
        }
    }
}
//
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

namespace OneGet.Sdk {
    using System;
    using System.Collections.Generic;
    using System.Globalization;
    using System.Linq;
    using System.Security;
    using Resources;

    /// <summary>
    /// Abstract base for the request object that OneGet hands to a package provider.
    /// The host supplies the abstract members; the concrete helpers below layer
    /// message formatting and yield convenience overloads on top of them.
    /// </summary>
    public abstract class Request {
        // Lazily-built caches; populated on first access of Options / PackageSources below.
        private Dictionary<string, string[]> _options;
        private string[] _packageSources;

        #region OneGet Interfaces
        /// <summary>
        /// Services the host exposes to providers (download, archive, file-system,
        /// shortcut/environment, and elevation helpers).
        /// </summary>
        public interface IProviderServices {
            bool IsElevated { get; }

            string GetCanonicalPackageId(string providerName, string packageName, string version);

            string ParseProviderName(string canonicalPackageId);

            string ParsePackageName(string canonicalPackageId);

            string ParsePackageVersion(string canonicalPackageId);

            void DownloadFile(Uri remoteLocation, string localFilename, Request requestObject);

            bool IsSupportedArchive(string localFilename, Request requestObject);

            IEnumerable<string> UnpackArchive(string localFilename, string destinationFolder, Request requestObject);

            void AddPinnedItemToTaskbar(string item, Request requestObject);

            void RemovePinnedItemFromTaskbar(string item, Request requestObject);

            void CreateShortcutLink(string linkPath, string targetPath, string description, string workingDirectory, string arguments, Request requestObject);

            void SetEnvironmentVariable(string variable, string value, string context, Request requestObject);

            void RemoveEnvironmentVariable(string variable, string context, Request requestObject);

            void CopyFile(string sourcePath, string destinationPath, Request requestObject);

            void Delete(string path, Request requestObject);

            void DeleteFolder(string folder, Request requestObject);

            void CreateFolder(string folder, Request requestObject);

            void DeleteFile(string filename, Request requestObject);

            string GetKnownFolder(string knownFolder, Request requestObject);

            string CanonicalizePath(string text, string currentDirectory);

            bool FileExists(string path);

            bool DirectoryExists(string path);

            bool Install(string fileName, string additionalArgs, Request requestObject);

            bool IsSignedAndTrusted(string filename, Request requestObject);

            bool ExecuteElevatedAction(string provider, string payload, Request requestObject);
        }

        /// <summary>
        /// Marker interface representing a package provider instance.
        /// </summary>
        public interface IPackageProvider {
        }

        /// <summary>
        /// Host-side package management service surface (provider discovery and selection).
        /// </summary>
        public interface IPackageManagementService {
            int Version { get; }

            IEnumerable<string> ProviderNames { get; }

            IEnumerable<string> AllProviderNames { get; }

            IEnumerable<IPackageProvider> PackageProviders { get; }

            IEnumerable<IPackageProvider> SelectProvidersWithFeature(string featureName);

            IEnumerable<IPackageProvider> SelectProvidersWithFeature(string featureName, string value);

            IEnumerable<IPackageProvider> SelectProviders(string providerName, Request requestObject);

            bool RequirePackageProvider(string requestor, string packageProviderName, string minimumVersion, Request requestObject);
        }
        #endregion

        #region core-apis
        // Exposed as dynamic; see IPackageManagementService for the expected shape.
        public abstract dynamic PackageManagementService { get; }

        public abstract IProviderServices ProviderServices { get; }
        #endregion

        #region copy host-apis
        /* Synced/Generated code =================================================== */
        // True when the host has cancelled this request; providers should poll this.
        public abstract bool IsCanceled { get; }

        // Resolves a message id to a localized string, falling back to defaultText.
        public abstract string GetMessageString(string messageText, string defaultText);

        public abstract bool Warning(string messageText);

        public abstract bool Error(string id, string category, string targetObjectValue, string messageText);

        public abstract bool Message(string messageText);

        public abstract bool Verbose(string messageText);

        public abstract bool Debug(string messageText);

        public abstract int StartProgress(int parentActivityId, string messageText);

        public abstract bool Progress(int activityId, int progressPercentage, string messageText);

        public abstract bool CompleteProgress(int activityId, bool isSuccessful);

        /// <summary>
        /// Used by a provider to request what metadata keys were passed from the user
        /// </summary>
        /// <returns></returns>
        public abstract IEnumerable<string> OptionKeys { get; }

        /// <summary>
        /// Returns the values the user supplied for a given option key.
        /// </summary>
        /// <param name="key">the option name</param>
        /// <returns>the values supplied for that option (may be null)</returns>
        public abstract IEnumerable<string> GetOptionValues(string key);

        public abstract IEnumerable<string> Sources { get; }

        public abstract string CredentialUsername { get; }

        public abstract SecureString CredentialPassword { get; }

        public abstract bool ShouldBootstrapProvider(string requestor, string providerName, string providerVersion, string providerType, string location, string destination);

        public abstract bool ShouldContinueWithUntrustedPackageSource(string package, string packageSource);

        public abstract bool AskPermission(string permission);

        public abstract bool IsInteractive { get; }

        public abstract int CallCount { get; }
        #endregion

        #region copy response-apis
        /* Synced/Generated code =================================================== */

        /// <summary>
        /// Used by a provider to return fields for a SoftwareIdentity.
        /// </summary>
        /// <param name="fastPath"></param>
        /// <param name="name"></param>
        /// <param name="version"></param>
        /// <param name="versionScheme"></param>
        /// <param name="summary"></param>
        /// <param name="source"></param>
        /// <param name="searchKey"></param>
        /// <param name="fullPath"></param>
        /// <param name="packageFileName"></param>
        /// <returns></returns>
        public abstract bool YieldSoftwareIdentity(string fastPath, string name, string version, string versionScheme, string summary, string source, string searchKey, string fullPath, string packageFileName);

        public abstract bool YieldSoftwareMetadata(string parentFastPath, string name, string value);

        public abstract bool YieldEntity(string parentFastPath, string name, string regid, string role, string thumbprint);

        public abstract bool YieldLink(string parentFastPath, string referenceUri, string relationship, string mediaType, string ownership, string use, string appliesToMedia, string artifact);

#if M2
        public abstract bool YieldSwidtag(string fastPath, string xmlOrJsonDoc);

        public abstract bool YieldMetadata(string fieldId, string @namespace, string name, string value);
#endif

        /// <summary>
        /// Used by a provider to return fields for a package source (repository)
        /// </summary>
        /// <param name="name"></param>
        /// <param name="location"></param>
        /// <param name="isTrusted"></param>
        /// <param name="isRegistered"></param>
        /// <param name="isValidated"></param>
        /// <returns></returns>
        public abstract bool YieldPackageSource(string name, string location, bool isTrusted, bool isRegistered, bool isValidated);

        /// <summary>
        /// Used by a provider to return the fields for a Metadata Definition
        /// The cmdlets can use this to supply tab-completion for metadata to the user.
        /// </summary>
        /// <param name="name">the provider-defined name of the option</param>
        /// <param name="expectedType"> one of ['string','int','path','switch']</param>
        /// <param name="isRequired">if the parameter is mandatory</param>
        /// <returns></returns>
        public abstract bool YieldDynamicOption(string name, string expectedType, bool isRequired);

        public abstract bool YieldKeyValuePair(string key, string value);

        public abstract bool YieldValue(string value);
        #endregion

        /// <summary>
        /// Yield values in a dictionary as key/value pairs. (one pair for each value in each key)
        /// </summary>
        /// <param name="dictionary"></param>
        /// <returns>true if every pair was accepted; stops at the first rejection</returns>
        public bool Yield(Dictionary<string, string[]> dictionary) {
            return dictionary.All(Yield);
        }

        /// <summary>
        /// Yields one pair per value; a key with no values is yielded once with a null value.
        /// </summary>
        /// <param name="pair"></param>
        /// <returns>true if every value was accepted</returns>
        public bool Yield(KeyValuePair<string, string[]> pair) {
            if (pair.Value.Length == 0) {
                return YieldKeyValuePair(pair.Key, null);
            }
            return pair.Value.All(each => YieldKeyValuePair(pair.Key, each));
        }

        // Formatting overload. Note: messageText is deliberately passed as both the
        // error id and (formatted) message text to the abstract Error() above.
        public bool Error(ErrorCategory category, string targetObjectValue, string messageText, params object[] args) {
            return Error(messageText, category.ToString(), targetObjectValue, FormatMessageString(messageText, args));
        }

        // The following overloads format the message (with resource lookup) before
        // forwarding to the host's abstract counterparts.
        public bool Warning(string messageText, params object[] args) {
            return Warning(FormatMessageString(messageText, args));
        }

        public bool Message(string messageText, params object[] args) {
            return Message(FormatMessageString(messageText, args));
        }

        public bool Verbose(string messageText, params object[] args) {
            return Verbose(FormatMessageString(messageText, args));
        }

        public bool Debug(string messageText, params object[] args) {
            return Debug(FormatMessageString(messageText, args));
        }

        public int StartProgress(int parentActivityId, string messageText, params object[] args) {
            return StartProgress(parentActivityId, FormatMessageString(messageText, args));
        }

        public bool Progress(int activityId, int progressPercentage, string messageText, params object[] args) {
            return Progress(activityId, progressPercentage, FormatMessageString(messageText, args));
        }

        /// <summary>
        /// Returns the last value supplied for an option, or null when none was given.
        /// </summary>
        public string GetOptionValue(string name) {
            // get the value from the request
            return (GetOptionValues(name) ?? Enumerable.Empty<string>()).LastOrDefault();
        }

        // Produces a diagnostic rendering of a format string whose argument count
        // didn't match: braces become guillemets and args are appended, so the
        // output never throws FormatException.
        private static string FixMeFormat(string formatString, object[] args) {
            if (args == null || args.Length == 0) {
                // not really any args, and not really expecting any
                return formatString.Replace('{', '\u00ab').Replace('}', '\u00bb');
            }

            return args.Aggregate(formatString.Replace('{', '\u00ab').Replace('}', '\u00bb'), (current, arg) => current + string.Format(CultureInfo.CurrentCulture, " \u00ab{0}\u00bb", arg));
        }

        // Looks up a message in the local (SDK) resources; null when absent.
        internal string GetMessageStringInternal(string messageText) {
            return Messages.ResourceManager.GetString(messageText);
        }

        // Resolves MSG:-prefixed ids (host first, then local resources, then the raw
        // text) and applies string.Format; falls back to FixMeFormat when the brace
        // count looks inconsistent with the supplied args.
        internal string FormatMessageString(string messageText, params object[] args) {
            if (string.IsNullOrEmpty(messageText)) {
                return string.Empty;
            }

            if (args == null) {
                return messageText;
            }

            if (messageText.StartsWith(Constants.MSGPrefix, true, CultureInfo.CurrentCulture)) {
                // check with the caller first, then with the local resources, and fallback to using the messageText itself.
                messageText = GetMessageString(messageText.Substring(Constants.MSGPrefix.Length), GetMessageStringInternal(messageText) ?? messageText) ?? GetMessageStringInternal(messageText) ?? messageText;
            }

            // if it doesn't look like we have the correct number of parameters
            // let's return a fix-me-format string.
            var c = messageText.ToCharArray().Where(each => each == '{').Count();
            if (c < args.Length) {
                return FixMeFormat(messageText, args);
            }
            return string.Format(CultureInfo.CurrentCulture, messageText, args);
        }

        /// <summary>
        /// Yields a dynamic option and then one key/value pair per permitted value.
        /// </summary>
        public bool YieldDynamicOption(string name, string expectedType, bool isRequired, IEnumerable<string> permittedValues) {
            return YieldDynamicOption(name, expectedType, isRequired) && (permittedValues ?? Enumerable.Empty<string>()).All(each => YieldKeyValuePair(name, each));
        }

        /// <summary>
        /// All user-supplied options, keyed by option name. Built once on first
        /// access from OptionKeys/GetOptionValues and cached for the request's lifetime.
        /// </summary>
        public Dictionary<string, string[]> Options {
            get {
                return _options ?? (_options = OptionKeys.Where(each => !string.IsNullOrWhiteSpace(each)).ToDictionary(k => k, (k) => (GetOptionValues(k) ?? new string[0]).ToArray()));
            }
        }

        /// <summary>
        /// The package sources for this request; materialized once and cached.
        /// </summary>
        public IEnumerable<string> PackageSources {
            get {
                return _packageSources ?? (_packageSources = (Sources ?? new string[0]).ToArray());
            }
        }
    }
}
// This file was created automatically, do not modify the contents of this file. // ReSharper disable InvalidXmlDocComment // ReSharper disable InconsistentNaming // ReSharper disable CheckNamespace // ReSharper disable MemberCanBePrivate.Global using System; using System.Runtime.InteropServices; // Source file C:\Program Files\Epic Games\UE_4.22\Engine\Source\Runtime\Engine\Classes\Components\SkyLightComponent.h:96 namespace UnrealEngine { [ManageType("ManageSkyLightComponent")] public partial class ManageSkyLightComponent : USkyLightComponent, IManageWrapper { public ManageSkyLightComponent(IntPtr adress) : base(adress) { } #region DLLInmport [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_UpdateLightGUIDs(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_DetachFromParent(IntPtr self, bool bMaintainWorldPosition, bool bCallModify); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_OnAttachmentChanged(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_OnHiddenInGameChanged(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_OnVisibilityChanged(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_PropagateLightingScenarioChange(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_UpdateBounds(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, 
CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_UpdatePhysicsVolume(IntPtr self, bool bTriggerNotifiers); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_Activate(IntPtr self, bool bReset); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_BeginPlay(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_CreateRenderState_Concurrent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_Deactivate(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_DestroyComponent(IntPtr self, bool bPromoteChildren); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_DestroyRenderState_Concurrent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_InitializeComponent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_InvalidateLightingCacheDetailed(IntPtr self, bool bInvalidateBuildEnqueuedLighting, bool bTranslationOnly); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_OnActorEnableCollisionChanged(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void 
E__Supper__USkyLightComponent_OnComponentCreated(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_OnComponentDestroyed(IntPtr self, bool bDestroyingHierarchy); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_OnCreatePhysicsState(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_OnDestroyPhysicsState(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_OnRegister(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_OnRep_IsActive(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_OnUnregister(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_RegisterComponentTickFunctions(IntPtr self, bool bRegister); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_SendRenderDynamicData_Concurrent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_SendRenderTransform_Concurrent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_SetActive(IntPtr self, bool bNewActive, bool bReset); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = 
CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_SetAutoActivate(IntPtr self, bool bNewAutoActivate); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_SetComponentTickEnabled(IntPtr self, bool bEnabled); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_SetComponentTickEnabledAsync(IntPtr self, bool bEnabled); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_ToggleActive(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_UninitializeComponent(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_BeginDestroy(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_FinishDestroy(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_MarkAsEditorOnlySubobject(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_PostCDOContruct(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_PostEditImport(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_PostInitProperties(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, 
CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_PostLoad(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_PostNetReceive(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_PostRepNotifies(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_PostSaveRoot(IntPtr self, bool bCleanupIsRequired); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_PreDestroyFromReplication(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_PreNetReceive(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_ShutdownAfterError(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_CreateCluster(IntPtr self); [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)] private static extern void E__Supper__USkyLightComponent_OnClusterMarkedAsPendingKill(IntPtr self); #endregion #region Methods /// <summary> /// Update/reset light GUIDs. 
/// </summary> public override void UpdateLightGUIDs() => E__Supper__USkyLightComponent_UpdateLightGUIDs(this); /// <summary> /// DEPRECATED - Use DetachFromComponent() instead /// </summary> public override void DetachFromParentDeprecated(bool bMaintainWorldPosition, bool bCallModify) => E__Supper__USkyLightComponent_DetachFromParent(this, bMaintainWorldPosition, bCallModify); /// <summary> /// Called when AttachParent changes, to allow the scene to update its attachment state. /// </summary> public override void OnAttachmentChanged() => E__Supper__USkyLightComponent_OnAttachmentChanged(this); /// <summary> /// Overridable internal function to respond to changes in the hidden in game value of the component. /// </summary> protected override void OnHiddenInGameChanged() => E__Supper__USkyLightComponent_OnHiddenInGameChanged(this); /// <summary> /// Overridable internal function to respond to changes in the visibility of the component. /// </summary> protected override void OnVisibilityChanged() => E__Supper__USkyLightComponent_OnVisibilityChanged(this); /// <summary> /// Updates any visuals after the lighting has changed /// </summary> public override void PropagateLightingScenarioChange() => E__Supper__USkyLightComponent_PropagateLightingScenarioChange(this); /// <summary> /// Update the Bounds of the component. /// </summary> public override void UpdateBounds() => E__Supper__USkyLightComponent_UpdateBounds(this); /// <summary> /// Updates the PhysicsVolume of this SceneComponent, if bShouldUpdatePhysicsVolume is true. /// </summary> /// <param name="bTriggerNotifiers">if true, send zone/volume change events</param> public override void UpdatePhysicsVolume(bool bTriggerNotifiers) => E__Supper__USkyLightComponent_UpdatePhysicsVolume(this, bTriggerNotifiers); /// <summary> /// Activates the SceneComponent, should be overridden by native child classes. 
/// </summary> /// <param name="bReset">Whether the activation should happen even if ShouldActivate returns false.</param> public override void Activate(bool bReset) => E__Supper__USkyLightComponent_Activate(this, bReset); /// <summary> /// BeginsPlay for the component. Occurs at level startup or actor spawn. This is before BeginPlay (Actor or Component). /// <para>All Components (that want initialization) in the level will be Initialized on load before any </para> /// Actor/Component gets BeginPlay. /// <para>Requires component to be registered and initialized. </para> /// </summary> public override void BeginPlay() => E__Supper__USkyLightComponent_BeginPlay(this); /// <summary> /// Used to create any rendering thread information for this component /// <para>@warning This is called concurrently on multiple threads (but never the same component concurrently) </para> /// </summary> protected override void CreateRenderState_Concurrent() => E__Supper__USkyLightComponent_CreateRenderState_Concurrent(this); /// <summary> /// Deactivates the SceneComponent. /// </summary> public override void Deactivate() => E__Supper__USkyLightComponent_Deactivate(this); /// <summary> /// Unregister the component, remove it from its outer Actor's Components array and mark for pending kill. /// </summary> public override void DestroyComponent(bool bPromoteChildren) => E__Supper__USkyLightComponent_DestroyComponent(this, bPromoteChildren); /// <summary> /// Used to shut down any rendering thread structure for this component /// <para>@warning This is called concurrently on multiple threads (but never the same component concurrently) </para> /// </summary> protected override void DestroyRenderState_Concurrent() => E__Supper__USkyLightComponent_DestroyRenderState_Concurrent(this); /// <summary> /// Initializes the component. Occurs at level startup or actor spawn. This is before BeginPlay (Actor or Component). 
/// <para>All Components in the level will be Initialized on load before any Actor/Component gets BeginPlay </para> /// Requires component to be registered, and bWantsInitializeComponent to be true. /// </summary> public override void InitializeComponent() => E__Supper__USkyLightComponent_InitializeComponent(this); /// <summary> /// Called when this actor component has moved, allowing it to discard statically cached lighting information. /// </summary> public override void InvalidateLightingCacheDetailed(bool bInvalidateBuildEnqueuedLighting, bool bTranslationOnly) => E__Supper__USkyLightComponent_InvalidateLightingCacheDetailed(this, bInvalidateBuildEnqueuedLighting, bTranslationOnly); /// <summary> /// Called on each component when the Actor's bEnableCollisionChanged flag changes /// </summary> public override void OnActorEnableCollisionChanged() => E__Supper__USkyLightComponent_OnActorEnableCollisionChanged(this); /// <summary> /// Called when a component is created (not loaded). This can happen in the editor or during gameplay /// </summary> public override void OnComponentCreated() => E__Supper__USkyLightComponent_OnComponentCreated(this); /// <summary> /// Called when a component is destroyed /// </summary> /// <param name="bDestroyingHierarchy">True if the entire component hierarchy is being torn down, allows avoiding expensive operations</param> public override void OnComponentDestroyed(bool bDestroyingHierarchy) => E__Supper__USkyLightComponent_OnComponentDestroyed(this, bDestroyingHierarchy); /// <summary> /// Used to create any physics engine information for this component /// </summary> protected override void OnCreatePhysicsState() => E__Supper__USkyLightComponent_OnCreatePhysicsState(this); /// <summary> /// Used to shut down and physics engine structure for this component /// </summary> protected override void OnDestroyPhysicsState() => E__Supper__USkyLightComponent_OnDestroyPhysicsState(this); /// <summary> /// Called when a component is registered, 
after Scene is set, but before CreateRenderState_Concurrent or OnCreatePhysicsState are called. /// </summary> protected override void OnRegister() => E__Supper__USkyLightComponent_OnRegister(this); public override void OnRep_IsActive() => E__Supper__USkyLightComponent_OnRep_IsActive(this); /// <summary> /// Called when a component is unregistered. Called after DestroyRenderState_Concurrent and OnDestroyPhysicsState are called. /// </summary> protected override void OnUnregister() => E__Supper__USkyLightComponent_OnUnregister(this); /// <summary> /// Virtual call chain to register all tick functions /// </summary> /// <param name="bRegister">true to register, false, to unregister</param> protected override void RegisterComponentTickFunctions(bool bRegister) => E__Supper__USkyLightComponent_RegisterComponentTickFunctions(this, bRegister); /// <summary> /// Called to send dynamic data for this component to the rendering thread /// </summary> protected override void SendRenderDynamicData_Concurrent() => E__Supper__USkyLightComponent_SendRenderDynamicData_Concurrent(this); /// <summary> /// Called to send a transform update for this component to the rendering thread /// <para>@warning This is called concurrently on multiple threads (but never the same component concurrently) </para> /// </summary> protected override void SendRenderTransform_Concurrent() => E__Supper__USkyLightComponent_SendRenderTransform_Concurrent(this); /// <summary> /// Sets whether the component is active or not /// </summary> /// <param name="bNewActive">The new active state of the component</param> /// <param name="bReset">Whether the activation should happen even if ShouldActivate returns false.</param> public override void SetActive(bool bNewActive, bool bReset) => E__Supper__USkyLightComponent_SetActive(this, bNewActive, bReset); /// <summary> /// Sets whether the component should be auto activate or not. Only safe during construction scripts. 
/// </summary> /// <param name="bNewAutoActivate">The new auto activate state of the component</param> public override void SetAutoActivate(bool bNewAutoActivate) => E__Supper__USkyLightComponent_SetAutoActivate(this, bNewAutoActivate); /// <summary> /// Set this component's tick functions to be enabled or disabled. Only has an effect if the function is registered /// </summary> /// <param name="bEnabled">Whether it should be enabled or not</param> public override void SetComponentTickEnabled(bool bEnabled) => E__Supper__USkyLightComponent_SetComponentTickEnabled(this, bEnabled); /// <summary> /// Spawns a task on GameThread that will call SetComponentTickEnabled /// </summary> /// <param name="bEnabled">Whether it should be enabled or not</param> public override void SetComponentTickEnabledAsync(bool bEnabled) => E__Supper__USkyLightComponent_SetComponentTickEnabledAsync(this, bEnabled); /// <summary> /// Toggles the active state of the component /// </summary> public override void ToggleActive() => E__Supper__USkyLightComponent_ToggleActive(this); /// <summary> /// Handle this component being Uninitialized. /// <para>Called from AActor::EndPlay only if bHasBeenInitialized is true </para> /// </summary> public override void UninitializeComponent() => E__Supper__USkyLightComponent_UninitializeComponent(this); /// <summary> /// Called before destroying the object. This is called immediately upon deciding to destroy the object, to allow the object to begin an /// <para>asynchronous cleanup process. </para> /// </summary> public override void BeginDestroy() => E__Supper__USkyLightComponent_BeginDestroy(this); /// <summary> /// Called to finish destroying the object. After UObject::FinishDestroy is called, the object's memory should no longer be accessed. /// <para>@warning Because properties are destroyed here, Super::FinishDestroy() should always be called at the end of your child class's FinishDestroy() method, rather than at the beginning. 
</para> /// </summary> public override void FinishDestroy() => E__Supper__USkyLightComponent_FinishDestroy(this); /// <summary> /// Called during subobject creation to mark this component as editor only, which causes it to get stripped in packaged builds /// </summary> public override void MarkAsEditorOnlySubobject() => E__Supper__USkyLightComponent_MarkAsEditorOnlySubobject(this); /// <summary> /// Called after the C++ constructor has run on the CDO for a class. This is an obscure routine used to deal with the recursion /// <para>in the construction of the default materials </para> /// </summary> public override void PostCDOContruct() => E__Supper__USkyLightComponent_PostCDOContruct(this); /// <summary> /// Called after importing property values for this object (paste, duplicate or .t3d import) /// <para>Allow the object to perform any cleanup for properties which shouldn't be duplicated or </para> /// are unsupported by the script serialization /// </summary> public override void PostEditImport() => E__Supper__USkyLightComponent_PostEditImport(this); /// <summary> /// Called after the C++ constructor and after the properties have been initialized, including those loaded from config. /// <para>This is called before any serialization or other setup has happened. </para> /// </summary> public override void PostInitProperties() => E__Supper__USkyLightComponent_PostInitProperties(this); /// <summary> /// Do any object-specific cleanup required immediately after loading an object. /// <para>This is not called for newly-created objects, and by default will always execute on the game thread. 
</para> /// </summary> public override void PostLoad() => E__Supper__USkyLightComponent_PostLoad(this); /// <summary> /// Called right after receiving a bunch /// </summary> public override void PostNetReceive() => E__Supper__USkyLightComponent_PostNetReceive(this); /// <summary> /// Called right after calling all OnRep notifies (called even when there are no notifies) /// </summary> public override void PostRepNotifies() => E__Supper__USkyLightComponent_PostRepNotifies(this); /// <summary> /// Called from within SavePackage on the passed in base/root object. /// <para>This function is called after the package has been saved and can perform cleanup. </para> /// </summary> /// <param name="bCleanupIsRequired">Whether PreSaveRoot dirtied state that needs to be cleaned up</param> public override void PostSaveRoot(bool bCleanupIsRequired) => E__Supper__USkyLightComponent_PostSaveRoot(this, bCleanupIsRequired); /// <summary> /// Called right before being marked for destruction due to network replication /// </summary> public override void PreDestroyFromReplication() => E__Supper__USkyLightComponent_PreDestroyFromReplication(this); /// <summary> /// Called right before receiving a bunch /// </summary> public override void PreNetReceive() => E__Supper__USkyLightComponent_PreNetReceive(this); /// <summary> /// After a critical error, perform any mission-critical cleanup, such as restoring the video mode orreleasing hardware resources. /// </summary> public override void ShutdownAfterError() => E__Supper__USkyLightComponent_ShutdownAfterError(this); /// <summary> /// Called after PostLoad to create UObject cluster /// </summary> public override void CreateCluster() => E__Supper__USkyLightComponent_CreateCluster(this); /// <summary> /// Called during Garbage Collection to perform additional cleanup when the cluster is about to be destroyed due to PendingKill flag being set on it. 
/// </summary> public override void OnClusterMarkedAsPendingKill() => E__Supper__USkyLightComponent_OnClusterMarkedAsPendingKill(this); #endregion public static implicit operator IntPtr(ManageSkyLightComponent self) { return self?.NativePointer ?? IntPtr.Zero; } public static implicit operator ManageSkyLightComponent(ObjectPointerDescription PtrDesc) { return NativeManager.GetWrapper<ManageSkyLightComponent>(PtrDesc); } } }
// <copyright file="IncompleteLU.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// http://numerics.mathdotnet.com
// http://github.com/mathnet/mathnet-numerics
// http://mathnetnumerics.codeplex.com
//
// Copyright (c) 2009-2010 Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>

using System;
using MathNet.Numerics.LinearAlgebra.Solvers;
using MathNet.Numerics.Properties;

namespace MathNet.Numerics.LinearAlgebra.Complex32.Solvers
{
    using Numerics;

    /// <summary>
    /// An incomplete, level 0, LU factorization preconditioner.
    /// </summary>
    /// <remarks>
    /// The ILU(0) algorithm was taken from: <br/>
    /// Iterative methods for sparse linear systems <br/>
    /// Yousef Saad <br/>
    /// Algorithm is described in Chapter 10, section 10.3.2, page 275 <br/>
    /// </remarks>
    public sealed class ILU0Preconditioner : IPreconditioner<Complex32>
    {
        /// <summary>
        /// The matrix holding both the lower (L) and upper (U) factors. The
        /// decomposition matrices are combined into one matrix to reduce storage:
        /// U lives on and above the diagonal, L below it. L's unit diagonal is
        /// implicit and not stored (see <see cref="LowerTriangle"/>, which
        /// reconstructs it as 1).
        /// </summary>
        SparseMatrix _decompositionLU;

        /// <summary>
        /// Returns the upper triangular matrix that was created during the LU decomposition.
        /// </summary>
        /// <returns>A new matrix containing the upper triangular elements, diagonal included.</returns>
        internal Matrix<Complex32> UpperTriangle()
        {
            var result = new SparseMatrix(_decompositionLU.RowCount);
            for (var i = 0; i < _decompositionLU.RowCount; i++)
            {
                // Copy the diagonal entry and everything to its right.
                for (var j = i; j < _decompositionLU.ColumnCount; j++)
                {
                    result[i, j] = _decompositionLU[i, j];
                }
            }

            return result;
        }

        /// <summary>
        /// Returns the lower triangular matrix that was created during the LU decomposition.
        /// </summary>
        /// <returns>A new matrix containing the lower triangular elements with a unit diagonal.</returns>
        internal Matrix<Complex32> LowerTriangle()
        {
            var result = new SparseMatrix(_decompositionLU.RowCount);
            for (var i = 0; i < _decompositionLU.RowCount; i++)
            {
                for (var j = 0; j <= i; j++)
                {
                    if (i == j)
                    {
                        // The unit diagonal of L is implicit in the combined
                        // storage, so materialize it here.
                        result[i, j] = 1.0f;
                    }
                    else
                    {
                        result[i, j] = _decompositionLU[i, j];
                    }
                }
            }

            return result;
        }

        /// <summary>
        /// Initializes the preconditioner and loads the internal data structures.
        /// Performs the ILU(0) factorization in place on a copy of <paramref name="matrix"/>.
        /// </summary>
        /// <param name="matrix">The matrix upon which the preconditioner is based. </param>
        /// <exception cref="ArgumentNullException">If <paramref name="matrix"/> is <see langword="null" />.</exception>
        /// <exception cref="ArgumentException">If <paramref name="matrix"/> is not a square matrix.</exception>
        public void Initialize(Matrix<Complex32> matrix)
        {
            if (matrix == null)
            {
                throw new ArgumentNullException("matrix");
            }

            if (matrix.RowCount != matrix.ColumnCount)
            {
                throw new ArgumentException(Resources.ArgumentMatrixSquare, "matrix");
            }

            // Work on a copy so the caller's matrix is untouched.
            _decompositionLU = SparseMatrix.OfMatrix(matrix);

            // Original ILU(0) pseudo-code (Saad, Algorithm 10.4), where the
            // level-0 constraint restricts updates to the existing nonzero
            // pattern NZ(Z):
            // M == A
            // for i = 2, ... , n do
            //   for k = 1, .... , i - 1 do
            //     if (i,k) == NZ(Z) then
            //       compute z(i,k) = z(i,k) / z(k,k);
            //       for j = k + 1, ...., n do
            //         if (i,j) == NZ(Z) then
            //           compute z(i,j) = z(i,j) - z(i,k) * z(k,j)
            //         end
            //       end
            //     end
            //   end
            // end
            for (var i = 0; i < _decompositionLU.RowCount; i++)
            {
                for (var k = 0; k < i; k++)
                {
                    // Only eliminate where the entry is already nonzero (level 0).
                    if (_decompositionLU[i, k] != 0.0f)
                    {
                        // Multiplier for row i w.r.t. pivot row k; stored in place
                        // as the L(i,k) entry of the combined factorization.
                        var t = _decompositionLU[i, k]/_decompositionLU[k, k];
                        _decompositionLU[i, k] = t;

                        // The diagonal update is pulled out of the j-loop below
                        // (which skips j == i).
                        if (_decompositionLU[k, i] != 0.0f)
                        {
                            _decompositionLU[i, i] = _decompositionLU[i, i] - (t*_decompositionLU[k, i]);
                        }

                        for (var j = k + 1; j < _decompositionLU.RowCount; j++)
                        {
                            if (j == i)
                            {
                                // Diagonal already handled above.
                                continue;
                            }

                            if (_decompositionLU[i, j] != 0.0f)
                            {
                                _decompositionLU[i, j] = _decompositionLU[i, j] - (t*_decompositionLU[k, j]);
                            }
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Approximates the solution to the matrix equation <b>Ax = b</b> by a
        /// forward solve with L followed by a backward solve with U.
        /// </summary>
        /// <param name="rhs">The right hand side vector.</param>
        /// <param name="lhs">The left hand side vector. Also known as the result vector.</param>
        /// <exception cref="ArgumentException">
        /// If <see cref="Initialize"/> has not been called, or the vector lengths
        /// do not match the decomposed matrix.
        /// </exception>
        public void Approximate(Vector<Complex32> rhs, Vector<Complex32> lhs)
        {
            if (_decompositionLU == null)
            {
                throw new ArgumentException(Resources.ArgumentMatrixDoesNotExist);
            }

            if ((lhs.Count != rhs.Count) || (lhs.Count != _decompositionLU.RowCount))
            {
                throw new ArgumentException(Resources.ArgumentVectorsSameLength);
            }

            // Solve:
            // Lz = y
            // Which gives
            // for (int i = 1; i < matrix.RowLength; i++)
            // {
            //     z_i = l_ii^-1 * (y_i - SUM_(j<i) l_ij * z_j)
            // }
            // NOTE: l_ii should be 1 because u_ii has to be the value,
            // so no division is needed in the forward sweep.
            var rowValues = new DenseVector(_decompositionLU.RowCount);
            for (var i = 0; i < _decompositionLU.RowCount; i++)
            {
                // Clear the rowValues (Row() fills it, but only up to the
                // row length — keep the buffer clean between iterations).
                rowValues.Clear();
                _decompositionLU.Row(i, rowValues);

                var sum = Complex32.Zero;
                for (var j = 0; j < i; j++)
                {
                    sum += rowValues[j]*lhs[j];
                }

                lhs[i] = rhs[i] - sum;
            }

            // Solve:
            // Ux = z
            // Which gives
            // for (int i = matrix.RowLength - 1; i > -1; i--)
            // {
            //     x_i = u_ii^-1 * (z_i - SUM_(j > i) u_ij * x_j)
            // }
            for (var i = _decompositionLU.RowCount - 1; i > -1; i--)
            {
                _decompositionLU.Row(i, rowValues);

                var sum = Complex32.Zero;
                for (var j = _decompositionLU.RowCount - 1; j > i; j--)
                {
                    sum += rowValues[j]*lhs[j];
                }

                // Divide by the stored U diagonal entry.
                lhs[i] = 1/rowValues[i]*(lhs[i] - sum);
            }
        }
    }
}
using System;
using NBitcoin.BouncyCastle.Crypto;
using NBitcoin.BouncyCastle.Crypto.Parameters;
using NBitcoin.BouncyCastle.Math;
using NBitcoin.BouncyCastle.Security;

namespace NBitcoin.BouncyCastle.Crypto.Generators
{
    /**
     * Generator for Pbe derived keys and ivs as defined by Pkcs 12 V1.0.
     * <p>
     * The document this implementation is based on can be found at
     * <a href="http://www.rsasecurity.com/rsalabs/pkcs/pkcs-12/index.html">
     * RSA's Pkcs12 Page</a>
     * </p>
     */
    public class Pkcs12ParametersGenerator
        : PbeParametersGenerator
    {
        // "ID byte" values from the PKCS#12 KDF: they select which kind of
        // material is being derived so key, IV and MAC bytes never collide.
        public const int KeyMaterial = 1;
        public const int IVMaterial = 2;
        public const int MacMaterial = 3;

        private readonly IDigest digest;

        // u = digest output size in bytes; v = digest internal block length
        // in bytes. Both are the quantities named u and v in the PKCS#12 spec.
        private readonly int u;
        private readonly int v;

        /**
         * Construct a Pkcs 12 Parameters generator.
         *
         * @param digest the digest to be used as the source of derived keys.
         * @exception ArgumentException if an unknown digest is passed in.
         */
        public Pkcs12ParametersGenerator(
            IDigest digest)
        {
            this.digest = digest;

            u = digest.GetDigestSize();
            v = digest.GetByteLength();
        }

        /**
         * add a + b + 1, returning the result in a. The a value is treated
         * as a BigInteger of length (b.Length * 8) bits. The result is
         * modulo 2^b.Length in case of overflow.
         */
        private void Adjust(
            byte[]  a,
            int     aOff,
            byte[]  b)
        {
            // Ripple-carry addition from the least significant (last) byte,
            // with the extra "+ 1" folded into the first step.
            int  x = (b[b.Length - 1] & 0xff) + (a[aOff + b.Length - 1] & 0xff) + 1;

            a[aOff + b.Length - 1] = (byte)x;
            x = (int) ((uint) x >> 8);

            for (int i = b.Length - 2; i >= 0; i--)
            {
                x += (b[i] & 0xff) + (a[aOff + i] & 0xff);
                a[aOff + i] = (byte)x;
                x = (int) ((uint) x >> 8);
            }
        }

        /**
         * generation of a derived key ala Pkcs12 V1.0.
         *
         * The variable names (D, S, P, I, A, B, c) follow the PKCS#12 key
         * derivation description: D is the id-byte "diversifier" block, S and
         * P are the salt and password expanded to a multiple of v bytes,
         * I = S || P is the working buffer that gets adjusted each round,
         * and each of the c rounds contributes u bytes of output.
         *
         * @param idByte material type selector (KeyMaterial, IVMaterial or MacMaterial).
         * @param n      number of output bytes required.
         */
        private byte[] GenerateDerivedKey(
            int idByte,
            int n)
        {
            byte[]  D = new byte[v];
            byte[]  dKey = new byte[n];

            // D: the diversifier, v copies of the id byte.
            for (int i = 0; i != D.Length; i++)
            {
                D[i] = (byte)idByte;
            }

            // S: salt repeated/truncated to the next multiple of v bytes
            // (empty when no salt was supplied).
            byte[] S;
            if ((mSalt != null) && (mSalt.Length != 0))
            {
                S = new byte[v * ((mSalt.Length + v - 1) / v)];
                for (int i = 0; i != S.Length; i++)
                {
                    S[i] = mSalt[i % mSalt.Length];
                }
            }
            else
            {
                S = new byte[0];
            }

            // P: password expanded the same way as the salt.
            byte[] P;
            if ((mPassword != null) && (mPassword.Length != 0))
            {
                P = new byte[v * ((mPassword.Length + v - 1) / v)];
                for (int i = 0; i != P.Length; i++)
                {
                    P[i] = mPassword[i % mPassword.Length];
                }
            }
            else
            {
                P = new byte[0];
            }

            // I = S || P
            byte[]  I = new byte[S.Length + P.Length];

            Array.Copy(S, 0, I, 0, S.Length);
            Array.Copy(P, 0, I, S.Length, P.Length);

            byte[]  B = new byte[v];
            // c: number of hash rounds needed to produce n bytes (ceil(n / u)).
            int     c = (n + u - 1) / u;
            byte[]  A = new byte[u];

            for (int i = 1; i <= c; i++)
            {
                // A = H^mIterationCount(D || I)
                digest.BlockUpdate(D, 0, D.Length);
                digest.BlockUpdate(I, 0, I.Length);
                digest.DoFinal(A, 0);

                for (int j = 1; j != mIterationCount; j++)
                {
                    digest.BlockUpdate(A, 0, A.Length);
                    digest.DoFinal(A, 0);
                }

                // B: A repeated/truncated to v bytes.
                for (int j = 0; j != B.Length; j++)
                {
                    B[j] = A[j % A.Length];
                }

                // I_j = (I_j + B + 1) mod 2^(v*8) for every v-byte chunk of I.
                for (int j = 0; j != I.Length / v; j++)
                {
                    Adjust(I, j * v, B);
                }

                if (i == c)
                {
                    // Last round: only copy the remaining bytes still needed.
                    Array.Copy(A, 0, dKey, (i - 1) * u, dKey.Length - ((i - 1) * u));
                }
                else
                {
                    Array.Copy(A, 0, dKey, (i - 1) * u, A.Length);
                }
            }

            return dKey;
        }

        /**
         * Generate a key parameter derived from the password, salt, and iteration
         * count we are currently initialised with.
         *
         * @param keySize the size of the key we want (in bits)
         * @return a KeyParameter object.
         */
        [Obsolete("Use version with 'algorithm' parameter")]
        public override ICipherParameters GenerateDerivedParameters(
            int keySize)
        {
            keySize /= 8;

            byte[] dKey = GenerateDerivedKey(KeyMaterial, keySize);

            return new KeyParameter(dKey, 0, keySize);
        }

        /**
         * Generate a key parameter derived from the password, salt, and iteration
         * count we are currently initialised with, wrapped for the named algorithm.
         *
         * @param algorithm the cipher algorithm the key is intended for.
         * @param keySize the size of the key we want (in bits)
         * @return an algorithm-specific KeyParameter object.
         */
        public override ICipherParameters GenerateDerivedParameters(
            string	algorithm,
            int		keySize)
        {
            keySize /= 8;

            byte[] dKey = GenerateDerivedKey(KeyMaterial, keySize);

            return ParameterUtilities.CreateKeyParameter(algorithm, dKey, 0, keySize);
        }

        /**
         * Generate a key with initialisation vector parameter derived from
         * the password, salt, and iteration count we are currently initialised
         * with.
         *
         * @param keySize the size of the key we want (in bits)
         * @param ivSize the size of the iv we want (in bits)
         * @return a ParametersWithIV object.
         */
        [Obsolete("Use version with 'algorithm' parameter")]
        public override ICipherParameters GenerateDerivedParameters(
            int	keySize,
            int	ivSize)
        {
            keySize /= 8;
            ivSize /= 8;

            // Key and IV are derived independently, distinguished by id byte.
            byte[] dKey = GenerateDerivedKey(KeyMaterial, keySize);
            byte[] iv = GenerateDerivedKey(IVMaterial, ivSize);

            return new ParametersWithIV(new KeyParameter(dKey, 0, keySize), iv, 0, ivSize);
        }

        /**
         * Generate a key with initialisation vector parameter derived from
         * the password, salt, and iteration count we are currently initialised
         * with, wrapped for the named algorithm.
         *
         * @param algorithm the cipher algorithm the key is intended for.
         * @param keySize the size of the key we want (in bits)
         * @param ivSize the size of the iv we want (in bits)
         * @return a ParametersWithIV object.
         */
        public override ICipherParameters GenerateDerivedParameters(
            string	algorithm,
            int		keySize,
            int		ivSize)
        {
            keySize /= 8;
            ivSize /= 8;

            byte[] dKey = GenerateDerivedKey(KeyMaterial, keySize);
            KeyParameter key = ParameterUtilities.CreateKeyParameter(algorithm, dKey, 0, keySize);

            byte[] iv = GenerateDerivedKey(IVMaterial, ivSize);

            return new ParametersWithIV(key, iv, 0, ivSize);
        }

        /**
         * Generate a key parameter for use with a MAC derived from the password,
         * salt, and iteration count we are currently initialised with.
         *
         * @param keySize the size of the key we want (in bits)
         * @return a KeyParameter object.
         */
        public override ICipherParameters GenerateDerivedMacParameters(
            int keySize)
        {
            keySize /= 8;

            byte[] dKey = GenerateDerivedKey(MacMaterial, keySize);

            return new KeyParameter(dKey, 0, keySize);
        }
    }
}
using OpenKh.Kh2;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Windows;
using Xe.Tools;
using Xe.Tools.Models;
using Xe.Tools.Wpf.Commands;
using Xe.Tools.Wpf.Models;

namespace OpenKh.Tools.ObjentryEditor.ViewModels
{
    /// <summary>
    /// List view-model over a collection of <see cref="Objentry"/> records,
    /// supporting add/clone, per-item editing, and case-insensitive name search.
    /// </summary>
    public class ObjentryViewModel : GenericListModel<ObjentryViewModel.ObjentryEntryViewModel>
    {
        /// <summary>
        /// View-model wrapper around a single <see cref="Objentry"/>, exposing
        /// its fields as bindable properties.
        /// </summary>
        public class ObjentryEntryViewModel : BaseNotifyPropertyChanged
        {
            /// <summary>The wrapped object entry (edited in place).</summary>
            public Objentry Objentry { get; }

            public ObjentryEntryViewModel(Objentry objEntry)
            {
                Objentry = objEntry;
            }

            /// <summary>Display name: hexadecimal id followed by the model name.</summary>
            public string Name => $"{Id} {ModelName}";

            /// <summary>Object id rendered as upper-case hexadecimal.</summary>
            public string Id => $"{Objentry.ObjectId:X02}";

            public ushort ObjectId
            {
                get => (ushort)Objentry.ObjectId;
                set
                {
                    Objentry.ObjectId = value;
                    // Name embeds the id, so it must be refreshed too.
                    OnPropertyChanged(nameof(Name));
                }
            }

            public Objentry.Type ObjectType
            {
                get => Objentry.ObjectType;
                set
                {
                    Objentry.ObjectType = value;
                    OnPropertyChanged(nameof(ObjectType));
                }
            }

            public byte SubType { get => Objentry.SubType; set => Objentry.SubType = value; }
            public byte DrawPriority { get => Objentry.DrawPriority; set => Objentry.DrawPriority = value; }
            public byte WeaponJoint { get => Objentry.WeaponJoint; set => Objentry.WeaponJoint = value; }
            public string ModelName { get => Objentry.ModelName; set => Objentry.ModelName = value; }
            public string AnimationName { get => Objentry.AnimationName; set => Objentry.AnimationName = value; }
            public ushort Flags { get => Objentry.Flags; set => Objentry.Flags = value; }
            public Objentry.TargetType ObjectTargetType { get => Objentry.ObjectTargetType; set => Objentry.ObjectTargetType = value; }
            public ushort NeoStatus { get => Objentry.NeoStatus; set => Objentry.NeoStatus = value; }
            public ushort NeoMoveset { get => Objentry.NeoMoveset; set => Objentry.NeoMoveset = value; }
            public float Weight { get => Objentry.Weight; set => Objentry.Weight = value; }
            public byte SpawnLimiter { get => Objentry.SpawnLimiter; set => Objentry.SpawnLimiter = value; }
            public byte Page { get => Objentry.Page; set => Objentry.Page = value; }
            public Objentry.ShadowSize ObjectShadowSize { get => Objentry.ObjectShadowSize; set => Objentry.ObjectShadowSize = value; }

            public Objentry.Form ObjectForm
            {
                get => Objentry.ObjectForm;
                set
                {
                    Objentry.ObjectForm = value;
                    OnPropertyChanged(nameof(ObjectForm));
                }
            }

            public ushort SpawnObject1 { get => Objentry.SpawnObject1; set => Objentry.SpawnObject1 = value; }
            public ushort SpawnObject2 { get => Objentry.SpawnObject2; set => Objentry.SpawnObject2 = value; }
            public ushort SpawnObject3 { get => Objentry.SpawnObject3; set => Objentry.SpawnObject3 = value; }
            public ushort SpawnObject4 { get => Objentry.SpawnObject4; set => Objentry.SpawnObject4 = value; }
            public bool NoApdx { get => Objentry.NoApdx; set => Objentry.NoApdx = value; }
            public bool Before { get => Objentry.Before; set => Objentry.Before = value; }
            public bool FixColor { get => Objentry.FixColor; set => Objentry.FixColor = value; }
            public bool Fly { get => Objentry.Fly; set => Objentry.Fly = value; }
            public bool Scissoring { get => Objentry.Scissoring; set => Objentry.Scissoring = value; }
            public bool IsPirate { get => Objentry.IsPirate; set => Objentry.IsPirate = value; }
            public bool WallOcclusion { get => Objentry.WallOcclusion; set => Objentry.WallOcclusion = value; }
            public bool Hift { get => Objentry.Hift; set => Objentry.Hift = value; }

            public override string ToString() => Name;
        }

        private string _searchTerm;

        // Enum choice lists exposed for combo-box binding.
        public EnumModel<Objentry.Type> ObjEntryTypes { get; }
        public EnumModel<Objentry.TargetType> TargetTypes { get; }
        public EnumModel<Objentry.ShadowSize> ShadowSizes { get; }
        public EnumModel<Objentry.Form> Forms { get; }

        public ObjentryViewModel(IEnumerable<Objentry> items) :
            base(items.Select(Map))
        {
            ObjEntryTypes = new EnumModel<Objentry.Type>();
            TargetTypes = new EnumModel<Objentry.TargetType>();
            ShadowSizes = new EnumModel<Objentry.ShadowSize>();
            Forms = new EnumModel<Objentry.Form>();

            // Add a fresh entry and immediately select it for editing.
            AddAndSelectCommand = new RelayCommand(x =>
            {
                AddCommand.Execute(null);
                SelectedIndex = Items.Count - 1;
            });

            // Duplicate the selected entry (with a new object id) and append it.
            CloneCommand = new RelayCommand(x =>
            {
                var clonedItem = Clone(SelectedItem.Objentry);
                Items.Add(new ObjentryEntryViewModel(clonedItem));
                OnPropertyChanged(nameof(Items));
            }, x => SelectedItem != null);

            ClearObject1 = new RelayCommand(x =>
            {
                SelectedItem.SpawnObject1 = 0;
                OnPropertyChanged(nameof(SelectedItem));
            });
            ClearObject2 = new RelayCommand(x =>
            {
                SelectedItem.SpawnObject2 = 0;
                OnPropertyChanged(nameof(SelectedItem));
            });
            ClearObject3 = new RelayCommand(x =>
            {
                SelectedItem.SpawnObject3 = 0;
                OnPropertyChanged(nameof(SelectedItem));
            });
            ClearObject4 = new RelayCommand(x =>
            {
                SelectedItem.SpawnObject4 = 0;
                OnPropertyChanged(nameof(SelectedItem));
            });
        }

        public RelayCommand AddAndSelectCommand { get; set; }
        public RelayCommand CloneCommand { get; set; }
        public RelayCommand ClearObject1 { get; set; }
        public RelayCommand ClearObject2 { get; set; }
        public RelayCommand ClearObject3 { get; set; }
        public RelayCommand ClearObject4 { get; set; }

        /// <summary>Visible while an item is selected; hosts the edit panel.</summary>
        public Visibility IsItemEditingVisible => IsItemSelected ?
            Visibility.Visible : Visibility.Collapsed;

        /// <summary>Visible while no item is selected; hosts the placeholder message.</summary>
        public Visibility IsItemEditMessageVisible => !IsItemSelected ?
            Visibility.Visible : Visibility.Collapsed;

        /// <summary>Search text; setting it re-filters the list immediately.</summary>
        public string SearchTerm
        {
            get => _searchTerm;
            set
            {
                _searchTerm = value;
                PerformFiltering();
            }
        }

        /// <summary>Unwraps the view-models back into raw entries (e.g. for saving).</summary>
        public IEnumerable<Objentry> AsObjEntries() => Items.Select(x => x.Objentry);

        protected override void OnSelectedItem(ObjentryEntryViewModel item)
        {
            base.OnSelectedItem(item);
            OnPropertyChanged(nameof(IsItemEditingVisible));
            OnPropertyChanged(nameof(IsItemEditMessageVisible));
        }

        private void PerformFiltering()
        {
            if (string.IsNullOrWhiteSpace(_searchTerm))
                Filter(FilterNone);
            else
                Filter(FilterByCharacter);
        }

        protected override ObjentryEntryViewModel OnNewItem()
        {
            return new ObjentryEntryViewModel(new Objentry()
            {
                ObjectId = GetObjectIdForNewEntry()
            });
        }

        /// <summary>
        /// Creates a shallow copy of <paramref name="source"/> via reflection and
        /// assigns it a fresh object id.
        /// </summary>
        private Objentry Clone(Objentry source)
        {
            var newObj = new Objentry();
            foreach (var property in newObj.GetType().GetProperties(BindingFlags.Public | BindingFlags.Instance))
            {
                // Skip read-only properties: SetValue would throw on them.
                if (property.CanWrite)
                    property.SetValue(newObj, property.GetValue(source));
            }

            newObj.ObjectId = GetObjectIdForNewEntry();
            return newObj;
        }

        // NOTE(review): assumes the list is kept in ascending id order; if the
        // last item does not carry the highest id, the new id may collide.
        private ushort GetObjectIdForNewEntry()
        {
            return (ushort)(Items.LastOrDefault()?.ObjectId + 1 ?? 0);
        }

        private bool FilterNone(ObjentryEntryViewModel arg) => true;

        // Culture-invariant, case-insensitive substring match. The previous
        // ToUpper()-based comparison was culture-sensitive (e.g. Turkish dotless-I)
        // and allocated two strings per call.
        private bool FilterByCharacter(ObjentryEntryViewModel arg) =>
            arg.Name.IndexOf(SearchTerm, StringComparison.OrdinalIgnoreCase) >= 0;

        private static ObjentryEntryViewModel Map(Objentry item) => new ObjentryEntryViewModel(item);
    }
}
//! \file       AudioNWA.cs
//! \date       Mon Apr 18 15:10:59 2016
//! \brief      RealLive audio format.
//
// Copyright (C) 2016 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//

using System;
using System.ComponentModel.Composition;
using System.IO;
using GameRes.Utility;

namespace GameRes.Formats.RealLive
{
    /// <summary>
    /// Metadata parsed from the 0x28-byte NWA header.
    /// </summary>
    internal class NwaMetaData
    {
        public WaveFormat Format;
        public int  Compression;        // -1 = uncompressed PCM; 0..5 = compression level
        public bool RunLengthEncoded;   // true when the bit stream may contain RLE runs
        public int  BlockCount;
        public int  PcmSize;            // total decoded PCM size, in bytes
        public int  PackedSize;
        public int  SampleCount;
        public int  BlockSize;          // samples per regular block
        public int  FinalBlockSize;     // samples in the last (possibly short) block
    }

    [Export(typeof(AudioFormat))]
    public class NwaAudio : AudioFormat
    {
        public override string         Tag { get { return "NWA"; } }
        public override string Description { get { return "RealLive engine audio format"; } }
        // No magic bytes: the format is recognized by validating header fields.
        public override uint     Signature { get { return 0; } }

        /// <summary>
        /// Parses the NWA header; returns decoded PCM input on success,
        /// or null when the stream does not look like NWA audio.
        /// </summary>
        public override SoundInput TryOpen (IBinaryStream file)
        {
            var header = file.ReadHeader (0x28);
            ushort channels = header.ToUInt16 (0);
            if (0 == channels || channels > 2)
                return null;
            ushort bps = header.ToUInt16 (2);
            if (bps != 8 && bps != 16)
                return null;
            var info = new NwaMetaData
            {
                Compression         = header.ToInt32 (8),
                RunLengthEncoded    = 0 != header.ToInt32 (0xC),
                BlockCount          = header.ToInt32 (0x10),
                PcmSize             = header.ToInt32 (0x14),
                PackedSize          = header.ToInt32 (0x18),
                SampleCount         = header.ToInt32 (0x1C),
                BlockSize           = header.ToInt32 (0x20),
                FinalBlockSize      = header.ToInt32 (0x24),
            };
            if (info.PcmSize <= 0)
                return null;
            info.Format.FormatTag = 1;  // WAVE_FORMAT_PCM
            info.Format.Channels = channels;
            info.Format.BitsPerSample = bps;
            info.Format.SamplesPerSecond = header.ToUInt32 (4);
            info.Format.BlockAlign = (ushort)(channels * bps/8);
            info.Format.AverageBytesPerSecond = info.Format.BlockAlign * info.Format.SamplesPerSecond;
            if (-1 == info.Compression)
            {
                // Uncompressed variant: raw PCM follows the 0x2C-byte prologue.
                if (info.PcmSize > file.Length - 0x2C)
                    return null;
                return new RawPcmInput (new StreamRegion (file.AsStream, 0x2C, info.PcmSize), info.Format);
            }
            if (info.Compression > 5)
                return null;
            // Sanity check: declared PCM size must match sample count * sample width.
            if (info.PcmSize != info.SampleCount * bps / 8)
                return null;
            using (var decoder = new NwaDecoder (file, info))
            {
                decoder.Decode();
                var pcm = new MemoryStream (decoder.Output);
                var sound = new RawPcmInput (pcm, info.Format);
                // Decoded output is fully buffered in memory, so the source
                // stream is no longer needed.
                file.Dispose();
                return sound;
            }
        }
    }

    /// <summary>
    /// Decoder for compressed NWA streams: per-block differential PCM with
    /// variable-width deltas and optional run-length encoding, read from an
    /// LSB-first bit stream.
    /// </summary>
    internal sealed class NwaDecoder : IDisposable
    {
        IBinaryStream   m_input;
        byte[]          m_output;
        NwaMetaData     m_info;
        short[]         m_sample;   // current predictor value per channel (max 2)
        LsbBitStream    m_bits;

        /// <summary>Decoded PCM data; valid after <see cref="Decode"/> returns.</summary>
        public byte[] Output { get { return m_output; } }

        public NwaDecoder (IBinaryStream input, NwaMetaData info)
        {
            m_input = input;
            m_info = info;
            m_output = new byte[m_info.PcmSize];
            m_sample = new short[2];
            // 'true' leaves the underlying stream open when the bit stream is
            // disposed; the input's lifetime is managed by the caller.
            m_bits = new LsbBitStream (input.AsStream, true);
        }

        int m_dst;  // write position in m_output

        /// <summary>
        /// Reads the block offset table at 0x2C and decodes every block.
        /// </summary>
        public void Decode ()
        {
            m_input.Position = 0x2C;
            var offsets = new uint[m_info.BlockCount];
            for (int i = 0; i < offsets.Length; ++i)
                offsets[i] = m_input.ReadUInt32();
            m_dst = 0;
            // All blocks but the last have the regular block size.
            for (int i = 0; i < offsets.Length-1; ++i)
            {
                m_input.Position = offsets[i];
                DecodeBlock (m_info.BlockSize);
            }
            m_input.Position = offsets[offsets.Length-1];
            if (m_info.FinalBlockSize > 0)
                DecodeBlock (m_info.FinalBlockSize);
            else
                DecodeBlock (m_info.BlockSize);
        }

        /// <summary>
        /// Decodes one block of <paramref name="block_size"/> samples.
        /// Each block begins with one raw seed sample per channel, followed by
        /// a bit stream of 3-bit control codes selecting delta width/shift.
        /// </summary>
        void DecodeBlock (int block_size)
        {
            int channel_count = m_info.Format.Channels;
            // Seed predictor values: stored verbatim at the start of the block.
            for (int c = 0; c < channel_count; ++c)
            {
                if (8 == m_info.Format.BitsPerSample)
                    m_sample[c] = m_input.ReadUInt8();
                else
                    m_sample[c] = m_input.ReadInt16();
            }
            m_bits.Reset();
            int channel = 0;
            int repeat_count = 0;
            for (int i = 0; i < block_size; ++i)
            {
                if (0 == repeat_count)
                {
                    int ctl = m_bits.GetBits (3);
                    if (7 == ctl)
                    {
                        // ctl 7: either reset the sample to zero, or apply a
                        // wide delta (largest magnitude class).
                        if (1 == m_bits.GetNextBit())
                        {
                            m_sample[channel] = 0;
                        }
                        else
                        {
                            int bits = 8;
                            int shift = 9;
                            if (m_info.Compression < 3)
                            {
                                bits -= m_info.Compression;
                                shift += m_info.Compression;
                            }
                            // Sign-magnitude delta: top bit selects subtract/add.
                            int sign_bit = 1 << (bits - 1);
                            int mask = sign_bit - 1;
                            int val = m_bits.GetBits (bits);
                            if (0 != (val & sign_bit))
                                m_sample[channel] -= (short)((val & mask) << shift);
                            else
                                m_sample[channel] += (short)((val & mask) << shift);
                        }
                    }
                    else if (ctl != 0)
                    {
                        // ctl 1..6: delta whose width and shift depend on the
                        // control code and the compression level.
                        int bits, shift;
                        if (m_info.Compression < 3)
                        {
                            bits = 5 - m_info.Compression;
                            shift = 2 + ctl + m_info.Compression;
                        }
                        else
                        {
                            bits = 3 + m_info.Compression;
                            shift = 1 + ctl;
                        }
                        int sign_bit = 1 << (bits - 1);
                        int mask = sign_bit - 1;
                        int val = m_bits.GetBits (bits);
                        if (0 != (val & sign_bit))
                            m_sample[channel] -= (short)((val & mask) << shift);
                        else
                            m_sample[channel] += (short)((val & mask) << shift);
                    }
                    else if (m_info.RunLengthEncoded)
                    {
                        // ctl 0 with RLE enabled: read an escalating run length
                        // (1 bit -> 2 bits -> 8 bits); the current sample is
                        // repeated for that many outputs.
                        repeat_count = m_bits.GetNextBit();
                        if (1 == repeat_count)
                        {
                            repeat_count = m_bits.GetBits (2);
                            if (3 == repeat_count)
                                repeat_count = m_bits.GetBits (8);
                        }
                    }
                    // ctl 0 without RLE: sample unchanged (zero delta).
                }
                else
                {
                    --repeat_count;
                }
                // Emit the (possibly repeated) current sample for this channel.
                if (8 == m_info.Format.BitsPerSample)
                {
                    m_output[m_dst++] = (byte)m_sample[channel];
                }
                else
                {
                    LittleEndian.Pack (m_sample[channel], m_output, m_dst);
                    m_dst += 2;
                }
                // Stereo samples are interleaved: alternate channels each step.
                if (2 == channel_count)
                    channel ^= 1;
            }
        }

        #region IDisposable Members
        // Nothing to release: the bit stream was created in leave-open mode and
        // the input stream is owned by the caller.
        public void Dispose ()
        {
        }
        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using Xunit;

namespace System.Linq.Expressions.Tests
{
    /// <summary>
    /// Tests that <see cref="Expression.Not(Expression)"/> applied through
    /// nested/invoked lambdas computes the lifted bitwise complement for every
    /// nullable integral type, under both the compiler and the interpreter.
    /// </summary>
    public static class LambdaUnaryNotNullableTests
    {
        #region Test methods

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableByteTest(bool useInterpreter)
        {
            foreach (byte? value in new byte?[] { null, 0, 1, byte.MaxValue })
            {
                VerifyUnaryNotNullableByte(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableIntTest(bool useInterpreter)
        {
            foreach (int? value in new int?[] { null, 0, 1, -1, int.MinValue, int.MaxValue })
            {
                VerifyUnaryNotNullableInt(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableLongTest(bool useInterpreter)
        {
            foreach (long? value in new long?[] { null, 0, 1, -1, long.MinValue, long.MaxValue })
            {
                VerifyUnaryNotNullableLong(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableSByteTest(bool useInterpreter)
        {
            foreach (sbyte? value in new sbyte?[] { null, 0, 1, -1, sbyte.MinValue, sbyte.MaxValue })
            {
                VerifyUnaryNotNullableSByte(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableShortTest(bool useInterpreter)
        {
            foreach (short? value in new short?[] { null, 0, 1, -1, short.MinValue, short.MaxValue })
            {
                VerifyUnaryNotNullableShort(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableUIntTest(bool useInterpreter)
        {
            foreach (uint? value in new uint?[] { null, 0, 1, uint.MaxValue })
            {
                VerifyUnaryNotNullableUInt(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableULongTest(bool useInterpreter)
        {
            foreach (ulong? value in new ulong?[] { null, 0, 1, ulong.MaxValue })
            {
                VerifyUnaryNotNullableULong(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableUShortTest(bool useInterpreter)
        {
            foreach (ushort? value in new ushort?[] { null, 0, 1, ushort.MaxValue })
            {
                VerifyUnaryNotNullableUShort(value, useInterpreter);
            }
        }

        #endregion

        #region Test verifiers

        // The eight per-type verifiers were near-identical ~60-line copies of
        // one another; the shared expression-tree plumbing now lives in this
        // single generic core.  The per-type wrappers only compute the
        // expected value with the C# '~' operator for that type.

        /// <summary>
        /// Builds four lambda shapes around Expression.Not over a nullable
        /// parameter -- direct invocation, parameterized function generator,
        /// invoked function generator, and parameter-taking function
        /// generator -- compiles each (compiler or interpreter per
        /// <paramref name="useInterpreter"/>) and asserts every shape yields
        /// <paramref name="expected"/> for <paramref name="value"/>.
        /// </summary>
        private static void VerifyUnaryNotNullable<T>(T? value, T? expected, bool useInterpreter) where T : struct
        {
            ParameterExpression p = Expression.Parameter(typeof(T?), "p");

            // parameter hard coded
            Expression<Func<T?>> e1 = Expression.Lambda<Func<T?>>(
                Expression.Invoke(
                    Expression.Lambda<Func<T?, T?>>(
                        Expression.Not(p),
                        new ParameterExpression[] { p }),
                    new Expression[] { Expression.Constant(value, typeof(T?)) }),
                Enumerable.Empty<ParameterExpression>());
            Func<T?> f1 = e1.Compile(useInterpreter);

            // function generator that takes a parameter
            Expression<Func<T?, Func<T?>>> e2 = Expression.Lambda<Func<T?, Func<T?>>>(
                Expression.Lambda<Func<T?>>(
                    Expression.Not(p),
                    Enumerable.Empty<ParameterExpression>()),
                new ParameterExpression[] { p });
            Func<T?, Func<T?>> f2 = e2.Compile(useInterpreter);

            // function generator
            Expression<Func<Func<T?, T?>>> e3 = Expression.Lambda<Func<Func<T?, T?>>>(
                Expression.Invoke(
                    Expression.Lambda<Func<Func<T?, T?>>>(
                        Expression.Lambda<Func<T?, T?>>(
                            Expression.Not(p),
                            new ParameterExpression[] { p }),
                        Enumerable.Empty<ParameterExpression>()),
                    Enumerable.Empty<Expression>()),
                Enumerable.Empty<ParameterExpression>());
            Func<T?, T?> f3 = e3.Compile(useInterpreter)();

            // parameter-taking function generator
            Expression<Func<Func<T?, T?>>> e4 = Expression.Lambda<Func<Func<T?, T?>>>(
                Expression.Lambda<Func<T?, T?>>(
                    Expression.Not(p),
                    new ParameterExpression[] { p }),
                Enumerable.Empty<ParameterExpression>());
            Func<Func<T?, T?>> f4 = e4.Compile(useInterpreter);

            Assert.Equal(expected, f1());
            Assert.Equal(expected, f2(value)());
            Assert.Equal(expected, f3(value));
            Assert.Equal(expected, f4()(value));
        }

        private static void VerifyUnaryNotNullableByte(byte? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, (byte?)~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableInt(int? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, ~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableLong(long? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, ~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableSByte(sbyte? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, (sbyte?)~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableShort(short? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, (short?)~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableUInt(uint? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, ~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableULong(ulong? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, ~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableUShort(ushort? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, (ushort?)~value, useInterpreter);
        }

        #endregion
    }
}
// Copyright 2019 Esri // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Net.Http; using System.Runtime.Serialization.Json; using System.Threading.Tasks; using System.Web.Script.Serialization; using System.Windows; using System.Windows.Input; using System.Windows.Media; using ArcGIS.Desktop.Core; using ArcGIS.Desktop.Core.Portal; using ArcGIS.Desktop.Framework; using ArcGIS.Desktop.Framework.Contracts; using SharingContracts = ArcGIS.Desktop.Tests.APIHelpers.SharingDataContracts; using System.Text; using System.Collections.ObjectModel; using System.Windows.Data; namespace CreateFeatureService { /// <summary> /// This sample provides a new tab and controls that allow you to create /// feature service from the csv file that has been uploaded to AGOL or /// portal. /// </summary> internal class Dockpane1ViewModel : DockPane { private const string DockPaneID = "CreateFeatureService_Dockpane1"; private const string RestAPIFileTypes = "serviceDefinition|shapefile|csv|excel|tilePackage|featureService|featureCollection|fileGeodatabase|geojson|scenepackage|vectortilepackage|imageCollection|mapService|sqliteGeodatabase"; private ObservableCollection<CsvPortalItem> _csvPortalItems = new ObservableCollection<CsvPortalItem>(); protected Dockpane1ViewModel() { var brush = FrameworkApplication.ApplicationTheme == ApplicationTheme.Default ? 
Brushes.DarkBlue : Brushes.AliceBlue; BaseUrlBorderBrush = brush; PortalItemBorderBrush = brush; UsernameBorderBrush = brush; FileTypeBorderBrush = brush; PublishParametersBorderBrush = brush; AnalyzeParametersBorderBrush = brush; AnalyzeParameters = "{}"; BindingOperations.EnableCollectionSynchronization(_csvPortalItems, new object()); } private Brush _errorBorderBrush { get { var colorString = FrameworkApplication.ApplicationTheme == ApplicationTheme.Default ? "#C6542D" : "#C75028"; return new SolidColorBrush((Color)ColorConverter.ConvertFromString(colorString)); } } private Brush _greenBorderBrush { get { var colorString = FrameworkApplication.ApplicationTheme == ApplicationTheme.Default ? "#5A9359" : "#58AD57"; return new SolidColorBrush((Color)ColorConverter.ConvertFromString(colorString)); } } /// <summary> /// Check if input fields are empty or invalid /// </summary> /// <returns></returns> Tuple<bool, string> CheckEmptyFields(bool bAnalyze = false) { #region var brush = FrameworkApplication.ApplicationTheme == ApplicationTheme.Default ? 
Brushes.DarkBlue : Brushes.AliceBlue; BaseUrlBorderBrush = brush; PortalItemBorderBrush = brush; UsernameBorderBrush = brush; FileTypeBorderBrush = brush; PublishParametersBorderBrush = brush; AnalyzeParametersBorderBrush = brush; #endregion string msg = string.Empty; if (string.IsNullOrEmpty(BaseUrl)) { BaseUrlBorderBrush = _errorBorderBrush; msg += "\tPortal Url cannot be empty.\n"; } else { Uri uriResult; bool result = Uri.TryCreate(BaseUrl, UriKind.Absolute, out uriResult) && (uriResult.Scheme == Uri.UriSchemeHttp || uriResult.Scheme == Uri.UriSchemeHttps); if (!result) { BaseUrlBorderBrush = _errorBorderBrush; msg += "\tPortal Url is invalid.\n"; } } if (string.IsNullOrEmpty(CsvPortalItem.Id)) { PortalItemBorderBrush = _errorBorderBrush; msg += "\titem Id cannot be empty.\n"; } if (string.IsNullOrEmpty(Username)) { UsernameBorderBrush = _errorBorderBrush; msg += "\tusername cannot be empty.\n"; } else if (string.IsNullOrEmpty(CsvPortalItem.Id)) { Tuple<bool, string> res = GetServiceName(BaseUrl, Username, CsvPortalItem.Id); if (!res.Item1) { PortalItemBorderBrush = _errorBorderBrush; msg += "\tItem Id is not associated with existing data.\n"; } } if (string.IsNullOrEmpty(FileType)) { FileTypeBorderBrush = _errorBorderBrush; msg += "\tFile type cannot be empty.\n"; } if (bAnalyze) { if (string.IsNullOrEmpty(AnalyzeParameters)) { AnalyzeParametersBorderBrush = _errorBorderBrush; msg += "\tAnalyze Parameters cannot be empty.\n"; } } else { if (string.IsNullOrEmpty(PublishParameters)) { PublishParametersBorderBrush = _errorBorderBrush; msg += "\tPublish Parameters cannot be empty.\n"; } } if (string.IsNullOrEmpty(msg)) return new Tuple<bool, string>(false, msg); else return new Tuple<bool, string>(true, "Form cannot be submitted due to:\n" + msg); } /// <summary> /// Command called when "clear Contents" button is clicked /// </summary> public ICommand CmdClearContent { get { return new RelayCommand(() => { BaseUrl = ""; CsvPortalItem = null; Username = ""; 
FileType = ""; PublishParameters = ""; }, () => true); } } /// <summary> /// Command called when "Fix It" button is clicked /// </summary> public ICommand CmdFixIt { get { return new RelayCommand(() => { PublishParameters = Properties.Resources.PointOfInterest_json; }, () => !string.IsNullOrEmpty(PublishParameters)); } } /// <summary> /// Command called when the "publishAnalyze" Button is clicked /// </summary> public ICommand CmdAnalyzeSubmit { get { return new RelayCommand(() => { Tuple<bool, string> tup = CheckEmptyFields(true); if (!tup.Item1) { //publishAnalyze.IsEnabled = false; ServiceInfo = $@"Portal info: {BaseUrl}{Environment.NewLine}"; ServiceInfo += "Item info: (Item ID) " + CsvPortalItem.Id + "; (user name) " + Username + "; (File Type) " + FileType + "; (Parameters) " + AnalyzeParameters + "\n"; //Step 1: analyze the portal item; If return false, quit; Else return tuple containing publish parameters Tuple<bool, string> analyzeResult = AnalyzeService(BaseUrl, CsvPortalItem.Id, FileType, AnalyzeParameters); if (analyzeResult.Item1) { PublishParameters = analyzeResult.Item2; } else { ServiceInfo = analyzeResult.Item2 + " for request - " + ServiceInfo; ServiceInfoForeground = _errorBorderBrush; } } else ServiceInfo = tup.Item2; }, () => CsvPortalItem != null); } } /// <summary> /// Command called when the "publishSubmit" Button is clicked /// </summary> public ICommand CmdPublishSubmit { get { return new RelayCommand(() => { Tuple<bool, string> tup = CheckEmptyFields(); if (!tup.Item1) { //publishSubmit.IsEnabled = false; ServiceInfo = $@"Portal info: {BaseUrl}{Environment.NewLine}"; ServiceInfo += "Item info: (Item ID) " + CsvPortalItem.Id + "; (user name) " + Username + "; (File Type) " + FileType + "; (Parameters) " + PublishParameters + "\n"; //Step 1: get the item name based on its ID Tuple<bool, string> itemNameRes = GetServiceName(BaseUrl, Username, CsvPortalItem.Id); if (itemNameRes.Item1) { //Step 2: check if the service name is already in use 
Tuple<bool, string> availableRes = IsServiceNameAvailable(BaseUrl, itemNameRes.Item2, "Feature Service"); if (availableRes.Item1) { //Step 3: publish the service based on the portal item Tuple<bool, string> publishResult = PublishService(BaseUrl, Username, CsvPortalItem.Id, FileType, PublishParameters); if (publishResult.Item1) { ServiceInfo = "Service created successfully!"; ServiceInfoForeground = _greenBorderBrush; ServiceLinkText = publishResult.Item2; //serviceLinkLabel.Visibility = System.Windows.Visibility.Visible; //serviceLinkText.Visibility = System.Windows.Visibility.Visible; } else { ServiceInfo = publishResult.Item2 + " for request - " + ServiceInfo; ServiceInfoForeground = _errorBorderBrush; } } else { ServiceInfo = "Service Name is not available: " + availableRes.Item2 + " for request - " + ServiceInfo; ServiceInfoForeground = _errorBorderBrush; } } else { ServiceInfo = "Call to get item name failed: " + itemNameRes.Item2 + " for request - " + ServiceInfo; ServiceInfoForeground = _errorBorderBrush; } } else ServiceInfo = tup.Item2; }, () => CsvPortalItem != null); } } public ICommand CmdRefreshPortalContent { get { return new RelayCommand(async () => { _csvPortalItems.Clear(); // Use the active portal connection var portal = ArcGISPortalManager.Current.GetActivePortal(); string userName = ArcGISPortalManager.Current.GetActivePortal().GetSignOnUsername(); //Searching for csv in the current user's content var pqp = PortalQueryParameters.CreateForItemsOfTypeWithOwner(PortalItemType.CSV, userName); //pqp.Query += $@"owner:\""{userName}\"""; //Execute to return a result set PortalQueryResultSet<PortalItem> results = await ArcGISPortalExtensions.SearchForContentAsync(portal, pqp); if (results.Results.Count == 0) { MessageBox.Show("Please refer to the sample instructions and upload the sample csv file to your Portal content"); } else { foreach (var pItem in results.Results) { var theFileType = string.Empty; switch (pItem.PortalItemType) { case 
PortalItemType.CSV: theFileType = "csv"; break; case PortalItemType.MicrosoftExcel: theFileType = "excel"; break; } _csvPortalItems.Add(new CsvPortalItem() { Name = pItem.Name, Id = pItem.ID, FileType = theFileType }); } } }, () => true); } } /// <summary> /// Gets the item title/name based on its item ID /// </summary> /// <param name="baseURI"></param> /// <param name="username"></param> /// <param name="itemId"></param> /// <returns></returns> Tuple<bool, string> GetServiceName(string baseURI, string username, string itemId) { EsriHttpClient myClient = new EsriHttpClient(); #region REST call to get service name string requestUrl = baseURI + @"/sharing/rest/content/users/" + username + @"/items/" + itemId + "?f=json"; var response = myClient.Get(requestUrl); if (response == null) return new Tuple<bool, String>(false, "HTTP response is null"); string outStr = response.Content.ReadAsStringAsync().Result; //De-serialize the response in JSON into a usable object. JavaScriptSerializer serializer = new JavaScriptSerializer(); SharingContracts.OnlineItem obj = (SharingContracts.OnlineItem)serializer.Deserialize(outStr, typeof(SharingContracts.OnlineItem)); if (obj?.item?.title == null) { SharingContracts.Error err = (SharingContracts.Error)serializer.Deserialize(outStr, typeof(SharingContracts.Error)); var msg = string.IsNullOrEmpty(err.message) ? 
$@"error code: {err.code}" : err.message; return new Tuple<bool, String>(false, $@"Failed item call: {msg}"); } return new Tuple<bool, String>(true, obj.item.title); #endregion } /// <summary> /// Check if the service name is already in use /// </summary> /// <param name="baseURI"></param> /// <param name="serviceName"></param> /// <param name="serviceType"></param> /// <returns></returns> Tuple<bool, string> IsServiceNameAvailable(string baseURI, string serviceName, string serviceType) { EsriHttpClient myClient = new EsriHttpClient(); #region REST call to get appInfo.Item.id and user.lastLogin of the licensing portal string selfUri = @"/sharing/rest/portals/self?f=json"; var selfResponse = myClient.Get(baseURI + selfUri); if (selfResponse == null) return new Tuple<bool, String>(false, "HTTP response is null"); string outStr = selfResponse.Content.ReadAsStringAsync().Result; //Deserialize the response in JSON into a usable object. JavaScriptSerializer serializer = new JavaScriptSerializer(); SharingContracts.PortalSelf self_obj = (SharingContracts.PortalSelf)serializer.Deserialize(outStr, typeof(SharingContracts.PortalSelf)); if ((self_obj == null) || (self_obj.id == null)) { return new Tuple<bool, String>(false, "Failed portal self call"); } #endregion string requestUrl = baseURI + @"/sharing/rest/portals/" + self_obj.id + @"/isServiceNameAvailable"; requestUrl += "?f=json&type=" + serviceType + "&name=" + serviceName; EsriHttpResponseMessage respMsg = myClient.Get(requestUrl); if (respMsg == null) return new Tuple<bool, String>(false, "HTTP response is null"); outStr = respMsg.Content.ReadAsStringAsync().Result; //De-serialize the response in JSON into a usable object. 
            // Tail of the preceding availability-check helper: interpret the JSON
            // response that was deserialized above. NOTE(review): the method head is
            // outside this view — presumably it posted an availability request to the
            // portal; verify against the full file.
            SharingContracts.AvailableResult obj = (SharingContracts.AvailableResult)serializer.Deserialize(outStr, typeof(SharingContracts.AvailableResult));
            if (obj == null) return new Tuple<bool, String>(false, "Service fails to be analyzed - " + outStr);
            return new Tuple<bool, string>(obj.available, outStr);
        }

        /// <summary>
        /// Post "analyze" request on the portal item
        /// </summary>
        /// <param name="baseURI">Portal base URL (no trailing slash), e.g. "https://host/portal".</param>
        /// <param name="itemId">Id of the portal item to analyze.</param>
        /// <param name="fileType">File type of the item (one of the values in FileTypes).</param>
        /// <param name="analyzeParameters">JSON analyze parameters forwarded verbatim to the REST endpoint.</param>
        /// <returns>Tuple: bool ok/failed; string: serialized publish parameters on success, error message otherwise.</returns>
        Tuple<bool, string> AnalyzeService(string baseURI, string itemId, string fileType, string analyzeParameters)
        {
            EsriHttpClient myClient = new EsriHttpClient();
            string requestUrl = baseURI + @"/sharing/rest/content/features/analyze";

            // Form-encoded POST body expected by the "analyze" REST operation.
            var postData = new List<KeyValuePair<string, string>>();
            postData.Add(new KeyValuePair<string, string>("f", "json"));
            postData.Add(new KeyValuePair<string, string>("itemId", itemId));
            postData.Add(new KeyValuePair<string, string>("fileType", fileType));
            postData.Add(new KeyValuePair<string, string>("analyzeParameters", analyzeParameters));
            HttpContent content = new FormUrlEncodedContent(postData);

            EsriHttpResponseMessage respMsg = myClient.Post(requestUrl, content);
            if (respMsg == null) return new Tuple<bool, String>(false, "HTTP response is null");
            string outStr = respMsg.Content.ReadAsStringAsync().Result;

            //De-serialize the response in JSON into a usable object.
            JavaScriptSerializer serializer = new JavaScriptSerializer();
            SharingContracts.AnalyzedService obj = (SharingContracts.AnalyzedService)serializer.Deserialize(outStr, typeof(SharingContracts.AnalyzedService));
            if (obj == null) return new Tuple<bool, String>(false, "Service fails to be analyzed - " + outStr);
            if (obj.publishParameters != null)
            {
                // Re-serialize the suggested publish parameters so they can be fed
                // straight into a subsequent "publish" request.
                string respReturn = SerializeToString(obj.publishParameters);
                if (respReturn == "")
                    return new Tuple<bool, String>(false, "Service fails to be analyzed - " + outStr);
                else
                    return new Tuple<bool, String>(true, respReturn);
            }
            return new Tuple<bool, String>(false, "Service fails to be analyzed - " + outStr);
        }

        /// <summary>
        /// Post "publish" request on the portal item
        /// </summary>
        /// <param name="baseURI">Portal base URL (no trailing slash).</param>
        /// <param name="username">Owner of the item; the publish happens under this user's content.</param>
        /// <param name="itemId">Id of the portal item to publish.</param>
        /// <param name="fileType">File type of the item.</param>
        /// <param name="publishParameters">JSON publish parameters (typically produced by AnalyzeService).</param>
        /// <returns>tuple: bool ok/failed string: result msg/error msg</returns>
        Tuple<bool, string> PublishService(string baseURI, string username, string itemId, string fileType, string publishParameters)
        {
            EsriHttpClient myClient = new EsriHttpClient();
            string requestUrl = $@"{baseURI}/sharing/rest/content/users/{username}/publish";

            var postData = new List<KeyValuePair<string, string>>();
            postData.Add(new KeyValuePair<string, string>("f", "json"));
            postData.Add(new KeyValuePair<string, string>("itemId", itemId));
            postData.Add(new KeyValuePair<string, string>("fileType", fileType));
            postData.Add(new KeyValuePair<string, string>("publishParameters", publishParameters));
            HttpContent content = new FormUrlEncodedContent(postData);

            EsriHttpResponseMessage respMsg = myClient.Post(requestUrl, content);
            if (respMsg == null) return new Tuple<bool, String>(false, "HTTP response is null");
            string outStr = respMsg.Content.ReadAsStringAsync().Result;

            //De-serialize the response in JSON into a usable object.
            JavaScriptSerializer serializer = new JavaScriptSerializer();
            SharingContracts.PublishedServices obj = (SharingContracts.PublishedServices)serializer.Deserialize(outStr, typeof(SharingContracts.PublishedServices));
            if (obj?.services == null)
            {
                return new Tuple<bool, String>(false, "Service creation fails - " + outStr);
            }

            // Concatenate the URLs of every service created by the publish call.
            string respReturn = "";
            foreach (SharingContracts.PublishedService ps in obj.services)
                respReturn += ps.serviceurl;
            if (respReturn == "")
                return new Tuple<bool, String>(false, "Service creation fails - " + outStr);
            else
                return new Tuple<bool, String>(true, respReturn);
        }

        /// <summary>
        /// Command bound in the DockPane XAML: copies the active portal's URL and
        /// signed-on user name into the BaseUrl / Username fields.
        /// </summary>
        public ICommand CmdGetActivePortal
        {
            get
            {
                return new RelayCommand(() =>
                {
                    BaseUrl = ArcGISPortalManager.Current.GetActivePortal().PortalUri.ToString();
                    Username = ArcGISPortalManager.Current.GetActivePortal().GetSignOnUsername();
                }, () => true);
            }
        }

        // Brushes bound to the UI to give visual validation feedback on each input.
        private Brush _serviceInfoForeground;
        public Brush ServiceInfoForeground
        {
            get { return _serviceInfoForeground; }
            set { SetProperty(ref _serviceInfoForeground, value, () => ServiceInfoForeground); }
        }

        private Brush _baseUrlBorderBrush;
        public Brush BaseUrlBorderBrush
        {
            get { return _baseUrlBorderBrush; }
            set { SetProperty(ref _baseUrlBorderBrush, value, () => BaseUrlBorderBrush); }
        }

        private Brush _portalItemBorderBrush;
        public Brush PortalItemBorderBrush
        {
            get { return _portalItemBorderBrush; }
            set { SetProperty(ref _portalItemBorderBrush, value, () => PortalItemBorderBrush); }
        }

        private Brush _usernameBorderBrush;
        public Brush UsernameBorderBrush
        {
            get { return _usernameBorderBrush; }
            set { SetProperty(ref _usernameBorderBrush, value, () => UsernameBorderBrush); }
        }

        private Brush _fileTypeBorderBrush;
        public Brush FileTypeBorderBrush
        {
            get { return _fileTypeBorderBrush; }
            set { SetProperty(ref _fileTypeBorderBrush, value, () => FileTypeBorderBrush); }
        }

        private Brush _analyzeParametersBorderBrush;
        public Brush AnalyzeParametersBorderBrush
        {
            get { return _analyzeParametersBorderBrush; }
            set { SetProperty(ref _analyzeParametersBorderBrush, value, () => AnalyzeParametersBorderBrush); }
        }

        private Brush _publishParametersBorderBrush;
        public Brush PublishParametersBorderBrush
        {
            get { return _publishParametersBorderBrush; }
            set { SetProperty(ref _publishParametersBorderBrush, value, () => PublishParametersBorderBrush); }
        }

        // Text inputs; the getters normalize null to "" and trim surrounding whitespace.
        private string _serviceLinkText;
        public string ServiceLinkText
        {
            get { return string.IsNullOrEmpty(_serviceLinkText) ? string.Empty : _serviceLinkText.Trim(); }
            set { SetProperty(ref _serviceLinkText, value, () => ServiceLinkText); }
        }

        private string _baseUrl;
        public string BaseUrl
        {
            get { return string.IsNullOrEmpty(_baseUrl) ? string.Empty : _baseUrl.Trim(); }
            set { SetProperty(ref _baseUrl, value, () => BaseUrl); }
        }

        private string _username;
        public string Username
        {
            get { return string.IsNullOrEmpty(_username) ? string.Empty : _username.Trim(); }
            set { SetProperty(ref _username, value, () => Username); }
        }

        private string _fileType;
        public string FileType
        {
            get { return _fileType; }
            set { SetProperty(ref _fileType, value, () => FileType); }
        }

        /// <summary>
        /// Choices for the file-type combo box, parsed from the "|"-separated
        /// RestAPIFileTypes constant (declared elsewhere in this class).
        /// </summary>
        public IList<string> FileTypes
        {
            get
            {
                IList<string> fileTypes = RestAPIFileTypes.Split("|".ToCharArray()).ToList<string>();
                return fileTypes;
            }
        }

        private string _analyzeParameters;
        public string AnalyzeParameters
        {
            get { return _analyzeParameters; }
            set { SetProperty(ref _analyzeParameters, value, () => AnalyzeParameters); }
        }

        private string _publishParameters;
        public string PublishParameters
        {
            get { return _publishParameters; }
            set { SetProperty(ref _publishParameters, value, () => PublishParameters); }
        }

        private string _serviceInfo;
        public string ServiceInfo
        {
            get { return _serviceInfo; }
            set { SetProperty(ref _serviceInfo, value, () => ServiceInfo); }
        }

        // Backing field _csvPortalItems is declared earlier in this class (outside this view).
        public ObservableCollection<CsvPortalItem> CsvPortalItems
        {
            get { return _csvPortalItems; }
            set { SetProperty(ref _csvPortalItems, value, () => CsvPortalItems); }
        }

        private CsvPortalItem _csvPortalItem;
        public CsvPortalItem CsvPortalItem
        {
            get { return _csvPortalItem; }
            set
            {
                SetProperty(ref _csvPortalItem, value, () => CsvPortalItem);
                // Keep the FileType selection in sync with the chosen portal item.
                if (_csvPortalItem != null && !string.IsNullOrEmpty(_csvPortalItem.FileType))
                {
                    FileType = _csvPortalItem.FileType;
                }
            }
        }

        /// <summary>
        /// Print the de-serialized object into JSON formatted string
        /// </summary>
        /// <param name="objectToSerialize">Object graph to serialize with DataContractJsonSerializer.</param>
        /// <returns>The JSON text, decoded with Encoding.Default.
        /// NOTE(review): Encoding.Default is machine-dependent; UTF-8 was likely intended — confirm.</returns>
        string SerializeToString(object objectToSerialize)
        {
            using (MemoryStream ms = new MemoryStream())
            {
                var ser = new DataContractJsonSerializer(objectToSerialize.GetType());
                ser.WriteObject(ms, objectToSerialize);
                return Encoding.Default.GetString(ms.ToArray());
            }
        }

        /// <summary>
        /// Show the DockPane.
        /// </summary>
        internal static void Show()
        {
            DockPane pane = FrameworkApplication.DockPaneManager.Find(DockPaneID);
            if (pane == null)
                return;
            pane.Activate();
        }

        /// <summary>
        /// Text shown near the top of the DockPane.
        /// </summary>
        private string _heading = "My DockPane";
        public string Heading
        {
            get { return _heading; }
            set { SetProperty(ref _heading, value, () => Heading); }
        }
    }

    /// <summary>
    /// Button implementation to show the DockPane.
    /// </summary>
    internal class Dockpane1_ShowButton : Button
    {
        protected override void OnClick()
        {
            Dockpane1ViewModel.Show();
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ITGlobal.MarkDocs.Format;
using ITGlobal.MarkDocs.Impl;
using ITGlobal.MarkDocs.Source;
using ITGlobal.MarkDocs.Source.Impl;
using Microsoft.AspNetCore.StaticFiles;

namespace ITGlobal.MarkDocs.Blog.Impl
{
    /// <summary>
    ///     Scans a blog source tree laid out as {root}/{year}/{month}/{day}/... and
    ///     assembles an asset tree of pages plus the files they reference.
    /// </summary>
    internal sealed class BlogAssetTreeReaderWorker : IShallowPageAssetReader
    {
        #region fields

        private readonly IContentHashProvider _contentHashProvider;
        private readonly IContentTypeProvider _contentTypeProvider;
        private readonly IContentMetadataProvider _contentMetadataProvider;
        // NOTE(review): _includeFiles is assigned but never read — the include-filter
        // query in ScanDirectoryFiles() is commented out; confirm whether that is intentional.
        private readonly string[] _includeFiles;
        private readonly string[] _ignorePatterns;

        // Shallow pages keyed by normalized resource id (case-insensitive).
        private readonly Dictionary<string, ShallowPageAsset> _pages =
            new Dictionary<string, ShallowPageAsset>(StringComparer.OrdinalIgnoreCase);

        // File assets keyed by resource id; a null value caches a path that was
        // probed but could not be resolved (see ResolveFileAsset / PhysicalFile).
        private readonly Dictionary<string, FileAsset> _files =
            new Dictionary<string, FileAsset>(StringComparer.OrdinalIgnoreCase);

        #endregion

        #region .ctor

        public BlogAssetTreeReaderWorker(
            IFormat format,
            IContentHashProvider contentHashProvider,
            IContentTypeProvider contentTypeProvider,
            IContentMetadataProvider contentMetadataProvider,
            IResourceUrlResolver resourceUrlResolver,
            string[] includeFiles,
            string[] ignorePatterns,
            ISourceTreeRoot root,
            ICompilationReportBuilder report,
            IMarkDocsLog log)
        {
            Format = format;
            _contentHashProvider = contentHashProvider;
            _contentTypeProvider = contentTypeProvider;
            _contentMetadataProvider = contentMetadataProvider;
            ResourceUrlResolver = resourceUrlResolver;
            _includeFiles = includeFiles;
            _ignorePatterns = ignorePatterns;
            Root = root;
            Report = report;
            Log = log;
        }

        #endregion

        #region metadata

        public ISourceTreeRoot Root { get; }
        public IMarkDocsLog Log { get; }
        public string RootDirectory => Root.RootDirectory;
        public IFormat Format { get; }
        public ICompilationReportBuilder Report { get; }
        public IResourceUrlResolver ResourceUrlResolver { get; }

        #endregion

        #region public methods

        /// <summary>
        ///     Scans the root directory, loads pages, then registers every remaining
        ///     (non-page, non-referenced) file as a file asset, and validates the result.
        /// </summary>
        public AssetTree ReadAssetTree()
        {
            Log.Debug($"Scanning directory \"{RootDirectory}\"...");
            var filepathes = ScanDirectoryFiles(RootDirectory).ToHashSet(StringComparer.OrdinalIgnoreCase);

            Log.Debug($"Reading assets from \"{RootDirectory}\"...");
            foreach (var shallowPage in LoadShallowPages())
            {
                _pages[shallowPage.Id] = shallowPage;
            }

            var pages = LoadFullPages(_pages.Values).ToArray();

            // Remove page source files so only non-page files remain in the set.
            foreach (var page in pages)
            {
                filepathes.Remove(page.AbsolutePath);
            }

            // Make a list of referenced files
            foreach (var (_, asset) in _files)
            {
                if (asset is PhysicalFileAsset fileAsset)
                {
                    filepathes.Remove(fileAsset.AbsolutePath);
                }
            }

            // Register any leftover (unreferenced) files as assets too.
            foreach (var path in filepathes)
            {
                var id = GetRelativePath(path);
                ResourceId.Normalize(ref id);
                ResolveFileAsset(id);
            }

            // Drop cached nulls (unresolvable paths) and order deterministically by id.
            var files = _files.Values.Where(_ => _ != null).OrderBy(_ => _.Id).ToArray();

            // Assemble an asset tree
            var tree = new AssetTree(
                Root.SourceTree.Id,
                RootDirectory,
                Root.SourceInfo,
                new BranchPageAsset(
                    "/",
                    "/",
                    RootDirectory,
                    "0000000000000000000000000000000000000000",
                    new BlogRootPageContent(),
                    PageMetadata.Empty,
                    pages
                ),
                files
            );
            tree.Validate(new PageValidateContext(this));
            return tree;

            // Local: yields a shallow page for every scanned file whose extension
            // is one of the format's page extensions.
            IEnumerable<ShallowPageAsset> LoadShallowPages()
            {
                foreach (var file in filepathes)
                {
                    var ext = Path.GetExtension(file);
                    if (Format.Extensions.Contains(ext))
                    {
                        var shallowPage = LeafPage(file);
                        if (shallowPage != null)
                        {
                            yield return shallowPage;
                        }
                    }
                }
            }

            // Local: materializes shallow pages into full page assets, skipping
            // those that fail to load (ReadAsset returns null).
            IEnumerable<PageAsset> LoadFullPages(IEnumerable<ShallowPageAsset> shallowPages)
            {
                foreach (var shallowPage in shallowPages)
                {
                    var page = shallowPage.ReadAsset(this);
                    if (page != null)
                    {
                        yield return page;
                    }
                }
            }
        }

        /// <summary>
        ///     Reads page metadata for a file via the configured metadata provider.
        /// </summary>
        public PageMetadata GetMetadata(string filename, bool isIndexFile)
        {
            var metadata = _contentMetadataProvider.GetMetadata(
                sourceTreeRoot: Root,
                filename: filename,
                report: Report,
                isIndexFile: isIndexFile
            );
            return metadata;
        }

        /// <summary>
        ///     Looks up a previously loaded shallow page by id; null if unknown.
        /// </summary>
        public ShallowPageAsset ResolvePageAsset(string path)
        {
            if (!_pages.TryGetValue(path, out var page))
            {
                return null;
            }

            return page;
        }

        /// <summary>
        ///     Resolves (and caches) a file asset by resource id. A null result is
        ///     cached as well, so a missing/ignored file is probed only once.
        /// </summary>
        public FileAsset ResolveFileAsset(string path)
        {
            if (_files.TryGetValue(path, out var file))
            {
                return file;
            }

            file = PhysicalFile(GetAbsolutePath(path));
            _files[path] = file;
            return file;
        }

        /// <summary>
        ///     Registers a generated (non-physical) file asset under its id.
        /// </summary>
        public void RegisterAsset(GeneratedFileAsset asset)
        {
            _files[asset.Id] = asset;
        }

        #endregion

        #region asset factories

        // Builds a shallow page asset for a page source file. Index files (names
        // listed in Format.IndexFileNames) take the id of their directory; other
        // files get {directory}/{name-without-extension}.
        private LeafShallowPageAsset LeafPage(string path)
        {
            var id = GetRelativePath(path);
            var filename = Path.GetFileName(id);

            var isIndexFile = false;
            foreach (var indexFileName in Format.IndexFileNames)
            {
                if (string.Equals(indexFileName, filename, StringComparison.OrdinalIgnoreCase))
                {
                    isIndexFile = true;
                    break;
                }
            }

            id = Path.GetDirectoryName(id);
            if (!isIndexFile)
            {
                id = Path.Combine(id, Path.GetFileNameWithoutExtension(path));
            }

            ResourceId.Normalize(ref id);

            // Missing hash degrades to "" rather than failing the scan.
            if (!_contentHashProvider.TryGetContentHash(path, out var contentHash))
            {
                contentHash = "";
            }

            return new LeafShallowPageAsset(
                id: id,
                relativePath: id,
                absolutePath: path,
                contentHash: contentHash
            );
        }

        // Builds a physical file asset, or null if the file is absent or ignored.
        private PhysicalFileAsset PhysicalFile(string path)
        {
            if (!File.Exists(path))
            {
                return null;
            }

            var ignoreRules = GetIgnoreRulesForDirectory(Path.GetDirectoryName(path));
            if (ignoreRules.ShouldIgnore(path))
            {
                return null;
            }

            var id = GetRelativePath(path);
            ResourceId.Normalize(ref id);

            if (!_contentTypeProvider.TryGetContentType(path, out var contentType))
            {
                contentType = Asset.DEFAULT_MIME_TYPE;
            }

            if (!_contentHashProvider.TryGetContentHash(path, out var contentHash))
            {
                contentHash = "";
            }

            return new PhysicalFileAsset(
                id: id,
                relativePath: id,
                absolutePath: path,
                contentHash: contentHash,
                contentType: contentType
            );
        }

        #endregion

        #region directory traverse

        // Enumerates files only inside directories that form a valid
        // {year}/{month}/{day} date triple, honoring ignore rules at every level.
        private IEnumerable<string> ScanDirectoryFiles(string directory)
        {
            var ignoreRules = GetIgnoreRulesForDirectory(directory);

            foreach (var yearDir in Directory.EnumerateDirectories(directory))
            {
                if (ignoreRules.ShouldIgnore(yearDir))
                {
                    continue;
                }

                if (!int.TryParse(Path.GetFileName(yearDir), out var year))
                {
                    continue;
                }

                foreach (var monthDir in Directory.EnumerateDirectories(yearDir))
                {
                    if (ignoreRules.ShouldIgnore(monthDir))
                    {
                        continue;
                    }

                    if (!int.TryParse(Path.GetFileName(monthDir), out var month))
                    {
                        continue;
                    }

                    foreach (var dayDir in Directory.EnumerateDirectories(monthDir))
                    {
                        if (ignoreRules.ShouldIgnore(dayDir))
                        {
                            continue;
                        }

                        if (!int.TryParse(Path.GetFileName(dayDir), out var day))
                        {
                            continue;
                        }

                        // Reject numeric triples that don't form a real calendar date.
                        try
                        {
                            _ = new DateTime(year, month, day);
                        }
                        catch
                        {
                            continue;
                        }

                        //var files = from filter in _includeFiles
                        //            from filename in Directory.EnumerateFiles(dayDir, filter)
                        //            select filename;
                        var files = from filename in Directory.EnumerateFiles(dayDir, "*")
                                    select filename;
                        foreach (var filename in files)
                        {
                            if (ignoreRules.ShouldIgnore(filename))
                            {
                                continue;
                            }

                            yield return filename;
                        }
                    }
                }
            }
        }

        private string GetAbsolutePath(string path) => PathHelper.GetAbsolutePath(RootDirectory, path);

        private string GetRelativePath(string path) => PathHelper.GetRelativePath(RootDirectory, path);

        // Combines all ignore rules that apply to a directory into one composite rule.
        private IIgnoreRule GetIgnoreRulesForDirectory(string path)
        {
            var ignoreRules = EnumerateIgnoreRulesForDirectory(path).ToArray();
            return new IgnoreRuleList(ignoreRules);
        }

        private IEnumerable<IIgnoreRule> EnumerateIgnoreRulesForDirectory(string path)
        {
            // Globally configured ignore patterns apply everywhere under the root.
            if (_ignorePatterns?.Length > 0)
            {
                yield return new SingleIgnoreRule(RootDirectory, _ignorePatterns);
            }

            // Plus any ignore files (MdIgnoreFileRule.FileName) found while walking
            // from "path" up to the root directory.
            foreach (var directory in WalkDirectoriesUp(path))
            {
                var mdIgnoreFileName = Path.Combine(directory, MdIgnoreFileRule.FileName);
                if (File.Exists(mdIgnoreFileName))
                {
                    yield return new MdIgnoreFileRule(directory, mdIgnoreFileName);
                }
            }
        }

        // Yields "path" and each of its ancestors, ending with RootDirectory itself.
        private IEnumerable<string> WalkDirectoriesUp(string path)
        {
            var directory = path;
            directory = Path.GetFullPath(directory);

            while (directory != null && directory != RootDirectory)
            {
                yield return directory;
                directory = Path.GetDirectoryName(directory);
            }

            yield return RootDirectory;
        }

        #endregion
    }
}
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.

using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Threading;
using System.Windows.Forms;
using Microsoft.Win32;
using OpenLiveWriter.CoreServices.Threading;
using OpenLiveWriter.Interop.Windows;

namespace OpenLiveWriter.CoreServices.Diagnostics
{
    /// <summary>
    /// A TraceListener that writes trace events to a log file.
    /// </summary>
    public class LogFileTraceListener : TraceListener
    {
        #region Static & Constant Declarations

        /// <summary>
        /// Category recorded for assertion failures.
        /// </summary>
        private const string FailText = "Fail";

        #endregion

        #region Private Member Variables

        /// <summary>
        /// Monotonically increasing entry sequence number; incremented with
        /// Interlocked so concurrent writers get unique values.
        /// </summary>
        private int sequenceNumber;

        /// <summary>
        /// The size at which the log file should be archived and a new one created.
        /// This number is bigger in Debug configuration because we log a lot more messages.
        /// </summary>
        // Fix: the Debug branch previously declared this as a mutable instance
        // field while the Release branch used a const — now const in both builds.
#if DEBUG
        private const int LOG_FILE_SIZE_THRESHOLD = 5000000;
#else
        private const int LOG_FILE_SIZE_THRESHOLD = 1000000;
#endif

        /// <summary>
        /// Writes entries to the log file, rolling over at the size threshold.
        /// Assigned only in the constructor, hence readonly.
        /// </summary>
        private readonly FileLogger logger;

        /// <summary>
        /// The facility name recorded with every entry. Assigned only in the constructor.
        /// </summary>
        private readonly string facility;

        /// <summary>
        /// The ID of the current process, prefixed with the terminal-session id when available.
        /// </summary>
        private static readonly string processId;

        #endregion Private Member Variables

        #region Class Initialization & Termination

        static LogFileTraceListener()
        {
            int pid;
            using (Process p = Process.GetCurrentProcess())
                pid = p.Id;

            uint sid;
            // ProcessIdToSessionId returns false if session id couldn't be determined
            if (!Kernel32.ProcessIdToSessionId((uint)pid, out sid))
                processId = "??." + pid;
            else if (sid == 0)
                processId = pid.ToString(CultureInfo.InvariantCulture);
            else
                processId = sid + "." + pid;
        }

        /// <summary>
        /// Initializes a new instance of the LogFileTraceListener class.
        /// </summary>
        /// <param name="logFileName">The log file name to write to.</param>
        /// <param name="facility">The facility name.</param>
        public LogFileTraceListener(string logFileName, string facility)
        {
            // The archived copy of the log lives next to the live one with an ".old" suffix.
            logger = new FileLogger(logFileName, logFileName + ".old", LOG_FILE_SIZE_THRESHOLD);
            this.facility = facility;
        }

        #endregion Class Initialization & Termination

        #region Public Methods

        /// <summary>
        /// Emits the specified error message.
        /// </summary>
        /// <param name="message">A message to emit.</param>
        public override void Fail(string message)
        {
            WriteEntry(message, FailText, Environment.StackTrace);
            OnFail();
        }

        /// <summary>
        /// Emits an error message, and a detailed error message.
        /// </summary>
        /// <param name="message">A message to emit.</param>
        /// <param name="detailMessage">A detailed message to emit.</param>
        public override void Fail(string message, string detailMessage)
        {
            if (!string.IsNullOrEmpty(detailMessage))
                WriteEntry(String.Format(CultureInfo.InvariantCulture, "{0} {1}", message, detailMessage), FailText, Environment.StackTrace);
            else
                WriteEntry(message, FailText, Environment.StackTrace);
            OnFail();
        }

        /// <summary>
        /// In automation mode an assertion failure terminates the process after
        /// flushing the log, so automated runs fail fast and loudly.
        /// </summary>
        private void OnFail()
        {
            if (ApplicationDiagnostics.AutomationMode)
            {
                WriteEntry("Assertion failed, exiting Writer");
                logger.Flush();
                Process.GetCurrentProcess().Kill();
            }
        }

        /// <summary>
        /// Writes the value of the object's ToString method.
        /// </summary>
        /// <param name="o">An Object whose fully qualified class name you want to write.</param>
        public override void Write(object o)
        {
            WriteEntry(o.ToString());
        }

        /// <summary>
        /// Writes a message to the listener.
        /// </summary>
        /// <param name="message">A message to write.</param>
        public override void Write(string message)
        {
            WriteEntry(message);
        }

        /// <summary>
        /// Writes a category name and the value of the object's ToString method.
        /// </summary>
        /// <param name="o">An Object whose fully qualified class name you want to write.</param>
        /// <param name="category">A category name used to organize the output.</param>
        public override void Write(object o, string category)
        {
            WriteEntry(o.ToString(), category);
        }

        /// <summary>
        /// Writes a message to the listener you create in the derived class, followed by a line terminator.
        /// </summary>
        /// <param name="message">A message to write.</param>
        /// <param name="category">A category name used to organize the output.</param>
        public override void Write(string message, string category)
        {
            WriteEntry(message, category);
        }

        /// <summary>
        /// Writes the value of the object's ToString method.
        /// </summary>
        /// <param name="o">An Object whose fully qualified class name you want to write.</param>
        public override void WriteLine(object o)
        {
            WriteEntry(o.ToString());
        }

        /// <summary>
        /// Writes a message to the listener you create in the derived class, followed by a line terminator.
        /// </summary>
        /// <param name="message">A message to write.</param>
        public override void WriteLine(string message)
        {
            WriteEntry(message);
        }

        /// <summary>
        /// Writes a category name and the value of the object's ToString method.
        /// </summary>
        /// <param name="o">An Object whose fully qualified class name you want to write.</param>
        /// <param name="category">A category name used to organize the output.</param>
        public override void WriteLine(object o, string category)
        {
            WriteEntry(o.ToString(), category);
        }

        /// <summary>
        /// Writes a message to the listener you create in the derived class, followed by a line terminator.
        /// </summary>
        /// <param name="message">A message to write.</param>
        /// <param name="category">A category name used to organize the output.</param>
        public override void WriteLine(string message, string category)
        {
            WriteEntry(message, category);
        }

        #endregion Public Methods

        #region Private Methods

        /// <summary>
        /// Adds an entry with no category and no stack trace.
        /// </summary>
        /// <param name="message">The message of the entry.</param>
        private void WriteEntry(string message)
        {
            WriteEntry(message, null, null);
        }

        /// <summary>
        /// Adds an entry with no stack trace.
        /// </summary>
        /// <param name="message">The message of the entry.</param>
        /// <param name="category">The category of the entry.</param>
        private void WriteEntry(string message, string category)
        {
            WriteEntry(message, category, null);
        }

        /// <summary>
        /// Adds an entry.
        /// </summary>
        /// <param name="message">The message of the entry.</param>
        /// <param name="category">The category of the entry.</param>
        /// <param name="stackTrace">The stack trace of the entry.</param>
        private void WriteEntry(string message, string category, string stackTrace)
        {
            // Obtain the DateTime the message reached us.
            DateTime dateTime = DateTime.Now;

            // Default the message, as needed.
            if (string.IsNullOrEmpty(message))
                message = "[No Message]";

            // Default the category, as needed.
            if (string.IsNullOrEmpty(category))
                category = "None";

            int seqNum = Interlocked.Increment(ref sequenceNumber);
            DebugLogEntry logEntry = new DebugLogEntry(facility, processId, seqNum, dateTime, message, category, stackTrace);
            logger.AddEntry(logEntry);
        }

        #endregion Private Methods

        /// <summary>
        /// Immutable snapshot of a single log entry.
        /// </summary>
        public class DebugLogEntry
        {
            internal readonly string Facility;
            internal readonly string ProcessId;
            internal readonly int SequenceNumber;
            internal readonly DateTime TimeStamp;
            internal readonly string Message;
            internal readonly string Category;
            internal readonly string StackTrace;

            internal DebugLogEntry(string facility, string processId, int sequenceNumber, DateTime timestamp, string message, string category, string stackTrace)
            {
                this.Facility = facility;
                this.ProcessId = processId;
                this.SequenceNumber = sequenceNumber;
                this.TimeStamp = timestamp;
                this.Message = message;
                this.Category = category;
                this.StackTrace = stackTrace;
            }

            /// <summary>
            /// Formats the entry as a CSV record; quotes inside the message are
            /// doubled so the quoted field stays parseable.
            /// </summary>
            public override string ToString()
            {
                return string.Format(CultureInfo.InvariantCulture,
                    "{0},{1},{2},{3:00000},{4:dd-MMM-yyyy HH:mm:ss.fff},\"{5}\",\"{6}\"\r\n",
                    Facility,
                    ProcessId,
                    Category,
                    SequenceNumber,
                    TimeStamp,
                    Message.Replace("\"", "\"\""),
                    StackTrace);
            }
        }
    }
}
/* * CID001e.cs - th culture handler. * * Copyright (c) 2003 Southern Storm Software, Pty Ltd * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ // Generated from "th.txt". namespace I18N.Other { using System; using System.Globalization; using I18N.Common; public class CID001e : RootCulture { public CID001e() : base(0x001E) {} public CID001e(int culture) : base(culture) {} public override String Name { get { return "th"; } } public override String ThreeLetterISOLanguageName { get { return "tha"; } } public override String ThreeLetterWindowsLanguageName { get { return "THA"; } } public override String TwoLetterISOLanguageName { get { return "th"; } } public override DateTimeFormatInfo DateTimeFormat { get { DateTimeFormatInfo dfi = base.DateTimeFormat; dfi.AMDesignator = "\u0E01\u0E48\u0E2D\u0E19\u0E40\u0E17\u0E35\u0E48\u0E22\u0E07"; dfi.PMDesignator = "\u0E2B\u0E25\u0E31\u0E07\u0E40\u0E17\u0E35\u0E48\u0E22\u0E07"; dfi.AbbreviatedDayNames = new String[] {"\u0E2D\u0E32.", "\u0E08.", "\u0E2D.", "\u0E1E.", "\u0E1E\u0E24.", "\u0E28.", "\u0E2A."}; dfi.DayNames = new String[] {"\u0E27\u0E31\u0E19\u0E2D\u0E32\u0E17\u0E34\u0E15\u0E22\u0E4C", "\u0E27\u0E31\u0E19\u0E08\u0E31\u0E19\u0E17\u0E23\u0E4C", "\u0E27\u0E31\u0E19\u0E2D\u0E31\u0E07\u0E04\u0E32\u0E23", "\u0E27\u0E31\u0E19\u0E1E\u0E38\u0E18", 
"\u0E27\u0E31\u0E19\u0E1E\u0E24\u0E2B\u0E31\u0E2A\u0E1A\u0E14\u0E35", "\u0E27\u0E31\u0E19\u0E28\u0E38\u0E01\u0E23\u0E4C", "\u0E27\u0E31\u0E19\u0E40\u0E2A\u0E32\u0E23\u0E4C"}; dfi.AbbreviatedMonthNames = new String[] {"\u0E21.\u0E04.", "\u0E01.\u0E1E.", "\u0E21\u0E35.\u0E04.", "\u0E40\u0E21.\u0E22.", "\u0E1E.\u0E04.", "\u0E21\u0E34.\u0E22.", "\u0E01.\u0E04.", "\u0E2A.\u0E04.", "\u0E01.\u0E22.", "\u0E15.\u0E04.", "\u0E1E.\u0E22.", "\u0E18.\u0E04.", ""}; dfi.MonthNames = new String[] {"\u0E21\u0E01\u0E23\u0E32\u0E04\u0E21", "\u0E01\u0E38\u0E21\u0E20\u0E32\u0E1E\u0E31\u0E19\u0E18\u0E4C", "\u0E21\u0E35\u0E19\u0E32\u0E04\u0E21", "\u0E40\u0E21\u0E29\u0E32\u0E22\u0E19", "\u0E1E\u0E24\u0E29\u0E20\u0E32\u0E04\u0E21", "\u0E21\u0E34\u0E16\u0E38\u0E19\u0E32\u0E22\u0E19", "\u0E01\u0E23\u0E01\u0E0E\u0E32\u0E04\u0E21", "\u0E2A\u0E34\u0E07\u0E2B\u0E32\u0E04\u0E21", "\u0E01\u0E31\u0E19\u0E22\u0E32\u0E22\u0E19", "\u0E15\u0E38\u0E25\u0E32\u0E04\u0E21", "\u0E1E\u0E24\u0E28\u0E08\u0E34\u0E01\u0E32\u0E22\u0E19", "\u0E18\u0E31\u0E19\u0E27\u0E32\u0E04\u0E21", ""}; dfi.DateSeparator = "/"; dfi.TimeSeparator = ":"; dfi.LongDatePattern = "dddd'\u0d17\u0d35\u0d48 'd MMMM G yyyy"; dfi.LongTimePattern = "\u0d34\u0d19\u0d32\u0d17\u0d35'"; dfi.ShortDatePattern = "d MMM yyyy"; dfi.ShortTimePattern = "H:mm:ss"; dfi.FullDateTimePattern = "H:mm H' \u0d19\u0d32\u0d2C\u0d34\u0d01\u0d32 'm' \u0d19\u0d32\u0d17\u0d35 'ss' \u0d27"; dfi.I18NSetDateTimePatterns(new String[] { "d:d MMM yyyy", "D:H:mm", "f:H:mm H' \u0d19\u0d32\u0d2C\u0d34\u0d01\u0d32 'm' \u0d19\u0d32\u0d17\u0d35 'ss' \u0d27", "f:H:mm \u0d34\u0d19\u0d32\u0d17\u0d35'", "f:H:mm H' \u0d19\u0d32\u0d2C\u0d34\u0d01\u0d32 'm' \u0d19\u0d32\u0d17\u0d35'", "f:H:mm H:mm:ss", "F:H:mm HH:mm:ss", "g:d MMM yyyy H' \u0d19\u0d32\u0d2C\u0d34\u0d01\u0d32 'm' \u0d19\u0d32\u0d17\u0d35 'ss' \u0d27", "g:d MMM yyyy \u0d34\u0d19\u0d32\u0d17\u0d35'", "g:d MMM yyyy H' \u0d19\u0d32\u0d2C\u0d34\u0d01\u0d32 'm' \u0d19\u0d32\u0d17\u0d35'", "g:d MMM yyyy H:mm:ss", "G:d MMM 
yyyy HH:mm:ss", "m:MMMM dd", "M:MMMM dd", "r:ddd, dd MMM yyyy HH':'mm':'ss 'GMT'", "R:ddd, dd MMM yyyy HH':'mm':'ss 'GMT'", "s:yyyy'-'MM'-'dd'T'HH':'mm':'ss", "t:H' \u0d19\u0d32\u0d2C\u0d34\u0d01\u0d32 'm' \u0d19\u0d32\u0d17\u0d35 'ss' \u0d27", "t:\u0d34\u0d19\u0d32\u0d17\u0d35'", "t:H' \u0d19\u0d32\u0d2C\u0d34\u0d01\u0d32 'm' \u0d19\u0d32\u0d17\u0d35'", "t:H:mm:ss", "T:HH:mm:ss", "u:yyyy'-'MM'-'dd HH':'mm':'ss'Z'", "U:dddd, dd MMMM yyyy HH:mm:ss", "y:yyyy MMMM", "Y:yyyy MMMM", }); return dfi; } set { base.DateTimeFormat = value; // not used } } public override String ResolveLanguage(String name) { switch(name) { case "ab": return "\u0e41\u0e2d\u0e1a\u0e01\u0e32\u0e40\u0e0b\u0e35\u0e22"; case "aa": return "\u0e2d\u0e32\u0e1f\u0e32"; case "af": return "\u0e41\u0e2d\u0e1f\u0e23\u0e34\u0e01\u0e31\u0e19"; case "sq": return "\u0e41\u0e2d\u0e25\u0e40\u0e1a\u0e40\u0e19\u0e35\u0e22"; case "am": return "\u0e2d\u0e31\u0e21\u0e2e\u0e32\u0e23\u0e34\u0e04"; case "ar": return "\u0e2d\u0e32\u0e23\u0e30\u0e1a\u0e34\u0e04"; case "hy": return "\u0e2d\u0e32\u0e23\u0e4c\u0e21\u0e35\u0e40\u0e19\u0e35\u0e22"; case "as": return "\u0e2d\u0e31\u0e2a\u0e2a\u0e31\u0e21\u0e21\u0e34\u0e2a"; case "ay": return "\u0e44\u0e2d\u0e21\u0e32\u0e23\u0e32"; case "az": return "\u0e2d\u0e32\u0e40\u0e0b\u0e2d\u0e23\u0e4c\u0e44\u0e1a\u0e08\u0e32\u0e19\u0e35"; case "ba": return "\u0e1a\u0e32\u0e2a\u0e0a\u0e4c\u0e01\u0e35\u0e23\u0e4c"; case "eu": return "\u0e41\u0e1a\u0e2a\u0e01\u0e4c"; case "bn": return "\u0e40\u0e1a\u0e19\u0e01\u0e32\u0e23\u0e35"; case "dz": return "\u0e20\u0e39\u0e10\u0e32\u0e19\u0e35"; case "bh": return "\u0e1a\u0e34\u0e2e\u0e32\u0e23\u0e35"; case "bi": return "\u0e1a\u0e34\u0e2a\u0e25\u0e32\u0e21\u0e32"; case "br": return "\u0e1a\u0e23\u0e35\u0e17\u0e31\u0e19"; case "bg": return "\u0e1a\u0e31\u0e25\u0e41\u0e01\u0e40\u0e23\u0e35\u0e22"; case "my": return "\u0e1e\u0e21\u0e48\u0e32"; case "be": return "\u0e1a\u0e32\u0e22\u0e42\u0e25\u0e23\u0e31\u0e2a\u0e40\u0e0b\u0e35\u0e22"; case "km": 
return "\u0e40\u0e02\u0e21\u0e23"; case "ca": return "\u0e41\u0e04\u0e15\u0e32\u0e41\u0e25\u0e19"; case "zh": return "\u0e08\u0e35\u0e19"; case "co": return "\u0e04\u0e2d\u0e23\u0e4c\u0e0b\u0e34\u0e01\u0e32"; case "hr": return "\u0e42\u0e04\u0e23\u0e40\u0e2d\u0e40\u0e17\u0e35\u0e22"; case "cs": return "\u0e40\u0e0a\u0e47\u0e04"; case "da": return "\u0e40\u0e14\u0e19\u0e21\u0e32\u0e23\u0e4c\u0e01"; case "nl": return "\u0e2e\u0e2d\u0e25\u0e31\u0e19\u0e14\u0e32"; case "en": return "\u0e2d\u0e31\u0e07\u0e01\u0e24\u0e29"; case "eo": return "\u0e40\u0e2d\u0e2a\u0e40\u0e1b\u0e2d\u0e23\u0e31\u0e19\u0e42\u0e15"; case "et": return "\u0e40\u0e2d\u0e2a\u0e42\u0e15\u0e40\u0e19\u0e35\u0e22"; case "fo": return "\u0e1f\u0e32\u0e42\u0e23\u0e2a"; case "fj": return "\u0e1f\u0e34\u0e08\u0e34"; case "fi": return "\u0e1f\u0e34\u0e19"; case "fr": return "\u0e1d\u0e23\u0e31\u0e48\u0e07\u0e40\u0e28\u0e2a"; case "fy": return "\u0e1f\u0e23\u0e35\u0e2a\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "gl": return "\u0e01\u0e30\u0e25\u0e35\u0e40\u0e0a\u0e35\u0e22"; case "ka": return "\u0e08\u0e2d\u0e23\u0e4c\u0e40\u0e08\u0e35\u0e22\u0e19"; case "de": return "\u0e40\u0e22\u0e2d\u0e23\u0e21\u0e31\u0e19"; case "el": return "\u0e01\u0e23\u0e35\u0e01"; case "kl": return "\u0e01\u0e23\u0e35\u0e19\u0e41\u0e25\u0e19\u0e14\u0e4c\u0e14\u0e34\u0e04"; case "gn": return "\u0e01\u0e31\u0e27\u0e23\u0e32\u0e19\u0e35"; case "gu": return "\u0e01\u0e39\u0e08\u0e32\u0e23\u0e32\u0e15\u0e34"; case "ha": return "\u0e42\u0e2e\u0e0b\u0e32"; case "he": return "\u0e22\u0e34\u0e27"; case "hi": return "\u0e2e\u0e35\u0e19\u0e14\u0e34"; case "hu": return "\u0e2e\u0e31\u0e07\u0e01\u0e32\u0e23\u0e35"; case "is": return "\u0e44\u0e2d\u0e0b\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c\u0e14\u0e34\u0e04"; case "id": return "\u0e2d\u0e34\u0e19\u0e42\u0e14\u0e19\u0e35\u0e40\u0e0a\u0e35\u0e22"; case "ia": return "\u0e2d\u0e34\u0e19\u0e40\u0e15\u0e2d\u0e23\u0e4c\u0e25\u0e34\u0e07\u0e01\u0e27\u0e32"; case "ie": return 
"\u0e2d\u0e34\u0e19\u0e40\u0e15\u0e2d\u0e23\u0e4c\u0e25\u0e34\u0e07\u0e04\u0e4c"; case "iu": return "\u0e44\u0e2d\u0e19\u0e38\u0e01\u0e15\u0e34\u0e15\u0e31\u0e17"; case "ik": return "\u0e44\u0e2d\u0e19\u0e39\u0e40\u0e1b\u0e35\u0e22\u0e01"; case "ga": return "\u0e44\u0e2d\u0e23\u0e34\u0e0a"; case "it": return "\u0e2d\u0e34\u0e15\u0e32\u0e25\u0e35"; case "ja": return "\u0e0d\u0e35\u0e48\u0e1b\u0e38\u0e48\u0e19"; case "jv": return "\u0e0a\u0e27\u0e32"; case "kn": return "\u0e01\u0e32\u0e19\u0e32\u0e14\u0e32"; case "ks": return "\u0e04\u0e31\u0e0a\u0e21\u0e35\u0e23\u0e35"; case "kk": return "\u0e04\u0e32\u0e0b\u0e31\u0e04"; case "rw": return "\u0e04\u0e34\u0e19\u0e22\u0e32\u0e27\u0e31\u0e19\u0e14\u0e32"; case "ky": return "\u0e40\u0e04\u0e2d\u0e23\u0e4c\u0e01\u0e34\u0e0b"; case "rn": return "\u0e04\u0e34\u0e23\u0e31\u0e19\u0e14\u0e35"; case "ko": return "\u0e40\u0e01\u0e32\u0e2b\u0e25\u0e35"; case "ku": return "\u0e40\u0e04\u0e34\u0e14"; case "lo": return "\u0e25\u0e32\u0e27"; case "la": return "\u0e25\u0e30\u0e15\u0e34\u0e19"; case "lv": return "\u0e41\u0e25\u0e15\u0e40\u0e27\u0e35\u0e22 (\u0e40\u0e25\u0e17\u0e17\u0e34\u0e2a\u0e0a\u0e4c)"; case "ln": return "\u0e25\u0e34\u0e07\u0e01\u0e32\u0e25\u0e32"; case "lt": return "\u0e25\u0e34\u0e18\u0e31\u0e27\u0e40\u0e19\u0e35\u0e22"; case "mk": return "\u0e41\u0e21\u0e0b\u0e35\u0e42\u0e14\u0e40\u0e19\u0e35\u0e22"; case "mg": return "\u0e21\u0e32\u0e25\u0e32\u0e01\u0e32\u0e0b\u0e35"; case "ms": return "\u0e21\u0e25\u0e32\u0e22\u0e39"; case "ml": return "\u0e41\u0e21\u0e25\u0e30\u0e22\u0e32\u0e25\u0e31\u0e21"; case "mt": return "\u0e21\u0e2d\u0e25\u0e15\u0e32"; case "mi": return "\u0e40\u0e21\u0e32\u0e23\u0e35"; case "mr": return "\u0e21\u0e32\u0e23\u0e32\u0e17\u0e35"; case "mo": return "\u0e42\u0e21\u0e14\u0e32\u0e40\u0e27\u0e35\u0e22"; case "mn": return "\u0e21\u0e2d\u0e07\u0e42\u0e01\u0e25"; case "na": return "\u0e19\u0e2d\u0e23\u0e39"; case "ne": return "\u0e40\u0e19\u0e1b\u0e32\u0e25"; case "no": return 
"\u0e19\u0e2d\u0e23\u0e4c\u0e40\u0e27\u0e22\u0e4c"; case "oc": return "\u0e2d\u0e2d\u0e01\u0e0b\u0e34\u0e17\u0e31\u0e19"; case "or": return "\u0e42\u0e2d\u0e23\u0e34\u0e22\u0e32"; case "om": return "\u0e42\u0e2d\u0e42\u0e23\u0e42\u0e21 (\u0e2d\u0e32\u0e1f\u0e32\u0e19)"; case "ps": return "\u0e1e\u0e32\u0e2a\u0e0a\u0e4c\u0e42\u0e15 (\u0e1e\u0e38\u0e2a\u0e0a\u0e4c\u0e42\u0e15)"; case "fa": return "\u0e40\u0e1b\u0e2d\u0e23\u0e4c\u0e40\u0e0b\u0e35\u0e22"; case "pl": return "\u0e42\u0e1b\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "pt": return "\u0e42\u0e1b\u0e23\u0e15\u0e38\u0e40\u0e01\u0e2a"; case "pa": return "\u0e1b\u0e31\u0e0d\u0e08\u0e32\u0e1b"; case "qu": return "\u0e04\u0e34\u0e27\u0e0a\u0e31\u0e27"; case "rm": return "\u0e40\u0e23\u0e42\u0e15-\u0e42\u0e23\u0e41\u0e21\u0e19\u0e0b\u0e4c"; case "ro": return "\u0e42\u0e23\u0e21\u0e31\u0e19"; case "ru": return "\u0e23\u0e31\u0e2a\u0e40\u0e0b\u0e35\u0e22"; case "sm": return "\u0e0b\u0e32\u0e21\u0e31\u0e27"; case "sg": return "\u0e2a\u0e31\u0e19\u0e42\u0e04"; case "sa": return "\u0e2a\u0e31\u0e19\u0e2a\u0e01\u0e24\u0e15"; case "gd": return "\u0e2a\u0e01\u0e47\u0e2d\u0e15\u0e2a\u0e4c\u0e40\u0e01\u0e25\u0e34\u0e04"; case "sr": return "\u0e40\u0e0b\u0e2d\u0e23\u0e4c\u0e40\u0e1a\u0e35\u0e22"; case "sh": return "\u0e40\u0e0b\u0e2d\u0e23\u0e4c\u0e42\u0e1a-\u0e42\u0e04\u0e23\u0e40\u0e2d\u0e40\u0e17\u0e35\u0e22\u0e19"; case "st": return "\u0e40\u0e0b\u0e42\u0e2a\u0e42\u0e17"; case "tn": return "\u0e40\u0e0b\u0e15\u0e2a\u0e27\u0e32\u0e19\u0e32"; case "sn": return "\u0e42\u0e0b\u0e19\u0e32"; case "sd": return "\u0e0b\u0e34\u0e19\u0e14\u0e34"; case "si": return "\u0e2a\u0e34\u0e07\u0e2b\u0e25"; case "ss": return "\u0e0b\u0e35\u0e2a\u0e27\u0e32\u0e15\u0e34"; case "sk": return "\u0e2a\u0e42\u0e25\u0e27\u0e31\u0e04"; case "sl": return "\u0e2a\u0e42\u0e25\u0e40\u0e27\u0e40\u0e19\u0e35\u0e22"; case "so": return "\u0e42\u0e0b\u0e21\u0e32\u0e25\u0e35"; case "es": return "\u0e2a\u0e40\u0e1b\u0e19"; case "su": return 
"\u0e0b\u0e31\u0e19\u0e14\u0e32\u0e19\u0e35\u0e2a"; case "sw": return "\u0e0b\u0e27\u0e32\u0e2e\u0e34\u0e23\u0e35"; case "sv": return "\u0e2a\u0e27\u0e35\u0e40\u0e14\u0e19"; case "tl": return "\u0e15\u0e32\u0e01\u0e32\u0e25\u0e47\u0e2d\u0e01"; case "tg": return "\u0e17\u0e32\u0e08\u0e34\u0e04"; case "ta": return "\u0e17\u0e21\u0e34\u0e2c"; case "tt": return "\u0e15\u0e32\u0e14"; case "te": return "\u0e17\u0e34\u0e25\u0e39\u0e01\u0e39"; case "th": return "\u0e44\u0e17\u0e22"; case "bo": return "\u0e17\u0e34\u0e40\u0e1a\u0e15"; case "ti": return "\u0e17\u0e34\u0e01\u0e23\u0e34\u0e19\u0e22\u0e32"; case "to": return "\u0e17\u0e2d\u0e07\u0e01\u0e49\u0e32"; case "ts": return "\u0e0b\u0e2d\u0e07\u0e01\u0e32"; case "tr": return "\u0e15\u0e38\u0e23\u0e01\u0e35"; case "tk": return "\u0e40\u0e15\u0e34\u0e23\u0e4c\u0e01\u0e40\u0e21\u0e19"; case "tw": return "\u0e17\u0e27\u0e35"; case "ug": return "\u0e2d\u0e38\u0e22\u0e01\u0e31\u0e27"; case "uk": return "\u0e22\u0e39\u0e40\u0e04\u0e23\u0e19"; case "ur": return "\u0e2d\u0e34\u0e23\u0e14\u0e39"; case "uz": return "\u0e2d\u0e38\u0e2a\u0e40\u0e1a\u0e04"; case "vi": return "\u0e40\u0e27\u0e35\u0e22\u0e14\u0e19\u0e32\u0e21"; case "vo": return "\u0e42\u0e27\u0e25\u0e32\u0e1e\u0e38\u0e01"; case "cy": return "\u0e40\u0e27\u0e25\u0e2a\u0e4c"; case "wo": return "\u0e27\u0e39\u0e25\u0e2d\u0e1f"; case "xh": return "\u0e42\u0e0b\u0e2a\u0e32"; case "yi": return "\u0e22\u0e35\u0e14\u0e34\u0e0a"; case "yo": return "\u0e42\u0e22\u0e23\u0e39\u0e1a\u0e32"; case "za": return "\u0e08\u0e27\u0e07"; case "zu": return "\u0e0b\u0e39\u0e25\u0e39"; } return base.ResolveLanguage(name); } public override String ResolveCountry(String name) { switch(name) { case "AF": return "\u0e2d\u0e31\u0e1f\u0e01\u0e32\u0e19\u0e34\u0e2a\u0e16\u0e32\u0e19"; case "AL": return "\u0e41\u0e2d\u0e25\u0e40\u0e1a\u0e40\u0e19\u0e35\u0e22"; case "DZ": return "\u0e41\u0e2d\u0e25\u0e08\u0e35\u0e40\u0e23\u0e35\u0e22"; case "AD": return 
"\u0e2d\u0e31\u0e19\u0e14\u0e2d\u0e23\u0e4c\u0e23\u0e32"; case "AO": return "\u0e2d\u0e31\u0e19\u0e42\u0e01\u0e25\u0e32"; case "AI": return "\u0e2d\u0e31\u0e19\u0e01\u0e34\u0e25\u0e48\u0e32"; case "AR": return "\u0e2d\u0e32\u0e23\u0e4c\u0e40\u0e08\u0e19\u0e15\u0e34\u0e19\u0e48\u0e32"; case "AM": return "\u0e2d\u0e32\u0e23\u0e4c\u0e21\u0e35\u0e40\u0e19\u0e35\u0e22"; case "AW": return "\u0e2d\u0e32\u0e23\u0e39\u0e1a\u0e32"; case "AU": return "\u0e2d\u0e2d\u0e2a\u0e40\u0e15\u0e23\u0e40\u0e25\u0e35\u0e22"; case "AT": return "\u0e2d\u0e2d\u0e2a\u0e40\u0e15\u0e23\u0e35\u0e22"; case "AZ": return "\u0e2d\u0e32\u0e40\u0e0b\u0e2d\u0e23\u0e4c\u0e44\u0e1a\u0e08\u0e31\u0e19"; case "BS": return "\u0e1a\u0e32\u0e2e\u0e32\u0e21\u0e32\u0e2a"; case "BH": return "\u0e1a\u0e32\u0e2b\u0e4c\u0e40\u0e23\u0e19"; case "BD": return "\u0e1a\u0e31\u0e07\u0e04\u0e25\u0e32\u0e40\u0e17\u0e28"; case "BB": return "\u0e1a\u0e32\u0e23\u0e4c\u0e1a\u0e32\u0e14\u0e2d\u0e2a"; case "BY": return "\u0e40\u0e1a\u0e25\u0e25\u0e32\u0e23\u0e31\u0e2a"; case "BE": return "\u0e40\u0e1a\u0e25\u0e40\u0e22\u0e35\u0e48\u0e22\u0e21"; case "BZ": return "\u0e40\u0e1a\u0e25\u0e34\u0e0b"; case "BJ": return "\u0e40\u0e1a\u0e19\u0e34\u0e19"; case "BM": return "\u0e40\u0e1a\u0e2d\u0e23\u0e4c\u0e21\u0e34\u0e27\u0e14\u0e49\u0e32"; case "BT": return "\u0e20\u0e39\u0e10\u0e32\u0e19"; case "BO": return "\u0e42\u0e1a\u0e25\u0e34\u0e40\u0e27\u0e35\u0e22"; case "BA": return "\u0e1a\u0e2d\u0e2a\u0e40\u0e19\u0e35\u0e22 \u0e41\u0e25\u0e30 \u0e40\u0e2e\u0e34\u0e23\u0e4c\u0e0b\u0e42\u0e01\u0e27\u0e34\u0e40\u0e19\u0e35\u0e22"; case "BW": return "\u0e1a\u0e2d\u0e15\u0e2a\u0e27\u0e32\u0e19\u0e32"; case "BR": return "\u0e1a\u0e23\u0e32\u0e0b\u0e34\u0e25"; case "BN": return "\u0e1a\u0e23\u0e39\u0e44\u0e19"; case "BG": return "\u0e1a\u0e31\u0e25\u0e41\u0e01\u0e40\u0e23\u0e35\u0e22"; case "BF": return "\u0e40\u0e1a\u0e2d\u0e23\u0e4c\u0e01\u0e34\u0e19\u0e32\u0e1f\u0e32\u0e42\u0e0b"; case "BI": return "\u0e1a\u0e39\u0e23\u0e31\u0e19\u0e14\u0e34"; 
case "KH": return "\u0e01\u0e31\u0e21\u0e1e\u0e39\u0e0a\u0e32"; case "CM": return "\u0e04\u0e32\u0e40\u0e21\u0e23\u0e39\u0e19"; case "CA": return "\u0e41\u0e04\u0e19\u0e32\u0e14\u0e32"; case "CV": return "\u0e40\u0e04\u0e1e\u0e40\u0e27\u0e2d\u0e23\u0e4c\u0e14"; case "CF": return "\u0e2a\u0e32\u0e18\u0e32\u0e23\u0e13\u0e23\u0e31\u0e10\u0e41\u0e2d\u0e1f\u0e23\u0e34\u0e01\u0e32\u0e01\u0e25\u0e32\u0e07"; case "TD": return "\u0e0a\u0e32\u0e14"; case "CL": return "\u0e0a\u0e34\u0e25\u0e35"; case "CN": return "\u0e08\u0e35\u0e19"; case "CO": return "\u0e42\u0e04\u0e25\u0e31\u0e21\u0e40\u0e1a\u0e35\u0e22"; case "KM": return "\u0e42\u0e04\u0e42\u0e21\u0e23\u0e2d\u0e2a"; case "CG": return "\u0e04\u0e2d\u0e07\u0e42\u0e01"; case "CR": return "\u0e04\u0e2d\u0e2a\u0e15\u0e32\u0e23\u0e34\u0e01\u0e49\u0e32"; case "CI": return "\u0e1d\u0e31\u0e48\u0e07\u0e17\u0e30\u0e40\u0e25\u0e44\u0e2d\u0e27\u0e2d\u0e23\u0e34"; case "HR": return "\u0e42\u0e04\u0e23\u0e40\u0e2d\u0e40\u0e0a\u0e35\u0e22"; case "CU": return "\u0e04\u0e34\u0e27\u0e1a\u0e32"; case "CY": return "\u0e44\u0e0b\u0e1b\u0e23\u0e31\u0e2a"; case "CZ": return "\u0e2a\u0e32\u0e18\u0e32\u0e23\u0e13\u0e23\u0e31\u0e10\u0e40\u0e0a\u0e47\u0e04"; case "DK": return "\u0e40\u0e14\u0e19\u0e21\u0e32\u0e23\u0e4c\u0e01"; case "DJ": return "\u0e14\u0e34\u0e42\u0e1a\u0e15\u0e34"; case "DM": return "\u0e42\u0e14\u0e21\u0e34\u0e19\u0e34\u0e01\u0e49\u0e32"; case "DO": return "\u0e2a\u0e32\u0e18\u0e32\u0e23\u0e13\u0e23\u0e31\u0e10\u0e42\u0e14\u0e21\u0e34\u0e19\u0e34\u0e01\u0e31\u0e19"; case "TL": return "\u0e15\u0e34\u0e21\u0e2d\u0e23\u0e4c\u0e15\u0e30\u0e27\u0e31\u0e19\u0e2d\u0e2d\u0e01"; case "EC": return "\u0e40\u0e2d\u0e01\u0e27\u0e32\u0e14\u0e2d\u0e23\u0e4c"; case "EG": return "\u0e2d\u0e35\u0e22\u0e34\u0e1b\u0e15\u0e4c"; case "SV": return "\u0e40\u0e2d\u0e25\u0e0b\u0e32\u0e27\u0e32\u0e14\u0e2d\u0e23\u0e4c"; case "GQ": return "\u0e40\u0e2d\u0e04\u0e27\u0e32\u0e42\u0e17\u0e40\u0e23\u0e35\u0e22\u0e25\u0e01\u0e34\u0e19\u0e35"; case "ER": return 
"\u0e2d\u0e34\u0e23\u0e34\u0e17\u0e23\u0e35"; case "EE": return "\u0e40\u0e2d\u0e2a\u0e42\u0e15\u0e40\u0e19\u0e35\u0e22"; case "ET": return "\u0e40\u0e2d\u0e18\u0e34\u0e42\u0e2d\u0e40\u0e1b\u0e35\u0e22"; case "FJ": return "\u0e1f\u0e34\u0e08\u0e34"; case "FI": return "\u0e1f\u0e34\u0e19\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "FR": return "\u0e1d\u0e23\u0e31\u0e48\u0e07\u0e40\u0e28\u0e2a"; case "GF": return "\u0e40\u0e1f\u0e23\u0e47\u0e19\u0e0a\u0e01\u0e34\u0e27\u0e19\u0e48\u0e32"; case "PF": return "\u0e40\u0e1f\u0e23\u0e47\u0e19\u0e0a\u0e42\u0e1e\u0e25\u0e34\u0e19\u0e35\u0e40\u0e0b\u0e35\u0e22"; case "TF": return "\u0e2d\u0e32\u0e13\u0e32\u0e40\u0e02\u0e15\u0e17\u0e32\u0e07\u0e43\u0e15\u0e49\u0e02\u0e2d\u0e07\u0e1d\u0e23\u0e31\u0e48\u0e07\u0e40\u0e28\u0e2a"; case "GA": return "\u0e01\u0e32\u0e1a\u0e2d\u0e19"; case "GM": return "\u0e41\u0e01\u0e21\u0e40\u0e1a\u0e35\u0e22"; case "GE": return "\u0e08\u0e2d\u0e23\u0e4c\u0e40\u0e08\u0e35\u0e22"; case "DE": return "\u0e40\u0e22\u0e2d\u0e23\u0e21\u0e19\u0e35"; case "GH": return "\u0e01\u0e32\u0e19\u0e48\u0e32"; case "GR": return "\u0e01\u0e23\u0e35\u0e0b"; case "GP": return "\u0e01\u0e31\u0e27\u0e40\u0e14\u0e2d\u0e25\u0e39\u0e1b"; case "GT": return "\u0e01\u0e31\u0e27\u0e40\u0e15\u0e21\u0e32\u0e25\u0e32"; case "GN": return "\u0e01\u0e34\u0e27\u0e19\u0e35"; case "GW": return "\u0e01\u0e34\u0e27\u0e19\u0e35-\u0e1a\u0e34\u0e2a\u0e42\u0e0b"; case "GY": return "\u0e01\u0e39\u0e22\u0e32\u0e19\u0e48\u0e32"; case "HT": return "\u0e44\u0e2e\u0e15\u0e35"; case "HN": return "\u0e2e\u0e2d\u0e19\u0e14\u0e39\u0e23\u0e31\u0e2a"; case "HK": return "\u0e2e\u0e48\u0e2d\u0e07\u0e01\u0e07"; case "HU": return "\u0e2e\u0e31\u0e07\u0e01\u0e32\u0e23\u0e35"; case "IS": return "\u0e44\u0e2d\u0e0b\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "IN": return "\u0e2d\u0e34\u0e19\u0e40\u0e14\u0e35\u0e22"; case "ID": return "\u0e2d\u0e34\u0e19\u0e42\u0e14\u0e19\u0e35\u0e40\u0e0b\u0e35\u0e22"; case "IR": return "\u0e2d\u0e34\u0e2b\u0e23\u0e48\u0e32\u0e19"; case 
"IQ": return "\u0e2d\u0e34\u0e23\u0e31\u0e01"; case "IE": return "\u0e44\u0e2d\u0e23\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "IL": return "\u0e2d\u0e34\u0e2a\u0e23\u0e32\u0e40\u0e2d\u0e25"; case "IT": return "\u0e2d\u0e34\u0e15\u0e32\u0e25\u0e35"; case "JM": return "\u0e08\u0e32\u0e44\u0e21\u0e01\u0e49\u0e32"; case "JP": return "\u0e0d\u0e35\u0e48\u0e1b\u0e38\u0e48\u0e19"; case "JO": return "\u0e08\u0e2d\u0e23\u0e4c\u0e41\u0e14\u0e19"; case "KZ": return "\u0e04\u0e32\u0e0b\u0e31\u0e04\u0e2a\u0e16\u0e32\u0e19"; case "KE": return "\u0e40\u0e04\u0e19\u0e22\u0e48\u0e32"; case "KI": return "\u0e04\u0e34\u0e23\u0e35\u0e1a\u0e32\u0e15\u0e34"; case "KP": return "\u0e40\u0e01\u0e32\u0e2b\u0e25\u0e35\u0e40\u0e2b\u0e19\u0e37\u0e2d"; case "KR": return "\u0e40\u0e01\u0e32\u0e2b\u0e25\u0e35\u0e43\u0e15\u0e49"; case "KW": return "\u0e04\u0e39\u0e40\u0e27\u0e15"; case "KG": return "\u0e40\u0e04\u0e2d\u0e23\u0e4c\u0e01\u0e34\u0e2a\u0e16\u0e32\u0e19"; case "LA": return "\u0e25\u0e32\u0e27"; case "LV": return "\u0e25\u0e32\u0e15\u0e40\u0e27\u0e35\u0e22"; case "LB": return "\u0e40\u0e25\u0e1a\u0e32\u0e19\u0e2d\u0e19"; case "LS": return "\u0e40\u0e25\u0e42\u0e0b\u0e42\u0e17"; case "LR": return "\u0e25\u0e34\u0e40\u0e1a\u0e2d\u0e23\u0e4c\u0e40\u0e25\u0e35\u0e22"; case "LY": return "\u0e25\u0e34\u0e40\u0e1a\u0e35\u0e22"; case "LI": return "\u0e44\u0e25\u0e40\u0e17\u0e19\u0e2a\u0e44\u0e15\u0e19\u0e4c"; case "LT": return "\u0e25\u0e34\u0e40\u0e17\u0e2d\u0e23\u0e4c\u0e40\u0e19\u0e35\u0e22"; case "LU": return "\u0e25\u0e31\u0e01\u0e0b\u0e4c\u0e40\u0e0b\u0e21\u0e40\u0e1a\u0e2d\u0e23\u0e4c\u0e01"; case "MK": return "\u0e41\u0e21\u0e0b\u0e35\u0e42\u0e14\u0e40\u0e19\u0e35\u0e22"; case "MG": return "\u0e21\u0e32\u0e14\u0e32\u0e01\u0e32\u0e2a\u0e01\u0e49\u0e32"; case "MO": return "\u0e21\u0e32\u0e40\u0e01\u0e4a\u0e32"; case "MY": return "\u0e21\u0e32\u0e40\u0e25\u0e40\u0e0b\u0e35\u0e22"; case "ML": return "\u0e21\u0e32\u0e25\u0e35"; case "MT": return "\u0e21\u0e31\u0e25\u0e15\u0e49\u0e32"; case 
"MQ": return "\u0e21\u0e32\u0e23\u0e4c\u0e15\u0e34\u0e19\u0e34\u0e01"; case "MR": return "\u0e21\u0e2d\u0e23\u0e34\u0e17\u0e32\u0e40\u0e19\u0e35\u0e22"; case "MU": return "\u0e21\u0e2d\u0e23\u0e34\u0e40\u0e15\u0e35\u0e22\u0e2a"; case "YT": return "\u0e21\u0e32\u0e22\u0e2d\u0e15"; case "MX": return "\u0e41\u0e21\u0e47\u0e01\u0e0b\u0e34\u0e42\u0e01"; case "FM": return "\u0e44\u0e21\u0e42\u0e04\u0e23\u0e19\u0e34\u0e40\u0e0b\u0e35\u0e22"; case "MD": return "\u0e42\u0e21\u0e25\u0e42\u0e14\u0e27\u0e32"; case "MC": return "\u0e42\u0e21\u0e19\u0e32\u0e42\u0e04"; case "MN": return "\u0e21\u0e2d\u0e07\u0e42\u0e01\u0e40\u0e25\u0e35\u0e22"; case "MS": return "\u0e21\u0e2d\u0e19\u0e15\u0e4c\u0e40\u0e0b\u0e2d\u0e23\u0e32\u0e15"; case "MA": return "\u0e42\u0e21\u0e23\u0e2d\u0e04\u0e42\u0e04"; case "MZ": return "\u0e42\u0e21\u0e41\u0e0b\u0e21\u0e1a\u0e34\u0e04"; case "MM": return "\u0e2a\u0e2b\u0e20\u0e32\u0e1e\u0e1e\u0e21\u0e48\u0e32"; case "NA": return "\u0e19\u0e32\u0e21\u0e34\u0e40\u0e1a\u0e35\u0e22"; case "NP": return "\u0e40\u0e19\u0e1b\u0e32\u0e25"; case "NL": return "\u0e40\u0e19\u0e40\u0e18\u0e2d\u0e23\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "AN": return "\u0e40\u0e19\u0e40\u0e18\u0e2d\u0e23\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c\u0e41\u0e2d\u0e19\u0e17\u0e34\u0e25\u0e25\u0e4c"; case "NC": return "\u0e19\u0e34\u0e27\u0e04\u0e32\u0e25\u0e34\u0e42\u0e14\u0e40\u0e19\u0e35\u0e22"; case "NZ": return "\u0e19\u0e34\u0e27\u0e0b\u0e35\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "NI": return "\u0e19\u0e34\u0e04\u0e32\u0e23\u0e32\u0e01\u0e31\u0e27"; case "NE": return "\u0e44\u0e19\u0e40\u0e08\u0e2d\u0e23\u0e4c"; case "NG": return "\u0e44\u0e19\u0e08\u0e35\u0e40\u0e23\u0e35\u0e22"; case "NU": return "\u0e19\u0e35\u0e22\u0e39"; case "NO": return "\u0e19\u0e2d\u0e23\u0e4c\u0e40\u0e27\u0e22\u0e4c"; case "OM": return "\u0e42\u0e2d\u0e21\u0e32\u0e19"; case "PK": return "\u0e1b\u0e32\u0e01\u0e35\u0e2a\u0e16\u0e32\u0e19"; case "PA": return "\u0e1b\u0e32\u0e19\u0e32\u0e21\u0e32"; case "PG": return 
"\u0e1b\u0e32\u0e1b\u0e31\u0e27\u0e19\u0e34\u0e27\u0e01\u0e35\u0e19\u0e35"; case "PY": return "\u0e1b\u0e32\u0e23\u0e32\u0e01\u0e27\u0e31\u0e22"; case "PE": return "\u0e40\u0e1b\u0e23\u0e39"; case "PH": return "\u0e1f\u0e34\u0e25\u0e34\u0e1b\u0e1b\u0e34\u0e19\u0e2a\u0e4c"; case "PL": return "\u0e42\u0e1b\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "PT": return "\u0e42\u0e1b\u0e15\u0e38\u0e01\u0e31\u0e25"; case "PR": return "\u0e40\u0e1b\u0e2d\u0e23\u0e4c\u0e42\u0e15\u0e23\u0e34\u0e42\u0e01"; case "QA": return "\u0e01\u0e32\u0e15\u0e32\u0e23\u0e4c"; case "RO": return "\u0e23\u0e39\u0e40\u0e21\u0e40\u0e19\u0e35\u0e22"; case "RU": return "\u0e23\u0e31\u0e2a\u0e40\u0e0b\u0e35\u0e22"; case "RW": return "\u0e23\u0e32\u0e27\u0e31\u0e25\u0e14\u0e32"; case "SA": return "\u0e0b\u0e32\u0e2d\u0e38\u0e14\u0e34\u0e2d\u0e32\u0e23\u0e30\u0e40\u0e1a\u0e35\u0e22"; case "SN": return "\u0e0b\u0e34\u0e19\u0e35\u0e01\u0e31\u0e25"; case "SP": return "\u0e40\u0e0b\u0e2d\u0e23\u0e4c\u0e40\u0e1a\u0e35\u0e22"; case "SC": return "\u0e40\u0e0b\u0e22\u0e4c\u0e41\u0e0a\u0e25\u0e25\u0e4c"; case "SL": return "\u0e40\u0e0b\u0e35\u0e22\u0e23\u0e4c\u0e23\u0e48\u0e32\u0e25\u0e35\u0e2d\u0e2d\u0e19"; case "SG": return "\u0e2a\u0e34\u0e07\u0e04\u0e42\u0e1b\u0e23\u0e4c"; case "SK": return "\u0e2a\u0e42\u0e25\u0e27\u0e32\u0e40\u0e01\u0e35\u0e22"; case "SI": return "\u0e2a\u0e42\u0e25\u0e27\u0e34\u0e40\u0e19\u0e35\u0e22"; case "SO": return "\u0e42\u0e0b\u0e21\u0e32\u0e40\u0e25\u0e35\u0e22"; case "ZA": return "\u0e41\u0e2d\u0e1f\u0e23\u0e34\u0e01\u0e32\u0e43\u0e15\u0e49"; case "ES": return "\u0e2a\u0e40\u0e1b\u0e19"; case "LK": return "\u0e28\u0e23\u0e35\u0e25\u0e31\u0e07\u0e01\u0e32"; case "SD": return "\u0e0b\u0e39\u0e14\u0e32\u0e19"; case "SR": return "\u0e0b\u0e39\u0e23\u0e34\u0e19\u0e32\u0e21\u0e34"; case "SZ": return "\u0e2a\u0e27\u0e32\u0e0b\u0e34\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "SE": return "\u0e2a\u0e27\u0e35\u0e40\u0e14\u0e19"; case "CH": return 
"\u0e2a\u0e27\u0e34\u0e2a\u0e40\u0e0b\u0e2d\u0e23\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "SY": return "\u0e0b\u0e35\u0e40\u0e23\u0e35\u0e22"; case "TW": return "\u0e44\u0e15\u0e49\u0e2b\u0e27\u0e31\u0e19"; case "TJ": return "\u0e17\u0e32\u0e08\u0e34\u0e01\u0e34\u0e2a\u0e16\u0e32\u0e19"; case "TZ": return "\u0e17\u0e32\u0e19\u0e0b\u0e32\u0e40\u0e19\u0e35\u0e22"; case "TH": return "\u0e1b\u0e23\u0e30\u0e40\u0e17\u0e28\u0e44\u0e17\u0e22"; case "TG": return "\u0e42\u0e15\u0e42\u0e01"; case "TK": return "\u0e42\u0e17\u0e01\u0e34\u0e42\u0e25"; case "TO": return "\u0e17\u0e2d\u0e07\u0e01\u0e49\u0e32"; case "TT": return "\u0e17\u0e23\u0e34\u0e19\u0e34\u0e41\u0e14\u0e14 \u0e41\u0e25\u0e30\u0e42\u0e17\u0e1a\u0e32\u0e42\u0e01"; case "TN": return "\u0e15\u0e39\u0e19\u0e34\u0e40\u0e0b\u0e35\u0e22"; case "TR": return "\u0e15\u0e38\u0e23\u0e01\u0e35"; case "TM": return "\u0e40\u0e15\u0e34\u0e23\u0e4c\u0e01\u0e40\u0e21\u0e19\u0e34\u0e2a\u0e16\u0e32\u0e19"; case "UG": return "\u0e2d\u0e39\u0e01\u0e32\u0e19\u0e14\u0e32"; case "UA": return "\u0e22\u0e39\u0e40\u0e04\u0e23\u0e19"; case "AE": return "\u0e2a\u0e2b\u0e23\u0e31\u0e10\u0e2d\u0e32\u0e2b\u0e23\u0e31\u0e1a\u0e40\u0e2d\u0e21\u0e34\u0e40\u0e23\u0e15\u0e2a\u0e4c"; case "GB": return "\u0e2a\u0e2b\u0e23\u0e32\u0e0a\u0e2d\u0e32\u0e13\u0e32\u0e08\u0e31\u0e01\u0e23"; case "US": return "\u0e2a\u0e2b\u0e23\u0e31\u0e10\u0e2d\u0e40\u0e21\u0e23\u0e34\u0e01\u0e32"; case "UY": return "\u0e2d\u0e38\u0e23\u0e39\u0e01\u0e27\u0e31\u0e22"; case "UZ": return "\u0e2d\u0e38\u0e0b\u0e40\u0e1a\u0e01\u0e34\u0e2a\u0e16\u0e32\u0e19"; case "VU": return "\u0e27\u0e32\u0e19\u0e31\u0e27\u0e15\u0e39"; case "VA": return "\u0e27\u0e32\u0e15\u0e34\u0e01\u0e31\u0e19"; case "VE": return "\u0e40\u0e27\u0e40\u0e19\u0e0b\u0e39\u0e40\u0e2d\u0e25\u0e48\u0e32"; case "VN": return "\u0e40\u0e27\u0e35\u0e22\u0e14\u0e19\u0e32\u0e21"; case "VG": return 
"\u0e1a\u0e23\u0e34\u0e17\u0e34\u0e0a\u0e40\u0e27\u0e2d\u0e23\u0e4c\u0e08\u0e34\u0e19\u0e44\u0e2d\u0e2a\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "VI": return "\u0e22\u0e39\u0e40\u0e2d\u0e2a\u0e40\u0e27\u0e2d\u0e23\u0e4c\u0e08\u0e34\u0e19\u0e44\u0e2d\u0e2a\u0e4c\u0e41\u0e25\u0e19\u0e14\u0e4c"; case "EH": return "\u0e0b\u0e32\u0e2e\u0e32\u0e23\u0e48\u0e32\u0e15\u0e30\u0e27\u0e31\u0e19\u0e15\u0e01"; case "YE": return "\u0e40\u0e22\u0e40\u0e21\u0e19"; case "YU": return "\u0e22\u0e39\u0e42\u0e01\u0e2a\u0e25\u0e32\u0e40\u0e27\u0e35\u0e22"; case "ZM": return "\u0e41\u0e0b\u0e21\u0e40\u0e1a\u0e35\u0e22"; case "ZW": return "\u0e0b\u0e34\u0e21\u0e1a\u0e32\u0e1a\u0e40\u0e27"; } return base.ResolveCountry(name); } private class PrivateTextInfo : _I18NTextInfo { public PrivateTextInfo(int culture) : base(culture) {} public override int ANSICodePage { get { return 874; } } public override int EBCDICCodePage { get { return 20838; } } public override int MacCodePage { get { return 10021; } } public override int OEMCodePage { get { return 874; } } }; // class PrivateTextInfo public override TextInfo TextInfo { get { return new PrivateTextInfo(LCID); } } }; // class CID001e public class CNth : CID001e { public CNth() : base() {} }; // class CNth }; // namespace I18N.Other
namespace Colorspace
{
    /// <summary>
    /// An immutable RGB color with an alpha channel. All four components are
    /// doubles expected to lie in the range [0, 1]; range checking is delegated
    /// to ColorUtil.CheckRange_0_1. Conversion constructors accept HSV, HSL,
    /// CMYK, XYZ (with an RGB working space) and packed 32-bit representations.
    /// </summary>
    public struct ColorRGB
    {
        private readonly double _alpha;
        private readonly double _r;
        private readonly double _g;
        private readonly double _b;

        /// <summary>Opacity component in [0, 1].</summary>
        public double Alpha { get { return _alpha; } }

        /// <summary>Red component in [0, 1].</summary>
        public double R { get { return _r; } }

        /// <summary>Green component in [0, 1].</summary>
        public double G { get { return _g; } }

        /// <summary>Blue component in [0, 1].</summary>
        public double B { get { return _b; } }

        /// <summary>
        /// Validates that alpha and all three color components are inside [0, 1].
        /// Any out-of-range value is reported by ColorUtil.CheckRange_0_1.
        /// </summary>
        public static void CheckRGBInRange(double a, double r, double g, double b)
        {
            ColorUtil.CheckRange_0_1(a, typeof(ColorRGB), "A");
            ColorUtil.CheckRange_0_1(r, typeof(ColorRGB), "R");
            ColorUtil.CheckRange_0_1(g, typeof(ColorRGB), "G");
            ColorUtil.CheckRange_0_1(b, typeof(ColorRGB), "B");
        }

        /// <summary>
        /// True when the color is not a pure gray, i.e. R, G and B are not all equal.
        /// </summary>
        public bool IsChromatic()
        {
            bool isGray = (this.R == this.G) && (this.G == this.B);
            return !isGray;
        }

        /// <summary>
        /// Validates three color components with an implied alpha of 1.0.
        /// </summary>
        public static void CheckRGBInRange(double r, double g, double b)
        {
            CheckRGBInRange(1.0, r, g, b);
        }

        /// <summary>
        /// Renders the color as "TypeName(a,r,g,b)" using the invariant culture.
        /// (The alpha pattern "0.##" intentionally differs from the "0.0##"
        /// pattern used for the color channels — preserved from the original.)
        /// </summary>
        public override string ToString()
        {
            return string.Format(
                System.Globalization.CultureInfo.InvariantCulture,
                "{0}({1:0.##},{2:0.0##},{3:0.0##},{4:0.0##})",
                this.GetType().Name, _alpha, _r, _g, _b);
        }

        /// <summary>Creates a fully-opaque color from r/g/b in [0, 1].</summary>
        public ColorRGB(double r, double g, double b)
        {
            CheckRGBInRange(r, g, b);
            this._alpha = 1.0;
            this._r = r;
            this._g = g;
            this._b = b;
        }

        /// <summary>Creates a color from alpha/r/g/b, each in [0, 1].</summary>
        public ColorRGB(double alpha, double r, double g, double b)
        {
            CheckRGBInRange(alpha, r, g, b);
            this._alpha = alpha;
            this._r = r;
            this._g = g;
            this._b = b;
        }

        /// <summary>Creates a color from a packed signed 32-bit ARGB value.</summary>
        public ColorRGB(int n)
        {
            var unpacked = From32Bit(new ColorRGB32Bit(n));
            this._alpha = unpacked.Alpha;
            this._r = unpacked.R;
            this._g = unpacked.G;
            this._b = unpacked.B;
        }

        /// <summary>Creates a color from a packed unsigned 32-bit ARGB value.</summary>
        public ColorRGB(uint n)
        {
            var unpacked = From32Bit(new ColorRGB32Bit(n));
            this._alpha = unpacked.Alpha;
            this._r = unpacked.R;
            this._g = unpacked.G;
            this._b = unpacked.B;
        }

        /// <summary>
        /// Converts to the 8-bit-per-channel form by scaling each component by 255
        /// and truncating toward zero.
        /// </summary>
        public static explicit operator ColorRGB32Bit(Colorspace.ColorRGB color)
        {
            byte a = (byte)(color._alpha * 255);
            byte r = (byte)(color._r * 255);
            byte g = (byte)(color._g * 255);
            byte b = (byte)(color._b * 255);
            return new ColorRGB32Bit(a, r, g, b);
        }

        /// <summary>Packs the color into a signed 32-bit integer via ColorRGB32Bit.</summary>
        public static explicit operator int(ColorRGB color)
        {
            var packed = new ColorRGB32Bit(color);
            return packed.ToInt();
        }

        /// <summary>Creates a color from an 8-bit-per-channel value.</summary>
        public ColorRGB(ColorRGB32Bit color)
        {
            var unpacked = From32Bit(color);
            this._alpha = unpacked._alpha;
            this._r = unpacked._r;
            this._g = unpacked._g;
            this._b = unpacked._b;
        }

        /// <summary>
        /// Converts from HSV. An undefined hue (NaN) or zero saturation yields a
        /// gray of level V. H must be in [0, 1]; exactly 1.0 wraps to 0.0 and
        /// anything greater throws ArgumentOutOfRangeException.
        /// </summary>
        public ColorRGB(ColorHSV hsv)
        {
            if (double.IsNaN(hsv.H) || hsv.S == 0.0)
            {
                // Achromatic: every channel equals the value component.
                this._alpha = hsv.Alpha;
                this._r = hsv.V;
                this._g = hsv.V;
                this._b = hsv.V;
                return;
            }

            double hue = hsv.H;
            if (hsv.H > 1.0)
            {
                throw new System.ArgumentOutOfRangeException("H");
            }
            else if (hsv.H == 1.0)
            {
                hue = 0.0;
            }

            // Split the hue circle into six sectors and locate the fractional
            // position within the active sector.
            double sectorWidth = 1.0 / 6.0;
            double sectorPos = hue / sectorWidth;
            int sector = (int)System.Math.Floor(sectorPos);
            double frac = sectorPos - sector;

            double p = hsv.V * (1.0 - hsv.S);
            double q = hsv.V * (1.0 - (hsv.S * frac));
            double t = hsv.V * (1.0 - (hsv.S * (1.0 - frac)));

            double outR = double.NaN;
            double outG = double.NaN;
            double outB = double.NaN;
            switch (sector)
            {
                case 0: { outR = hsv.V; outG = t; outB = p; break; }
                case 1: { outR = q; outG = hsv.V; outB = p; break; }
                case 2: { outR = p; outG = hsv.V; outB = t; break; }
                case 3: { outR = p; outG = q; outB = hsv.V; break; }
                case 4: { outR = t; outG = p; outB = hsv.V; break; }
                case 5: { outR = hsv.V; outG = p; outB = q; break; }
                default:
                {
                    // Unreachable for valid input: hue in [0,1) always lands in 0..5.
                    throw new System.ArgumentException();
                }
            }

            this._alpha = hsv.Alpha;
            this._r = outR;
            this._g = outG;
            this._b = outB;
        }

        /// <summary>
        /// Standard HSL helper: maps a (possibly out-of-range) hue offset to one
        /// RGB component given the two intermediate magnitudes m1 and m2.
        /// </summary>
        private static double HueToComponent(double m1, double m2, double h)
        {
            h = ColorUtil.NormalizeHue(h);
            if ((6.0 * h) < 1.0)
            {
                return (m1 + (m2 - m1) * 6.0 * h);
            }
            if ((2.0 * h) < 1.0)
            {
                return m2;
            }
            if ((3.0 * h) < 2.0)
            {
                return m1 + (m2 - m1) * ((2.0 / 3.0) - h) * 6.0;
            }
            return m1;
        }

        /// <summary>
        /// Converts from HSL (all components in [0, 1]). An undefined hue (NaN)
        /// or zero saturation yields a gray of level L.
        /// </summary>
        public ColorRGB(ColorHSL hsl)
        {
            if (double.IsNaN(hsl.H) || hsl.S == 0)
            {
                // Achromatic: every channel equals the lightness.
                this._alpha = hsl.Alpha;
                this._r = hsl.L;
                this._g = hsl.L;
                this._b = hsl.L;
                return;
            }

            double m2 = (hsl.L < 0.5)
                ? hsl.L * (1.0 + hsl.S)
                : (hsl.L + hsl.S) - (hsl.S * hsl.L);
            double m1 = (2.0 * hsl.L) - m2;
            const double onethird = (1.0 / 3.0);

            this._alpha = hsl.Alpha;
            this._r = 1.0 * HueToComponent(m1, m2, hsl.H + onethird);
            this._g = 1.0 * HueToComponent(m1, m2, hsl.H);
            this._b = 1.0 * HueToComponent(m1, m2, hsl.H - onethird);
        }

        /// <summary>
        /// Converts from CMYK by first folding the key (black) channel into CMY,
        /// then complementing each channel.
        /// </summary>
        public ColorRGB(ColorCMYK cmyk)
        {
            double cmyC = cmyk.C * (1 - cmyk.K) + cmyk.K;
            double cmyM = cmyk.M * (1 - cmyk.K) + cmyk.K;
            double cmyY = cmyk.Y * (1 - cmyk.K) + cmyk.K;
            this._alpha = cmyk.Alpha;
            this._r = 1 - cmyC;
            this._g = 1 - cmyM;
            this._b = 1 - cmyY;
        }

        /// <summary>
        /// Converts from CIE XYZ through the given RGB working space: applies the
        /// space's XYZ-to-RGB matrix to the (0-100 scaled) tristimulus values,
        /// then the sRGB-style gamma companding, clamping each result to [0, 1].
        /// </summary>
        public ColorRGB(ColorXYZ xyz, Colorspace.RGBWorkingSpace ws)
        {
            var m = ws.XYZToRGBMatrix;
            double x = xyz.X / 100;
            double y = xyz.Y / 100;
            double z = xyz.Z / 100;

            // Linear-light RGB from the matrix transform.
            var linR = (x * m[0, 0]) + (y * m[0, 1]) + (z * m[0, 2]);
            var linG = (x * m[1, 0]) + (y * m[1, 1]) + (z * m[1, 2]);
            var linB = (x * m[2, 0]) + (y * m[2, 1]) + (z * m[2, 2]);

            // Gamma companding (linear segment below 0.0031308).
            double r = (linR <= 0.0031308) ? 12.92 * linR : (1.055) * System.Math.Pow(linR, (1.0 / 2.4)) - 0.055;
            double g = (linG <= 0.0031308) ? 12.92 * linG : (1.055) * System.Math.Pow(linG, (1.0 / 2.4)) - 0.055;
            double b = (linB <= 0.0031308) ? 12.92 * linB : (1.055) * System.Math.Pow(linB, (1.0 / 2.4)) - 0.055;

            this._alpha = xyz.Alpha;
            this._r = Clamp01(r);
            this._g = Clamp01(g);
            this._b = Clamp01(b);
        }

        /// <summary>Clamps a single component into [0, 1].</summary>
        private static double Clamp01(double component)
        {
            if (component < 0.0)
            {
                component = 0.0;
            }
            else if (component > 1.0)
            {
                component = 1.0;
            }
            return component;
        }

        /// <summary>
        /// Expands an 8-bit-per-channel color to the [0, 1] double form.
        /// </summary>
        private static ColorRGB From32Bit(ColorRGB32Bit color)
        {
            return new ColorRGB(
                color.Alpha / 255.0,
                color.R / 255.0,
                color.G / 255.0,
                color.B / 255.0);
        }
    }
}
using System;
using System.Data;
using System.Data.OleDb;
using System.Collections;
using System.Configuration;
using PCSComUtils.DataAccess;
using PCSComUtils.PCSExc;
using PCSComUtils.Common;

namespace PCSComMaterials.ActualCost.DS
{
	/// <summary>
	/// Data-access class for the cst_FreightDetail table: insert, delete,
	/// single-row fetch, update and list operations over OleDb.
	/// All SQL parameters use positional '?' placeholders, so parameters must
	/// be added in exactly the order they appear in the statement.
	/// </summary>
	public class cst_FreightDetailDS
	{
		private const string THIS = "PCSComMaterials.ActualCost.DS.cst_FreightDetailDS";

		/// <summary>
		/// Inserts one cst_FreightDetail row.
		/// </summary>
		/// <param name="pobjObjectVO">A cst_FreightDetailVO carrying the column values.</param>
		/// <remarks>
		/// Throws PCSDBException: DUPLICATE_KEY on a native duplicate-key error,
		/// ERROR_DB on any other database error, OTHER_ERROR otherwise.
		/// </remarks>
		public void Add(object pobjObjectVO)
		{
			const string METHOD_NAME = THIS + ".Add()";
			OleDbConnection oconPCS = null;
			OleDbCommand ocmdPCS = null;
			try
			{
				cst_FreightDetailVO objObject = (cst_FreightDetailVO) pobjObjectVO;
				string strSql = String.Empty;
				oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString);
				ocmdPCS = new OleDbCommand("", oconPCS);
				// The Line column was dropped (12-05-2006 dungla, to keep the code
				// compilable); the VALUES list has exactly 8 placeholders to match
				// the 8 columns (the original had a stale 9th placeholder left over
				// from the removed column, which made every insert fail).
				strSql = "INSERT INTO cst_FreightDetail("
					+ cst_FreightDetailTable.QUANTITY_FLD + ","
					+ cst_FreightDetailTable.UNITPRICECIF_FLD + ","
					+ cst_FreightDetailTable.AMOUNT_FLD + ","
					+ cst_FreightDetailTable.FREIGHTMASTERID_FLD + ","
					+ cst_FreightDetailTable.PRODUCTID_FLD + ","
					+ cst_FreightDetailTable.BUYINGUMID_FLD + ","
					+ cst_FreightDetailTable.RETURNTOVENDORDETAILID_FLD + ","
					+ cst_FreightDetailTable.VATAMOUNT_FLD + ")"
					+ "VALUES(?,?,?,?,?,?,?,?)";
				// Parameters are added in column-list order (positional binding).
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.QUANTITY_FLD, OleDbType.Decimal));
				ocmdPCS.Parameters[cst_FreightDetailTable.QUANTITY_FLD].Value = objObject.Quantity;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.UNITPRICECIF_FLD, OleDbType.Decimal));
				ocmdPCS.Parameters[cst_FreightDetailTable.UNITPRICECIF_FLD].Value = objObject.UnitPriceCIF;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.AMOUNT_FLD, OleDbType.Decimal));
				ocmdPCS.Parameters[cst_FreightDetailTable.AMOUNT_FLD].Value = objObject.Amount;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.FREIGHTMASTERID_FLD, OleDbType.Integer));
				ocmdPCS.Parameters[cst_FreightDetailTable.FREIGHTMASTERID_FLD].Value = objObject.FreightMasterID;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.PRODUCTID_FLD, OleDbType.Integer));
				ocmdPCS.Parameters[cst_FreightDetailTable.PRODUCTID_FLD].Value = objObject.ProductID;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.BUYINGUMID_FLD, OleDbType.Integer));
				ocmdPCS.Parameters[cst_FreightDetailTable.BUYINGUMID_FLD].Value = objObject.BuyingUMID;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.RETURNTOVENDORDETAILID_FLD, OleDbType.Integer));
				ocmdPCS.Parameters[cst_FreightDetailTable.RETURNTOVENDORDETAILID_FLD].Value = objObject.ReturnToVendorDetailID;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.VATAMOUNT_FLD, OleDbType.Decimal));
				ocmdPCS.Parameters[cst_FreightDetailTable.VATAMOUNT_FLD].Value = objObject.VATAmount;
				ocmdPCS.CommandText = strSql;
				ocmdPCS.Connection.Open();
				ocmdPCS.ExecuteNonQuery();
			}
			catch (OleDbException ex)
			{
				// Map the native duplicate-key error to DUPLICATE_KEY; every other
				// database error surfaces as ERROR_DB (the original silently
				// swallowed multi-error exceptions with a different native code).
				if (ex.Errors.Count > 1 && ex.Errors[1].NativeError == ErrorCode.SQLDUPLICATE_KEYCODE)
				{
					throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex);
				}
				throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
			}
			catch (InvalidOperationException ex)
			{
				throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
			}
			catch (Exception ex)
			{
				throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex);
			}
			finally
			{
				if (oconPCS != null && oconPCS.State != ConnectionState.Closed)
				{
					oconPCS.Close();
				}
			}
		}

		/// <summary>
		/// Deletes the cst_FreightDetail row with the given primary key.
		/// </summary>
		/// <param name="pintID">FreightDetailID of the row to delete (an int, so
		/// inlining it into the SQL text is injection-safe).</param>
		/// <remarks>
		/// Throws PCSDBException: CASCADE_DELETE_PREVENT when a referential
		/// constraint blocks the delete, ERROR_DB for other database errors,
		/// OTHER_ERROR otherwise.
		/// </remarks>
		public void Delete(int pintID)
		{
			const string METHOD_NAME = THIS + ".Delete()";
			// NOTE(review): "DELETE <table>" without FROM is T-SQL syntax — assumed
			// the backing provider accepts it, as the original did; confirm if the
			// provider ever changes.
			string strSql = "DELETE " + cst_FreightDetailTable.TABLE_NAME
				+ " WHERE " + cst_FreightDetailTable.FREIGHTDETAILID_FLD + "=" + pintID.ToString();
			OleDbConnection oconPCS = null;
			OleDbCommand ocmdPCS = null;
			try
			{
				oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString);
				ocmdPCS = new OleDbCommand(strSql, oconPCS);
				ocmdPCS.Connection.Open();
				ocmdPCS.ExecuteNonQuery();
			}
			catch (OleDbException ex)
			{
				// Cascade-prevention maps to its dedicated error code; anything
				// else is a generic DB error (no silent swallowing).
				if (ex.Errors.Count > 1 && ex.Errors[1].NativeError == ErrorCode.SQLCASCADE_PREVENT_KEYCODE)
				{
					throw new PCSDBException(ErrorCode.CASCADE_DELETE_PREVENT, METHOD_NAME, ex);
				}
				throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
			}
			catch (Exception ex)
			{
				throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex);
			}
			finally
			{
				if (oconPCS != null && oconPCS.State != ConnectionState.Closed)
				{
					oconPCS.Close();
				}
			}
		}

		/// <summary>
		/// Fetches one cst_FreightDetail row by primary key.
		/// </summary>
		/// <param name="pintID">FreightDetailID of the row to load.</param>
		/// <returns>
		/// A cst_FreightDetailVO populated from the row; if no row matches, the
		/// VO is returned with default field values (original behavior kept).
		/// </returns>
		public object GetObjectVO(int pintID)
		{
			const string METHOD_NAME = THIS + ".GetObjectVO()";
			OleDbDataReader odrPCS = null;
			OleDbConnection oconPCS = null;
			OleDbCommand ocmdPCS = null;
			try
			{
				string strSql = "SELECT "
					+ cst_FreightDetailTable.FREIGHTDETAILID_FLD + ","
					+ cst_FreightDetailTable.RETURNTOVENDORDETAILID_FLD + ","
					+ cst_FreightDetailTable.QUANTITY_FLD + ","
					+ cst_FreightDetailTable.UNITPRICECIF_FLD + ","
					+ cst_FreightDetailTable.AMOUNT_FLD + ","
					+ cst_FreightDetailTable.FREIGHTMASTERID_FLD + ","
					+ cst_FreightDetailTable.PRODUCTID_FLD + ","
					+ cst_FreightDetailTable.BUYINGUMID_FLD + ","
					+ cst_FreightDetailTable.VATAMOUNT_FLD
					+ " FROM " + cst_FreightDetailTable.TABLE_NAME
					+ " WHERE " + cst_FreightDetailTable.FREIGHTDETAILID_FLD + "=" + pintID;
				oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString);
				ocmdPCS = new OleDbCommand(strSql, oconPCS);
				ocmdPCS.Connection.Open();
				odrPCS = ocmdPCS.ExecuteReader();
				cst_FreightDetailVO objObject = new cst_FreightDetailVO();
				while (odrPCS.Read())
				{
					objObject.FreightDetailID = int.Parse(odrPCS[cst_FreightDetailTable.FREIGHTDETAILID_FLD].ToString().Trim());
					objObject.Quantity = Decimal.Parse(odrPCS[cst_FreightDetailTable.QUANTITY_FLD].ToString().Trim());
					objObject.UnitPriceCIF = Decimal.Parse(odrPCS[cst_FreightDetailTable.UNITPRICECIF_FLD].ToString().Trim());
					objObject.Amount = Decimal.Parse(odrPCS[cst_FreightDetailTable.AMOUNT_FLD].ToString().Trim());
					objObject.FreightMasterID = int.Parse(odrPCS[cst_FreightDetailTable.FREIGHTMASTERID_FLD].ToString().Trim());
					objObject.ProductID = int.Parse(odrPCS[cst_FreightDetailTable.PRODUCTID_FLD].ToString().Trim());
					objObject.BuyingUMID = int.Parse(odrPCS[cst_FreightDetailTable.BUYINGUMID_FLD].ToString().Trim());
					objObject.VATAmount = Decimal.Parse(odrPCS[cst_FreightDetailTable.VATAMOUNT_FLD].ToString().Trim());
					objObject.ReturnToVendorDetailID = int.Parse(odrPCS[cst_FreightDetailTable.RETURNTOVENDORDETAILID_FLD].ToString().Trim());
				}
				return objObject;
			}
			catch (OleDbException ex)
			{
				throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
			}
			catch (Exception ex)
			{
				throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex);
			}
			finally
			{
				// Close the reader (the original leaked it) before the connection.
				if (odrPCS != null && !odrPCS.IsClosed)
				{
					odrPCS.Close();
				}
				if (oconPCS != null && oconPCS.State != ConnectionState.Closed)
				{
					oconPCS.Close();
				}
			}
		}

		/// <summary>
		/// Updates one cst_FreightDetail row identified by its FreightDetailID.
		/// </summary>
		/// <param name="pobjObjecVO">A cst_FreightDetailVO carrying the new values.</param>
		/// <remarks>
		/// Throws PCSDBException: DUPLICATE_KEY on a native duplicate-key error,
		/// ERROR_DB on any other database error, OTHER_ERROR otherwise.
		/// </remarks>
		public void Update(object pobjObjecVO)
		{
			const string METHOD_NAME = THIS + ".Update()";
			cst_FreightDetailVO objObject = (cst_FreightDetailVO) pobjObjecVO;
			OleDbConnection oconPCS = null;
			OleDbCommand ocmdPCS = null;
			try
			{
				oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString);
				ocmdPCS = new OleDbCommand("", oconPCS);
				string strSql = "UPDATE cst_FreightDetail SET "
					+ cst_FreightDetailTable.QUANTITY_FLD + "= ?" + ","
					+ cst_FreightDetailTable.UNITPRICECIF_FLD + "= ?" + ","
					+ cst_FreightDetailTable.AMOUNT_FLD + "= ?" + ","
					+ cst_FreightDetailTable.FREIGHTMASTERID_FLD + "= ?" + ","
					+ cst_FreightDetailTable.PRODUCTID_FLD + "= ?" + ","
					+ cst_FreightDetailTable.BUYINGUMID_FLD + "= ?" + ","
					+ cst_FreightDetailTable.RETURNTOVENDORDETAILID_FLD + "= ?" + ","
					+ cst_FreightDetailTable.VATAMOUNT_FLD + "= ?"
					+ " WHERE " + cst_FreightDetailTable.FREIGHTDETAILID_FLD + "= ?";
				// Parameters are added in SET-list order; OleDb binds by position,
				// not by name. (The original added VATAmount before
				// ReturnToVendorDetailID, writing each value into the other's column.)
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.QUANTITY_FLD, OleDbType.Decimal));
				ocmdPCS.Parameters[cst_FreightDetailTable.QUANTITY_FLD].Value = objObject.Quantity;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.UNITPRICECIF_FLD, OleDbType.Decimal));
				ocmdPCS.Parameters[cst_FreightDetailTable.UNITPRICECIF_FLD].Value = objObject.UnitPriceCIF;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.AMOUNT_FLD, OleDbType.Decimal));
				ocmdPCS.Parameters[cst_FreightDetailTable.AMOUNT_FLD].Value = objObject.Amount;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.FREIGHTMASTERID_FLD, OleDbType.Integer));
				ocmdPCS.Parameters[cst_FreightDetailTable.FREIGHTMASTERID_FLD].Value = objObject.FreightMasterID;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.PRODUCTID_FLD, OleDbType.Integer));
				ocmdPCS.Parameters[cst_FreightDetailTable.PRODUCTID_FLD].Value = objObject.ProductID;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.BUYINGUMID_FLD, OleDbType.Integer));
				ocmdPCS.Parameters[cst_FreightDetailTable.BUYINGUMID_FLD].Value = objObject.BuyingUMID;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.RETURNTOVENDORDETAILID_FLD, OleDbType.Integer));
				ocmdPCS.Parameters[cst_FreightDetailTable.RETURNTOVENDORDETAILID_FLD].Value = objObject.ReturnToVendorDetailID;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.VATAMOUNT_FLD, OleDbType.Decimal));
				ocmdPCS.Parameters[cst_FreightDetailTable.VATAMOUNT_FLD].Value = objObject.VATAmount;
				ocmdPCS.Parameters.Add(new OleDbParameter(cst_FreightDetailTable.FREIGHTDETAILID_FLD, OleDbType.Integer));
				ocmdPCS.Parameters[cst_FreightDetailTable.FREIGHTDETAILID_FLD].Value = objObject.FreightDetailID;
				ocmdPCS.CommandText = strSql;
				ocmdPCS.Connection.Open();
				ocmdPCS.ExecuteNonQuery();
			}
			catch (OleDbException ex)
			{
				// Same mapping as Add(): duplicate key → DUPLICATE_KEY, everything
				// else → ERROR_DB (no silently swallowed branch).
				if (ex.Errors.Count > 1 && ex.Errors[1].NativeError == ErrorCode.SQLDUPLICATE_KEYCODE)
				{
					throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex);
				}
				throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
			}
			catch (InvalidOperationException ex)
			{
				throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
			}
			catch (Exception ex)
			{
				throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex);
			}
			finally
			{
				if (oconPCS != null && oconPCS.State != ConnectionState.Closed)
				{
					oconPCS.Close();
				}
			}
		}

		/// <summary>
		/// Returns every cst_FreightDetail row as a DataSet containing one table
		/// named after cst_FreightDetailTable.TABLE_NAME.
		/// </summary>
		public DataSet List()
		{
			const string METHOD_NAME = THIS + ".List()";
			DataSet dstPCS = new DataSet();
			OleDbConnection oconPCS = null;
			OleDbCommand ocmdPCS = null;
			try
			{
				string strSql = "SELECT "
					+ cst_FreightDetailTable.FREIGHTDETAILID_FLD + ","
					+ cst_FreightDetailTable.QUANTITY_FLD + ","
					+ cst_FreightDetailTable.UNITPRICECIF_FLD + ","
					+ cst_FreightDetailTable.AMOUNT_FLD + ","
					+ cst_FreightDetailTable.FREIGHTMASTERID_FLD + ","
					+ cst_FreightDetailTable.PRODUCTID_FLD + ","
					+ cst_FreightDetailTable.BUYINGUMID_FLD + ","
					+ cst_FreightDetailTable.RETURNTOVENDORDETAILID_FLD + ","
					+ cst_FreightDetailTable.VATAMOUNT_FLD
					+ " FROM " + cst_FreightDetailTable.TABLE_NAME;
				oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString);
				ocmdPCS = new
OleDbCommand(strSql, oconPCS); ocmdPCS.Connection.Open(); OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS); odadPCS.Fill(dstPCS,cst_FreightDetailTable.TABLE_NAME); return dstPCS; } catch(OleDbException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if (oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } /// <summary> /// GetFreightDetailByMasterID /// </summary> /// <param name="pintFreightMasterID"></param> /// <returns></returns> /// <author>Trada</author> /// <date>Tuesday, Feb 28 2006</date> public DataSet GetFreightDetailByMasterID(int pintFreightMasterID) { const string METHOD_NAME = THIS + ".GetFreightDetailByMasterID()"; DataSet dstPCS = new DataSet(); OleDbConnection oconPCS =null; OleDbCommand ocmdPCS = null; try { string strSql = String.Empty; strSql= "SELECT " // 12-05-2006 dungla: removed in order to compliable //+ " FD." + cst_FreightDetailTable.LINE_FLD + "," + " FD." + cst_FreightDetailTable.FREIGHTDETAILID_FLD + "," + " P." + ITM_ProductTable.CODE_FLD + "," + " P." + ITM_ProductTable.DESCRIPTION_FLD + "," + " P." + ITM_ProductTable.REVISION_FLD + "," + " UM." + MST_UnitOfMeasureTable.CODE_FLD + Constants.WHITE_SPACE + MST_UnitOfMeasureTable.TABLE_NAME + MST_UnitOfMeasureTable.CODE_FLD + " ," + " FD." + cst_FreightDetailTable.QUANTITY_FLD + "," + " FD." + cst_FreightDetailTable.UNITPRICECIF_FLD + "," + " FD." + cst_FreightDetailTable.IMPORTTAXPERCENT_FLD + "," + " FD." + cst_FreightDetailTable.AMOUNT_FLD + "," + " FD." + cst_FreightDetailTable.FREIGHTMASTERID_FLD + "," + " FD." + cst_FreightDetailTable.PRODUCTID_FLD + "," + " FD." + cst_FreightDetailTable.BUYINGUMID_FLD + "," + " FD." + cst_FreightDetailTable.VATAMOUNT_FLD + ", 0.0 TotalAmount, " + " FD." + cst_FreightDetailTable.ADJUSTMENTID_FLD + "," + " FD." + cst_FreightDetailTable.INVOICEMASTERID_FLD + "," + " FD." 
+ cst_FreightDetailTable.RETURNTOVENDORDETAILID_FLD + "," + " AD." + IV_AdjustmentTable.TRANSNO_FLD + "," + " IVM." + PO_InvoiceMasterTable.INVOICENO_FLD + "," + " 0 PurchaseOrderReceiptID, 0 PurchaseOrderReceiptDetailID, 0 PurchaseOrderMasterID" + " FROM " + cst_FreightDetailTable.TABLE_NAME + " FD " + " INNER JOIN " + ITM_ProductTable.TABLE_NAME + " P ON FD." + cst_FreightDetailTable.PRODUCTID_FLD + " = P." + ITM_ProductTable.PRODUCTID_FLD + " INNER JOIN " + MST_UnitOfMeasureTable.TABLE_NAME + " UM ON FD." + cst_FreightDetailTable.BUYINGUMID_FLD + " = UM." + MST_UnitOfMeasureTable.UNITOFMEASUREID_FLD + " LEFT JOIN " + IV_AdjustmentTable.TABLE_NAME + " AD ON AD." + IV_AdjustmentTable.ADJUSTMENTID_FLD + " = FD." + cst_FreightDetailTable.ADJUSTMENTID_FLD + " LEFT JOIN " + PO_InvoiceMasterTable.TABLE_NAME + " IVM ON IVM." + PO_InvoiceMasterTable.INVOICEMASTERID_FLD + " = FD." + cst_FreightDetailTable.INVOICEMASTERID_FLD + " WHERE FD." + cst_FreightDetailTable.FREIGHTMASTERID_FLD + " = " + pintFreightMasterID.ToString(); // 12-05-2006 dungla: removed in order to compliable //+ " ORDER BY FD." 
+ cst_FreightDetailTable.LINE_FLD; Utils utils = new Utils(); oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); ocmdPCS.Connection.Open(); OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS); odadPCS.Fill(dstPCS,cst_FreightDetailTable.TABLE_NAME); return dstPCS; } catch(OleDbException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if (oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } /// <summary> /// GetReturnToVendorByMasterID /// </summary> /// <param name="pintReturnToVendorMasterID"></param> /// <returns></returns> /// <author>Trada</author> /// <date>Thursday, July 6 2006</date> public DataSet GetReturnToVendorByMasterID(int pintReturnToVendorMasterID) { const string METHOD_NAME = THIS + ".GetReturnToVendorByMasterID()"; DataSet dstPCS = new DataSet(); OleDbConnection oconPCS =null; OleDbCommand ocmdPCS = null; try { string strSql = String.Empty; strSql= "SELECT " + "RD.ProductID, P.Code, P.Description, RD.ReturnToVendorDetailID, " + " P.Revision,RD.BuyingUMID, UM.Code MST_UnitOfMeasureCode, RD.Quantity, " + " RD.UnitPrice UnitPriceCIF,0.0 ImportTax, RD.Amount,RD.VATPercent VAT, RD.VATAmount, RD.TotalAmount,0 FreightMasterID, 0 FreightDetailID, " + "0.0 VAT, 0 AdjustmentID, '' TransNo, '' InvoiceNo, 0 InvoiceMasterID " + " FROM PO_returnTovendorMaster RM " + " inner join PO_ReturnToVendorDetail RD On RD.ReturnToVendorMasterID = RM.ReturnToVendorMasterID " + " left Join ITM_Product P ON P.ProductID = RD.ProductID " + " left join MST_UnitOfMeasure UM ON UM.UnitOfMeasureID = RD.BuyingUMID " + " WHERE RM." 
+ PO_ReturnToVendorMasterTable.RETURNTOVENDORMASTERID_FLD + " = " + pintReturnToVendorMasterID.ToString(); strSql += " select PM.Code, IM.InvoiceNo from PO_ReturnToVendorMaster RM " + " Left join dbo.PO_PurchaseOrderMaster PM ON PM.PurchaseOrderMasterID = RM.PurchaseOrderMasterID " + " Left join dbo.PO_InvoiceMaster IM ON IM.InvoiceMasterID = RM.InvoiceMasterID " + " where " + PO_ReturnToVendorMasterTable.RETURNTOVENDORMASTERID_FLD + " = " + pintReturnToVendorMasterID.ToString(); Utils utils = new Utils(); oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); ocmdPCS.Connection.Open(); OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS); odadPCS.Fill(dstPCS, PO_ReturnToVendorMasterTable.TABLE_NAME); return dstPCS; } catch(OleDbException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if (oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } public void UpdateDataSet(DataSet pData) { const string METHOD_NAME = THIS + ".UpdateDataSet()"; string strSql; OleDbConnection oconPCS =null; OleDbCommandBuilder odcbPCS ; OleDbDataAdapter odadPCS = new OleDbDataAdapter(); try { strSql= "SELECT " + cst_FreightDetailTable.FREIGHTDETAILID_FLD + "," + cst_FreightDetailTable.QUANTITY_FLD + "," + cst_FreightDetailTable.UNITPRICECIF_FLD + "," + cst_FreightDetailTable.AMOUNT_FLD + "," + cst_FreightDetailTable.FREIGHTMASTERID_FLD + "," + cst_FreightDetailTable.PRODUCTID_FLD + "," + cst_FreightDetailTable.BUYINGUMID_FLD + "," + cst_FreightDetailTable.IMPORTTAXPERCENT_FLD + "," + cst_FreightDetailTable.ADJUSTMENTID_FLD + "," + cst_FreightDetailTable.INVOICEMASTERID_FLD + "," // 12-05-2006 dungla: removed in order to compliable //+ cst_FreightDetailTable.LINE_FLD + "," + cst_FreightDetailTable.RETURNTOVENDORDETAILID_FLD + "," + cst_FreightDetailTable.VATAMOUNT_FLD + " 
FROM " + cst_FreightDetailTable.TABLE_NAME; Utils utils = new Utils(); oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); odadPCS.SelectCommand = new OleDbCommand(strSql, oconPCS); odcbPCS = new OleDbCommandBuilder(odadPCS); pData.EnforceConstraints = false; odadPCS.Update(pData,cst_FreightDetailTable.TABLE_NAME); } catch(OleDbException ex) { if(ex.Errors.Count > 1) { if (ex.Errors[1].NativeError == ErrorCode.SQLDUPLICATE_KEYCODE) { throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex); } else if (ex.Errors[1].NativeError == ErrorCode.SQLCASCADE_PREVENT_KEYCODE) { throw new PCSDBException(ErrorCode.CASCADE_DELETE_PREVENT, METHOD_NAME, ex); } } else { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } } catch(InvalidOperationException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if (oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } /// <summary> /// List all freight amount in period of time /// </summary> /// <param name="pintCCNID">CCN</param> /// <param name="pdtmFromDate">From Date</param> /// <param name="pdtmToDate">To Date</param> /// <returns></returns> public DataTable ListAll(int pintCCNID, DateTime pdtmFromDate, DateTime pdtmToDate) { const string METHOD_NAME = THIS + ".ListAll()"; OleDbConnection oconPCS =null; OleDbCommand ocmdPCS = null; try { string strSql = String.Empty; strSql= "SELECT (cst_FreightDetail.Amount * cst_FreightMaster.ExchangeRate) AS Amount," + "cst_FreightDetail.ReturnToVendorDetailID," + " cst_FreightDetail.ProductID, cst_FreightMaster.ACPurposeID, cst_FreightMaster.ACObjectID" + " FROM cst_FreightDetail JOIN cst_FreightMaster" + " ON cst_FreightDetail.FreightMasterID = cst_FreightMaster.FreightMasterID" + " WHERE cst_FreightMaster.CCNID = " + pintCCNID + " AND PostDate >= ?" 
+ " AND PostDate <= ?"; Utils utils = new Utils(); oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString); ocmdPCS = new OleDbCommand(strSql, oconPCS); ocmdPCS.Parameters.Add(new OleDbParameter("FromDate", OleDbType.Date)).Value = pdtmFromDate; ocmdPCS.Parameters.Add(new OleDbParameter("ToDate", OleDbType.Date)).Value = pdtmToDate; ocmdPCS.Connection.Open(); DataTable dtbData = new DataTable(); OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS); odadPCS.Fill(dtbData); return dtbData; } catch(OleDbException ex) { throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME,ex); } catch (Exception ex) { throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex); } finally { if (oconPCS!=null) { if (oconPCS.State != ConnectionState.Closed) { oconPCS.Close(); } } } } } }
// DirectXTK MakeSpriteFont tool
//
// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
// PARTICULAR PURPOSE.
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// http://go.microsoft.com/fwlink/?LinkId=248929

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Drawing.Imaging;
using System.Drawing.Text;
using System.Runtime.InteropServices;

namespace MakeSpriteFont
{
    // Uses System.Drawing (aka GDI+) to rasterize TrueType fonts into a series of glyph bitmaps.
    public class TrueTypeImporter : IFontImporter
    {
        // Properties hold the imported font data.
        public IEnumerable<Glyph> Glyphs { get; private set; }

        public float LineSpacing { get; private set; }

        // Size of the temp surface used for GDI+ rasterization.
        const int MaxGlyphSize = 1024;

        // Entry point: rasterizes one Glyph per requested character and records
        // the font's line spacing. All GDI+ objects are scoped to this call.
        public void Import(CommandLineOptions options)
        {
            // Create a bunch of GDI+ objects.
            using (Font font = CreateFont(options))
            using (Brush brush = new SolidBrush(Color.White))
            using (StringFormat stringFormat = new StringFormat(StringFormatFlags.NoFontFallback))
            using (Bitmap bitmap = new Bitmap(MaxGlyphSize, MaxGlyphSize, PixelFormat.Format32bppArgb))
            using (Graphics graphics = Graphics.FromImage(bitmap))
            {
                // "Sharp" mode disables subpixel positioning for crisper output.
                graphics.PixelOffsetMode = options.Sharp ? PixelOffsetMode.None : PixelOffsetMode.HighQuality;
                graphics.InterpolationMode = InterpolationMode.HighQualityBicubic;
                graphics.TextRenderingHint = TextRenderingHint.AntiAliasGridFit;

                // Which characters do we want to include?
                var characters = CharacterRegion.Flatten(options.CharacterRegions);

                var glyphList = new List<Glyph>();

                // Rasterize each character in turn.
                foreach (char character in characters)
                {
                    Glyph glyph = ImportGlyph(character, font, brush, stringFormat, bitmap, graphics);

                    glyphList.Add(glyph);
                }

                Glyphs = glyphList;

                // Store the font height.
                LineSpacing = font.GetHeight();
            }
        }


        // Attempts to instantiate the requested GDI+ font object.
        static Font CreateFont(CommandLineOptions options)
        {
            Font font = new Font(options.SourceFont, PointsToPixels(options.FontSize), options.FontStyle, GraphicsUnit.Pixel);

            try
            {
                // The font constructor automatically substitutes fonts if it can't find the one requested.
                // But we prefer the caller to know if anything is wrong with their data. A simple string compare
                // isn't sufficient because some fonts (eg. MS Mincho) change names depending on the locale.

                // Early out: in most cases the name will match the current or invariant culture.
                if (options.SourceFont.Equals(font.FontFamily.GetName(CultureInfo.CurrentCulture.LCID), StringComparison.OrdinalIgnoreCase) ||
                    options.SourceFont.Equals(font.FontFamily.GetName(CultureInfo.InvariantCulture.LCID), StringComparison.OrdinalIgnoreCase))
                {
                    return font;
                }

                // Check the font name in every culture.
                foreach (CultureInfo culture in CultureInfo.GetCultures(CultureTypes.SpecificCultures))
                {
                    if (options.SourceFont.Equals(font.FontFamily.GetName(culture.LCID), StringComparison.OrdinalIgnoreCase))
                    {
                        return font;
                    }
                }

                // A font substitution must have occurred.
                throw new Exception(string.Format("Can't find font '{0}'.", options.SourceFont));
            }
            catch
            {
                // Creation failed validation: release the GDI font before rethrowing.
                font.Dispose();
                throw;
            }
        }


        // Converts a font size from points to pixels. Can't just let GDI+ do this for us,
        // because we want identical results on every machine regardless of system DPI settings.
        static float PointsToPixels(float points)
        {
            return points * 96 / 72;
        }


        // Rasterizes a single character glyph into its own bitmap, including
        // generous padding so overhangs (negative ABC spacing) are captured.
        static Glyph ImportGlyph(char character, Font font, Brush brush, StringFormat stringFormat, Bitmap bitmap, Graphics graphics)
        {
            string characterString = character.ToString();

            // Measure the size of this character.
            SizeF size = graphics.MeasureString(characterString, font, Point.Empty, stringFormat);

            int characterWidth = (int)Math.Ceiling(size.Width);
            int characterHeight = (int)Math.Ceiling(size.Height);

            // Pad to make sure we capture any overhangs (negative ABC spacing, etc.)
            int padWidth = characterWidth;
            int padHeight = characterHeight / 2;

            int bitmapWidth = characterWidth + padWidth * 2;
            int bitmapHeight = characterHeight + padHeight * 2;

            if (bitmapWidth > MaxGlyphSize || bitmapHeight > MaxGlyphSize)
                throw new Exception("Excessively large glyph won't fit in my lazily implemented fixed size temp surface.");

            // Render the character.
            graphics.Clear(Color.Black);
            graphics.DrawString(characterString, font, brush, padWidth, padHeight, stringFormat);
            graphics.Flush();

            // Clone the newly rendered image.
            Bitmap glyphBitmap = bitmap.Clone(new Rectangle(0, 0, bitmapWidth, bitmapHeight), PixelFormat.Format32bppArgb);

            // Text was drawn white-on-black; reinterpret brightness as alpha.
            BitmapUtils.ConvertGreyToAlpha(glyphBitmap);

            // Query its ABC spacing.
            float? abc = GetCharacterWidth(character, font, graphics);

            // Construct the output Glyph object.
            return new Glyph(character, glyphBitmap)
            {
                XOffset = -padWidth,

                // If ABC spacing is unavailable, fall back to the padding alone.
                XAdvance = abc.HasValue ? padWidth - bitmapWidth + abc.Value : -padWidth,

                YOffset = -padHeight,
            };
        }


        // Queries ABC spacing for the specified character. Returns null when the
        // GDI call fails (e.g. for fonts without ABC metrics).
        static float? GetCharacterWidth(char character, Font font, Graphics graphics)
        {
            // Look up the native device context and font handles.
            IntPtr hdc = graphics.GetHdc();

            try
            {
                IntPtr hFont = font.ToHfont();

                try
                {
                    // Select our font into the DC.
                    IntPtr oldFont = NativeMethods.SelectObject(hdc, hFont);

                    try
                    {
                        // Query the character spacing.
                        var result = new NativeMethods.ABCFloat[1];

                        if (NativeMethods.GetCharABCWidthsFloat(hdc, character, character, result))
                        {
                            return result[0].A +
                                   result[0].B +
                                   result[0].C;
                        }
                        else
                        {
                            return null;
                        }
                    }
                    finally
                    {
                        // Restore the previously selected font before freeing ours.
                        NativeMethods.SelectObject(hdc, oldFont);
                    }
                }
                finally
                {
                    NativeMethods.DeleteObject(hFont);
                }
            }
            finally
            {
                graphics.ReleaseHdc(hdc);
            }
        }


        // Interop to the native GDI GetCharABCWidthsFloat method.
        static class NativeMethods
        {
            [DllImport("gdi32.dll")]
            public static extern IntPtr SelectObject(IntPtr hdc, IntPtr hObject);

            [DllImport("gdi32.dll")]
            public static extern bool DeleteObject(IntPtr hObject);

            [DllImport("gdi32.dll")]
            public static extern bool GetCharABCWidthsFloat(IntPtr hdc, uint iFirstChar, uint iLastChar, [Out] ABCFloat[] lpABCF);

            // Native ABC spacing: A = leading bearing, B = glyph width, C = trailing bearing.
            [StructLayout(LayoutKind.Sequential)]
            public struct ABCFloat
            {
                public float A;
                public float B;
                public float C;
            }
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CSharp.Completion.KeywordRecommenders;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Test.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.Recommendations
{
    // Verifies where the 'var' keyword is (and is not) offered by completion.
    // '$$' in the test markup marks the caret position handed to the recommender.
    // NOTE(review): verbatim test strings below originally spanned several lines;
    // line breaks inside them could not be recovered from this copy — confirm
    // against the upstream file before relying on exact string content.
    public class VarKeywordRecommenderTests : RecommenderTests
    {
        private readonly VarKeywordRecommender _recommender = new VarKeywordRecommender();

        // Wires the shared RecommenderTests harness to this specific recommender.
        public VarKeywordRecommenderTests()
        {
            this.keywordText = "var";
            this.RecommendKeywordsAsync = (position, context) => _recommender.RecommendKeywordsAsync(position, context, CancellationToken.None);
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestAtRoot_Interactive()
        {
            await VerifyKeywordAsync(SourceCodeKind.Script, @"$$");
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestAfterClass_Interactive()
        {
            await VerifyKeywordAsync(SourceCodeKind.Script, @"class C { } $$");
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestAfterGlobalStatement_Interactive()
        {
            await VerifyKeywordAsync(SourceCodeKind.Script, @"System.Console.WriteLine(); $$");
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestAfterGlobalVariableDeclaration_Interactive()
        {
            await VerifyKeywordAsync(SourceCodeKind.Script, @"int i = 0; $$");
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotInUsingAlias()
        {
            await VerifyAbsenceAsync(
@"using Foo = $$");
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotAfterStackAlloc()
        {
            await VerifyAbsenceAsync(
@"class C { int* foo = stackalloc $$");
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotInFixedStatement()
        {
            await VerifyAbsenceAsync(AddInsideMethod(
@"fixed ($$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotInDelegateReturnType()
        {
            await VerifyAbsenceAsync(
@"public delegate $$");
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestInCastType()
        {
            // Could be a deconstruction
            await VerifyKeywordAsync(AddInsideMethod(
@"var str = (($$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestInCastType2()
        {
            // Could be a deconstruction
            await VerifyKeywordAsync(AddInsideMethod(
@"var str = (($$)items) as string;"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestEmptyStatement()
        {
            await VerifyKeywordAsync(AddInsideMethod(
@"$$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestBeforeStatement()
        {
            await VerifyKeywordAsync(AddInsideMethod(
@"$$ return true;"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestAfterStatement()
        {
            await VerifyKeywordAsync(AddInsideMethod(
@"return true; $$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestAfterBlock()
        {
            await VerifyKeywordAsync(AddInsideMethod(
@"if (true) { } $$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotAfterLock()
        {
            await VerifyAbsenceAsync(AddInsideMethod(
@"lock $$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotAfterLock2()
        {
            await VerifyAbsenceAsync(AddInsideMethod(
@"lock ($$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotAfterLock3()
        {
            await VerifyAbsenceAsync(AddInsideMethod(
@"lock (l$$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotInClass()
        {
            // 'var' is not valid as a field type at class level.
            await VerifyAbsenceAsync(@"class C { $$ }");
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestInFor()
        {
            await VerifyKeywordAsync(AddInsideMethod(
@"for ($$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotInFor()
        {
            await VerifyAbsenceAsync(AddInsideMethod(
@"for (var $$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestInFor2()
        {
            await VerifyKeywordAsync(AddInsideMethod(
@"for ($$;"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestInFor3()
        {
            await VerifyKeywordAsync(AddInsideMethod(
@"for ($$;;"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotAfterVar()
        {
            await VerifyAbsenceAsync(AddInsideMethod(
@"var $$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestInForEach()
        {
            await VerifyKeywordAsync(AddInsideMethod(
@"foreach ($$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotInForEach()
        {
            await VerifyAbsenceAsync(AddInsideMethod(
@"foreach (var $$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestInUsing()
        {
            await VerifyKeywordAsync(AddInsideMethod(
@"using ($$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotInUsing()
        {
            await VerifyAbsenceAsync(AddInsideMethod(
@"using (var $$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestAfterConstLocal()
        {
            await VerifyKeywordAsync(AddInsideMethod(
@"const $$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestNotAfterConstField()
        {
            await VerifyAbsenceAsync(
@"class C { const $$");
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        [WorkItem(12121, "https://github.com/dotnet/roslyn/issues/12121")]
        public async Task TestAfterOutKeywordInArgument()
        {
            // Out-variable declarations may use 'var' at the call site...
            await VerifyKeywordAsync(AddInsideMethod(
@"M(out $$"));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        [WorkItem(12121, "https://github.com/dotnet/roslyn/issues/12121")]
        public async Task TestAfterOutKeywordInParameter()
        {
            // ...but not in a parameter declaration.
            await VerifyAbsenceAsync(
@"class C { void M1(out $$");
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestVarPatternInSwitch()
        {
            await VerifyKeywordAsync(AddInsideMethod(
@"switch(o) { case $$ } "));
        }

        [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
        public async Task TestVarPatternInIs()
        {
            await VerifyKeywordAsync(AddInsideMethod("var b = o is $$ "));
        }
    }
}
using System;
using NUnit.Framework;
using OpenQA.Selenium.Environment;

namespace OpenQA.Selenium
{
    // End-to-end keyboard-input tests driven against the "javascriptPage" test
    // fixture page: 'keyReporter' echoes typed text, 'result' logs key events.
    // NOTE(review): this copy of the file is truncated — the final test method
    // (ShiftSelectionDeletes) continues beyond the end of this chunk.
    [TestFixture]
    public class TypingTest : DriverTestFixture
    {
        [Test]
        public void ShouldFireKeyPressEvents()
        {
            driver.Url = javascriptPage;

            IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
            keyReporter.SendKeys("a");

            IWebElement result = driver.FindElement(By.Id("result"));
            string text = result.Text;
            Assert.That(text, Does.Contain("press:"));
        }

        [Test]
        public void ShouldFireKeyDownEvents()
        {
            driver.Url = javascriptPage;

            IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
            keyReporter.SendKeys("I");

            IWebElement result = driver.FindElement(By.Id("result"));
            string text = result.Text;
            Assert.That(text, Does.Contain("down:"));
        }

        [Test]
        public void ShouldFireKeyUpEvents()
        {
            driver.Url = javascriptPage;

            IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
            keyReporter.SendKeys("a");

            IWebElement result = driver.FindElement(By.Id("result"));
            string text = result.Text;
            Assert.That(text, Does.Contain("up:"));
        }

        [Test]
        public void ShouldTypeLowerCaseLetters()
        {
            driver.Url = javascriptPage;

            IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
            keyReporter.SendKeys("abc def");

            Assert.AreEqual("abc def", keyReporter.GetAttribute("value"));
        }

        [Test]
        public void ShouldBeAbleToTypeCapitalLetters()
        {
            driver.Url = javascriptPage;

            IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
            keyReporter.SendKeys("ABC DEF");

            Assert.AreEqual("ABC DEF", keyReporter.GetAttribute("value"));
        }

        [Test]
        public void ShouldBeAbleToTypeQuoteMarks()
        {
            driver.Url = javascriptPage;

            IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
            keyReporter.SendKeys("\"");

            Assert.AreEqual("\"", keyReporter.GetAttribute("value"));
        }

        [Test]
        public void ShouldBeAbleToTypeTheAtCharacter()
        {
            // simon: I tend to use a US/UK or AUS keyboard layout with English
            // as my primary language. There are consistent reports that we're
            // not handling i18nised keyboards properly. This test exposes this
            // in a lightweight manner when my keyboard is set to the DE mapping
            // and we're using IE.
            driver.Url = javascriptPage;

            IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
            keyReporter.SendKeys("@");

            Assert.AreEqual("@", keyReporter.GetAttribute("value"));
        }

        [Test]
        public void ShouldBeAbleToMixUpperAndLowerCaseLetters()
        {
            driver.Url = javascriptPage;

            IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
            keyReporter.SendKeys("me@eXample.com");

            Assert.AreEqual("me@eXample.com", keyReporter.GetAttribute("value"));
        }

        [Test]
        public void ArrowKeysShouldNotBePrintable()
        {
            driver.Url = javascriptPage;

            IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
            keyReporter.SendKeys(Keys.ArrowLeft);

            Assert.AreEqual(string.Empty, keyReporter.GetAttribute("value"));
        }

        [Test]
        public void ShouldBeAbleToUseArrowKeys()
        {
            driver.Url = javascriptPage;

            IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
            // Left-arrow moves the caret back one, so the 's' lands before 't'.
            keyReporter.SendKeys("Tet" + Keys.ArrowLeft + "s");

            Assert.AreEqual("Test", keyReporter.GetAttribute("value"));
        }

        [Test]
        public void WillSimulateAKeyUpWhenEnteringTextIntoInputElements()
        {
            driver.Url = javascriptPage;
            IWebElement element = driver.FindElement(By.Id("keyUp"));
            element.SendKeys("I like cheese");

            IWebElement result = driver.FindElement(By.Id("result"));
            Assert.AreEqual("I like cheese", result.Text);
        }

        [Test]
        public void WillSimulateAKeyDownWhenEnteringTextIntoInputElements()
        {
            driver.Url = javascriptPage;
            IWebElement element = driver.FindElement(By.Id("keyDown"));
            element.SendKeys("I like cheese");

            IWebElement result = driver.FindElement(By.Id("result"));

            // Because the key down gets the result before the input element is
            // filled, we're a letter short here
            Assert.AreEqual("I like chees", result.Text);
        }

        [Test]
        public void WillSimulateAKeyPressWhenEnteringTextIntoInputElements()
        {
            driver.Url = javascriptPage;
            IWebElement element = driver.FindElement(By.Id("keyPress"));
            element.SendKeys("I like cheese");

            IWebElement result = driver.FindElement(By.Id("result"));

            // Because the key down gets the result before the input element is
            // filled, we're a letter short here
            Assert.AreEqual("I like chees", result.Text);
        }

        [Test]
        public void WillSimulateAKeyUpWhenEnteringTextIntoTextAreas()
        {
            driver.Url = javascriptPage;
            IWebElement element = driver.FindElement(By.Id("keyUpArea"));
            element.SendKeys("I like cheese");

            IWebElement result = driver.FindElement(By.Id("result"));
            Assert.AreEqual("I like cheese", result.Text);
        }

        [Test]
        public void WillSimulateAKeyDownWhenEnteringTextIntoTextAreas()
        {
            driver.Url = javascriptPage;
            IWebElement element = driver.FindElement(By.Id("keyDownArea"));
            element.SendKeys("I like cheese");

            IWebElement result = driver.FindElement(By.Id("result"));

            // Because the key down gets the result before the input element is
            // filled, we're a letter short here
            Assert.AreEqual("I like chees", result.Text);
        }

        [Test]
        public void WillSimulateAKeyPressWhenEnteringTextIntoTextAreas()
        {
            driver.Url = javascriptPage;
            IWebElement element = driver.FindElement(By.Id("keyPressArea"));
            element.SendKeys("I like cheese");

            IWebElement result = driver.FindElement(By.Id("result"));

            // Because the key down gets the result before the input element is
            // filled, we're a letter short here
            Assert.AreEqual("I like chees", result.Text);
        }

        [Test]
        public void ShouldFireFocusKeyEventsInTheRightOrder()
        {
            driver.Url = javascriptPage;

            IWebElement result = driver.FindElement(By.Id("result"));
            IWebElement element = driver.FindElement(By.Id("theworks"));

            element.SendKeys("a");
            Assert.AreEqual("focus keydown keypress keyup", result.Text.Trim());
        }

        [Test]
        public void ShouldReportKeyCodeOfArrowKeys()
        {
            driver.Url = javascriptPage;

            IWebElement result = driver.FindElement(By.Id("result"));
            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            // 40/38/37/39 are the DOM keyCodes for down/up/left/right arrows.
            element.SendKeys(Keys.ArrowDown);
            CheckRecordedKeySequence(result, 40);

            element.SendKeys(Keys.ArrowUp);
            CheckRecordedKeySequence(result, 38);

            element.SendKeys(Keys.ArrowLeft);
            CheckRecordedKeySequence(result, 37);

            element.SendKeys(Keys.ArrowRight);
            CheckRecordedKeySequence(result, 39);

            // And leave no rubbish/printable keys in the "keyReporter"
            Assert.AreEqual(string.Empty, element.GetAttribute("value"));
        }

        [Test]
        public void ShouldReportKeyCodeOfArrowKeysUpDownEvents()
        {
            driver.Url = javascriptPage;

            IWebElement result = driver.FindElement(By.Id("result"));
            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            element.SendKeys(Keys.ArrowDown);
            string text = result.Text.Trim();
            Assert.That(text, Does.Contain("down: 40"));
            Assert.That(text, Does.Contain("up: 40"));

            element.SendKeys(Keys.ArrowUp);
            text = result.Text.Trim();
            Assert.That(text, Does.Contain("down: 38"));
            Assert.That(text, Does.Contain("up: 38"));

            element.SendKeys(Keys.ArrowLeft);
            text = result.Text.Trim();
            Assert.That(text, Does.Contain("down: 37"));
            Assert.That(text, Does.Contain("up: 37"));

            element.SendKeys(Keys.ArrowRight);
            text = result.Text.Trim();
            Assert.That(text, Does.Contain("down: 39"));
            Assert.That(text, Does.Contain("up: 39"));

            // And leave no rubbish/printable keys in the "keyReporter"
            Assert.AreEqual(string.Empty, element.GetAttribute("value"));
        }

        [Test]
        public void NumericNonShiftKeys()
        {
            driver.Url = javascriptPage;

            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            string numericLineCharsNonShifted = "`1234567890-=[]\\;,.'/42";
            element.SendKeys(numericLineCharsNonShifted);

            Assert.AreEqual(numericLineCharsNonShifted, element.GetAttribute("value"));
        }

        [Test]
        public void NumericShiftKeys()
        {
            driver.Url = javascriptPage;

            IWebElement result = driver.FindElement(By.Id("result"));
            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            string numericShiftsEtc = "~!@#$%^&*()_+{}:\"<>?|END~";
            element.SendKeys(numericShiftsEtc);

            Assert.AreEqual(numericShiftsEtc, element.GetAttribute("value"));

            // keyCode 16 is Shift — shifted characters must emit a Shift key-up.
            string text = result.Text.Trim();
            Assert.That(text, Does.Contain(" up: 16"));
        }

        [Test]
        public void LowerCaseAlphaKeys()
        {
            driver.Url = javascriptPage;

            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            String lowerAlphas = "abcdefghijklmnopqrstuvwxyz";
            element.SendKeys(lowerAlphas);

            Assert.AreEqual(lowerAlphas, element.GetAttribute("value"));
        }

        [Test]
        public void UppercaseAlphaKeys()
        {
            driver.Url = javascriptPage;

            IWebElement result = driver.FindElement(By.Id("result"));
            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            String upperAlphas = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
            element.SendKeys(upperAlphas);

            Assert.AreEqual(upperAlphas, element.GetAttribute("value"));

            string text = result.Text.Trim();
            Assert.That(text, Does.Contain(" up: 16"));
        }

        [Test]
        public void AllPrintableKeys()
        {
            driver.Url = javascriptPage;

            IWebElement result = driver.FindElement(By.Id("result"));
            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            String allPrintable = "!\"#$%&'()*+,-./0123456789:;<=>?@ ABCDEFGHIJKLMNO" + "PQRSTUVWXYZ [\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
            element.SendKeys(allPrintable);

            Assert.AreEqual(allPrintable, element.GetAttribute("value"));

            string text = result.Text.Trim();
            Assert.That(text, Does.Contain(" up: 16"));
        }

        [Test]
        public void ArrowKeysAndPageUpAndDown()
        {
            driver.Url = javascriptPage;

            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            element.SendKeys("a" + Keys.Left + "b" + Keys.Right + Keys.Up + Keys.Down + Keys.PageUp + Keys.PageDown + "1");
            Assert.AreEqual("ba1", element.GetAttribute("value"));
        }

        [Test]
        public void HomeAndEndAndPageUpAndPageDownKeys()
        {
            // FIXME: macs don't have HOME keys, would PGUP work?
            if (System.Environment.OSVersion.Platform == PlatformID.MacOSX)
            {
                return;
            }

            driver.Url = javascriptPage;

            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            element.SendKeys("abc" + Keys.Home + "0" + Keys.Left + Keys.Right + Keys.PageUp + Keys.PageDown + Keys.End + "1" + Keys.Home + "0" + Keys.PageUp + Keys.End + "111" + Keys.Home + "00");
            Assert.AreEqual("0000abc1111", element.GetAttribute("value"));
        }

        [Test]
        public void DeleteAndBackspaceKeys()
        {
            driver.Url = javascriptPage;

            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            element.SendKeys("abcdefghi");
            Assert.AreEqual("abcdefghi", element.GetAttribute("value"));

            element.SendKeys(Keys.Left + Keys.Left + Keys.Delete);
            Assert.AreEqual("abcdefgi", element.GetAttribute("value"));

            element.SendKeys(Keys.Left + Keys.Left + Keys.Backspace);
            Assert.AreEqual("abcdfgi", element.GetAttribute("value"));
        }

        [Test]
        public void SpecialSpaceKeys()
        {
            driver.Url = javascriptPage;

            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            element.SendKeys("abcd" + Keys.Space + "fgh" + Keys.Space + "ij");
            Assert.AreEqual("abcd fgh ij", element.GetAttribute("value"));
        }

        [Test]
        public void NumberpadKeys()
        {
            driver.Url = javascriptPage;

            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            element.SendKeys("abcd" + Keys.Multiply + Keys.Subtract + Keys.Add + Keys.Decimal + Keys.Separator + Keys.NumberPad0 + Keys.NumberPad9 + Keys.Add + Keys.Semicolon + Keys.Equal + Keys.Divide + Keys.NumberPad3 + "abcd");
            Assert.AreEqual("abcd*-+.,09+;=/3abcd", element.GetAttribute("value"));
        }

        [Test]
        public void FunctionKeys()
        {
            driver.Url = javascriptPage;

            IWebElement element = driver.FindElement(By.Id("keyReporter"));

            element.SendKeys("FUNCTION" + Keys.F8 + "-KEYS" + Keys.F8);
            element.SendKeys("" + Keys.F8 + "-TOO" + Keys.F8);
            Assert.AreEqual("FUNCTION-KEYS-TOO", element.GetAttribute("value"));
        }

        [Test]
        public void ShiftSelectionDeletes()
        {
            driver.Url = javascriptPage;

            IWebElement element =
driver.FindElement(By.Id("keyReporter")); element.SendKeys("abcd efgh"); Assert.AreEqual(element.GetAttribute("value"), "abcd efgh"); //Could be chord problem element.SendKeys(Keys.Shift + Keys.Left + Keys.Left + Keys.Left); element.SendKeys(Keys.Delete); Assert.AreEqual("abcd e", element.GetAttribute("value")); } [Test] public void ChordControlHomeShiftEndDelete() { // FIXME: macs don't have HOME keys, would PGUP work? if (System.Environment.OSVersion.Platform == PlatformID.MacOSX) { return; } driver.Url = javascriptPage; IWebElement result = driver.FindElement(By.Id("result")); IWebElement element = driver.FindElement(By.Id("keyReporter")); element.SendKeys("!\"#$%&'()*+,-./0123456789:;<=>?@ ABCDEFG"); element.SendKeys(Keys.Home); element.SendKeys("" + Keys.Shift + Keys.End + Keys.Delete); Assert.AreEqual(string.Empty, element.GetAttribute("value")); string text = result.Text.Trim(); Assert.That(text, Does.Contain(" up: 16")); } [Test] public void ChordReveseShiftHomeSelectionDeletes() { // FIXME: macs don't have HOME keys, would PGUP work? if (System.Environment.OSVersion.Platform == PlatformID.MacOSX) { return; } driver.Url = javascriptPage; IWebElement result = driver.FindElement(By.Id("result")); IWebElement element = driver.FindElement(By.Id("keyReporter")); element.SendKeys("done" + Keys.Home); Assert.AreEqual("done", element.GetAttribute("value")); //Sending chords element.SendKeys("" + Keys.Shift + "ALL " + Keys.Home); Assert.AreEqual("ALL done", element.GetAttribute("value")); element.SendKeys(Keys.Delete); Assert.AreEqual("done", element.GetAttribute("value"), "done"); element.SendKeys("" + Keys.End + Keys.Shift + Keys.Home); Assert.AreEqual("done", element.GetAttribute("value")); // Note: trailing SHIFT up here string text = result.Text.Trim(); Assert.That(text, Does.Contain(" up: 16"), "Text should contain ' up: 16'. 
Actual text: {0}", text); element.SendKeys("" + Keys.Delete); Assert.AreEqual(string.Empty, element.GetAttribute("value")); } // control-x control-v here for cut & paste tests, these work on windows // and linux, but not on the MAC. [Test] public void ChordControlCutAndPaste() { // FIXME: macs don't have HOME keys, would PGUP work? if (System.Environment.OSVersion.Platform == PlatformID.MacOSX) { return; } driver.Url = javascriptPage; IWebElement element = driver.FindElement(By.Id("keyReporter")); IWebElement result = driver.FindElement(By.Id("result")); String paste = "!\"#$%&'()*+,-./0123456789:;<=>?@ ABCDEFG"; element.SendKeys(paste); Assert.AreEqual(paste, element.GetAttribute("value")); //Chords element.SendKeys("" + Keys.Home + Keys.Shift + Keys.End); string text = result.Text.Trim(); Assert.That(text, Does.Contain(" up: 16")); element.SendKeys(Keys.Control + "x"); Assert.AreEqual(string.Empty, element.GetAttribute("value")); element.SendKeys(Keys.Control + "v"); Assert.AreEqual(paste, element.GetAttribute("value")); element.SendKeys("" + Keys.Left + Keys.Left + Keys.Left + Keys.Shift + Keys.End); element.SendKeys(Keys.Control + "x" + "v"); Assert.AreEqual(paste, element.GetAttribute("value")); element.SendKeys(Keys.Home); element.SendKeys(Keys.Control + "v"); element.SendKeys(Keys.Control + "v" + "v"); element.SendKeys(Keys.Control + "v" + "v" + "v"); Assert.AreEqual("EFGEFGEFGEFGEFGEFG" + paste, element.GetAttribute("value")); element.SendKeys("" + Keys.End + Keys.Shift + Keys.Home + Keys.Null + Keys.Delete); Assert.AreEqual(element.GetAttribute("value"), string.Empty); } [Test] public void ShouldTypeIntoInputElementsThatHaveNoTypeAttribute() { driver.Url = formsPage; IWebElement element = driver.FindElement(By.Id("no-type")); element.SendKeys("Should Say Cheese"); Assert.AreEqual("Should Say Cheese", element.GetAttribute("value")); } [Test] public void ShouldNotTypeIntoElementsThatPreventKeyDownEvents() { driver.Url = javascriptPage; IWebElement silent = 
driver.FindElement(By.Name("suppress")); silent.SendKeys("s"); Assert.AreEqual(string.Empty, silent.GetAttribute("value")); } [Test] public void GenerateKeyPressEventEvenWhenElementPreventsDefault() { driver.Url = javascriptPage; IWebElement silent = driver.FindElement(By.Name("suppress")); IWebElement result = driver.FindElement(By.Id("result")); silent.SendKeys("s"); string text = result.Text; } [Test] public void ShouldBeAbleToTypeOnAnEmailInputField() { driver.Url = formsPage; IWebElement email = driver.FindElement(By.Id("email")); email.SendKeys("foobar"); Assert.AreEqual("foobar", email.GetAttribute("value")); } [Test] public void ShouldBeAbleToTypeOnANumberInputField() { driver.Url = formsPage; IWebElement numberElement = driver.FindElement(By.Id("age")); numberElement.SendKeys("33"); Assert.AreEqual("33", numberElement.GetAttribute("value")); } [Test] public void ShouldThrowIllegalArgumentException() { driver.Url = formsPage; IWebElement email = driver.FindElement(By.Id("age")); Assert.That(() => email.SendKeys(null), Throws.InstanceOf<ArgumentNullException>()); } [Test] public void CanSafelyTypeOnElementThatIsRemovedFromTheDomOnKeyPress() { driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("key_tests/remove_on_keypress.html"); IWebElement input = driver.FindElement(By.Id("target")); IWebElement log = driver.FindElement(By.Id("log")); Assert.AreEqual("", log.GetAttribute("value")); input.SendKeys("b"); string expected = "keydown (target)\nkeyup (target)\nkeyup (body)"; Assert.AreEqual(expected, GetValueText(log)); input.SendKeys("a"); // Some drivers (IE, Firefox) do not always generate the final keyup event since the element // is removed from the DOM in response to the keypress (note, this is a product of how events // are generated and does not match actual user behavior). 
expected += "\nkeydown (target)\na pressed; removing"; Assert.That(GetValueText(log), Is.EqualTo(expected).Or.EqualTo(expected + "\nkeyup (body)")); } [Test] public void CanClearNumberInputAfterTypingInvalidInput() { driver.Url = formsPage; IWebElement input = driver.FindElement(By.Id("age")); input.SendKeys("e"); input.Clear(); input.SendKeys("3"); Assert.AreEqual("3", input.GetAttribute("value")); } //------------------------------------------------------------------ // Tests below here are not included in the Java test suite //------------------------------------------------------------------ [Test] [IgnoreBrowser(Browser.Firefox, "Browser does not automatically focus body element in frame")] [IgnoreBrowser(Browser.Opera, "Does not support contentEditable")] public void TypingIntoAnIFrameWithContentEditableOrDesignModeSet() { driver.Url = richTextPage; driver.SwitchTo().Frame("editFrame"); IWebElement element = driver.SwitchTo().ActiveElement(); element.SendKeys("Fishy"); driver.SwitchTo().DefaultContent(); IWebElement trusted = driver.FindElement(By.Id("istrusted")); IWebElement id = driver.FindElement(By.Id("tagId")); Assert.That(trusted.Text, Is.EqualTo("[true]").Or.EqualTo("[n/a]").Or.EqualTo("[]")); Assert.That(id.Text, Is.EqualTo("[frameHtml]").Or.EqualTo("[theBody]")); } [Test] //[IgnoreBrowser(Browser.Chrome, "Driver prepends text in contentEditable")] [IgnoreBrowser(Browser.Firefox, "Browser does not automatically focus body element in frame")] [IgnoreBrowser(Browser.Opera, "Does not support contentEditable")] public void NonPrintableCharactersShouldWorkWithContentEditableOrDesignModeSet() { driver.Url = richTextPage; // not tested on mac // FIXME: macs don't have HOME keys, would PGUP work? 
if (System.Environment.OSVersion.Platform == PlatformID.MacOSX) { return; } driver.SwitchTo().Frame("editFrame"); IWebElement element = driver.SwitchTo().ActiveElement(); //Chords element.SendKeys("Dishy" + Keys.Backspace + Keys.Left + Keys.Left); element.SendKeys(Keys.Left + Keys.Left + "F" + Keys.Delete + Keys.End + "ee!"); Assert.AreEqual(element.Text, "Fishee!"); } [Test] [IgnoreBrowser(Browser.Opera, "Does not support contentEditable")] public void ShouldBeAbleToTypeIntoEmptyContentEditableElement() { driver.Url = readOnlyPage; IWebElement editable = driver.FindElement(By.Id("content-editable")); editable.Clear(); editable.SendKeys("cheese"); // requires focus on OS X Assert.AreEqual("cheese", editable.Text); } [Test] [IgnoreBrowser(Browser.Chrome, "Driver prepends text in contentEditable")] [IgnoreBrowser(Browser.Firefox, "Driver prepends text in contentEditable")] public void ShouldBeAbleToTypeIntoContentEditableElementWithExistingValue() { driver.Url = readOnlyPage; IWebElement editable = driver.FindElement(By.Id("content-editable")); string initialText = editable.Text; editable.SendKeys(", edited"); Assert.AreEqual(initialText + ", edited", editable.Text); } [Test] [NeedsFreshDriver(IsCreatedAfterTest = true)] public void ShouldBeAbleToTypeIntoTinyMCE() { driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("tinymce.html"); driver.SwitchTo().Frame("mce_0_ifr"); IWebElement editable = driver.FindElement(By.Id("tinymce")); editable.Clear(); editable.SendKeys("cheese"); // requires focus on OS X Assert.AreEqual("cheese", editable.Text); } private string GetValueText(IWebElement el) { // Standardize on \n and strip any trailing whitespace. 
return el.GetAttribute("value").Replace("\r\n", "\n").Trim(); } private void CheckRecordedKeySequence(IWebElement element, int expectedKeyCode) { string withKeyPress = string.Format("down: {0} press: {0} up: {0}", expectedKeyCode); string withoutKeyPress = string.Format("down: {0} up: {0}", expectedKeyCode); Assert.That(element.Text.Trim(), Is.AnyOf(withKeyPress, withoutKeyPress)); } } }
using Lucene.Net.Support;
using System;
using ArrayUtil = Lucene.Net.Util.ArrayUtil;

namespace Lucene.Net.Codecs.Compressing
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
    using IndexInput = Lucene.Net.Store.IndexInput;
    using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
    using RamUsageEstimator = Lucene.Net.Util.RamUsageEstimator;
    using SegmentInfo = Lucene.Net.Index.SegmentInfo;

    /// <summary>
    /// Random-access reader for <see cref="CompressingStoredFieldsIndexWriter"/>.
    /// <para/>
    /// @lucene.internal
    /// </summary>
    public sealed class CompressingStoredFieldsIndexReader
#if FEATURE_CLONEABLE
        : System.ICloneable
#endif
    {
        // Undoes the writer's zig-zag encoding: moves the low-order sign bit
        // back to the top, turning a non-negative encoded value into a signed
        // delta.
        internal static long MoveLowOrderBitToSign(long n)
        {
            return (((long)((ulong)n >> 1)) ^ -(n & 1));
        }

        internal readonly int maxDoc;
        // Per block: first docID, file pointer of the first chunk, and the
        // average docs-per-chunk / bytes-per-chunk used as the prediction
        // baseline for the per-chunk deltas below.
        internal readonly int[] docBases;
        internal readonly long[] startPointers;
        internal readonly int[] avgChunkDocs;
        internal readonly long[] avgChunkSizes;
        internal readonly PackedInt32s.Reader[] docBasesDeltas; // delta from the avg
        internal readonly PackedInt32s.Reader[] startPointersDeltas; // delta from the avg

        // It is the responsibility of the caller to close fieldsIndexIn after this constructor
        // has been called
        internal CompressingStoredFieldsIndexReader(IndexInput fieldsIndexIn, SegmentInfo si)
        {
            maxDoc = si.DocCount;
            // Temporary growable arrays; trimmed to blockCount at the end.
            int[] docBases = new int[16];
            long[] startPointers = new long[16];
            int[] avgChunkDocs = new int[16];
            long[] avgChunkSizes = new long[16];
            PackedInt32s.Reader[] docBasesDeltas = new PackedInt32s.Reader[16];
            PackedInt32s.Reader[] startPointersDeltas = new PackedInt32s.Reader[16];

            int packedIntsVersion = fieldsIndexIn.ReadVInt32();

            int blockCount = 0;

            // Read blocks until a sentinel block with numChunks == 0.
            // NOTE: the read order below must exactly mirror the writer's
            // output order; do not reorder these calls.
            for (; ; )
            {
                int numChunks = fieldsIndexIn.ReadVInt32();
                if (numChunks == 0)
                {
                    break;
                }
                if (blockCount == docBases.Length)
                {
                    // Grow all six parallel arrays together.
                    int newSize = ArrayUtil.Oversize(blockCount + 1, 8);
                    docBases = Arrays.CopyOf(docBases, newSize);
                    startPointers = Arrays.CopyOf(startPointers, newSize);
                    avgChunkDocs = Arrays.CopyOf(avgChunkDocs, newSize);
                    avgChunkSizes = Arrays.CopyOf(avgChunkSizes, newSize);
                    docBasesDeltas = Arrays.CopyOf(docBasesDeltas, newSize);
                    startPointersDeltas = Arrays.CopyOf(startPointersDeltas, newSize);
                }

                // doc bases
                docBases[blockCount] = fieldsIndexIn.ReadVInt32();
                avgChunkDocs[blockCount] = fieldsIndexIn.ReadVInt32();
                int bitsPerDocBase = fieldsIndexIn.ReadVInt32();
                if (bitsPerDocBase > 32)
                {
                    throw new CorruptIndexException("Corrupted bitsPerDocBase (resource=" + fieldsIndexIn + ")");
                }
                docBasesDeltas[blockCount] = PackedInt32s.GetReaderNoHeader(fieldsIndexIn, PackedInt32s.Format.PACKED, packedIntsVersion, numChunks, bitsPerDocBase);

                // start pointers
                startPointers[blockCount] = fieldsIndexIn.ReadVInt64();
                avgChunkSizes[blockCount] = fieldsIndexIn.ReadVInt64();
                int bitsPerStartPointer = fieldsIndexIn.ReadVInt32();
                if (bitsPerStartPointer > 64)
                {
                    throw new CorruptIndexException("Corrupted bitsPerStartPointer (resource=" + fieldsIndexIn + ")");
                }
                startPointersDeltas[blockCount] = PackedInt32s.GetReaderNoHeader(fieldsIndexIn, PackedInt32s.Format.PACKED, packedIntsVersion, numChunks, bitsPerStartPointer);

                ++blockCount;
            }

            // Trim the parallel arrays to the number of blocks actually read.
            this.docBases = Arrays.CopyOf(docBases, blockCount);
            this.startPointers = Arrays.CopyOf(startPointers, blockCount);
            this.avgChunkDocs = Arrays.CopyOf(avgChunkDocs, blockCount);
            this.avgChunkSizes = Arrays.CopyOf(avgChunkSizes, blockCount);
            this.docBasesDeltas = Arrays.CopyOf(docBasesDeltas, blockCount);
            this.startPointersDeltas = Arrays.CopyOf(startPointersDeltas, blockCount);
        }

        // Binary search over block doc bases. On an exact hit returns that
        // block; otherwise falls through with hi = (index of the last block
        // whose base is <= docID), which is the block containing docID.
        private int Block(int docID)
        {
            int lo = 0, hi = docBases.Length - 1;
            while (lo <= hi)
            {
                int mid = (int)((uint)(lo + hi) >> 1); // unsigned shift avoids (lo+hi) overflow
                int midValue = docBases[mid];
                if (midValue == docID)
                {
                    return mid;
                }
                else if (midValue < docID)
                {
                    lo = mid + 1;
                }
                else
                {
                    hi = mid - 1;
                }
            }
            return hi;
        }

        // Doc base of a chunk relative to its block: average prediction plus
        // the zig-zag-decoded stored delta.
        private int RelativeDocBase(int block, int relativeChunk)
        {
            int expected = avgChunkDocs[block] * relativeChunk;
            long delta = MoveLowOrderBitToSign(docBasesDeltas[block].Get(relativeChunk));
            return expected + (int)delta;
        }

        // File-pointer offset of a chunk relative to its block's start
        // pointer, reconstructed the same way as RelativeDocBase.
        private long RelativeStartPointer(int block, int relativeChunk)
        {
            long expected = avgChunkSizes[block] * relativeChunk;
            long delta = MoveLowOrderBitToSign(startPointersDeltas[block].Get(relativeChunk));
            return expected + delta;
        }

        // Binary search for the chunk (within a block) containing the given
        // block-relative docID; same hi fall-through convention as Block().
        private int RelativeChunk(int block, int relativeDoc)
        {
            int lo = 0, hi = docBasesDeltas[block].Count - 1;
            while (lo <= hi)
            {
                int mid = (int)((uint)(lo + hi) >> 1);
                int midValue = RelativeDocBase(block, mid);
                if (midValue == relativeDoc)
                {
                    return mid;
                }
                else if (midValue < relativeDoc)
                {
                    lo = mid + 1;
                }
                else
                {
                    hi = mid - 1;
                }
            }
            return hi;
        }

        // Returns the file pointer of the chunk that contains docID.
        // Throws ArgumentException when docID is outside [0, maxDoc).
        internal long GetStartPointer(int docID)
        {
            if (docID < 0 || docID >= maxDoc)
            {
                throw new ArgumentException("docID out of range [0-" + maxDoc + "]: " + docID);
            }
            int block = Block(docID);
            int relativeChunk = RelativeChunk(block, docID - docBases[block]);
            return startPointers[block] + RelativeStartPointer(block, relativeChunk);
        }

        // The reader is immutable after construction, so cloning can safely
        // return the same instance.
        public object Clone()
        {
            return this;
        }

        // Approximate heap usage: the packed readers plus the five primitive
        // arrays (the docBasesDeltas/startPointersDeltas array objects
        // themselves are not counted).
        internal long RamBytesUsed()
        {
            long res = 0;

            foreach (PackedInt32s.Reader r in docBasesDeltas)
            {
                res += r.RamBytesUsed();
            }
            foreach (PackedInt32s.Reader r in startPointersDeltas)
            {
                res += r.RamBytesUsed();
            }

            res += RamUsageEstimator.SizeOf(docBases);
            res += RamUsageEstimator.SizeOf(startPointers);
            res += RamUsageEstimator.SizeOf(avgChunkDocs);
            res += RamUsageEstimator.SizeOf(avgChunkSizes);

            return res;
        }
    }
}
/*
 * Copyright (c) Contributors, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSimulator Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Specialized;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Text;
using HttpServer;
using HttpServer.FormDecoders;
using NUnit.Framework;
using OpenSim.Framework.Servers.HttpServer;

namespace OpenSim.Framework.Servers.Tests
{
    /// <summary>
    /// Unit tests for OSHttpRequest/OSHttpResponse, using hand-rolled stub
    /// implementations of the HttpServer library interfaces so no real
    /// network traffic is needed.
    /// </summary>
    [TestFixture]
    public class OSHttpTests
    {
        // we need an IHttpClientContext for our tests
        /// <summary>
        /// Minimal IHttpClientContext stub: records only whether the
        /// connection is "secured"; all send/respond operations are no-ops.
        /// </summary>
        public class TestHttpClientContext: IHttpClientContext
        {
            private bool _secured;
            public bool IsSecured
            {
                get { return _secured; }
            }
            public bool Secured
            {
                get { return _secured; }
            }

            public TestHttpClientContext(bool secured)
            {
                _secured = secured;
            }

            // No-op I/O surface required by the interface.
            public void Disconnect(SocketError error) {}
            public void Respond(string httpVersion, HttpStatusCode statusCode, string reason, string body) {}
            public void Respond(string httpVersion, HttpStatusCode statusCode, string reason) {}
            public void Respond(string body) {}
            public void Send(byte[] buffer) {}
            public void Send(byte[] buffer, int offset, int size) {}
            public void Respond(string httpVersion, HttpStatusCode statusCode, string reason, string body, string contentType) {}
            public void Close() { }
            public bool EndWhenDone { get { return false;} set { return;}}

            // Empty-delegate initialization avoids null checks when raising.
            public event EventHandler<DisconnectedEventArgs> Disconnected = delegate { };
            /// <summary>
            /// A request have been received in the context.
            /// </summary>
            public event EventHandler<RequestEventArgs> RequestReceived = delegate { };
        }

        /// <summary>
        /// IHttpRequest stub with settable headers/method/URI so tests can
        /// fabricate incoming requests without parsing raw HTTP.
        /// </summary>
        public class TestHttpRequest: IHttpRequest
        {
            private string _uriPath;
            public bool BodyIsComplete
            {
                get { return true; }
            }
            public string[] AcceptTypes
            {
                get {return _acceptTypes; }
            }
            private string[] _acceptTypes;
            public Stream Body
            {
                get { return _body; }
                set { _body = value;}
            }
            private Stream _body;
            public ConnectionType Connection
            {
                get { return _connection; }
                set { _connection = value; }
            }
            private ConnectionType _connection;
            public int ContentLength
            {
                get { return _contentLength; }
                set { _contentLength = value; }
            }
            private int _contentLength;
            public NameValueCollection Headers
            {
                get { return _headers; }
            }
            private NameValueCollection _headers = new NameValueCollection();
            public string HttpVersion
            {
                get { return _httpVersion; }
                set { _httpVersion = value; }
            }
            private string _httpVersion = null;
            public string Method
            {
                get { return _method; }
                set { _method = value; }
            }
            private string _method = null;
            public HttpInput QueryString
            {
                get { return _queryString; }
            }
            private HttpInput _queryString = null;
            public Uri Uri
            {
                get { return _uri; }
                set { _uri = value; }
            }
            private Uri _uri = null;
            public string[] UriParts
            {
                get { return _uri.Segments; }
            }
            public HttpParam Param
            {
                get { return null; }
            }
            public HttpForm Form
            {
                get { return null; }
            }
            public bool IsAjax
            {
                get { return false; }
            }
            public RequestCookies Cookies
            {
                get { return null; }
            }

            public TestHttpRequest() {}

            // Convenience constructor: packs the connection metadata into the
            // header collection the way the real parser would.
            public TestHttpRequest(string contentEncoding, string contentType, string userAgent,
                                   string remoteAddr, string remotePort, string[] acceptTypes,
                                   ConnectionType connectionType, int contentLength, Uri uri)
            {
                _headers["content-encoding"] = contentEncoding;
                _headers["content-type"] = contentType;
                _headers["user-agent"] = userAgent;
                _headers["remote_addr"] = remoteAddr;
                _headers["remote_port"] = remotePort;

                _acceptTypes = acceptTypes;
                _connection = connectionType;
                _contentLength = contentLength;
                _uri = uri;
            }

            public void DecodeBody(FormDecoderProvider providers) {}
            public void SetCookies(RequestCookies cookies) {}
            public void AddHeader(string name, string value)
            {
                _headers.Add(name, value);
            }
            public int AddToBody(byte[] bytes, int offset, int length)
            {
                return 0;
            }
            public void Clear() {}

            // NOTE(review): only accept types, connection, content length,
            // URI and headers are copied — _body/_method/_httpVersion are
            // not. Presumably sufficient for these tests; confirm before
            // reusing this clone elsewhere.
            public object Clone()
            {
                TestHttpRequest clone = new TestHttpRequest();
                clone._acceptTypes = _acceptTypes;
                clone._connection = _connection;
                clone._contentLength = _contentLength;
                clone._uri = _uri;
                clone._headers = new NameValueCollection(_headers);

                return clone;
            }
            public IHttpResponse CreateResponse(IHttpClientContext context)
            {
                return new HttpResponse(context, this);
            }
            /// <summary>
            /// Path and query (will be merged with the host header) and put in Uri
            /// </summary>
            /// <see cref="Uri"/>
            public string UriPath
            {
                get { return _uriPath; }
                set { _uriPath = value; }
            }
        }

        /// <summary>
        /// IHttpResponse stub that tracks only whether headers/body were
        /// sent, enforcing the send-once protocol of the real response.
        /// </summary>
        public class TestHttpResponse: IHttpResponse
        {
            public Stream Body
            {
                get { return _body; }
                set { _body = value; }
            }
            private Stream _body;
            public string ProtocolVersion
            {
                get { return _protocolVersion; }
                set { _protocolVersion = value; }
            }
            private string _protocolVersion;
            public bool Chunked
            {
                get { return _chunked; }
                set { _chunked = value; }
            }
            private bool _chunked;
            public ConnectionType Connection
            {
                get { return _connection; }
                set { _connection = value; }
            }
            private ConnectionType _connection;
            public Encoding Encoding
            {
                get { return _encoding; }
                set { _encoding = value; }
            }
            private Encoding _encoding;
            public int KeepAlive
            {
                get { return _keepAlive; }
                set { _keepAlive = value; }
            }
            private int _keepAlive;
            public HttpStatusCode Status
            {
                get { return _status; }
                set { _status = value; }
            }
            private HttpStatusCode _status;
            public string Reason
            {
                get { return _reason; }
                set { _reason = value; }
            }
            private string _reason;
            public long ContentLength
            {
                get { return _contentLength; }
                set { _contentLength = value; }
            }
            private long _contentLength;
            public string ContentType
            {
                get { return _contentType; }
                set { _contentType = value; }
            }
            private string _contentType;
            public bool HeadersSent
            {
                get { return _headersSent; }
            }
            private bool _headersSent;
            public bool Sent
            {
                get { return _sent; }
            }
            private bool _sent;
            public ResponseCookies Cookies
            {
                get { return _cookies; }
            }
            private ResponseCookies _cookies = null;

            public TestHttpResponse()
            {
                _headersSent = false;
                _sent = false;
            }

            public void AddHeader(string name, string value) {}

            // Send() may only complete once; a second call throws.
            public void Send()
            {
                if (!_headersSent) SendHeaders();
                if (_sent) throw new InvalidOperationException("stuff already sent");
                _sent = true;
            }

            public void SendBody(byte[] buffer, int offset, int count)
            {
                if (!_headersSent) SendHeaders();
                _sent = true;
            }
            public void SendBody(byte[] buffer)
            {
                if (!_headersSent) SendHeaders();
                _sent = true;
            }

            // Headers may only be sent once; a second call throws.
            public void SendHeaders()
            {
                if (_headersSent) throw new InvalidOperationException("headers already sent");
                _headersSent = true;
            }

            public void Redirect(Uri uri) {}
            public void Redirect(string url) {}
        }

        // Fixture state: two fabricated requests (GET and POST), one
        // response, and the endpoint expected for the remote address.
        public OSHttpRequest req0;
        public OSHttpRequest req1;

        public OSHttpResponse rsp0;

        public IPEndPoint ipEP0;

        // Builds the shared request/response fixtures once for the fixture
        // (NUnit 2 [TestFixtureSetUp]).
        [TestFixtureSetUp]
        public void Init()
        {
            TestHttpRequest threq0 = new TestHttpRequest("utf-8", "text/xml", "OpenSim Test Agent", "192.168.0.1", "4711",
                                                         new string[] {"text/xml"},
                                                         ConnectionType.KeepAlive, 4711,
                                                         new Uri("http://127.0.0.1/admin/inventory/Dr+Who/Tardis"));
            threq0.Method = "GET";
            threq0.HttpVersion = HttpHelper.HTTP10;

            TestHttpRequest threq1 = new TestHttpRequest("utf-8", "text/xml", "OpenSim Test Agent", "192.168.0.1", "4711",
                                                         new string[] {"text/xml"},
                                                         ConnectionType.KeepAlive, 4711,
                                                         new Uri("http://127.0.0.1/admin/inventory/Dr+Who/Tardis?a=0&b=1&c=2"));
            threq1.Method = "POST";
            threq1.HttpVersion = HttpHelper.HTTP11;
            threq1.Headers["x-wuff"] = "wuffwuff";
            threq1.Headers["www-authenticate"] = "go away";

            req0 = new OSHttpRequest(new TestHttpClientContext(false), threq0);
            req1 = new OSHttpRequest(new TestHttpClientContext(false), threq1);

            rsp0 = new OSHttpResponse(new TestHttpResponse());

            ipEP0 = new IPEndPoint(IPAddress.Parse("192.168.0.1"), 4711);
        }

        // OSHttpRequest exposes method, content type and length from the
        // underlying IHttpRequest.
        [Test]
        public void T000_OSHttpRequest()
        {
            Assert.That(req0.HttpMethod, Is.EqualTo("GET"));
            Assert.That(req0.ContentType, Is.EqualTo("text/xml"));
            Assert.That(req0.ContentLength, Is.EqualTo(4711));

            Assert.That(req1.HttpMethod, Is.EqualTo("POST"));
        }

        // Headers are readable via both the indexer and Get(), and the
        // remote endpoint is reconstructed from the remote_addr/remote_port
        // headers.
        [Test]
        public void T001_OSHttpRequestHeaderAccess()
        {
            Assert.That(req1.Headers["x-wuff"], Is.EqualTo("wuffwuff"));
            Assert.That(req1.Headers.Get("x-wuff"), Is.EqualTo("wuffwuff"));

            Assert.That(req1.Headers["www-authenticate"], Is.EqualTo("go away"));
            Assert.That(req1.Headers.Get("www-authenticate"), Is.EqualTo("go away"));

            Assert.That(req0.RemoteIPEndPoint, Is.EqualTo(ipEP0));
        }

        // RawUrl carries only the path; Url preserves the full URI
        // including the query string.
        [Test]
        public void T002_OSHttpRequestUriParsing()
        {
            Assert.That(req0.RawUrl, Is.EqualTo("/admin/inventory/Dr+Who/Tardis"));
            Assert.That(req1.Url.ToString(), Is.EqualTo("http://127.0.0.1/admin/inventory/Dr+Who/Tardis?a=0&b=1&c=2"));
        }

        // ContentType set on the wrapper is readable back.
        [Test]
        public void T100_OSHttpResponse()
        {
            rsp0.ContentType = "text/xml";
            Assert.That(rsp0.ContentType, Is.EqualTo("text/xml"));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Xml.XPath;
using System.Diagnostics;

namespace System.Xml.Schema
{
    /// <summary>
    /// This class contains a (CLR Object, XmlType) pair that represents an instance of an Xml atomic value.
    /// It is optimized to avoid boxing.
    /// </summary>
    /// <remarks>
    /// Primitive CLR values (bool, int, long, double, DateTime) are stored unboxed in the
    /// explicit-layout <see cref="Union"/> field; any other value (string, XmlQualifiedName, ...)
    /// is stored in <see cref="_objVal"/>. Exactly one of the two representations is used per
    /// instance: when <see cref="_objVal"/> is null, <see cref="_clrType"/> selects which union
    /// member is valid.
    /// </remarks>
    public sealed class XmlAtomicValue : XPathItem, ICloneable
    {
        private readonly XmlSchemaType _xmlType;            // schema type of this atomic value; never null (checked by every ctor)
        private readonly object _objVal;                    // boxed/reference value, or null when the union representation is used
        private readonly TypeCode _clrType;                 // discriminator for _unionVal; only meaningful when _objVal == null
        private Union _unionVal;                            // unboxed storage for primitive values
        private readonly NamespacePrefixForQName _nsPrefix; // prefix->ns mapping captured for QName/Notation values; null otherwise

        // All fields share offset 0: the struct is a C-style union so only the member
        // matching _clrType may be read back.
        [StructLayout(LayoutKind.Explicit, Size = 8)]
        private struct Union
        {
            [FieldOffset(0)]
            public bool boolVal;
            [FieldOffset(0)]
            public double dblVal;
            [FieldOffset(0)]
            public long i64Val;
            [FieldOffset(0)]
            public int i32Val;
            [FieldOffset(0)]
            public DateTime dtVal;
        }

        /// <summary>
        /// Minimal single-pair namespace resolver used to remember the prefix binding of a
        /// QName/Notation value so it can be round-tripped through the value converter.
        /// </summary>
        private class NamespacePrefixForQName : IXmlNamespaceResolver
        {
            public string prefix;
            public string ns;

            public NamespacePrefixForQName(string prefix, string ns)
            {
                this.ns = ns;
                this.prefix = prefix;
            }

            // Resolves only the single stored prefix; anything else is unknown (null).
            public string LookupNamespace(string prefix)
            {
                if (prefix == this.prefix)
                {
                    return ns;
                }
                return null;
            }

            // Resolves only the single stored namespace; anything else is unknown (null).
            public string LookupPrefix(string namespaceName)
            {
                if (ns == namespaceName)
                {
                    return prefix;
                }
                return null;
            }

            // NOTE(review): the scope argument is ignored — the single stored pair is always
            // returned regardless of the requested XmlNamespaceScope.
            public IDictionary<string, string> GetNamespacesInScope(XmlNamespaceScope scope)
            {
                Dictionary<string, string> dict = new Dictionary<string, string>(1);
                dict[prefix] = ns;
                return dict;
            }
        }

        //-----------------------------------------------
        // XmlAtomicValue constructors and methods
        //-----------------------------------------------

        /// <summary>Wraps a boolean value without boxing it.</summary>
        internal XmlAtomicValue(XmlSchemaType xmlType, bool value)
        {
            if (xmlType == null) throw new ArgumentNullException(nameof(xmlType));
            _xmlType = xmlType;
            _clrType = TypeCode.Boolean;
            _unionVal.boolVal = value;
        }

        /// <summary>Wraps a DateTime value without boxing it.</summary>
        internal XmlAtomicValue(XmlSchemaType xmlType, DateTime value)
        {
            if (xmlType == null) throw new ArgumentNullException(nameof(xmlType));
            _xmlType = xmlType;
            _clrType = TypeCode.DateTime;
            _unionVal.dtVal = value;
        }

        /// <summary>Wraps a double value without boxing it.</summary>
        internal XmlAtomicValue(XmlSchemaType xmlType, double value)
        {
            if (xmlType == null) throw new ArgumentNullException(nameof(xmlType));
            _xmlType = xmlType;
            _clrType = TypeCode.Double;
            _unionVal.dblVal = value;
        }

        /// <summary>Wraps an int value without boxing it.</summary>
        internal XmlAtomicValue(XmlSchemaType xmlType, int value)
        {
            if (xmlType == null) throw new ArgumentNullException(nameof(xmlType));
            _xmlType = xmlType;
            _clrType = TypeCode.Int32;
            _unionVal.i32Val = value;
        }

        /// <summary>Wraps a long value without boxing it.</summary>
        internal XmlAtomicValue(XmlSchemaType xmlType, long value)
        {
            if (xmlType == null) throw new ArgumentNullException(nameof(xmlType));
            _xmlType = xmlType;
            _clrType = TypeCode.Int64;
            _unionVal.i64Val = value;
        }

        /// <summary>Wraps a string value (stored by reference, no prefix binding captured).</summary>
        internal XmlAtomicValue(XmlSchemaType xmlType, string value)
        {
            if (value == null) throw new ArgumentNullException(nameof(value));
            if (xmlType == null) throw new ArgumentNullException(nameof(xmlType));
            _xmlType = xmlType;
            _objVal = value;
        }

        /// <summary>
        /// Wraps a string value; for QName/Notation types the prefix of the lexical value is
        /// resolved against <paramref name="nsResolver"/> and remembered for later conversion.
        /// </summary>
        internal XmlAtomicValue(XmlSchemaType xmlType, string value, IXmlNamespaceResolver nsResolver)
        {
            if (value == null) throw new ArgumentNullException(nameof(value));
            if (xmlType == null) throw new ArgumentNullException(nameof(xmlType));
            _xmlType = xmlType;
            _objVal = value;
            if (nsResolver != null && (_xmlType.TypeCode == XmlTypeCode.QName || _xmlType.TypeCode == XmlTypeCode.Notation))
            {
                string prefix = GetPrefixFromQName(value);
                _nsPrefix = new NamespacePrefixForQName(prefix, nsResolver.LookupNamespace(prefix));
            }
        }

        /// <summary>Wraps an arbitrary object value (no prefix binding captured).</summary>
        internal XmlAtomicValue(XmlSchemaType xmlType, object value)
        {
            if (value == null) throw new ArgumentNullException(nameof(value));
            if (xmlType == null) throw new ArgumentNullException(nameof(xmlType));
            _xmlType = xmlType;
            _objVal = value;
        }

        /// <summary>
        /// Wraps an arbitrary object value; for QName/Notation types the value must be an
        /// XmlQualifiedName, whose namespace is mapped back to a prefix via <paramref name="nsResolver"/>.
        /// </summary>
        internal XmlAtomicValue(XmlSchemaType xmlType, object value, IXmlNamespaceResolver nsResolver)
        {
            if (value == null) throw new ArgumentNullException(nameof(value));
            if (xmlType == null) throw new ArgumentNullException(nameof(xmlType));
            _xmlType = xmlType;
            _objVal = value;

            if (nsResolver != null && (_xmlType.TypeCode == XmlTypeCode.QName || _xmlType.TypeCode == XmlTypeCode.Notation))
            {
                //Its a qualifiedName
                XmlQualifiedName qname = _objVal as XmlQualifiedName;
                Debug.Assert(qname != null); //string representation is handled in a different overload
                string ns = qname.Namespace;
                _nsPrefix = new NamespacePrefixForQName(nsResolver.LookupPrefix(ns), ns);
            }
        }

        /// <summary>
        /// Since XmlAtomicValue is immutable, clone simply returns this.
        /// </summary>
        public XmlAtomicValue Clone()
        {
            return this;
        }

        //-----------------------------------------------
        // ICloneable methods
        //-----------------------------------------------

        /// <summary>
        /// Since XmlAtomicValue is immutable, clone simply returns this.
        /// </summary>
        object ICloneable.Clone()
        {
            return this;
        }

        //-----------------------------------------------
        // XPathItem methods
        //-----------------------------------------------

        // An atomic value is never a node.
        public override bool IsNode
        {
            get { return false; }
        }

        public override XmlSchemaType XmlType
        {
            get { return _xmlType; }
        }

        public override Type ValueType
        {
            get { return _xmlType.Datatype.ValueType; }
        }

        // Each ValueAsXxx accessor below follows the same pattern: when the value is stored
        // unboxed, dispatch on _clrType and either return the union member directly (when it
        // already matches the requested CLR type) or route through the schema type's value
        // converter; otherwise convert the stored object.
        public override object TypedValue
        {
            get
            {
                XmlValueConverter valueConverter = _xmlType.ValueConverter;
                if (_objVal == null)
                {
                    switch (_clrType)
                    {
                        case TypeCode.Boolean:
                            return valueConverter.ChangeType(_unionVal.boolVal, ValueType);
                        case TypeCode.Int32:
                            return valueConverter.ChangeType(_unionVal.i32Val, ValueType);
                        case TypeCode.Int64:
                            return valueConverter.ChangeType(_unionVal.i64Val, ValueType);
                        case TypeCode.Double:
                            return valueConverter.ChangeType(_unionVal.dblVal, ValueType);
                        case TypeCode.DateTime:
                            return valueConverter.ChangeType(_unionVal.dtVal, ValueType);
                        default:
                            Debug.Fail("Should never get here");
                            break;
                    }
                }
                return valueConverter.ChangeType(_objVal, ValueType, _nsPrefix);
            }
        }

        public override bool ValueAsBoolean
        {
            get
            {
                XmlValueConverter valueConverter = _xmlType.ValueConverter;
                if (_objVal == null)
                {
                    switch (_clrType)
                    {
                        case TypeCode.Boolean:
                            return _unionVal.boolVal;
                        case TypeCode.Int32:
                            return valueConverter.ToBoolean(_unionVal.i32Val);
                        case TypeCode.Int64:
                            return valueConverter.ToBoolean(_unionVal.i64Val);
                        case TypeCode.Double:
                            return valueConverter.ToBoolean(_unionVal.dblVal);
                        case TypeCode.DateTime:
                            return valueConverter.ToBoolean(_unionVal.dtVal);
                        default:
                            Debug.Fail("Should never get here");
                            break;
                    }
                }
                return valueConverter.ToBoolean(_objVal);
            }
        }

        public override DateTime ValueAsDateTime
        {
            get
            {
                XmlValueConverter valueConverter = _xmlType.ValueConverter;
                if (_objVal == null)
                {
                    switch (_clrType)
                    {
                        case TypeCode.Boolean:
                            return valueConverter.ToDateTime(_unionVal.boolVal);
                        case TypeCode.Int32:
                            return valueConverter.ToDateTime(_unionVal.i32Val);
                        case TypeCode.Int64:
                            return valueConverter.ToDateTime(_unionVal.i64Val);
                        case TypeCode.Double:
                            return valueConverter.ToDateTime(_unionVal.dblVal);
                        case TypeCode.DateTime:
                            return _unionVal.dtVal;
                        default:
                            Debug.Fail("Should never get here");
                            break;
                    }
                }
                return valueConverter.ToDateTime(_objVal);
            }
        }

        public override double ValueAsDouble
        {
            get
            {
                XmlValueConverter valueConverter = _xmlType.ValueConverter;
                if (_objVal == null)
                {
                    switch (_clrType)
                    {
                        case TypeCode.Boolean:
                            return valueConverter.ToDouble(_unionVal.boolVal);
                        case TypeCode.Int32:
                            return valueConverter.ToDouble(_unionVal.i32Val);
                        case TypeCode.Int64:
                            return valueConverter.ToDouble(_unionVal.i64Val);
                        case TypeCode.Double:
                            return _unionVal.dblVal;
                        case TypeCode.DateTime:
                            return valueConverter.ToDouble(_unionVal.dtVal);
                        default:
                            Debug.Fail("Should never get here");
                            break;
                    }
                }
                return valueConverter.ToDouble(_objVal);
            }
        }

        public override int ValueAsInt
        {
            get
            {
                XmlValueConverter valueConverter = _xmlType.ValueConverter;
                if (_objVal == null)
                {
                    switch (_clrType)
                    {
                        case TypeCode.Boolean:
                            return valueConverter.ToInt32(_unionVal.boolVal);
                        case TypeCode.Int32:
                            return _unionVal.i32Val;
                        case TypeCode.Int64:
                            return valueConverter.ToInt32(_unionVal.i64Val);
                        case TypeCode.Double:
                            return valueConverter.ToInt32(_unionVal.dblVal);
                        case TypeCode.DateTime:
                            return valueConverter.ToInt32(_unionVal.dtVal);
                        default:
                            Debug.Fail("Should never get here");
                            break;
                    }
                }
                return valueConverter.ToInt32(_objVal);
            }
        }

        public override long ValueAsLong
        {
            get
            {
                XmlValueConverter valueConverter = _xmlType.ValueConverter;
                if (_objVal == null)
                {
                    switch (_clrType)
                    {
                        case TypeCode.Boolean:
                            return valueConverter.ToInt64(_unionVal.boolVal);
                        case TypeCode.Int32:
                            return valueConverter.ToInt64(_unionVal.i32Val);
                        case TypeCode.Int64:
                            return _unionVal.i64Val;
                        case TypeCode.Double:
                            return valueConverter.ToInt64(_unionVal.dblVal);
                        case TypeCode.DateTime:
                            return valueConverter.ToInt64(_unionVal.dtVal);
                        default:
                            Debug.Fail("Should never get here");
                            break;
                    }
                }
                return valueConverter.ToInt64(_objVal);
            }
        }

        public override object ValueAs(Type type, IXmlNamespaceResolver nsResolver)
        {
            XmlValueConverter valueConverter = _xmlType.ValueConverter;

            // Converting to the item's own type is the identity conversion.
            if (type == typeof(XPathItem) || type == typeof(XmlAtomicValue))
                return this;

            if (_objVal == null)
            {
                switch (_clrType)
                {
                    case TypeCode.Boolean:
                        return valueConverter.ChangeType(_unionVal.boolVal, type);
                    case TypeCode.Int32:
                        return valueConverter.ChangeType(_unionVal.i32Val, type);
                    case TypeCode.Int64:
                        return valueConverter.ChangeType(_unionVal.i64Val, type);
                    case TypeCode.Double:
                        return valueConverter.ChangeType(_unionVal.dblVal, type);
                    case TypeCode.DateTime:
                        return valueConverter.ChangeType(_unionVal.dtVal, type);
                    default:
                        Debug.Fail("Should never get here");
                        break;
                }
            }
            // NOTE: the caller-supplied resolver is used here (not the stored _nsPrefix),
            // so QName prefixes are resolved in the caller's namespace context.
            return valueConverter.ChangeType(_objVal, type, nsResolver);
        }

        public override string Value
        {
            get
            {
                XmlValueConverter valueConverter = _xmlType.ValueConverter;
                if (_objVal == null)
                {
                    switch (_clrType)
                    {
                        case TypeCode.Boolean:
                            return valueConverter.ToString(_unionVal.boolVal);
                        case TypeCode.Int32:
                            return valueConverter.ToString(_unionVal.i32Val);
                        case TypeCode.Int64:
                            return valueConverter.ToString(_unionVal.i64Val);
                        case TypeCode.Double:
                            return valueConverter.ToString(_unionVal.dblVal);
                        case TypeCode.DateTime:
                            return valueConverter.ToString(_unionVal.dtVal);
                        default:
                            Debug.Fail("Should never get here");
                            break;
                    }
                }
                return valueConverter.ToString(_objVal, _nsPrefix);
            }
        }

        public override string ToString()
        {
            return Value;
        }

        /// <summary>
        /// Extracts the prefix from a lexical QName. Returns null when the string is not a
        /// valid QName (or has trailing characters), the prefix when one is present, and
        /// string.Empty for an unprefixed name.
        /// </summary>
        private string GetPrefixFromQName(string value)
        {
            int colonOffset;
            int len = ValidateNames.ParseQName(value, 0, out colonOffset);
            if (len == 0 || len != value.Length)
            {
                return null;
            }
            if (colonOffset != 0)
            {
                return value.Substring(0, colonOffset);
            }
            else
            {
                return string.Empty;
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Data.Common;
using System.Diagnostics;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace System.Data.SqlClient
{
    /// <summary>
    /// TextReader over a single character column of a SqlDataReader in sequential-access mode.
    /// Bytes are pulled from the reader on demand and decoded with a stateful Decoder; bytes
    /// that could not yet be decoded into a full character are kept in _leftOverBytes for the
    /// next read. At most one async operation may be in flight at a time (_currentTask).
    /// </summary>
    sealed internal class SqlSequentialTextReader : System.IO.TextReader
    {
        private SqlDataReader _reader;  // The SqlDataReader that we are reading data from; null once closed (see IsClosed)
        private int _columnIndex;       // The index of out column in the table
        private Encoding _encoding;     // Encoding for this character stream
        private Decoder _decoder;       // Decoder based on the encoding (NOTE: Decoders are stateful as they are designed to process streams of data)
        private byte[] _leftOverBytes;  // Bytes leftover from the last Read() operation - this can be null if there were no bytes leftover (Possible optimization: re-use the same array?)
        private int _peekedChar;        // The last character that we peeked at (or -1 if we haven't peeked at anything)
        private Task _currentTask;      // The current async task
        private CancellationTokenSource _disposalTokenSource;    // Used to indicate that a cancellation is requested due to disposal

        internal SqlSequentialTextReader(SqlDataReader reader, int columnIndex, Encoding encoding)
        {
            Debug.Assert(reader != null, "Null reader when creating sequential textreader");
            Debug.Assert(columnIndex >= 0, "Invalid column index when creating sequential textreader");
            Debug.Assert(encoding != null, "Null encoding when creating sequential textreader");

            _reader = reader;
            _columnIndex = columnIndex;
            _encoding = encoding;
            _decoder = encoding.GetDecoder();
            _leftOverBytes = null;
            _peekedChar = -1;
            _currentTask = null;
            _disposalTokenSource = new CancellationTokenSource();
        }

        internal int ColumnIndex
        {
            get { return _columnIndex; }
        }

        /// <summary>
        /// Returns the next character without consuming it. The first Peek() actually performs
        /// a Read() and caches the result in _peekedChar; -1 means end of data.
        /// </summary>
        public override int Peek()
        {
            if (_currentTask != null)
            {
                throw ADP.AsyncOperationPending();
            }
            if (IsClosed)
            {
                throw ADP.ObjectDisposed(this);
            }

            if (!HasPeekedChar)
            {
                _peekedChar = Read();
            }

            Debug.Assert(_peekedChar == -1 || ((_peekedChar >= char.MinValue) && (_peekedChar <= char.MaxValue)), string.Format("Bad peeked character: {0}", _peekedChar));
            return _peekedChar;
        }

        /// <summary>
        /// Reads a single character, consuming a pending peeked character first; -1 on end of data.
        /// </summary>
        public override int Read()
        {
            if (_currentTask != null)
            {
                throw ADP.AsyncOperationPending();
            }
            if (IsClosed)
            {
                throw ADP.ObjectDisposed(this);
            }

            int readChar = -1;

            // If there is already a peeked char, then return it
            if (HasPeekedChar)
            {
                readChar = _peekedChar;
                _peekedChar = -1;
            }
            // If there is data available try to read a char
            else
            {
                char[] tempBuffer = new char[1];
                int charsRead = InternalRead(tempBuffer, 0, 1);
                if (charsRead == 1)
                {
                    readChar = tempBuffer[0];
                }
            }

            Debug.Assert(readChar == -1 || ((readChar >= char.MinValue) && (readChar <= char.MaxValue)), string.Format("Bad read character: {0}", readChar));
            return readChar;
        }

        /// <summary>
        /// Reads up to count characters into buffer starting at index; a pending peeked
        /// character is delivered first. Returns the number of characters actually read.
        /// </summary>
        public override int Read(char[] buffer, int index, int count)
        {
            ValidateReadParameters(buffer, index, count);

            if (IsClosed)
            {
                throw ADP.ObjectDisposed(this);
            }
            if (_currentTask != null)
            {
                throw ADP.AsyncOperationPending();
            }

            int charsRead = 0;
            int charsNeeded = count;
            // Load in peeked char
            if ((charsNeeded > 0) && (HasPeekedChar))
            {
                Debug.Assert((_peekedChar >= char.MinValue) && (_peekedChar <= char.MaxValue), string.Format("Bad peeked character: {0}", _peekedChar));
                buffer[index + charsRead] = (char)_peekedChar;
                charsRead++;
                charsNeeded--;
                _peekedChar = -1;
            }

            // If we need more data and there is data available, read
            charsRead += InternalRead(buffer, index + charsRead, charsNeeded);

            return charsRead;
        }

        /// <summary>
        /// Async counterpart of Read(char[], int, int). Only one async operation may be pending
        /// at a time; concurrency is enforced by CAS-ing completion.Task into _currentTask.
        /// If the underlying GetBytesAsync completes synchronously the result is produced
        /// inline; otherwise a continuation decodes the bytes and completes the task.
        /// </summary>
        public override Task<int> ReadAsync(char[] buffer, int index, int count)
        {
            ValidateReadParameters(buffer, index, count);
            TaskCompletionSource<int> completion = new TaskCompletionSource<int>();
            if (IsClosed)
            {
                completion.SetException(ADP.ExceptionWithStackTrace(ADP.ObjectDisposed(this)));
            }
            else
            {
                try
                {
                    // Claim the "current task" slot; if it is already occupied another async
                    // operation is in flight and this call fails.
                    Task original = Interlocked.CompareExchange<Task>(ref _currentTask, completion.Task, null);
                    if (original != null)
                    {
                        completion.SetException(ADP.ExceptionWithStackTrace(ADP.AsyncOperationPending()));
                    }
                    else
                    {
                        bool completedSynchronously = true;
                        int charsRead = 0;
                        int adjustedIndex = index;
                        int charsNeeded = count;

                        // Load in peeked char
                        if ((HasPeekedChar) && (charsNeeded > 0))
                        {
                            // Take a copy of _peekedChar in case it is cleared during close
                            int peekedChar = _peekedChar;
                            if (peekedChar >= char.MinValue)
                            {
                                Debug.Assert((_peekedChar >= char.MinValue) && (_peekedChar <= char.MaxValue), string.Format("Bad peeked character: {0}", _peekedChar));
                                buffer[adjustedIndex] = (char)peekedChar;
                                adjustedIndex++;
                                charsRead++;
                                charsNeeded--;
                                _peekedChar = -1;
                            }
                        }

                        int byteBufferUsed;
                        byte[] byteBuffer = PrepareByteBuffer(charsNeeded, out byteBufferUsed);

                        // Permit a 0 byte read in order to advance the reader to the correct column
                        if ((byteBufferUsed < byteBuffer.Length) || (byteBuffer.Length == 0))
                        {
                            int bytesRead;
                            var reader = _reader;
                            if (reader != null)
                            {
                                Task<int> getBytesTask = reader.GetBytesAsync(_columnIndex, byteBuffer, byteBufferUsed, byteBuffer.Length - byteBufferUsed, Timeout.Infinite, _disposalTokenSource.Token, out bytesRead);
                                if (getBytesTask == null)
                                {
                                    // Completed synchronously: bytesRead is valid
                                    byteBufferUsed += bytesRead;
                                }
                                else
                                {
                                    // We need more data - setup the callback, and mark this as not completed sync
                                    completedSynchronously = false;
                                    getBytesTask.ContinueWith((t) =>
                                    {
                                        _currentTask = null;
                                        // If we completed but the textreader is closed, then report cancellation
                                        if ((t.Status == TaskStatus.RanToCompletion) && (!IsClosed))
                                        {
                                            try
                                            {
                                                int bytesReadFromStream = t.Result;
                                                byteBufferUsed += bytesReadFromStream;
                                                if (byteBufferUsed > 0)
                                                {
                                                    charsRead += DecodeBytesToChars(byteBuffer, byteBufferUsed, buffer, adjustedIndex, charsNeeded);
                                                }
                                                completion.SetResult(charsRead);
                                            }
                                            catch (Exception ex)
                                            {
                                                completion.SetException(ex);
                                            }
                                        }
                                        else if (IsClosed)
                                        {
                                            completion.SetException(ADP.ExceptionWithStackTrace(ADP.ObjectDisposed(this)));
                                        }
                                        else if (t.Status == TaskStatus.Faulted)
                                        {
                                            if (t.Exception.InnerException is SqlException)
                                            {
                                                // ReadAsync can't throw a SqlException, so wrap it in an IOException
                                                completion.SetException(ADP.ExceptionWithStackTrace(ADP.ErrorReadingFromStream(t.Exception.InnerException)));
                                            }
                                            else
                                            {
                                                completion.SetException(t.Exception.InnerException);
                                            }
                                        }
                                        else
                                        {
                                            completion.SetCanceled();
                                        }
                                    }, TaskScheduler.Default);
                                }

                                if ((completedSynchronously) && (byteBufferUsed > 0))
                                {
                                    // No more data needed, decode what we have
                                    charsRead += DecodeBytesToChars(byteBuffer, byteBufferUsed, buffer, adjustedIndex, charsNeeded);
                                }
                            }
                            else
                            {
                                // Reader is null, close must have happened in the middle of this read
                                completion.SetException(ADP.ExceptionWithStackTrace(ADP.ObjectDisposed(this)));
                            }
                        }

                        if (completedSynchronously)
                        {
                            _currentTask = null;
                            if (IsClosed)
                            {
                                completion.SetCanceled();
                            }
                            else
                            {
                                completion.SetResult(charsRead);
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    // In case of any errors, ensure that the completion is completed and the task is set back to null if we switched it
                    completion.TrySetException(ex);
                    Interlocked.CompareExchange(ref _currentTask, null, completion.Task);
                    throw;
                }
            }

            return completion.Task;
        }

        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                // Set the textreader as closed
                SetClosed();
            }

            base.Dispose(disposing);
        }

        /// <summary>
        /// Forces the TextReader to act as if it was closed
        /// This does not actually close the stream, read off the rest of the data or dispose this
        /// </summary>
        internal void SetClosed()
        {
            _disposalTokenSource.Cancel();
            _reader = null;      // IsClosed keys off _reader being null
            _peekedChar = -1;

            // Wait for pending task
            var currentTask = _currentTask;
            if (currentTask != null)
            {
                ((IAsyncResult)currentTask).AsyncWaitHandle.WaitOne();
            }
        }

        /// <summary>
        /// Performs the actual reading and converting
        /// NOTE: This assumes that buffer, index and count are all valid, we're not closed (!IsClosed) and that there is data left (IsDataLeft())
        /// </summary>
        /// <param name="buffer">Destination character buffer</param>
        /// <param name="index">Offset in buffer to start writing at</param>
        /// <param name="count">Maximum number of characters to read</param>
        /// <returns>Number of characters actually read (0 when no bytes were available)</returns>
        private int InternalRead(char[] buffer, int index, int count)
        {
            Debug.Assert(buffer != null, "Null output buffer");
            Debug.Assert((index >= 0) && (count >= 0) && (index + count <= buffer.Length), string.Format("Bad count: {0} or index: {1}", count, index));
            Debug.Assert(!IsClosed, "Can't read while textreader is closed");

            try
            {
                int byteBufferUsed;
                byte[] byteBuffer = PrepareByteBuffer(count, out byteBufferUsed);
                byteBufferUsed += _reader.GetBytesInternalSequential(_columnIndex, byteBuffer, byteBufferUsed, byteBuffer.Length - byteBufferUsed);

                if (byteBufferUsed > 0)
                {
                    return DecodeBytesToChars(byteBuffer, byteBufferUsed, buffer, index, count);
                }
                else
                {
                    // Nothing to read, or nothing read
                    return 0;
                }
            }
            catch (SqlException ex)
            {
                // Read can't throw a SqlException - so wrap it in an IOException
                throw ADP.ErrorReadingFromStream(ex);
            }
        }

        /// <summary>
        /// Creates a byte array large enough to store all bytes for the characters in the current encoding, then fills it with any leftover bytes
        /// </summary>
        /// <param name="numberOfChars">Number of characters that are to be read</param>
        /// <param name="byteBufferUsed">Number of bytes pre-filled by the leftover bytes</param>
        /// <returns>A byte array of the correct size, pre-filled with leftover bytes</returns>
        private byte[] PrepareByteBuffer(int numberOfChars, out int byteBufferUsed)
        {
            Debug.Assert(numberOfChars >= 0, "Can't prepare a byte buffer for negative characters");

            byte[] byteBuffer;

            if (numberOfChars == 0)
            {
                byteBuffer = new byte[0];
                byteBufferUsed = 0;
            }
            else
            {
                int byteBufferSize = _encoding.GetMaxByteCount(numberOfChars);

                if (_leftOverBytes != null)
                {
                    // If we have more leftover bytes than we need for this conversion, then just re-use the leftover buffer
                    if (_leftOverBytes.Length > byteBufferSize)
                    {
                        byteBuffer = _leftOverBytes;
                        byteBufferUsed = byteBuffer.Length;
                    }
                    else
                    {
                        // Otherwise, copy over the leftover buffer
                        byteBuffer = new byte[byteBufferSize];
                        Array.Copy(_leftOverBytes, byteBuffer, _leftOverBytes.Length);
                        byteBufferUsed = _leftOverBytes.Length;
                    }
                }
                else
                {
                    byteBuffer = new byte[byteBufferSize];
                    byteBufferUsed = 0;
                }
            }

            return byteBuffer;
        }

        /// <summary>
        /// Decodes the given bytes into characters, and stores the leftover bytes for later use
        /// </summary>
        /// <param name="inBuffer">Buffer of bytes to decode</param>
        /// <param name="inBufferCount">Number of bytes to decode from the inBuffer</param>
        /// <param name="outBuffer">Buffer to write the characters to</param>
        /// <param name="outBufferOffset">Offset to start writing to outBuffer at</param>
        /// <param name="outBufferCount">Maximum number of characters to decode</param>
        /// <returns>The actual number of characters decoded</returns>
        private int DecodeBytesToChars(byte[] inBuffer, int inBufferCount, char[] outBuffer, int outBufferOffset, int outBufferCount)
        {
            Debug.Assert(inBuffer != null, "Null input buffer");
            Debug.Assert((inBufferCount > 0) && (inBufferCount <= inBuffer.Length), string.Format("Bad inBufferCount: {0}", inBufferCount));
            Debug.Assert(outBuffer != null, "Null output buffer");
            Debug.Assert((outBufferOffset >= 0) && (outBufferCount > 0) && (outBufferOffset + outBufferCount <= outBuffer.Length), string.Format("Bad outBufferCount: {0} or outBufferOffset: {1}", outBufferCount, outBufferOffset));

            int charsRead;
            int bytesUsed;
            bool completed;
            // flush: false — undecoded trailing bytes stay in the Decoder or are carried over below
            _decoder.Convert(inBuffer, 0, inBufferCount, outBuffer, outBufferOffset, outBufferCount, false, out bytesUsed, out charsRead, out completed);

            // completed may be false and there is no spare bytes if the Decoder has stored bytes to use later
            if ((!completed) && (bytesUsed < inBufferCount))
            {
                _leftOverBytes = new byte[inBufferCount - bytesUsed];
                Array.Copy(inBuffer, bytesUsed, _leftOverBytes, 0, _leftOverBytes.Length);
            }
            else
            {
                // If Convert() sets completed to true, then it must have used all of the bytes we gave it
                Debug.Assert(bytesUsed >= inBufferCount, "Converted completed, but not all bytes were used");
                _leftOverBytes = null;
            }

            Debug.Assert(((_reader == null) || (_reader.ColumnDataBytesRemaining() > 0) || (!completed) || (_leftOverBytes == null)), "Stream has run out of data and the decoder finished, but there are leftover bytes");
            Debug.Assert(charsRead > 0, "Converted no chars. Bad encoding?");

            return charsRead;
        }

        /// <summary>
        /// True if this TextReader is supposed to be closed
        /// </summary>
        private bool IsClosed
        {
            get { return (_reader == null); }
        }

        /// <summary>
        /// True if there is data left to read
        /// </summary>
        /// <returns></returns>
        private bool IsDataLeft
        {
            get { return ((_leftOverBytes != null) || (_reader.ColumnDataBytesRemaining() > 0)); }
        }

        /// <summary>
        /// True if there is a peeked character available
        /// </summary>
        private bool HasPeekedChar
        {
            get { return (_peekedChar >= char.MinValue); }
        }

        /// <summary>
        /// Checks that the parameters passed into a Read() method are valid
        /// </summary>
        /// <param name="buffer">Destination buffer; must not be null</param>
        /// <param name="index">Offset into buffer; must be non-negative</param>
        /// <param name="count">Character count; must be non-negative and fit in buffer</param>
        internal static void ValidateReadParameters(char[] buffer, int index, int count)
        {
            if (buffer == null)
            {
                throw ADP.ArgumentNull(ADP.ParameterBuffer);
            }
            if (index < 0)
            {
                throw ADP.ArgumentOutOfRange(ADP.ParameterIndex);
            }
            if (count < 0)
            {
                throw ADP.ArgumentOutOfRange(ADP.ParameterCount);
            }
            try
            {
                if (checked(index + count) > buffer.Length)
                {
                    throw ExceptionBuilder.InvalidOffsetLength();
                }
            }
            catch (OverflowException)
            {
                // If we've overflowed when adding index and count, then they never would have fit into buffer anyway
                throw ExceptionBuilder.InvalidOffsetLength();
            }
        }
    }

    /// <summary>
    /// Little-endian UTF-16 encoding specialized for SQL Server character data: no byte-order
    /// mark and no exception on invalid bytes, with a decoder that assumes exactly 2 bytes per char.
    /// </summary>
    sealed internal class SqlUnicodeEncoding : UnicodeEncoding
    {
        private static SqlUnicodeEncoding s_singletonEncoding = new SqlUnicodeEncoding();

        private SqlUnicodeEncoding()
            : base(bigEndian: false, byteOrderMark: false, throwOnInvalidBytes: false)
        { }

        public override Decoder GetDecoder()
        {
            return new SqlUnicodeDecoder();
        }

        public override int GetMaxByteCount(int charCount)
        {
            // SQL Server never sends a BOM, so we can assume that its 2 bytes per char
            return charCount * 2;
        }

        public static Encoding SqlUnicodeEncodingInstance
        {
            get { return s_singletonEncoding; }
        }

        sealed private class SqlUnicodeDecoder : Decoder
        {
            public override int GetCharCount(byte[] bytes, int index, int count)
            {
                // SQL Server never sends a BOM, so we can assume that its 2 bytes per char
                return count / 2;
            }

            public override int GetChars(byte[] bytes, int byteIndex, int byteCount, char[] chars, int charIndex)
            {
                // This method is required - simply call Convert()
                int bytesUsed;
                int charsUsed;
                bool completed;
                Convert(bytes, byteIndex, byteCount, chars, charIndex, chars.Length - charIndex, true, out bytesUsed, out charsUsed, out completed);
                return charsUsed;
            }

            public override void Convert(byte[] bytes, int byteIndex, int byteCount, char[] chars, int charIndex, int charCount, bool flush, out int bytesUsed, out int charsUsed, out bool completed)
            {
                // Assume 2 bytes per char and no BOM
                charsUsed = Math.Min(charCount, byteCount / 2);
                bytesUsed = charsUsed * 2;
                completed = (bytesUsed == byteCount);

                // BlockCopy uses offsets\length measured in bytes, not the actual array index
                Buffer.BlockCopy(bytes, byteIndex, chars, charIndex * 2, bytesUsed);
            }
        }
    }
}
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using System; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.WindowsAzure; using Microsoft.WindowsAzure.Management.WebSites; using Microsoft.WindowsAzure.Management.WebSites.Models; namespace Microsoft.WindowsAzure { /// <summary> /// The Web Sites Management API provides a RESTful set of web services /// that interact with the Windows Azure Web Sites service to manage your /// web sites. The API has entities that capture the relationship between /// an end user and Windows Azure Web Sites service. (see /// http://msdn.microsoft.com/en-us/library/windowsazure/dn166981.aspx for /// more information) /// </summary> public static partial class WebHostingPlanOperationsExtensions { /// <summary> /// Creates a new Web Hosting Plan. (see /// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/ /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations. /// </param> /// <param name='webSpaceName'> /// Required. The name of the web space. /// </param> /// <param name='parameters'> /// Required. 
Web Hosting Plan Parameters. /// </param> /// <returns> /// The Create Web Web Hosting Plan operation response. /// </returns> public static WebHostingPlanCreateResponse Create(this IWebHostingPlanOperations operations, string webSpaceName, WebHostingPlanCreateParameters parameters) { return Task.Factory.StartNew((object s) => { return ((IWebHostingPlanOperations)s).CreateAsync(webSpaceName, parameters); } , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult(); } /// <summary> /// Creates a new Web Hosting Plan. (see /// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/ /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations. /// </param> /// <param name='webSpaceName'> /// Required. The name of the web space. /// </param> /// <param name='parameters'> /// Required. Web Hosting Plan Parameters. /// </param> /// <returns> /// The Create Web Web Hosting Plan operation response. /// </returns> public static Task<WebHostingPlanCreateResponse> CreateAsync(this IWebHostingPlanOperations operations, string webSpaceName, WebHostingPlanCreateParameters parameters) { return operations.CreateAsync(webSpaceName, parameters, CancellationToken.None); } /// <summary> /// Deletes a Web Hosting Plan (see /// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/ /// for more information) /// </summary> /// <param name='operations'> /// Reference to the /// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations. /// </param> /// <param name='webSpaceName'> /// Required. The name of the web space. /// </param> /// <param name='webHostingPlanName'> /// Required. The name of the web hosting plan. 
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static OperationResponse Delete(this IWebHostingPlanOperations operations, string webSpaceName, string webHostingPlanName)
{
    // Generated sync-over-async wrapper: schedules the async call on the
    // default scheduler and blocks the calling thread until it completes.
    return Task.Factory.StartNew((object s) =>
    {
        return ((IWebHostingPlanOperations)s).DeleteAsync(webSpaceName, webHostingPlanName);
    }
    , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// Deletes a Web Hosting Plan (see
/// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/
/// for more information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations.
/// </param>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='webHostingPlanName'>
/// Required. The name of the web hosting plan.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<OperationResponse> DeleteAsync(this IWebHostingPlanOperations operations, string webSpaceName, string webHostingPlanName)
{
    // Convenience overload: forwards with CancellationToken.None.
    return operations.DeleteAsync(webSpaceName, webHostingPlanName, CancellationToken.None);
}

/// <summary>
/// Gets details of an existing Web Hosting Plan (see
/// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/
/// for more information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations.
/// </param>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='webHostingPlanName'>
/// Required. The name of the web hosting plan.
/// </param>
/// <returns>
/// The Get Web Hosting Plan operation response.
/// </returns>
public static WebHostingPlanGetResponse Get(this IWebHostingPlanOperations operations, string webSpaceName, string webHostingPlanName)
{
    // Generated sync-over-async wrapper; blocks the calling thread.
    return Task.Factory.StartNew((object s) =>
    {
        return ((IWebHostingPlanOperations)s).GetAsync(webSpaceName, webHostingPlanName);
    }
    , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// Gets details of an existing Web Hosting Plan (see
/// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/
/// for more information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations.
/// </param>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='webHostingPlanName'>
/// Required. The name of the web hosting plan.
/// </param>
/// <returns>
/// The Get Web Hosting Plan operation response.
/// </returns>
public static Task<WebHostingPlanGetResponse> GetAsync(this IWebHostingPlanOperations operations, string webSpaceName, string webHostingPlanName)
{
    // Convenience overload: forwards with CancellationToken.None.
    return operations.GetAsync(webSpaceName, webHostingPlanName, CancellationToken.None);
}

/// <summary>
/// You can retrieve historical usage metrics for a site by issuing an
/// HTTP GET request. (see
/// http://msdn.microsoft.com/en-us/library/windowsazure/dn166964.aspx
/// for more information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations.
/// </param>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='webHostingPlanName'>
/// Required. The name of the web hosting plan.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Get Historical Usage Metrics
/// Web hosting plan operation.
/// </param>
/// <returns>
/// The Get Historical Usage Metrics Web hosting plan operation
/// response.
/// </returns>
public static WebHostingPlanGetHistoricalUsageMetricsResponse GetHistoricalUsageMetrics(this IWebHostingPlanOperations operations, string webSpaceName, string webHostingPlanName, WebHostingPlanGetHistoricalUsageMetricsParameters parameters)
{
    // Generated sync-over-async wrapper; blocks the calling thread.
    return Task.Factory.StartNew((object s) =>
    {
        return ((IWebHostingPlanOperations)s).GetHistoricalUsageMetricsAsync(webSpaceName, webHostingPlanName, parameters);
    }
    , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// You can retrieve historical usage metrics for a site by issuing an
/// HTTP GET request. (see
/// http://msdn.microsoft.com/en-us/library/windowsazure/dn166964.aspx
/// for more information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations.
/// </param>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='webHostingPlanName'>
/// Required. The name of the web hosting plan.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Get Historical Usage Metrics
/// Web hosting plan operation.
/// </param>
/// <returns>
/// The Get Historical Usage Metrics Web hosting plan operation
/// response.
/// </returns>
public static Task<WebHostingPlanGetHistoricalUsageMetricsResponse> GetHistoricalUsageMetricsAsync(this IWebHostingPlanOperations operations, string webSpaceName, string webHostingPlanName, WebHostingPlanGetHistoricalUsageMetricsParameters parameters)
{
    // Convenience overload: forwards with CancellationToken.None.
    return operations.GetHistoricalUsageMetricsAsync(webSpaceName, webHostingPlanName, parameters, CancellationToken.None);
}

/// <summary>
/// You can list the web spaces under the current subscription by
/// issuing a GET request. (see
/// http://msdn.microsoft.com/en-us/library/windowsazure/dn166961.aspx
/// for more information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations.
/// </param>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <returns>
/// The List Web Hosting Plans operation response.
/// </returns>
public static WebHostingPlanListResponse List(this IWebHostingPlanOperations operations, string webSpaceName)
{
    // Generated sync-over-async wrapper; blocks the calling thread.
    return Task.Factory.StartNew((object s) =>
    {
        return ((IWebHostingPlanOperations)s).ListAsync(webSpaceName);
    }
    , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// You can list the web spaces under the current subscription by
/// issuing a GET request. (see
/// http://msdn.microsoft.com/en-us/library/windowsazure/dn166961.aspx
/// for more information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations.
/// </param>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <returns>
/// The List Web Hosting Plans operation response.
/// </returns>
public static Task<WebHostingPlanListResponse> ListAsync(this IWebHostingPlanOperations operations, string webSpaceName)
{
    // Convenience overload: forwards with CancellationToken.None.
    return operations.ListAsync(webSpaceName, CancellationToken.None);
}

/// <summary>
/// Updates an existing Web Hosting Plan. (see
/// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/
/// for more information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations.
/// </param>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='webHostingPlanName'>
/// Required. The name of the web hosting plan.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Update Web Hosting Plan
/// operation.
/// </param>
/// <returns>
/// The Create Web Hosting Plan operation response.
/// </returns>
public static WebHostingPlanUpdateResponse Update(this IWebHostingPlanOperations operations, string webSpaceName, string webHostingPlanName, WebHostingPlanUpdateParameters parameters)
{
    // Generated sync-over-async wrapper; blocks the calling thread.
    return Task.Factory.StartNew((object s) =>
    {
        return ((IWebHostingPlanOperations)s).UpdateAsync(webSpaceName, webHostingPlanName, parameters);
    }
    , operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}

/// <summary>
/// Updates an existing Web Hosting Plan. (see
/// http://azure.microsoft.com/en-us/documentation/articles/azure-web-sites-web-hosting-plans-in-depth-overview/
/// for more information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.WebSites.IWebHostingPlanOperations.
/// </param>
/// <param name='webSpaceName'>
/// Required. The name of the web space.
/// </param>
/// <param name='webHostingPlanName'>
/// Required. The name of the web hosting plan.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Update Web Hosting Plan
/// operation.
/// </param>
/// <returns>
/// The Create Web Hosting Plan operation response.
/// </returns>
public static Task<WebHostingPlanUpdateResponse> UpdateAsync(this IWebHostingPlanOperations operations, string webSpaceName, string webHostingPlanName, WebHostingPlanUpdateParameters parameters)
{
    // Convenience overload: forwards with CancellationToken.None.
    return operations.UpdateAsync(webSpaceName, webHostingPlanName, parameters, CancellationToken.None);
}
}
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Sockets;
using UnityEditor;
using UnityEditorInternal;
using UnityEngine;

namespace UnityTest
{
    /// <summary>
    /// Editor window that accepts TCP connections from a test runner, deserializes
    /// <c>ResultDTO</c> messages (one per connection), aggregates test results, and
    /// writes an XML report per scene when a run finishes.
    /// </summary>
    [Serializable]
    public class NetworkResultsReceiver : EditorWindow
    {
        // Singleton-style handle used by StartReceiver/StopReceiver.
        public static NetworkResultsReceiver Instance;

        private string m_StatusLabel;
        private TcpListener m_Listener;

        [SerializeField]
        private PlatformRunnerConfiguration m_Configuration;

        private List<ITestResult> m_TestResults = new List<ITestResult>();

        #region steering variables
        private bool m_RunFinished;
        private bool m_Repaint;

        // Per-test timeout reported by the runner; Zero while no test is in flight.
        private TimeSpan m_TestTimeout = TimeSpan.Zero;
        private DateTime m_LastMessageReceived;
        private bool m_Running;
        public TimeSpan ReceiveMessageTimeout = TimeSpan.FromSeconds(30);
        private readonly TimeSpan m_InitialConnectionTimeout = TimeSpan.FromSeconds(300);
        private bool m_TestFailed;
        #endregion

        // Reads exactly one ResultDTO from the client's stream, closes the client,
        // then updates window/run state according to the message type.
        private void AcceptCallback(TcpClient client)
        {
            m_Repaint = true;
            ResultDTO dto;
            try
            {
                m_LastMessageReceived = DateTime.Now;
                using (var stream = client.GetStream())
                {
                    var bf = new DTOFormatter();
                    dto = (ResultDTO)bf.Deserialize(stream);
                    stream.Close();
                }
                client.Close();
            }
            catch (ObjectDisposedException e)
            {
                Debug.LogException(e);
                m_StatusLabel = "Got disconnected";
                return;
            }
            catch (Exception e)
            {
                // Any other deserialization/network failure: log and drop the message.
                Debug.LogException(e);
                return;
            }
            switch (dto.messageType)
            {
                case ResultDTO.MessageType.TestStarted:
                    m_StatusLabel = dto.testName;
                    m_TestTimeout = TimeSpan.FromSeconds(dto.testTimeout);
                    break;
                case ResultDTO.MessageType.TestFinished:
                    m_TestResults.Add(dto.testResult);
                    m_TestTimeout = TimeSpan.Zero;
                    // Ignored tests do not count as failures.
                    if (dto.testResult.Executed && dto.testResult.ResultState != TestResultState.Ignored && !dto.testResult.IsSuccess)
                        m_TestFailed = true;
                    break;
                case ResultDTO.MessageType.RunStarted:
                    m_TestResults = new List<ITestResult>();
                    m_StatusLabel = "Run started: " + dto.loadedLevelName;
                    break;
                case ResultDTO.MessageType.RunFinished:
                    WriteResultsToLog(dto, m_TestResults);
                    if (!string.IsNullOrEmpty(m_Configuration.resultsDir))
                    {
                        var platform = m_Configuration.runInEditor ? "Editor" : m_Configuration.buildTarget.ToString();
                        var resultWriter = new XmlResultWriter(dto.loadedLevelName, platform, m_TestResults.ToArray());
                        try
                        {
                            if (!Directory.Exists(m_Configuration.resultsDir))
                            {
                                Directory.CreateDirectory(m_Configuration.resultsDir);
                            }
                            var filePath = Path.Combine(m_Configuration.resultsDir, dto.loadedLevelName + ".xml");
                            File.WriteAllText(filePath, resultWriter.GetTestResult());
                        }
                        catch (Exception e)
                        {
                            // Report writing is best-effort; a failed write must not abort the run.
                            Debug.LogException(e);
                        }
                    }
                    break;
                case ResultDTO.MessageType.AllScenesFinished:
                    m_Running = false;
                    m_RunFinished = true;
                    break;
                case ResultDTO.MessageType.Ping:
                    // Keep-alive only; m_LastMessageReceived was already refreshed above.
                    break;
            }
        }

        // Logs a pass/fail summary for one scene's run to the Unity console.
        private void WriteResultsToLog(ResultDTO dto, List<ITestResult> list)
        {
            string result = "Run finished for: " + dto.loadedLevelName;
            var failCount = list.Count(t => t.Executed && !t.IsSuccess);
            if (failCount == 0)
                result += "\nAll tests passed";
            else
                result += "\n" + failCount + " tests failed";

            if (failCount == 0)
                Debug.Log(result);
            else
                Debug.LogWarning(result);
        }

        // Editor tick: polls the listener, enforces timeouts, and closes the
        // window (and exits the editor in batch mode) once the run is done.
        public void Update()
        {
            if (EditorApplication.isCompiling
                && m_Listener != null)
            {
                m_Running = false;
                m_Listener.Stop();
                return;
            }

            if (m_Running)
            {
                try
                {
                    if (m_Listener != null && m_Listener.Pending())
                    {
                        using (var client = m_Listener.AcceptTcpClient())
                        {
                            AcceptCallback(client);
                            // NOTE(review): AcceptCallback already closed the client on the
                            // success path; this second Close is redundant but harmless.
                            client.Close();
                        }
                    }
                }
                catch (InvalidOperationException e)
                {
                    m_StatusLabel = "Exception happened: " + e.Message;
                    Repaint();
                    Debug.LogException(e);
                }
            }
            if (m_Running)
            {
                // Allow twice the reported per-test timeout before giving up, but
                // never less than ReceiveMessageTimeout.
                var adjustedtestTimeout = m_TestTimeout.Add(m_TestTimeout);
                var timeout = ReceiveMessageTimeout > adjustedtestTimeout ? ReceiveMessageTimeout : adjustedtestTimeout;
                if ((DateTime.Now - m_LastMessageReceived) > timeout)
                {
                    Debug.LogError("Timeout when waiting for test results");
                    m_RunFinished = true;
                }
            }
            if (m_RunFinished)
            {
                Close();
                if (InternalEditorUtility.inBatchMode)
                    EditorApplication.Exit(m_TestFailed ? Batch.returnCodeTestsFailed : Batch.returnCodeTestsOk);
            }
            if (m_Repaint) Repaint();
        }

        public void OnEnable()
        {
            minSize = new Vector2(300, 100);
            titleContent = new GUIContent("Test run monitor");
            Instance = this;
            m_StatusLabel = "Initializing...";
            // Server start is deferred until compilation ends (StartReceiver retries).
            if (EditorApplication.isCompiling) return;
            EnableServer();
        }

        // Binds the TCP listener per m_Configuration and starts the timeout clock.
        private void EnableServer()
        {
            if (m_Configuration == null) throw new Exception("No result receiver server configuration.");

            var ipAddress = IPAddress.Any;
            if (m_Configuration.ipList != null && m_Configuration.ipList.Count == 1)
                ipAddress = IPAddress.Parse(m_Configuration.ipList.Single());

            var ipAddStr = Equals(ipAddress, IPAddress.Any) ? "[All interfaces]" : ipAddress.ToString();

            m_Listener = new TcpListener(ipAddress, m_Configuration.port);
            m_StatusLabel = "Waiting for connection on: " + ipAddStr + ":" + m_Configuration.port;

            try
            {
                m_Listener.Start(100);
            }
            catch (SocketException e)
            {
                m_StatusLabel = "Exception happened: " + e.Message;
                Repaint();
                Debug.LogException(e);
            }
            m_Running = true;
            // Set the "last message" stamp into the future so the first timeout check
            // effectively grants the long initial-connection window.
            m_LastMessageReceived = DateTime.Now + m_InitialConnectionTimeout;
        }

        public void OnDisable()
        {
            Instance = null;
            if (m_Listener != null) m_Listener.Stop();
        }

        public void OnGUI()
        {
            EditorGUILayout.LabelField("Status:", EditorStyles.boldLabel);
            EditorGUILayout.LabelField(m_StatusLabel);
            GUILayout.FlexibleSpace();
            if (GUILayout.Button("Stop"))
            {
                StopReceiver();
                if (InternalEditorUtility.inBatchMode)
                    EditorApplication.Exit(Batch.returnCodeRunError);
            }
        }

        public static void StartReceiver(PlatformRunnerConfiguration configuration)
        {
            var w = (NetworkResultsReceiver)GetWindow(typeof(NetworkResultsReceiver), false);
            w.SetConfiguration(configuration);
            if (!EditorApplication.isCompiling)
            {
                w.EnableServer();
            }
            w.Show(true);
        }

        private void SetConfiguration(PlatformRunnerConfiguration configuration)
        {
            m_Configuration = configuration;
        }

        public static void StopReceiver()
        {
            if (Instance == null)
                return;
            // NOTE(review): DestroyImmediate only runs when Close() throws; if the
            // intent is to always tear the window down, this looks like it belongs
            // in a finally block — confirm against the original project history.
            try{
            Instance.Close();
            }catch(Exception e){
            Debug.LogException(e);
            DestroyImmediate(Instance);
            }
        }
    }
}
using System;
using System.Globalization;
using System.Collections.Generic;
using Sasoma.Utils;
using Sasoma.Microdata.Interfaces;
using Sasoma.Languages.Core;
using Sasoma.Microdata.Properties;

namespace Sasoma.Microdata.Types
{
    /// <summary>
    /// Microdata type for a liquor store (http://schema.org/LiquorStore).
    /// </summary>
    public class LiquorStore_Core : TypeCore, IStore
    {
        public LiquorStore_Core()
        {
            // Static schema metadata for this type.
            _TypeId = 151;
            _Id = "LiquorStore";
            _Schema_Org_Url = "http://schema.org/LiquorStore";

            // Resolve the localized display label for this type.
            string resolvedLabel = "";
            GetLabel(out resolvedLabel, "LiquorStore", typeof(LiquorStore_Core));
            _Label = resolvedLabel;

            // Type hierarchy and the ids of the properties this type supports.
            _Ancestors = new int[] { 266, 193, 155, 252 };
            _SubTypes = new int[0];
            _SuperTypes = new int[] { 252 };
            _Properties = new int[] { 67, 108, 143, 229, 5, 10, 49, 85, 91, 98, 115, 135, 159, 199, 196, 47, 75, 77, 94, 95, 130, 137, 36, 60, 152, 156, 167 };
        }

        /// <summary>
        /// Physical address of the item.
        /// </summary>
        private Address_Core _address;
        public Address_Core Address
        {
            get { return _address; }
            set
            {
                _address = value;
                SetPropertyInstance(_address);
            }
        }

        /// <summary>
        /// The overall rating, based on a collection of reviews or ratings, of the item.
        /// </summary>
        private Properties.AggregateRating_Core _aggregateRating;
        public Properties.AggregateRating_Core AggregateRating
        {
            get { return _aggregateRating; }
            set
            {
                _aggregateRating = value;
                SetPropertyInstance(_aggregateRating);
            }
        }

        /// <summary>
        /// The larger organization that this local business is a branch of, if any.
        /// </summary>
        private BranchOf_Core _branchOf;
        public BranchOf_Core BranchOf
        {
            get { return _branchOf; }
            set
            {
                _branchOf = value;
                SetPropertyInstance(_branchOf);
            }
        }

        /// <summary>
        /// A contact point for a person or organization.
        /// </summary>
        private ContactPoints_Core _contactPoints;
        public ContactPoints_Core ContactPoints
        {
            get { return _contactPoints; }
            set
            {
                _contactPoints = value;
                SetPropertyInstance(_contactPoints);
            }
        }

        /// <summary>
        /// The basic containment relation between places.
        /// </summary>
        private ContainedIn_Core _containedIn;
        public ContainedIn_Core ContainedIn
        {
            get { return _containedIn; }
            set
            {
                _containedIn = value;
                SetPropertyInstance(_containedIn);
            }
        }

        /// <summary>
        /// The currency accepted (in <a href=\http://en.wikipedia.org/wiki/ISO_4217\ target=\new\>ISO 4217 currency format</a>).
        /// </summary>
        private CurrenciesAccepted_Core _currenciesAccepted;
        public CurrenciesAccepted_Core CurrenciesAccepted
        {
            get { return _currenciesAccepted; }
            set
            {
                _currenciesAccepted = value;
                SetPropertyInstance(_currenciesAccepted);
            }
        }

        /// <summary>
        /// A short description of the item.
        /// </summary>
        private Description_Core _description;
        public Description_Core Description
        {
            get { return _description; }
            set
            {
                _description = value;
                SetPropertyInstance(_description);
            }
        }

        /// <summary>
        /// Email address.
        /// </summary>
        private Email_Core _email;
        public Email_Core Email
        {
            get { return _email; }
            set
            {
                _email = value;
                SetPropertyInstance(_email);
            }
        }

        /// <summary>
        /// People working for this organization.
        /// </summary>
        private Employees_Core _employees;
        public Employees_Core Employees
        {
            get { return _employees; }
            set
            {
                _employees = value;
                SetPropertyInstance(_employees);
            }
        }

        /// <summary>
        /// Upcoming or past events associated with this place or organization.
        /// </summary>
        private Events_Core _events;
        public Events_Core Events
        {
            get { return _events; }
            set
            {
                _events = value;
                SetPropertyInstance(_events);
            }
        }

        /// <summary>
        /// The fax number.
        /// </summary>
        private FaxNumber_Core _faxNumber;
        public FaxNumber_Core FaxNumber
        {
            get { return _faxNumber; }
            set
            {
                _faxNumber = value;
                SetPropertyInstance(_faxNumber);
            }
        }

        /// <summary>
        /// A person who founded this organization.
        /// </summary>
        private Founders_Core _founders;
        public Founders_Core Founders
        {
            get { return _founders; }
            set
            {
                _founders = value;
                SetPropertyInstance(_founders);
            }
        }

        /// <summary>
        /// The date that this organization was founded.
        /// </summary>
        private FoundingDate_Core _foundingDate;
        public FoundingDate_Core FoundingDate
        {
            get { return _foundingDate; }
            set
            {
                _foundingDate = value;
                SetPropertyInstance(_foundingDate);
            }
        }

        /// <summary>
        /// The geo coordinates of the place.
        /// </summary>
        private Geo_Core _geo;
        public Geo_Core Geo
        {
            get { return _geo; }
            set
            {
                _geo = value;
                SetPropertyInstance(_geo);
            }
        }

        /// <summary>
        /// URL of an image of the item.
        /// </summary>
        private Image_Core _image;
        public Image_Core Image
        {
            get { return _image; }
            set
            {
                _image = value;
                SetPropertyInstance(_image);
            }
        }

        /// <summary>
        /// A count of a specific user interactions with this item\u2014for example, <code>20 UserLikes</code>, <code>5 UserComments</code>, or <code>300 UserDownloads</code>. The user interaction type should be one of the sub types of <a href=\http://schema.org/UserInteraction\>UserInteraction</a>.
        /// </summary>
        private InteractionCount_Core _interactionCount;
        public InteractionCount_Core InteractionCount
        {
            get { return _interactionCount; }
            set
            {
                _interactionCount = value;
                SetPropertyInstance(_interactionCount);
            }
        }

        /// <summary>
        /// The location of the event or organization.
        /// </summary>
        private Location_Core _location;
        public Location_Core Location
        {
            get { return _location; }
            set
            {
                _location = value;
                SetPropertyInstance(_location);
            }
        }

        /// <summary>
        /// A URL to a map of the place.
        /// </summary>
        private Maps_Core _maps;
        public Maps_Core Maps
        {
            get { return _maps; }
            set
            {
                _maps = value;
                SetPropertyInstance(_maps);
            }
        }

        /// <summary>
        /// A member of this organization.
        /// </summary>
        private Members_Core _members;
        public Members_Core Members
        {
            get { return _members; }
            set
            {
                _members = value;
                SetPropertyInstance(_members);
            }
        }

        /// <summary>
        /// The name of the item.
        /// </summary>
        private Name_Core _name;
        public Name_Core Name
        {
            get { return _name; }
            set
            {
                _name = value;
                SetPropertyInstance(_name);
            }
        }

        /// <summary>
        /// The opening hours for a business. Opening hours can be specified as a weekly time range, starting with days, then times per day. Multiple days can be listed with commas ',' separating each day. Day or time ranges are specified using a hyphen '-'.<br/>- Days are specified using the following two-letter combinations: <code>Mo</code>, <code>Tu</code>, <code>We</code>, <code>Th</code>, <code>Fr</code>, <code>Sa</code>, <code>Su</code>.<br/>- Times are specified using 24:00 time. For example, 3pm is specified as <code>15:00</code>. <br/>- Here is an example: <code>&lt;time itemprop=\openingHours\ datetime=\Tu,Th 16:00-20:00\&gt;Tuesdays and Thursdays 4-8pm&lt;/time&gt;</code>. <br/>- If a business is open 7 days a week, then it can be specified as <code>&lt;time itemprop=\openingHours\ datetime=\Mo-Su\&gt;Monday through Sunday, all day&lt;/time&gt;</code>.
        /// </summary>
        private OpeningHours_Core _openingHours;
        public OpeningHours_Core OpeningHours
        {
            get { return _openingHours; }
            set
            {
                _openingHours = value;
                SetPropertyInstance(_openingHours);
            }
        }

        /// <summary>
        /// Cash, credit card, etc.
        /// </summary>
        private PaymentAccepted_Core _paymentAccepted;
        public PaymentAccepted_Core PaymentAccepted
        {
            get { return _paymentAccepted; }
            set
            {
                _paymentAccepted = value;
                SetPropertyInstance(_paymentAccepted);
            }
        }

        /// <summary>
        /// Photographs of this place.
        /// </summary>
        private Photos_Core _photos;
        public Photos_Core Photos
        {
            get { return _photos; }
            set
            {
                _photos = value;
                SetPropertyInstance(_photos);
            }
        }

        /// <summary>
        /// The price range of the business, for example <code>$$$</code>.
        /// </summary>
        private PriceRange_Core _priceRange;
        public PriceRange_Core PriceRange
        {
            get { return _priceRange; }
            set
            {
                _priceRange = value;
                SetPropertyInstance(_priceRange);
            }
        }

        /// <summary>
        /// Review of the item.
        /// </summary>
        private Reviews_Core _reviews;
        public Reviews_Core Reviews
        {
            get { return _reviews; }
            set
            {
                _reviews = value;
                SetPropertyInstance(_reviews);
            }
        }

        /// <summary>
        /// The telephone number.
        /// </summary>
        private Telephone_Core _telephone;
        public Telephone_Core Telephone
        {
            get { return _telephone; }
            set
            {
                _telephone = value;
                SetPropertyInstance(_telephone);
            }
        }

        /// <summary>
        /// URL of the item.
        /// </summary>
        private Properties.URL_Core _url;
        public Properties.URL_Core URL
        {
            get { return _url; }
            set
            {
                _url = value;
                SetPropertyInstance(_url);
            }
        }
    }
}
//! \file ImageERI.cs //! \date Tue May 26 12:04:30 2015 //! \brief Entis rasterized image format. // // Copyright (C) 2015 by morkt // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. 
//
using System;
using System.ComponentModel.Composition;
using System.IO;
using System.Windows.Media;
using GameRes.Utility;

namespace GameRes.Formats.Entis
{
    // Metadata parsed from an ERI file's "ImageInf" header section.
    internal class EriMetaData : ImageMetaData
    {
        public int      StreamPos;       // absolute file offset of the section stream after the header
        public int      Version;         // ImageInf version field (0x00020100 or 0x00020200)
        public CvType   Transformation;
        public EriCode  Architecture;
        public int      FormatType;
        public bool     VerticalFlip;    // true when the stored height was negative
        public int      ClippedPixel;
        public int      SamplingFlags;
        public ulong    QuantumizedBits;
        public ulong    AllottedBits;
        public int      BlockingDegree;
        public int      LappedBlock;
        public int      FrameTransform;
        public int      FrameDegree;
    }

    public enum CvType
    {
        Lossless_ERI =  0x03020000,
        DCT_ERI      =  0x00000001,
        LOT_ERI      =  0x00000005,
        LOT_ERI_MSS  =  0x00000105,
    }

    public enum EriCode
    {
        RunlengthGamma      = -1,
        RunlengthHuffman    = -4,
        Nemesis             = -16,
    }

    public enum EriImage
    {
        RGB         = 0x00000001,
        RGBA        = 0x04000001,
        Gray        = 0x00000002,
        TypeMask    = 0x00FFFFFF,
        WithPalette = 0x01000000,
        UseClipping = 0x02000000,
        WithAlpha   = 0x04000000,
        SideBySide  = 0x10000000,
    }

    // BinaryReader over an ERI container: a sequence of sections, each headed by
    // an 8-byte ASCII id followed by a little-endian int64 byte length.
    internal class EriFile : BinaryReader
    {
        internal struct Section
        {
            public AsciiString  Id;
            public long         Length;
        }

        public EriFile (Stream stream) : base (stream, System.Text.Encoding.ASCII, true)
        {
        }

        // Reads the next section header (id + length); throws at end of stream.
        public Section ReadSection ()
        {
            var section = new Section();
            section.Id = new AsciiString (8);
            if (8 != this.Read (section.Id.Value, 0, 8))
                throw new EndOfStreamException();
            section.Length = this.ReadInt64();
            return section;
        }

        // Skips sections until one with the given id is found and returns its
        // length, leaving the stream positioned at the section payload.
        // Throws EndOfStreamException if the id is never found.
        public long FindSection (string name)
        {
            var id = new AsciiString (8);
            for (;;)
            {
                if (8 != this.Read (id.Value, 0, 8))
                    throw new EndOfStreamException();
                var length = this.ReadInt64();
                if (length < 0)
                    throw new EndOfStreamException();
                if (id == name)
                    return length;
                this.BaseStream.Seek (length, SeekOrigin.Current);
            }
        }
    }

    [Export(typeof(ImageFormat))]
    public class EriFormat : ImageFormat
    {
        public override string         Tag { get { return "ERI"; } }
        public override string Description { get { return "Entis rasterized image format"; } }
        public override uint     Signature { get { return 0x69746e45u; } } // 'Enti'

        // Parses the file header and the "ImageInf" section; returns null when the
        // stream is not a supported ERI image.
        public override ImageMetaData ReadMetaData (Stream stream)
        {
            byte[] header = new byte[0x40];
            if (header.Length != stream.Read (header, 0, header.Length))
                return null;
            if (0x03000100 != LittleEndian.ToUInt32 (header, 8))
                return null;
            if (!Binary.AsciiEqual (header, 0x10, "Entis Rasterized Image"))
                return null;
            using (var reader = new EriFile (stream))
            {
                var section = reader.ReadSection();
                if (section.Id != "Header " || section.Length <= 0)
                    return null;
                int header_size = (int)section.Length;
                // Section stream begins right after the 0x50-byte prologue + header.
                int stream_pos = 0x50 + header_size;
                EriMetaData info = null;
                while (header_size > 8)
                {
                    section = reader.ReadSection();
                    header_size -= 8;
                    if (section.Length <= 0 || section.Length > header_size)
                        break;
                    if ("ImageInf" == section.Id)
                    {
                        int version = reader.ReadInt32();
                        if (version != 0x00020100 && version != 0x00020200)
                            return null;
                        info = new EriMetaData { StreamPos = stream_pos, Version = version };
                        info.Transformation = (CvType)reader.ReadInt32();
                        info.Architecture = (EriCode)reader.ReadInt32();
                        info.FormatType = reader.ReadInt32();
                        int w = reader.ReadInt32();
                        int h = reader.ReadInt32();
                        info.Width = (uint)Math.Abs (w);
                        info.Height = (uint)Math.Abs (h);
                        // Negative stored height signals a vertically flipped image.
                        info.VerticalFlip = h < 0;
                        info.BPP = reader.ReadInt32();
                        info.ClippedPixel = reader.ReadInt32();
                        info.SamplingFlags = reader.ReadInt32();
                        info.QuantumizedBits = reader.ReadUInt64();
                        info.AllottedBits = reader.ReadUInt64();
                        info.BlockingDegree = reader.ReadInt32();
                        info.LappedBlock = reader.ReadInt32();
                        info.FrameTransform = reader.ReadInt32();
                        info.FrameDegree = reader.ReadInt32();
                        break;
                    }
                    header_size -= (int)section.Length;
                    reader.BaseStream.Seek (section.Length, SeekOrigin.Current);
                }
                return info;
            }
        }

        // Decodes the pixel data, scanning sections for an optional palette until
        // the image frame section is reached.
        public override ImageData Read (Stream stream, ImageMetaData info)
        {
            var meta = info as EriMetaData;
            if (null == meta)
                throw new ArgumentException ("EriFormat.Read should be supplied with EriMetaData", "info");

            stream.Position = meta.StreamPos;
            using (var input = new EriFile (stream))
            {
                Color[] palette = null;
                for (;;) // ReadSection throws an exception in case of EOF
                {
                    var section = input.ReadSection();
                    if ("Stream " == section.Id)
                        continue;
                    if ("ImageFrm" == section.Id)
                        break;
                    if ("Palette " == section.Id && info.BPP <= 8 && section.Length <= 0x400)
                    {
                        palette = ReadPalette (stream, (int)section.Length);
                        continue;
                    }
                    input.BaseStream.Seek (section.Length, SeekOrigin.Current);
                }
                var reader = new EriReader (stream, meta, palette);
                reader.DecodeImage();
                return ImageData.Create (info, reader.Format, reader.Palette, reader.Data, reader.Stride);
            }
        }

        // Reads up to 0x400 bytes of BGRX palette entries; entries beyond
        // palette_length remain zero-initialized (black).
        private Color[] ReadPalette (Stream input, int palette_length)
        {
            var palette_data = new byte[0x400];
            if (palette_length > palette_data.Length)
                throw new InvalidFormatException();
            if (palette_length != input.Read (palette_data, 0, palette_length))
                throw new InvalidFormatException();
            var colors = new Color[256];
            for (int i = 0; i < 256; ++i)
            {
                // Stored byte order is B, G, R, (reserved).
                colors[i] = Color.FromRgb (palette_data[i*4+2], palette_data[i*4+1], palette_data[i*4]);
            }
            return colors;
        }

        public override void Write (Stream file, ImageData image)
        {
            throw new NotImplementedException ("EriFormat.Write not implemented");
        }
    }
}
// // Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. // // Microsoft Bot Framework: http://botframework.com // // Bot Builder SDK GitHub: // https://github.com/Microsoft/BotBuilder // // Copyright (c) Microsoft Corporation // All rights reserved. // // MIT License: // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Bot.Builder.Internals.Fibers;

namespace Microsoft.Bot.Builder.Scorables.Internals
{
    /// <summary>
    /// Binds a method or delegate to arguments supplied by an <see cref="IResolver"/>,
    /// producing an invocable <see cref="IBinding"/>.
    /// </summary>
    public interface IBinder
    {
        bool TryBind(MethodBase method, IResolver resolver, out IBinding binding);
        bool TryBind(Delegate lambda, IResolver resolver, out IBinding binding);
        bool TryBind<R>(MethodInfo method, IResolver resolver, out IBinding<R> binding);
        bool TryBind<R>(Delegate lambda, IResolver resolver, out IBinding<R> binding);
    }

    public static partial class Extensions
    {
        // Weakly caches reflection results per method so repeated binding does
        // not re-run GetParameters().
        private static readonly ConditionalWeakTable<MethodBase, IReadOnlyList<ParameterInfo>> ParametersByMethod = new ConditionalWeakTable<MethodBase, IReadOnlyList<ParameterInfo>>();

        public static IReadOnlyList<ParameterInfo> CachedParameters(this MethodBase method)
        {
            return ParametersByMethod.GetValue(method, m => m.GetParameters());
        }

        private static readonly ConditionalWeakTable<MethodBase, IReadOnlyList<Type>> ParameterTypesByMethod = new ConditionalWeakTable<MethodBase, IReadOnlyList<Type>>();

        public static IReadOnlyList<Type> CachedParameterTypes(this MethodBase method)
        {
            return ParameterTypesByMethod.GetValue(method, m => m.GetParameters().ToList(p => p.ParameterType));
        }
    }

    /// <summary>
    /// Default <see cref="IBinder"/>: resolves the target instance and each
    /// parameter through the resolver, then wraps them in a Binding.
    /// </summary>
    public sealed class Binder : IBinder
    {
        public static readonly IBinder Instance = new Binder();

        private Binder()
        {
        }

        // True when the method's return type is R itself or Task<R>.
        public static bool TryResolveReturnType<R>(MethodInfo method)
        {
            var type = method.ReturnType;
            if (typeof(R).IsAssignableFrom(type))
            {
                return true;
            }

            if (type.IsGenericType)
            {
                var definition = type.GetGenericTypeDefinition();
                if (definition == typeof(Task<>))
                {
                    var arguments = type.GetGenericArguments();
                    if (typeof(R).IsAssignableFrom(arguments[0]))
                    {
                        return true;
                    }
                }
            }

            return false;
        }

        // Picks the invocation target: the supplied target if it is compatible,
        // null for static methods, otherwise resolve the declaring type.
        public static bool TryResolveInstance(IResolver resolver, MethodBase method, object target, out object instance)
        {
            if (target != null)
            {
                var type = target.GetType();
                if (method.DeclaringType.IsAssignableFrom(type))
                {
                    instance = target;
                    return true;
                }
            }

            if (method.IsStatic)
            {
                instance = null;
                return true;
            }

            return resolver.TryResolve(method.DeclaringType, null, out instance);
        }

        // Resolution order for one parameter: entity-attribute name, then the
        // parameter's own name, then by type alone.
        public static bool TryResolveArgument(IResolver resolver, ParameterInfo parameter, out object argument)
        {
            var type = parameter.ParameterType;

            var entity = parameter.GetCustomAttribute<EntityAttribute>();
            if (entity != null)
            {
                if (resolver.TryResolve(type, entity.Name, out argument))
                {
                    return true;
                }
            }

            if (resolver.TryResolve(type, parameter.Name, out argument))
            {
                return true;
            }

            return resolver.TryResolve(type, null, out argument);
        }

        // Resolves every parameter, allocating the argument array lazily; fails
        // (and leaves arguments null) as soon as any parameter cannot be resolved.
        public static bool TryResolveArguments(IResolver resolver, MethodBase method, out object[] arguments)
        {
            var parameters = method.CachedParameters();
            if (parameters.Count == 0)
            {
                arguments = Array.Empty<object>();
                return true;
            }

            arguments = null;
            for (int index = 0; index < parameters.Count; ++index)
            {
                var parameter = parameters[index];

                object argument;
                if (!TryResolveArgument(resolver, parameter, out argument))
                {
                    arguments = null;
                    return false;
                }

                if (arguments == null)
                {
                    arguments = new object[parameters.Count];
                }

                arguments[index] = argument;
            }

            return arguments != null;
        }

        public static bool TryBind(MethodBase method, object target, IResolver resolver, out IBinding binding)
        {
            object instance;
            if (!TryResolveInstance(resolver, method, target, out instance))
            {
                binding = null;
                return false;
            }

            object[] arguments;
            if (!TryResolveArguments(resolver, method, out arguments))
            {
                binding = null;
                return false;
            }

            binding = new Binding(method, instance, arguments);
            return true;
        }

        // Typed variant: additionally requires the return type to be compatible
        // with R (directly or via Task<R>).
        public static bool TryBind<R>(MethodInfo method, object target, IResolver resolver, out IBinding<R> binding)
        {
            if (!TryResolveReturnType<R>(method))
            {
                binding = null;
                return false;
            }

            object instance;
            if (!TryResolveInstance(resolver, method, target, out instance))
            {
                binding = null;
                return false;
            }

            object[] arguments;
            if (!TryResolveArguments(resolver, method, out arguments))
            {
                binding = null;
                return false;
            }

            binding = new Binding<R>(method, instance, arguments);
            return true;
        }

        bool IBinder.TryBind(MethodBase method, IResolver resolver, out IBinding binding)
        {
            return TryBind(method, null, resolver, out binding);
        }

        bool IBinder.TryBind(Delegate lambda, IResolver resolver, out IBinding binding)
        {
            // Delegates bind through their underlying method and captured target.
            return TryBind(lambda.Method, lambda.Target, resolver, out binding);
        }

        bool IBinder.TryBind<R>(MethodInfo method, IResolver resolver, out IBinding<R> binding)
        {
            return TryBind(method, null, resolver, out binding);
        }

        bool IBinder.TryBind<R>(Delegate lambda, IResolver resolver, out IBinding<R> binding)
        {
            return TryBind(lambda.Method, lambda.Target, resolver, out binding);
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq.Expressions;
using Marten.Linq.Fields;
using Marten.Services;
using Marten.Testing.Documents;
using NpgsqlTypes;
using Shouldly;
using Weasel.Postgresql;
using Xunit;

namespace Marten.Testing.Linq.Fields
{
    /// <summary>
    /// Unit tests for DuplicatedField: generated table column, upsert argument,
    /// SQL locator, enum storage, pg-type overrides, schema names, and
    /// serializer casing.
    /// </summary>
    public class DuplicatedFieldTests
    {
        // Shared field under test: User.FirstName, duplicated with
        // integer enum storage configured store-wide.
        private DuplicatedField theField;

        public DuplicatedFieldTests()
        {
            var storeOptions = new StoreOptions{};
            storeOptions.Advanced.DuplicatedFieldEnumStorage = EnumStorage.AsInteger;

            theField = DuplicatedField.For<User>(storeOptions, x => x.FirstName);
        }

        [Fact]
        public void create_table_column_for_non_indexed_search()
        {
            // Member name is snake-cased into the column name; strings map to varchar.
            var column = theField.ToColumn();
            column.Name.ShouldBe("first_name");
            column.Type.ShouldBe("varchar");
        }

        [Fact]
        public void upsert_argument_defaults()
        {
            // Upsert argument name is the column name with an "arg_" prefix.
            theField.UpsertArgument.Arg.ShouldBe("arg_first_name");
            theField.UpsertArgument.Column.ShouldBe("first_name");
            theField.UpsertArgument.PostgresType.ShouldBe("varchar");
        }

        [Fact]
        public void sql_locator_with_default_column_name()
        {
            // "d" is the document-table alias used in generated SQL.
            theField.TypedLocator.ShouldBe("d.first_name");
        }

        [Fact]
        public void sql_locator_with_custom_column_name()
        {
            // Overriding ColumnName must flow through to the locator.
            theField.ColumnName = "x_first_name";
            theField.TypedLocator.ShouldBe("d.x_first_name");
        }

        [Fact]
        public void enum_field()
        {
            // With string enum storage, an integer constant from a compiled query
            // is translated back to the enum member's name.
            var options = new StoreOptions();
            options.Serializer(new JsonNetSerializer
            {
                EnumStorage = EnumStorage.AsString
            });

            var field = DuplicatedField.For<Target>(options, x => x.Color);
            field.UpsertArgument.DbType.ShouldBe(NpgsqlDbType.Varchar);
            field.UpsertArgument.PostgresType.ShouldBe("varchar");

            var constant = Expression.Constant((int)Colors.Blue);

            field.GetValueForCompiledQueryParameter(constant).ShouldBe(Colors.Blue.ToString());
        }

        [Fact]
        public void enum_field_allows_null()
        {
            // A null constant must pass through unchanged rather than throw.
            var options = new StoreOptions();
            options.Serializer(new JsonNetSerializer
            {
                EnumStorage = EnumStorage.AsString
            });

            var field = DuplicatedField.For<Target>(options, x => x.Color);
            field.UpsertArgument.DbType.ShouldBe(NpgsqlDbType.Varchar);
            field.UpsertArgument.PostgresType.ShouldBe("varchar");

            var constant = Expression.Constant(null);

            field.GetValueForCompiledQueryParameter(constant).ShouldBe(null);
        }

        // Enum storage mode controls whether the update fragment casts to integer
        // or copies the raw JSON string.
        [Theory]
        [InlineData(EnumStorage.AsInteger, "color = CAST(data ->> 'Color' as integer)")]
        [InlineData(EnumStorage.AsString, "color = data ->> 'Color'")]
        public void storage_is_set_when_passed_in(EnumStorage storageMode, string expectedUpdateFragment)
        {
            var storeOptions = new StoreOptions();
            storeOptions.Serializer(new JsonNetSerializer
            {
                EnumStorage = storageMode
            });

            var field = DuplicatedField.For<Target>(storeOptions, x => x.Color);
            field.UpdateSqlFragment().ShouldBe(expectedUpdateFragment);
        }

        // Strings default to varchar; no cast is emitted regardless of pg type.
        [Theory]
        [InlineData(null, "string = data ->> 'String'")]
        [InlineData("varchar", "string = data ->> 'String'")]
        [InlineData("text", "string = data ->> 'String'")]
        public void pg_type_is_used_for_string(string pgType, string expectedUpdateFragment)
        {
            var field = DuplicatedField.For<Target>(new StoreOptions(), x => x.String);
            field.PgType = pgType ?? field.PgType;

            field.UpdateSqlFragment().ShouldBe(expectedUpdateFragment);
            var expectedPgType = pgType ?? "varchar";
            field.PgType.ShouldBe(expectedPgType);
            field.UpsertArgument.PostgresType.ShouldBe(expectedPgType);
            field.DbType.ShouldBe(NpgsqlDbType.Text);
        }

        // Guids default to uuid and always cast; overriding PgType changes the cast target.
        [Theory]
        [InlineData(null, "user_id = CAST(data ->> 'UserId' as uuid)")]
        [InlineData("uuid", "user_id = CAST(data ->> 'UserId' as uuid)")]
        [InlineData("text", "user_id = CAST(data ->> 'UserId' as text)")]
        public void pg_type_is_used_for_guid(string pgType, string expectedUpdateFragment)
        {
            var field = DuplicatedField.For<Target>(new StoreOptions(), x => x.UserId);
            field.PgType = pgType ?? field.PgType;

            field.UpdateSqlFragment().ShouldBe(expectedUpdateFragment);
            var expectedPgType = pgType ?? "uuid";
            field.PgType.ShouldBe(expectedPgType);
            field.UpsertArgument.PostgresType.ShouldBe(expectedPgType);
            field.DbType.ShouldBe(NpgsqlDbType.Uuid);
        }

        // String arrays default to varchar[]; the fragment unpacks the JSON array.
        [Theory]
        [InlineData(null, "tags_array = CAST(ARRAY(SELECT jsonb_array_elements_text(CAST(data ->> 'TagsArray' as jsonb))) as varchar[])")]
        [InlineData("varchar[]", "tags_array = CAST(ARRAY(SELECT jsonb_array_elements_text(CAST(data ->> 'TagsArray' as jsonb))) as varchar[])")]
        [InlineData("text[]", "tags_array = CAST(ARRAY(SELECT jsonb_array_elements_text(CAST(data ->> 'TagsArray' as jsonb))) as text[])")]
        public void pg_type_is_used_for_string_array(string pgType, string expectedUpdateFragment)
        {
            var field = DuplicatedField.For<Target>(new StoreOptions(), x => x.TagsArray);
            field.PgType = pgType ?? field.PgType;

            field.UpdateSqlFragment().ShouldBe(expectedUpdateFragment);
            var expectedPgType = pgType ?? "varchar[]";
            field.PgType.ShouldBe(expectedPgType);
            field.UpsertArgument.PostgresType.ShouldBe(expectedPgType);
            field.DbType.ShouldBe(NpgsqlDbType.Array | NpgsqlDbType.Text);
        }

        // List<string> defaults to jsonb (unlike arrays); an array pg type
        // switches the fragment to the unpacking form.
        [Theory]
        [InlineData(null, "tags_list = CAST(data ->> 'TagsList' as jsonb)")]
        [InlineData("varchar[]", "tags_list = CAST(ARRAY(SELECT jsonb_array_elements_text(CAST(data ->> 'TagsList' as jsonb))) as varchar[])")]
        [InlineData("text[]", "tags_list = CAST(ARRAY(SELECT jsonb_array_elements_text(CAST(data ->> 'TagsList' as jsonb))) as text[])")]
        public void pg_type_is_used_for_string_list(string pgType, string expectedUpdateFragment)
        {
            var field = DuplicatedField.For<ListTarget>(new StoreOptions(), x => x.TagsList);
            field.PgType = pgType ?? field.PgType;

            field.UpdateSqlFragment().ShouldBe(expectedUpdateFragment);
            var expectedPgType = pgType ?? "jsonb";
            field.PgType.ShouldBe(expectedPgType);
            field.UpsertArgument.PostgresType.ShouldBe(expectedPgType);
            field.DbType.ShouldBe(NpgsqlDbType.Array | NpgsqlDbType.Text);
        }

        // The configured database schema qualifies the immutable-timestamp helper function.
        [Theory]
        [InlineData(null, "date = public.mt_immutable_timestamp(data ->> 'Date')")]
        [InlineData("myergen", "date = myergen.mt_immutable_timestamp(data ->> 'Date')")]
        public void store_options_schema_name_is_used_for_timestamp(string schemaName, string expectedUpdateFragment)
        {
            var storeOptions = schemaName != null ? new StoreOptions {DatabaseSchemaName = schemaName} : new StoreOptions();
            var field = DuplicatedField.For<Target>(storeOptions, x => x.Date);

            field.UpdateSqlFragment().ShouldBe(expectedUpdateFragment);
        }

        // Same as above for the timestamptz variant (DateTimeOffset member).
        [Theory]
        [InlineData(null, "date_offset = public.mt_immutable_timestamptz(data ->> 'DateOffset')")]
        [InlineData("myergen", "date_offset = myergen.mt_immutable_timestamptz(data ->> 'DateOffset')")]
        public void store_options_schema_name_is_used_for_timestamptz(string schemaName, string expectedUpdateFragment)
        {
            var storeOptions = schemaName != null ? new StoreOptions {DatabaseSchemaName = schemaName} : new StoreOptions();
            var field = DuplicatedField.For<Target>(storeOptions, x => x.DateOffset);

            field.UpdateSqlFragment().ShouldBe(expectedUpdateFragment);
        }

        // The serializer's member casing decides the JSON key used in the fragment.
        [Theory]
        [InlineData(Casing.Default, "other_id = CAST(data ->> 'OtherId' as uuid)")]
        [InlineData(Casing.CamelCase, "other_id = CAST(data ->> 'otherId' as uuid)")]
        public void store_options_serializer_with_casing(Casing casing, string expectedUpdateFragment)
        {
            var storeOptions = new StoreOptions();
            storeOptions.UseDefaultSerialization(casing:casing);
            var field = DuplicatedField.For<DuplicateFieldCasingTestDoc>(storeOptions, x => x.OtherId);

            field.UpdateSqlFragment().ShouldBe(expectedUpdateFragment);
        }

        // Local fixture type: document with a List<string> member.
        private class ListTarget
        {
            public List<string> TagsList { get; set; }
        }

        // Local fixture type for the casing test.
        private class DuplicateFieldCasingTestDoc
        {
            public Guid Id { get; set; }
            public Guid OtherId { get; set; }
        }
    }
}
using System;
using System.Runtime.InteropServices;

// ReSharper disable InconsistentNaming -- those names are from Windows/COM

namespace Tulpep.ActiveDirectoryObjectPicker
{
    /// <summary>
    /// This structure is used as a parameter in OLE functions and methods that require data format information.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    internal struct FORMATETC
    {
        public int cfFormat;
        public IntPtr ptd;
        public uint dwAspect;
        public int lindex;
        public uint tymed;
    }

    /// <summary>
    /// The STGMEDIUM structure is a generalized global memory handle used for data transfer operations by the IDataObject
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    internal struct STGMEDIUM
    {
        public uint tymed;
        public IntPtr hGlobal;
        /* Presumably this is supposed to be an Object but according to a comment by xC0000005 on the
         * DSOP_INIT_INFO MSDN page, there is a bug in Windows whereby the returned object doesn't
         * support IUnknown, causing a E_NOT_IMPL error from .NET.
         *
         * Changing it to IntPtr makes it opaque to .NET and prevents the error */
        public IntPtr pUnkForRelease;
    }

    /// <summary>
    /// The DSOP_INIT_INFO structure contains data required to initialize an object picker dialog box.
    /// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet=CharSet.Auto)]
    internal struct DSOP_INIT_INFO
    {
        public uint cbSize;
        [MarshalAs(UnmanagedType.LPWStr)]
        public string pwzTargetComputer;
        public uint cDsScopeInfos;
        // Pointer to an unmanaged array of DSOP_SCOPE_INIT_INFO, marshalled manually.
        public IntPtr aDsScopeInfos;
        public uint flOptions;
        public uint cAttributesToFetch;
        public IntPtr apwzAttributeNames;
    }

    /// <summary>
    /// The DSOP_SCOPE_INIT_INFO structure describes one or more scope types that have the same attributes.
    /// A scope type is a type of location, for example a domain, computer, or Global Catalog,
    /// from which the user can select objects.
    /// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet=CharSet.Auto), Serializable]
    internal struct DSOP_SCOPE_INIT_INFO
    {
        public uint cbSize;
        public uint flType;
        public uint flScope;
        public DSOP_FILTER_FLAGS FilterFlags;
        [MarshalAs(UnmanagedType.LPWStr)]
        public string pwzDcName;
        [MarshalAs(UnmanagedType.LPWStr)]
        public string pwzADsPath;
        public uint hr;
    }

    /// <summary>
    /// The DSOP_UPLEVEL_FILTER_FLAGS structure contains flags that indicate the filters to use for an up-level scope.
    /// An up-level scope is a scope that supports the ADSI LDAP provider.
    /// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet=CharSet.Auto)]
    internal struct DSOP_UPLEVEL_FILTER_FLAGS
    {
        public uint flBothModes;
        public uint flMixedModeOnly;
        public uint flNativeModeOnly;
    }

    /// <summary>
    /// The DSOP_FILTER_FLAGS structure contains flags that indicate the types of objects presented to the user
    /// for a specified scope or scopes.
    /// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet=CharSet.Auto)]
    internal struct DSOP_FILTER_FLAGS
    {
        public DSOP_UPLEVEL_FILTER_FLAGS Uplevel;
        public uint flDownlevel;
    }

    /// <summary>
    /// The DS_SELECTION structure contains data about an object the user selected from an object picker dialog box.
    /// The DS_SELECTION_LIST structure contains an array of DS_SELECTION structures.
    /// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet=CharSet.Auto)]
    internal struct DS_SELECTION
    {
        [MarshalAs(UnmanagedType.LPWStr)]
        public string pwzName;
        [MarshalAs(UnmanagedType.LPWStr)]
        public string pwzADsPath;
        [MarshalAs(UnmanagedType.LPWStr)]
        public string pwzClass;
        [MarshalAs(UnmanagedType.LPWStr)]
        public string pwzUPN;
        public IntPtr pvarFetchedAttributes;
        public uint flScopeType;
    }

    /// <summary>
    /// The DS_SELECTION_LIST structure contains data about the objects the user selected from an object picker dialog box.
    /// This structure is supplied by the IDataObject interface supplied by the IDsObjectPicker::InvokeDialog method
    /// in the CFSTR_DSOP_DS_SELECTION_LIST data format.
    /// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet=CharSet.Auto)]
    internal struct DS_SELECTION_LIST
    {
        public uint cItems;
        public uint cFetchedAttributes;
        // NOTE(review): the native struct carries a variable-length inline array;
        // presumably the caller walks it manually from the native buffer — confirm
        // against the code that reads this structure.
        public DS_SELECTION[] aDsSelection;
    }

    /// <summary>
    /// Flags that indicate the scope types described by this structure. You can combine multiple scope types
    /// if all specified scopes use the same settings.
    /// </summary>
    internal class DSOP_SCOPE_TYPE_FLAGS
    {
        public const uint DSOP_SCOPE_TYPE_TARGET_COMPUTER = 0x00000001;
        public const uint DSOP_SCOPE_TYPE_UPLEVEL_JOINED_DOMAIN = 0x00000002;
        public const uint DSOP_SCOPE_TYPE_DOWNLEVEL_JOINED_DOMAIN = 0x00000004;
        public const uint DSOP_SCOPE_TYPE_ENTERPRISE_DOMAIN = 0x00000008;
        public const uint DSOP_SCOPE_TYPE_GLOBAL_CATALOG = 0x00000010;
        public const uint DSOP_SCOPE_TYPE_EXTERNAL_UPLEVEL_DOMAIN = 0x00000020;
        public const uint DSOP_SCOPE_TYPE_EXTERNAL_DOWNLEVEL_DOMAIN = 0x00000040;
        public const uint DSOP_SCOPE_TYPE_WORKGROUP = 0x00000080;
        public const uint DSOP_SCOPE_TYPE_USER_ENTERED_UPLEVEL_SCOPE = 0x00000100;
        public const uint DSOP_SCOPE_TYPE_USER_ENTERED_DOWNLEVEL_SCOPE = 0x00000200;
    }

    /// <summary>
    /// Flags that determine the object picker options.
    /// </summary>
    internal class DSOP_INIT_INFO_FLAGS
    {
        public const uint DSOP_FLAG_MULTISELECT = 0x00000001;
        public const uint DSOP_FLAG_SKIP_TARGET_COMPUTER_DC_CHECK = 0x00000002;
    }

    /// <summary>
    /// Flags that indicate the format used to return ADsPath for objects selected from this scope.
    /// The flScope member can also indicate the initial scope displayed in the Look in drop-down list.
    /// </summary>
    internal class DSOP_SCOPE_INIT_INFO_FLAGS
    {
        public const uint DSOP_SCOPE_FLAG_STARTING_SCOPE =0x00000001;
        public const uint DSOP_SCOPE_FLAG_WANT_PROVIDER_WINNT =0x00000002;
        public const uint DSOP_SCOPE_FLAG_WANT_PROVIDER_LDAP =0x00000004;
        public const uint DSOP_SCOPE_FLAG_WANT_PROVIDER_GC =0x00000008;
        public const uint DSOP_SCOPE_FLAG_WANT_SID_PATH =0x00000010;
        public const uint DSOP_SCOPE_FLAG_WANT_DOWNLEVEL_BUILTIN_PATH =0x00000020;
        public const uint DSOP_SCOPE_FLAG_DEFAULT_FILTER_USERS =0x00000040;
        public const uint DSOP_SCOPE_FLAG_DEFAULT_FILTER_GROUPS =0x00000080;
        public const uint DSOP_SCOPE_FLAG_DEFAULT_FILTER_COMPUTERS =0x00000100;
        public const uint DSOP_SCOPE_FLAG_DEFAULT_FILTER_CONTACTS =0x00000200;
        public const uint DSOP_SCOPE_FLAG_DEFAULT_FILTER_SERVICE_ACCOUNTS =0x00000400; // added in Windows SDK 7
    }

    /// <summary>
    /// Filter flags to use for an up-level scope, regardless of whether it is a mixed or native mode domain.
    /// </summary>
    internal class DSOP_FILTER_FLAGS_FLAGS
    {
        public const uint DSOP_FILTER_INCLUDE_ADVANCED_VIEW = 0x00000001;
        public const uint DSOP_FILTER_USERS = 0x00000002;
        public const uint DSOP_FILTER_BUILTIN_GROUPS = 0x00000004;
        public const uint DSOP_FILTER_WELL_KNOWN_PRINCIPALS = 0x00000008;
        public const uint DSOP_FILTER_UNIVERSAL_GROUPS_DL = 0x00000010;
        public const uint DSOP_FILTER_UNIVERSAL_GROUPS_SE = 0x00000020;
        public const uint DSOP_FILTER_GLOBAL_GROUPS_DL = 0x00000040;
        public const uint DSOP_FILTER_GLOBAL_GROUPS_SE = 0x00000080;
        public const uint DSOP_FILTER_DOMAIN_LOCAL_GROUPS_DL = 0x00000100;
        public const uint DSOP_FILTER_DOMAIN_LOCAL_GROUPS_SE = 0x00000200;
        public const uint DSOP_FILTER_CONTACTS = 0x00000400;
        public const uint DSOP_FILTER_COMPUTERS = 0x00000800;
        public const uint DSOP_FILTER_SERVICE_ACCOUNTS =0x00001000; // added in Windows SDK 7
    }

    /// <summary>
    /// Contains the filter flags to use for down-level scopes
    /// </summary>
    internal class DSOP_DOWNLEVEL_FLAGS
    {
        // Note that every down-level filter value carries the 0x80000000 bit in
        // addition to its distinguishing low bit.
        public const uint DSOP_DOWNLEVEL_FILTER_USERS = 0x80000001;
        public const uint DSOP_DOWNLEVEL_FILTER_LOCAL_GROUPS = 0x80000002;
        public const uint DSOP_DOWNLEVEL_FILTER_GLOBAL_GROUPS = 0x80000004;
        public const uint DSOP_DOWNLEVEL_FILTER_COMPUTERS = 0x80000008;
        public const uint DSOP_DOWNLEVEL_FILTER_WORLD = 0x80000010;
        public const uint DSOP_DOWNLEVEL_FILTER_AUTHENTICATED_USER = 0x80000020;
        public const uint DSOP_DOWNLEVEL_FILTER_ANONYMOUS = 0x80000040;
        public const uint DSOP_DOWNLEVEL_FILTER_BATCH = 0x80000080;
        public const uint DSOP_DOWNLEVEL_FILTER_CREATOR_OWNER = 0x80000100;
        public const uint DSOP_DOWNLEVEL_FILTER_CREATOR_GROUP = 0x80000200;
        public const uint DSOP_DOWNLEVEL_FILTER_DIALUP = 0x80000400;
        public const uint DSOP_DOWNLEVEL_FILTER_INTERACTIVE = 0x80000800;
        public const uint DSOP_DOWNLEVEL_FILTER_NETWORK = 0x80001000;
        public const uint DSOP_DOWNLEVEL_FILTER_SERVICE = 0x80002000;
        public const uint DSOP_DOWNLEVEL_FILTER_SYSTEM = 0x80004000;
        public const uint DSOP_DOWNLEVEL_FILTER_EXCLUDE_BUILTIN_GROUPS = 0x80008000;
        public const uint DSOP_DOWNLEVEL_FILTER_TERMINAL_SERVER = 0x80010000;
        public const uint DSOP_DOWNLEVEL_FILTER_ALL_WELLKNOWN_SIDS = 0x80020000;
        public const uint DSOP_DOWNLEVEL_FILTER_LOCAL_SERVICE = 0x80040000;
        public const uint DSOP_DOWNLEVEL_FILTER_NETWORK_SERVICE = 0x80080000;
        public const uint DSOP_DOWNLEVEL_FILTER_REMOTE_LOGON = 0x80100000;
        public const uint DSOP_DOWNLEVEL_FILTER_INTERNET_USER = 0x80200000; // added in Windows SDK 6
        public const uint DSOP_DOWNLEVEL_FILTER_OWNER_RIGHTS = 0x80400000; // added in Windows SDK 6
        public const uint DSOP_DOWNLEVEL_FILTER_SERVICES = 0x80800000; // added in Windows SDK 6
        public const uint DSOP_DOWNLEVEL_FILTER_LOCAL_LOGON = 0x81000000; // added in Windows SDK 7
        public const uint DSOP_DOWNLEVEL_FILTER_THIS_ORG_CERT = 0x82000000; // added in Windows SDK 7
        public const uint DSOP_DOWNLEVEL_FILTER_IIS_APP_POOL = 0x84000000; // added in Windows SDK 7
    }

    /// <summary>
    /// The IDsObjectPicker.InvokeDialog result
    /// </summary>
    internal class HRESULT
    {
        public const int S_OK = 0; // The method succeeded.
        public const int S_FALSE = 1; // The user cancelled the dialog box. ppdoSelections receives NULL.
        public const int E_NOTIMPL = unchecked((int)0x80004001); // ?
    }

    /// <summary>
    /// The CFSTR_DSOP_DS_SELECTION_LIST clipboard format is provided by the IDataObject obtained by calling IDsObjectPicker.InvokeDialog
    /// </summary>
    internal class CLIPBOARD_FORMAT
    {
        public const string CFSTR_DSOP_DS_SELECTION_LIST = "CFSTR_DSOP_DS_SELECTION_LIST";
    }

    /// <summary>
    /// The TYMED enumeration values indicate the type of storage medium being used in a data transfer.
    /// </summary>
    internal enum TYMED
    {
        TYMED_HGLOBAL = 1,
        TYMED_FILE = 2,
        TYMED_ISTREAM = 4,
        TYMED_ISTORAGE = 8,
        TYMED_GDI = 16,
        TYMED_MFPICT = 32,
        TYMED_ENHMF = 64,
        TYMED_NULL = 0
    }

    /// <summary>
    /// The DVASPECT enumeration values specify the desired data or view aspect of the object when drawing or getting data.
    /// </summary>
    internal enum DVASPECT
    {
        DVASPECT_CONTENT = 1,
        DVASPECT_THUMBNAIL = 2,
        DVASPECT_ICON = 4,
        DVASPECT_DOCPRINT = 8
    }

    /// <summary>
    /// Directory name types for use with IADsNameTranslate
    /// </summary>
    enum ADS_NAME_TYPE_ENUM
    {
        ADS_NAME_TYPE_1779 = 1,
        ADS_NAME_TYPE_CANONICAL = 2,
        ADS_NAME_TYPE_NT4 = 3,
        ADS_NAME_TYPE_DISPLAY = 4,
        ADS_NAME_TYPE_DOMAIN_SIMPLE = 5,
        ADS_NAME_TYPE_ENTERPRISE_SIMPLE = 6,
        ADS_NAME_TYPE_GUID = 7,
        ADS_NAME_TYPE_UNKNOWN = 8,
        ADS_NAME_TYPE_USER_PRINCIPAL_NAME = 9,
        ADS_NAME_TYPE_CANONICAL_EX = 10,
        ADS_NAME_TYPE_SERVICE_PRINCIPAL_NAME = 11,
        ADS_NAME_TYPE_SID_OR_SID_HISTORY_NAME = 12,
    }
}
// ReSharper restore InconsistentNaming
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Xunit;

namespace System.Linq.Expressions.Tests
{
    /// <summary>
    /// Verifies Expression.Not (bitwise complement, lifted) over every nullable
    /// integral type, across several lambda nesting/invocation shapes and both
    /// the compiler and interpreter backends.
    /// </summary>
    public static class LambdaUnaryNotNullableTests
    {
        #region Test methods

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableByteTest(bool useInterpreter)
        {
            foreach (byte? value in new byte?[] { null, 0, 1, byte.MaxValue })
            {
                VerifyUnaryNotNullableByte(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableIntTest(bool useInterpreter)
        {
            foreach (int? value in new int?[] { null, 0, 1, -1, int.MinValue, int.MaxValue })
            {
                VerifyUnaryNotNullableInt(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableLongTest(bool useInterpreter)
        {
            foreach (long? value in new long?[] { null, 0, 1, -1, long.MinValue, long.MaxValue })
            {
                VerifyUnaryNotNullableLong(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableSByteTest(bool useInterpreter)
        {
            foreach (sbyte? value in new sbyte?[] { null, 0, 1, -1, sbyte.MinValue, sbyte.MaxValue })
            {
                VerifyUnaryNotNullableSByte(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableShortTest(bool useInterpreter)
        {
            foreach (short? value in new short?[] { null, 0, 1, -1, short.MinValue, short.MaxValue })
            {
                VerifyUnaryNotNullableShort(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableUIntTest(bool useInterpreter)
        {
            foreach (uint? value in new uint?[] { null, 0, 1, uint.MaxValue })
            {
                VerifyUnaryNotNullableUInt(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableULongTest(bool useInterpreter)
        {
            foreach (ulong? value in new ulong?[] { null, 0, 1, ulong.MaxValue })
            {
                VerifyUnaryNotNullableULong(value, useInterpreter);
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void CheckLambdaUnaryNotNullableUShortTest(bool useInterpreter)
        {
            foreach (ushort? value in new ushort?[] { null, 0, 1, ushort.MaxValue })
            {
                VerifyUnaryNotNullableUShort(value, useInterpreter);
            }
        }

        #endregion

        #region Test verifiers

        /// <summary>
        /// Shared driver for all per-type verifiers. Builds the same four
        /// expression-tree shapes the original per-type code built — an invoked
        /// lambda with the value baked in as a constant, a closure-returning
        /// lambda, a nested function generator, and a parameter-taking function
        /// generator — compiles each with the requested backend, and asserts
        /// every shape produces <paramref name="expected"/>.
        /// </summary>
        /// <param name="value">The nullable operand to complement.</param>
        /// <param name="expected">The caller-computed expected result of ~value.</param>
        /// <param name="useInterpreter">Whether to compile with the interpreter backend.</param>
        private static void VerifyUnaryNotNullable<T>(T? value, T? expected, bool useInterpreter) where T : struct
        {
            ParameterExpression p = Expression.Parameter(typeof(T?), "p");

            // parameter hard coded
            Expression<Func<T?>> e1 =
                Expression.Lambda<Func<T?>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<T?, T?>>(
                            Expression.Not(p),
                            new ParameterExpression[] { p }),
                        new Expression[] { Expression.Constant(value, typeof(T?)) }),
                    Enumerable.Empty<ParameterExpression>());
            Func<T?> f1 = e1.Compile(useInterpreter);

            // function generator that takes a parameter (inner lambda closes over p)
            Expression<Func<T?, Func<T?>>> e2 =
                Expression.Lambda<Func<T?, Func<T?>>>(
                    Expression.Lambda<Func<T?>>(
                        Expression.Not(p),
                        Enumerable.Empty<ParameterExpression>()),
                    new ParameterExpression[] { p });
            Func<T?, Func<T?>> f2 = e2.Compile(useInterpreter);

            // function generator
            Expression<Func<Func<T?, T?>>> e3 =
                Expression.Lambda<Func<Func<T?, T?>>>(
                    Expression.Invoke(
                        Expression.Lambda<Func<Func<T?, T?>>>(
                            Expression.Lambda<Func<T?, T?>>(
                                Expression.Not(p),
                                new ParameterExpression[] { p }),
                            Enumerable.Empty<ParameterExpression>()),
                        Enumerable.Empty<Expression>()),
                    Enumerable.Empty<ParameterExpression>());
            Func<T?, T?> f3 = e3.Compile(useInterpreter)();

            // parameter-taking function generator
            Expression<Func<Func<T?, T?>>> e4 =
                Expression.Lambda<Func<Func<T?, T?>>>(
                    Expression.Lambda<Func<T?, T?>>(
                        Expression.Not(p),
                        new ParameterExpression[] { p }),
                    Enumerable.Empty<ParameterExpression>());
            Func<Func<T?, T?>> f4 = e4.Compile(useInterpreter);

            Assert.Equal(expected, f1());
            Assert.Equal(expected, f2(value)());
            Assert.Equal(expected, f3(value));
            Assert.Equal(expected, f4()(value));
        }

        // Per-type wrappers preserve the original expected-value computations,
        // including the exact checked/unchecked casts of the original code.

        private static void VerifyUnaryNotNullableByte(byte? value, bool useInterpreter)
        {
            // ~0 == -1 would overflow a checked byte cast, hence unchecked.
            VerifyUnaryNotNullable(value, unchecked((byte?)~value), useInterpreter);
        }

        private static void VerifyUnaryNotNullableInt(int? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, ~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableLong(long? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, ~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableSByte(sbyte? value, bool useInterpreter)
        {
            // Complement of any sbyte stays in sbyte range, so a plain cast suffices.
            VerifyUnaryNotNullable(value, (sbyte?)~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableShort(short? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, (short?)~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableUInt(uint? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, ~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableULong(ulong? value, bool useInterpreter)
        {
            VerifyUnaryNotNullable(value, ~value, useInterpreter);
        }

        private static void VerifyUnaryNotNullableUShort(ushort? value, bool useInterpreter)
        {
            // ~ushort promotes to int and can be negative, hence unchecked.
            VerifyUnaryNotNullable(value, unchecked((ushort?)~value), useInterpreter);
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/******************************************************************************
 * This file is auto-generated from a template file by the GenerateTests.csx  *
 * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make    *
 * changes, please update the corresponding template and run according to the *
 * directions listed in the file.                                             *
 ******************************************************************************/

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

namespace JIT.HardwareIntrinsics.X86
{
    public static partial class Program
    {
        // Entry point for the CeilingScalar(Single) test. Runs every scenario the
        // current hardware supports; Load/LoadAligned scenarios additionally require
        // SSE (for Sse.LoadVector128 / Sse.LoadAlignedVector128).
        private static void CeilingScalarSingle()
        {
            var test = new SimpleBinaryOpTest__CeilingScalarSingle();

            if (test.IsSupported)
            {
                // Validates basic functionality works, using Unsafe.Read
                test.RunBasicScenario_UnsafeRead();

                if (Sse.IsSupported)
                {
                    // Validates basic functionality works, using Load
                    test.RunBasicScenario_Load();

                    // Validates basic functionality works, using LoadAligned
                    test.RunBasicScenario_LoadAligned();
                }

                // Validates calling via reflection works, using Unsafe.Read
                test.RunReflectionScenario_UnsafeRead();

                if (Sse.IsSupported)
                {
                    // Validates calling via reflection works, using Load
                    test.RunReflectionScenario_Load();

                    // Validates calling via reflection works, using LoadAligned
                    test.RunReflectionScenario_LoadAligned();
                }

                // Validates passing a static member works
                test.RunClsVarScenario();

                if (Sse.IsSupported)
                {
                    // Validates passing a static member works, using pinning and Load
                    test.RunClsVarScenario_Load();
                }

                // Validates passing a local works, using Unsafe.Read
                test.RunLclVarScenario_UnsafeRead();

                if (Sse.IsSupported)
                {
                    // Validates passing a local works, using Load
                    test.RunLclVarScenario_Load();

                    // Validates passing a local works, using LoadAligned
                    test.RunLclVarScenario_LoadAligned();
                }

                // Validates passing the field of a local class works
                test.RunClassLclFldScenario();

                if (Sse.IsSupported)
                {
                    // Validates passing the field of a local class works, using pinning and Load
                    test.RunClassLclFldScenario_Load();
                }

                // Validates passing an instance member of a class works
                test.RunClassFldScenario();

                if (Sse.IsSupported)
                {
                    // Validates passing an instance member of a class works, using pinning and Load
                    test.RunClassFldScenario_Load();
                }

                // Validates passing the field of a local struct works
                test.RunStructLclFldScenario();

                if (Sse.IsSupported)
                {
                    // Validates passing the field of a local struct works, using pinning and Load
                    test.RunStructLclFldScenario_Load();
                }

                // Validates passing an instance member of a struct works
                test.RunStructFldScenario();

                if (Sse.IsSupported)
                {
                    // Validates passing an instance member of a struct works, using pinning and Load
                    test.RunStructFldScenario_Load();
                }
            }
            else
            {
                // Validates we throw on unsupported hardware
                test.RunUnsupportedScenario();
            }

            if (!test.Succeeded)
            {
                throw new Exception("One or more scenarios did not complete as expected.");
            }
        }
    }

    /// <summary>
    /// Test harness for Sse41.CeilingScalar(Vector128&lt;Single&gt;, Vector128&lt;Single&gt;).
    /// The expected contract (see ValidateResult below): element 0 of the result is
    /// MathF.Ceiling of element 0 of the second operand; the remaining elements are
    /// copied unchanged from the first operand.
    /// </summary>
    public sealed unsafe class SimpleBinaryOpTest__CeilingScalarSingle
    {
        /// <summary>
        /// Pinned, aligned native-style buffers for the two inputs and the output.
        /// Each backing array is over-allocated to (alignment * 2) bytes so that the
        /// exposed pointers can be rounded up to the requested alignment boundary.
        /// </summary>
        private struct DataTable
        {
            private byte[] inArray1;
            private byte[] inArray2;
            private byte[] outArray;

            private GCHandle inHandle1;
            private GCHandle inHandle2;
            private GCHandle outHandle;

            private ulong alignment;

            // Copies the Single[] inputs into pinned byte buffers at the aligned offset.
            // Only 16 and 32 are valid alignments, and the data must fit within half of
            // the over-allocated buffer so the aligned pointer never runs off the end.
            public DataTable(Single[] inArray1, Single[] inArray2, Single[] outArray, int alignment)
            {
                int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>();
                int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Single>();
                int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Single>();
                if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
                {
                    throw new ArgumentException("Invalid value of alignment");
                }

                this.inArray1 = new byte[alignment * 2];
                this.inArray2 = new byte[alignment * 2];
                this.outArray = new byte[alignment * 2];

                this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
                this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
                this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);

                this.alignment = (ulong)alignment;

                // Note: only the input buffers are seeded; outArray's pinned buffer starts zeroed.
                Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
                Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Single, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
            }

            // Aligned views into the pinned buffers.
            public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
            public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
            public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);

            public void Dispose()
            {
                inHandle1.Free();
                inHandle2.Free();
                outHandle.Free();
            }

            // Rounds 'buffer' up to the next multiple of expectedAlignment (a power of two).
            private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
            {
                return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
            }
        }

        /// <summary>
        /// Value-type operand holder used to validate that struct fields (local and
        /// instance) flow correctly into the intrinsic.
        /// </summary>
        private struct TestStruct
        {
            public Vector128<Single> _fld1;
            public Vector128<Single> _fld2;

            // Fills the shared static _data1/_data2 arrays with fresh random singles and
            // copies them into the struct's vector fields.
            public static TestStruct Create()
            {
                var testStruct = new TestStruct();

                for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
                Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref testStruct._fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
                for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
                Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref testStruct._fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());

                return testStruct;
            }

            public void RunStructFldScenario(SimpleBinaryOpTest__CeilingScalarSingle testClass)
            {
                var result = Sse41.CeilingScalar(_fld1, _fld2);

                Unsafe.Write(testClass._dataTable.outArrayPtr, result);
                testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
            }

            public void RunStructFldScenario_Load(SimpleBinaryOpTest__CeilingScalarSingle testClass)
            {
                // Pins the struct fields so they can be re-read through Sse.LoadVector128.
                fixed (Vector128<Single>* pFld1 = &_fld1)
                fixed (Vector128<Single>* pFld2 = &_fld2)
                {
                    var result = Sse41.CeilingScalar(
                        Sse.LoadVector128((Single*)(pFld1)),
                        Sse.LoadVector128((Single*)(pFld2))
                    );

                    Unsafe.Write(testClass._dataTable.outArrayPtr, result);
                    testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
                }
            }
        }

        private static readonly int LargestVectorSize = 16;

        private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
        private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
        private static readonly int RetElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);

        // Scratch arrays shared by the static ctor, the instance ctor, and TestStruct.Create.
        private static Single[] _data1 = new Single[Op1ElementCount];
        private static Single[] _data2 = new Single[Op2ElementCount];

        private static Vector128<Single> _clsVar1;
        private static Vector128<Single> _clsVar2;

        private Vector128<Single> _fld1;
        private Vector128<Single> _fld2;

        private DataTable _dataTable;

        // Seeds the static (class-level) operands once per type initialization.
        static SimpleBinaryOpTest__CeilingScalarSingle()
        {
            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
        }

        // Seeds the instance fields, then re-randomizes the shared arrays so the
        // DataTable buffers hold operands independent of _fld1/_fld2.
        public SimpleBinaryOpTest__CeilingScalarSingle()
        {
            Succeeded = true;

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector128<Single>>());

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
            _dataTable = new DataTable(_data1, _data2, new Single[RetElementCount], LargestVectorSize);
        }

        public bool IsSupported => Sse41.IsSupported;

        public bool Succeeded { get; set; }

        public void RunBasicScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));

            var result = Sse41.CeilingScalar(
                Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr),
                Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr)
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));

            var result = Sse41.CeilingScalar(
                Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr)),
                Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));

            var result = Sse41.CeilingScalar(
                Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr)),
                Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));

            var result = typeof(Sse41).GetMethod(nameof(Sse41.CeilingScalar), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
                                     .Invoke(null, new object[] {
                                        Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr),
                                        Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr)
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));

            var result = typeof(Sse41).GetMethod(nameof(Sse41.CeilingScalar), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
                                     .Invoke(null, new object[] {
                                        Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr)),
                                        Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr))
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));

            var result = typeof(Sse41).GetMethod(nameof(Sse41.CeilingScalar), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
                                     .Invoke(null, new object[] {
                                        Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr)),
                                        Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr))
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunClsVarScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));

            var result = Sse41.CeilingScalar(
                _clsVar1,
                _clsVar2
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
        }

        public void RunClsVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));

            fixed (Vector128<Single>* pClsVar1 = &_clsVar1)
            fixed (Vector128<Single>* pClsVar2 = &_clsVar2)
            {
                var result = Sse41.CeilingScalar(
                    Sse.LoadVector128((Single*)(pClsVar1)),
                    Sse.LoadVector128((Single*)(pClsVar2))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
            }
        }

        public void RunLclVarScenario_UnsafeRead()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));

            var op1 = Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr);
            var op2 = Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr);
            var result = Sse41.CeilingScalar(op1, op2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(op1, op2, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));

            var op1 = Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr));
            var op2 = Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr));
            var result = Sse41.CeilingScalar(op1, op2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(op1, op2, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_LoadAligned()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));

            var op1 = Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr));
            var op2 = Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr));
            var result = Sse41.CeilingScalar(op1, op2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(op1, op2, _dataTable.outArrayPtr);
        }

        public void RunClassLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));

            var test = new SimpleBinaryOpTest__CeilingScalarSingle();
            var result = Sse41.CeilingScalar(test._fld1, test._fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        public void RunClassLclFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));

            var test = new SimpleBinaryOpTest__CeilingScalarSingle();

            fixed (Vector128<Single>* pFld1 = &test._fld1)
            fixed (Vector128<Single>* pFld2 = &test._fld2)
            {
                var result = Sse41.CeilingScalar(
                    Sse.LoadVector128((Single*)(pFld1)),
                    Sse.LoadVector128((Single*)(pFld2))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
            }
        }

        public void RunClassFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

            var result = Sse41.CeilingScalar(_fld1, _fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
        }

        public void RunClassFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));

            fixed (Vector128<Single>* pFld1 = &_fld1)
            fixed (Vector128<Single>* pFld2 = &_fld2)
            {
                var result = Sse41.CeilingScalar(
                    Sse.LoadVector128((Single*)(pFld1)),
                    Sse.LoadVector128((Single*)(pFld2))
                );

                Unsafe.Write(_dataTable.outArrayPtr, result);
                ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
            }
        }

        public void RunStructLclFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));

            var test = TestStruct.Create();
            var result = Sse41.CeilingScalar(test._fld1, test._fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        public void RunStructLclFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));

            var test = TestStruct.Create();
            // Locals of value type need no pinning: &test._fldN is already fixed for the call.
            var result = Sse41.CeilingScalar(
                Sse.LoadVector128((Single*)(&test._fld1)),
                Sse.LoadVector128((Single*)(&test._fld2))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        public void RunStructFldScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));

            var test = TestStruct.Create();
            test.RunStructFldScenario(this);
        }

        public void RunStructFldScenario_Load()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));

            var test = TestStruct.Create();
            test.RunStructFldScenario_Load(this);
        }

        // On hardware without SSE4.1 the intrinsic must throw PlatformNotSupportedException.
        public void RunUnsupportedScenario()
        {
            TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));

            bool succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                succeeded = true;
            }

            if (!succeeded)
            {
                Succeeded = false;
            }
        }

        private void ValidateResult(Vector128<Single> op1, Vector128<Single> op2, void* result, [CallerMemberName] string method = "")
        {
            Single[] inArray1 = new Single[Op1ElementCount];
            Single[] inArray2 = new Single[Op2ElementCount];
            Single[] outArray = new Single[RetElementCount];

            Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), op1);
            Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), op2);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Single>>());

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
        {
            Single[] inArray1 = new Single[Op1ElementCount];
            Single[] inArray2 = new Single[Op2ElementCount];
            Single[] outArray = new Single[RetElementCount];

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Single>>());
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Single>>());
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector128<Single>>());

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        // Bit-exact check of CeilingScalar semantics: element 0 must equal
        // MathF.Ceiling(right[0]); every other element must equal left[i].
        // Comparing Int32 bit patterns avoids NaN != NaN surprises.
        private void ValidateResult(Single[] left, Single[] right, Single[] result, [CallerMemberName] string method = "")
        {
            bool succeeded = true;

            if (BitConverter.SingleToInt32Bits(result[0]) != BitConverter.SingleToInt32Bits(MathF.Ceiling(right[0])))
            {
                succeeded = false;
            }
            else
            {
                for (var i = 1; i < RetElementCount; i++)
                {
                    if (BitConverter.SingleToInt32Bits(result[i]) != BitConverter.SingleToInt32Bits(left[i]))
                    {
                        succeeded = false;
                        break;
                    }
                }
            }

            if (!succeeded)
            {
                TestLibrary.TestFramework.LogInformation($"{nameof(Sse41)}.{nameof(Sse41.CeilingScalar)}<Single>(Vector128<Single>, Vector128<Single>): {method} failed:");
                TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
                TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
                TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
                TestLibrary.TestFramework.LogInformation(string.Empty);

                Succeeded = false;
            }
        }
    }
}
// $Id: RspList.java,v 1.3 2004/03/30 06:47:28 belaban Exp $
using Alachisoft.NCache.Common.Net;

namespace Alachisoft.NGroups.Util
{
    /// <summary> Contains responses from all members. Marks faulty members.
    /// A RspList is a response list used in peer-to-peer protocols.
    /// </summary>
    public class RspList
    {
        // Backing store: one Rsp per member, wrapped for coarse thread safety.
        internal System.Collections.ArrayList rsps = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));

        /// <summary>Value carried by the first response, or null when no responses exist.</summary>
        public object First
        {
            get { return rsps.Count == 0 ? null : ((Rsp)rsps[0]).Value; }
        }

        /// <summary>Returns the results from non-suspected members that are not null. </summary>
        public System.Collections.ArrayList Results
        {
            get
            {
                System.Collections.ArrayList received = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
                foreach (Rsp rsp in rsps)
                {
                    if (!rsp.wasReceived())
                        continue;
                    object val = rsp.Value;
                    if (val != null)
                        received.Add(val);
                }
                return received;
            }
        }

        /// <summary>Addresses of all members whose response is marked suspected.</summary>
        public System.Collections.ArrayList SuspectedMembers
        {
            get
            {
                System.Collections.ArrayList suspects = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
                foreach (Rsp rsp in rsps)
                {
                    if (rsp.wasSuspected())
                        suspects.Add(rsp.Sender);
                }
                return suspects;
            }
        }

        /// <summary>Discards all collected responses.</summary>
        public void reset()
        {
            rsps.Clear();
        }

        /// <summary>Records a received return value for <paramref name="sender"/>,
        /// updating an existing entry in place when one exists.</summary>
        public void addRsp(Address sender, object retval)
        {
            Rsp existing = find(sender);
            if (existing == null)
            {
                rsps.Add(new Rsp(sender, retval));
            }
            else
            {
                // Re-use the slot: a late reply overrides a prior not-received/suspected mark.
                existing.sender = sender;
                existing.retval = retval;
                existing.received = true;
                existing.suspected = false;
            }
        }

        /// <summary>Registers <paramref name="sender"/> as pending (no response yet),
        /// unless it is already tracked.</summary>
        public void addNotReceived(Address sender)
        {
            if (find(sender) == null)
                rsps.Add(new Rsp(sender));
        }

        /// <summary>Marks <paramref name="sender"/> as suspected, clearing any value
        /// previously recorded for it.</summary>
        public void addSuspect(Address sender)
        {
            Rsp existing = find(sender);
            if (existing == null)
            {
                rsps.Add(new Rsp(sender, true));
            }
            else
            {
                existing.sender = sender;
                existing.retval = null;
                existing.received = false;
                existing.suspected = true;
            }
        }

        /// <summary>True when a response from <paramref name="sender"/> has been received.</summary>
        public bool isReceived(Address sender)
        {
            Rsp rsp = find(sender);
            return rsp != null && rsp.received;
        }

        /// <summary>Number of entries currently marked suspected.</summary>
        public int numSuspectedMembers()
        {
            int count = 0;
            foreach (Rsp rsp in rsps)
            {
                if (rsp.wasSuspected())
                    count++;
            }
            return count;
        }

        /// <summary>True when <paramref name="sender"/> is marked suspected.</summary>
        public bool isSuspected(Address sender)
        {
            Rsp rsp = find(sender);
            return rsp != null && rsp.suspected;
        }

        /// <summary>Return value recorded for <paramref name="sender"/>, or null when unknown.</summary>
        public object get(Address sender)
        {
            Rsp rsp = find(sender);
            return rsp == null ? null : rsp.retval;
        }

        /// <summary>Total number of tracked responses.</summary>
        public int size()
        {
            return rsps.Count;
        }

        /// <summary>The i-th tracked response (as object).</summary>
        public object elementAt(int i)
        {
            return rsps[i];
        }

        /// <summary>Removes the i-th tracked response.</summary>
        public void removeElementAt(int i)
        {
            rsps.RemoveAt(i);
        }

        /// <summary>Removes the given response entry.</summary>
        public void removeRsp(Rsp r)
        {
            rsps.Remove(r);
        }

        public override string ToString()
        {
            System.Text.StringBuilder sb = new System.Text.StringBuilder();
            foreach (Rsp rsp in rsps)
            {
                sb.Append("[" + rsp + "]\n");
            }
            return sb.ToString();
        }

        // Internal membership check; same matching rule as find().
        bool contains(Address sender)
        {
            return find(sender) != null;
        }

        /// <summary>Looks up the response entry for <paramref name="sender"/>;
        /// null senders on either side never match.</summary>
        public Rsp find(Address sender)
        {
            foreach (Rsp rsp in rsps)
            {
                if (rsp.sender != null && sender != null && rsp.sender.Equals(sender))
                    return rsp;
            }
            return null;
        }
    }
}
//#define ASTAR_POOL_DEBUG //@SHOWINEDITOR Enables debugging of path pooling. Will log warnings and info messages about paths not beeing pooled correctly. using UnityEngine; using System.Collections; using Pathfinding; using System.Collections.Generic; namespace Pathfinding { /** Base class for all path types */ public abstract class Path { /** Data for the thread calculating this path */ public PathHandler pathHandler {get; private set;} /** Callback to call when the path is complete. * This is usually sent to the Seeker component which post processes the path and then calls a callback to the script which requested the path */ public OnPathDelegate callback; /** Immediate callback to call when the path is complete. * \warning This may be called from a separate thread. Usually you do not want to use this one. * * \see callback */ public OnPathDelegate immediateCallback; PathState state; System.Object stateLock = new object(); /** Current state of the path. * \see #CompleteState */ PathCompleteState pathCompleteState; /** Current state of the path */ public PathCompleteState CompleteState { get { return pathCompleteState; } protected set { pathCompleteState = value; } } /** If the path failed, this is true. * \see #errorLog */ public bool error { get { return CompleteState == PathCompleteState.Error; }} /** Additional info on what went wrong. * \see #error */ private string _errorLog = ""; /** Log messages with info about eventual errors. */ public string errorLog { get { return _errorLog; } } /** Holds the path as a Node array. All nodes the path traverses. * This might not be the same as all nodes the smoothed path traverses. 
*/ public List<GraphNode> path; /** Holds the (perhaps post processed) path as a Vector3 array */ public List<Vector3> vectorPath; /** The max number of milliseconds per iteration (frame, in case of non-multithreading) */ protected float maxFrameTime; /** The node currently being processed */ protected PathNode currentR; /** The duration of this path in ms. How long it took to calculate the path */ public float duration; /** The number of frames/iterations this path has executed. * This is the number of frames when not using multithreading. * When using multithreading, this value is quite irrelevant */ public int searchIterations; /** Number of nodes this path has searched */ public int searchedNodes; /** When the call was made to start the pathfinding for this path */ public System.DateTime callTime {get; private set;} /** True if the path is currently recycled (i.e in the path pool). * Do not set this value. Only read. It is used internally. */ internal bool recycled; /** True if the Reset function has been called. * Used to allert users when they are doing something wrong. */ protected bool hasBeenReset; /** Constraint for how to search for nodes */ public NNConstraint nnConstraint = PathNNConstraint.Default; /** Internal linked list implementation. * \warning This is used internally by the system. You should never change this. */ internal Path next; /** Determines which heuristic to use */ public Heuristic heuristic; /** Scale of the heuristic values */ public float heuristicScale = 1F; /** ID of this path. Used to distinguish between different paths */ public ushort pathID {get; private set;} /** Target to use for H score calculation. Used alongside #hTarget. */ protected GraphNode hTargetNode; /** Target to use for H score calculations. \see Pathfinding.Node.H */ protected Int3 hTarget; /** Which graph tags are traversable. * This is a bitmask so -1 = all bits set = all tags traversable. 
* For example, to set bit 5 to true, you would do * \code myPath.enabledTags |= 1 << 5; \endcode * To set it to false, you would do * \code myPath.enabledTags &= ~(1 << 5); \endcode * * The Seeker has a popup field where you can set which tags to use. * \note If you are using a Seeker. The Seeker will set this value to what is set in the inspector field on StartPath. * So you need to change the Seeker value via script, not set this value if you want to change it via script. * * \see CanTraverse */ public int enabledTags = -1; /** List of zeroes to use as default tag penalties */ static readonly int[] ZeroTagPenalties = new int[32]; /** The tag penalties that are actually used. * If manualTagPenalties is null, this will be ZeroTagPenalties * \see tagPenalties */ protected int[] internalTagPenalties; /** Tag penalties set by other scripts * \see tagPenalties */ protected int[] manualTagPenalties; /** Penalties for each tag. * Tag 0 which is the default tag, will have added a penalty of tagPenalties[0]. * These should only be positive values since the A* algorithm cannot handle negative penalties. * \note This array will never be null. If you try to set it to null or with a lenght which is not 32. It will be set to "new int[0]". * * \note If you are using a Seeker. The Seeker will set this value to what is set in the inspector field on StartPath. * So you need to change the Seeker value via script, not set this value if you want to change it via script. * * \see Seeker.tagPenalties */ public int[] tagPenalties { get { return manualTagPenalties; } set { if (value == null || value.Length != 32) { manualTagPenalties = null; internalTagPenalties = ZeroTagPenalties; } else { manualTagPenalties = value; internalTagPenalties = value; } } } /** True for paths that want to search all nodes and not jump over nodes as optimizations. * This disables Jump Point Search when that is enabled to prevent e.g ConstantPath and FloodPath * to become completely useless. 
*/ public virtual bool FloodingPath { get { return false; } } /** Total Length of the path. * Calculates the total length of the #vectorPath. * Cache this rather than call this function every time since it will calculate the length every time, not just return a cached value. * \returns Total length of #vectorPath, if #vectorPath is null positive infinity is returned. */ public float GetTotalLength () { if (vectorPath == null) return float.PositiveInfinity; float tot = 0; for (int i=0;i<vectorPath.Count-1;i++) tot += Vector3.Distance (vectorPath[i],vectorPath[i+1]); return tot; } /** Waits until this path has been calculated and returned. * Allows for very easy scripting. \code //In an IEnumerator function Path p = Seeker.StartPath (transform.position, transform.position + Vector3.forward * 10); yield return StartCoroutine (p.WaitForPath ()); //The path is calculated at this stage \endcode * \note Do not confuse this with AstarPath.WaitForPath. This one will wait using yield until it has been calculated * while AstarPath.WaitForPath will halt all operations until the path has been calculated. * * \throws System.InvalidOperationException if the path is not started. Send the path to Seeker.StartPath or AstarPath.StartPath before calling this function. 
* * \see AstarPath.WaitForPath */ public IEnumerator WaitForPath () { if (GetState () == PathState.Created) throw new System.InvalidOperationException ("This path has not been started yet"); while (GetState () != PathState.Returned) yield return null; } public uint CalculateHScore (GraphNode node) { uint v1; switch (heuristic) { case Heuristic.Euclidean: v1 = (uint)(((GetHTarget () - node.position).costMagnitude)*heuristicScale); return v1; case Heuristic.Manhattan: Int3 p2 = node.position; v1 = (uint)((System.Math.Abs (hTarget.x-p2.x) + System.Math.Abs (hTarget.y-p2.y) + System.Math.Abs (hTarget.z-p2.z))*heuristicScale); return v1; case Heuristic.DiagonalManhattan: Int3 p = GetHTarget () - node.position; p.x = System.Math.Abs (p.x); p.y = System.Math.Abs (p.y); p.z = System.Math.Abs (p.z); int diag = System.Math.Min (p.x,p.z); int diag2 = System.Math.Max (p.x,p.z); v1 = (uint)((((14*diag)/10) + (diag2-diag) + p.y) * heuristicScale); return v1; } return 0U; } /** Returns penalty for the given tag. * \param tag A value between 0 (inclusive) and 32 (exclusive). */ public uint GetTagPenalty (int tag) { return (uint)internalTagPenalties[tag]; } public Int3 GetHTarget () { return hTarget; } /** Returns if the node can be traversed. * This per default equals to if the node is walkable and if the node's tag is included in #enabledTags */ public bool CanTraverse (GraphNode node) { unchecked { return node.Walkable && (enabledTags >> (int)node.Tag & 0x1) != 0; } } public uint GetTraversalCost (GraphNode node) { #if ASTAR_NO_TRAVERSAL_COST return 0; #else unchecked { return GetTagPenalty ((int)node.Tag ) + node.Penalty ; } #endif } /** May be called by graph nodes to get a special cost for some connections. 
* Nodes may call it when PathNode.flag2 is set to true, for example mesh nodes, which have * a very large area can be marked on the start and end nodes, this method will be called * to get the actual cost for moving from the start position to its neighbours instead * of as would otherwise be the case, from the start node's position to its neighbours. * The position of a node and the actual start point on the node can vary quite a lot. * * The default behaviour of this method is to return the previous cost of the connection, * essentiall making no change at all. * * This method should return the same regardless of the order of a and b. * That is f(a,b) == f(b,a) should hold. * * \param a Moving from this node * \param b Moving to this node * \param currentCost The cost of moving between the nodes. Return this value if there is no meaningful special cost to return. */ public virtual uint GetConnectionSpecialCost (GraphNode a, GraphNode b, uint currentCost) { return currentCost; } /** Returns if this path is done calculating. * \returns If CompleteState is not PathCompleteState.NotCalculated. * * \note The path might not have been returned yet. * * \since Added in 3.0.8 * * \see Seeker.IsDone */ public bool IsDone () { return CompleteState != PathCompleteState.NotCalculated; } /** Threadsafe increment of the state */ public void AdvanceState (PathState s) { lock (stateLock) { state = (PathState)System.Math.Max ((int)state, (int)s); } } /** Returns the state of the path in the pathfinding pipeline */ public PathState GetState () { return (PathState)state; } /** Appends \a msg to #errorLog and logs \a msg to the console. * Debug.Log call is only made if AstarPath.logPathResults is not equal to None and not equal to InGame. * Consider calling Error() along with this call. */ // Ugly Code Inc. 
wrote the below code :D // What it does is that it disables the LogError function if ASTAR_NO_LOGGING is enabled // since the DISABLED define will never be enabled // Ugly way of writing Conditional("!ASTAR_NO_LOGGING") public void LogError (string msg) { // Optimize for release builds if (!(!AstarPath.isEditor && AstarPath.active.logPathResults == PathLog.None)) { _errorLog += msg; } if (AstarPath.active.logPathResults != PathLog.None && AstarPath.active.logPathResults != PathLog.InGame) { Debug.LogWarning (msg); } } /** Logs an error and calls Error(). * This is called only if something is very wrong or the user is doing something he/she really should not be doing. */ public void ForceLogError (string msg) { Error(); _errorLog += msg; Debug.LogError (msg); } /** Appends a message to the #errorLog. * Nothing is logged to the console. * * \note If AstarPath.logPathResults is PathLog.None and this is a standalone player, nothing will be logged as an optimization. */ public void Log (string msg) { // Optimize for release builds if (!(!AstarPath.isEditor && AstarPath.active.logPathResults == PathLog.None)) { _errorLog += msg; } } /** Aborts the path because of an error. * Sets #error to true. * This function is called when an error has ocurred (e.g a valid path could not be found). * \see LogError */ public void Error () { CompleteState = PathCompleteState.Error; } /** Does some error checking. * Makes sure the user isn't using old code paths and that no major errors have been done. * * \throws An exception if any errors are found */ private void ErrorCheck () { if (!hasBeenReset) throw new System.Exception ("The path has never been reset. Use pooling API or call Reset() after creating the path with the default constructor."); if (recycled) throw new System.Exception ("The path is currently in a path pool. Are you sending the path for calculation twice?"); if (pathHandler == null) throw new System.Exception ("Field pathHandler is not set. 
Please report this bug."); if (GetState() > PathState.Processing) throw new System.Exception ("This path has already been processed. Do not request a path with the same path object twice."); } /** Called when the path enters the pool. * This method should release e.g pooled lists and other pooled resources * The base version of this method releases vectorPath and path lists. * Reset() will be called after this function, not before. * \warning Do not call this function manually. */ public virtual void OnEnterPool () { if (vectorPath != null) Pathfinding.Util.ListPool<Vector3>.Release (vectorPath); if (path != null) Pathfinding.Util.ListPool<GraphNode>.Release (path); vectorPath = null; path = null; } /** Reset all values to their default values. * * \note All inheriting path types (e.g ConstantPath, RandomPath, etc.) which declare their own variables need to * override this function, resetting ALL their variables to enable recycling of paths. * If this is not done, trying to use that path type for pooling might result in weird behaviour. * The best way is to reset to default values the variables declared in the extended path type and then * call this base function in inheriting types with base.Reset (). * * \warning This function should not be called manually. */ public virtual void Reset () { if (System.Object.ReferenceEquals (AstarPath.active, null)) throw new System.NullReferenceException ("No AstarPath object found in the scene. 
" + "Make sure there is one or do not create paths in Awake"); hasBeenReset = true; state = (int)PathState.Created; releasedNotSilent = false; pathHandler = null; callback = null; _errorLog = ""; pathCompleteState = PathCompleteState.NotCalculated; path = Pathfinding.Util.ListPool<GraphNode>.Claim(); vectorPath = Pathfinding.Util.ListPool<Vector3>.Claim(); currentR = null; duration = 0; searchIterations = 0; searchedNodes = 0; //calltime nnConstraint = PathNNConstraint.Default; next = null; heuristic = AstarPath.active.heuristic; heuristicScale = AstarPath.active.heuristicScale; enabledTags = -1; tagPenalties = null; callTime = System.DateTime.UtcNow; pathID = AstarPath.active.GetNextPathID (); hTarget = Int3.zero; hTargetNode = null; } protected bool HasExceededTime (int searchedNodes, long targetTime) { return System.DateTime.UtcNow.Ticks >= targetTime; } /** Internal method to recycle the path. * Calling this means that the path and any variables on it are not needed anymore and the path can be pooled. * All path data will be reset. * Implement this in inheriting path types to support recycling of paths. \code public override void Recycle () { //Recycle the Path (<Path> should be replaced by the path type it is implemented in) PathPool<Path>.Recycle (this); } \endcode * * \warning Do not call this function directly, instead use the #Claim and #Release functions. * \see Pathfinding.PathPool * \see Reset * \see Claim * \see Release */ protected abstract void Recycle (); /** List of claims on this path with reference objects */ private List<System.Object> claimed = new List<System.Object>(); /** True if the path has been released with a non-silent call yet. * * \see Release * \see ReleaseSilent * \see Claim */ private bool releasedNotSilent; /** Claim this path. * A claim on a path will ensure that it is not recycled. * If you are using a path, you will want to claim it when you first get it and then release it when you will not * use it anymore. 
When there are no claims on the path, it will be recycled and put in a pool. * * This is essentially just reference counting. * * The object passed to this method is merely used as a way to more easily detect when pooling is not done correctly. * It can be any object, when used from a movement script you can just pass "this". This class will throw an exception * if you try to call Claim on the same path twice with the same object (which is usually not what you want) or * if you try to call Release with an object that has not been used in a Claim call for that path. * The object passed to the Claim method needs to be the same as the one you pass to this method. * * \see Release * \see Recycle * \see \ref pooling */ public void Claim (System.Object o) { if (System.Object.ReferenceEquals (o, null)) throw new System.ArgumentNullException ("o"); for ( int i = 0; i < claimed.Count; i++ ) { // Need to use ReferenceEquals because it might be called from another thread if ( System.Object.ReferenceEquals (claimed[i], o) ) throw new System.ArgumentException ("You have already claimed the path with that object ("+o+"). Are you claiming the path with the same object twice?"); } claimed.Add (o); } /** Releases the path silently. * This will remove the claim by the specified object, but the path will not be recycled if the claim count reches zero unless a Release call (not silent) has been made earlier. * This is used by the internal pathfinding components such as Seeker and AstarPath so that they will not recycle paths. * This enables users to skip the claim/release calls if they want without the path being recycled by the Seeker or AstarPath. 
*/ public void ReleaseSilent (System.Object o) { if (o == null) throw new System.ArgumentNullException ("o"); for (int i=0;i<claimed.Count;i++) { // Need to use ReferenceEquals because it might be called from another thread if (System.Object.ReferenceEquals (claimed[i], o)) { claimed.RemoveAt (i); if (releasedNotSilent && claimed.Count == 0) { Recycle (); } return; } } if (claimed.Count == 0) { throw new System.ArgumentException ("You are releasing a path which is not claimed at all (most likely it has been pooled already). " + "Are you releasing the path with the same object ("+o+") twice?"); } throw new System.ArgumentException ("You are releasing a path which has not been claimed with this object ("+o+"). " + "Are you releasing the path with the same object twice?"); } /** Releases a path claim. * Removes the claim of the path by the specified object. * When the claim count reaches zero, the path will be recycled, all variables will be cleared and the path will be put in a pool to be used again. * This is great for memory since less allocations are made. * \see Claim */ public void Release (System.Object o) { if (o == null) throw new System.ArgumentNullException ("o"); for (int i=0;i<claimed.Count;i++) { // Need to use ReferenceEquals because it might be called from another thread if (System.Object.ReferenceEquals (claimed[i], o)) { claimed.RemoveAt (i); releasedNotSilent = true; if (claimed.Count == 0) { Recycle (); } return; } } if (claimed.Count == 0) { throw new System.ArgumentException ("You are releasing a path which is not claimed at all (most likely it has been pooled already). " + "Are you releasing the path with the same object ("+o+") twice?"); } throw new System.ArgumentException ("You are releasing a path which has not been claimed with this object ("+o+"). " + "Are you releasing the path with the same object twice?"); } /** Traces the calculated path from the end node to the start. 
* This will build an array (#path) of the nodes this path will pass through and also set the #vectorPath array to the #path arrays positions. * Assumes the #vectorPath and #path are empty and not null (which will be the case for a correctly initialized path). */ protected virtual void Trace (PathNode from) { int count = 0; PathNode c = from; while (c != null) { c = c.parent; count++; if (count > 2048) { Debug.LogWarning ("Infinite loop? >2048 node path. Remove this message if you really have that long paths (Path.cs, Trace method)"); break; } } // Ensure capacities for lists AstarProfiler.StartProfile ("Check List Capacities"); if (path.Capacity < count) path.Capacity = count; if (vectorPath.Capacity < count) vectorPath.Capacity = count; AstarProfiler.EndProfile (); c = from; for (int i = 0;i<count;i++) { path.Add (c.node); c = c.parent; } int half = count/2; for (int i=0;i<half;i++) { GraphNode tmp = path[i]; path[i] = path[count-i-1]; path[count - i - 1] = tmp; } for (int i=0;i<count;i++) { vectorPath.Add ((Vector3)path[i].position); } } /** Returns a debug string for this path. */ public virtual string DebugString (PathLog logMode) { if (logMode == PathLog.None || (!error && logMode == PathLog.OnlyErrors)) { return ""; } // Get a cached string builder for this thread System.Text.StringBuilder text = pathHandler.DebugStringBuilder; text.Length = 0; text.Append (error ? "Path Failed : " : "Path Completed : "); text.Append ("Computation Time "); text.Append ((duration).ToString (logMode == PathLog.Heavy ? "0.000 ms " : "0.00 ms ")); text.Append ("Searched Nodes "); text.Append (searchedNodes); if (!error) { text.Append (" Path Length "); text.Append (path == null ? 
"Null" : path.Count.ToString ()); if (logMode == PathLog.Heavy) { text.Append ("\nSearch Iterations "+searchIterations); } } if (error) { text.Append ("\nError: "); text.Append (errorLog); } if (logMode == PathLog.Heavy && !AstarPath.IsUsingMultithreading ) { text.Append ("\nCallback references "); if (callback != null) text.Append(callback.Target.GetType().FullName).AppendLine(); else text.AppendLine ("NULL"); } text.Append ("\nPath Number "); text.Append (pathID); return text.ToString (); } /** Calls callback to return the calculated path. \see #callback */ public virtual void ReturnPath () { if (callback != null) { callback (this); } } /** Prepares low level path variables for calculation. * Called before a path search will take place. * Always called before the Prepare, Initialize and CalculateStep functions */ public void PrepareBase (PathHandler pathHandler) { //Path IDs have overflowed 65K, cleanup is needed //Since pathIDs are handed out sequentially, we can do this if (pathHandler.PathID > pathID) { pathHandler.ClearPathIDs (); } //Make sure the path has a reference to the pathHandler this.pathHandler = pathHandler; //Assign relevant path data to the pathHandler pathHandler.InitializeForPath (this); // Make sure that internalTagPenalties is an array which has the length 32 if (internalTagPenalties == null || internalTagPenalties.Length != 32) internalTagPenalties = ZeroTagPenalties; try { ErrorCheck (); } catch (System.Exception e) { ForceLogError ("Exception in path "+pathID+"\n"+e); } } /** Called before the path is started. * Called right before Initialize */ public abstract void Prepare (); /** Always called after the path has been calculated. * Guaranteed to be called before other paths have been calculated on * the same thread. * Use for cleaning up things like node tagging and similar. */ public virtual void Cleanup () {} /** Initializes the path. 
* Sets up the open list and adds the first node to it */ public abstract void Initialize (); /** Calculates the until it is complete or the time has progressed past \a targetTick */ public abstract void CalculateStep (long targetTick); } }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Web;
using Aspose.OCR.Live.Demos.UI.Config;
using Aspose.OCR.Live.Demos.UI.Controllers;

namespace Aspose.OCR.Live.Demos.UI.Models
{
	/// <summary>
	/// View model shared by the live-demo pages: resolves localized resources,
	/// page titles, upload settings and the list of "Save As" output formats.
	/// </summary>
	public class ViewModel
	{
		public int MaximumUploadFiles { get; set; }

		/// <summary>
		/// Name of the product (e.g., words)
		/// </summary>
		public string Product { get; set; }

		public BaseController Controller;

		/// <summary>
		/// Product + AppName, e.g. wordsMerger
		/// </summary>
		public string ProductAppName { get; set; }

		private AsposeOCRContext _atcContext;

		/// <summary>
		/// Lazily created request context wrapper.
		/// </summary>
		public AsposeOCRContext AsposeOCRContext
		{
			get
			{
				if (_atcContext == null)
					_atcContext = new AsposeOCRContext(HttpContext.Current);
				return _atcContext;
			}
		}

		private Dictionary<string, string> _resources;

		/// <summary>
		/// Localized resource strings; falls back to the context resources when not set.
		/// </summary>
		public Dictionary<string, string> Resources
		{
			get
			{
				if (_resources == null)
					_resources = AsposeOCRContext.Resources;
				return _resources;
			}
			set { _resources = value; }
		}

		public string UIBasePath => Configuration.AsposeOCRLiveDemosPath;

		public string PageProductTitle => Resources["Aspose" + TitleCase(Product)];

		/// <summary>
		/// The name of the app (e.g., Conversion, Merger)
		/// </summary>
		public string AppName { get; set; }

		/// <summary>
		/// The full address of the application without query string (e.g., https://products.aspose.app/words/conversion)
		/// </summary>
		public string AppURL { get; set; }

		/// <summary>
		/// File extension without dot received by "fileformat" value in RouteData (e.g. docx)
		/// </summary>
		public string Extension { get; set; }

		/// <summary>
		/// Second file extension without dot received in RouteData (e.g. docx)
		/// </summary>
		public string Extension2 { get; set; }

		/// <summary>
		/// Redirect to main app, if there is no ExtensionInfoModel for auto generated models
		/// </summary>
		public bool RedirectToMainApp { get; set; }

		/// <summary>
		/// Is canonical page opened (/all)
		/// </summary>
		public bool IsCanonical;

		/// <summary>
		/// Name of the partial View of controls (e.g. SignatureControls)
		/// </summary>
		public string ControlsView { get; set; }

		public string AnotherFileText { get; set; }
		public string UploadButtonText { get; set; }
		public string SuccessMessage { get; set; }

		/// <summary>
		/// List of app features for ul-list. E.g. Resources[app + "LiFeature1"]
		/// </summary>
		public List<string> AppFeatures { get; set; }

		public string Title { get; set; }
		public string TitleSub { get; set; }

		// The three properties below are simple pass-throughs to the controller's ViewBag.
		public string PageTitle
		{
			get => Controller.ViewBag.PageTitle;
			set => Controller.ViewBag.PageTitle = value;
		}

		public string MetaDescription
		{
			get => Controller.ViewBag.MetaDescription;
			set => Controller.ViewBag.MetaDescription = value;
		}

		public string MetaKeywords
		{
			get => Controller.ViewBag.MetaKeywords;
			set => Controller.ViewBag.MetaKeywords = value;
		}

		/// <summary>
		/// If the application doesn't need to upload several files (e.g. Viewer, Editor)
		/// </summary>
		public bool UploadAndRedirect { get; set; }

		protected string TitleCase(string value) =>
			new System.Globalization.CultureInfo("en-US", false).TextInfo.ToTitleCase(value);

		/// <summary>
		/// e.g., .doc|.docx|.dot|.dotx|.rtf|.odt|.ott|.txt|.html|.xhtml|.mhtml
		/// </summary>
		public string ExtensionsString { get; set; }

		#region SaveAs

		private bool _saveAsComponent;

		/// <summary>
		/// Enables the "Save As" UI component; when enabled, resolves #SaveAsOptions
		/// from the resource dictionary and appends the SaveAs feature bullet.
		/// </summary>
		public bool SaveAsComponent
		{
			get => _saveAsComponent;
			set
			{
				_saveAsComponent = value;
				Controller.ViewBag.SaveAsComponent = value;
				if (!_saveAsComponent)
					return;

				// Most specific resource key first, then the product-wide fallback.
				var sokey1 = $"{Product}{AppName}SaveAsOptions";
				var sokey2 = $"{Product}SaveAsOptions";
				if ("pdf".Equals(Product.ToLower()) || "page".Equals(Product.ToLower()))
				{
					sokey2 = $"{Product}ConversionSaveAsExtensionsMvc";
				}
				if ("cells".Equals(Product.ToLower()))
				{
					sokey2 = $"{Product}{AppName}SaveAsExtensions";
				}

				if (Resources.ContainsKey(sokey1))
				{
					SaveAsOptions = Resources[sokey1].Split(',');
				}
				else if (Resources.ContainsKey(sokey2))
				{
					if (AppName == "Conversion" && Product == "words")
					{
						SaveAsOptions = MoveDocxBeforeDoc(Resources[sokey2].Split(',').ToList());
					}
					else if (AppName == "Conversion" && Product == "pdf")
					{
						SaveAsOptions = MoveDocxBeforeDoc(
							Resources[sokey2].Split(',').Select(x => x.ToUpper().Trim()).ToList());
					}
					else if (AppName == "Conversion" && Product == "page")
					{
						SaveAsOptions = Resources[sokey2].Split(',').Select(x => x.ToUpper().Trim()).ToArray();
					}
					else
					{
						SaveAsOptions = Resources[sokey2].Split(',');
					}
				}

				var lifeaturekey = Product + "SaveAsLiFeature";
				if (AppFeatures != null && Resources.ContainsKey(lifeaturekey))
					AppFeatures.Add(Resources[lifeaturekey]);
			}
		}

		/// <summary>
		/// Moves the "DOCX" entry directly before the "DOC" entry so DOCX is offered first.
		/// If either entry is missing the list is left as-is (preserving the historical
		/// best-effort behaviour of the duplicated try/catch blocks this replaces).
		/// </summary>
		private static string[] MoveDocxBeforeDoc(List<string> lst)
		{
			try
			{
				var index = lst.FindIndex(x => x == "DOCX");
				lst.RemoveAt(index);
				var index2 = lst.FindIndex(x => x == "DOC");
				lst.Insert(index2, "DOCX");
			}
			catch
			{
				// Best effort: keep whatever order remains.
			}
			return lst.ToArray();
		}

		/// <summary>
		/// Comma-separated, upper-cased rendering of #SaveAsOptions (e.g. "DOCX, PDF").
		/// </summary>
		public string SaveAsOptionsList =>
			string.Join(", ", SaveAsOptions.Select(extension => extension.ToUpper()));

		/// <summary>
		/// FileFormats in UpperCase
		/// </summary>
		public string[] SaveAsOptions { get; set; }

		/// <summary>
		/// Original file format SaveAs option for multiple files uploading
		/// </summary>
		public bool SaveAsOriginal { get; set; }

		#endregion

		/// <summary>
		/// The possibility of changing the order of uploaded files. It is actual for Merger App.
		/// </summary>
		public bool UseSorting { get; set; }

		#region ViewSections

		public bool ShowExtensionInfo => ExtensionInfoModel != null;
		public ExtensionInfoModel ExtensionInfoModel { get; set; }

		public bool HowTo => HowToModel != null;
		public HowToModel HowToModel { get; set; }

		#endregion

		public string JSOptions => new JSOptions(this).ToString();

		/// <summary>
		/// Builds the view model for a demo app, resolving titles, features and
		/// the upload extension filter from the resource dictionary.
		/// </summary>
		public ViewModel(BaseController controller, string app)
		{
			Controller = controller;
			Resources = controller.Resources;
			AppName = Resources.ContainsKey($"{app}APPName") ? Resources[$"{app}APPName"] : app;
			Product = controller.Product;

			// Strip the query string (if any) from the current request URL.
			var url = controller.Request.Url.AbsoluteUri;
			AppURL = url.Substring(0, (url.IndexOf("?") > 0 ? url.IndexOf("?") : url.Length));
			ProductAppName = Product + app;

			UploadButtonText = GetFromResources(ProductAppName + "Button", app + "Button");
			SuccessMessage = GetFromResources(app + "SuccessMessage");
			AnotherFileText = GetFromResources(app + "AnotherFile");

			IsCanonical = true;
			HowToModel = new HowToModel(this);

			SetTitles();
			SetAppFeatures(app);

			SaveAsOriginal = true;
			SaveAsComponent = false;
			SetExtensionsString();
		}

		// Resolves page/meta titles from the per-app resource keys.
		private void SetTitles()
		{
			PageTitle = Resources[ProductAppName + "PageTitle"];
			MetaDescription = Resources[ProductAppName + "MetaDescription"];
			MetaKeywords = "";
			Title = Resources[ProductAppName + "Title"];
			TitleSub = Resources[ProductAppName + "SubTitle"];
			Controller.ViewBag.CanonicalTag = null;
		}

		// Collects "LiFeatureN" bullets, preferring product-specific keys over the app-wide defaults.
		private void SetAppFeatures(string app)
		{
			AppFeatures = new List<string>();
			var i = 1;
			while (Resources.ContainsKey($"{ProductAppName}LiFeature{i}"))
				AppFeatures.Add(Resources[$"{ProductAppName}LiFeature{i++}"]);

			// Stop other developers to add unnecessary features.
			if (AppFeatures.Count == 0)
			{
				i = 1;
				while (Resources.ContainsKey($"{app}LiFeature{i}"))
				{
					if (!Resources[$"{app}LiFeature{i}"].Contains("Instantly download") || AppFeatures.Count == 0)
						AppFeatures.Add(Resources[$"{app}LiFeature{i}"]);
					i++;
				}
			}
		}

		// Looks up a resource by key, falling back to defaultKey, then "".
		private string GetFromResources(string key, string defaultKey = null)
		{
			if (Resources.ContainsKey(key))
				return Resources[key];
			if (!string.IsNullOrEmpty(defaultKey) && Resources.ContainsKey(defaultKey))
				return Resources[defaultKey];
			return "";
		}

		// Builds the pipe-separated extension filter used by the uploader.
		private void SetExtensionsString()
		{
			if (!ShowExtensionInfo)
			{
				var key1 = $"{Product}{AppName}ValidationExpression";
				var key2 = $"{Product}ValidationExpression";
				ExtensionsString = Resources.ContainsKey(key1) ? Resources[key1] : Resources[key2];
			}
			else
			{
				switch (Extension)
				{
					case "doc":
					case "docx":
						ExtensionsString = ".docx|.doc";
						break;
					case "html":
					case "htm":
					case "mhtml":
					case "mht":
						ExtensionsString = ".htm|.html|.mht|.mhtml";
						break;
					default:
						ExtensionsString = $".{Extension}";
						break;
				}

				if (AppName == "Comparison" && !string.IsNullOrEmpty(Extension2))
					ExtensionsString += $"|.{Extension2}";
			}
		}
	}
}
// // StatisticsPage.cs // // Author: // Aaron Bockover <abockover@novell.com> // // Copyright (C) 2008 Novell, Inc. // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using System;
using System.Collections.Generic;

using Mono.Unix;
using Gtk;

namespace Banshee.Gui.TrackEditor
{
    // TreeView subclass that lets Ctrl+PageUp/PageDown bubble up to the parent
    // (e.g. a notebook) instead of being consumed by the view for scrolling.
    internal class FixedTreeView : TreeView
    {
        public FixedTreeView (ListStore model) : base (model)
        {
        }

        protected override bool OnKeyPressEvent (Gdk.EventKey evnt)
        {
            // Return false so Ctrl+Page_Up/Page_Down propagates to the container
            if ((evnt.State & Gdk.ModifierType.ControlMask) != 0 &&
                (evnt.Key == Gdk.Key.Page_Up || evnt.Key == Gdk.Key.Page_Down)) {
                return false;
            }
            return base.OnKeyPressEvent (evnt);
        }
    }

    // Read-only "Properties" page of the track editor: a two-column name/value
    // list of file, codec and playback statistics for the selected track.
    //
    // ListStore columns: 0 = name, 1 = value, 2 = is-separator,
    // 3 = ellipsize mode, 4 = ellipsize-set, 5 = wrap width.
    public class StatisticsPage : ScrolledWindow, ITrackEditorPage
    {
        private CellRendererText name_renderer;
        private CellRendererText value_renderer;
        private ListStore model;
        private TreeView view;

        public StatisticsPage ()
        {
            ShadowType = ShadowType.In;
            VscrollbarPolicy = PolicyType.Automatic;
            HscrollbarPolicy = PolicyType.Never;
            BorderWidth = 2;

            // NOTE(review): 'model' is still null here; the actual model is
            // attached later by CreateModel () — presumably Gtk tolerates a
            // null model at construction time. TODO confirm.
            view = new FixedTreeView (model);
            view.HeadersVisible = false;
            view.RowSeparatorFunc = new TreeViewRowSeparatorFunc (RowSeparatorFunc);
            view.HasTooltip = true;
            view.QueryTooltip += HandleQueryTooltip;

            name_renderer = new CellRendererText () {
                Alignment = Pango.Alignment.Right,
                Weight = (int)Pango.Weight.Bold,
                Xalign = 1.0f,
                Yalign = 0.0f,
                Scale = Pango.Scale.Small
            };

            value_renderer = new CellRendererText ();
            // Editable so users can select/copy the value text; the inner Entry
            // is made read-only when editing starts (below).
            value_renderer.Editable = true;
            value_renderer.Scale = Pango.Scale.Small;
            value_renderer.Ellipsize = Pango.EllipsizeMode.End;
            value_renderer.WrapMode = Pango.WrapMode.Word;
            value_renderer.EditingStarted += delegate(object o, EditingStartedArgs args) {
                var entry = args.Editable as Entry;
                if (entry != null) {
                    entry.IsEditable = false;
                }
            };

            // Keep wrap widths in sync with the actual column width
            view.SizeAllocated += delegate { UpdateWrapWidth (); };
            view.Realized += delegate { UpdateWrapWidth (); };

            view.AppendColumn (Catalog.GetString ("Name"), name_renderer, "text", 0);
            view.AppendColumn (Catalog.GetString ("Value"), value_renderer, "text", 1,
                "ellipsize", 3, "ellipsize-set", 4, "wrap-width", 5);

            Add (view);
            ShowAll ();
        }

        // Recomputes the wrap width (column 5) for every wrapping row.
        public void UpdateWrapWidth ()
        {
            if (view.IsRealized) {
                var width = GetValueWidth ();
                model.Foreach ((TreeModel m, TreePath path, TreeIter iter) => {
                    // Only rows not using End-ellipsizing (i.e. wrapping rows) get a wrap width
                    if ((Pango.EllipsizeMode) model.GetValue (iter, 3) != Pango.EllipsizeMode.End) {
                        model.SetValue (iter, 5, width);
                    }
                    return false;
                });
            }
        }

        public CellRendererText NameRenderer {
            get { return name_renderer; }
        }

        public CellRendererText ValueRenderer {
            get { return value_renderer; }
        }

        // Column 2 marks separator rows (see AddSeparator)
        private bool RowSeparatorFunc (TreeModel model, TreeIter iter)
        {
            return (bool)model.GetValue (iter, 2);
        }

        // Usable pixel width of the value column, minus padding and style insets.
        private int GetValueWidth ()
        {
            var column = view.GetColumn (1);
            var column_width = column.Width - 2 * value_renderer.Xpad -
                (int)view.StyleGetProperty ("horizontal-separator") -
                2 * (int)view.StyleGetProperty ("focus-line-width");
            return (int) column_width;
        }

        // Shows a tooltip with the full value text when it is wider than the column.
        private void HandleQueryTooltip(object o, QueryTooltipArgs args)
        {
            TreePath path;
            TreeIter iter;
            if (view.GetPathAtPos (args.X, args.Y, out path) && view.Model.GetIter (out iter, path)) {
                string text = (string)view.Model.GetValue (iter, 1);
                if (!String.IsNullOrEmpty (text)) {
                    // Measure the rendered text at the renderer's scale
                    using (var layout = new Pango.Layout (view.PangoContext)) {
                        layout.FontDescription = value_renderer.FontDesc;
                        layout.SetText (text);
                        layout.Attributes = new Pango.AttrList ();
                        layout.Attributes.Insert (new Pango.AttrScale (value_renderer.Scale));

                        int width, height;
                        layout.GetPixelSize (out width, out height);

                        var column_width = GetValueWidth ();
                        if (width > column_width) {
                            var column = view.GetColumn (1);
                            args.Tooltip.Text = text;
                            view.SetTooltipCell (args.Tooltip, path, column, value_renderer);
                            args.RetVal = true;
                        }
                    }
                }
            }

            // Work around ref counting SIGSEGV, see http://bugzilla.gnome.org/show_bug.cgi?id=478519#c9
            if (args.Tooltip != null) {
                args.Tooltip.Dispose ();
            }
        }

        protected override void OnStyleSet (Style previous_style)
        {
            base.OnStyleSet (previous_style);
            // Match the name column background to the theme's normal background
            name_renderer.CellBackgroundGdk = Style.Background (StateType.Normal);
        }

        public void Initialize (TrackEditorDialog dialog)
        {
        }

        // Rebuilds the whole model from the given track's file and library statistics.
        public void LoadTrack (EditorTrackInfo track)
        {
            // Drop the old model and start fresh
            model = null;
            CreateModel ();

            TagLib.File file = track.GetTaglibFile ();

            if (track.Uri.IsLocalPath) {
                string path = track.Uri.AbsolutePath;
                AddItem (Catalog.GetString ("File Name:"), System.IO.Path.GetFileName (path));
                AddItem (Catalog.GetString ("Directory:"), System.IO.Path.GetDirectoryName (path));
                AddItem (Catalog.GetString ("Full Path:"), path);
                try {
                    AddFileSizeItem (Banshee.IO.File.GetSize (track.Uri));
                } catch {
                }
            } else {
                AddItem (Catalog.GetString ("URI:"), track.Uri.AbsoluteUri);
                AddFileSizeItem (track.FileSize);
            }

            AddSeparator ();

            if (file != null) {
                System.Text.StringBuilder builder = new System.Text.StringBuilder ();
                Banshee.Sources.DurationStatusFormatters.ConfusingPreciseFormatter (builder, file.Properties.Duration);
                AddItem (Catalog.GetString ("Duration:"), String.Format ("{0} ({1}ms)",
                    builder, file.Properties.Duration.TotalMilliseconds));

                AddItem (Catalog.GetString ("Audio Bitrate:"), String.Format ("{0} KB/sec", file.Properties.AudioBitrate));
                AddItem (Catalog.GetString ("Audio Sample Rate:"), String.Format ("{0} Hz", file.Properties.AudioSampleRate));
                AddItem (Catalog.GetString ("Audio Channels:"), file.Properties.AudioChannels);

                if (file.Properties.BitsPerSample > 0) {
                    AddItem (Catalog.GetString ("Bits Per Sample:"), String.Format ("{0} bits", file.Properties.BitsPerSample));
                }

                if ((file.Properties.MediaTypes & TagLib.MediaTypes.Video) != 0) {
                    AddItem (Catalog.GetString ("Video Dimensions:"), String.Format ("{0}x{1}",
                        file.Properties.VideoWidth, file.Properties.VideoHeight));
                }

                foreach (TagLib.ICodec codec in file.Properties.Codecs) {
                    if (codec != null) {
                        /* Translators: {0} is the description of the codec */
                        AddItem (String.Format (Catalog.GetString ("{0} Codec:"),
                            codec.MediaTypes.ToString ()), codec.Description);
                    }
                }

                AddItem (Catalog.GetString ("Container Formats:"), file.TagTypes.ToString ());
                AddSeparator ();
                file.Dispose ();
            }

            // MinValue acts as the "never happened" sentinel for these timestamps
            AddItem (Catalog.GetString ("Imported On:"), track.DateAdded > DateTime.MinValue ?
                track.DateAdded.ToString () : Catalog.GetString ("Unknown"));
            AddItem (Catalog.GetString ("Last Played:"), track.LastPlayed > DateTime.MinValue ?
                track.LastPlayed.ToString () : Catalog.GetString ("Unknown"));
            AddItem (Catalog.GetString ("Last Skipped:"), track.LastSkipped > DateTime.MinValue ?
                track.LastSkipped.ToString () : Catalog.GetString ("Unknown"));
            AddItem (Catalog.GetString ("Play Count:"), track.PlayCount);
            AddItem (Catalog.GetString ("Skip Count:"), track.SkipCount);
            AddItem (Catalog.GetString ("Score:"), track.Score);
        }

        // Adds a "File Size:" row showing both the humanized and raw byte count.
        private void AddFileSizeItem (long bytes)
        {
            Hyena.Query.FileSizeQueryValue value = new Hyena.Query.FileSizeQueryValue (bytes);
            AddItem (Catalog.GetString ("File Size:"), String.Format ("{0} ({1} {2})",
                value.ToUserQuery (), bytes, Catalog.GetString ("bytes")));
        }

        // Lazily (re)creates the backing ListStore and attaches it to the view.
        private void CreateModel ()
        {
            if (model == null) {
                model = new ListStore (typeof (string), typeof (string), typeof (bool),
                    typeof (Pango.EllipsizeMode), typeof(bool), typeof (int));
                view.Model = model;
            }
        }

        public void AddItem (string name, object value)
        {
            AddItem (name, value, false);
        }

        // Appends a name/value row; wrapText chooses wrapping over End-ellipsizing.
        public void AddItem (string name, object value, bool wrapText)
        {
            CreateModel ();
            if (name != null && value != null) {
                model.AppendValues (name, value.ToString (), false,
                    wrapText ? Pango.EllipsizeMode.None : Pango.EllipsizeMode.End, true, wrapText ? 10 : -1);
            }
        }

        public void AddSeparator ()
        {
            CreateModel ();
            model.AppendValues (String.Empty, String.Empty, true, Pango.EllipsizeMode.End, false, -1);
        }

        public int Order {
            get { return 40; }
        }

        public string Title {
            get { return Catalog.GetString ("Properties"); }
        }

        public PageType PageType {
            get { return PageType.View; }
        }

        public Gtk.Widget TabWidget {
            get { return null; }
        }

        public Gtk.Widget Widget {
            get { return this; }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

//-----------------------------------------------------------------------------
//
// Description:
//  This class represents a PackageRelationshipSelector.
//
//-----------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Diagnostics;               // for Debug.Assert

namespace System.IO.Packaging
{
    /// <summary>
    /// Represents a selection rule for PackageRelationships. A relationship can be
    /// picked either by its ID or by its Type; an instance of this class records
    /// which kind of criteria is used and the criteria string itself.
    /// </summary>
    public sealed class PackageRelationshipSelector
    {
        //------------------------------------------------------
        //
        //  Public Constructors
        //
        //------------------------------------------------------

        #region Public Constructor

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="sourceUri">Source Uri of the PackagePart or PackageRoot ("/") that owns the relationship</param>
        /// <param name="selectorType">PackageRelationshipSelectorType enum representing the type of the selectionCriteria</param>
        /// <param name="selectionCriteria">The actual string that is used to select the relationships</param>
        /// <exception cref="ArgumentNullException">If sourceUri is null</exception>
        /// <exception cref="ArgumentNullException">If selectionCriteria is null</exception>
        /// <exception cref="ArgumentOutOfRangeException">If selectorType Enumeration does not have a valid value</exception>
        /// <exception cref="System.Xml.XmlException">If PackageRelationshipSelectorType.Id and selection criteria is not valid Xsd Id</exception>
        /// <exception cref="ArgumentException">If PackageRelationshipSelectorType.Type and selection criteria is not valid relationship type</exception>
        /// <exception cref="ArgumentException">If sourceUri is not "/" to indicate the PackageRoot, then it must conform to the
        /// valid PartUri syntax</exception>
        public PackageRelationshipSelector(Uri sourceUri, PackageRelationshipSelectorType selectorType, string selectionCriteria)
        {
            if (sourceUri == null)
                throw new ArgumentNullException("sourceUri");

            if (selectionCriteria == null)
                throw new ArgumentNullException("selectionCriteria");

            // Anything other than the package root ("/") must be a well-formed part Uri.
            bool isRoot = Uri.Compare(sourceUri, PackUriHelper.PackageRootUri,
                UriComponents.SerializationInfoString, UriFormat.UriEscaped, StringComparison.Ordinal) == 0;
            if (!isRoot)
                sourceUri = PackUriHelper.ValidatePartUri(sourceUri);

            // Validate the criteria according to the selector kind; an empty
            // string produces the appropriate error from the helpers below.
            if (selectorType == PackageRelationshipSelectorType.Type)
                InternalRelationshipCollection.ThrowIfInvalidRelationshipType(selectionCriteria);
            else if (selectorType == PackageRelationshipSelectorType.Id)
                InternalRelationshipCollection.ThrowIfInvalidXsdId(selectionCriteria);
            else
                throw new ArgumentOutOfRangeException("selectorType");

            _sourceUri = sourceUri;
            _selectionCriteria = selectionCriteria;
            _selectorType = selectorType;
        }

        #endregion Public Constructor

        //------------------------------------------------------
        //
        //  Public Properties
        //
        //------------------------------------------------------

        #region Public Properties

        /// <summary>
        /// Uri of the PackagePart (or "/" for the package root) that owns the relationship.
        /// </summary>
        /// <value>PackagePart</value>
        public Uri SourceUri
        {
            get { return _sourceUri; }
        }

        /// <summary>
        /// How SelectionCriteria should be interpreted (by Id or by Type).
        /// </summary>
        /// <value></value>
        public PackageRelationshipSelectorType SelectorType
        {
            get { return _selectorType; }
        }

        /// <summary>
        /// The actual criteria value (a relationship ID or a relationship type).
        /// </summary>
        /// <value></value>
        public string SelectionCriteria
        {
            get { return _selectionCriteria; }
        }

        #endregion Public Properties

        //------------------------------------------------------
        //
        //  Public Methods
        //
        //------------------------------------------------------

        #region Public Methods

        /// <summary>
        /// Collects the PackageRelationships from the given Package (or the part
        /// identified by SourceUri within it) that match this selector.
        /// </summary>
        /// <param name="package">Package object from which we get the relationsips</param>
        /// <returns>List of matching relationships; empty when nothing matches</returns>
        /// <exception cref="ArgumentNullException">If package parameter is null</exception>
        public List<PackageRelationship> Select(Package package)
        {
            if (package == null)
            {
                throw new ArgumentNullException("package");
            }

            List<PackageRelationship> matches = new List<PackageRelationship>(0);
            bool fromPackageRoot = SourceUri.Equals(PackUriHelper.PackageRootUri);

            if (SelectorType == PackageRelationshipSelectorType.Id)
            {
                // An ID identifies at most one relationship.
                if (fromPackageRoot)
                {
                    if (package.RelationshipExists(SelectionCriteria))
                        matches.Add(package.GetRelationship(SelectionCriteria));
                }
                else if (package.PartExists(SourceUri))
                {
                    PackagePart sourcePart = package.GetPart(SourceUri);
                    if (sourcePart.RelationshipExists(SelectionCriteria))
                        matches.Add(sourcePart.GetRelationship(SelectionCriteria));
                }
            }
            else if (SelectorType == PackageRelationshipSelectorType.Type)
            {
                // A type may match any number of relationships.
                if (fromPackageRoot)
                {
                    matches.AddRange(package.GetRelationshipsByType(SelectionCriteria));
                }
                else if (package.PartExists(SourceUri))
                {
                    matches.AddRange(package.GetPart(SourceUri).GetRelationshipsByType(SelectionCriteria));
                }
            }
            else
            {
                // Unreachable: the constructor rejects any other SelectorType and
                // all the properties are readonly.
                Debug.Assert(false, "This option should never be called");
            }

            return matches;
        }

        #endregion Public Methods

        //------------------------------------------------------
        //
        //  Private Fields
        //
        //------------------------------------------------------

        #region Private Members

        private Uri _sourceUri;
        private string _selectionCriteria;
        private PackageRelationshipSelectorType _selectorType;

        #endregion Private Members
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Xunit;

namespace System.Linq.Tests
{
    /// <summary>
    /// Tests for <see cref="Enumerable.Repeat{TResult}(TResult, int)"/>: element
    /// production, materialization (ToArray/ToList), optimized operator
    /// combinations (Take/Skip/First/Last/ElementAt/Count), and enumerator behavior.
    /// </summary>
    public class RepeatTests : EnumerableTests
    {
        [Fact]
        public void Repeat_ProduceCorrectSequence()
        {
            // Enumerate manually (rather than via ToArray) to exercise the iterator path.
            var repeatSequence = Enumerable.Repeat(1, 100);
            int count = 0;
            foreach (var val in repeatSequence)
            {
                count++;
                Assert.Equal(1, val);
            }

            Assert.Equal(100, count);
        }

        [Fact]
        public void Repeat_ToArray_ProduceCorrectResult()
        {
            var array = Enumerable.Repeat(1, 100).ToArray();
            Assert.Equal(100, array.Length);
            Assert.All(array, value => Assert.Equal(1, value));
        }

        [Fact]
        public void Repeat_ToList_ProduceCorrectResult()
        {
            var list = Enumerable.Repeat(1, 100).ToList();
            Assert.Equal(100, list.Count);
            Assert.All(list, value => Assert.Equal(1, value));
        }

        [Fact]
        public void Repeat_ProduceSameObject()
        {
            // Repeat must yield the identical reference each time, not a copy.
            object objectInstance = new object();
            var array = Enumerable.Repeat(objectInstance, 100).ToArray();
            Assert.Equal(100, array.Length);
            Assert.All(array, item => Assert.Same(objectInstance, item));
        }

        [Fact]
        public void Repeat_WorkWithNullElement()
        {
            object objectInstance = null;
            var array = Enumerable.Repeat(objectInstance, 100).ToArray();
            Assert.Equal(100, array.Length);
            Assert.All(array, Assert.Null);
        }

        [Fact]
        public void Repeat_ZeroCountLeadToEmptySequence()
        {
            var array = Enumerable.Repeat(1, 0).ToArray();
            Assert.Empty(array);
        }

        [Fact]
        public void Repeat_ThrowExceptionOnNegativeCount()
        {
            AssertExtensions.Throws<ArgumentOutOfRangeException>("count", () => Enumerable.Repeat(1, -1));
        }

        [Fact]
        public void Repeat_NotEnumerateAfterEnd()
        {
            // MoveNext must keep returning false once the sequence is exhausted.
            using (var repeatEnum = Enumerable.Repeat(1, 1).GetEnumerator())
            {
                Assert.True(repeatEnum.MoveNext());
                Assert.False(repeatEnum.MoveNext());
                Assert.False(repeatEnum.MoveNext());
            }
        }

        [Fact]
        public void Repeat_EnumerableAndEnumeratorAreSame()
        {
            // The iterator object doubles as its own enumerator on first use (allocation optimization).
            var repeatEnumerable = Enumerable.Repeat(1, 1);
            using (var repeatEnumerator = repeatEnumerable.GetEnumerator())
            {
                Assert.Same(repeatEnumerable, repeatEnumerator);
            }
        }

        [Fact]
        public void Repeat_GetEnumeratorReturnUniqueInstances()
        {
            // ...but concurrent enumerations must still get distinct enumerators.
            var repeatEnumerable = Enumerable.Repeat(1, 1);
            using (var enum1 = repeatEnumerable.GetEnumerator())
            using (var enum2 = repeatEnumerable.GetEnumerator())
            {
                Assert.NotSame(enum1, enum2);
            }
        }

        [Fact]
        public void SameResultsRepeatCallsIntQuery()
        {
            Assert.Equal(Enumerable.Repeat(-3, 0), Enumerable.Repeat(-3, 0));
        }

        [Fact]
        public void SameResultsRepeatCallsStringQuery()
        {
            Assert.Equal(Enumerable.Repeat("SSS", 99), Enumerable.Repeat("SSS", 99));
        }

        [Fact]
        public void CountOneSingleResult()
        {
            int[] expected = { -15 };
            Assert.Equal(expected, Enumerable.Repeat(-15, 1));
        }

        [Fact]
        public void RepeatArbitraryCorrectResults()
        {
            int[] expected = { 12, 12, 12, 12, 12, 12, 12, 12 };
            Assert.Equal(expected, Enumerable.Repeat(12, 8));
        }

        [Fact]
        public void RepeatNull()
        {
            int?[] expected = { null, null, null, null };
            Assert.Equal(expected, Enumerable.Repeat((int?)null, 4));
        }

        [Fact]
        public void Take()
        {
            Assert.Equal(Enumerable.Repeat(12, 8), Enumerable.Repeat(12, 12).Take(8));
        }

        [Fact]
        public void TakeExcessive()
        {
            // Taking more than available yields the whole sequence.
            Assert.Equal(Enumerable.Repeat("", 4), Enumerable.Repeat("", 4).Take(22));
        }

        [Fact]
        public void Skip()
        {
            Assert.Equal(Enumerable.Repeat(12, 8), Enumerable.Repeat(12, 12).Skip(4));
        }

        [Fact]
        public void SkipExcessive()
        {
            // Skipping past the end yields an empty sequence.
            Assert.Empty(Enumerable.Repeat(12, 8).Skip(22));
        }

        [Fact]
        public void TakeCanOnlyBeOne()
        {
            Assert.Equal(new[] { 1 }, Enumerable.Repeat(1, 10).Take(1));
            Assert.Equal(new[] { 1 }, Enumerable.Repeat(1, 10).Skip(1).Take(1));
            Assert.Equal(new[] { 1 }, Enumerable.Repeat(1, 10).Take(3).Skip(2));
            Assert.Equal(new[] { 1 }, Enumerable.Repeat(1, 10).Take(3).Take(1));
        }

        [Fact]
        public void SkipNone()
        {
            Assert.Equal(Enumerable.Repeat(12, 8), Enumerable.Repeat(12, 8).Skip(0));
        }

        [Fact]
        public void First()
        {
            Assert.Equal("Test", Enumerable.Repeat("Test", 42).First());
        }

        [Fact]
        public void FirstOrDefault()
        {
            Assert.Equal("Test", Enumerable.Repeat("Test", 42).FirstOrDefault());
        }

        [Fact]
        public void Last()
        {
            Assert.Equal("Test", Enumerable.Repeat("Test", 42).Last());
        }

        [Fact]
        public void LastOrDefault()
        {
            Assert.Equal("Test", Enumerable.Repeat("Test", 42).LastOrDefault());
        }

        [Fact]
        public void ElementAt()
        {
            Assert.Equal("Test", Enumerable.Repeat("Test", 42).ElementAt(13));
        }

        [Fact]
        public void ElementAtOrDefault()
        {
            Assert.Equal("Test", Enumerable.Repeat("Test", 42).ElementAtOrDefault(13));
        }

        [Fact]
        public void ElementAtExcessive()
        {
            AssertExtensions.Throws<ArgumentOutOfRangeException>("index", () => Enumerable.Repeat(3, 3).ElementAt(100));
        }

        [Fact]
        public void ElementAtOrDefaultExcessive()
        {
            // Out-of-range index yields default(int) == 0 instead of throwing.
            Assert.Equal(0, Enumerable.Repeat(3, 3).ElementAtOrDefault(100));
        }

        [Fact]
        public void Count()
        {
            Assert.Equal(42, Enumerable.Repeat("Test", 42).Count());
        }
    }
}
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

using System;
using System.Collections;
using System.Xml;
using System.Diagnostics.Contracts;

namespace System.Xml.Schema
{
  // Code Contracts reference class for System.Xml.Schema.XmlSchemaSet — a cache of
  // XML Schema definition language (XSD) schemas. The method bodies below exist only
  // to declare preconditions (Contract.Requires) and postconditions (Contract.Ensures);
  // they are never executed, hence the default(...) returns. Members without
  // contracts are left as commented-out declarations documenting the real surface.
  public class XmlSchemaSet
  {
#if SILVERLIGHT_4_0_WP
    internal XmlSchemaSet() {}
#elif SILVERLIGHT
    private XmlSchemaSet() {}
#endif

    // Parameterless constructor: no contracts specified.
    //public XmlSchemaSet();

#if !SILVERLIGHT
    // Constructs the set with the given name table.
    // Precondition: nameTable must not be null (ArgumentNullException otherwise).
    public XmlSchemaSet(XmlNameTable nameTable)
    {
      Contract.Requires(nameTable != null);
    }
#endif

    // Compilation settings (defaults to EnableUpaCheck == true): no contracts.
    //public XmlSchemaCompilationSettings CompilationSettings { get; set; }

#if !SILVERLIGHT
    // Number of logical schemas in the set.
    // Postcondition: never negative.
    public int Count
    {
      get
      {
        Contract.Ensures(Contract.Result<int>() >= 0);
        return default(int);
      }
    }
#endif

#if !SILVERLIGHT
    // All global attributes across the schemas in the set.
    // Postcondition: never null.
    public XmlSchemaObjectTable GlobalAttributes
    {
      get
      {
        Contract.Ensures(Contract.Result<XmlSchemaObjectTable>() != null);
        return default(XmlSchemaObjectTable);
      }
    }
#endif

#if !SILVERLIGHT
    // All global elements across the schemas in the set.
    // Postcondition: never null.
    public XmlSchemaObjectTable GlobalElements
    {
      get
      {
        Contract.Ensures(Contract.Result<XmlSchemaObjectTable>() != null);
        return default(XmlSchemaObjectTable);
      }
    }
#endif

#if !SILVERLIGHT
    // All global simple and complex types across the schemas in the set.
    // Postcondition: never null.
    public XmlSchemaObjectTable GlobalTypes
    {
      get
      {
        Contract.Ensures(Contract.Result<XmlSchemaObjectTable>() != null);
        return default(XmlSchemaObjectTable);
      }
    }
#endif

    // True if the schemas have been compiled since the last add/remove: no contracts.
    //public bool IsCompiled { get; }

    // Default name table used when loading new schemas: no contracts.
    //public XmlNameTable NameTable { get; }

    // Resolver for namespaces/locations referenced in include/import elements: no contracts.
    //public XmlResolver XmlResolver { set; }

    // Event handler for receiving XSD schema validation errors.
    public event ValidationEventHandler ValidationEventHandler;

#if !SILVERLIGHT
    // Adds the given schema to the set.
    // Precondition: schema must not be null.
    // Throws XmlSchemaException if the schema is not valid (and no handler is set).
    public XmlSchema Add(XmlSchema schema)
    {
      Contract.Requires(schema != null);
      return default(XmlSchema);
    }

    // Adds every schema from another set into this set.
    // Precondition: schemas must not be null.
    public void Add(XmlSchemaSet schemas)
    {
      Contract.Requires(schemas != null);
    }

    // Adds the schema at the given URL; targetNamespace may be null to use the
    // schema's own targetNamespace. Throws ArgumentNullException when schemaUri
    // is null or empty — contract not expressible here, so none specified.
    public XmlSchema Add(string targetNamespace, string schemaUri)
    {
      return default(XmlSchema);
    }

    // Adds the schema read from the given XmlReader.
    // Precondition: schemaDocument must not be null.
    public XmlSchema Add(string targetNamespace, XmlReader schemaDocument)
    {
      Contract.Requires(schemaDocument != null);
      return default(XmlSchema);
    }
#endif

    // Compiles the added schemas into one logical schema: no contracts.
    //public void Compile();

    // Membership test by target namespace URI: no contracts.
    //public bool Contains(string targetNamespace);

#if !SILVERLIGHT
    // Membership test for a specific XmlSchema instance.
    // Precondition: schema must not be null.
    public bool Contains(XmlSchema schema)
    {
      Contract.Requires(schema != null);
      return default(bool);
    }
#endif

#if !SILVERLIGHT
    // Copies all schemas into the given array starting at index.
    // Preconditions: non-null array, index within bounds.
    public void CopyTo(XmlSchema[] schemas, int index)
    {
      Contract.Requires(schemas != null);
      Contract.Requires(index >= 0);
      Contract.Requires(index < schemas.Length);
    }
#endif

#if !SILVERLIGHT
    // Removes the given schema; returns it, or null if it was not in the set.
    // Precondition: schema must not be null.
    public XmlSchema Remove(XmlSchema schema)
    {
      Contract.Requires(schema != null);
      return default(XmlSchema);
    }
#endif

#if !SILVERLIGHT
    // Removes the given schema and everything it imports; returns success.
    // Precondition: schemaToRemove must not be null.
    public bool RemoveRecursive(XmlSchema schemaToRemove)
    {
      Contract.Requires(schemaToRemove != null);
      return default(bool);
    }
#endif

#if !SILVERLIGHT
    // Reprocesses a schema that already exists in the set.
    // Precondition: schema must not be null. Postcondition: result is not null.
    public XmlSchema Reprocess(XmlSchema schema)
    {
      Contract.Requires(schema != null);
      Contract.Ensures(Contract.Result<XmlSchema>() != null);
      return default(XmlSchema);
    }
#endif

#if !SILVERLIGHT
    // All schemas in the set (empty collection if none were added).
    // Postcondition: never null.
    public ICollection Schemas()
    {
      Contract.Ensures(Contract.Result<ICollection>() != null);
      return default(ICollection);
    }

    // All schemas in the set belonging to the given namespace.
    // Postcondition: never null.
    public ICollection Schemas(string targetNamespace)
    {
      Contract.Ensures(Contract.Result<ICollection>() != null);
      return default(ICollection);
    }
#endif
  }
}
//------------------------------------------------------------------------------
//
//  Copyright (c) Microsoft Corporation, 2005
//
//  File: PreloadedPackages.cs
//
//  Description: Collection of preloaded packages to be used with
//               PackWebRequest.
//
//------------------------------------------------------------------------------

using System;
using System.Security;
using System.Security.Permissions;
using System.Collections;
using System.Collections.Specialized;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Packaging;

using MS.Internal;
using MS.Internal.PresentationCore;     // for ExceptionStringTable

namespace MS.Internal.IO.Packaging
{
    /// <summary>
    /// PreloadedPackages - a global (per-AppDomain), thread-safe uri-to-Package map.
    /// </summary>
    /// <remarks>Note: we purposely didn't make this class a dictionary since it is an internal
    ///          class and we won't be using even half of the dictionary functionalities.
    ///          If this class becomes a public class which is strongly discouraged, this class
    ///          needs to implement IDictionary.</remarks>
    // <SecurityNote>
    //  Critical:   This class serves as a depository of all well-known pre-populated
    //              packages. This class is marked as SecurityCritical to ensure that
    //              1. only trusted code can add/get/remove trusted packages into the depository
    //              2. a whole package will never be given out to the platform client
    //                  Note: it is OK to give out a part stream from a package instance but
    //                        the package related objects such as Package, PackagePart,
    //                        PackageRelationship should NEVER be given out to a client.
    //              List of the trusted packages allowed:
    //              1. ResourceContainer
    //              2. SiteOfOriginContainer
    //              3. ZipPackage that is only instantiated by XPS Viewer
    // </SecurityNote>
    [SecurityCritical(SecurityCriticalScope.Everything)]
    [FriendAccessAllowed]
    internal static class PreloadedPackages
    {
        //------------------------------------------------------
        //
        //  Internal Methods
        //
        //------------------------------------------------------

        #region Internal Methods

        /// <summary>
        /// GetPackage given a uri
        /// </summary>
        /// <param name="uri">uri to match on</param>
        /// <returns>object if found - else null</returns>
        /// <exception cref="ArgumentException">uri must be absolute</exception>
        internal static Package GetPackage(Uri uri)
        {
            bool ignored;
            return GetPackage(uri, out ignored);
        }

        /// <summary>
        /// GetPackage given a uri
        /// </summary>
        /// <param name="uri">uri to match on</param>
        /// <param name="threadSafe">true if the returned package is threadsafe - undefined if null is returned</param>
        /// <returns>object if found - else null</returns>
        /// <exception cref="ArgumentException">uri must be absolute</exception>
        internal static Package GetPackage(Uri uri, out bool threadSafe)
        {
            ValidateUriKey(uri);

            lock (_globalLock)
            {
                Package package = null;
                threadSafe = false;

                if (_packagePairs != null)
                {
                    PackageThreadSafePair packagePair = _packagePairs[uri] as PackageThreadSafePair;
                    if (packagePair != null)
                    {
                        package = packagePair.Package;
                        threadSafe = packagePair.ThreadSafe;
                    }
                }

                return package;
            }
        }

        /// <summary>
        /// AddPackage - default to non-thread-safe
        /// </summary>
        /// <param name="uri">uri to use for matching</param>
        /// <param name="package">package object to serve content from</param>
        /// <remarks>Adds a uri, content pair to the cache. If the uri is already
        /// in the cache, this removes the old content and replaces it.
        /// The object will not be subject to automatic removal from the cache</remarks>
        internal static void AddPackage(Uri uri, Package package)
        {
            AddPackage(uri, package, false);
        }

        /// <summary>
        /// AddPackage
        /// </summary>
        /// <param name="uri">uri to use for matching</param>
        /// <param name="package">package object to serve content from</param>
        /// <param name="threadSafe">is package thread-safe?</param>
        /// <remarks>Adds a uri, content pair to the cache. If the uri is already
        /// in the cache, this removes the old content and replaces it.
        /// The object will not be subject to automatic removal from the cache</remarks>
        internal static void AddPackage(Uri uri, Package package, bool threadSafe)
        {
            ValidateUriKey(uri);

            lock (_globalLock)
            {
                if (_packagePairs == null)
                {
                    _packagePairs = new HybridDictionary(3);
                }

                // Indexer assignment replaces any existing entry for this uri, matching
                // the documented replace semantics above. (HybridDictionary.Add would
                // instead throw ArgumentException on a duplicate key.)
                _packagePairs[uri] = new PackageThreadSafePair(package, threadSafe);
            }
        }

        /// <summary>
        /// RemovePackage
        /// </summary>
        /// <param name="uri">uri of the package that needs to be removed </param>
        /// <remarks>Removes the package corresponding to the uri from the cache. If a matching uri isn't found
        /// the status of the cache doesn't change and no exception is thrown
        /// </remarks>
        internal static void RemovePackage(Uri uri)
        {
            ValidateUriKey(uri);

            lock (_globalLock)
            {
                if (_packagePairs != null)
                {
                    // HybridDictionary.Remove is a no-op for a missing key.
                    _packagePairs.Remove(uri);
                }
            }
        }

        // Null the instance. Similar to Dispose, but not quite.
        // Drops the whole map; a later Add lazily re-creates it.
        internal static void Clear()
        {
            lock (_globalLock)
            {
                _packagePairs = null;
            }
        }

        // Shared argument validation: key uris must be non-null and absolute.
        private static void ValidateUriKey(Uri uri)
        {
            if (uri == null)
            {
                throw new ArgumentNullException("uri");
            }

            if (!uri.IsAbsoluteUri)
            {
                throw new ArgumentException(SR.Get(SRID.UriMustBeAbsolute), "uri");
            }
        }

        #endregion Internal Methods

        /// <summary>
        /// Package-bool pair where the bool represents the thread-safety status of the package
        /// </summary>
        private class PackageThreadSafePair
        {
            //------------------------------------------------------
            //
            //  Internal Constructors
            //
            //------------------------------------------------------
            internal PackageThreadSafePair(Package package, bool threadSafe)
            {
                Invariant.Assert(package != null);

                _package = package;
                _threadSafe = threadSafe;
            }

            //------------------------------------------------------
            //
            //  Internal Properties
            //
            //------------------------------------------------------
            /// <summary>
            /// Package
            /// </summary>
            internal Package Package
            {
                get
                {
                    return _package;
                }
            }

            /// <summary>
            /// True if package is thread-safe
            /// </summary>
            internal bool ThreadSafe
            {
                get
                {
                    return _threadSafe;
                }
            }

            //------------------------------------------------------
            //
            //  Private Fields
            //
            //------------------------------------------------------
            private readonly Package _package;
            private readonly bool _threadSafe;
        }

        //------------------------------------------------------
        //
        //  Private Fields
        //
        //------------------------------------------------------

        #region Private Fields

        // We expect to have no more than 10 preloaded packages per AppDomain for
        // our scenarios. HybridDictionary switches between ListDictionary and
        // Hashtable depending on the size of the collection, which fits well here.
        // Created lazily on first AddPackage; all access is guarded by _globalLock.
        static private HybridDictionary _packagePairs;

        // Single gate for all map access (inline-initialized, so no static ctor needed).
        static private readonly Object _globalLock = new Object();

        #endregion Private Fields
    }
}
using MediaBrowser.Controller.Drawing;
using MediaBrowser.Controller.LiveTv;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.LiveTv;
using MediaBrowser.Model.Logging;
using MediaBrowser.Model.MediaInfo;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using TVHeadEnd.Helper;
using TVHeadEnd.HTSP;
using TVHeadEnd.HTSP_Responses;
using TVHeadEnd.TimeoutHelper;

namespace TVHeadEnd
{
    /// <summary>
    /// Emby/MediaBrowser live-TV service backed by a TVHeadEnd server.
    /// All server communication goes through <see cref="HTSConnectionHandler"/>
    /// using HTSP messages; every round trip is bounded by <see cref="TIMEOUT"/>
    /// via TaskWithTimeoutRunner so a stalled server cannot hang Emby.
    /// </summary>
    public class LiveTvService : ILiveTvService
    {
        public event EventHandler DataSourceChanged;
        public event EventHandler<RecordingStatusChangedEventArgs> RecordingStatusChanged;

        // Added for stream probing of live/recorded assets.
        private readonly IMediaEncoder _mediaEncoder;

        // Upper bound for every HTSP round trip to the TVHeadEnd server.
        private readonly TimeSpan TIMEOUT = TimeSpan.FromMinutes(5);

        private HTSConnectionHandler _htsConnectionHandler;

        // Hands out a per-stream id; wraps back to 0 at int.MaxValue.
        // NOTE(review): 'volatile' does not make the post-increment below
        // atomic; concurrent stream requests could get duplicate ids.
        private volatile int _subscriptionId = 0;

        private readonly ILogger _logger;

        /// <summary>
        /// Wires this service into the shared HTS connection handler singleton.
        /// </summary>
        /// <param name="logger">Emby logger.</param>
        /// <param name="mediaEncoder">used to probe streams for sub-stream info.</param>
        public LiveTvService(ILogger logger, IMediaEncoder mediaEncoder)
        {
            logger.Info("[TVHclient] LiveTvService()");

            _logger = logger;
            _htsConnectionHandler = HTSConnectionHandler.GetInstance(_logger);
            _htsConnectionHandler.setLiveTvService(this);

            // Added for stream probing
            _mediaEncoder = mediaEncoder;
        }

        public string HomePageUrl
        {
            get { return "http://tvheadend.org/"; }
        }

        public string Name
        {
            get { return "TVHclient LiveTvService"; }
        }

        /// <summary>
        /// Raises DataSourceChanged (called by the connection handler when the
        /// TVH data set changes). Swallows and logs any handler exception.
        /// </summary>
        public void sendDataSourceChanged()
        {
            try
            {
                if (DataSourceChanged != null)
                {
                    DataSourceChanged(this, EventArgs.Empty);
                }
                else
                {
                    _logger.Fatal("[TVHclient] sendDataSourceChanged called but EventHandler 'DataSourceChanged' was not set by Emby!!!");
                }
            }
            catch (Exception ex)
            {
                _logger.Error("[TVHclient] LiveTvService.sendDataSourceChanged caught exception: " + ex.Message);
            }
        }

        /// <summary>
        /// Raises RecordingStatusChanged. The Fatal-level trace messages are
        /// intentional breadcrumbs left in by the original author.
        /// </summary>
        public void sendRecordingStatusChanged(RecordingStatusChangedEventArgs recordingStatusChangedEventArgs)
        {
            try
            {
                _logger.Fatal("[TVHclient] sendRecordingStatusChanged 1");
                if (RecordingStatusChanged != null)
                {
                    _logger.Fatal("[TVHclient] sendRecordingStatusChanged 2");
                    RecordingStatusChanged(this, recordingStatusChangedEventArgs);
                }
                else
                {
                    _logger.Fatal("[TVHclient] sendRecordingStatusChanged called but EventHandler 'RecordingStatusChanged' was not set by Emby!!!");
                }
            }
            catch (Exception ex)
            {
                _logger.Error("[TVHclient] LiveTvService.sendRecordingStatusChanged caught exception: " + ex.Message);
            }
        }

        /// <summary>
        /// Cancels a series timer by sending "deleteAutorecEntry" to TVH.
        /// </summary>
        public async Task CancelSeriesTimerAsync(string timerId, CancellationToken cancellationToken)
        {
            int timeOut = await WaitForInitialLoadTask(cancellationToken);
            if (timeOut == -1 || cancellationToken.IsCancellationRequested)
            {
                _logger.Info("[TVHclient] CancelSeriesTimerAsync, call canceled or timed out.");
                return;
            }

            HTSMessage deleteAutorecMessage = new HTSMessage();
            deleteAutorecMessage.Method = "deleteAutorecEntry";
            deleteAutorecMessage.putField("id", timerId);

            TaskWithTimeoutRunner<HTSMessage> twtr = new TaskWithTimeoutRunner<HTSMessage>(TIMEOUT);
            TaskWithTimeoutResult<HTSMessage> twtRes = await twtr.RunWithTimeout(Task.Factory.StartNew<HTSMessage>(() =>
            {
                LoopBackResponseHandler lbrh = new LoopBackResponseHandler();
                _htsConnectionHandler.SendMessage(deleteAutorecMessage, lbrh);
                return lbrh.getResponse();
            }));

            if (twtRes.HasTimeout)
            {
                _logger.Error("[TVHclient] Can't delete recording because of timeout");
            }
            else
            {
                HTSMessage deleteAutorecResponse = twtRes.Result;
                Boolean success = deleteAutorecResponse.getInt("success", 0) == 1;
                if (!success)
                {
                    _logger.Error("[TVHclient] Can't cancel timer: '" + deleteAutorecResponse.getString("error") + "'");
                }
            }
        }

        /// <summary>
        /// Cancels a single timer by sending "cancelDvrEntry" to TVH.
        /// </summary>
        public async Task CancelTimerAsync(string timerId, CancellationToken cancellationToken)
        {
            int timeOut = await WaitForInitialLoadTask(cancellationToken);
            if (timeOut == -1 || cancellationToken.IsCancellationRequested)
            {
                _logger.Info("[TVHclient] CancelTimerAsync, call canceled or timed out.");
                return;
            }

            HTSMessage cancelTimerMessage = new HTSMessage();
            cancelTimerMessage.Method = "cancelDvrEntry";
            cancelTimerMessage.putField("id", timerId);

            TaskWithTimeoutRunner<HTSMessage> twtr = new TaskWithTimeoutRunner<HTSMessage>(TIMEOUT);
            TaskWithTimeoutResult<HTSMessage> twtRes = await twtr.RunWithTimeout(Task.Factory.StartNew<HTSMessage>(() =>
            {
                LoopBackResponseHandler lbrh = new LoopBackResponseHandler();
                _htsConnectionHandler.SendMessage(cancelTimerMessage, lbrh);
                return lbrh.getResponse();
            }));

            if (twtRes.HasTimeout)
            {
                _logger.Error("[TVHclient] Can't cancel timer because of timeout");
            }
            else
            {
                HTSMessage cancelTimerResponse = twtRes.Result;
                Boolean success = cancelTimerResponse.getInt("success", 0) == 1;
                if (!success)
                {
                    _logger.Error("[TVHclient] Can't cancel timer: '" + cancelTimerResponse.getString("error") + "'");
                }
            }
        }

        /// <summary>
        /// No server-side action is needed to close a stream; completes immediately.
        /// </summary>
        public async Task CloseLiveStream(string subscriptionId, CancellationToken cancellationToken)
        {
            await Task.Factory.StartNew<string>(() =>
            {
                //_logger.Info("[TVHclient] CloseLiveStream for subscriptionId = " + subscriptionId);
                return subscriptionId;
            });
        }

        /// <summary>
        /// Not implemented. The await of a dummy task keeps the async method
        /// free of compiler warnings before the NotImplementedException.
        /// </summary>
        public async Task CreateSeriesTimerAsync(SeriesTimerInfo info, CancellationToken cancellationToken)
        {
            // Dummy method to avoid warnings
            await Task.Factory.StartNew<int>(() => { return 0; });

            throw new NotImplementedException();

            // A commented-out reference implementation that built an
            // "addAutorecEntry" HTSP message (title, channelId, min/max
            // duration, priority, profile, daysOfWeek, approxTime,
            // start/stop padding, comment) used to live here; see source
            // history if series-timer creation is ever implemented.
        }

        /// <summary>
        /// Creates a one-shot recording timer via "addDvrEntry".
        /// Pre/post padding is converted from seconds to minutes for TVH.
        /// </summary>
        public async Task CreateTimerAsync(TimerInfo info, CancellationToken cancellationToken)
        {
            int timeOut = await WaitForInitialLoadTask(cancellationToken);
            if (timeOut == -1 || cancellationToken.IsCancellationRequested)
            {
                _logger.Info("[TVHclient] CreateTimerAsync, call canceled or timed out.");
                return;
            }

            HTSMessage createTimerMessage = new HTSMessage();
            createTimerMessage.Method = "addDvrEntry";
            createTimerMessage.putField("channelId", info.ChannelId);
            createTimerMessage.putField("start", DateTimeHelper.getUnixUTCTimeFromUtcDateTime(info.StartDate));
            createTimerMessage.putField("stop", DateTimeHelper.getUnixUTCTimeFromUtcDateTime(info.EndDate));
            createTimerMessage.putField("startExtra", (long)(info.PrePaddingSeconds / 60));
            createTimerMessage.putField("stopExtra", (long)(info.PostPaddingSeconds / 60));
            createTimerMessage.putField("priority", _htsConnectionHandler.GetPriority()); // info.Priority delivers always 0 - no GUI
            createTimerMessage.putField("configName", _htsConnectionHandler.GetProfile());
            createTimerMessage.putField("description", info.Overview);
            createTimerMessage.putField("title", info.Name);
            createTimerMessage.putField("creator", Plugin.Instance.Configuration.Username);

            TaskWithTimeoutRunner<HTSMessage> twtr = new TaskWithTimeoutRunner<HTSMessage>(TIMEOUT);
            TaskWithTimeoutResult<HTSMessage> twtRes = await twtr.RunWithTimeout(Task.Factory.StartNew<HTSMessage>(() =>
            {
                LoopBackResponseHandler lbrh = new LoopBackResponseHandler();
                _htsConnectionHandler.SendMessage(createTimerMessage, lbrh);
                return lbrh.getResponse();
            }));

            if (twtRes.HasTimeout)
            {
                _logger.Error("[TVHclient] Can't create timer because of timeout");
            }
            else
            {
                HTSMessage createTimerResponse = twtRes.Result;
                Boolean success = createTimerResponse.getInt("success", 0) == 1;
                if (!success)
                {
                    _logger.Error("[TVHclient] Can't create timer: '" + createTimerResponse.getString("error") + "'");
                }
            }
        }

        /// <summary>
        /// Deletes a finished/existing recording via "deleteDvrEntry".
        /// </summary>
        public async Task DeleteRecordingAsync(string recordingId, CancellationToken cancellationToken)
        {
            int timeOut = await WaitForInitialLoadTask(cancellationToken);
            if (timeOut == -1 || cancellationToken.IsCancellationRequested)
            {
                _logger.Info("[TVHclient] DeleteRecordingAsync, call canceled or timed out.");
                return;
            }

            HTSMessage deleteRecordingMessage = new HTSMessage();
            deleteRecordingMessage.Method = "deleteDvrEntry";
            deleteRecordingMessage.putField("id", recordingId);

            TaskWithTimeoutRunner<HTSMessage> twtr = new TaskWithTimeoutRunner<HTSMessage>(TIMEOUT);
            TaskWithTimeoutResult<HTSMessage> twtRes = await twtr.RunWithTimeout(Task.Factory.StartNew<HTSMessage>(() =>
            {
                LoopBackResponseHandler lbrh = new LoopBackResponseHandler();
                _htsConnectionHandler.SendMessage(deleteRecordingMessage, lbrh);
                return lbrh.getResponse();
            }));

            if (twtRes.HasTimeout)
            {
                _logger.Error("[TVHclient] Can't delete recording because of timeout");
            }
            else
            {
                HTSMessage deleteRecordingResponse = twtRes.Result;
                Boolean success = deleteRecordingResponse.getInt("success", 0) == 1;
                if (!success)
                {
                    _logger.Error("[TVHclient] Can't delete recording: '" + deleteRecordingResponse.getString("error") + "'");
                }
            }
        }

        /// <summary>
        /// Serves the channel logo from the connection handler's image cache.
        /// </summary>
        public Task<ImageStream> GetChannelImageAsync(string channelId, CancellationToken cancellationToken)
        {
            return Task.FromResult<ImageStream>(_htsConnectionHandler.GetChannelImage(channelId, cancellationToken));
        }

        /// <summary>
        /// Returns all channels known to TVH; empty list on cancel/timeout.
        /// </summary>
        public async Task<IEnumerable<ChannelInfo>> GetChannelsAsync(CancellationToken cancellationToken)
        {
            int timeOut = await WaitForInitialLoadTask(cancellationToken);
            if (timeOut == -1 || cancellationToken.IsCancellationRequested)
            {
                _logger.Info("[TVHclient] GetChannelsAsync, call canceled or timed out - returning empty list.");
                return new List<ChannelInfo>();
            }

            TaskWithTimeoutRunner<IEnumerable<ChannelInfo>> twtr = new TaskWithTimeoutRunner<IEnumerable<ChannelInfo>>(TIMEOUT);
            TaskWithTimeoutResult<IEnumerable<ChannelInfo>> twtRes = await
                twtr.RunWithTimeout(_htsConnectionHandler.BuildChannelInfos(cancellationToken));

            if (twtRes.HasTimeout)
            {
                return new List<ChannelInfo>();
            }

            return twtRes.Result;
        }

        /// <summary>
        /// Opens a live stream: obtains a "getTicket" from TVH and builds the
        /// HTTP MediaSourceInfo. With subs/multi-audio support enabled the
        /// stream is probed via ffmpeg and HTTP basic auth replaces the ticket;
        /// otherwise a ticketed URL with placeholder A/V streams is returned.
        /// </summary>
        /// <exception cref="TimeoutException">if the ticket request times out.</exception>
        public async Task<MediaSourceInfo> GetChannelStream(string channelId, string mediaSourceId, CancellationToken cancellationToken)
        {
            HTSMessage getTicketMessage = new HTSMessage();
            getTicketMessage.Method = "getTicket";
            getTicketMessage.putField("channelId", channelId);

            TaskWithTimeoutRunner<HTSMessage> twtr = new TaskWithTimeoutRunner<HTSMessage>(TIMEOUT);
            TaskWithTimeoutResult<HTSMessage> twtRes = await twtr.RunWithTimeout(Task.Factory.StartNew<HTSMessage>(() =>
            {
                LoopBackResponseHandler lbrh = new LoopBackResponseHandler();
                _htsConnectionHandler.SendMessage(getTicketMessage, lbrh);
                return lbrh.getResponse();
            }));

            if (twtRes.HasTimeout)
            {
                _logger.Error("[TVHclient] Timeout obtaining playback authentication ticket from TVH");
            }
            else
            {
                HTSMessage getTicketResponse = twtRes.Result;

                if (_subscriptionId == int.MaxValue)
                {
                    _subscriptionId = 0;
                }
                int currSubscriptionId = _subscriptionId++;

                if (_htsConnectionHandler.GetEnableSubsMaudios())
                {
                    _logger.Info("[TVHclient] Support for live TV subtitles and multiple audio tracks is enabled.");

                    MediaSourceInfo livetvasset = new MediaSourceInfo();

                    livetvasset.Id = "" + currSubscriptionId;

                    // Use HTTP basic auth instead of TVH ticketing system for authentication
                    // to allow the users to switch subs or audio tracks at any time
                    livetvasset.Path = _htsConnectionHandler.GetHttpBaseUrl() + getTicketResponse.getString("path");
                    livetvasset.Protocol = MediaProtocol.Http;

                    // Probe the asset stream to determine available sub-streams
                    string livetvasset_probeUrl = "" + livetvasset.Path;
                    string livetvasset_source = "LiveTV";

                    await ProbeStream(livetvasset, livetvasset_probeUrl, livetvasset_source, cancellationToken);

                    return livetvasset;
                }
                else
                {
                    return new MediaSourceInfo
                    {
                        Id = "" + currSubscriptionId,
                        Path = _htsConnectionHandler.GetHttpBaseUrl() + getTicketResponse.getString("path") + "?ticket=" + getTicketResponse.getString("ticket"),
                        Protocol = MediaProtocol.Http,
                        MediaStreams = new List<MediaStream>
                        {
                            new MediaStream
                            {
                                Type = MediaStreamType.Video,
                                // Set the index to -1 because we don't know the exact index of the video stream within the container
                                Index = -1,
                                // Set to true if unknown to enable deinterlacing
                                IsInterlaced = true
                            },
                            new MediaStream
                            {
                                Type = MediaStreamType.Audio,
                                // Set the index to -1 because we don't know the exact index of the audio stream within the container
                                Index = -1
                            }
                        }
                    };
                }
            }

            throw new TimeoutException("");
        }

        public Task<List<MediaSourceInfo>> GetChannelStreamMediaSources(string channelId, CancellationToken cancellationToken)
        {
            throw new NotImplementedException();
        }

        /// <summary>
        /// Supplies Emby's defaults for a new series timer (record any
        /// channel/time, no padding, repeats included).
        /// </summary>
        public async Task<SeriesTimerInfo> GetNewTimerDefaultsAsync(CancellationToken cancellationToken, ProgramInfo program = null)
        {
            return await Task.Factory.StartNew<SeriesTimerInfo>(() =>
            {
                return new SeriesTimerInfo
                {
                    PostPaddingSeconds = 0,
                    PrePaddingSeconds = 0,
                    RecordAnyChannel = true,
                    RecordAnyTime = true,
                    RecordNewOnly = false
                };
            });
        }

        public Task<ImageStream> GetProgramImageAsync(string programId, string channelId, CancellationToken cancellationToken)
        {
            // Leave as is. This is handled by supplying image url to ProgramInfo
            throw new NotImplementedException();
        }

        /// <summary>
        /// Queries TVH ("getEvents") for EPG entries of one channel in the
        /// given UTC window; empty list on cancel/timeout.
        /// </summary>
        public async Task<IEnumerable<ProgramInfo>> GetProgramsAsync(string channelId, DateTime startDateUtc, DateTime endDateUtc, CancellationToken cancellationToken)
        {
            int timeOut = await WaitForInitialLoadTask(cancellationToken);
            if (timeOut == -1 || cancellationToken.IsCancellationRequested)
            {
                _logger.Info("[TVHclient] GetProgramsAsync, call canceled or timed out - returning empty list.");
                return new List<ProgramInfo>();
            }

            GetEventsResponseHandler currGetEventsResponseHandler = new GetEventsResponseHandler(startDateUtc, endDateUtc, _logger, cancellationToken);

            HTSMessage queryEvents = new HTSMessage();
            queryEvents.Method = "getEvents";
            queryEvents.putField("channelId", Convert.ToInt32(channelId));

            _htsConnectionHandler.SendMessage(queryEvents, currGetEventsResponseHandler);

            _logger.Info("[TVHclient] GetProgramsAsync, ask TVH for events of channel '" + channelId + "'.");

            TaskWithTimeoutRunner<IEnumerable<ProgramInfo>> twtr = new TaskWithTimeoutRunner<IEnumerable<ProgramInfo>>(TIMEOUT);
            TaskWithTimeoutResult<IEnumerable<ProgramInfo>> twtRes = await
                twtr.RunWithTimeout(currGetEventsResponseHandler.GetEvents(cancellationToken, channelId));

            if (twtRes.HasTimeout)
            {
                _logger.Info("[TVHclient] GetProgramsAsync, timeout during call for events of channel '" + channelId + "'.");
                return new List<ProgramInfo>();
            }

            return twtRes.Result;
        }

        public Task<ImageStream> GetRecordingImageAsync(string recordingId, CancellationToken cancellationToken)
        {
            // Leave as is. This is handled by supplying image url to RecordingInfo
            throw new NotImplementedException();
        }

        /// <summary>
        /// Returns finished/in-progress recordings; empty list on cancel/timeout.
        /// </summary>
        public async Task<IEnumerable<RecordingInfo>> GetRecordingsAsync(CancellationToken cancellationToken)
        {
            // retrieve all 'Pending', 'Inprogress' and 'Completed' recordings
            // we don't deliver the 'Pending' recordings

            int timeOut = await WaitForInitialLoadTask(cancellationToken);
            if (timeOut == -1 || cancellationToken.IsCancellationRequested)
            {
                _logger.Info("[TVHclient] GetRecordingsAsync, call canceled or timed out - returning empty list.");
                return new List<RecordingInfo>();
            }

            TaskWithTimeoutRunner<IEnumerable<RecordingInfo>> twtr = new TaskWithTimeoutRunner<IEnumerable<RecordingInfo>>(TIMEOUT);
            TaskWithTimeoutResult<IEnumerable<RecordingInfo>> twtRes = await
                twtr.RunWithTimeout(_htsConnectionHandler.BuildDvrInfos(cancellationToken));

            if (twtRes.HasTimeout)
            {
                return new List<RecordingInfo>();
            }

            return twtRes.Result;
        }

        /// <summary>
        /// Probes an HTTP asset with ffmpeg and copies the discovered media
        /// properties (container, streams, bitrate, ...) into mediaSourceInfo.
        /// On probe failure the mediaSourceInfo is left unchanged.
        /// </summary>
        /// <param name="mediaSourceInfo">target that receives the probe results.</param>
        /// <param name="probeUrl">URL to probe.</param>
        /// <param name="source">label ("LiveTV"/"Recording") for log messages.</param>
        public async Task ProbeStream(MediaSourceInfo mediaSourceInfo, string probeUrl, string source, CancellationToken cancellationToken)
        {
            _logger.Info("[TVHclient] Probe stream for {0}", source);
            _logger.Info("[TVHclient] Probe URL: {0}", probeUrl);

            MediaInfoRequest req = new MediaInfoRequest
            {
                MediaType = MediaBrowser.Model.Dlna.DlnaProfileType.Video,
                InputPath = probeUrl,
                Protocol = MediaProtocol.Http,
                ExtractChapters = false,
                VideoType = VideoType.VideoFile,
            };

            var originalRuntime = mediaSourceInfo.RunTimeTicks;

            Stopwatch stopWatch = new Stopwatch();
            stopWatch.Start();

            MediaInfo info = await _mediaEncoder.GetMediaInfo(req, cancellationToken).ConfigureAwait(false);

            stopWatch.Stop();
            TimeSpan ts = stopWatch.Elapsed;
            string elapsedTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}", ts.Hours, ts.Minutes, ts.Seconds, ts.Milliseconds / 10);
            _logger.Info("[TVHclient] Probe RunTime " + elapsedTime);

            if (info != null)
            {
                _logger.Info("[TVHclient] Probe returned:");

                mediaSourceInfo.Bitrate = info.Bitrate;
                _logger.Info("[TVHclient] BitRate: " + info.Bitrate);
                mediaSourceInfo.Container = info.Container;
                _logger.Info("[TVHclient] Container: " + info.Container);
                mediaSourceInfo.Formats = info.Formats;
                _logger.Info("[TVHclient] Formats: " + info.Formats);
                mediaSourceInfo.MediaStreams = info.MediaStreams;
                _logger.Info("[TVHclient] MediaStreams: " + info.MediaStreams);
                mediaSourceInfo.RunTimeTicks = info.RunTimeTicks;
                _logger.Info("[TVHclient] RunTimeTicks: " + info.RunTimeTicks);
                mediaSourceInfo.Size = info.Size;
                _logger.Info("[TVHclient] Size: " + info.Size);
                mediaSourceInfo.Timestamp = info.Timestamp;
                _logger.Info("[TVHclient] Timestamp: " + info.Timestamp);
                mediaSourceInfo.Video3DFormat = info.Video3DFormat;
                _logger.Info("[TVHclient] Video3DFormat: " + info.Video3DFormat);
                mediaSourceInfo.VideoType = info.VideoType;
                _logger.Info("[TVHclient] VideoType: " + info.VideoType);

                mediaSourceInfo.RequiresClosing = true;
                mediaSourceInfo.RequiresOpening = true;
                mediaSourceInfo.SupportsDirectPlay = true;
                mediaSourceInfo.SupportsDirectStream = true;
                mediaSourceInfo.SupportsTranscoding = true;
                mediaSourceInfo.DefaultSubtitleStreamIndex = null;

                // Live streams have no fixed runtime; keep it unset if the
                // caller did not supply one.
                if (!originalRuntime.HasValue)
                {
                    mediaSourceInfo.RunTimeTicks = null;
                    _logger.Info("[TVHclient] Original runtime: n/a");
                }

                var audioStream = mediaSourceInfo.MediaStreams.FirstOrDefault(i => i.Type == MediaBrowser.Model.Entities.MediaStreamType.Audio);
                if (audioStream == null || audioStream.Index == -1)
                {
                    mediaSourceInfo.DefaultAudioStreamIndex = null;
                    _logger.Info("[TVHclient] DefaultAudioStreamIndex: n/a");
                }
                else
                {
                    mediaSourceInfo.DefaultAudioStreamIndex = audioStream.Index;
                    // FIX: log the value that was actually assigned
                    // (previously logged info.DefaultAudioStreamIndex).
                    _logger.Info("[TVHclient] DefaultAudioStreamIndex: " + audioStream.Index);
                }
            }
            else
            {
                _logger.Error("[TVHclient] Cannot probe {0} stream", source);
            }
        }

        /// <summary>
        /// Opens a recording stream; mirrors GetChannelStream but tickets are
        /// requested by "dvrId" instead of "channelId".
        /// </summary>
        /// <exception cref="TimeoutException">if the ticket request times out.</exception>
        public async Task<MediaSourceInfo> GetRecordingStream(string recordingId, string mediaSourceId, CancellationToken cancellationToken)
        {
            HTSMessage getTicketMessage = new HTSMessage();
            getTicketMessage.Method = "getTicket";
            getTicketMessage.putField("dvrId", recordingId);

            TaskWithTimeoutRunner<HTSMessage> twtr = new TaskWithTimeoutRunner<HTSMessage>(TIMEOUT);
            TaskWithTimeoutResult<HTSMessage> twtRes = await twtr.RunWithTimeout(Task.Factory.StartNew<HTSMessage>(() =>
            {
                LoopBackResponseHandler lbrh = new LoopBackResponseHandler();
                _htsConnectionHandler.SendMessage(getTicketMessage, lbrh);
                return lbrh.getResponse();
            }));

            if (twtRes.HasTimeout)
            {
                _logger.Error("[TVHclient] Timeout obtaining playback authentication ticket from TVH");
            }
            else
            {
                HTSMessage getTicketResponse = twtRes.Result;

                if (_subscriptionId == int.MaxValue)
                {
                    _subscriptionId = 0;
                }
                int currSubscriptionId = _subscriptionId++;

                if (_htsConnectionHandler.GetEnableSubsMaudios())
                {
                    _logger.Info("[TVHclient] Support for live TV subtitles and multiple audio tracks is enabled.");

                    MediaSourceInfo recordingasset = new MediaSourceInfo();

                    recordingasset.Id = "" + currSubscriptionId;

                    // Use HTTP basic auth instead of TVH ticketing system for authentication
                    // to allow the users to switch subs or audio tracks at any time
                    recordingasset.Path = _htsConnectionHandler.GetHttpBaseUrl() + getTicketResponse.getString("path");
                    recordingasset.Protocol = MediaProtocol.Http;

                    // Set asset source and type for stream probing and logging
                    string recordingasset_probeUrl = "" + recordingasset.Path;
                    string recordingasset_source = "Recording";

                    // Probe the asset stream to determine available sub-streams
                    await ProbeStream(recordingasset, recordingasset_probeUrl, recordingasset_source, cancellationToken);

                    return recordingasset;
                }
                else
                {
                    return new MediaSourceInfo
                    {
                        Id = "" + currSubscriptionId,
                        Path = _htsConnectionHandler.GetHttpBaseUrl() + getTicketResponse.getString("path") + "?ticket=" + getTicketResponse.getString("ticket"),
                        Protocol = MediaProtocol.Http,
                        MediaStreams = new List<MediaStream>
                        {
                            new MediaStream
                            {
                                Type = MediaStreamType.Video,
                                // Set the index to -1 because we don't know the exact index of the video stream within the container
                                Index = -1,
                                // Set to true if unknown to enable deinterlacing
                                IsInterlaced = true
                            },
                            new MediaStream
                            {
                                Type = MediaStreamType.Audio,
                                // Set the index to -1 because we don't know the exact index of the audio stream within the container
                                Index = -1
                            }
                        }
                    };
                }
            }

            throw new TimeoutException();
        }

        public Task<List<MediaSourceInfo>> GetRecordingStreamMediaSources(string recordingId, CancellationToken cancellationToken)
        {
            throw new NotImplementedException();
        }

        /// <summary>
        /// Returns TVH autorec entries as series timers; empty list on cancel/timeout.
        /// </summary>
        public async Task<IEnumerable<SeriesTimerInfo>> GetSeriesTimersAsync(CancellationToken cancellationToken)
        {
            int timeOut = await WaitForInitialLoadTask(cancellationToken);
            if (timeOut == -1 || cancellationToken.IsCancellationRequested)
            {
                // FIX: message previously read "canceled ot timed out".
                _logger.Info("[TVHclient] GetSeriesTimersAsync, call canceled or timed out - returning empty list.");
                return new List<SeriesTimerInfo>();
            }

            TaskWithTimeoutRunner<IEnumerable<SeriesTimerInfo>> twtr = new TaskWithTimeoutRunner<IEnumerable<SeriesTimerInfo>>(TIMEOUT);
            TaskWithTimeoutResult<IEnumerable<SeriesTimerInfo>> twtRes = await
                twtr.RunWithTimeout(_htsConnectionHandler.BuildAutorecInfos(cancellationToken));

            if (twtRes.HasTimeout)
            {
                return new List<SeriesTimerInfo>();
            }

            return twtRes.Result;
        }

        /// <summary>
        /// Reports server name/version, negotiated HTSP protocol version and
        /// free disk space. Returns Unavailable on cancel/timeout.
        /// </summary>
        public async Task<LiveTvServiceStatusInfo> GetStatusInfoAsync(CancellationToken cancellationToken)
        {
            int timeOut = await WaitForInitialLoadTask(cancellationToken);
            if (timeOut == -1 || cancellationToken.IsCancellationRequested)
            {
                _logger.Info("[TVHclient] GetStatusInfoAsync, call canceled or timed out.");
                return new LiveTvServiceStatusInfo
                {
                    Status = LiveTvServiceStatus.Unavailable
                };
            }

            string serverName = _htsConnectionHandler.GetServername();
            string serverVersion = _htsConnectionHandler.GetServerVersion();
            int serverProtocolVersion = _htsConnectionHandler.GetServerProtocolVersion();
            string diskSpace = _htsConnectionHandler.GetDiskSpace();

            // Advertise the lower of the server's and our own HTSP version.
            int usedHTSPversion = (serverProtocolVersion < (int)HTSMessage.HTSP_VERSION) ? serverProtocolVersion : (int)HTSMessage.HTSP_VERSION;

            string serverVersionMessage = "<p>" + serverName + " " + serverVersion + "</p>"
                + "<p>HTSP protocol version: " + usedHTSPversion + "</p>"
                + "<p>Free diskspace: " + diskSpace + "</p>";

            // Tuner enumeration (via a tuner data helper) was disabled in the
            // original source; an empty tuner list is reported instead.
            List<LiveTvTunerInfo> tvTunerInfos = new List<LiveTvTunerInfo>();

            return new LiveTvServiceStatusInfo
            {
                Version = serverVersionMessage,
                Tuners = tvTunerInfos,
                Status = LiveTvServiceStatus.Ok,
            };
        }

        /// <summary>
        /// Returns pending recording timers; empty list on cancel/timeout.
        /// </summary>
        public async Task<IEnumerable<TimerInfo>> GetTimersAsync(CancellationToken cancellationToken)
        {
            // retrieve the 'Pending' recordings");

            int timeOut = await WaitForInitialLoadTask(cancellationToken);
            if (timeOut == -1 || cancellationToken.IsCancellationRequested)
            {
                _logger.Info("[TVHclient] GetTimersAsync, call canceled or timed out - returning empty list.");
                return new List<TimerInfo>();
            }

            TaskWithTimeoutRunner<IEnumerable<TimerInfo>> twtr = new TaskWithTimeoutRunner<IEnumerable<TimerInfo>>(TIMEOUT);
            TaskWithTimeoutResult<IEnumerable<TimerInfo>> twtRes = await
                twtr.RunWithTimeout(_htsConnectionHandler.BuildPendingTimersInfos(cancellationToken));

            if (twtRes.HasTimeout)
            {
                return new List<TimerInfo>();
            }

            return twtRes.Result;
        }

        public Task RecordLiveStream(string id, CancellationToken cancellationToken)
        {
            _logger.Info("[TVHclient] RecordLiveStream " + id);
            throw new NotImplementedException();
        }

        public Task ResetTuner(string id, CancellationToken cancellationToken)
        {
            throw new NotImplementedException();
        }

        /// <summary>
        /// Updates a series timer by cancel-then-recreate. Recreation is
        /// pending on CreateSeriesTimerAsync being implemented.
        /// </summary>
        public async Task UpdateSeriesTimerAsync(SeriesTimerInfo info, CancellationToken cancellationToken)
        {
            await CancelSeriesTimerAsync(info.Id, cancellationToken);

            // TODO add if method is implemented
            // await CreateSeriesTimerAsync(info, cancellationToken);
        }

        /// <summary>
        /// Updates an existing timer's pre/post padding via "updateDvrEntry".
        /// </summary>
        public async Task UpdateTimerAsync(TimerInfo info, CancellationToken cancellationToken)
        {
            int timeOut = await WaitForInitialLoadTask(cancellationToken);
            if (timeOut == -1 || cancellationToken.IsCancellationRequested)
            {
                _logger.Info("[TVHclient] UpdateTimerAsync, call canceled or timed out.");
                return;
            }

            HTSMessage updateTimerMessage = new HTSMessage();
            updateTimerMessage.Method = "updateDvrEntry";
            updateTimerMessage.putField("id", info.Id);
            updateTimerMessage.putField("startExtra", (long)(info.PrePaddingSeconds / 60));
            updateTimerMessage.putField("stopExtra", (long)(info.PostPaddingSeconds / 60));

            TaskWithTimeoutRunner<HTSMessage> twtr = new TaskWithTimeoutRunner<HTSMessage>(TIMEOUT);
            TaskWithTimeoutResult<HTSMessage> twtRes = await twtr.RunWithTimeout(Task.Factory.StartNew<HTSMessage>(() =>
            {
                LoopBackResponseHandler lbrh = new LoopBackResponseHandler();
                _htsConnectionHandler.SendMessage(updateTimerMessage, lbrh);
                return lbrh.getResponse();
            }));

            if (twtRes.HasTimeout)
            {
                _logger.Error("[TVHclient] Can't update timer because of timeout");
            }
            else
            {
                HTSMessage updateTimerResponse = twtRes.Result;
                Boolean success = updateTimerResponse.getInt("success", 0) == 1;
                if (!success)
                {
                    _logger.Error("[TVHclient] Can't update timer: '" + updateTimerResponse.getString("error") + "'");
                }
            }
        }

        /***********/
        /* Helpers */
        /***********/

        // Wraps the blocking WaitForInitialLoad in a task; returns -1 on timeout.
        private Task<int> WaitForInitialLoadTask(CancellationToken cancellationToken)
        {
            return Task.Factory.StartNew<int>(() => _htsConnectionHandler.WaitForInitialLoad(cancellationToken));
        }

        // Debug helper: human-readable dump of a SeriesTimerInfo.
        private string dump(SeriesTimerInfo sti)
        {
            StringBuilder sb = new StringBuilder();
            sb.Append("\n<SeriesTimerInfo>\n");
            sb.Append("  Id:                    " + sti.Id + "\n");
            sb.Append("  Name:                  " + sti.Name + "\n");
            sb.Append("  Overview:              " + sti.Overview + "\n");
            sb.Append("  Priority:              " + sti.Priority + "\n");
            sb.Append("  ChannelId:             " + sti.ChannelId + "\n");
            sb.Append("  ProgramId:             " + sti.ProgramId + "\n");
            sb.Append("  Days:                  " + dump(sti.Days) + "\n");
            sb.Append("  StartDate:             " + sti.StartDate + "\n");
            sb.Append("  EndDate:               " + sti.EndDate + "\n");
            sb.Append("  IsPrePaddingRequired:  " + sti.IsPrePaddingRequired + "\n");
            sb.Append("  PrePaddingSeconds:     " + sti.PrePaddingSeconds + "\n");
            // FIX: previously printed IsPrePaddingRequired under this label.
            sb.Append("  IsPostPaddingRequired: " + sti.IsPostPaddingRequired + "\n");
            sb.Append("  PostPaddingSeconds:    " + sti.PostPaddingSeconds + "\n");
            sb.Append("  RecordAnyChannel:      " + sti.RecordAnyChannel + "\n");
            sb.Append("  RecordAnyTime:         " + sti.RecordAnyTime + "\n");
            sb.Append("  RecordNewOnly:         " + sti.RecordNewOnly + "\n");
            sb.Append("</SeriesTimerInfo>\n");
            return sb.ToString();
        }

        // Debug helper: comma-separated day list without a trailing separator.
        private string dump(List<DayOfWeek> days)
        {
            StringBuilder sb = new StringBuilder();
            foreach (DayOfWeek dow in days)
            {
                sb.Append(dow + ", ");
            }
            string tmpResult = sb.ToString();
            // FIX: the separator is ", " (comma + space); the old check for
            // EndsWith(",") never matched, so the trailing ", " was kept.
            if (tmpResult.EndsWith(", "))
            {
                tmpResult = tmpResult.Substring(0, tmpResult.Length - 2);
            }
            return tmpResult;
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
namespace Microsoft.Azure.Management.Sql.Fluent
{
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Azure.Management.ResourceManager.Fluent.Core;
    using Microsoft.Azure.Management.ResourceManager.Fluent.Core.ChildResource.Definition;
    using Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions;
    using Microsoft.Azure.Management.Sql.Fluent.SqlServer.Definition;
    using Microsoft.Azure.Management.Sql.Fluent.SqlVirtualNetworkRule.Definition;
    using Microsoft.Azure.Management.Sql.Fluent.SqlVirtualNetworkRule.SqlVirtualNetworkRuleDefinition;
    using Microsoft.Azure.Management.Sql.Fluent.SqlVirtualNetworkRule.Update;
    using Microsoft.Azure.Management.Sql.Fluent.SqlVirtualNetworkRuleOperations.Definition;
    using Microsoft.Azure.Management.Sql.Fluent.SqlVirtualNetworkRuleOperations.SqlVirtualNetworkRuleOperationsDefinition;
    using Microsoft.Azure.Management.Sql.Fluent.Models;

    /// <summary>
    /// Explicit-interface forwarding layer for the SQL Virtual Network Rule
    /// fluent implementation: every member below simply delegates to the
    /// implementation method of the same name on this class (generated
    /// partial; keep in sync with the fluent interface definitions).
    /// </summary>
    internal partial class SqlVirtualNetworkRuleImpl
    {
        /// <summary>
        /// Sets the parent SQL server for the new Virtual Network Rule.
        /// </summary>
        /// <param name="sqlServerId">The parent SQL server ID.</param>
        /// <return>The next stage of the definition.</return>
        SqlVirtualNetworkRuleOperations.Definition.IWithSubnet SqlVirtualNetworkRuleOperations.Definition.IWithSqlServer.WithExistingSqlServerId(string sqlServerId)
        {
            return this.WithExistingSqlServerId(sqlServerId);
        }

        /// <summary>
        /// Sets the parent SQL server name and the resource group it belongs to.
        /// </summary>
        /// <param name="resourceGroupName">The name of the resource group of the parent SQL server.</param>
        /// <param name="sqlServerName">The parent SQL server name.</param>
        /// <return>The next stage of the definition.</return>
        SqlVirtualNetworkRuleOperations.Definition.IWithSubnet SqlVirtualNetworkRuleOperations.Definition.IWithSqlServer.WithExistingSqlServer(string resourceGroupName, string sqlServerName)
        {
            return this.WithExistingSqlServer(resourceGroupName, sqlServerName);
        }

        /// <summary>
        /// Sets the parent SQL server for the new Virtual Network Rule.
        /// </summary>
        /// <param name="sqlServer">The parent SQL server.</param>
        /// <return>The next stage of the definition.</return>
        SqlVirtualNetworkRuleOperations.Definition.IWithSubnet SqlVirtualNetworkRuleOperations.Definition.IWithSqlServer.WithExistingSqlServer(ISqlServer sqlServer)
        {
            return this.WithExistingSqlServer(sqlServer);
        }

        /// <summary>
        /// Attaches the child definition to the parent resource definition.
        /// </summary>
        /// <return>The next stage of the parent definition.</return>
        SqlServer.Definition.IWithCreate Microsoft.Azure.Management.ResourceManager.Fluent.Core.ChildResource.Definition.IInDefinition<SqlServer.Definition.IWithCreate>.Attach()
        {
            return this.Attach();
        }

        /// <summary>
        /// Begins an update for an existing resource.
        /// This is the beginning of the builder pattern used to update top level resources
        /// in Azure. The final method completing the definition and starting the actual resource update
        /// process in Azure is Appliable.Apply().
        /// </summary>
        /// <return>The stage of the resource update.</return>
        SqlVirtualNetworkRule.Update.IUpdate Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions.IUpdatable<SqlVirtualNetworkRule.Update.IUpdate>.Update()
        {
            return this.Update();
        }

        /// <summary>
        /// Sets the flag to ignore the missing subnet's SQL service endpoint entry.
        /// Virtual Machines in the subnet will not be able to connect to the SQL server until Microsoft.Sql
        /// service endpoint is added to the subnet.
        /// </summary>
        /// <return>The next stage of the definition.</return>
        SqlVirtualNetworkRule.Definition.IWithAttach<SqlServer.Definition.IWithCreate> SqlVirtualNetworkRule.Definition.IWithServiceEndpoint<SqlServer.Definition.IWithCreate>.IgnoreMissingSqlServiceEndpoint
        {
            get
            {
                return this.IgnoreMissingSqlServiceEndpoint();
            }
        }

        /// <summary>
        /// Sets the virtual network ID and the subnet name for the SQL server Virtual Network Rule.
        /// </summary>
        /// <param name="networkId">The virtual network ID to be used.</param>
        /// <param name="subnetName">The name of the subnet within the virtual network to be used.</param>
        /// <return>The next stage of the definition.</return>
        SqlVirtualNetworkRule.Definition.IWithServiceEndpoint<SqlServer.Definition.IWithCreate> SqlVirtualNetworkRule.Definition.IWithSubnet<SqlServer.Definition.IWithCreate>.WithSubnet(string networkId, string subnetName)
        {
            return this.WithSubnet(networkId, subnetName);
        }

        /// <summary>
        /// Sets the flag to ignore the missing subnet's SQL service endpoint entry.
        /// Virtual Machines in the subnet will not be able to connect to the SQL server until Microsoft.Sql
        /// service endpoint is added to the subnet.
        /// </summary>
        /// <return>The next stage of the update.</return>
        SqlVirtualNetworkRule.Update.IUpdate SqlVirtualNetworkRule.Update.IWithServiceEndpoint.IgnoreMissingSqlServiceEndpoint()
        {
            return this.IgnoreMissingSqlServiceEndpoint();
        }

        /// <summary>
        /// Sets the flag to ignore the missing subnet's SQL service endpoint entry.
        /// Virtual Machines in the subnet will not be able to connect to the SQL server until Microsoft.Sql
        /// service endpoint is added to the subnet.
        /// </summary>
        /// <return>The next stage of the definition.</return>
        SqlVirtualNetworkRuleOperations.Definition.IWithCreate SqlVirtualNetworkRuleOperations.Definition.IWithServiceEndpoint.IgnoreMissingSqlServiceEndpoint()
        {
            return this.IgnoreMissingSqlServiceEndpoint();
        }

        /// <summary>
        /// Gets the name of the resource group.
        /// </summary>
        string Microsoft.Azure.Management.ResourceManager.Fluent.Core.IHasResourceGroup.ResourceGroupName
        {
            get
            {
                return this.ResourceGroupName();
            }
        }

        /// <summary>
        /// Gets the subnet ID of the Azure SQL Server Virtual Network Rule.
        /// </summary>
        string Microsoft.Azure.Management.Sql.Fluent.ISqlVirtualNetworkRule.SubnetId
        {
            get
            {
                return this.SubnetId();
            }
        }

        /// <summary>
        /// Gets the Azure SQL Server Virtual Network Rule state; possible values include: 'Initializing',
        /// 'InProgress', 'Ready', 'Deleting', 'Unknown'.
        /// </summary>
        string Microsoft.Azure.Management.Sql.Fluent.ISqlVirtualNetworkRule.State
        {
            get
            {
                return this.State();
            }
        }

        /// <summary>
        /// Gets the parent SQL server ID.
        /// </summary>
        string Microsoft.Azure.Management.Sql.Fluent.ISqlVirtualNetworkRule.ParentId
        {
            get
            {
                return this.ParentId();
            }
        }

        /// <summary>
        /// Gets the name of the SQL Server to which this Virtual Network Rule belongs.
        /// </summary>
        string Microsoft.Azure.Management.Sql.Fluent.ISqlVirtualNetworkRule.SqlServerName
        {
            get
            {
                return this.SqlServerName();
            }
        }

        /// <summary>
        /// Deletes the virtual network rule.
        /// </summary>
        void Microsoft.Azure.Management.Sql.Fluent.ISqlVirtualNetworkRule.Delete()
        {
            this.Delete();
        }

        /// <summary>
        /// Deletes the virtual network rule asynchronously.
        /// </summary>
        /// <return>A representation of the deferred computation of this call.</return>
        async Task Microsoft.Azure.Management.Sql.Fluent.ISqlVirtualNetworkRule.DeleteAsync(CancellationToken cancellationToken)
        {
            await this.DeleteAsync(cancellationToken);
        }

        /// <summary>
        /// Sets the virtual network ID and the subnet name for the SQL server Virtual Network Rule.
/// </summary> /// <param name="networkId">The virtual network ID to be used.</param> /// <param name="subnetName">The name of the subnet within the virtual network to be used.</param> /// <return>The next stage of the definition.</return> SqlVirtualNetworkRule.Update.IUpdate SqlVirtualNetworkRule.Update.IWithSubnet.WithSubnet(string networkId, string subnetName) { return this.WithSubnet(networkId, subnetName); } /// <summary> /// Sets the virtual network ID and the subnet name for the SQL server Virtual Network Rule. /// </summary> /// <param name="networkId">The virtual network ID to be used.</param> /// <param name="subnetName">The name of the subnet within the virtual network to be used.</param> /// <return>The next stage of the definition.</return> SqlVirtualNetworkRuleOperations.Definition.IWithServiceEndpoint SqlVirtualNetworkRuleOperations.Definition.IWithSubnet.WithSubnet(string networkId, string subnetName) { return this.WithSubnet(networkId, subnetName); } } }
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Orleans.CodeGeneration;
using System.Linq.Expressions;
using System.Text;

namespace Orleans.Runtime
{
    /// <summary>
    /// A collection of utility functions for dealing with Type information.
    /// </summary>
    internal static class TypeUtils
    {
        /// <summary>
        /// The assembly name of the core Orleans assembly.
        /// </summary>
        private static readonly AssemblyName OrleansCoreAssembly = typeof(IGrain).GetTypeInfo().Assembly.GetName();

        // Cache of results computed by GetParseableName(Type, TypeFormattingOptions).
        private static readonly ConcurrentDictionary<Tuple<Type, TypeFormattingOptions>, string> ParseableNameCache = new ConcurrentDictionary<Tuple<Type, TypeFormattingOptions>, string>();

        // Cache of results computed by GetTypes(Type, bool).
        private static readonly ConcurrentDictionary<Tuple<Type, bool>, List<Type>> ReferencedTypes = new ConcurrentDictionary<Tuple<Type, bool>, List<Type>>();

        // Returns t.Name; for array types rendered as Visual Basic, converts the
        // '[' / ']' rank brackets to '(' / ')'.
        private static string GetSimpleNameHandleArray(Type t, Language language)
        {
            if (t.IsArray && language == Language.VisualBasic)
                return t.Name.Replace('[', '(').Replace(']', ')');

            return t.Name;
        }

        // TypeInfo convenience overload of the Type-based helper above.
        private static string GetSimpleNameHandleArray(TypeInfo typeInfo, Language language)
        {
            return GetSimpleNameHandleArray(typeInfo.AsType(), language);
        }

        // Returns a simple (short) name for the type; the fullName predicate decides,
        // per type, whether the namespace-qualified form is used instead.
        public static string GetSimpleTypeName(Type t, Predicate<Type> fullName = null, Language language = Language.CSharp)
        {
            return GetSimpleTypeName(t.GetTypeInfo(), fullName, language);
        }

        // TypeInfo-based core of GetSimpleTypeName. Nested types are rendered as
        // "Declaring.Nested", expanding the declaring type's generic arguments when present.
        public static string GetSimpleTypeName(TypeInfo typeInfo, Predicate<Type> fullName = null, Language language = Language.CSharp)
        {
            if (typeInfo.IsNestedPublic || typeInfo.IsNestedPrivate)
            {
                if (typeInfo.DeclaringType.GetTypeInfo().IsGenericType)
                {
                    return GetTemplatedName(
                        GetUntemplatedTypeName(typeInfo.DeclaringType.Name),
                        typeInfo.DeclaringType,
                        typeInfo.GetGenericArguments(),
                        _ => true,
                        language) + "." + GetUntemplatedTypeName(typeInfo.Name);
                }

                return GetTemplatedName(typeInfo.DeclaringType, language: language) + "." + GetUntemplatedTypeName(typeInfo.Name);
            }

            var type = typeInfo.AsType();
            if (typeInfo.IsGenericType)
                // Strip the generic arity suffix (`N) from the chosen name.
                return GetSimpleTypeName(fullName != null && fullName(type) ? GetFullName(type, language) : GetSimpleNameHandleArray(typeInfo, language));

            return fullName != null && fullName(type) ? GetFullName(type, language) : GetSimpleNameHandleArray(typeInfo, language);
        }

        // Strips a trailing generic arity marker (`N) or C#-style argument list (<...>)
        // from a type name.
        public static string GetUntemplatedTypeName(string typeName)
        {
            int i = typeName.IndexOf('`');
            if (i > 0)
            {
                typeName = typeName.Substring(0, i);
            }
            i = typeName.IndexOf('<');
            if (i > 0)
            {
                typeName = typeName.Substring(0, i);
            }
            return typeName;
        }

        // Like GetUntemplatedTypeName, but also strips a CLR-style argument list ([...]).
        public static string GetSimpleTypeName(string typeName)
        {
            int i = typeName.IndexOf('`');
            if (i > 0)
            {
                typeName = typeName.Substring(0, i);
            }
            i = typeName.IndexOf('[');
            if (i > 0)
            {
                typeName = typeName.Substring(0, i);
            }
            i = typeName.IndexOf('<');
            if (i > 0)
            {
                typeName = typeName.Substring(0, i);
            }
            return typeName;
        }

        // True when the type is generic, or is an array whose element type is
        // (recursively) generic.
        public static bool IsConcreteTemplateType(Type t)
        {
            if (t.GetTypeInfo().IsGenericType) return true;
            return t.IsArray && IsConcreteTemplateType(t.GetElementType());
        }

        // Renders a type name with its generic argument list, e.g. "List<Int32>"
        // (or "List(Of Int32)" for Visual Basic). Arrays get their rank brackets appended.
        public static string GetTemplatedName(Type t, Predicate<Type> fullName = null, Language language = Language.CSharp)
        {
            if (fullName == null)
                fullName = _ => true; // default to full type names

            var typeInfo = t.GetTypeInfo();
            if (typeInfo.IsGenericType) return GetTemplatedName(GetSimpleTypeName(typeInfo, fullName, language), t, typeInfo.GetGenericArguments(), fullName, language);

            if (t.IsArray)
            {
                bool isVB = language == Language.VisualBasic;
                // NOTE(review): 'language' is not forwarded to the element-type recursion here,
                // so the element name falls back to the C# default — confirm whether intended.
                return GetTemplatedName(t.GetElementType(), fullName)
                       + (isVB ? "(" : "[")
                       + new string(',', t.GetArrayRank() - 1)
                       + (isVB ? ")" : "]");
            }

            return GetSimpleTypeName(typeInfo, fullName, language);
        }

        public static bool IsConstructedGenericType(this TypeInfo typeInfo)
        {
            // is there an API that returns this info without converting back to type already?
            return typeInfo.AsType().IsConstructedGenericType;
        }

        // Projects an array of Types to their TypeInfos (lazy).
        internal static IEnumerable<TypeInfo> GetTypeInfos(this Type[] types)
        {
            return types.Select(t => t.GetTypeInfo());
        }

        // Appends the rendered generic argument list to baseName. Types nested in a
        // generic declaring type are returned unchanged (arguments were already rendered
        // on the declaring type).
        public static string GetTemplatedName(string baseName, Type t, Type[] genericArguments, Predicate<Type> fullName, Language language = Language.CSharp)
        {
            var typeInfo = t.GetTypeInfo();
            if (!typeInfo.IsGenericType || (t.DeclaringType != null && t.DeclaringType.GetTypeInfo().IsGenericType)) return baseName;
            bool isVB = language == Language.VisualBasic;
            string s = baseName;
            s += isVB ? "(Of " : "<";
            s += GetGenericTypeArgs(genericArguments, fullName, language);
            s += isVB ? ")" : ">";
            return s;
        }

        // Renders a comma-separated list of generic type arguments, recursing into
        // arguments that are themselves generic.
        public static string GetGenericTypeArgs(IEnumerable<Type> args, Predicate<Type> fullName, Language language = Language.CSharp)
        {
            string s = string.Empty;

            bool first = true;
            foreach (var genericParameter in args)
            {
                if (!first)
                {
                    s += ",";
                }
                if (!genericParameter.GetTypeInfo().IsGenericType)
                {
                    s += GetSimpleTypeName(genericParameter, fullName, language);
                }
                else
                {
                    s += GetTemplatedName(genericParameter, fullName, language);
                }
                first = false;
            }

            return s;
        }

        // Overload defaulting the fullName predicate to "always full names".
        public static string GetParameterizedTemplateName(TypeInfo typeInfo, bool applyRecursively = false, Predicate<Type> fullName = null, Language language = Language.CSharp)
        {
            if (fullName == null)
                fullName = tt => true;

            return GetParameterizedTemplateName(typeInfo, fullName, applyRecursively, language);
        }

        // Renders a generic type with its concrete argument list; non-generic types
        // return their (full or simple) name directly.
        public static string GetParameterizedTemplateName(TypeInfo typeInfo, Predicate<Type> fullName, bool applyRecursively = false, Language language = Language.CSharp)
        {
            if (typeInfo.IsGenericType)
            {
                // NOTE(review): 'language' is not passed to GetSimpleTypeName here,
                // so the base name is rendered with the C# default — confirm whether intended.
                return GetParameterizedTemplateName(GetSimpleTypeName(typeInfo, fullName), typeInfo, applyRecursively, fullName, language);
            }
            var t = typeInfo.AsType();

            if (fullName != null && fullName(t) == true)
            {
                return t.FullName;
            }
            return t.Name;
        }

        // Appends the generic argument list to baseName; when applyRecursively is set,
        // generic arguments are themselves expanded with their own argument lists.
        public static string GetParameterizedTemplateName(string baseName, TypeInfo typeInfo, bool applyRecursively = false, Predicate<Type> fullName = null, Language language = Language.CSharp)
        {
            if (fullName == null)
                fullName = tt => false;

            if (!typeInfo.IsGenericType) return baseName;

            bool isVB = language == Language.VisualBasic;
            string s = baseName;
            s += isVB ? "(Of " : "<";
            bool first = true;
            foreach (var genericParameter in typeInfo.GetGenericArguments())
            {
                if (!first)
                {
                    s += ",";
                }
                var genericParameterTypeInfo = genericParameter.GetTypeInfo();
                if (applyRecursively && genericParameterTypeInfo.IsGenericType)
                {
                    s += GetParameterizedTemplateName(genericParameterTypeInfo, applyRecursively, language: language);
                }
                else
                {
                    s += genericParameter.FullName == null || !fullName(genericParameter)
                        ? genericParameter.Name
                        : genericParameter.FullName;
                }
                first = false;
            }
            s += isVB ? ")" : ">";

            return s;
        }

        // Appends the CLR generic arity marker (`N) to baseName for generic types.
        public static string GetRawClassName(string baseName, Type t)
        {
            var typeInfo = t.GetTypeInfo();
            return typeInfo.IsGenericType ? baseName + '`' + typeInfo.GetGenericArguments().Length : baseName;
        }

        // Strips a CLR-style generic argument list ([...]) from a type name,
        // leaving the raw class name (including any `N arity marker).
        public static string GetRawClassName(string typeName)
        {
            int i = typeName.IndexOf('[');
            return i <= 0 ? typeName : typeName.Substring(0, i);
        }

        // Parses the generic argument section of a CLR type name and resolves
        // each argument to a Type.
        public static Type[] GenericTypeArgsFromClassName(string className)
        {
            return GenericTypeArgsFromArgsString(GenericTypeArgsString(className));
        }

        public static Type[] GenericTypeArgsFromArgsString(string genericArgs)
        {
            if (string.IsNullOrEmpty(genericArgs)) return new Type[] { };

            var genericTypeDef = genericArgs.Replace("[]", "##"); // protect array arguments
            return InnerGenericTypeArgs(genericTypeDef);
        }

        // Resolves each bracket-delimited inner type name to a Type.
        private static Type[] InnerGenericTypeArgs(string className)
        {
            var typeArgs = new List<Type>();
            var innerTypes = GetInnerTypes(className);

            foreach (var innerType in innerTypes)
            {
                if (innerType.StartsWith("[[")) // Resolve and load generic types recursively
                {
                    // NOTE(review): the result of this recursive call is discarded, and the
                    // following lines trim/resolve the outer 'className' rather than 'innerType'
                    // — looks suspicious; confirm intended behavior for nested generics.
                    InnerGenericTypeArgs(GenericTypeArgsString(innerType));
                    string genericTypeArg = className.Trim('[', ']');
                    typeArgs.Add(Type.GetType(genericTypeArg.Replace("##", "[]")));
                }
                else
                {
                    string nonGenericTypeArg = innerType.Trim('[', ']');
                    typeArgs.Add(Type.GetType(nonGenericTypeArg.Replace("##", "[]")));
                }
            }

            return typeArgs.ToArray();
        }

        // Splits the input into top-level bracket-delimited segments ("[...]" or "[[...]]"),
        // tracking nesting depth so inner brackets stay inside their segment.
        private static string[] GetInnerTypes(string input)
        {
            // Iterate over strings of length 2 positionwise.
            var charsWithPositions = input.Zip(Enumerable.Range(0, input.Length), (c, i) => new { Ch = c, Pos = i });
            var candidatesWithPositions = charsWithPositions.Zip(charsWithPositions.Skip(1), (c1, c2) => new { Str = c1.Ch.ToString() + c2.Ch, Pos = c1.Pos });

            var results = new List<string>();
            int startPos = -1;
            int endPos = -1;
            int endTokensNeeded = 0;
            string curStartToken = "";
            string curEndToken = "";
            var tokenPairs = new[] { new { Start = "[[", End = "]]" }, new { Start = "[", End = "]" } }; // Longer tokens need to come before shorter ones

            foreach (var candidate in candidatesWithPositions)
            {
                if (startPos == -1)
                {
                    foreach (var token in tokenPairs)
                    {
                        if (candidate.Str.StartsWith(token.Start))
                        {
                            curStartToken = token.Start;
                            curEndToken = token.End;
                            startPos = candidate.Pos;
                            break;
                        }
                    }
                }

                if (curStartToken != "" && candidate.Str.StartsWith(curStartToken))
                    endTokensNeeded++;

                if (curEndToken != "" && candidate.Str.EndsWith(curEndToken))
                {
                    endPos = candidate.Pos;
                    endTokensNeeded--;
                }

                if (endTokensNeeded == 0 && startPos != -1)
                {
                    results.Add(input.Substring(startPos, endPos - startPos + 2));
                    startPos = -1;
                    curStartToken = "";
                }
            }

            return results.ToArray();
        }

        // Returns the text between the first '[' and the last ']' of a CLR type name.
        public static string GenericTypeArgsString(string className)
        {
            int startIndex = className.IndexOf('[');
            int endIndex = className.LastIndexOf(']');
            return className.Substring(startIndex + 1, endIndex - startIndex - 1);
        }

        // Heuristic: a name containing '`' or '[' denotes a generic class name.
        public static bool IsGenericClass(string name)
        {
            return name.Contains("`") || name.Contains("[");
        }

        public static string GetFullName(TypeInfo typeInfo, Language language = Language.CSharp)
        {
            if (typeInfo == null) throw new ArgumentNullException(nameof(typeInfo));
            // NOTE(review): 'language' is not forwarded to the Type overload here,
            // so VB callers get the C# rendering — confirm whether intended.
            return GetFullName(typeInfo.AsType());
        }

        // Returns the namespace-qualified name of a type; handles nested types,
        // arrays (with language-specific rank brackets), and generic parameters
        // (which have no namespace).
        public static string GetFullName(Type t, Language language = Language.CSharp)
        {
            if (t == null) throw new ArgumentNullException("t");
            if (t.IsNested && !t.IsGenericParameter)
            {
                return t.Namespace + "." + t.DeclaringType.Name + "." + GetSimpleNameHandleArray(t, language);
            }
            if (t.IsArray)
            {
                bool isVB = language == Language.VisualBasic;
                return GetFullName(t.GetElementType(), language)
                       + (isVB ? "(" : "[")
                       + new string(',', t.GetArrayRank() - 1)
                       + (isVB ? ")" : "]");
            }
            // FullName is null for open generic parameters and some constructed types.
            return t.FullName ?? (t.IsGenericParameter ? GetSimpleNameHandleArray(t, language) : t.Namespace + "." + GetSimpleNameHandleArray(t, language));
        }

        /// <summary>
        /// Returns all fields of the specified type, including non-public and inherited
        /// fields (walks the base-type chain up to, but excluding, object).
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>All fields of the specified type.</returns>
        public static IEnumerable<FieldInfo> GetAllFields(this Type type)
        {
            const BindingFlags AllFields = BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.DeclaredOnly;
            var current = type;
            while ((current != typeof(object)) && (current != null))
            {
                var fields = current.GetFields(AllFields);
                foreach (var field in fields)
                {
                    yield return field;
                }

                current = current.GetTypeInfo().BaseType;
            }
        }

        /// <summary>
        /// decide whether the class is derived from Grain
        /// </summary>
        public static bool IsGrainClass(Type type)
        {
            var grainType = typeof(Grain);
            var grainChevronType = typeof(Grain<>);
            // Reflection-only assemblies cannot be compared against normally-loaded types,
            // so coerce the reference types into the reflection-only context first.
            if (type.Assembly.ReflectionOnly)
            {
                grainType = ToReflectionOnlyType(grainType);
                grainChevronType = ToReflectionOnlyType(grainChevronType);
            }

            // The Grain base classes themselves do not count as grain classes.
            if (grainType == type || grainChevronType == type) return false;

            if (!grainType.IsAssignableFrom(type)) return false;

            // exclude generated classes.
            return !IsGeneratedType(type);
        }

        // True when the type derives from Orleans.Runtime.SystemTarget and implements
        // both system-target interfaces; generated classes are excluded.
        public static bool IsSystemTargetClass(Type type)
        {
            Type systemTargetType;
            if (!TryResolveType("Orleans.Runtime.SystemTarget", out systemTargetType)) return false;

            var systemTargetInterfaceType = typeof(ISystemTarget);
            var systemTargetBaseInterfaceType = typeof(ISystemTargetBase);
            if (type.Assembly.ReflectionOnly)
            {
                systemTargetType = ToReflectionOnlyType(systemTargetType);
                systemTargetInterfaceType = ToReflectionOnlyType(systemTargetInterfaceType);
                systemTargetBaseInterfaceType = ToReflectionOnlyType(systemTargetBaseInterfaceType);
            }

            if (!systemTargetInterfaceType.IsAssignableFrom(type) ||
                !systemTargetBaseInterfaceType.IsAssignableFrom(type) ||
                !systemTargetType.IsAssignableFrom(type)) return false;

            // exclude generated classes.
            return !IsGeneratedType(type);
        }

        // True when the type is a non-abstract grain class; when 'complain' is set and the
        // class is abstract, 'complaints' receives a human-readable explanation.
        public static bool IsConcreteGrainClass(Type type, out IEnumerable<string> complaints, bool complain)
        {
            complaints = null;
            if (!IsGrainClass(type)) return false;
            if (!type.GetTypeInfo().IsAbstract) return true;

            complaints = complain ? new[] { string.Format("Grain type {0} is abstract and cannot be instantiated.", type.FullName) } : null;
            return false;
        }

        public static bool IsConcreteGrainClass(Type type, out IEnumerable<string> complaints)
        {
            return IsConcreteGrainClass(type, out complaints, complain: true);
        }

        public static bool IsConcreteGrainClass(Type type)
        {
            IEnumerable<string> complaints;
            return IsConcreteGrainClass(type, out complaints, complain: false);
        }

        // True when the type carries the Orleans code-generation marker attribute.
        public static bool IsGeneratedType(Type type)
        {
            return TypeHasAttribute(type, typeof(GeneratedAttribute));
        }

        /// <summary>
        /// Returns true if the provided <paramref name="type"/> is in any of the provided
        /// <paramref name="namespaces"/>, false otherwise.
        /// </summary>
        /// <param name="type">The type to check.</param>
        /// <param name="namespaces">The candidate namespaces (each matches itself and its sub-namespaces).</param>
        /// <returns>
        /// true if the provided <paramref name="type"/> is in any of the provided <paramref name="namespaces"/>, false
        /// otherwise.
        /// </returns>
        public static bool IsInNamespace(Type type, List<string> namespaces)
        {
            if (type.Namespace == null)
            {
                return false;
            }

            foreach (var ns in namespaces)
            {
                if (ns.Length > type.Namespace.Length)
                {
                    continue;
                }

                // If the candidate namespace is a prefix of the type's namespace, return true.
                if (type.Namespace.StartsWith(ns, StringComparison.Ordinal)
                    && (type.Namespace.Length == ns.Length || type.Namespace[ns.Length] == '.'))
                {
                    return true;
                }
            }

            return false;
        }

        /// <summary>
        /// Returns true if <paramref name="type"/> has implementations of all serialization methods, false otherwise.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>
        /// true if <paramref name="type"/> has implementations of all serialization methods, false otherwise.
        /// </returns>
        public static bool HasAllSerializationMethods(Type type)
        {
            // Check if the type has any of the serialization methods.
            var hasCopier = false;
            var hasSerializer = false;
            var hasDeserializer = false;
            foreach (var method in type.GetMethods(BindingFlags.Static | BindingFlags.Public))
            {
                hasSerializer |= method.GetCustomAttribute<SerializerMethodAttribute>(false) != null;
                hasDeserializer |= method.GetCustomAttribute<DeserializerMethodAttribute>(false) != null;
                hasCopier |= method.GetCustomAttribute<CopierMethodAttribute>(false) != null;
            }

            var hasAllSerializationMethods = hasCopier && hasSerializer && hasDeserializer;
            return hasAllSerializationMethods;
        }

        // True when the type implements IGrainMethodInvoker and carries the
        // MethodInvoker marker attribute.
        public static bool IsGrainMethodInvokerType(Type type)
        {
            var generalType = typeof(IGrainMethodInvoker);
            if (type.Assembly.ReflectionOnly)
            {
                generalType = ToReflectionOnlyType(generalType);
            }
            return generalType.IsAssignableFrom(type) && TypeHasAttribute(type, typeof(MethodInvokerAttribute));
        }

        public static Type ResolveType(string fullName)
        {
            return CachedTypeResolver.Instance.ResolveType(fullName);
        }

        public static bool TryResolveType(string fullName, out Type type)
        {
            return CachedTypeResolver.Instance.TryResolveType(fullName, out type);
        }

        public static Type ResolveReflectionOnlyType(string assemblyQualifiedName)
        {
            return CachedReflectionOnlyTypeResolver.Instance.ResolveType(assemblyQualifiedName);
        }

        // Converts a type to its reflection-only-context equivalent (no-op when the
        // type is already reflection-only).
        public static Type ToReflectionOnlyType(Type type)
        {
            return type.Assembly.ReflectionOnly ? type : ResolveReflectionOnlyType(type.AssemblyQualifiedName);
        }

        // Returns the non-nested-private types of the assembly matching the predicate;
        // dynamic assemblies yield nothing.
        public static IEnumerable<Type> GetTypes(Assembly assembly, Predicate<Type> whereFunc, Logger logger)
        {
            return assembly.IsDynamic ? Enumerable.Empty<Type>() : GetDefinedTypes(assembly, logger).Select(t => t.AsType()).Where(type => !type.GetTypeInfo().IsNestedPrivate && whereFunc(type));
        }

        // Wraps Assembly.DefinedTypes, logging and returning an empty sequence on failure
        // (e.g. ReflectionTypeLoadException) instead of propagating.
        public static IEnumerable<TypeInfo> GetDefinedTypes(Assembly assembly, Logger logger)
        {
            try
            {
                return assembly.DefinedTypes;
            }
            catch (Exception exception)
            {
                if (logger.IsWarning)
                {
                    var message =
                        string.Format(
                            "AssemblyLoader encountered an exception loading types from assembly '{0}': {1}",
                            assembly.FullName,
                            exception);
                    logger.Warn(ErrorCode.Loader_TypeLoadError_5, message, exception);
                }

                return Enumerable.Empty<TypeInfo>();
            }
        }

        // Returns the matching types from every assembly loaded in the current AppDomain.
        public static IEnumerable<Type> GetTypes(Predicate<Type> whereFunc, Logger logger)
        {
            var assemblies = AppDomain.CurrentDomain.GetAssemblies();
            var result = new List<Type>();
            foreach (var assembly in assemblies)
            {
                // there's no point in evaluating nested private types-- one of them fails to coerce to a reflection-only type anyhow.
                var types = GetTypes(assembly, whereFunc, logger);
                result.AddRange(types);
            }

            return result;
        }

        // Returns the matching types from the loaded (non-dynamic) assemblies whose file
        // locations appear in the given list.
        public static IEnumerable<Type> GetTypes(List<string> assemblies, Predicate<Type> whereFunc, Logger logger)
        {
            var currentAssemblies = AppDomain.CurrentDomain.GetAssemblies();
            var result = new List<Type>();
            foreach (var assembly in currentAssemblies.Where(loaded => !loaded.IsDynamic && assemblies.Contains(loaded.Location)))
            {
                // there's no point in evaluating nested private types-- one of them fails to coerce to a reflection-only type anyhow.
                var types = GetTypes(assembly, whereFunc, logger);
                result.AddRange(types);
            }

            return result;
        }

        /// <summary>
        /// Returns a value indicating whether or not the provided <paramref name="methodInfo"/> is a grain method.
        /// </summary>
        /// <param name="methodInfo">The method.</param>
        /// <returns>A value indicating whether or not the provided <paramref name="methodInfo"/> is a grain method.</returns>
        public static bool IsGrainMethod(MethodInfo methodInfo)
        {
            if (methodInfo == null) throw new ArgumentNullException("methodInfo", "Cannot inspect null method info");

            if (methodInfo.IsStatic || methodInfo.IsSpecialName || methodInfo.DeclaringType == null)
            {
                return false;
            }

            return methodInfo.DeclaringType.GetTypeInfo().IsInterface
                   && typeof(IAddressable).IsAssignableFrom(methodInfo.DeclaringType);
        }

        // True when the type carries the given attribute; handles reflection-only
        // contexts by inspecting CustomAttributeData instead of instantiating attributes.
        public static bool TypeHasAttribute(Type type, Type attribType)
        {
            if (type.Assembly.ReflectionOnly || attribType.Assembly.ReflectionOnly)
            {
                type = ToReflectionOnlyType(type);
                attribType = ToReflectionOnlyType(attribType);

                // we can't use Type.GetCustomAttributes here because we could potentially be working with a reflection-only type.
                return CustomAttributeData.GetCustomAttributes(type).Any(
                    attrib => attribType.IsAssignableFrom(attrib.AttributeType));
            }

            return TypeHasAttribute(type.GetTypeInfo(), attribType);
        }

        public static bool TypeHasAttribute(TypeInfo typeInfo, Type attribType)
        {
            return typeInfo.GetCustomAttributes(attribType, true).Any();
        }

        /// <summary>
        /// Returns a sanitized version of <paramref name="type"/>s name which is suitable for use as a class name.
        /// </summary>
        /// <param name="type">
        /// The grain type.
        /// </param>
        /// <returns>
        /// A sanitized version of <paramref name="type"/>s name which is suitable for use as a class name.
        /// </returns>
        public static string GetSuitableClassName(Type type)
        {
            return GetClassNameFromInterfaceName(type.GetUnadornedTypeName());
        }

        /// <summary>
        /// Returns a class-like version of <paramref name="interfaceName"/> (drops a leading 'I'/'i').
        /// </summary>
        /// <param name="interfaceName">
        /// The interface name.
        /// </param>
        /// <returns>
        /// A class-like version of <paramref name="interfaceName"/>.
        /// </returns>
        public static string GetClassNameFromInterfaceName(string interfaceName)
        {
            string cleanName;
            if (interfaceName.StartsWith("i", StringComparison.OrdinalIgnoreCase))
            {
                cleanName = interfaceName.Substring(1);
            }
            else
            {
                cleanName = interfaceName;
            }

            return cleanName;
        }

        /// <summary>
        /// Returns the non-generic type name without any special characters.
        /// </summary>
        /// <param name="type">
        /// The type.
        /// </param>
        /// <returns>
        /// The non-generic type name without any special characters.
        /// </returns>
        public static string GetUnadornedTypeName(this Type type)
        {
            var index = type.Name.IndexOf('`');

            // An ampersand can appear as a suffix to a by-ref type.
            return (index > 0 ? type.Name.Substring(0, index) : type.Name).TrimEnd('&');
        }

        /// <summary>
        /// Returns the non-generic method name without any special characters.
        /// </summary>
        /// <param name="method">
        /// The method.
        /// </param>
        /// <returns>
        /// The non-generic method name without any special characters.
        /// </returns>
        public static string GetUnadornedMethodName(this MethodInfo method)
        {
            var index = method.Name.IndexOf('`');

            return index > 0 ? method.Name.Substring(0, index) : method.Name;
        }

        /// <summary>
        /// Returns a string representation of <paramref name="type"/> that can be parsed back
        /// by the compiler (results are cached per (type, options) pair).
        /// </summary>
        /// <param name="type">
        /// The type.
        /// </param>
        /// <param name="options">
        /// The type formatting options (defaults are used when null).
        /// </param>
        /// <returns>
        /// A string representation of the <paramref name="type"/>.
        /// </returns>
        public static string GetParseableName(this Type type, TypeFormattingOptions options = null)
        {
            options = options ?? new TypeFormattingOptions();
            return ParseableNameCache.GetOrAdd(
                Tuple.Create(type, options),
                _ =>
                {
                    var builder = new StringBuilder();
                    var typeInfo = type.GetTypeInfo();
                    GetParseableName(
                        type,
                        builder,
                        new Queue<Type>(
                            typeInfo.IsGenericTypeDefinition
                                ? typeInfo.GetGenericArguments()
                                : typeInfo.GenericTypeArguments),
                        options);
                    return builder.ToString();
                });
        }

        /// <summary>
        /// Returns a string representation of <paramref name="type"/>.
        /// </summary>
        /// <param name="type">
        /// The type.
        /// </param>
        /// <param name="builder">
        /// The <see cref="StringBuilder"/> to append results to.
        /// </param>
        /// <param name="typeArguments">
        /// The type arguments of <paramref name="type"/>.
        /// </param>
        /// <param name="options">
        /// The type formatting options.
        /// </param>
        private static void GetParseableName(
            Type type,
            StringBuilder builder,
            Queue<Type> typeArguments,
            TypeFormattingOptions options)
        {
            var typeInfo = type.GetTypeInfo();
            if (typeInfo.IsArray)
            {
                builder.AppendFormat(
                    "{0}[{1}]",
                    typeInfo.GetElementType().GetParseableName(options),
                    string.Concat(Enumerable.Range(0, type.GetArrayRank() - 1).Select(_ => ',')));
                return;
            }

            if (typeInfo.IsGenericParameter)
            {
                if (options.IncludeGenericTypeParameters)
                {
                    builder.Append(type.GetUnadornedTypeName());
                }

                return;
            }

            if (typeInfo.DeclaringType != null)
            {
                // This is not the root type.
                GetParseableName(typeInfo.DeclaringType, builder, typeArguments, options);
                builder.Append(options.NestedTypeSeparator);
            }
            else if (!string.IsNullOrWhiteSpace(type.Namespace) && options.IncludeNamespace)
            {
                // This is the root type, so include the namespace.
                var namespaceName = type.Namespace;
                if (options.NestedTypeSeparator != '.')
                {
                    namespaceName = namespaceName.Replace('.', options.NestedTypeSeparator);
                }

                if (options.IncludeGlobal)
                {
                    builder.AppendFormat("global::");
                }

                builder.AppendFormat("{0}{1}", namespaceName, options.NestedTypeSeparator);
            }

            if (type.IsConstructedGenericType)
            {
                // Get the unadorned name, the generic parameters, and add them together.
                var unadornedTypeName = type.GetUnadornedTypeName() + options.NameSuffix;
                builder.Append(EscapeIdentifier(unadornedTypeName));
                var generics =
                    Enumerable.Range(0, Math.Min(typeInfo.GetGenericArguments().Count(), typeArguments.Count))
                        .Select(_ => typeArguments.Dequeue())
                        .ToList();
                if (generics.Count > 0 && options.IncludeTypeParameters)
                {
                    var genericParameters = string.Join(
                        ",",
                        generics.Select(generic => GetParseableName(generic, options)));
                    builder.AppendFormat("<{0}>", genericParameters);
                }
            }
            else if (typeInfo.IsGenericTypeDefinition)
            {
                // Get the unadorned name, the generic parameters, and add them together.
                var unadornedTypeName = type.GetUnadornedTypeName() + options.NameSuffix;
                builder.Append(EscapeIdentifier(unadornedTypeName));
                var generics =
                    Enumerable.Range(0, Math.Min(type.GetGenericArguments().Count(), typeArguments.Count))
                        .Select(_ => typeArguments.Dequeue())
                        .ToList();
                if (generics.Count > 0 && options.IncludeTypeParameters)
                {
                    var genericParameters = string.Join(
                        ",",
                        generics.Select(_ => options.IncludeGenericTypeParameters ? _.ToString() : string.Empty));
                    builder.AppendFormat("<{0}>", genericParameters);
                }
            }
            else
            {
                builder.Append(EscapeIdentifier(type.GetUnadornedTypeName() + options.NameSuffix));
            }
        }

        /// <summary>
        /// Returns the namespaces of the specified types.
        /// </summary>
        /// <param name="types">
        /// The types to include.
        /// </param>
        /// <returns>
        /// The namespaces of the specified types.
        /// </returns>
        public static IEnumerable<string> GetNamespaces(params Type[] types)
        {
            return types.Select(type => "global::" + type.Namespace).Distinct();
        }

        /// <summary>
        /// Returns the <see cref="MethodInfo"/> for the simple method call in the provided <paramref name="expression"/>.
        /// </summary>
        /// <typeparam name="T">
        /// The containing type of the method.
        /// </typeparam>
        /// <typeparam name="TResult">
        /// The return type of the method.
        /// </typeparam>
        /// <param name="expression">
        /// The expression.
        /// </param>
        /// <returns>
        /// The <see cref="MethodInfo"/> for the simple method call in the provided <paramref name="expression"/>.
        /// </returns>
        public static MethodInfo Method<T, TResult>(Expression<Func<T, TResult>> expression)
        {
            var methodCall = expression.Body as MethodCallExpression;
            if (methodCall != null)
            {
                return methodCall.Method;
            }

            throw new ArgumentException("Expression type unsupported.");
        }

        /// <summary>
        /// Returns the <see cref="MemberInfo"/> for the simple member access in the provided <paramref name="expression"/>.
        /// </summary>
        /// <typeparam name="T">
        /// The containing type of the method.
        /// </typeparam>
        /// <typeparam name="TResult">
        /// The return type of the method.
        /// </typeparam>
        /// <param name="expression">
        /// The expression.
        /// </param>
        /// <returns>
        /// The <see cref="MemberInfo"/> for the simple member access call in the provided <paramref name="expression"/>.
        /// </returns>
        public static MemberInfo Member<T, TResult>(Expression<Func<T, TResult>> expression)
        {
            var methodCall = expression.Body as MethodCallExpression;
            if (methodCall != null)
            {
                return methodCall.Method;
            }

            var property = expression.Body as MemberExpression;
            if (property != null)
            {
                return property.Member;
            }

            throw new ArgumentException("Expression type unsupported.");
        }

        /// <summary>
        /// Returns the <see cref="MemberInfo"/> for the simple member access in the provided <paramref name="expression"/>.
        /// </summary>
        /// <typeparam name="TResult">
        /// The return type of the method.
        /// </typeparam>
        /// <param name="expression">
        /// The expression.
        /// </param>
        /// <returns>
        /// The <see cref="MemberInfo"/> for the simple member access call in the provided <paramref name="expression"/>.
        /// </returns>
        public static MemberInfo Member<TResult>(Expression<Func<TResult>> expression)
        {
            var methodCall = expression.Body as MethodCallExpression;
            if (methodCall != null)
            {
                return methodCall.Method;
            }

            var property = expression.Body as MemberExpression;
            if (property != null)
            {
                return property.Member;
            }

            throw new ArgumentException("Expression type unsupported.");
        }

        /// <summary>
        /// Returns the <see cref="MethodInfo"/> for the simple method call in the provided <paramref name="expression"/>.
        /// </summary>
        /// <typeparam name="T">
        /// The containing type of the method.
        /// </typeparam>
        /// <param name="expression">
        /// The expression.
        /// </param>
        /// <returns>
        /// The <see cref="MethodInfo"/> for the simple method call in the provided <paramref name="expression"/>.
        /// </returns>
        public static MethodInfo Method<T>(Expression<Func<T>> expression)
        {
            var methodCall = expression.Body as MethodCallExpression;
            if (methodCall != null)
            {
                return methodCall.Method;
            }

            throw new ArgumentException("Expression type unsupported.");
        }

        /// <summary>
        /// Returns the <see cref="MethodInfo"/> for the simple method call in the provided <paramref name="expression"/>.
        /// </summary>
        /// <typeparam name="T">
        /// The containing type of the method.
        /// </typeparam>
        /// <param name="expression">
        /// The expression.
        /// </param>
        /// <returns>
        /// The <see cref="MethodInfo"/> for the simple method call in the provided <paramref name="expression"/>.
        /// </returns>
        public static MethodInfo Method<T>(Expression<Action<T>> expression)
        {
            var methodCall = expression.Body as MethodCallExpression;
            if (methodCall != null)
            {
                return methodCall.Method;
            }

            throw new ArgumentException("Expression type unsupported.");
        }

        /// <summary>
        /// Returns the namespace of the provided type, or <see cref="string.Empty"/> if the type has no namespace.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>
        /// The namespace of the provided type, or <see cref="string.Empty"/> if the type has no namespace.
        /// </returns>
        public static string GetNamespaceOrEmpty(this Type type)
        {
            if (type == null || string.IsNullOrEmpty(type.Namespace))
            {
                return string.Empty;
            }

            return type.Namespace;
        }

        /// <summary>
        /// Returns the types referenced by the provided <paramref name="type"/> (cached).
        /// </summary>
        /// <param name="type">
        /// The type.
        /// </param>
        /// <param name="includeMethods">
        /// Whether or not to include the types referenced in the methods of this type.
        /// </param>
        /// <returns>
        /// The types referenced by the provided <paramref name="type"/>.
        /// </returns>
        public static IList<Type> GetTypes(this Type type, bool includeMethods = false)
        {
            List<Type> results;
            var key = Tuple.Create(type, includeMethods);
            if (!ReferencedTypes.TryGetValue(key, out results))
            {
                results = GetTypes(type, includeMethods, null).ToList();
                ReferencedTypes.TryAdd(key, results);
            }

            return results;
        }

        /// <summary>
        /// Returns a value indicating whether or not the provided assembly is the Orleans assembly or references it.
        /// </summary>
        /// <param name="assembly">The assembly.</param>
        /// <returns>A value indicating whether or not the provided assembly is the Orleans assembly or references it.</returns>
        internal static bool IsOrleansOrReferencesOrleans(Assembly assembly)
        {
            // We want to be loosely coupled to the assembly version if an assembly depends on an older Orleans,
            // but we want a strong assembly match for the Orleans binary itself
            // (so we don't load 2 different versions of Orleans by mistake)
            return DoReferencesContain(assembly.GetReferencedAssemblies(), OrleansCoreAssembly)
                   || string.Equals(assembly.GetName().FullName, OrleansCoreAssembly.FullName, StringComparison.Ordinal);
        }

        /// <summary>
        /// Returns a value indicating whether or not the specified references contain the provided assembly name.
        /// </summary>
        /// <param name="references">The references.</param>
        /// <param name="assemblyName">The assembly name.</param>
        /// <returns>A value indicating whether or not the specified references contain the provided assembly name.</returns>
        private static bool DoReferencesContain(IReadOnlyCollection<AssemblyName> references, AssemblyName assemblyName)
        {
            if (references.Count == 0)
            {
                return false;
            }

            return references.Any(asm => string.Equals(asm.Name, assemblyName.Name, StringComparison.Ordinal));
        }

        /// <summary>
        /// Returns the types referenced by the provided <paramref name="type"/>.
        /// </summary>
        /// <param name="type">
        /// The type.
        /// </param>
        /// <param name="includeMethods">
        /// Whether or not to include the types referenced in the methods of this type.
        /// </param>
        /// <param name="exclude">Types already visited (cycle guard); may be null.</param>
        /// <returns>
        /// The types referenced by the provided <paramref name="type"/>.
        /// </returns>
        private static IEnumerable<Type> GetTypes(
            this Type type,
            bool includeMethods,
            HashSet<Type> exclude)
        {
            exclude = exclude ??
new HashSet<Type>(); if (!exclude.Add(type)) { yield break; } yield return type; if (type.IsArray) { foreach (var elementType in type.GetElementType().GetTypes(false, exclude: exclude)) { yield return elementType; } } if (type.IsConstructedGenericType) { foreach (var genericTypeArgument in type.GetGenericArguments().SelectMany(_ => GetTypes(_, false, exclude: exclude))) { yield return genericTypeArgument; } } if (!includeMethods) { yield break; } foreach (var method in type.GetMethods()) { foreach (var referencedType in GetTypes(method.ReturnType, false, exclude: exclude)) { yield return referencedType; } foreach (var parameter in method.GetParameters()) { foreach (var referencedType in GetTypes(parameter.ParameterType, false, exclude: exclude)) { yield return referencedType; } } } } private static string EscapeIdentifier(string identifier) { switch (identifier) { case "abstract": case "add": case "base": case "bool": case "break": case "byte": case "case": case "catch": case "char": case "checked": case "class": case "const": case "continue": case "decimal": case "default": case "delegate": case "do": case "double": case "else": case "enum": case "event": case "explicit": case "extern": case "false": case "finally": case "fixed": case "float": case "for": case "foreach": case "get": case "goto": case "if": case "implicit": case "in": case "int": case "interface": case "internal": case "lock": case "long": case "namespace": case "new": case "null": case "object": case "operator": case "out": case "override": case "params": case "partial": case "private": case "protected": case "public": case "readonly": case "ref": case "remove": case "return": case "sbyte": case "sealed": case "set": case "short": case "sizeof": case "static": case "string": case "struct": case "switch": case "this": case "throw": case "true": case "try": case "typeof": case "uint": case "ulong": case "unsafe": case "ushort": case "using": case "virtual": case "where": case "while": return "@" + 
identifier; default: return identifier; } } } }
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Drawing;
using System.Text.RegularExpressions;
using System.Web.UI;
using GuruComponents.Netrix.ComInterop;
using GuruComponents.Netrix.Designer;
using GuruComponents.Netrix.Events;
using GuruComponents.Netrix.PlugIns;
using GuruComponents.Netrix.VmlDesigner.Elements;
using GuruComponents.Netrix.WebEditing.Documents;
using GuruComponents.Netrix.WebEditing.Elements;
using GuruComponents.Netrix.HtmlFormatting.Elements;
using ImageElement=GuruComponents.Netrix.VmlDesigner.Elements.ImageElement;

namespace GuruComponents.Netrix.VmlDesigner
{
    /// <summary>
    /// VmlDesigner Extender Plug-In.
    /// </summary>
    /// <remarks>
    /// Extends <c>IHtmlEditor</c> instances with VML (Vector Markup Language) design-time
    /// support: per-editor property bags, an edit designer behavior, VML element
    /// registration and editor commands.
    /// </remarks>
    [ToolboxItem(true)]
    [ToolboxBitmap(typeof(VmlDesigner), "Resources.VmlDesigner.ico")]
    [ProvideProperty("VmlDesigner", "GuruComponents.Netrix.IHtmlEditor")]
    public class VmlDesigner : Component, IExtenderProvider, IPlugIn
    {
        // Per-editor state, keyed by the IHtmlEditor instance.
        private Hashtable properties;
        private Hashtable behaviors;

        /// <summary>
        /// Default Constructor supports design time behavior.
        /// </summary>
        public VmlDesigner()
        {
            properties = new Hashtable();
            behaviors = new Hashtable();
        }

        /// <summary>
        /// Constructor supports design time behavior.
        /// </summary>
        public VmlDesigner(IContainer parent) : this()
        {
            // NOTE(review): 'properties' was already created by this(); this second assignment
            // replaces it (harmless, both empty) while 'behaviors' keeps the this() instance.
            properties = new Hashtable();
            parent.Add(this);
        }

        // Returns the property bag for the given editor, creating it on first access.
        private VmlDesignerProperties EnsurePropertiesExists(IHtmlEditor key)
        {
            VmlDesignerProperties p = (VmlDesignerProperties)properties[key];
            if (p == null)
            {
                p = new VmlDesignerProperties();
                properties[key] = p;
            }
            return p;
        }

        // Returns the designer behavior for the given editor, creating it on first access.
        private VmlDesignerBehavior EnsureBehaviorExists(IHtmlEditor key)
        {
            VmlDesignerBehavior b = (VmlDesignerBehavior)behaviors[key];
            if (b == null)
            {
                b = new VmlDesignerBehavior(key as IHtmlEditor, EnsurePropertiesExists(key), this);
                behaviors[key] = b;
            }
            return b;
        }

        # region +++++ Block: VmlDesigner

        /// <summary>
        /// Extender getter: returns the VmlDesigner properties attached to the given editor.
        /// </summary>
        [ExtenderProvidedProperty(), Category("NetRix Component"), Description("VmlDesigner Properties")]
        [TypeConverter(typeof(ExpandableObjectConverter))]
        public VmlDesignerProperties GetVmlDesigner(IHtmlEditor htmlEditor)
        {
            return this.EnsurePropertiesExists(htmlEditor);
        }

        /// <summary>
        /// Extender setter: copies the given properties onto the editor's bag and wires
        /// up the designer, the VML namespace, snap events and the plug-in commands.
        /// </summary>
        public void SetVmlDesigner(IHtmlEditor htmlEditor, VmlDesignerProperties Properties)
        {
            // Properties
            EnsurePropertiesExists(htmlEditor).Active = Properties.Active;
            EnsurePropertiesExists(htmlEditor).SnapEnabled = Properties.SnapEnabled;
            EnsurePropertiesExists(htmlEditor).SnapGrid = Properties.SnapGrid;
            EnsurePropertiesExists(htmlEditor).CanRotate = Properties.CanRotate;
            EnsurePropertiesExists(htmlEditor).ElementEvents = Properties.ElementEvents;
            // Designer
            htmlEditor.AddEditDesigner(EnsureBehaviorExists(htmlEditor) as Interop.IHTMLEditDesigner);
            // Behavior
            htmlEditor.RegisterNamespace("v", null);
            htmlEditor.BeforeSnapRect += new BeforeSnapRectEventHandler(htmlEditor_BeforeSnapRect);
            // Commands
            htmlEditor.AddCommand(new CommandWrapper(new EventHandler(VmlDesignerOperation), Commands.Activate));
            htmlEditor.AddCommand(new CommandWrapper(new EventHandler(VmlDesignerOperation), Commands.Deactivate));
            htmlEditor.AddCommand(new CommandWrapper(new EventHandler(VmlDesignerOperation), Commands.EnsureStyle));
            htmlEditor.AddCommand(new CommandWrapper(new EventHandler(VmlDesignerOperation), Commands.InsertMode));
            htmlEditor.AddCommand(new CommandWrapper(new EventHandler(VmlDesignerOperation), Commands.DesignMode));
            // activate behaviors when document is ready, otherwise it will fail
            htmlEditor.RegisterPlugIn(this);
        }

        void htmlEditor_BeforeSnapRect(object sender, BeforeSnapRectEventArgs e)
        {
            // Implement connector feature here
            if (e.Element is IConnector)
            {
            }
        }

        // Comparable wrapper around a coordinate value, used for the (unfinished)
        // connector snap point lists below.
        class FlatPoint : IComparable<FlatPoint>
        {
            int val;

            public int Value
            {
                get { return val; }
                set { val = value; }
            }

            #region IComparable<FlatPoint> Members

            public int CompareTo(FlatPoint other)
            {
                if (this.val > other.val) return 1;
                if (this.val < other.val) return -1;
                return 0;
            }

            #endregion
        }

        private List<FlatPoint> XPoints, YPoints;

        // Placeholder: only lazily creates the point lists; the snapping algorithm
        // described in the comments below is not implemented yet.
        private void RefreshConnectorPointList(IHtmlEditor editor)
        {
            if (XPoints == null)
            {
                XPoints = new List<FlatPoint>();
            }
            if (YPoints == null)
            {
                YPoints = new List<FlatPoint>();
            }
            // use findbehavior to get element by alias
            // Put all x in one table, all y in another
            // sort the list
            // set all "bool" to false but the current connector element
            // get the "true" connector element entry from x
            // check both neirborghs whether they are in range
            // if in range, check y too
            // in case both are in range, move element point to nearest neirborgh
            // refresh list entry
        }

        private VmlDesignerCommands commands;

        /// <summary>
        /// Lazily created set of command descriptors exposed by this plug-in.
        /// </summary>
        [Browsable(false)]
        public VmlDesignerCommands Commands
        {
            get
            {
                if (commands == null)
                {
                    commands = new VmlDesignerCommands();
                }
                return commands;
            }
        }

        // Dispatches CommandWrapper invocations registered in SetVmlDesigner.
        private void VmlDesignerOperation(object sender, EventArgs e)
        {
            CommandWrapper cw = (CommandWrapper)sender;
            if (cw.CommandID.Guid.Equals(Commands.CommandGroup))
            {
                switch ((VmlDesignerCommand)cw.ID)
                {
                    case VmlDesignerCommand.Activate:
                        EnsureBehaviorExists(cw.TargetEditor).Active = true;
                        break;
                    case VmlDesignerCommand.Deactivate:
                        EnsureBehaviorExists(cw.TargetEditor).Active = false;
                        break;
                    case VmlDesignerCommand.EnsureStyle:
                        EnsureVmlStyle(cw.TargetEditor);
                        break;
                    case
VmlDesignerCommand.InsertMode:
                        EnsureBehaviorExists(cw.TargetEditor).InsertMode = true;
                        break;
                    case VmlDesignerCommand.DesignMode:
                        EnsureBehaviorExists(cw.TargetEditor).InsertMode = false;
                        break;
                }
            }
        }

        /// <summary>
        /// Ensures the document declares the VML namespace on the html element and contains
        /// the default <c>v\:* { behavior: url(#default#VML); }</c> style rule, adding a
        /// style element with that rule if no embedded stylesheet already provides it.
        /// </summary>
        /// <param name="editor">The editor whose document is checked.</param>
        internal static void EnsureVmlStyle(IHtmlEditor editor)
        {
            if (editor.GetElementsByTagName("html") == null) return;
            IElement html = (IElement)editor.GetElementsByTagName("html")[0];
            html.SetAttribute("xmlns:v", "urn:schemas-microsoft-com:vml");
            ICollectionBase sc = (ICollectionBase)editor.DocumentStructure.EmbeddedStylesheets;
            // BUGFIX: '*', '(' and ')' are regex metacharacters and were unescaped before
            // ("url(#default#VML)" parsed as a group, ":*" as a quantifier), so the pattern
            // could never match the rule written by SetCssText below and a duplicate
            // <style> element was appended on every call.
            Regex rx = new Regex(@"v\\:\*\s*{\s*behavior:\s*url\(#default#VML\);\s*}", RegexOptions.IgnoreCase);
            foreach (IElement element in sc)
            {
                if (element.InnerText == null) continue;
                if (rx.Match(element.InnerText).Success) return;
            }
            IElement style = editor.CreateElement("style");
            sc.Add(style);
            Interop.IHTMLStyleSheet ss = ((Interop.IHTMLStyleElement)style.GetBaseElement()).styleSheet;
            ss.SetCssText(@"v\:* { behavior: url(#default#VML); }");
            editor.DocumentStructure.EmbeddedStylesheets = sc;
        }

        /// <summary>
        /// Gets the version of the assembly this plug-in was built from.
        /// </summary>
        [Browsable(true), ReadOnly(true)]
        public string Version
        {
            get { return this.GetType().Assembly.GetName().Version.ToString(); }
        }

        /// <summary>
        /// Designer serialization hook; the extender property is always serialized.
        /// </summary>
        public bool ShouldSerializeVmlDesigner(IHtmlEditor htmlEditor)
        {
            // The call is kept for its side effect: it creates the property bag for
            // this editor on first access. The previously assigned local was unused.
            EnsurePropertiesExists(htmlEditor);
            return true;
        }

        # endregion

        #region IExtenderProvider Member

        /// <summary>
        /// Only IHtmlEditor instances can be extended by this provider.
        /// </summary>
        public bool CanExtend(object extendee)
        {
            return extendee is IHtmlEditor;
        }

        #endregion

        private Hashtable behaviorCookies = new Hashtable();

        /// <summary>
        /// Registers all VML element types with the editor once the document is ready.
        /// </summary>
        public void NotifyReadyStateCompleted(IHtmlEditor htmlEditor)
        {
            VmlTagInfo tagInfo;
            // Predefined Shapes
            tagInfo = new VmlTagInfo("rect", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(RectElement));
            tagInfo = new VmlTagInfo("roundrect", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(RoundRectElement));
            tagInfo = new VmlTagInfo("line", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(LineElement));
            tagInfo = new
VmlTagInfo("arc", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(ArcElement));
            tagInfo = new VmlTagInfo("curve", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(CurveElement));
            tagInfo = new VmlTagInfo("image", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(ImageElement));
            tagInfo = new VmlTagInfo("oval", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(OvalElement));
            tagInfo = new VmlTagInfo("polyline", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(PolylineElement));
            tagInfo = new VmlTagInfo("shape", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(ShapeElement));
            tagInfo = new VmlTagInfo("shapetype", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(ShapetypeElement));
            tagInfo = new VmlTagInfo("group", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(GroupElement));
            // Subelements
            tagInfo = new VmlTagInfo("imagedata", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(ImagedataElement));
            tagInfo = new VmlTagInfo("textbox", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(TextboxElement));
            tagInfo = new VmlTagInfo("textpath", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(TextpathElement));
            tagInfo = new VmlTagInfo("background", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(BackgroundElement));
            tagInfo = new VmlTagInfo("fill", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(FillElement));
            tagInfo = new VmlTagInfo("formulas", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(FormulasElement));
            tagInfo = new VmlTagInfo("stroke", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(StrokeElement));
            tagInfo = new VmlTagInfo("shadow", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(ShadowElement));
            tagInfo = new VmlTagInfo("handle", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(HandleElement));
            tagInfo = new VmlTagInfo("h", FormattingFlags.None);
            htmlEditor.RegisterElement(tagInfo, typeof(HElement));
        }

        public override string ToString()
        {
            return "Click plus sign for details";
        }

        #region IPlugIn Members

        public Type Type
        {
            get { return this.GetType(); }
        }

        public string Name
        {
            get { return "VmlDesigner"; }
        }

        public bool IsExtenderProvider
        {
            get { return true; }
        }

        // Capabilities advertised to the host: element creation, an edit designer
        // and an own XML namespace ("v").
        public Feature Features
        {
            get { return Feature.CreateElements | Feature.EditDesigner | Feature.OwnNamespace; }
        }

        // Maps the "v" prefix to the VML namespace handled by this plug-in.
        public IDictionary GetSupportedNamespaces(IHtmlEditor htmlEditor)
        {
            Hashtable ht = new Hashtable();
            ht.Add("v", "vml");
            return ht;
        }

        // Creates a VML element in the active document; any namespace prefix supplied
        // by the caller is stripped and the "v" prefix is re-applied.
        public Control CreateElement(string tagName, IHtmlEditor htmlEditor)
        {
            string ns = "v";
            if (tagName.IndexOf(":") != -1)
            {
                tagName = tagName.Substring(tagName.IndexOf(":") + 1);
            }
            Interop.IHTMLDocument2 doc = htmlEditor.GetActiveDocument(false);
            Interop.IHTMLElement el = doc.CreateElement(String.Concat(ns, ":", tagName));
            if (el != null)
            {
                return htmlEditor.GenericElementFactory.CreateElement(el);
            }
            else
            {
                return null;
            }
        }

        /// <summary>
        /// List of element types, which the extender plugin extends.
        /// </summary>
        /// <remarks>
        /// See <see cref="GuruComponents.Netrix.PlugIns.IPlugIn.ElementExtenders"/> for background information.
        /// </remarks>
        public List<CommandExtender> GetElementExtenders(IElement component)
        {
            // This plug-in provides no per-element command extenders.
            return null;
        }

        #endregion
    }
}
using System;
using System.Data;
using Csla;
using Csla.Data;
using Invoices.DataAccess;

namespace Invoices.Business
{
    /// <summary>
    /// ProductTypeItem (editable child object).<br/>
    /// This is a generated <see cref="ProductTypeItem"/> business object.
    /// </summary>
    /// <remarks>
    /// This class is an item of <see cref="ProductTypeColl"/> collection.
    /// </remarks>
    [Serializable]
    public partial class ProductTypeItem : BusinessBase<ProductTypeItem>
    {
        #region Static Fields

        // Source of temporary (negative) ids handed to new, unsaved items; see Child_Create.
        private static int _lastId;

        #endregion

        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="ProductTypeId"/> property.
        /// </summary>
        [NotUndoable]
        public static readonly PropertyInfo<int> ProductTypeIdProperty = RegisterProperty<int>(p => p.ProductTypeId, "Product Type Id");
        /// <summary>
        /// Gets the Product Type Id.
        /// </summary>
        /// <value>The Product Type Id.</value>
        public int ProductTypeId
        {
            get { return GetProperty(ProductTypeIdProperty); }
        }

        /// <summary>
        /// Maintains metadata about <see cref="Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> NameProperty = RegisterProperty<string>(p => p.Name, "Name");
        /// <summary>
        /// Gets or sets the Name.
        /// </summary>
        /// <value>The Name.</value>
        public string Name
        {
            get { return GetProperty(NameProperty); }
            set { SetProperty(NameProperty, value); }
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="ProductTypeItem"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public ProductTypeItem()
        {
            // Use factory methods and do not use direct creation.
            Saved += OnProductTypeItemSaved;
            // show the framework that this is a child object
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="ProductTypeItem"/> object properties.
        /// </summary>
        [RunLocal]
        protected override void Child_Create()
        {
            // New items get a unique negative id until the database assigns the real
            // identity during Child_Insert.
            LoadProperty(ProductTypeIdProperty, System.Threading.Interlocked.Decrement(ref _lastId));
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="ProductTypeItem"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        private void Child_Fetch(SafeDataReader dr)
        {
            // Value properties
            LoadProperty(ProductTypeIdProperty, dr.GetInt32("ProductTypeId"));
            LoadProperty(NameProperty, dr.GetString("Name"));
            var args = new DataPortalHookArgs(dr);
            OnFetchRead(args);
            // check all object rules and property rules
            BusinessRules.CheckRules();
        }

        /// <summary>
        /// Inserts a new <see cref="ProductTypeItem"/> object in the database.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert()
        {
            using (var dalManager = DalFactoryInvoices.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnInsertPre(args);
                var dal = dalManager.GetProvider<IProductTypeItemDal>();
                using (BypassPropertyChecks)
                {
                    int productTypeId = -1;
                    dal.Insert(
                        out productTypeId,
                        Name
                        );
                    // Replace the temporary negative id with the database-assigned identity.
                    LoadProperty(ProductTypeIdProperty, productTypeId);
                }
                OnInsertPost(args);
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="ProductTypeItem"/> object.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update()
        {
            // Nothing changed: skip the round trip entirely.
            if (!IsDirty)
                return;

            using (var dalManager = DalFactoryInvoices.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnUpdatePre(args);
                var dal = dalManager.GetProvider<IProductTypeItemDal>();
                using (BypassPropertyChecks)
                {
                    dal.Update(
                        ProductTypeId,
                        Name
                        );
                }
                OnUpdatePost(args);
            }
        }

        /// <summary>
        /// Self deletes the <see cref="ProductTypeItem"/> object from database.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf()
        {
            using (var dalManager = DalFactoryInvoices.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnDeletePre(args);
                var dal = dalManager.GetProvider<IProductTypeItemDal>();
                using (BypassPropertyChecks)
                {
                    dal.Delete(ReadProperty(ProductTypeIdProperty));
                }
                OnDeletePost(args);
            }
        }

        #endregion

        #region Saved Event

        // TODO: edit "ProductTypeItem.cs", uncomment the "OnDeserialized" method and add the following line:
        // TODO: Saved += OnProductTypeItemSaved;

        private void OnProductTypeItemSaved(object sender, Csla.Core.SavedEventArgs e)
        {
            if (ProductTypeItemSaved != null)
                ProductTypeItemSaved(sender, e);
        }

        /// <summary> Use this event to signal a <see cref="ProductTypeItem"/> object was saved.</summary>
        public static event EventHandler<Csla.Core.SavedEventArgs> ProductTypeItemSaved;

        #endregion

        #region DataPortal Hooks

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion

    }
}
/******************************************************************************* * Copyright 2008-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"). You may not use * this file except in compliance with the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * ***************************************************************************** * __ _ _ ___ * ( )( \/\/ )/ __) * /__\ \ / \__ \ * (_)(_) \/\/ (___/ * * AWS SDK for .NET */ using System; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Linq; using System.Text; using Amazon.Runtime; using Amazon.Runtime.Internal.Util; namespace Amazon.Util { /// <summary> /// This class defines utilities and constants that can be used by /// all the client libraries of the SDK. /// </summary> public static partial class AWSSDKUtils { #region Internal Constants internal const string DefaultRegion = "us-east-1"; internal const string DefaultGovRegion = "us-gov-west-1"; internal const string SDKVersionNumber = "2.3.24.3"; internal const int DefaultMaxRetry = 3; private const int DefaultConnectionLimit = 50; private const int DefaultMaxIdleTime = 50 * 1000; // 50 seconds internal static readonly DateTime EPOCH_START = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc); internal const int DefaultBufferSize = 8192; // Default value of progress update interval for streaming is 100KB. 
internal const long DefaultProgressUpdateInterval = 102400; internal static Dictionary<int, string> RFCEncodingSchemes = new Dictionary<int, string> { { 3986, ValidUrlCharacters }, { 1738, ValidUrlCharactersRFC1738 } }; #endregion #region Public Constants /// <summary> /// The user agent string header /// </summary> public const string UserAgentHeader = "User-Agent"; /// <summary> /// The Set of accepted and valid Url characters per RFC3986. /// Characters outside of this set will be encoded. /// </summary> public const string ValidUrlCharacters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.~"; /// <summary> /// The Set of accepted and valid Url characters per RFC1738. /// Characters outside of this set will be encoded. /// </summary> public const string ValidUrlCharactersRFC1738 = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_."; /// <summary> /// The set of accepted and valid Url path characters per RFC3986. /// </summary> private static string ValidPathCharacters = DetermineValidPathCharacters(); // Checks which path characters should not be encoded // This set will be different for .NET 4 and .NET 4.5, as // per http://msdn.microsoft.com/en-us/library/hh367887%28v=vs.110%29.aspx private static string DetermineValidPathCharacters() { const string basePathCharacters = "/:'()!*[]"; var sb = new StringBuilder(); foreach (var c in basePathCharacters) { var escaped = Uri.EscapeUriString(c.ToString()); if (escaped.Length == 1 && escaped[0] == c) sb.Append(c); } return sb.ToString(); } /// <summary> /// The string representing Url Encoded Content in HTTP requests /// </summary> public const string UrlEncodedContent = "application/x-www-form-urlencoded; charset=utf-8"; /// <summary> /// The GMT Date Format string. Used when parsing date objects /// </summary> public const string GMTDateFormat = "ddd, dd MMM yyyy HH:mm:ss \\G\\M\\T"; /// <summary> /// The ISO8601Date Format string. 
Used when parsing date objects /// </summary> public const string ISO8601DateFormat = "yyyy-MM-dd\\THH:mm:ss.fff\\Z"; /// <summary> /// The ISO8601Date Format string. Used when parsing date objects /// </summary> public const string ISO8601DateFormatNoMS = "yyyy-MM-dd\\THH:mm:ss\\Z"; /// <summary> /// The ISO8601 Basic date/time format string. Used when parsing date objects /// </summary> public const string ISO8601BasicDateTimeFormat = "yyyyMMddTHHmmssZ"; /// <summary> /// The ISO8601 basic date format. Used during AWS4 signature computation. /// </summary> public const string ISO8601BasicDateFormat = "yyyyMMdd"; /// <summary> /// The RFC822Date Format string. Used when parsing date objects /// </summary> public const string RFC822DateFormat = "ddd, dd MMM yyyy HH:mm:ss \\G\\M\\T"; #endregion #region UserAgent static string _versionNumber; static string _sdkUserAgent; /// <summary> /// The AWS SDK User Agent /// </summary> public static string SDKUserAgent { get { return _sdkUserAgent; } } static AWSSDKUtils() { BuildUserAgentString(); } public static void SetUserAgent(string productName, string versionNumber) { _userAgentBaseName = productName; _versionNumber = versionNumber; BuildUserAgentString(); } static void BuildUserAgentString() { if (_versionNumber == null) { _versionNumber = SDKVersionNumber; } _sdkUserAgent = string.Format(CultureInfo.InvariantCulture, "{0}/{1} .NET Runtime/{2} .NET Framework/{3} OS/{4}", _userAgentBaseName, _versionNumber, DetermineRuntime(), DetermineFramework(), DetermineOSVersion()); } #endregion #region IsSet methods /* Set Collection True -> set to empty AlwaysSend* False -> set to empty collection type Value type True -> set to default(T) False -> set to null Get Collection Field is AlwaysSend* OR has items -> True Otherwise -> False Value type Field is any value -> True Null -> False */ internal static void SetIsSet<T>(bool isSet, ref Nullable<T> field) where T : struct { if (isSet) field = default(T); else field = null; } 
internal static void SetIsSet<T>(bool isSet, ref List<T> field) { if (isSet) field = new AlwaysSendList<T>(field); else field = new List<T>(); } internal static void SetIsSet<TKey, TValue>(bool isSet, ref Dictionary<TKey, TValue> field) { if (isSet) field = new AlwaysSendDictionary<TKey, TValue>(field); else field = new Dictionary<TKey, TValue>(); } internal static bool GetIsSet<T>(Nullable<T> field) where T : struct { return (field.HasValue); } internal static bool GetIsSet<T>(List<T> field) { if (field == null) return false; if (field.Count > 0) return true; var sl = field as AlwaysSendList<T>; if (sl != null) return true; return false; } internal static bool GetIsSet<TKey, TVvalue>(Dictionary<TKey, TVvalue> field) { if (field == null) return false; if (field.Count > 0) return true; var sd = field as AlwaysSendDictionary<TKey, TVvalue>; if (sd != null) return true; return false; } #endregion #region Internal Methods /// <summary> /// Returns an extension of a path. /// This has the same behavior as System.IO.Path.GetExtension, but does not /// check the path for invalid characters. 
/// </summary> /// <param name="path"></param> /// <returns></returns> internal static string GetExtension(string path) { if (path == null) return null; int length = path.Length; int index = length; while (--index >= 0) { char ch = path[index]; if (ch == '.') { if (index != length - 1) return path.Substring(index, length - index); else return string.Empty; } else if (IsPathSeparator(ch)) break; } return string.Empty; } // Checks if the character is one \ / : private static bool IsPathSeparator(char ch) { return (ch == '\\' || ch == '/' || ch == ':'); } /* * Determines the string to be signed based on the input parameters for * AWS Signature Version 2 */ internal static string CalculateStringToSignV2(IDictionary<string, string> parameters, string serviceUrl) { StringBuilder data = new StringBuilder("POST\n", 512); IDictionary<string, string> sorted = new SortedDictionary<string, string>(parameters, StringComparer.Ordinal); Uri endpoint = new Uri(serviceUrl); data.Append(endpoint.Host); data.Append("\n"); string uri = endpoint.AbsolutePath; if (uri == null || uri.Length == 0) { uri = "/"; } data.Append(AWSSDKUtils.UrlEncode(uri, true)); data.Append("\n"); foreach (KeyValuePair<string, string> pair in sorted) { if (pair.Value != null) { data.Append(AWSSDKUtils.UrlEncode(pair.Key, false)); data.Append("="); data.Append(AWSSDKUtils.UrlEncode(pair.Value, false)); data.Append("&"); } } string result = data.ToString(); return result.Remove(result.Length - 1); } /** * Convert Dictionary of paremeters to Url encoded query string */ internal static string GetParametersAsString(IDictionary<string, string> parameters) { string[] keys = new string[parameters.Keys.Count]; parameters.Keys.CopyTo(keys, 0); Array.Sort<string>(keys); StringBuilder data = new StringBuilder(512); foreach (string key in keys) { string value = parameters[key]; if (value != null) { data.Append(key); data.Append('='); data.Append(AWSSDKUtils.UrlEncode(value, false)); data.Append('&'); } } string result = 
data.ToString(); if (result.Length == 0) return string.Empty; return result.Remove(result.Length - 1); } /// <summary> /// Returns a new string created by joining each of the strings in the /// specified list together, with a comma between them. /// </summary> /// <parma name="strings">The list of strings to join into a single, comma delimited /// string list.</parma> /// <returns> A new string created by joining each of the strings in the /// specified list together, with a comma between strings.</returns> internal static String Join(List<String> strings) { StringBuilder result = new StringBuilder(); Boolean first = true; foreach (String s in strings) { if (!first) result.Append(", "); result.Append(s); first = false; } return result.ToString(); } /// <summary> /// Attempt to infer the region for a service request based on the endpoint /// </summary> /// <param name="url">Endpoint to the service to be called</param> /// <returns> /// Region parsed from the endpoint; DefaultRegion (or DefaultGovRegion) /// if it cannot be determined/is not explicit /// </returns> public static string DetermineRegion(string url) { int delimIndex = url.IndexOf("//", StringComparison.Ordinal); if (delimIndex >= 0) url = url.Substring(delimIndex + 2); if(url.EndsWith("/", StringComparison.Ordinal)) url = url.Substring(0, url.Length - 1); int awsIndex = url.IndexOf(".amazonaws.com", StringComparison.Ordinal); if (awsIndex < 0) return DefaultRegion; string serviceAndRegion = url.Substring(0, awsIndex); int cloudSearchIndex = url.IndexOf(".cloudsearch.amazonaws.com", StringComparison.Ordinal); if (cloudSearchIndex > 0) serviceAndRegion = url.Substring(0, cloudSearchIndex); int queueIndex = serviceAndRegion.IndexOf("queue", StringComparison.Ordinal); if (queueIndex == 0) return DefaultRegion; if (queueIndex > 0) return serviceAndRegion.Substring(0, queueIndex - 1); char separator; if (serviceAndRegion.StartsWith("s3-", StringComparison.Ordinal)) separator = '-'; else separator = '.'; int 
separatorIndex = serviceAndRegion.IndexOf(separator); if (separatorIndex == -1) return DefaultRegion; string region = serviceAndRegion.Substring(separatorIndex + 1); if (region.Equals("external-1")) return RegionEndpoint.USEast1.SystemName; if (string.Equals(region, "us-gov", StringComparison.Ordinal)) return DefaultGovRegion; return region; } /// <summary> /// Attempt to infer the service name for a request (in short form, eg 'iam') from the /// service endpoint. /// </summary> /// <param name="url">Endpoint to the service to be called</param> /// <returns> /// Short-form name of the service parsed from the endpoint; empty string if it cannot /// be determined /// </returns> public static string DetermineService(string url) { int delimIndex = url.IndexOf("//", StringComparison.Ordinal); if (delimIndex >= 0) url = url.Substring(delimIndex + 2); string[] urlParts = url.Split(new char[] {'.'}, StringSplitOptions.RemoveEmptyEntries); if (urlParts == null || urlParts.Length == 0) return string.Empty; string servicePart = urlParts[0]; int hyphenated = servicePart.IndexOf('-'); string service; if (hyphenated < 0) { service = servicePart; } else { service = servicePart.Substring(0, hyphenated); } // Check for SQS : return "sqs" incase service is determined to be "queue" as per the URL. if (service.Equals("queue")) { return "sqs"; } else { return service; } } /// <summary> /// Utility method for converting Unix epoch seconds to DateTime structure. 
/// </summary> /// <param name="seconds">The number of seconds since January 1, 1970.</param> /// <returns>Converted DateTime structure</returns> public static DateTime ConvertFromUnixEpochSeconds(int seconds) { return new DateTime(seconds * 10000000L + EPOCH_START.Ticks, DateTimeKind.Utc).ToLocalTime(); } public static int ConvertToUnixEpochSeconds(DateTime dateTime) { return (int)ConvertToUnixEpochMilliSeconds(dateTime); } public static double ConvertToUnixEpochMilliSeconds(DateTime dateTime) { TimeSpan ts = new TimeSpan(dateTime.ToUniversalTime().Ticks - EPOCH_START.Ticks); double milli = Math.Round(ts.TotalMilliseconds, 0) / 1000.0; return milli; } /// <summary> /// Helper function to format a byte array into string /// </summary> /// <param name="data">The data blob to process</param> /// <param name="lowercase">If true, returns hex digits in lower case form</param> /// <returns>String version of the data</returns> internal static string ToHex(byte[] data, bool lowercase) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < data.Length; i++) { sb.Append(data[i].ToString(lowercase ? 
"x2" : "X2", CultureInfo.InvariantCulture)); } return sb.ToString(); } /// <summary> /// Calls a specific EventHandler in a background thread /// </summary> /// <param name="handler"></param> /// <param name="args"></param> /// <param name="sender"></param> internal static void InvokeInBackground<T>(EventHandler<T> handler, T args, object sender) where T : EventArgs { if (handler == null) return; var list = handler.GetInvocationList(); foreach (var call in list) { var eventHandler = ((EventHandler<T>)call); if (eventHandler != null) { if (Dispatcher.IsRunning) Dispatcher.Dispatch(() => eventHandler(sender, args)); } } } private static BackgroundInvoker _dispatcher; private static BackgroundInvoker Dispatcher { get { if (_dispatcher == null) { _dispatcher = new BackgroundInvoker(); } return _dispatcher; } } /// <summary> /// Parses a query string of a URL and returns the parameters as a string-to-string dictionary. /// </summary> /// <param name="url"></param> /// <returns></returns> internal static Dictionary<string, string> ParseQueryParameters(string url) { Dictionary<string, string> parameters = new Dictionary<string, string>(); if (!string.IsNullOrEmpty(url)) { int queryIndex = url.IndexOf('?'); if (queryIndex >= 0) { string queryString = url.Substring(queryIndex + 1); string[] kvps = queryString.Split(new char[] { '&' }, StringSplitOptions.None); foreach (string kvp in kvps) { if (string.IsNullOrEmpty(kvp)) continue; string[] nameValuePair = kvp.Split(new char[] { '=' }, 2); string name = nameValuePair[0]; string value = nameValuePair.Length == 1 ? 
null : nameValuePair[1];
                        parameters[name] = value;
                    }
                }
            }

            return parameters;
        }

        // Adds key/value to the dictionary, failing loudly on duplicate keys
        // instead of silently overwriting.
        internal static void AddToDictionary<TKey, TValue>(Dictionary<TKey, TValue> dictionary, TKey key, TValue value)
        {
            if (dictionary.ContainsKey(key))
                throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, "Dictionary already contains item with key {0}", key));
            dictionary[key] = value;
        }

        // Projects each item to a key and a value and inserts the pair into
        // targetDictionary (throws on duplicate keys via AddToDictionary).
        internal static void FillDictionary<T, TKey, TValue>(IEnumerable<T> items, Func<T, TKey> keyGenerator, Func<T, TValue> valueGenerator, Dictionary<TKey, TValue> targetDictionary)
        {
            foreach (var item in items)
            {
                // Arguments evaluate left-to-right: key first, then value.
                AddToDictionary(targetDictionary, keyGenerator(item), valueGenerator(item));
            }
        }

        // Builds a new dictionary from items using the supplied key/value
        // projections and an optional key comparer.
        internal static Dictionary<TKey, TValue> ToDictionary<T, TKey, TValue>(IEnumerable<T> items, Func<T, TKey> keyGenerator, Func<T, TValue> valueGenerator, IEqualityComparer<TKey> comparer = null)
        {
            var dictionary = comparer == null
                ? new Dictionary<TKey, TValue>()
                : new Dictionary<TKey, TValue>(comparer);
            FillDictionary(items, keyGenerator, valueGenerator, dictionary);
            return dictionary;
        }

        // Linear scan for the first key whose value matches under valueComparer.
        internal static bool TryFindByValue<TKey, TValue>(
            IDictionary<TKey, TValue> dictionary, TValue value, IEqualityComparer<TValue> valueComparer,
            out TKey key)
        {
            foreach (var kvp in dictionary)
            {
                if (valueComparer.Equals(value, kvp.Value))
                {
                    key = kvp.Key;
                    return true;
                }
            }

            key = default(TKey);
            return false;
        }

        // Wraps a string in a rewound MemoryStream.
        internal static Stream GenerateStreamFromString(string s)
        {
            // The writer is intentionally not disposed: disposing it would close
            // the MemoryStream being returned to the caller.
            MemoryStream stream = new MemoryStream();
            StreamWriter writer = new StreamWriter(stream);
            writer.Write(s);
            writer.Flush();
            stream.Position = 0;
            return stream;
        }

        #endregion

        #region Public Methods and Properties

        /// <summary>
        /// Formats the current date as a GMT timestamp
        /// </summary>
        /// <returns>A GMT formatted string representation
        /// of the current date and time
        /// </returns>
        public static string FormattedCurrentTimestampGMT
        {
            get
            {
                DateTime utcNow = AWSSDKUtils.CorrectedUtcNow;
                // NOTE(review): the UTC components are re-tagged as DateTimeKind.Local
                // before formatting -- presumably so no additional zone conversion is
                // applied; confirm against the GMTDateFormat pattern.
                DateTime stamped = new DateTime(
                    utcNow.Year, utcNow.Month, utcNow.Day,
                    utcNow.Hour, utcNow.Minute, utcNow.Second, utcNow.Millisecond,
                    DateTimeKind.Local);
                return stamped.ToString(GMTDateFormat, CultureInfo.InvariantCulture);
            }
        }

        /// <summary>
        /// Formats the current date as ISO 8601 timestamp
        /// </summary>
        /// <returns>An ISO 8601 formatted string representation
        /// of the current date and time
        /// </returns>
        public static string FormattedCurrentTimestampISO8601
        {
            get { return GetFormattedTimestampISO8601(0); }
        }

        /// <summary>
        /// Gets the ISO8601 formatted timestamp that is minutesFromNow
        /// in the future.
        /// </summary>
        /// <param name="minutesFromNow">The number of minutes from the current instant
        /// for which the timestamp is needed.</param>
        /// <returns>The ISO8601 formatted future timestamp.</returns>
        public static string GetFormattedTimestampISO8601(int minutesFromNow)
        {
            DateTime future = AWSSDKUtils.CorrectedUtcNow.AddMinutes(minutesFromNow);
            // Same Kind re-tagging as FormattedCurrentTimestampGMT (see note there).
            DateTime stamped = new DateTime(
                future.Year, future.Month, future.Day,
                future.Hour, future.Minute, future.Second, future.Millisecond,
                DateTimeKind.Local);
            return stamped.ToString(AWSSDKUtils.ISO8601DateFormat, CultureInfo.InvariantCulture);
        }

        /// <summary>
        /// Formats the current date as an RFC822 timestamp
        /// </summary>
        /// <returns>An RFC822 formatted string representation
        /// of the current date and time
        /// </returns>
        public static string FormattedCurrentTimestampRFC822
        {
            get { return GetFormattedTimestampRFC822(0); }
        }

        /// <summary>
        /// Gets the RFC822 formatted timestamp that is minutesFromNow
        /// in the future.
/// </summary> /// <param name="minutesFromNow">The number of minutes from the current instant /// for which the timestamp is needed.</param> /// <returns>The ISO8601 formatted future timestamp.</returns> public static string GetFormattedTimestampRFC822(int minutesFromNow) { DateTime dateTime = AWSSDKUtils.CorrectedUtcNow.AddMinutes(minutesFromNow); DateTime formatted = new DateTime( dateTime.Year, dateTime.Month, dateTime.Day, dateTime.Hour, dateTime.Minute, dateTime.Second, dateTime.Millisecond, DateTimeKind.Local ); return formatted.ToString( AWSSDKUtils.RFC822DateFormat, CultureInfo.InvariantCulture ); } /// <summary> /// URL encodes a string per RFC3986. If the path property is specified, /// the accepted path characters {/+:} are not encoded. /// </summary> /// <param name="data">The string to encode</param> /// <param name="path">Whether the string is a URL path or not</param> /// <returns>The encoded string</returns> public static string UrlEncode(string data, bool path) { return UrlEncode(3986, data, path); } /// <summary> /// URL encodes a string per the specified RFC. If the path property is specified, /// the accepted path characters {/+:} are not encoded. /// </summary> /// <param name="rfcNumber">RFC number determing safe characters</param> /// <param name="data">The string to encode</param> /// <param name="path">Whether the string is a URL path or not</param> /// <returns>The encoded string</returns> /// <remarks> /// Currently recognised RFC versions are 1738 (Dec '94) and 3986 (Jan '05). /// If the specified RFC is not recognised, 3986 is used by default. /// </remarks> internal static string UrlEncode(int rfcNumber, string data, bool path) { StringBuilder encoded = new StringBuilder(data.Length * 2); string validUrlCharacters; if (!RFCEncodingSchemes.TryGetValue(rfcNumber, out validUrlCharacters)) validUrlCharacters = ValidUrlCharacters; string unreservedChars = String.Concat(validUrlCharacters, (path ? 
ValidPathCharacters : "")); foreach (char symbol in System.Text.Encoding.UTF8.GetBytes(data)) { if (unreservedChars.IndexOf(symbol) != -1) { encoded.Append(symbol); } else { encoded.Append("%").Append(string.Format(CultureInfo.InvariantCulture, "{0:X2}", (int)symbol)); } } return encoded.ToString(); } public static void Sleep(TimeSpan ts) { Sleep((int)ts.TotalMilliseconds); } /// <summary> /// Convert bytes to a hex string /// </summary> /// <param name="bytes">Bytes to convert.</param> /// <returns>Hexadecimal string representing the byte array.</returns> internal static string BytesToHexString(byte[] bytes) { string hex = BitConverter.ToString(bytes); hex = hex.Replace("-", string.Empty); return hex; } /// <summary> /// Convert a hex string to bytes /// </summary> /// <param name="hex">Hexadecimal string</param> /// <returns>Byte array corresponding to the hex string.</returns> internal static byte[] HexStringToBytes(string hex) { if (string.IsNullOrEmpty(hex) || hex.Length % 2 == 1) throw new ArgumentOutOfRangeException("hex"); int count = 0; byte[] buffer = new byte[hex.Length / 2]; for (int i = 0; i < hex.Length; i += 2) { string sub = hex.Substring(i, 2); byte b = Convert.ToByte(sub, 16); buffer[count] = b; count++; } return buffer; } /// <summary> /// Returns DateTime.UtcNow + ClockOffset when /// <seealso cref="AWSConfigs.CorrectForClockSkew"/> is true. /// This value should be used when constructing requests, as it /// will represent accurate time w.r.t. AWS servers. /// </summary> public static DateTime CorrectedUtcNow { get { var now = DateTime.UtcNow; if (AWSConfigs.CorrectForClockSkew) now += AWSConfigs.ClockOffset; return now; } } #endregion } }
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.IO;
using libsecondlife;
using libsecondlife.Imaging;

namespace groupmanager
{
    /// <summary>
    /// Dialog showing a group's profile, insignia, members and titles. All data
    /// arrives asynchronously through libsecondlife callbacks and is marshaled
    /// onto the UI thread before any control is touched.
    /// </summary>
    public partial class frmGroupInfo : Form
    {
        Group Group;
        SecondLife Client;
        GroupProfile Profile = new GroupProfile();
        Dictionary<LLUUID, GroupMember> Members = new Dictionary<LLUUID, GroupMember>();
        Dictionary<LLUUID, GroupTitle> Titles = new Dictionary<LLUUID, GroupTitle>();
        Dictionary<LLUUID, GroupMemberData> MemberData = new Dictionary<LLUUID, GroupMemberData>();
        Dictionary<LLUUID, string> Names = new Dictionary<LLUUID, string>();

        // Delegate instances are kept in fields so the destructor can unsubscribe
        // exactly the handlers that were registered.
        GroupManager.GroupProfileCallback GroupProfileCallback;
        GroupManager.GroupMembersCallback GroupMembersCallback;
        GroupManager.GroupTitlesCallback GroupTitlesCallback;
        AvatarManager.AvatarNamesCallback AvatarNamesCallback;
        AssetManager.ImageReceivedCallback ImageReceivedCallback;

        public frmGroupInfo(Group group, SecondLife client)
        {
            InitializeComponent();

            // Force handle creation so Invoke/BeginInvoke from network threads
            // works even before the form is first shown.
            while (!IsHandleCreated)
            {
                IntPtr temp = Handle;
            }

            GroupProfileCallback = new GroupManager.GroupProfileCallback(GroupProfileHandler);
            GroupMembersCallback = new GroupManager.GroupMembersCallback(GroupMembersHandler);
            GroupTitlesCallback = new GroupManager.GroupTitlesCallback(GroupTitlesHandler);
            AvatarNamesCallback = new AvatarManager.AvatarNamesCallback(AvatarNamesHandler);
            ImageReceivedCallback = new AssetManager.ImageReceivedCallback(Assets_OnImageReceived);

            Group = group;
            Client = client;

            // Register the callbacks for this form
            Client.Assets.OnImageReceived += ImageReceivedCallback;
            Client.Groups.OnGroupProfile += GroupProfileCallback;
            Client.Groups.OnGroupMembers += GroupMembersCallback;
            Client.Groups.OnGroupTitles += GroupTitlesCallback;
            Client.Avatars.OnAvatarNames += AvatarNamesCallback;

            // Request the group information
            Client.Groups.RequestGroupProfile(Group.ID);
            Client.Groups.RequestGroupMembers(Group.ID);
            Client.Groups.RequestGroupTitles(Group.ID);
        }

        // NOTE(review): unsubscribing in a finalizer is unreliable -- it runs (if
        // at all) on the GC thread, and Client may already be unreachable. A
        // FormClosed handler or Dispose override would be a safer place for this.
        ~frmGroupInfo()
        {
            // Unregister the callbacks for this form
            Client.Assets.OnImageReceived -= ImageReceivedCallback;
            Client.Groups.OnGroupProfile -= GroupProfileCallback;
            Client.Groups.OnGroupMembers -= GroupMembersCallback;
            Client.Groups.OnGroupTitles -= GroupTitlesCallback;
            Client.Avatars.OnAvatarNames -= AvatarNamesCallback;
        }

        private void GroupProfileHandler(GroupProfile profile)
        {
            Profile = profile;

            if (Group.InsigniaID != LLUUID.Zero)
                Client.Assets.RequestImage(Group.InsigniaID, ImageType.Normal, 113000.0f, 0);

            // Fix: the original only refreshed when InvokeRequired was true, so a
            // callback arriving on the UI thread never updated the profile display.
            if (this.InvokeRequired)
                this.BeginInvoke(new MethodInvoker(UpdateProfile));
            else
                UpdateProfile();
        }

        void Assets_OnImageReceived(ImageDownload image, AssetTexture assetTexture)
        {
            ManagedImage imgData;
            Image bitmap;

            if (image.Success)
            {
                OpenJPEG.DecodeToImage(image.AssetData, out imgData, out bitmap);

                // Fix: this callback fires on a network thread; assigning the
                // image directly was an illegal cross-thread control access.
                if (picInsignia.InvokeRequired)
                    picInsignia.BeginInvoke(new MethodInvoker(delegate { picInsignia.Image = bitmap; }));
                else
                    picInsignia.Image = bitmap;
            }
        }

        // Pushes the cached Profile into the controls; must run on the UI thread.
        private void UpdateProfile()
        {
            lblGroupName.Text = Profile.Name;
            txtCharter.Text = Profile.Charter;
            chkShow.Checked = Profile.ShowInList;
            chkPublish.Checked = Profile.AllowPublish;
            chkOpenEnrollment.Checked = Profile.OpenEnrollment;
            chkFee.Checked = (Profile.MembershipFee != 0);
            numFee.Value = Profile.MembershipFee;
            chkMature.Checked = Profile.MaturePublish;

            // Resolve the founder's name; displayed once AvatarNamesHandler fires.
            Client.Avatars.RequestAvatarName(Profile.FounderID);
        }

        private void AvatarNamesHandler(Dictionary<LLUUID, string> names)
        {
            lock (Names)
            {
                foreach (KeyValuePair<LLUUID, string> agent in names)
                {
                    Names[agent.Key] = agent.Value;
                }
            }

            UpdateNames();
        }

        // Applies resolved avatar names to the founder label and member rows.
        private void UpdateNames()
        {
            if (this.InvokeRequired)
            {
                Invoke(new MethodInvoker(UpdateNames));
            }
            else
            {
                lock (Names)
                {
                    if (Profile.FounderID != LLUUID.Zero && Names.ContainsKey(Profile.FounderID))
                    {
                        lblFoundedBy.Text = "Founded by " + Names[Profile.FounderID];
                    }

                    lock (MemberData)
                    {
                        foreach (KeyValuePair<LLUUID, string> name in Names)
                        {
                            if (!MemberData.ContainsKey(name.Key))
                            {
                                MemberData[name.Key] = new GroupMemberData();
                            }

                            MemberData[name.Key].Name = name.Value;
                        }
                    }
                }

                UpdateMemberList();
            }
        }

        // Rebuilds both member ListViews from MemberData; UI thread only.
        private void UpdateMemberList()
        {
            // General tab list
            lock (lstMembers)
            {
                lstMembers.Items.Clear();

                foreach (GroupMemberData entry in MemberData.Values)
                {
                    ListViewItem lvi = new ListViewItem();
                    lvi.Text = entry.Name;

                    ListViewItem.ListViewSubItem lvsi = new ListViewItem.ListViewSubItem();
                    lvsi.Text = entry.Title;
                    lvi.SubItems.Add(lvsi);

                    lvsi = new ListViewItem.ListViewSubItem();
                    lvsi.Text = entry.LastOnline;
                    lvi.SubItems.Add(lvsi);

                    lstMembers.Items.Add(lvi);
                }
            }

            // Members tab list
            lock (lstMembers2)
            {
                lstMembers2.Items.Clear();

                foreach (GroupMemberData entry in MemberData.Values)
                {
                    ListViewItem lvi = new ListViewItem();
                    lvi.Text = entry.Name;

                    ListViewItem.ListViewSubItem lvsi = new ListViewItem.ListViewSubItem();
                    lvsi.Text = entry.Contribution.ToString();
                    lvi.SubItems.Add(lvsi);

                    lvsi = new ListViewItem.ListViewSubItem();
                    lvsi.Text = entry.LastOnline;
                    lvi.SubItems.Add(lvsi);

                    lstMembers2.Items.Add(lvi);
                }
            }
        }

        private void GroupMembersHandler(Dictionary<LLUUID, GroupMember> members)
        {
            Members = members;
            UpdateMembers();
        }

        // Copies the raw member list into MemberData and batches name lookups.
        private void UpdateMembers()
        {
            if (this.InvokeRequired)
            {
                Invoke(new MethodInvoker(UpdateMembers));
            }
            else
            {
                List<LLUUID> requestids = new List<LLUUID>();

                lock (Members)
                {
                    lock (MemberData)
                    {
                        foreach (GroupMember member in Members.Values)
                        {
                            GroupMemberData memberData = new GroupMemberData();
                            memberData.ID = member.ID;
                            memberData.IsOwner = member.IsOwner;
                            memberData.LastOnline = member.OnlineStatus;
                            memberData.Powers = (ulong)member.Powers;
                            memberData.Title = member.Title;
                            memberData.Contribution = member.Contribution;
                            MemberData[member.ID] = memberData;

                            // Add this ID to the name request batch
                            requestids.Add(member.ID);
                        }
                    }
                }

                Client.Avatars.RequestAvatarNames(requestids);
            }
        }

        private void GroupTitlesHandler(Dictionary<LLUUID, GroupTitle> titles)
        {
            Titles = titles;
            UpdateTitles();
        }

        // Currently only dumps the titles to the console (no UI binding yet).
        private void UpdateTitles()
        {
            if (this.InvokeRequired)
            {
                Invoke(new MethodInvoker(UpdateTitles));
            }
            else
            {
                lock (Titles)
                {
                    foreach (KeyValuePair<LLUUID, GroupTitle> kvp in Titles)
                    {
                        Console.Write("Title: " + kvp.Value.Title + " = " + kvp.Key.ToString());

                        if (kvp.Value.Selected)
                            Console.WriteLine(" (Selected)");
                        else
                            Console.WriteLine();
                    }
                }
            }
        }
    }

    /// <summary>
    /// Mutable snapshot of a single group member, merged from the members
    /// callback and the avatar-names callback.
    /// </summary>
    public class GroupMemberData
    {
        public LLUUID ID;
        public string Name;
        public string Title;
        public string LastOnline;
        public ulong Powers;
        public bool IsOwner;
        public int Contribution;
    }
}
using System.Web;
using NServiceKit.Common;
using NServiceKit.Common.ServiceModel;
using NServiceKit.ServiceHost;
using NServiceKit.ServiceInterface.ServiceModel;
using NServiceKit.Text;
using NServiceKit.WebHost.Endpoints;
using NServiceKit.WebHost.Endpoints.Support;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Reflection;
using System.Runtime.Serialization;
using HttpRequestWrapper = NServiceKit.WebHost.Endpoints.Extensions.HttpRequestWrapper;
using HttpResponseWrapper = NServiceKit.WebHost.Endpoints.Extensions.HttpResponseWrapper;

namespace NServiceKit
{
    /// <summary>
    /// Serves an encrypted dump of the host's service metadata: every visible
    /// operation, its request/response DTOs, and all user types they depend on.
    /// </summary>
    public class MetadataTypesHandler : HttpHandlerBase, INServiceKitHttpHandler
    {
        /// <summary>Gets or sets the configuration.</summary>
        ///
        /// <value>The configuration.</value>
        public MetadataTypesConfig Config { get; set; }

        /// <summary>Executes the given context.</summary>
        ///
        /// <param name="context">The context.</param>
        public override void Execute(HttpContext context)
        {
            ProcessRequest(
                new HttpRequestWrapper(GetType().Name, context.Request),
                new HttpResponseWrapper(context.Response),
                GetType().Name);
        }

        /// <summary>Process the request.</summary>
        ///
        /// <param name="httpReq"> The HTTP request.</param>
        /// <param name="httpRes"> The HTTP response.</param>
        /// <param name="operationName">Name of the operation.</param>
        public void ProcessRequest(IHttpRequest httpReq, IHttpResponse httpRes, string operationName)
        {
            var metadata = new MetadataTypes
            {
                Config = Config,
            };

            // Seed with framework types that are always present so they are not
            // re-added while walking operations.
            var existingTypes = new HashSet<Type>
            {
                typeof(ResponseStatus),
                typeof(ErrorResponse),
            };

            var meta = EndpointHost.Metadata;
            foreach (var operation in meta.Operations)
            {
                if (!meta.IsVisible(httpReq, operation))
                    continue;

                metadata.Operations.Add(new MetadataOperationType
                {
                    Actions = operation.Actions,
                    Request = operation.RequestType.ToType(),
                    Response = operation.ResponseType.ToType(),
                });

                existingTypes.Add(operation.RequestType);
                if (operation.ResponseType != null)
                {
                    existingTypes.Add(operation.ResponseType);
                }
            }

            foreach (var type in meta.GetAllTypes())
            {
                if (existingTypes.Contains(type))
                    continue;

                metadata.Operations.Add(new MetadataOperationType
                {
                    Request = type.ToType(),
                });
                existingTypes.Add(type);
            }

            // Breadth-first walk over property types and base types so nested
            // user-defined types are included exactly once.
            var considered = new HashSet<Type>(existingTypes);
            var queue = new Queue<Type>(existingTypes);

            while (queue.Count > 0)
            {
                var type = queue.Dequeue();
                foreach (var pi in type.GetSerializableProperties())
                {
                    if (pi.PropertyType.IsUserType())
                    {
                        if (considered.Contains(pi.PropertyType))
                            continue;

                        considered.Add(pi.PropertyType);
                        queue.Enqueue(pi.PropertyType);
                        metadata.Types.Add(pi.PropertyType.ToType());
                    }
                }

                if (type.BaseType != null
                    && type.BaseType.IsUserType()
                    && !considered.Contains(type.BaseType))
                {
                    considered.Add(type.BaseType);
                    queue.Enqueue(type.BaseType);
                    metadata.Types.Add(type.BaseType.ToType());
                }
            }

            var json = metadata.ToJson();

            // Fix: removed commented-out plain-JSON dead code. The payload is
            // encrypted with the host's public key; clients decrypt it with the
            // matching private key.
            httpRes.ContentType = "application/x-ssz-metatypes";
            var encJson = CryptUtils.Encrypt(EndpointHostConfig.PublicKey, json, RsaKeyLengths.Bit2048);
            httpRes.Write(encJson);
        }
    }

    /// <summary>A metadata type extensions.</summary>
    public static class MetadataTypeExtensions
    {
        /// <summary>A Type extension method that converts a type to a type.</summary>
        ///
        /// <param name="type">The type to act on.</param>
        ///
        /// <returns>type as a MetadataType.</returns>
        public static MetadataType ToType(this Type type)
        {
            if (type == null) return null;

            var metaType = new MetadataType
            {
                Name = type.Name,
                Namespace = type.Namespace,
                GenericArgs = type.IsGenericType
                    ? type.GetGenericArguments().Select(x => x.Name).ToArray()
                    : null,
                Attributes = type.ToAttributes(),
                Properties = type.ToProperties(),
            };

            if (type.BaseType != null && type.BaseType != typeof(object))
            {
                metaType.Inherits = type.BaseType.Name;
                metaType.InheritsGenericArgs = type.BaseType.IsGenericType ?
type.BaseType.GetGenericArguments().Select(x => x.Name).ToArray() : null;
            }

            if (type.GetTypeWithInterfaceOf(typeof(IReturnVoid)) != null)
            {
                metaType.ReturnVoidMarker = true;
            }
            else
            {
                // IReturn<T> marker records the response DTO's generic arguments.
                var genericMarker = type.GetTypeWithGenericTypeDefinitionOf(typeof(IReturn<>));
                if (genericMarker != null)
                {
                    metaType.ReturnMarkerGenericArgs = genericMarker.GetGenericArguments().Select(x => x.Name).ToArray();
                }
            }

            var typeAttrs = TypeDescriptor.GetAttributes(type);
            var routeAttrs = typeAttrs.OfType<RouteAttribute>().ToList();
            if (routeAttrs.Count > 0)
            {
                metaType.Routes = routeAttrs.ConvertAll(x =>
                    new MetadataRoute
                    {
                        Path = x.Path,
                        Notes = x.Notes,
                        Summary = x.Summary,
                        Verbs = x.Verbs,
                    });
            }

            var descAttr = typeAttrs.OfType<DescriptionAttribute>().FirstOrDefault();
            if (descAttr != null)
            {
                metaType.Description = descAttr.Description;
            }

            var dcAttr = type.GetDataContract();
            if (dcAttr != null)
            {
                metaType.DataContract = new MetadataDataContract
                {
                    Name = dcAttr.Name,
                    Namespace = dcAttr.Namespace,
                };
            }

            return metaType;
        }

        /// <summary>Collects the custom attributes declared on a user type.</summary>
        ///
        /// <param name="type">The type to act on.</param>
        ///
        /// <returns>attrs as a List&lt;MetadataAttribute&gt;; null for non-user or collection types.</returns>
        public static List<MetadataAttribute> ToAttributes(this Type type)
        {
            return !type.IsUserType() || type.IsOrHasGenericInterfaceTypeOf(typeof(IEnumerable<>))
                ? null
                : type.GetCustomAttributes(false).ToAttributes();
        }

        /// <summary>A Type extension method that converts a type to the properties.</summary>
        ///
        /// <param name="type">The type to act on.</param>
        ///
        /// <returns>type as a List&lt;MetadataPropertyType&gt;; null when there are none.</returns>
        public static List<MetadataPropertyType> ToProperties(this Type type)
        {
            var props = !type.IsUserType() || type.IsOrHasGenericInterfaceTypeOf(typeof(IEnumerable<>))
                ? null
                : type.GetInstancePublicProperties().ToList().ConvertAll(x => x.ToProperty());

            return props == null || props.Count == 0 ? null : props;
        }

        /// <summary>Exclude known attributes filter.</summary>
        ///
        /// <param name="x">The Attribute to process.</param>
        ///
        /// <returns>true if the attribute should be kept, false to drop it.</returns>
        public static bool ExcludeKnownAttrsFilter(Attribute x)
        {
            return x.GetType() != typeof(RouteAttribute)
                && x.GetType() != typeof(DescriptionAttribute)
                && x.GetType().Name != "DataContractAttribute" //Type equality issues with Mono .NET 3.5/4
                && x.GetType().Name != "DataMemberAttribute";
        }

        /// <summary>An object[] extension method that converts the attrs to the attributes.</summary>
        ///
        /// <param name="attrs">The attrs to act on.</param>
        ///
        /// <returns>attrs as a List&lt;MetadataAttribute&gt;; null when empty.</returns>
        public static List<MetadataAttribute> ToAttributes(this object[] attrs)
        {
            var to = attrs.OfType<Attribute>()
                .Where(ExcludeKnownAttrsFilter)
                .ToList().ConvertAll(x => x.ToAttribute());

            return to.Count == 0 ? null : to;
        }

        /// <summary>An IEnumerable&lt;Attribute&gt; extension method that converts the attrs to the attributes.</summary>
        ///
        /// <param name="attrs">The attrs to act on.</param>
        ///
        /// <returns>attrs as a List&lt;MetadataAttribute&gt;; null when empty.</returns>
        public static List<MetadataAttribute> ToAttributes(this IEnumerable<Attribute> attrs)
        {
            var to = attrs
                .Where(ExcludeKnownAttrsFilter)
                .Select(attr => attr.ToAttribute())
                .ToList();

            return to.Count == 0 ? null : to;
        }

        /// <summary>An Attribute extension method that converts an attr to an attribute.</summary>
        ///
        /// <param name="attr">The attr to act on.</param>
        ///
        /// <returns>attr as a MetadataAttribute.</returns>
        public static MetadataAttribute ToAttribute(this Attribute attr)
        {
            // The constructor with the fewest parameters is assumed to be the one
            // callers use positionally.
            var firstCtor = attr.GetType().GetConstructors().OrderBy(x => x.GetParameters().Length).FirstOrDefault();
            var metaAttr = new MetadataAttribute
            {
                Name = attr.GetType().Name,
                ConstructorArgs = firstCtor != null
                    ? firstCtor.GetParameters().ToList().ConvertAll(x => x.ToProperty())
                    : null,
                Args = attr.NonDefaultProperties(),
            };
            return metaAttr;
        }

        /// <summary>An Attribute extension method that returns its non-default properties.</summary>
        ///
        /// <param name="attr">The attr to act on.</param>
        ///
        /// <returns>A List&lt;MetadataPropertyType&gt;</returns>
        public static List<MetadataPropertyType> NonDefaultProperties(this Attribute attr)
        {
            return attr.GetType().GetPublicProperties()
                .Select(pi => pi.ToProperty(attr))
                .Where(property => property.Name != "TypeId" && property.Value != null)
                .ToList();
        }

        /// <summary>A PropertyInfo extension method that converts a pi to a property.</summary>
        ///
        /// <param name="pi"> The pi to act on.</param>
        /// <param name="instance">The instance, used to capture the property's current value.</param>
        ///
        /// <returns>pi as a MetadataPropertyType.</returns>
        public static MetadataPropertyType ToProperty(this PropertyInfo pi, object instance = null)
        {
            var property = new MetadataPropertyType
            {
                Name = pi.Name,
                Attributes = pi.GetCustomAttributes(false).ToAttributes(),
                Type = pi.PropertyType.Name,
                DataMember = pi.GetDataMember().ToDataMember(),
                GenericArgs = pi.PropertyType.IsGenericType
                    ? pi.PropertyType.GetGenericArguments().Select(x => x.Name).ToArray()
                    : null,
            };
            if (instance != null)
            {
                var value = pi.GetValue(instance, null);
                // Fix: '!=' on boxed values is reference equality, so value-type
                // defaults (e.g. a boxed 0) were never recognized as defaults and
                // were always serialized. Use value equality so properties still at
                // their default are omitted, as intended.
                if (!Equals(value, pi.PropertyType.GetDefaultValue()))
                {
                    property.Value = value.ToJson();
                }
            }
            return property;
        }

        /// <summary>A ParameterInfo extension method that converts a pi to a property.</summary>
        ///
        /// <param name="pi">The pi to act on.</param>
        ///
        /// <returns>pi as a MetadataPropertyType.</returns>
        public static MetadataPropertyType ToProperty(this ParameterInfo pi)
        {
            var propertyAttrs = pi.GetCustomAttributes(false);
            var property = new MetadataPropertyType
            {
                Name = pi.Name,
                Attributes = propertyAttrs.ToAttributes(),
                Type = pi.ParameterType.Name,
            };

            var descAttr = propertyAttrs.OfType<DescriptionAttribute>().FirstOrDefault();
            if (descAttr != null)
            {
                property.Description = descAttr.Description;
            }

            return property;
        }

        /// <summary>A DataMemberAttribute extension method that converts an attr to a data member.</summary>
        ///
        /// <param name="attr">The attr to act on.</param>
        ///
        /// <returns>attr as a MetadataDataMember; null for a null attr.</returns>
        public static MetadataDataMember ToDataMember(this DataMemberAttribute attr)
        {
            if (attr == null) return null;

            // Only carry values that differ from the DataMemberAttribute defaults
            // (EmitDefaultValue=true, Order=-1, IsRequired=false).
            var metaAttr = new MetadataDataMember
            {
                Name = attr.Name,
                EmitDefaultValue = attr.EmitDefaultValue != true ? attr.EmitDefaultValue : (bool?)null,
                Order = attr.Order >= 0 ? attr.Order : (int?)null,
                IsRequired = attr.IsRequired != false ? attr.IsRequired : (bool?)null,
            };
            return metaAttr;
        }

        /// <summary>A Type extension method that gets instance public properties.</summary>
        ///
        /// <param name="type">The type to act on.</param>
        ///
        /// <returns>An array of property information.</returns>
        public static PropertyInfo[] GetInstancePublicProperties(this Type type)
        {
            return type.GetProperties(BindingFlags.Public | BindingFlags.Instance | BindingFlags.DeclaredOnly)
                .Where(t => t.GetIndexParameters().Length == 0) // ignore indexed properties
                .ToArray();
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.Sql
{
    using Microsoft.Azure;
    using Microsoft.Azure.Management;
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Models;
    using Newtonsoft.Json;
    using System.Collections;
    using System.Collections.Generic;
    using System.Linq;
    using System.Net;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// DatabaseUsagesOperations operations.
    /// </summary>
    internal partial class DatabaseUsagesOperations : IServiceOperations<SqlManagementClient>, IDatabaseUsagesOperations
    {
        /// <summary>
        /// Initializes a new instance of the DatabaseUsagesOperations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        internal DatabaseUsagesOperations(SqlManagementClient client)
        {
            // Fail fast: every operation in this class dereferences Client.
            if (client == null)
            {
                throw new System.ArgumentNullException("client");
            }
            Client = client;
        }

        /// <summary>
        /// Gets a reference to the SqlManagementClient
        /// </summary>
        // Set once by the constructor; read-only to external callers.
        public SqlManagementClient Client { get; private set; }

        /// <summary>
        /// Returns database usages.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group that contains the resource. You can obtain
        /// this value from the Azure Resource Manager API or the portal.
        /// </param>
        /// <param name='serverName'>
        /// The name of the server.
        /// </param>
        /// <param name='databaseName'>
        /// The name of the database.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
/// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IEnumerable<DatabaseUsage>>> ListByDatabaseWithHttpMessagesAsync(string resourceGroupName, string serverName, string databaseName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (serverName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "serverName"); } if (databaseName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "databaseName"); } string apiVersion = "2014-04-01"; // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("apiVersion", apiVersion); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("serverName", serverName); tracingParameters.Add("databaseName", databaseName); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListByDatabase", tracingParameters); } // Construct URL var 
_baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/usages").ToString(); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{serverName}", System.Uri.EscapeDataString(serverName)); _url = _url.Replace("{databaseName}", System.Uri.EscapeDataString(databaseName)); List<string> _queryParameters = new List<string>(); if (apiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await 
Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IEnumerable<DatabaseUsage>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await 
_httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<DatabaseUsage>>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
//Copyright 2019 Esri

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at

//       http://www.apache.org/licenses/LICENSE-2.0

// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Windows.Data;
using ArcGIS.Desktop.Framework;
using ArcGIS.Desktop.Framework.Contracts;
using ArcGIS.Desktop.Framework.Threading.Tasks;
using ArcGIS.Desktop.Core;
using ArcGIS.Desktop.Core.Events;
using ArcGIS.Desktop.Mapping;
using System.Collections.ObjectModel;
using System.Windows.Input;
using ArcGIS.Desktop.Mapping.Events;
using System;
using System.Threading;
using System.Windows;

namespace DockPaneBookmarkAdvanced
{
    /// <summary>
    /// View model for the bookmarks dockpane.
    /// </summary>
    internal class BookmarkViewModel : DockPane
    {
        private const string DockPaneId = "DockPaneBookmarkAdvanced_Bookmark";
        private const string MenuId = "DockPaneBookmarkAdvanced_Bookmark_Menu";

        /// <summary>
        /// used to lock collections for use by multiple threads
        /// </summary>
        private readonly object _lockCollection = new object();
        // Backing collection for the current map's bookmarks (mutated from MCT and UI threads).
        private readonly ObservableCollection<Bookmark> _bookmarks = new ObservableCollection<Bookmark>();
        // Read only alias for map bookmarks.
        private readonly ReadOnlyObservableCollection<Bookmark> _readOnlyBookmarks;
        // Backing collection for the project's maps.
        private readonly ObservableCollection<Map> _maps = new ObservableCollection<Map>();
        // Read only alias for project bookmarks.
        private readonly ReadOnlyObservableCollection<Map> _readOnlyMaps;

        /// <summary>
        /// constructor.
        /// </summary>
        protected BookmarkViewModel()
        {
            // set up the command to retrieve the maps
            _retrieveMapsCommand = new RelayCommand(() => RetrieveMaps(), () => true);
            _delBookmarkCommand = new RelayCommand(() => DeleteBookmark(), () => true);

            _readOnlyBookmarks = new ReadOnlyObservableCollection<Bookmark>(_bookmarks);
            _readOnlyMaps = new ReadOnlyObservableCollection<Map>(_maps);
            // Allow WPF bindings to read these collections even when they are
            // modified from a background (MCT) thread, guarded by _lockCollection.
            Utils.RunOnUiThread(() =>
            {
                BindingOperations.EnableCollectionSynchronization(_readOnlyMaps, _lockCollection);
                BindingOperations.EnableCollectionSynchronization(_readOnlyBookmarks, _lockCollection);
            });
            ProjectItemsChangedEvent.Subscribe(OnProjectCollectionChanged, false);
            // NOTE(review): CheckBookmarks returns a Task that is neither awaited
            // nor stored here (fire-and-forget); exceptions thrown inside it would
            // be unobserved — confirm this is intentional.
            CheckBookmarks(_selectedMap);
            IsShowCircularAnimation = Visibility.Collapsed;
        }

        #region Overrides

        /// <summary>
        /// Override to implement custom initialization code for this dockpane
        /// </summary>
        /// <returns></returns>
        // NOTE(review): declared async but contains no await (compiler warning
        // CS1998); the method runs synchronously.
        protected override async Task InitializeAsync()
        {
            IsNoBookmarkExists = Visibility.Collapsed;
        }

        #endregion

        #region Subscribed Events

        /// <summary>
        /// Subscribe to Project Items Changed events which is getting called each
        /// time the project items change which happens when a new map is added or removed in ArcGIS Pro
        /// </summary>
        /// <param name="args">ProjectItemsChangedEventArgs</param>
        private void OnProjectCollectionChanged(ProjectItemsChangedEventArgs args)
        {
            if (args == null)
                return;
            var mapItem = args.ProjectItem as MapProjectItem;
            if (mapItem == null)
                return;

            IsShowCircularAnimation = Visibility.Visible;
            // new project item was added
            switch (args.Action)
            {
                case System.Collections.Specialized.NotifyCollectionChangedAction.Add:
                    {
                        // Only add the map if it is not already in our list.
                        var foundItem = _maps.FirstOrDefault(m => m.URI == mapItem.Path);
                        // one cannot be found; so add it to our list
                        if (foundItem == null)
                        {
                            _maps.Add(mapItem.GetMap());
                        }
                    }
                    break;
                case System.Collections.Specialized.NotifyCollectionChangedAction.Remove:
                    {
                        Map map = mapItem.GetMap();

                        // if this is the selected map, reset the selection
                        if (SelectedMap == map)
                            SelectedMap = null;

                        // remove from the collection
                        if (_maps.Contains(map))
                        {
                            _maps.Remove(map);
                        }
                    }
                    break;
            }
            // NOTE(review): InitMaps() blocks this thread for six seconds (see
            // InitMaps) before the animation is hidden — confirm this demo delay
            // is intentional.
            InitMaps();
            // NOTE(review): Visibility.Hidden here vs Visibility.Collapsed used
            // everywhere else for this property — verify the layout difference is wanted.
            IsShowCircularAnimation = Visibility.Hidden;
        }

        #endregion

        #region Commands

        /// <summary>
        /// Command for retrieving commands.  Bind to this property in the view.
        /// </summary>
        private readonly ICommand _retrieveMapsCommand;
        public ICommand RetrieveMapsCommand => _retrieveMapsCommand;

        /// <summary>
        /// Method for retrieving map items in the project.
        /// </summary>
        private void RetrieveMaps()
        {
            // create / clear the collection
            _maps.Clear();
            if (Project.Current != null)
            {
                // GetMap needs to be on the MCT
                QueuedTask.Run(() =>
                {
                    // get the map project items and add to my collection
                    // (safe cross-thread thanks to EnableCollectionSynchronization in the ctor)
                    foreach (var item in Project.Current.GetItems<MapProjectItem>())
                    {
                        _maps.Add(item.GetMap());
                    }
                });
            }
        }

        #endregion

        #region Properties

        // Controls visibility of the busy/progress animation in the view.
        private Visibility _isCircularAnimation;
        public Visibility IsShowCircularAnimation
        {
            get { return _isCircularAnimation; }
            set
            {
                SetProperty(ref _isCircularAnimation, value, () => IsShowCircularAnimation);
            }
        }

        // Shows a "no bookmarks" hint when the selected map has no bookmarks.
        private Visibility _isNoBookmarkExists = Visibility.Collapsed;
        public Visibility IsNoBookmarkExists
        {
            get { return _isNoBookmarkExists; }
            set
            {
                SetProperty(ref _isNoBookmarkExists, value, () => IsNoBookmarkExists);
            }
        }

        /// <summary>
        /// collection of bookmarks.  Bind to this property in the view.
        /// </summary>
        public ReadOnlyObservableCollection<Bookmark> Bookmarks => _readOnlyBookmarks;

        /// <summary>
        /// Collection of map items.  Bind to this property in the view.
        /// </summary>
        public ReadOnlyObservableCollection<Map> AllMaps => _readOnlyMaps;

        /// <summary>
        /// Holds the selected map from the combobox. When setting the value, ensure that the map is open and active before retrieving the bookmarks
        /// </summary>
        private Map _selectedMap = null;
        public Map SelectedMap
        {
            get { return _selectedMap; }
            set
            {
                // make sure we're on the UI thread
                _bookmarks.Clear();
                Utils.RunOnUiThread(async () =>
                {
                    SetProperty(ref _selectedMap, value, () => SelectedMap);
                    // NOTE(review): fire-and-forget — the returned Task is not awaited.
                    CheckBookmarks(_selectedMap);
                    if (_selectedMap == null)
                    {
                        _selectedBmk = null;
                        return;
                    }
                    // open /activate the map
                    Utils.OpenAndActivateMap(_selectedMap.URI);
                    // refresh the bookmark list for the newly selected map
                    await UpdateBookmarks(SelectedMap);
                });
            }
        }

        // View mode for the bookmark list: "Gallery" (default) or "List";
        // toggled by the burger-button menu buttons below.
        private string _bookmarksViewType = "Gallery";
        public string BookmarksViewType
        {
            get { return _bookmarksViewType; }
            set
            {
                SetProperty(ref _bookmarksViewType, value, () => BookmarksViewType);
            }
        }

        // Repopulates _bookmarks from the given map's bookmarks.
        private async Task UpdateBookmarks(Map selectedMap)
        {
            _bookmarks.Clear();
            // get the bookmarks.  GetBookmarks needs to be on MCT but want to refresh members and properties on UI thread
            await QueuedTask.Run(() =>
            {
                foreach (var bmk in selectedMap.GetBookmarks())
                {
                    _bookmarks.Add(bmk);
                }
            });
        }

        // Updates IsNoBookmarkExists based on whether the selected map has bookmarks.
        private async Task CheckBookmarks(Map selectedMap)
        {
            if (selectedMap == null)
            {
                IsNoBookmarkExists = Visibility.Visible;
                return;
            }
            await QueuedTask.Run(() =>
            {
                // NOTE(review): reads the _selectedMap field rather than the
                // selectedMap parameter; if the selection changes while this task
                // is queued the result may refer to a different map — confirm.
                if (_selectedMap.GetBookmarks().Count == 0)
                {
                    IsNoBookmarkExists = Visibility.Visible;
                    return;
                }
                else
                    IsNoBookmarkExists = Visibility.Collapsed;
            });
        }

        /// <summary>
        /// Holds the selected bookmark from the listview.
        /// </summary>
        private Bookmark _selectedBmk;
        public Bookmark SelectedBookmark
        {
            get { return _selectedBmk; }
            set
            {
                SetProperty(ref _selectedBmk, value, () => SelectedBookmark);
                // selecting a bookmark immediately zooms the active view to it
                ZoomToBookmark();
            }
        }

        // Incremental search: selects the first bookmark whose name starts with the text.
        private string _SearchText;
        public string SearchText
        {
            get { return _SearchText; }
            set
            {
                SetProperty(ref _SearchText, value, () => SearchText);
                foreach (var bmk in Bookmarks)
                {
                    if (bmk.Name.StartsWith(_SearchText, StringComparison.CurrentCultureIgnoreCase))
                    {
                        SelectedBookmark = bmk;
                    }
                }
            }
        }

        // Tooltip for the delete-bookmark button in the view.
        public string DelBookmarkToolTip => "Delete this bookmark";

        /// <summary>
        /// Command for adding a new bookmark. Bind to this property in the view
        /// </summary>
        private ICommand _newBookmarkCommand;
        public ICommand NewBookmarkCommand
        {
            get
            {
                // Lazily resolve the built-in "Create Bookmark" command from the framework.
                if (_newBookmarkCommand == null)
                {
                    _newBookmarkCommand = FrameworkApplication.GetPlugInWrapper("esri_mapping_createBookmark") as ICommand;
                }
                return _newBookmarkCommand;
            }
        }

        /// <summary>
        /// command for deleting a bookmark.  Bind to this property in the view
        /// </summary>
        private ICommand _delBookmarkCommand;
        public ICommand DelBookmarkCommand
        {
            get { return _delBookmarkCommand; }
        }

        /// <summary>
        /// method for deleting a bookmark
        /// </summary>
        // NOTE(review): async void is acceptable only because this is a
        // command/event handler; exceptions thrown here are unobservable.
        private async void DeleteBookmark()
        {
            if (SelectedBookmark == null)
                return;
            if (SelectedMap == null)
                return;

            // clear the bookmarks
            _bookmarks.Clear();

            // find the map that owns the selected bookmark
            var mapItem = Project.Current.Items.FirstOrDefault(i => i.Path == SelectedBookmark.MapURI) as MapProjectItem;

            await QueuedTask.Run(() =>
            {
                var map = mapItem?.GetMap();
                if (map == null)
                    return;

                // remove the bookmark
                map.RemoveBookmark(SelectedBookmark);
            });
            // refresh the list after the removal
            await UpdateBookmarks(SelectedMap);
        }

        #endregion

        #region Zoom to Bookmark

        /// <summary>
        /// Zooms to the currently selected bookmark.
        /// </summary>
        internal void ZoomToBookmark()
        {
            if (SelectedBookmark == null)
                return;

            // make sure the map is open
            Utils.OpenAndActivateMap(SelectedBookmark.MapURI);
            // zoom to it
            if (MapView.Active != null)
                MapView.Active.ZoomToAsync(SelectedBookmark);
        }

        #endregion

        /// <summary>
        /// Show the DockPane.
        /// </summary>
        internal static void Show()
        {
            DockPane pane = FrameworkApplication.DockPaneManager.Find(DockPaneId);
            if (pane == null)
                return;
            pane.Activate();
        }

        internal static void InitMaps()
        {
            // NOTE(review): blocks the calling thread (the thread raising the
            // project-changed event) for six seconds. Presumably this simulates a
            // slow refresh to demo the circular animation — confirm, and consider
            // an awaitable delay instead of a hard sleep.
            Thread.Sleep(6000);
        }

        /// <summary>
        /// Text shown near the top of the DockPane.
        /// </summary>
        private string _heading = "Bookmarks";
        public string Heading
        {
            get { return _heading; }
            set
            {
                SetProperty(ref _heading, value, () => Heading);
            }
        }

        #region Burger Button

        /// <summary>
        /// Tooltip shown when hovering over the burger button.
        /// </summary>
        public string BurgerButtonTooltip
        {
            get { return "Change view"; }
        }

        /// <summary>
        /// Menu shown when burger button is clicked.
        /// </summary>
        public System.Windows.Controls.ContextMenu BurgerButtonPopupMenu
        {
            get { return FrameworkApplication.CreateContextMenu(MenuId); }
        }

        #endregion
    }

    /// <summary>
    /// Button implementation to show the DockPane.
    /// </summary>
    internal class Bookmark_ShowButton : Button
    {
        protected override void OnClick()
        {
            BookmarkViewModel.Show();
        }
    }

    /// <summary>
    /// Button implementation for the button on the menu of the burger button.
    /// </summary>
    internal class BookmarkOutline_MenuButton : Button
    {
        protected override void OnClick()
        {
            // NOTE(review): unchecked 'as' cast — if Find returns null (pane not
            // yet created) the next line throws NullReferenceException.
            var vm = FrameworkApplication.DockPaneManager.Find("DockPaneBookmarkAdvanced_Bookmark") as BookmarkViewModel;
            vm.BookmarksViewType = "List";
        }
    }

    /// <summary>
    /// Button implementation for the button on the menu of the burger button.
/// </summary> internal class BookmarkGallery_MenuButton : Button { protected override void OnClick() { var vm = FrameworkApplication.DockPaneManager.Find("DockPaneBookmarkAdvanced_Bookmark") as BookmarkViewModel; vm.BookmarksViewType = "Gallery"; } } }
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

namespace Microsoft.DocAsCode.Build.Engine
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.IO.Compression;
    using System.Linq;
    using System.Text.RegularExpressions;

    using Microsoft.DocAsCode.Common;
    using Microsoft.DocAsCode.Build.Engine;

    /// <summary>
    /// A resource collection backed by a zip archive. Reads are serialized with a
    /// lock because <see cref="ZipArchive"/> is not thread safe.
    /// </summary>
    public sealed class ArchiveResourceCollection : ResourceCollection
    {
        private readonly object _locker = new object();
        private ZipArchive _zipped;
        private bool disposed = false;

        public override string Name { get; }
        // NOTE: lazily evaluated LINQ over the archive entries; enumerated once in
        // the constructor (IsEmpty) and again per use.
        public override IEnumerable<string> Names { get; }
        public override bool IsEmpty { get; }

        public ArchiveResourceCollection(Stream stream, string name)
        {
            if (stream == null) throw new ArgumentNullException(nameof(stream));
            _zipped = new ZipArchive(stream);
            Name = name;
            // When Name is empty, entry is folder, ignore
            Names = _zipped.Entries.Where(s => !string.IsNullOrEmpty(s.Name)).Select(s => s.FullName);
            IsEmpty = !Names.Any();
        }

        /// <summary>
        /// TODO: This is not thread safe, only expose GetResource in interface
        /// </summary>
        /// <param name="name">Resource name (zip entry path).</param>
        /// <returns>A seekable in-memory copy of the entry, or null if not found.</returns>
        public override Stream GetResourceStream(string name)
        {
            if (IsEmpty) return null;

            // Copy the entry into a MemoryStream so the returned stream outlives
            // the lock and is independently seekable/disposable.
            lock (_locker)
            {
                var memoryStream = new MemoryStream();
                using (var stream = GetResourceStreamCore(name))
                {
                    if (stream == null)
                    {
                        return null;
                    }

                    stream.CopyTo(memoryStream);
                }

                memoryStream.Seek(0, SeekOrigin.Begin);
                return memoryStream;
            }
        }

        public override string GetResource(string name)
        {
            lock (_locker)
            {
                using (var stream = GetResourceStreamCore(name))
                {
                    return GetString(stream);
                }
            }
        }

        private Stream GetResourceStreamCore(string name)
        {
            // zip entry is case sensitive
            // in case the relative path is combined by backslash \
            return _zipped.GetEntry(StringExtension.ToNormalizedPath(name.Trim()))?.Open();
        }

        protected override void Dispose(bool disposing)
        {
            if (disposed) return;
            _zipped?.Dispose();
            _zipped = null;
            disposed = true;

            base.Dispose(disposing);
        }
    }

    /// <summary>
    /// A resource collection backed by a directory tree, searched to a bounded depth.
    /// </summary>
    public sealed class FileResourceCollection : ResourceCollection
    {
        private const int MaxSearchLevel = 5;
        // keep comparer to be case sensitive as to be consistent with zip entries
        private static StringComparer ResourceComparer = StringComparer.Ordinal;
        private string _directory = null;
        private readonly int _maxDepth;
        public override string Name { get; }
        // NOTE: lazily evaluated LINQ; re-enumerated (and relative paths
        // recomputed) on every GetResourceStream call.
        public override IEnumerable<string> Names { get; }
        public override bool IsEmpty { get; }

        public FileResourceCollection(string directory, int maxSearchLevel = MaxSearchLevel)
        {
            // Fall back to the process working directory when none is given.
            if (string.IsNullOrEmpty(directory)) _directory = Directory.GetCurrentDirectory();
            else _directory = directory;
            Name = _directory;
            _maxDepth = maxSearchLevel;
            var includedFiles = GetFiles(_directory, "*", maxSearchLevel);
            Names = includedFiles.Select(s => PathUtility.MakeRelativePath(_directory, s)).Where(s => s != null);
            IsEmpty = !Names.Any();
        }

        public override Stream GetResourceStream(string name)
        {
            if (IsEmpty) return null;

            // in case the relative path is combined by backslash \
            if (!Names.Contains(StringExtension.ToNormalizedPath(name.Trim()), ResourceComparer)) return null;
            var filePath = Path.Combine(_directory, name);
            return new FileStream(filePath, FileMode.Open, FileAccess.Read);
        }

        // Recursively collects files up to searchLevel directories deep; throws
        // ResourceFileExceedsMaxDepthException if files exist beyond that depth.
        private IEnumerable<string> GetFiles(string directory, string searchPattern, int searchLevel)
        {
            if (searchLevel < 1)
            {
                return Enumerable.Empty<string>();
            }
            var files = Directory.GetFiles(directory, searchPattern, SearchOption.TopDirectoryOnly);
            var dirs = Directory.GetDirectories(directory);
            if (searchLevel == 1)
            {
                // At the depth limit: any file found deeper is an error, reported
                // with the first offending relative path.
                foreach (var dir in dirs)
                {
                    var remainingFiles = Directory.GetFiles(dir, searchPattern, SearchOption.AllDirectories);
                    if (remainingFiles.Length > 0)
                    {
                        throw new ResourceFileExceedsMaxDepthException(_maxDepth, PathUtility.MakeRelativePath(_directory, remainingFiles[0]), Name);
                    }
                }
                return files;
            }
            List<string> allFiles = new List<string>(files);
            foreach(var dir in dirs)
            {
                allFiles.AddRange(GetFiles(dir, searchPattern, searchLevel - 1));
            }
            return allFiles;
        }
    }

    /// <summary>
    /// Composes several collections; later collections in the given order override
    /// earlier ones (lookup iterates from the last collection backwards).
    /// </summary>
    public sealed class CompositeResourceCollectionWithOverridden : ResourceCollection
    {
        private ResourceCollection[] _collectionsInOverriddenOrder = null;
        private bool disposed = false;
        public override string Name => "Composite";
        public override IEnumerable<string> Names { get; }
        public override bool IsEmpty { get; }

        public CompositeResourceCollectionWithOverridden(IEnumerable<ResourceCollection> collectionsInOverriddenOrder)
        {
            if (collectionsInOverriddenOrder == null || !collectionsInOverriddenOrder.Any())
            {
                IsEmpty = true;
            }
            else
            {
                _collectionsInOverriddenOrder = collectionsInOverriddenOrder.ToArray();
                Names = _collectionsInOverriddenOrder.SelectMany(s => s.Names).Distinct();
            }
        }

        public override Stream GetResourceStream(string name)
        {
            if (IsEmpty) return null;

            // Search from the highest-priority (last) collection down.
            for (int i = _collectionsInOverriddenOrder.Length - 1; i > -1; i--)
            {
                var stream = _collectionsInOverriddenOrder[i].GetResourceStream(name);
                if (stream != null)
                {
                    Logger.LogDiagnostic($"Resource \"{name}\" is found from \"{_collectionsInOverriddenOrder[i].Name}\"");
                    return stream;
                }
            }

            return null;
        }

        protected override void Dispose(bool disposing)
        {
            if (disposed) return;
            // Owns the inner collections: dispose each and release the array.
            if (_collectionsInOverriddenOrder != null)
            {
                for (int i = 0; i < _collectionsInOverriddenOrder.Length; i++)
                {
                    _collectionsInOverriddenOrder[i].Dispose();
                    _collectionsInOverriddenOrder[i] = null;
                }
                _collectionsInOverriddenOrder = null;
            }

            base.Dispose(disposing);
        }
    }

    /// <summary>
    /// Null-object collection: no names, and GetResourceStream returns Stream.Null
    /// (note: not null, unlike the other collections' miss behavior).
    /// </summary>
    public sealed class EmptyResourceCollection : ResourceCollection
    {
        private static readonly IEnumerable<string> Empty = new string[0];
        public override bool IsEmpty => true;
        public override string Name => "Empty";

        public override IEnumerable<string> Names => Empty;

        public override Stream GetResourceStream(string name)
        {
            return Stream.Null;
        }
    }

    /// <summary>
    /// Base type for named resource collections, with string helpers and the
    /// standard dispose pattern.
    /// </summary>
    public abstract class ResourceCollection : IDisposable
    {
        public abstract string Name { get; }

        public abstract bool IsEmpty { get; }

        public abstract IEnumerable<string> Names { get; }

        public virtual string GetResource(string name)
        {
            using (var stream = GetResourceStream(name))
                return GetString(stream);
        }

        // Yields (name, content) pairs for every resource whose name matches the
        // optional case-insensitive regex selector.
        public IEnumerable<KeyValuePair<string, string>> GetResources(string selector = null)
        {
            foreach(var pair in GetResourceStreams(selector))
            {
                using (pair.Value)
                {
                    yield return new KeyValuePair<string, string>(pair.Key, GetString(pair.Value));
                }
            }
        }

        // Yields (name, stream) pairs; callers own the streams' disposal.
        public IEnumerable<KeyValuePair<string, Stream>> GetResourceStreams(string selector = null)
        {
            Func<string, bool> filter = s =>
            {
                if (selector != null)
                {
                    // NOTE: the regex is recompiled per name; selector == null means "match all".
                    var regex = new Regex(selector, RegexOptions.IgnoreCase);
                    return regex.IsMatch(s);
                }
                else
                {
                    return true;
                }
            };
            foreach (var name in Names)
            {
                if (filter(name))
                {
                    yield return new KeyValuePair<string, Stream>(name, GetResourceStream(name));
                }
            }
        }

        public abstract Stream GetResourceStream(string name);

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        protected virtual void Dispose(bool disposing)
        {
        }

        /// <summary>
        /// Override Object.Finalize by defining a destructor
        /// </summary>
        ~ResourceCollection()
        {
            Dispose(false);
        }

        // Reads an entire stream as text; returns null for a null stream.
        protected static string GetString(Stream stream)
        {
            if (stream == null) return null;

            using (var reader = new StreamReader(stream))
            {
                return reader.ReadToEnd();
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using DuckDuck.Areas.HelpPage.ModelDescriptions;
using DuckDuck.Areas.HelpPage.Models;

namespace DuckDuck.Areas.HelpPage
{
    /// <summary>
    /// Extension methods for configuring the Web API help page: registering
    /// documentation providers, sample requests/responses, and cached API models
    /// on the <see cref="HttpConfiguration"/>.
    /// </summary>
    // NOTE(review): the Set* registration methods call Add on the sample
    // generator's collections; registering the same key twice presumably throws
    // — confirm against HelpPageSampleGenerator before calling repeatedly.
    public static class HelpPageConfigurationExtensions
    {
        // Key prefix under which generated HelpPageApiModel instances are cached
        // in config.Properties (see GetHelpPageApiModel).
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" is the wildcard parameter list: applies regardless of action parameters.
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            // Lazily created once and cached in config.Properties (thread-safe GetOrAdd).
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
        }

        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            // AddOrUpdate so a caller-supplied generator replaces any lazily created one.
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model description generator.
/// </summary>
/// <param name="config">The configuration.</param>
/// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
{
    // One generator per configuration, built on first use (pre-seeded with the resource
    // types of all discovered APIs) and cached in config.Properties.
    return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
        typeof(ModelDescriptionGenerator),
        k => InitializeModelDescriptionGenerator(config));
}

/// <summary>
/// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
/// <returns>
/// An <see cref="HelpPageApiModel"/>
/// </returns>
public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
{
    object model;
    string modelId = ApiModelPrefix + apiDescriptionId;
    if (!config.Properties.TryGetValue(modelId, out model))
    {
        Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
        // Friendly IDs are matched case-insensitively.
        ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
        if (apiDescription != null)
        {
            model = GenerateApiModel(apiDescription, config);
            config.Properties.TryAdd(modelId, model);
        }
    }
    // Returns null when no ApiDescription matches the given ID.
    return (HelpPageApiModel)model;
}

// Builds the full help-page model for one API: URI parameters, request/response
// model descriptions, and sample requests/responses.
private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
{
    HelpPageApiModel apiModel = new HelpPageApiModel()
    {
        ApiDescription = apiDescription,
    };
    ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
    HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
    GenerateUriParameters(apiModel, modelGenerator);
    GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
    GenerateResourceDescription(apiModel, modelGenerator);
    GenerateSamples(apiModel, sampleGenerator);
    return apiModel;
}

// Populates apiModel.UriParameters from the action's FromUri parameters.
// Complex types without a string TypeConverter are flattened into their properties.
private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
    ApiDescription apiDescription = apiModel.ApiDescription;
    foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
    {
        if (apiParameter.Source == ApiParameterSource.FromUri)
        {
            HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
            Type parameterType = null;
            ModelDescription typeDescription = null;
            ComplexTypeModelDescription complexTypeDescription = null;
            if (parameterDescriptor != null)
            {
                parameterType = parameterDescriptor.ParameterType;
                typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                complexTypeDescription = typeDescription as ComplexTypeModelDescription;
            }

            // Example:
            // [TypeConverter(typeof(PointConverter))]
            // public class Point
            // {
            //     public Point(int x, int y)
            //     {
            //         X = x;
            //         Y = y;
            //     }
            //     public int X { get; set; }
            //     public int Y { get; set; }
            // }
            // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
            //
            // public class Point
            // {
            //     public int X { get; set; }
            //     public int Y { get; set; }
            // }
            // Regular complex class Point will have properties X and Y added to UriParameters collection.
            if (complexTypeDescription != null && !IsBindableWithTypeConverter(parameterType))
            {
                foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                {
                    apiModel.UriParameters.Add(uriParameter);
                }
            }
            else if (parameterDescriptor != null)
            {
                ParameterDescription uriParameter = AddParameterDescription(apiModel, apiParameter, typeDescription);
                if (!parameterDescriptor.IsOptional)
                {
                    uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                }
                object defaultValue = parameterDescriptor.DefaultValue;
                if (defaultValue != null)
                {
                    uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                }
            }
            else
            {
                Debug.Assert(parameterDescriptor == null);
                // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                // when source is FromUri. Ignored in request model and among resource parameters but listed
                // as a simple string here.
                ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                AddParameterDescription(apiModel, apiParameter, modelDescription);
            }
        }
    }
}

// True when the type can be model-bound from a URI string via its TypeConverter.
private static bool IsBindableWithTypeConverter(Type parameterType)
{
    if (parameterType == null)
    {
        return false;
    }
    return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
}

// Creates a ParameterDescription for one URI parameter and appends it to the model.
private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel, ApiParameterDescription apiParameter, ModelDescription typeDescription)
{
    ParameterDescription parameterDescription = new ParameterDescription
    {
        Name = apiParameter.Name,
        Documentation = apiParameter.Documentation,
        TypeDescription = typeDescription,
    };
    apiModel.UriParameters.Add(parameterDescription);
    return parameterDescription;
}

// Determines the request body model: either the FromBody parameter's type, or — for
// actions taking a raw HttpRequestMessage — the type registered via SetActualRequestType.
private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
{
    ApiDescription apiDescription = apiModel.ApiDescription;
    foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
    {
        if (apiParameter.Source == ApiParameterSource.FromBody)
        {
            Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
            apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
            apiModel.RequestDocumentation = apiParameter.Documentation;
        }
        else if (apiParameter.ParameterDescriptor != null &&
            apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
        {
            Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            if (parameterType != null)
            {
                apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
            }
        }
    }
}

// Describes the response resource; DeclaredType is the fallback when ResponseType is unset.
private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
    ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
    Type responseType = response.ResponseType ?? response.DeclaredType;
    if (responseType != null && responseType != typeof(void))
    {
        apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
    }
}

// Fills in sample requests/responses. Sample-generation failures are surfaced through
// apiModel.ErrorMessages rather than propagated, so the help page still renders.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
{
    try
    {
        foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
        {
            apiModel.SampleRequests.Add(item.Key, item.Value);
            LogInvalidSampleAsError(apiModel, item.Value);
        }

        foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
        {
            apiModel.SampleResponses.Add(item.Key, item.Value);
            LogInvalidSampleAsError(apiModel, item.Value);
        }
    }
    catch (Exception e)
    {
        apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture,
            "An exception has occurred while generating the sample. Exception message: {0}",
            HelpPageSampleGenerator.UnwrapException(e).Message));
    }
}

// Finds the parameter that carries the request body (FromBody, or an HttpRequestMessage
// whose actual payload type was registered). Both out values are null on failure.
private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
{
    parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
        p => p.Source == ApiParameterSource.FromBody ||
            (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));

    if (parameterDescription == null)
    {
        resourceType = null;
        return false;
    }

    resourceType = parameterDescription.ParameterDescriptor.ParameterType;

    if (resourceType == typeof(HttpRequestMessage))
    {
        HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
        resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
    }

    if (resourceType == null)
    {
        parameterDescription = null;
        return false;
    }

    return true;
}

// Pre-populates the model description generator with every API's resource type so that
// descriptions exist before individual help pages are requested.
private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
{
    ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
    Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
    foreach (ApiDescription api in apis)
    {
        ApiParameterDescription parameterDescription;
        Type parameterType;
        if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
        {
            modelGenerator.GetOrCreateModelDescription(parameterType);
        }
    }
    return modelGenerator;
}

// If the sample could not be generated, copy its error text onto the model.
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
    InvalidSample invalidSample = sample as InvalidSample;
    if (invalidSample != null)
    {
        apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
    }
}
}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using Xunit;

namespace System.Threading.Tasks.Channels.Tests
{
    /// <summary>
    /// Tests for <see cref="CaseBuilder"/>: argument validation, fluent chaining,
    /// cancellation, and the completion semantics (synchronous vs. asynchronous)
    /// of <c>SelectAsync</c> and <c>SelectUntilAsync</c>.
    /// </summary>
    public class CaseBuilderTests : TestBase
    {
        [Fact]
        public void CaseRead_Sync_InvalidArguments_ThrowsArgumentException()
        {
            // Null channel and null synchronous action must each be rejected by name.
            CaseBuilder cb = Channel.CaseRead<int>(Channel.CreateUnbounded<int>(), i => { });
            Assert.Throws<ArgumentNullException>("channel", () => cb.CaseRead<int>(null, (Action<int>)null));
            Assert.Throws<ArgumentNullException>("channel", () => cb.CaseRead<int>(null, i => { }));
            Assert.Throws<ArgumentNullException>("action", () => cb.CaseRead<int>(Channel.CreateUnbounded<int>(), (Action<int>)null));
        }

        [Fact]
        public void CaseRead_Async_InvalidArguments_ThrowsArgumentException()
        {
            // Same validation for the Func<int, Task> overload; the delegate parameter is named "func".
            CaseBuilder cb = Channel.CaseRead<int>(Channel.CreateUnbounded<int>(), i => { });
            Assert.Throws<ArgumentNullException>("channel", () => cb.CaseRead<int>(null, (Func<int, Task>)null));
            Assert.Throws<ArgumentNullException>("channel", () => cb.CaseRead<int>(null, i => Task.CompletedTask));
            Assert.Throws<ArgumentNullException>("func", () => cb.CaseRead<int>(Channel.CreateUnbounded<int>(), (Func<int, Task>)null));
        }

        [Fact]
        public void CaseWrite_Sync_InvalidArguments_ThrowsArgumentException()
        {
            CaseBuilder cb = Channel.CaseRead<int>(Channel.CreateUnbounded<int>(), i => { });
            Assert.Throws<ArgumentNullException>("channel", () => cb.CaseWrite<int>(null, 0, (Action)null));
            Assert.Throws<ArgumentNullException>("channel", () => cb.CaseWrite<int>(null, 0, (Action)delegate { }));
            Assert.Throws<ArgumentNullException>("action", () => cb.CaseWrite<int>(Channel.CreateUnbounded<int>(), 0, (Action)null));
        }

        [Fact]
        public void CaseWrite_Async_InvalidArguments_ThrowsArgumentException()
        {
            CaseBuilder cb = Channel.CaseRead<int>(Channel.CreateUnbounded<int>(), i => { });
            Assert.Throws<ArgumentNullException>("channel", () => cb.CaseWrite<int>(null, 0, (Func<Task>)null));
            Assert.Throws<ArgumentNullException>("channel", () => cb.CaseWrite<int>(null, 0, delegate { return Task.CompletedTask; }));
            Assert.Throws<ArgumentNullException>("func", () => cb.CaseWrite<int>(Channel.CreateUnbounded<int>(), 0, (Func<Task>)null));
        }

        [Fact]
        public void CaseDefault_Sync_InvalidAction_ThrowsException()
        {
            CaseBuilder builder1 = Channel.CaseRead<int>(Channel.CreateUnbounded<int>(), i => { });
            Assert.Throws<ArgumentNullException>(() => builder1.CaseDefault((Action)null));
        }

        [Fact]
        public void CaseDefault_Async_InvalidAction_ThrowsException()
        {
            CaseBuilder builder1 = Channel.CaseRead<int>(Channel.CreateUnbounded<int>(), i => Task.CompletedTask);
            Assert.Throws<ArgumentNullException>(() => builder1.CaseDefault((Func<Task>)null));
        }

        [Fact]
        public void CaseReadWrite_Sync_CallMultipleTimes_IdempotentResult()
        {
            // The fluent builder must return the same instance from every chained call.
            CaseBuilder builder1 = Channel.CaseRead<int>(Channel.CreateUnbounded<int>(), i => { });
            Assert.Same(builder1, builder1.CaseRead<int>(Channel.CreateUnbounded<int>(), i => { }));
            Assert.Same(builder1, builder1.CaseWrite(Channel.CreateUnbounded<string>(), "", () => { }));
            Assert.Same(builder1, builder1.CaseDefault(() => { }));

            CaseBuilder builder2 = Channel.CaseWrite(Channel.CreateUnbounded<int>(), 0, () => { });
            Assert.Same(builder2, builder2.CaseRead<int>(Channel.CreateUnbounded<int>(), i => { }));
            Assert.Same(builder2, builder2.CaseWrite(Channel.CreateUnbounded<string>(), "", () => { }));
            Assert.Same(builder2, builder2.CaseDefault(() => { }));
        }

        [Fact]
        public void CaseReadWrite_Async_CallMultipleTimes_IdempotentResult()
        {
            // Same identity guarantee for the asynchronous delegate overloads.
            CaseBuilder builder1 = Channel.CaseRead<int>(Channel.CreateUnbounded<int>(), i => Task.CompletedTask);
            Assert.Same(builder1, builder1.CaseRead<int>(Channel.CreateUnbounded<int>(), i => Task.CompletedTask));
            Assert.Same(builder1, builder1.CaseWrite(Channel.CreateUnbounded<string>(), "", () => Task.CompletedTask));
            Assert.Same(builder1, builder1.CaseDefault(() => Task.CompletedTask));

            CaseBuilder builder2 = Channel.CaseWrite(Channel.CreateUnbounded<int>(), 0, () => Task.CompletedTask);
            Assert.Same(builder2, builder2.CaseRead<int>(Channel.CreateUnbounded<int>(), i => Task.CompletedTask));
            Assert.Same(builder2, builder2.CaseWrite(Channel.CreateUnbounded<string>(), "", () => Task.CompletedTask));
            Assert.Same(builder2, builder2.CaseDefault(() => Task.CompletedTask));
        }

        [Fact]
        public void CaseDefault_AlreadyExists_ThrowsException()
        {
            // Only one default case may be registered, regardless of sync/async form.
            CaseBuilder cb = Channel.CaseRead<int>(Channel.CreateUnbounded<int>(), i => { }).CaseDefault(() => { });
            Assert.Throws<InvalidOperationException>(() => cb.CaseDefault(() => { }));
            Assert.Throws<InvalidOperationException>(() => cb.CaseDefault(() => Task.CompletedTask));
        }

        [Fact]
        public void SelectAsync_Precanceled_ThrowsCancellationException()
        {
            // Cancellation wins even though data is already available; the case delegate must not run.
            Channel<int> c = Channel.CreateUnbounded<int>();
            Assert.True(c.Out.TryWrite(42));

            var cts = new CancellationTokenSource();
            cts.Cancel();

            Task<bool> select = Channel
                .CaseRead<int>(c, i => { throw new InvalidOperationException(); })
                .SelectAsync(cts.Token);
            AssertSynchronouslyCanceled(select, cts.Token);
        }

        [Fact]
        public async Task SelectAsync_CanceledAfterSelectBeforeData_ThrowsCancellationException()
        {
            // Cancel while the select is still waiting for data.
            Channel<int> c = Channel.CreateUnbounded<int>();
            var cts = new CancellationTokenSource();

            Task<bool> select = Channel
                .CaseRead<int>(c, i => { throw new InvalidOperationException(); })
                .SelectAsync(cts.Token);

            cts.Cancel();
            await AssertCanceled(select, cts.Token);
        }

        [Fact]
        public void SelectAsync_NoChannelsAvailable_SyncDefault_CompletesSynchronously()
        {
            // With no case ready, the synchronous default runs inline and the select
            // completes synchronously with true.
            Channel<int> c1 = Channel.CreateUnbounded<int>();
            Channel<int> c2 = Channel.CreateUnbuffered<int>();

            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel
                .CaseRead<int>(c1, i => { throw new InvalidOperationException(); })
                .CaseWrite(c2, 42, () => { throw new InvalidOperationException(); })
                .CaseDefault(() => tcs.SetResult(84))
                .SelectAsync();
            Assert.Equal(TaskStatus.RanToCompletion, select.Status);
            Assert.Equal(TaskStatus.RanToCompletion, tcs.Task.Status);
            Assert.True(select.Result);
            Assert.Equal(84, tcs.Task.Result);
        }

        [Fact]
        public void SelectAsync_NoChannelsAvailable_AsyncDefault_CompletesSynchronously()
        {
            // An async default that returns a completed task still completes the select synchronously.
            Channel<int> c1 = Channel.CreateUnbounded<int>();
            Channel<int> c2 = Channel.CreateUnbuffered<int>();

            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel
                .CaseRead<int>(c1, i => { throw new InvalidOperationException(); })
                .CaseWrite(c2, 42, () => { throw new InvalidOperationException(); })
                .CaseDefault(() => { tcs.SetResult(84); return Task.CompletedTask; })
                .SelectAsync();
            Assert.Equal(TaskStatus.RanToCompletion, select.Status);
            Assert.Equal(TaskStatus.RanToCompletion, tcs.Task.Status);
            Assert.True(select.Result);
            Assert.Equal(84, tcs.Task.Result);
        }

        [Fact]
        public async Task SelectAsync_NoChannelsAvailable_AsyncDefault_CompletesAsynchronously()
        {
            // Task.Yield forces the default to finish asynchronously; select still returns true.
            Channel<int> c1 = Channel.CreateUnbounded<int>();
            Channel<int> c2 = Channel.CreateUnbuffered<int>();

            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel
                .CaseRead<int>(c1, i => { throw new InvalidOperationException(); })
                .CaseWrite(c2, 42, () => { throw new InvalidOperationException(); })
                .CaseDefault(async () => { await Task.Yield(); tcs.SetResult(84); })
                .SelectAsync();
            Assert.True(await select);
            Assert.Equal(84, tcs.Task.Result);
        }

        [Fact]
        public async Task SelectAsync_NoChannelsAvailable_SyncDefault_ThrowsSynchronously()
        {
            // An exception thrown by the sync default faults the (already completed) select task.
            Channel<int> c1 = Channel.CreateUnbounded<int>();
            Channel<int> c2 = Channel.CreateUnbuffered<int>();

            Task<bool> select = Channel
                .CaseRead<int>(c1, i => { throw new InvalidOperationException(); })
                .CaseWrite(c2, 42, () => { throw new InvalidOperationException(); })
                .CaseDefault(new Action(() => { throw new FormatException(); }))
                .SelectAsync();
            Assert.True(select.IsCompleted);
            await Assert.ThrowsAsync<FormatException>(() => select);
        }

        [Fact]
        public async Task SelectAsync_NoChannelsAvailable_AsyncDefault_ThrowsSynchronously()
        {
            // A Func<Task> default that throws before returning a task behaves the same way.
            Channel<int> c1 = Channel.CreateUnbounded<int>();
            Channel<int> c2 = Channel.CreateUnbuffered<int>();

            Task<bool> select = Channel
                .CaseRead<int>(c1, i => { throw new InvalidOperationException(); })
                .CaseWrite(c2, 42, () => { throw new InvalidOperationException(); })
                .CaseDefault(new Func<Task>(() => { throw new FormatException(); }))
                .SelectAsync();
            Assert.True(select.IsCompleted);
            await Assert.ThrowsAsync<FormatException>(() => select);
        }

        [Fact]
        public async Task SelectAsync_NoChannelsAvailable_AsyncDefault_ThrowsAsynchronously()
        {
            // An exception after an await inside the default propagates through the select task.
            Channel<int> c1 = Channel.CreateUnbounded<int>();
            Channel<int> c2 = Channel.CreateUnbuffered<int>();

            Task<bool> select = Channel
                .CaseRead<int>(c1, i => { throw new InvalidOperationException(); })
                .CaseWrite(c2, 42, () => { throw new InvalidOperationException(); })
                .CaseDefault(async () => { await Task.Yield(); throw new FormatException(); })
                .SelectAsync();
            await Assert.ThrowsAsync<FormatException>(() => select);
        }

        [Fact]
        public async Task SelectAsync_AllChannelsCompletedBefore_ReturnsFalse()
        {
            // All channels already completed: no case can ever fire, so select yields false.
            Channel<int> c1 = Channel.CreateUnbounded<int>();
            Channel<int> c2 = Channel.CreateUnbuffered<int>();
            c1.Out.Complete();
            c2.Out.Complete();

            Task<bool> select = Channel
                .CaseRead<int>(c1, i => { throw new InvalidOperationException(); })
                .CaseWrite(c2, 42, () => { throw new InvalidOperationException(); })
                .SelectAsync();

            Assert.False(await select);
        }

        [Fact]
        public async Task SelectAsync_AllChannelsCompletedAfter_ReturnsFalse()
        {
            // Channels completing after the select starts must also resolve it to false.
            Channel<int> c1 = Channel.CreateUnbounded<int>();
            Channel<int> c2 = Channel.CreateUnbuffered<int>();

            Task<bool> select = Channel
                .CaseRead<int>(c1, i => { throw new InvalidOperationException(); })
                .CaseWrite(c2, 42, () => { throw new InvalidOperationException(); })
                .SelectAsync();

            c1.Out.Complete();
            c2.Out.Complete();

            Assert.False(await select);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseRead_Sync_DataAlreadyAvailable()
        {
            // Data buffered before the select: the read case fires and completes synchronously.
            Channel<int> c = Channel.CreateUnbounded<int>();
            Assert.True(c.Out.TryWrite(42));
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseRead<int>(c, i => tcs.SetResult(i)).SelectAsync();
            Assert.True(select.IsCompleted);
            Assert.True(await select);
            Assert.Equal(TaskStatus.RanToCompletion, tcs.Task.Status);
            Assert.Equal(42, await tcs.Task);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseRead_Async_DataAlreadyAvailable_CompletesSynchronously()
        {
            Channel<int> c = Channel.CreateUnbounded<int>();
            Assert.True(c.Out.TryWrite(42));
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseRead<int>(c, i => { tcs.SetResult(i); return Task.CompletedTask; }).SelectAsync();
            Assert.True(select.IsCompleted);
            Assert.True(await select);
            Assert.Equal(TaskStatus.RanToCompletion, tcs.Task.Status);
            Assert.Equal(42, await tcs.Task);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseRead_Async_DataAlreadyAvailable_CompletesAsynchronously()
        {
            Channel<int> c = Channel.CreateUnbounded<int>();
            Assert.True(c.Out.TryWrite(42));
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseRead<int>(c, async i => { await Task.Yield(); tcs.SetResult(i); }).SelectAsync();
            Assert.True(await select);
            Assert.Equal(TaskStatus.RanToCompletion, tcs.Task.Status);
            Assert.Equal(42, await tcs.Task);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseRead_Sync_DataNotAlreadyAvailable()
        {
            // Unbuffered channel: the select parks until a writer arrives.
            Channel<int> c = Channel.CreateUnbuffered<int>();
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseRead<int>(c, i => tcs.SetResult(i)).SelectAsync();
            Assert.False(select.IsCompleted);
            Task write = c.Out.WriteAsync(42);
            Assert.True(await select);
            Assert.Equal(42, await tcs.Task);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseRead_Async_DataNotAlreadyAvailable_CompletesSynchronously()
        {
            Channel<int> c = Channel.CreateUnbuffered<int>();
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseRead<int>(c, i => { tcs.SetResult(i); return Task.CompletedTask; }).SelectAsync();
            Assert.False(select.IsCompleted);
            Task write = c.Out.WriteAsync(42);
            Assert.True(await select);
            Assert.Equal(42, await tcs.Task);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseRead_Async_DataNotAlreadyAvailable_CompletesAsynchronously()
        {
            Channel<int> c = Channel.CreateUnbuffered<int>();
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseRead<int>(c, async i => { await Task.Yield(); tcs.SetResult(i); }).SelectAsync();
            Assert.False(select.IsCompleted);
            Task write = c.Out.WriteAsync(42);
            Assert.True(await select);
            Assert.Equal(42, await tcs.Task);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseWrite_Sync_SpaceAlreadyAvailable_CompletesSynchronously()
        {
            // Unbounded channel always has space: the write case fires immediately and
            // the written value is observable via TryRead.
            Channel<int> c = Channel.CreateUnbounded<int>();
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseWrite(c, 42, () => tcs.SetResult(1)).SelectAsync();
            Assert.True(select.IsCompleted);
            Assert.True(await select);
            Assert.Equal(TaskStatus.RanToCompletion, tcs.Task.Status);
            Assert.Equal(1, await tcs.Task);
            int result;
            Assert.True(c.In.TryRead(out result));
            Assert.Equal(42, result);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseWrite_Async_SpaceAlreadyAvailable_CompletesSynchronously()
        {
            Channel<int> c = Channel.CreateUnbounded<int>();
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseWrite(c, 42, () => { tcs.SetResult(1); return Task.CompletedTask; }).SelectAsync();
            Assert.True(select.IsCompleted);
            Assert.True(await select);
            Assert.Equal(TaskStatus.RanToCompletion, tcs.Task.Status);
            Assert.Equal(1, await tcs.Task);
            int result;
            Assert.True(c.In.TryRead(out result));
            Assert.Equal(42, result);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseWrite_Async_SpaceAlreadyAvailable_CompletesAsynchronously()
        {
            Channel<int> c = Channel.CreateUnbounded<int>();
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseWrite(c, 42, async () => { await Task.Yield(); tcs.SetResult(1); }).SelectAsync();
            Assert.True(await select);
            Assert.Equal(TaskStatus.RanToCompletion, tcs.Task.Status);
            Assert.Equal(1, await tcs.Task);
            int result;
            Assert.True(c.In.TryRead(out result));
            Assert.Equal(42, result);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseWrite_Sync_SpaceNotAlreadyAvailable()
        {
            // Unbuffered channel: the write case fires only once a reader shows up.
            Channel<int> c = Channel.CreateUnbuffered<int>();
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseWrite(c, 42, () => tcs.SetResult(1)).SelectAsync();
            Assert.False(select.IsCompleted);
            Task<int> read = c.In.ReadAsync().AsTask();
            Assert.True(await select);
            Assert.Equal(42, await read);
            Assert.Equal(1, await tcs.Task);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseWrite_Async_SpaceNotAlreadyAvailable_CompletesSynchronously()
        {
            Channel<int> c = Channel.CreateUnbuffered<int>();
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseWrite(c, 42, () => { tcs.SetResult(1); return Task.CompletedTask; }).SelectAsync();
            Assert.False(select.IsCompleted);
            Task<int> read = c.In.ReadAsync().AsTask();
            Assert.True(await select);
            Assert.Equal(42, await read);
            Assert.Equal(1, await tcs.Task);
        }

        [Fact]
        public async Task SelectAsync_SingleCaseWrite_Async_SpaceNotAlreadyAvailable_CompletesAsynchronously()
        {
            Channel<int> c = Channel.CreateUnbuffered<int>();
            var tcs = new TaskCompletionSource<int>();
            Task<bool> select = Channel.CaseWrite(c, 42, async () => { await Task.Yield(); tcs.SetResult(1); }).SelectAsync();
            Assert.False(select.IsCompleted);
            Task<int> read = c.In.ReadAsync().AsTask();
            Assert.True(await select);
            Assert.Equal(42, await read);
            Assert.Equal(1, await tcs.Task);
        }

        [Theory]
        [InlineData(false)]
        [InlineData(true)]
        public async Task SelectAsync_CaseRead_Sync_ThrowsSynchronously(bool before)
        {
            // "before" controls whether the counterpart write is issued before or after
            // the select is created; the delegate's exception must surface either way.
            Channel<int> c = Channel.CreateUnbuffered<int>();
            Task write;
            if (before) write = c.Out.WriteAsync(42);
            Task t = Channel.CaseRead<int>(c, new Action<int>(i => { throw new FormatException(); })).SelectAsync();
            if (!before) write = c.Out.WriteAsync(42);
            await Assert.ThrowsAsync<FormatException>(() => t);
        }

        [Theory]
        [InlineData(false)]
        [InlineData(true)]
        public async Task SelectAsync_CaseRead_Async_DataAlreadyAvailable_ThrowsSynchronously(bool before)
        {
            Channel<int> c = Channel.CreateUnbuffered<int>();
            Task write;
            if (before) write = c.Out.WriteAsync(42);
            Task t = Channel.CaseRead<int>(c, new Func<int, Task>(i => { throw new FormatException(); })).SelectAsync();
            if (!before) write = c.Out.WriteAsync(42);
            await Assert.ThrowsAsync<FormatException>(() => t);
        }

        [Theory]
        [InlineData(false)]
        [InlineData(true)]
        public async Task SelectAsync_CaseRead_Async_DataAlreadyAvailable_ThrowsAsynchronously(bool before)
        {
            Channel<int> c = Channel.CreateUnbuffered<int>();
            Task write;
            if (before) write = c.Out.WriteAsync(42);
            Task t = Channel.CaseRead<int>(c, async i => { await Task.Yield(); throw new FormatException(); }).SelectAsync();
            if (!before) write = c.Out.WriteAsync(42);
            await Assert.ThrowsAsync<FormatException>(() => t);
        }

        [Theory]
        [InlineData(false)]
        [InlineData(true)]
        public async Task SelectAsync_CaseWrite_Sync_ThrowsSynchronously(bool before)
        {
            Channel<int> c = Channel.CreateUnbuffered<int>();
            Task read;
            if (before) read = c.In.ReadAsync().AsTask();
            Task t = Channel.CaseWrite(c, 42, new Action(() => { throw new FormatException(); })).SelectAsync();
            if (!before) read = c.In.ReadAsync().AsTask();
            await Assert.ThrowsAsync<FormatException>(() => t);
        }

        [Theory]
        [InlineData(false)]
        [InlineData(true)]
        public async Task SelectAsync_CaseWrite_Async_ThrowsSynchronously(bool before)
        {
            Channel<int> c = Channel.CreateUnbuffered<int>();
            Task read;
            if (before) read = c.In.ReadAsync().AsTask();
            Task t = Channel.CaseWrite(c, 42, new Func<Task>(() => { throw new FormatException(); })).SelectAsync();
            if (!before) read = c.In.ReadAsync().AsTask();
            await Assert.ThrowsAsync<FormatException>(() => t);
        }

        [Theory]
        [InlineData(false)]
        [InlineData(true)]
        public async Task SelectAsync_CaseWrite_Async_ThrowsAsynchronously(bool before)
        {
            Channel<int> c = Channel.CreateUnbuffered<int>();
            Task read;
            if (before) read = c.In.ReadAsync().AsTask();
            Task t = Channel.CaseWrite(c, 42, async () => { await Task.Yield(); throw new FormatException(); }).SelectAsync();
            if (!before) read = c.In.ReadAsync().AsTask();
            await Assert.ThrowsAsync<FormatException>(() => t);
        }

        [Fact]
        public void SelectUntilAsync_InvalidArguments_ThrowsExceptions()
        {
            CaseBuilder cb = Channel.CaseRead<int>(Channel.CreateUnbounded<int>(), i => { });
            Assert.Throws<ArgumentNullException>(() => { cb.SelectUntilAsync(null); });
        }

        [Theory]
        [InlineData(false, 100, 150)]
        [InlineData(true, 100, 150)]
        [InlineData(false, 100, 100)]
        [InlineData(true, 100, 100)]
        [InlineData(false, 100, 99)]
        [InlineData(true, 100, 99)]
        [InlineData(false, 100, 1)]
        [InlineData(true, 100, 1)]
        [InlineData(false, 100, 0)]
        [InlineData(true, 100, 0)]
        public async Task SelectUntilAsync_ProcessUntilAllDataExhausted_Success(bool dataAvailableBefore, int numItems, int maxIterations)
        {
            // Items are spread round-robin across three channels of different element types.
            // The loop runs until either the predicate (iteration < maxIterations) fails or
            // all channels are completed and drained, so the expected count is
            // min(numItems, maxIterations). dataAvailableBefore toggles whether the select
            // is created before or after the data is written and the channels completed.
            Channel<int> c1 = Channel.CreateUnbounded<int>();
            Channel<string> c2 = Channel.CreateUnbounded<string>();
            Channel<double> c3 = Channel.CreateUnbounded<double>();

            int delegatesInvoked = 0;
            Task<int> select = null;

            if (!dataAvailableBefore)
            {
                select = Channel
                    .CaseRead<int>(c1, i => { Interlocked.Increment(ref delegatesInvoked); })
                    .CaseRead<string>(c2, s => { Interlocked.Increment(ref delegatesInvoked); })
                    .CaseRead<double>(c3, d => { Interlocked.Increment(ref delegatesInvoked); })
                    .SelectUntilAsync(i => i < maxIterations);
            }

            for (int i = 0; i < numItems; i++)
            {
                switch (i % 3)
                {
                    case 0: Assert.True(c1.Out.TryWrite(i)); break;
                    case 1: Assert.True(c2.Out.TryWrite(i.ToString())); break;
                    case 2: Assert.True(c3.Out.TryWrite(i)); break;
                }
            }
            c1.Out.Complete();
            c2.Out.Complete();
            c3.Out.Complete();

            if (dataAvailableBefore)
            {
                select = Channel
                    .CaseRead<int>(c1, i => { Interlocked.Increment(ref delegatesInvoked); })
                    .CaseRead<string>(c2, s => { Interlocked.Increment(ref delegatesInvoked); })
                    .CaseRead<double>(c3, d => { Interlocked.Increment(ref delegatesInvoked); })
                    .SelectUntilAsync(i => i < maxIterations);
            }

            int expected = Math.Min(numItems, maxIterations);
            Assert.Equal(expected, await select);
            Assert.Equal(expected, delegatesInvoked);
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.ApplicationInsights.Management.Models
{
    using Microsoft.Azure;
    using Microsoft.Azure.Management;
    using Microsoft.Azure.Management.ApplicationInsights;
    using Microsoft.Azure.Management.ApplicationInsights.Management;
    using Microsoft.Rest;
    using Microsoft.Rest.Serialization;
    using Newtonsoft.Json;
    using System.Collections;
    using System.Collections.Generic;
    using System.Linq;

    /// <summary>
    /// An Application Insights web test definition.
    /// </summary>
    /// <remarks>
    /// The [JsonTransformation] attribute plus the "properties.*" JsonProperty names
    /// flatten the nested ARM "properties" object onto this type during
    /// (de)serialization — the exact property-name strings are part of the wire
    /// contract and must not be changed.
    /// </remarks>
    [Rest.Serialization.JsonTransformation]
    public partial class WebTest : Resource
    {
        /// <summary>
        /// Initializes a new instance of the WebTest class.
        /// </summary>
        public WebTest()
        {
            CustomInit();
        }

        /// <summary>
        /// Initializes a new instance of the WebTest class.
        /// </summary>
        /// <param name="location">Resource location</param>
        /// <param name="syntheticMonitorId">Unique ID of this WebTest. This is
        /// typically the same value as the Name field.</param>
        /// <param name="webTestName">User defined name of this
        /// WebTest.</param>
        /// <param name="webTestKind">The kind of web test this is, valid
        /// choices are ping and multistep. Possible values include: 'ping',
        /// 'multistep'</param>
        /// <param name="locations">A list of where to physically run the tests
        /// from to give global coverage for accessibility of your
        /// application.</param>
        /// <param name="id">Azure resource Id</param>
        /// <param name="name">Azure resource name</param>
        /// <param name="type">Azure resource type</param>
        /// <param name="tags">Resource tags</param>
        /// <param name="kind">The kind of web test that this web test watches.
        /// Choices are ping and multistep. Possible values include: 'ping',
        /// 'multistep'</param>
        /// <param name="description">Purpose/user defined descriptive text for
        /// this WebTest.</param>
        /// <param name="enabled">Is the test actively being monitored.</param>
        /// <param name="frequency">Interval in seconds between test runs for
        /// this WebTest. Default value is 300.</param>
        /// <param name="timeout">Seconds until this WebTest will timeout and
        /// fail. Default value is 30.</param>
        /// <param name="retryEnabled">Allow for retries should this WebTest
        /// fail.</param>
        /// <param name="configuration">An XML configuration specification for
        /// a WebTest.</param>
        /// <param name="provisioningState">Current state of this component,
        /// whether or not it has been provisioned within the resource group it
        /// is defined. Users cannot change this value but are able to read
        /// from it. Values will include Succeeded, Deploying, Canceled, and
        /// Failed.</param>
        public WebTest(string location, string syntheticMonitorId, string webTestName, WebTestKind webTestKind, IList<WebTestGeolocation> locations, string id = default(string), string name = default(string), string type = default(string), IDictionary<string, string> tags = default(IDictionary<string, string>), WebTestKind? kind = default(WebTestKind?), string description = default(string), bool? enabled = default(bool?), int? frequency = default(int?), int? timeout = default(int?), bool? retryEnabled = default(bool?), WebTestPropertiesConfiguration configuration = default(WebTestPropertiesConfiguration), string provisioningState = default(string))
            : base(location, id, name, type, tags)
        {
            Kind = kind;
            SyntheticMonitorId = syntheticMonitorId;
            WebTestName = webTestName;
            Description = description;
            Enabled = enabled;
            Frequency = frequency;
            Timeout = timeout;
            WebTestKind = webTestKind;
            RetryEnabled = retryEnabled;
            Locations = locations;
            Configuration = configuration;
            ProvisioningState = provisioningState;
            CustomInit();
        }

        /// <summary>
        /// An initialization method that performs custom operations like setting defaults
        /// </summary>
        partial void CustomInit();

        /// <summary>
        /// Gets or sets the kind of web test that this web test watches.
        /// Choices are ping and multistep. Possible values include: 'ping',
        /// 'multistep'
        /// </summary>
        [JsonProperty(PropertyName = "kind")]
        public WebTestKind? Kind { get; set; }

        /// <summary>
        /// Gets or sets unique ID of this WebTest. This is typically the same
        /// value as the Name field.
        /// </summary>
        [JsonProperty(PropertyName = "properties.SyntheticMonitorId")]
        public string SyntheticMonitorId { get; set; }

        /// <summary>
        /// Gets or sets user defined name of this WebTest.
        /// </summary>
        [JsonProperty(PropertyName = "properties.Name")]
        public string WebTestName { get; set; }

        /// <summary>
        /// Gets or sets purpose/user defined descriptive text for this
        /// WebTest.
        /// </summary>
        [JsonProperty(PropertyName = "properties.Description")]
        public string Description { get; set; }

        /// <summary>
        /// Gets or sets is the test actively being monitored.
        /// </summary>
        [JsonProperty(PropertyName = "properties.Enabled")]
        public bool? Enabled { get; set; }

        /// <summary>
        /// Gets or sets interval in seconds between test runs for this
        /// WebTest. Default value is 300.
        /// </summary>
        [JsonProperty(PropertyName = "properties.Frequency")]
        public int? Frequency { get; set; }

        /// <summary>
        /// Gets or sets seconds until this WebTest will timeout and fail.
        /// Default value is 30.
        /// </summary>
        [JsonProperty(PropertyName = "properties.Timeout")]
        public int? Timeout { get; set; }

        /// <summary>
        /// Gets or sets the kind of web test this is, valid choices are ping
        /// and multistep. Possible values include: 'ping', 'multistep'
        /// </summary>
        [JsonProperty(PropertyName = "properties.Kind")]
        public WebTestKind WebTestKind { get; set; }

        /// <summary>
        /// Gets or sets allow for retries should this WebTest fail.
        /// </summary>
        [JsonProperty(PropertyName = "properties.RetryEnabled")]
        public bool? RetryEnabled { get; set; }

        /// <summary>
        /// Gets or sets a list of where to physically run the tests from to
        /// give global coverage for accessibility of your application.
        /// </summary>
        [JsonProperty(PropertyName = "properties.Locations")]
        public IList<WebTestGeolocation> Locations { get; set; }

        /// <summary>
        /// Gets or sets an XML configuration specification for a WebTest.
        /// </summary>
        [JsonProperty(PropertyName = "properties.Configuration")]
        public WebTestPropertiesConfiguration Configuration { get; set; }

        /// <summary>
        /// Gets current state of this component, whether or not it has been
        /// provisioned within the resource group it is defined. Users cannot
        /// change this value but are able to read from it. Values will include
        /// Succeeded, Deploying, Canceled, and Failed.
        /// </summary>
        [JsonProperty(PropertyName = "properties.provisioningState")]
        public string ProvisioningState { get; private set; }

        /// <summary>
        /// Validate the object.
        /// </summary>
        /// <exception cref="ValidationException">
        /// Thrown if validation fails
        /// </exception>
        public override void Validate()
        {
            base.Validate();
            // The three required "properties" members; location is validated by base.Validate().
            if (SyntheticMonitorId == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "SyntheticMonitorId");
            }
            if (WebTestName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "WebTestName");
            }
            if (Locations == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "Locations");
            }
        }
    }
}
/*
 * Exchange Web Services Managed API
 *
 * Copyright (c) Microsoft Corporation
 * All rights reserved.
 *
 * MIT License
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this
 * software and associated documentation files (the "Software"), to deal in the Software
 * without restriction, including without limitation the rights to use, copy, modify, merge,
 * publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
 * to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or
 * substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

namespace Microsoft.Exchange.WebServices.Data
{
    using System;
    using System.Collections.Generic;
    using System.Text;

    /// <summary>
    /// Represents an abstract Find request.
    /// </summary>
    /// <typeparam name="TResponse">The type of the response.</typeparam>
    internal abstract class FindRequest<TResponse> : MultiResponseServiceRequest<TResponse>
        where TResponse : ServiceResponse
    {
        // One Find operation is issued per parent folder id in this list.
        private FolderIdWrapperList parentFolderIds = new FolderIdWrapperList();
        private SearchFilter searchFilter;
        // Indexed-search (AQS) query string; mutually exclusive with searchFilter.
        private string queryString;
        private bool returnHighlightTerms;
        private ViewBase view;

        /// <summary>
        /// Initializes a new instance of the <see cref="FindRequest&lt;TResponse&gt;"/> class.
        /// </summary>
        /// <param name="service">The service.</param>
        /// <param name="errorHandlingMode"> Indicates how errors should be handled.</param>
        internal FindRequest(ExchangeService service, ServiceErrorHandling errorHandlingMode)
            : base(service, errorHandlingMode)
        {
        }

        /// <summary>
        /// Validate request.
        /// </summary>
        /// <remarks>
        /// Checks are ordered deliberately: server-version gating first, then the
        /// filter/query-string mutual-exclusion check. Reordering changes which
        /// exception callers observe when multiple violations exist.
        /// </remarks>
        internal override void Validate()
        {
            base.Validate();

            this.View.InternalValidate(this);

            // query string parameter is only valid for Exchange2010 or higher
            //
            if (!String.IsNullOrEmpty(this.queryString) && this.Service.RequestedServerVersion < ExchangeVersion.Exchange2010)
            {
                throw new ServiceVersionException(
                    string.Format(
                        Strings.ParameterIncompatibleWithRequestVersion,
                        "queryString",
                        ExchangeVersion.Exchange2010));
            }

            // ReturnHighlightTerms parameter is only valid for Exchange2013 or higher
            //
            if (this.ReturnHighlightTerms && this.Service.RequestedServerVersion < ExchangeVersion.Exchange2013)
            {
                throw new ServiceVersionException(
                    string.Format(
                        Strings.ParameterIncompatibleWithRequestVersion,
                        "returnHighlightTerms",
                        ExchangeVersion.Exchange2013));
            }

            // SeekToConditionItemView is only valid for Exchange2013 or higher
            //
            if ((this.View is SeekToConditionItemView) && this.Service.RequestedServerVersion < ExchangeVersion.Exchange2013)
            {
                throw new ServiceVersionException(
                    string.Format(
                        Strings.ParameterIncompatibleWithRequestVersion,
                        "SeekToConditionItemView",
                        ExchangeVersion.Exchange2013));
            }

            // A search filter and a query string cannot both be specified.
            if (!String.IsNullOrEmpty(this.queryString) && this.searchFilter != null)
            {
                throw new ServiceLocalException(Strings.BothSearchFilterAndQueryStringCannotBeSpecified);
            }
        }

        /// <summary>
        /// Gets the expected response message count.
        /// </summary>
        /// <returns>XML element name.</returns>
        internal override int GetExpectedResponseMessageCount()
        {
            // One response message per parent folder searched.
            return this.ParentFolderIds.Count;
        }

        /// <summary>
        /// Gets the group by clause.
        /// </summary>
        /// <returns>The group by clause, null if the request does not have or support grouping.</returns>
        internal virtual Grouping GetGroupBy()
        {
            return null;
        }

        /// <summary>
        /// Writes XML attributes.
        /// </summary>
        /// <param name="writer">The writer.</param>
        internal override void WriteAttributesToXml(EwsServiceXmlWriter writer)
        {
            base.WriteAttributesToXml(writer);

            this.View.WriteAttributesToXml(writer);
        }

        /// <summary>
        /// Writes XML elements.
        /// </summary>
        /// <remarks>
        /// NOTE(review): the emission order (view, restriction, order-by, parent
        /// folder ids, query string) appears fixed by the request schema — preserve it.
        /// </remarks>
        /// <param name="writer">The writer.</param>
        internal override void WriteElementsToXml(EwsServiceXmlWriter writer)
        {
            this.View.WriteToXml(writer, this.GetGroupBy());

            if (this.SearchFilter != null)
            {
                writer.WriteStartElement(XmlNamespace.Messages, XmlElementNames.Restriction);
                this.SearchFilter.WriteToXml(writer);
                writer.WriteEndElement(); // Restriction
            }

            this.View.WriteOrderByToXml(writer);

            this.ParentFolderIds.WriteToXml(
                writer,
                XmlNamespace.Messages,
                XmlElementNames.ParentFolderIds);

            if (!string.IsNullOrEmpty(this.queryString))
            {
                // Emit the QueryString
                //
                writer.WriteStartElement(XmlNamespace.Messages, XmlElementNames.QueryString);

                if (this.ReturnHighlightTerms)
                {
                    writer.WriteAttributeString(XmlAttributeNames.ReturnHighlightTerms, this.ReturnHighlightTerms.ToString().ToLowerInvariant());
                }

                writer.WriteValue(this.queryString, XmlElementNames.QueryString);
                writer.WriteEndElement();
            }
        }

        /// <summary>
        /// Gets the parent folder ids.
        /// </summary>
        public FolderIdWrapperList ParentFolderIds
        {
            get { return this.parentFolderIds; }
        }

        /// <summary>
        /// Gets or sets the search filter. Available search filter classes include SearchFilter.IsEqualTo,
        /// SearchFilter.ContainsSubstring and SearchFilter.SearchFilterCollection. If SearchFilter
        /// is null, no search filters are applied.
        /// </summary>
        public SearchFilter SearchFilter
        {
            get { return this.searchFilter; }
            set { this.searchFilter = value; }
        }

        /// <summary>
        /// Gets or sets the query string for indexed search.
        /// </summary>
        public string QueryString
        {
            get { return this.queryString; }
            set { this.queryString = value; }
        }

        /// <summary>
        /// Gets or sets the query string highlight terms.
        /// </summary>
        internal bool ReturnHighlightTerms
        {
            get { return this.returnHighlightTerms; }
            set { this.returnHighlightTerms = value; }
        }

        /// <summary>
        /// Gets or sets the view controlling the number of items or folders returned.
        /// </summary>
        public ViewBase View
        {
            get { return this.view; }
            set { this.view = value; }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

//------------------------------------------------------------------------------

using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Threading;

namespace System.Data.SqlClient
{
    /// <summary>
    /// Process-wide SafeHandle wrapping native SNI initialization/termination.
    /// The single instance guarantees SNIInitialize runs once and SNITerminate
    /// runs on release.
    /// </summary>
    internal sealed class SNILoadHandle : SafeHandle
    {
        internal static readonly SNILoadHandle SingletonInstance = new SNILoadHandle();

        // Shared native->managed async callback dispatchers for all connections.
        internal readonly SNINativeMethodWrapper.SqlAsyncCallbackDelegate ReadAsyncCallbackDispatcher = new SNINativeMethodWrapper.SqlAsyncCallbackDelegate(ReadDispatcher);
        internal readonly SNINativeMethodWrapper.SqlAsyncCallbackDelegate WriteAsyncCallbackDispatcher = new SNINativeMethodWrapper.SqlAsyncCallbackDelegate(WriteDispatcher);

        private readonly UInt32 _sniStatus = TdsEnums.SNI_UNINITIALIZED;
        private readonly EncryptionOptions _encryptionOption;

        private SNILoadHandle() : base(IntPtr.Zero, true)
        {
            // From security review - SafeHandle guarantees this is only called once.
            // The reason for the safehandle is guaranteed initialization and termination of SNI to
            // ensure SNI terminates and cleans up properly.
            // NOTE(review): the empty try with all work in finally is presumably a
            // thread-abort protection pattern — keep the shape intact.
            try
            { }
            finally
            {
                _sniStatus = SNINativeMethodWrapper.SNIInitialize();

                UInt32 value = 0;

                // VSDevDiv 479597: If initialize fails, don't call QueryInfo.
                if (TdsEnums.SNI_SUCCESS == _sniStatus)
                {
                    // Query OS to find out whether encryption is supported.
                    SNINativeMethodWrapper.SNIQueryInfo(SNINativeMethodWrapper.QTypes.SNI_QUERY_CLIENT_ENCRYPT_POSSIBLE, ref value);
                }

                _encryptionOption = (value == 0) ? EncryptionOptions.NOT_SUP : EncryptionOptions.OFF;

                base.handle = (IntPtr)1; // Initialize to non-zero dummy variable.
            }
        }

        public override bool IsInvalid
        {
            get
            {
                return (IntPtr.Zero == base.handle);
            }
        }

        override protected bool ReleaseHandle()
        {
            if (base.handle != IntPtr.Zero)
            {
                // Only tear SNI down if it was successfully initialized.
                if (TdsEnums.SNI_SUCCESS == _sniStatus)
                {
                    LocalDBAPI.ReleaseDLLHandles();
                    SNINativeMethodWrapper.SNITerminate();
                }
                base.handle = IntPtr.Zero;
            }

            return true;
        }

        // Result of SNIInitialize (TdsEnums.SNI_SUCCESS on success).
        public UInt32 Status
        {
            get
            {
                return _sniStatus;
            }
        }

        // OS-level encryption capability detected at initialization.
        public EncryptionOptions Options
        {
            get
            {
                return _encryptionOption;
            }
        }

        static private void ReadDispatcher(IntPtr key, IntPtr packet, UInt32 error)
        {
            // This is the app-domain dispatcher for all async read callbacks, It
            // simply gets the state object from the key that it is passed, and
            // calls the state object's read callback.
            Debug.Assert(IntPtr.Zero != key, "no key passed to read callback dispatcher?");
            if (IntPtr.Zero != key)
            {
                // NOTE: we will get a null ref here if we don't get a key that
                // contains a GCHandle to TDSParserStateObject; that is
                // very bad, and we want that to occur so we can catch it.
                GCHandle gcHandle = (GCHandle)key;
                TdsParserStateObject stateObj = (TdsParserStateObject)gcHandle.Target;

                if (null != stateObj)
                {
                    stateObj.ReadAsyncCallback(IntPtr.Zero, packet, error);
                }
            }
        }

        static private void WriteDispatcher(IntPtr key, IntPtr packet, UInt32 error)
        {
            // This is the app-domain dispatcher for all async write callbacks, It
            // simply gets the state object from the key that it is passed, and
            // calls the state object's write callback.
            Debug.Assert(IntPtr.Zero != key, "no key passed to write callback dispatcher?");
            if (IntPtr.Zero != key)
            {
                // NOTE: we will get a null ref here if we don't get a key that
                // contains a GCHandle to TDSParserStateObject; that is
                // very bad, and we want that to occur so we can catch it.
                GCHandle gcHandle = (GCHandle)key;
                TdsParserStateObject stateObj = (TdsParserStateObject)gcHandle.Target;

                if (null != stateObj)
                {
                    stateObj.WriteAsyncCallback(IntPtr.Zero, packet, error);
                }
            }
        }
    }

    /// <summary>
    /// SafeHandle over a native SNI connection handle; closed via SNIClose on release.
    /// </summary>
    internal sealed class SNIHandle : SafeHandle
    {
        private readonly UInt32 _status = TdsEnums.SNI_UNINITIALIZED;
        private readonly bool _fSync = false;

        // creates a physical connection
        internal SNIHandle(
            SNINativeMethodWrapper.ConsumerInfo myInfo,
            string serverName,
            byte[] spnBuffer,
            bool ignoreSniOpenTimeout,
            int timeout,
            out byte[] instanceName,
            bool flushCache,
            bool fSync,
            bool fParallel)
            : base(IntPtr.Zero, true)
        {
            // Empty try / all work in finally — same uninterruptible-initialization
            // pattern as SNILoadHandle; keep intact.
            try
            { }
            finally
            {
                _fSync = fSync;
                instanceName = new byte[256]; // Size as specified by netlibs.
                if (ignoreSniOpenTimeout)
                {
                    timeout = Timeout.Infinite; // -1 == native SNIOPEN_TIMEOUT_VALUE / INFINITE
                }

                _status = SNINativeMethodWrapper.SNIOpenSyncEx(myInfo, serverName, ref base.handle,
                            spnBuffer, instanceName, flushCache, fSync, timeout, fParallel);
            }
        }

        // constructs SNI Handle for MARS session
        internal SNIHandle(SNINativeMethodWrapper.ConsumerInfo myInfo, SNIHandle parent) : base(IntPtr.Zero, true)
        {
            try
            { }
            finally
            {
                _status = SNINativeMethodWrapper.SNIOpenMarsSession(myInfo, parent, ref base.handle, parent._fSync);
            }
        }

        public override bool IsInvalid
        {
            get
            {
                return (IntPtr.Zero == base.handle);
            }
        }

        override protected bool ReleaseHandle()
        {
            // NOTE: The SafeHandle class guarantees this will be called exactly once.
            IntPtr ptr = base.handle;
            base.handle = IntPtr.Zero;
            if (IntPtr.Zero != ptr)
            {
                if (0 != SNINativeMethodWrapper.SNIClose(ptr))
                {
                    return false; // SNIClose should never fail.
                }
            }
            return true;
        }

        // Result of the native open call (TdsEnums.SNI_SUCCESS on success).
        internal UInt32 Status
        {
            get
            {
                return _status;
            }
        }
    }

    /// <summary>
    /// SafeHandle over a native SNI write packet; released via SNIPacketRelease.
    /// </summary>
    internal sealed class SNIPacket : SafeHandle
    {
        internal SNIPacket(SafeHandle sniHandle) : base(IntPtr.Zero, true)
        {
            SNINativeMethodWrapper.SNIPacketAllocate(sniHandle, SNINativeMethodWrapper.IOType.WRITE, ref base.handle);
            if (IntPtr.Zero == base.handle)
            {
                throw SQL.SNIPacketAllocationFailure();
            }
        }

        public override bool IsInvalid
        {
            get
            {
                return (IntPtr.Zero == base.handle);
            }
        }

        override protected bool ReleaseHandle()
        {
            // NOTE: The SafeHandle class guarantees this will be called exactly once.
            IntPtr ptr = base.handle;
            base.handle = IntPtr.Zero;
            if (IntPtr.Zero != ptr)
            {
                SNINativeMethodWrapper.SNIPacketRelease(ptr);
            }
            return true;
        }
    }

    /// <summary>
    /// Pool of reusable SNI write packets to avoid repeated native allocation.
    /// NOTE(review): not synchronized — presumably callers serialize access; confirm.
    /// </summary>
    internal sealed class WritePacketCache : IDisposable
    {
        private bool _disposed;
        private Stack<SNIPacket> _packets;

        public WritePacketCache()
        {
            _disposed = false;
            _packets = new Stack<SNIPacket>();
        }

        // Returns a reset pooled packet, or allocates a new one if the pool is empty.
        public SNIPacket Take(SNIHandle sniHandle)
        {
            SNIPacket packet;
            if (_packets.Count > 0)
            {
                // Success - reset the packet
                packet = _packets.Pop();
                SNINativeMethodWrapper.SNIPacketReset(sniHandle, SNINativeMethodWrapper.IOType.WRITE, packet, SNINativeMethodWrapper.ConsumerNumber.SNI_Consumer_SNI);
            }
            else
            {
                // Failed to take a packet - create a new one
                packet = new SNIPacket(sniHandle);
            }
            return packet;
        }

        public void Add(SNIPacket packet)
        {
            if (!_disposed)
            {
                _packets.Push(packet);
            }
            else
            {
                // If we're disposed, then get rid of any packets added to us
                packet.Dispose();
            }
        }

        public void Clear()
        {
            while (_packets.Count > 0)
            {
                _packets.Pop().Dispose();
            }
        }

        public void Dispose()
        {
            if (!_disposed)
            {
                _disposed = true;
                Clear();
            }
        }
    }
}
// "Therefore those skilled at the unorthodox // are infinite as heaven and earth, // inexhaustible as the great rivers. // When they come to an end, // they begin again, // like the days and months; // they die and are reborn, // like the four seasons." // // - Sun Tsu, // "The Art of War" using System; using System.Windows; using System.Windows.Media; using System.Windows.Media.Imaging; using TheArtOfDev.HtmlRenderer.Core; using TheArtOfDev.HtmlRenderer.Core.Entities; using TheArtOfDev.HtmlRenderer.Core.Utils; using TheArtOfDev.HtmlRenderer.WPF.Adapters; using TheArtOfDev.HtmlRenderer.WPF.Utilities; namespace TheArtOfDev.HtmlRenderer.WPF { /// <summary> /// Standalone static class for simple and direct HTML rendering.<br/> /// For WPF UI prefer using HTML controls: <see cref="HtmlPanel"/> or <see cref="HtmlLabel"/>.<br/> /// For low-level control and performance consider using <see cref="HtmlContainer"/>.<br/> /// </summary> /// <remarks> /// <para> /// <b>Rendering to image</b><br/> /// // TODO:a update! /// See https://htmlrenderer.codeplex.com/wikipage?title=Image%20generation <br/> /// Because of GDI text rendering issue with alpha channel clear type text rendering rendering to image requires special handling.<br/> /// <u>Solid color background -</u> generate an image where the background is filled with solid color and all the html is rendered on top /// of the background color, GDI text rendering will be used. (RenderToImage method where the first argument is html string)<br/> /// <u>Image background -</u> render html on top of existing image with whatever currently exist but it cannot have transparent pixels, /// GDI text rendering will be used. (RenderToImage method where the first argument is Image object)<br/> /// <u>Transparent background -</u> render html to empty image using GDI+ text rendering, the generated image can be transparent. 
/// </para> /// <para> /// <b>Overwrite stylesheet resolution</b><br/> /// Exposed by optional "stylesheetLoad" delegate argument.<br/> /// Invoked when a stylesheet is about to be loaded by file path or URL in 'link' element.<br/> /// Allows to overwrite the loaded stylesheet by providing the stylesheet data manually, or different source (file or URL) to load from.<br/> /// Example: The stylesheet 'href' can be non-valid URI string that is interpreted in the overwrite delegate by custom logic to pre-loaded stylesheet object<br/> /// If no alternative data is provided the original source will be used.<br/> /// </para> /// <para> /// <b>Overwrite image resolution</b><br/> /// Exposed by optional "imageLoad" delegate argument.<br/> /// Invoked when an image is about to be loaded by file path, URL or inline data in 'img' element or background-image CSS style.<br/> /// Allows to overwrite the loaded image by providing the image object manually, or different source (file or URL) to load from.<br/> /// Example: image 'src' can be non-valid string that is interpreted in the overwrite delegate by custom logic to resource image object<br/> /// Example: image 'src' in the html is relative - the overwrite intercepts the load and provide full source URL to load the image from<br/> /// Example: image download requires authentication - the overwrite intercepts the load, downloads the image to disk using custom code and provide /// file path to load the image from.<br/> /// If no alternative data is provided the original source will be used.<br/> /// Note: Cannot use asynchronous scheme overwrite scheme.<br/> /// </para> /// </remarks> /// <example> /// <para> /// <b>Simple rendering</b><br/> /// HtmlRender.Render(g, "<![CDATA[<div>Hello <b>World</b></div>]]>");<br/> /// HtmlRender.Render(g, "<![CDATA[<div>Hello <b>World</b></div>]]>", 10, 10, 500, CssData.Parse("body {font-size: 20px}")");<br/> /// </para> /// <para> /// <b>Image rendering</b><br/> /// 
HtmlRender.RenderToImage("<![CDATA[<div>Hello <b>World</b></div>]]>", new Size(600,400));<br/> /// HtmlRender.RenderToImage("<![CDATA[<div>Hello <b>World</b></div>]]>", 600);<br/> /// HtmlRender.RenderToImage(existingImage, "<![CDATA[<div>Hello <b>World</b></div>]]>");<br/> /// </para> /// </example> public static class HtmlRender { /// <summary> /// Adds a font family to be used in html rendering.<br/> /// The added font will be used by all rendering function including <see cref="HtmlContainer"/> and all WPF controls. /// </summary> /// <remarks> /// The given font family instance must be remain alive while the renderer is in use.<br/> /// If loaded from file then the file must not be deleted. /// </remarks> /// <param name="fontFamily">The font family to add.</param> public static void AddFontFamily(FontFamily fontFamily) { ArgChecker.AssertArgNotNull(fontFamily, "fontFamily"); WpfAdapter.Instance.AddFontFamily(new FontFamilyAdapter(fontFamily)); } /// <summary> /// Adds a font mapping from <paramref name="fromFamily"/> to <paramref name="toFamily"/> iff the <paramref name="fromFamily"/> is not found.<br/> /// When the <paramref name="fromFamily"/> font is used in rendered html and is not found in existing /// fonts (installed or added) it will be replaced by <paramref name="toFamily"/>.<br/> /// </summary> /// <remarks> /// This fonts mapping can be used as a fallback in case the requested font is not installed in the client system. 
/// </remarks>
        /// <param name="fromFamily">the font family to replace</param>
        /// <param name="toFamily">the font family to replace with</param>
        public static void AddFontFamilyMapping(string fromFamily, string toFamily)
        {
            ArgChecker.AssertArgNotNullOrEmpty(fromFamily, "fromFamily");
            ArgChecker.AssertArgNotNullOrEmpty(toFamily, "toFamily");

            WpfAdapter.Instance.AddFontFamilyMapping(fromFamily, toFamily);
        }

        /// <summary>
        /// Parse the given stylesheet to <see cref="CssData"/> object.<br/>
        /// If <paramref name="combineWithDefault"/> is true the parsed css blocks are added to the
        /// default css data (as defined by W3), merged if class name already exists. If false only the data in the given stylesheet is returned.
        /// </summary>
        /// <seealso cref="http://www.w3.org/TR/CSS21/sample.html"/>
        /// <param name="stylesheet">the stylesheet source to parse</param>
        /// <param name="combineWithDefault">true - combine the parsed css data with default css data, false - return only the parsed css data</param>
        /// <returns>the parsed css data</returns>
        public static CssData ParseStyleSheet(string stylesheet, bool combineWithDefault = true)
        {
            return CssData.Parse(WpfAdapter.Instance, stylesheet, combineWithDefault);
        }

        /// <summary>
        /// Measure the size (width and height) required to draw the given html under given max width restriction.<br/>
        /// If no max width restriction is given the layout will use the maximum possible width required by the content,
        /// it can be the longest text line or full image width.<br/>
        /// </summary>
        /// <param name="html">HTML source to render</param>
        /// <param name="maxWidth">optional: bound the width of the html to render in (default - 0, unlimited)</param>
        /// <param name="cssData">optional: the style to use for html rendering (default - use W3 default style)</param>
        /// <param name="stylesheetLoad">optional: can be used to overwrite stylesheet resolution logic</param>
        /// <param name="imageLoad">optional: can be used to overwrite image resolution logic</param>
        /// <returns>the size required for the html; <see cref="Size.Empty"/> when <paramref name="html"/> is null or empty</returns>
        public static Size Measure(string html, double maxWidth = 0, CssData cssData = null, EventHandler<HtmlStylesheetLoadEventArgs> stylesheetLoad = null, EventHandler<HtmlImageLoadEventArgs> imageLoad = null)
        {
            Size actualSize = Size.Empty;
            if (!string.IsNullOrEmpty(html))
            {
                using (var container = new HtmlContainer())
                {
                    // width 0 in MaxSize.Height means unrestricted height; maxWidth 0 means unrestricted width
                    container.MaxSize = new Size(maxWidth, 0);
                    // synchronous image loading so ActualSize is final after a single layout pass
                    container.AvoidAsyncImagesLoading = true;
                    container.AvoidImagesLateLoading = true;

                    if (stylesheetLoad != null)
                        container.StylesheetLoad += stylesheetLoad;
                    if (imageLoad != null)
                        container.ImageLoad += imageLoad;

                    container.SetHtml(html, cssData);
                    container.PerformLayout();

                    actualSize = container.ActualSize;
                }
            }
            return actualSize;
        }

        /// <summary>
        /// Renders the specified HTML source on the specified location and max width restriction.<br/>
        /// If <paramref name="maxWidth"/> is zero the html will use all the required width, otherwise it will perform line
        /// wrap as specified in the html<br/>
        /// Returned is the actual width and height of the rendered html.<br/>
        /// </summary>
        /// <param name="g">Device to render with</param>
        /// <param name="html">HTML source to render</param>
        /// <param name="left">optional: the left most location to start render the html at (default - 0)</param>
        /// <param name="top">optional: the top most location to start render the html at (default - 0)</param>
        /// <param name="maxWidth">optional: bound the width of the html to render in (default - 0, unlimited)</param>
        /// <param name="cssData">optional: the style to use for html rendering (default - use W3 default style)</param>
        /// <param name="stylesheetLoad">optional: can be used to overwrite stylesheet resolution logic</param>
        /// <param name="imageLoad">optional: can be used to overwrite image resolution logic</param>
        /// <returns>the actual size of the rendered html</returns>
        /// <exception cref="ArgumentNullException">thrown when <paramref name="g"/> is null</exception>
        public static Size Render(DrawingContext g, string html, double left = 0, double top = 0, double maxWidth = 0, CssData cssData = null, EventHandler<HtmlStylesheetLoadEventArgs> stylesheetLoad = null, EventHandler<HtmlImageLoadEventArgs> imageLoad = null)
        {
            ArgChecker.AssertArgNotNull(g, "g");

            // height 0 in the max size means no height clipping
            return RenderClip(g, html, new Point(left, top), new Size(maxWidth, 0), cssData, stylesheetLoad, imageLoad);
        }

        /// <summary>
        /// Renders the specified HTML source on the specified location and max size restriction.<br/>
        /// If <paramref name="maxSize"/>.Width is zero the html will use all the required width, otherwise it will perform line
        /// wrap as specified in the html<br/>
        /// If <paramref name="maxSize"/>.Height is zero the html will use all the required height, otherwise it will clip at the
        /// given max height not rendering the html below it.<br/>
        /// Returned is the actual width and height of the rendered html.<br/>
        /// </summary>
        /// <param name="g">Device to render with</param>
        /// <param name="html">HTML source to render</param>
        /// <param name="location">the top-left most location to start render the html at</param>
        /// <param name="maxSize">the max size of the rendered html (if height above zero it will be clipped)</param>
        /// <param name="cssData">optional: the style to use for html rendering (default - use W3 default style)</param>
        /// <param name="stylesheetLoad">optional: can be used to overwrite stylesheet resolution logic</param>
        /// <param name="imageLoad">optional: can be used to overwrite image resolution logic</param>
        /// <returns>the actual size of the rendered html</returns>
        /// <exception cref="ArgumentNullException">thrown when <paramref name="g"/> is null</exception>
        public static Size Render(DrawingContext g, string html, Point location, Size maxSize, CssData cssData = null, EventHandler<HtmlStylesheetLoadEventArgs> stylesheetLoad = null, EventHandler<HtmlImageLoadEventArgs> imageLoad = null)
        {
            ArgChecker.AssertArgNotNull(g, "g");

            return RenderClip(g, html, location, maxSize, cssData, stylesheetLoad, imageLoad);
        }

        /// <summary>
        /// Renders the specified HTML into a new image of the requested size.<br/>
        /// The HTML will be layout by the given size but will be clipped if cannot fit.<br/>
        /// </summary>
        /// <param name="html">HTML source to render</param>
        /// <param name="size">The size of the image to render into, layout html by width and clipped by height</param>
        /// <param name="cssData">optional: the style to use for html rendering (default - use W3 default style)</param>
        /// <param name="stylesheetLoad">optional: can be used to overwrite stylesheet resolution logic</param>
        /// <param name="imageLoad">optional: can be used to overwrite image resolution logic</param>
        /// <returns>the generated image of the html</returns>
        public static BitmapFrame RenderToImage(string html, Size size, CssData cssData = null, EventHandler<HtmlStylesheetLoadEventArgs> stylesheetLoad = null, EventHandler<HtmlImageLoadEventArgs> imageLoad = null)
        {
            // 96 DPI target; fractional sizes are truncated to whole pixels
            var renderTarget = new RenderTargetBitmap((int)size.Width, (int)size.Height, 96, 96, PixelFormats.Pbgra32);

            if (!string.IsNullOrEmpty(html))
            {
                // render HTML into the visual
                DrawingVisual drawingVisual = new DrawingVisual();
                using (DrawingContext g = drawingVisual.RenderOpen())
                {
                    RenderHtml(g, html, new Point(), size, cssData, stylesheetLoad, imageLoad);
                }

                // render visual into target bitmap
                renderTarget.Render(drawingVisual);
            }

            return BitmapFrame.Create(renderTarget);
        }

        /// <summary>
        /// Renders the specified HTML into a new image of unknown size that will be determined by max width/height and HTML layout.<br/>
        /// If <paramref name="maxWidth"/> is zero the html will use all the required width, otherwise it will perform line
        /// wrap as specified in the html<br/>
        /// If <paramref name="maxHeight"/> is zero the html will use all the required height, otherwise it will clip at the
        /// given max height not rendering the html below it.<br/>
        /// <p>
        /// Limitation: The image cannot have transparent background, by default it will be white.<br/>
        /// See "Rendering to image" remarks section on <see cref="HtmlRender"/>.<br/>
        /// </p>
        /// </summary>
        /// <param name="html">HTML source to render</param>
        /// <param name="maxWidth">optional: the max width of the rendered html, if not zero and html cannot be layout within the limit it will be clipped</param>
        /// <param name="maxHeight">optional: the max height of the rendered html, if not zero and html cannot be layout within the limit it will be clipped</param>
        /// <param name="backgroundColor">optional: the color to fill the image with (default - white)</param>
        /// <param name="cssData">optional: the style to use for html rendering (default - use W3 default style)</param>
        /// <param name="stylesheetLoad">optional: can be used to overwrite stylesheet resolution logic</param>
        /// <param name="imageLoad">optional: can be used to overwrite image resolution logic</param>
        /// <returns>the generated image of the html</returns>
        public static BitmapFrame RenderToImage(string html, int maxWidth = 0, int maxHeight = 0, Color backgroundColor = new Color(), CssData cssData = null, EventHandler<HtmlStylesheetLoadEventArgs> stylesheetLoad = null, EventHandler<HtmlImageLoadEventArgs> imageLoad = null)
        {
            // Size.Empty as min size: no lower bound on the resulting image dimensions
            return RenderToImage(html, Size.Empty, new Size(maxWidth, maxHeight), backgroundColor, cssData, stylesheetLoad, imageLoad);
        }

        /// <summary>
        /// Renders the specified HTML into a new image of unknown size that will be determined by min/max width/height and HTML layout.<br/>
        /// If <paramref name="maxSize"/>.Width is zero the html will use all the required width, otherwise it will perform line
        /// wrap as specified in the html<br/>
        /// If <paramref name="maxSize"/>.Height is zero the html will use all the required height, otherwise it will clip at the
        /// given max height not rendering the html below it.<br/>
        /// If <paramref name="minSize"/> (Width/Height) is above zero the rendered image will not be smaller than the given min size.<br/>
        /// <p>
        /// Limitation: The image cannot have transparent background, by default it will be white.<br/>
        /// See "Rendering to image" remarks section on <see cref="HtmlRender"/>.<br/>
        /// </p>
        /// </summary>
        /// <param name="html">HTML source to render</param>
        /// <param name="minSize">optional: the min size of the rendered html (zero - not limit the width/height)</param>
        /// <param name="maxSize">optional: the max size of the rendered html, if not zero and html cannot be layout within the limit it will be clipped (zero - not limit the width/height)</param>
        /// <param name="backgroundColor">optional: the color to fill the image with (default - white). NOTE(review): this parameter is accepted but never referenced in this method body — confirm whether a background fill is missing</param>
        /// <param name="cssData">optional: the style to use for html rendering (default - use W3 default style)</param>
        /// <param name="stylesheetLoad">optional: can be used to overwrite stylesheet resolution logic</param>
        /// <param name="imageLoad">optional: can be used to overwrite image resolution logic</param>
        /// <returns>the generated image of the html; an empty (0x0) bitmap frame when <paramref name="html"/> is null or empty</returns>
        public static BitmapFrame RenderToImage(string html, Size minSize, Size maxSize, Color backgroundColor = new Color(), CssData cssData = null, EventHandler<HtmlStylesheetLoadEventArgs> stylesheetLoad = null, EventHandler<HtmlImageLoadEventArgs> imageLoad = null)
        {
            RenderTargetBitmap renderTarget;
            if (!string.IsNullOrEmpty(html))
            {
                using (var container = new HtmlContainer())
                {
                    // synchronous image loading so the measured size is final before painting
                    container.AvoidAsyncImagesLoading = true;
                    container.AvoidImagesLateLoading = true;

                    if (stylesheetLoad != null)
                        container.StylesheetLoad += stylesheetLoad;
                    if (imageLoad != null)
                        container.ImageLoad += imageLoad;

                    container.SetHtml(html, cssData);

                    // measure the final image size within the min/max bounds
                    var finalSize = MeasureHtmlByRestrictions(container, minSize, maxSize);
                    container.MaxSize = finalSize;

                    renderTarget = new RenderTargetBitmap((int)finalSize.Width, (int)finalSize.Height, 96, 96, PixelFormats.Pbgra32);

                    // render HTML into the visual
                    // NOTE(review): no explicit PerformLayout here — presumably layout happens inside
                    // MeasureHtmlByRestrictions or PerformPaint; confirm before relying on this ordering
                    DrawingVisual drawingVisual = new DrawingVisual();
                    using (DrawingContext g = drawingVisual.RenderOpen())
                    {
                        // zero max dimension means "unbounded" — painted region capped at double.MaxValue
                        container.PerformPaint(g, new Rect(new Size(maxSize.Width > 0 ? maxSize.Width : double.MaxValue, maxSize.Height > 0 ? maxSize.Height : double.MaxValue)));
                    }

                    // render visual into target bitmap
                    renderTarget.Render(drawingVisual);
                }
            }
            else
            {
                // no html: return an empty bitmap rather than null
                renderTarget = new RenderTargetBitmap(0, 0, 96, 96, PixelFormats.Pbgra32);
            }

            return BitmapFrame.Create(renderTarget);
        }

        #region Private methods

        /// <summary>
        /// Measure the size of the html by performing layout under the given restrictions.
        /// </summary>
        /// <param name="htmlContainer">the html to calculate the layout for</param>
        /// <param name="minSize">the minimal size of the rendered html (zero - not limit the width/height)</param>
        /// <param name="maxSize">the maximum size of the rendered html, if not zero and html cannot be layout within the limit it will be clipped (zero - not limit the width/height)</param>
        /// <returns>the size of the html to be rendered within the min/max limits</returns>
        private static Size MeasureHtmlByRestrictions(HtmlContainer htmlContainer, Size minSize, Size maxSize)
        {
            // use desktop created graphics to measure the HTML
            using (var mg = new GraphicsAdapter())
            {
                var sizeInt = HtmlRendererUtils.MeasureHtmlByRestrictions(mg, htmlContainer.HtmlContainerInt, Utils.Convert(minSize), Utils.Convert(maxSize));
                // round the internal (double-based) size up to whole device pixels
                return Utils.ConvertRound(sizeInt);
            }
        }

        /// <summary>
        /// Renders the specified HTML source on the specified location and max size restriction.<br/>
        /// If <paramref name="maxSize"/>.Width is zero the html will use all the required width, otherwise it will perform line
        /// wrap as specified in the html<br/>
        /// If <paramref name="maxSize"/>.Height is zero the html will use all the required height, otherwise it will clip at the
        /// given max height not rendering the html below it.<br/>
        /// Clip the graphics so the html will not be rendered outside the max height bound given.<br/>
        /// Returned is the actual width and height of the rendered html.<br/>
        /// </summary>
        /// <param name="g">Device to render with</param>
        /// <param name="html">HTML source to render</param>
        /// <param name="location">the top-left most location to start render the html at</param>
        /// <param name="maxSize">the max size of the rendered html (if height above zero it will be clipped)</param>
        /// <param name="cssData">optional: the style to use for html rendering (default - use W3 default style)</param>
        /// <param name="stylesheetLoad">optional: can be used to overwrite stylesheet resolution logic</param>
        /// <param name="imageLoad">optional: can be used to overwrite image resolution logic</param>
        /// <returns>the actual size of the rendered html</returns>
        private static Size RenderClip(DrawingContext g, string html, Point location, Size maxSize, CssData cssData, EventHandler<HtmlStylesheetLoadEventArgs> stylesheetLoad, EventHandler<HtmlImageLoadEventArgs> imageLoad)
        {
            // only clip when a height bound was requested; push/pop must stay balanced
            if (maxSize.Height > 0)
                g.PushClip(new RectangleGeometry(new Rect(location, maxSize)));

            var actualSize = RenderHtml(g, html, location, maxSize, cssData, stylesheetLoad, imageLoad);

            if (maxSize.Height > 0)
                g.Pop();

            return actualSize;
        }

        /// <summary>
        /// Renders the specified HTML source on the specified location and max size restriction.<br/>
        /// If <paramref name="maxSize"/>.Width is zero the html will use all the required width, otherwise it will perform line
        /// wrap as specified in the html<br/>
        /// If <paramref name="maxSize"/>.Height is zero the html will use all the required height, otherwise it will clip at the
        /// given max height not rendering the html below it.<br/>
        /// Returned is the actual width and height of the rendered html.<br/>
        /// </summary>
        /// <param name="g">Device to render with</param>
        /// <param name="html">HTML source to render</param>
        /// <param name="location">the top-left most location to start render the html at</param>
        /// <param name="maxSize">the max size of the rendered html (if height above zero it will be clipped)</param>
        /// <param name="cssData">optional: the style to use for html rendering (default - use W3 default style)</param>
        /// <param name="stylesheetLoad">optional: can be used to overwrite stylesheet resolution logic</param>
        /// <param name="imageLoad">optional: can be used to overwrite image resolution logic</param>
        /// <returns>the actual size of the rendered html; <see cref="Size.Empty"/> when <paramref name="html"/> is null or empty</returns>
        private static Size RenderHtml(DrawingContext g, string html, Point location, Size maxSize, CssData cssData, EventHandler<HtmlStylesheetLoadEventArgs> stylesheetLoad, EventHandler<HtmlImageLoadEventArgs> imageLoad)
        {
            Size actualSize = Size.Empty;
            if (!string.IsNullOrEmpty(html))
            {
                using (var container = new HtmlContainer())
                {
                    container.Location = location;
                    container.MaxSize = maxSize;
                    // synchronous image loading so ActualSize is final after layout
                    container.AvoidAsyncImagesLoading = true;
                    container.AvoidImagesLateLoading = true;

                    if (stylesheetLoad != null)
                        container.StylesheetLoad += stylesheetLoad;
                    if (imageLoad != null)
                        container.ImageLoad += imageLoad;

                    container.SetHtml(html, cssData);
                    container.PerformLayout();
                    // clipping (if any) was applied by the caller; paint with an unbounded rect
                    container.PerformPaint(g, new Rect(0, 0, double.MaxValue, double.MaxValue));

                    actualSize = container.ActualSize;
                }
            }
            return actualSize;
        }

        #endregion
    }
}