context
stringlengths
2.52k
185k
gt
stringclasses
1 value
//------------------------------------------------------------------------------
// <copyright file="Shape.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <owner current="true" primary="true">[....]</owner>
//------------------------------------------------------------------------------
#if ENABLEDATABINDING
using System;
using System.Xml;
using System.Xml.Schema;
using System.Xml.XPath;
using System.Collections;
using System.Diagnostics;
using System.ComponentModel;
using System.Text;

namespace System.Xml.XPath.DataBinding
{
    /// <summary>
    /// Kind of binding a <see cref="Shape"/> represents: simple content
    /// (Text, Element, Attribute), a nested table (ElementNested, Repeat),
    /// or a schema compositor group (Sequence, Choice, All).
    /// </summary>
    internal enum BindingType
    {
        Text,
        Element,
        Attribute,
        ElementNested,
        Repeat,
        Sequence,
        Choice,
        All
    }

    /// <summary>
    /// Describes a binding shape derived from an XML schema construct:
    /// the schema particles (elements/attributes) it covers, its child
    /// sub-shapes, and the <see cref="PropertyDescriptor"/>s exposed to
    /// data binding. Lists are lazily allocated; getters normalize a
    /// null list to a shared empty list.
    /// </summary>
    internal sealed class Shape
    {
        string name;
        BindingType bindingType;
        ArrayList particles;                        // XmlSchemaElement or XmlSchemaAttribute
        ArrayList subShapes;                        // child Shape instances, lazily created
        Shape nestedShape;
        PropertyDescriptor[] propertyDescriptors;   // built on first access, then cached
        XmlSchemaElement containerDecl;

        // Shared empty list returned when particles/subShapes are null.
        static object[] emptyIList = new object[0];

        public Shape(string name, BindingType bindingType)
        {
            this.name = name;
            this.bindingType = bindingType;
        }

        public string Name
        {
            get { return this.name; }
            set { this.name = value; }
        }

        public BindingType BindingType
        {
            get { return this.bindingType; }
            set { this.bindingType = value; }
        }

        public XmlSchemaElement ContainerDecl
        {
            get { return this.containerDecl; }
            set { this.containerDecl = value; }
        }

        /// <summary>True when this shape binds as a nested table (nested element, repeat, or any group).</summary>
        public bool IsNestedTable
        {
            get
            {
                switch (this.BindingType)
                {
                    case BindingType.ElementNested:
                    case BindingType.Repeat:
                    case BindingType.Sequence:
                    case BindingType.Choice:
                    case BindingType.All:
                        return true;
                    default:
                        return false;
                }
            }
        }

        /// <summary>True when this shape is a compositor group (sequence, choice, or all).</summary>
        public bool IsGroup
        {
            get
            {
                switch (this.BindingType)
                {
                    case BindingType.Sequence:
                    case BindingType.Choice:
                    case BindingType.All:
                        return true;
                    default:
                        return false;
                }
            }
        }

        /// <summary>
        /// Schema type of the single underlying particle for element/text/attribute
        /// bindings; null for group and repeat bindings.
        /// </summary>
        public XmlSchemaType SchemaType
        {
            get
            {
                switch (this.bindingType)
                {
                    case BindingType.Text:
                    case BindingType.Element:
                    case BindingType.ElementNested:
                    {
                        Debug.Assert(this.particles.Count == 1);
                        XmlSchemaElement xse = (XmlSchemaElement)this.particles[0];
                        return xse.ElementSchemaType;
                    }

                    case BindingType.Attribute:
                    {
                        Debug.Assert(this.particles.Count == 1);
                        XmlSchemaAttribute xsa = (XmlSchemaAttribute)this.particles[0];
                        return xsa.AttributeSchemaType;
                    }

                    default:
                        return null;
                }
            }
        }

        /// <summary>
        /// The single underlying element declaration for element/text bindings;
        /// falls back to the container declaration otherwise.
        /// </summary>
        public XmlSchemaElement XmlSchemaElement
        {
            get
            {
                switch (this.bindingType)
                {
                    case BindingType.Text:
                    case BindingType.Element:
                    case BindingType.ElementNested:
                    {
                        Debug.Assert(this.particles.Count == 1);
                        return (XmlSchemaElement)this.particles[0];
                    }

                    default:
                        return this.containerDecl;
                }
            }
        }

        /// <summary>Particles covered by this shape; never null (empty list when none).</summary>
        public IList Particles
        {
            get
            {
                if (null == this.particles)
                    return emptyIList;
                return this.particles;
            }
        }

        /// <summary>Child shapes; never null (empty list when none).</summary>
        public IList SubShapes
        {
            get
            {
                if (null == this.subShapes)
                    return emptyIList;
                return this.subShapes;
            }
        }

        public Shape SubShape(int i)
        {
            return (Shape)SubShapes[i];
        }

        public Shape NestedShape
        {
            get
            {
                //Debug.Assert(this.bindingType == BindingType.ElementNested);
                return this.nestedShape;
            }
            set { this.nestedShape = value; }
        }

        /// <summary>Qualified name of the attribute particle; only valid for Attribute bindings.</summary>
        public XmlQualifiedName AttributeName
        {
            get
            {
                Debug.Assert(this.bindingType == BindingType.Attribute);
                XmlSchemaAttribute xsa = (XmlSchemaAttribute)this.particles[0];
                return xsa.QualifiedName;
            }
        }

        /// <summary>Releases particle and sub-shape lists (cached descriptors are left alone).</summary>
        public void Clear()
        {
            if (this.subShapes != null)
            {
                this.subShapes.Clear();
                this.subShapes = null;
            }
            if (this.particles != null)
            {
                this.particles.Clear();
                this.particles = null;
            }
        }

        public void AddParticle(XmlSchemaElement elem)
        {
            if (null == this.particles)
                this.particles = new ArrayList();
            Debug.Assert(this.bindingType != BindingType.Attribute);
            this.particles.Add(elem);
        }

        public void AddParticle(XmlSchemaAttribute elem)
        {
            // An attribute shape holds exactly one particle.
            Debug.Assert(this.bindingType == BindingType.Attribute);
            Debug.Assert(this.particles == null);
            this.particles = new ArrayList();
            this.particles.Add(elem);
        }

        /// <summary>
        /// Appends a child shape and hoists its element particles into this
        /// shape's particle list (so particle matching works at this level).
        /// </summary>
        public void AddSubShape(Shape shape)
        {
            if (null == this.subShapes)
                this.subShapes = new ArrayList();
            this.subShapes.Add(shape);
            foreach (object p in shape.Particles)
            {
                XmlSchemaElement xse = p as XmlSchemaElement;
                if (null != xse)
                    AddParticle(xse);
            }
        }

        /// <summary>
        /// Inserts an attribute sub-shape at a fixed position. Unlike
        /// <see cref="AddSubShape"/>, particles are intentionally not hoisted
        /// (attributes are not element particles).
        /// </summary>
        public void AddAttrShapeAt(Shape shape, int pos)
        {
            if (null == this.subShapes)
                this.subShapes = new ArrayList();
            this.subShapes.Insert(pos, shape);
        }

        public string[] SubShapeNames()
        {
            string[] names = new string[SubShapes.Count];
            for (int i = 0; i < SubShapes.Count; i++)
                names[i] = this.SubShape(i).Name;
            return names;
        }

        /// <summary>
        /// Property descriptors for data binding, built lazily per binding type:
        /// one descriptor for simple/repeat shapes, the nested shape's descriptors
        /// for ElementNested, and one per sub-shape for groups.
        /// </summary>
        public PropertyDescriptor[] PropertyDescriptors
        {
            get
            {
                if (null == this.propertyDescriptors)
                {
                    PropertyDescriptor[] descs;
                    switch (this.BindingType)
                    {
                        case BindingType.Element:
                        case BindingType.Text:
                        case BindingType.Attribute:
                        case BindingType.Repeat:
                            descs = new PropertyDescriptor[1];
                            descs[0] = new XPathNodeViewPropertyDescriptor(this);
                            break;

                        case BindingType.ElementNested:
                            descs = this.nestedShape.PropertyDescriptors;
                            break;

                        case BindingType.Sequence:
                        case BindingType.Choice:
                        case BindingType.All:
                            descs = new PropertyDescriptor[SubShapes.Count];
                            for (int i = 0; i < descs.Length; i++)
                            {
                                descs[i] = new XPathNodeViewPropertyDescriptor(this, this.SubShape(i), i);
                            }
                            break;

                        default:
                            throw new NotSupportedException();
                    }
                    this.propertyDescriptors = descs;
                }
                return this.propertyDescriptors;
            }
        }

        /// <summary>Index of the first sub-shape with the given name, or -1.</summary>
        public int FindNamedSubShape(string name)
        {
            for (int i = 0; i < SubShapes.Count; i++)
            {
                Shape shape = SubShape(i);
                if (shape.Name == name)
                    return i;
            }
            return -1;
        }

        /// <summary>Index of the first sub-shape whose particle list contains the given particle, or -1.</summary>
        public int FindMatchingSubShape(object particle)
        {
            for (int i = 0; i < SubShapes.Count; i++)
            {
                Shape shape = SubShape(i);
                if (shape.IsParticleMatch(particle))
                    return i;
            }
            return -1;
        }

        /// <summary>True if <paramref name="particle"/> is one of this shape's particles (reference equality).</summary>
        public bool IsParticleMatch(object particle)
        {
            // Fix: the particle list is lazily allocated, so a shape that never
            // received a particle (e.g. an empty group probed via
            // FindMatchingSubShape) previously threw NullReferenceException here.
            // No particles means no match.
            if (null == this.particles)
                return false;
            for (int i = 0; i < this.particles.Count; i++)
            {
                if (particle == this.particles[i])
                    return true;
            }
            return false;
        }

#if DEBUG
        public string DebugDump()
        {
            StringBuilder sb = new StringBuilder();
            DebugDump(sb, "");
            return sb.ToString();
        }

        // Recursively renders this shape and its sub-shapes, one per line,
        // indenting two spaces per nesting level.
        void DebugDump(StringBuilder sb, String indent)
        {
            sb.AppendFormat("{0}{1} '{2}'", indent, this.BindingType.ToString(), this.Name);
            if (this.subShapes != null)
            {
                sb.AppendLine(" {");
                string subindent = String.Concat(indent, "  ");
                foreach (Shape s in this.SubShapes)
                {
                    s.DebugDump(sb, subindent);
                }
                sb.Append(indent);
                sb.Append('}');
            }
            sb.AppendLine();
        }
#endif
    }
}
#endif
using UnityEngine; using System.Collections; using System.Linq; using System; #pragma warning disable 618 [AddComponentMenu("LightSaber/Ray System")] public class LightSaber_Arc : MonoBehaviour { public LineRendererInfo[] arcs; public Camera myCamera; public float lifetime; public ArcsPlaybackType playbackType = ArcsPlaybackType.once; public bool playbackMessages = false; public GameObject messageReciever; public float elapsedTime = 0; public bool playBackward = false; public bool freeze = false; public float sizeMultiplier = 1; public InterpolationType interpolation = InterpolationType.CatmullRom_Splines; public EaseInOutOptions easeInOutOptions; public Transform[] shapeTransforms; public Vector3[] shapePoints; public bool[] transformsDestructionFlags; public bool closedShape; public Vector3 oscillationNormal = Vector3.up; public bool localSpaceOcillations = false; public float reinitThreshold = 0.5f; public int performancePriority = 0; public bool customSorting; public string sortingLayerName; public int sortingOrder; const int maxCalcDetalization = 10; protected Vector3[] resultingShape; protected int oldShapeTransformsSize = 0; protected float overlap = 0; protected float[] noiseOffsets; protected float[] noiseScale; protected Vector3[,] arcPoints; protected Vector3[,] shiftVectors; protected Vector3[,] arcTangents; protected Quaternion[,] arcTangentsShift; protected Vector3[] shapeTangents; protected Vector3[][] vertices; protected Vector3[][] oldVertices; protected Transform[,] lightsTransforms; protected Light[,] lights; protected LineRenderer[] lrends; protected int[] segmNums; protected int[] vertexCount; protected int[] oldVertexCount; protected int[] lightsCount; protected float shapeLength; protected float oldShapeLength; protected float[] shapeKeyLocations; protected float[] shapeKeyNormalizedLocations; protected float[] maxStartWidth; protected float[] maxEndWidth; protected float[] coreCoefs; protected Vector3 oscNormal; protected LensFlare 
startFlare; protected LensFlare endFlare; //protected Mesh[][] emitterMeshes; protected ParticleSystem[][] emitterSystems; protected LightSaber_EmitterDestructor[][] emitterDestructors; public float ShapeLength { get { return shapeLength; } } public int PerformancePriority { get { return performancePriority; } } public enum PropagationType { instant = 0, globalSpaceSpeed = 1, localTimeCurve = 2 } public enum ArcsPlaybackType { once = 0, loop = 1, pingpong = 2, clamp = 3 } public enum InterpolationType { CatmullRom_Splines = 0, Linear = 1 } public enum SpatialNoiseType { TangentRandomization = 0, CubicRandomization = 1, BrokenTangentRandomization = 2 } public enum OscillationType { sine_wave = 0, rectangular = 1, zigzag = 2 } public enum FadeTypes { none = 0, worldspacePoint = 1, relativePoint = 2 } [System.Serializable] public class ArcNestingOptions { public bool Nested = false; public int parentArcIndex = 0; public bool combinedNesting = false; public int secondaryArcIndex = 0; public float nestingCoef = 0; } [System.Serializable] public class EaseInOutOptions { public bool useEaseInOut; public AnimationCurve easeInOutCurve; public float distance; } [System.Serializable] public class ArcPropagationOptions { public PropagationType propagationType = PropagationType.instant; public float globalSpeed = 1.0f; public AnimationCurve timeCurve; } [System.Serializable] public class ArcColorOptions { public Gradient startColor; public bool onlyStartColor = true; public Gradient endColor; public Gradient coreColor; public AnimationCurve coreCurve; public float coreJitter; public FadeTypes fade; public float fadePoint; } [System.Serializable] public class ArcSizeOptions { public InterpolationType interpolation = InterpolationType.CatmullRom_Splines; public AnimationCurve startWidthCurve; public bool onlyStartWidth = true; public AnimationCurve endWidthCurve; public float segmentLength = 10; public bool snapSegmentsToShape = false; public int numberOfSmoothingSegments = 0; 
public int minNumberOfSegments = 1; } [System.Serializable] public class TextureAnimationOptions { public Texture shapeTexture; public Texture noiseTexture; public AnimationCurve noiseCoef; public bool animateTexture; public float tileSize; public float noiseSpeed; //public float noisePower; } [System.Serializable] public class ArcSpatialNoiseOptions { public SpatialNoiseType type = SpatialNoiseType.TangentRandomization; public float scale = 0; public float scaleMovement = 0; public float resetFrequency = 0; public int invisiblePriority; } [System.Serializable] public class ArcLightsOptions { public bool lights = false; public float lightsRange = 5; public float lightsIntensityMultiplyer = 5; public LightRenderMode renderMode = LightRenderMode.Auto; public int priority; } [System.Serializable] public class OscillationInfo { public OscillationType type = OscillationType.sine_wave; public bool swirl = false; public float planeRotation; public float wavelength; public bool integerPeriods; public WavelengthMetric metric = WavelengthMetric.globalSpace; public float amplitude; public float phase; public float phaseMovementSpeed; public int invisiblePriority; } [System.Serializable] public class ParticleEmissionOptions { public bool emit = false; public GameObject shurikenPrefab; public bool emitAfterRayDeath = false; public float particlesPerMeter = 0; public AnimationCurve emissionDuringLifetime; public AnimationCurve radiusCoefDuringLifetime; public AnimationCurve directionDuringLifetime; public bool startColorByRay; public ParticleRandomizationOptions randomizationOptions; } [System.Serializable] public class ParticleRandomizationOptions { public float sizeRndCoef = 0; public float velocityRndCoef = 0; public float angularVelocityRndCoef = 0; public float rotationRndCoef = 0; public float lifetimeRndCoef = 0; } public enum WavelengthMetric { globalSpace = 0, localSpace = 1 } [System.Serializable] public class ArcFlaresInfo { public FlareInfo startFlare; public 
FlareInfo endFlare; public bool useNoiseMask; public AnimationCurve noiseMaskPowerCurve; } [System.Serializable] public class FlareInfo { public bool enabled = false; public Flare flare; public float fadeSpeed = 50; public float maxBrightness; public float maxBrightnessDistance; public float minBrightness; public float minBrightnessDistance; public LayerMask ignoreLayers = (LayerMask)6; } [System.Serializable] public class ShiftCurveInfo { public AnimationCurve shapeCurve; public float curveWidth; public float planeRotation; public WavelengthMetric metric = WavelengthMetric.globalSpace; public float curveLength; public bool notAffectedByEaseInOut; public int invisiblePriority; } [System.Serializable] public class LineRendererInfo { public Material material; public ArcColorOptions colorOptions; public ArcSizeOptions sizeOptions; public ArcPropagationOptions propagationOptions; public ParticleEmissionOptions[] emissionOptions; public ArcSpatialNoiseOptions[] spatialNoise; public TextureAnimationOptions textureOptions; public ArcLightsOptions lightsOptions; public ArcFlaresInfo flaresOptions; public ArcNestingOptions nesting; public OscillationInfo[] oscillations; public ShiftCurveInfo[] shapeCurves; } public static Vector3 HermiteCurvePoint(float t,Vector3 p0, Vector3 m0, Vector3 p1, Vector3 m1) { float tsq = t*t; float tcub = t*t*t; return (2*tcub - 3*tsq + 1) * p0 + (tcub - 2*tsq + t) * m0 + (-2*tcub + 3*tsq) * p1 + (tcub - tsq) * m1; } public void FillResultingShape() { if (resultingShape == null) resultingShape = new Vector3[0]; if (shapePoints != null && shapeTransforms != null) { if (Mathf.Max(shapeTransforms.Length,shapePoints.Length) != resultingShape.Length) Array.Resize(ref resultingShape,Mathf.Max(shapeTransforms.Length,shapePoints.Length)); for (int i = 0; i < resultingShape.Length; i++) { if ((shapeTransforms.Length > i) && (shapeTransforms[i] != null)) resultingShape[i] = shapeTransforms[i].position; else { if(i < shapePoints.Length) resultingShape[i] = 
shapePoints[i]; } } } else if (shapeTransforms != null) { if (shapeTransforms.Length != resultingShape.Length) Array.Resize(ref resultingShape,shapeTransforms.Length); for (int i = 0; i < resultingShape.Length; i++) resultingShape[i] = shapeTransforms[i].position; } else if (shapePoints != null) { if (shapePoints.Length != resultingShape.Length) Array.Resize(ref resultingShape,shapePoints.Length); for (int i = 0; i < resultingShape.Length; i++) resultingShape[i] = shapePoints[i]; } } public static Material GetDefaultMaterial() { return new Material(Shader.Find("LightSaber/Additive_core_higlight")); } public void SetPerformancePriority(int newPriority) { if (lightsCount != null && performancePriority != newPriority) { performancePriority = newPriority; for (int n = 0; n < arcs.Length; n++) { if (arcs[n].lightsOptions.lights && (lightsCount[n] > 0)) { for (int i = 0; i < lightsCount[n]; i++) lights[n,i].enabled = arcs[n].lightsOptions.priority <= performancePriority; } } } } protected Vector3 CalculateCurveShift(Vector3 direction, float position, int arcInd) { Vector3 sumShift = Vector3.zero; foreach (ShiftCurveInfo curv in arcs[arcInd].shapeCurves) { if (lrends[arcInd].isVisible || curv.invisiblePriority <= performancePriority) { float shift; if (curv.metric == WavelengthMetric.localSpace) shift = curv.shapeCurve.Evaluate(position/shapeLength) * curv.curveWidth; else shift = curv.shapeCurve.Evaluate(position/curv.curveLength) * curv.curveWidth; Quaternion rot; rot = Quaternion.AngleAxis(curv.planeRotation,direction); Vector3 normal = Vector3.Cross(direction,oscNormal); if (curv.notAffectedByEaseInOut) sumShift += rot * normal.normalized * shift; else sumShift += rot * normal.normalized * shift * GetShiftCoef(position/shapeLength); } } return sumShift * sizeMultiplier; } protected Vector3 CalculateOscillationShift(Vector3 direction, float position, int arcInd) { Vector3 sumShift = Vector3.zero; foreach (OscillationInfo osc in arcs[arcInd].oscillations) { if 
(lrends[arcInd].isVisible || osc.invisiblePriority <= performancePriority) { float wavelength = osc.wavelength * sizeMultiplier; float effectiveWavelength = wavelength; if (osc.integerPeriods && osc.metric == WavelengthMetric.globalSpace) effectiveWavelength = shapeLength/Mathf.Ceil(shapeLength/wavelength); if (osc.integerPeriods && osc.metric == WavelengthMetric.localSpace) effectiveWavelength = 1/Mathf.Ceil(1/wavelength); float angle; if (osc.metric == WavelengthMetric.globalSpace) angle = osc.phase*Mathf.Deg2Rad + (position - effectiveWavelength*((int)(position/effectiveWavelength)))/effectiveWavelength * Mathf.PI * 2; else angle = osc.phase*Mathf.Deg2Rad + (position/shapeLength - effectiveWavelength*((int)(position/shapeLength/effectiveWavelength)))/effectiveWavelength * Mathf.PI * 2; float shift; switch (osc.type) { case OscillationType.sine_wave: shift = osc.amplitude * Mathf.Sin (angle); break; case OscillationType.rectangular: if ((angle*Mathf.Rad2Deg)%360 > 180) shift = -osc.amplitude; else shift = osc.amplitude; break; case OscillationType.zigzag: shift = osc.amplitude * (Mathf.Abs(((angle*Mathf.Rad2Deg)%180)/45-2)-1); break; default: shift = 0; break; } Quaternion rot; rot = Quaternion.AngleAxis(osc.planeRotation,direction); Vector3 normal = Vector3.Cross(direction,oscNormal); sumShift += rot * normal.normalized * shift; if (osc.swirl) { if (osc.metric == WavelengthMetric.globalSpace) angle = (osc.phase+90)*Mathf.Deg2Rad + (position - effectiveWavelength*((int)(position/effectiveWavelength)))/effectiveWavelength * Mathf.PI * 2; else angle = (osc.phase+90)*Mathf.Deg2Rad + (position/shapeLength - effectiveWavelength*((int)(position/shapeLength/effectiveWavelength)))/effectiveWavelength * Mathf.PI * 2; switch (osc.type) { case OscillationType.sine_wave: shift = osc.amplitude * Mathf.Sin (angle); break; case OscillationType.rectangular: if ((angle*Mathf.Rad2Deg)%360 > 180) shift = -osc.amplitude; else shift = osc.amplitude; break; case 
OscillationType.zigzag: shift = osc.amplitude * (Mathf.Abs(((angle*Mathf.Rad2Deg)%180)/45-2)-1); break; default: shift = 0; break; } rot = Quaternion.AngleAxis(osc.planeRotation+90,direction); sumShift += rot * normal.normalized * shift; } } } return sumShift * sizeMultiplier; } protected void CalculateShape() { FillResultingShape(); if (oldShapeTransformsSize != resultingShape.Length) { SetShapeArrays(); } if (closedShape) { shapeLength = 0; for (int i = 0; i < resultingShape.Length-1; i++) { shapeKeyLocations[i] = shapeLength; shapeLength += (resultingShape[i] - resultingShape[i+1]).magnitude; } shapeKeyLocations[resultingShape.Length-1] = shapeLength; float closeLoopLength = (resultingShape[0] - resultingShape[resultingShape.Length - 1]).magnitude; shapeLength += closeLoopLength; shapeKeyLocations[resultingShape.Length] = shapeLength; shapeLength += overlap; } else { shapeLength = 0; for (int i = 0; i < resultingShape.Length-1; i++) { shapeKeyLocations[i] = shapeLength; shapeLength += (resultingShape[i] - resultingShape[i+1]).magnitude; } shapeKeyLocations[resultingShape.Length-1] = shapeLength; } for (int i = 0; i < shapeKeyLocations.Length; i++) shapeKeyNormalizedLocations[i] = shapeKeyLocations[i]/shapeLength; switch (interpolation) { case InterpolationType.CatmullRom_Splines: if (closedShape) { for (int i = 0; i < resultingShape.Length; i++) { shapeTangents[i] = (resultingShape[AddCyclicShift(i,1,resultingShape.Length-1)] - resultingShape[AddCyclicShift(i,-1,resultingShape.Length-1)])/2; } } else { shapeTangents[0] = resultingShape[1] - resultingShape[0]; shapeTangents[resultingShape.Length-1] = resultingShape[resultingShape.Length-1] - resultingShape[resultingShape.Length-2]; for (int i = 1; i < resultingShape.Length-1; i++) { shapeTangents[i] = (resultingShape[i+1] - resultingShape[i-1])/2; } } break; } if (oldShapeLength == 0 || Mathf.Abs((oldShapeLength-shapeLength)/shapeLength) > reinitThreshold) { Initialize(); } } protected int AddCyclicShift(int 
a,int b,int size) { int s = a+b; if (s < 0) return s + size + 1; if (s > size) return s - size - 1; return s; } protected float AddCyclicShift(float a,float b,float size) { float s = a+b; if (s < 0) return s + size; if (s > size) return s - size; return s; } protected Quaternion RandomXYQuaternion(float angle) { if (angle > 0) return Quaternion.Euler(new Vector3(UnityEngine.Random.Range(-angle,angle), UnityEngine.Random.Range(-angle,angle), 0)); else return Quaternion.identity; } protected void SetArcShape(int n) { float overlapCeof = 1 + overlap/shapeLength; int closeShapeShift = 1; for (int nI = 0; nI < arcs[n].spatialNoise.Length; nI++) { switch (arcs[n].spatialNoise[nI].type) { case SpatialNoiseType.CubicRandomization: if (UnityEngine.Random.value > arcs[n].spatialNoise[nI].resetFrequency * Time.deltaTime) { closeShapeShift = 1; if (closedShape) closeShapeShift = 0; for (int i = 0; i < segmNums[n] + closeShapeShift; i++) { shiftVectors[n,i] += RandomVector3(arcs[n].spatialNoise[nI].scaleMovement*Time.deltaTime*60) * GetShiftCoef((float)i/segmNums[n]); } } else { ResetArcNoise(n,nI); } break; case SpatialNoiseType.TangentRandomization: if (UnityEngine.Random.value > arcs[n].spatialNoise[nI].resetFrequency * Time.deltaTime) { closeShapeShift = 1; if (closedShape) closeShapeShift = 0; for (int i = 0; i < segmNums[n] + closeShapeShift; i++) { arcTangentsShift[n,i*2] = arcTangentsShift[n,i*2] * RandomXYQuaternion(arcs[n].spatialNoise[nI].scaleMovement * GetShiftCoef((float)i/segmNums[n])); arcTangentsShift[n,i*2+1] = arcTangentsShift[n,i*2]; } } else { ResetArcNoise(n,nI); } break; case SpatialNoiseType.BrokenTangentRandomization: if (UnityEngine.Random.value > arcs[n].spatialNoise[nI].resetFrequency * Time.deltaTime) { closeShapeShift = 1; if (closedShape) closeShapeShift = 0; for (int i = 0; i < segmNums[n] + closeShapeShift; i++) { arcTangentsShift[n,i*2] = arcTangentsShift[n,i*2] * RandomXYQuaternion(arcs[n].spatialNoise[nI].scaleMovement * 
GetShiftCoef((float)i/segmNums[n])); arcTangentsShift[n,i*2+1] = arcTangentsShift[n,i*2+1] * RandomXYQuaternion(arcs[n].spatialNoise[nI].scaleMovement * GetShiftCoef((float)i/segmNums[n])); } } else { ResetArcNoise(n,nI); } break; } } closeShapeShift = 1; if (closedShape) closeShapeShift = 0; if (arcs[n].nesting.Nested && !arcs[n].nesting.combinedNesting) for (int i = 0; i < segmNums[n] + closeShapeShift; i++) arcPoints[n,i] = GetArcPoint((float)i/segmNums[n]*overlapCeof,arcs[n].nesting.parentArcIndex) + shiftVectors[n,i] * sizeMultiplier; else if (arcs[n].nesting.Nested && arcs[n].nesting.combinedNesting) for (int i = 0; i < segmNums[n] + closeShapeShift; i++) arcPoints[n,i] = Vector3.Lerp(GetArcPoint((float)i/segmNums[n]*overlapCeof,arcs[n].nesting.parentArcIndex), GetArcPoint(Mathf.Clamp01((float)i/segmNums[n]*overlapCeof-0.001f),arcs[n].nesting.secondaryArcIndex), arcs[n].nesting.nestingCoef) + shiftVectors[n,i] * sizeMultiplier; else for (int i = 0; i < segmNums[n] + closeShapeShift; i++) arcPoints[n,i] = CalcShapePoint((float)i/segmNums[n]*overlapCeof) + shiftVectors[n,i] * sizeMultiplier; switch (arcs[n].sizeOptions.interpolation) { case InterpolationType.CatmullRom_Splines: if (closedShape) { for (int i = 0; i < segmNums[n]; i++) { arcTangents[n,i] = (arcPoints[n,AddCyclicShift(i,1,segmNums[n]-1)] - arcPoints[n,AddCyclicShift(i,-1,segmNums[n]-1)])/2; } } else { arcTangents[n,0] = arcPoints[n,1] - arcPoints[n,0]; arcTangents[n,segmNums[n]] = arcPoints[n,segmNums[n]] - arcPoints[n,segmNums[n]-1]; for (int i = 1; i < segmNums[n]; i++) { arcTangents[n,i] = (arcPoints[n,i+1] - arcPoints[n,i-1])/2; } } break; } } protected Vector3 CalcArcPoint(float point,int n) { int st = 0; int end = 1; if (closedShape) { st = Mathf.FloorToInt(point*segmNums[n]); if (point == 1) st -= 1; if (st == segmNums[n]-1) end = 0; else end = st + 1; } else { st = Mathf.FloorToInt(point*segmNums[n]); if (point != 1) end = st + 1; else { end = st; st -= 1; } } switch 
(arcs[n].sizeOptions.interpolation) { case InterpolationType.CatmullRom_Splines: return HermiteCurvePoint(point*segmNums[n] - st,arcPoints[n,st],arcTangentsShift[n,st*2]*arcTangents[n,st],arcPoints[n,end],arcTangentsShift[n,end*2+1]*arcTangents[n,end]); //break; case InterpolationType.Linear: return arcPoints[n,st] + (arcPoints[n,end] - arcPoints[n,st])*(point*segmNums[n] - st); //break; default: return arcPoints[n,st] + (arcPoints[n,end] - arcPoints[n,st])*(point*segmNums[n] - st); //break; } } public Vector3 CalcShapePoint(float point) { //point = PointShift (point); float pos = point * shapeLength; int stTr = 0; int endTr = 1; float localPos = 0; for (int i = 0; i < shapeKeyLocations.Length-1; i++) { if (pos > shapeKeyLocations[i] && pos <= shapeKeyLocations[i+1]) { stTr = i; endTr = i+1; localPos = 1 - (shapeKeyLocations[i+1]-pos)/(shapeKeyLocations[i+1]-shapeKeyLocations[i]); break; } } if (closedShape && endTr == shapeKeyLocations.Length-1) { stTr = resultingShape.Length-1; endTr = 0; } switch (interpolation) { case InterpolationType.CatmullRom_Splines: return HermiteCurvePoint(localPos,resultingShape[stTr],shapeTangents[stTr],resultingShape[endTr],shapeTangents[endTr]); case InterpolationType.Linear: return resultingShape[stTr] + (resultingShape[endTr] - resultingShape[stTr])*localPos; } return Vector3.zero; } public Vector3 GetArcPoint(float point,int arcIndex) { float pos = point * (vertexCount[arcIndex]-1); int ind1 = Mathf.Clamp(Mathf.FloorToInt(pos),0,vertexCount[arcIndex]-1); int ind2 = Mathf.Clamp(Mathf.CeilToInt(pos),0,vertexCount[arcIndex]-1); float koef = pos - Mathf.Floor(pos); Vector3 vert1; Vector3 vert2; if (vertices[arcIndex][ind1] == Vector3.zero) vert1 = CalcArcPoint(point,arcIndex); else vert1 = vertices[arcIndex][ind1]; if (vertices[arcIndex][ind2] == Vector3.zero) vert2 = CalcArcPoint(point,arcIndex); else vert2 = vertices[arcIndex][ind2]; return vert1*(1-koef) + vert2*koef; } public Vector3 GetOldArcPoint(float point,int arcIndex) { 
float pos = point * (oldVertexCount[arcIndex]-1); int ind1 = Mathf.Clamp(Mathf.FloorToInt(pos),0,oldVertexCount[arcIndex]-1); int ind2 = Mathf.Clamp(Mathf.CeilToInt(pos),0,oldVertexCount[arcIndex]-1); float koef = pos - Mathf.Floor(pos); Vector3 oldVert1; Vector3 oldVert2; if (oldVertices[arcIndex][ind1] == Vector3.zero) oldVert1 = CalcArcPoint(point,arcIndex); else oldVert1 = oldVertices[arcIndex][ind1]; if (oldVertices[arcIndex][ind2] == Vector3.zero) oldVert2 = CalcArcPoint(point,arcIndex); else oldVert2 = oldVertices[arcIndex][ind2]; return oldVert1*(1-koef) + oldVert2*koef; } public float GetShiftCoef(float point) { if (easeInOutOptions.useEaseInOut) { float length = point*shapeLength; if (length > easeInOutOptions.distance/2 && length < shapeLength - easeInOutOptions.distance/2) return easeInOutOptions.easeInOutCurve.Evaluate(0.5f); else { if (length < easeInOutOptions.distance/2) return easeInOutOptions.easeInOutCurve.Evaluate(length/easeInOutOptions.distance); else return easeInOutOptions.easeInOutCurve.Evaluate(1-(shapeLength - length)/easeInOutOptions.distance); } } else return 1; } public void ResetArc(int n) { float point; for (int i = 0; i < arcs[n].spatialNoise.Length; i++) { ResetArcNoise(n,i); } /* for (int i = 0; i <= segmNums[n]; i++) { point = (float)i/segmNums[n]; if (arcs[n].nesting.Nested) arcPoints[n,i] = GetArcPoint(point,arcs[n].nesting.parentArcIndex) + shiftVectors[n,i]; else arcPoints[n,i] = CalcShapePoint(point) + shiftVectors[n,i]; } */ if (arcs[n].nesting.Nested && !arcs[n].nesting.combinedNesting) { for (int i = 0; i < segmNums[n]; i++) { point = (float)i/segmNums[n]; arcPoints[n,i] = GetArcPoint(point,arcs[n].nesting.parentArcIndex) + shiftVectors[n,i] * sizeMultiplier; } } else if (arcs[n].nesting.Nested && arcs[n].nesting.combinedNesting) { for (int i = 0; i < segmNums[n]; i++) { point = (float)i/segmNums[n]; arcPoints[n,i] = Vector3.Lerp(GetArcPoint(point,arcs[n].nesting.parentArcIndex), 
GetArcPoint(Mathf.Clamp01(point-0.001f),arcs[n].nesting.secondaryArcIndex), arcs[n].nesting.nestingCoef) + shiftVectors[n,i] * sizeMultiplier; } } else { for (int i = 0; i < segmNums[n]; i++) { point = (float)i/segmNums[n]; arcPoints[n,i] = CalcShapePoint(point) + shiftVectors[n,i] * sizeMultiplier; } } } public void ResetArcNoise(int n, int noiseInd) { switch (arcs[n].spatialNoise[noiseInd].type) { case SpatialNoiseType.CubicRandomization: for (int i = 0; i <= segmNums[n]; i++) shiftVectors[n,i] = RandomVector3(arcs[n].spatialNoise[noiseInd].scale) * GetShiftCoef((float)i/segmNums[n]); break; case SpatialNoiseType.TangentRandomization: for (int i = 0; i <= segmNums[n]; i++) { arcTangentsShift[n,i*2] = RandomXYQuaternion(arcs[n].spatialNoise[noiseInd].scale * GetShiftCoef((float)i/segmNums[n])); arcTangentsShift[n,i*2+1] = arcTangentsShift[n,i*2]; } break; case SpatialNoiseType.BrokenTangentRandomization: for (int i = 0; i <= segmNums[n]; i++) { arcTangentsShift[n,i*2] = RandomXYQuaternion(arcs[n].spatialNoise[noiseInd].scale * GetShiftCoef((float)i/segmNums[n])); arcTangentsShift[n,i*2+1] = RandomXYQuaternion(arcs[n].spatialNoise[noiseInd].scale * GetShiftCoef((float)i/segmNums[n])); } break; } } protected float GetFlareBrightness(Vector3 cameraPosition,Vector3 flarePosition, FlareInfo flInfo, float multiplier = 1) { float distance = Mathf.Clamp((cameraPosition - flarePosition).magnitude,flInfo.maxBrightnessDistance,flInfo.minBrightnessDistance) - flInfo.maxBrightnessDistance; return Mathf.Lerp (flInfo.maxBrightness,flInfo.minBrightness,distance / (flInfo.minBrightnessDistance - flInfo.maxBrightnessDistance)) * multiplier; } protected void SetFlares(int n) { float multiplier = 1; if (arcs[n].flaresOptions.startFlare.enabled) { startFlare.transform.position = resultingShape[0]; if (arcs[n].flaresOptions.useNoiseMask) multiplier = arcs[n].flaresOptions.noiseMaskPowerCurve.Evaluate(noiseOffsets[n]); startFlare.brightness = 
GetFlareBrightness(myCamera.transform.position,resultingShape[0],arcs[n].flaresOptions.startFlare, arcs[n].sizeOptions.startWidthCurve.Evaluate(elapsedTime/lifetime)/maxStartWidth[n]) * multiplier; startFlare.color = arcs[n].colorOptions.startColor.Evaluate(elapsedTime/lifetime); } if (arcs[n].flaresOptions.endFlare.enabled) { endFlare.transform.position = resultingShape[resultingShape.Length-1]; if (arcs[n].flaresOptions.useNoiseMask) multiplier = arcs[n].flaresOptions.noiseMaskPowerCurve.Evaluate(AddCyclicShift(noiseScale[n]-Mathf.Floor(noiseScale[n]),noiseOffsets[n],1)); if (arcs[n].sizeOptions.onlyStartWidth) endFlare.brightness = GetFlareBrightness(myCamera.transform.position,resultingShape[resultingShape.Length - 1],arcs[n].flaresOptions.endFlare, arcs[n].sizeOptions.startWidthCurve.Evaluate(elapsedTime/lifetime)/maxStartWidth[n]) * multiplier; else endFlare.brightness = GetFlareBrightness(myCamera.transform.position,resultingShape[resultingShape.Length - 1],arcs[n].flaresOptions.endFlare, arcs[n].sizeOptions.endWidthCurve.Evaluate(elapsedTime/lifetime)/maxEndWidth[n]) * multiplier; if (arcs[n].colorOptions.onlyStartColor) endFlare.color = arcs[n].colorOptions.startColor.Evaluate(elapsedTime/lifetime); else endFlare.color = arcs[n].colorOptions.endColor.Evaluate(elapsedTime/lifetime); } } protected void Initialize () { oldShapeLength = shapeLength; bool anyLights = false; for(int n = 0; n < arcs.Length; n++) { //Particle emitter initialization for (int q = 0; q < arcs[n].emissionOptions.Length; q++) { if (emitterSystems[n][q] == null && arcs[n].emissionOptions[q].shurikenPrefab != null) { //GameObject partGameObject = (GameObject)GameObject.Instantiate(arcs[n].emissionOptions[q].shurikenPrefab); GameObject partGameObject = arcs[n].emissionOptions[q].shurikenPrefab.InstantiateFromPool(); partGameObject.name = "EmitterObject "+gameObject.name+" "+n.ToString()+","+q.ToString(); emitterSystems[n][q] = partGameObject.GetComponent<ParticleSystem>(); //if 
(emitterSystems[n][q].enableEmission) if (emitterSystems[n][q].emission.enabled) { //emitterSystems[n][q].enableEmission = false; var em = emitterSystems[n][q].emission; em.enabled = false; } if (!arcs[n].emissionOptions[q].emitAfterRayDeath) partGameObject.transform.parent = transform; else { emitterDestructors[n][q] = partGameObject.AddComponent<LightSaber_EmitterDestructor>(); emitterDestructors[n][q].partSystem = emitterSystems[n][q]; emitterDestructors[n][q].enabled = false; } partGameObject.transform.position = transform.position; partGameObject.transform.rotation = transform.rotation; } } //Lights initialization if (arcs[n].lightsOptions.lights) { for (int i = 0; i < lightsCount[n]; i++) { Destroy(lights[n,i].gameObject); } } anyLights |= arcs[n].lightsOptions.lights; lightsCount[n] = Mathf.Max ((int)(shapeLength * 2 / arcs[n].lightsOptions.lightsRange + 1),2); //Segment and vertex initialization segmNums[n] = Mathf.Max((int)(shapeLength / (arcs[n].sizeOptions.segmentLength * sizeMultiplier))+arcs[n].sizeOptions.minNumberOfSegments,2); vertexCount[n] = segmNums[n]*(arcs[n].sizeOptions.numberOfSmoothingSegments+1)+1; oldVertexCount[n] = vertexCount[n]; oldVertices[n] = new Vector3[vertexCount[n]]; vertices[n] = new Vector3[vertexCount[n]]; lrends[n].SetVertexCount(vertexCount[n]); //Flares placing if (arcs[n].flaresOptions.startFlare.enabled && startFlare == null) { GameObject obj = new GameObject(gameObject.name + "_Start_flare"); obj.transform.parent = transform; startFlare = obj.gameObject.AddComponent<LensFlare>(); startFlare.flare = arcs[n].flaresOptions.startFlare.flare; startFlare.fadeSpeed = arcs[n].flaresOptions.startFlare.fadeSpeed; } if (arcs[n].flaresOptions.endFlare.enabled && endFlare == null) { GameObject obj = new GameObject(gameObject.name + "_End_flare"); obj.transform.parent = transform; endFlare = obj.gameObject.AddComponent<LensFlare>(); endFlare.flare = arcs[n].flaresOptions.endFlare.flare; endFlare.fadeSpeed = 
arcs[n].flaresOptions.endFlare.fadeSpeed; } } //vertices = new Vector3[arcs.Length,vertexCount.Max()]; arcPoints = new Vector3[arcs.Length,segmNums.Max()+2]; shiftVectors = new Vector3[arcs.Length,segmNums.Max()+2]; arcTangents = new Vector3[arcs.Length,segmNums.Max()+2]; arcTangentsShift = new Quaternion[arcs.Length,segmNums.Max()*2+2]; for (int n = 0; n < arcs.Length; n++) { ResetArc(n); } if (anyLights) { GameObject lightObject; lights = new Light[arcs.Length,lightsCount.Max()]; lightsTransforms = new Transform[arcs.Length,lightsCount.Max()+1]; for(int n = 0; n < arcs.Length; n++) { if (arcs[n].lightsOptions.lights) { for (int i = 0; i < lightsCount[n]; i++) { lightObject = new GameObject("ArcLight"); lightObject.transform.parent = transform; lightsTransforms[n,i] = lightObject.transform; lights[n,i] = lightObject.AddComponent<Light>(); lights[n,i].type = LightType.Point; lights[n,i].renderMode = arcs[n].lightsOptions.renderMode; lights[n,i].range = arcs[n].lightsOptions.lightsRange; } } } } } protected void SetShapeArrays() { int shapeLen = Mathf.Max(shapeTransforms.Length,shapePoints.Length); oldShapeTransformsSize = shapeLen; if (closedShape) { shapeKeyLocations = new float[shapeLen+1]; shapeKeyNormalizedLocations = new float[shapeLen+1]; } else { shapeKeyLocations = new float[shapeLen]; shapeKeyNormalizedLocations = new float[shapeLen]; } shapeTangents = new Vector3[shapeLen]; } void Start () { //Checks for correct parameters if (Mathf.Max(shapeTransforms.Length, shapePoints.Length) < 2) { Debug.LogError(gameObject.name + " : There should be at least 2 shape transforms or points for correct shape calculation. Deactivating component."); Debug.Break(); this.enabled = false; return; } if (arcs.Length == 0) { Debug.LogError(gameObject.name + " : No arcs set up. Deactivating component."); this.enabled = false; return; } if (lifetime == 0) Debug.LogWarning(gameObject.name + " : Lifetime set to zero. 
That's a waste of a perfectly good component."); if (oscillationNormal == Vector3.zero) Debug.LogWarning(gameObject.name + " : Oscillation normal set to zero. Oscillation planes will be unpredictable."); if (easeInOutOptions.useEaseInOut && easeInOutOptions.distance == 0) Debug.LogWarning(gameObject.name + " : EaseInOut enabled but it's distance set to zero. It will have no effect except performance hit."); for (int i = 0; i < arcs.Length; i++) { if ((arcs[i].flaresOptions.startFlare.enabled || arcs[i].flaresOptions.endFlare.enabled) && myCamera == null) myCamera = Camera.main; if (arcs[i].colorOptions.startColor.colorKeys.Length == 2 && arcs[i].colorOptions.startColor.colorKeys[0].color == new Color(0,0,0,255) && arcs[i].colorOptions.startColor.colorKeys[0].time == 0 && arcs[i].colorOptions.startColor.colorKeys[1].color == new Color(0,0,0,255) && arcs[i].colorOptions.startColor.colorKeys[1].time == 1 && arcs[i].colorOptions.startColor.alphaKeys.Length == 2 && arcs[i].colorOptions.startColor.alphaKeys[0].alpha == 0 && arcs[i].colorOptions.startColor.alphaKeys[0].time == 0 && arcs[i].colorOptions.startColor.alphaKeys[1].alpha == 0 && arcs[i].colorOptions.startColor.alphaKeys[1].time == 1) { Debug.LogWarning(gameObject.name + " : Start color gradient has not been assigned to Arc #"+i.ToString()+", arc probably wouldn't be visible. 
Set color options to see the arc."); } if (arcs[i].sizeOptions.segmentLength == 0) { Debug.LogWarning(gameObject.name + " : Segment length of Arc #"+i.ToString()+" is set to zero, arc will always be consisting of only 2 vertexes"); } if (arcs[i].sizeOptions.startWidthCurve.keys.Length == 0 && (arcs[i].sizeOptions.onlyStartWidth || arcs[i].sizeOptions.endWidthCurve.keys.Length == 0)) { Debug.LogWarning(gameObject.name + " : Width curves has not been assigned to Arc #"+i.ToString()+", setting default curves."); arcs[i].sizeOptions.startWidthCurve.AddKey(0,0.5f); if (!arcs[i].sizeOptions.onlyStartWidth) arcs[i].sizeOptions.endWidthCurve.AddKey(0,0.5f); } if (arcs[i].material == null) { Debug.LogWarning(gameObject.name + " : Material have not been assigned to Arc #"+i.ToString()+", setting default material."); arcs[i].material = GetDefaultMaterial(); } if (arcs[i].nesting.Nested && arcs[i].nesting.parentArcIndex > i) Debug.LogWarning(gameObject.name + " : Arc #"+i.ToString()+" is nested to arc with higher index. That's not recommended because of vertex caching."); for (int q = 0; q < arcs[i].oscillations.Length; q++) { if (arcs[i].oscillations[q].amplitude == 0) { Debug.LogWarning(gameObject.name + " : Amplitude of oscillation #"+q.ToString()+" of Arc #"+i.ToString()+" set to zero. It will have no effect except performance hit"); } if (arcs[i].oscillations[q].wavelength == 0) { Debug.LogError(gameObject.name + " : Wavelength of oscillation #"+q.ToString()+" of Arc #"+i.ToString()+" set to zero. That makes no mathematical sense. 
Disabling component"); this.enabled = false; return; } } } //Service array initialization, actual data creation happens at Initialize() emitterSystems = new ParticleSystem[arcs.Length][]; emitterDestructors = new LightSaber_EmitterDestructor[arcs.Length][]; for (int n = 0; n < arcs.Length; n++) { emitterSystems[n] = new ParticleSystem[arcs[n].emissionOptions.Length]; emitterDestructors[n] = new LightSaber_EmitterDestructor[arcs[n].emissionOptions.Length]; } lrends = new LineRenderer[arcs.Length]; segmNums = new int[arcs.Length]; lightsCount = new int[arcs.Length]; vertexCount = new int[arcs.Length]; oldVertexCount = new int[arcs.Length]; noiseOffsets = new float[arcs.Length]; noiseScale = new float[arcs.Length]; maxStartWidth = new float[arcs.Length]; maxEndWidth = new float[arcs.Length]; coreCoefs = new float[arcs.Length]; vertices = new Vector3[arcs.Length][]; oldVertices = new Vector3[arcs.Length][]; //Init SetShapeArrays(); GameObject rayLineRenderer; for(int n = 0; n < arcs.Length; n++) { rayLineRenderer = new GameObject("ArcLineRenderer"); rayLineRenderer.transform.parent = transform; lrends[n] = rayLineRenderer.AddComponent<LineRenderer>(); lrends[n].material = arcs[n].material; lrends[n].shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off; lrends[n].receiveShadows = false; if (customSorting) { lrends[n].sortingLayerName = sortingLayerName; lrends[n].sortingOrder = sortingOrder; } //texture setup if (arcs[n].textureOptions.shapeTexture != null) lrends[n].material.SetTexture("_MainTex",arcs[n].textureOptions.shapeTexture); if (arcs[n].textureOptions.noiseTexture != null) lrends[n].material.SetTexture("_NoiseMask",arcs[n].textureOptions.noiseTexture); //Calculating maximum widths float maxWidth = 0; if (arcs[n].flaresOptions.startFlare.enabled) { for (int i = 0; i <= maxCalcDetalization; i++) { if (maxWidth < arcs[n].sizeOptions.startWidthCurve.Evaluate((float)i/maxCalcDetalization)) maxWidth = 
arcs[n].sizeOptions.startWidthCurve.Evaluate((float)i/maxCalcDetalization); } maxStartWidth[n] = maxWidth; } if (arcs[n].flaresOptions.endFlare.enabled) { if (arcs[n].sizeOptions.onlyStartWidth) { if (arcs[n].flaresOptions.startFlare.enabled) maxEndWidth[n] = maxStartWidth[n]; else { for (int i = 0; i <= maxCalcDetalization; i++) { if (maxWidth < arcs[n].sizeOptions.startWidthCurve.Evaluate((float)i/maxCalcDetalization)) maxWidth = arcs[n].sizeOptions.startWidthCurve.Evaluate((float)i/maxCalcDetalization); } maxStartWidth[n] = maxWidth; maxEndWidth[n] = maxStartWidth[n]; } } else { maxWidth = 0; for (int i = 0; i <= maxCalcDetalization; i++) if (maxWidth < arcs[n].sizeOptions.endWidthCurve.Evaluate((float)i/maxCalcDetalization)) maxWidth = arcs[n].sizeOptions.endWidthCurve.Evaluate((float)i/maxCalcDetalization); maxEndWidth[n] = maxWidth; } } } CalculateShape(); //Adding this system to performance manager if it exists if (LightSaber_Manager.Instance != null) LightSaber_Manager.Instance.AddArcSystem(this); } public Vector3 RandomVector3(float range) { return new Vector3(UnityEngine.Random.Range(-range,range), UnityEngine.Random.Range(-range,range), UnityEngine.Random.Range(-range,range)); } void Update() { //Phase shifting for (int n = 0; n < arcs.Length; n++) { foreach(OscillationInfo osc in arcs[n].oscillations) { osc.phase += osc.phaseMovementSpeed * Time.deltaTime; if (osc.phase > 360) osc.phase = osc.phase - 360; if (osc.phase < 0) osc.phase = osc.phase + 360; } } //Time management if (!freeze) { if (!playBackward) elapsedTime += Time.deltaTime; else elapsedTime -= Time.deltaTime; } if (elapsedTime > lifetime) { switch (playbackType) { case ArcsPlaybackType.once: for (int i = 0; i < Mathf.Min(shapeTransforms.Length,transformsDestructionFlags.Length); i++) { if (transformsDestructionFlags[i]) { Destroy(shapeTransforms[i].gameObject); } } for (int n = 0; n < arcs.Length; n++) { for (int i = 0; i < arcs[n].emissionOptions.Length; i++) { try { if 
(arcs[n].emissionOptions[i].emitAfterRayDeath) emitterDestructors[n][i].enabled = true; // no destructor sometimes } catch (NullReferenceException e) { Debug.Log(e); } } } if (playbackMessages) messageReciever.SendMessage("LightSaberPlayback",this); Destroy(gameObject); break; case ArcsPlaybackType.loop: elapsedTime -= lifetime; if (playbackMessages) messageReciever.SendMessage("LightSaberPlayback",this); break; case ArcsPlaybackType.pingpong: playBackward = true; elapsedTime = lifetime; if (playbackMessages) messageReciever.SendMessage("LightSaberPlayback",this); break; case ArcsPlaybackType.clamp: elapsedTime = lifetime; freeze = true; if (playbackMessages) messageReciever.SendMessage("LightSaberPlayback",this); break; } } if (elapsedTime < 0) { playBackward = false; elapsedTime = 0; } } public Vector3 GetArcEndPosition(int arcIndex) { return GetArcPoint(GetArcEndPoint(arcIndex),arcIndex); } public float GetArcEndPoint(int arcIndex) { switch (arcs[arcIndex].propagationOptions.propagationType) { case PropagationType.globalSpaceSpeed: return Mathf.Min(vertexCount[arcIndex] * arcs[arcIndex].propagationOptions.globalSpeed * elapsedTime / shapeLength,vertexCount[arcIndex])/vertexCount[arcIndex]; case PropagationType.localTimeCurve: return Mathf.Clamp01(arcs[arcIndex].propagationOptions.timeCurve.Evaluate(elapsedTime/lifetime)); case PropagationType.instant: return 1; default: return 1; } } void LateUpdate () { float lifetimePos = elapsedTime/lifetime; CalculateShape(); if (localSpaceOcillations) oscNormal = transform.rotation * oscillationNormal; else oscNormal = oscillationNormal; for (int n = 0; n < arcs.Length; n++) { vertices[n].CopyTo(oldVertices[n],0); Color StartColor = arcs[n].colorOptions.startColor.Evaluate(lifetimePos); Color EndColor; if (arcs[n].colorOptions.onlyStartColor) EndColor = StartColor; else EndColor = arcs[n].colorOptions.endColor.Evaluate(lifetimePos); Color coreColor = arcs[n].colorOptions.coreColor.Evaluate(lifetimePos); 
lrends[n].material.SetColor("_StartColor",StartColor); lrends[n].material.SetColor("_EndColor",EndColor); lrends[n].material.SetColor("_CoreColor",coreColor); if (arcs[n].colorOptions.coreJitter > 0) { coreCoefs[n] = arcs[n].colorOptions.coreCurve.Evaluate(lifetimePos)+UnityEngine.Random.Range(-arcs[n].colorOptions.coreJitter*0.5f,arcs[n].colorOptions.coreJitter*0.5f); lrends[n].material.SetFloat("_CoreCoef",coreCoefs[n]); } else { coreCoefs[n] = arcs[n].colorOptions.coreCurve.Evaluate(lifetimePos); lrends[n].material.SetFloat("_CoreCoef",coreCoefs[n]); } //Fading switch (arcs[n].colorOptions.fade) { case FadeTypes.none: lrends[n].material.SetFloat("_FadeLevel",0.001f); break; case FadeTypes.relativePoint: lrends[n].material.SetFloat("_FadeLevel",Mathf.Max(arcs[n].colorOptions.fadePoint,0.001f)); break; case FadeTypes.worldspacePoint: lrends[n].material.SetFloat("_FadeLevel",Mathf.Max(Mathf.Clamp01(arcs[n].colorOptions.fadePoint / shapeLength),0.001f)); break; } //Ray size change float startWidth = arcs[n].sizeOptions.startWidthCurve.Evaluate(lifetimePos) * sizeMultiplier; float endWidth; if (arcs[n].sizeOptions.onlyStartWidth) endWidth = startWidth; else endWidth = arcs[n].sizeOptions.endWidthCurve.Evaluate(lifetimePos) * sizeMultiplier; lrends[n].SetWidth(startWidth,endWidth); float vertexCnt = vertexCount[n]; switch (arcs[n].propagationOptions.propagationType) { case PropagationType.globalSpaceSpeed: vertexCnt = Mathf.Min(vertexCount[n] * arcs[n].propagationOptions.globalSpeed * elapsedTime / shapeLength,vertexCount[n]); lrends[n].SetVertexCount(Mathf.CeilToInt(vertexCnt)); break; case PropagationType.localTimeCurve: vertexCnt = Mathf.Min(vertexCount[n] * arcs[n].propagationOptions.timeCurve.Evaluate(lifetimePos),vertexCount[n]); lrends[n].SetVertexCount(Mathf.Max(Mathf.CeilToInt(vertexCnt),0)); break; } //Texture handling if (arcs[n].textureOptions.noiseTexture != null) { lrends[n].material.SetFloat("_NoiseCoef", 
arcs[n].textureOptions.noiseCoef.Evaluate(lifetimePos)); if (arcs[n].textureOptions.animateTexture) { noiseOffsets[n] += arcs[n].textureOptions.noiseSpeed * Time.deltaTime; if (noiseOffsets[n]>1) noiseOffsets[n] -= 1; if (noiseOffsets[n]<0) noiseOffsets[n] += 1; noiseScale[n] = vertexCnt/vertexCount[n]*shapeLength / arcs[n].textureOptions.tileSize; lrends[n].material.SetTextureScale("_NoiseMask",new Vector2(noiseScale[n],1)); lrends[n].material.SetTextureOffset("_NoiseMask",new Vector2(noiseOffsets[n],1)); } else { noiseScale[n] = vertexCnt/vertexCount[n]*shapeLength / arcs[n].textureOptions.tileSize; lrends[n].material.SetTextureScale("_NoiseMask",new Vector2(noiseScale[n],1)); } } SetFlares (n); SetArcShape(n); Vector3 curVertexPos; curVertexPos = CalcArcPoint(0,n); Vector3 nextVertexPos = Vector3.zero; Vector3 direction = Vector3.zero; float pos; int currentShapeKey = 1; for (int curVertex = 0; curVertex < vertexCnt-1; curVertex++) { pos = (float)curVertex/vertexCount[n]; if (arcs[n].sizeOptions.snapSegmentsToShape && Mathf.Abs(shapeKeyNormalizedLocations[currentShapeKey]-pos)*vertexCount[n] < 0.5) { pos = shapeKeyNormalizedLocations[currentShapeKey]; curVertexPos = shapeTransforms[currentShapeKey].position; currentShapeKey++; } nextVertexPos = CalcArcPoint((float)(curVertex+1)/vertexCount[n],n); direction = nextVertexPos-curVertexPos; vertices[n][curVertex] = curVertexPos + CalculateOscillationShift(direction,pos*shapeLength,n) * GetShiftCoef(pos) + CalculateCurveShift(direction,pos*ShapeLength,n); lrends[n].SetPosition(curVertex,vertices[n][curVertex]); curVertexPos = nextVertexPos; } if (Mathf.CeilToInt(vertexCnt)>0 && Mathf.CeilToInt(vertexCnt) <= vertexCount[n]) { vertices[n][Mathf.CeilToInt(vertexCnt)-1] = CalculateOscillationShift(direction,shapeLength*(vertexCnt)/vertexCount[n],n) * GetShiftCoef(vertexCnt/vertexCount[n]) + CalcArcPoint(vertexCnt/vertexCount[n],n); 
lrends[n].SetPosition(Mathf.CeilToInt(vertexCnt)-1,vertices[n][Mathf.CeilToInt(vertexCnt)-1]); } //Particles emissions for (int i = 0; i < arcs[n].emissionOptions.Length; i++) { if (arcs[n].emissionOptions[i].emit) { ParticleSystem.Particle tmpParticle = new ParticleSystem.Particle(); int particleCount = (int)(UnityEngine.Random.value + vertexCnt/vertexCount[n] * shapeLength * arcs[n].emissionOptions[i].particlesPerMeter * Time.deltaTime * arcs[n].emissionOptions[i].emissionDuringLifetime.Evaluate(lifetimePos)); float arcEndPoint = vertexCnt/vertexCount[n]; float radiusCoef = arcs[n].emissionOptions[i].radiusCoefDuringLifetime.Evaluate(lifetimePos); float directionCoef = arcs[n].emissionOptions[i].directionDuringLifetime.Evaluate(lifetimePos); float radius; float rand = 0; Vector3 randomVect = Vector3.one; Vector3 spaceShiftVect; if (emitterSystems[n][i].simulationSpace == ParticleSystemSimulationSpace.Local) spaceShiftVect = -emitterSystems[n][i].transform.position; else spaceShiftVect = Vector3.zero; Color emitStartColor; Color emitEndColor; Vector3 emitPos; Vector3 emitDir; if (arcs[n].emissionOptions[i].startColorByRay) { emitStartColor = StartColor; emitEndColor = EndColor; } else { emitStartColor = emitterSystems[n][i].startColor; emitEndColor = emitterSystems[n][i].startColor; } //Debug.Log(emitterSystems[n][i].particleCount); for (int q = 1; q <= particleCount; q++) { rand = 0.001f + UnityEngine.Random.value * (arcEndPoint-0.002f); //get random point without touching exact end of arc randomVect = UnityEngine.Random.rotation * Vector3.forward; radius = Mathf.Lerp(startWidth,endWidth,rand) * radiusCoef; emitPos = GetArcPoint(rand,n); emitDir = (GetArcPoint(rand+0.001f,n) - emitPos).normalized; tmpParticle.position = Vector3.Lerp(emitPos,GetOldArcPoint(rand,n),UnityEngine.Random.value) + randomVect * radius + spaceShiftVect; tmpParticle.startLifetime = emitterSystems[n][i].startLifetime * (1 - arcs[n].emissionOptions[i].randomizationOptions.lifetimeRndCoef + 
arcs[n].emissionOptions[i].randomizationOptions.lifetimeRndCoef * UnityEngine.Random.value); tmpParticle.lifetime = tmpParticle.startLifetime; tmpParticle.velocity = (randomVect * (1f - Mathf.Clamp01(Mathf.Abs(directionCoef))) + emitDir * directionCoef) * emitterSystems[n][i].startSpeed * (1 - arcs[n].emissionOptions[i].randomizationOptions.velocityRndCoef + arcs[n].emissionOptions[i].randomizationOptions.velocityRndCoef * UnityEngine.Random.value) * sizeMultiplier; tmpParticle.rotation = emitterSystems[n][i].startRotation * (1 - arcs[n].emissionOptions[i].randomizationOptions.rotationRndCoef + arcs[n].emissionOptions[i].randomizationOptions.rotationRndCoef * UnityEngine.Random.value); tmpParticle.angularVelocity = (arcs[n].emissionOptions[i].randomizationOptions.rotationRndCoef + arcs[n].emissionOptions[i].randomizationOptions.rotationRndCoef * UnityEngine.Random.value * 2); tmpParticle.size = emitterSystems[n][i].startSize * (1 - arcs[n].emissionOptions[i].randomizationOptions.sizeRndCoef + arcs[n].emissionOptions[i].randomizationOptions.sizeRndCoef * UnityEngine.Random.value) * sizeMultiplier; tmpParticle.color = Color.Lerp(emitStartColor,emitEndColor,rand); emitterSystems[n][i].Emit(tmpParticle); /*emitterSystems[n][i].Emit(Vector3.Lerp(emitPos,GetOldArcPoint(rand,n),UnityEngine.Random.value) + randomVect * radius + spaceShiftVect, (randomVect * (1f - Mathf.Clamp01(Mathf.Abs(directionCoef))) + emitDir * directionCoef) * emitterSystems[n][i].startSpeed, emitterSystems[n][i].startSize, emitterSystems[n][i].startLifetime, Color.Lerp(emitStartColor,emitEndColor,rand));*/ } } } //Lights placing if (arcs[n].lightsOptions.lights && arcs[n].lightsOptions.priority <= performancePriority) { for (int i = 0; i < lightsCount[n]; i++) { if ((float)(i)/lightsCount[n] <= vertexCnt/vertexCount[n]) { lights[n,i].enabled = true; Color mainLightColor; if (!arcs[n].colorOptions.onlyStartColor) mainLightColor = Color.Lerp(StartColor,EndColor,(float)(i)/(lightsCount[n]-1)); else 
mainLightColor = StartColor; lights[n,i].color = Color.Lerp(mainLightColor,coreColor,coreCoefs[n]/2); if (!arcs[n].sizeOptions.onlyStartWidth) lights[n,i].intensity = arcs[n].lightsOptions.lightsIntensityMultiplyer * Mathf.Lerp(startWidth,endWidth,(float)i/(segmNums[n]+1)); else lights[n,i].intensity = arcs[n].lightsOptions.lightsIntensityMultiplyer * startWidth; lightsTransforms[n,i].position = GetArcPoint((float)(i)/(lightsCount[n]-1),n); } else { lights[n,i].enabled = false; } } } } } }
//----------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//----------------------------------------------------------------

namespace System.Activities.Presentation.View
{
    using System;
    using System.Collections.Generic;
    using System.ComponentModel;
    using System.Windows;
    using System.Windows.Controls;
    using System.Windows.Media;
    using System.Globalization;
    using System.Runtime;
    using System.Diagnostics.CodeAnalysis;

    //This class is responsible for providing functionality to display additional information in context of
    //the designer view in a popup-like manner. It is basically the canvas control, which is placed on top of
    //the other visual elements. It provides functionality to add and remove extension windows, as well as manipulating
    //their position and size
    sealed class ExtensionSurface : Panel
    {
        public static readonly DependencyProperty DesignerProperty = DependencyProperty.Register(
            "Designer",
            typeof(DesignerView),
            typeof(ExtensionSurface),
            new PropertyMetadata(OnDesignerChanged));

        public static readonly DependencyProperty AutoExpandCanvasProperty = DependencyProperty.Register(
            "AutoExpandCanvas",
            typeof(bool),
            typeof(ExtensionSurface),
            new UIPropertyMetadata(false));

        // Attached property: the element a Relative-mode window is positioned against.
        public static readonly DependencyProperty PlacementTargetProperty = DependencyProperty.RegisterAttached(
            "PlacementTarget",
            typeof(FrameworkElement),
            typeof(ExtensionSurface),
            new UIPropertyMetadata(null, OnPlacementTargetChanged));

        // Attached property: which corner/edge of the placement target the window snaps to.
        public static readonly DependencyProperty AlignmentProperty = DependencyProperty.RegisterAttached(
            "Alignment",
            typeof(PositionAlignment),
            typeof(ExtensionSurface),
            new UIPropertyMetadata(PositionAlignment.LeftTop));

        // Attached property: Absolute (use Position directly) or Relative (derive from PlacementTarget).
        public static readonly DependencyProperty ModeProperty = DependencyProperty.RegisterAttached(
            "Mode",
            typeof(PlacementMode),
            typeof(ExtensionSurface),
            new UIPropertyMetadata(PlacementMode.Absolute, OnPlacementModeChanged));

        // Attached property: the window's (left, top) position on the surface canvas.
        public static readonly DependencyProperty PositionProperty = DependencyProperty.RegisterAttached(
            "Position",
            typeof(Point),
            typeof(ExtensionSurface),
            new UIPropertyMetadata(new Point()));

        // Returns true when v1 + v2 > v3; used to detect a window overflowing the surface bounds.
        Func<double, double, double, bool> IsGreater;
        // Most recently selected child window paired with the canvas offset captured at selection time.
        KeyValuePair<FrameworkElement, Point> selectedChild;
        // Surface size captured when the selected window started being rearranged.
        Size rearangeStartSize = new Size();
        Rect actualPanelRect = new Rect(0, 0, 0, 0);
        // Absolute value of the most negative child position; used to shift all children into view.
        Point canvasOffset = new Point();
        // Monotonically increasing z-index so the selected window is always brought to front.
        int currentZIndex = 1000;

        public ExtensionSurface()
        {
            //add global handler for ExtensionWindow's CloseEvent
            this.AddHandler(ExtensionWindow.CloseEvent, new RoutedEventHandler(OnExtensionWindowClosed));
            this.ClipToBounds = true;
            this.IsGreater = (v1, v2, v3) => (v1 + v2 > v3);
        }

        [Fx.Tag.KnownXamlExternal]
        public DesignerView Designer
        {
            get { return (DesignerView)GetValue(DesignerProperty); }
            set { SetValue(DesignerProperty, value); }
        }

        public bool AutoExpandCanvas
        {
            get { return (bool)GetValue(AutoExpandCanvasProperty); }
            set { SetValue(AutoExpandCanvasProperty, value); }
        }

        // Re-place a visible window whenever its placement mode changes.
        static void OnPlacementModeChanged(DependencyObject sender, DependencyPropertyChangedEventArgs args)
        {
            ExtensionWindow window = sender as ExtensionWindow;
            if (null != window && null != window.Surface && window.Visibility == Visibility.Visible)
            {
                window.Surface.PlaceWindow(window);
            }
        }

        static void OnPlacementTargetChanged(DependencyObject sender, DependencyPropertyChangedEventArgs args)
        {
        }

        //hook for designer mouse events - they are required to handle positioning and resizing
        // NOTE(review): the locals below are assigned but never used; this looks like a stub
        // kept for the property-changed hook — confirm before removing.
        static void OnDesignerChanged(DependencyObject sender, DependencyPropertyChangedEventArgs args)
        {
            ExtensionSurface ctrl = (ExtensionSurface)sender;
            DesignerView designer;
            if (null != args.OldValue)
            {
                designer = (DesignerView)args.OldValue;
            }
            if (null != args.NewValue)
            {
                designer = (DesignerView)args.NewValue;
            }
        }

        // Wire/unwire visibility tracking as ExtensionWindow children are added and removed.
        protected override void OnVisualChildrenChanged(DependencyObject visualAdded, DependencyObject visualRemoved)
        {
            ExtensionWindow window = visualRemoved as ExtensionWindow;
            if (null != window)
            {
                window.VisibilityChanged -= OnWindowVisibilityChanged;
                // window.SizeChanged -= OnWindowSizeChanged;
                this.rearangeStartSize.Width = 0;
                this.rearangeStartSize.Height = 0;
            }
            base.OnVisualChildrenChanged(visualAdded, visualRemoved);
            window = visualAdded as ExtensionWindow;
            if (null != window)
            {
                window.VisibilityChanged += OnWindowVisibilityChanged;
                // window.SizeChanged += OnWindowSizeChanged;
                if (!window.IsLoaded)
                {
                    window.Loaded += OnChildWindowLoaded;
                }
            }
        }

        // Keep children correctly placed (Relative mode) or at least visible when the surface resizes.
        protected override void OnRenderSizeChanged(SizeChangedInfo sizeInfo)
        {
            base.OnRenderSizeChanged(sizeInfo);
            foreach (FrameworkElement child in this.Children)
            {
                ExtensionWindow window = child as ExtensionWindow;
                if (null != window)
                {
                    if (PlacementMode.Relative == GetMode(window) && null != GetPlacementTarget(window))
                    {
                        this.PlaceWindow(window);
                        continue;
                    }
                    if (!this.AutoExpandCanvas)
                    {
                        this.EnsureWindowIsVisible(window);
                    }
                }
            }
        }

        void OnChildWindowLoaded(object sender, EventArgs e)
        {
            ExtensionWindow window = (ExtensionWindow)sender;
            this.OnWindowVisibilityChanged(window, null);
            window.Loaded -= OnChildWindowLoaded;
        }

        //void OnWindowSizeChanged(object sender, SizeChangedEventArgs e)
        //{
        //    ExtensionWindow window = (ExtensionWindow)sender;
        //
        //    EnsureWindowIsVisible(window);
        //}

        // When a window becomes visible, force a measure pass if its size is not yet valid,
        // then place it according to its placement properties.
        void OnWindowVisibilityChanged(object sender, RoutedEventArgs args)
        {
            ExtensionWindow window = (ExtensionWindow)sender;
            if (window.IsVisible)
            {
                Func<double, bool> IsInvalid = x => (double.IsInfinity(x) || double.IsNaN(x) || double.Epsilon > x);
                // Fix: the original tested ActualWidth twice and never ActualHeight, so a window
                // with an invalid height (but valid width) skipped the forced Measure call.
                if (IsInvalid(window.ActualWidth) || IsInvalid(window.ActualHeight) || IsInvalid(window.DesiredSize.Width) || IsInvalid(window.DesiredSize.Height))
                {
                    window.Measure(new Size(double.PositiveInfinity, double.PositiveInfinity));
                }
                PlaceWindow(window);
            }
        }

        // Compute the window's (left, top) position from its Mode/PlacementTarget/Alignment/Position
        // attached properties and apply it via SetWindowPosition.
        void PlaceWindow(ExtensionWindow window)
        {
            if (null != window)
            {
                FrameworkElement target = ExtensionSurface.GetPlacementTarget(window);
                PositionAlignment alignment = ExtensionSurface.GetAlignment(window);
                PlacementMode mode = ExtensionSurface.GetMode(window);
                Point position = ExtensionSurface.GetPosition(window);
                Point calculatedPosition = new Point();
                FrameworkElement commonRoot = null;
                MatrixTransform transform = null;
                switch (mode)
                {
                    case PlacementMode.Relative:
                        if (null != target)
                        {
                            commonRoot = target.FindCommonVisualAncestor(this) as FrameworkElement;
                            if (null == commonRoot)
                            {
                                return;
                            }
                            transform = (MatrixTransform)target.TransformToAncestor(commonRoot);
                        }
                        else
                        {
                            if (!DesignerProperties.GetIsInDesignMode(this))
                            {
                                Fx.Assert(string.Format(CultureInfo.InvariantCulture, "PlacementTarget must be set in RelativeMode on ExtensionSurface '{0}'", this.Name));
                            }
                        }
                        break;

                    case PlacementMode.Absolute:
                        calculatedPosition = position;
                        break;

                    default:
                        Fx.Assert(string.Format(CultureInfo.CurrentCulture, "ExtensionWindowPlacement.Mode {0} specified in ExtensionWindow '{1}' is not supported for ExtensionSurface", mode, window.Name));
                        return;
                }
                if (PlacementMode.Relative == mode)
                {
                    if (null != target)
                    {
                        double x;
                        double y;
                        switch (alignment)
                        {
                            case PositionAlignment.LeftTop:
                                calculatedPosition = transform.Transform(calculatedPosition);
                                break;

                            case PositionAlignment.LeftBottom:
                                calculatedPosition = transform.Transform(new Point(0.0, target.ActualHeight));
                                break;

                            case PositionAlignment.RightTop:
                                calculatedPosition = transform.Transform(new Point(target.ActualWidth, 0.0));
                                break;

                            case PositionAlignment.RightBottom:
                                calculatedPosition = transform.Transform(new Point(target.ActualWidth, target.ActualHeight));
                                break;

                            case PositionAlignment.Center:
                                //center the window over the (scaled) target; M11/M22 carry the zoom factors
                                calculatedPosition = transform.Transform(calculatedPosition);
                                x = ((target.ActualWidth * transform.Matrix.M11) - window.Width) / 2.0;
                                y = ((target.ActualHeight * transform.Matrix.M22) - window.Height) / 2.0;
                                calculatedPosition.Offset(x, y);
                                break;

                            case PositionAlignment.CenterHorizontal:
                                calculatedPosition = transform.Transform(calculatedPosition);
                                x = ((target.ActualWidth * transform.Matrix.M11) - window.Width) / 2.0;
                                calculatedPosition.Offset(x, 0.0);
                                break;

                            case PositionAlignment.CenterVertical:
                                calculatedPosition = transform.Transform(calculatedPosition);
                                y = ((target.ActualHeight * transform.Matrix.M22) - window.Height) / 2.0;
                                calculatedPosition.Offset(0.0, y);
                                break;

                            default:
                                Fx.Assert(string.Format(CultureInfo.CurrentCulture, "ExtensionWindowPlacement.Position = '{0}' is not supported", alignment));
                                return;
                        }
                    }
                }
                SetWindowPosition(window, calculatedPosition);
            }
        }

        // Store the window's position (clamped to the surface, or offset-adjusted when the
        // canvas auto-expands) and invalidate measure/arrange as appropriate.
        internal void SetWindowPosition(ExtensionWindow window, Point position)
        {
            Func<double, double, double, double, double> CalculateInBoundsValue = (pos, size, limit, modifier) =>
                {
                    if (this.AutoExpandCanvas)
                    {
                        return pos - modifier;
                    }
                    else
                    {
                        pos = Math.Max(0.0, pos);
                        return pos + size > limit ? limit - size : pos;
                    }
                };
            //in case of AutoExpandCanvas == false:
            // - do not allow placing window outside surface bounds
            //in case of AutoExpandCanvas == true:
            // - include possible negative canvas offset
            position.X = CalculateInBoundsValue(position.X, window.DesiredSize.Width, this.ActualWidth, this.selectedChild.Value.X);
            position.Y = CalculateInBoundsValue(position.Y, window.DesiredSize.Height, this.ActualHeight, this.selectedChild.Value.Y);
            //update its position on canvas
            ExtensionSurface.SetPosition(window, position);
            bool requiresMeasure = false;
            if (this.AutoExpandCanvas)
            {
                requiresMeasure = true;
                this.canvasOffset.X = 0;
                this.canvasOffset.Y = 0;
                //recompute the shift needed to bring the most-negative child position back to (0,0)
                foreach (UIElement item in this.Children)
                {
                    FrameworkElement child = item as FrameworkElement;
                    if (null != child)
                    {
                        Point p = ExtensionSurface.GetPosition(child);
                        this.canvasOffset.X = Math.Min(this.canvasOffset.X, p.X);
                        this.canvasOffset.Y = Math.Min(this.canvasOffset.Y, p.Y);
                    }
                }
                this.canvasOffset.X = Math.Abs(this.canvasOffset.X);
                this.canvasOffset.Y = Math.Abs(this.canvasOffset.Y);
            }
            if (requiresMeasure)
            {
                this.InvalidateMeasure();
            }
            else
            {
                this.InvalidateArrange();
            }
        }

        // Re-applying the current position clamps the window back into the visible surface area.
        void EnsureWindowIsVisible(ExtensionWindow window)
        {
            SetWindowPosition(window, ExtensionSurface.GetPosition(window));
        }

        // Resize a window; when the canvas does not auto-expand, shrink the requested size so the
        // window stays within the surface bounds.
        internal void SetSize(ExtensionWindow window, Size size)
        {
            Point pos = ExtensionSurface.GetPosition(window);
            if (!this.AutoExpandCanvas)
            {
                if (IsGreater(pos.X, size.Width, this.ActualWidth))
                {
                    size.Width = this.ActualWidth - pos.X;
                }
                if (IsGreater(pos.Y, size.Height, this.ActualHeight))
                {
                    size.Height = this.ActualHeight - pos.Y;
                }
            }
            System.Diagnostics.Debug.WriteLine("SetSize oldSize (" + window.Width + "," + window.Height + ") newSize (" + size.Width + "," + size.Height + ")");
            window.Width = size.Width;
            window.Height = size.Height;
            if (this.AutoExpandCanvas)
            {
                // this.InvalidateMeasure();
            }
        }

        protected override Size ArrangeOverride(Size arrangeSize)
        {
            foreach (UIElement child in this.Children)
            {
                //get (left, top) coordinates
                Point pos = ExtensionSurface.GetPosition(child);
                //include eventual negative offset (panel wouldn't display elements with negative coordinates by default)
                pos.Offset(this.canvasOffset.X, this.canvasOffset.Y);
                //request child to rearrange itself in given rectangle
                child.Arrange(new Rect(pos, child.DesiredSize));
            }
            System.Diagnostics.Debug.WriteLine(string.Format(CultureInfo.InvariantCulture, "ArrangeOverride Size({0},{1})", arrangeSize.Width, arrangeSize.Height));
            return arrangeSize;
        }

        // In AutoExpandCanvas mode the panel reports a size large enough to hold every child at its
        // (possibly negative) position; otherwise default Panel measuring applies.
        protected override Size MeasureOverride(Size constraint)
        {
            Size result;
            if (this.AutoExpandCanvas)
            {
                double panelWidth = 0.0;
                double panelHeight = 0.0;
                //initially assume that whole content fits in rectangle with coordinates (0,0, ActualWidth, ActualHeight)
                double offsetMinusX = 0.0;
                double offsetMinusY = 0.0;
                double offsetPlusX = this.rearangeStartSize.Width;
                double offsetPlusY = this.rearangeStartSize.Height;
                foreach (UIElement item in this.Children)
                {
                    FrameworkElement child = item as FrameworkElement;
                    if (null != child)
                    {
                        child.Measure(constraint);
                        //get child's position
                        Point pos = ExtensionSurface.GetPosition(child);
                        //calculate the minimum value of panel's (left,top) corner
                        offsetMinusX = Math.Min(offsetMinusX, pos.X);
                        offsetMinusY = Math.Min(offsetMinusY, pos.Y);
                        //calculate the maximum value of panel's (right, bottom) corner
                        offsetPlusX = Math.Max(offsetPlusX, pos.X + child.DesiredSize.Width);
                        offsetPlusY = Math.Max(offsetPlusY, pos.Y + child.DesiredSize.Height);
                    }
                }
                //get required panel's width and height
                panelWidth = Math.Abs(offsetPlusX - offsetMinusX);
                panelHeight = Math.Abs(offsetPlusY - offsetMinusY);
                this.actualPanelRect.Location = new Point(offsetMinusX, offsetMinusY);
                this.actualPanelRect.Size = new Size(panelWidth, panelHeight);
                //return it as result
                result = new Size(panelWidth, panelHeight);
            }
            else
            {
                result = base.MeasureOverride(constraint);
            }
            System.Diagnostics.Debug.WriteLine("MO constraint:" + constraint.Width + "," + constraint.Height + " new: " + result.Width + "," + result.Height);
            return result;
        }

        // Mark a child window as selected: remember it (with the current canvas offset), snapshot the
        // surface size for later measuring, and bring it to the front.
        public void SelectWindow(ExtensionWindow window)
        {
            if (null != window && this.Children.Contains(window))
            {
                this.selectedChild = new KeyValuePair<FrameworkElement, Point>(window, this.canvasOffset);
                this.rearangeStartSize.Width = this.ActualWidth;
                this.rearangeStartSize.Height = this.ActualHeight;
                Panel.SetZIndex(window, ++this.currentZIndex);
            }
        }

        void OnExtensionWindowClosed(object sender, RoutedEventArgs args)
        {
            ExtensionWindow window = args.Source as ExtensionWindow;
            if (null != window)
            {
                //remove window from children collection
                this.Children.Remove(window);
            }
        }

        public static void SetPlacementTarget(DependencyObject container, FrameworkElement value)
        {
            container.SetValue(PlacementTargetProperty, value);
        }

        public static FrameworkElement GetPlacementTarget(DependencyObject container)
        {
            return (FrameworkElement)container.GetValue(PlacementTargetProperty);
        }

        public static void SetAlignment(DependencyObject container, PositionAlignment value)
        {
            container.SetValue(AlignmentProperty, value);
        }

        public static PositionAlignment GetAlignment(DependencyObject container)
        {
            return (PositionAlignment)container.GetValue(AlignmentProperty);
        }

        public static void SetMode(DependencyObject container, PlacementMode value)
        {
            container.SetValue(ModeProperty, value);
        }

        public static PlacementMode GetMode(DependencyObject container)
        {
            return (PlacementMode)container.GetValue(ModeProperty);
        }

        public static void SetPosition(DependencyObject container, Point value)
        {
            container.SetValue(PositionProperty, value);
        }

        public static Point GetPosition(DependencyObject container)
        {
            return (Point)container.GetValue(PositionProperty);
        }

        [SuppressMessage("Microsoft.Design", "CA1034:NestedTypesShouldNotBeVisible", Justification = "Suppress to avoid unnecessary changes.")]
        public enum PlacementMode
        {
            Relative,
            Absolute
        }

        [SuppressMessage("Microsoft.Design", "CA1034:NestedTypesShouldNotBeVisible", Justification = "Suppress to avoid unnecessary changes.")]
        public enum PositionAlignment
        {
            LeftTop,
            LeftBottom,
            RightTop,
            RightBottom,
            Center,
            CenterHorizontal,
            CenterVertical
        };
    }
}
//
// Author:
//   Jb Evain (jbevain@gmail.com)
//
// Copyright (c) 2008 - 2015 Jb Evain
// Copyright (c) 2008 - 2011 Novell, Inc.
//
// Licensed under the MIT/X11 license.
//

using System;

using Zenject.ReflectionBaking.Mono.Cecil.Metadata;
using Zenject.ReflectionBaking.Mono.Collections.Generic;

namespace Zenject.ReflectionBaking.Mono.Cecil
{
    // CLI metadata type codes; values mirror the raw ElementType encoding so
    // casts between the two enums are lossless (see TypeReference.MetadataType).
    public enum MetadataType : byte
    {
        Void = ElementType.Void,
        Boolean = ElementType.Boolean,
        Char = ElementType.Char,
        SByte = ElementType.I1,
        Byte = ElementType.U1,
        Int16 = ElementType.I2,
        UInt16 = ElementType.U2,
        Int32 = ElementType.I4,
        UInt32 = ElementType.U4,
        Int64 = ElementType.I8,
        UInt64 = ElementType.U8,
        Single = ElementType.R4,
        Double = ElementType.R8,
        String = ElementType.String,
        Pointer = ElementType.Ptr,
        ByReference = ElementType.ByRef,
        ValueType = ElementType.ValueType,
        Class = ElementType.Class,
        Var = ElementType.Var,
        Array = ElementType.Array,
        GenericInstance = ElementType.GenericInst,
        TypedByReference = ElementType.TypedByRef,
        IntPtr = ElementType.I,
        UIntPtr = ElementType.U,
        FunctionPointer = ElementType.FnPtr,
        Object = ElementType.Object,
        MVar = ElementType.MVar,
        RequiredModifier = ElementType.CModReqD,
        OptionalModifier = ElementType.CModOpt,
        Sentinel = ElementType.Sentinel,
        Pinned = ElementType.Pinned,
    }

    // Reference to a type (possibly in another module/assembly). Caches the
    // computed full name; any mutation that affects the name resets the cache.
    public class TypeReference : MemberReference, IGenericParameterProvider, IGenericContext
    {
        string @namespace;
        bool value_type;
        internal IMetadataScope scope;
        internal ModuleDefinition module;

        // Raw element type code; None means a plain class/value-type reference.
        internal ElementType etype = ElementType.None;

        // Lazily computed by FullName; invalidated on Name/Namespace/DeclaringType changes.
        string fullname;

        protected Collection<GenericParameter> generic_parameters;

        public override string Name
        {
            get { return base.Name; }
            set
            {
                base.Name = value;
                fullname = null; // name changed -> cached full name is stale
            }
        }

        public virtual string Namespace
        {
            get { return @namespace; }
            set
            {
                @namespace = value;
                fullname = null; // namespace changed -> cached full name is stale
            }
        }

        public virtual bool IsValueType
        {
            get { return value_type; }
            set { value_type = value; }
        }

        public override ModuleDefinition Module
        {
            get
            {
                if (module != null)
                    return module;

                // Nested types inherit the module of their declaring type.
                var declaring_type = this.DeclaringType;
                if (declaring_type != null)
                    return declaring_type.Module;

                return null;
            }
        }

        IGenericParameterProvider IGenericContext.Type
        {
            get { return this; }
        }

        IGenericParameterProvider IGenericContext.Method
        {
            get { return null; }
        }

        GenericParameterType IGenericParameterProvider.GenericParameterType
        {
            get { return GenericParameterType.Type; }
        }

        public virtual bool HasGenericParameters
        {
            get { return !generic_parameters.IsNullOrEmpty (); }
        }

        public virtual Collection<GenericParameter> GenericParameters
        {
            get
            {
                if (generic_parameters != null)
                    return generic_parameters;

                // Lazily create the collection on first access.
                return generic_parameters = new GenericParameterCollection (this);
            }
        }

        // Scope (assembly/module reference) — delegated to the declaring type
        // for nested types, both for reads and writes.
        public virtual IMetadataScope Scope
        {
            get
            {
                var declaring_type = this.DeclaringType;
                if (declaring_type != null)
                    return declaring_type.Scope;

                return scope;
            }
            set
            {
                var declaring_type = this.DeclaringType;
                if (declaring_type != null)
                {
                    declaring_type.Scope = value;
                    return;
                }

                scope = value;
            }
        }

        public bool IsNested
        {
            get { return this.DeclaringType != null; }
        }

        public override TypeReference DeclaringType
        {
            get { return base.DeclaringType; }
            set
            {
                base.DeclaringType = value;
                fullname = null; // nesting changed -> cached full name is stale
            }
        }

        // "Namespace.Name", with "Declaring/Nested" separators for nested types.
        public override string FullName
        {
            get
            {
                if (fullname != null)
                    return fullname;

                fullname = this.TypeFullName ();

                if (IsNested)
                    fullname = DeclaringType.FullName + "/" + fullname;

                return fullname;
            }
        }

        // Shape predicates; all false here, overridden by TypeSpecification subclasses.
        public virtual bool IsByReference { get { return false; } }
        public virtual bool IsPointer { get { return false; } }
        public virtual bool IsSentinel { get { return false; } }
        public virtual bool IsArray { get { return false; } }
        public virtual bool IsGenericParameter { get { return false; } }
        public virtual bool IsGenericInstance { get { return false; } }
        public virtual bool IsRequiredModifier { get { return false; } }
        public virtual bool IsOptionalModifier { get { return false; } }
        public virtual bool IsPinned { get { return false; } }
        public virtual bool IsFunctionPointer { get { return false; } }

        public virtual bool IsPrimitive
        {
            get { return etype.IsPrimitive (); }
        }

        public virtual MetadataType MetadataType
        {
            get
            {
                switch (etype)
                {
                // No explicit element type: classify by value-type flag.
                case ElementType.None:
                    return IsValueType ? MetadataType.ValueType : MetadataType.Class;
                default:
                    // Enum values are aligned, so a direct cast is valid.
                    return (MetadataType) etype;
                }
            }
        }

        protected TypeReference (string @namespace, string name)
            : base (name)
        {
            this.@namespace = @namespace ?? string.Empty;
            this.token = new MetadataToken (TokenType.TypeRef, 0);
        }

        public TypeReference (string @namespace, string name, ModuleDefinition module, IMetadataScope scope)
            : this (@namespace, name)
        {
            this.module = module;
            this.scope = scope;
        }

        public TypeReference (string @namespace, string name, ModuleDefinition module, IMetadataScope scope, bool valueType) :
            this (@namespace, name, module, scope)
        {
            value_type = valueType;
        }

        // Element type of this reference; identity here, overridden by
        // specifications (arrays, pointers, ...) to unwrap.
        public virtual TypeReference GetElementType ()
        {
            return this;
        }

        // Resolves this reference to its definition via the owning module.
        // Throws NotSupportedException when no module is associated.
        public virtual TypeDefinition Resolve ()
        {
            var module = this.Module;
            if (module == null)
                throw new NotSupportedException ();

            return module.Resolve (this);
        }
    }

    static partial class Mixin
    {
        // True for the CLR primitive element types (bool, char, integers, floats,
        // native ints). Note: String and Object are NOT primitives here.
        public static bool IsPrimitive (this ElementType self)
        {
            switch (self)
            {
            case ElementType.Boolean:
            case ElementType.Char:
            case ElementType.I:
            case ElementType.U:
            case ElementType.I1:
            case ElementType.U1:
            case ElementType.I2:
            case ElementType.U2:
            case ElementType.I4:
            case ElementType.U4:
            case ElementType.I8:
            case ElementType.U8:
            case ElementType.R4:
            case ElementType.R8:
                return true;
            default:
                return false;
            }
        }

        // "Namespace.Name", or just "Name" when the namespace is empty.
        public static string TypeFullName (this TypeReference self)
        {
            return string.IsNullOrEmpty (self.Namespace)
                ? self.Name
                : self.Namespace + '.' + self.Name;
        }

        public static bool IsTypeOf (this TypeReference self, string @namespace, string name)
        {
            return self.Name == name
                && self.Namespace == @namespace;
        }

        // True when the raw element type marks a type specification
        // (array, byref, pointer, generic instance, modifiers, ...).
        public static bool IsTypeSpecification (this TypeReference type)
        {
            switch (type.etype)
            {
            case ElementType.Array:
            case ElementType.ByRef:
            case ElementType.CModOpt:
            case ElementType.CModReqD:
            case ElementType.FnPtr:
            case ElementType.GenericInst:
            case ElementType.MVar:
            case ElementType.Pinned:
            case ElementType.Ptr:
            case ElementType.SzArray:
            case ElementType.Sentinel:
            case ElementType.Var:
                return true;
            }

            return false;
        }

        // Resolve() that raises ResolutionException instead of returning null.
        public static TypeDefinition CheckedResolve (this TypeReference self)
        {
            var type = self.Resolve ();
            if (type == null)
                throw new ResolutionException (self);

            return type;
        }
    }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) Under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for Additional information regarding copyright ownership.
   The ASF licenses this file to You Under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed Under the License is distributed on an "AS Is" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations Under the License.
==================================================================== */

namespace NPOI.HSSF.Record
{
    using System.Text;
    using System;
    using NPOI.Util;
    using System.Globalization;

    /**
     * NOTE: Comment Associated with a Cell (1Ch)
     *
     * @author Yegor Kozlov
     */
    public class NoteRecord : StandardRecord, ICloneable
    {
        public static readonly NoteRecord[] EMPTY_ARRAY = { };

        public const short sid = 0x1C;

        /**
         * Flag indicating that the comment Is hidden (default)
         */
        public const short NOTE_HIDDEN = 0x0;

        /**
         * Flag indicating that the comment Is visible
         */
        public const short NOTE_VISIBLE = 0x2;

        private int field_1_row;
        private int field_2_col;
        private short field_3_flags;
        private int field_4_shapeid;
        // True when the author string must be serialized as 16-bit unicode;
        // kept in sync with field_6_author by the Author setter.
        private bool field_5_hasMultibyte;
        private String field_6_author;

        private const Byte DEFAULT_PADDING = (byte)0;

        /**
         * Saves padding byte value to reduce delta during round-trip serialization.<br/>
         *
         * The documentation is not clear about how padding should work. In any case
         * Excel(2007) does something different.
         */
        private Byte? field_7_padding;

        /**
         * Construct a new <c>NoteRecord</c> and
         * Fill its data with the default values
         */
        public NoteRecord()
        {
            field_6_author = "";
            field_3_flags = 0;
            field_7_padding = DEFAULT_PADDING; // seems to be always present regardless of author text
        }

        /**
         * Constructs a <c>NoteRecord</c> and Fills its fields
         * from the supplied <c>RecordInputStream</c>.
         *
         * @param in the stream to Read from
         */
        public NoteRecord(RecordInputStream in1)
        {
            field_1_row = in1.ReadShort();
            field_2_col = in1.ReadUShort();
            field_3_flags = in1.ReadShort();
            field_4_shapeid = in1.ReadUShort();
            int length = in1.ReadShort();
            field_5_hasMultibyte = in1.ReadByte() != 0x00;
            if (field_5_hasMultibyte)
            {
                field_6_author = StringUtil.ReadUnicodeLE(in1, length);
            }
            else
            {
                field_6_author = StringUtil.ReadCompressedUnicode(in1, length);
            }

            if (in1.Available() == 1)
            {
                field_7_padding = (byte)in1.ReadByte();
            }
            else if (in1.Available() == 2 && length == 0)
            {
                // If there's no author, may be double padded
                field_7_padding = (byte)in1.ReadByte();
                in1.ReadByte();
            }
        }

        /**
         * @return id of this record.
         */
        public override short Sid
        {
            get { return sid; }
        }

        /**
         * Serialize the record data into the supplied array of bytes
         *
         * @param offset offset in the <c>data</c>
         * @param data the data to Serialize into
         *
         * @return size of the record
         */
        public override void Serialize(ILittleEndianOutput out1)
        {
            out1.WriteShort(field_1_row);
            out1.WriteShort(field_2_col);
            out1.WriteShort(field_3_flags);
            out1.WriteShort(field_4_shapeid);
            out1.WriteShort(field_6_author.Length);
            out1.WriteByte(field_5_hasMultibyte ? 0x01 : 0x00);
            if (field_5_hasMultibyte)
            {
                StringUtil.PutUnicodeLE(field_6_author, out1);
            }
            else
            {
                StringUtil.PutCompressedUnicode(field_6_author, out1);
            }
            if (field_7_padding != null)
            {
                out1.WriteByte(Convert.ToInt32(field_7_padding, CultureInfo.InvariantCulture));
            }
        }

        /**
         * Size of record
         */
        protected override int DataSize
        {
            get
            {
                return 11 // 5 shorts + 1 byte
                    + field_6_author.Length * (field_5_hasMultibyte ? 2 : 1)
                    + (field_7_padding == null ? 0 : 1);
            }
        }

        /**
         * Convert this record to string.
         * Used by BiffViewer and other utulities.
         */
        public override String ToString()
        {
            StringBuilder buffer = new StringBuilder();

            buffer.Append("[NOTE]\n");
            buffer.Append("    .recordid = 0x" + StringUtil.ToHexString(Sid) + ", size = " + RecordSize + "\n");
            buffer.Append("    .row =     " + field_1_row + "\n");
            buffer.Append("    .col =     " + field_2_col + "\n");
            buffer.Append("    .flags =   " + field_3_flags + "\n");
            buffer.Append("    .shapeid = " + field_4_shapeid + "\n");
            buffer.Append("    .author =  " + field_6_author + "\n");
            buffer.Append("[/NOTE]\n");
            return buffer.ToString();
        }

        /**
         * Return the row that Contains the comment
         *
         * @return the row that Contains the comment
         */
        public int Row
        {
            get { return field_1_row; }
            set { field_1_row = value; }
        }

        /**
         * Return the column that Contains the comment
         *
         * @return the column that Contains the comment
         */
        public int Column
        {
            get { return field_2_col; }
            set { field_2_col = value; }
        }

        /**
         * Options flags.
         *
         * @return the options flag
         * @see #NOTE_VISIBLE
         * @see #NOTE_HIDDEN
         */
        public short Flags
        {
            get { return field_3_flags; }
            set { field_3_flags = value; }
        }

        /**
         * Object id for OBJ record that Contains the comment
         */
        public int ShapeId
        {
            get { return field_4_shapeid; }
            set { field_4_shapeid = value; }
        }

        /**
         * Name of the original comment author
         *
         * @return the name of the original author of the comment
         */
        public String Author
        {
            get { return field_6_author; }
            set
            {
                field_6_author = value;
                // Keep the multibyte flag consistent with the new author text.
                field_5_hasMultibyte = StringUtil.HasMultibyte(value);
            }
        }

        /**
         * For unit testing only!
         */
        internal bool AuthorIsMultibyte
        {
            get { return field_5_hasMultibyte; }
        }

        public override Object Clone()
        {
            NoteRecord rec = new NoteRecord();
            rec.field_1_row = field_1_row;
            rec.field_2_col = field_2_col;
            rec.field_3_flags = field_3_flags;
            rec.field_4_shapeid = field_4_shapeid;
            // FIX: field_5_hasMultibyte and field_7_padding were previously not copied.
            // Without field_5, a clone of a record with a multibyte author would
            // serialize the author as compressed unicode (data corruption); without
            // field_7, the clone always got DEFAULT_PADDING from the constructor
            // instead of the original's padding, changing the serialized size.
            rec.field_5_hasMultibyte = field_5_hasMultibyte;
            rec.field_6_author = field_6_author;
            rec.field_7_padding = field_7_padding;
            return rec;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using TermPositions = Lucene.Net.Index.TermPositions;

namespace Lucene.Net.Search
{

    /// <summary>Expert: Scoring functionality for phrase queries.
    /// <br/>A document is considered matching if it contains the phrase-query terms
    /// at "valid" positons. What "valid positions" are
    /// depends on the type of the phrase query: for an exact phrase query terms are required
    /// to appear in adjacent locations, while for a sloppy phrase query some distance between
    /// the terms is allowed. The abstract method {@link #PhraseFreq()} of extending classes
    /// is invoked for each document containing all the phrase query terms, in order to
    /// compute the frequency of the phrase query in that document. A non zero frequency
    /// means a match.
    /// </summary>
    abstract class PhraseScorer : Scorer
    {
        private Weight weight;
        protected internal byte[] norms;
        protected internal float value_Renamed;

        private bool firstTime = true;
        private bool more = true;
        protected internal PhraseQueue pq;
        // Singly-linked list of positions, kept sorted via pq; 'first' always
        // holds the smallest current doc, so first.doc is the scorer's doc.
        protected internal PhrasePositions first, last;

        private float freq; // phrase frequency in current doc as computed by PhraseFreq().

        internal PhraseScorer(Weight weight, TermPositions[] tps, int[] offsets, Similarity similarity, byte[] norms) : base(similarity)
        {
            this.norms = norms;
            this.weight = weight;
            this.value_Renamed = weight.GetValue();

            // convert tps to a list of phrase positions.
            // note: phrase-position differs from term-position in that its position
            // reflects the phrase offset: pp.pos = tp.pos - offset.
            // this allows to easily identify a matching (exact) phrase
            // when all PhrasePositions have exactly the same position.
            for (int i = 0; i < tps.Length; i++)
            {
                PhrasePositions pp = new PhrasePositions(tps[i], offsets[i]);
                if (last != null)
                {
                    // add next to end of list
                    last.next = pp;
                }
                else
                {
                    first = pp;
                }
                last = pp;
            }

            pq = new PhraseQueue(tps.Length); // construct empty pq
            first.doc = - 1; // sentinel: nothing consumed yet
        }

        /// <deprecated> use {@link #DocID()} instead.
        /// </deprecated>
        [Obsolete("use DocID() instead.")]
        public override int Doc()
        {
            return first.doc;
        }

        public override int DocID()
        {
            return first.doc;
        }

        /// <deprecated> use {@link #NextDoc()} instead.
        /// </deprecated>
        [Obsolete("use NextDoc() instead.")]
        public override bool Next()
        {
            return NextDoc() != NO_MORE_DOCS;
        }

        public override int NextDoc()
        {
            if (firstTime)
            {
                Init();
                firstTime = false;
            }
            else if (more)
            {
                more = last.Next(); // trigger further scanning
            }
            if (!DoNext())
            {
                first.doc = NO_MORE_DOCS;
            }
            return first.doc;
        }

        // next without initial increment
        private bool DoNext()
        {
            while (more)
            {
                while (more && first.doc < last.doc)
                {
                    // find doc w/ all the terms
                    more = first.SkipTo(last.doc); // skip first upto last
                    FirstToLast(); // and move it to the end
                }

                if (more)
                {
                    // found a doc with all of the terms
                    freq = PhraseFreq(); // check for phrase
                    if (freq == 0.0f)
                        // no match
                        more = last.Next(); // trigger further scanning
                    else
                        return true; // found a match
                }
            }
            return false; // no more matches
        }

        public override float Score()
        {
            //System.out.println("scoring " + first.doc);
            float raw = GetSimilarity().Tf(freq) * value_Renamed; // raw score
            return norms == null ? raw : raw * Similarity.DecodeNorm(norms[first.doc]); // normalize
        }

        /// <deprecated> use {@link #Advance(int)} instead.
        /// </deprecated>
        [Obsolete("use Advance(int) instead.")]
        public override bool SkipTo(int target)
        {
            return Advance(target) != NO_MORE_DOCS;
        }

        public override int Advance(int target)
        {
            firstTime = false;
            for (PhrasePositions pp = first; more && pp != null; pp = pp.next)
            {
                more = pp.SkipTo(target);
            }
            if (more)
            {
                Sort(); // re-sort
            }
            if (!DoNext())
            {
                first.doc = NO_MORE_DOCS;
            }
            return first.doc;
        }

        /// <summary> For a document containing all the phrase query terms, compute the
        /// frequency of the phrase in that document.
        /// A non zero frequency means a match.
        /// <br/>Note, that containing all phrase terms does not guarantee a match - they have to be found in matching locations.
        /// </summary>
        /// <returns> frequency of the phrase in current doc, 0 if not found.
        /// </returns>
        protected internal abstract float PhraseFreq();

        // First-time positioning of every term stream, then sort into list order.
        private void Init()
        {
            for (PhrasePositions pp = first; more && pp != null; pp = pp.next)
            {
                more = pp.Next();
            }
            if (more)
            {
                Sort();
            }
        }

        // Rebuild the pq from the linked list, then rebuild the list in pq order.
        private void Sort()
        {
            pq.Clear();
            for (PhrasePositions pp = first; pp != null; pp = pp.next)
            {
                pq.Add(pp);
            }
            PqToList();
        }

        // Drain the pq back into the first/last linked list (ascending order).
        protected internal void PqToList()
        {
            last = first = null;
            while (pq.Top() != null)
            {
                PhrasePositions pp = (PhrasePositions) pq.Pop();
                if (last != null)
                {
                    // add next to end of list
                    last.next = pp;
                }
                else
                    first = pp;
                last = pp;
                pp.next = null;
            }
        }

        protected internal void FirstToLast()
        {
            last.next = first; // move first to end of list
            last = first;
            first = first.next;
            last.next = null;
        }

        public override Explanation Explain(int doc)
        {
            Explanation tfExplanation = new Explanation();

            int d = Advance(doc);
            float phraseFreq = (d == doc) ? freq : 0.0f;
            tfExplanation.SetValue(GetSimilarity().Tf(phraseFreq));
            tfExplanation.SetDescription("tf(phraseFreq=" + phraseFreq + ")");

            return tfExplanation;
        }

        public override System.String ToString()
        {
            return "scorer(" + weight + ")";
        }
    }
}
using System;
using System.Diagnostics;
using System.Globalization;
using System.Net;
using System.Threading;
using Orleans.AzureUtils;
using Orleans.Runtime.Configuration;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Orleans.Logging;
using Orleans.AzureUtils.Utilities;
using Orleans.Hosting.AzureCloudServices;

namespace Orleans.Runtime.Host
{
    /// <summary>
    /// Wrapper class for an Orleans silo running in the current host process.
    /// </summary>
    public class AzureSilo
    {
        /// <summary>
        /// Amount of time to pause before retrying if a secondary silo is unable to connect to the primary silo for this deployment.
        /// Defaults to 5 seconds.
        /// </summary>
        public TimeSpan StartupRetryPause { get; set; }

        /// <summary>
        /// Number of times to retrying if a secondary silo is unable to connect to the primary silo for this deployment.
        /// Defaults to 120 times.
        /// </summary>
        public int MaxRetries { get; set; }

        /// <summary>
        /// The name of the configuration key value for locating the DataConnectionString setting from the Azure configuration for this role.
        /// Defaults to <c>DataConnectionString</c>
        /// </summary>
        public string DataConnectionConfigurationSettingName { get; set; }

        /// <summary>
        /// The name of the configuration key value for locating the OrleansSiloEndpoint setting from the Azure configuration for this role.
        /// Defaults to <c>OrleansSiloEndpoint</c>
        /// </summary>
        public string SiloEndpointConfigurationKeyName { get; set; }

        /// <summary>
        /// The name of the configuration key value for locating the OrleansProxyEndpoint setting from the Azure configuration for this role.
        /// Defaults to <c>OrleansProxyEndpoint</c>
        /// </summary>
        public string ProxyEndpointConfigurationKeyName { get; set; }

        private SiloHost host;
        private OrleansSiloInstanceManager siloInstanceManager;
        private SiloInstanceTableEntry myEntry;
        private readonly ILogger logger;
        private readonly IServiceRuntimeWrapper serviceRuntimeWrapper;
        //TODO: hook this up with SiloBuilder when SiloBuilder supports create AzureSilo
        private static ILoggerFactory DefaultLoggerFactory = CreateDefaultLoggerFactory("AzureSilo.log");
        private readonly ILoggerFactory loggerFactory = DefaultLoggerFactory;

        public AzureSilo()
            : this(new ServiceRuntimeWrapper(DefaultLoggerFactory), DefaultLoggerFactory)
        {
        }

        /// <summary>
        /// Constructor
        /// </summary>
        public AzureSilo(ILoggerFactory loggerFactory)
            : this(new ServiceRuntimeWrapper(loggerFactory), loggerFactory)
        {
        }

        // Builds a logger factory that writes to the given file and, when a
        // console is available, to the console as well.
        public static ILoggerFactory CreateDefaultLoggerFactory(string filePath)
        {
            var factory = new LoggerFactory();
            factory.AddProvider(new FileLoggerProvider(filePath));
            if (ConsoleText.IsConsoleAvailable)
                factory.AddConsole();
            return factory;
        }

        internal AzureSilo(IServiceRuntimeWrapper serviceRuntimeWrapper, ILoggerFactory loggerFactory)
        {
            this.serviceRuntimeWrapper = serviceRuntimeWrapper;
            DataConnectionConfigurationSettingName = AzureConstants.DataConnectionConfigurationSettingName;
            SiloEndpointConfigurationKeyName = AzureConstants.SiloEndpointConfigurationKeyName;
            ProxyEndpointConfigurationKeyName = AzureConstants.ProxyEndpointConfigurationKeyName;
            StartupRetryPause = AzureConstants.STARTUP_TIME_PAUSE; // 5 seconds
            MaxRetries = AzureConstants.MAX_RETRIES;  // 120 x 5s = Total: 10 minutes
            this.loggerFactory = loggerFactory;
            logger = loggerFactory.CreateLogger<AzureSilo>();
        }

        /// <summary>
        /// Async method to validate specific cluster configuration
        /// </summary>
        /// <param name="config"></param>
        /// <returns>Task object of boolean type for this async method </returns>
        public async Task<bool> ValidateConfiguration(ClusterConfiguration config)
        {
            // Only Azure-table liveness needs storage validation; other liveness
            // providers are accepted as-is.
            if (config.Globals.LivenessType == GlobalConfiguration.LivenessProviderType.AzureTable)
            {
                string clusterId = config.Globals.ClusterId ?? serviceRuntimeWrapper.DeploymentId;
                string connectionString = config.Globals.DataConnectionString ??
                    serviceRuntimeWrapper.GetConfigurationSettingValue(DataConnectionConfigurationSettingName);

                try
                {
                    // Probe the silo-instance table; any storage failure invalidates the config.
                    var manager = siloInstanceManager ?? await OrleansSiloInstanceManager.GetManager(clusterId, connectionString, loggerFactory);
                    var instances = await manager.DumpSiloInstanceTable();
                    logger.Debug(instances);
                }
                catch (Exception exc)
                {
                    var error = String.Format("Connecting to the storage table has failed with {0}", LogFormatter.PrintException(exc));
                    Trace.TraceError(error);
                    logger.Error((int)AzureSiloErrorCode.AzureTable_34, error, exc);
                    return false;
                }
            }

            return true;
        }

        /// <summary>
        /// Default cluster configuration
        /// </summary>
        /// <returns>Default ClusterConfiguration </returns>
        public static ClusterConfiguration DefaultConfiguration()
        {
            return DefaultConfiguration(new ServiceRuntimeWrapper(DefaultLoggerFactory));
        }

        internal static ClusterConfiguration DefaultConfiguration(IServiceRuntimeWrapper serviceRuntimeWrapper)
        {
            var config = new ClusterConfiguration();

            config.Globals.LivenessType = GlobalConfiguration.LivenessProviderType.AzureTable;
            config.Globals.ClusterId = serviceRuntimeWrapper.DeploymentId;
            try
            {
                config.Globals.DataConnectionString = serviceRuntimeWrapper.GetConfigurationSettingValue(AzureConstants.DataConnectionConfigurationSettingName);
            }
            catch (Exception exc)
            {
                // Outside the Azure role environment the setting lookup throws
                // RoleEnvironmentException (matched by name to avoid a hard
                // assembly reference); treat that as "no connection string".
                if (exc.GetType().Name.Contains("RoleEnvironmentException"))
                {
                    config.Globals.DataConnectionString = null;
                }
                else
                {
                    throw;
                }
            }

            return config;
        }

        #region Azure RoleEntryPoint methods

        /// <summary>
        /// Initialize this Orleans silo for execution. Config data will be read from silo config file as normal
        /// </summary>
        /// <param name="deploymentId">Azure ClusterId this silo is running under. If null, defaults to the value from the configuration.</param>
        /// <param name="connectionString">Azure DataConnectionString. If null, defaults to the DataConnectionString setting from the Azure configuration for this role.</param>
        /// <returns><c>true</c> is the silo startup was successful</returns>
        public bool Start(string deploymentId = null, string connectionString = null)
        {
            return Start(null, deploymentId, connectionString);
        }

        /// <summary>
        /// Initialize this Orleans silo for execution
        /// </summary>
        /// <param name="config">Use the specified config data.</param>
        /// <param name="connectionString">Azure DataConnectionString. If null, defaults to the DataConnectionString setting from the Azure configuration for this role.</param>
        /// <returns><c>true</c> is the silo startup was successful</returns>
        public bool Start(ClusterConfiguration config, string connectionString = null)
        {
            if (config == null) throw new ArgumentNullException(nameof(config));
            return Start(config, null, connectionString);
        }

        /// <summary>
        /// Initialize this Orleans silo for execution with the specified Azure clusterId
        /// </summary>
        /// <param name="config">If null, Config data will be read from silo config file as normal, otherwise use the specified config data.</param>
        /// <param name="clusterId">Azure ClusterId this silo is running under</param>
        /// <param name="connectionString">Azure DataConnectionString. If null, defaults to the DataConnectionString setting from the Azure configuration for this role.</param>
        /// <returns><c>true</c> if the silo startup was successful</returns>
        internal bool Start(ClusterConfiguration config, string clusterId, string connectionString)
        {
            // config and clusterId are mutually exclusive: when config is given,
            // the clusterId must come from it.
            if (config != null && clusterId != null)
                throw new ArgumentException("Cannot use config and clusterId on the same time");

            // Program ident
            Trace.TraceInformation("Starting {0} v{1}", this.GetType().FullName, RuntimeVersion.Current);

            // Read endpoint info for this instance from Azure config
            string instanceName = serviceRuntimeWrapper.InstanceName;

            // Configure this Orleans silo instance
            if (config == null)
            {
                host = new SiloHost(instanceName);
                host.LoadOrleansConfig(); // Load config from file + Initializes logger configurations
            }
            else
            {
                host = new SiloHost(instanceName, config); // Use supplied config data + Initializes logger configurations
            }

            IPEndPoint myEndpoint = serviceRuntimeWrapper.GetIPEndpoint(SiloEndpointConfigurationKeyName);
            IPEndPoint proxyEndpoint = serviceRuntimeWrapper.GetIPEndpoint(ProxyEndpointConfigurationKeyName);

            host.SetSiloType(Silo.SiloType.Secondary);

            int generation = SiloAddress.AllocateNewGeneration();

            // Bootstrap this Orleans silo instance

            // If clusterId was not direclty provided, take the value in the config. If it is not
            // in the config too, just take the ClusterId from Azure
            if (clusterId == null)
                clusterId = string.IsNullOrWhiteSpace(host.Config.Globals.ClusterId)
                    ? serviceRuntimeWrapper.DeploymentId
                    : host.Config.Globals.ClusterId;

            // Row describing this silo instance in the Azure membership table.
            myEntry = new SiloInstanceTableEntry
            {
                DeploymentId = clusterId,
                Address = myEndpoint.Address.ToString(),
                Port = myEndpoint.Port.ToString(CultureInfo.InvariantCulture),
                Generation = generation.ToString(CultureInfo.InvariantCulture),
                HostName = host.Config.GetOrCreateNodeConfigurationForSilo(host.Name).DNSHostName,
                ProxyPort = (proxyEndpoint != null ? proxyEndpoint.Port : 0).ToString(CultureInfo.InvariantCulture),
                RoleName = serviceRuntimeWrapper.RoleName,
                SiloName = instanceName,
                UpdateZone = serviceRuntimeWrapper.UpdateDomain.ToString(CultureInfo.InvariantCulture),
                FaultZone = serviceRuntimeWrapper.FaultDomain.ToString(CultureInfo.InvariantCulture),
                StartTime = LogFormatter.PrintDate(DateTime.UtcNow),
                PartitionKey = clusterId,
                RowKey = myEndpoint.Address + "-" + myEndpoint.Port + "-" + generation
            };

            if (connectionString == null)
                connectionString = serviceRuntimeWrapper.GetConfigurationSettingValue(DataConnectionConfigurationSettingName);

            try
            {
                // NOTE: blocks on the async GetManager via .Result with a bounded timeout.
                siloInstanceManager = OrleansSiloInstanceManager.GetManager(
                    clusterId, connectionString, this.loggerFactory)
                    .WithTimeout(AzureTableDefaultPolicies.TableCreationTimeout).Result;
            }
            catch (Exception exc)
            {
                var error = String.Format("Failed to create OrleansSiloInstanceManager. This means CreateTableIfNotExist for silo instance table has failed with {0}",
                    LogFormatter.PrintException(exc));
                Trace.TraceError(error);
                logger.Error((int)AzureSiloErrorCode.AzureTable_34, error, exc);
                throw new OrleansException(error, exc);
            }

            // Always use Azure table for membership when running silo in Azure
            host.SetSiloLivenessType(GlobalConfiguration.LivenessProviderType.AzureTable);
            if (host.Config.Globals.ReminderServiceType == GlobalConfiguration.ReminderServiceProviderType.NotSpecified ||
                host.Config.Globals.ReminderServiceType == GlobalConfiguration.ReminderServiceProviderType.ReminderTableGrain)
            {
                host.SetReminderServiceType(GlobalConfiguration.ReminderServiceProviderType.AzureTable);
            }
            host.SetExpectedClusterSize(serviceRuntimeWrapper.RoleInstanceCount);
            siloInstanceManager.RegisterSiloInstance(myEntry);

            // Initialize this Orleans silo instance
            host.SetDeploymentId(clusterId, connectionString);
            host.SetSiloEndpoint(myEndpoint, generation);
            host.SetProxyEndpoint(proxyEndpoint);

            host.InitializeOrleansSilo();
            return StartSilo();
        }

        /// <summary>
        /// Makes this Orleans silo begin executing and become active.
        /// Note: This method call will only return control back to the caller when the silo is shutdown.
        /// </summary>
        public void Run()
        {
            RunImpl();
        }

        /// <summary>
        /// Makes this Orleans silo begin executing and become active.
        /// Note: This method call will only return control back to the caller when the silo is shutdown or
        /// an external request for cancellation has been issued.
        /// </summary>
        /// <param name="cancellationToken">Cancellation token.</param>
        public void Run(CancellationToken cancellationToken)
        {
            RunImpl(cancellationToken);
        }

        /// <summary>
        /// Stop this Orleans silo executing.
        /// </summary>
        public void Stop()
        {
            logger.Info(ErrorCode.Runtime_Error_100290, "Stopping {0}", this.GetType().FullName);
            // Unsubscribe first so the role-stopping handler does not double-shutdown.
            serviceRuntimeWrapper.UnsubscribeFromStoppingNotification(this, HandleAzureRoleStopping);
            host.ShutdownOrleansSilo();
            logger.Info(ErrorCode.Runtime_Error_100291, "Orleans silo '{0}' shutdown.", host.Name);
        }

        #endregion

        private bool StartSilo()
        {
            logger.Info(ErrorCode.Runtime_Error_100292, "Starting Orleans silo '{0}' as a {1} node.", host.Name, host.Type);

            bool ok = host.StartOrleansSilo();

            if (ok)
                logger.Info(ErrorCode.Runtime_Error_100293, "Successfully started Orleans silo '{0}' as a {1} node.", host.Name, host.Type);
            else
                logger.Error(ErrorCode.Runtime_Error_100285, string.Format("Failed to start Orleans silo '{0}' as a {1} node.", host.Name, host.Type));

            return ok;
        }

        private void HandleAzureRoleStopping(object sender, object e)
        {
            // Try to perform gracefull shutdown of Silo when we detect Azure role instance is being stopped
            logger.Info(ErrorCode.SiloStopping, "HandleAzureRoleStopping - starting to shutdown silo");
            host.ShutdownOrleansSilo();
        }

        /// <summary>
        /// Run method helper.
        /// </summary>
        /// <remarks>
        /// Makes this Orleans silo begin executing and become active.
        /// Note: This method call will only return control back to the caller when the silo is shutdown or
        /// an external request for cancellation has been issued.
        /// </remarks>
        /// <param name="cancellationToken">Optional cancellation token.</param>
        private void RunImpl(CancellationToken? cancellationToken = null)
        {
            logger.Info(ErrorCode.Runtime_Error_100289, "OrleansAzureHost entry point called");

            // Hook up to receive notification of Azure role stopping events
            serviceRuntimeWrapper.SubscribeForStoppingNotification(this, HandleAzureRoleStopping);

            if (host.IsStarted)
            {
                // Block until the silo shuts down (or cancellation is requested).
                if (cancellationToken.HasValue)
                    host.WaitForOrleansSiloShutdown(cancellationToken.Value);
                else
                    host.WaitForOrleansSiloShutdown();
            }
            else
                throw new Exception("Silo failed to start correctly - aborting");
        }
    }
}
// SPDX-License-Identifier: MIT
// Copyright wtfsckgh@gmail.com
// Copyright iced contributors

#if INSTR_INFO
using System;
using System.Collections.Generic;
using System.Diagnostics;
using Iced.Intel;
using Xunit;

namespace Iced.UnitTests.Intel.InstructionInfoTests {
    /// <summary>
    /// Compares two <see cref="UsedRegister"/> values by register and access kind.
    /// </summary>
    sealed class UsedRegisterEqualityComparer : IEqualityComparer<UsedRegister> {
        // Stateless singleton; the constructor is private so Instance is the only instance.
        public static readonly UsedRegisterEqualityComparer Instance = new UsedRegisterEqualityComparer();
        UsedRegisterEqualityComparer() { }

        public bool Equals(UsedRegister x, UsedRegister y) =>
            x.Register == y.Register && x.Access == y.Access;

        public int GetHashCode(UsedRegister obj) =>
            (int)obj.Register ^ (int)obj.Access;
    }

    /// <summary>
    /// Compares two <see cref="UsedMemory"/> values field by field.
    /// </summary>
    sealed class UsedMemoryEqualityComparer : IEqualityComparer<UsedMemory> {
        // Stateless singleton; the constructor is private so Instance is the only instance.
        public static readonly UsedMemoryEqualityComparer Instance = new UsedMemoryEqualityComparer();
        UsedMemoryEqualityComparer() { }

        public bool Equals(UsedMemory x, UsedMemory y) =>
            x.Segment == y.Segment &&
            x.Base == y.Base &&
            x.Index == y.Index &&
            x.Scale == y.Scale &&
            x.Displacement == y.Displacement &&
            x.MemorySize == y.MemorySize &&
            x.Access == y.Access &&
            x.AddressSize == y.AddressSize &&
            x.VsibSize == y.VsibSize;

        public int GetHashCode(UsedMemory obj) {
            // XOR every field into the hash; the shift amounts just spread the
            // small enum values across different bit ranges.
            int hc = 0;
            hc ^= (int)obj.Segment;
            hc ^= (int)obj.Base << 8;
            hc ^= (int)obj.Index << 16;
            hc ^= obj.Scale << 28;
            hc ^= obj.Displacement.GetHashCode();
            hc ^= (int)obj.MemorySize << 12;
            hc ^= (int)obj.Access << 24;
            hc ^= (int)obj.AddressSize << 3;
            hc ^= (int)obj.VsibSize << 11;
            return hc;
        }
    }

    /// <summary>
    /// Base class for instruction-info tests: decodes (or hand-builds) an instruction
    /// and verifies everything <see cref="InstructionInfoFactory"/> and the
    /// <see cref="Instruction"/>/<see cref="Code"/> extension properties report about it
    /// against the expectations in an <c>InstructionInfoTestCase</c>.
    /// </summary>
    public abstract class InstructionInfoTest {
        /// <summary>
        /// Runs one test case: obtains an <see cref="Instruction"/> from <paramref name="hexBytes"/>
        /// (or constructs it directly for "special" cases that can't be round-tripped through the
        /// decoder), then checks instruction info, FPU stack info, used registers/memory, per-operand
        /// access, RFLAGS consistency, and that all the redundant info APIs agree with each other.
        /// </summary>
        protected void TestInstructionInfo(int bitness, string hexBytes, Code code, DecoderOptions options, int lineNo, InstructionInfoTestCase testCase) {
            var codeBytes = HexUtils.ToByteArray(hexBytes);
            Instruction instruction;
            if (testCase.IsSpecial) {
                // Special cases are built by hand instead of decoded.
                if (bitness == 16 && code == Code.Popw_CS && hexBytes == "0F") {
                    // 16-bit "pop cs" (0F) can't be decoded normally; synthesize it.
                    instruction = default;
                    instruction.Code = Code.Popw_CS;
                    instruction.Op0Kind = OpKind.Register;
                    instruction.Op0Register = Register.CS;
                    instruction.CodeSize = CodeSize.Code16;
                    instruction.Length = 1;
                }
                else if (code <= Code.DeclareQword) {
                    // db/dw/dd/dq pseudo-instructions: build the declare-data instruction
                    // from the known fixed test bytes.
                    instruction = default;
                    instruction.Code = code;
                    instruction.DeclareDataCount = 1;
                    Assert.Equal(64, bitness);
                    instruction.CodeSize = CodeSize.Code64;
                    switch (code) {
                    case Code.DeclareByte:
                        Assert.Equal("66", hexBytes);
                        instruction.SetDeclareByteValue(0, 0x66);
                        break;
                    case Code.DeclareWord:
                        Assert.Equal("6644", hexBytes);
                        instruction.SetDeclareWordValue(0, 0x4466);
                        break;
                    case Code.DeclareDword:
                        Assert.Equal("664422EE", hexBytes);
                        instruction.SetDeclareDwordValue(0, 0xEE224466);
                        break;
                    case Code.DeclareQword:
                        Assert.Equal("664422EE12345678", hexBytes);
                        instruction.SetDeclareQwordValue(0, 0x78563412EE224466);
                        break;
                    default:
                        throw new InvalidOperationException();
                    }
                }
                else {
                    // Remaining special cases are a WAIT (9B) prefix followed by an
                    // fn* instruction: decode past the WAIT byte and map the fn* code
                    // to its waiting f* counterpart.
                    var decoder = CreateDecoder(bitness, codeBytes, options);
                    instruction = decoder.Decode();
                    if (codeBytes.Length > 1 && codeBytes[0] == 0x9B && instruction.Length == 1) {
                        instruction = decoder.Decode();
                        instruction.Code = instruction.Code switch {
                            Code.Fnstenv_m14byte => Code.Fstenv_m14byte,
                            Code.Fnstenv_m28byte => Code.Fstenv_m28byte,
                            Code.Fnstcw_m2byte => Code.Fstcw_m2byte,
                            Code.Fneni => Code.Feni,
                            Code.Fndisi => Code.Fdisi,
                            Code.Fnclex => Code.Fclex,
                            Code.Fninit => Code.Finit,
                            Code.Fnsetpm => Code.Fsetpm,
                            Code.Fnsave_m94byte => Code.Fsave_m94byte,
                            Code.Fnsave_m108byte => Code.Fsave_m108byte,
                            Code.Fnstsw_m2byte => Code.Fstsw_m2byte,
                            Code.Fnstsw_AX => Code.Fstsw_AX,
                            Code.Fnstdw_AX => Code.Fstdw_AX,
                            Code.Fnstsg_AX => Code.Fstsg_AX,
                            _ => throw new InvalidOperationException(),
                        };
                    }
                    else
                        throw new InvalidOperationException();
                }
            }
            else {
                // Normal case: just decode the bytes.
                var decoder = CreateDecoder(bitness, codeBytes, options);
                instruction = decoder.Decode();
            }
            Assert.Equal(code, instruction.Code);
            Assert.Equal(testCase.StackPointerIncrement, instruction.StackPointerIncrement);

            // Verify the InstructionInfo produced with default options.
            var info = new InstructionInfoFactory().GetInfo(instruction);
            Assert.Equal(testCase.Encoding, info.Encoding);
            Assert.Equal(testCase.CpuidFeatures, info.CpuidFeatures);
            Assert.Equal(testCase.RflagsRead, info.RflagsRead);
            Assert.Equal(testCase.RflagsUndefined, info.RflagsUndefined);
            Assert.Equal(testCase.RflagsWritten, info.RflagsWritten);
            Assert.Equal(testCase.RflagsCleared, info.RflagsCleared);
            Assert.Equal(testCase.RflagsSet, info.RflagsSet);
            Assert.Equal(testCase.IsPrivileged, info.IsPrivileged);
            Assert.Equal(testCase.IsStackInstruction, info.IsStackInstruction);
            Assert.Equal(testCase.IsSaveRestoreInstruction, info.IsSaveRestoreInstruction);
            Assert.Equal(testCase.FlowControl, info.FlowControl);
            Assert.Equal(testCase.Op0Access, info.Op0Access);
            Assert.Equal(testCase.Op1Access, info.Op1Access);
            Assert.Equal(testCase.Op2Access, info.Op2Access);
            Assert.Equal(testCase.Op3Access, info.Op3Access);
            Assert.Equal(testCase.Op4Access, info.Op4Access);

            // FPU stack behavior.
            var fpuInfo = instruction.GetFpuStackIncrementInfo();
            Assert.Equal(testCase.FpuTopIncrement, fpuInfo.Increment);
            Assert.Equal(testCase.FpuConditionalTop, fpuInfo.Conditional);
            Assert.Equal(testCase.FpuWritesTop, fpuInfo.WritesTop);

            // Compare used memory/registers as sets (order doesn't matter).
            // GetUsedRegisters() canonicalizes both sides so e.g. AL+AH collapse to AX.
            Assert.Equal(
                new HashSet<UsedMemory>(testCase.UsedMemory, UsedMemoryEqualityComparer.Instance),
                new HashSet<UsedMemory>(info.GetUsedMemory(), UsedMemoryEqualityComparer.Instance));
            Assert.Equal(
                new HashSet<UsedRegister>(GetUsedRegisters(testCase.UsedRegisters), UsedRegisterEqualityComparer.Instance),
                new HashSet<UsedRegister>(GetUsedRegisters(info.GetUsedRegisters()), UsedRegisterEqualityComparer.Instance));

            // Compile-time reminder: update the switch below if the max op count changes.
            Static.Assert(IcedConstants.MaxOpCount == 5 ? 0 : -1);
            Debug.Assert(instruction.OpCount <= IcedConstants.MaxOpCount);
            // GetOpAccess(i) must agree with the per-operand expectations...
            for (int i = 0; i < instruction.OpCount; i++) {
                switch (i) {
                case 0:
                    Assert.Equal(testCase.Op0Access, info.GetOpAccess(i));
                    break;
                case 1:
                    Assert.Equal(testCase.Op1Access, info.GetOpAccess(i));
                    break;
                case 2:
                    Assert.Equal(testCase.Op2Access, info.GetOpAccess(i));
                    break;
                case 3:
                    Assert.Equal(testCase.Op3Access, info.GetOpAccess(i));
                    break;
                case 4:
                    Assert.Equal(testCase.Op4Access, info.GetOpAccess(i));
                    break;
                default:
                    throw new InvalidOperationException();
                }
            }
            // ...and report None for operand slots past OpCount.
            for (int i = instruction.OpCount; i < IcedConstants.MaxOpCount; i++)
                Assert.Equal(OpAccess.None, info.GetOpAccess(i));

            // RFLAGS consistency: written/cleared/set/undefined must be pairwise disjoint,
            // and together they must equal RflagsModified.
            Assert.Equal(RflagsBits.None, info.RflagsWritten & (info.RflagsCleared | info.RflagsSet | info.RflagsUndefined));
            Assert.Equal(RflagsBits.None, info.RflagsCleared & (info.RflagsWritten | info.RflagsSet | info.RflagsUndefined));
            Assert.Equal(RflagsBits.None, info.RflagsSet & (info.RflagsWritten | info.RflagsCleared | info.RflagsUndefined));
            Assert.Equal(RflagsBits.None, info.RflagsUndefined & (info.RflagsWritten | info.RflagsCleared | info.RflagsSet));
            Assert.Equal(info.RflagsWritten | info.RflagsCleared | info.RflagsSet | info.RflagsUndefined, info.RflagsModified);

            // Each InstructionInfoOptions combination must only drop the suppressed data,
            // everything else must match the default-options result.
            var info2 = new InstructionInfoFactory().GetInfo(instruction, InstructionInfoOptions.None);
            CheckEqual(ref info, ref info2, hasRegs2: true, hasMem2: true);
            info2 = new InstructionInfoFactory().GetInfo(instruction, InstructionInfoOptions.NoMemoryUsage);
            CheckEqual(ref info, ref info2, hasRegs2: true, hasMem2: false);
            info2 = new InstructionInfoFactory().GetInfo(instruction, InstructionInfoOptions.NoRegisterUsage);
            CheckEqual(ref info, ref info2, hasRegs2: false, hasMem2: true);
            info2 = new InstructionInfoFactory().GetInfo(instruction, InstructionInfoOptions.NoRegisterUsage | InstructionInfoOptions.NoMemoryUsage);
            CheckEqual(ref info, ref info2, hasRegs2: false, hasMem2: false);

            // The Code extension methods and the Instruction properties must agree
            // with the InstructionInfo values.
            Assert.Equal(info.Encoding, instruction.Code.Encoding());
#if ENCODER && OPCODE_INFO
            Assert.Equal(code.ToOpCode().Encoding, instruction.Code.Encoding());
#endif
            Assert.Equal(info.CpuidFeatures, instruction.Code.CpuidFeatures());
            Assert.Equal(info.FlowControl, instruction.Code.FlowControl());
            Assert.Equal(info.IsPrivileged, instruction.Code.IsPrivileged());
            Assert.Equal(info.IsStackInstruction, instruction.Code.IsStackInstruction());
            Assert.Equal(info.IsSaveRestoreInstruction, instruction.Code.IsSaveRestoreInstruction());

            Assert.Equal(info.Encoding, instruction.Encoding);
            Assert.Equal(info.CpuidFeatures, instruction.CpuidFeatures);
            Assert.Equal(info.FlowControl, instruction.FlowControl);
            Assert.Equal(info.IsPrivileged, instruction.IsPrivileged);
            Assert.Equal(info.IsStackInstruction, instruction.IsStackInstruction);
            Assert.Equal(info.IsSaveRestoreInstruction, instruction.IsSaveRestoreInstruction);
            Assert.Equal(info.RflagsRead, instruction.RflagsRead);
            Assert.Equal(info.RflagsWritten, instruction.RflagsWritten);
            Assert.Equal(info.RflagsCleared, instruction.RflagsCleared);
            Assert.Equal(info.RflagsSet, instruction.RflagsSet);
            Assert.Equal(info.RflagsUndefined, instruction.RflagsUndefined);
            Assert.Equal(info.RflagsModified, instruction.RflagsModified);
        }

        /// <summary>
        /// Asserts that two <see cref="InstructionInfo"/> values are equal, except that
        /// register/memory usage of <paramref name="info2"/> is expected to be empty when
        /// it was created with the corresponding No*Usage option.
        /// </summary>
        void CheckEqual(ref InstructionInfo info1, ref InstructionInfo info2, bool hasRegs2, bool hasMem2) {
            if (hasRegs2)
                Assert.Equal(info1.GetUsedRegisters(), info2.GetUsedRegisters(), UsedRegisterEqualityComparer.Instance);
            else
                Assert.Empty(info2.GetUsedRegisters());
            if (hasMem2)
                Assert.Equal(info1.GetUsedMemory(), info2.GetUsedMemory(), UsedMemoryEqualityComparer.Instance);
            else
                Assert.Empty(info2.GetUsedMemory());
            Assert.Equal(info1.IsPrivileged, info2.IsPrivileged);
            Assert.Equal(info1.IsStackInstruction, info2.IsStackInstruction);
            Assert.Equal(info1.IsSaveRestoreInstruction, info2.IsSaveRestoreInstruction);
            Assert.Equal(info1.Encoding, info2.Encoding);
            Assert.Equal(info1.CpuidFeatures, info2.CpuidFeatures);
            Assert.Equal(info1.FlowControl, info2.FlowControl);
            Assert.Equal(info1.Op0Access, info2.Op0Access);
            Assert.Equal(info1.Op1Access, info2.Op1Access);
            Assert.Equal(info1.Op2Access, info2.Op2Access);
            Assert.Equal(info1.Op3Access, info2.Op3Access);
            Assert.Equal(info1.Op4Access, info2.Op4Access);
            Assert.Equal(info1.RflagsRead, info2.RflagsRead);
            Assert.Equal(info1.RflagsWritten, info2.RflagsWritten);
            Assert.Equal(info1.RflagsCleared, info2.RflagsCleared);
            Assert.Equal(info1.RflagsSet, info2.RflagsSet);
            Assert.Equal(info1.RflagsUndefined, info2.RflagsUndefined);
            Assert.Equal(info1.RflagsModified, info2.RflagsModified);
        }

        /// <summary>
        /// Canonicalizes a used-register sequence: buckets registers by access kind
        /// (splitting ReadWrite/ReadCondWrite into their parts), removes sub-registers
        /// already covered by a wider register (see <see cref="GetRegisters"/>), then
        /// yields one <see cref="UsedRegister"/> per surviving register per access kind.
        /// </summary>
        IEnumerable<UsedRegister> GetUsedRegisters(IEnumerable<UsedRegister> usedRegisterIterator) {
            var read = new List<Register>();
            var write = new List<Register>();
            var condRead = new List<Register>();
            var condWrite = new List<Register>();

            foreach (var info in usedRegisterIterator) {
                switch (info.Access) {
                case OpAccess.Read:
                    read.Add(info.Register);
                    break;
                case OpAccess.CondRead:
                    condRead.Add(info.Register);
                    break;
                case OpAccess.Write:
                    write.Add(info.Register);
                    break;
                case OpAccess.CondWrite:
                    condWrite.Add(info.Register);
                    break;
                case OpAccess.ReadWrite:
                    // Split into one read and one write entry.
                    read.Add(info.Register);
                    write.Add(info.Register);
                    break;
                case OpAccess.ReadCondWrite:
                    // Split into one read and one conditional-write entry.
                    read.Add(info.Register);
                    condWrite.Add(info.Register);
                    break;
                case OpAccess.None:
                case OpAccess.NoMemAccess:
                default:
                    throw new InvalidOperationException();
                }
            }

            foreach (var reg in GetRegisters(read))
                yield return new UsedRegister(reg, OpAccess.Read);
            foreach (var reg in GetRegisters(write))
                yield return new UsedRegister(reg, OpAccess.Write);
            foreach (var reg in GetRegisters(condRead))
                yield return new UsedRegister(reg, OpAccess.CondRead);
            foreach (var reg in GetRegisters(condWrite))
                yield return new UsedRegister(reg, OpAccess.CondWrite);
        }

        /// <summary>
        /// Removes registers that are fully covered by a wider register in the same list
        /// (e.g. drops EAX when RAX is present, XMM3 when ZMM3/YMM3 is present), and merges
        /// low/high 8-bit pairs (AL+AH etc.) into their 16-bit register.
        /// </summary>
        IEnumerable<Register> GetRegisters(List<Register> regs) {
            if (regs.Count <= 1)
                return regs;
            // Sort widest groups first so wider registers are in the set before
            // their sub-registers are checked.
            regs.Sort(RegisterSorter);

            var hash = new HashSet<Register>();
            int index;
            foreach (var reg in regs) {
                if (Register.EAX <= reg && reg <= Register.R15D) {
                    index = reg - Register.EAX;
                    if (hash.Contains(Register.RAX + index))
                        continue;
                }
                else if (Register.AX <= reg && reg <= Register.R15W) {
                    index = reg - Register.AX;
                    if (hash.Contains(Register.RAX + index))
                        continue;
                    if (hash.Contains(Register.EAX + index))
                        continue;
                }
                else if (Register.AL <= reg && reg <= Register.R15L) {
                    index = reg - Register.AL;
                    // AH..BH map onto the same 16/32/64-bit registers as AL..BL.
                    if (Register.AH <= reg && reg <= Register.BH)
                        index -= 4;
                    if (hash.Contains(Register.RAX + index))
                        continue;
                    if (hash.Contains(Register.EAX + index))
                        continue;
                    if (hash.Contains(Register.AX + index))
                        continue;
                }
                else if (Register.YMM0 <= reg && reg <= IcedConstants.YMM_last) {
                    index = reg - Register.YMM0;
                    if (hash.Contains(Register.ZMM0 + index))
                        continue;
                }
                else if (Register.XMM0 <= reg && reg <= IcedConstants.XMM_last) {
                    index = reg - Register.XMM0;
                    if (hash.Contains(Register.ZMM0 + index))
                        continue;
                    if (hash.Contains(Register.YMM0 + index))
                        continue;
                }
                hash.Add(reg);
            }

            // If both halves of a low/high 8-bit pair are present, replace them
            // with the containing 16-bit register.
            foreach (var info in lowRegs) {
                if (hash.Contains(info.rl) && hash.Contains(info.rh)) {
                    hash.Remove(info.rl);
                    hash.Remove(info.rh);
                    hash.Add(info.rx);
                }
            }

            return hash;
        }

        // (low 8-bit, high 8-bit, containing 16-bit) triples used by GetRegisters().
        static readonly (Register rl, Register rh, Register rx)[] lowRegs =
            new(Register rl, Register rh, Register rx)[4] {
                (Register.AL, Register.AH, Register.AX),
                (Register.CL, Register.CH, Register.CX),
                (Register.DL, Register.DH, Register.DX),
                (Register.BL, Register.BH, Register.BX),
            };

        /// <summary>
        /// Orders registers by width group (widest first), then by register value.
        /// </summary>
        static int RegisterSorter(Register x, Register y) {
            int c = GetRegisterGroupOrder(x) - GetRegisterGroupOrder(y);
            if (c != 0)
                return c;
            return x - y;
        }

        /// <summary>
        /// Group index for <see cref="RegisterSorter"/>: 64-bit GPR=0 ... 8-bit GPR=3,
        /// ZMM=4, YMM=5, XMM=6; everything else sorts first (-1).
        /// </summary>
        static int GetRegisterGroupOrder(Register reg) {
            if (Register.RAX <= reg && reg <= Register.R15)
                return 0;
            if (Register.EAX <= reg && reg <= Register.R15D)
                return 1;
            if (Register.AX <= reg && reg <= Register.R15W)
                return 2;
            if (Register.AL <= reg && reg <= Register.R15L)
                return 3;
            if (Register.ZMM0 <= reg && reg <= IcedConstants.ZMM_last)
                return 4;
            if (Register.YMM0 <= reg && reg <= IcedConstants.YMM_last)
                return 5;
            if (Register.XMM0 <= reg && reg <= IcedConstants.XMM_last)
                return 6;
            return -1;
        }

        /// <summary>
        /// Creates a decoder over <paramref name="codeBytes"/> with the default IP
        /// for the given bitness.
        /// </summary>
        Decoder CreateDecoder(int bitness, byte[] codeBytes, DecoderOptions options) {
            var codeReader = new ByteArrayCodeReader(codeBytes);
            var decoder = Decoder.Create(bitness, codeReader, options);
            decoder.IP = bitness switch {
                16 => DecoderConstants.DEFAULT_IP16,
                32 => DecoderConstants.DEFAULT_IP32,
                64 => DecoderConstants.DEFAULT_IP64,
                _ => throw new ArgumentOutOfRangeException(nameof(bitness)),
            };
            Assert.Equal(bitness, decoder.Bitness);
            return decoder;
        }

        /// <summary>
        /// Loads the xUnit theory data for the given bitness from the test-data files.
        /// </summary>
        static protected IEnumerable<object[]> GetTestCases(int bitness) =>
            InstructionInfoTestReader.GetTestCases(bitness, bitness);
    }
}
#endif
namespace zlib
{
    using System;

    // Decompiled port of zlib's "inflate codes" state machine (infcodes.c):
    // decodes literal/length and distance codes using the Huffman tables built
    // by InfTree and copies bytes into the InfBlocks sliding window.
    // NOTE: this is machine-decompiled code (numeric locals, labels, gotos);
    // the control flow is kept exactly as-is and only comments are added.
    internal sealed class InfCodes
    {
        static InfCodes()
        {
            // inflate_mask[n] == (1 << n) - 1: masks out the low n bits of the bit buffer.
            InfCodes.inflate_mask = new int[] { 0, 1, 3, 7, 15, 0x1f, 0x3f, 0x7f, 0xff, 0x1ff, 0x3ff, 0x7ff, 0xfff, 0x1fff, 0x3fff, 0x7fff, 0xffff };
        }

        // bl/bd: bits per literal/distance lookup; tl/td: literal and distance trees
        // (flat int arrays, 3 ints per entry) starting at index 0.
        internal InfCodes(int bl, int bd, int[] tl, int[] td, ZStream z)
        {
            this.mode = 0;
            this.lbits = (byte) bl;
            this.dbits = (byte) bd;
            this.ltree = tl;
            this.ltree_index = 0;
            this.dtree = td;
            this.dtree_index = 0;
        }

        // Same as above but with explicit start offsets into the shared tree arrays.
        internal InfCodes(int bl, int bd, int[] tl, int tl_index, int[] td, int td_index, ZStream z)
        {
            this.mode = 0;
            this.lbits = (byte) bl;
            this.dbits = (byte) bd;
            this.ltree = tl;
            this.ltree_index = tl_index;
            this.dtree = td;
            this.dtree_index = td_index;
        }

        // No unmanaged resources; kept for API parity with the C implementation.
        internal void free(ZStream z)
        {
        }

        // Fast decode loop, used while at least 258 output bytes (0x102) and
        // 10 input bytes are available, so no per-byte bounds checks are needed.
        // Locals (decompiler names): num4=bit buffer, num5=bit count, num6=input index,
        // num7=input bytes left, num8=window write index, num9=window bytes free,
        // num10/num11=literal/distance masks, num12=copy length.
        // Returns Z_OK (0), Z_STREAM_END (1) or Z_DATA_ERROR (-3).
        internal int inflate_fast(int bl, int bd, int[] tl, int tl_index, int[] td, int td_index, InfBlocks s, ZStream z)
        {
            int num12;
            int num6 = z.next_in_index;
            int num7 = z.avail_in;
            int num4 = s.bitb;
            int num5 = s.bitk;
            int num8 = s.write;
            int num9 = (num8 < s.read) ? ((s.read - num8) - 1) : (s.end - num8);
            int num10 = InfCodes.inflate_mask[bl];
            int num11 = InfCodes.inflate_mask[bd];
        Label_0092:
            // Top of the decode loop: ensure at least 20 bits in the bit buffer.
            while (num5 < 20)
            {
                num7--;
                num4 |= (z.next_in[num6++] & 0xff) << (num5 & 0x1f);
                num5 += 8;
            }
            // Look up the literal/length code.
            int num1 = num4 & num10;
            int[] numArray1 = tl;
            int num2 = tl_index;
            int num3 = numArray1[(num2 + num1) * 3];
            if (num3 == 0)
            {
                // Exop == 0: plain literal byte; emit and continue.
                num4 = num4 >> (numArray1[((num2 + num1) * 3) + 1] & 0x1f);
                num5 -= numArray1[((num2 + num1) * 3) + 1];
                s.window[num8++] = (byte) numArray1[((num2 + num1) * 3) + 2];
                num9--;
                goto Label_05E0;
            }
        Label_00F1:
            num4 = num4 >> (numArray1[((num2 + num1) * 3) + 1] & 0x1f);
            num5 -= numArray1[((num2 + num1) * 3) + 1];
            if ((num3 & 0x10) == 0)
            {
                if ((num3 & 0x40) == 0)
                {
                    // Second-level table lookup: follow the sub-table pointer.
                    num1 += numArray1[((num2 + num1) * 3) + 2];
                    num1 += num4 & InfCodes.inflate_mask[num3];
                    num3 = numArray1[(num2 + num1) * 3];
                    if (num3 != 0)
                    {
                        goto Label_00F1;
                    }
                    // Sub-table resolved to a literal.
                    num4 = num4 >> (numArray1[((num2 + num1) * 3) + 1] & 0x1f);
                    num5 -= numArray1[((num2 + num1) * 3) + 1];
                    s.window[num8++] = (byte) numArray1[((num2 + num1) * 3) + 2];
                    num9--;
                }
                else
                {
                    if ((num3 & 0x20) != 0)
                    {
                        // End-of-block code: give back unused input bytes, save state,
                        // and report Z_STREAM_END.
                        num12 = z.avail_in - num7;
                        num12 = ((num5 >> 3) < num12) ? (num5 >> 3) : num12;
                        num7 += num12;
                        num6 -= num12;
                        num5 -= num12 << 3;
                        s.bitb = num4;
                        s.bitk = num5;
                        z.avail_in = num7;
                        z.total_in += num6 - z.next_in_index;
                        z.next_in_index = num6;
                        s.write = num8;
                        return 1;
                    }
                    // Invalid code: save state and report Z_DATA_ERROR.
                    z.msg = "invalid literal/length code";
                    num12 = z.avail_in - num7;
                    num12 = ((num5 >> 3) < num12) ? (num5 >> 3) : num12;
                    num7 += num12;
                    num6 -= num12;
                    num5 -= num12 << 3;
                    s.bitb = num4;
                    s.bitk = num5;
                    z.avail_in = num7;
                    z.total_in += num6 - z.next_in_index;
                    z.next_in_index = num6;
                    s.write = num8;
                    return -3;
                }
                goto Label_05E0;
            }
            // Length code: read the extra bits to get the copy length (num12).
            num3 &= 15;
            num12 = numArray1[((num2 + num1) * 3) + 2] + (num4 & InfCodes.inflate_mask[num3]);
            num4 = num4 >> (num3 & 0x1f);
            num5 -= num3;
            // Ensure enough bits for the distance code lookup.
            while (num5 < 15)
            {
                num7--;
                num4 |= (z.next_in[num6++] & 0xff) << (num5 & 0x1f);
                num5 += 8;
            }
            num1 = num4 & num11;
            numArray1 = td;
            num2 = td_index;
            num3 = numArray1[(num2 + num1) * 3];
        Label_018B:
            num4 = num4 >> (numArray1[((num2 + num1) * 3) + 1] & 0x1f);
            num5 -= numArray1[((num2 + num1) * 3) + 1];
            if ((num3 & 0x10) != 0)
            {
                int num14;
                // Distance code: read extra bits to get the distance (num13),
                // then copy num12 bytes from distance num13 back in the window.
                num3 &= 15;
                while (num5 < num3)
                {
                    num7--;
                    num4 |= (z.next_in[num6++] & 0xff) << (num5 & 0x1f);
                    num5 += 8;
                }
                int num13 = numArray1[((num2 + num1) * 3) + 2] + (num4 & InfCodes.inflate_mask[num3]);
                num4 = num4 >> (num3 & 0x1f);
                num5 -= num3;
                num9 -= num12;
                if (num8 >= num13)
                {
                    // Source is before the write pointer: no window wrap needed.
                    num14 = num8 - num13;
                    if (((num8 - num14) > 0) && (2 > (num8 - num14)))
                    {
                        // Overlapping copy with distance 1: copy byte by byte.
                        s.window[num8++] = s.window[num14++];
                        num12--;
                        s.window[num8++] = s.window[num14++];
                        num12--;
                    }
                    else
                    {
                        Array.Copy(s.window, num14, s.window, num8, 2);
                        num8 += 2;
                        num14 += 2;
                        num12 -= 2;
                    }
                }
                else
                {
                    // Source wraps around the end of the circular window.
                    num14 = num8 - num13;
                    do
                    {
                        num14 += s.end;
                    }
                    while (num14 < 0);
                    num3 = s.end - num14;
                    if (num12 > num3)
                    {
                        // Copy the part up to the window end first.
                        num12 -= num3;
                        if (((num8 - num14) > 0) && (num3 > (num8 - num14)))
                        {
                            do
                            {
                                s.window[num8++] = s.window[num14++];
                            }
                            while (--num3 != 0);
                        }
                        else
                        {
                            Array.Copy(s.window, num14, s.window, num8, num3);
                            num8 += num3;
                            num14 += num3;
                            num3 = 0;
                        }
                        num14 = 0;
                    }
                }
                // Copy the remaining bytes (byte-wise when source/dest overlap).
                if (((num8 - num14) > 0) && (num12 > (num8 - num14)))
                {
                    do
                    {
                        s.window[num8++] = s.window[num14++];
                    }
                    while (--num12 != 0);
                }
                else
                {
                    Array.Copy(s.window, num14, s.window, num8, num12);
                    num8 += num12;
                    num14 += num12;
                    num12 = 0;
                }
            }
            else
            {
                if ((num3 & 0x40) == 0)
                {
                    // Second-level distance table lookup.
                    num1 += numArray1[((num2 + num1) * 3) + 2];
                    num1 += num4 & InfCodes.inflate_mask[num3];
                    num3 = numArray1[(num2 + num1) * 3];
                    goto Label_018B;
                }
                // Invalid distance code: save state and report Z_DATA_ERROR.
                z.msg = "invalid distance code";
                num12 = z.avail_in - num7;
                num12 = ((num5 >> 3) < num12) ? (num5 >> 3) : num12;
                num7 += num12;
                num6 -= num12;
                num5 -= num12 << 3;
                s.bitb = num4;
                s.bitk = num5;
                z.avail_in = num7;
                z.total_in += num6 - z.next_in_index;
                z.next_in_index = num6;
                s.write = num8;
                return -3;
            }
        Label_05E0:
            // Leave the fast loop once fewer than 258 output bytes or 10 input
            // bytes remain; save state and return Z_OK so proc() can continue.
            if ((num9 < 0x102) || (num7 < 10))
            {
                num12 = z.avail_in - num7;
                num12 = ((num5 >> 3) < num12) ? (num5 >> 3) : num12;
                num7 += num12;
                num6 -= num12;
                num5 -= num12 << 3;
                s.bitb = num4;
                s.bitk = num5;
                z.avail_in = num7;
                z.total_in += num6 - z.next_in_index;
                z.next_in_index = num6;
                s.write = num8;
                return 0;
            }
            goto Label_0092;
        }

        // Main decode state machine. this.mode values correspond to the constants
        // declared below: START=0, LEN=1, LENEXT=2, DIST=3, DISTEXT=4, COPY=5,
        // LIT=6, WASH=7, END=8, BADCODE=9. Resumable: all progress is saved back
        // into s/z before every return so decoding can continue with more input.
        // Locals: num4=bit buffer, num5=bit count, num6=input index, num7=input left,
        // num8=window write index, num9=window bytes free.
        internal int proc(InfBlocks s, ZStream z, int r)
        {
            int num1;
            int num10;
            int num4 = 0;
            int num5 = 0;
            int num6 = 0;
            num6 = z.next_in_index;
            int num7 = z.avail_in;
            num4 = s.bitb;
            num5 = s.bitk;
            int num8 = s.write;
            int num9 = (num8 < s.read) ? ((s.read - num8) - 1) : (s.end - num8);
        Label_0051:
            switch (this.mode)
            {
                case 0: // START: try the fast path if enough input/output space.
                    if ((num9 < 0x102) || (num7 < 10))
                    {
                        // Not enough room for inflate_fast; fall through to LEN setup below.
                        break;
                    }
                    s.bitb = num4;
                    s.bitk = num5;
                    z.avail_in = num7;
                    z.total_in += num6 - z.next_in_index;
                    z.next_in_index = num6;
                    s.write = num8;
                    r = this.inflate_fast(this.lbits, this.dbits, this.ltree, this.ltree_index, this.dtree, this.dtree_index, s, z);
                    // Reload local state that inflate_fast updated.
                    num6 = z.next_in_index;
                    num7 = z.avail_in;
                    num4 = s.bitb;
                    num5 = s.bitk;
                    num8 = s.write;
                    num9 = (num8 < s.read) ? ((s.read - num8) - 1) : (s.end - num8);
                    if (r == 0)
                    {
                        break;
                    }
                    // r==1 => end of block (WASH), otherwise data error (BADCODE).
                    this.mode = (r == 1) ? 7 : 9;
                    goto Label_0051;

                case 1: // LEN: decode a literal/length code.
                    goto Label_0199;

                case 2: // LENEXT: read the extra bits of a length code.
                    num1 = this.get_Renamed;
                    while (num5 < num1)
                    {
                        if (num7 != 0)
                        {
                            r = 0;
                        }
                        else
                        {
                            // Out of input: save state and flush.
                            s.bitb = num4;
                            s.bitk = num5;
                            z.avail_in = num7;
                            z.total_in += num6 - z.next_in_index;
                            z.next_in_index = num6;
                            s.write = num8;
                            return s.inflate_flush(z, r);
                        }
                        num7--;
                        num4 |= (z.next_in[num6++] & 0xff) << (num5 & 0x1f);
                        num5 += 8;
                    }
                    this.len += num4 & InfCodes.inflate_mask[num1];
                    num4 = num4 >> (num1 & 0x1f);
                    num5 -= num1;
                    // Switch to the distance tree for the next lookup.
                    this.need = this.dbits;
                    this.tree = this.dtree;
                    this.tree_index = this.dtree_index;
                    this.mode = 3;
                    goto Label_0412;

                case 3: // DIST: decode a distance code.
                    goto Label_0412;

                case 4: // DISTEXT: read the extra bits of a distance code.
                    num1 = this.get_Renamed;
                    while (num5 < num1)
                    {
                        if (num7 != 0)
                        {
                            r = 0;
                        }
                        else
                        {
                            // Out of input: save state and flush.
                            s.bitb = num4;
                            s.bitk = num5;
                            z.avail_in = num7;
                            z.total_in += num6 - z.next_in_index;
                            z.next_in_index = num6;
                            s.write = num8;
                            return s.inflate_flush(z, r);
                        }
                        num7--;
                        num4 |= (z.next_in[num6++] & 0xff) << (num5 & 0x1f);
                        num5 += 8;
                    }
                    this.dist += num4 & InfCodes.inflate_mask[num1];
                    num4 = num4 >> (num1 & 0x1f);
                    num5 -= num1;
                    this.mode = 5;
                    goto Label_0635;

                case 5: // COPY: copy len bytes from dist back in the window.
                    goto Label_0635;

                case 6: // LIT: emit one literal byte.
                    if (num9 == 0)
                    {
                        // Window full: wrap and/or flush to make room.
                        if ((num8 == s.end) && (s.read != 0))
                        {
                            num8 = 0;
                            num9 = (num8 < s.read) ? ((s.read - num8) - 1) : (s.end - num8);
                        }
                        if (num9 == 0)
                        {
                            s.write = num8;
                            r = s.inflate_flush(z, r);
                            num8 = s.write;
                            num9 = (num8 < s.read) ? ((s.read - num8) - 1) : (s.end - num8);
                            if ((num8 == s.end) && (s.read != 0))
                            {
                                num8 = 0;
                                num9 = (num8 < s.read) ? ((s.read - num8) - 1) : (s.end - num8);
                            }
                            if (num9 == 0)
                            {
                                // Still no room: save state and return.
                                s.bitb = num4;
                                s.bitk = num5;
                                z.avail_in = num7;
                                z.total_in += num6 - z.next_in_index;
                                z.next_in_index = num6;
                                s.write = num8;
                                return s.inflate_flush(z, r);
                            }
                        }
                    }
                    r = 0;
                    s.window[num8++] = (byte) this.lit;
                    num9--;
                    this.mode = 0;
                    goto Label_0051;

                case 7: // WASH: end of block; drop look-ahead bits and flush output.
                    if (num5 > 7)
                    {
                        // Return any whole unused byte to the input.
                        num5 -= 8;
                        num7++;
                        num6--;
                    }
                    s.write = num8;
                    r = s.inflate_flush(z, r);
                    num8 = s.write;
                    num9 = (num8 < s.read) ? ((s.read - num8) - 1) : (s.end - num8);
                    if (s.read != s.write)
                    {
                        // Output not fully consumed yet: save state and return.
                        s.bitb = num4;
                        s.bitk = num5;
                        z.avail_in = num7;
                        z.total_in += num6 - z.next_in_index;
                        z.next_in_index = num6;
                        s.write = num8;
                        return s.inflate_flush(z, r);
                    }
                    this.mode = 8;
                    goto Label_098A;

                case 8: // END: all done, report Z_STREAM_END.
                    goto Label_098A;

                case 9: // BADCODE: invalid code was seen, report Z_DATA_ERROR.
                    r = -3;
                    s.bitb = num4;
                    s.bitk = num5;
                    z.avail_in = num7;
                    z.total_in += num6 - z.next_in_index;
                    z.next_in_index = num6;
                    s.write = num8;
                    return s.inflate_flush(z, r);

                default: // Unknown mode: Z_STREAM_ERROR.
                    r = -2;
                    s.bitb = num4;
                    s.bitk = num5;
                    z.avail_in = num7;
                    z.total_in += num6 - z.next_in_index;
                    z.next_in_index = num6;
                    s.write = num8;
                    return s.inflate_flush(z, r);
            }
            // Fell out of case 0 (START): set up the literal/length tree and enter LEN.
            this.need = this.lbits;
            this.tree = this.ltree;
            this.tree_index = this.ltree_index;
            this.mode = 1;
        Label_0199:
            // LEN: decode one literal/length code from this.tree.
            num1 = this.need;
            while (num5 < num1)
            {
                if (num7 != 0)
                {
                    r = 0;
                }
                else
                {
                    // Out of input: save state and flush.
                    s.bitb = num4;
                    s.bitk = num5;
                    z.avail_in = num7;
                    z.total_in += num6 - z.next_in_index;
                    z.next_in_index = num6;
                    s.write = num8;
                    return s.inflate_flush(z, r);
                }
                num7--;
                num4 |= (z.next_in[num6++] & 0xff) << (num5 & 0x1f);
                num5 += 8;
            }
            int num2 = (this.tree_index + (num4 & InfCodes.inflate_mask[num1])) * 3;
            num4 = SupportClass.URShift(num4, this.tree[num2 + 1]);
            num5 -= this.tree[num2 + 1];
            int num3 = this.tree[num2];
            if (num3 == 0)
            {
                // Literal byte.
                this.lit = this.tree[num2 + 2];
                this.mode = 6;
                goto Label_0051;
            }
            if ((num3 & 0x10) != 0)
            {
                // Length code: remember extra-bit count and base length.
                this.get_Renamed = num3 & 15;
                this.len = this.tree[num2 + 2];
                this.mode = 2;
                goto Label_0051;
            }
            if ((num3 & 0x40) == 0)
            {
                // Follow the second-level table pointer.
                this.need = num3;
                this.tree_index = (num2 / 3) + this.tree[num2 + 2];
                goto Label_0051;
            }
            if ((num3 & 0x20) != 0)
            {
                // End-of-block code.
                this.mode = 7;
                goto Label_0051;
            }
            // Invalid code.
            this.mode = 9;
            z.msg = "invalid literal/length code";
            r = -3;
            s.bitb = num4;
            s.bitk = num5;
            z.avail_in = num7;
            z.total_in += num6 - z.next_in_index;
            z.next_in_index = num6;
            s.write = num8;
            return s.inflate_flush(z, r);
        Label_0412:
            // DIST: decode one distance code from this.tree.
            num1 = this.need;
            while (num5 < num1)
            {
                if (num7 != 0)
                {
                    r = 0;
                }
                else
                {
                    // Out of input: save state and flush.
                    s.bitb = num4;
                    s.bitk = num5;
                    z.avail_in = num7;
                    z.total_in += num6 - z.next_in_index;
                    z.next_in_index = num6;
                    s.write = num8;
                    return s.inflate_flush(z, r);
                }
                num7--;
                num4 |= (z.next_in[num6++] & 0xff) << (num5 & 0x1f);
                num5 += 8;
            }
            num2 = (this.tree_index + (num4 & InfCodes.inflate_mask[num1])) * 3;
            num4 = num4 >> (this.tree[num2 + 1] & 0x1f);
            num5 -= this.tree[num2 + 1];
            num3 = this.tree[num2];
            if ((num3 & 0x10) != 0)
            {
                // Distance code: remember extra-bit count and base distance.
                this.get_Renamed = num3 & 15;
                this.dist = this.tree[num2 + 2];
                this.mode = 4;
                goto Label_0051;
            }
            if ((num3 & 0x40) == 0)
            {
                // Follow the second-level table pointer.
                this.need = num3;
                this.tree_index = (num2 / 3) + this.tree[num2 + 2];
                goto Label_0051;
            }
            // Invalid distance code.
            this.mode = 9;
            z.msg = "invalid distance code";
            r = -3;
            s.bitb = num4;
            s.bitk = num5;
            z.avail_in = num7;
            z.total_in += num6 - z.next_in_index;
            z.next_in_index = num6;
            s.write = num8;
            return s.inflate_flush(z, r);
        Label_0635:
            // COPY: copy this.len bytes from this.dist back in the circular window.
            num10 = num8 - this.dist;
            while (num10 < 0)
            {
                num10 += s.end;
            }
            while (this.len != 0)
            {
                if (num9 == 0)
                {
                    // Window full: wrap and/or flush to make room (same as LIT).
                    if ((num8 == s.end) && (s.read != 0))
                    {
                        num8 = 0;
                        num9 = (num8 < s.read) ? ((s.read - num8) - 1) : (s.end - num8);
                    }
                    if (num9 == 0)
                    {
                        s.write = num8;
                        r = s.inflate_flush(z, r);
                        num8 = s.write;
                        num9 = (num8 < s.read) ? ((s.read - num8) - 1) : (s.end - num8);
                        if ((num8 == s.end) && (s.read != 0))
                        {
                            num8 = 0;
                            num9 = (num8 < s.read) ? ((s.read - num8) - 1) : (s.end - num8);
                        }
                        if (num9 == 0)
                        {
                            // Still no room: save state and return.
                            s.bitb = num4;
                            s.bitk = num5;
                            z.avail_in = num7;
                            z.total_in += num6 - z.next_in_index;
                            z.next_in_index = num6;
                            s.write = num8;
                            return s.inflate_flush(z, r);
                        }
                    }
                }
                s.window[num8++] = s.window[num10++];
                num9--;
                if (num10 == s.end)
                {
                    num10 = 0;
                }
                this.len--;
            }
            this.mode = 0;
            goto Label_0051;
        Label_098A:
            // END: save state and report Z_STREAM_END.
            r = 1;
            s.bitb = num4;
            s.bitk = num5;
            z.avail_in = num7;
            z.total_in += num6 - z.next_in_index;
            z.next_in_index = num6;
            s.write = num8;
            return s.inflate_flush(z, r);
        }

        // --- mode constants (values of this.mode) and state fields ---
        private const int BADCODE = 9;           // mode: got an invalid code
        private const int COPY = 5;              // mode: copying match bytes
        internal byte dbits;                     // bits per distance-tree lookup
        internal int dist;                       // distance of the current match
        private const int DIST = 3;              // mode: decoding distance code
        private const int DISTEXT = 4;           // mode: reading distance extra bits
        internal int[] dtree;                    // distance tree
        internal int dtree_index;                // start offset into dtree
        private const int END = 8;               // mode: finished
        internal int get_Renamed;                // extra bits still to get ("get" in C source)
        private static readonly int[] inflate_mask; // low-bit masks, see static ctor
        internal byte lbits;                     // bits per literal/length-tree lookup
        internal int len;                        // length of the current match
        private const int LEN = 1;               // mode: decoding literal/length code
        private const int LENEXT = 2;            // mode: reading length extra bits
        internal int lit;                        // pending literal byte
        private const int LIT = 6;               // mode: emitting a literal
        internal int[] ltree;                    // literal/length tree
        internal int ltree_index;                // start offset into ltree
        internal int mode;                       // current state (constants above)
        internal int need;                       // bits needed for the next lookup
        private const int START = 0;             // mode: initial state / between codes
        internal int[] tree;                     // tree currently being walked
        internal int tree_index;                 // current index into tree
        private const int WASH = 7;              // mode: end of block, flushing
        // zlib return codes (only a few are used above).
        private const int Z_BUF_ERROR = -5;
        private const int Z_DATA_ERROR = -3;
        private const int Z_ERRNO = -1;
        private const int Z_MEM_ERROR = -4;
        private const int Z_NEED_DICT = 2;
        private const int Z_OK = 0;
        private const int Z_STREAM_END = 1;
        private const int Z_STREAM_ERROR = -2;
        private const int Z_VERSION_ERROR = -6;
    }
}
using System;
using System.Linq;
using System.Management.Automation;
using EnvDTE;
using Moq;
using NuGet.Test;
using NuGet.Test.Mocks;
using NuGet.VisualStudio;
using NuGet.VisualStudio.Test;
using Xunit;
using Xunit.Extensions;

namespace NuGet.PowerShell.Commands.Test
{
    using PackageUtility = NuGet.Test.PackageUtility;

    /// <summary>
    /// Unit tests for <c>UpdatePackageCommand</c> (the Update-Package PowerShell cmdlet).
    /// Uses <see cref="MockVsPackageManager"/> to record which package id/version and
    /// dependency flag the cmdlet forwards to the package manager.
    /// </summary>
    public class UpdatePackageCommandTest
    {
        /// <summary>
        /// The cmdlet must refuse to run when no solution is open.
        /// </summary>
        [Fact]
        public void UpdatePackageCmdletThrowsWhenSolutionIsClosed()
        {
            // Arrange
            var packageManagerFactory = new Mock<IVsPackageManagerFactory>();
            packageManagerFactory.Setup(m => m.CreatePackageManager()).Returns((IVsPackageManager)null);
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManager(isSolutionOpen: false), packageManagerFactory.Object, null, null, null, null, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);

            // Act and Assert
            ExceptionAssert.Throws<InvalidOperationException>(() => cmdlet.GetResults(),
                "The current environment doesn't have a solution open.");
        }

        /// <summary>
        /// When -Source is given, the cmdlet must use the package manager created
        /// for that source repository, not the default one.
        /// </summary>
        [Fact]
        public void UpdatePackageCmdletUsesPackageManangerWithSourceIfSpecified()
        {
            // Arrange
            var packageManagerFactory = new Mock<IVsPackageManagerFactory>();
            var vsPackageManager = new MockVsPackageManager();
            var sourceVsPackageManager = new MockVsPackageManager();
            var mockPackageRepository = new MockPackageRepository();
            packageManagerFactory.Setup(m => m.CreatePackageManager()).Returns(vsPackageManager);
            // The factory returns a different manager when asked for the "somesource" repository.
            packageManagerFactory.Setup(m => m.CreatePackageManager(mockPackageRepository, true)).Returns(sourceVsPackageManager);
            var sourceProvider = GetPackageSourceProvider(new PackageSource("somesource"));
            var repositoryFactory = new Mock<IPackageRepositoryFactory>();
            repositoryFactory.Setup(c => c.CreateRepository(It.Is<string>(s => s == "somesource"))).Returns(mockPackageRepository);
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManagerWithProjects("foo"), packageManagerFactory.Object, repositoryFactory.Object, sourceProvider, null, null, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Source = "somesource";
            cmdlet.Id = "my-id";
            cmdlet.Version = new SemanticVersion("2.8");
            cmdlet.ProjectName = "foo";

            // Act
            cmdlet.Execute();

            // Assert
            Assert.Same(sourceVsPackageManager, cmdlet.PackageManager);
        }

        /// <summary>
        /// -Id and -Version must be forwarded unchanged to UpdatePackage.
        /// </summary>
        [Fact]
        public void UpdatePackageCmdletPassesParametersCorrectlyWhenIdAndVersionAreSpecified()
        {
            // Arrange
            var vsPackageManager = new MockVsPackageManager();
            var packageManagerFactory = new Mock<IVsPackageManagerFactory>();
            packageManagerFactory.Setup(m => m.CreatePackageManager()).Returns(vsPackageManager);
            var mockPackageRepository = new MockPackageRepository();
            var sourceProvider = GetPackageSourceProvider(new PackageSource("somesource"));
            var repositoryFactory = new Mock<IPackageRepositoryFactory>();
            repositoryFactory.Setup(c => c.CreateRepository(It.Is<string>(s => s == "somesource"))).Returns(mockPackageRepository);
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManagerWithProjects("foo"), packageManagerFactory.Object, repositoryFactory.Object, sourceProvider, null, null, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Id = "my-id";
            cmdlet.Version = new SemanticVersion("2.8");
            cmdlet.ProjectName = "foo";

            // Act
            cmdlet.Execute();

            // Assert
            Assert.Equal("my-id", vsPackageManager.PackageId);
            Assert.Equal(new SemanticVersion("2.8"), vsPackageManager.Version);
        }

        /// <summary>
        /// With no -IgnoreDependencies switch, dependencies must be updated (flag true).
        /// </summary>
        [Fact]
        public void UpdatePackageCmdletPassesIgnoreDependencySwitchCorrectly()
        {
            // Arrange
            var vsPackageManager = new MockVsPackageManager();
            var packageManagerFactory = new Mock<IVsPackageManagerFactory>();
            packageManagerFactory.Setup(m => m.CreatePackageManager()).Returns(vsPackageManager);
            var mockPackageRepository = new MockPackageRepository();
            var sourceProvider = GetPackageSourceProvider(new PackageSource("somesource"));
            var repositoryFactory = new Mock<IPackageRepositoryFactory>();
            repositoryFactory.Setup(c => c.CreateRepository(It.Is<string>(s => s == "somesource"))).Returns(mockPackageRepository);
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManagerWithProjects("foo"), packageManagerFactory.Object, repositoryFactory.Object, sourceProvider, null, null, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Id = "my-id";
            cmdlet.Version = new SemanticVersion("2.8");
            cmdlet.ProjectName = "foo";

            // Act
            cmdlet.Execute();

            // Assert
            Assert.Equal("my-id", vsPackageManager.PackageId);
            Assert.Equal(new SemanticVersion("2.8"), vsPackageManager.Version);
            Assert.True(vsPackageManager.UpdateDependencies);
        }

        /// <summary>
        /// With -IgnoreDependencies present, dependencies must NOT be updated (flag false).
        /// </summary>
        [Fact]
        public void UpdatePackageCmdletPassesIgnoreDependencySwitchCorrectlyWhenPresent()
        {
            // Arrange
            var vsPackageManager = new MockVsPackageManager();
            var packageManagerFactory = new Mock<IVsPackageManagerFactory>();
            packageManagerFactory.Setup(m => m.CreatePackageManager()).Returns(vsPackageManager);
            var mockPackageRepository = new MockPackageRepository();
            var sourceProvider = GetPackageSourceProvider(new PackageSource("somesource"));
            var repositoryFactory = new Mock<IPackageRepositoryFactory>();
            repositoryFactory.Setup(c => c.CreateRepository(It.Is<string>(s => s == "somesource"))).Returns(mockPackageRepository);
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManagerWithProjects("foo"), packageManagerFactory.Object, repositoryFactory.Object, sourceProvider, null, null, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Id = "my-id";
            cmdlet.Version = new SemanticVersion("2.8");
            cmdlet.IgnoreDependencies = new SwitchParameter(isPresent: true);
            cmdlet.ProjectName = "foo";

            // Act
            cmdlet.Execute();

            // Assert
            Assert.Equal("my-id", vsPackageManager.PackageId);
            Assert.Equal(new SemanticVersion("2.8"), vsPackageManager.Version);
            Assert.False(vsPackageManager.UpdateDependencies);
        }

        /// <summary>
        /// An http(s) source must trigger a product-update check after execution.
        /// </summary>
        [Fact]
        public void UpdatePackageCmdletInvokeProductUpdateCheckWhenSourceIsHttpAddress()
        {
            // Arrange
            string source = "http://bing.com";
            var productUpdateService = new Mock<IProductUpdateService>();
            var sourceRepository = new Mock<IPackageRepository>();
            sourceRepository.Setup(p => p.Source).Returns(source);
            var vsPackageManager = new MockVsPackageManager(sourceRepository.Object);
            var packageManagerFactory = new Mock<IVsPackageManagerFactory>();
            packageManagerFactory.Setup(c => c.CreatePackageManager(sourceRepository.Object, true)).Returns(vsPackageManager);
            var mockPackageRepository = new MockPackageRepository();
            var sourceProvider = GetPackageSourceProvider(new PackageSource(source));
            var repositoryFactory = new Mock<IPackageRepositoryFactory>();
            repositoryFactory.Setup(c => c.CreateRepository(source)).Returns(sourceRepository.Object);
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManagerWithProjects("foo"), packageManagerFactory.Object, repositoryFactory.Object, sourceProvider, null, productUpdateService.Object, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Id = "my-id";
            cmdlet.Version = new SemanticVersion("2.8");
            cmdlet.IgnoreDependencies = new SwitchParameter(isPresent: true);
            cmdlet.Source = source;
            cmdlet.ProjectName = "foo";

            // Act
            cmdlet.Execute();

            // Assert
            productUpdateService.Verify(p => p.CheckForAvailableUpdateAsync(), Times.Once());
        }

        /// <summary>
        /// Same as above, but the http source is referenced by its configured name ("bing").
        /// </summary>
        [Fact]
        public void UpdatePackageCmdletInvokeProductUpdateCheckWhenSourceIsHttpAddressAndSourceIsSpecified()
        {
            // Arrange
            string source = "http://bing.com";
            var productUpdateService = new Mock<IProductUpdateService>();
            var sourceRepository = new Mock<IPackageRepository>();
            sourceRepository.Setup(p => p.Source).Returns(source);
            var vsPackageManager = new MockVsPackageManager(sourceRepository.Object);
            var packageManagerFactory = new Mock<IVsPackageManagerFactory>();
            packageManagerFactory.Setup(c => c.CreatePackageManager(sourceRepository.Object, true)).Returns(vsPackageManager);
            var sourceProvider = GetPackageSourceProvider(new PackageSource(source, "bing"));
            var repositoryFactory = new Mock<IPackageRepositoryFactory>();
            repositoryFactory.Setup(c => c.CreateRepository(source)).Returns(sourceRepository.Object);
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManagerWithProjects("foo"), packageManagerFactory.Object, repositoryFactory.Object, sourceProvider, null, productUpdateService.Object, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Id = "my-id";
            cmdlet.Version = new SemanticVersion("2.8");
            cmdlet.IgnoreDependencies = new SwitchParameter(isPresent: true);
            cmdlet.Source = "bing";
            cmdlet.ProjectName = "foo";

            // Act
            cmdlet.Execute();

            // Assert
            productUpdateService.Verify(p => p.CheckForAvailableUpdateAsync(), Times.Once());
        }

        /// <summary>
        /// A non-http source (ftp) must not trigger the product-update check.
        /// </summary>
        [Fact]
        public void UpdatePackageCmdletDoNotInvokeProductUpdateCheckWhenSourceIsNotHttpAddress()
        {
            // Arrange
            string source = "ftp://bing.com";
            var productUpdateService = new Mock<IProductUpdateService>();
            var sourceRepository = new Mock<IPackageRepository>();
            sourceRepository.Setup(p => p.Source).Returns(source);
            var vsPackageManager = new MockVsPackageManager(sourceRepository.Object);
            var packageManagerFactory = new Mock<IVsPackageManagerFactory>();
            packageManagerFactory.Setup(m => m.CreatePackageManager(sourceRepository.Object, true)).Returns(vsPackageManager);
            var sourceProvider = GetPackageSourceProvider(new PackageSource(source, "bing"));
            var repositoryFactory = new Mock<IPackageRepositoryFactory>();
            repositoryFactory.Setup(c => c.CreateRepository(source)).Returns(sourceRepository.Object);
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManagerWithProjects("foo"), packageManagerFactory.Object, repositoryFactory.Object, sourceProvider, null, productUpdateService.Object, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Id = "my-id";
            cmdlet.Version = new SemanticVersion("2.8");
            cmdlet.IgnoreDependencies = new SwitchParameter(isPresent: true);
            cmdlet.Source = source;
            cmdlet.ProjectName = "foo";

            // Act
            cmdlet.Execute();

            // Assert
            productUpdateService.Verify(p => p.CheckForAvailableUpdateAsync(), Times.Never());
        }

        /// <summary>
        /// Even with -IncludePrerelease, an update must not move to an unlisted package.
        /// </summary>
        [Theory]
        [InlineData("1.0.0", "2.0.0-alpha")]
        [InlineData("1.0.0-beta", "2.0.0")]
        [InlineData("1.0.0-beta", "1.0.1-beta")]
        [InlineData("1.0.0", "1.0.1")]
        public void UpdatePackageDoNotUpdateToUnlistedPackageWithPrerelease(string versionA1, string versionA2)
        {
            // Arrange
            var packageA1 = PackageUtility.CreatePackage("A", versionA1);
            var packageA2 = PackageUtility.CreatePackage("A", versionA2, listed: false);

            var sharedRepository = new MockSharedPackageRepository();
            sharedRepository.AddPackage(packageA1);

            var packageRepository = new MockPackageRepository { packageA1, packageA2 };

            var packageManager = new MockVsPackageManager(
                TestUtils.GetSolutionManagerWithProjects(),
                packageRepository,
                sharedRepository);

            var packageManagerFactory = new Mock<IVsPackageManagerFactory>(MockBehavior.Strict);
            packageManagerFactory.Setup(m => m.CreatePackageManager()).Returns(packageManager);

            // Act
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManager(), packageManagerFactory.Object, null, new Mock<IVsPackageSourceProvider>().Object, new Mock<IHttpClientEvents>().Object, null, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Id = "A";
            cmdlet.IncludePrerelease = true;
            cmdlet.Execute();

            // Assert
            Assert.True(sharedRepository.Contains(packageA1));
            Assert.False(sharedRepository.Contains(packageA2));
        }

        /// <summary>
        /// Safe update (+ prerelease) must also never install an unlisted package.
        /// </summary>
        [Theory]
        [InlineData("1.0.0", "1.0.1-alpha")]
        [InlineData("1.0.0-beta", "1.0.9")]
        [InlineData("1.0.0-beta", "1.0.1-beta")]
        [InlineData("1.0.0", "1.0.1")]
        public void SafeUpdatePackageDoNotUpdateToUnlistedPackageWithPrerelease(string versionA1, string versionA2)
        {
            // Arrange
            var packageA1 = PackageUtility.CreatePackage("A", versionA1);
            var packageA2 = PackageUtility.CreatePackage("A", versionA2, listed: false);

            var sharedRepository = new MockSharedPackageRepository();
            sharedRepository.AddPackage(packageA1);

            var packageRepository = new MockPackageRepository { packageA1, packageA2 };

            var packageManager = new MockVsPackageManager(
                TestUtils.GetSolutionManagerWithProjects(),
                packageRepository,
                sharedRepository);

            var packageManagerFactory = new Mock<IVsPackageManagerFactory>(MockBehavior.Strict);
            packageManagerFactory.Setup(m => m.CreatePackageManager()).Returns(packageManager);

            // Act
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManager(), packageManagerFactory.Object, null, new Mock<IVsPackageSourceProvider>().Object, new Mock<IHttpClientEvents>().Object, null, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Id = "A";
            cmdlet.IncludePrerelease = true;
            cmdlet.Safe = true;
            cmdlet.Execute();

            // Assert
            Assert.False(sharedRepository.Contains(packageA2));
        }

        /// <summary>
        /// A normal (stable-only) update must not move to an unlisted package.
        /// </summary>
        [Theory]
        [InlineData("1.0.0", "2.0.0")]
        [InlineData("1.0.0", "1.0.1")]
        public void UpdatePackageDoNotUpdateToUnlistedPackage(string versionA1, string versionA2)
        {
            // Arrange
            var packageA1 = PackageUtility.CreatePackage("A", versionA1);
            var packageA2 = PackageUtility.CreatePackage("A", versionA2, listed: false);

            var sharedRepository = new MockSharedPackageRepository();
            sharedRepository.AddPackage(packageA1);

            var packageRepository = new MockPackageRepository { packageA1, packageA2 };

            var packageManager = new MockVsPackageManager(
                TestUtils.GetSolutionManagerWithProjects(),
                packageRepository,
                sharedRepository);

            var packageManagerFactory = new Mock<IVsPackageManagerFactory>(MockBehavior.Strict);
            packageManagerFactory.Setup(m => m.CreatePackageManager()).Returns(packageManager);

            // Act
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManager(), packageManagerFactory.Object, null, new Mock<IVsPackageSourceProvider>().Object, new Mock<IHttpClientEvents>().Object, null, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Id = "A";
            cmdlet.Execute();

            // Assert
            Assert.True(sharedRepository.Contains(packageA1));
            Assert.False(sharedRepository.Contains(packageA2));
        }

        /// <summary>
        /// Safe update must not move to an unlisted package within the safe range.
        /// </summary>
        [Theory]
        [InlineData("1.0.0", "1.0.0.2")]
        [InlineData("1.0.0", "1.0.1.3")]
        public void SafeUpdatePackageDoNotUpdateToUnlistedPackage(string versionA1, string versionA2)
        {
            // Arrange
            var packageA1 = PackageUtility.CreatePackage("A", versionA1);
            var packageA2 = PackageUtility.CreatePackage("A", versionA2, listed: false);

            var sharedRepository = new MockSharedPackageRepository();
            sharedRepository.AddPackage(packageA1);

            var packageRepository = new MockPackageRepository { packageA1, packageA2 };

            var packageManager = new MockVsPackageManager(
                TestUtils.GetSolutionManagerWithProjects(),
                packageRepository,
                sharedRepository);

            var packageManagerFactory = new Mock<IVsPackageManagerFactory>(MockBehavior.Strict);
            packageManagerFactory.Setup(m => m.CreatePackageManager()).Returns(packageManager);

            // Act
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManager(), packageManagerFactory.Object, null, new Mock<IVsPackageSourceProvider>().Object, new Mock<IHttpClientEvents>().Object, null, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Id = "A";
            cmdlet.Safe = true;
            cmdlet.Execute();

            // Assert
            Assert.True(sharedRepository.Contains(packageA1));
            Assert.False(sharedRepository.Contains(packageA2));
        }

        /// <summary>
        /// An explicitly requested -Version is honored even when that package is unlisted.
        /// </summary>
        [Theory]
        [InlineData("1.0.0", "2.0.0")]
        [InlineData("1.0.0", "1.0.1")]
        [InlineData("1.0.0", "2.0.0-alpha")]
        [InlineData("1.0.0-beta", "2.0.0")]
        [InlineData("1.0.0-beta", "1.0.1-beta")]
        public void UpdatePackageUpdateToUnlistedPackageIfVersionIsSet(string versionA1, string versionA2)
        {
            // Arrange
            var packageA1 = PackageUtility.CreatePackage("A", versionA1);
            var packageA2 = PackageUtility.CreatePackage("A", versionA2, listed: false);

            // CallBase so the mock behaves like a real MockPackageRepository while also
            // implementing ISharedPackageRepository.
            var localRepository = new Mock<MockPackageRepository>() { CallBase = true };
            var sharedRepository = localRepository.As<ISharedPackageRepository>();
            localRepository.Object.AddPackage(packageA1);

            var packageRepository = new MockPackageRepository { packageA1, packageA2 };

            var packageManager = new MockVsPackageManager(
                TestUtils.GetSolutionManagerWithProjects(),
                packageRepository,
                sharedRepository.Object);

            var packageManagerFactory = new Mock<IVsPackageManagerFactory>(MockBehavior.Strict);
            packageManagerFactory.Setup(m => m.CreatePackageManager()).Returns(packageManager);

            // Act
            var cmdlet = new UpdatePackageCommand(TestUtils.GetSolutionManager(), packageManagerFactory.Object, null, new Mock<IVsPackageSourceProvider>().Object, new Mock<IHttpClientEvents>().Object, null, new Mock<IVsCommonOperations>().Object, new Mock<IDeleteOnRestartManager>().Object);
            cmdlet.Id = "A";
            cmdlet.Version = new SemanticVersion(versionA2);
            cmdlet.Execute();

            // Assert
            Assert.False(localRepository.Object.Contains(packageA1));
            Assert.True(localRepository.Object.Contains(packageA2));
        }

        // Builds an IVsPackageSourceProvider whose LoadPackageSources returns the given sources.
        private static IVsPackageSourceProvider GetPackageSourceProvider(params PackageSource[] sources)
        {
            var sourceProvider = new Mock<IVsPackageSourceProvider>();
            sourceProvider.Setup(c => c.LoadPackageSources()).Returns(sources);
            return sourceProvider.Object;
        }

        // Test double that records the arguments UpdatePackage is called with
        // instead of performing a real update.
        private class MockVsPackageManager : VsPackageManager
        {
            public MockVsPackageManager()
                : this(new Mock<IPackageRepository>().Object)
            {
            }

            public MockVsPackageManager(IPackageRepository sourceRepository)
                : base(new Mock<ISolutionManager>().Object, sourceRepository, new Mock<IFileSystemProvider>().Object, new Mock<IFileSystem>().Object, new Mock<ISharedPackageRepository>().Object, new Mock<IDeleteOnRestartManager>().Object, new Mock<VsPackageInstallerEvents>().Object)
            {
            }

            public MockVsPackageManager(
                ISolutionManager solutionManager,
                IPackageRepository sourceRepository,
                ISharedPackageRepository localRepository)
                : base(solutionManager, sourceRepository, new Mock<IFileSystemProvider>().Object, new MockFileSystem("x:\\root"), localRepository, new Mock<IDeleteOnRestartManager>().Object, new Mock<VsPackageInstallerEvents>().Object)
            {
            }

            // Last arguments seen by UpdatePackage (null/default until it is called).
            public IProjectManager ProjectManager { get; set; }

            public string PackageId { get; set; }

            public SemanticVersion Version { get; set; }

            public bool UpdateDependencies { get; set; }

            public override void UpdatePackage(IProjectManager projectManager, string packageId, SemanticVersion version, bool updateDependencies, bool allowPreReleaseVersions, ILogger logger)
            {
                // Record only; no real update is performed.
                ProjectManager = projectManager;
                PackageId = packageId;
                Version = version;
                UpdateDependencies = updateDependencies;
            }

            public override IProjectManager GetProjectManager(Project project)
            {
                return new Mock<IProjectManager>().Object;
            }
        }
    }
}
// Cis.Fiscalization v1.3.0 :: CIS WSDL v1.4 (2012-2017) // https://github.com/tgrospic/Cis.Fiscalization // Copyright (c) 2013-present Tomislav Grospic // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
using System;
using System.Security.Cryptography.X509Certificates;
using System.Threading.Tasks;

namespace Cis
{
    public static partial class Fiscalization
    {
        #region Service Async API

        /// <summary>
        /// Send invoice request async
        /// </summary>
        /// <param name="request">Request to send</param>
        /// <param name="certificate">Signing certificate, optional if request is already signed</param>
        /// <param name="setupService">Function to set service settings</param>
        public static Task<RacunOdgovor> SendInvoiceRequestAsync(RacunZahtjev request, X509Certificate2 certificate = null, Action<FiskalizacijaService> setupService = null)
        {
            if (request == null)
                throw new ArgumentNullException("request");
            if (request.Racun == null)
                throw new ArgumentNullException("request.Racun");

            return SignAndSendRequestAsync<RacunZahtjev, RacunOdgovor>(request, x => x.RacuniAsync, certificate, setupService);
        }

        /// <summary>
        /// Send invoice async
        /// </summary>
        /// <param name="invoice">Invoice to send</param>
        /// <param name="certificate">Signing certificate</param>
        /// <param name="setupService">Function to set service settings</param>
        public static Task<RacunOdgovor> SendInvoiceAsync(RacunType invoice, X509Certificate2 certificate, Action<FiskalizacijaService> setupService = null)
        {
            if (invoice == null)
                throw new ArgumentNullException("invoice");
            if (certificate == null)
                throw new ArgumentNullException("certificate");

            var request = new RacunZahtjev
            {
                Racun = invoice,
                Zaglavlje = Cis.Fiscalization.GetRequestHeader()
            };

            return SendInvoiceRequestAsync(request, certificate, setupService);
        }

        /// <summary>
        /// Check invoice request async
        /// </summary>
        /// <param name="request">Request to send</param>
        /// <param name="certificate">Signing certificate, optional if request is already signed</param>
        /// <param name="setupService">Function to set service settings</param>
        public static Task<ProvjeraOdgovor> CheckInvoiceRequestAsync(ProvjeraZahtjev request, X509Certificate2 certificate = null, Action<FiskalizacijaService> setupService = null)
        {
            if (request == null)
                throw new ArgumentNullException("request");
            if (request.Racun == null)
                throw new ArgumentNullException("request.Racun");

            return SignAndSendRequestAsync<ProvjeraZahtjev, ProvjeraOdgovor>(request, x => x.ProvjeraAsync, certificate, setupService);
        }

        /// <summary>
        /// Check invoice async
        /// </summary>
        /// <param name="invoice">Invoice to check</param>
        /// <param name="certificate">Signing certificate</param>
        /// <param name="setupService">Function to set service settings</param>
        public static Task<ProvjeraOdgovor> CheckInvoiceAsync(RacunType invoice, X509Certificate2 certificate, Action<FiskalizacijaService> setupService = null)
        {
            if (invoice == null)
                throw new ArgumentNullException("invoice");
            if (certificate == null)
                throw new ArgumentNullException("certificate");

            var request = new ProvjeraZahtjev
            {
                Racun = invoice,
                Zaglavlje = Cis.Fiscalization.GetRequestHeader()
            };

            return CheckInvoiceRequestAsync(request, certificate, setupService);
        }

        /// <summary>
        /// Send echo request async
        /// </summary>
        /// <param name="echo">String to send</param>
        /// <param name="setupService">Function to set service settings</param>
        public static Task<string> SendEchoAsync(string echo, Action<FiskalizacijaService> setupService = null)
        {
            if (echo == null)
                throw new ArgumentNullException("echo");

            // Create service endpoint
            var fs = new FiskalizacijaService();
            if (setupService != null)
                setupService(fs);

            // Response is not signed
            fs.CheckResponseSignature = false;

            // Send request
            return fs.EchoAsync(echo);
        }

        #endregion

        #region Send methods (generic)

        /// <summary>
        /// Send request async
        /// </summary>
        /// <typeparam name="TRequest">Type of service method argument</typeparam>
        /// <typeparam name="TResponse">Type of service method result</typeparam>
        /// <param name="request">Request to send</param>
        /// <param name="serviceMethod">Function to provide service method</param>
        /// <param name="certificate">Signing certificate</param>
        /// <param name="setupService">Function to set service settings</param>
        /// <returns>Service response object</returns>
        public static async Task<TResponse> SignAndSendRequestAsync<TRequest, TResponse>(TRequest request, Func<FiskalizacijaService, Func<TRequest, Task<TResponse>>> serviceMethod, X509Certificate2 certificate = null, Action<FiskalizacijaService> setupService = null)
            where TRequest : ICisRequest
            where TResponse : ICisResponse
        {
            if (request == null)
                throw new ArgumentNullException("request");
            if (serviceMethod == null)
                throw new ArgumentNullException("serviceMethod");
            // BUGFIX: the reported parameter name was "cert", which is not a
            // parameter of this method; report the actual parameter name.
            if (certificate == null && request.Signature == null)
                throw new ArgumentNullException("certificate");

            // Create service endpoint
            var fs = new FiskalizacijaService();
            fs.CheckResponseSignature = true;
            if (setupService != null)
                setupService(fs);

            // Sign request
            Sign(request, certificate);

            // Send request to fiscalization service
            var method = serviceMethod(fs);
            var result = await method(request);

            // Add reference to request object
            result.Request = request;
            ThrowOnResponseErrors(result);

            return result;
        }

        /// <summary>
        /// Send request (sync) using async service method
        /// TODO: Test
        /// </summary>
        /// <typeparam name="TRequest">Type of service method argument</typeparam>
        /// <typeparam name="TResponse">Type of service method result</typeparam>
        /// <param name="request">Request to send</param>
        /// <param name="serviceMethod">Function to provide service method</param>
        /// <param name="certificate">Signing certificate</param>
        /// <param name="setupService">Function to set service settings</param>
        /// <returns>Service response object</returns>
        public static TResponse SignAndSendRequest<TRequest, TResponse>(TRequest request, Func<FiskalizacijaService, Func<TRequest, Task<TResponse>>> serviceMethod, X509Certificate2 certificate = null, Action<FiskalizacijaService> setupService = null)
            where TRequest : ICisRequest
            where TResponse : ICisResponse
        {
            var task = SignAndSendRequestAsync(request, serviceMethod, certificate, setupService);
            try
            {
                // Wait for task to end
                task.Wait();
            }
            catch (AggregateException aggEx)
            {
                // We are sure that only one error exists; rethrow it while preserving
                // its original stack trace (a plain `throw aggEx.InnerException;`
                // would reset the trace to this frame).
                System.Runtime.ExceptionServices.ExceptionDispatchInfo.Capture(aggEx.InnerException).Throw();
                throw; // unreachable; satisfies definite-assignment/flow analysis
            }

            return task.Result;
        }

        #endregion
    }

    #region FiskalizacijaService partial implementation

    public partial class FiskalizacijaService
    {
        #region Async (TPL) version of main methods

        public Task<RacunOdgovor> RacuniAsync(RacunZahtjev request)
        {
            return Task.Factory.FromAsync(Beginracuni, Endracuni, request, null);
        }

        public Task<ProvjeraOdgovor> ProvjeraAsync(ProvjeraZahtjev request)
        {
            return Task.Factory.FromAsync(Beginprovjera, Endprovjera, request, null);
        }

        public Task<string> EchoAsync(string request)
        {
            return Task.Factory.FromAsync(Beginecho, Endecho, request, null);
        }

        #endregion
    }

    #endregion
}
using Microsoft.IdentityModel; using Microsoft.IdentityModel.S2S.Protocols.OAuth2; using Microsoft.IdentityModel.S2S.Tokens; using Microsoft.SharePoint.Client; using Microsoft.SharePoint.Client.EventReceivers; using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Globalization; using System.IdentityModel.Selectors; using System.IdentityModel.Tokens; using System.IO; using System.Linq; using System.Net; using System.Security.Cryptography.X509Certificates; using System.Security.Principal; using System.ServiceModel; using System.Text; using System.Web; using System.Web.Configuration; using System.Web.Script.Serialization; using AudienceRestriction = Microsoft.IdentityModel.Tokens.AudienceRestriction; using AudienceUriValidationFailedException = Microsoft.IdentityModel.Tokens.AudienceUriValidationFailedException; using SecurityTokenHandlerConfiguration = Microsoft.IdentityModel.Tokens.SecurityTokenHandlerConfiguration; using X509SigningCredentials = Microsoft.IdentityModel.SecurityTokenService.X509SigningCredentials; namespace PowerPointAssemblyWeb { public static class TokenHelper { #region public fields /// <summary> /// SharePoint principal. /// </summary> public const string SharePointPrincipal = "00000003-0000-0ff1-ce00-000000000000"; /// <summary> /// Lifetime of HighTrust access token, 12 hours. /// </summary> public static readonly TimeSpan HighTrustAccessTokenLifetime = TimeSpan.FromHours(12.0); #endregion public fields #region public methods /// <summary> /// Retrieves the context token string from the specified request by looking for well-known parameter names in the /// POSTed form parameters and the querystring. Returns null if no context token is found. 
/// </summary>
        /// <param name="request">HttpRequest in which to look for a context token</param>
        /// <returns>The context token string, or null if none is present</returns>
        public static string GetContextTokenFromRequest(HttpRequest request)
        {
            // Delegate to the HttpRequestBase overload via the standard wrapper.
            var wrappedRequest = new HttpRequestWrapper(request);
            return GetContextTokenFromRequest(wrappedRequest);
        }

        /// <summary>
        /// Retrieves the context token string from the specified request by looking for well-known parameter names in the
        /// POSTed form parameters and the querystring. Returns null if no context token is found.
        /// </summary>
        /// <param name="request">HttpRequest in which to look for a context token</param>
        /// <returns>The context token string, or null if none is present</returns>
        public static string GetContextTokenFromRequest(HttpRequestBase request)
        {
            // Well-known parameter names, probed in order; for each candidate the
            // POSTed form is consulted before the query string.
            string[] paramNames = { "AppContext", "AppContextToken", "AccessToken", "SPAppToken" };

            foreach (string candidate in paramNames)
            {
                string formValue = request.Form[candidate];
                if (!string.IsNullOrEmpty(formValue))
                {
                    return formValue;
                }

                string queryValue = request.QueryString[candidate];
                if (!string.IsNullOrEmpty(queryValue))
                {
                    return queryValue;
                }
            }

            // No well-known parameter carried a token.
            return null;
        }

        /// <summary>
        /// Validate that a specified context token string is intended for this application based on the parameters
        /// specified in web.config. Parameters used from web.config used for validation include ClientId,
        /// HostedAppHostNameOverride, HostedAppHostName, ClientSecret, and Realm (if it is specified). If HostedAppHostNameOverride is present,
        /// it will be used for validation. Otherwise, if the <paramref name="appHostName"/> is not
        /// null, it is used for validation instead of the web.config's HostedAppHostName. If the token is invalid, an
        /// exception is thrown. If the token is valid, TokenHelper's static STS metadata url is updated based on the token contents
        /// and a JsonWebSecurityToken based on the context token is returned.
/// </summary>
        /// <param name="contextTokenString">The context token to validate</param>
        /// <param name="appHostName">The URL authority, consisting of Domain Name System (DNS) host name or IP address and the port number, to use for token audience validation.
        /// If null, HostedAppHostName web.config setting is used instead. HostedAppHostNameOverride web.config setting, if present, will be used
        /// for validation instead of <paramref name="appHostName"/> .</param>
        /// <returns>A JsonWebSecurityToken based on the context token.</returns>
        public static SharePointContextToken ReadAndValidateContextToken(string contextTokenString, string appHostName = null)
        {
            JsonWebSecurityTokenHandler tokenHandler = CreateJsonWebSecurityTokenHandler();
            SecurityToken securityToken = tokenHandler.ReadToken(contextTokenString);
            JsonWebSecurityToken jsonToken = securityToken as JsonWebSecurityToken;
            SharePointContextToken token = SharePointContextToken.Create(jsonToken);

            // Cache the ACS global endpoint prefix and host derived from the token's
            // STS authority, splitting on the first '.'.
            // NOTE(review): assumes the authority contains a '.'; IndexOf returning -1
            // would make Substring throw — confirm the STS URI format guarantees this.
            string stsAuthority = (new Uri(token.SecurityTokenServiceUri)).Authority;
            int firstDot = stsAuthority.IndexOf('.');

            GlobalEndPointPrefix = stsAuthority.Substring(0, firstDot);
            AcsHostUrl = stsAuthority.Substring(firstDot + 1);

            // Signature/lifetime validation of the raw JWT; audience is checked manually below.
            tokenHandler.ValidateToken(jsonToken);

            // Build the list of host names the token's audience may be addressed to:
            // HostedAppHostNameOverride wins, then the explicit appHostName argument,
            // then the HostedAppHostName setting.
            string[] acceptableAudiences;
            if (!String.IsNullOrEmpty(HostedAppHostNameOverride))
            {
                acceptableAudiences = HostedAppHostNameOverride.Split(';');
            }
            else if (appHostName == null)
            {
                acceptableAudiences = new[] { HostedAppHostName };
            }
            else
            {
                acceptableAudiences = new[] { appHostName };
            }

            // The token audience must match "<ClientId>/<audience>@<realm>" (case-insensitive)
            // for at least one acceptable audience.
            bool validationSuccessful = false;
            string realm = Realm ?? token.Realm;
            foreach (var audience in acceptableAudiences)
            {
                string principal = GetFormattedPrincipal(ClientId, audience, realm);
                if (StringComparer.OrdinalIgnoreCase.Equals(token.Audience, principal))
                {
                    validationSuccessful = true;
                    break;
                }
            }

            if (!validationSuccessful)
            {
                throw new AudienceUriValidationFailedException(
                    String.Format(CultureInfo.CurrentCulture,
                    "\"{0}\" is not the intended audience \"{1}\"", String.Join(";", acceptableAudiences), token.Audience));
            }

            return token;
        }

        /// <summary>
        /// Retrieves an access token from ACS to call the source of the specified context token at the specified
        /// targetHost. The targetHost must be registered for the principal that sent the context token.
        /// </summary>
        /// <param name="contextToken">Context token issued by the intended access token audience</param>
        /// <param name="targetHost">Url authority of the target principal</param>
        /// <returns>An access token with an audience matching the context token's source, or null if the context token carries no refresh token</returns>
        public static OAuth2AccessTokenResponse GetAccessToken(SharePointContextToken contextToken, string targetHost)
        {
            string targetPrincipalName = contextToken.TargetPrincipalName;

            // Extract the refreshToken from the context token
            string refreshToken = contextToken.RefreshToken;

            // Without a refresh token there is nothing to exchange; caller gets null.
            if (String.IsNullOrEmpty(refreshToken))
            {
                return null;
            }

            string targetRealm = Realm ?? contextToken.Realm;

            return GetAccessToken(refreshToken,
                                  targetPrincipalName,
                                  targetHost,
                                  targetRealm);
        }

        /// <summary>
        /// Uses the specified authorization code to retrieve an access token from ACS to call the specified principal
        /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is
        /// null, the "Realm" setting in web.config will be used instead.
/// </summary> /// <param name="authorizationCode">Authorization code to exchange for access token</param> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <param name="redirectUri">Redirect URI registerd for this app</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAccessToken( string authorizationCode, string targetPrincipalName, string targetHost, string targetRealm, Uri redirectUri) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, null, targetRealm); // Create request for token. The RedirectUri is null here. This will fail if redirect uri is registered OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithAuthorizationCode( clientId, ClientSecret, authorizationCode, redirectUri, resource); // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Uses the specified refresh token to retrieve an access token from ACS to call the specified principal /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. 
/// </summary> /// <param name="refreshToken">Refresh token to exchange for access token</param> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAccessToken( string refreshToken, string targetPrincipalName, string targetHost, string targetRealm) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, null, targetRealm); OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithRefreshToken(clientId, ClientSecret, refreshToken, resource); // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Retrieves an app-only access token from ACS to call the specified principal /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. 
/// </summary>
/// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <returns>An access token with an audience of the target principal</returns>
public static OAuth2AccessTokenResponse GetAppOnlyAccessToken(
    string targetPrincipalName,
    string targetHost,
    string targetRealm)
{
    if (targetRealm == null)
    {
        targetRealm = Realm;
    }

    string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm);
    string clientId = GetFormattedPrincipal(ClientId, HostedAppHostName, targetRealm);

    OAuth2AccessTokenRequest oauth2Request =
        OAuth2MessageFactory.CreateAccessTokenRequestWithClientCredentials(clientId, ClientSecret, resource);
    oauth2Request.Resource = resource;

    // Get token
    OAuth2S2SClient client = new OAuth2S2SClient();
    OAuth2AccessTokenResponse oauth2Response;
    try
    {
        oauth2Response =
            client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse;
    }
    catch (WebException wex)
    {
        // wex.Response is null when the failure never produced an HTTP response
        // (DNS failure, connection refused, timeout); rethrow the original exception
        // in that case instead of hiding it behind a NullReferenceException.
        if (wex.Response == null)
        {
            throw;
        }
        using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream()))
        {
            string responseText = sr.ReadToEnd();
            throw new WebException(wex.Message + " - " + responseText, wex);
        }
    }

    return oauth2Response;
}

/// <summary>
/// Creates a client context based on the properties of a remote event receiver
/// </summary>
/// <param name="properties">Properties of a remote event receiver</param>
/// <returns>A ClientContext ready to call the web where the event originated, or null if no event source URL is present</returns>
public static ClientContext CreateRemoteEventReceiverClientContext(SPRemoteEventProperties properties)
{
    // Determine the originating web from whichever event property bag is populated.
    Uri sharepointUrl;
    if (properties.ListEventProperties != null)
    {
        sharepointUrl = new Uri(properties.ListEventProperties.WebUrl);
    }
    else if (properties.ItemEventProperties != null)
    {
        sharepointUrl = new Uri(properties.ItemEventProperties.WebUrl);
    }
    else if (properties.WebEventProperties != null)
    {
        sharepointUrl = new Uri(properties.WebEventProperties.FullUrl);
    }
    else
    {
        return null;
    }

    if (IsHighTrustApp())
    {
        return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null);
    }

    return CreateAcsClientContextForUrl(properties, sharepointUrl);
}

/// <summary>
/// Creates a client context based on the properties of an app event
/// </summary>
/// <param name="properties">Properties of an app event</param>
/// <param name="useAppWeb">True to target the app web, false to target the host web</param>
/// <returns>A ClientContext ready to call the app web or the parent web, or null if no app event properties are present</returns>
public static ClientContext CreateAppEventClientContext(SPRemoteEventProperties properties, bool useAppWeb)
{
    if (properties.AppEventProperties == null)
    {
        return null;
    }

    Uri sharepointUrl = useAppWeb ? properties.AppEventProperties.AppWebFullUrl : properties.AppEventProperties.HostWebFullUrl;

    if (IsHighTrustApp())
    {
        return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null);
    }

    return CreateAcsClientContextForUrl(properties, sharepointUrl);
}

/// <summary>
/// Retrieves an access token from ACS using the specified authorization code, and uses that access token to
/// create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param>
/// <param name="redirectUri">Redirect URI registered for this app</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithAuthorizationCode(
    string targetUrl,
    string authorizationCode,
    Uri redirectUri)
{
    // Discover the realm from the target site, then delegate to the full overload.
    return GetClientContextWithAuthorizationCode(targetUrl, SharePointPrincipal, authorizationCode, GetRealmFromTargetUrl(new Uri(targetUrl)), redirectUri);
}

/// <summary>
/// Retrieves an access token from ACS using the specified authorization code, and uses that access token to
/// create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="targetPrincipalName">Name of the target SharePoint principal</param>
/// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <param name="redirectUri">Redirect URI registered for this app</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithAuthorizationCode(
    string targetUrl,
    string targetPrincipalName,
    string authorizationCode,
    string targetRealm,
    Uri redirectUri)
{
    Uri targetUri = new Uri(targetUrl);

    string accessToken =
        GetAccessToken(authorizationCode, targetPrincipalName, targetUri.Authority, targetRealm, redirectUri).AccessToken;

    return GetClientContextWithAccessToken(targetUrl, accessToken);
}

/// <summary>
/// Uses the specified access token to create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="accessToken">Access token to be used when calling the specified targetUrl</param>
/// <returns>A ClientContext ready to call targetUrl with the specified access token</returns>
public static ClientContext GetClientContextWithAccessToken(string targetUrl, string accessToken)
{
    ClientContext clientContext = new ClientContext(targetUrl);

    clientContext.AuthenticationMode = ClientAuthenticationMode.Anonymous;
    clientContext.FormDigestHandlingEnabled = false;
    // Inject the bearer token into every outgoing CSOM request.
    clientContext.ExecutingWebRequest +=
        delegate(object oSender, WebRequestEventArgs webRequestEventArgs)
        {
            webRequestEventArgs.WebRequestExecutor.RequestHeaders["Authorization"] =
                "Bearer " + accessToken;
        };

    return clientContext;
}

/// <summary>
/// Retrieves an access token from ACS using the specified context token, and uses that access token to create
/// a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="contextTokenString">Context token received from the target SharePoint site</param>
/// <param name="appHostUrl">Url authority of the hosted app. If this is null, the value in the HostedAppHostName
/// of web.config will be used instead</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithContextToken(
    string targetUrl,
    string contextTokenString,
    string appHostUrl)
{
    SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, appHostUrl);

    Uri targetUri = new Uri(targetUrl);

    string accessToken = GetAccessToken(contextToken, targetUri.Authority).AccessToken;

    return GetClientContextWithAccessToken(targetUrl, accessToken);
}

/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
/// an authorization code.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
/// (e.g. "Web.Read Site.Write")</param>
/// <returns>Url of the SharePoint site's OAuth authorization page</returns>
public static string GetAuthorizationUrl(string contextUrl, string scope)
{
    // NOTE(review): scope is interpolated without URL-encoding; callers presumably
    // pass shorthand scopes with no reserved characters beyond spaces — confirm.
    return string.Format(
        "{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code",
        EnsureTrailingSlash(contextUrl),
        AuthorizationPage,
        ClientId,
        scope);
}

/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
/// an authorization code.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
/// (e.g. "Web.Read Site.Write")</param>
/// <param name="redirectUri">Uri to which SharePoint should redirect the browser to after consent is
/// granted</param>
/// <returns>Url of the SharePoint site's OAuth authorization page</returns>
public static string GetAuthorizationUrl(string contextUrl, string scope, string redirectUri)
{
    return string.Format(
        "{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code&redirect_uri={4}",
        EnsureTrailingSlash(contextUrl),
        AuthorizationPage,
        ClientId,
        scope,
        redirectUri);
}

/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request a new context token.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="redirectUri">Uri to which SharePoint should redirect the browser to with a context token</param>
/// <returns>Url of the SharePoint site's context token redirect page</returns>
public static string GetAppContextTokenRequestUrl(string contextUrl, string redirectUri)
{
    return string.Format(
        "{0}{1}?client_id={2}&redirect_uri={3}",
        EnsureTrailingSlash(contextUrl),
        RedirectPage,
        ClientId,
        redirectUri);
}

/// <summary>
/// Retrieves an S2S access token signed by the application's private certificate on behalf of the specified
/// WindowsIdentity and intended for the SharePoint at the targetApplicationUri. If no Realm is specified in
/// web.config, an auth challenge will be issued to the targetApplicationUri to discover it.
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <param name="identity">Windows identity of the user on whose behalf to create the access token; null produces an app-only token</param>
/// <returns>An access token with an audience of the target principal</returns>
public static string GetS2SAccessTokenWithWindowsIdentity(
    Uri targetApplicationUri,
    WindowsIdentity identity)
{
    // Configured realm takes precedence; otherwise discover it via an auth challenge.
    string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm;

    // A null identity yields null claims, which GetS2SAccessTokenWithClaims treats as app-only.
    JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null;

    return GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims);
}

/// <summary>
/// Retrieves an S2S client context with an access token signed by the application's private certificate on
/// behalf of the specified WindowsIdentity and intended for application at the targetApplicationUri using the
/// targetRealm. If no Realm is specified in web.config, an auth challenge will be issued to the
/// targetApplicationUri to discover it.
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <param name="identity">Windows identity of the user on whose behalf to create the access token; null produces an app-only token</param>
/// <returns>A ClientContext using an access token with an audience of the target application</returns>
public static ClientContext GetS2SClientContextWithWindowsIdentity(
    Uri targetApplicationUri,
    WindowsIdentity identity)
{
    string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm;

    JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null;

    string accessToken = GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims);

    return GetClientContextWithAccessToken(targetApplicationUri.ToString(), accessToken);
}

/// <summary>
/// Get authentication realm from SharePoint
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <returns>String representation of the realm GUID, or null if the challenge response does not contain one</returns>
public static string GetRealmFromTargetUrl(Uri targetApplicationUri)
{
    // Issue an unauthenticated request with an empty bearer header; SharePoint
    // responds 401 with a WWW-Authenticate header that embeds the realm GUID.
    WebRequest request = WebRequest.Create(targetApplicationUri + "/_vti_bin/client.svc");
    request.Headers.Add("Authorization: Bearer ");

    try
    {
        // The request is expected to fail; the realm is extracted from the WebException.
        using (request.GetResponse())
        {
        }
    }
    catch (WebException e)
    {
        if (e.Response == null)
        {
            return null;
        }

        string bearerResponseHeader = e.Response.Headers["WWW-Authenticate"];
        if (string.IsNullOrEmpty(bearerResponseHeader))
        {
            return null;
        }

        const string bearer = "Bearer realm=\"";
        int bearerIndex = bearerResponseHeader.IndexOf(bearer, StringComparison.Ordinal);
        if (bearerIndex < 0)
        {
            return null;
        }

        int realmIndex = bearerIndex + bearer.Length;

        // A realm is a 36-character GUID string; validate before returning.
        if (bearerResponseHeader.Length >= realmIndex + 36)
        {
            string targetRealm = bearerResponseHeader.Substring(realmIndex, 36);

            Guid realmGuid;

            if (Guid.TryParse(targetRealm, out realmGuid))
            {
                return targetRealm;
            }
        }
    }
    return null;
}

/// <summary>
/// Determines if this is a high trust app.
/// </summary>
/// <returns>True if this is a high trust app.</returns>
public static bool IsHighTrustApp()
{
    // High trust means a client signing certificate was configured (see SigningCredentials).
    return SigningCredentials != null;
}

/// <summary>
/// Ensures that the specified URL ends with '/' if it is not null or empty.
/// </summary>
/// <param name="url">The url.</param>
/// <returns>The url ending with '/' if it is not null or empty.</returns>
public static string EnsureTrailingSlash(string url)
{
    if (!string.IsNullOrEmpty(url) && url[url.Length - 1] != '/')
    {
        return url + "/";
    }

    return url;
}

#endregion

#region private fields

//
// Configuration Constants
//

private const string AuthorizationPage = "_layouts/15/OAuthAuthorize.aspx";
private const string RedirectPage = "_layouts/15/AppRedirect.aspx";
private const string AcsPrincipalName = "00000001-0000-0000-c000-000000000000";
private const string AcsMetadataEndPointRelativeUrl = "metadata/json/1";
private const string S2SProtocol = "OAuth2";
private const string DelegationIssuance = "DelegationIssuance1.0";
private const string NameIdentifierClaimType = JsonWebTokenConstants.ReservedClaims.NameIdentifier;
private const string TrustedForImpersonationClaimType = "trustedfordelegation";
private const string ActorTokenClaimType = JsonWebTokenConstants.ReservedClaims.ActorToken;

//
// Environment Constants
//

// Mutable on purpose: ReadAndValidateContextToken overwrites these from the
// security token service URI carried in a validated context token.
private static string GlobalEndPointPrefix = "accounts";
private static string AcsHostUrl = "accesscontrol.windows.net";

//
// Hosted app configuration
//

// "HostedAppName"/"HostedAppSigningKey" are legacy setting names kept as fallbacks
// for "ClientId"/"ClientSecret".
private static readonly string ClientId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientId")) ? WebConfigurationManager.AppSettings.Get("HostedAppName") : WebConfigurationManager.AppSettings.Get("ClientId");
private static readonly string IssuerId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("IssuerId")) ? ClientId : WebConfigurationManager.AppSettings.Get("IssuerId");
private static readonly string HostedAppHostNameOverride = WebConfigurationManager.AppSettings.Get("HostedAppHostNameOverride");
private static readonly string HostedAppHostName = WebConfigurationManager.AppSettings.Get("HostedAppHostName");
private static readonly string ClientSecret = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientSecret")) ? WebConfigurationManager.AppSettings.Get("HostedAppSigningKey") : WebConfigurationManager.AppSettings.Get("ClientSecret");
private static readonly string SecondaryClientSecret = WebConfigurationManager.AppSettings.Get("SecondaryClientSecret");
private static readonly string Realm = WebConfigurationManager.AppSettings.Get("Realm");
// ServiceNamespace deliberately mirrors the "Realm" setting (ACS namespace == realm).
private static readonly string ServiceNamespace = WebConfigurationManager.AppSettings.Get("Realm");

// High-trust (S2S) certificate configuration; both values must be present for
// ClientCertificate/SigningCredentials to be created (see IsHighTrustApp).
private static readonly string ClientSigningCertificatePath = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePath");
private static readonly string ClientSigningCertificatePassword = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePassword");
private static readonly X509Certificate2 ClientCertificate = (string.IsNullOrEmpty(ClientSigningCertificatePath) || string.IsNullOrEmpty(ClientSigningCertificatePassword)) ? null : new X509Certificate2(ClientSigningCertificatePath, ClientSigningCertificatePassword);
private static readonly X509SigningCredentials SigningCredentials = (ClientCertificate == null) ? null : new X509SigningCredentials(ClientCertificate, SecurityAlgorithms.RsaSha256Signature, SecurityAlgorithms.Sha256Digest);

#endregion

#region private methods

// Builds a ClientContext for the given URL from the context token carried in the
// remote event properties; returns null when no context token is present.
private static ClientContext CreateAcsClientContextForUrl(SPRemoteEventProperties properties, Uri sharepointUrl)
{
    string contextTokenString = properties.ContextToken;

    if (String.IsNullOrEmpty(contextTokenString))
    {
        return null;
    }

    // The WCF incoming message "To" host is the audience this token was sent to.
    SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, OperationContext.Current.IncomingMessageHeaders.To.Host);

    string accessToken = GetAccessToken(contextToken, sharepointUrl.Authority).AccessToken;

    return GetClientContextWithAccessToken(sharepointUrl.ToString(), accessToken);
}

// Returns the ACS metadata endpoint URL. NOTE(review): Path.Combine on a URL only
// works here because the base URL already ends with '/'.
private static string GetAcsMetadataEndpointUrl()
{
    return Path.Combine(GetAcsGlobalEndpointUrl(), AcsMetadataEndPointRelativeUrl);
}

// Formats an OAuth principal identifier: "name/host@realm", or "name@realm" when
// no host name is supplied.
private static string GetFormattedPrincipal(string principalName, string hostName, string realm)
{
    if (!String.IsNullOrEmpty(hostName))
    {
        return String.Format(CultureInfo.InvariantCulture, "{0}/{1}@{2}", principalName, hostName, realm);
    }

    return String.Format(CultureInfo.InvariantCulture, "{0}@{1}", principalName, realm);
}

// Formats the ACS principal for the given realm.
private static string GetAcsPrincipalName(string realm)
{
    return GetFormattedPrincipal(AcsPrincipalName, new Uri(GetAcsGlobalEndpointUrl()).Host, realm);
}

// Builds "https://{prefix}.{host}/" from the current (possibly token-updated)
// ACS endpoint fields.
private static string GetAcsGlobalEndpointUrl()
{
    return String.Format(CultureInfo.InvariantCulture, "https://{0}.{1}/", GlobalEndPointPrefix, AcsHostUrl);
}

// Creates a token handler configured to validate context tokens signed with the
// client secret (and the secondary secret, if configured, to support key rollover).
private static JsonWebSecurityTokenHandler CreateJsonWebSecurityTokenHandler()
{
    JsonWebSecurityTokenHandler handler = new JsonWebSecurityTokenHandler();
    handler.Configuration = new SecurityTokenHandlerConfiguration();
    // Audience is checked manually in ReadAndValidateContextToken, so disable it here.
    handler.Configuration.AudienceRestriction = new AudienceRestriction(AudienceUriMode.Never);
    handler.Configuration.CertificateValidator = X509CertificateValidator.None;

    List<byte[]> securityKeys = new List<byte[]>();
    securityKeys.Add(Convert.FromBase64String(ClientSecret));
    if (!string.IsNullOrEmpty(SecondaryClientSecret))
    {
        securityKeys.Add(Convert.FromBase64String(SecondaryClientSecret));
    }

    List<SecurityToken> securityTokens = new List<SecurityToken>();
    securityTokens.Add(new MultipleSymmetricKeySecurityToken(securityKeys));

    handler.Configuration.IssuerTokenResolver =
        SecurityTokenResolver.CreateDefaultSecurityTokenResolver(
        new ReadOnlyCollection<SecurityToken>(securityTokens),
        false);
    SymmetricKeyIssuerNameRegistry issuerNameRegistry = new SymmetricKeyIssuerNameRegistry();
    foreach (byte[] securitykey in securityKeys)
    {
        issuerNameRegistry.AddTrustedIssuer(securitykey, GetAcsPrincipalName(ServiceNamespace));
    }
    handler.Configuration.IssuerNameRegistry = issuerNameRegistry;
    return handler;
}

// Issues an S2S token for the SharePoint principal; null claims means app-only.
private static string GetS2SAccessTokenWithClaims(
    string targetApplicationHostName,
    string targetRealm,
    IEnumerable<JsonWebTokenClaim> claims)
{
    return IssueToken(
        ClientId,
        IssuerId,
        targetRealm,
        SharePointPrincipal,
        targetRealm,
        targetApplicationHostName,
        true,
        claims,
        claims == null);
}

// Maps a WindowsIdentity to the claims SharePoint expects for S2S user identity:
// the SID as nameid plus an "nii" (identity provider) claim.
private static JsonWebTokenClaim[] GetClaimsWithWindowsIdentity(WindowsIdentity identity)
{
    JsonWebTokenClaim[] claims = new JsonWebTokenClaim[]
    {
        new JsonWebTokenClaim(NameIdentifierClaimType, identity.User.Value.ToLower()),
        new JsonWebTokenClaim("nii", "urn:office:idp:activedirectory")
    };
    return claims;
}

// Issues a high-trust access token: a certificate-signed actor token, optionally
// wrapped in an unsigned outer token carrying the user claims.
private static string IssueToken(
    string sourceApplication,
    string issuerApplication,
    string sourceRealm,
    string targetApplication,
    string targetRealm,
    string targetApplicationHostName,
    bool trustedForDelegation,
    IEnumerable<JsonWebTokenClaim> claims,
    bool appOnly = false)
{
    if (null == SigningCredentials)
    {
        throw new InvalidOperationException("SigningCredentials was not initialized");
    }

    #region Actor token

    string issuer = string.IsNullOrEmpty(sourceRealm) ? issuerApplication : string.Format("{0}@{1}", issuerApplication, sourceRealm);
    string nameid = string.IsNullOrEmpty(sourceRealm) ? sourceApplication : string.Format("{0}@{1}", sourceApplication, sourceRealm);
    string audience = string.Format("{0}/{1}@{2}", targetApplication, targetApplicationHostName, targetRealm);

    List<JsonWebTokenClaim> actorClaims = new List<JsonWebTokenClaim>();
    actorClaims.Add(new JsonWebTokenClaim(JsonWebTokenConstants.ReservedClaims.NameIdentifier, nameid));
    if (trustedForDelegation && !appOnly)
    {
        actorClaims.Add(new JsonWebTokenClaim(TrustedForImpersonationClaimType, "true"));
    }

    // Create token
    JsonWebSecurityToken actorToken = new JsonWebSecurityToken(
        issuer: issuer,
        audience: audience,
        validFrom: DateTime.UtcNow,
        validTo: DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
        signingCredentials: SigningCredentials,
        claims: actorClaims);

    string actorTokenString = new JsonWebSecurityTokenHandler().WriteTokenAsString(actorToken);

    if (appOnly)
    {
        // App-only token is the same as actor token for delegated case
        return actorTokenString;
    }

    #endregion Actor token

    #region Outer token

    List<JsonWebTokenClaim> outerClaims = null == claims ? new List<JsonWebTokenClaim>() : new List<JsonWebTokenClaim>(claims);
    outerClaims.Add(new JsonWebTokenClaim(ActorTokenClaimType, actorTokenString));

    JsonWebSecurityToken jsonToken = new JsonWebSecurityToken(
        nameid, // outer token issuer should match actor token nameid
        audience,
        DateTime.UtcNow,
        DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
        outerClaims);

    string accessToken = new JsonWebSecurityTokenHandler().WriteTokenAsString(jsonToken);

    #endregion Outer token

    return accessToken;
}

#endregion

#region AcsMetadataParser

// This class is used to get MetaData document from the global STS endpoint. It contains
// methods to parse the MetaData document and get endpoints and STS certificate.
public static class AcsMetadataParser
{
    // Returns the ACS signing certificate published in the metadata document.
    public static X509Certificate2 GetAcsSigningCert(string realm)
    {
        JsonMetadataDocument document = GetMetadataDocument(realm);

        if (null != document.keys && document.keys.Count > 0)
        {
            JsonKey signingKey = document.keys[0];

            if (null != signingKey && null != signingKey.keyValue)
            {
                return new X509Certificate2(Encoding.UTF8.GetBytes(signingKey.keyValue.value));
            }
        }

        throw new Exception("Metadata document does not contain ACS signing certificate.");
    }

    // Returns the delegation-issuance endpoint URL from the metadata document.
    public static string GetDelegationServiceUrl(string realm)
    {
        JsonMetadataDocument document = GetMetadataDocument(realm);

        JsonEndpoint delegationEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == DelegationIssuance);

        if (null != delegationEndpoint)
        {
            return delegationEndpoint.location;
        }
        throw new Exception("Metadata document does not contain Delegation Service endpoint Url");
    }

    // Downloads and deserializes the JSON metadata document for the given realm
    // from the global ACS endpoint.
    private static JsonMetadataDocument GetMetadataDocument(string realm)
    {
        string acsMetadataEndpointUrlWithRealm = String.Format(CultureInfo.InvariantCulture, "{0}?realm={1}",
                                                               GetAcsMetadataEndpointUrl(),
                                                               realm);
        byte[] acsMetadata;
        using (WebClient webClient = new WebClient())
        {
            acsMetadata = webClient.DownloadData(acsMetadataEndpointUrlWithRealm);
        }
        string jsonResponseString = Encoding.UTF8.GetString(acsMetadata);

        JavaScriptSerializer serializer = new JavaScriptSerializer();
        JsonMetadataDocument document = serializer.Deserialize<JsonMetadataDocument>(jsonResponseString);

        if (null == document)
        {
            throw new Exception("No metadata document found at the global endpoint " + acsMetadataEndpointUrlWithRealm);
        }

        return document;
    }

    // Returns the OAuth2 security token service (STS) endpoint URL for the realm.
    public static string GetStsUrl(string realm)
    {
        JsonMetadataDocument document = GetMetadataDocument(realm);

        JsonEndpoint s2sEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == S2SProtocol);

        if (null != s2sEndpoint)
        {
            return s2sEndpoint.location;
        }

        throw new Exception("Metadata document does not contain STS endpoint url");
    }

    // The lower-case property names below intentionally match the JSON schema of the
    // ACS metadata document; do not rename.
    private class JsonMetadataDocument
    {
        public string serviceName { get; set; }
        public List<JsonEndpoint> endpoints { get; set; }
        public List<JsonKey> keys { get; set; }
    }

    private class JsonEndpoint
    {
        public string location { get; set; }
        public string protocol { get; set; }
        public string usage { get; set; }
    }

    private class JsonKeyValue
    {
        public string type { get; set; }
        public string value { get; set; }
    }

    private class JsonKey
    {
        public string usage { get; set; }
        public JsonKeyValue keyValue { get; set; }
    }
}

#endregion
}

/// <summary>
/// A JsonWebSecurityToken generated by SharePoint to authenticate to a 3rd party application and allow callbacks using a refresh token
/// </summary>
public class SharePointContextToken : JsonWebSecurityToken
{
    // Copies an already-parsed JWT into a SharePointContextToken so the
    // SharePoint-specific claim accessors below become available.
    public static SharePointContextToken Create(JsonWebSecurityToken contextToken)
    {
        return new SharePointContextToken(contextToken.Issuer, contextToken.Audience, contextToken.ValidFrom, contextToken.ValidTo, contextToken.Claims);
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims)
        : base(issuer, audience, validFrom, validTo, claims)
    {
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SecurityToken issuerToken, JsonWebSecurityToken actorToken)
        : base(issuer, audience, validFrom, validTo, claims, issuerToken, actorToken)
    {
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SigningCredentials signingCredentials)
        : base(issuer, audience, validFrom, validTo, claims, signingCredentials)
    {
    }

    /// <summary>
    /// The context token's "nameid" claim
    /// </summary>
    public string NameId
    {
        get
        {
            return GetClaimValue(this, "nameid");
        }
    }

    /// <summary>
    /// The principal name portion of the context token's "appctxsender" claim
    /// </summary>
    public string TargetPrincipalName
    {
        get
        {
            string appctxsender = GetClaimValue(this, "appctxsender");

            if (appctxsender == null)
            {
                return null;
            }

            // Claim format is "principal@realm"; keep the principal part only.
            return appctxsender.Split('@')[0];
        }
    }

    /// <summary>
    /// The context token's "refreshtoken" claim
    /// </summary>
    public string RefreshToken
    {
        get
        {
            return GetClaimValue(this, "refreshtoken");
        }
    }

    /// <summary>
    /// The context token's "CacheKey" claim
    /// </summary>
    public string CacheKey
    {
        get
        {
            string appctx = GetClaimValue(this, "appctx");
            if (appctx == null)
            {
                return null;
            }

            // The "appctx" claim is a JSON object; a throwaway ClientContext is used
            // purely for its JSON parser (the tempuri.org URL is never contacted).
            ClientContext ctx = new ClientContext("http://tempuri.org");
            Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx);
            string cacheKey = (string)dict["CacheKey"];

            return cacheKey;
        }
    }

    /// <summary>
    /// The context token's "SecurityTokenServiceUri" claim
    /// </summary>
    public string SecurityTokenServiceUri
    {
        get
        {
            string appctx = GetClaimValue(this, "appctx");
            if (appctx == null)
            {
                return null;
            }

            // Same JSON-parsing trick as CacheKey above.
            ClientContext ctx = new ClientContext("http://tempuri.org");
            Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx);
            string securityTokenServiceUri = (string)dict["SecurityTokenServiceUri"];

            return securityTokenServiceUri;
        }
    }

    /// <summary>
    /// The realm portion of the context token's "audience" claim
    /// </summary>
    public string Realm
    {
        get
        {
            string aud = Audience;
            if (aud == null)
            {
                return null;
            }

            // Audience format is "principal/host@realm"; take everything after '@'.
            string tokenRealm = aud.Substring(aud.IndexOf('@') + 1);

            return tokenRealm;
        }
    }

    // Returns the value of the first claim with the given type (ordinal match),
    // or null if the token carries no such claim.
    private static string GetClaimValue(JsonWebSecurityToken token, string claimType)
    {
        if (token == null)
        {
            throw new ArgumentNullException("token");
        }

        foreach (JsonWebTokenClaim claim in token.Claims)
        {
            if (StringComparer.Ordinal.Equals(claim.ClaimType, claimType))
            {
                return claim.Value;
            }
        }

        return null;
    }
}

/// <summary>
/// Represents a security token which contains multiple security keys that are generated using symmetric algorithms.
/// </summary>
public class MultipleSymmetricKeySecurityToken : SecurityToken
{
    /// <summary>
    /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
    /// </summary>
    /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
    public MultipleSymmetricKeySecurityToken(IEnumerable<byte[]> keys)
        : this(UniqueId.CreateUniqueId(), keys)
    {
    }

    /// <summary>
    /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
    /// </summary>
    /// <param name="tokenId">The unique identifier of the security token.</param>
    /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
    public MultipleSymmetricKeySecurityToken(string tokenId, IEnumerable<byte[]> keys)
    {
        if (keys == null)
        {
            throw new ArgumentNullException("keys");
        }

        if (String.IsNullOrEmpty(tokenId))
        {
            throw new ArgumentException("Value cannot be a null or empty string.", "tokenId");
        }

        foreach (byte[] key in keys)
        {
            if (key.Length <= 0)
            {
                throw new ArgumentException("The key length must be greater then zero.", "keys");
            }
        }

        id = tokenId;
        effectiveTime = DateTime.UtcNow;
        securityKeys = CreateSymmetricSecurityKeys(keys);
    }

    /// <summary>
    /// Gets the unique identifier of the security token.
    /// </summary>
    public override string Id
    {
        get
        {
            return id;
        }
    }

    /// <summary>
    /// Gets the cryptographic keys associated with the security token.
    /// </summary>
    public override ReadOnlyCollection<SecurityKey> SecurityKeys
    {
        get
        {
            return securityKeys.AsReadOnly();
        }
    }

    /// <summary>
    /// Gets the first instant in time at which this security token is valid.
    /// </summary>
    public override DateTime ValidFrom
    {
        get
        {
            return effectiveTime;
        }
    }

    /// <summary>
    /// Gets the last instant in time at which this security token is valid.
    /// </summary>
    public override DateTime ValidTo
    {
        get
        {
            // Never expire
            return DateTime.MaxValue;
        }
    }

    /// <summary>
    /// Returns a value that indicates whether the key identifier for this instance can be resolved to the specified key identifier.
    /// </summary>
    /// <param name="keyIdentifierClause">A SecurityKeyIdentifierClause to compare to this instance</param>
    /// <returns>true if keyIdentifierClause is a SecurityKeyIdentifierClause and it has the same unique identifier as the Id property; otherwise, false.</returns>
    public override bool MatchesKeyIdentifierClause(SecurityKeyIdentifierClause keyIdentifierClause)
    {
        if (keyIdentifierClause == null)
        {
            throw new ArgumentNullException("keyIdentifierClause");
        }

        // Since this is a symmetric token and we do not have IDs to distinguish tokens, we just check for the
        // presence of a SymmetricIssuerKeyIdentifier. The actual mapping to the issuer takes place later
        // when the key is matched to the issuer.
        if (keyIdentifierClause is SymmetricIssuerKeyIdentifierClause)
        {
            return true;
        }
        return base.MatchesKeyIdentifierClause(keyIdentifierClause);
    }

    #region private members

    // Wraps each raw key in an in-memory symmetric SecurityKey.
    private List<SecurityKey> CreateSymmetricSecurityKeys(IEnumerable<byte[]> keys)
    {
        List<SecurityKey> symmetricKeys = new List<SecurityKey>();
        foreach (byte[] key in keys)
        {
            symmetricKeys.Add(new InMemorySymmetricSecurityKey(key));
        }
        return symmetricKeys;
    }

    // Token id assigned at construction.
    private string id;
    // UTC timestamp captured at construction; exposed via ValidFrom.
    private DateTime effectiveTime;
    private List<SecurityKey> securityKeys;

    #endregion
}
}
using System;
using System.Collections.Generic;
using System.Collections;

/// <summary>
/// Tests for System.Collections.Generic.List&lt;T&gt;.Ctor(IEnumerable&lt;T&gt;).
/// Each scenario logs through TestLibrary.TestFramework and reports pass/fail.
/// </summary>
public class ListCtor2
{
    /// <summary>
    /// Runs all positive and negative scenarios; returns true only if all pass.
    /// </summary>
    public bool RunTests()
    {
        bool passed = true;

        TestLibrary.TestFramework.LogInformation("[Positive]");
        passed &= PosTest1();
        passed &= PosTest2();
        passed &= PosTest3();
        passed &= PosTest4();

        TestLibrary.TestFramework.LogInformation("[Negative]");
        passed &= NegTest1();

        return passed;
    }

    /// <summary>Constructs a List&lt;int&gt; from an int array (value type element).</summary>
    public bool PosTest1()
    {
        bool passed = true;
        TestLibrary.TestFramework.BeginScenario("PosTest1: The genaric type is a value type");

        try
        {
            int[] source = { 1, 2, 3, 4, 5 };
            List<int> result = new List<int>(source);

            if (result == null)
            {
                TestLibrary.TestFramework.LogError("001", "The constructor does not work well");
                passed = false;
            }
            if (result.Count != 5)
            {
                TestLibrary.TestFramework.LogError("002", "The result is not the value as expected");
                passed = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("003", "Unexpected exception: " + e);
            passed = false;
        }

        return passed;
    }

    /// <summary>Constructs a List&lt;string&gt; from a string array (reference type element).</summary>
    public bool PosTest2()
    {
        bool passed = true;
        TestLibrary.TestFramework.BeginScenario("PosTest2: The generic type is a reference type");

        try
        {
            string[] source = { "Hello", "world", "thanks", "school" };
            List<string> result = new List<string>(source);

            if (result == null)
            {
                TestLibrary.TestFramework.LogError("004", "The constructor does not work well");
                passed = false;
            }
            if (result.Count != 4)
            {
                TestLibrary.TestFramework.LogError("005", "The result is not the value as expected");
                passed = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("006", "Unexpected exception: " + e);
            passed = false;
        }

        return passed;
    }

    /// <summary>Constructs a List&lt;MyClass&gt; from an array of random length (custom type element).</summary>
    public bool PosTest3()
    {
        bool passed = true;
        TestLibrary.TestFramework.BeginScenario("PosTest3: The generic type is a custom type");

        try
        {
            // Random array length in the byte range; the array entries stay null,
            // which the constructor must accept.
            int length = TestLibrary.Generator.GetByte(-55);
            MyClass[] items = new MyClass[length];
            List<MyClass> result = new List<MyClass>(items);

            if (result == null)
            {
                TestLibrary.TestFramework.LogError("007", "The constructor does not work well");
                passed = false;
            }
            if (result.Count != length)
            {
                TestLibrary.TestFramework.LogError("008", "The result is not the value as expected,the count is: " + result.Count + ",The length is: " + length);
                passed = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("009", "Unexpected exception: " + e);
            passed = false;
        }

        return passed;
    }

    /// <summary>Constructs a list from another list (IEnumerable implemented by List&lt;T&gt; itself).</summary>
    public bool PosTest4()
    {
        bool passed = true;
        TestLibrary.TestFramework.BeginScenario("PosTest4: Using a list to construct another list");

        try
        {
            int[] source = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 };
            List<int> first = new List<int>(source);
            List<int> second = new List<int>(first);

            if (second == null)
            {
                TestLibrary.TestFramework.LogError("010", "The constructor does not work well");
                passed = false;
            }
            if (second.Count != 10)
            {
                TestLibrary.TestFramework.LogError("011", "The result is not the value as expected");
                passed = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("012", "Unexpected exception: " + e);
            passed = false;
        }

        return passed;
    }

    /// <summary>Passing a null enumerable must throw ArgumentNullException.</summary>
    public bool NegTest1()
    {
        bool passed = true;
        TestLibrary.TestFramework.BeginScenario("NegTest1: The argument is a null reference");

        try
        {
            IEnumerable<char> source = null;
            List<char> result = new List<char>(source);

            // Reaching this point means no exception was thrown.
            TestLibrary.TestFramework.LogError("101", "The ArgumentNullException was not thrown as expected");
            passed = false;
        }
        catch (ArgumentNullException)
        {
            // Expected.
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("102", "Unexpected exception: " + e);
            TestLibrary.TestFramework.LogInformation(e.StackTrace);
            passed = false;
        }

        return passed;
    }

    /// <summary>Test-harness entry point; returns 100 on success, 0 on failure.</summary>
    public static int Main()
    {
        ListCtor2 test = new ListCtor2();
        TestLibrary.TestFramework.BeginTestCase("ListCtor2");

        bool ok = test.RunTests();
        TestLibrary.TestFramework.EndTestCase();

        if (ok)
        {
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        TestLibrary.TestFramework.LogInformation("FAIL");
        return 0;
    }
}

/// <summary>Empty placeholder element type used by PosTest3.</summary>
public class MyClass
{
}
#region --- License & Copyright Notice ---
/*
Copyright (c) 2005-2012 Jeevan James
All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#endregion

using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using System.Runtime.Serialization;

using Id3.Frames;
using Id3.Id3;

namespace Id3
{
    /// <summary>
    /// Represents an ID3 tag of a specific major/minor version: a collection of
    /// frames plus typed accessors for the well-known single- and
    /// multiple-instance frames. Enumerating the tag yields only assigned frames.
    /// </summary>
    public sealed class Id3Tag : IEnumerable<Id3Frame>, IComparable<Id3Tag>, IEquatable<Id3Tag>, IId3Tag
    {
        #region Private fields
        //Specific single instance frames (lazily resolved from Frames on first access)
        private AlbumFrame _album;
        private ArtistsFrame _artists;
        private AudioFileUrlFrame _audioFileUrl;
        private AudioSourceUrlFrame _audioSourceUrl;
        private BandFrame _band;
        private BeatsPerMinuteFrame _beatsPerMinute;
        private ComposersFrame _composers;
        private ConductorFrame _conductor;
        private ContentGroupDescriptionFrame _contentGroupDescription;
        private CopyrightFrame _copyright;
        private CopyrightUrlFrame _copyrightUrl;
        private EncoderFrame _encoder;
        private EncodingSettingsFrame _encodingSettings;
        private FileOwnerFrame _fileOwner;
        private FileTypeFrame _fileType;
        private GenreFrame _genre;
        private LengthFrame _length;
        private LyricistsFrame _lyricists;
        private PaymentUrlFrame _paymentUrl;
        private PublisherFrame _publisher;
        private RecordingDateFrame _recordingDate;
        private SubtitleFrame _subtitle;
        private TitleFrame _title;
        private TrackFrame _track;
        private YearFrame _year;

        //Specific multiple instance frames (synchronized views over Frames)
        private ArtistUrlFrameList _artistUrls;
        private CommentFrameList _comments;
        private CommercialUrlFrameList _commercialUrls;
        private Id3SyncFrameList<CustomTextFrame> _customTexts;
        private Id3SyncFrameList<LyricsFrame> _lyrics;
        private Id3SyncFrameList<PictureFrame> _pictures;
        private PrivateFrameList _privateData;
        #endregion

        public Id3Tag()
        {
            IsSupported = true;
            Frames = new Id3FrameList();
        }

        //Serialization callback: re-creates non-serialized state before the
        //deserializer populates the object.
        //Fix: renamed from "OnDeserialized" — the [OnDeserializing] attribute (which
        //is what the serializer actually keys on) runs BEFORE deserialization, so the
        //old name was misleading. Behavior is unchanged; the method is private.
        [OnDeserializing]
        private void OnDeserializing(StreamingContext context)
        {
            IsSupported = true;
            Frames = new Id3FrameList();
        }

        //Converts an ID3 tag to another version after resolving the differences between the two
        //versions. The resultant tag will have all the frames from the source tag, but those
        //frames not recognized in the new version will be treated as UnknownFrame objects.
        //Similarly, frames recognized in the output tag version, but not in the source version are
        //converted accordingly.
        //Returns null when either version has no registered handler.
        public Id3Tag ConvertTo(int majorVersion, int minorVersion)
        {
            //Already of the requested version: nothing to convert.
            if (MajorVersion == majorVersion && MinorVersion == minorVersion)
                return this;

            RegisteredId3Handler sourceHandler = Mp3Stream.RegisteredHandlers.GetHandler(MajorVersion, MinorVersion);
            if (sourceHandler == null)
                return null;

            RegisteredId3Handler destinationHandler = Mp3Stream.RegisteredHandlers.GetHandler(majorVersion, minorVersion);
            if (destinationHandler == null)
                return null;

            Id3Tag destinationTag = destinationHandler.Handler.CreateTag();
            foreach (Id3Frame sourceFrame in Frames)
            {
                var unknownFrame = sourceFrame as UnknownFrame;
                if (unknownFrame != null)
                {
                    string frameId = unknownFrame.Id;
                    Id3Frame destinationFrame = destinationHandler.Handler.GetFrameFromFrameId(frameId);
                    //Fix: GetFrameFromFrameId may return null for an ID the
                    //destination version does not recognize; previously that null was
                    //added to the frame list and broke later enumeration. Keep the
                    //original UnknownFrame instead, matching the documented behavior.
                    destinationTag.Frames.Add(destinationFrame ?? (Id3Frame)unknownFrame);
                }
                else
                    destinationTag.Frames.Add(sourceFrame);
            }
            return destinationTag;
        }

        public void MergeWith(params Id3Tag[] tags)
        {
            Array.Sort(tags);
            //TODO:
        }

        #region Metadata properties
        public Id3TagFamily Family { get; internal set; }

        public int MajorVersion { get; internal set; }

        public int MinorVersion { get; internal set; }

        public bool IsSupported { get; internal set; }

        public object AdditionalData { get; internal set; }
        #endregion

        #region Main frames list and associated operations
        //Backing store for all frames; typed properties and frame lists are views
        //over this collection.
        internal Id3FrameList Frames { get; private set; }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return ((IEnumerable<Id3Frame>)this).GetEnumerator();
        }

        IEnumerator<Id3Frame> IEnumerable<Id3Frame>.GetEnumerator()
        {
            //Only assigned frames are visible through enumeration.
            return Frames.Where(frame => frame.IsAssigned).GetEnumerator();
        }

        /// <summary>
        /// Removes all unassigned frames from the tag.
        /// </summary>
        public void Cleanup()
        {
            //Iterate backwards so removals do not shift yet-to-be-visited indexes.
            for (int i = Frames.Count - 1; i >= 0; i--)
            {
                if (!Frames[i].IsAssigned)
                    Frames.RemoveAt(i);
            }
        }

        /// <summary>
        /// Removes every frame and returns the number of assigned frames removed.
        /// </summary>
        public int Clear()
        {
            //this.Count() enumerates the tag, so it counts assigned frames only.
            int clearedCount = this.Count();
            Frames.Clear();
            return clearedCount;
        }

        /// <summary>
        /// Indicates whether an assigned frame of the type selected by the given
        /// property expression (e.g. tag => tag.Album) exists in this tag.
        /// </summary>
        public bool Contains<TFrame>(Expression<Func<Id3Tag, TFrame>> frameProperty)
            where TFrame : Id3Frame
        {
            if (frameProperty == null)
                throw new ArgumentNullException("frameProperty");

            var lambda = (LambdaExpression)frameProperty;
            var memberExpression = (MemberExpression)lambda.Body;
            var property = (PropertyInfo)memberExpression.Member;
            return this.Any(f => f.GetType() == property.PropertyType && f.IsAssigned);
        }

        /// <summary>
        /// Removes the first assigned frame of exactly type TFrame; returns whether
        /// a frame was removed.
        /// </summary>
        public bool Remove<TFrame>()
            where TFrame : Id3Frame
        {
            for (int i = 0; i < Frames.Count; i++)
            {
                Id3Frame frame = Frames[i];
                if (frame.IsAssigned && frame.GetType() == typeof(TFrame))
                {
                    Frames.RemoveAt(i);
                    return true;
                }
            }
            return false;
        }

        /// <summary>
        /// Removes all frames assignable to TFrame that match the optional
        /// predicate; returns how many of the removed frames were assigned.
        /// </summary>
        public int RemoveAll<TFrame>(Func<TFrame, bool> predicate = null)
            where TFrame : Id3Frame
        {
            int removalCount = 0;
            for (int i = Frames.Count - 1; i >= 0; i--)
            {
                var frame = Frames[i] as TFrame;
                if (frame != null && (predicate == null || predicate(frame)))
                {
                    //Unassigned frames are removed too, but not counted.
                    if (frame.IsAssigned)
                        removalCount++;
                    Frames.RemoveAt(i);
                }
            }
            return removalCount;
        }
        #endregion

        #region Frame properties
        //Single-instance frames resolve lazily via GetSingleFrame; multi-instance
        //frame lists are created on first access over the shared Frames collection.
        public AlbumFrame Album
        {
            get { return GetSingleFrame(ref _album); }
        }

        public ArtistsFrame Artists
        {
            get { return GetSingleFrame(ref _artists); }
        }

        public ArtistUrlFrameList ArtistUrls
        {
            get { return _artistUrls ?? (_artistUrls = new ArtistUrlFrameList(Frames)); }
        }

        public AudioFileUrlFrame AudioFileUrl
        {
            get { return GetSingleFrame(ref _audioFileUrl); }
        }

        public AudioSourceUrlFrame AudioSourceUrl
        {
            get { return GetSingleFrame(ref _audioSourceUrl); }
        }

        public BandFrame Band
        {
            get { return GetSingleFrame(ref _band); }
        }

        public BeatsPerMinuteFrame BeatsPerMinute
        {
            get { return GetSingleFrame(ref _beatsPerMinute); }
        }

        public CommentFrameList Comments
        {
            get { return _comments ?? (_comments = new CommentFrameList(Frames)); }
        }

        public CommercialUrlFrameList CommercialUrls
        {
            get { return _commercialUrls ?? (_commercialUrls = new CommercialUrlFrameList(Frames)); }
        }

        public ComposersFrame Composers
        {
            get { return GetSingleFrame(ref _composers); }
        }

        public ConductorFrame Conductor
        {
            get { return GetSingleFrame(ref _conductor); }
        }

        public ContentGroupDescriptionFrame ContentGroupDescription
        {
            get { return GetSingleFrame(ref _contentGroupDescription); }
        }

        public CopyrightFrame Copyright
        {
            get { return GetSingleFrame(ref _copyright); }
        }

        public CopyrightUrlFrame CopyrightUrl
        {
            get { return GetSingleFrame(ref _copyrightUrl); }
        }

        public Id3SyncFrameList<CustomTextFrame> CustomTexts
        {
            get { return GetMultipleFrames(ref _customTexts); }
        }

        public EncoderFrame Encoder
        {
            get { return GetSingleFrame(ref _encoder); }
        }

        public EncodingSettingsFrame EncodingSettings
        {
            get { return GetSingleFrame(ref _encodingSettings); }
        }

        public FileOwnerFrame FileOwner
        {
            get { return GetSingleFrame(ref _fileOwner); }
        }

        public FileTypeFrame FileType
        {
            get { return GetSingleFrame(ref _fileType); }
        }

        public GenreFrame Genre
        {
            get { return GetSingleFrame(ref _genre); }
        }

        public LengthFrame Length
        {
            get { return GetSingleFrame(ref _length); }
        }

        public LyricistsFrame Lyricists
        {
            get { return GetSingleFrame(ref _lyricists); }
        }

        public Id3SyncFrameList<LyricsFrame> Lyrics
        {
            get { return GetMultipleFrames(ref _lyrics); }
        }

        public PaymentUrlFrame PaymentUrl
        {
            get { return GetSingleFrame(ref _paymentUrl); }
        }

        public PublisherFrame Publisher
        {
            get { return GetSingleFrame(ref _publisher); }
        }

        public Id3SyncFrameList<PictureFrame> Pictures
        {
            get { return GetMultipleFrames(ref _pictures); }
        }

        public PrivateFrameList PrivateData
        {
            get { return _privateData ?? (_privateData = new PrivateFrameList(Frames)); }
        }

        public RecordingDateFrame RecordingDate
        {
            get { return GetSingleFrame(ref _recordingDate); }
        }

        public SubtitleFrame Subtitle
        {
            get { return GetSingleFrame(ref _subtitle); }
        }

        public TitleFrame Title
        {
            get { return GetSingleFrame(ref _title); }
        }

        public TrackFrame Track
        {
            get { return GetSingleFrame(ref _track); }
        }

        public YearFrame Year
        {
            get { return GetSingleFrame(ref _year); }
        }
        #endregion

        #region IComparable<Id3Tag> and IEquatable<Id3Tag> implementations
        /// <summary>
        /// Compares two tags based on their version details.
        /// </summary>
        /// <param name="other">The tag instance to compare against.</param>
        /// <returns>
        /// A negative value, zero or a positive value when this tag's version is
        /// respectively lower than, equal to or higher than the other's; a null
        /// argument sorts before any tag.
        /// </returns>
        public int CompareTo(Id3Tag other)
        {
            if (other == null)
                return 1;
            int majorComparison = MajorVersion.CompareTo(other.MajorVersion);
            int minorComparison = MinorVersion.CompareTo(other.MinorVersion);
            if (majorComparison == 0 && minorComparison == 0)
                return 0;
            //Major version dominates; minor breaks ties.
            return majorComparison != 0 ? majorComparison : minorComparison;
        }

        /// <summary>
        /// Two tags are considered equal when their major and minor versions match.
        /// </summary>
        public bool Equals(Id3Tag other)
        {
            //Fix: guard against null to honor the IEquatable<T> contract — the
            //original dereferenced 'other' and threw NullReferenceException for
            //Equals(null), which must simply return false.
            if (ReferenceEquals(other, null))
                return false;
            return MajorVersion == other.MajorVersion && MinorVersion == other.MinorVersion;
        }
        #endregion

        #region Private helper methods
        //Retrieves a single-occuring frame from the main frames list. This method is called from
        //the corresponding property getters.
        //Since each frame already has private field declared, we simply need to get a reference
        //to that field, instead of creating a new object. However, if the field is not available,
        //we create a new one with default values, which is then assigned to the private field.
        //Hence the use of a ref parameter.
        private TFrame GetSingleFrame<TFrame>(ref TFrame frame)
            where TFrame : Id3Frame, new()
        {
            if (frame != null)
                return frame;
            frame = Frames.OfType<TFrame>().FirstOrDefault();
            if (frame == null)
            {
                //No such frame yet: create a default instance and register it so the
                //cached reference and the frames list stay in sync.
                frame = new TFrame();
                Frames.Add(frame);
            }
            return frame;
        }

        //Lazily creates the synchronized multi-instance frame list for TFrame.
        private Id3SyncFrameList<TFrame> GetMultipleFrames<TFrame>(ref Id3SyncFrameList<TFrame> frames)
            where TFrame : Id3Frame
        {
            return frames ?? (frames = new Id3SyncFrameList<TFrame>(Frames));
        }
        #endregion

        #region Static members
        public static readonly Id3Tag[] Empty = new Id3Tag[0];

        /// <summary>
        /// Merges the given tags into a new tag. A single tag is returned as-is.
        /// </summary>
        public static Id3Tag Merge(params Id3Tag[] tags)
        {
            //Fix: a null array previously threw NullReferenceException on .Length.
            if (tags == null)
                throw new ArgumentNullException("tags");
            //NOTE(review): ArgumentNullException for an empty (non-null) array is an
            //odd choice, but it is kept so existing catch clauses keep working.
            if (tags.Length == 0)
                throw new ArgumentNullException("tags", "Specify 2 or more tags to merge");
            if (tags.Length == 1)
                return tags[0];

            var tag = new Id3Tag();
            tag.MergeWith(tags);
            return tag;
        }
        #endregion
    }
}
using System;
using System.Collections;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Windows.Forms;

namespace ModuleUpdater.Controls
{
	/// <summary>
	/// User control that lists the versions of a module held in the EarLab module
	/// database, with links to view a version, add a new one, or navigate back.
	/// (Original comment said "ModulesControl"; this control is VersionsControl.)
	/// </summary>
	public class VersionsControl : System.Windows.Forms.UserControl
	{
		/// <summary>
		/// Required designer variable.
		/// </summary>
		private System.ComponentModel.Container components = null;
		private System.Windows.Forms.GroupBox versionsGroup;
		private ModuleUpdater.InheritedControls.ColorUpdateViewDeleteDataGrid versionsDataGrid;
		// Identifier of the module whose versions are displayed; set via ModuleID.
		private long moduleID;
		private ModuleUpdater.InheritedControls.ColorDataGridLabelColumn versionColumn;
		private ModuleUpdater.InheritedControls.ColorDataGridLabelColumn releaseColumn;
		private ModuleUpdater.InheritedControls.ColorDataGridLabelColumn contactColumn;
		private ModuleUpdater.InheritedControls.ColorDataGridLinkColumn viewColumn;
		private System.Windows.Forms.LinkLabel backLinkLabel;
		private System.Windows.Forms.DataGridTableStyle versionsStyle;
		private System.Windows.Forms.LinkLabel addLinkLabel;
		// Result of the web-service call; holds tblVersion and tblModule tables.
		private DataSet versionsDataSet;

		public VersionsControl()
		{
			// This call is required by the Windows.Forms Form Designer.
			InitializeComponent();
			EnableVisualStyles.Enable(this);
			// Runtime override of the designer-set column widths.
			this.versionColumn.Width = this.releaseColumn.Width = 100;
		}

		/// <summary>
		/// Clean up any resources being used.
		/// </summary>
		protected override void Dispose( bool disposing )
		{
			if( disposing )
			{
				if(components != null)
				{
					components.Dispose();
				}
			}
			base.Dispose( disposing );
		}

		#region Component Designer generated code
		/// <summary>
		/// Required method for Designer support - do not modify
		/// the contents of this method with the code editor.
		/// </summary>
		private void InitializeComponent()
		{
			this.versionsDataGrid = new ModuleUpdater.InheritedControls.ColorUpdateViewDeleteDataGrid();
			this.versionsStyle = new System.Windows.Forms.DataGridTableStyle();
			this.versionColumn = new ModuleUpdater.InheritedControls.ColorDataGridLabelColumn();
			this.releaseColumn = new ModuleUpdater.InheritedControls.ColorDataGridLabelColumn();
			this.contactColumn = new ModuleUpdater.InheritedControls.ColorDataGridLabelColumn();
			this.viewColumn = new ModuleUpdater.InheritedControls.ColorDataGridLinkColumn();
			this.versionsGroup = new System.Windows.Forms.GroupBox();
			this.backLinkLabel = new System.Windows.Forms.LinkLabel();
			this.addLinkLabel = new System.Windows.Forms.LinkLabel();
			((System.ComponentModel.ISupportInitialize)(this.versionsDataGrid)).BeginInit();
			this.versionsGroup.SuspendLayout();
			this.SuspendLayout();
			// 
			// versionsDataGrid
			// 
			this.versionsDataGrid.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right)));
			this.versionsDataGrid.CaptionVisible = false;
			this.versionsDataGrid.DataMember = "";
			this.versionsDataGrid.HeaderForeColor = System.Drawing.SystemColors.ControlText;
			this.versionsDataGrid.LinkColor = System.Drawing.SystemColors.ActiveCaption;
			this.versionsDataGrid.Location = new System.Drawing.Point(8, 16);
			this.versionsDataGrid.Name = "versionsDataGrid";
			this.versionsDataGrid.ParentRowsVisible = false;
			this.versionsDataGrid.ReadOnly = true;
			this.versionsDataGrid.RowHeadersVisible = false;
			this.versionsDataGrid.Size = new System.Drawing.Size(648, 232);
			this.versionsDataGrid.TabIndex = 0;
			this.versionsDataGrid.TableStyles.AddRange(new System.Windows.Forms.DataGridTableStyle[] {
																										 this.versionsStyle});
			this.versionsDataGrid.LinkLabelClicked += new ModuleUpdater.InheritedControls.ColorUpdateViewDeleteDataGrid.LinkLabelClickedHandler(this.modulesDataGrid_LinkLabelClicked);
			// 
			// versionsStyle
			// 
			this.versionsStyle.DataGrid = this.versionsDataGrid;
			this.versionsStyle.GridColumnStyles.AddRange(new System.Windows.Forms.DataGridColumnStyle[] {
																											this.versionColumn,
																											this.releaseColumn,
																											this.contactColumn,
																											this.viewColumn});
			this.versionsStyle.HeaderForeColor = System.Drawing.SystemColors.ControlText;
			this.versionsStyle.LinkColor = System.Drawing.SystemColors.Highlight;
			this.versionsStyle.MappingName = "tblVersion";
			this.versionsStyle.PreferredColumnWidth = 100;
			this.versionsStyle.ReadOnly = true;
			this.versionsStyle.RowHeadersVisible = false;
			// 
			// versionColumn
			// 
			this.versionColumn.Alignment = System.Windows.Forms.HorizontalAlignment.Center;
			this.versionColumn.Format = "";
			this.versionColumn.FormatInfo = null;
			this.versionColumn.HeaderText = "Version Number";
			this.versionColumn.MappingName = "fldVersionNumber";
			this.versionColumn.ReadOnly = true;
			this.versionColumn.Width = 143;
			// 
			// releaseColumn
			// 
			this.releaseColumn.Alignment = System.Windows.Forms.HorizontalAlignment.Center;
			this.releaseColumn.Format = "";
			this.releaseColumn.FormatInfo = null;
			this.releaseColumn.HeaderText = "Release Date";
			this.releaseColumn.MappingName = "fldReleaseDate";
			this.releaseColumn.ReadOnly = true;
			this.releaseColumn.Width = 75;
			// 
			// contactColumn
			// 
			this.contactColumn.Alignment = System.Windows.Forms.HorizontalAlignment.Center;
			this.contactColumn.Format = "";
			this.contactColumn.FormatInfo = null;
			this.contactColumn.HeaderText = "Contact";
			this.contactColumn.MappingName = "fldContactName";
			this.contactColumn.ReadOnly = true;
			this.contactColumn.Width = 320;
			// 
			// viewColumn
			// 
			this.viewColumn.Alignment = System.Windows.Forms.HorizontalAlignment.Center;
			this.viewColumn.Format = "View";
			this.viewColumn.FormatInfo = null;
			this.viewColumn.HeaderText = "View Info";
			this.viewColumn.MappingName = "fldView";
			this.viewColumn.NullText = "View";
			this.viewColumn.ReadOnly = true;
			this.viewColumn.Width = 80;
			// 
			// versionsGroup
			// 
			this.versionsGroup.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right)));
			this.versionsGroup.Controls.Add(this.versionsDataGrid);
			this.versionsGroup.Location = new System.Drawing.Point(0, 0);
			this.versionsGroup.Name = "versionsGroup";
			this.versionsGroup.Size = new System.Drawing.Size(664, 256);
			this.versionsGroup.TabIndex = 1;
			this.versionsGroup.TabStop = false;
			this.versionsGroup.Text = "Versions in EarLab Module Database";
			// 
			// backLinkLabel
			// 
			this.backLinkLabel.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
			this.backLinkLabel.Font = new System.Drawing.Font("Microsoft Sans Serif", 10F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
			this.backLinkLabel.LinkBehavior = System.Windows.Forms.LinkBehavior.HoverUnderline;
			this.backLinkLabel.LinkColor = System.Drawing.SystemColors.ActiveCaption;
			this.backLinkLabel.Location = new System.Drawing.Point(0, 264);
			this.backLinkLabel.Name = "backLinkLabel";
			this.backLinkLabel.Size = new System.Drawing.Size(72, 16);
			this.backLinkLabel.TabIndex = 3;
			this.backLinkLabel.TabStop = true;
			this.backLinkLabel.Text = "< back";
			this.backLinkLabel.TextAlign = System.Drawing.ContentAlignment.BottomLeft;
			this.backLinkLabel.LinkClicked += new System.Windows.Forms.LinkLabelLinkClickedEventHandler(this.backLinkLabel_LinkClicked);
			// 
			// addLinkLabel
			// 
			this.addLinkLabel.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
			this.addLinkLabel.LinkBehavior = System.Windows.Forms.LinkBehavior.HoverUnderline;
			this.addLinkLabel.LinkColor = System.Drawing.SystemColors.ActiveCaption;
			this.addLinkLabel.Location = new System.Drawing.Point(568, 256);
			this.addLinkLabel.Name = "addLinkLabel";
			this.addLinkLabel.Size = new System.Drawing.Size(96, 16);
			this.addLinkLabel.TabIndex = 4;
			this.addLinkLabel.TabStop = true;
			this.addLinkLabel.Text = "Add New Version";
			this.addLinkLabel.TextAlign = System.Drawing.ContentAlignment.TopRight;
			this.addLinkLabel.LinkClicked += new System.Windows.Forms.LinkLabelLinkClickedEventHandler(this.addLinkLabel_LinkClicked);
			// 
			// VersionsControl
			// 
			this.Controls.Add(this.addLinkLabel);
			this.Controls.Add(this.backLinkLabel);
			this.Controls.Add(this.versionsGroup);
			this.Name = "VersionsControl";
			this.Size = new System.Drawing.Size(664, 288);
			((System.ComponentModel.ISupportInitialize)(this.versionsDataGrid)).EndInit();
			this.versionsGroup.ResumeLayout(false);
			this.ResumeLayout(false);
		}
		#endregion

		/// <summary>
		/// The module whose versions are shown. Assigning a non-zero value reloads
		/// the grid; zero is silently ignored (so the control is never bound to an
		/// invalid module id).
		/// </summary>
		public long ModuleID
		{
			get
			{
				return this.moduleID;
			}
			set
			{
				if (value != 0)
				{
					this.moduleID = value;
					this.SetupControl();
				}
			}
		}

		// Fetches the version list from the web service, adds the synthetic
		// "fldView" link column, and binds the grid. Also updates the group-box
		// caption with the module name from tblModule.
		private void SetupControl()
		{
			ModuleService.EarDevModuleService moduleService = new ModuleService.EarDevModuleService();
			this.versionsDataSet = moduleService.GetVersionList(this.moduleID);
			this.versionsDataSet.Tables["tblVersion"].Columns.Add("fldView", typeof(string));
			this.versionsDataGrid.DataSource = this.versionsDataSet.Tables["tblVersion"].DefaultView;
			this.versionsGroup.Text = "Versions in '" + this.versionsDataSet.Tables["tblModule"].Rows[0]["fldName"].ToString() + "' Module";
		}

		// Raised when the user asks to view a version, add one, or go back.
		public event NavigateClickedHandler NavigateClicked;

		// Grid link handler: only the "fldView" column navigates, passing the
		// clicked row's version id.
		private void modulesDataGrid_LinkLabelClicked(string columnName, System.Data.DataRow row)
		{
			if (columnName == "fldView" && this.NavigateClicked != null)
				this.NavigateClicked(this, "View", row["fldVersionID"]);
		}

		private void backLinkLabel_LinkClicked(object sender, System.Windows.Forms.LinkLabelLinkClickedEventArgs e)
		{
			if (this.NavigateClicked != null)
				this.NavigateClicked(this, "Back", null);
		}

		// Shows the add-version dialog (which mutates versionsDataSet in place) and
		// navigates to the new version on OK.
		private void addLinkLabel_LinkClicked(object sender, System.Windows.Forms.LinkLabelLinkClickedEventArgs e)
		{
			Dialogs.VersionAddDialog addDialog = new Dialogs.VersionAddDialog(ref this.versionsDataSet);
			if (addDialog.ShowDialog(this) == DialogResult.OK && this.NavigateClicked != null)
				this.NavigateClicked(this, "Add", addDialog.versionID);
		}
	}
}
// Copyright (c) 2007-2014 Joe White // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;
using System.Collections.Generic;
using System.Text;
using DGrok.Framework;
using NUnit.Framework;

namespace DGrok.Tests
{
    /// <summary>
    /// Parser tests for the Atom rule. Each test feeds a source snippet to the
    /// parser (via ParsesAs from ParserTestCase) and compares the resulting parse
    /// tree against an exact, indentation-sensitive textual dump — treat the
    /// expected strings as fixtures and do not reformat them.
    /// </summary>
    [TestFixture]
    public class AtomTests : ParserTestCase
    {
        // Tells the base fixture which grammar rule these tests exercise.
        protected override RuleType RuleType
        {
            get { return RuleType.Atom; }
        }

        [Test]
        public void Particle()
        {
            Assert.That("42", ParsesAs("Number |42|"));
        }

        // Semikeywords (e.g. "Absolute") must still parse as plain identifiers.
        [Test]
        public void Semikeyword()
        {
            Assert.That("Absolute", ParsesAs("Identifier |Absolute|"));
        }

        [Test]
        public void DotOperator()
        {
            Assert.That("Foo.Bar", ParsesAs(
                "BinaryOperationNode",
                "  LeftNode: Identifier |Foo|",
                "  OperatorNode: Dot |.|",
                "  RightNode: Identifier |Bar|"));
        }

        // Dot chains must associate to the left: (Foo.Bar).Baz.
        [Test]
        public void TwoDotOperators()
        {
            Assert.That("Foo.Bar.Baz", ParsesAs(
                "BinaryOperationNode",
                "  LeftNode: BinaryOperationNode",
                "    LeftNode: Identifier |Foo|",
                "    OperatorNode: Dot |.|",
                "    RightNode: Identifier |Bar|",
                "  OperatorNode: Dot |.|",
                "  RightNode: Identifier |Baz|"));
        }

        // A keyword after '.' is a member name, not a keyword ("Not" here).
        [Test]
        public void DotFollowedByKeyword()
        {
            Assert.That("Should.Not", ParsesAs(
                "BinaryOperationNode",
                "  LeftNode: Identifier |Should|",
                "  OperatorNode: Dot |.|",
                "  RightNode: Identifier |Not|"));
        }

        [Test]
        public void Caret()
        {
            Assert.That("Foo^", ParsesAs(
                "PointerDereferenceNode",
                "  OperandNode: Identifier |Foo|",
                "  CaretNode: Caret |^|"));
        }

        [Test]
        public void TwoCarets()
        {
            Assert.That("Foo^^", ParsesAs(
                "PointerDereferenceNode",
                "  OperandNode: PointerDereferenceNode",
                "    OperandNode: Identifier |Foo|",
                "    CaretNode: Caret |^|",
                "  CaretNode: Caret |^|"));
        }

        [Test]
        public void EmptyArrayIndexDoesNotParse()
        {
            AssertDoesNotParse("Foo[]");
        }

        [Test]
        public void OneArrayIndex()
        {
            Assert.That("Foo[42]", ParsesAs(
                "ParameterizedNode",
                "  LeftNode: Identifier |Foo|",
                "  OpenDelimiterNode: OpenBracket |[|",
                "  ParameterListNode: ListNode",
                "    Items[0]: DelimitedItemNode",
                "      ItemNode: Number |42|",
                "      DelimiterNode: (none)",
                "  CloseDelimiterNode: CloseBracket |]|"));
        }

        [Test]
        public void TwoArrayIndexes()
        {
            Assert.That("Foo[24, 42]", ParsesAs(
                "ParameterizedNode",
                "  LeftNode: Identifier |Foo|",
                "  OpenDelimiterNode: OpenBracket |[|",
                "  ParameterListNode: ListNode",
                "    Items[0]: DelimitedItemNode",
                "      ItemNode: Number |24|",
                "      DelimiterNode: Comma |,|",
                "    Items[1]: DelimitedItemNode",
                "      ItemNode: Number |42|",
                "      DelimiterNode: (none)",
                "  CloseDelimiterNode: CloseBracket |]|"));
        }

        [Test]
        public void NoParameters()
        {
            Assert.That("Foo()", ParsesAs(
                "ParameterizedNode",
                "  LeftNode: Identifier |Foo|",
                "  OpenDelimiterNode: OpenParenthesis |(|",
                "  ParameterListNode: ListNode",
                "  CloseDelimiterNode: CloseParenthesis |)|"));
        }

        [Test]
        public void OneParameter()
        {
            Assert.That("Foo(42)", ParsesAs(
                "ParameterizedNode",
                "  LeftNode: Identifier |Foo|",
                "  OpenDelimiterNode: OpenParenthesis |(|",
                "  ParameterListNode: ListNode",
                "    Items[0]: DelimitedItemNode",
                "      ItemNode: Number |42|",
                "      DelimiterNode: (none)",
                "  CloseDelimiterNode: CloseParenthesis |)|"));
        }

        [Test]
        public void TwoParameters()
        {
            Assert.That("Foo(24, 42)", ParsesAs(
                "ParameterizedNode",
                "  LeftNode: Identifier |Foo|",
                "  OpenDelimiterNode: OpenParenthesis |(|",
                "  ParameterListNode: ListNode",
                "    Items[0]: DelimitedItemNode",
                "      ItemNode: Number |24|",
                "      DelimiterNode: Comma |,|",
                "    Items[1]: DelimitedItemNode",
                "      ItemNode: Number |42|",
                "      DelimiterNode: (none)",
                "  CloseDelimiterNode: CloseParenthesis |)|"));
        }

        // Type-cast syntax with the "string" keyword parses like a call.
        [Test]
        public void StringCast()
        {
            Assert.That("string('0')", ParsesAs(
                "ParameterizedNode",
                "  LeftNode: StringKeyword |string|",
                "  OpenDelimiterNode: OpenParenthesis |(|",
                "  ParameterListNode: ListNode",
                "    Items[0]: DelimitedItemNode",
                "      ItemNode: StringLiteral |'0'|",
                "      DelimiterNode: (none)",
                "  CloseDelimiterNode: CloseParenthesis |)|"));
        }

        // Same for the "file" keyword applied to an untyped var parameter.
        [Test]
        public void FileCast()
        {
            Assert.That("file(AUntypedVarParameter)", ParsesAs(
                "ParameterizedNode",
                "  LeftNode: FileKeyword |file|",
                "  OpenDelimiterNode: OpenParenthesis |(|",
                "  ParameterListNode: ListNode",
                "    Items[0]: DelimitedItemNode",
                "      ItemNode: Identifier |AUntypedVarParameter|",
                "      DelimiterNode: (none)",
                "  CloseDelimiterNode: CloseParenthesis |)|"));
        }

        // Write/Str-style width:precision arguments (X:0) produce NumberFormatNode.
        [Test]
        public void ColonSyntax()
        {
            Assert.That("Str(X:0, S)", ParsesAs(
                "ParameterizedNode",
                "  LeftNode: Identifier |Str|",
                "  OpenDelimiterNode: OpenParenthesis |(|",
                "  ParameterListNode: ListNode",
                "    Items[0]: DelimitedItemNode",
                "      ItemNode: NumberFormatNode",
                "        ValueNode: Identifier |X|",
                "        SizeColonNode: Colon |:|",
                "        SizeNode: Number |0|",
                "        PrecisionColonNode: (none)",
                "        PrecisionNode: (none)",
                "      DelimiterNode: Comma |,|",
                "    Items[1]: DelimitedItemNode",
                "      ItemNode: Identifier |S|",
                "      DelimiterNode: (none)",
                "  CloseDelimiterNode: CloseParenthesis |)|"));
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

extern alias PDB;

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Reflection.Metadata;
using System.Reflection.Metadata.Ecma335;
using System.Reflection.PortableExecutable;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading;
using Microsoft.Cci;
using Microsoft.CodeAnalysis.CodeGen;
using Microsoft.CodeAnalysis.Collections;
using Microsoft.CodeAnalysis.Emit;
using Microsoft.CodeAnalysis.Test.Utilities;
using Microsoft.DiaSymReader;
using Microsoft.VisualStudio.Debugger.Evaluation;
using Microsoft.VisualStudio.Debugger.Evaluation.ClrCompilation;
using Roslyn.Test.Utilities;
using Xunit;
using PDB::Roslyn.Test.MetadataUtilities;
using PDB::Roslyn.Test.PdbUtilities;

namespace Microsoft.CodeAnalysis.ExpressionEvaluator.UnitTests
{
    /// <summary>
    /// A local-variable scope read from PDB debug information: an IL offset range
    /// plus the names of the locals declared in that range.
    /// </summary>
    internal sealed class Scope
    {
        internal readonly int StartOffset;
        internal readonly int EndOffset;
        internal readonly ImmutableArray<string> Locals;

        /// <summary>
        /// <paramref name="isEndInclusive"/> normalizes the two PDB end-offset conventions:
        /// when true, the stored end offset is bumped by one so EndOffset is always exclusive
        /// with respect to <see cref="Contains"/>.
        /// </summary>
        internal Scope(int startOffset, int endOffset, ImmutableArray<string> locals, bool isEndInclusive)
        {
            this.StartOffset = startOffset;
            this.EndOffset = endOffset + (isEndInclusive ? 1 : 0);
            this.Locals = locals;
        }

        // NOTE(review): Length counts one more than the half-open [Start, End) range used by
        // Contains below — existing callers compare Lengths relatively, so this is consistent.
        internal int Length
        {
            get { return this.EndOffset - this.StartOffset + 1; }
        }

        // True when the IL offset lies in [StartOffset, EndOffset).
        internal bool Contains(int offset)
        {
            return (offset >= this.StartOffset) && (offset < this.EndOffset);
        }
    }

    /// <summary>
    /// Shared helpers for expression-evaluator unit tests: wrappers around
    /// EvaluationContextBase compile entry points, metadata-reader lookups,
    /// PDB scope/local extraction, and IL verification.
    /// </summary>
    internal static class ExpressionCompilerTestHelpers
    {
        /// <summary>
        /// Compiles an assignment and asserts the expected result-property flags.
        /// Returns the compile result; <paramref name="error"/> is null on success.
        /// </summary>
        internal static CompileResult CompileAssignment(
            this EvaluationContextBase context,
            string target,
            string expr,
            out string error,
            CompilationTestData testData = null,
            DiagnosticFormatter formatter = null)
        {
            ResultProperties resultProperties;
            ImmutableArray<AssemblyIdentity> missingAssemblyIdentities;
            var result = context.CompileAssignment(
                target,
                expr,
                ImmutableArray<Alias>.Empty,
                formatter ?? DebuggerDiagnosticFormatter.Instance,
                out resultProperties,
                out error,
                out missingAssemblyIdentities,
                EnsureEnglishUICulture.PreferredOrNull,
                testData);
            Assert.Empty(missingAssemblyIdentities);

            // This is a crude way to test the language, but it's convenient to share this test helper.
            var isCSharp = context.GetType().Namespace.IndexOf("csharp", StringComparison.OrdinalIgnoreCase) >= 0;
            // C# assignments report only PotentialSideEffect; VB additionally reports ReadOnlyResult.
            var expectedFlags = error != null
                ? DkmClrCompilationResultFlags.None
                : isCSharp
                    ? DkmClrCompilationResultFlags.PotentialSideEffect
                    : DkmClrCompilationResultFlags.PotentialSideEffect | DkmClrCompilationResultFlags.ReadOnlyResult;
            Assert.Equal(expectedFlags, resultProperties.Flags);
            Assert.Equal(default(DkmEvaluationResultCategory), resultProperties.Category);
            Assert.Equal(default(DkmEvaluationResultAccessType), resultProperties.AccessType);
            Assert.Equal(default(DkmEvaluationResultStorageType), resultProperties.StorageType);
            Assert.Equal(default(DkmEvaluationResultTypeModifierFlags), resultProperties.ModifierFlags);
            return result;
        }

        /// <summary>
        /// Core assignment-compilation wrapper: collects diagnostics into a bag and converts
        /// any errors into an error message plus the set of missing assembly identities.
        /// </summary>
        internal static CompileResult CompileAssignment(
            this EvaluationContextBase context,
            string target,
            string expr,
            ImmutableArray<Alias> aliases,
            DiagnosticFormatter formatter,
            out ResultProperties resultProperties,
            out string error,
            out ImmutableArray<AssemblyIdentity> missingAssemblyIdentities,
            CultureInfo preferredUICulture,
            CompilationTestData testData)
        {
            var diagnostics = DiagnosticBag.GetInstance();
            var result = context.CompileAssignment(target, expr, aliases, diagnostics, out resultProperties, testData);
            if (diagnostics.HasAnyErrors())
            {
                bool useReferencedModulesOnly;
                error = context.GetErrorMessageAndMissingAssemblyIdentities(diagnostics, formatter, preferredUICulture, EvaluationContextBase.SystemCoreIdentity, out useReferencedModulesOnly, out missingAssemblyIdentities);
            }
            else
            {
                error = null;
                missingAssemblyIdentities = ImmutableArray<AssemblyIdentity>.Empty;
            }
            diagnostics.Free();
            return result;
        }

        /// <summary>
        /// Compiles the "get locals" assembly and verifies diagnostics against
        /// <paramref name="expectedDiagnostics"/> (none expected by default).
        /// </summary>
        internal static ReadOnlyCollection<byte> CompileGetLocals(
            this EvaluationContextBase context,
            ArrayBuilder<LocalAndMethod> locals,
            bool argumentsOnly,
            out string typeName,
            CompilationTestData testData,
            DiagnosticDescription[] expectedDiagnostics = null)
        {
            var diagnostics = DiagnosticBag.GetInstance();
            var result = context.CompileGetLocals(
                locals,
                argumentsOnly,
                ImmutableArray<Alias>.Empty,
                diagnostics,
                out typeName,
                testData);
            diagnostics.Verify(expectedDiagnostics ?? DiagnosticDescription.None);
            diagnostics.Free();
            return result;
        }

        // Convenience overload: callers that don't care about ResultProperties.
        internal static CompileResult CompileExpression(
            this EvaluationContextBase context,
            string expr,
            out string error,
            CompilationTestData testData = null,
            DiagnosticFormatter formatter = null)
        {
            ResultProperties resultProperties;
            return CompileExpression(context, expr, out resultProperties, out error, testData, formatter);
        }

        /// <summary>
        /// Compiles <paramref name="expr"/> as an expression (TreatAsExpression, no aliases)
        /// and asserts that no assembly identities were reported missing.
        /// </summary>
        internal static CompileResult CompileExpression(
            this EvaluationContextBase context,
            string expr,
            out ResultProperties resultProperties,
            out string error,
            CompilationTestData testData = null,
            DiagnosticFormatter formatter = null)
        {
            ImmutableArray<AssemblyIdentity> missingAssemblyIdentities;
            var result = context.CompileExpression(
                expr,
                DkmEvaluationFlags.TreatAsExpression,
                ImmutableArray<Alias>.Empty,
                formatter ?? DebuggerDiagnosticFormatter.Instance,
                out resultProperties,
                out error,
                out missingAssemblyIdentities,
                EnsureEnglishUICulture.PreferredOrNull,
                testData);
            Assert.Empty(missingAssemblyIdentities);
            return result;
        }

        // Overload allowing explicit evaluation flags and aliases; still asserts
        // that no assembly identities were reported missing.
        static internal CompileResult CompileExpression(
            this EvaluationContextBase evaluationContext,
            string expr,
            DkmEvaluationFlags compilationFlags,
            ImmutableArray<Alias> aliases,
            out string error,
            CompilationTestData testData = null,
            DiagnosticFormatter formatter = null)
        {
            ResultProperties resultProperties;
            ImmutableArray<AssemblyIdentity> missingAssemblyIdentities;
            var result = evaluationContext.CompileExpression(
                expr,
                compilationFlags,
                aliases,
                formatter ?? DebuggerDiagnosticFormatter.Instance,
                out resultProperties,
                out error,
                out missingAssemblyIdentities,
                EnsureEnglishUICulture.PreferredOrNull,
                testData);
            Assert.Empty(missingAssemblyIdentities);
            return result;
        }

        /// <summary>
        /// Compile C# expression and emit assembly with evaluation method.
        /// </summary>
        /// <returns>
        /// Result containing generated assembly, type and method names, and any format specifiers.
        /// </returns>
        static internal CompileResult CompileExpression(
            this EvaluationContextBase evaluationContext,
            string expr,
            DkmEvaluationFlags compilationFlags,
            ImmutableArray<Alias> aliases,
            DiagnosticFormatter formatter,
            out ResultProperties resultProperties,
            out string error,
            out ImmutableArray<AssemblyIdentity> missingAssemblyIdentities,
            CultureInfo preferredUICulture,
            CompilationTestData testData)
        {
            var diagnostics = DiagnosticBag.GetInstance();
            var result = evaluationContext.CompileExpression(expr, compilationFlags, aliases, diagnostics, out resultProperties, testData);
            if (diagnostics.HasAnyErrors())
            {
                bool useReferencedModulesOnly;
                error = evaluationContext.GetErrorMessageAndMissingAssemblyIdentities(diagnostics, formatter, preferredUICulture, EvaluationContextBase.SystemCoreIdentity, out useReferencedModulesOnly, out missingAssemblyIdentities);
            }
            else
            {
                error = null;
                missingAssemblyIdentities = ImmutableArray<AssemblyIdentity>.Empty;
            }
            diagnostics.Free();
            return result;
        }

        // Retry wrapper that always re-uses the supplied context regardless of which
        // module subset is attempted.
        internal static CompileResult CompileExpressionWithRetry(
            ImmutableArray<MetadataBlock> metadataBlocks,
            EvaluationContextBase context,
            ExpressionCompiler.CompileDelegate<CompileResult> compile,
            DkmUtilities.GetMetadataBytesPtrFunction getMetaDataBytesPtr,
            out string errorMessage)
        {
            return ExpressionCompiler.CompileWithRetry(
                metadataBlocks,
                DebuggerDiagnosticFormatter.Instance,
                (blocks, useReferencedModulesOnly) => context,
                compile,
                getMetaDataBytesPtr,
                out errorMessage);
        }

        /// <summary>
        /// Retry wrapper that compiles <paramref name="expr"/> with a fresh
        /// CompilationTestData per attempt, surfacing the last attempt's test data.
        /// </summary>
        internal static CompileResult CompileExpressionWithRetry(
            ImmutableArray<MetadataBlock> metadataBlocks,
            string expr,
            ImmutableArray<Alias> aliases,
            ExpressionCompiler.CreateContextDelegate createContext,
            DkmUtilities.GetMetadataBytesPtrFunction getMetaDataBytesPtr,
            out string errorMessage,
            out CompilationTestData testData)
        {
            var r = ExpressionCompiler.CompileWithRetry(
                metadataBlocks,
                DebuggerDiagnosticFormatter.Instance,
                createContext,
                (context, diagnostics) =>
                {
                    var td = new CompilationTestData();
                    ResultProperties resultProperties;
                    var compileResult = context.CompileExpression(
                        expr,
                        DkmEvaluationFlags.TreatAsExpression,
                        aliases,
                        diagnostics,
                        out resultProperties,
                        td);
                    return new CompileExpressionResult(compileResult, td);
                },
                getMetaDataBytesPtr,
                out errorMessage);
            testData = r.TestData;
            return r.CompileResult;
        }

        // Pair of (compile result, test data) threaded through CompileWithRetry above.
        private struct CompileExpressionResult
        {
            internal readonly CompileResult CompileResult;
            internal readonly CompilationTestData TestData;

            internal CompileExpressionResult(CompileResult compileResult, CompilationTestData testData)
            {
                this.CompileResult = compileResult;
                this.TestData = testData;
            }
        }

        // Finds the first TypeDef whose (unqualified) name matches. Throws if absent.
        internal static TypeDefinition GetTypeDef(this MetadataReader reader, string typeName)
        {
            return reader.TypeDefinitions.Select(reader.GetTypeDefinition).First(t => reader.StringComparer.Equals(t.Name, typeName));
        }

        // Finds the first MethodDef on the given type with the given name. Throws if absent.
        internal static MethodDefinition GetMethodDef(this MetadataReader reader, TypeDefinition typeDef, string methodName)
        {
            return typeDef.GetMethods().Select(reader.GetMethodDefinition).First(m => reader.StringComparer.Equals(m.Name, methodName));
        }

        // As GetMethodDef, but returns the handle rather than the definition.
        internal static MethodDefinitionHandle GetMethodDefHandle(this MetadataReader reader, TypeDefinition typeDef, string methodName)
        {
            return typeDef.GetMethods().First(h => reader.StringComparer.Equals(reader.GetMethodDefinition(h).Name, methodName));
        }

        // Asserts the generic parameter names match exactly, in order.
        internal static void CheckTypeParameters(this MetadataReader reader, GenericParameterHandleCollection genericParameters, params string[] expectedNames)
        {
            var actualNames = genericParameters.Select(reader.GetGenericParameter).Select(tp => reader.GetString(tp.Name)).ToArray();
            Assert.True(expectedNames.SequenceEqual(actualNames));
        }

        // Reads the assembly simple name and version out of a PE image in memory.
        internal static AssemblyName GetAssemblyName(this byte[] exeBytes)
        {
            using (var reader = new PEReader(ImmutableArray.CreateRange(exeBytes)))
            {
                var metadataReader = reader.GetMetadataReader();
                var def = metadataReader.GetAssemblyDefinition();
                var name = metadataReader.GetString(def.Name);
                return new AssemblyName() { Name = name, Version = def.Version };
            }
        }

        // Reads the MVID out of a PE image in memory.
        internal static Guid GetModuleVersionId(this byte[] exeBytes)
        {
            using (var reader = new PEReader(ImmutableArray.CreateRange(exeBytes)))
            {
                return reader.GetMetadataReader().GetModuleVersionId();
            }
        }

        /// <summary>
        /// Returns local names indexed by slot for the given method, gathered across all
        /// of its PDB scopes. Slots with no named local are null.
        /// </summary>
        internal static ImmutableArray<string> GetLocalNames(this ISymUnmanagedReader symReader, int methodToken, int methodVersion = 1)
        {
            var method = symReader.GetMethodByVersion(methodToken, methodVersion);
            if (method == null)
            {
                return ImmutableArray<string>.Empty;
            }
            var scopes = ArrayBuilder<ISymUnmanagedScope>.GetInstance();
            method.GetAllScopes(scopes);
            var names = ArrayBuilder<string>.GetInstance();
            foreach (var scope in scopes)
            {
                foreach (var local in scope.GetLocals())
                {
                    var name = local.GetName();
                    int slot;
                    // GetAddressField1 yields the local's slot index — used to position the name.
                    local.GetAddressField1(out slot);
                    // Pad the list with nulls so that names[slot] is addressable.
                    while (names.Count <= slot)
                    {
                        names.Add(null);
                    }
                    names[slot] = name;
                }
            }
            scopes.Free();
            return names.ToImmutableAndFree();
        }

        /// <summary>
        /// Disassembles Type.Method (exactly one dot in <paramref name="qualifiedName"/>)
        /// from the emitted assembly and compares against <paramref name="expectedIL"/>,
        /// tolerating whitespace differences.
        /// </summary>
        internal static void VerifyIL(
            this ImmutableArray<byte> assembly,
            string qualifiedName,
            string expectedIL,
            [CallerLineNumber]int expectedValueSourceLine = 0,
            [CallerFilePath]string expectedValueSourcePath = null)
        {
            var parts = qualifiedName.Split('.');
            if (parts.Length != 2)
            {
                // Only "Type.Method" (no nesting, no namespace) is supported here.
                throw new NotImplementedException();
            }
            using (var metadata = ModuleMetadata.CreateFromImage(assembly))
            {
                var module = metadata.Module;
                var reader = module.MetadataReader;
                var typeDef = reader.GetTypeDef(parts[0]);
                var methodName = parts[1];
                var methodHandle = reader.GetMethodDefHandle(typeDef, methodName);
                var methodBody = module.GetMethodBodyOrThrow(methodHandle);
                var pooled = PooledStringBuilder.GetInstance();
                var builder = pooled.Builder; // NOTE(review): unused local, kept as-is.
                var writer = new StringWriter(pooled.Builder);
                var visualizer = new MetadataVisualizer(reader, writer);
                visualizer.VisualizeMethodBody(methodBody, methodHandle, emitHeader: false);
                var actualIL = pooled.ToStringAndFree();
                AssertEx.AssertEqualToleratingWhitespaceDifferences(expectedIL, actualIL, escapeQuotes: true, expectedValueSourcePath: expectedValueSourcePath, expectedValueSourceLine: expectedValueSourceLine);
            }
        }

        internal static ImmutableArray<MetadataReference> GetEmittedReferences(Compilation compilation, MetadataReader mdReader)
        {
            // Determine the set of references that were actually used
            // and ignore any references that were dropped in emit.
            var referenceNames = new HashSet<string>(mdReader.AssemblyReferences.Select(h => GetAssemblyReferenceName(mdReader, h)));
            return ImmutableArray.CreateRange(compilation.References.Where(r => IsReferenced(r, referenceNames)));
        }

        /// <summary>
        /// Reads all PDB scopes for the given method as <see cref="Scope"/> values;
        /// empty when the method has no debug info.
        /// </summary>
        internal static ImmutableArray<Scope> GetScopes(this ISymUnmanagedReader symReader, int methodToken, int methodVersion, bool isEndInclusive)
        {
            var method = symReader.GetMethodByVersion(methodToken, methodVersion);
            if (method == null)
            {
                return ImmutableArray<Scope>.Empty;
            }
            var scopes = ArrayBuilder<ISymUnmanagedScope>.GetInstance();
            method.GetAllScopes(scopes);
            var result = scopes.SelectAsArray(s => new Scope(s.GetStartOffset(), s.GetEndOffset(), ImmutableArray.CreateRange(s.GetLocals().Select(l => l.GetName())), isEndInclusive));
            scopes.Free();
            return result;
        }

        // Returns the shortest scope containing the offset, or null if none does.
        internal static Scope GetInnermostScope(this ImmutableArray<Scope> scopes, int offset)
        {
            Scope result = null;
            foreach (var scope in scopes)
            {
                if (scope.Contains(offset))
                {
                    if ((result == null) || (result.Length > scope.Length))
                    {
                        result = scope;
                    }
                }
            }
            return result;
        }

        private static string GetAssemblyReferenceName(MetadataReader reader, AssemblyReferenceHandle handle)
        {
            var reference = reader.GetAssemblyReference(handle);
            return reader.GetString(reference.Name);
        }

        private static bool IsReferenced(MetadataReference reference, HashSet<string> referenceNames)
        {
            var assemblyMetadata = ((PortableExecutableReference)reference).GetMetadataNoCopy() as AssemblyMetadata;
            if (assemblyMetadata == null)
            {
                // Netmodule. Assume it is referenced.
                return true;
            }
            var name = assemblyMetadata.GetAssembly().Identity.Name;
            return referenceNames.Contains(name);
        }

        internal static ModuleInstance ToModuleInstance(this MetadataReference reference)
        {
            return ModuleInstance.Create((PortableExecutableReference)reference);
        }

        /// <summary>
        /// Emits the compilation (optionally with a PDB of the given format) and wraps
        /// the result in a ModuleInstance; debugFormat == 0 suppresses PDB emission.
        /// </summary>
        internal static ModuleInstance ToModuleInstance(
            this Compilation compilation,
            DebugInformationFormat debugFormat = DebugInformationFormat.Pdb,
            bool includeLocalSignatures = true)
        {
            var pdbStream = (debugFormat != 0) ? new MemoryStream() : null;
            var peImage = compilation.EmitToArray(new EmitOptions(debugInformationFormat: debugFormat), pdbStream: pdbStream);
            var symReader = (debugFormat != 0) ? SymReaderFactory.CreateReader(pdbStream, new PEReader(peImage)) : null;
            return ModuleInstance.Create(peImage, symReader, includeLocalSignatures);
        }

        // Assembles IL source (with PDB) and wraps the result in a ModuleInstance.
        internal static ModuleInstance GetModuleInstanceForIL(string ilSource)
        {
            ImmutableArray<byte> peBytes;
            ImmutableArray<byte> pdbBytes;
            CommonTestBase.EmitILToArray(ilSource, appendDefaultHeader: true, includePdb: true, assemblyBytes: out peBytes, pdbBytes: out pdbBytes);
            return ModuleInstance.Create(peBytes, SymReaderFactory.CreateReader(pdbBytes), includeLocalSignatures: true);
        }

        internal static AssemblyIdentity GetAssemblyIdentity(this MetadataReference reference)
        {
            using (var moduleMetadata = GetManifestModuleMetadata(reference))
            {
                return moduleMetadata.MetadataReader.ReadAssemblyIdentityOrThrow();
            }
        }

        internal static Guid GetModuleVersionId(this MetadataReference reference)
        {
            using (var moduleMetadata = GetManifestModuleMetadata(reference))
            {
                return moduleMetadata.MetadataReader.GetModuleVersionIdOrThrow();
            }
        }

        private static ModuleMetadata GetManifestModuleMetadata(MetadataReference reference)
        {
            // make a copy to avoid disposing shared reference metadata:
            var metadata = ((MetadataImageReference)reference).GetMetadata();
            return (metadata as AssemblyMetadata)?.GetModules()[0] ?? (ModuleMetadata)metadata;
        }

        /// <summary>
        /// Verifies one result of CompileGetLocals: the local's names, flags, generated
        /// method's type parameters, and (optionally) its IL and calling convention.
        /// </summary>
        internal static void VerifyLocal<TMethodSymbol>(
            this CompilationTestData testData,
            string typeName,
            LocalAndMethod localAndMethod,
            string expectedMethodName,
            string expectedLocalName,
            string expectedLocalDisplayName,
            DkmClrCompilationResultFlags expectedFlags,
            Action<TMethodSymbol> verifyTypeParameters,
            string expectedILOpt,
            bool expectedGeneric,
            string expectedValueSourcePath,
            int expectedValueSourceLine)
            where TMethodSymbol : IMethodSymbol
        {
            Assert.Equal(expectedLocalName, localAndMethod.LocalName);
            Assert.Equal(expectedLocalDisplayName, localAndMethod.LocalDisplayName);
            // Expected name may include type arguments and parameters.
            Assert.True(expectedMethodName.StartsWith(localAndMethod.MethodName, StringComparison.Ordinal), expectedMethodName + " does not start with " + localAndMethod.MethodName);
            Assert.Equal(expectedFlags, localAndMethod.Flags);
            var methodData = testData.GetMethodData(typeName + "." + expectedMethodName);
            verifyTypeParameters((TMethodSymbol)methodData.Method);
            if (expectedILOpt != null)
            {
                string actualIL = methodData.GetMethodIL();
                AssertEx.AssertEqualToleratingWhitespaceDifferences(
                    expectedILOpt,
                    actualIL,
                    escapeQuotes: true,
                    expectedValueSourcePath: expectedValueSourcePath,
                    expectedValueSourceLine: expectedValueSourceLine);
            }
            Assert.Equal(((Cci.IMethodDefinition)methodData.Method).CallingConvention, expectedGeneric ? Cci.CallingConvention.Generic : Cci.CallingConvention.Default);
        }

        /// <summary>
        /// Builds a mock symbol reader that serves the given import strings for the
        /// single method named <paramref name="methodName"/> in the PE image.
        /// </summary>
        internal static ISymUnmanagedReader ConstructSymReaderWithImports(ImmutableArray<byte> peImage, string methodName, params string[] importStrings)
        {
            using (var peReader = new PEReader(peImage))
            {
                var metadataReader = peReader.GetMetadataReader();
                var methodHandle = metadataReader.MethodDefinitions.Single(h => metadataReader.StringComparer.Equals(metadataReader.GetMethodDefinition(h).Name, methodName));
                var methodToken = metadataReader.GetToken(methodHandle);
                return new MockSymUnmanagedReader(new Dictionary<int, MethodDebugInfoBytes>
                {
                    { methodToken, new MethodDebugInfoBytes.Builder(new [] { importStrings }).Build() },
                }.ToImmutableDictionary());
            }
        }

        // Sentinel IL offset meaning "no offset".
        internal const uint NoILOffset = 0xffffffff;

        internal static readonly MetadataReference IntrinsicAssemblyReference = GetIntrinsicAssemblyReference();

        // Returns a copy of the references with the intrinsics assembly appended.
        internal static ImmutableArray<MetadataReference> AddIntrinsicAssembly(this ImmutableArray<MetadataReference> references)
        {
            var builder = ArrayBuilder<MetadataReference>.GetInstance();
            builder.AddRange(references);
            builder.Add(IntrinsicAssemblyReference);
            return builder.ToImmutableAndFree();
        }

        // Compiles a stub assembly declaring the debugger intrinsic methods; every
        // method body is just "ldnull throw" — the debugger intercepts the calls.
        private static MetadataReference GetIntrinsicAssemblyReference()
        {
            var source = @".assembly extern mscorlib { } .class public Microsoft.VisualStudio.Debugger.Clr.IntrinsicMethods { .method public static object GetObjectAtAddress(uint64 address) { ldnull throw } .method public static class [mscorlib]System.Exception GetException() { ldnull throw } .method public static class [mscorlib]System.Exception GetStowedException() { ldnull throw } .method public static object GetReturnValue(int32 index) { ldnull throw } .method public static void CreateVariable(class [mscorlib]System.Type 'type', string name, valuetype [mscorlib]System.Guid customTypeInfoPayloadTypeId, uint8[] customTypeInfoPayload) { ldnull throw } .method public static object GetObjectByAlias(string name) { ldnull throw } .method public static !!T& GetVariableAddress<T>(string name) { ldnull throw } }";
            return CommonTestBase.CompileIL(source);
        }

        /// <summary>
        /// Return MetadataReferences to the .winmd assemblies
        /// for the given namespaces.
        /// </summary>
        internal static ImmutableArray<MetadataReference> GetRuntimeWinMds(params string[] namespaces)
        {
            var paths = new HashSet<string>();
            foreach (var @namespace in namespaces)
            {
                foreach (var path in WindowsRuntimeMetadata.ResolveNamespace(@namespace, null))
                {
                    paths.Add(path);
                }
            }
            return ImmutableArray.CreateRange(paths.Select(GetAssembly));
        }

        // WinRT metadata version strings; the 1.3/1.4 byte arrays are padded to the
        // length of the 1.3+CLR string so they can be patched in place (see ToVersion).
        private const string Version1_3CLRString = "WindowsRuntime 1.3;CLR v4.0.30319";
        private const string Version1_3String = "WindowsRuntime 1.3";
        private const string Version1_4String = "WindowsRuntime 1.4";
        private static readonly int s_versionStringLength = Version1_3CLRString.Length;
        private static readonly byte[] s_version1_3CLRBytes = ToByteArray(Version1_3CLRString, s_versionStringLength);
        private static readonly byte[] s_version1_3Bytes = ToByteArray(Version1_3String, s_versionStringLength);
        private static readonly byte[] s_version1_4Bytes = ToByteArray(Version1_4String, s_versionStringLength);

        // ASCII bytes of str, zero-padded to the requested length.
        private static byte[] ToByteArray(string str, int length)
        {
            var bytes = new byte[length];
            for (int i = 0; i < str.Length; i++)
            {
                bytes[i] = (byte)str[i];
            }
            return bytes;
        }

        internal static byte[] ToVersion1_3(byte[] bytes)
        {
            return ToVersion(bytes, s_version1_3CLRBytes, s_version1_3Bytes);
        }

        internal static byte[] ToVersion1_4(byte[] bytes)
        {
            return ToVersion(bytes, s_version1_3CLRBytes, s_version1_4Bytes);
        }

        // Returns a copy of bytes with the first occurrence of 'from' overwritten by 'to'.
        // NOTE(review): if 'from' is absent, IndexOf returns -1 and Array.Copy throws.
        private static byte[] ToVersion(byte[] bytes, byte[] from, byte[] to)
        {
            int n = bytes.Length;
            var copy = new byte[n];
            Array.Copy(bytes, copy, n);
            int index = IndexOf(copy, from);
            Array.Copy(to, 0, copy, index, to.Length);
            return copy;
        }

        // Naive byte-pattern search: index of first occurrence of b in a, or -1.
        private static int IndexOf(byte[] a, byte[] b)
        {
            int m = b.Length;
            int n = a.Length - m;
            for (int x = 0; x < n; x++)
            {
                var matches = true;
                for (int y = 0; y < m; y++)
                {
                    if (a[x + y] != b[y])
                    {
                        matches = false;
                        break;
                    }
                }
                if (matches)
                {
                    return x;
                }
            }
            return -1;
        }

        private static MetadataReference GetAssembly(string path)
        {
            var bytes = File.ReadAllBytes(path);
            var metadata = ModuleMetadata.CreateFromImage(bytes);
            return metadata.GetReference(filePath: path);
        }

        /// <summary>
        /// Maps a method token to an IL offset via its sequence points: the first
        /// non-hidden point by default, or the point at <paramref name="atLineNumber"/>.
        /// Returns 0 when no symbols are available.
        /// </summary>
        internal static uint GetOffset(int methodToken, ISymUnmanagedReader symReader, int atLineNumber = -1)
        {
            int ilOffset;
            if (symReader == null)
            {
                ilOffset = 0;
            }
            else
            {
                var symMethod = symReader.GetMethod(methodToken);
                if (symMethod == null)
                {
                    ilOffset = 0;
                }
                else
                {
                    var sequencePoints = symMethod.GetSequencePoints();
                    ilOffset = atLineNumber < 0
                        ? sequencePoints.Where(sp => sp.StartLine != SequencePointList.HiddenSequencePointLine).Select(sp => sp.Offset).FirstOrDefault()
                        : sequencePoints.First(sp => sp.StartLine == atLineNumber).Offset;
                }
            }
            Assert.InRange(ilOffset, 0, int.MaxValue);
            return (uint)ilOffset;
        }

        /// <summary>
        /// Splits "Name(T1,T2)" into the name and the parameter type names; when there is
        /// no parameter list, returns the signature unchanged and null type names.
        /// </summary>
        internal static string GetMethodOrTypeSignatureParts(string signature, out string[] parameterTypeNames)
        {
            var parameterListStart = signature.IndexOf('(');
            if (parameterListStart < 0)
            {
                parameterTypeNames = null;
                return signature;
            }
            var parameters = signature.Substring(parameterListStart + 1, signature.Length - parameterListStart - 2);
            var methodName = signature.Substring(0, parameterListStart);
            parameterTypeNames = (parameters.Length == 0) ? new string[0] : parameters.Split(',');
            return methodName;
        }

        internal unsafe static ModuleMetadata ToModuleMetadata(this PEMemoryBlock metadata, bool ignoreAssemblyRefs)
        {
            return ModuleMetadata.CreateFromMetadata(
                (IntPtr)metadata.Pointer,
                metadata.Length,
                includeEmbeddedInteropTypes: false,
                ignoreAssemblyRefs: ignoreAssemblyRefs);
        }

        internal unsafe static MetadataReader ToMetadataReader(this PEMemoryBlock metadata)
        {
            return new MetadataReader(metadata.Pointer, metadata.Length, MetadataReaderOptions.None);
        }

        /// <summary>
        /// Emits a corlib-like assembly (runtime metadata version "0.0.0.0", portable PDB)
        /// through a caller-wrapped module builder; outputs the PE and PDB images.
        /// </summary>
        internal static void EmitCorLibWithAssemblyReferences(
            Compilation comp,
            string pdbPath,
            Func<CommonPEModuleBuilder, CommonPEModuleBuilder> getModuleBuilder,
            out ImmutableArray<byte> peBytes,
            out ImmutableArray<byte> pdbBytes)
        {
            var diagnostics = DiagnosticBag.GetInstance();
            var emitOptions = EmitOptions.Default.WithRuntimeMetadataVersion("0.0.0.0").WithDebugInformationFormat(DebugInformationFormat.PortablePdb);
            var moduleBuilder = comp.CheckOptionsAndCreateModuleBuilder(
                diagnostics,
                null,
                emitOptions,
                null,
                null,
                null,
                null,
                default(CancellationToken));
            // Wrap the module builder in a module builder that
            // reports the "System.Object" type as having no base type.
            moduleBuilder = getModuleBuilder(moduleBuilder);
            bool result = comp.Compile(
                moduleBuilder,
                emittingPdb: pdbPath != null,
                diagnostics: diagnostics,
                filterOpt: null,
                cancellationToken: default(CancellationToken));
            using (var peStream = new MemoryStream())
            {
                using (var pdbStream = new MemoryStream())
                {
                    PeWriter.WritePeToStream(
                        new EmitContext(moduleBuilder, null, diagnostics),
                        comp.MessageProvider,
                        () => peStream,
                        () => pdbStream,
                        null,
                        null,
                        allowMissingMethodBodies: true,
                        isDeterministic: false,
                        cancellationToken: default(CancellationToken));
                    peBytes = peStream.ToImmutable();
                    pdbBytes = pdbStream.ToImmutable();
                }
            }
            diagnostics.Verify();
            diagnostics.Free();
        }
    }
}
using System;
using System.Linq;
using Mono.Cecil;
using Mono.Cecil.Cil;
using Mono.Cecil.Metadata;
using NUnit.Framework;

namespace Mono.Cecil.Tests {

	// Tests for TypeDefinition/TypeReference reading: layout, interfaces, generics,
	// function pointers and corlib primitive handling. Fixtures (Layouts.cs, types.il,
	// gifaceref.exe, ...) are compiled/loaded by the BaseTestFixture harness.
	[TestFixture]
	public class TypeTests : BaseTestFixture {

		// Explicit struct layout (ClassSize) is surfaced only for types that declare it.
		[Test]
		public void TypeLayout ()
		{
			TestCSharp ("Layouts.cs", module => {
				var foo = module.GetType ("Foo");
				Assert.IsNotNull (foo);
				Assert.IsTrue (foo.IsValueType);
				Assert.IsTrue (foo.HasLayoutInfo);
				Assert.AreEqual (16, foo.ClassSize);

				var babar = module.GetType ("Babar");
				Assert.IsNotNull (babar);
				Assert.IsFalse (babar.IsValueType);
				Assert.IsFalse (babar.HasLayoutInfo);
			});
		}

		// Both declared interfaces are present on IBaz.
		[Test]
		public void SimpleInterfaces ()
		{
			TestIL ("types.il", module => {
				var ibaz = module.GetType ("IBaz");
				Assert.IsNotNull (ibaz);
				Assert.IsTrue (ibaz.HasInterfaces);
				var interfaces = ibaz.Interfaces;
				Assert.AreEqual (2, interfaces.Count);
				// Mono's ilasm and .NET's are ordering interfaces differently
				Assert.IsNotNull (interfaces.Single (i => i.InterfaceType.FullName == "IBar"));
				Assert.IsNotNull (interfaces.Single (i => i.InterfaceType.FullName == "IFoo"));
			});
		}

		// Generic parameters are named, ordered, and owned by the declaring type.
		[Test]
		public void GenericTypeDefinition ()
		{
			TestCSharp ("Generics.cs", module => {
				var foo = module.GetType ("Foo`2");
				Assert.IsNotNull (foo);
				Assert.IsTrue (foo.HasGenericParameters);
				Assert.AreEqual (2, foo.GenericParameters.Count);

				var tbar = foo.GenericParameters [0];
				Assert.AreEqual ("TBar", tbar.Name);
				Assert.AreEqual (foo, tbar.Owner);

				var tbaz = foo.GenericParameters [1];
				Assert.AreEqual ("TBaz", tbaz.Name);
				Assert.AreEqual (foo, tbaz.Owner);
			});
		}

		// Generic parameter constraints are read in declaration order.
		[Test]
		public void ConstrainedGenericType ()
		{
			TestCSharp ("Generics.cs", module => {
				var bongo_t = module.GetType ("Bongo`1");
				Assert.IsNotNull (bongo_t);

				var t = bongo_t.GenericParameters [0];
				Assert.IsNotNull (t);
				Assert.AreEqual ("T", t.Name);
				Assert.IsTrue (t.HasConstraints);
				Assert.AreEqual (2, t.Constraints.Count);
				Assert.AreEqual ("Zap", t.Constraints [0].FullName);
				Assert.AreEqual ("IZoom", t.Constraints [1].FullName);
			});
		}

		// A generic base type instantiated with the subclass's own type parameter.
		[Test]
		public void GenericBaseType ()
		{
			TestCSharp ("Generics.cs", module => {
				var child = module.GetType ("Child`1");

				var child_t = child.GenericParameters [0];
				Assert.IsNotNull (child_t);

				var instance = child.BaseType as GenericInstanceType;
				Assert.IsNotNull (instance);
				// The instance is backed by a real metadata row, not synthesized.
				Assert.AreNotEqual (0, instance.MetadataToken.RID);
				Assert.AreEqual (child_t, instance.GenericArguments [0]);
			});
		}

		// Parameters constrained by earlier parameters of the same type.
		[Test]
		public void GenericConstraintOnGenericParameter ()
		{
			TestCSharp ("Generics.cs", module => {
				var duel = module.GetType ("Duel`3");
				Assert.AreEqual (3, duel.GenericParameters.Count);
				var t1 = duel.GenericParameters [0];
				var t2 = duel.GenericParameters [1];
				var t3 = duel.GenericParameters [2];

				Assert.AreEqual (t1, t2.Constraints [0]);
				Assert.AreEqual (t2, t3.Constraints [0]);
			});
		}

		// Base type instantiated with a type declared later in the module
		// (forward reference).
		[Test]
		public void GenericForwardBaseType ()
		{
			TestCSharp ("Generics.cs", module => {
				var tamchild = module.GetType ("TamChild");

				Assert.IsNotNull (tamchild);
				Assert.IsNotNull (tamchild.BaseType);

				var generic_instance = tamchild.BaseType as GenericInstanceType;
				Assert.IsNotNull (generic_instance);

				Assert.AreEqual (1, generic_instance.GenericArguments.Count);
				Assert.AreEqual (module.GetType ("Tamtam"), generic_instance.GenericArguments [0]);
			});
		}

		// Curiously-recurring pattern: class RecChild : Base<RecChild>.
		[Test]
		public void TypeExtentingGenericOfSelf ()
		{
			TestCSharp ("Generics.cs", module => {
				var rec_child = module.GetType ("RecChild");

				Assert.IsNotNull (rec_child);
				Assert.IsNotNull (rec_child.BaseType);

				var generic_instance = rec_child.BaseType as GenericInstanceType;
				Assert.IsNotNull (generic_instance);

				Assert.AreEqual (1, generic_instance.GenericArguments.Count);
				Assert.AreEqual (rec_child, generic_instance.GenericArguments [0]);
			});
		}

		// The operand of a box instruction resolves to a value type reference.
		[Test]
		public void TypeReferenceValueType ()
		{
			TestCSharp ("Methods.cs", module => {
				var baz = module.GetType ("Baz");
				var method = baz.GetMethod ("PrintAnswer");

				var box = method.Body.Instructions.Where (i => i.OpCode == OpCodes.Box).First ();
				var int32 = (TypeReference) box.Operand;

				Assert.IsTrue (int32.IsValueType);
			});
		}

		// An implemented generic interface keeps both its instantiation arguments
		// and the open element type's parameters.
		[Test]
		public void GenericInterfaceReference ()
		{
			TestModule ("gifaceref.exe", module => {
				var type = module.GetType ("Program");
				var iface = type.Interfaces [0];

				var instance = (GenericInstanceType) iface.InterfaceType;
				var owner = instance.ElementType;

				Assert.AreEqual (1, instance.GenericArguments.Count);
				Assert.AreEqual (1, owner.GenericParameters.Count);
			});
		}

		// Compiler-fabricated method whose return type is a generic parameter with
		// no resolvable owner (csc bug fixture); verification is skipped.
		[Test]
		public void UnboundGenericParameter ()
		{
			TestModule ("cscgpbug.dll", module => {
				var type = module.GetType ("ListViewModel");
				var method = type.GetMethod ("<>n__FabricatedMethod1");

				var parameter = method.ReturnType as GenericParameter;

				Assert.IsNotNull (parameter);
				Assert.AreEqual (0, parameter.Position);
				Assert.IsNull (parameter.Owner);
			}, verify: false);
		}

		// Accessor call on a multidimensional array of a generic parameter type.
		[Test]
		public void GenericMultidimensionalArray ()
		{
			TestCSharp ("Generics.cs", module => {
				var type = module.GetType ("LaMatrix");
				var method = type.GetMethod ("At");

				var call = method.Body.Instructions.Where (i => i.Operand is MethodReference).First ();
				var get = (MethodReference) call.Operand;

				Assert.IsNotNull (get);
				Assert.AreEqual (0, get.GenericParameters.Count);
				Assert.AreEqual (MethodCallingConvention.Default, get.CallingConvention);
				Assert.AreEqual (method.GenericParameters [0], get.ReturnType);
			});
		}

		// Primitive classification is preserved across reference and resolved definition.
		[Test]
		public void CorlibPrimitive ()
		{
			var module = typeof (TypeTests).ToDefinition ().Module;

			var int32 = module.TypeSystem.Int32;
			Assert.IsTrue (int32.IsPrimitive);
			Assert.AreEqual (MetadataType.Int32, int32.MetadataType);

			var int32_def = int32.Resolve ();
			Assert.IsTrue (int32_def.IsPrimitive);
			Assert.AreEqual (MetadataType.Int32, int32_def.MetadataType);
		}

		// Function-pointer return type with explicit 'this': parameter sequence
		// numbering starts at the explicit this parameter; verification is skipped.
		[Test]
		public void ExplicitThis ()
		{
			TestIL ("explicitthis.il", module => {
				var type = module.GetType ("MakeDecision");
				var method = type.GetMethod ("Decide");
				var fptr = method.ReturnType as FunctionPointerType;

				Assert.IsNotNull (fptr);
				Assert.IsTrue (fptr.HasThis);
				Assert.IsTrue (fptr.ExplicitThis);

				Assert.AreEqual (0, fptr.Parameters [0].Sequence);
				Assert.AreEqual (1, fptr.Parameters [1].Sequence);
			}, verify: false);
		}

		// Even with deferred reading, TypeSystem.Object resolves to a TypeDefinition.
		[Test]
		public void DeferredCorlibTypeDef ()
		{
			using (var module = ModuleDefinition.ReadModule (typeof (object).Assembly.Location, new ReaderParameters (ReadingMode.Deferred))) {
				var object_type = module.TypeSystem.Object;
				Assert.IsInstanceOf<TypeDefinition> (object_type);
			}
		}

		// MetadataType classification for well-known corlib types read from disk.
		[Test]
		public void CorlibTypesMetadataType ()
		{
			using (var module = ModuleDefinition.ReadModule (typeof (object).Assembly.Location)) {
				var type = module.GetType ("System.String");
				Assert.IsNotNull (type);
				Assert.IsNotNull (type.BaseType);
				Assert.AreEqual ("System.Object", type.BaseType.FullName);
				Assert.IsInstanceOf<TypeDefinition> (type.BaseType);
				Assert.AreEqual (MetadataType.String, type.MetadataType);
				Assert.AreEqual (MetadataType.Object, type.BaseType.MetadataType);
			}
		}
	}
}
using System;
using System.Collections.Generic;
using System.Text;
using MbUnit.Framework;
using MbUnit.Framework.Reflection;

namespace MbUnit.Framework.Tests20.Reflection
{
    /// <summary>
    /// Exercises the instance-level API of <see cref="Reflector"/>:
    /// construction, field get/set, property get/set, and method invocation
    /// against a live <see cref="TestSample"/> object.
    /// </summary>
    [TestFixture]
    public class InstanceTests
    {
        TestSample _sample;
        Reflector _reflector;

        [SetUp]
        public void Setup()
        {
            // Fresh sample object and reflector for every test.
            _sample = new TestSample();
            _reflector = new Reflector(_sample);
        }

        [Test]
        [ExpectedException(typeof(ArgumentNullException))]
        public void ConstructorWithNullArgument()
        {
            // A null target object must be rejected immediately.
            var unused = new Reflector(null);
        }

        [Test]
        public void CreateInstanceWithDefaultConstructor()
        {
            // Instantiate an internal mscorlib type through its default constructor.
            string typeName = "System.Number";
            var target = new Reflector(ReflectorStaticTests.MSCorLibAssembly, typeName);
            Assert.IsNotNull(target);
            Assert.AreEqual(true, target.InvokeMethod("IsWhite", ' '));
            Assert.AreEqual(false, target.InvokeMethod("IsWhite", 'V'));
        }

        [Test]
        public void CreateInstanceWithParametizedConstructor()
        {
            // Instantiate an internal mscorlib type through a constructor taking arguments.
            string typeName = "System.Collections.KeyValuePairs";
            var target = new Reflector(ReflectorStaticTests.MSCorLibAssembly, typeName, 1, 'A');
            Assert.IsNotNull(target);
            Assert.AreEqual(1, target.GetProperty("Key"));
            Assert.AreEqual('A', target.GetProperty("Value"));
        }

        #region GetField Tests

        [Test]
        public void GetPublicField_DefaultAccessibility()
        {
            Assert.AreEqual("MbUnit Rocks!!!", _reflector.GetField("publicString"));
        }

        [Test]
        public void GetPublicField_PublicAccessibility()
        {
            Assert.AreEqual("MbUnit Rocks!!!", _reflector.GetField("publicString", AccessModifier.Public));
        }

        [Test]
        public void GetNonPublicField_DefaultAccessibility()
        {
            Assert.AreEqual(DateTime.Today, _reflector.GetField("privateDateTime"));
        }

        [Test]
        public void GetNonPublicField_NonPublicAccessibility()
        {
            Assert.AreEqual(DateTime.Today, _reflector.GetField("privateDateTime", AccessModifier.NonPublic));
        }

        [Test]
        public void GetStaticField_DefaultAccessibility()
        {
            Assert.AreEqual(7, _reflector.GetField("staticNum"));
        }

        [Test]
        public void GetStaticField_StaticAccessibility()
        {
            Assert.AreEqual(7, _reflector.GetField("staticNum", AccessModifier.Static | AccessModifier.NonPublic));
        }

        [Test]
        public void GetBaseClassField()
        {
            // Field lookup walks the inheritance chain by default.
            Assert.AreEqual("Base var", _reflector.GetField("_baseString"));
        }

        [Test]
        [ExpectedException(typeof(ReflectionException))]
        public void UnAccessibleField()
        {
            // staticNum is not public, so a Public-only lookup must fail.
            _reflector.GetField("staticNum", AccessModifier.Public);
        }

        [Test]
        [ExpectedException(typeof(ReflectionException))]
        public void TryToGetBaseClassFieldButSettingLookInBaseToFalse()
        {
            // With lookInBase disabled, a base-class field must not be found.
            _reflector.GetField("_baseString", AccessModifier.NonPublic, false);
        }

        #endregion

        #region SetField Tests

        [Test]
        public void SetPublicField_DefaultAccessibility()
        {
            string field = "publicString";
            string updated = "Just mbunit";
            _reflector.SetField(field, updated);
            Assert.AreEqual(updated, _reflector.GetField(field));
        }

        [Test]
        public void SetPublicField_PublicAccessibility()
        {
            string field = "publicString";
            string updated = "Just mbunit";
            _reflector.SetField(AccessModifier.Public, field, updated);
            Assert.AreEqual(updated, _reflector.GetField(field));
        }

        [Test]
        public void SetNonPublicField_DefaultAccessibility()
        {
            string field = "privateDateTime";
            DateTime date = new DateTime(2008, 1, 1);
            _reflector.SetField(field, date);
            Assert.AreEqual(date, _reflector.GetField(field));
        }

        [Test]
        public void SetNonPublicField_NonPublicAccessibility()
        {
            string field = "privateDateTime";
            DateTime date = new DateTime(2008, 1, 1);
            _reflector.SetField(AccessModifier.NonPublic, field, date);
            Assert.AreEqual(date, _reflector.GetField(field));
        }

        [Test]
        public void SetStaticPublicField_DefaultAccessibility()
        {
            string field = "staticNum";
            int updated = 10;
            int saved = (int)_reflector.GetField(field);
            _reflector.SetField(field, updated);
            Assert.AreEqual(updated, _reflector.GetField(field));
            // The field is static: restore the original value so that
            // other tests reading it keep passing.
            _reflector.SetField(field, saved);
            Assert.AreEqual(saved, _reflector.GetField(field));
        }

        [Test]
        public void SetStaticPublicField_StaticNonPublicAccessibility()
        {
            string field = "staticNum";
            int updated = 10;
            int saved = (int)_reflector.GetField(field);
            _reflector.SetField(AccessModifier.Static | AccessModifier.NonPublic, field, updated);
            Assert.AreEqual(updated, _reflector.GetField(field));
            // The field is static: restore the original value so that
            // other tests reading it keep passing.
            _reflector.SetField(AccessModifier.Static | AccessModifier.NonPublic, field, saved);
            Assert.AreEqual(saved, _reflector.GetField(field));
        }

        [Test]
        public void SetBaseClassField()
        {
            _reflector.SetField("_baseString", "Test Field");
            Assert.AreEqual("Test Field", _reflector.GetField("_baseString"));
        }

        #endregion

        #region GetProperty Tests

        [Test]
        public void GetPublicProperty_DefaultAccessibility()
        {
            Assert.AreEqual("MbUnit Rocks!!!", _reflector.GetProperty("PublicProperty"));
        }

        [Test]
        public void GetPublicProperty_PublicAccessibility()
        {
            Assert.AreEqual("MbUnit Rocks!!!", _reflector.GetProperty(AccessModifier.Public, "PublicProperty"));
        }

        [Test]
        public void GetNonPublicProperty_DefaultAccessibility()
        {
            Assert.AreEqual(DateTime.Today, _reflector.GetProperty("InternalProperty"));
        }

        [Test]
        public void GetNonPublicProperty_NonPublicAccessibility()
        {
            Assert.AreEqual(DateTime.Today, _reflector.GetProperty(AccessModifier.NonPublic, "InternalProperty"));
        }

        [Test]
        public void GetStaticProperty_DefaultAccessibility()
        {
            Assert.AreEqual(7, _reflector.GetProperty("StaticProperty"));
        }

        [Test]
        public void GetStaticProperty_StaticAccessibility()
        {
            Assert.AreEqual(7, _reflector.GetProperty(AccessModifier.Static | AccessModifier.NonPublic, "StaticProperty"));
        }

        #endregion

        #region SetProperty Tests

        [Test]
        public void SetPublicPropert_DefaultAccessibility()
        {
            string property = "PublicProperty";
            string updated = "Just mbunit";
            _reflector.SetProperty(property, updated);
            Assert.AreEqual(updated, _reflector.GetProperty(property));
        }

        [Test]
        public void SetPublicPropert_PublicAccessibility()
        {
            string property = "PublicProperty";
            string updated = "Just mbunit";
            _reflector.SetProperty(AccessModifier.Public, property, updated);
            Assert.AreEqual(updated, _reflector.GetProperty(property));
        }

        [Test]
        public void SetNonPublicProperty_DefaultAccessibility()
        {
            string property = "InternalProperty";
            DateTime date = new DateTime(2008, 1, 1);
            _reflector.SetProperty(property, date);
            Assert.AreEqual(date, _reflector.GetProperty(property));
        }

        [Test]
        public void SetNonPublicProperty_NonPublicAccessibility()
        {
            string property = "InternalProperty";
            DateTime date = new DateTime(2008, 1, 1);
            _reflector.SetProperty(AccessModifier.NonPublic, property, date);
            Assert.AreEqual(date, _reflector.GetProperty(property));
        }

        [Test]
        public void SetStaticProperty_DefaultAccessibility()
        {
            string property = "StaticProperty";
            int updated = 10;
            int saved = (int)_reflector.GetProperty(property);
            _reflector.SetProperty(property, updated);
            Assert.AreEqual(updated, _reflector.GetProperty(property));
            // The property is static: restore the original value so that
            // other tests reading it keep passing.
            _reflector.SetProperty(property, saved);
            Assert.AreEqual(saved, _reflector.GetProperty(property));
        }

        [Test]
        public void SetStaticProperty_StaticNonPublicAccessibility()
        {
            string property = "StaticProperty";
            int updated = 10;
            int saved = (int)_reflector.GetProperty(property);
            _reflector.SetProperty(AccessModifier.Static | AccessModifier.NonPublic, property, updated);
            Assert.AreEqual(updated, _reflector.GetProperty(property));
            // The property is static: restore the original value so that
            // other tests reading it keep passing.
            _reflector.SetProperty(property, saved);
            Assert.AreEqual(saved, _reflector.GetProperty(property));
        }

        #endregion

        #region InvokeMethod

        [Test]
        public void PublicMethodWithNoParameters_DefaultAccessibility()
        {
            Assert.AreEqual("MbUnit Rocks!!!", _reflector.InvokeMethod("PraiseMe"));
        }

        [Test]
        public void PublicMethod_DefaultAccessibility()
        {
            Assert.AreEqual(25, _reflector.InvokeMethod("Pow", 5));
        }

        [Test]
        public void PublicMethod_PublicAccessibility()
        {
            Assert.AreEqual(25, _reflector.InvokeMethod(AccessModifier.Public, "Pow", 5));
        }

        [Test]
        public void NonPublicMethod_DefaultAccessiblity()
        {
            Assert.AreEqual(15, _reflector.InvokeMethod("Multiply", 5, 3));
        }

        [Test]
        public void StaticPublicMethod_StaticAccessiblity()
        {
            Assert.AreEqual(7, _reflector.InvokeMethod(AccessModifier.Static | AccessModifier.Public, "Add", 1, 6));
        }

        #endregion
    }
}
// Copyright (c) 2010-2013 SharpDX - Alexandre Mutel
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text.RegularExpressions;
using Microsoft.Win32;
using SharpCore.Logging;
using SharpGen.Config;

namespace SharpGen.Parser
{
    /// <summary>
    /// GccXml front end for command line.
    /// see http://www.gccxml.org/HTML/Index.html
    /// </summary>
    public class GccXml
    {
        // File written before each run containing extra gcc preprocessor options.
        private const string GccXmlGccOptionsFile = "gccxml_preprocess_sharpdx_options.txt";

        // Matches any gccxml output line reporting an error.
        private static readonly Regex MatchError = new Regex("error:");

        // Parses "<file>:<line>:<col>: error: <text>" diagnostics, e.g.
        // E:/Code/Microsoft DirectX SDK (June 2010)//include/xaudio2fx.h:68:1: error:
        private static readonly Regex matchFileErrorRegex = new Regex(@"^(.*):(\d+):(\d+):\s+error:(.*)");

        /// <summary>
        /// GccXml tag for FundamentalType
        /// </summary>
        public const string TagFundamentalType = "FundamentalType";

        /// <summary>
        /// GccXml tag for Enumeration
        /// </summary>
        public const string TagEnumeration = "Enumeration";

        /// <summary>
        /// GccXml tag for Struct
        /// </summary>
        public const string TagStruct = "Struct";

        /// <summary>
        /// GccXml tag for Field
        /// </summary>
        public const string TagField = "Field";

        /// <summary>
        /// GccXml tag for Union
        /// </summary>
        public const string TagUnion = "Union";

        /// <summary>
        /// GccXml tag for Typedef
        /// </summary>
        public const string TagTypedef = "Typedef";

        /// <summary>
        /// GccXml tag for Function
        /// </summary>
        public const string TagFunction = "Function";

        /// <summary>
        /// GccXml tag for PointerType
        /// </summary>
        public const string TagPointerType = "PointerType";

        /// <summary>
        /// GccXml tag for ArrayType
        /// </summary>
        public const string TagArrayType = "ArrayType";

        /// <summary>
        /// GccXml tag for ReferenceType
        /// </summary>
        public const string TagReferenceType = "ReferenceType";

        /// <summary>
        /// GccXml tag for CvQualifiedType
        /// </summary>
        public const string TagCvQualifiedType = "CvQualifiedType";

        /// <summary>
        /// GccXml tag for Namespace
        /// </summary>
        public const string TagNamespace = "Namespace";

        /// <summary>
        /// GccXml tag for Variable
        /// </summary>
        public const string TagVariable = "Variable";

        /// <summary>
        /// GccXml tag for FunctionType
        /// </summary>
        public const string TagFunctionType = "FunctionType";

        /// <summary>
        /// Gets or sets the executable path of gccxml.exe.
        /// </summary>
        /// <value>The executable path.</value>
        public string ExecutablePath { get; set; }

        /// <summary>
        /// Gets or sets the include directory list.
        /// </summary>
        /// <value>The include directory list.</value>
        public List<IncludeDirRule> IncludeDirectoryList { get; private set; }

        /// <summary>
        /// List of error filters regexp.
        /// </summary>
        private readonly List<Regex> _filterErrors;

        /// <summary>
        /// Initializes a new instance of the <see cref="GccXml"/> class.
        /// </summary>
        public GccXml()
        {
            IncludeDirectoryList = new List<IncludeDirRule>();
            _filterErrors = new List<Regex>();
        }

        /// <summary>
        /// Adds a filter error that will ignore a particular error from gccxml.
        /// </summary>
        /// <param name="file">The headerFile.</param>
        /// <param name="regexpError">a regexp that filters a particular gccxml error message.</param>
        public void AddFilterError(string file, string regexpError)
        {
            string fullRegexpError = @"[\\/]" + Regex.Escape(file) + ":.*" + regexpError;
            _filterErrors.Add(new Regex(fullRegexpError));
        }

        /// <summary>
        /// Preprocesses the specified header file (runs gccxml with -E only),
        /// sending preprocessor output lines to <paramref name="handler"/>.
        /// </summary>
        /// <param name="headerFile">The header file.</param>
        /// <param name="handler">The handler receiving each stdout line.</param>
        public void Preprocess(string headerFile, DataReceivedEventHandler handler)
        {
            Logger.RunInContext("gccxml", () =>
            {
                string vsVersion = GetVisualStudioVersion();

                CheckInputFiles(headerFile);

                // Extra gcc options passed through --gccxml-gcc-options.
                File.WriteAllText(GccXmlGccOptionsFile, "-dDI -E");

                // Overrides settings for gccxml for compiling Win8 version
                var arguments = GetConfigArgument(vsVersion);
                arguments += " -E --gccxml-gcc-options " + GccXmlGccOptionsFile;

                foreach (var directory in GetIncludePaths())
                    arguments += " " + directory;

                RunGccXml(arguments + " " + headerFile, handler);
            });
        }

        /// <summary>
        /// Processes the specified header headerFile, producing an XML description of it.
        /// </summary>
        /// <param name="headerFile">The header headerFile.</param>
        /// <returns>A reader over the generated XML file, or null if generation failed.</returns>
        public StreamReader Process(string headerFile)
        {
            StreamReader result = null;

            Logger.RunInContext("gccxml", () =>
            {
                string vsVersion = GetVisualStudioVersion();

                ExecutablePath = Path.GetFullPath(Path.Combine(Environment.CurrentDirectory, ExecutablePath));

                CheckInputFiles(headerFile);

                var xmlFile = Path.ChangeExtension(headerFile, "xml");

                // Delete any previously generated xml file
                File.Delete(xmlFile);

                // Overrides settings for gccxml for compiling Win8 version
                var arguments = GetConfigArgument(vsVersion);
                arguments += " -fxml=" + xmlFile;

                foreach (var directory in GetIncludePaths())
                    arguments += " " + directory;

                RunGccXml(arguments + " " + headerFile, ProcessOutputFromHeaderFile);

                if (!File.Exists(xmlFile) || Logger.HasErrors)
                {
                    Logger.Error("Unable to generate XML file with gccxml [{0}]. Check previous errors.", xmlFile);
                }
                else
                {
                    result = new StreamReader(xmlFile);
                }
            });

            return result;
        }

        /// <summary>
        /// Fails fast with a fatal log if gccxml.exe or the header file is missing.
        /// </summary>
        private void CheckInputFiles(string headerFile)
        {
            if (!File.Exists(ExecutablePath))
                Logger.Fatal("gccxml.exe not found from path: [{0}]", ExecutablePath);

            if (!File.Exists(headerFile))
                Logger.Fatal("C++ Header file [{0}] not found", headerFile);
        }

        /// <summary>
        /// Builds the --gccxml-config argument pointing to the VC-specific config
        /// shipped next to gccxml.exe.
        /// </summary>
        private string GetConfigArgument(string vsVersion)
        {
            return " --gccxml-config \"" + Path.Combine(Path.GetDirectoryName(ExecutablePath), @"..\share\gccxml-0.9\vc" + vsVersion + @"\gccxml_config") + "\"";
        }

        /// <summary>
        /// Launches gccxml.exe with the given arguments, routing stderr through
        /// <see cref="ProcessErrorFromHeaderFile"/> and stdout to <paramref name="outputHandler"/>.
        /// The process is disposed even if an exception occurs.
        /// </summary>
        private void RunGccXml(string arguments, DataReceivedEventHandler outputHandler)
        {
            using (var currentProcess = new Process())
            {
                var startInfo = new ProcessStartInfo(ExecutablePath)
                {
                    RedirectStandardOutput = true,
                    RedirectStandardError = true,
                    UseShellExecute = false,
                    CreateNoWindow = true,
                    WorkingDirectory = Environment.CurrentDirectory,
                    Arguments = arguments
                };
                Console.WriteLine(startInfo.Arguments);
                currentProcess.StartInfo = startInfo;
                currentProcess.ErrorDataReceived += ProcessErrorFromHeaderFile;
                currentProcess.OutputDataReceived += outputHandler;
                currentProcess.Start();
                currentProcess.BeginOutputReadLine();
                currentProcess.BeginErrorReadLine();
                currentProcess.WaitForExit();
            }
        }

        /// <summary>
        /// Builds the list of -I / -iwrapper arguments from <see cref="IncludeDirectoryList"/>,
        /// resolving "=HIVE\path\valueName;subPath" entries through the 32-bit registry view.
        /// </summary>
        private List<string> GetIncludePaths()
        {
            var paths = new List<string>();

            foreach (var directory in IncludeDirectoryList)
            {
                var path = directory.Path;

                // A leading '=' means the path is stored in the registry:
                // =HKEY_XXX\sub\key\valueName;optionalSubPath
                if (path.StartsWith("="))
                {
                    var registryPath = directory.Path.Substring(1);

                    // Split off the optional ";subPath" suffix.
                    var indexOfSubPath = registryPath.IndexOf(";");
                    string subPath = "";
                    if (indexOfSubPath >= 0)
                    {
                        subPath = registryPath.Substring(indexOfSubPath + 1);
                        registryPath = registryPath.Substring(0, indexOfSubPath);
                    }

                    // The last path component is the registry value name.
                    var indexOfKey = registryPath.LastIndexOf("\\");
                    var subKeyStr = registryPath.Substring(indexOfKey + 1);
                    registryPath = registryPath.Substring(0, indexOfKey);

                    // The first path component is the registry hive.
                    var indexOfHive = registryPath.IndexOf("\\");
                    var hiveStr = registryPath.Substring(0, indexOfHive).ToUpper();
                    registryPath = registryPath.Substring(indexOfHive + 1);

                    try
                    {
                        var hive = RegistryHive.LocalMachine;
                        switch (hiveStr)
                        {
                            case "HKEY_LOCAL_MACHINE":
                                hive = RegistryHive.LocalMachine;
                                break;
                            case "HKEY_CURRENT_USER":
                                hive = RegistryHive.CurrentUser;
                                break;
                            case "HKEY_CURRENT_CONFIG":
                                hive = RegistryHive.CurrentConfig;
                                break;
                        }

                        // Dispose registry handles as soon as the value is read.
                        using (var rootKey = RegistryKey.OpenBaseKey(hive, RegistryView.Registry32))
                        using (var subKey = rootKey.OpenSubKey(registryPath))
                        {
                            if (subKey == null)
                            {
                                Logger.Error("Unable to locate key [{0}] in registry", registryPath);
                                continue;
                            }
                            path = Path.Combine(subKey.GetValue(subKeyStr).ToString(), subPath);
                        }
                    }
                    catch (Exception ex)
                    {
                        // Keep the exception detail instead of silently dropping it.
                        Logger.Error("Unable to locate key [{0}] in registry: {1}", registryPath, ex.Message);
                        continue;
                    }
                }

                if (directory.IsOverride)
                {
                    paths.Add("-iwrapper\"" + path.TrimEnd('\\') + "\"");
                }
                else
                {
                    paths.Add("-I\"" + path.TrimEnd('\\') + "\"");
                }
            }

            foreach (var path in paths)
            {
                Logger.Message("Path used for gccxml [{0}]", path);
            }

            return paths;
        }

        /// <summary>
        /// Returns true when the given Visual Studio version has C++ installed
        /// (checked in the 32-bit registry view).
        /// </summary>
        private static bool CheckVisualStudioVersion(string vsVersion)
        {
            using (var key = RegistryKey.OpenBaseKey(RegistryHive.LocalMachine, RegistryView.Registry32))
            using (var subKey = key.OpenSubKey(@"SOFTWARE\Microsoft\VisualStudio\" + vsVersion + @".0\Setup\VC"))
            {
                return subKey != null;
            }
        }

        /// <summary>
        /// Returns the first installed Visual Studio version among the candidates,
        /// or exits with an error if none is installed.
        /// </summary>
        public static string ResolveVisualStudioVersion(params string[] versions)
        {
            foreach (var version in versions)
            {
                if (CheckVisualStudioVersion(version))
                    return version;
            }
            Logger.Exit("Visual Studio [{0}] with C++ not found. SharpDX requires this version to generate code from C++", string.Join("/", versions));
            return null;
        }

        /// <summary>
        /// Returns the Visual Studio version to use for the current build flavor.
        /// </summary>
        public static string GetVisualStudioVersion()
        {
#if DIRECTX11_1
            string vsVersion = ResolveVisualStudioVersion("11");
#else
            string vsVersion = ResolveVisualStudioVersion("10");
#endif
            return vsVersion;
        }

        /// <summary>
        /// Returns the first installed Windows 7.x SDK version among the candidates
        /// (with an existing "include" directory), or exits with an error.
        /// </summary>
        public static string GetWindowsFramework7Version(params string[] versions)
        {
            using (var key = RegistryKey.OpenBaseKey(RegistryHive.LocalMachine, RegistryView.Registry32))
            {
                foreach (var version in versions)
                {
                    using (var subKey = key.OpenSubKey(@"SOFTWARE\Microsoft\Microsoft SDKs\Windows\v" + version))
                    {
                        if (subKey != null)
                        {
                            // Check that the include directory actually exist
                            object directory = subKey.GetValue("InstallationFolder");
                            if (directory != null && Directory.Exists(Path.Combine(directory.ToString(), "include")))
                            {
                                return version;
                            }
                        }
                    }
                }
            }
            Logger.Exit("Missing Windows SDK [{0}]. Download SDK 7.1 from: http://www.microsoft.com/en-us/download/details.aspx?id=8279", string.Join("/", versions));
            return null;
        }

        /// <summary>
        /// Processes the error from header file.
        /// </summary>
        /// <param name="sender">The sender.</param>
        /// <param name="e">The <see cref="System.Diagnostics.DataReceivedEventArgs"/> instance containing the event data.</param>
        void ProcessErrorFromHeaderFile(object sender, DataReceivedEventArgs e)
        {
            bool popContext = false;
            try
            {
                if (e.Data != null)
                {
                    var matchError = matchFileErrorRegex.Match(e.Data);

                    // Check whether a registered filter suppresses this line.
                    bool lineFiltered = false;
                    foreach (var filterError in _filterErrors)
                    {
                        if (filterError.Match(e.Data).Success)
                        {
                            lineFiltered = true;
                            break;
                        }
                    }

                    string errorText = e.Data;

                    // When the line carries file:line:col info, push it as logging context.
                    if (matchError.Success)
                    {
                        Logger.PushLocation(matchError.Groups[1].Value, int.Parse(matchError.Groups[2].Value), int.Parse(matchError.Groups[3].Value));
                        popContext = true;
                        errorText = matchError.Groups[4].Value;
                    }

                    if (!lineFiltered)
                    {
                        if (MatchError.Match(e.Data).Success)
                            Logger.Error(errorText);
                        else
                            Logger.Warning(errorText);
                    }
                    else
                    {
                        // Filtered errors are demoted to warnings so they stay visible.
                        Logger.Warning(errorText);
                    }
                }
            }
            finally
            {
                if (popContext)
                    Logger.PopLocation();
            }
        }

        /// <summary>
        /// Processes the output from header file.
        /// </summary>
        /// <param name="sender">The sender.</param>
        /// <param name="e">The <see cref="System.Diagnostics.DataReceivedEventArgs"/> instance containing the event data.</param>
        static void ProcessOutputFromHeaderFile(object sender, DataReceivedEventArgs e)
        {
            if (e.Data != null)
                Logger.Message(e.Data);
        }
    }
}
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.IO; using System.Collections.Generic; using Microsoft.Build.BuildEngine; using Microsoft.Build.BuildEngine.Shared; using error = Microsoft.Build.BuildEngine.Shared.ErrorUtilities; namespace Microsoft.Build.BuildEngine { internal class PropertyDefinition { private string name = null; private string value = null; private string source = null; public PropertyDefinition(string name, string value, string source) { error.VerifyThrowArgumentLength(name, "name"); error.VerifyThrowArgumentLength(source, "source"); // value can be the empty string but not null error.VerifyThrowArgumentNull(value, "value"); this.name = name; this.value = value; this.source = source; } /// <summary> /// The name of the property /// </summary> public string Name { get { return name; } } /// <summary> /// The value of the property /// </summary> public string Value { get { return value; } } /// <summary> /// A description of the location where the property was defined, /// such as a registry key path or a path to a config file and /// line number. 
/// </summary> public string Source { get { return source; } } } internal abstract class ToolsetReader { /// <summary> /// Gathers toolset data from both the registry and configuration file, if any /// </summary> /// <param name="toolsets"></param> /// <param name="globalProperties"></param> /// <param name="initialProperties"></param> /// <returns></returns> internal static string ReadAllToolsets(ToolsetCollection toolsets, BuildPropertyGroup globalProperties, BuildPropertyGroup initialProperties) { return ReadAllToolsets(toolsets, null, null, globalProperties, initialProperties, ToolsetDefinitionLocations.ConfigurationFile | ToolsetDefinitionLocations.Registry); } /// <summary> /// Gathers toolset data from the registry and configuration file, if any: /// allows you to specify which of the registry and configuration file to /// read from by providing ToolsetInitialization /// </summary> /// <param name="toolsets"></param> /// <param name="globalProperties"></param> /// <param name="initialProperties"></param> /// <param name="locations"></param> /// <returns></returns> internal static string ReadAllToolsets(ToolsetCollection toolsets, BuildPropertyGroup globalProperties, BuildPropertyGroup initialProperties, ToolsetDefinitionLocations locations) { return ReadAllToolsets(toolsets, null, null, globalProperties, initialProperties, locations); } /// <summary> /// Gathers toolset data from the registry and configuration file, if any. /// NOTE: this method is internal for unit testing purposes only. 
/// </summary> /// <param name="toolsets"></param> /// <param name="registryReader"></param> /// <param name="configurationReader"></param> /// <param name="globalProperties"></param> /// <param name="initialProperties"></param> /// <param name="locations"></param> /// <returns></returns> internal static string ReadAllToolsets(ToolsetCollection toolsets, ToolsetRegistryReader registryReader, ToolsetConfigurationReader configurationReader, BuildPropertyGroup globalProperties, BuildPropertyGroup initialProperties, ToolsetDefinitionLocations locations) { // The 2.0 .NET Framework installer did not write a ToolsVersion key for itself in the registry. // The 3.5 installer writes one for 2.0, but 3.5 might not be installed. // The 4.0 and subsequent installers can't keep writing the 2.0 one, because (a) it causes SxS issues and (b) we // don't want it unless 2.0 is installed. // So if the 2.0 framework is actually installed, and we're reading the registry, create a toolset for it. // The registry and config file can overwrite it. if ( ((locations & ToolsetDefinitionLocations.Registry) != 0) && !toolsets.Contains("2.0") && FrameworkLocationHelper.PathToDotNetFrameworkV20 != null ) { Toolset synthetic20Toolset = new Toolset("2.0", FrameworkLocationHelper.PathToDotNetFrameworkV20, initialProperties); toolsets.Add(synthetic20Toolset); } // The ordering here is important because the configuration file should have greater precedence // than the registry string defaultToolsVersionFromRegistry = null; ToolsetRegistryReader registryReaderToUse = null; if ((locations & ToolsetDefinitionLocations.Registry) == ToolsetDefinitionLocations.Registry) { registryReaderToUse = registryReader == null ? 
new ToolsetRegistryReader() : registryReader; // We do not accumulate properties when reading them from the registry, because the order // in which values are returned to us is essentially random: so we disallow one property // in the registry to refer to another also in the registry defaultToolsVersionFromRegistry = registryReaderToUse.ReadToolsets(toolsets, globalProperties, initialProperties, false /* do not accumulate properties */); } string defaultToolsVersionFromConfiguration = null; ToolsetConfigurationReader configurationReaderToUse = null; if ((locations & ToolsetDefinitionLocations.ConfigurationFile) == ToolsetDefinitionLocations.ConfigurationFile) { if (configurationReader == null && ConfigurationFileMayHaveToolsets()) { // We haven't been passed in a fake configuration reader by a unit test, // and it looks like we have a .config file to read, so create a real // configuration reader configurationReader = new ToolsetConfigurationReader(); } if (configurationReader != null) { configurationReaderToUse = configurationReader == null ? new ToolsetConfigurationReader() : configurationReader; // Accumulation of properties is okay in the config file because it's deterministically ordered defaultToolsVersionFromConfiguration = configurationReaderToUse.ReadToolsets(toolsets, globalProperties, initialProperties, true /* accumulate properties */); } } // We'll use the default from the configuration file if it was specified, otherwise we'll try // the one from the registry. It's possible (and valid) that neither the configuration file // nor the registry specify a default, in which case we'll just return null. string defaultToolsVersion = defaultToolsVersionFromConfiguration ?? defaultToolsVersionFromRegistry; // If we got a default version from the registry or config file, and it // actually exists, fine. // Otherwise we have to come up with one. 
if (defaultToolsVersion == null || !toolsets.Contains(defaultToolsVersion)) { // We're going to choose a hard coded default tools version of 2.0. defaultToolsVersion = Constants.defaultToolsVersion; // But don't overwrite any existing tools path for this default we're choosing. if (!toolsets.Contains(Constants.defaultToolsVersion)) { // There's no tools path already for 2.0, so use the path to the v2.0 .NET Framework. // If an old-fashioned caller sets BinPath property, or passed a BinPath to the constructor, // that will overwrite what we're setting here. ErrorUtilities.VerifyThrow(Constants.defaultToolsVersion == "2.0", "Getting 2.0 FX path so default should be 2.0"); string pathToFramework = FrameworkLocationHelper.PathToDotNetFrameworkV20; // We could not find the default toolsversion because it was not installed on the machine. Fallback to the // one we expect to always be there when running msbuild 4.0. if (pathToFramework == null) { pathToFramework = FrameworkLocationHelper.PathToDotNetFrameworkV40; defaultToolsVersion = Constants.defaultFallbackToolsVersion; } // Again don't overwrite any existing tools path for this default we're choosing. if (!toolsets.Contains(defaultToolsVersion)) { Toolset defaultToolset = new Toolset(defaultToolsVersion, pathToFramework, initialProperties); toolsets.Add(defaultToolset); } } } return defaultToolsVersion; } /// <summary> /// Creating a ToolsetConfigurationReader, and also reading toolsets from the /// configuration file, are a little expensive. To try to avoid this cost if it's /// not necessary, we'll check if the file exists first. If it exists, we'll scan for /// the string "toolsVersion" to see if it might actually have any tools versions /// defined in it. 
        /// </summary>
        /// <returns>True if there may be toolset definitions, otherwise false</returns>
        private static bool ConfigurationFileMayHaveToolsets()
        {
            bool result;

            try
            {
                // Cheap heuristic: only bother constructing a ToolsetConfigurationReader
                // if the .exe.config file exists AND mentions "toolsVersion" somewhere.
                result = (File.Exists(FileUtilities.CurrentExecutableConfigurationFilePath)
                          && File.ReadAllText(FileUtilities.CurrentExecutableConfigurationFilePath).Contains("toolsVersion"));
            }
            catch (Exception e) // Catching Exception, but rethrowing unless it's an IO related exception.
            {
                if (ExceptionHandling.NotExpectedException(e))
                    throw;

                // There was some problem reading the config file: let the configuration reader
                // encounter it
                result = true;
            }

            return result;
        }

        /// <summary>
        /// Populates the toolset collection passed in with the toolsets read from some location.
        /// </summary>
        /// <remarks>Internal for unit testing only</remarks>
        /// <param name="toolsets">Collection to add the discovered toolsets to</param>
        /// <param name="globalProperties">Global properties; these can never be overwritten by toolset properties</param>
        /// <param name="initialProperties">Properties available for expansion of toolset property values</param>
        /// <param name="accumulateProperties">If true, properties read are added to <paramref name="initialProperties"/> so later values can reference them</param>
        /// <returns>the default tools version if available, or null otherwise</returns>
        internal string ReadToolsets(ToolsetCollection toolsets,
                                     BuildPropertyGroup globalProperties,
                                     BuildPropertyGroup initialProperties,
                                     bool accumulateProperties)
        {
            error.VerifyThrowArgumentNull(toolsets, "toolsets");

            ReadEachToolset(toolsets, globalProperties, initialProperties, accumulateProperties);

            string defaultToolsVersion = DefaultToolsVersion;

            // We don't check whether the default tools version actually
            // corresponds to a toolset definition. That's because our default for
            // the indefinite future is 2.0, and 2.0 might not be installed, which is fine.
            // If a project tries to use 2.0 (or whatever the default is) in these circumstances
            // they'll get a nice error saying that toolset isn't available and listing those that are.
            return defaultToolsVersion;
        }

        /// <summary>
        /// Reads all the toolsets and populates the given ToolsetCollection with them
        /// </summary>
        /// <param name="toolsets">Collection to add the discovered toolsets to</param>
        /// <param name="globalProperties">Global properties; these can never be overwritten by toolset properties</param>
        /// <param name="initialProperties">Properties available for expansion of toolset property values</param>
        /// <param name="accumulateProperties">If true, properties read are accumulated into a clone of <paramref name="initialProperties"/> per toolset</param>
        private void ReadEachToolset(ToolsetCollection toolsets,
                                     BuildPropertyGroup globalProperties,
                                     BuildPropertyGroup initialProperties,
                                     bool accumulateProperties)
        {
            foreach (PropertyDefinition toolsVersion in ToolsVersions)
            {
                // We clone here because we don't want to interfere with the evaluation
                // of subsequent Toolsets; otherwise, properties found during the evaluation
                // of this Toolset would be persisted in initialProperties and appear
                // to later Toolsets as Global or Environment properties from the Engine.
                BuildPropertyGroup initialPropertiesClone = initialProperties.Clone(true /* deep clone */);
                Toolset toolset = ReadToolset(toolsVersion, globalProperties, initialPropertiesClone, accumulateProperties);

                if (toolset != null)
                {
                    toolsets.Add(toolset);
                }
            }
        }

        /// <summary>
        /// Reads the settings for a specified tools version
        /// </summary>
        /// <param name="toolsVersion">The tools version whose property definitions should be read</param>
        /// <param name="globalProperties">Global properties; these can never be overwritten by toolset properties</param>
        /// <param name="initialProperties">Properties available for expansion of toolset property values</param>
        /// <param name="accumulateProperties">If true, each property read is also added to <paramref name="initialProperties"/></param>
        /// <returns>The Toolset, or null if MSBuildBinPath and MSBuildToolsPath conflict</returns>
        private Toolset ReadToolset(PropertyDefinition toolsVersion,
                                    BuildPropertyGroup globalProperties,
                                    BuildPropertyGroup initialProperties,
                                    bool accumulateProperties)
        {
            // Initial properties is the set of properties we're going to use to expand property expressions like $(foo)
            // in the values we read out of the registry or config file. We'll add to it as we pick up properties (including binpath)
            // from the registry or config file, so that properties there can be referenced in values below them.
            // After processing all the properties, we don't need initialProperties anymore.
            string toolsPath = null;
            string binPath = null;
            BuildPropertyGroup properties = new BuildPropertyGroup();
            IEnumerable<PropertyDefinition> rawProperties = GetPropertyDefinitions(toolsVersion.Name);

            Expander expander = new Expander(initialProperties);

            foreach (PropertyDefinition property in rawProperties)
            {
                if (0 == String.Compare(property.Name, ReservedPropertyNames.toolsPath, StringComparison.OrdinalIgnoreCase))
                {
                    toolsPath = ExpandProperty(property, expander);
                    toolsPath = ExpandRelativePathsRelativeToExeLocation(toolsPath);

                    if (accumulateProperties)
                    {
                        SetProperty
                        (
                            new PropertyDefinition(ReservedPropertyNames.toolsPath, toolsPath, property.Source),
                            initialProperties,
                            globalProperties
                        );
                    }
                }
                else if (0 == String.Compare(property.Name, ReservedPropertyNames.binPath, StringComparison.OrdinalIgnoreCase))
                {
                    binPath = ExpandProperty(property, expander);
                    binPath = ExpandRelativePathsRelativeToExeLocation(binPath);

                    if (accumulateProperties)
                    {
                        SetProperty
                        (
                            new PropertyDefinition(ReservedPropertyNames.binPath, binPath, property.Source),
                            initialProperties,
                            globalProperties
                        );
                    }
                }
                else if(ReservedPropertyNames.IsReservedProperty(property.Name))
                {
                    // We don't allow toolsets to define reserved properties
                    string baseMessage = ResourceUtilities.FormatResourceString("CannotModifyReservedProperty", property.Name);
                    InvalidToolsetDefinitionException.Throw("InvalidPropertyNameInToolset", property.Name, property.Source, baseMessage);
                }
                else
                {
                    // It's an arbitrary property
                    string propertyValue = ExpandProperty(property, expander);
                    PropertyDefinition expandedProperty = new PropertyDefinition(property.Name, propertyValue, property.Source);

                    SetProperty(expandedProperty, properties, globalProperties);

                    if (accumulateProperties)
                    {
                        SetProperty(expandedProperty, initialProperties, globalProperties);
                    }
                }

                // Rebuild the expander after each accumulated property so that later
                // property values in this same toolset can reference earlier ones.
                if (accumulateProperties)
                {
                    expander = new Expander(initialProperties);
                }
            }

            // All tools versions must specify a value for MSBuildToolsPath (or MSBuildBinPath)
            if (String.IsNullOrEmpty(toolsPath) && String.IsNullOrEmpty(binPath))
            {
                InvalidToolsetDefinitionException.Throw("MSBuildToolsPathIsNotSpecified", toolsVersion.Name, toolsVersion.Source);
            }

            // If both MSBuildBinPath and MSBuildToolsPath are present, they must be the same
            if (toolsPath != null && binPath != null && !toolsPath.Equals(binPath, StringComparison.OrdinalIgnoreCase))
            {
                return null;
            }

            Toolset toolset = null;

            try
            {
                toolset = new Toolset(toolsVersion.Name, toolsPath == null ? binPath : toolsPath, properties);
            }
            catch (ArgumentException e)
            {
                InvalidToolsetDefinitionException.Throw("ErrorCreatingToolset", toolsVersion.Name, e.Message);
            }

            return toolset;
        }

        /// <summary>
        /// Expands the given unexpanded property expression using the properties in the
        /// given expander.
        /// </summary>
        /// <param name="property">Property whose value may contain $(...) expressions to expand</param>
        /// <param name="expander">Expander seeded with the properties available so far</param>
        /// <returns>The expanded value; never returns after an expansion error (throws InvalidToolsetDefinitionException)</returns>
        private string ExpandProperty(PropertyDefinition property, Expander expander)
        {
            try
            {
                return expander.ExpandAllIntoStringLeaveEscaped(property.Value, null);
            }
            catch (InvalidProjectFileException ex)
            {
                InvalidToolsetDefinitionException.Throw(ex, "ErrorEvaluatingToolsetPropertyExpression", property.Value, property.Source, ex.Message);
            }

            // Unreachable in practice: Throw above does not return. Keeps the compiler happy.
            return string.Empty;
        }

        /// <summary>
        /// Sets the given property in the given property group.
        /// </summary>
        /// <param name="property">Property to set</param>
        /// <param name="propertyGroup">Group to receive the property</param>
        /// <param name="globalProperties">Global properties; if the name already exists here, the set is silently skipped</param>
        private void SetProperty(PropertyDefinition property, BuildPropertyGroup propertyGroup, BuildPropertyGroup globalProperties)
        {
            try
            {
                // Global properties cannot be overwritten
                if (globalProperties[property.Name] == null)
                {
                    propertyGroup.SetProperty(property.Name, property.Value);
                }
            }
            catch (ArgumentException ex)
            {
                InvalidToolsetDefinitionException.Throw(ex, "InvalidPropertyNameInToolset", property.Name, property.Source, ex.Message);
            }
        }

        /// <summary>
        /// Given a path, de-relativizes it using the location of the currently
        /// executing .exe as the base directory. For example, the path "..\foo"
        /// becomes "c:\windows\microsoft.net\framework\foo" if the current exe is
        /// "c:\windows\microsoft.net\framework\v3.5.1234\msbuild.exe".
        /// If the path is not relative, it is returned without modification.
        /// If the path is invalid, it is returned without modification.
        /// </summary>
        /// <param name="path">Possibly relative path read from the registry or config file</param>
        /// <returns>A rooted path, or the original string if it was already rooted, empty, or invalid</returns>
        private string ExpandRelativePathsRelativeToExeLocation(string path)
        {
            try
            {
                // Trim, because we don't want to do anything with empty values
                // (those should cause an error)
                string trimmedValue = path.Trim();
                if (trimmedValue.Length > 0 && !Path.IsPathRooted(trimmedValue))
                {
                    path = Path.GetFullPath(
                        Path.Combine(FileUtilities.CurrentExecutableDirectory, trimmedValue));
                }
            }
            catch (Exception e) // Catching Exception, but rethrowing unless it's an IO related exception.
            {
                if (ExceptionHandling.NotExpectedException(e))
                    throw;

                // This means that the path looked relative, but was an invalid path. In this case, we'll
                // just not expand it, and carry on - to be consistent with what happens when there's a
                // non-relative bin path with invalid characters. The problem will be detected later when
                // it's used in a project file.
            }

            return path;
        }

        /// <summary>
        /// Returns the list of tools versions
        /// </summary>
        protected abstract IEnumerable<PropertyDefinition> ToolsVersions
        {
            get;
        }

        /// <summary>
        /// Returns the default tools version, or null if none was specified
        /// </summary>
        protected abstract string DefaultToolsVersion
        {
            get;
        }

        /// <summary>
        /// Provides an enumerator over property definitions for a specified tools version
        /// </summary>
        /// <param name="toolsVersion">Tools version whose property definitions should be enumerated</param>
        /// <returns>An enumeration of property definitions</returns>
        protected abstract IEnumerable<PropertyDefinition> GetPropertyDefinitions(string toolsVersion);
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using MonoTouch.Foundation;
using MonoTouch.UIKit;
using MonoTouch.CoreAnimation;
using MonoTouch.CoreGraphics;
using System.Drawing;

namespace VirtualSales.iOS.DraggableList
{
    /// <summary>
    /// Flow layout that acts as a drop target: items can be dropped onto the list,
    /// rearranged within it, and dragged only inside its bounds. Drag feedback is a
    /// floating rasterized copy of the cell; dragging near an edge auto-scrolls via
    /// a CADisplayLink timer.
    /// </summary>
    [Register("DraggableListTargetFlowLayout")]
    public class DraggableListTargetFlowLayout : DraggableListFlowLayoutBase
    {
        // Per-display-link payload carrying the current auto-scroll direction.
        private Dictionary<CADisplayLink, NSDictionary> userInfos = new Dictionary<CADisplayLink, NSDictionary>();

        public DraggableListTargetFlowLayout(IntPtr handle)
            : base(handle)
        {
        }

        // enum used to indicate scrolling direction when item dragged to edges
        private enum ScrollingDirection
        {
            ScrollingDirectionUnknown = 0,
            ScrollingDirectionUp,
            ScrollingDirectionDown,
            ScrollingDirectionLeft,
            ScrollingDirectionRight
        };

        /// <summary>
        /// Stops automatic scrolling and tears down the display link.
        /// </summary>
        private void InvalidatesScrollTimer()
        {
            if (DisplayLink == null)
                return;

            // BUG FIX: the original invalidated the link only when it was already
            // paused (`if (DisplayLink.Paused)`), so an actively running link was
            // dropped without being invalidated and kept firing HandleScroll forever.
            // An active (non-paused) link is exactly the one that must be invalidated.
            if (!DisplayLink.Paused)
            {
                DisplayLink.Invalidate();
            }

            userInfos.Remove(DisplayLink);
            DisplayLink = null;
        }

        /// <summary>
        /// Long-press begins a drag (creating the floating copy) and ending/cancelling
        /// animates the floating copy back into its final cell position.
        /// </summary>
        protected override void HandleLongPressGesture()
        {
            switch (LongPressGestureRecognizer.State)
            {
                case UIGestureRecognizerState.Began:
                {
                    // We get the current item and draw a floating copy of it on the
                    // collection view (so it cannot be taken outside its bounds).
                    // The copy is produced using RastertizedImage.
                    var currentIndexPath = CollectionView.IndexPathForItemAtPoint(LongPressGestureRecognizer.LocationInView(CollectionView));
                    SelectedItemIndexPath = currentIndexPath;

                    if (SelectedItemIndexPath == null)
                        return;
                    if (!DataSource.CanMoveItemAtIndexPath(SelectedItemIndexPath))
                        return;

                    var collectionViewCell = CollectionView.CellForItem(SelectedItemIndexPath);
                    CurrentView = new UIView(collectionViewCell.Frame);

                    // Two snapshots: highlighted (shown first) cross-fades into the
                    // normal one as the drag animation starts.
                    collectionViewCell.Highlighted = true;
                    var highlightedImageView = new UIImageView(RastertizedImage(collectionViewCell));
                    highlightedImageView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
                    highlightedImageView.Alpha = 1.0f;

                    collectionViewCell.Highlighted = false;
                    var imageView = new UIImageView(RastertizedImage(collectionViewCell));
                    imageView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
                    imageView.Alpha = 0.0f;

                    CurrentView.AddSubview(imageView);
                    CurrentView.AddSubview(highlightedImageView);
                    CollectionView.AddSubview(CurrentView);
                    CurrentViewCenter = CurrentView.Center;

                    OnWillBeginDraggingItem(SelectedItemIndexPath);

                    // Animate the floating copy into existence.
                    UIView.Animate(0.3, 0.0, UIViewAnimationOptions.BeginFromCurrentState,
                        () =>
                        {
                            CurrentView.Transform = CGAffineTransform.MakeScale(1.1f, 1.1f);
                            highlightedImageView.Alpha = 0.0f;
                            imageView.Alpha = 1.0f;
                        },
                        () =>
                        {
                            highlightedImageView.RemoveFromSuperview();
                            OnDidBegingDraggingItem(SelectedItemIndexPath);
                        });

                    InvalidateLayout();
                }
                break;

                case UIGestureRecognizerState.Cancelled:
                case UIGestureRecognizerState.Ended:
                {
                    var currentIndexPath = SelectedItemIndexPath;
                    if (currentIndexPath == null)
                        return;

                    SelectedItemIndexPath = null;
                    CurrentViewCenter = PointF.Empty;

                    OnWillEndDraggingItem(currentIndexPath);

                    // Snap the floating copy back onto its destination cell, then clean up.
                    UIView.Animate(0.3, 0.0, UIViewAnimationOptions.BeginFromCurrentState,
                        () =>
                        {
                            var layoutAttributes = LayoutAttributesForItem(currentIndexPath);
                            CurrentView.Transform = CGAffineTransform.MakeScale(1.0f, 1.0f);
                            CurrentView.Center = layoutAttributes.Center;
                        },
                        () =>
                        {
                            CurrentView.RemoveFromSuperview();
                            CurrentView = null;
                            InvalidateLayout();
                            OnDidEndDraggingItem(currentIndexPath);
                        });
                }
                break;
            }
        }

        /// <summary>
        /// Sets up scrolling in the given direction. Uses CADisplayLink, a timer
        /// synchronized to the display refresh rate, for smooth automated scrolling.
        /// A no-op if a timer is already running in the same direction.
        /// </summary>
        private void SetupScrollTimerInDirection(ScrollingDirection direction)
        {
            if (DisplayLink != null && !DisplayLink.Paused)
            {
                var userInfo = userInfos[DisplayLink];
                var scrollingDirection = userInfo[ScrollingDirectionKey];
                var number = (NSNumber)NSNumber.FromObject(scrollingDirection);
                ScrollingDirection oldDirection = (ScrollingDirection)number.Int32Value;
                if (direction == oldDirection)
                {
                    return;
                }
            }

            InvalidatesScrollTimer();
            DisplayLink = CADisplayLink.Create(HandleScroll);
            userInfos.Add(DisplayLink, NSDictionary.FromObjectAndKey(NSNumber.FromInt32((int)direction), new NSString(ScrollingDirectionKey)));
            DisplayLink.AddToRunLoop(NSRunLoop.Main, NSRunLoop.NSRunLoopCommonModes);
        }

        // Helpers verifying whether a point lies in one of the edge trigger regions
        // (the insets at each edge that start auto-scrolling when dragged into).

        private bool IsPointInTopScrollingBounds(PointF pt)
        {
            var isAboveInset = pt.Y < (CollectionView.Bounds.GetMinY() + ScrollingTriggerEdgeInsets.Top);
            var isBelowTop = pt.Y > CollectionView.Bounds.GetMinY();
            return isAboveInset && isBelowTop;
        }

        private bool IsPointInBottomScrollingBounds(PointF pt)
        {
            var isBelowInset = pt.Y > (CollectionView.Bounds.GetMaxY() - ScrollingTriggerEdgeInsets.Bottom);
            var isAboveBottom = pt.Y < CollectionView.Bounds.GetMaxY();
            return isBelowInset && isAboveBottom;
        }

        private bool IsInHorizontalBounds(PointF pt)
        {
            var result = pt.X < CollectionView.Bounds.GetMaxX() && pt.X > CollectionView.Bounds.GetMinX();
            return result;
        }

        private bool IsInVerticalBounds(PointF pt)
        {
            var result = pt.Y < CollectionView.Bounds.GetMaxY() && pt.Y > CollectionView.Bounds.GetMinY();
            return result;
        }

        private bool IsPointInLeftScrollingBounds(PointF pt)
        {
            var isLeftOfInset = pt.X < (CollectionView.Bounds.GetMinX() + ScrollingTriggerEdgeInsets.Left);
            var isRightOfEdge = pt.X > CollectionView.Bounds.GetMinX();
            return isLeftOfInset && isRightOfEdge;
        }

        private bool IsPointInRightScrollingBounds(PointF pt)
        {
            var isRightOfInset = pt.X > (CollectionView.Bounds.GetMaxX() - ScrollingTriggerEdgeInsets.Right);
            var isLeftOfEdge = pt.X < CollectionView.Bounds.GetMaxX();
            return isRightOfInset && isLeftOfEdge;
        }

        /// <summary>
        /// Tracks the pan: moves the floating copy, reorders the underlying item when
        /// it crosses another cell, and starts/stops edge auto-scrolling.
        /// </summary>
        protected override void HandlePanGesture()
        {
            switch (PanGestureRecognizer.State)
            {
                case UIGestureRecognizerState.Began:
                case UIGestureRecognizerState.Changed:
                {
                    // When an item is panned, check whether it is in a scrolling
                    // trigger region and scroll if we have to.
                    PanTranslationInCollectionView = PanGestureRecognizer.TranslationInView(CollectionView);
                    var viewCenter = CurrentView.Center = AddPoints(CurrentViewCenter, PanTranslationInCollectionView);

                    var newIndexPath = CollectionView.IndexPathForItemAtPoint(viewCenter);
                    if (newIndexPath == null)
                    {
                        return;
                    }

                    InvalidateLayoutIfNecesary();

                    switch (ScrollDirection)
                    {
                        case UICollectionViewScrollDirection.Vertical:
                        {
                            if (IsPointInTopScrollingBounds(viewCenter) && IsInHorizontalBounds(viewCenter))
                            {
                                SetupScrollTimerInDirection(ScrollingDirection.ScrollingDirectionUp);
                            }
                            else if (IsPointInBottomScrollingBounds(viewCenter) && IsInHorizontalBounds(viewCenter))
                            {
                                SetupScrollTimerInDirection(ScrollingDirection.ScrollingDirectionDown);
                            }
                            else
                            {
                                InvalidatesScrollTimer();
                            }
                        }
                        break;

                        case UICollectionViewScrollDirection.Horizontal:
                        {
                            if (IsPointInLeftScrollingBounds(viewCenter) && IsInVerticalBounds(viewCenter))
                            {
                                SetupScrollTimerInDirection(ScrollingDirection.ScrollingDirectionLeft);
                            }
                            // BUG FIX: the right-edge case checked IsInHorizontalBounds;
                            // for horizontal scrolling the perpendicular (vertical) bounds
                            // must be checked, mirroring the left-edge case above.
                            else if (IsPointInRightScrollingBounds(viewCenter) && IsInVerticalBounds(viewCenter))
                            {
                                SetupScrollTimerInDirection(ScrollingDirection.ScrollingDirectionRight);
                            }
                            else
                            {
                                InvalidatesScrollTimer();
                            }
                        }
                        break;

                        default:
                            break;
                    }
                }
                break;

                case UIGestureRecognizerState.Cancelled:
                case UIGestureRecognizerState.Ended:
                {
                    InvalidatesScrollTimer();
                }
                break;

                default:
                    break;
            }
        }

        public static readonly string ScrollingDirectionKey = "LX_scrollingdirection";
        public static readonly float LX_FRAMES_PER_SECOND = 60.0f;

        /// <summary>
        /// Actual scrolling logic driven by the CADisplayLink timer. Scrolls the
        /// collection view by ScrollingSpeed / LX_FRAMES_PER_SECOND points per tick,
        /// clamped so the content offset never leaves the valid range, and keeps the
        /// floating copy under the finger.
        /// </summary>
        public void HandleScroll()
        {
            if (DisplayLink == null)
                return;

            var userInfo = userInfos[DisplayLink];
            var scrollingDirection = userInfo[ScrollingDirectionKey];
            var number = (NSNumber)NSNumber.FromObject(scrollingDirection);
            ScrollingDirection direction = (ScrollingDirection)(number.Int32Value);
            if (direction == ScrollingDirection.ScrollingDirectionUnknown)
                return;

            var frameSize = CollectionView.Bounds.Size;
            var contentSize = CollectionView.ContentSize;
            var contentOffset = CollectionView.ContentOffset;
            var distance = (float)Math.Round(ScrollingSpeed / LX_FRAMES_PER_SECOND);
            var translation = new PointF();

            switch (direction)
            {
                case ScrollingDirection.ScrollingDirectionUp:
                    distance = -distance;
                    var minY = 0.0f;
                    // Clamp so we never scroll above the top of the content.
                    if ((contentOffset.Y + distance) <= minY)
                    {
                        distance = -contentOffset.Y;
                    }
                    translation = new PointF(0.0f, distance);
                    break;

                case ScrollingDirection.ScrollingDirectionDown:
                    float maxY = Math.Max(contentSize.Height, frameSize.Height) - frameSize.Height;
                    if ((contentOffset.Y + distance) >= maxY)
                    {
                        distance = maxY - contentOffset.Y;
                    }
                    translation = new PointF(0.0f, distance);
                    break;

                case ScrollingDirection.ScrollingDirectionLeft:
                    distance = -distance;
                    float minX = 0.0f;
                    if ((contentOffset.X + distance) <= minX)
                    {
                        distance = -contentOffset.X;
                    }
                    translation = new PointF(distance, 0.0f);
                    break;

                case ScrollingDirection.ScrollingDirectionRight:
                    float maxX = Math.Max(contentSize.Width, frameSize.Width) - frameSize.Width;
                    if ((contentOffset.X + distance) >= maxX)
                    {
                        distance = maxX - contentOffset.X;
                    }
                    translation = new PointF(distance, 0.0f);
                    break;

                default:
                    break;
            }

            // Move the anchor and the floating copy along with the content so the
            // dragged item visually stays under the finger while scrolling.
            CurrentViewCenter = AddPoints(CurrentViewCenter, translation);
            if (CurrentView != null)
            {
                CurrentView.Center = AddPoints(CurrentViewCenter, PanTranslationInCollectionView);
            }
            CollectionView.ContentOffset = AddPoints(contentOffset, translation);
        }

        /// <summary>
        /// Figures out whether the dragged item now hovers over a different cell and,
        /// if so, moves it there via a batch delete+insert.
        /// </summary>
        private void InvalidateLayoutIfNecesary()
        {
            var newIndexPath = CollectionView.IndexPathForItemAtPoint(CurrentView.Center);
            var previousIndexPath = SelectedItemIndexPath;

            // BUG FIX: the original compared with '==', which for NSIndexPath is
            // reference equality; IndexPathForItemAtPoint returns fresh instances so
            // the check was effectively always false and redundant batch updates ran
            // on every pan tick. Compare by value instead.
            if ((newIndexPath == null) || newIndexPath.Equals(previousIndexPath))
            {
                return;
            }

            SelectedItemIndexPath = newIndexPath;
            DataSource.ItemWillMoveToIndexPath(previousIndexPath, newIndexPath);

            NSAction action = () =>
            {
                CollectionView.DeleteItems(new[] { previousIndexPath });
                CollectionView.InsertItems(new[] { newIndexPath });
            };
            UICompletionHandler completion = (bool finished) =>
            {
                DataSource.ItemDidMoveToIndexPath(previousIndexPath, newIndexPath);
            };
            CollectionView.PerformBatchUpdates(action, completion);
        }

        protected override void HandleCollectionViewNull()
        {
            // No collection view to scroll: make sure the auto-scroll timer is gone.
            this.InvalidatesScrollTimer();
        }
    }
}
/* * Vericred API * * Vericred's API allows you to search for Health Plans that a specific doctor accepts. ## Getting Started Visit our [Developer Portal](https://developers.vericred.com) to create an account. Once you have created an account, you can create one Application for Production and another for our Sandbox (select the appropriate Plan when you create the Application). ## SDKs Our API follows standard REST conventions, so you can use any HTTP client to integrate with us. You will likely find it easier to use one of our [autogenerated SDKs](https://github.com/vericred/?query=vericred-), which we make available for several common programming languages. ## Authentication To authenticate, pass the API Key you created in the Developer Portal as a `Vericred-Api-Key` header. `curl -H 'Vericred-Api-Key: YOUR_KEY' "https://api.vericred.com/providers?search_term=Foo&zip_code=11215"` ## Versioning Vericred's API default to the latest version. However, if you need a specific version, you can request it with an `Accept-Version` header. The current version is `v3`. Previous versions are `v1` and `v2`. `curl -H 'Vericred-Api-Key: YOUR_KEY' -H 'Accept-Version: v2' "https://api.vericred.com/providers?search_term=Foo&zip_code=11215"` ## Pagination Endpoints that accept `page` and `per_page` parameters are paginated. They expose four additional fields that contain data about your position in the response, namely `Total`, `Per-Page`, `Link`, and `Page` as described in [RFC-5988](https://tools.ietf.org/html/rfc5988). For example, to display 5 results per page and view the second page of a `GET` to `/networks`, your final request would be `GET /networks?....page=2&per_page=5`. ## Sideloading When we return multiple levels of an object graph (e.g. `Provider`s and their `State`s we sideload the associated data. In this example, we would provide an Array of `State`s and a `state_id` for each provider. 
This is done primarily to reduce the payload size since many of the `Provider`s will share a `State`

```
{
  providers: [{ id: 1, state_id: 1}, { id: 2, state_id: 1 }],
  states: [{ id: 1, code: 'NY' }]
}
```

If you need the second level of the object graph, you can just match the corresponding id.

## Selecting specific data

All endpoints allow you to specify which fields you would like to return. This allows you to limit the response to contain only the data you need.

For example, let's take a request that returns the following JSON by default

```
{
  provider: {
    id: 1,
    name: 'John',
    phone: '1234567890',
    field_we_dont_care_about: 'value_we_dont_care_about'
  },
  states: [{
    id: 1,
    name: 'New York',
    code: 'NY',
    field_we_dont_care_about: 'value_we_dont_care_about'
  }]
}
```

To limit our results to only return the fields we care about, we specify the `select` query string parameter for the corresponding fields in the JSON document. In this case, we want to select `name` and `phone` from the `provider` key, so we would add the parameters `select=provider.name,provider.phone`. We also want the `name` and `code` from the `states` key, so we would add the parameters `select=states.name,states.code`. The id field of each document is always returned whether or not it is requested.
Our final request would be `GET /providers/12345?select=provider.name,provider.phone,states.name,states.code` The response would be ``` { provider: { id: 1, name: 'John', phone: '1234567890' }, states: [{ id: 1, name: 'New York', code: 'NY' }] } ``` ## Benefits summary format Benefit cost-share strings are formatted to capture: * Network tiers * Compound or conditional cost-share * Limits on the cost-share * Benefit-specific maximum out-of-pocket costs **Example #1** As an example, we would represent [this Summary of Benefits &amp; Coverage](https://s3.amazonaws.com/vericred-data/SBC/2017/33602TX0780032.pdf) as: * **Hospital stay facility fees**: - Network Provider: `$400 copay/admit plus 20% coinsurance` - Out-of-Network Provider: `$1,500 copay/admit plus 50% coinsurance` - Vericred's format for this benefit: `In-Network: $400 before deductible then 20% after deductible / Out-of-Network: $1,500 before deductible then 50% after deductible` * **Rehabilitation services:** - Network Provider: `20% coinsurance` - Out-of-Network Provider: `50% coinsurance` - Limitations & Exceptions: `35 visit maximum per benefit period combined with Chiropractic care.` - Vericred's format for this benefit: `In-Network: 20% after deductible / Out-of-Network: 50% after deductible | limit: 35 visit(s) per Benefit Period` **Example #2** In [this other Summary of Benefits &amp; Coverage](https://s3.amazonaws.com/vericred-data/SBC/2017/40733CA0110568.pdf), the **specialty_drugs** cost-share has a maximum out-of-pocket for in-network pharmacies. 
* **Specialty drugs:** - Network Provider: `40% coinsurance up to a $500 maximum for up to a 30 day supply` - Out-of-Network Provider `Not covered` - Vericred's format for this benefit: `In-Network: 40% after deductible, up to $500 per script / Out-of-Network: 100%` **BNF** Here's a description of the benefits summary string, represented as a context-free grammar: ``` <cost-share> ::= <tier> <opt-num-prefix> <value> <opt-per-unit> <deductible> <tier-limit> "/" <tier> <opt-num-prefix> <value> <opt-per-unit> <deductible> "|" <benefit-limit> <tier> ::= "In-Network:" | "In-Network-Tier-2:" | "Out-of-Network:" <opt-num-prefix> ::= "first" <num> <unit> | "" <unit> ::= "day(s)" | "visit(s)" | "exam(s)" | "item(s)" <value> ::= <ddct_moop> | <copay> | <coinsurance> | <compound> | "unknown" | "Not Applicable" <compound> ::= <copay> <deductible> "then" <coinsurance> <deductible> | <copay> <deductible> "then" <copay> <deductible> | <coinsurance> <deductible> "then" <coinsurance> <deductible> <copay> ::= "$" <num> <coinsurace> ::= <num> "%" <ddct_moop> ::= <copay> | "Included in Medical" | "Unlimited" <opt-per-unit> ::= "per day" | "per visit" | "per stay" | "" <deductible> ::= "before deductible" | "after deductible" | "" <tier-limit> ::= ", " <limit> | "" <benefit-limit> ::= <limit> | "" ``` * * OpenAPI spec version: 1.0.0 * * Generated by: https://github.com/swagger-api/swagger-codegen.git * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ using System; using System.Linq; using System.IO; using System.Text; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Runtime.Serialization; using Newtonsoft.Json; using Newtonsoft.Json.Converters; namespace IO.Vericred.Model { /// <summary> /// NetworkDetails /// </summary> [DataContract] public partial class NetworkDetails : IEquatable<NetworkDetails> { /// <summary> /// Initializes a new instance of the <see cref="NetworkDetails" /> class. /// </summary> /// <param name="Id">Primary key.</param> /// <param name="Name">Name of the Network.</param> /// <param name="HiosIssuerIds">List of issuer IDs.</param> public NetworkDetails(int? Id = null, string Name = null, List<int?> HiosIssuerIds = null) { this.Id = Id; this.Name = Name; this.HiosIssuerIds = HiosIssuerIds; } /// <summary> /// Primary key /// </summary> /// <value>Primary key</value> [DataMember(Name="id", EmitDefaultValue=false)] public int? Id { get; set; } /// <summary> /// Name of the Network /// </summary> /// <value>Name of the Network</value> [DataMember(Name="name", EmitDefaultValue=false)] public string Name { get; set; } /// <summary> /// List of issuer IDs /// </summary> /// <value>List of issuer IDs</value> [DataMember(Name="hios_issuer_ids", EmitDefaultValue=false)] public List<int?> HiosIssuerIds { get; set; } /// <summary> /// Returns the string presentation of the object /// </summary> /// <returns>String presentation of the object</returns> public override string ToString() { var sb = new StringBuilder(); sb.Append("class NetworkDetails {\n"); sb.Append(" Id: ").Append(Id).Append("\n"); sb.Append(" Name: ").Append(Name).Append("\n"); sb.Append(" HiosIssuerIds: ").Append(HiosIssuerIds).Append("\n"); sb.Append("}\n"); return sb.ToString(); } /// <summary> /// Returns the JSON string presentation of the object /// </summary> /// <returns>JSON string presentation of the object</returns> public string ToJson() { return 
JsonConvert.SerializeObject(this, Formatting.Indented); } /// <summary> /// Returns true if objects are equal /// </summary> /// <param name="obj">Object to be compared</param> /// <returns>Boolean</returns> public override bool Equals(object obj) { // credit: http://stackoverflow.com/a/10454552/677735 return this.Equals(obj as NetworkDetails); } /// <summary> /// Returns true if NetworkDetails instances are equal /// </summary> /// <param name="other">Instance of NetworkDetails to be compared</param> /// <returns>Boolean</returns> public bool Equals(NetworkDetails other) { // credit: http://stackoverflow.com/a/10454552/677735 if (other == null) return false; return ( this.Id == other.Id || this.Id != null && this.Id.Equals(other.Id) ) && ( this.Name == other.Name || this.Name != null && this.Name.Equals(other.Name) ) && ( this.HiosIssuerIds == other.HiosIssuerIds || this.HiosIssuerIds != null && this.HiosIssuerIds.SequenceEqual(other.HiosIssuerIds) ); } /// <summary> /// Gets the hash code /// </summary> /// <returns>Hash code</returns> public override int GetHashCode() { // credit: http://stackoverflow.com/a/263416/677735 unchecked // Overflow is fine, just wrap { int hash = 41; // Suitable nullity checks etc, of course :) if (this.Id != null) hash = hash * 59 + this.Id.GetHashCode(); if (this.Name != null) hash = hash * 59 + this.Name.GetHashCode(); if (this.HiosIssuerIds != null) hash = hash * 59 + this.HiosIssuerIds.GetHashCode(); return hash; } } } }
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Threading;
using Microsoft.Win32.SafeHandles;
using System.Diagnostics;

namespace System.Net.Sockets
{
    // Unix-specific half of the SafeCloseSocket partial class: wraps a file
    // descriptor and tracks the non-blocking flag and send/receive timeouts.
    internal partial class SafeCloseSocket :
#if DEBUG
        DebugSafeHandleMinusOneIsInvalid
#else
        SafeHandleMinusOneIsInvalid
#endif
    {
        // Timeouts in milliseconds; -1 means "infinite" (no timeout).
        private int _receiveTimeout = -1;
        private int _sendTimeout = -1;

        public SocketAsyncContext AsyncContext
        {
            get { return _innerSocket.AsyncContext; }
        }

        // The raw file descriptor backing this handle.
        public int FileDescriptor
        {
            get { return (int)handle; }
        }

        public bool IsNonBlocking { get; set; }

        public int ReceiveTimeout
        {
            get { return _receiveTimeout; }
            set
            {
                Debug.Assert(value == -1 || value > 0);
                // FIX: removed a stray empty statement (";;") left after the assignment.
                _receiveTimeout = value;
            }
        }

        public int SendTimeout
        {
            get { return _sendTimeout; }
            set
            {
                Debug.Assert(value == -1 || value > 0);
                _sendTimeout = value;
            }
        }

        public unsafe static SafeCloseSocket CreateSocket(int fileDescriptor)
        {
            return CreateSocket(InnerSafeCloseSocket.CreateSocket(fileDescriptor));
        }

        public unsafe static SafeCloseSocket CreateSocket(AddressFamily addressFamily, SocketType socketType, ProtocolType protocolType)
        {
            return CreateSocket(InnerSafeCloseSocket.CreateSocket(addressFamily, socketType, protocolType));
        }

        public unsafe static SafeCloseSocket Accept(SafeCloseSocket socketHandle, byte[] socketAddress, ref int socketAddressSize)
        {
            return CreateSocket(InnerSafeCloseSocket.Accept(socketHandle, socketAddress, ref socketAddressSize));
        }

        private void InnerReleaseHandle()
        {
            // No-op for Unix.
        }

        internal sealed partial class InnerSafeCloseSocket : SafeHandleMinusOneIsInvalid
        {
            private SocketAsyncContext _asyncContext;

            // Lazily creates the async context for this descriptor; safe under
            // races via Interlocked.CompareExchange (losers discard their instance).
            public SocketAsyncContext AsyncContext
            {
                get
                {
                    if (Volatile.Read(ref _asyncContext) == null)
                    {
                        Interlocked.CompareExchange(ref _asyncContext, new SocketAsyncContext((int)handle, SocketAsyncEngine.Instance), null);
                    }
                    return _asyncContext;
                }
            }

            // Closes the descriptor, honoring linger options when a blocking close
            // is permitted and falling back to an abortive (RST) close otherwise.
            private unsafe SocketError InnerReleaseHandle()
            {
                int errorCode;

                if (_asyncContext != null)
                {
                    _asyncContext.Close();
                }

                // If _blockable was set in BlockingRelease, it's safe to block here, which means
                // we can honor the linger options set on the socket. It also means closesocket() might return WSAEWOULDBLOCK, in which
                // case we need to do some recovery.
                if (_blockable)
                {
                    GlobalLog.Print("SafeCloseSocket::ReleaseHandle(handle:" + handle.ToString("x") + ") Following 'blockable' branch.");

                    errorCode = Interop.Sys.Close((int)handle);
                    if (errorCode == -1)
                    {
                        errorCode = (int)Interop.Sys.GetLastError();
                    }
                    GlobalLog.Print("SafeCloseSocket::ReleaseHandle(handle:" + handle.ToString("x") + ") close()#1:" + errorCode.ToString());
#if DEBUG
                    _closeSocketHandle = handle;
                    _closeSocketResult = SocketPal.GetSocketErrorForErrorCode((Interop.Error)errorCode);
#endif

                    // If it's not EWOULDBLOCK, there's no more recourse - we either succeeded or failed.
                    if (errorCode != (int)Interop.Error.EWOULDBLOCK)
                    {
                        // NOTE(review): _asyncContext.Close() was already called above;
                        // this second call on success is preserved as-is — confirm it is idempotent.
                        if (errorCode == 0 && _asyncContext != null)
                        {
                            _asyncContext.Close();
                        }
                        return SocketPal.GetSocketErrorForErrorCode((Interop.Error)errorCode);
                    }

                    // The socket must be non-blocking with a linger timeout set.
                    // We have to set the socket to blocking.
                    errorCode = Interop.Sys.Fcntl.SetIsNonBlocking((int)handle, 0);
                    if (errorCode == 0)
                    {
                        // The socket successfully made blocking; retry the close().
                        errorCode = Interop.Sys.Close((int)handle);
                        GlobalLog.Print("SafeCloseSocket::ReleaseHandle(handle:" + handle.ToString("x") + ") close()#2:" + errorCode.ToString());
#if DEBUG
                        _closeSocketHandle = handle;
                        _closeSocketResult = SocketPal.GetSocketErrorForErrorCode((Interop.Error)errorCode);
#endif
                        if (errorCode == 0 && _asyncContext != null)
                        {
                            _asyncContext.Close();
                        }
                        return SocketPal.GetSocketErrorForErrorCode((Interop.Error)errorCode);
                    }

                    // The socket could not be made blocking; fall through to the regular abortive close.
                }

                // By default or if CloseAsIs() path failed, set linger timeout to zero to get an abortive close (RST).
                var linger = new Interop.libc.linger {
                    l_onoff = 1,
                    l_linger = 0
                };
                errorCode = Interop.libc.setsockopt((int)handle, Interop.libc.SOL_SOCKET, Interop.libc.SO_LINGER, &linger, (uint)sizeof(Interop.libc.linger));
#if DEBUG
                _closeSocketLinger = SocketPal.GetSocketErrorForErrorCode((Interop.Error)errorCode);
#endif
                if (errorCode == -1)
                {
                    errorCode = (int)Interop.Sys.GetLastError();
                }
                GlobalLog.Print("SafeCloseSocket::ReleaseHandle(handle:" + handle.ToString("x") + ") setsockopt():" + errorCode.ToString());

                if (errorCode != 0 && errorCode != (int)Interop.Error.EINVAL && errorCode != (int)Interop.Error.ENOPROTOOPT)
                {
                    // Too dangerous to try closesocket() - it might block!
                    return SocketPal.GetSocketErrorForErrorCode((Interop.Error)errorCode);
                }

                errorCode = Interop.Sys.Close((int)handle);
#if DEBUG
                _closeSocketHandle = handle;
                _closeSocketResult = SocketPal.GetSocketErrorForErrorCode((Interop.Error)errorCode);
#endif
                GlobalLog.Print("SafeCloseSocket::ReleaseHandle(handle:" + handle.ToString("x") + ") close#3():" + (errorCode == -1 ? (int)Interop.Sys.GetLastError() : errorCode).ToString());

                return SocketPal.GetSocketErrorForErrorCode((Interop.Error)errorCode);
            }

            // Wraps an existing descriptor without changing its flags.
            public static InnerSafeCloseSocket CreateSocket(int fileDescriptor)
            {
                var res = new InnerSafeCloseSocket();
                res.SetHandle((IntPtr)fileDescriptor);
                return res;
            }

            public static unsafe InnerSafeCloseSocket CreateSocket(AddressFamily addressFamily, SocketType socketType, ProtocolType protocolType)
            {
                int af = SocketPal.GetPlatformAddressFamily(addressFamily);
                int sock = SocketPal.GetPlatformSocketType(socketType);
                int pt = (int)protocolType;

                int fd = Interop.libc.socket(af, sock, pt);
                if (fd != -1)
                {
                    // The socket was created successfully; make it non-blocking and enable
                    // IPV6_V6ONLY by default for AF_INET6 sockets.
                    int err = Interop.Sys.Fcntl.SetIsNonBlocking(fd, 1);
                    if (err != 0)
                    {
                        Interop.Sys.Close(fd);
                        fd = -1;
                    }
                    else if (addressFamily == AddressFamily.InterNetworkV6)
                    {
                        int on = 1;
                        err = Interop.libc.setsockopt(fd, Interop.libc.IPPROTO_IPV6, Interop.libc.IPV6_V6ONLY, &on, (uint)sizeof(int));
                        if (err != 0)
                        {
                            Interop.Sys.Close(fd);
                            fd = -1;
                        }
                    }
                }

                // On any failure fd is -1, which SafeHandleMinusOneIsInvalid treats as invalid.
                var res = new InnerSafeCloseSocket();
                res.SetHandle((IntPtr)fd);
                return res;
            }

            public static unsafe InnerSafeCloseSocket Accept(SafeCloseSocket socketHandle, byte[] socketAddress, ref int socketAddressLen)
            {
                int acceptedFd;
                if (!socketHandle.IsNonBlocking)
                {
                    // Blocking socket: let the async context wait (infinite timeout).
                    socketHandle.AsyncContext.Accept(socketAddress, ref socketAddressLen, -1, out acceptedFd);
                }
                else
                {
                    // Non-blocking socket: try once; a failure yields an invalid fd.
                    SocketError unused;
                    SocketPal.TryCompleteAccept(socketHandle.FileDescriptor, socketAddress, ref socketAddressLen, out acceptedFd, out unused);
                }

                var res = new InnerSafeCloseSocket();
                res.SetHandle((IntPtr)acceptedFd);
                return res;
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // See the LICENSE file in the project root for more information. // // System.Drawing.Imaging.Metafile.cs // // Authors: // Christian Meyer, eMail: Christian.Meyer@cs.tum.edu // Dennis Hayes (dennish@raytek.com) // Sebastien Pouliot <sebastien@ximian.com> // // (C) 2002 Ximian, Inc. http://www.ximian.com // Copyright (C) 2004,2006-2007 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using System.IO;
using System.Reflection;
using System.ComponentModel;
using System.Diagnostics;
using System.Runtime.InteropServices;
using Gdip = System.Drawing.SafeNativeMethods.Gdip;
using System.Runtime.Serialization;

namespace System.Drawing.Imaging
{
#if !NETCORE
    [Editor ("System.Drawing.Design.MetafileEditor, " + Consts.AssemblySystem_Drawing_Design, typeof (System.Drawing.Design.UITypeEditor))]
#endif
    [Serializable]
    [System.Runtime.CompilerServices.TypeForwardedFrom("System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
    public sealed class Metafile : Image
    {
        // Non-null if a graphics instance was created using
        // Graphics.FromImage(this)  The metadata holder is responsible for
        // freeing the nativeImage if the Metadata instance is disposed before
        // the Graphics instance.
        private MetafileHolder _metafileHolder;

        // A class responsible for disposing of the native Metafile instance
        // if it needs to outlive the managed Metafile instance.
        //
        // The following are both legal with win32 GDI+:
        //   Metafile mf = ...; // get a metafile instance
        //   Graphics g = Graphics.FromImage(mf); // get a graphics instance
        //   g.Dispose(); mf.Dispose(); // dispose of the graphics instance first
        // OR
        //   mf.Dispose(); g.Dispose(); // dispose of the metafile instance first
        //
        // The metafile holder is designed to take ownership of the native metafile image
        // when the managed Metafile instance is disposed while a Graphics instance is still
        // not disposed (ie the second code pattern above) and to keep the native image alive until the graphics
        // instance is disposed.
        //
        // Note that the following throws, so we only ever need to keep track of one Graphics
        // instance at a time:
        //   Metafile mf = ...; // get a metafile instance
        //   Graphics g = Graphics.FromImage(mf);
        //   Graphics g2 = Graphics.FromImage(mf); // throws OutOfMemoryException on GDI+ on Win32
        internal sealed class MetafileHolder : IDisposable
        {
            private bool _disposed;
            private IntPtr _nativeImage;

            internal bool Disposed { get => _disposed; }
            internal MetafileHolder()
            {
                _disposed = false;
                _nativeImage = IntPtr.Zero;
            }

            // Finalizer: ensures the native image is released even if Dispose is never called.
            ~MetafileHolder() => Dispose(false);

            public void Dispose()
            {
                Dispose(true);
                GC.SuppressFinalize(this);
            }

            internal void Dispose(bool disposing)
            {
                if (!_disposed)
                {
                    // Clear the field before calling into GDI+ so a re-entrant or
                    // double dispose cannot free the same native image twice.
                    IntPtr nativeImage = _nativeImage;
                    _nativeImage = IntPtr.Zero;
                    _disposed = true;
                    if (nativeImage != IntPtr.Zero)
                    {
                        int status = Gdip.GdipDisposeImage(new HandleRef(this, nativeImage));
                        Gdip.CheckStatus(status);
                    }
                }
            }

            // Called by Metafile.Dispose: transfers ownership of the native image
            // to this holder because a Graphics instance still references it.
            internal void MetafileDisposed(IntPtr nativeImage)
            {
                _nativeImage = nativeImage;
            }

            // Called when the associated Graphics instance is disposed; the native
            // image (if owned) can now be freed.
            internal void GraphicsDisposed()
            {
                Dispose();
            }
        }

        // Returns a fresh holder for a new Graphics instance, or null if a live
        // Graphics instance already exists for this metafile.
        internal MetafileHolder AddMetafileHolder()
        {
            // If _metafileHolder is not null and hasn't been disposed yet, there's already a graphics instance associated with
            // this metafile, the native code will return an error status.
            if (_metafileHolder != null && !_metafileHolder.Disposed)
                return null;
            _metafileHolder = new MetafileHolder();
            return _metafileHolder;
        }

        // constructors

        internal Metafile(IntPtr ptr) => SetNativeImage(ptr);

        // Usually called when cloning images that need to have
        // not only the handle saved, but also the underlying stream
        // (when using MS GDI+ and IStream we must ensure the stream stays alive for all the life of the Image)
        // NOTE(review): the 'stream' parameter is not stored here — presumably not
        // needed with libgdiplus; confirm against the cloning call sites.
        internal Metafile(IntPtr ptr, Stream stream) => SetNativeImage(ptr);

        public Metafile(Stream stream)
        {
            if (stream == null)
                throw new ArgumentNullException(nameof(stream));

            // With libgdiplus we use a custom API for this, because there's no easy way
            // to get the Stream down to libgdiplus. So, we wrap the stream with a set of delegates.
            GdiPlusStreamHelper sh = new GdiPlusStreamHelper(stream, seekToOrigin: false);
            int status = Gdip.GdipCreateMetafileFromDelegate_linux(sh.GetHeaderDelegate, sh.GetBytesDelegate,
                sh.PutBytesDelegate, sh.SeekDelegate, sh.CloseDelegate, sh.SizeDelegate, out nativeImage);

            // Since we're just passing to native code the delegates inside the wrapper, we need to keep sh alive
            // to avoid the object being collected and therefore the delegates would be collected as well.
            GC.KeepAlive(sh);
            Gdip.CheckStatus(status);
        }

        public Metafile(string filename)
        {
            // Called in order to emulate exception behavior from netfx related to invalid file paths.
            Path.GetFullPath(filename);

            int status = Gdip.GdipCreateMetafileFromFile(filename, out nativeImage);
            if (status == Gdip.GenericError)
                throw new ExternalException("Couldn't load specified file.");
            Gdip.CheckStatus(status);
        }

        public Metafile(IntPtr henhmetafile, bool deleteEmf)
        {
            int status = Gdip.GdipCreateMetafileFromEmf(henhmetafile, deleteEmf, out nativeImage);
            Gdip.CheckStatus(status);
        }

        public Metafile(IntPtr referenceHdc, EmfType emfType) :
            this(referenceHdc, new RectangleF(), MetafileFrameUnit.GdiCompatible, emfType, null)
        {
        }

        public Metafile(IntPtr referenceHdc, Rectangle frameRect) :
            this(referenceHdc, frameRect, MetafileFrameUnit.GdiCompatible, EmfType.EmfPlusDual, null)
        {
        }

        public Metafile(IntPtr referenceHdc, RectangleF frameRect) :
            this(referenceHdc, frameRect, MetafileFrameUnit.GdiCompatible, EmfType.EmfPlusDual, null)
        {
        }

        // NOTE(review): 'wmfHeader' is ignored and the EMF creation path is used
        // with deleteEmf=false — looks like a libgdiplus limitation; confirm.
        public Metafile(IntPtr hmetafile, WmfPlaceableFileHeader wmfHeader)
        {
            int status = Gdip.GdipCreateMetafileFromEmf(hmetafile, false, out nativeImage);
            Gdip.CheckStatus(status);
        }

        public Metafile(Stream stream, IntPtr referenceHdc) :
            this(stream, referenceHdc, new RectangleF(), MetafileFrameUnit.GdiCompatible, EmfType.EmfPlusDual, null)
        {
        }

        public Metafile(string fileName, IntPtr referenceHdc) :
            this(fileName, referenceHdc, new RectangleF(), MetafileFrameUnit.GdiCompatible, EmfType.EmfPlusDual, null)
        {
        }

        public Metafile(IntPtr referenceHdc, EmfType emfType, string description) :
            this(referenceHdc, new RectangleF(), MetafileFrameUnit.GdiCompatible, emfType, description)
        {
        }

        public Metafile(IntPtr referenceHdc, Rectangle frameRect, MetafileFrameUnit frameUnit) :
            this(referenceHdc, frameRect, frameUnit, EmfType.EmfPlusDual, null)
        {
        }

        public Metafile(IntPtr referenceHdc, RectangleF frameRect, MetafileFrameUnit frameUnit) :
            this(referenceHdc, frameRect, frameUnit, EmfType.EmfPlusDual, null)
        {
        }

        // NOTE(review): as above, 'wmfHeader' is ignored; 'deleteWmf' is forwarded
        // as the EMF delete flag.
        public Metafile(IntPtr hmetafile, WmfPlaceableFileHeader wmfHeader, bool deleteWmf)
        {
            int status = Gdip.GdipCreateMetafileFromEmf(hmetafile, deleteWmf, out nativeImage);
            Gdip.CheckStatus(status);
        }

        public Metafile(Stream stream, IntPtr referenceHdc, EmfType type) :
            this(stream, referenceHdc, new RectangleF(), MetafileFrameUnit.GdiCompatible, type, null)
        {
        }

        public Metafile(Stream stream, IntPtr referenceHdc, Rectangle frameRect) :
            this(stream, referenceHdc, frameRect, MetafileFrameUnit.GdiCompatible, EmfType.EmfPlusDual, null)
        {
        }

        public Metafile(Stream stream, IntPtr referenceHdc, RectangleF frameRect) :
            this(stream, referenceHdc, frameRect, MetafileFrameUnit.GdiCompatible, EmfType.EmfPlusDual, null)
        {
        }

        public Metafile(string fileName, IntPtr referenceHdc, EmfType type) :
            this(fileName, referenceHdc, new RectangleF(), MetafileFrameUnit.GdiCompatible, type, null)
        {
        }

        public Metafile(string fileName, IntPtr referenceHdc, Rectangle frameRect) :
            this(fileName, referenceHdc, frameRect, MetafileFrameUnit.GdiCompatible, EmfType.EmfPlusDual, null)
        {
        }

        public Metafile(string fileName, IntPtr referenceHdc, RectangleF frameRect) :
            this(fileName, referenceHdc, frameRect, MetafileFrameUnit.GdiCompatible, EmfType.EmfPlusDual, null)
        {
        }

        public Metafile(IntPtr referenceHdc, Rectangle frameRect, MetafileFrameUnit frameUnit, EmfType type) :
            this(referenceHdc, frameRect, frameUnit, type, null)
        {
        }

        public Metafile(IntPtr referenceHdc, RectangleF frameRect, MetafileFrameUnit frameUnit, EmfType type) :
            this(referenceHdc, frameRect, frameUnit, type, null)
        {
        }

        public Metafile(Stream stream, IntPtr referenceHdc, EmfType type, string description) :
            this(stream, referenceHdc, new RectangleF(), MetafileFrameUnit.GdiCompatible, type, description)
        {
        }

        public Metafile(Stream stream, IntPtr referenceHdc, Rectangle frameRect, MetafileFrameUnit frameUnit) :
            this(stream, referenceHdc, frameRect, frameUnit, EmfType.EmfPlusDual, null)
        {
        }

        public Metafile(Stream stream, IntPtr referenceHdc, RectangleF frameRect, MetafileFrameUnit frameUnit) :
            this(stream, referenceHdc, frameRect, frameUnit, EmfType.EmfPlusDual, null)
        {
        }

        public Metafile(string fileName, IntPtr referenceHdc, EmfType type, string description) :
            this(fileName, referenceHdc, new RectangleF(), MetafileFrameUnit.GdiCompatible, type, description)
        {
        }

        public Metafile(string fileName, IntPtr referenceHdc, Rectangle frameRect, MetafileFrameUnit frameUnit) :
            this(fileName, referenceHdc, frameRect, frameUnit, EmfType.EmfPlusDual, null)
        {
        }

        public Metafile(string fileName, IntPtr referenceHdc, RectangleF frameRect, MetafileFrameUnit frameUnit) :
            this(fileName, referenceHdc, frameRect, frameUnit, EmfType.EmfPlusDual, null)
        {
        }

        // Core recording constructor for the integer-rectangle variant.
        public Metafile(IntPtr referenceHdc, Rectangle frameRect, MetafileFrameUnit frameUnit, EmfType type, string desc)
        {
            int status = Gdip.GdipRecordMetafileI(referenceHdc, type, ref frameRect, frameUnit,
                desc, out nativeImage);
            Gdip.CheckStatus(status);
        }

        // Core recording constructor for the float-rectangle variant.
        public Metafile(IntPtr referenceHdc, RectangleF frameRect, MetafileFrameUnit frameUnit, EmfType type, string description)
        {
            int status = Gdip.GdipRecordMetafile(referenceHdc, type, ref frameRect, frameUnit,
                description, out nativeImage);
            Gdip.CheckStatus(status);
        }

        public Metafile(Stream stream, IntPtr referenceHdc, Rectangle frameRect, MetafileFrameUnit frameUnit, EmfType type) :
            this(stream, referenceHdc, frameRect, frameUnit, type, null)
        {
        }

        public Metafile(Stream stream, IntPtr referenceHdc, RectangleF frameRect, MetafileFrameUnit frameUnit, EmfType type) :
            this(stream, referenceHdc, frameRect, frameUnit, type, null)
        {
        }

        public Metafile(string fileName, IntPtr referenceHdc, Rectangle frameRect, MetafileFrameUnit frameUnit, EmfType type) :
            this(fileName, referenceHdc, frameRect, frameUnit, type, null)
        {
        }

        public Metafile(string fileName, IntPtr referenceHdc, Rectangle frameRect, MetafileFrameUnit frameUnit, string description) :
            this(fileName, referenceHdc, frameRect, frameUnit, EmfType.EmfPlusDual, description)
        {
        }

        public Metafile(string fileName, IntPtr referenceHdc, RectangleF frameRect, MetafileFrameUnit frameUnit, EmfType type) :
            this(fileName, referenceHdc, frameRect, frameUnit, type, null)
        {
        }

        public Metafile(string fileName, IntPtr referenceHdc, RectangleF frameRect, MetafileFrameUnit frameUnit, string desc) :
            this(fileName, referenceHdc, frameRect, frameUnit, EmfType.EmfPlusDual, desc)
        {
        }

        // Core stream-recording constructor (integer rectangle).
        public Metafile(Stream stream, IntPtr referenceHdc, Rectangle frameRect, MetafileFrameUnit frameUnit, EmfType type, string description)
        {
            if (stream == null)
                // NOTE(review): sibling members throw ArgumentNullException for a
                // null stream; NullReferenceException here is inconsistent (CA2201)
                // but is kept since callers may depend on the exception type.
                throw new NullReferenceException(nameof(stream));

            // With libgdiplus we use a custom API for this, because there's no easy way
            // to get the Stream down to libgdiplus. So, we wrap the stream with a set of delegates.
            GdiPlusStreamHelper sh = new GdiPlusStreamHelper(stream, seekToOrigin: false);
            int status = Gdip.GdipRecordMetafileFromDelegateI_linux(sh.GetHeaderDelegate, sh.GetBytesDelegate,
                sh.PutBytesDelegate, sh.SeekDelegate, sh.CloseDelegate, sh.SizeDelegate, referenceHdc,
                type, ref frameRect, frameUnit, description, out nativeImage);

            // Since we're just passing to native code the delegates inside the wrapper, we need to keep sh alive
            // to avoid the object being collected and therefore the delegates would be collected as well.
            GC.KeepAlive(sh);
            Gdip.CheckStatus(status);
        }

        // Core stream-recording constructor (float rectangle).
        public Metafile(Stream stream, IntPtr referenceHdc, RectangleF frameRect, MetafileFrameUnit frameUnit, EmfType type, string description)
        {
            if (stream == null)
                // NOTE(review): inconsistent exception type, kept as-is (see above).
                throw new NullReferenceException(nameof(stream));

            // With libgdiplus we use a custom API for this, because there's no easy way
            // to get the Stream down to libgdiplus. So, we wrap the stream with a set of delegates.
            GdiPlusStreamHelper sh = new GdiPlusStreamHelper(stream, seekToOrigin: false);
            int status = Gdip.GdipRecordMetafileFromDelegate_linux(sh.GetHeaderDelegate, sh.GetBytesDelegate,
                sh.PutBytesDelegate, sh.SeekDelegate, sh.CloseDelegate, sh.SizeDelegate, referenceHdc,
                type, ref frameRect, frameUnit, description, out nativeImage);

            // Since we're just passing to native code the delegates inside the wrapper, we need to keep sh alive
            // to avoid the object being collected and therefore the delegates would be collected as well.
            GC.KeepAlive(sh);
            Gdip.CheckStatus(status);
        }

        // Core file-recording constructor (integer rectangle).
        public Metafile(string fileName, IntPtr referenceHdc, Rectangle frameRect, MetafileFrameUnit frameUnit, EmfType type, string description)
        {
            // Called in order to emulate exception behavior from netfx related to invalid file paths.
            Path.GetFullPath(fileName);

            int status = Gdip.GdipRecordMetafileFileNameI(fileName, referenceHdc, type, ref frameRect, frameUnit, description, out nativeImage);
            Gdip.CheckStatus(status);
        }

        // Core file-recording constructor (float rectangle).
        public Metafile(string fileName, IntPtr referenceHdc, RectangleF frameRect, MetafileFrameUnit frameUnit, EmfType type, string description)
        {
            // Called in order to emulate exception behavior from netfx related to invalid file paths.
            Path.GetFullPath(fileName);

            int status = Gdip.GdipRecordMetafileFileName(fileName, referenceHdc, type, ref frameRect, frameUnit, description, out nativeImage);
            Gdip.CheckStatus(status);
        }

        protected override void Dispose(bool disposing)
        {
            if (_metafileHolder != null && !_metafileHolder.Disposed)
            {
                // There's a graphics instance created from this Metafile,
                // transfer responsibility for disposing the nativeImage to the
                // MetafileHolder
                _metafileHolder.MetafileDisposed(nativeImage);
                _metafileHolder = null;
                nativeImage = IntPtr.Zero;
            }

            base.Dispose(disposing);
        }

        private Metafile(SerializationInfo info, StreamingContext context) : base(info, context)
        {
        }

        // methods

        public IntPtr GetHenhmetafile()
        {
            return nativeImage;
        }

        public MetafileHeader GetMetafileHeader()
        {
            // Native call fills an unmanaged buffer which MetafileHeader copies;
            // the buffer is always freed, even when CheckStatus throws.
            IntPtr header = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(MetafileHeader)));
            try
            {
                int status = Gdip.GdipGetMetafileHeaderFromMetafile(nativeImage, header);
                Gdip.CheckStatus(status);
                return new MetafileHeader(header);
            }
            finally
            {
                Marshal.FreeHGlobal(header);
            }
        }

        public static MetafileHeader GetMetafileHeader(IntPtr henhmetafile)
        {
            IntPtr header = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(MetafileHeader)));
            try
            {
                int status = Gdip.GdipGetMetafileHeaderFromEmf(henhmetafile, header);
                Gdip.CheckStatus(status);
                return new MetafileHeader(header);
            }
            finally
            {
                Marshal.FreeHGlobal(header);
            }
        }

        public static MetafileHeader GetMetafileHeader(Stream stream)
        {
            if (stream == null)
                // NOTE(review): inconsistent with GetMetafileHeader(string), which
                // throws ArgumentNullException; kept as-is for compatibility.
                throw new NullReferenceException(nameof(stream));

            IntPtr header = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(MetafileHeader)));
            try
            {
                // With libgdiplus we use a custom API for this, because there's no easy way
                // to get the Stream down to libgdiplus. So, we wrap the stream with a set of delegates.
                GdiPlusStreamHelper sh = new GdiPlusStreamHelper(stream, seekToOrigin: false);
                int status = Gdip.GdipGetMetafileHeaderFromDelegate_linux(sh.GetHeaderDelegate, sh.GetBytesDelegate,
                    sh.PutBytesDelegate, sh.SeekDelegate, sh.CloseDelegate, sh.SizeDelegate, header);

                // Since we're just passing to native code the delegates inside the wrapper, we need to keep sh alive
                // to avoid the object being collected and therefore the delegates would be collected as well.
                GC.KeepAlive(sh);
                Gdip.CheckStatus(status);
                return new MetafileHeader(header);
            }
            finally
            {
                Marshal.FreeHGlobal(header);
            }
        }

        public static MetafileHeader GetMetafileHeader(string fileName)
        {
            if (fileName == null)
                throw new ArgumentNullException(nameof(fileName));

            IntPtr header = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(MetafileHeader)));
            try
            {
                int status = Gdip.GdipGetMetafileHeaderFromFile(fileName, header);
                Gdip.CheckStatus(status);
                return new MetafileHeader(header);
            }
            finally
            {
                Marshal.FreeHGlobal(header);
            }
        }

        // NOTE(review): 'wmfHeader' is unused — the EMF header query is issued
        // regardless; presumably a libgdiplus limitation, confirm.
        public static MetafileHeader GetMetafileHeader(IntPtr hmetafile, WmfPlaceableFileHeader wmfHeader)
        {
            IntPtr header = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(MetafileHeader)));
            try
            {
                int status = Gdip.GdipGetMetafileHeaderFromEmf(hmetafile, header);
                Gdip.CheckStatus(status);
                return new MetafileHeader(header);
            }
            finally
            {
                Marshal.FreeHGlobal(header);
            }
        }

        public void PlayRecord(EmfPlusRecordType recordType, int flags, int dataSize, byte[] data)
        {
            int status = Gdip.GdipPlayMetafileRecord(nativeImage, recordType, flags, dataSize, data);
            Gdip.CheckStatus(status);
        }
    }
}
using System; using System.Linq; using System.Linq.Expressions; using Marten.Exceptions; using Marten.Schema; using Marten.Testing.Documents; using Marten.Testing.Harness; using Shouldly; using Xunit; namespace Marten.Testing.Acceptance { [Collection("acceptance")] public class computed_indexes: OneOffConfigurationsContext { [Fact] public void example() { // SAMPLE: using-a-simple-calculated-index var store = DocumentStore.For(_ => { _.Connection(ConnectionSource.ConnectionString); _.DatabaseSchemaName = "examples"; // This creates _.Schema.For<User>().Index(x => x.UserName); }); using (var session = store.QuerySession()) { // Postgresql will be able to use the computed // index generated from above var somebody = session.Query<User>() .Where(x => x.UserName == "somebody") .FirstOrDefault(); } // ENDSAMPLE store.Dispose(); } [Fact] public void smoke_test() { StoreOptions(_ => _.Schema.For<Target>().Index(x => x.Number)); var data = Target.GenerateRandomData(100).ToArray(); theStore.BulkInsert(data.ToArray()); theStore.Tenancy.Default.DbObjects.AllIndexes().Select(x => x.Name) .ShouldContain("mt_doc_target_idx_number"); using (var session = theStore.QuerySession()) { var cmd = session.Query<Target>().Where(x => x.Number == 3) .ToCommand(); session.Query<Target>().Where(x => x.Number == data.First().Number) .Select(x => x.Id).ToList().ShouldContain(data.First().Id); } } [Fact] public void specify_a_deep_index() { // SAMPLE: deep-calculated-index var store = DocumentStore.For(_ => { _.Connection(ConnectionSource.ConnectionString); _.Schema.For<Target>().Index(x => x.Inner.Color); }); // ENDSAMPLE } [Fact] public void specify_a_different_mechanism_to_customize_the_index() { // SAMPLE: customizing-calculated-index var store = DocumentStore.For(_ => { _.Connection(ConnectionSource.ConnectionString); // The second, optional argument to Index() // allows you to customize the calculated index _.Schema.For<Target>().Index(x => x.Number, x => { // Change the index method to 
"brin" x.Method = IndexMethod.brin; // Force the index to be generated with casing rules x.Casing = ComputedIndex.Casings.Lower; // Override the index name if you want x.IndexName = "mt_my_name"; // Toggle whether or not the index is concurrent // Default is false x.IsConcurrent = true; // Toggle whether or not the index is a UNIQUE // index x.IsUnique = true; // Toggle whether index value will be constrained unique in scope of whole document table (Global) // or in a scope of a single tenant (PerTenant) // Default is Global x.TenancyScope = Schema.Indexing.Unique.TenancyScope.PerTenant; // Partial index by supplying a condition x.Where = "(data ->> 'Number')::int > 10"; }); // For B-tree indexes, it's also possible to change // the sort order from the default of "ascending" _.Schema.For<User>().Index(x => x.LastName, x => { // Change the index method to "brin" x.SortOrder = SortOrder.Desc; }); }); // ENDSAMPLE } [Fact] public void specifying_an_index_type_should_create_the_index_with_that_type() { StoreOptions(_ => _.Schema.For<Target>().Index(x => x.Number, x => { x.Method = IndexMethod.brin; })); var data = Target.GenerateRandomData(100).ToArray(); theStore.BulkInsert(data.ToArray()); var ddl = theStore.Tenancy.Default.DbObjects.AllIndexes() .Where(x => x.Name == "mt_doc_target_idx_number") .Select(x => x.DDL.ToLower()) .First(); SpecificationExtensions.ShouldContain(ddl, "mt_doc_target_idx_number on"); SpecificationExtensions.ShouldContain(ddl, "mt_doc_target using brin"); } [Fact] public void create_index_with_sort_order() { StoreOptions(_ => _.Schema.For<Target>().Index(x => x.Number, x => { x.SortOrder = SortOrder.Desc; })); var data = Target.GenerateRandomData(100).ToArray(); theStore.BulkInsert(data.ToArray()); var ddl = theStore.Tenancy.Default.DbObjects.AllIndexes() .Where(x => x.Name == "mt_doc_target_idx_number") .Select(x => x.DDL.ToLower()) .First(); SpecificationExtensions.ShouldContain(ddl, "mt_doc_target_idx_number on"); 
// NOTE(review): this span starts inside a truncated test method whose header is
// outside this view; the first two statements below are its tail (they assert on a
// 'ddl' string produced by that method).
SpecificationExtensions.ShouldContain(ddl, "mt_doc_target");
ddl.ShouldEndWith(" DESC)", Case.Insensitive);
}

// A computed index declared over two properties (UserId + Flag) should produce a
// single index whose DDL contains both JSONB extractions with the expected casts.
[Fact]
public void create_multi_property_index()
{
    StoreOptions(_ =>
    {
        var columns = new Expression<Func<Target, object>>[]
        {
            x => x.UserId,
            x => x.Flag
        };
        _.Schema.For<Target>().Index(columns);
    });

    var data = Target.GenerateRandomData(100).ToArray();
    theStore.BulkInsert(data.ToArray());

    // Index name is derived from the member names: user_id + flag.
    var ddl = theStore.Tenancy.Default.DbObjects.AllIndexes()
        .Single(x => x.Name == "mt_doc_target_idx_user_idflag")
        .DDL
        .ToLower();

    SpecificationExtensions.ShouldContain(ddl, "index mt_doc_target_idx_user_idflag");
    SpecificationExtensions.ShouldContain(ddl, "((((data ->> 'userid'::text))::uuid), (((data ->> 'flag'::text))::boolean))");
}

// A multi-property index with Casing = Upper should wrap each string extraction
// in upper(...) in the generated DDL.
[Fact]
public void create_multi_property_string_index_with_casing()
{
    StoreOptions(_ =>
    {
        var columns = new Expression<Func<Target, object>>[]
        {
            x => x.String,
            x => x.StringField
        };
        _.Schema.For<Target>().Index(columns, c => c.Casing = ComputedIndex.Casings.Upper);
    });

    var data = Target.GenerateRandomData(100).ToArray();
    theStore.BulkInsert(data.ToArray());

    var ddl = theStore.Tenancy.Default.DbObjects.AllIndexes()
        .Single(x => x.Name == "mt_doc_target_idx_stringstring_field")
        .DDL
        .ToLower();

    SpecificationExtensions.ShouldContain(ddl, "index mt_doc_target_idx_stringstring_field");
    SpecificationExtensions.ShouldContain(ddl, "(upper((data ->> 'string'::text)), upper((data ->> 'stringfield'::text)))");
}

// A unique, upper-cased index mixing string and non-string members: upper(...) is
// applied only to the string member, and a second document whose string differs
// only by case must violate the unique constraint.
[Fact]
public void create_multi_property_type_index_with_casing()
{
    StoreOptions(_ =>
    {
        var columns = new Expression<Func<Target, object>>[]
        {
            x => x.String,
            x => x.Long,
            x => x.OtherGuid
        };
        _.Schema.For<Target>().Index(columns, c =>
        {
            c.Casing = ComputedIndex.Casings.Upper;
            c.IsUnique = true;
        });
    });

    var guid = Guid.NewGuid();
    using (var session = theStore.LightweightSession())
    {
        var item = new Target { String = "string value", Long = 123, OtherGuid = guid };
        session.Store(item);
        session.SaveChanges();
    }

    // Unique indexes get the "uidx" prefix rather than "idx".
    var ddl = theStore.Tenancy.Default.DbObjects.AllIndexes()
        .Single(x => x.Name == "mt_doc_target_uidx_stringlongother_guid")
        .DDL
        .ToLower();

    SpecificationExtensions.ShouldContain(ddl, "index mt_doc_target_uidx_stringlongother_guid");
    SpecificationExtensions.ShouldContain(ddl, "(upper((data ->> 'string'::text)), (((data ->> 'long'::text))::bigint), (((data ->> 'otherguid'::text))::uuid))");

    using (var session = theStore.LightweightSession())
    {
        // Same values except for letter case of String -> must collide because the
        // index stores the upper-cased value.
        var item = new Target { String = "String Value", Long = 123, OtherGuid = guid };
        session.Store(item);

        var exception = Assert.Throws<DocumentAlreadyExistsException>(() => session.SaveChanges());
        Assert.Contains("duplicate key value violates unique constraint", exception.ToString());
    }
}

// Regression-style check: indexing a DateTime property should generate a valid index.
[Fact]
public void creating_index_using_date_should_work()
{
    StoreOptions(_ =>
    {
        _.Schema.For<Target>().Index(x => x.Date);
    });

    var data = Target.GenerateRandomData(100).ToArray();
    theStore.BulkInsert(data.ToArray());

    var ddl = theStore.Tenancy.Default.DbObjects.AllIndexes()
        .Where(x => x.Name == "mt_doc_target_idx_date")
        .Select(x => x.DDL.ToLower())
        .First();

    SpecificationExtensions.ShouldContain(ddl, "mt_doc_target_idx_date on");
    SpecificationExtensions.ShouldContain(ddl, "mt_doc_target_idx_date");
}

// With no Casing option a unique string index is case-sensitive: a lower-cased copy
// inserts fine, but the exact original string throws.
[Fact]
public void create_unique_index_on_string_with_mixed_casing()
{
    StoreOptions(_ => _.Schema.For<Target>().Index(x => x.String, x =>
    {
        x.IsUnique = true;
    }));

    var testString = "MiXeD cAsE sTrInG";

    using (var session = theStore.LightweightSession())
    {
        var item = Target.GenerateRandomData(1).First();
        item.String = testString;
        session.Store(item);
        session.SaveChanges();
    }

    theStore.Tenancy.Default.DbObjects.AllIndexes().Select(x => x.Name)
        .ShouldContain("mt_doc_target_uidx_string");

    using (var session = theStore.LightweightSession())
    {
        var item = Target.GenerateRandomData(1).First();
        item.String = testString.ToLower();

        // Inserting the same string but all lowercase should be OK
        session.Store(item);
        session.SaveChanges();

        var item2 = Target.GenerateRandomData(1).First();
        item2.String = testString;

        // Inserting the same original string should throw
        session.Store(item2);
        Exception<DocumentAlreadyExistsException>.ShouldBeThrownBy(() => session.SaveChanges());
    }
}

// A custom IndexName is honored, with the "mt_" prefix prepended by Marten.
[Fact]
public void create_index_with_custom_name()
{
    StoreOptions(_ => _.Schema.For<Target>().Index(x => x.String, x =>
    {
        x.IndexName = "banana_index_created_by_nigel";
    }));

    var testString = "MiXeD cAsE sTrInG";

    using (var session = theStore.LightweightSession())
    {
        var item = Target.GenerateRandomData(1).First();
        item.String = testString;
        session.Store(item);
        session.SaveChanges();
    }

    theStore.Tenancy.Default.DbObjects.AllIndexes().Select(x => x.Name)
        .ShouldContain("mt_banana_index_created_by_nigel");
}

// A Where option becomes a partial-index predicate; Postgres normalizes the
// expression, so the assertion matches the normalized form.
[Fact]
public void create_index_with_where_clause()
{
    StoreOptions(_ => _.Schema.For<Target>().Index(x => x.String, x =>
    {
        x.Where = "(data ->> 'Number')::int > 10";
    }));

    var testString = "MiXeD cAsE sTrInG";

    using (var session = theStore.LightweightSession())
    {
        var item = Target.GenerateRandomData(1).First();
        item.String = testString;
        session.Store(item);
        session.SaveChanges();
    }

    SpecificationExtensions.ShouldContain(theStore.Tenancy.Default.DbObjects.AllIndexes()
        .Where(x => x.Name == "mt_doc_target_idx_string")
        .Select(x => x.DDL), x => x.Contains("WHERE (((data ->> 'Number'::text))::integer > 10)"));
}

// With Casing = Lower the unique index stores lower-cased values, so a copy that
// differs only by upper-casing must violate the constraint.
[Fact]
public void create_unique_index_with_lower_case_constraint()
{
    StoreOptions(_ => _.Schema.For<Target>().Index(x => x.String, x =>
    {
        x.IsUnique = true;
        x.Casing = ComputedIndex.Casings.Lower;
    }));

    var testString = "MiXeD cAsE sTrInG";

    using (var session = theStore.LightweightSession())
    {
        var item = Target.GenerateRandomData(1).First();
        item.String = testString;
        session.Store(item);
        session.SaveChanges();
    }

    theStore.Tenancy.Default.DbObjects.AllIndexes().Select(x => x.Name)
        .ShouldContain("mt_doc_target_uidx_string");

    using (var session = theStore.LightweightSession())
    {
        var item = Target.GenerateRandomData(1).First();
        item.String = testString.ToUpper();

        // Inserting the same string but all uppercase should throw because
        // the index is stored with lowcased value
        session.Store(item);
        Exception<DocumentAlreadyExistsException>.ShouldBeThrownBy(() => session.SaveChanges());
    }
}

// Schema diffing: when the database lacks a configured index, ToPatch() must emit
// the CREATE INDEX statement.
[Fact]
public void patch_if_missing()
{
    using (var store1 = SeparateStore())
    {
        store1.Advanced.Clean.CompletelyRemoveAll();
        store1.Tenancy.Default.EnsureStorageExists(typeof(Target));
    }

    using (var store2 = DocumentStore.For(_ =>
    {
        _.Connection(ConnectionSource.ConnectionString);
        _.Schema.For<Target>().Index(x => x.Number);
    }))
    {
        var patch = store2.Schema.ToPatch();

        SpecificationExtensions.ShouldContain(patch.UpdateDDL, "mt_doc_target_idx_number");
    }
}

// Schema diffing: when the index already exists, ToPatch() must NOT emit it again.
[Fact]
public void no_patch_if_not_missing()
{
    using (var store1 = StoreOptions(_ =>
    {
        _.Schema.For<Target>().Index(x => x.Number);
    }))
    {
        store1.Advanced.Clean.CompletelyRemoveAll();
        store1.Tenancy.Default.EnsureStorageExists(typeof(Target));
    }

    using (var store2 = SeparateStore(_ =>
    {
        _.Connection(ConnectionSource.ConnectionString);
        _.Schema.For<Target>().Index(x => x.Number);
    }))
    {
        var patch = store2.Schema.ToPatch(typeof(Target));

        SpecificationExtensions.ShouldNotContain(patch.UpdateDDL, "mt_doc_target_idx_number");
    }
}

// Runs this fixture against the "acceptance" database schema.
public computed_indexes() : base("acceptance")
{
}
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using NUnit.Framework;
using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using NumericField = Lucene.Net.Documents.NumericField;
using IndexWriter = Lucene.Net.Index.IndexWriter;
using MaxFieldLength = Lucene.Net.Index.IndexWriter.MaxFieldLength;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using NumericUtils = Lucene.Net.Util.NumericUtils;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;

namespace Lucene.Net.Search
{
    /// <summary>
    /// Tests for <c>NumericRangeQuery</c>/<c>NumericRangeFilter</c> over 64-bit (long)
    /// values at several precision steps. A shared RAM index is built once in the
    /// static constructor: "fieldN" fields hold evenly spaced values ("distance"
    /// apart, shifted by "startOffset" so negatives occur) and "ascfieldN" fields
    /// hold consecutive values starting at -noDocs/2.
    /// </summary>
    [TestFixture]
    public class TestNumericRangeQuery64 : LuceneTestCase
    {
        // distance of entries
        private const long distance = 66666L;
        // shift the starting of the values to the left, to also have negative values:
        private const long startOffset = - 1L << 31;
        // number of docs to generate for testing
        private const int noDocs = 10000;

        // Shared fixture state, populated once by the static constructor below.
        private static RAMDirectory directory;
        private static IndexSearcher searcher;

        /// <summary>test for constant score + boolean query + filter, the other tests only use the constant score mode </summary>
        private void TestRange(int precisionStep)
        {
            System.String field = "field" + precisionStep;
            int count = 3000;
            long lower = (distance * 3 / 2) + startOffset, upper = lower + count * distance + (distance / 3);
            // tempAux* locals are leftovers from the Java->C# conversion (boxing shims).
            System.Int64 tempAux = (long) lower;
            System.Int64 tempAux2 = (long) upper;
            NumericRangeQuery<long> q = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux, tempAux2, true, true);
            System.Int64 tempAux3 = (long) lower;
            System.Int64 tempAux4 = (long) upper;
            NumericRangeFilter<long> f = NumericRangeFilter.NewLongRange(field, precisionStep, tempAux3, tempAux4, true, true);
            int lastTerms = 0;
            // Run the same range three ways (filter rewrite, boolean rewrite, raw filter)
            // and require identical hit counts and distinct-term counts.
            for (sbyte i = 0; i < 3; i++)
            {
                TopDocs topDocs;
                int terms;
                System.String type;
                q.ClearTotalNumberOfTerms();
                f.ClearTotalNumberOfTerms();
                switch (i)
                {
                    case 0:
                        type = " (constant score filter rewrite)";
                        q.RewriteMethod = MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE;
                        topDocs = searcher.Search(q, null, noDocs, Sort.INDEXORDER);
                        terms = q.TotalNumberOfTerms;
                        break;

                    case 1:
                        type = " (constant score boolean rewrite)";
                        q.RewriteMethod = MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE;
                        topDocs = searcher.Search(q, null, noDocs, Sort.INDEXORDER);
                        terms = q.TotalNumberOfTerms;
                        break;

                    case 2:
                        type = " (filter)";
                        topDocs = searcher.Search(new MatchAllDocsQuery(), f, noDocs, Sort.INDEXORDER);
                        terms = f.TotalNumberOfTerms;
                        break;

                    default:
                        return ;
                }
                System.Console.Out.WriteLine("Found " + terms + " distinct terms in range for field '" + field + "'" + type + ".");
                ScoreDoc[] sd = topDocs.ScoreDocs;
                Assert.IsNotNull(sd);
                Assert.AreEqual(count, sd.Length, "Score doc count" + type);
                Document doc = searcher.Doc(sd[0].Doc);
                Assert.AreEqual(2 * distance + startOffset, System.Int64.Parse(doc.Get(field)), "First doc" + type);
                doc = searcher.Doc(sd[sd.Length - 1].Doc);
                Assert.AreEqual((1 + count) * distance + startOffset, System.Int64.Parse(doc.Get(field)), "Last doc" + type);
                if (i > 0)
                {
                    Assert.AreEqual(lastTerms, terms, "Distinct term number is equal for all query types");
                }
                lastTerms = terms;
            }
        }

        [Test]
        public virtual void TestRange_8bit()
        {
            TestRange(8);
        }

        [Test]
        public virtual void TestRange_6bit()
        {
            TestRange(6);
        }

        [Test]
        public virtual void TestRange_4bit()
        {
            TestRange(4);
        }

        [Test]
        public virtual void TestRange_2bit()
        {
            TestRange(2);
        }

        /// <summary>
        /// Inverted or degenerate bounds must produce the shared EMPTY_DOCIDSET
        /// instance rather than an empty result allocated per call.
        /// </summary>
        [Test]
        public virtual void TestInverseRange()
        {
            System.Int64 tempAux = 1000L;
            System.Int64 tempAux2 = - 1000L;
            NumericRangeFilter<long> f = NumericRangeFilter.NewLongRange("field8", 8, tempAux, tempAux2, true, true);
            Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, f.GetDocIdSet(searcher.IndexReader), "A inverse range should return the EMPTY_DOCIDSET instance");
            // Exclusive lower bound of long.MaxValue leaves nothing above it.
            System.Int64 tempAux3 = (long) System.Int64.MaxValue;
            f = NumericRangeFilter.NewLongRange("field8", 8, tempAux3, null, false, false);
            Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, f.GetDocIdSet(searcher.IndexReader), "A exclusive range starting with Long.MAX_VALUE should return the EMPTY_DOCIDSET instance");
            // Exclusive upper bound of long.MinValue leaves nothing below it.
            System.Int64 tempAux4 = (long) System.Int64.MinValue;
            f = NumericRangeFilter.NewLongRange("field8", 8, null, tempAux4, false, false);
            Assert.AreSame(DocIdSet.EMPTY_DOCIDSET, f.GetDocIdSet(searcher.IndexReader), "A exclusive range ending with Long.MIN_VALUE should return the EMPTY_DOCIDSET instance");
        }

        /// <summary>
        /// A [1000,1000] range on the ascending field matches exactly one document.
        /// </summary>
        [Test]
        public virtual void TestOneMatchQuery()
        {
            System.Int64 tempAux = 1000L;
            System.Int64 tempAux2 = 1000L;
            NumericRangeQuery<long> q = NumericRangeQuery.NewLongRange("ascfield8", 8, tempAux, tempAux2, true, true);
            Assert.AreSame(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE, q.RewriteMethod);
            TopDocs topDocs = searcher.Search(q, noDocs);
            ScoreDoc[] sd = topDocs.ScoreDocs;
            Assert.IsNotNull(sd);
            Assert.AreEqual(1, sd.Length, "Score doc count");
        }

        /// <summary>Half-open range with no lower bound (null) must match from the first doc.</summary>
        private void TestLeftOpenRange(int precisionStep)
        {
            System.String field = "field" + precisionStep;
            int count = 3000;
            long upper = (count - 1) * distance + (distance / 3) + startOffset;
            System.Int64 tempAux = (long) upper;
            NumericRangeQuery<long> q = NumericRangeQuery.NewLongRange(field, precisionStep, null, tempAux, true, true);
            TopDocs topDocs = searcher.Search(q, null, noDocs, Sort.INDEXORDER);
            System.Console.Out.WriteLine("Found " + q.TotalNumberOfTerms + " distinct terms in left open range for field '" + field + "'.");
            ScoreDoc[] sd = topDocs.ScoreDocs;
            Assert.IsNotNull(sd);
            Assert.AreEqual(count, sd.Length, "Score doc count");
            Document doc = searcher.Doc(sd[0].Doc);
            Assert.AreEqual(startOffset, System.Int64.Parse(doc.Get(field)), "First doc");
            doc = searcher.Doc(sd[sd.Length - 1].Doc);
            Assert.AreEqual((count - 1) * distance + startOffset, System.Int64.Parse(doc.Get(field)), "Last doc");
        }

        [Test]
        public virtual void TestLeftOpenRange_8bit()
        {
            TestLeftOpenRange(8);
        }

        [Test]
        public virtual void TestLeftOpenRange_6bit()
        {
            TestLeftOpenRange(6);
        }

        [Test]
        public virtual void TestLeftOpenRange_4bit()
        {
            TestLeftOpenRange(4);
        }

        [Test]
        public virtual void TestLeftOpenRange_2bit()
        {
            TestLeftOpenRange(2);
        }

        /// <summary>Half-open range with no upper bound (null) must match through the last doc.</summary>
        private void TestRightOpenRange(int precisionStep)
        {
            System.String field = "field" + precisionStep;
            int count = 3000;
            long lower = (count - 1) * distance + (distance / 3) + startOffset;
            System.Int64 tempAux = (long) lower;
            NumericRangeQuery<long> q = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux, null, true, true);
            TopDocs topDocs = searcher.Search(q, null, noDocs, Sort.INDEXORDER);
            System.Console.Out.WriteLine("Found " + q.TotalNumberOfTerms + " distinct terms in right open range for field '" + field + "'.");
            ScoreDoc[] sd = topDocs.ScoreDocs;
            Assert.IsNotNull(sd);
            Assert.AreEqual(noDocs - count, sd.Length, "Score doc count");
            Document doc = searcher.Doc(sd[0].Doc);
            Assert.AreEqual(count * distance + startOffset, System.Int64.Parse(doc.Get(field)), "First doc");
            doc = searcher.Doc(sd[sd.Length - 1].Doc);
            Assert.AreEqual((noDocs - 1) * distance + startOffset, System.Int64.Parse(doc.Get(field)), "Last doc");
        }

        [Test]
        public virtual void TestRightOpenRange_8bit()
        {
            TestRightOpenRange(8);
        }

        [Test]
        public virtual void TestRightOpenRange_6bit()
        {
            TestRightOpenRange(6);
        }

        [Test]
        public virtual void TestRightOpenRange_4bit()
        {
            TestRightOpenRange(4);
        }

        [Test]
        public virtual void TestRightOpenRange_2bit()
        {
            TestRightOpenRange(2);
        }

        /// <summary>
        /// 50 random ranges, each tried with all four inclusive/exclusive bound
        /// combinations: the trie-encoded NumericRangeQuery must return the same hit
        /// count as the equivalent classic TermRangeQuery over prefix-coded terms.
        /// </summary>
        private void TestRandomTrieAndClassicRangeQuery(int precisionStep)
        {
            System.Random rnd = NewRandom();
            System.String field = "field" + precisionStep;
            int termCountT = 0, termCountC = 0;
            for (int i = 0; i < 50; i++)
            {
                long lower = (long) (rnd.NextDouble() * noDocs * distance) + startOffset;
                long upper = (long) (rnd.NextDouble() * noDocs * distance) + startOffset;
                if (lower > upper)
                {
                    long a = lower; lower = upper; upper = a;
                }
                // test inclusive range
                System.Int64 tempAux = (long) lower;
                System.Int64 tempAux2 = (long) upper;
                NumericRangeQuery<long> tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux, tempAux2, true, true);
                TermRangeQuery cq = new TermRangeQuery(field, NumericUtils.LongToPrefixCoded(lower), NumericUtils.LongToPrefixCoded(upper), true, true);
                TopDocs tTopDocs = searcher.Search(tq, 1);
                TopDocs cTopDocs = searcher.Search(cq, 1);
                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
                termCountT += tq.TotalNumberOfTerms;
                termCountC += cq.TotalNumberOfTerms;
                // test exclusive range
                System.Int64 tempAux3 = (long) lower;
                System.Int64 tempAux4 = (long) upper;
                tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux3, tempAux4, false, false);
                cq = new TermRangeQuery(field, NumericUtils.LongToPrefixCoded(lower), NumericUtils.LongToPrefixCoded(upper), false, false);
                tTopDocs = searcher.Search(tq, 1);
                cTopDocs = searcher.Search(cq, 1);
                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
                termCountT += tq.TotalNumberOfTerms;
                termCountC += cq.TotalNumberOfTerms;
                // test left exclusive range
                System.Int64 tempAux5 = (long) lower;
                System.Int64 tempAux6 = (long) upper;
                tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux5, tempAux6, false, true);
                cq = new TermRangeQuery(field, NumericUtils.LongToPrefixCoded(lower), NumericUtils.LongToPrefixCoded(upper), false, true);
                tTopDocs = searcher.Search(tq, 1);
                cTopDocs = searcher.Search(cq, 1);
                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
                termCountT += tq.TotalNumberOfTerms;
                termCountC += cq.TotalNumberOfTerms;
                // test right exclusive range
                System.Int64 tempAux7 = (long) lower;
                System.Int64 tempAux8 = (long) upper;
                tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux7, tempAux8, true, false);
                cq = new TermRangeQuery(field, NumericUtils.LongToPrefixCoded(lower), NumericUtils.LongToPrefixCoded(upper), true, false);
                tTopDocs = searcher.Search(tq, 1);
                cTopDocs = searcher.Search(cq, 1);
                Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal");
                termCountT += tq.TotalNumberOfTerms;
                termCountC += cq.TotalNumberOfTerms;
            }
            if (precisionStep == System.Int32.MaxValue)
            {
                // With an unlimited precision step the trie query degenerates to one
                // term per value, so term counts must match the classic query exactly.
                Assert.AreEqual(termCountT, termCountC, "Total number of terms should be equal for unlimited precStep");
            }
            else
            {
                System.Console.Out.WriteLine("Average number of terms during random search on '" + field + "':");
                System.Console.Out.WriteLine(" Trie query: " + (((double) termCountT) / (50 * 4)));
                System.Console.Out.WriteLine(" Classical query: " + (((double) termCountC) / (50 * 4)));
            }
        }

        [Test]
        public virtual void TestRandomTrieAndClassicRangeQuery_8bit()
        {
            TestRandomTrieAndClassicRangeQuery(8);
        }

        [Test]
        public virtual void TestRandomTrieAndClassicRangeQuery_6bit()
        {
            TestRandomTrieAndClassicRangeQuery(6);
        }

        [Test]
        public virtual void TestRandomTrieAndClassicRangeQuery_4bit()
        {
            TestRandomTrieAndClassicRangeQuery(4);
        }

        [Test]
        public virtual void TestRandomTrieAndClassicRangeQuery_2bit()
        {
            TestRandomTrieAndClassicRangeQuery(2);
        }

        [Test]
        public virtual void TestRandomTrieAndClassicRangeQuery_NoTrie()
        {
            TestRandomTrieAndClassicRangeQuery(System.Int32.MaxValue);
        }

        /// <summary>
        /// On the consecutive-valued ascfield, hit counts can be predicted exactly
        /// from the bounds; verify all four inclusive/exclusive combinations.
        /// </summary>
        private void TestRangeSplit(int precisionStep)
        {
            System.Random rnd = NewRandom();
            System.String field = "ascfield" + precisionStep;
            // 50 random tests
            for (int i = 0; i < 50; i++)
            {
                long lower = (long) (rnd.NextDouble() * noDocs - noDocs / 2);
                long upper = (long) (rnd.NextDouble() * noDocs - noDocs / 2);
                if (lower > upper)
                {
                    long a = lower; lower = upper; upper = a;
                }
                // test inclusive range
                System.Int64 tempAux = (long) lower;
                System.Int64 tempAux2 = (long) upper;
                Query tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux, tempAux2, true, true);
                TopDocs tTopDocs = searcher.Search(tq, 1);
                Assert.AreEqual(upper - lower + 1, tTopDocs.TotalHits, "Returned count of range query must be equal to inclusive range length");
                // test exclusive range
                System.Int64 tempAux3 = (long) lower;
                System.Int64 tempAux4 = (long) upper;
                tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux3, tempAux4, false, false);
                tTopDocs = searcher.Search(tq, 1);
                Assert.AreEqual(System.Math.Max(upper - lower - 1, 0), tTopDocs.TotalHits, "Returned count of range query must be equal to exclusive range length");
                // test left exclusive range
                System.Int64 tempAux5 = (long) lower;
                System.Int64 tempAux6 = (long) upper;
                tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux5, tempAux6, false, true);
                tTopDocs = searcher.Search(tq, 1);
                Assert.AreEqual(upper - lower, tTopDocs.TotalHits, "Returned count of range query must be equal to half exclusive range length");
                // test right exclusive range
                System.Int64 tempAux7 = (long) lower;
                System.Int64 tempAux8 = (long) upper;
                tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux7, tempAux8, true, false);
                tTopDocs = searcher.Search(tq, 1);
                Assert.AreEqual(upper - lower, tTopDocs.TotalHits, "Returned count of range query must be equal to half exclusive range length");
            }
        }

        [Test]
        public virtual void TestRangeSplit_8bit()
        {
            TestRangeSplit(8);
        }

        [Test]
        public virtual void TestRangeSplit_6bit()
        {
            TestRangeSplit(6);
        }

        [Test]
        public virtual void TestRangeSplit_4bit()
        {
            TestRangeSplit(4);
        }

        [Test]
        public virtual void TestRangeSplit_2bit()
        {
            TestRangeSplit(2);
        }

        /// <summary>we fake a double test using long2double conversion of NumericUtils </summary>
        private void TestDoubleRange(int precisionStep)
        {
            System.String field = "ascfield" + precisionStep;
            long lower = - 1000L;
            long upper = + 2000L;
            System.Double tempAux = (double) NumericUtils.SortableLongToDouble(lower);
            System.Double tempAux2 = (double) NumericUtils.SortableLongToDouble(upper);
            Query tq = NumericRangeQuery.NewDoubleRange(field, precisionStep, tempAux, tempAux2, true, true);
            TopDocs tTopDocs = searcher.Search(tq, 1);
            Assert.AreEqual(upper - lower + 1, tTopDocs.TotalHits, "Returned count of range query must be equal to inclusive range length");
            System.Double tempAux3 = (double) NumericUtils.SortableLongToDouble(lower);
            System.Double tempAux4 = (double) NumericUtils.SortableLongToDouble(upper);
            Filter tf = NumericRangeFilter.NewDoubleRange(field, precisionStep, tempAux3, tempAux4, true, true);
            tTopDocs = searcher.Search(new MatchAllDocsQuery(), tf, 1);
            Assert.AreEqual(upper - lower + 1, tTopDocs.TotalHits, "Returned count of range filter must be equal to inclusive range length");
        }

        [Test]
        public virtual void TestDoubleRange_8bit()
        {
            TestDoubleRange(8);
        }

        [Test]
        public virtual void TestDoubleRange_6bit()
        {
            TestDoubleRange(6);
        }

        [Test]
        public virtual void TestDoubleRange_4bit()
        {
            TestDoubleRange(4);
        }

        [Test]
        public virtual void TestDoubleRange_2bit()
        {
            TestDoubleRange(2);
        }

        private void TestSorting(int precisionStep)
        {
            System.Random rnd = NewRandom();
            System.String field = "field" + precisionStep;
            // 10 random tests, the index order is ascending,
            // so using a reverse sort field should retun descending documents
            for (int i = 0; i < 10; i++)
            {
                long lower = (long) (rnd.NextDouble() * noDocs * distance) + startOffset;
                long upper = (long) (rnd.NextDouble() * noDocs * distance) + startOffset;
                if (lower > upper)
                {
                    long a = lower; lower = upper; upper = a;
                }
                System.Int64 tempAux = (long) lower;
                System.Int64 tempAux2 = (long) upper;
                Query tq = NumericRangeQuery.NewLongRange(field, precisionStep, tempAux, tempAux2, true, true);
                TopDocs topDocs = searcher.Search(tq, null, noDocs, new Sort(new SortField(field, SortField.LONG, true)));
                if (topDocs.TotalHits == 0)
                    continue;
                ScoreDoc[] sd = topDocs.ScoreDocs;
                Assert.IsNotNull(sd);
                long last = System.Int64.Parse(searcher.Doc(sd[0].Doc).Get(field));
                for (int j = 1; j < sd.Length; j++)
                {
                    long act = System.Int64.Parse(searcher.Doc(sd[j].Doc).Get(field));
                    Assert.IsTrue(last > act, "Docs should be sorted backwards");
                    last = act;
                }
            }
        }

        [Test]
        public virtual void TestSorting_8bit()
        {
            TestSorting(8);
        }

        [Test]
        public virtual void TestSorting_6bit()
        {
            TestSorting(6);
        }

        [Test]
        public virtual void TestSorting_4bit()
        {
            TestSorting(4);
        }

        [Test]
        public virtual void TestSorting_2bit()
        {
            TestSorting(2);
        }

        /// <summary>
        /// Equality/hash contract: queries differing only in field name, precision
        /// step, bounds, or inclusivity must compare unequal; identical ones equal.
        /// </summary>
        [Test]
        public virtual void TestEqualsAndHash()
        {
            QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test1", 4, 10L, 20L, true, true));
            QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test2", 4, 10L, 20L, false, true));
            QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test3", 4, 10L, 20L, true, false));
            // NOTE(review): tempAux7 is an unused leftover from the Java->C# conversion.
            System.Int64 tempAux7 = 10L;
            QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test4", 4, 10L, 20L, false, false));
            QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test5", 4, 10L, null, true, true));
            QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test6", 4, null, 20L, true, true));
            QueryUtils.CheckHashEquals(NumericRangeQuery.NewLongRange("test7", 4, null, null, true, true));
            QueryUtils.CheckEqual(NumericRangeQuery.NewLongRange("test8", 4, 10L, 20L, true, true), NumericRangeQuery.NewLongRange("test8", 4, 10L, 20L, true, true));
            QueryUtils.CheckUnequal(NumericRangeQuery.NewLongRange("test9", 4, 10L, 20L, true, true), NumericRangeQuery.NewLongRange("test9", 8, 10L, 20L, true, true));
            QueryUtils.CheckUnequal(NumericRangeQuery.NewLongRange("test10a", 4, 10L, 20L, true, true), NumericRangeQuery.NewLongRange("test10b", 4, 10L, 20L, true, true));
            QueryUtils.CheckUnequal(NumericRangeQuery.NewLongRange("test11", 4, 10L, 20L, true, true), NumericRangeQuery.NewLongRange("test11", 4, 20L, 10L, true, true));
            QueryUtils.CheckUnequal(NumericRangeQuery.NewLongRange("test12", 4, 10L, 20L, true, true), NumericRangeQuery.NewLongRange("test12", 4, 10L, 20L, false, true));
            QueryUtils.CheckUnequal(NumericRangeQuery.NewLongRange("test13", 4, 10L, 20L, true, true), NumericRangeQuery.NewFloatRange("test13", 4, 10f, 20f, true, true));
            // difference to int range is tested in TestNumericRangeQuery32
        }

        /// <summary>
        /// Builds the shared RAM index used by every test in this fixture. Any
        /// failure is rethrown so the whole fixture fails fast.
        /// </summary>
        static TestNumericRangeQuery64()
        {
            {
                try
                {
                    // set the theoretical maximum term count for 8bit (see docs for the number)
                    BooleanQuery.MaxClauseCount = 7 * 255 * 2 + 255;

                    directory = new RAMDirectory();
                    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, MaxFieldLength.UNLIMITED);

                    NumericField field8 = new NumericField("field8", 8, Field.Store.YES, true),
                        field6 = new NumericField("field6", 6, Field.Store.YES, true),
                        field4 = new NumericField("field4", 4, Field.Store.YES, true),
                        field2 = new NumericField("field2", 2, Field.Store.YES, true),
                        fieldNoTrie = new NumericField("field" + System.Int32.MaxValue, System.Int32.MaxValue, Field.Store.YES, true),
                        ascfield8 = new NumericField("ascfield8", 8, Field.Store.NO, true),
                        ascfield6 = new NumericField("ascfield6", 6, Field.Store.NO, true),
                        ascfield4 = new NumericField("ascfield4", 4, Field.Store.NO, true),
                        ascfield2 = new NumericField("ascfield2", 2, Field.Store.NO, true);

                    Document doc = new Document();
                    // add fields, that have a distance to test general functionality
                    doc.Add(field8); doc.Add(field6); doc.Add(field4); doc.Add(field2); doc.Add(fieldNoTrie);
                    // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive
                    doc.Add(ascfield8); doc.Add(ascfield6); doc.Add(ascfield4); doc.Add(ascfield2);

                    // Add a series of noDocs docs with increasing long values, by updating the fields
                    for (int l = 0; l < noDocs; l++)
                    {
                        long val = distance * l + startOffset;
                        field8.SetLongValue(val);
                        field6.SetLongValue(val);
                        field4.SetLongValue(val);
                        field2.SetLongValue(val);
                        fieldNoTrie.SetLongValue(val);

                        val = l - (noDocs / 2);
                        ascfield8.SetLongValue(val);
                        ascfield6.SetLongValue(val);
                        ascfield4.SetLongValue(val);
                        ascfield2.SetLongValue(val);
                        writer.AddDocument(doc);
                    }

                    writer.Optimize();
                    writer.Close();
                    searcher = new IndexSearcher(directory, true);
                }
                catch (System.Exception e)
                {
                    throw new System.SystemException("", e);
                }
            }
        }
    }
}
#region Apache License // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to you under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #endregion // .NET Compact Framework 1.0 has no support for WindowsIdentity #if !NETCF // MONO 1.0 has no support for Win32 Logon APIs #if !MONO // SSCLI 1.0 has no support for Win32 Logon APIs #if !SSCLI // We don't want framework or platform specific code in the CLI version of log4net #if !CLI_1_0 using System; using System.Runtime.InteropServices; using System.Security.Principal; using System.Security.Permissions; using log4net.Core; namespace log4net.Util { /// <summary> /// Impersonate a Windows Account /// </summary> /// <remarks> /// <para> /// This <see cref="SecurityContext"/> impersonates a Windows account. /// </para> /// <para> /// How the impersonation is done depends on the value of <see cref="Impersonate"/>. /// This allows the context to either impersonate a set of user credentials specified /// using username, domain name and password or to revert to the process credentials. 
/// </para> /// </remarks> public class WindowsSecurityContext : SecurityContext, IOptionHandler { /// <summary> /// The impersonation modes for the <see cref="WindowsSecurityContext"/> /// </summary> /// <remarks> /// <para> /// See the <see cref="WindowsSecurityContext.Credentials"/> property for /// details. /// </para> /// </remarks> public enum ImpersonationMode { /// <summary> /// Impersonate a user using the credentials supplied /// </summary> User, /// <summary> /// Revert this the thread to the credentials of the process /// </summary> Process } #region Member Variables private ImpersonationMode m_impersonationMode = ImpersonationMode.User; private string m_userName; private string m_domainName = Environment.MachineName; private string m_password; private WindowsIdentity m_identity; #endregion #region Constructor /// <summary> /// Default constructor /// </summary> /// <remarks> /// <para> /// Default constructor /// </para> /// </remarks> public WindowsSecurityContext() { } #endregion #region Public Properties /// <summary> /// Gets or sets the impersonation mode for this security context /// </summary> /// <value> /// The impersonation mode for this security context /// </value> /// <remarks> /// <para> /// Impersonate either a user with user credentials or /// revert this thread to the credentials of the process. /// The value is one of the <see cref="ImpersonationMode"/> /// enum. /// </para> /// <para> /// The default value is <see cref="ImpersonationMode.User"/> /// </para> /// <para> /// When the mode is set to <see cref="ImpersonationMode.User"/> /// the user's credentials are established using the /// <see cref="UserName"/>, <see cref="DomainName"/> and <see cref="Password"/> /// values. /// </para> /// <para> /// When the mode is set to <see cref="ImpersonationMode.Process"/> /// no other properties need to be set. If the calling thread is /// impersonating then it will be reverted back to the process credentials. 
/// </para> /// </remarks> public ImpersonationMode Credentials { get { return m_impersonationMode; } set { m_impersonationMode = value; } } /// <summary> /// Gets or sets the Windows username for this security context /// </summary> /// <value> /// The Windows username for this security context /// </value> /// <remarks> /// <para> /// This property must be set if <see cref="Credentials"/> /// is set to <see cref="ImpersonationMode.User"/> (the default setting). /// </para> /// </remarks> public string UserName { get { return m_userName; } set { m_userName = value; } } /// <summary> /// Gets or sets the Windows domain name for this security context /// </summary> /// <value> /// The Windows domain name for this security context /// </value> /// <remarks> /// <para> /// The default value for <see cref="DomainName"/> is the local machine name /// taken from the <see cref="Environment.MachineName"/> property. /// </para> /// <para> /// This property must be set if <see cref="Credentials"/> /// is set to <see cref="ImpersonationMode.User"/> (the default setting). /// </para> /// </remarks> public string DomainName { get { return m_domainName; } set { m_domainName = value; } } /// <summary> /// Sets the password for the Windows account specified by the <see cref="UserName"/> and <see cref="DomainName"/> properties. /// </summary> /// <value> /// The password for the Windows account specified by the <see cref="UserName"/> and <see cref="DomainName"/> properties. /// </value> /// <remarks> /// <para> /// This property must be set if <see cref="Credentials"/> /// is set to <see cref="ImpersonationMode.User"/> (the default setting). /// </para> /// </remarks> public string Password { set { m_password = value; } } #endregion #region IOptionHandler Members /// <summary> /// Initialize the SecurityContext based on the options set. /// </summary> /// <remarks> /// <para> /// This is part of the <see cref="IOptionHandler"/> delayed object /// activation scheme. 
The <see cref="ActivateOptions"/> method must /// be called on this object after the configuration properties have /// been set. Until <see cref="ActivateOptions"/> is called this /// object is in an undefined state and must not be used. /// </para> /// <para> /// If any of the configuration properties are modified then /// <see cref="ActivateOptions"/> must be called again. /// </para> /// <para> /// The security context will try to Logon the specified user account and /// capture a primary token for impersonation. /// </para> /// </remarks> /// <exception cref="ArgumentNullException">The required <see cref="UserName" />, /// <see cref="DomainName" /> or <see cref="Password" /> properties were not specified.</exception> public void ActivateOptions() { if (m_impersonationMode == ImpersonationMode.User) { if (m_userName == null) throw new ArgumentNullException("m_userName"); if (m_domainName == null) throw new ArgumentNullException("m_domainName"); if (m_password == null) throw new ArgumentNullException("m_password"); m_identity = LogonUser(m_userName, m_domainName, m_password); } } #endregion /// <summary> /// Impersonate the Windows account specified by the <see cref="UserName"/> and <see cref="DomainName"/> properties. /// </summary> /// <param name="state">caller provided state</param> /// <returns> /// An <see cref="IDisposable"/> instance that will revoke the impersonation of this SecurityContext /// </returns> /// <remarks> /// <para> /// Depending on the <see cref="Credentials"/> property either /// impersonate a user using credentials supplied or revert /// to the process credentials. 
/// </para> /// </remarks> public override IDisposable Impersonate(object state) { if (m_impersonationMode == ImpersonationMode.User) { if (m_identity != null) { return new DisposableImpersonationContext(m_identity.Impersonate()); } } else if (m_impersonationMode == ImpersonationMode.Process) { // Impersonate(0) will revert to the process credentials return new DisposableImpersonationContext(WindowsIdentity.Impersonate(IntPtr.Zero)); } return null; } /// <summary> /// Create a <see cref="WindowsIdentity"/> given the userName, domainName and password. /// </summary> /// <param name="userName">the user name</param> /// <param name="domainName">the domain name</param> /// <param name="password">the password</param> /// <returns>the <see cref="WindowsIdentity"/> for the account specified</returns> /// <remarks> /// <para> /// Uses the Windows API call LogonUser to get a principal token for the account. This /// token is used to initialize the WindowsIdentity. /// </para> /// </remarks> #if NET_4_0 || MONO_4_0 [System.Security.SecuritySafeCritical] #endif [System.Security.Permissions.SecurityPermission(System.Security.Permissions.SecurityAction.Demand, UnmanagedCode = true)] private static WindowsIdentity LogonUser(string userName, string domainName, string password) { const int LOGON32_PROVIDER_DEFAULT = 0; //This parameter causes LogonUser to create a primary token. const int LOGON32_LOGON_INTERACTIVE = 2; // Call LogonUser to obtain a handle to an access token. IntPtr tokenHandle = IntPtr.Zero; if(!LogonUser(userName, domainName, password, LOGON32_LOGON_INTERACTIVE, LOGON32_PROVIDER_DEFAULT, ref tokenHandle)) { NativeError error = NativeError.GetLastError(); throw new Exception("Failed to LogonUser ["+userName+"] in Domain ["+domainName+"]. 
Error: "+ error.ToString()); } const int SecurityImpersonation = 2; IntPtr dupeTokenHandle = IntPtr.Zero; if(!DuplicateToken(tokenHandle, SecurityImpersonation, ref dupeTokenHandle)) { NativeError error = NativeError.GetLastError(); if (tokenHandle != IntPtr.Zero) { CloseHandle(tokenHandle); } throw new Exception("Failed to DuplicateToken after LogonUser. Error: " + error.ToString()); } WindowsIdentity identity = new WindowsIdentity(dupeTokenHandle); // Free the tokens. if (dupeTokenHandle != IntPtr.Zero) { CloseHandle(dupeTokenHandle); } if (tokenHandle != IntPtr.Zero) { CloseHandle(tokenHandle); } return identity; } #region Native Method Stubs [DllImport("advapi32.dll", SetLastError=true)] private static extern bool LogonUser(String lpszUsername, String lpszDomain, String lpszPassword, int dwLogonType, int dwLogonProvider, ref IntPtr phToken); [DllImport("kernel32.dll", CharSet=CharSet.Auto)] private extern static bool CloseHandle(IntPtr handle); [DllImport("advapi32.dll", CharSet=CharSet.Auto, SetLastError=true)] private extern static bool DuplicateToken(IntPtr ExistingTokenHandle, int SECURITY_IMPERSONATION_LEVEL, ref IntPtr DuplicateTokenHandle); #endregion #region DisposableImpersonationContext class /// <summary> /// Adds <see cref="IDisposable"/> to <see cref="WindowsImpersonationContext"/> /// </summary> /// <remarks> /// <para> /// Helper class to expose the <see cref="WindowsImpersonationContext"/> /// through the <see cref="IDisposable"/> interface. 
/// </para> /// </remarks> private sealed class DisposableImpersonationContext : IDisposable { private readonly WindowsImpersonationContext m_impersonationContext; /// <summary> /// Constructor /// </summary> /// <param name="impersonationContext">the impersonation context being wrapped</param> /// <remarks> /// <para> /// Constructor /// </para> /// </remarks> public DisposableImpersonationContext(WindowsImpersonationContext impersonationContext) { m_impersonationContext = impersonationContext; } /// <summary> /// Revert the impersonation /// </summary> /// <remarks> /// <para> /// Revert the impersonation /// </para> /// </remarks> public void Dispose() { m_impersonationContext.Undo(); } } #endregion } } #endif // !CLI_1_0 #endif // !SSCLI #endif // !MONO #endif // !NETCF
using UnityEngine;
using System.Collections.Generic;
using System.IO;
using System.Xml;
using System.Reflection;
using System.Text;
using System.Threading;
using Ecosim.SceneData;
using Ecosim.SceneData.Action;
using Ecosim.SceneData.AnimalPopulationModel;

namespace Ecosim.SceneData
{
	/// <summary>
	/// Per-year aggregation of named inventarisation values, tracking the
	/// overall minimum and maximum across all values.
	/// </summary>
	public class InventarisationsData
	{
		/// <summary>
		/// Named float values recorded for one year.
		/// </summary>
		public class YearData
		{
			public readonly int year;
			public float lowestValue = Mathf.Infinity;
			public float highestValue = -Mathf.Infinity;

			private Dictionary<string, float> values;

			public YearData (int year)
			{
				this.year = year;
				this.values = new Dictionary<string, float> ();
			}

			/// <summary>
			/// Records a value, updating the min/max bounds. If the name already
			/// exists the highest of old and new value is kept.
			/// </summary>
			public void AddValue (string name, float value)
			{
				// Check the lowest and highest values
				if (value < lowestValue) {
					lowestValue = value;
				}
				if (value > highestValue) {
					highestValue = value;
				}

				// Choose the highest value if it already exists
				if (values.ContainsKey (name)) {
					float newValue = Mathf.Max (value, values [name]);
					values [name] = newValue;
					return;
				}
				values.Add (name, value);
			}

			/// <summary>
			/// Gets the value. Returns true/false whether the name exists, and if so it sets the out value.
			/// </summary>
			public bool GetValue (string name, out float value)
			{
				if (values.ContainsKey (name)) {
					value = values [name];
					return true;
				}
				value = 0f;
				return false;
			}

			public override string ToString ()
			{
				System.Text.StringBuilder sb = new System.Text.StringBuilder ();
				sb.AppendFormat ("Year {0}\n", year);
				foreach (KeyValuePair<string, float> pair in values) {
					sb.AppendFormat ("{0}:{1}\n", pair.Key, pair.Value);
				}
				sb.AppendFormat ("Min:{0}, Max:{1}\n", lowestValue, highestValue);
				return string.Format ("[YearData] {0}", sb.ToString ());
			}
		}

		protected List<string> values;
		protected List<YearData> years;

		public InventarisationsData ()
		{
			this.values = new List<string> ();
			this.years = new List<YearData> ();
		}

		public IEnumerable<string> EnumerateValues ()
		{
			foreach (string s in this.values) {
				yield return s;
			}
		}

		public IEnumerable<YearData> EnumerateYears ()
		{
			foreach (YearData y in this.years) {
				yield return y;
			}
		}

		public YearData GetYear (int year)
		{
			return GetYear (year, false);
		}

		/// <summary>
		/// Finds the YearData for the given year, optionally creating (and storing) it when absent.
		/// Returns null when absent and <paramref name="createNewIfNull"/> is false.
		/// </summary>
		public YearData GetYear (int year, bool createNewIfNull)
		{
			foreach (YearData y in years) {
				if (y.year == year)
					return y;
			}
			if (createNewIfNull) {
				YearData y = new YearData (year);
				this.years.Add (y);
				return y;
			}
			return null;
		}

		public int GetYearsCount ()
		{
			return this.years.Count;
		}

		/// <summary>Lowest value seen across all years (Infinity when empty).</summary>
		public float GetLowestValue ()
		{
			float value = Mathf.Infinity;
			foreach (YearData y in this.years) {
				if (y.lowestValue < value) {
					value = y.lowestValue;
				}
			}
			return value;
		}

		/// <summary>Highest value seen across all years (-Infinity when empty).</summary>
		public float GetHighestValue ()
		{
			float value = -Mathf.Infinity;
			foreach (YearData y in this.years) {
				if (y.highestValue > value) {
					value = y.highestValue;
				}
			}
			return value;
		}

		/// <summary>Registers a value name (kept unique and sorted).</summary>
		public void AddValue (string name)
		{
			if (!this.values.Contains (name)) {
				this.values.Add (name);
				this.values.Sort ();
			}
		}

		public int GetValuesCount ()
		{
			return this.values.Count;
		}

		public override string ToString ()
		{
			string yearText = "";
			foreach (YearData y in this.years) {
				yearText += y.ToString () + "\n";
			}
			return string.Format ("[BaseData]\n{0}", yearText);
		}
	}

	/// <summary>
	/// Tabular export model: named columns plus per-year, per-coordinate string values.
	/// </summary>
	public class ExportData
	{
		public class YearData
		{
			/// <summary>
			/// String values keyed by column name for one coordinate.
			/// </summary>
			public class CoordinateData
			{
				public readonly Coordinate coord;

				private Dictionary<string, string> values;

				public CoordinateData (Coordinate coord)
				{
					this.coord = coord;
					this.values = new Dictionary<string, string> ();
				}

				/// <summary>Gets a column value ("" when unset); setting creates the key on demand.</summary>
				public string this [string key] {
					get {
						if (values.ContainsKey (key)) {
							return values [key];
						}
						return "";
					}
					set {
						if (!values.ContainsKey (key)) {
							values.Add (key, "");
						}
						values [key] = value;
					}
				}

				public IEnumerable<string> EnumerateKeys ()
				{
					foreach (KeyValuePair<string, string> pair in this.values) {
						yield return pair.Key;
					}
				}
			}

			public readonly int year;
			public List<CoordinateData> coords;

			public YearData (int year)
			{
				this.year = year;
				this.coords = new List<CoordinateData> ();
			}

			/// <summary>Returns the existing CoordinateData for coord, or creates and stores a new one.</summary>
			public CoordinateData NewCoord (Coordinate coord)
			{
				if (this [coord] == null) {
					CoordinateData cd = new CoordinateData (coord);
					this.coords.Add (cd);
					return cd;
				} else
					return this [coord];
			}

			/// <summary>Linear lookup by (x, y); null when the coordinate is unknown.</summary>
			public CoordinateData this [Coordinate coord] {
				get {
					foreach (CoordinateData cd in this.coords) {
						if (cd.coord.x == coord.x && cd.coord.y == coord.y) {
							return cd;
						}
					}
					return null;
				}
			}

			public IEnumerable<CoordinateData> EnumerateCoords ()
			{
				foreach (CoordinateData cd in this.coords) {
					yield return cd;
				}
			}
		}

		public List<string> columns;
		public List<string> defaultColumns;
		public List<YearData> years;

		public ExportData ()
		{
			this.columns = new List<string> ();
			this.defaultColumns = new List<string> ();
			this.years = new List<YearData> ();
		}

		/// <summary>Returns the existing YearData for year, or creates and stores a new one.</summary>
		public YearData NewYear (int year)
		{
			if (this [year] == null) {
				YearData ny = new YearData (year);
				this.years.Add (ny);
				return ny;
			} else
				return this [year];
		}

		public YearData this [int year] {
			get {
				foreach (YearData y in this.years) {
					if (y.year == year) {
						return y;
					}
				}
				return null;
			}
		}

		public void SortYears ()
		{
			this.years.Sort (
				delegate (YearData x, YearData y) {
				if (x.year > y.year)
					return 1;
				if (x.year < y.year)
					return -1;
				return 0;
			});
		}

		public IEnumerable<string> EnumerateColumns ()
		{
			foreach (string c in this.columns) {
				yield return c;
			}
		}

		/// <summary>Enumerates columns, optionally skipping those registered as default.</summary>
		public IEnumerable<string> EnumerateColumns (bool skipDefault)
		{
			foreach (string c in this.columns) {
				if (skipDefault) {
					if (!this.defaultColumns.Contains (c))
						yield return c;
				} else
					yield return c;
			}
		}

		public IEnumerable<YearData> EnumerateYears ()
		{
			foreach (YearData y in this.years) {
				yield return y;
			}
		}

		public void AddColumn (string column)
		{
			if (!this.columns.Contains (column)) {
				this.columns.Add (column);
			}
		}

		public bool HasColumn (string column)
		{
			return this.columns.Contains (column);
		}

		/// <summary>Snapshots the current column set as the "default" columns.</summary>
		public void RegisterCurrentColumnsAsDefault ()
		{
			this.defaultColumns = new List<string> (this.columns);
		}

		/// <summary>
		/// Serializes the data to semicolon-delimited CSV. The first three columns
		/// are assumed to be year, x and y (skipped when emitting row values).
		/// </summary>
		public string ToCSV ()
		{
			string delimiter = ";";
			StringBuilder sb = new StringBuilder ();

			// Add all columns
			foreach (string c in this.EnumerateColumns ()) {
				sb.Append (c);
				sb.Append (delimiter);
			}
			sb.AppendLine ();

			// Loop through all years
			foreach (ExportData.YearData y in EnumerateYears ()) {
				foreach (ExportData.YearData.CoordinateData c in y.EnumerateCoords ()) {
					// Add year
					sb.Append (y.year);
					sb.Append (delimiter);

					// Add x,y
					sb.Append (c.coord.x);
					sb.Append (delimiter);
					sb.Append (c.coord.y);
					sb.Append (delimiter);

					// Loop through all columns
					int si = 0;
					foreach (string s in this.EnumerateColumns ()) {
						// We need to skip the year, x and y (0,1,2)
						if (si++ <= 2)
							continue;

						// Append row value
						sb.Append (c [s]);
						sb.Append (delimiter);
					}
					sb.AppendLine ();
				}
			}
			return sb.ToString ();
		}
	}

	/// <summary>Input for an export run: source area, selected years and data names.</summary>
	public class ExportSettings
	{
		public Data area;
		public List<string> years;
		public List<string> dataNames;
		public ExportData exportData;

		public ExportSettings (Data area, List<string> years, List<string> dataNames)
		{
			this.area = area;
			this.years = years;
			this.dataNames = dataNames;
		}
	}

	/// <summary>Pairs an ExportSettings with one specific year to process.</summary>
	public class YearExportSettings
	{
		public ExportSettings settings;
		public int year;

		public YearExportSettings (ExportSettings settings, int year)
		{
			this.settings = settings;
			this.year = year;
		}
	}

	/// <summary>
	/// Manages CSV export of scene data (inventarisations, research points,
	/// measures, parameters, plants, animals) and its persisted settings.
	/// </summary>
	public class ExportMgr
	{
		public enum SelectionTypes
		{
			All,
			Selection
		}

		public enum DataTypes
		{
			Always,
			OnlyWhenSurveyed
		}

		public enum CostTypes
		{
			None,
			OnePrice,
			PricePerYear
		}

		public enum GraphCostTypes
		{
			None,
			OnePrice
		}

		// Number of coordinates handled per frame before yielding, to keep the UI responsive.
		public const int COORDS_PER_FRAME = 100;

		public static ExportMgr self { get; private set; }

		private readonly Scene scene;

		public bool exportEnabled;
		public SelectionTypes selectionType;
		public DataTypes dataType;
		public bool exportVegetationTypes;
		public bool exportSuccessionTypes;
		public CostTypes costType;
		public int costs;
		public bool graphExportEnabled;
		public GraphCostTypes graphCostType;
		public int graphCosts;

		public List<int> targetAreas;
		public List<string> parameters;
		public List<string> animals;
		public List<string> plants;

		public ExportData currentExportData;

		private volatile int activeThreads = 0;

		public ExportMgr (Scene scene)
		{
			self = this;
			this.scene = scene;
			this.targetAreas = new List<int> ();
			this.parameters = new List<string> ();
			this.animals = new List<string> ();
			this.plants = new List<string> ();
		}

		/// <summary>
		/// Collects all selected inventarisation results into an InventarisationsData,
		/// one summed value per name per year.
		/// </summary>
		public InventarisationsData GetInventarisationsData ()
		{
			InventarisationsData id = new InventarisationsData ();
			foreach (Progression.InventarisationResult ir in scene.progression.inventarisations) {
				if (ir.selected) {
					id.AddValue (ir.name);
					id.GetYear (ir.year, true).AddValue (ir.name, (float)ir.DataMap.GetSum ());
				}
			}
			return id;
		}

		/// <summary>
		/// Coroutine that builds <see cref="currentExportData"/> from the scene:
		/// default columns, per-year coordinates, inventarisations, research points,
		/// taken measures, and finally per-year data via <see cref="ProcessYearData"/>.
		/// Yields regularly so Unity stays responsive.
		/// </summary>
		private IEnumerator<object> RetrieveExportData (ExportSettings settings)
		{
			// We'll use an abbreviation for less code
			ExportData ed = new ExportData ();
			settings.exportData = ed;

			// Temp vars
			ExportData.YearData year;
			ExportData.YearData.CoordinateData coordData;

			/** Default Columns **/

			// Add default columns
			ed.AddColumn ("year");
			ed.AddColumn ("x");
			ed.AddColumn ("y");

			// Add target areas
			for (int i = 1; i < scene.progression.targetAreas + 1; i++) {
				if (ShouldExportTargetArea (i)) {
					ed.AddColumn ("targetarea " + i);
				}
			}

			// Add Vegetation type
			if (exportVegetationTypes) {
				ed.AddColumn ("vegetation");
			}
			// Add Succession type
			if (exportSuccessionTypes) {
				ed.AddColumn ("succession");
			}
			ed.RegisterCurrentColumnsAsDefault ();

			yield return new WaitForEndOfFrame ();

			// Add all data names
			foreach (string s in settings.dataNames) {
				ed.AddColumn (s);
			}

			yield return new WaitForEndOfFrame ();

			// Loop through all tiles of the managed area and add them to all tiles of every (past) year
			for (int i = 0; i < (scene.progression.year - scene.progression.startYear); i++) {
				// New/get year
				int y = scene.progression.startYear + i;
				if (settings.years.Contains (y.ToString ())) {
					year = ed.NewYear (y);

					// Coords per frame
					int totalCoordsProcessed = 0;
					foreach (ValueCoordinate vc in settings.area.EnumerateNotZero ()) {
						// New/get coord
						coordData = year.NewCoord (vc);

						// Check if we should wait a frame
						totalCoordsProcessed++;
						if (totalCoordsProcessed > COORDS_PER_FRAME) {
							totalCoordsProcessed = 0;
							yield return new WaitForEndOfFrame ();
						}
					}
				}
				yield return new WaitForEndOfFrame ();
			}

			#pragma warning disable 162

			// Setup threads
			activeThreads = 0;

			/** Inventarisations **/
			foreach (Progression.InventarisationResult ir in scene.progression.inventarisations) {
				// Check for name
				if (!settings.dataNames.Contains (ir.name))
					continue;

				// Setup years and coords
				year = ed [ir.year];
				if (year == null)
					continue;

				// Coords per frame
				int totalCoordsProcessed = 0;
				foreach (ValueCoordinate vc in ir.AreaMap.EnumerateNotZero ()) {
					coordData = year [(Coordinate)vc];
					if (coordData == null)
						continue;
					coordData [ir.name] = ir.DataMap.Get (vc).ToString ();

					// Costs
					BasicAction action = scene.actions.GetAction (ir.actionId);
					if (action != null) {
						// Update costs
						int prevCosts = 0;
						int.TryParse (coordData ["costs"], out prevCosts);
						coordData ["costs"] = (prevCosts + (int)action.uiList [0].cost).ToString ();
					}

					// Check if we should wait a frame
					totalCoordsProcessed++;
					if (totalCoordsProcessed > COORDS_PER_FRAME) {
						totalCoordsProcessed = 0;
						yield return new WaitForEndOfFrame ();
					}
				}
				yield return new WaitForEndOfFrame ();
			}

			/** Research points **/
			foreach (ResearchPoint r in scene.progression.researchPoints) {
				foreach (ResearchPoint.Measurement rm in r.measurements) {
					// Setup years and coords
					year = ed [rm.year];
					if (year == null)
						continue;
					coordData = year [new Coordinate (r.x, r.y)];
					if (coordData == null)
						continue;

					// Setup name
					if (settings.dataNames.Contains (rm.name)) {
						coordData [rm.name] = "1";
					}

					// Setup columns and values
					foreach (KeyValuePair<string, string> p in rm.data.values) {
						// Check for name
						if (!settings.dataNames.Contains (p.Key))
							continue;
						coordData [p.Key] = p.Value;
					}

					// Costs
					BasicAction action = scene.actions.GetAction (rm.actionId);
					if (action != null) {
						// Update costs
						int prevCosts = 0;
						int.TryParse (coordData ["costs"], out prevCosts);
						coordData ["costs"] = (prevCosts + (int)action.uiList [0].cost).ToString ();
					}

					yield return new WaitForEndOfFrame ();
				}
				yield return new WaitForEndOfFrame ();
			}

			/** Measures (actions) **/
			foreach (Progression.ActionTaken ta in scene.progression.actionsTaken) {
				BasicAction a = scene.actions.GetAction (ta.id);
				if (a != null && ta.years.Count > 0) {
					// Setup column
					string key = a.GetDescription ();

					// Check for name
					if (!settings.dataNames.Contains (key))
						continue;

					// Loop through all years
					foreach (int y in ta.years) {
						// Get the affected area
						Data area = scene.progression.GetData (a.affectedAreaName, y);
						if (area != null) {
							// Setup years and coords
							year = ed [y];
							if (year == null)
								continue;

							foreach (ValueCoordinate vc in area.EnumerateNotZero ()) {
								coordData = year [vc];
								if (coordData == null)
									continue;
								coordData [key] = "1";

								// Update costs
								int prevCosts = 0;
								int.TryParse (coordData ["costs"], out prevCosts);
								coordData ["costs"] = (prevCosts + (int)a.uiList [0].cost).ToString ();
							}
						}
						yield return new WaitForEndOfFrame ();
					}
				}
				yield return new WaitForEndOfFrame ();
			}

			// Loop through all coordinates
			activeThreads += ed.years.Count;
			foreach (ExportData.YearData y in ed.EnumerateYears ()) {
				// Start thread(s)
				//ThreadPool.QueueUserWorkItem (ProcessYearData, new YearExportSettings (settings, y.year));
				yield return GameControl.self.StartCoroutine (
					ProcessYearData (new YearExportSettings (settings, y.year)));
				yield return new WaitForSeconds (0.1f);
			}

			// Loop through all columns and convert the values
			foreach (string c in ed.EnumerateColumns (true)) {
				// TODO Check if we should convert it?
				foreach (ExportData.YearData y in ed.EnumerateYears ()) {
					// Coords per frame
					// FIX: the counter must live outside the coordinate loop; it was
					// previously re-declared (and reset to 0) every iteration, so the
					// frame-yield below could never trigger.
					int totalCoordsProcessed = 0;
					foreach (ExportData.YearData.CoordinateData cd in y.EnumerateCoords ()) {
						string val = cd [c];
						int intVal = 0;
						if (!string.IsNullOrEmpty (val) && int.TryParse (val, out intVal)) {
							cd [c] = scene.progression.ConvertToFloat (c, intVal).ToString ();
						}

						// Check if we should wait a frame
						totalCoordsProcessed++;
						if (totalCoordsProcessed > COORDS_PER_FRAME) {
							totalCoordsProcessed = 0;
							yield return new WaitForEndOfFrame ();
						}
					}
				}
			}

			// Cost
			ed.AddColumn ("costs");

			while (activeThreads > 0) {
				yield return 0;
			}
			#pragma warning restore 162

			// Set current export data
			currentExportData = ed;
		}

		/// <summary>
		/// Coroutine that fills in one year's worth of coordinate values: target areas,
		/// vegetation/succession types and (depending on <see cref="dataType"/>)
		/// parameters, plants and animals. Decrements the active-worker counter when done.
		/// </summary>
		private IEnumerator<object> ProcessYearData (System.Object args)
		{
			// Coords per frame
			int totalCoordsProcessed = 0;

			// Temp vars
			YearExportSettings ySettings = (YearExportSettings)args;
			ExportSettings settings = ySettings.settings;
			ExportData ed = settings.exportData;
			ExportData.YearData.CoordinateData coordData;

			// Save the parameters and filter the list
			List<string> parametersList = new List<string> ();
			foreach (string s in scene.progression.GetAllDataNames (false)) {
				if (ShouldExportParameter (s) && settings.dataNames.Contains (s)) {
					parametersList.Add (s);
				}
			}

			// Save the plants list and filter the list
			List<PlantType> plantsList = new List<PlantType> ();
			foreach (PlantType p in scene.plantTypes) {
				if (ShouldExportPlant (p.name) && settings.dataNames.Contains (p.name)) {
					plantsList.Add (p);
				}
			}

			// Save the animals list and filter the list
			List<AnimalType> animalsList = new List<AnimalType> ();
			foreach (AnimalType a in scene.animalTypes) {
				if (ShouldExportAnimal (a.name) && settings.dataNames.Contains (a.name)) {
					animalsList.Add (a);
				}
			}

			ExportData.YearData y = ed [ySettings.year];
			foreach (ExportData.YearData.CoordinateData cd in y.EnumerateCoords ()) {
				coordData = cd;

				/** Target areas **/
				for (int a = 1; a < scene.progression.targetAreas + 1; a++) {
					if (ed.HasColumn ("targetarea " + a)) {
						Data targetArea = scene.progression.GetData (Progression.TARGET_ID + a);
						cd ["targetarea " + a] = (targetArea.Get (cd.coord) > 0) ? "1" : "0";
					}
				}

				/** Vegetation types **/
				if (exportVegetationTypes || exportSuccessionTypes) {
					// Get the tile type
					TileType tile = scene.progression.vegetation.GetTileType (cd.coord.x, cd.coord.y);

					// Set the data
					if (exportVegetationTypes) {
						// Set vegetation and succession type
						cd ["vegetation"] = tile.vegetationType.name;
					}
					if (exportSuccessionTypes) {
						cd ["succession"] = tile.vegetationType.successionType.name;
					}
				}

				/** Data Type **/
				switch (this.dataType) {
				// If we ALWAYS show the data, get all parameters and show them
				// also we get the data from the plants and animals and show it
				case DataTypes.Always:
					{
						/** Parameters **/
						foreach (string p in parametersList) {
							// Check if we should set parameter
							if (string.IsNullOrEmpty (cd [p])) {
								// Get the data, if it's null try the default (init) value
								Data data = scene.progression.GetData (p, y.year) ?? scene.progression.GetData (p);

								// Exception: calculated data, we need to manually set the year
								bool isCalcData = (data is CalculatedData);
								if (isCalcData) {
									((CalculatedData)data).year = y.year;
								}

								// Set the value
								if (data != null) {
									cd [p] = data.Get (cd.coord).ToString ();
								}

								// Reset the calc data to the current year to avoid messing up the game logic
								if (isCalcData) {
									((CalculatedData)data).year = -1;
								}
							}
						}

						/** Plants **/
						foreach (PlantType p in plantsList) {
							// Get the data
							Data data = scene.progression.GetData (p.dataName, y.year);
							if (data != null) {
								cd [p.name] = data.Get (cd.coord).ToString ();
							}
						}

						/** Animals **/
						foreach (AnimalType a in animalsList) {
							// Check the animal type
							if (a is LargeAnimalType) {
								LargeAnimalType la = (LargeAnimalType)a;
								foreach (AnimalStartPopulationModel.Nests.Nest nest in la.startPopModel.nests.nests) {
									// We check if we have a coord data of the location of the nest
									if ((coordData.coord.x == nest.x) && (coordData.coord.y == nest.y)) {
										// Count the total animals in the nest
										int males = nest.GetMalesAt (y.year);
										int females = nest.GetFemalesAt (y.year);
										coordData [a.name] = (males + females).ToString ();
									}
								}
							}
							// TODO: Add more animal types as they come
						}
					}
					break;

				case DataTypes.OnlyWhenSurveyed:
					break;
				}

				// Check if we should wait a frame
				totalCoordsProcessed++;
				if (totalCoordsProcessed > COORDS_PER_FRAME) {
					totalCoordsProcessed = 0;
					yield return new WaitForEndOfFrame ();
				}
			}

			ThreadFinished ();
		}

		/// <summary>
		/// Legacy thread entry point; superseded by the <see cref="ProcessYearData"/>
		/// coroutine. Only decrements the active-worker counter.
		/// </summary>
		private void Thread (System.Object args)
		{
			// FIX: removed dead locals (settings/ed/year/coordData) that were
			// declared but never used.
			ThreadFinished ();
		}

		private void ThreadFinished ()
		{
			activeThreads--;
		}

		/// <summary>True when the parameter should be exported (always, unless a selection excludes it).</summary>
		public bool ShouldExportParameter (string param)
		{
			if (this.selectionType == SelectionTypes.Selection) {
				return this.parameters.Contains (param);
			}
			return true;
		}

		/// <summary>True when the animal should be exported (always, unless a selection excludes it).</summary>
		public bool ShouldExportAnimal (string animal)
		{
			if (this.selectionType == SelectionTypes.Selection) {
				return this.animals.Contains (animal);
			}
			return true;
		}

		/// <summary>True when the target area should be exported (always, unless a selection excludes it).</summary>
		public bool ShouldExportTargetArea (int area)
		{
			if (this.selectionType == SelectionTypes.Selection) {
				return this.targetAreas.Contains (area);
			}
			return true;
		}

		/// <summary>True when the plant should be exported (always, unless a selection excludes it).</summary>
		public bool ShouldExportPlant (string plant)
		{
			if (this.selectionType == SelectionTypes.Selection) {
				return this.plants.Contains (plant);
			}
			return true;
		}

		/// <summary>
		/// Asks the user for a CSV target file, then exports asynchronously.
		/// Invokes onComplete on success or onCanceled when the dialog is dismissed.
		/// </summary>
		public void ExportData (ExportSettings settings, System.Action onComplete, System.Action onCanceled)
		{
			GameControl.self.StartCoroutine (SaveFileDialog.Show ("export", "csv files (*.csv)|*.csv",
				delegate (bool ok, string url) {
				// Check if cancelled
				if (!ok) {
					if (onCanceled != null)
						onCanceled ();
					return;
				}
				// Get the export data
				GameControl.self.StartCoroutine (COExportData (settings, url, onComplete));
			}));
		}

		/// <summary>
		/// Coroutine that gathers the export data (with a spinner and the interface
		/// hidden), then writes it as UTF-8 CSV to filePath.
		/// </summary>
		private IEnumerator<object> COExportData (ExportSettings settings, string filePath, System.Action onComplete)
		{
			// Enable spinner and hide interface
			SimpleSpinner.ActivateSpinner ();
			//GameControl.self.isProcessing = true;
			GameControl.self.hideToolBar = true;
			GameControl.self.hideSuccessionButton = true;
			GameMenu.show = false;

			string help = "Gathering data. Depending on the amount of data, this may take a few minutes.";
			GameControl.ExtraHelp (help);

			yield return 0;
			yield return GameControl.self.StartCoroutine (RetrieveExportData (settings));

			// Sort the years
			currentExportData.SortYears ();

			// Disable spinner and show interface
			SimpleSpinner.DeactivateSpinner ();
			//GameControl.self.isProcessing = false;
			GameControl.self.hideToolBar = false;
			GameControl.self.hideSuccessionButton = false;
			GameMenu.show = true;
			GameControl.ClearExtraHelp (help);

			// FIX: 'using' guarantees the stream is closed and disposed even if
			// ToCSV/Write throws; previously the handle leaked on exception.
			using (FileStream fs = File.Create (filePath)) {
				// Stringify and save
				System.Text.UTF8Encoding enc = new System.Text.UTF8Encoding ();
				string txt = currentExportData.ToCSV ();
				fs.Write (enc.GetBytes (txt), 0, enc.GetByteCount (txt));
			}

			if (onComplete != null)
				onComplete ();
		}

		public void AddTargetArea (int area)
		{
			if (this.targetAreas.Contains (area))
				return;
			this.targetAreas.Add (area);
			this.targetAreas.Sort ();
		}

		public void RemoveTargetArea (int area)
		{
			if (!this.targetAreas.Contains (area))
				return;
			this.targetAreas.Remove (area);
			this.targetAreas.Sort ();
		}

		public void AddParameter (string param)
		{
			if (this.parameters.Contains (param))
				return;
			this.parameters.Add (param);
			this.parameters.Sort ();
		}

		public void RemoveParameter (string param)
		{
			if (!this.parameters.Contains (param))
				return;
			this.parameters.Remove (param);
			this.parameters.Sort ();
		}

		public void AddAnimal (string name)
		{
			if (this.animals.Contains (name))
				return;
			this.animals.Add (name);
			this.animals.Sort ();
		}

		public void RemoveAnimal (string name)
		{
			if (!this.animals.Contains (name))
				return;
			this.animals.Remove (name);
			this.animals.Sort ();
		}

		public void AddPlant (string name)
		{
			if (this.plants.Contains (name))
				return;
			this.plants.Add (name);
			this.plants.Sort ();
		}

		public void RemovePlant (string name)
		{
			if (!this.plants.Contains (name))
				return;
			this.plants.Remove (name);
			this.plants.Sort ();
		}

		/// <summary>
		/// Loads export settings from "exportsettings.xml" in path; returns a
		/// manager with defaults when the file does not exist.
		/// </summary>
		public static ExportMgr Load (string path, Scene scene)
		{
			ExportMgr mgr = new ExportMgr (scene);
			if (File.Exists (path + "exportsettings.xml")) {
				XmlTextReader reader = new XmlTextReader (new System.IO.StreamReader (path + "exportsettings.xml"));
				try {
					while (reader.Read()) {
						XmlNodeType nType = reader.NodeType;
						if ((nType == XmlNodeType.Element) && (reader.Name.ToLower () == "export")) {
							mgr.Load (reader);
						}
					}
				} finally {
					reader.Close ();
				}
			}
			return mgr;
		}

		/// <summary>
		/// Reads the attributes and child elements of an &lt;export&gt; element.
		/// Optional attributes keep their current/default values when absent.
		/// </summary>
		private void Load (XmlTextReader reader)
		{
			this.exportEnabled = bool.Parse (reader.GetAttribute ("enabled"));
			this.selectionType = (SelectionTypes)System.Enum.Parse (typeof(SelectionTypes), reader.GetAttribute ("selectiontype"));
			this.dataType = (DataTypes)System.Enum.Parse (typeof(DataTypes), reader.GetAttribute ("datatype"));
			this.exportSuccessionTypes = bool.Parse (reader.GetAttribute ("exportsucctypes"));
			this.exportVegetationTypes = bool.Parse (reader.GetAttribute ("exportvegtypes"));
			if (!string.IsNullOrEmpty (reader.GetAttribute ("costtype"))) {
				this.costType = (CostTypes)System.Enum.Parse (typeof(CostTypes), reader.GetAttribute ("costtype"));
			}
			if (!string.IsNullOrEmpty (reader.GetAttribute ("costs"))) {
				this.costs = int.Parse (reader.GetAttribute ("costs"));
			}
			if (!string.IsNullOrEmpty (reader.GetAttribute ("graphenabled"))) {
				this.graphExportEnabled = bool.Parse (reader.GetAttribute ("graphenabled"));
			} else
				this.graphExportEnabled = true;
			if (!string.IsNullOrEmpty (reader.GetAttribute ("graphcosttype"))) {
				this.graphCostType = (GraphCostTypes)System.Enum.Parse (typeof(GraphCostTypes), reader.GetAttribute ("graphcosttype"));
			}
			if (!string.IsNullOrEmpty (reader.GetAttribute ("graphcosts"))) {
				this.graphCosts = int.Parse (reader.GetAttribute ("graphcosts"));
			}

			List<int> targetAreaList = new List<int> ();
			List<string> paramList = new List<string> ();
			List<string> animalList = new List<string> ();
			List<string> plantList = new List<string> ();

			while (reader.Read()) {
				XmlNodeType nType = reader.NodeType;
				if (nType == XmlNodeType.Element) {
					switch (reader.Name.ToLower ()) {
					case "targetarea":
						targetAreaList.Add (int.Parse (reader.GetAttribute ("id")));
						break;
					case "param":
						paramList.Add (reader.GetAttribute ("name"));
						break;
					case "animal":
						animalList.Add (reader.GetAttribute ("name"));
						break;
					case "plant":
						plantList.Add (reader.GetAttribute ("name"));
						break;
					}
				} else if ((nType == XmlNodeType.EndElement) && (reader.Name.ToLower () == "export")) {
					break;
				}
			}

			this.targetAreas = targetAreaList;
			this.parameters = paramList;
			this.animals = animalList;
			this.plants = plantList;
		}

		/// <summary>
		/// Writes the current settings to "exportsettings.xml" in path.
		/// </summary>
		public void Save (string path)
		{
			XmlTextWriter writer = new XmlTextWriter (path + "exportsettings.xml", System.Text.Encoding.UTF8);
			writer.WriteStartDocument (true);
			writer.WriteStartElement ("export");
			writer.WriteAttributeString ("enabled", this.exportEnabled.ToString().ToLower());
			writer.WriteAttributeString ("exportvegtypes", this.exportVegetationTypes.ToString().ToLower());
			writer.WriteAttributeString ("exportsucctypes", this.exportSuccessionTypes.ToString().ToLower());
			writer.WriteAttributeString ("selectiontype", this.selectionType.ToString());
			writer.WriteAttributeString ("datatype", this.dataType.ToString());
			writer.WriteAttributeString ("costtype", this.costType.ToString());
			writer.WriteAttributeString ("costs", this.costs.ToString ());
			writer.WriteAttributeString ("graphenabled", this.graphExportEnabled.ToString ().ToLower ());
			writer.WriteAttributeString ("graphcosttype", this.graphCostType.ToString ());
			writer.WriteAttributeString ("graphcosts", this.graphCosts.ToString ());
			foreach (string s in this.parameters) {
				writer.WriteStartElement ("param");
				writer.WriteAttributeString ("name", s);
				writer.WriteEndElement ();
			}
			foreach (string s in this.animals) {
				writer.WriteStartElement ("animal");
				writer.WriteAttributeString ("name", s);
				writer.WriteEndElement ();
			}
			foreach (string s in this.plants) {
				writer.WriteStartElement ("plant");
				writer.WriteAttributeString ("name", s);
				writer.WriteEndElement ();
			}
			foreach (int i in this.targetAreas) {
				writer.WriteStartElement ("targetarea");
				writer.WriteAttributeString ("id", i.ToString());
				writer.WriteEndElement ();
			}
			writer.WriteEndElement ();
			writer.WriteEndDocument ();
			writer.Close ();
		}

		/// <summary>
		/// Drops selections that no longer exist in the scene; animal/plant names are
		/// matched case-insensitively and replaced by their canonical casing.
		/// </summary>
		public void UpdateReferences ()
		{
			List<string> list = new List<string> ();
			foreach (string s in this.parameters) {
				if (scene.progression.HasData (s))
					list.Add (s);
			}
			this.parameters = list;

			list = new List<string> ();
			foreach (string s in this.animals) {
				foreach (AnimalType a in scene.animalTypes) {
					if (a.name.ToLower () == s.ToLower ()) {
						list.Add (a.name);
						break;
					}
				}
			}
			this.animals = list;

			list = new List<string> ();
			foreach (string s in this.plants) {
				foreach (PlantType p in scene.plantTypes) {
					if (p.name.ToLower () == s.ToLower ()) {
						list.Add (p.name);
						break;
					}
				}
			}
			this.plants = list;
		}
	}
}
// Designer half of the GeneralTabPage partial class. InitializeComponent is
// serialized by the Windows Forms designer; keep hand edits out of it.
namespace XenAdmin.TabPages
{
    partial class GeneralTabPage
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                // licenseStatus is declared in the non-designer half of this
                // partial class; it is disposed alongside the components here.
                if (licenseStatus != null)
                    licenseStatus.Dispose();
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Component Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(GeneralTabPage));
            this.buttonProperties = new System.Windows.Forms.Button();
            this.panel1 = new System.Windows.Forms.Panel();
            this.panel3 = new System.Windows.Forms.Panel();
            this.buttonPanel = new System.Windows.Forms.FlowLayoutPanel();
            this.buttonViewConsole = new System.Windows.Forms.Button();
            this.buttonViewLog = new System.Windows.Forms.Button();
            this.linkLabelExpand = new System.Windows.Forms.LinkLabel();
            this.linkLabelCollapse = new System.Windows.Forms.LinkLabel();
            this.panel2 = new XenAdmin.Controls.PanelNoFocusScroll();
            this.panelReadCaching = new System.Windows.Forms.Panel();
            this.pdSectionReadCaching = new XenAdmin.Controls.PDSection();
            this.panelDockerInfo = new System.Windows.Forms.Panel();
            this.pdSectionDockerInfo = new XenAdmin.Controls.PDSection();
            this.panelDockerVersion = new System.Windows.Forms.Panel();
            this.pdSectionDockerVersion = new XenAdmin.Controls.PDSection();
            this.panelStorageLinkSystemCapabilities = new System.Windows.Forms.Panel();
            this.pdSectionStorageLinkSystemCapabilities = new XenAdmin.Controls.PDSection();
            this.panelMultipathBoot = new System.Windows.Forms.Panel();
            this.pdSectionMultipathBoot = new XenAdmin.Controls.PDSection();
            this.panelStorageLink = new System.Windows.Forms.Panel();
            this.pdStorageLink = new XenAdmin.Controls.PDSection();
            this.panelUpdates = new System.Windows.Forms.Panel();
            this.pdSectionUpdates = new XenAdmin.Controls.PDSection();
            this.panelMemoryAndVCPUs = new System.Windows.Forms.Panel();
            this.pdSectionVCPUs = new XenAdmin.Controls.PDSection();
            this.panelMultipathing = new System.Windows.Forms.Panel();
            this.pdSectionMultipathing = new XenAdmin.Controls.PDSection();
            this.panelStatus = new System.Windows.Forms.Panel();
            this.pdSectionStatus = new XenAdmin.Controls.PDSection();
            this.panelHighAvailability = new System.Windows.Forms.Panel();
            this.pdSectionHighAvailability = new XenAdmin.Controls.PDSection();
            this.panelBootOptions = new System.Windows.Forms.Panel();
            this.pdSectionBootOptions = new XenAdmin.Controls.PDSection();
            this.panelCPU = new System.Windows.Forms.Panel();
            this.pdSectionCPU = new XenAdmin.Controls.PDSection();
            this.panelLicense = new System.Windows.Forms.Panel();
            this.pdSectionLicense = new XenAdmin.Controls.PDSection();
            this.panelVersion = new System.Windows.Forms.Panel();
            this.pdSectionVersion = new XenAdmin.Controls.PDSection();
            this.panelMemory = new System.Windows.Forms.Panel();
            this.pdSectionMemory = new XenAdmin.Controls.PDSection();
            this.panelManagementInterfaces = new System.Windows.Forms.Panel();
            this.pdSectionManagementInterfaces = new XenAdmin.Controls.PDSection();
            this.panelCustomFields = new System.Windows.Forms.Panel();
            this.pdSectionCustomFields = new XenAdmin.Controls.PDSection();
            this.panelGeneral = new System.Windows.Forms.Panel();
            this.pdSectionGeneral = new XenAdmin.Controls.PDSection();
            this.pageContainerPanel.SuspendLayout();
            this.panel1.SuspendLayout();
            this.panel3.SuspendLayout();
            this.buttonPanel.SuspendLayout();
            this.panel2.SuspendLayout();
            this.panelReadCaching.SuspendLayout();
            this.panelDockerInfo.SuspendLayout();
            this.panelDockerVersion.SuspendLayout();
            this.panelStorageLinkSystemCapabilities.SuspendLayout();
            this.panelMultipathBoot.SuspendLayout();
            this.panelStorageLink.SuspendLayout();
            this.panelUpdates.SuspendLayout();
            this.panelMemoryAndVCPUs.SuspendLayout();
            this.panelMultipathing.SuspendLayout();
            this.panelStatus.SuspendLayout();
            this.panelHighAvailability.SuspendLayout();
            this.panelBootOptions.SuspendLayout();
            this.panelCPU.SuspendLayout();
            this.panelLicense.SuspendLayout();
            this.panelVersion.SuspendLayout();
            this.panelMemory.SuspendLayout();
            this.panelManagementInterfaces.SuspendLayout();
            this.panelCustomFields.SuspendLayout();
            this.panelGeneral.SuspendLayout();
            this.SuspendLayout();
            // 
            // pageContainerPanel
            // 
            this.pageContainerPanel.Controls.Add(this.panel2);
            this.pageContainerPanel.Controls.Add(this.panel1);
            resources.ApplyResources(this.pageContainerPanel, "pageContainerPanel");
            // 
            // buttonProperties
            // 
            resources.ApplyResources(this.buttonProperties, "buttonProperties");
            this.buttonProperties.Name = "buttonProperties";
            this.buttonProperties.UseVisualStyleBackColor = true;
            this.buttonProperties.Click += new System.EventHandler(this.EditButton_Click);
            // 
            // panel1
            // 
            this.panel1.Controls.Add(this.panel3);
            resources.ApplyResources(this.panel1, "panel1");
            this.panel1.Name = "panel1";
            // 
            // panel3
            // 
            this.panel3.Controls.Add(this.buttonPanel);
            this.panel3.Controls.Add(this.linkLabelExpand);
            this.panel3.Controls.Add(this.linkLabelCollapse);
            resources.ApplyResources(this.panel3, "panel3");
            this.panel3.Name = "panel3";
            // 
            // buttonPanel
            // 
            this.buttonPanel.Controls.Add(this.buttonProperties);
            this.buttonPanel.Controls.Add(this.buttonViewConsole);
            this.buttonPanel.Controls.Add(this.buttonViewLog);
            resources.ApplyResources(this.buttonPanel, "buttonPanel");
            this.buttonPanel.Name = "buttonPanel";
            // 
            // buttonViewConsole
            // 
            resources.ApplyResources(this.buttonViewConsole, "buttonViewConsole");
            this.buttonViewConsole.Name = "buttonViewConsole";
            this.buttonViewConsole.UseVisualStyleBackColor = true;
            this.buttonViewConsole.Click += new System.EventHandler(this.buttonViewConsole_Click);
            // 
            // buttonViewLog
            // 
            resources.ApplyResources(this.buttonViewLog, "buttonViewLog");
            this.buttonViewLog.Name = "buttonViewLog";
            this.buttonViewLog.UseVisualStyleBackColor = true;
            this.buttonViewLog.Click += new System.EventHandler(this.buttonViewLog_Click);
            // 
            // linkLabelExpand
            // 
            resources.ApplyResources(this.linkLabelExpand, "linkLabelExpand");
            this.linkLabelExpand.Name = "linkLabelExpand";
            this.linkLabelExpand.TabStop = true;
            this.linkLabelExpand.LinkClicked += new System.Windows.Forms.LinkLabelLinkClickedEventHandler(this.linkLabelExpand_LinkClicked);
            // 
            // linkLabelCollapse
            // 
            resources.ApplyResources(this.linkLabelCollapse, "linkLabelCollapse");
            this.linkLabelCollapse.Name = "linkLabelCollapse";
            this.linkLabelCollapse.TabStop = true;
            this.linkLabelCollapse.LinkClicked += new System.Windows.Forms.LinkLabelLinkClickedEventHandler(this.linkLabelCollapse_LinkClicked);
            // 
            // panel2
            // 
            resources.ApplyResources(this.panel2, "panel2");
            this.panel2.Controls.Add(this.panelReadCaching);
            this.panel2.Controls.Add(this.panelDockerInfo);
            this.panel2.Controls.Add(this.panelDockerVersion);
            this.panel2.Controls.Add(this.panelStorageLinkSystemCapabilities);
            this.panel2.Controls.Add(this.panelMultipathBoot);
            this.panel2.Controls.Add(this.panelStorageLink);
            this.panel2.Controls.Add(this.panelUpdates);
            this.panel2.Controls.Add(this.panelMemoryAndVCPUs);
            this.panel2.Controls.Add(this.panelMultipathing);
            this.panel2.Controls.Add(this.panelStatus);
            this.panel2.Controls.Add(this.panelHighAvailability);
            this.panel2.Controls.Add(this.panelBootOptions);
            this.panel2.Controls.Add(this.panelCPU);
            this.panel2.Controls.Add(this.panelLicense);
            this.panel2.Controls.Add(this.panelVersion);
            this.panel2.Controls.Add(this.panelMemory);
            this.panel2.Controls.Add(this.panelManagementInterfaces);
            this.panel2.Controls.Add(this.panelCustomFields);
            this.panel2.Controls.Add(this.panelGeneral);
            this.panel2.Name = "panel2";
            // 
            // panelReadCaching
            // 
            resources.ApplyResources(this.panelReadCaching, "panelReadCaching");
            this.panelReadCaching.Controls.Add(this.pdSectionReadCaching);
            this.panelReadCaching.Name = "panelReadCaching";
            // 
            // pdSectionReadCaching
            // 
            this.pdSectionReadCaching.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionReadCaching, "pdSectionReadCaching");
            this.pdSectionReadCaching.Name = "pdSectionReadCaching";
            this.pdSectionReadCaching.ShowCellToolTips = false;
            this.pdSectionReadCaching.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelDockerInfo
            // 
            resources.ApplyResources(this.panelDockerInfo, "panelDockerInfo");
            this.panelDockerInfo.Controls.Add(this.pdSectionDockerInfo);
            this.panelDockerInfo.Name = "panelDockerInfo";
            // 
            // pdSectionDockerInfo
            // 
            this.pdSectionDockerInfo.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionDockerInfo, "pdSectionDockerInfo");
            this.pdSectionDockerInfo.Name = "pdSectionDockerInfo";
            this.pdSectionDockerInfo.ShowCellToolTips = false;
            // 
            // panelDockerVersion
            // 
            resources.ApplyResources(this.panelDockerVersion, "panelDockerVersion");
            this.panelDockerVersion.Controls.Add(this.pdSectionDockerVersion);
            this.panelDockerVersion.Name = "panelDockerVersion";
            // 
            // pdSectionDockerVersion
            // 
            this.pdSectionDockerVersion.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionDockerVersion, "pdSectionDockerVersion");
            this.pdSectionDockerVersion.Name = "pdSectionDockerVersion";
            this.pdSectionDockerVersion.ShowCellToolTips = false;
            // 
            // panelStorageLinkSystemCapabilities
            // 
            resources.ApplyResources(this.panelStorageLinkSystemCapabilities, "panelStorageLinkSystemCapabilities");
            this.panelStorageLinkSystemCapabilities.Controls.Add(this.pdSectionStorageLinkSystemCapabilities);
            this.panelStorageLinkSystemCapabilities.Name = "panelStorageLinkSystemCapabilities";
            // 
            // pdSectionStorageLinkSystemCapabilities
            // 
            this.pdSectionStorageLinkSystemCapabilities.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionStorageLinkSystemCapabilities, "pdSectionStorageLinkSystemCapabilities");
            this.pdSectionStorageLinkSystemCapabilities.Name = "pdSectionStorageLinkSystemCapabilities";
            this.pdSectionStorageLinkSystemCapabilities.ShowCellToolTips = false;
            // 
            // panelMultipathBoot
            // 
            resources.ApplyResources(this.panelMultipathBoot, "panelMultipathBoot");
            this.panelMultipathBoot.Controls.Add(this.pdSectionMultipathBoot);
            this.panelMultipathBoot.Name = "panelMultipathBoot";
            // 
            // pdSectionMultipathBoot
            // 
            this.pdSectionMultipathBoot.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionMultipathBoot, "pdSectionMultipathBoot");
            this.pdSectionMultipathBoot.Name = "pdSectionMultipathBoot";
            this.pdSectionMultipathBoot.ShowCellToolTips = false;
            // 
            // panelStorageLink
            // 
            resources.ApplyResources(this.panelStorageLink, "panelStorageLink");
            this.panelStorageLink.Controls.Add(this.pdStorageLink);
            this.panelStorageLink.Name = "panelStorageLink";
            // 
            // pdStorageLink
            // 
            this.pdStorageLink.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdStorageLink, "pdStorageLink");
            this.pdStorageLink.Name = "pdStorageLink";
            this.pdStorageLink.ShowCellToolTips = false;
            this.pdStorageLink.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelUpdates
            // 
            resources.ApplyResources(this.panelUpdates, "panelUpdates");
            this.panelUpdates.Controls.Add(this.pdSectionUpdates);
            this.panelUpdates.Name = "panelUpdates";
            // 
            // pdSectionUpdates
            // 
            this.pdSectionUpdates.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionUpdates, "pdSectionUpdates");
            this.pdSectionUpdates.Name = "pdSectionUpdates";
            this.pdSectionUpdates.ShowCellToolTips = false;
            this.pdSectionUpdates.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelMemoryAndVCPUs
            // 
            resources.ApplyResources(this.panelMemoryAndVCPUs, "panelMemoryAndVCPUs");
            this.panelMemoryAndVCPUs.Controls.Add(this.pdSectionVCPUs);
            this.panelMemoryAndVCPUs.Name = "panelMemoryAndVCPUs";
            // 
            // pdSectionVCPUs
            // 
            this.pdSectionVCPUs.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionVCPUs, "pdSectionVCPUs");
            this.pdSectionVCPUs.Name = "pdSectionVCPUs";
            this.pdSectionVCPUs.ShowCellToolTips = false;
            this.pdSectionVCPUs.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelMultipathing
            // 
            resources.ApplyResources(this.panelMultipathing, "panelMultipathing");
            this.panelMultipathing.Controls.Add(this.pdSectionMultipathing);
            this.panelMultipathing.Name = "panelMultipathing";
            // 
            // pdSectionMultipathing
            // 
            this.pdSectionMultipathing.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionMultipathing, "pdSectionMultipathing");
            this.pdSectionMultipathing.Name = "pdSectionMultipathing";
            this.pdSectionMultipathing.ShowCellToolTips = false;
            this.pdSectionMultipathing.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelStatus
            // 
            resources.ApplyResources(this.panelStatus, "panelStatus");
            this.panelStatus.Controls.Add(this.pdSectionStatus);
            this.panelStatus.Name = "panelStatus";
            // 
            // pdSectionStatus
            // 
            this.pdSectionStatus.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionStatus, "pdSectionStatus");
            this.pdSectionStatus.Name = "pdSectionStatus";
            this.pdSectionStatus.ShowCellToolTips = false;
            this.pdSectionStatus.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelHighAvailability
            // 
            resources.ApplyResources(this.panelHighAvailability, "panelHighAvailability");
            this.panelHighAvailability.Controls.Add(this.pdSectionHighAvailability);
            this.panelHighAvailability.Name = "panelHighAvailability";
            // 
            // pdSectionHighAvailability
            // 
            this.pdSectionHighAvailability.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionHighAvailability, "pdSectionHighAvailability");
            this.pdSectionHighAvailability.Name = "pdSectionHighAvailability";
            this.pdSectionHighAvailability.ShowCellToolTips = false;
            this.pdSectionHighAvailability.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelBootOptions
            // 
            resources.ApplyResources(this.panelBootOptions, "panelBootOptions");
            this.panelBootOptions.Controls.Add(this.pdSectionBootOptions);
            this.panelBootOptions.Name = "panelBootOptions";
            // 
            // pdSectionBootOptions
            // 
            this.pdSectionBootOptions.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionBootOptions, "pdSectionBootOptions");
            this.pdSectionBootOptions.Name = "pdSectionBootOptions";
            this.pdSectionBootOptions.ShowCellToolTips = false;
            this.pdSectionBootOptions.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelCPU
            // 
            resources.ApplyResources(this.panelCPU, "panelCPU");
            this.panelCPU.Controls.Add(this.pdSectionCPU);
            this.panelCPU.Name = "panelCPU";
            // 
            // pdSectionCPU
            // 
            this.pdSectionCPU.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionCPU, "pdSectionCPU");
            this.pdSectionCPU.Name = "pdSectionCPU";
            this.pdSectionCPU.ShowCellToolTips = false;
            this.pdSectionCPU.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelLicense
            // 
            resources.ApplyResources(this.panelLicense, "panelLicense");
            this.panelLicense.Controls.Add(this.pdSectionLicense);
            this.panelLicense.Name = "panelLicense";
            // 
            // pdSectionLicense
            // 
            this.pdSectionLicense.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionLicense, "pdSectionLicense");
            this.pdSectionLicense.Name = "pdSectionLicense";
            this.pdSectionLicense.ShowCellToolTips = false;
            this.pdSectionLicense.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelVersion
            // 
            resources.ApplyResources(this.panelVersion, "panelVersion");
            this.panelVersion.Controls.Add(this.pdSectionVersion);
            this.panelVersion.Name = "panelVersion";
            // 
            // pdSectionVersion
            // 
            this.pdSectionVersion.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionVersion, "pdSectionVersion");
            this.pdSectionVersion.Name = "pdSectionVersion";
            this.pdSectionVersion.ShowCellToolTips = false;
            this.pdSectionVersion.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelMemory
            // 
            resources.ApplyResources(this.panelMemory, "panelMemory");
            this.panelMemory.Controls.Add(this.pdSectionMemory);
            this.panelMemory.Name = "panelMemory";
            // 
            // pdSectionMemory
            // 
            this.pdSectionMemory.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionMemory, "pdSectionMemory");
            this.pdSectionMemory.Name = "pdSectionMemory";
            this.pdSectionMemory.ShowCellToolTips = false;
            this.pdSectionMemory.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelManagementInterfaces
            // 
            resources.ApplyResources(this.panelManagementInterfaces, "panelManagementInterfaces");
            this.panelManagementInterfaces.Controls.Add(this.pdSectionManagementInterfaces);
            this.panelManagementInterfaces.Name = "panelManagementInterfaces";
            // 
            // pdSectionManagementInterfaces
            // 
            this.pdSectionManagementInterfaces.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionManagementInterfaces, "pdSectionManagementInterfaces");
            this.pdSectionManagementInterfaces.Name = "pdSectionManagementInterfaces";
            this.pdSectionManagementInterfaces.ShowCellToolTips = false;
            this.pdSectionManagementInterfaces.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelCustomFields
            // 
            resources.ApplyResources(this.panelCustomFields, "panelCustomFields");
            this.panelCustomFields.Controls.Add(this.pdSectionCustomFields);
            this.panelCustomFields.Name = "panelCustomFields";
            // 
            // pdSectionCustomFields
            // 
            this.pdSectionCustomFields.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionCustomFields, "pdSectionCustomFields");
            this.pdSectionCustomFields.Name = "pdSectionCustomFields";
            // NOTE: this is the only section with cell tooltips enabled.
            this.pdSectionCustomFields.ShowCellToolTips = true;
            this.pdSectionCustomFields.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // panelGeneral
            // 
            resources.ApplyResources(this.panelGeneral, "panelGeneral");
            this.panelGeneral.Controls.Add(this.pdSectionGeneral);
            this.panelGeneral.Name = "panelGeneral";
            // 
            // pdSectionGeneral
            // 
            this.pdSectionGeneral.BackColor = System.Drawing.Color.Gainsboro;
            resources.ApplyResources(this.pdSectionGeneral, "pdSectionGeneral");
            this.pdSectionGeneral.Name = "pdSectionGeneral";
            this.pdSectionGeneral.ShowCellToolTips = false;
            this.pdSectionGeneral.ExpandedChanged += new System.Action<XenAdmin.Controls.PDSection>(this.s_ExpandedEventHandler);
            // 
            // GeneralTabPage
            // 
            resources.ApplyResources(this, "$this");
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Dpi;
            this.DoubleBuffered = true;
            this.Name = "GeneralTabPage";
            this.pageContainerPanel.ResumeLayout(false);
            this.panel1.ResumeLayout(false);
            this.panel3.ResumeLayout(false);
            this.panel3.PerformLayout();
            this.buttonPanel.ResumeLayout(false);
            this.panel2.ResumeLayout(false);
            this.panel2.PerformLayout();
            this.panelReadCaching.ResumeLayout(false);
            this.panelDockerInfo.ResumeLayout(false);
            this.panelDockerVersion.ResumeLayout(false);
            this.panelStorageLinkSystemCapabilities.ResumeLayout(false);
            this.panelMultipathBoot.ResumeLayout(false);
            this.panelStorageLink.ResumeLayout(false);
            this.panelUpdates.ResumeLayout(false);
            this.panelMemoryAndVCPUs.ResumeLayout(false);
            this.panelMultipathing.ResumeLayout(false);
            this.panelStatus.ResumeLayout(false);
            this.panelHighAvailability.ResumeLayout(false);
            this.panelBootOptions.ResumeLayout(false);
            this.panelCPU.ResumeLayout(false);
            this.panelLicense.ResumeLayout(false);
            this.panelVersion.ResumeLayout(false);
            this.panelMemory.ResumeLayout(false);
            this.panelManagementInterfaces.ResumeLayout(false);
            this.panelCustomFields.ResumeLayout(false);
            this.panelGeneral.ResumeLayout(false);
            this.ResumeLayout(false);
            this.PerformLayout();
        }

        #endregion

        private System.Windows.Forms.Button buttonProperties;
        private System.Windows.Forms.Panel panel1;
        private XenAdmin.Controls.PanelNoFocusScroll panel2;
        private System.Windows.Forms.Panel panelGeneral;
        private XenAdmin.Controls.PDSection pdSectionGeneral;
        private System.Windows.Forms.Panel panelMemoryAndVCPUs;
        private XenAdmin.Controls.PDSection pdSectionVCPUs;
        private System.Windows.Forms.Panel panelBootOptions;
        private XenAdmin.Controls.PDSection pdSectionBootOptions;
        private System.Windows.Forms.Panel panelMultipathing;
        private XenAdmin.Controls.PDSection pdSectionMultipathing;
        private System.Windows.Forms.Panel panelStatus;
        private XenAdmin.Controls.PDSection pdSectionStatus;
        private System.Windows.Forms.Panel panelHighAvailability;
        private XenAdmin.Controls.PDSection pdSectionHighAvailability;
        private System.Windows.Forms.Panel panelCustomFields;
        private XenAdmin.Controls.PDSection pdSectionCustomFields;
        private System.Windows.Forms.Panel panelManagementInterfaces;
        private XenAdmin.Controls.PDSection pdSectionManagementInterfaces;
        private System.Windows.Forms.Panel panelCPU;
        private XenAdmin.Controls.PDSection pdSectionCPU;
        private System.Windows.Forms.Panel panelVersion;
        private XenAdmin.Controls.PDSection pdSectionVersion;
        private System.Windows.Forms.Panel panelLicense;
        private XenAdmin.Controls.PDSection pdSectionLicense;
        private System.Windows.Forms.Panel panelMemory;
        private XenAdmin.Controls.PDSection pdSectionMemory;
        private System.Windows.Forms.Panel panelUpdates;
        private XenAdmin.Controls.PDSection pdSectionUpdates;
        private System.Windows.Forms.LinkLabel linkLabelExpand;
        private System.Windows.Forms.LinkLabel linkLabelCollapse;
        private System.Windows.Forms.Panel panel3;
        private System.Windows.Forms.Panel panelStorageLink;
        private XenAdmin.Controls.PDSection pdStorageLink;
        private System.Windows.Forms.Panel panelMultipathBoot;
        private XenAdmin.Controls.PDSection pdSectionMultipathBoot;
        private System.Windows.Forms.Panel panelStorageLinkSystemCapabilities;
        private XenAdmin.Controls.PDSection pdSectionStorageLinkSystemCapabilities;
        private System.Windows.Forms.Panel panelDockerInfo;
        private System.Windows.Forms.Panel panelDockerVersion;
        private Controls.PDSection pdSectionDockerVersion;
        private Controls.PDSection pdSectionDockerInfo;
        private System.Windows.Forms.Panel panelReadCaching;
        private Controls.PDSection pdSectionReadCaching;
        private System.Windows.Forms.Button buttonViewConsole;
        private System.Windows.Forms.Button buttonViewLog;
        private System.Windows.Forms.FlowLayoutPanel buttonPanel;
    }
}
// // System.Web.UI.ClientScriptManager.cs // // Authors: // Duncan Mak (duncan@ximian.com) // Gonzalo Paniagua (gonzalo@ximian.com) // Andreas Nahr (ClassDevelopment@A-SoftTech.com) // Lluis Sanchez (lluis@novell.com) // // (C) 2002,2003 Ximian, Inc. (http://www.ximian.com) // (c) 2003 Novell, Inc. (http://www.novell.com) // // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using System;
using System.Collections;
using System.Text;

namespace System.Web.UI {

#if NET_2_0
	public
#else
	internal
#endif
	class ClientScriptManager
	{
		// Each category of registered script is held either in a Hashtable
		// (keyed artifacts) or in a singly linked ScriptEntry list, which
		// preserves registration order and keeps (type, key) pairs unique.
		Hashtable registeredArrayDeclares;	// array name -> ArrayList of element expressions
		ScriptEntry clientScriptBlocks;		// blocks rendered near the top of the form
		ScriptEntry startupScriptBlocks;	// blocks rendered near the bottom of the form
		Hashtable hiddenFields;			// hidden field name -> initial value
		ScriptEntry submitStatements;		// statements for the form onsubmit handler
		ScriptEntry scriptIncludes;		// external script URLs
		Page page;				// owning page

		internal ClientScriptManager (Page page)
		{
			this.page = page;
		}

		/// <summary>Equivalent to <see cref="GetPostBackEventReference(Control,string)"/>.</summary>
		public string GetPostBackClientEvent (Control control, string argument)
		{
			return GetPostBackEventReference (control, argument);
		}

		/// <summary>Returns a "javascript:" hyperlink that posts back to the control.</summary>
		public string GetPostBackClientHyperlink (Control control, string argument)
		{
			return "javascript:" + GetPostBackEventReference (control, argument);
		}

		public string GetPostBackEventReference (Control control)
		{
			return GetPostBackEventReference (control, "");
		}

		/// <summary>
		/// Returns a __doPostBack(...) call for the control/argument pair and
		/// marks the page as requiring the client-side postback script.
		/// </summary>
		public string GetPostBackEventReference (Control control, string argument)
		{
			page.RequiresPostBackScript ();
			return String.Format ("__doPostBack('{0}','{1}')", control.UniqueID, argument);
		}

#if NET_2_0
		/// <summary>
		/// Returns the postback script for the given options. Simple options
		/// reduce to __doPostBack; richer options use WebForm_DoPostback from
		/// the embedded webform.js helper.
		/// </summary>
		public string GetPostBackEventReference (PostBackOptions options)
		{
			// Options that carry no extra behavior can use the plain reference.
			if (options.ActionUrl == null && options.ValidationGroup == null && !options.TrackFocus &&
				!options.AutoPostBack && !options.PerformValidation)
			{
				if (options.RequiresJavaScriptProtocol)
					return GetPostBackClientHyperlink (options.TargetControl, options.Argument);
				else
					return GetPostBackEventReference (options.TargetControl, options.Argument);
			}

			// Richer options need the webform.js helper include.
			if (!IsClientScriptIncludeRegistered (typeof(Page), "webform")) {
				RegisterClientScriptInclude (typeof(Page), "webform", GetWebResourceUrl (typeof(Page), "webform.js"));
			}

			// Cross-page posting records the current path for PreviousPage support.
			if (options.ActionUrl != null)
				RegisterHiddenField (Page.PreviousPageID, page.Request.FilePath);

			if (options.ClientSubmit || options.ActionUrl != null)
				page.RequiresPostBackScript ();

			return String.Format ("{0}WebForm_DoPostback({1},{2},{3},{4},{5},{6},{7},{8})",
					options.RequiresJavaScriptProtocol ? "javascript:" : "",
					ClientScriptManager.GetScriptLiteral (options.TargetControl.UniqueID),
					ClientScriptManager.GetScriptLiteral (options.Argument),
					ClientScriptManager.GetScriptLiteral (options.ActionUrl),
					ClientScriptManager.GetScriptLiteral (options.AutoPostBack),
					ClientScriptManager.GetScriptLiteral (options.PerformValidation),
					ClientScriptManager.GetScriptLiteral (options.TrackFocus),
					ClientScriptManager.GetScriptLiteral (options.ClientSubmit),
					ClientScriptManager.GetScriptLiteral (options.ValidationGroup)
				);
		}

		public string GetCallbackEventReference (Control control, string argument, string clientCallback, string context)
		{
			return GetCallbackEventReference (control, argument, clientCallback, context, null);
		}

		/// <summary>
		/// Returns a WebForm_DoCallback(...) invocation for out-of-band
		/// callbacks, registering the embedded callback.js helper on demand.
		/// </summary>
		public string GetCallbackEventReference (Control control, string argument, string clientCallback, string context, string clientErrorCallback)
		{
			if (!IsClientScriptIncludeRegistered (typeof(Page), "callback"))
				RegisterClientScriptInclude (typeof(Page), "callback", GetWebResourceUrl (typeof(Page), "callback.js"));

			return string.Format ("WebForm_DoCallback ('{0}', {1}, {2}, {3}, {4})",
					control.UniqueID, argument, clientCallback, context, clientErrorCallback);
		}

		/// <summary>Returns the handler URL that serves an assembly-embedded web resource.</summary>
		/// <exception cref="ArgumentNullException">type or resourceName is null/empty.</exception>
		public string GetWebResourceUrl(Type type, string resourceName)
		{
			if (type == null)
				throw new ArgumentNullException ("type");

			// BUG FIX: the original reported "type" as the offending parameter
			// name here, misleading callers about which argument was invalid.
			if (resourceName == null || resourceName.Length == 0)
				throw new ArgumentNullException ("resourceName");

			return System.Web.Handlers.AssemblyResourceLoader.GetResourceUrl (type, resourceName);
		}
#endif

		public bool IsClientScriptBlockRegistered (string key)
		{
			return IsScriptRegistered (clientScriptBlocks, GetType(), key);
		}

		public bool IsClientScriptBlockRegistered (Type type, string key)
		{
			return IsScriptRegistered (clientScriptBlocks, type, key);
		}

		public bool IsStartupScriptRegistered (string key)
		{
			return IsScriptRegistered (startupScriptBlocks, GetType(), key);
		}

		public bool IsStartupScriptRegistered (Type type, string key)
		{
			return IsScriptRegistered (startupScriptBlocks, type, key);
		}

		public bool IsOnSubmitStatementRegistered (string key)
		{
			return IsScriptRegistered (submitStatements, GetType(), key);
		}

		public bool IsOnSubmitStatementRegistered (Type type, string key)
		{
			return IsScriptRegistered (submitStatements, type, key);
		}

		public bool IsClientScriptIncludeRegistered (string key)
		{
			return IsScriptRegistered (scriptIncludes, GetType(), key);
		}

		public bool IsClientScriptIncludeRegistered (Type type, string key)
		{
			return IsScriptRegistered (scriptIncludes, type, key);
		}

		// Linear scan of a ScriptEntry list for an exact (type, key) match.
		bool IsScriptRegistered (ScriptEntry scriptList, Type type, string key)
		{
			while (scriptList != null) {
				if (scriptList.Type == type && scriptList.Key == key)
					return true;
				scriptList = scriptList.Next;
			}
			return false;
		}

		/// <summary>Appends a value expression to the named client-side array declaration.</summary>
		public void RegisterArrayDeclaration (string arrayName, string arrayValue)
		{
			if (registeredArrayDeclares == null)
				registeredArrayDeclares = new Hashtable();

			if (!registeredArrayDeclares.ContainsKey (arrayName))
				registeredArrayDeclares.Add (arrayName, new ArrayList());

			((ArrayList) registeredArrayDeclares[arrayName]).Add(arrayValue);
		}

		// Appends (type, key, script) to the given list unless that pair is
		// already registered; optionally wraps the script in <script> tags.
		void RegisterScript (ref ScriptEntry scriptList, Type type, string key, string script, bool addScriptTags)
		{
			ScriptEntry last = null;
			ScriptEntry entry = scriptList;
			while (entry != null) {
				if (entry.Type == type && entry.Key == key)
					return;	// first registration wins
				last = entry;
				entry = entry.Next;
			}

			if (addScriptTags)
				script = "<script language=javascript>\n<!--\n" + script + "\n// -->\n</script>";

			entry = new ScriptEntry (type, key, script);
			if (last != null)
				last.Next = entry;
			else
				scriptList = entry;
		}

		internal void RegisterClientScriptBlock (string key, string script)
		{
			RegisterScript (ref clientScriptBlocks, GetType(), key, script, false);
		}

		public void RegisterClientScriptBlock (Type type, string key, string script)
		{
			RegisterScript (ref clientScriptBlocks, type, key, script, false);
		}

		public void RegisterClientScriptBlock (Type type, string key, string script, bool addScriptTags)
		{
			RegisterScript (ref clientScriptBlocks, type, key, script, addScriptTags);
		}

		/// <summary>Registers a hidden form field; the first value registered for a name wins.</summary>
		public void RegisterHiddenField (string hiddenFieldName, string hiddenFieldInitialValue)
		{
			if (hiddenFields == null)
				hiddenFields = new Hashtable ();

			if (!hiddenFields.ContainsKey (hiddenFieldName))
				hiddenFields.Add (hiddenFieldName, hiddenFieldInitialValue);
		}

		internal void RegisterOnSubmitStatement (string key, string script)
		{
			RegisterScript (ref submitStatements, GetType (), key, script, false);
		}

		public void RegisterOnSubmitStatement (Type type, string key, string script)
		{
			RegisterScript (ref submitStatements, type, key, script, false);
		}

		internal void RegisterStartupScript (string key, string script)
		{
			RegisterScript (ref startupScriptBlocks, GetType(), key, script, false);
		}

		public void RegisterStartupScript (Type type, string key, string script)
		{
			RegisterScript (ref startupScriptBlocks, type, key, script, false);
		}

		public void RegisterStartupScript (Type type, string key, string script, bool addScriptTags)
		{
			RegisterScript (ref startupScriptBlocks, type, key, script, addScriptTags);
		}

		public void RegisterClientScriptInclude (string key, string url)
		{
			RegisterScript (ref scriptIncludes, GetType(), key, url, false);
		}

		public void RegisterClientScriptInclude (Type type, string key, string url)
		{
			RegisterScript (ref scriptIncludes, type, key, url, false);
		}

		// Writes each entry's script text, one per line, in registration order.
		void WriteScripts (HtmlTextWriter writer, ScriptEntry scriptList)
		{
			while (scriptList != null) {
				writer.WriteLine (scriptList.Script);
				scriptList = scriptList.Next;
			}
		}

		// Renders the registered hidden fields and clears the collection so a
		// second render pass does not duplicate them.
		internal void WriteHiddenFields (HtmlTextWriter writer)
		{
			if (hiddenFields == null)
				return;

			foreach (string key in hiddenFields.Keys) {
				string value = hiddenFields [key] as string;
				writer.WriteLine ("\n<input type=\"hidden\" name=\"{0}\" value=\"{1}\" />", key, value);
			}

			hiddenFields = null;
		}

		internal void WriteClientScriptIncludes (HtmlTextWriter writer)
		{
			ScriptEntry entry = scriptIncludes;
			while (entry != null) {
				// For includes, Script holds the URL.
				writer.WriteLine ("\n<script src=\"{0}\" type=\"text/javascript\"></script>", entry.Script);
				entry = entry.Next;
			}
		}

		internal void WriteClientScriptBlocks (HtmlTextWriter writer)
		{
			WriteScripts (writer, clientScriptBlocks);
		}

		internal void WriteStartupScriptBlocks (HtmlTextWriter writer)
		{
			WriteScripts (writer, startupScriptBlocks);
		}

		// Emits one "var name = new Array(v1, v2, ...);" per registered array
		// inside a single comment-guarded <script> element.
		internal void WriteArrayDeclares (HtmlTextWriter writer)
		{
			if (registeredArrayDeclares != null) {
				writer.WriteLine();
				writer.WriteLine("<script language=\"javascript\">");
				writer.WriteLine("<!--");
				IDictionaryEnumerator arrayEnum = registeredArrayDeclares.GetEnumerator();
				while (arrayEnum.MoveNext()) {
					writer.Write("\tvar ");
					writer.Write(arrayEnum.Key);
					writer.Write(" = new Array(");
					IEnumerator arrayListEnum = ((ArrayList) arrayEnum.Value).GetEnumerator();
					bool isFirst = true;
					while (arrayListEnum.MoveNext()) {
						if (isFirst)
							isFirst = false;
						else
							writer.Write(", ");
						writer.Write(arrayListEnum.Current);
					}
					writer.WriteLine(");");
				}
				writer.WriteLine("// -->");
				writer.WriteLine("</script>");
				writer.WriteLine();
			}
		}

		// Concatenates the registered onsubmit statements; null when none.
		internal string WriteSubmitStatements ()
		{
			if (submitStatements == null) return null;

			StringBuilder sb = new StringBuilder ();
			ScriptEntry entry = submitStatements;
			while (entry != null) {
				sb.Append (entry.Script);
				entry = entry.Next;
			}

			return sb.ToString ();
		}

		// Converts a CLR value to a JavaScript literal: null -> null, strings
		// are quoted with embedded quotes escaped, booleans are lower-cased,
		// everything else uses ToString().
		internal static string GetScriptLiteral (object ob)
		{
			if (ob == null)
				return "null";
			else if (ob is string) {
				string s = (string)ob;
				s = s.Replace ("\"", "\\\"");
				return "\"" + s + "\"";
			} else if (ob is bool) {
				return ob.ToString().ToLower();
			} else {
				return ob.ToString ();
			}
		}

		// Node of the singly linked registration lists.
		class ScriptEntry
		{
			public Type Type;
			public string Key;
			public string Script;
			public ScriptEntry Next;

			public ScriptEntry (Type type, string key, string script)
			{
				Key = key;
				Type = type;
				Script = script;
			}
		}
	}
}
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using ChaosCMS.Stores;
using ChaosCMS.Validators;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using ChaosCMS.Models.Pages;

namespace ChaosCMS.Managers
{
    /// <summary>
    /// Provides the APIs for managing pages in a persistence store.
    /// </summary>
    /// <typeparam name="TPage">The type encapsulating a page.</typeparam>
    public class PageManager<TPage> : IDisposable where TPage : class
    {
        private readonly HttpContext context;

        /// <summary>
        /// The cancellation token associated with the current HttpContext.RequestAborted,
        /// or CancellationToken.None if unavailable.
        /// </summary>
        protected CancellationToken CancellationToken => context?.RequestAborted ?? CancellationToken.None;

        /// <summary>
        /// Constructs a new instance of <see cref="PageManager{TPage}"/>.
        /// </summary>
        /// <param name="store">The persistence store the manager will operate over. Required.</param>
        /// <param name="optionsAccessor">Accessor for <see cref="ChaosOptions"/>; defaults are used when null.</param>
        /// <param name="errors">The <see cref="ChaosErrorDescriber"/> used to generate error messages.</param>
        /// <param name="urlFormatter">The <see cref="IUrlFormatter"/> used to normalize page URLs.</param>
        /// <param name="validators">Validators run before create/update operations.</param>
        /// <param name="services">Service provider used to resolve the current <see cref="HttpContext"/>.</param>
        /// <param name="logger">The logger used by this manager.</param>
        public PageManager(IPageStore<TPage> store, IOptions<ChaosOptions> optionsAccessor, ChaosErrorDescriber errors, IUrlFormatter urlFormatter, IEnumerable<IPageValidator<TPage>> validators, IServiceProvider services, ILogger<PageManager<TPage>> logger)
        {
            // Fixed: removed stray empty statement after the throw expression.
            this.Store = store ?? throw new ArgumentNullException(nameof(store));
            this.Options = optionsAccessor?.Value ?? new ChaosOptions();
            this.ErrorDescriber = errors ?? new ChaosErrorDescriber();
            this.UrlFormatter = urlFormatter ?? new DefaultUrlFormatter(optionsAccessor);
            this.Logger = logger;

            if (validators != null)
            {
                foreach (var validator in validators)
                {
                    this.PageValidators.Add(validator);
                }
            }

            if (services != null)
            {
                // NOTE(review): HttpContext may legitimately be null outside a request.
                context = services.GetService<IHttpContextAccessor>()?.HttpContext;
            }
        }

        /// <summary>
        /// Gets the persistence store the manager operates over.
        /// </summary>
        protected internal IPageStore<TPage> Store { get; private set; }

        /// <summary>
        /// The <see cref="ILogger"/> used to log messages from the manager.
        /// </summary>
        protected internal virtual ILogger Logger { get; set; }

        /// <summary>
        /// The <see cref="IPageValidator{TPage}"/> instances used to validate pages.
        /// </summary>
        protected internal IList<IPageValidator<TPage>> PageValidators { get; } = new List<IPageValidator<TPage>>();

        /// <summary>
        /// The <see cref="ChaosErrorDescriber"/> used to generate error messages.
        /// </summary>
        protected internal ChaosErrorDescriber ErrorDescriber { get; set; }

        /// <summary>
        /// The <see cref="ChaosOptions"/> used to configure Chaos.
        /// </summary>
        protected internal ChaosOptions Options { get; set; }

        /// <summary>
        /// The <see cref="IUrlFormatter"/> used for formatting urls.
        /// </summary>
        protected internal IUrlFormatter UrlFormatter { get; set; }

        /// <summary>
        /// Gets a flag indicating whether the backing store supports page content.
        /// </summary>
        public virtual bool SupportsContents
        {
            get
            {
                this.ThrowIfDisposed();
                return this.Store is IPageContentStore<TPage>;
            }
        }

        /// <summary>
        /// Validates, normalizes the URL of, and creates the specified <paramref name="page"/> in the store.
        /// </summary>
        /// <param name="page">The page to create.</param>
        /// <returns>The <see cref="ChaosResult"/> of the operation.</returns>
        public virtual async Task<ChaosResult> CreateAsync(TPage page)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            var result = await ValidateInternal(page);
            if (!result.Succeeded)
            {
                return result;
            }

            await this.FormatUrlAsync(page);
            return await this.Store.CreateAsync(page, CancellationToken);
        }

        /// <summary>
        /// Validates, normalizes the URL of, and updates the specified <paramref name="page"/> in the store.
        /// </summary>
        /// <param name="page">The page to update.</param>
        /// <returns>The <see cref="ChaosResult"/> of the operation.</returns>
        public virtual async Task<ChaosResult> UpdateAsync(TPage page)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            var result = await ValidateInternal(page);
            if (!result.Succeeded)
            {
                return result;
            }

            await this.FormatUrlAsync(page);
            return await this.Store.UpdateAsync(page, CancellationToken);
        }

        // Runs every registered validator and aggregates all failures into one result.
        private async Task<ChaosResult> ValidateInternal(TPage page)
        {
            var errors = new List<ChaosError>();
            foreach (var validator in PageValidators)
            {
                var result = await validator.ValidateAsync(this, page);
                if (!result.Succeeded)
                {
                    errors.AddRange(result.Errors);
                }
            }

            if (errors.Count > 0)
            {
                return ChaosResult.Failed(errors.ToArray());
            }

            return ChaosResult.Success;
        }

        /// <summary>
        /// Finds a single page of results from the store.
        /// </summary>
        /// <param name="page">The 1-based page number; must be at least 1.</param>
        /// <param name="itemsPerPage">The number of items per page; must not exceed <see cref="ChaosOptions.MaxItemsPerPage"/>.</param>
        /// <returns>The requested page of results.</returns>
        public virtual Task<ChaosPaged<TPage>> FindPagedAsync(int page = 1, int itemsPerPage = 25)
        {
            this.CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page < 1)
            {
                throw new InvalidOperationException(Resources.NegativePage);
            }
            if (itemsPerPage > this.Options.MaxItemsPerPage)
            {
                throw new InvalidOperationException(Resources.FormatMaxItemsPerPage(this.Options.MaxItemsPerPage));
            }

            return this.Store.FindPagedAsync(page, itemsPerPage, this.CancellationToken);
        }

        /// <summary>
        /// Finds the page with the given id.
        /// </summary>
        /// <param name="pageId">The id of the page.</param>
        /// <returns>The page, if found.</returns>
        public virtual Task<TPage> FindByIdAsync(string pageId)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (pageId == null)
            {
                throw new ArgumentNullException(nameof(pageId));
            }

            return this.Store.FindByIdAsync(pageId, CancellationToken);
        }

        /// <summary>
        /// Finds the page with the given external id.
        /// </summary>
        /// <param name="externalId">The id of the external source.</param>
        /// <returns>An instance of the page, if found.</returns>
        public virtual Task<TPage> FindByExternalIdAsync(string externalId)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (externalId == null)
            {
                throw new ArgumentNullException(nameof(externalId));
            }

            return this.Store.FindByExternalIdAsync(externalId, CancellationToken);
        }

        /// <summary>
        /// Finds the page registered for an HTTP status code (e.g. a 404 page).
        /// </summary>
        /// <param name="statusCode">The HTTP status code.</param>
        /// <returns>The page, if found.</returns>
        public virtual Task<TPage> FindByStatusCodeAsync(int statusCode)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            return this.Store.FindByStatusCodeAsync(statusCode, CancellationToken);
        }

        /// <summary>
        /// Finds the page associated with the given URL path. The path is
        /// normalized segment-by-segment with <see cref="UrlFormatter"/> before lookup.
        /// </summary>
        /// <param name="urlPath">The url of the page.</param>
        /// <returns>The page, if found.</returns>
        public virtual Task<TPage> FindByUrlAsync(string urlPath)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (urlPath == null)
            {
                throw new ArgumentNullException(nameof(urlPath));
            }

            var url = this.FormatUrl(urlPath);
            return Store.FindByUrlAsync(url, CancellationToken);
        }

        // Normalizes a URL path by formatting each '/'-separated segment individually.
        private string FormatUrl(string urlPath)
        {
            var segments = urlPath.Split('/');
            var formattedSegments = new List<string>(segments.Length);
            foreach (var segment in segments)
            {
                formattedSegments.Add(this.UrlFormatter.FormatUrl(segment));
            }
            return string.Join("/", formattedSegments);
        }

        /// <summary>
        /// Gets the id of the page.
        /// </summary>
        /// <param name="page">The page to get the id from.</param>
        /// <returns>The page id.</returns>
        public virtual Task<string> GetIdAsync(TPage page)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            return this.Store.GetIdAsync(page, CancellationToken);
        }

        /// <summary>
        /// Gets the name of the page.
        /// </summary>
        /// <param name="page">The page to get the name from.</param>
        /// <returns>The page name.</returns>
        public virtual Task<string> GetNameAsync(TPage page)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            return this.Store.GetNameAsync(page, CancellationToken);
        }

        /// <summary>
        /// Sets the name of the page.
        /// </summary>
        /// <param name="page">The page to modify.</param>
        /// <param name="name">The new name.</param>
        public virtual Task SetNameAsync(TPage page, string name)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            return this.Store.SetNameAsync(page, name, CancellationToken);
        }

        /// <summary>
        /// Gets the url of the page.
        /// </summary>
        /// <param name="page">The page to get the url from.</param>
        /// <returns>The page url.</returns>
        public virtual Task<string> GetUrlAsync(TPage page)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            return this.Store.GetUrlAsync(page, CancellationToken);
        }

        /// <summary>
        /// Sets the url of the page, normalizing it with <see cref="UrlFormatter"/> first.
        /// </summary>
        /// <param name="page">The page to modify.</param>
        /// <param name="url">The new url.</param>
        public virtual Task SetUrlAsync(TPage page, string url)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            var formattedUrl = FormatUrl(url);
            return this.Store.SetUrlAsync(page, formattedUrl, CancellationToken);
        }

        /// <summary>
        /// Gets the template of the page.
        /// </summary>
        /// <param name="page">The page to get the template from.</param>
        /// <returns>The template identifier.</returns>
        public virtual Task<string> GetTemplateAsync(TPage page)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            return this.Store.GetTemplateAsync(page, CancellationToken);
        }

        /// <summary>
        /// Sets the template of the page.
        /// </summary>
        /// <param name="page">The page to modify.</param>
        /// <param name="url">The template identifier.</param>
        public virtual Task SetTemplateAsync(TPage page, string url)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            return this.Store.SetTemplateAsync(page, url, CancellationToken);
        }

        /// <summary>
        /// Sets the type of the page.
        /// </summary>
        /// <param name="page">The page to modify.</param>
        /// <param name="pageType">The name of the page type.</param>
        public virtual Task SetPageTypeAsync(TPage page, string pageType)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            return this.Store.SetPageTypeAsync(page, pageType, CancellationToken);
        }

        /// <summary>
        /// Gets the type of the page.
        /// </summary>
        /// <param name="page">The page to read.</param>
        /// <returns>The name of the page type.</returns>
        public virtual Task<string> GetPageTypeAsync(TPage page)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            return this.Store.GetPageTypeAsync(page, CancellationToken);
        }

        /// <summary>
        /// Gets the HTTP status code associated with the page.
        /// </summary>
        /// <param name="page">The page to read.</param>
        /// <returns>The status code.</returns>
        public virtual Task<int> GetStatusCodeAsync(TPage page)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            return this.Store.GetStatusCodeAsync(page, CancellationToken);
        }

        /// <summary>
        /// Sets the HTTP status code associated with the page.
        /// </summary>
        /// <param name="page">The page to modify.</param>
        /// <param name="code">The status code.</param>
        public virtual Task SetStatusCodeAsync(TPage page, int code)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            return this.Store.SetStatusCodeAsync(page, code, CancellationToken);
        }

        /// <summary>
        /// Gets the content items of the page. Requires a store implementing
        /// <see cref="IPageContentStore{TPage}"/>; see <see cref="SupportsContents"/>.
        /// </summary>
        /// <param name="page">The page to read.</param>
        /// <returns>The content items.</returns>
        public virtual Task<List<Content>> GetContentAsync(TPage page)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            // Validate the argument before probing store capabilities.
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            var store = this.GetPageContentStore();
            return store.GetContentAsync(page, CancellationToken);
        }

        /// <summary>
        /// Sets the content items of the page. Requires a store implementing
        /// <see cref="IPageContentStore{TPage}"/>; see <see cref="SupportsContents"/>.
        /// </summary>
        /// <param name="page">The page to modify.</param>
        /// <param name="content">The new content items.</param>
        public virtual Task SetContentAsync(TPage page, List<Content> content)
        {
            CancellationToken.ThrowIfCancellationRequested();
            this.ThrowIfDisposed();
            // Validate the argument before probing store capabilities.
            if (page == null)
            {
                throw new ArgumentNullException(nameof(page));
            }

            var store = this.GetPageContentStore();
            return store.SetContentAsync(page, content, CancellationToken);
        }

        #region IDisposable Support

        private bool isDisposed = false; // To detect redundant calls

        /// <summary>
        /// Releases the unmanaged resources used by the page manager and optionally releases the managed resources.
        /// </summary>
        /// <param name="disposing">True to release both managed and unmanaged resources; false to release only unmanaged resources.</param>
        protected virtual void Dispose(bool disposing)
        {
            if (disposing && !isDisposed)
            {
                Store.Dispose();
                isDisposed = true;
            }
        }

        /// <summary>
        /// Releases all resources used by the page manager.
        /// </summary>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        #endregion IDisposable Support

        // Returns the store as an IPageContentStore, or throws NotSupportedException
        // when the configured store does not support page content.
        private IPageContentStore<TPage> GetPageContentStore()
        {
            this.ThrowIfDisposed();
            if (!(this.Store is IPageContentStore<TPage> store))
            {
                throw new NotSupportedException(Resources.FormatStoreIsNotOfType(typeof(IPageContentStore<TPage>).Name));
            }
            return store;
        }

        // Reads the page's current URL, normalizes it, and writes it back.
        private async Task FormatUrlAsync(TPage page)
        {
            var formattedUrl = FormatUrl(await this.GetUrlAsync(page));
            await this.Store.SetUrlAsync(page, formattedUrl, CancellationToken);
        }

        /// <summary>
        /// Throws if this class has been disposed.
        /// </summary>
        protected void ThrowIfDisposed()
        {
            if (isDisposed)
            {
                throw new ObjectDisposedException(GetType().Name);
            }
        }
    }
}
using Lucene.Net.Diagnostics;
using Lucene.Net.Support;
using Lucene.Net.Support.Threading;
using System;
using System.Collections.Generic;
using System.IO;

namespace Lucene.Net.Search
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>
    /// Utility class to safely share instances of a certain type across multiple
    /// threads, while periodically refreshing them. This class ensures each
    /// reference is closed only once all threads have finished using it. It is
    /// recommended to consult the documentation of <see cref="ReferenceManager{G}"/>
    /// implementations for their <see cref="MaybeRefresh()"/> semantics.
    /// <para/>
    /// @lucene.experimental
    /// </summary>
    /// <typeparam name="G">The concrete type that will be <see cref="Acquire()"/>d and
    /// <see cref="Release(G)"/>d.</typeparam>
    public abstract class ReferenceManager<G> : IDisposable
        where G : class //Make G nullable
    {
        private const string REFERENCE_MANAGER_IS_CLOSED_MSG = "this ReferenceManager is closed";

        // LUCENENET NOTE: changed this to be a private volatile field
        // with a property to set/get it, since protected volatile
        // fields are not CLS compliant
        private volatile G current;

        /// <summary>
        /// The current reference
        /// </summary>
        protected G Current
        {
            get => current;
            set => current = value;
        }

        // Serializes refresh attempts; see DoMaybeRefresh() for the locking contract.
        private readonly ReentrantLock refreshLock = new ReentrantLock();

        private readonly ISet<ReferenceManager.IRefreshListener> refreshListeners = new ConcurrentHashSet<ReferenceManager.IRefreshListener>();

        // Throws when the manager has been disposed (current is nulled on dispose).
        private void EnsureOpen()
        {
            if (current == null)
            {
                throw new ObjectDisposedException(this.GetType().FullName, REFERENCE_MANAGER_IS_CLOSED_MSG);
            }
        }

        // Atomically installs a new reference and releases the old one.
        private void SwapReference(G newReference)
        {
            lock (this)
            {
                EnsureOpen();
                G oldReference = current;
                current = newReference;
                Release(oldReference);
            }
        }

        /// <summary>
        /// Decrement reference counting on the given reference. </summary>
        /// <exception cref="IOException"> If reference decrement on the given resource failed.</exception>
        protected abstract void DecRef(G reference);

        /// <summary>
        /// Refresh the given reference if needed. Returns <c>null</c> if no refresh
        /// was needed, otherwise a new refreshed reference. </summary>
        /// <exception cref="ObjectDisposedException"> If the reference manager has been <see cref="Dispose()"/>d. </exception>
        /// <exception cref="IOException"> If the refresh operation failed </exception>
        protected abstract G RefreshIfNeeded(G referenceToRefresh);

        /// <summary>
        /// Try to increment reference counting on the given reference. Returns <c>true</c> if
        /// the operation was successful. </summary>
        /// <exception cref="ObjectDisposedException"> if the reference manager has been <see cref="Dispose()"/>d. </exception>
        protected abstract bool TryIncRef(G reference);

        /// <summary>
        /// Obtain the current reference. You must match every call to acquire with one
        /// call to <see cref="Release(G)"/>; it's best to do so in a finally clause, and set
        /// the reference to <c>null</c> to prevent accidental usage after it has been
        /// released. </summary>
        /// <exception cref="ObjectDisposedException"> If the reference manager has been <see cref="Dispose()"/>d. </exception>
        public G Acquire()
        {
            G @ref;
            do
            {
                if ((@ref = current) == null)
                {
                    throw new ObjectDisposedException(this.GetType().FullName, REFERENCE_MANAGER_IS_CLOSED_MSG);
                }
                if (TryIncRef(@ref))
                {
                    return @ref;
                }
                if (GetRefCount(@ref) == 0 && (object)current == (object)@ref)
                {
                    if (Debugging.AssertsEnabled) Debugging.Assert(@ref != null);
                    /* if we can't increment the reader but we are
                       still the current reference the RM is in a
                       illegal states since we can't make any progress
                       anymore. The reference is closed but the RM still
                       holds on to it as the actual instance.
                       this can only happen if somebody outside of the RM
                       decrements the refcount without a corresponding increment
                       since the RM assigns the new reference before counting down
                       the reference. */
                    throw new InvalidOperationException("The managed reference has already closed - this is likely a bug when the reference count is modified outside of the ReferenceManager");
                }
            } while (true);
        }

        /// <summary>
        /// <para>
        /// Closes this ReferenceManager to prevent future <see cref="Acquire()"/>ing. A
        /// reference manager should be disposed if the reference to the managed resource
        /// should be disposed or the application using the <see cref="ReferenceManager{G}"/>
        /// is shutting down. The managed resource might not be released immediately,
        /// if the <see cref="ReferenceManager{G}"/> user is holding on to a previously
        /// <see cref="Acquire()"/>d reference. The resource will be released once
        /// when the last reference is <see cref="Release(G)"/>d. Those
        /// references can still be used as if the manager was still active.
        /// </para>
        /// <para>
        /// Applications should not <see cref="Acquire()"/> new references from this
        /// manager once this method has been called. <see cref="Acquire()"/>ing a
        /// resource on a disposed <see cref="ReferenceManager{G}"/> will throw an
        /// <seealso cref="ObjectDisposedException"/>.
        /// </para>
        /// </summary>
        /// <exception cref="IOException">
        /// If the underlying reader of the current reference could not be disposed </exception>
        public void Dispose()
        {
            lock (this)
            {
                Dispose(true);
                GC.SuppressFinalize(this);
            }
        }

        /// <summary>
        /// Returns the current reference count of the given reference.
        /// </summary>
        protected abstract int GetRefCount(G reference);

        /// <summary>
        /// Called after <see cref="Dispose()"/>, so subclass can free any resources. </summary>
        /// <exception cref="IOException"> if the after dispose operation in a sub-class throws an <see cref="IOException"/>
        /// </exception>
        protected virtual void Dispose(bool disposing)
        {
            if (disposing && current != null)
            {
                // make sure we can call this more than once
                // closeable javadoc says:
                // if this is already closed then invoking this method has no effect.
                SwapReference(null);
            }
        }

        private void DoMaybeRefresh()
        {
            // it's ok to call lock() here (blocking) because we're supposed to get here
            // from either maybeRefreh() or maybeRefreshBlocking(), after the lock has
            // already been obtained. Doing that protects us from an accidental bug
            // where this method will be called outside the scope of refreshLock.
            // Per ReentrantLock's javadoc, calling lock() by the same thread more than
            // once is ok, as long as unlock() is called a matching number of times.
            refreshLock.Lock();

            bool refreshed = false;
            try
            {
                G reference = Acquire();
                try
                {
                    NotifyRefreshListenersBefore();
                    G newReference = RefreshIfNeeded(reference);
                    if (newReference != null)
                    {
                        if (Debugging.AssertsEnabled) Debugging.Assert(!ReferenceEquals(newReference, reference), "refreshIfNeeded should return null if refresh wasn't needed");
                        try
                        {
                            SwapReference(newReference);
                            refreshed = true;
                        }
                        finally
                        {
                            if (!refreshed)
                            {
                                Release(newReference);
                            }
                        }
                    }
                }
                finally
                {
                    Release(reference);
                    NotifyRefreshListenersRefreshed(refreshed);
                }
                AfterMaybeRefresh();
            }
            finally
            {
                refreshLock.Unlock();
            }
        }

        /// <summary>
        /// You must call this (or <see cref="MaybeRefreshBlocking()"/>), periodically, if
        /// you want that <see cref="Acquire()"/> will return refreshed instances.
        ///
        /// <para>
        /// <b>Threads</b>: it's fine for more than one thread to call this at once.
        /// Only the first thread will attempt the refresh; subsequent threads will see
        /// that another thread is already handling refresh and will return
        /// immediately. Note that this means if another thread is already refreshing
        /// then subsequent threads will return right away without waiting for the
        /// refresh to complete.
        /// </para>
        /// <para>
        /// If this method returns <c>true</c> it means the calling thread either refreshed or
        /// that there were no changes to refresh. If it returns <c>false</c> it means another
        /// thread is currently refreshing.
        /// </para> </summary>
        /// <exception cref="IOException"> If refreshing the resource causes an <see cref="IOException"/> </exception>
        /// <exception cref="ObjectDisposedException"> If the reference manager has been <see cref="Dispose()"/>d. </exception>
        public bool MaybeRefresh()
        {
            EnsureOpen();

            // Ensure only 1 thread does refresh at once; other threads just return immediately:
            bool doTryRefresh = refreshLock.TryLock();
            if (doTryRefresh)
            {
                try
                {
                    DoMaybeRefresh();
                }
                finally
                {
                    refreshLock.Unlock();
                }
            }

            return doTryRefresh;
        }

        /// <summary>
        /// You must call this (or <see cref="MaybeRefresh()"/>), periodically, if you want
        /// that <see cref="Acquire()"/> will return refreshed instances.
        ///
        /// <para/>
        /// <b>Threads</b>: unlike <see cref="MaybeRefresh()"/>, if another thread is
        /// currently refreshing, this method blocks until that thread completes. It is
        /// useful if you want to guarantee that the next call to <see cref="Acquire()"/>
        /// will return a refreshed instance. Otherwise, consider using the
        /// non-blocking <see cref="MaybeRefresh()"/>. </summary>
        /// <exception cref="IOException"> If refreshing the resource causes an <see cref="IOException"/> </exception>
        /// <exception cref="ObjectDisposedException"> If the reference manager has been <see cref="Dispose()"/>d. </exception>
        public void MaybeRefreshBlocking()
        {
            EnsureOpen();

            // Ensure only 1 thread does refresh at once
            refreshLock.Lock();
            try
            {
                DoMaybeRefresh();
            }
            finally
            {
                refreshLock.Unlock();
            }
        }

        /// <summary>
        /// Called after a refresh was attempted, regardless of
        /// whether a new reference was in fact created. </summary>
        /// <exception cref="IOException"> if a low level I/O exception occurs</exception>
        protected virtual void AfterMaybeRefresh()
        {
        }

        /// <summary>
        /// Release the reference previously obtained via <see cref="Acquire()"/>.
        /// <para/>
        /// <b>NOTE:</b> it's safe to call this after <see cref="Dispose()"/>. </summary>
        /// <exception cref="IOException"> If the release operation on the given resource throws an <see cref="IOException"/> </exception>
        public void Release(G reference)
        {
            if (Debugging.AssertsEnabled) Debugging.Assert(!(reference is null));
            DecRef(reference);
        }

        private void NotifyRefreshListenersBefore()
        {
            foreach (ReferenceManager.IRefreshListener refreshListener in refreshListeners)
            {
                refreshListener.BeforeRefresh();
            }
        }

        private void NotifyRefreshListenersRefreshed(bool didRefresh)
        {
            foreach (ReferenceManager.IRefreshListener refreshListener in refreshListeners)
            {
                refreshListener.AfterRefresh(didRefresh);
            }
        }

        /// <summary>
        /// Adds a listener, to be notified when a reference is refreshed/swapped.
        /// </summary>
        /// <exception cref="ArgumentNullException"> If <paramref name="listener"/> is <c>null</c>. </exception>
        public virtual void AddListener(ReferenceManager.IRefreshListener listener)
        {
            if (listener == null)
            {
                // Fixed CA2208: pass the parameter name as paramName, not the message.
                throw new ArgumentNullException(nameof(listener), "Listener cannot be null");
            }
            refreshListeners.Add(listener);
        }

        /// <summary>
        /// Remove a listener added with <see cref="AddListener(ReferenceManager.IRefreshListener)"/>.
        /// </summary>
        /// <exception cref="ArgumentNullException"> If <paramref name="listener"/> is <c>null</c>. </exception>
        public virtual void RemoveListener(ReferenceManager.IRefreshListener listener)
        {
            if (listener == null)
            {
                // Fixed CA2208: pass the parameter name as paramName, not the message.
                throw new ArgumentNullException(nameof(listener), "Listener cannot be null");
            }
            refreshListeners.Remove(listener);
        }
    }

    /// <summary>
    /// LUCENENET specific class used to provide static access to <see cref="ReferenceManager.IRefreshListener"/>
    /// without having to specifiy the generic closing type of <see cref="ReferenceManager{G}"/>.
    /// </summary>
    public static class ReferenceManager
    {
        /// <summary>
        /// Use to receive notification when a refresh has
        /// finished. See <see cref="ReferenceManager{G}.AddListener(IRefreshListener)"/>.
        /// </summary>
        public interface IRefreshListener
        {
            /// <summary>
            /// Called right before a refresh attempt starts. </summary>
            void BeforeRefresh();

            /// <summary>
            /// Called after the attempted refresh; if the refresh
            /// did open a new reference then didRefresh will be <c>true</c>
            /// and <see cref="ReferenceManager{G}.Acquire()"/> is guaranteed to return the new
            /// reference.
            /// </summary>
            void AfterRefresh(bool didRefresh);
        }
    }
}
using System;

namespace Umbraco.Core
{
    public static partial class Constants
    {
        /// <summary>
        /// Defines the integer identifiers of the built-in Umbraco data types.
        /// </summary>
        public static class DataTypes
        {
            //NOTE: unfortunately due to backwards compat we can't move/rename these, with the addition of the GUID
            //constants, it would make more sense to have these suffixed with "ID" or in a Subclass called "INT", for
            //now all we can do is make a subclass called Guids to put the GUID IDs.

            public const int LabelString = System.DefaultLabelDataTypeId;
            public const int LabelInt = -91;
            public const int LabelBigint = -93;
            public const int LabelDateTime = -94;
            public const int LabelTime = -98;
            public const int LabelDecimal = -99;
            public const int Textarea = -89;
            public const int Textbox = -88;
            public const int RichtextEditor = -87;
            public const int Boolean = -49;
            public const int DateTime = -36;
            public const int DropDownSingle = -39;
            public const int DropDownMultiple = -42;
            public const int Upload = -90;
            public const int DefaultContentListView = -95;
            public const int DefaultMediaListView = -96;
            public const int DefaultMembersListView = -97;
            public const int ImageCropper = 1043;
            public const int Tags = 1041;

            // Pre-value keys reserved by Umbraco itself.
            public static class ReservedPreValueKeys
            {
                public const string IgnoreUserStartNodes = "ignoreUserStartNodes";
            }

            /// <summary>
            /// Defines the identifiers for Umbraco data types as constants for easy centralized access/management.
            /// </summary>
            public static class Guids
            {
                /// <summary>
                /// Guid for Content Picker as string
                /// </summary>
                public const string ContentPicker = "FD1E0DA5-5606-4862-B679-5D0CF3A52A59";

                /// <summary>
                /// Guid for Content Picker
                /// </summary>
                public static readonly Guid ContentPickerGuid = new Guid(ContentPicker);

                /// <summary>
                /// Guid for Member Picker as string
                /// </summary>
                public const string MemberPicker = "1EA2E01F-EBD8-4CE1-8D71-6B1149E63548";

                /// <summary>
                /// Guid for Member Picker
                /// </summary>
                public static readonly Guid MemberPickerGuid = new Guid(MemberPicker);

                /// <summary>
                /// Guid for Media Picker as string
                /// </summary>
                public const string MediaPicker = "135D60E0-64D9-49ED-AB08-893C9BA44AE5";

                /// <summary>
                /// Guid for Media Picker
                /// </summary>
                public static readonly Guid MediaPickerGuid = new Guid(MediaPicker);

                /// <summary>
                /// Guid for Multiple Media Picker as string
                /// </summary>
                public const string MultipleMediaPicker = "9DBBCBBB-2327-434A-B355-AF1B84E5010A";

                /// <summary>
                /// Guid for Multiple Media Picker
                /// </summary>
                public static readonly Guid MultipleMediaPickerGuid = new Guid(MultipleMediaPicker);

                /// <summary>
                /// Guid for Related Links as string
                /// </summary>
                public const string RelatedLinks = "B4E3535A-1753-47E2-8568-602CF8CFEE6F";

                /// <summary>
                /// Guid for Related Links
                /// </summary>
                public static readonly Guid RelatedLinksGuid = new Guid(RelatedLinks);

                /// <summary>
                /// Guid for Member as string
                /// </summary>
                public const string Member = "d59be02f-1df9-4228-aa1e-01917d806cda";

                /// <summary>
                /// Guid for Member
                /// </summary>
                public static readonly Guid MemberGuid = new Guid(Member);

                /// <summary>
                /// Guid for Image Cropper as string
                /// </summary>
                public const string ImageCropper = "1df9f033-e6d4-451f-b8d2-e0cbc50a836f";

                /// <summary>
                /// Guid for Image Cropper
                /// </summary>
                public static readonly Guid ImageCropperGuid = new Guid(ImageCropper);

                /// <summary>
                /// Guid for Tags as string
                /// </summary>
                public const string Tags = "b6b73142-b9c1-4bf8-a16d-e1c23320b549";

                /// <summary>
                /// Guid for Tags
                /// </summary>
                public static readonly Guid TagsGuid = new Guid(Tags);

                /// <summary>
                /// Guid for List View - Content as string
                /// </summary>
                public const string ListViewContent = "C0808DD3-8133-4E4B-8CE8-E2BEA84A96A4";

                /// <summary>
                /// Guid for List View - Content
                /// </summary>
                public static readonly Guid ListViewContentGuid = new Guid(ListViewContent);

                /// <summary>
                /// Guid for List View - Media as string
                /// </summary>
                public const string ListViewMedia = "3A0156C4-3B8C-4803-BDC1-6871FAA83FFF";

                /// <summary>
                /// Guid for List View - Media
                /// </summary>
                public static readonly Guid ListViewMediaGuid = new Guid(ListViewMedia);

                /// <summary>
                /// Guid for List View - Members as string
                /// </summary>
                public const string ListViewMembers = "AA2C52A0-CE87-4E65-A47C-7DF09358585D";

                /// <summary>
                /// Guid for List View - Members
                /// </summary>
                public static readonly Guid ListViewMembersGuid = new Guid(ListViewMembers);

                /// <summary>
                /// Guid for Date Picker with time as string
                /// </summary>
                public const string DatePickerWithTime = "e4d66c0f-b935-4200-81f0-025f7256b89a";

                /// <summary>
                /// Guid for Date Picker with time
                /// </summary>
                public static readonly Guid DatePickerWithTimeGuid = new Guid(DatePickerWithTime);

                /// <summary>
                /// Guid for Approved Color as string
                /// </summary>
                public const string ApprovedColor = "0225af17-b302-49cb-9176-b9f35cab9c17";

                /// <summary>
                /// Guid for Approved Color
                /// </summary>
                public static readonly Guid ApprovedColorGuid = new Guid(ApprovedColor);

                /// <summary>
                /// Guid for Dropdown multiple as string
                /// </summary>
                public const string DropdownMultiple = "f38f0ac7-1d27-439c-9f3f-089cd8825a53";

                /// <summary>
                /// Guid for Dropdown multiple
                /// </summary>
                public static readonly Guid DropdownMultipleGuid = new Guid(DropdownMultiple);

                /// <summary>
                /// Guid for Radiobox as string
                /// </summary>
                public const string Radiobox = "bb5f57c9-ce2b-4bb9-b697-4caca783a805";

                /// <summary>
                /// Guid for Radiobox
                /// </summary>
                public static readonly Guid RadioboxGuid = new Guid(Radiobox);

                /// <summary>
                /// Guid for Date Picker as string
                /// </summary>
                public const string DatePicker = "5046194e-4237-453c-a547-15db3a07c4e1";

                /// <summary>
                /// Guid for Date Picker
                /// </summary>
                public static readonly Guid DatePickerGuid = new Guid(DatePicker);

                /// <summary>
                /// Guid for Dropdown as string
                /// </summary>
                public const string Dropdown = "0b6a45e7-44ba-430d-9da5-4e46060b9e03";

                /// <summary>
                /// Guid for Dropdown
                /// </summary>
                public static readonly Guid DropdownGuid = new Guid(Dropdown);

                /// <summary>
                /// Guid for Checkbox list as string
                /// </summary>
                public const string CheckboxList = "fbaf13a8-4036-41f2-93a3-974f678c312a";

                /// <summary>
                /// Guid for Checkbox list
                /// </summary>
                public static readonly Guid CheckboxListGuid = new Guid(CheckboxList);

                /// <summary>
                /// Guid for Checkbox as string
                /// </summary>
                public const string Checkbox = "92897bc6-a5f3-4ffe-ae27-f2e7e33dda49";

                /// <summary>
                /// Guid for Checkbox
                /// </summary>
                public static readonly Guid CheckboxGuid = new Guid(Checkbox);

                /// <summary>
                /// Guid for Numeric as string
                /// </summary>
                public const string Numeric = "2e6d3631-066e-44b8-aec4-96f09099b2b5";

                /// <summary>
                /// Guid for Numeric
                /// </summary>
                public static readonly Guid NumericGuid = new Guid(Numeric);

                /// <summary>
                /// Guid for Richtext editor as string
                /// </summary>
                public const string RichtextEditor = "ca90c950-0aff-4e72-b976-a30b1ac57dad";

                /// <summary>
                /// Guid for Richtext editor
                /// </summary>
                public static readonly Guid RichtextEditorGuid = new Guid(RichtextEditor);

                /// <summary>
                /// Guid for Textstring as string
                /// </summary>
                public const string Textstring = "0cc0eba1-9960-42c9-bf9b-60e150b429ae";

                /// <summary>
                /// Guid for Textstring
                /// </summary>
                public static readonly Guid TextstringGuid = new Guid(Textstring);

                /// <summary>
                /// Guid for Textarea as string
                /// </summary>
                public const string Textarea = "c6bac0dd-4ab9-45b1-8e30-e4b619ee5da3";

                /// <summary>
                /// Guid for Textarea
                /// </summary>
                public static readonly Guid TextareaGuid = new Guid(Textarea);

                /// <summary>
                /// Guid for Upload as string
                /// </summary>
                public const string Upload = "84c6b441-31df-4ffe-b67e-67d5bc3ae65a";

                /// <summary>
                /// Guid for Upload
                /// </summary>
                public static readonly Guid UploadGuid = new Guid(Upload);

                /// <summary>
                /// Guid for Label as string
                /// </summary>
                public const string LabelString = "f0bc4bfb-b499-40d6-ba86-058885a5178c";

                /// <summary>
                /// Guid for Label string
                /// </summary>
                public static readonly Guid LabelStringGuid = new Guid(LabelString);

                /// <summary>
                /// Guid for Label as int
                /// </summary>
                public const string LabelInt = "8e7f995c-bd81-4627-9932-c40e568ec788";

                /// <summary>
                /// Guid for Label int
                /// </summary>
                public static readonly Guid LabelIntGuid = new Guid(LabelInt);

                /// <summary>
                /// Guid for Label as big int
                /// </summary>
                public const string LabelBigInt = "930861bf-e262-4ead-a704-f99453565708";

                /// <summary>
                /// Guid for Label big int
                /// </summary>
                public static readonly Guid LabelBigIntGuid = new Guid(LabelBigInt);

                /// <summary>
                /// Guid for Label as date time
                /// </summary>
                public const string LabelDateTime = "0e9794eb-f9b5-4f20-a788-93acd233a7e4";

                /// <summary>
                /// Guid for Label date time
                /// </summary>
                public static readonly Guid LabelDateTimeGuid = new Guid(LabelDateTime);

                /// <summary>
                /// Guid for Label as time
                /// </summary>
                public const string LabelTime = "a97cec69-9b71-4c30-8b12-ec398860d7e8";

                /// <summary>
                /// Guid for Label time
                /// </summary>
                public static readonly Guid LabelTimeGuid = new Guid(LabelTime);

                /// <summary>
                /// Guid for Label as decimal
                /// </summary>
                public const string LabelDecimal = "8f1ef1e1-9de4-40d3-a072-6673f631ca64";

                /// <summary>
                /// Guid for Label decimal
                /// </summary>
                public static readonly Guid LabelDecimalGuid = new Guid(LabelDecimal);
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Android.App;
using Android.Content;
using Android.OS;
using Android.Runtime;
using Android.Views;
using Android.Widget;
using Tax_Informer.Core;
using static Tax_Informer.MyGlobal;
using Android.Webkit;
using Java.Lang;
using Android.Util;
using Android.Graphics.Drawables;
using Android.Support.V4.Widget;
using Android.Support.Design.Widget;
using other = com.refractored.fab;
using Android.Support.V7.App;

namespace Tax_Informer.Activities
{
    /// <summary>
    /// Displays a single article: loads its content (HTML text or an external PDF)
    /// into a WebView, shows related posts in a grid drawer, and offers
    /// "open in browser" and "save offline" actions.
    /// Expects three Intent extras: <see cref="PassArticalOverviewObj"/> (bundle),
    /// <see cref="PassWebsiteKey"/> (string) and <see cref="PassIsOffline"/> (bool).
    /// </summary>
    [Activity(Label = "ArticalActivity")]
    internal class ArticalActivity : AppCompatActivity, IUiArticalResponseHandler
    {
        // Intent extra keys (names mirror the constant identifiers via nameof).
        public const string PassArticalOverviewObj = nameof(PassArticalOverviewObj);
        public const string PassWebsiteKey = nameof(PassWebsiteKey);
        public const string PassIsOffline = nameof(PassIsOffline);

        private ArticalOverview articalOverview = null;
        private bool isOffline = false;
        private string currentWebsiteKey = null;
        private DrawerLayout navDrawerLayout = null;
        private LinearLayout headerLayout = null;
        private TextView articalTitleTextview = null, articalDateTextview = null, articalWebsiteComicTextview = null;
        private WebView articalContentWebview = null;
        private TextView articalContentTextview = null;
        private other.ObservableScrollView scrollView = null;
        private other.FloatingActionButton floatingButton = null;
        private GridView gridview = null;
        private GridviewAdapter adapter = null;
        private TextView optionOpenInBrowser = null;

        // Last article delivered by the callback; read by the grid adapter and click handlers.
        public Artical currentArtical = null;

        /// <summary>Converts density-independent pixels to physical pixels for this device.</summary>
        private int dpToPx(int dp)
        {
            DisplayMetrics displayMetrics = BaseContext.Resources.DisplayMetrics;
            int px = (int)System.Math.Round((double)dp * (double)((float)displayMetrics.Xdpi / (int)DisplayMetrics.DensityDefault));
            return px;
        }

        /// <summary>
        /// IUiArticalResponseHandler callback; marshals the loaded article onto the UI thread.
        /// </summary>
        public void ArticalProcessedCallback(string uid, string url, Artical artical)
        {
            RunOnUiThread(new Action(() =>
            {
                updateArtical(artical);
            }));
        }

        /// <summary>
        /// Binds a loaded article to the UI: inline HTML goes straight into the WebView,
        /// while articles with an external file link are rendered through the bundled
        /// pdf.js viewer asset.
        /// </summary>
        private void updateArtical(Artical artical)
        {
            MyLog.Log(this, nameof(updateArtical) + "...");
            currentArtical = artical; // cache for related-posts grid and click handlers

            if (string.IsNullOrEmpty(artical.ExternalFileLink))
            {
                articalContentWebview.LoadData(artical.HtmlText, "text/html", "utf-8");
                articalContentTextview.Visibility = ViewStates.Gone;
                articalContentWebview.Visibility = ViewStates.Visible;
            }
            else
            {
                MyLog.Log(this, $"Updating artical data extrnal url {artical.MyLink} \t link {artical.ExternalFileLink}" + "...");
                // NOTE(review): UrlDecode on a value appended to a query string looks suspicious —
                // encoding would normally be expected here. Confirm against the pdfviewer asset's
                // `file` parameter handling before changing.
                articalContentWebview.LoadUrl("file:///android_asset/pdfviewer/index.html?file=" + System.Net.WebUtility.UrlDecode(artical.ExternalFileLink));
                articalContentWebview.Settings.DefaultFontSize = 20;
                articalContentTextview.Visibility = ViewStates.Gone;
                articalContentWebview.Visibility = ViewStates.Visible;
                MyLog.Log(this, $"Updating artical data extrnal url {artical.MyLink} \t link {artical.ExternalFileLink}" + "...Done");
            }

            Title = artical.Title;
            articalDateTextview.Text = GetHumanReadableDate(artical.Date);
            articalTitleTextview.Text = artical.Title;
            MyLog.Log(this, nameof(updateArtical) + "...Done");
        }

        protected override void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);
            MyLog.Log(this, nameof(OnCreate) + "...");
            SetContentView(Resource.Layout.artical);
            ActionBar?.Hide();

            articalOverview = null;
            Bundle extras = Intent.Extras;
            Website currentWebsite = null;

            // Every extra is mandatory; bail out of the activity if any is missing.
            // BUGFIX: the website-key and is-offline checks used `||`, which both
            // dereferenced a null `extras` (NRE) and skipped ContainsKey when extras
            // was non-null. All three now use the same `&&` guard as the first check.
            MyLog.Log(this, "Loading bundle data" + "...");
            if (extras != null && extras.ContainsKey(PassArticalOverviewObj))
                articalOverview = new ArticalOverview(extras.GetBundle(PassArticalOverviewObj));
            else { Finish(); return; }

            if (extras != null && extras.ContainsKey(PassWebsiteKey))
                currentWebsiteKey = extras.GetString(PassWebsiteKey);
            else { Finish(); return; }

            if (extras != null && extras.ContainsKey(PassIsOffline))
                isOffline = extras.GetBoolean(PassIsOffline);
            else { Finish(); return; }
            MyLog.Log(this, "Loading bundle data" + "...Done");

            // Kick off the article load; the result arrives via ArticalProcessedCallback.
            if (!isOffline)
            {
                MyLog.Log(this, $"Request artical data online url {articalOverview.LinkOfActualArtical}" + "...");
                analysisModule.ReadArtical(UidGenerator(), currentWebsiteKey, articalOverview, this);
                MyLog.Log(this, $"Request artical data online url {articalOverview.LinkOfActualArtical}" + "...Done");
            }
            else
            {
                MyLog.Log(this, $"Requesting artical data offline url{articalOverview.LinkOfActualArtical}" + "...");
                database.GetArtical(UidGenerator(), articalOverview, this);
                MyLog.Log(this, $"Requesting artical data offline url{articalOverview.LinkOfActualArtical}" + "...Done");
            }

            currentWebsite = Config.GetWebsite(currentWebsiteKey);

            MyLog.Log(this, "Loading webview" + "...");
            articalContentWebview = FindViewById<WebView>(Resource.Id.articalContentWebView);
            articalContentWebview.Settings.DefaultFontSize = 20;
            articalContentWebview.Settings.BuiltInZoomControls = true;
            articalContentWebview.Settings.JavaScriptEnabled = true;
            // Required so the bundled pdf.js asset can load local files.
            articalContentWebview.Settings.AllowFileAccessFromFileURLs = true;
            articalContentWebview.Settings.AllowUniversalAccessFromFileURLs = true;
            articalContentWebview.Visibility = ViewStates.Gone;
            MyLog.Log(this, "Loading webview" + "...Done");

            ChangeStatusBarColor(Window, currentWebsite.Color);

            gridview = FindViewById<GridView>(Resource.Id.relatedPostGridView);
            adapter = new GridviewAdapter() { parent = this };
            gridview.Adapter = adapter;
            gridview.ItemClick += Gridview_ItemClick;

            articalContentTextview = FindViewById<TextView>(Resource.Id.articalContentTextView);
            headerLayout = FindViewById<LinearLayout>(Resource.Id.articalHeaderLinearLayout);
            articalTitleTextview = FindViewById<TextView>(Resource.Id.articalTitleTextView);
            articalDateTextview = FindViewById<TextView>(Resource.Id.articalDateTextView);
            articalWebsiteComicTextview = FindViewById<TextView>(Resource.Id.articalWebsiteComicTextView);
            scrollView = FindViewById<other.ObservableScrollView>(Resource.Id.articalScrollView);
            floatingButton = FindViewById<other.FloatingActionButton>(Resource.Id.articalFab);
            navDrawerLayout = FindViewById<DrawerLayout>(Resource.Id.articalDrawerLayout);

            headerLayout.SetBackgroundColor(Android.Graphics.Color.ParseColor(currentWebsite.Color));
            articalTitleTextview.Text = articalOverview.Title ?? "";
            articalDateTextview.Text = GetHumanReadableDate(articalOverview.Date) ?? "";
            articalWebsiteComicTextview.Text = currentWebsite.ComicText ?? "";
            articalContentTextview.Text = "Loading...";
            articalContentTextview.Gravity = GravityFlags.CenterHorizontal;

            floatingButton.AttachToScrollView(scrollView);
            // The save-offline FAB only makes sense when viewing an online article.
            floatingButton.Visibility = !isOffline ? ViewStates.Visible : ViewStates.Gone;

            optionOpenInBrowser = FindViewById<TextView>(Resource.Id.articalOptionOpenInBrowserTextView);
            optionOpenInBrowser.Click += OptionOpenInBrowser_Click;
            floatingButton.Click += FloatingButton_Click;
            MyLog.Log(this, nameof(OnCreate) + "...Done");
        }

        /// <summary>Persists the current article for offline reading and confirms via Snackbar.</summary>
        private void FloatingButton_Click(object sender, EventArgs e)
        {
            MyLog.Log(this, nameof(FloatingButton_Click) + "...");
            MyLog.Log(this, "Making artical offline" + "...");
            database.MakeOffline(UidGenerator(), currentWebsiteKey, currentArtical, articalOverview);
            MyLog.Log(this, "Making artical offline" + "...Done");
            Snackbar.Make(sender as View, "Offline is now available", (int)ToastLength.Short).Show();
            MyLog.Log(this, nameof(FloatingButton_Click) + "...Done");
        }

        /// <summary>Opens the current article's canonical URL in the device browser.</summary>
        private void OptionOpenInBrowser_Click(object sender, EventArgs e)
        {
            MyLog.Log(this, nameof(OptionOpenInBrowser_Click) + "...");
            navDrawerLayout.CloseDrawer((int)GravityFlags.Right);
            Intent browserIntent = new Intent(Intent.ActionView, Android.Net.Uri.Parse(currentArtical.MyLink));
            StartActivity(browserIntent);
            MyLog.Log(this, nameof(OptionOpenInBrowser_Click) + "...Done");
        }

        /// <summary>Navigates to the related post tapped in the drawer grid.</summary>
        private void Gridview_ItemClick(object sender, AdapterView.ItemClickEventArgs e)
        {
            MyLog.Log(this, nameof(Gridview_ItemClick) + "...");
            navDrawerLayout.CloseDrawer((int)GravityFlags.Right);
            MyLog.Log(this, $"Starting activity artical url {currentArtical?.RelatedPosts?[e.Position]?.LinkOfActualArtical}" + "...");
            StartActivityArtical(this, currentArtical.RelatedPosts[e.Position], currentWebsiteKey);
            MyLog.Log(this, $"Starting activity artical url {currentArtical?.RelatedPosts?[e.Position]?.LinkOfActualArtical}" + "...Done");
            MyLog.Log(this, nameof(Gridview_ItemClick) + "...Done");
        }

        /// <summary>
        /// Adapter backing the related-posts grid; reads titles from the
        /// parent activity's <see cref="currentArtical"/>.
        /// </summary>
        class GridviewAdapter : BaseAdapter
        {
            public ArticalActivity parent { get; set; } = null;

            public override int Count
            {
                get
                {
                    // No article loaded yet (or no related posts) -> empty grid.
                    if (parent?.currentArtical?.RelatedPosts != null)
                        return parent.currentArtical.RelatedPosts.Length;
                    return 0;
                }
            }

            public override Java.Lang.Object GetItem(int position)
            {
                return null;
            }

            public override long GetItemId(int position)
            {
                return position;
            }

            public override View GetView(int position, View convertView, ViewGroup parent)
            {
                if (convertView == null)
                {
                    var layoutInflator = this.parent.LayoutInflater;
                    convertView = layoutInflator.Inflate(Resource.Layout.artical_relatedpost_single_item, parent, false);
                    // View-holder pattern: stash the TextView in Tag to avoid repeated lookups.
                    convertView.Tag = convertView.FindViewById(Resource.Id.relatedPostTextView);
                }
                var textview = convertView.Tag as TextView;
                textview.Text = this.parent.currentArtical.RelatedPosts[position].Title;
                return convertView;
            }
        }
    }
}
// Copyright (c) 2015, Outercurve Foundation. // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // - Redistributions of source code must retain the above copyright notice, this // list of conditions and the following disclaimer. // // - Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // - Neither the name of the Outercurve Foundation nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR // ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON // ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
using System;
using System.Collections.Generic;
using System.Text;

namespace WebsitePanel.Providers.HostedSolution
{
    /// <summary>
    /// Data-transfer object describing a single Exchange organization account
    /// (mailbox, contact, distribution list, public folder, ...).
    /// Properties marked with <c>[LogProperty]</c> are surfaced in audit logging.
    /// </summary>
    public class ExchangeAccount
    {
        [LogProperty]
        public int AccountId { get; set; }

        public int ItemId { get; set; }

        public int PackageId { get; set; }

        public ExchangeAccountType AccountType { get; set; }

        [LogProperty]
        public string AccountName { get; set; }

        [LogProperty]
        public string SamAccountName { get; set; }

        [LogProperty]
        public string DisplayName { get; set; }

        [LogProperty("Email Address")]
        public string PrimaryEmailAddress { get; set; }

        public bool MailEnabledPublicFolder { get; set; }

        public MailboxManagerActions MailboxManagerActions { get; set; }

        public int MailboxPlanId { get; set; }

        public string MailboxPlan { get; set; }

        public string SubscriberNumber { get; set; }

        public string PublicFolderPermission { get; set; }

        public string UserPrincipalName { get; set; }

        public string Notes { get; set; }

        public int ArchivingMailboxPlanId { get; set; }

        public string ArchivingMailboxPlan { get; set; }

        public bool EnableArchiving { get; set; }

        public bool IsVIP { get; set; }

        public int LevelId { get; set; }

        public bool Disabled { get; set; }

        public bool Locked { get; set; }

        /// <summary>
        /// Returns the account name when one is set; otherwise falls back to the
        /// default <see cref="object.ToString"/> (the type name).
        /// </summary>
        public override string ToString()
        {
            return string.IsNullOrEmpty(AccountName) ? base.ToString() : AccountName;
        }
    }
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

using System;
using System.Collections.Generic;
using System.Linq;
using QuantConnect.Data;
using QuantConnect.Data.Market;
using QuantConnect.Interfaces;
using QuantConnect.Securities;
using QuantConnect.Securities.Future;
using QuantConnect.Securities.Option;

namespace QuantConnect.Algorithm.CSharp
{
    /// <summary>
    /// This regression algorithm tests that we only receive the option chain for a single future contract
    /// in the option universe filter.
    /// </summary>
    public class AddFutureOptionSingleOptionChainSelectedInUniverseFilterRegressionAlgorithm : QCAlgorithm, IRegressionAlgorithmDefinition
    {
        private bool _invested;
        private bool _onDataReached;
        private bool _optionFilterRan;
        private readonly HashSet<Symbol> _symbolsReceived = new HashSet<Symbol>();
        private readonly HashSet<Symbol> _expectedSymbolsReceived = new HashSet<Symbol>();
        private readonly Dictionary<Symbol, List<QuoteBar>> _dataReceived = new Dictionary<Symbol, List<QuoteBar>>();

        private Future _es;

        /// <summary>
        /// Adds the ES future (March/June cycle) and its option universe; the option
        /// filter asserts that all contracts in the chain share one underlying/expiry
        /// and records the Symbols we expect to later see in OnData.
        /// </summary>
        public override void Initialize()
        {
            SetStartDate(2020, 1, 4);
            SetEndDate(2020, 1, 6);

            _es = AddFuture(Futures.Indices.SP500EMini, Resolution.Minute, Market.CME);
            _es.SetFilter((futureFilter) =>
            {
                return futureFilter.Expiration(0, 365).ExpirationCycle(new[] { 3, 6 });
            });

            AddFutureOption(_es.Symbol, optionContracts =>
            {
                _optionFilterRan = true;

                var expiry = new HashSet<DateTime>(optionContracts.Select(x => x.Underlying.ID.Date)).SingleOrDefault();
                // Cast to IEnumerable<Symbol> because OptionFilterContract overrides some LINQ operators like `Select` and `Where`
                // and cause it to mutate the underlying Symbol collection when using those operators.
                var symbol = new HashSet<Symbol>(((IEnumerable<Symbol>)optionContracts).Select(x => x.Underlying)).SingleOrDefault();

                // BUGFIX: `expiry` is a non-nullable DateTime, so `expiry == null` was a
                // lifted comparison that is always false (CS0472) and the empty-chain guard
                // never fired. Compare against default(DateTime) instead, which is what
                // SingleOrDefault() returns for an empty sequence.
                if (expiry == default(DateTime) || symbol == null)
                {
                    throw new InvalidOperationException("Expected a single Option contract in the chain, found 0 contracts");
                }

                var enumerator = optionContracts.GetEnumerator();
                while (enumerator.MoveNext())
                {
                    _expectedSymbolsReceived.Add(enumerator.Current);
                }

                return optionContracts;
            });
        }

        /// <summary>
        /// Records every future-option QuoteBar received and, on the first slice that
        /// contains option quotes, opens one future and one option position while
        /// validating the option's margin model requirements.
        /// </summary>
        public override void OnData(Slice data)
        {
            if (!data.HasData)
            {
                return;
            }

            _onDataReached = true;

            var hasOptionQuoteBars = false;
            foreach (var qb in data.QuoteBars.Values)
            {
                if (qb.Symbol.SecurityType != SecurityType.FutureOption)
                {
                    continue;
                }

                hasOptionQuoteBars = true;

                _symbolsReceived.Add(qb.Symbol);
                if (!_dataReceived.ContainsKey(qb.Symbol))
                {
                    _dataReceived[qb.Symbol] = new List<QuoteBar>();
                }

                _dataReceived[qb.Symbol].Add(qb);
            }

            if (_invested || !hasOptionQuoteBars)
            {
                return;
            }

            foreach (var chain in data.OptionChains.Values)
            {
                var futureInvested = false;
                var optionInvested = false;

                foreach (var option in chain.Contracts.Keys)
                {
                    if (futureInvested && optionInvested)
                    {
                        return;
                    }

                    var future = option.Underlying;

                    if (!optionInvested && data.ContainsKey(option))
                    {
                        var optionContract = Securities[option];
                        var marginModel = optionContract.BuyingPowerModel as FuturesOptionsMarginModel;
                        // Fail loudly if the buying power model is not the expected type,
                        // rather than with an opaque NullReferenceException below.
                        if (marginModel == null)
                        {
                            throw new Exception("Expected FuturesOptionsMarginModel buying power model");
                        }
                        if (marginModel.InitialIntradayMarginRequirement == 0
                            || marginModel.InitialOvernightMarginRequirement == 0
                            || marginModel.MaintenanceIntradayMarginRequirement == 0
                            || marginModel.MaintenanceOvernightMarginRequirement == 0)
                        {
                            throw new Exception("Unexpected margin requirements");
                        }

                        if (marginModel.GetInitialMarginRequirement(optionContract, 1) == 0)
                        {
                            throw new Exception("Unexpected Initial Margin requirement");
                        }
                        // Before holding a position the maintenance margin must be zero...
                        if (marginModel.GetMaintenanceMargin(optionContract) != 0)
                        {
                            throw new Exception("Unexpected Maintenance Margin requirement");
                        }

                        MarketOrder(option, 1);
                        _invested = true;
                        optionInvested = true;

                        // ...and non-zero once we are invested.
                        if (marginModel.GetMaintenanceMargin(optionContract) == 0)
                        {
                            throw new Exception("Unexpected Maintenance Margin requirement");
                        }
                    }
                    if (!futureInvested && data.ContainsKey(future))
                    {
                        MarketOrder(future, 1);
                        _invested = true;
                        futureInvested = true;
                    }
                }
            }
        }

        /// <summary>
        /// Validates that the option filter ran, OnData was reached, every Symbol the
        /// filter selected produced data, and each contract received a sufficient
        /// number of distinct data points.
        /// </summary>
        public override void OnEndOfAlgorithm()
        {
            base.OnEndOfAlgorithm();

            if (!_optionFilterRan)
            {
                throw new InvalidOperationException("Option chain filter was never ran");
            }
            if (!_onDataReached)
            {
                throw new Exception("OnData() was never called.");
            }
            if (_symbolsReceived.Count != _expectedSymbolsReceived.Count)
            {
                throw new AggregateException($"Expected {_expectedSymbolsReceived.Count} option contracts Symbols, found {_symbolsReceived.Count}");
            }

            var missingSymbols = new List<Symbol>();
            foreach (var expectedSymbol in _expectedSymbolsReceived)
            {
                if (!_symbolsReceived.Contains(expectedSymbol))
                {
                    missingSymbols.Add(expectedSymbol);
                }
            }

            if (missingSymbols.Count > 0)
            {
                throw new Exception($"Symbols: \"{string.Join(", ", missingSymbols)}\" were not found in OnData");
            }

            foreach (var expectedSymbol in _expectedSymbolsReceived)
            {
                var data = _dataReceived[expectedSymbol];
                // Zero out EndTime so that bars differing only by timestamp de-duplicate.
                var nonDupeDataCount = data.Select(x =>
                {
                    x.EndTime = default(DateTime);
                    return x;
                }).Distinct().Count();

                if (nonDupeDataCount < 1000)
                {
                    throw new Exception($"Received too few data points. Expected >=1000, found {nonDupeDataCount} for {expectedSymbol}");
                }
            }
        }

        /// <summary>
        /// This is used by the regression test system to indicate if the open source Lean repository has the required data to run this algorithm.
        /// </summary>
        public bool CanRunLocally { get; } = true;

        /// <summary>
        /// This is used by the regression test system to indicate which languages this algorithm is written in.
        /// </summary>
        public Language[] Languages { get; } = { Language.CSharp, Language.Python };

        /// <summary>
        /// This is used by the regression test system to indicate what the expected statistics are from running the algorithm
        /// </summary>
        public Dictionary<string, string> ExpectedStatistics => new Dictionary<string, string>
        {
            {"Total Trades", "2"},
            {"Average Win", "0%"},
            {"Average Loss", "0%"},
            {"Compounding Annual Return", "-10.708%"},
            {"Drawdown", "0.200%"},
            {"Expectancy", "0"},
            {"Net Profit", "-0.093%"},
            {"Sharpe Ratio", "-10.594"},
            {"Probabilistic Sharpe Ratio", "0%"},
            {"Loss Rate", "0%"},
            {"Win Rate", "0%"},
            {"Profit-Loss Ratio", "0"},
            {"Alpha", "-0.261"},
            {"Beta", "0.244"},
            {"Annual Standard Deviation", "0.01"},
            {"Annual Variance", "0"},
            {"Information Ratio", "-22.456"},
            {"Tracking Error", "0.032"},
            {"Treynor Ratio", "-0.454"},
            {"Total Fees", "$3.70"},
            {"Estimated Strategy Capacity", "$41000.00"},
            {"Lowest Capacity Asset", "ES 31C3JQTOYO9T0|ES XCZJLC9NOB29"},
            {"Fitness Score", "0.273"},
            {"Kelly Criterion Estimate", "0"},
            {"Kelly Criterion Probability Value", "0"},
            {"Sortino Ratio", "79228162514264337593543950335"},
            {"Return Over Maximum Drawdown", "-123.159"},
            {"Portfolio Turnover", "0.547"},
            {"Total Insights Generated", "0"},
            {"Total Insights Closed", "0"},
            {"Total Insights Analysis Completed", "0"},
            {"Long Insight Count", "0"},
            {"Short Insight Count", "0"},
            {"Long/Short Ratio", "100%"},
            {"Estimated Monthly Alpha Value", "$0"},
            {"Total Accumulated Estimated Alpha Value", "$0"},
            {"Mean Population Estimated Insight Value", "$0"},
            {"Mean Population Direction", "0%"},
            {"Mean Population Magnitude", "0%"},
            {"Rolling Averaged Population Direction", "0%"},
            {"Rolling Averaged Population Magnitude", "0%"},
            {"OrderListHash", "9347e3b610cfa21f7cbd968a0135c8af"}
        };
    }
}
using UnityEngine; using System.Collections; using System.Collections.Generic; using Pathfinding; using Pathfinding.Util; /** Linearly interpolating movement script. * This movement script will follow the path exactly, it uses linear interpolation to move between the waypoints in the path. * This is desirable for some types of games. * It also works in 2D. * * Recommended setup: * * This depends on what type of movement you are aiming for. * If you are aiming for movement where the unit follows the path exactly (you are likely using a grid or point graph) * the default settings on this component should work quite well, however I recommend that you adjust the StartEndModifier * on the Seeker component: set the 'Exact Start Point' field to 'NodeConnection' and the 'Exact End Point' field to 'SnapToNode'. * * If you on the other hand want smoother movement I recommend adding the Simple Smooth Modifier to the GameObject as well. * You may also want to tweak the #rotationSpeed. * * \ingroup movementscripts */ [RequireComponent(typeof(Seeker))] [AddComponentMenu("Pathfinding/AI/AILerp (2D,3D)")] [HelpURL("http://arongranberg.com/astar/docs/class_a_i_lerp.php")] public class AILerp : VersionedMonoBehaviour { /** Determines how often it will search for new paths. * If you have fast moving targets or AIs, you might want to set it to a lower value. * The value is in seconds between path requests. */ public float repathRate = 0.5F; /** Target to move towards. * The AI will try to follow/move towards this target. * It can be a point on the ground where the player has clicked in an RTS for example, or it can be the player object in a zombie game. */ public Transform target; /** Enables or disables searching for paths. * Setting this to false does not stop any active path requests from being calculated or stop it from continuing to follow the current path. * \see #canMove */ public bool canSearch = true; /** Enables or disables movement. 
* \see #canSearch */ public bool canMove = true; /** Speed in world units */ public float speed = 3; /** If true, the AI will rotate to face the movement direction */ public bool enableRotation = true; /** If true, rotation will only be done along the Z axis so that the Y axis is the forward direction of the character. * This is useful for 2D games in which one often want to have the Y axis as the forward direction to get sprites and 2D colliders to work properly. * \shadowimage{aibase_forward_axis.png} */ public bool rotationIn2D = false; /** How quickly to rotate */ public float rotationSpeed = 10; /** If true, some interpolation will be done when a new path has been calculated. * This is used to avoid short distance teleportation. */ public bool interpolatePathSwitches = true; /** How quickly to interpolate to the new path */ public float switchPathInterpolationSpeed = 5; /** Cached Seeker component */ protected Seeker seeker; /** Cached Transform component */ protected Transform tr; /** Time when the last path request was sent */ protected float lastRepath = -9999; /** Current path which is followed */ protected ABPath path; /** True if the end-of-path is reached. * \see TargetReached */ public bool targetReached { get; private set; } /** Only when the previous path has been returned should be search for a new path */ protected bool canSearchAgain = true; /** When a new path was returned, the AI was moving along this ray. * Used to smoothly interpolate between the previous movement and the movement along the new path. * The speed is equal to movement direction. */ protected Vector3 previousMovementOrigin; protected Vector3 previousMovementDirection; protected float previousMovementStartTime = -9999; protected PathInterpolator interpolator = new PathInterpolator(); /** Holds if the Start function has been run. * Used to test if coroutines should be started in OnEnable to prevent calculating paths * in the awake stage (or rather before start on frame 0). 
*/ private bool startHasRun = false; /** Initializes reference variables. * If you override this function you should in most cases call base.Awake () at the start of it. * */ protected override void Awake () { base.Awake(); //This is a simple optimization, cache the transform component lookup tr = transform; seeker = GetComponent<Seeker>(); // Tell the StartEndModifier to ask for our exact position when post processing the path This // is important if we are using prediction and requesting a path from some point slightly ahead // of us since then the start point in the path request may be far from our position when the // path has been calculated. This is also good because if a long path is requested, it may take // a few frames for it to be calculated so we could have moved some distance during that time seeker.startEndModifier.adjustStartPoint = () => tr.position; } /** Starts searching for paths. * If you override this function you should in most cases call base.Start () at the start of it. * \see #Init * \see #RepeatTrySearchPath */ protected virtual void Start () { startHasRun = true; Init(); } /** Called when the component is enabled */ protected virtual void OnEnable () { // Make sure we receive callbacks when paths complete seeker.pathCallback += OnPathComplete; Init(); } void Init () { if (startHasRun) { lastRepath = float.NegativeInfinity; StartCoroutine(RepeatTrySearchPath()); } } public void OnDisable () { // Abort any calculations in progress if (seeker != null) seeker.CancelCurrentPathRequest(); canSearchAgain = true; // Release the current path so that it can be pooled if (path != null) path.Release(this); path = null; // Make sure we no longer receive callbacks when paths complete seeker.pathCallback -= OnPathComplete; } /** Tries to search for a path every #repathRate seconds. 
	 * \see TrySearchPath */
	protected IEnumerator RepeatTrySearchPath () {
		while (true) {
			// TrySearchPath returns how long to wait before the next attempt
			float v = TrySearchPath();
			yield return new WaitForSeconds(v);
		}
	}

	/** Tries to search for a path.
	 * Will search for a new path if there was a sufficient time since the last repath and both
	 * #canSearchAgain and #canSearch are true and there is a target.
	 *
	 * \returns The time to wait until calling this function again (based on #repathRate)
	 */
	public float TrySearchPath () {
		if (Time.time - lastRepath >= repathRate && canSearchAgain && canSearch && target != null) {
			SearchPath();
			return repathRate;
		} else {
			// Not time yet (or not allowed): wait out the remainder of the repath interval
			return Mathf.Max(0, repathRate - (Time.time-lastRepath));
		}
	}

	/** Requests a path to the target.
	 * Some inheriting classes will prevent the path from being requested immediately when
	 * this function is called, for example when the AI is currently traversing a special path segment
	 * in which case it is usually a bad idea to search for a new path.
	 */
	public virtual void SearchPath () {
		ForceSearchPath();
	}

	/** Requests a path to the target.
	 * Bypasses 'is-it-a-good-time-to-request-a-path' checks.
	 * \throws System.InvalidOperationException if #target is null.
	 */
	public virtual void ForceSearchPath () {
		if (target == null) throw new System.InvalidOperationException("Target is null");

		lastRepath = Time.time;
		// This is where we should search to
		var targetPosition = target.position;
		var currentPosition = GetFeetPosition();

		// If we are following a path, start searching from the node we will
		// reach next; this can prevent odd turns right at the start of the path
		if (interpolator.valid) {
			var prevDist = interpolator.distance;
			// Move to the end of the current segment
			interpolator.MoveToSegment(interpolator.segmentIndex, 1);
			currentPosition = interpolator.position;
			// Move back to the original position
			interpolator.distance = prevDist;
		}

		// Blocks further searches until OnPathComplete re-enables them
		canSearchAgain = false;

		// Alternative way of requesting the path
		//ABPath p = ABPath.Construct (currentPosition,targetPoint,null);
		//seeker.StartPath (p);

		// We should search from the current position
		seeker.StartPath(currentPosition, targetPosition);
	}

	/** The end of the path has been reached.
	 * If you want custom logic for when the AI has reached its destination
	 * add it here.
	 * You can also create a new script which inherits from this one
	 * and override the function in that script.
	 */
	public virtual void OnTargetReached () {
	}

	/** Called when a requested path has finished calculation.
	 * A path is first requested by #SearchPath, it is then calculated, probably in the same or the next frame.
	 * Finally it is returned to the seeker which forwards it to this function.
	 * \throws System.Exception if the path is not an ABPath.
	 */
	public virtual void OnPathComplete (Path _p) {
		ABPath p = _p as ABPath;

		if (p == null) throw new System.Exception("This function only handles ABPaths, do not use special path types");

		canSearchAgain = true;

		// Increase the reference count on the path.
		// This is used for path pooling
		p.Claim(this);

		// Path couldn't be calculated for some reason.
		// More info in p.errorLog (debug string)
		if (p.error) {
			p.Release(this);
			return;
		}

		if (interpolatePathSwitches) {
			// Snapshot the previous movement so CalculateNextPosition can blend paths
			ConfigurePathSwitchInterpolation();
		}

		// Release the previous path
		// This is used for path pooling.
		// Note that this will invalidate the interpolator
		// since the vectorPath list will be pooled.
		if (path != null) path.Release(this);

		// Replace the old path
		path = p;
		targetReached = false;

		// Just for the rest of the code to work, if there
		// is only one waypoint in the path add another one
		if (path.vectorPath != null && path.vectorPath.Count == 1) {
			path.vectorPath.Insert(0, GetFeetPosition());
		}

		// Reset some variables
		ConfigureNewPath();
	}

	/** Records where the agent was heading on the previous path.
	 * The stored origin/direction/start-time are later consumed by
	 * #CalculateNextPosition to blend smoothly from the old path to the new one.
	 * If the previous path had (almost) been completed, no blending is set up.
	 */
	protected virtual void ConfigurePathSwitchInterpolation () {
		bool reachedEndOfPreviousPath = interpolator.valid && interpolator.remainingDistance < 0.0001f;

		if (interpolator.valid && !reachedEndOfPreviousPath) {
			previousMovementOrigin = interpolator.position;
			previousMovementDirection = interpolator.tangent.normalized * interpolator.remainingDistance;
			previousMovementStartTime = Time.time;
		} else {
			// Sentinel values: the blend alpha in CalculateNextPosition will be >= 1
			previousMovementOrigin = Vector3.zero;
			previousMovementDirection = Vector3.zero;
			previousMovementStartTime = -9999;
		}
	}

	/** Position of the agent's "feet", used as the search/closest-point anchor.
	 * NOTE(review): simply returns the transform position here; tr is presumably
	 * the cached Transform — defined outside this chunk, confirm in the full class.
	 */
	public virtual Vector3 GetFeetPosition () {
		return tr.position;
	}

	/** Finds the closest point on the current path and configures the #interpolator */
	protected virtual void ConfigureNewPath () {
		var hadValidPath = interpolator.valid;
		var prevTangent = hadValidPath ? interpolator.tangent : Vector3.zero;

		interpolator.SetPath(path.vectorPath);
		interpolator.MoveToClosestPoint(GetFeetPosition());
		if (interpolatePathSwitches && switchPathInterpolationSpeed > 0.01f && hadValidPath) {
			// Back up along the new path proportionally to how much the direction
			// changed, so the path switch does not cause a visible jump
			var correctionFactor = Mathf.Max(-Vector3.Dot(prevTangent.normalized, interpolator.tangent.normalized), 0);
			interpolator.distance -= speed*correctionFactor*(1f/switchPathInterpolationSpeed);
		}
	}

	/** Moves (and optionally rotates) the agent along the path every frame when #canMove is true. */
	protected virtual void Update () {
		if (canMove) {
			Vector3 direction;
			Vector3 nextPos = CalculateNextPosition(out direction);

			// Rotate unless we are really close to the target
			if (enableRotation && direction != Vector3.zero) {
				if (rotationIn2D) {
					// 2D mode rotates around the z axis only
					float angle = Mathf.Atan2(direction.x, -direction.y) * Mathf.Rad2Deg + 180;
					Vector3 euler = tr.eulerAngles;
					euler.z = Mathf.LerpAngle(euler.z, angle, Time.deltaTime * rotationSpeed);
					tr.eulerAngles = euler;
				} else {
					Quaternion rot = tr.rotation;
					Quaternion desiredRot = Quaternion.LookRotation(direction);

					tr.rotation = Quaternion.Slerp(rot, desiredRot, Time.deltaTime * rotationSpeed);
				}
			}

			tr.position = nextPos;
		}
	}

	/** Calculate the AI's next position (one frame in the future).
	 * Also fires #OnTargetReached (once per path) when the end of the path is reached.
	 * \param direction The tangent of the segment the AI is currently traversing. Not normalized.
	 */
	protected virtual Vector3 CalculateNextPosition (out Vector3 direction) {
		if (!interpolator.valid) {
			direction = Vector3.zero;
			return tr.position;
		}

		interpolator.distance += Time.deltaTime * speed;

		if (interpolator.remainingDistance < 0.0001f && !targetReached) {
			targetReached = true;
			OnTargetReached();
		}

		direction = interpolator.tangent;
		// alpha goes 0 -> 1 over the path-switch blend window
		float alpha = switchPathInterpolationSpeed * (Time.time - previousMovementStartTime);
		if (interpolatePathSwitches && alpha < 1f) {
			// Find the approximate position we would be at if we
			// would have continued to follow the previous path
			Vector3 positionAlongPreviousPath = previousMovementOrigin + Vector3.ClampMagnitude(previousMovementDirection, speed * (Time.time - previousMovementStartTime));

			// Interpolate between the position on the current path and the position
			// we would have had if we would have continued along the previous path.
			return Vector3.Lerp(positionAlongPreviousPath, interpolator.position, alpha);
		} else {
			return interpolator.position;
		}
	}
}
// 
// CurvesDialog.cs
// 
// Author:
//       Krzysztof Marecki <marecki.krzysztof@gmail.com>
// 
// Copyright (c) 2010 Krzysztof Marecki
// 
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// 
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// 
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

using System;
using System.Collections.Generic;

using Cairo;

using Pinta.Core;

namespace Pinta
{
	/// <summary>
	/// Dialog that lets the user edit color transfer curves (luminosity or
	/// per-channel RGB) by placing, dragging and removing control points on a
	/// size x size drawing area. The curve between points is spline-interpolated.
	/// </summary>
	public partial class CurvesDialog : Gtk.Dialog
	{
		// Per-channel drawing settings: curve/point color and whether the
		// channel is currently being edited (its check box is active).
		private class ControlPointDrawingInfo
		{
			public Color Color { get; set; }
			public bool IsActive { get; set; }
		}

		//drawing area width and height
		private const int size = 256;
		//control point radius
		private const int radius = 6;

		// number of channels being edited (1 = luminosity, 3 = rgb)
		private int channels;
		//last added control point x;
		private int last_cpx;

		//control points for luminosity transfer mode
		private SortedList<int, int>[] luminosity_cps;
		//control points for rgb transfer mode
		private SortedList<int, int>[] rgb_cps;

		/// <summary>
		/// Control points for the currently selected transfer mode. Each list maps
		/// an x coordinate (0..size-1) to a transfer value (0..size-1, bottom-up).
		/// </summary>
		public SortedList<int, int>[] ControlPoints {
			get {
				return (Mode == ColorTransferMode.Luminosity) ? luminosity_cps : rgb_cps;
			}
			set {
				if (Mode == ColorTransferMode.Luminosity)
					luminosity_cps = value;
				else
					rgb_cps = value;
			}
		}

		/// <summary>Transfer mode selected in the combo box (Rgb or Luminosity).</summary>
		public ColorTransferMode Mode {
			get {
				return (comboMap.Active == 0) ? ColorTransferMode.Rgb : ColorTransferMode.Luminosity;
			}
		}

		public CurvesDialog ()
		{
			this.Build ();

			drawing.DoubleBuffered = true;

			comboMap.Changed += HandleComboMapChanged;
			buttonReset.Clicked += HandleButtonResetClicked;
			checkRed.Toggled += HandleCheckToggled;
			checkGreen.Toggled += HandleCheckToggled;
			checkBlue.Toggled += HandleCheckToggled;
			drawing.ExposeEvent += HandleDrawingExposeEvent;
			drawing.MotionNotifyEvent += HandleDrawingMotionNotifyEvent;
			drawing.LeaveNotifyEvent += HandleDrawingLeaveNotifyEvent;
			drawing.ButtonPressEvent += HandleDrawingButtonPressEvent;

			ResetControlPoints ();
		}

		private void HandleCheckToggled (object o, EventArgs args)
		{
			InvalidateDrawing ();
		}

		void HandleButtonResetClicked (object sender, EventArgs e)
		{
			ResetControlPoints ();
			InvalidateDrawing ();
		}

		// Restores the identity curve: exactly one control point in each corner
		// of every channel.
		private void ResetControlPoints ()
		{
			channels = (Mode == ColorTransferMode.Luminosity) ? 1 : 3;
			ControlPoints = new SortedList<int, int>[channels];

			for (int i = 0; i < channels; i++) {
				SortedList<int, int> list = new SortedList<int, int> ();
				list.Add (0, 0);
				list.Add (size - 1, size - 1);
				ControlPoints [i] = list;
			}
		}

		private void HandleComboMapChanged (object sender, EventArgs e)
		{
			if (ControlPoints == null)
				ResetControlPoints ();

			// the per-channel check boxes only make sense in rgb mode
			bool visible = (Mode == ColorTransferMode.Rgb);
			checkRed.Visible = checkGreen.Visible = checkBlue.Visible = visible;

			InvalidateDrawing ();
		}

		private void InvalidateDrawing ()
		{
			//to invalidate whole drawing area
			drawing.GdkWindow.Invalidate ();
		}

		private void HandleDrawingLeaveNotifyEvent (object o, Gtk.LeaveNotifyEventArgs args)
		{
			InvalidateDrawing ();
		}

		// Yields the control point lists of the channels the user is currently editing.
		private IEnumerable<SortedList<int, int>> GetActiveControlPoints ()
		{
			if (Mode == ColorTransferMode.Luminosity)
				yield return ControlPoints [0];
			else {
				if (checkRed.Active)
					yield return ControlPoints [0];

				if (checkGreen.Active)
					yield return ControlPoints [1];

				if (checkBlue.Active)
					yield return ControlPoints [2];
			}
		}

		// Adds (or moves) a control point at widget coordinates (x, y) on every
		// active channel. y is top-down in the widget; stored values grow bottom-up.
		private void AddControlPoint (int x, int y)
		{
			foreach (var controlPoints in GetActiveControlPoints ()) {
				controlPoints [x] = size - 1 - y;
			}

			last_cpx = x;
		}

		private void HandleDrawingMotionNotifyEvent (object o, Gtk.MotionNotifyEventArgs args)
		{
			int x, y;
			Gdk.ModifierType mask;
			drawing.GdkWindow.GetPointer (out x, out y, out mask);

			if (x < 0 || x >= size || y < 0 || y >= size)
				return;

			// FIX: test the Button1 bit instead of comparing the whole state for
			// equality; the old "== Button1Mask" test failed whenever unrelated
			// modifier bits (Num Lock, Caps Lock, ...) were also set, which broke
			// dragging of control points on many keyboards.
			if ((args.Event.State & Gdk.ModifierType.Button1Mask) != 0) {
				// first and last control point cannot be removed
				if (last_cpx != 0 && last_cpx != size - 1) {
					foreach (var controlPoints in GetActiveControlPoints ()) {
						// Remove is a no-op when the key is absent, so no
						// ContainsKey pre-check (avoids a double lookup).
						controlPoints.Remove (last_cpx);
					}
				}

				AddControlPoint (x, y);
			}

			InvalidateDrawing ();
		}

		private void HandleDrawingButtonPressEvent (object o, Gtk.ButtonPressEventArgs args)
		{
			int x, y;
			Gdk.ModifierType mask;
			drawing.GdkWindow.GetPointer (out x, out y, out mask);

			// left button adds a control point
			if (args.Event.Button == 1) {
				AddControlPoint (x, y);
			}

			// user pressed right button: remove the control point under the pointer
			if (args.Event.Button == 3) {
				foreach (var controlPoints in GetActiveControlPoints ()) {
					for (int i = 0; i < controlPoints.Count; i++) {
						int cpx = controlPoints.Keys [i];
						int cpy = size - 1 - (int)controlPoints.Values [i];

						//we cannot allow user to remove first or last control point
						if (cpx == 0 && cpy == size - 1)
							continue;
						if (cpx == size - 1 && cpy == 0)
							continue;

						if (CheckControlPointProximity (cpx, cpy, x, y)) {
							controlPoints.RemoveAt (i);
							break;
						}
					}
				}
			}

			InvalidateDrawing ();
		}

		private void DrawBorder (Context g)
		{
			g.Rectangle (0, 0, size - 1, size - 1);
			g.LineWidth = 1;
			g.Stroke ();
		}

		// Draws the crosshair following the pointer and updates the coordinate label.
		private void DrawPointerCross (Context g)
		{
			int x, y;
			Gdk.ModifierType mask;
			drawing.GdkWindow.GetPointer (out x, out y, out mask);

			if (x >= 0 && x < size && y >= 0 && y < size) {
				g.LineWidth = 0.5;
				g.MoveTo (x, 0);
				g.LineTo (x, size);
				g.MoveTo (0, y);
				g.LineTo (size, y);
				g.Stroke ();

				this.labelPoint.Text = string.Format ("({0}, {1})", x, y);
			} else
				this.labelPoint.Text = string.Empty;
		}

		// Draws the dashed quarter grid and the identity diagonal.
		private void DrawGrid (Context g)
		{
			g.Color = new Color (0.05, 0.05, 0.05);
			g.SetDash (new double[] { 4, 4 }, 2);
			g.LineWidth = 1;

			for (int i = 1; i < 4; i++) {
				g.MoveTo (i * size / 4, 0);
				g.LineTo (i * size / 4, size);
				g.MoveTo (0, i * size / 4);
				g.LineTo (size, i * size / 4);
			}

			g.MoveTo (0, size - 1);
			g.LineTo (size - 1, 0);
			g.Stroke ();

			g.SetDash (new double[] {}, 0);
		}

		//cpx, cpy - control point's x and y coordinates
		private bool CheckControlPointProximity (int cpx, int cpy, int x, int y)
		{
			return (Math.Sqrt (Math.Pow (cpx - x, 2) + Math.Pow (cpy - y, 2)) < radius);
		}

		// One drawing info per channel, in the same order as ControlPoints.
		private IEnumerator<ControlPointDrawingInfo> GetDrawingInfos ()
		{
			if (Mode == ColorTransferMode.Luminosity)
				yield return new ControlPointDrawingInfo () {
					Color = new Color (0.4, 0.4, 0.4), IsActive = true
				};
			else {
				yield return new ControlPointDrawingInfo () {
					Color = new Color (0.9, 0, 0), IsActive = checkRed.Active
				};
				yield return new ControlPointDrawingInfo () {
					Color = new Color (0, 0.9, 0), IsActive = checkGreen.Active
				};
				yield return new ControlPointDrawingInfo () {
					Color = new Color (0, 0, 0.9), IsActive = checkBlue.Active
				};
			}
		}

		private void DrawControlPoints (Context g)
		{
			int x, y;
			Gdk.ModifierType mask;
			drawing.GdkWindow.GetPointer (out x, out y, out mask);

			var infos = GetDrawingInfos ();

			foreach (var controlPoints in ControlPoints) {
				infos.MoveNext ();
				var info = infos.Current;

				for (int i = 0; i < controlPoints.Count; i++) {
					int cpx = controlPoints.Keys [i];
					int cpy = size - 1 - (int)controlPoints.Values [i];

					Rectangle rect;

					if (info.IsActive) {
						if (CheckControlPointProximity (cpx, cpy, x, y)) {
							// highlight the control point under the pointer
							rect = new Rectangle (cpx - (radius + 2) / 2, cpy - (radius + 2) / 2, radius + 2, radius + 2);
							g.DrawEllipse (rect, new Color (0.2, 0.2, 0.2), 2);
							rect = new Rectangle (cpx - radius / 2, cpy - radius / 2, radius, radius);
							g.FillEllipse (rect, new Color (0.9, 0.9, 0.9));
						} else {
							rect = new Rectangle (cpx - radius / 2, cpy - radius / 2, radius, radius);
							g.DrawEllipse (rect, info.Color, 2);
						}
					}

					rect = new Rectangle (cpx - (radius - 2) / 2, cpy - (radius - 2) / 2, radius - 2, radius - 2);
					g.FillEllipse (rect, info.Color);
				}
			}

			g.Stroke ();
		}

		// Draws the spline-interpolated curve of every channel.
		private void DrawSpline (Context g)
		{
			var infos = GetDrawingInfos ();

			foreach (var controlPoints in ControlPoints) {
				int points = controlPoints.Count;
				SplineInterpolator interpolator = new SplineInterpolator ();
				IList<int> xa = controlPoints.Keys;
				IList<int> ya = controlPoints.Values;
				PointD[] line = new PointD[size];

				for (int i = 0; i < points; i++) {
					interpolator.Add (xa [i], ya [i]);
				}

				for (int i = 0; i < line.Length; i++) {
					line [i].X = (float)i;
					// clamp and flip back into top-down widget coordinates
					line [i].Y = (float)(Utility.Clamp (size - 1 - interpolator.Interpolate (i), 0, size - 1));
				}

				g.LineWidth = 2;
				g.LineJoin = LineJoin.Round;

				g.MoveTo (line [0]);
				for (int i = 1; i < line.Length; i++)
					g.LineTo (line [i]);

				infos.MoveNext ();
				var info = infos.Current;

				g.Color = info.Color;
				// inactive channels are drawn with a thinner line
				g.LineWidth = info.IsActive ? 2 : 1;
				g.Stroke ();
			}
		}

		private void HandleDrawingExposeEvent (object o, Gtk.ExposeEventArgs args)
		{
			using (Context g = Gdk.CairoHelper.Create (drawing.GdkWindow)) {
				DrawBorder (g);
				DrawPointerCross (g);
				DrawSpline (g);
				DrawGrid (g);
				DrawControlPoints (g);
			}
		}
	}
}
using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Net; using System.Runtime; using System.Text; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using Orleans.Runtime.ConsistentRing; using Orleans.Runtime.Counters; using Orleans.Runtime.GrainDirectory; using Orleans.Runtime.LogConsistency; using Orleans.Runtime.Messaging; using Orleans.Runtime.MultiClusterNetwork; using Orleans.Runtime.Providers; using Orleans.Runtime.ReminderService; using Orleans.Runtime.Scheduler; using Orleans.Services; using Orleans.Streams; using Orleans.Transactions; using Orleans.Runtime.Versions; using Orleans.Versions; using Orleans.ApplicationParts; using Orleans.Configuration; using Orleans.Serialization; namespace Orleans.Runtime { /// <summary> /// Orleans silo. /// </summary> public class Silo { /// <summary> Standard name for Primary silo. </summary> public const string PrimarySiloName = "Primary"; /// <summary> Silo Types. </summary> public enum SiloType { /// <summary> No silo type specified. </summary> None = 0, /// <summary> Primary silo. </summary> Primary, /// <summary> Secondary silo. 
</summary> Secondary, } private readonly ILocalSiloDetails siloDetails; private readonly ClusterOptions clusterOptions; private readonly ISiloMessageCenter messageCenter; private readonly OrleansTaskScheduler scheduler; private readonly LocalGrainDirectory localGrainDirectory; private readonly ActivationDirectory activationDirectory; private readonly IncomingMessageAgent incomingAgent; private readonly IncomingMessageAgent incomingSystemAgent; private readonly IncomingMessageAgent incomingPingAgent; private readonly ILogger logger; private TypeManager typeManager; private readonly TaskCompletionSource<int> siloTerminatedTask = new TaskCompletionSource<int>(TaskCreationOptions.RunContinuationsAsynchronously); private readonly SiloStatisticsManager siloStatistics; private readonly InsideRuntimeClient runtimeClient; private IReminderService reminderService; private SystemTarget fallbackScheduler; private readonly IMembershipOracle membershipOracle; private readonly IMultiClusterOracle multiClusterOracle; private readonly ExecutorService executorService; private Watchdog platformWatchdog; private readonly TimeSpan initTimeout; private readonly TimeSpan stopTimeout = TimeSpan.FromMinutes(1); private readonly Catalog catalog; private readonly List<IHealthCheckParticipant> healthCheckParticipants = new List<IHealthCheckParticipant>(); private readonly object lockable = new object(); private readonly GrainFactory grainFactory; private readonly ISiloLifecycleSubject siloLifecycle; private List<GrainService> grainServices = new List<GrainService>(); private readonly ILoggerFactory loggerFactory; /// <summary> /// Gets the type of this /// </summary> internal string Name => this.siloDetails.Name; internal OrleansTaskScheduler LocalScheduler { get { return scheduler; } } internal ILocalGrainDirectory LocalGrainDirectory { get { return localGrainDirectory; } } internal IMultiClusterOracle LocalMultiClusterOracle { get { return multiClusterOracle; } } internal 
IConsistentRingProvider RingProvider { get; private set; } internal ICatalog Catalog => catalog; internal SystemStatus SystemStatus { get; set; } internal IServiceProvider Services { get; } /// <summary> SiloAddress for this silo. </summary> public SiloAddress SiloAddress => this.siloDetails.SiloAddress; /// <summary> /// Silo termination event used to signal shutdown of this silo. /// </summary> public WaitHandle SiloTerminatedEvent // one event for all types of termination (shutdown, stop and fast kill). => ((IAsyncResult)this.siloTerminatedTask.Task).AsyncWaitHandle; public Task SiloTerminated { get { return this.siloTerminatedTask.Task; } } // one event for all types of termination (shutdown, stop and fast kill). private SchedulingContext membershipOracleContext; private SchedulingContext multiClusterOracleContext; private SchedulingContext reminderServiceContext; private LifecycleSchedulingSystemTarget lifecycleSchedulingSystemTarget; /// <summary> /// Initializes a new instance of the <see cref="Silo"/> class. /// </summary> /// <param name="siloDetails">The silo initialization parameters</param> /// <param name="services">Dependency Injection container</param> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope", Justification = "Should not Dispose of messageCenter in this method because it continues to run / exist after this point.")] public Silo(ILocalSiloDetails siloDetails, IServiceProvider services) { string name = siloDetails.Name; // Temporarily still require this. Hopefuly gone when 2.0 is released. this.siloDetails = siloDetails; this.SystemStatus = SystemStatus.Creating; AsynchAgent.IsStarting = true; // todo. use ISiloLifecycle instead? 
var startTime = DateTime.UtcNow; IOptions<ClusterMembershipOptions> clusterMembershipOptions = services.GetRequiredService<IOptions<ClusterMembershipOptions>>(); initTimeout = clusterMembershipOptions.Value.MaxJoinAttemptTime; if (Debugger.IsAttached) { initTimeout = StandardExtensions.Max(TimeSpan.FromMinutes(10), clusterMembershipOptions.Value.MaxJoinAttemptTime); stopTimeout = initTimeout; } var localEndpoint = this.siloDetails.SiloAddress.Endpoint; services.GetService<SerializationManager>().RegisterSerializers(services.GetService<IApplicationPartManager>()); this.Services = services; this.Services.InitializeSiloUnobservedExceptionsHandler(); //set PropagateActivityId flag from node config IOptions<SiloMessagingOptions> messagingOptions = services.GetRequiredService<IOptions<SiloMessagingOptions>>(); RequestContext.PropagateActivityId = messagingOptions.Value.PropagateActivityId; this.loggerFactory = this.Services.GetRequiredService<ILoggerFactory>(); logger = this.loggerFactory.CreateLogger<Silo>(); logger.Info(ErrorCode.SiloGcSetting, "Silo starting with GC settings: ServerGC={0} GCLatencyMode={1}", GCSettings.IsServerGC, Enum.GetName(typeof(GCLatencyMode), GCSettings.LatencyMode)); if (!GCSettings.IsServerGC) { logger.Warn(ErrorCode.SiloGcWarning, "Note: Silo not running with ServerGC turned on - recommend checking app config : <configuration>-<runtime>-<gcServer enabled=\"true\">"); logger.Warn(ErrorCode.SiloGcWarning, "Note: ServerGC only kicks in on multi-core systems (settings enabling ServerGC have no effect on single-core machines)."); } logger.Info(ErrorCode.SiloInitializing, "-------------- Initializing silo on host {0} MachineName {1} at {2}, gen {3} --------------", this.siloDetails.DnsHostName, Environment.MachineName, localEndpoint, this.siloDetails.SiloAddress.Generation); logger.Info(ErrorCode.SiloInitConfig, "Starting silo {0}", name); var siloMessagingOptions = this.Services.GetRequiredService<IOptions<SiloMessagingOptions>>(); 
BufferPool.InitGlobalBufferPool(siloMessagingOptions.Value); try { grainFactory = Services.GetRequiredService<GrainFactory>(); } catch (InvalidOperationException exc) { logger.Error(ErrorCode.SiloStartError, "Exception during Silo.Start, GrainFactory was not registered in Dependency Injection container", exc); throw; } // Performance metrics siloStatistics = Services.GetRequiredService<SiloStatisticsManager>(); // The scheduler scheduler = Services.GetRequiredService<OrleansTaskScheduler>(); healthCheckParticipants.Add(scheduler); runtimeClient = Services.GetRequiredService<InsideRuntimeClient>(); // Initialize the message center messageCenter = Services.GetRequiredService<MessageCenter>(); var dispatcher = this.Services.GetRequiredService<Dispatcher>(); messageCenter.RerouteHandler = dispatcher.RerouteMessage; messageCenter.SniffIncomingMessage = runtimeClient.SniffIncomingMessage; // Now the router/directory service // This has to come after the message center //; note that it then gets injected back into the message center.; localGrainDirectory = Services.GetRequiredService<LocalGrainDirectory>(); // Now the activation directory. 
activationDirectory = Services.GetRequiredService<ActivationDirectory>(); // Now the consistent ring provider RingProvider = Services.GetRequiredService<IConsistentRingProvider>(); catalog = Services.GetRequiredService<Catalog>(); executorService = Services.GetRequiredService<ExecutorService>(); // Now the incoming message agents var messageFactory = this.Services.GetRequiredService<MessageFactory>(); incomingSystemAgent = new IncomingMessageAgent(Message.Categories.System, messageCenter, activationDirectory, scheduler, catalog.Dispatcher, messageFactory, executorService, this.loggerFactory); incomingPingAgent = new IncomingMessageAgent(Message.Categories.Ping, messageCenter, activationDirectory, scheduler, catalog.Dispatcher, messageFactory, executorService, this.loggerFactory); incomingAgent = new IncomingMessageAgent(Message.Categories.Application, messageCenter, activationDirectory, scheduler, catalog.Dispatcher, messageFactory, executorService, this.loggerFactory); membershipOracle = Services.GetRequiredService<IMembershipOracle>(); this.clusterOptions = Services.GetRequiredService<IOptions<ClusterOptions>>().Value; var multiClusterOptions = Services.GetRequiredService<IOptions<MultiClusterOptions>>().Value; if (!multiClusterOptions.HasMultiClusterNetwork) { logger.Info("Skip multicluster oracle creation (no multicluster network configured)"); } else { multiClusterOracle = Services.GetRequiredService<IMultiClusterOracle>(); } this.SystemStatus = SystemStatus.Created; AsynchAgent.IsStarting = false; StringValueStatistic.FindOrCreate(StatisticNames.SILO_START_TIME, () => LogFormatter.PrintDate(startTime)); // this will help troubleshoot production deployment when looking at MDS logs. 
this.siloLifecycle = this.Services.GetRequiredService<ISiloLifecycleSubject>(); // register all lifecycle participants IEnumerable<ILifecycleParticipant<ISiloLifecycle>> lifecycleParticipants = this.Services.GetServices<ILifecycleParticipant<ISiloLifecycle>>(); foreach(ILifecycleParticipant<ISiloLifecycle> participant in lifecycleParticipants) { participant?.Participate(this.siloLifecycle); } // register all named lifecycle participants IKeyedServiceCollection<string, ILifecycleParticipant<ISiloLifecycle>> namedLifecycleParticipantCollection = this.Services.GetService<IKeyedServiceCollection<string,ILifecycleParticipant<ISiloLifecycle>>>(); foreach (ILifecycleParticipant<ISiloLifecycle> participant in namedLifecycleParticipantCollection ?.GetServices(this.Services) ?.Select(s => s.GetService(this.Services))) { participant?.Participate(this.siloLifecycle); } // add self to lifecycle this.Participate(this.siloLifecycle); logger.Info(ErrorCode.SiloInitializingFinished, "-------------- Started silo {0}, ConsistentHashCode {1:X} --------------", SiloAddress.ToLongString(), SiloAddress.GetConsistentHashCode()); } public void Start() { StartAsync(CancellationToken.None).GetAwaiter().GetResult(); } public async Task StartAsync(CancellationToken cancellationToken) { StartTaskWithPerfAnalysis("Start Scheduler", scheduler.Start, new Stopwatch()); // SystemTarget for provider init calls this.lifecycleSchedulingSystemTarget = Services.GetRequiredService<LifecycleSchedulingSystemTarget>(); this.fallbackScheduler = Services.GetRequiredService<FallbackSystemTarget>(); RegisterSystemTarget(lifecycleSchedulingSystemTarget); try { await this.scheduler.QueueTask(() => this.siloLifecycle.OnStart(cancellationToken), this.lifecycleSchedulingSystemTarget.SchedulingContext); } catch (Exception exc) { logger.Error(ErrorCode.SiloStartError, "Exception during Silo.Start", exc); throw; } } private void CreateSystemTargets() { logger.Debug("Creating System Targets for this silo."); 
logger.Debug("Creating {0} System Target", "SiloControl"); var siloControl = ActivatorUtilities.CreateInstance<SiloControl>(Services); RegisterSystemTarget(siloControl); logger.Debug("Creating {0} System Target", "ProtocolGateway"); RegisterSystemTarget(new ProtocolGateway(this.SiloAddress, this.loggerFactory)); logger.Debug("Creating {0} System Target", "DeploymentLoadPublisher"); RegisterSystemTarget(Services.GetRequiredService<DeploymentLoadPublisher>()); logger.Debug("Creating {0} System Target", "RemoteGrainDirectory + CacheValidator"); RegisterSystemTarget(LocalGrainDirectory.RemoteGrainDirectory); RegisterSystemTarget(LocalGrainDirectory.CacheValidator); logger.Debug("Creating {0} System Target", "RemoteClusterGrainDirectory"); RegisterSystemTarget(LocalGrainDirectory.RemoteClusterGrainDirectory); logger.Debug("Creating {0} System Target", "ClientObserverRegistrar + TypeManager"); this.RegisterSystemTarget(this.Services.GetRequiredService<ClientObserverRegistrar>()); var implicitStreamSubscriberTable = Services.GetRequiredService<ImplicitStreamSubscriberTable>(); var versionDirectorManager = this.Services.GetRequiredService<CachedVersionSelectorManager>(); var grainTypeManager = this.Services.GetRequiredService<GrainTypeManager>(); IOptions<TypeManagementOptions> typeManagementOptions = this.Services.GetRequiredService<IOptions<TypeManagementOptions>>(); typeManager = new TypeManager(SiloAddress, grainTypeManager, membershipOracle, LocalScheduler, typeManagementOptions.Value.TypeMapRefreshInterval, implicitStreamSubscriberTable, this.grainFactory, versionDirectorManager, this.loggerFactory); this.RegisterSystemTarget(typeManager); logger.Debug("Creating {0} System Target", "MembershipOracle"); if (this.membershipOracle is SystemTarget) { RegisterSystemTarget((SystemTarget)membershipOracle); } if (multiClusterOracle != null && multiClusterOracle is SystemTarget) { logger.Debug("Creating {0} System Target", "MultiClusterOracle"); 
RegisterSystemTarget((SystemTarget)multiClusterOracle); } logger.Debug("Finished creating System Targets for this silo."); } private async Task InjectDependencies() { healthCheckParticipants.Add(membershipOracle); catalog.SiloStatusOracle = this.membershipOracle; this.membershipOracle.SubscribeToSiloStatusEvents(localGrainDirectory); messageCenter.SiloDeadOracle = this.membershipOracle.IsDeadSilo; // consistentRingProvider is not a system target per say, but it behaves like the localGrainDirectory, so it is here this.membershipOracle.SubscribeToSiloStatusEvents((ISiloStatusListener)RingProvider); this.membershipOracle.SubscribeToSiloStatusEvents(typeManager); this.membershipOracle.SubscribeToSiloStatusEvents(Services.GetRequiredService<DeploymentLoadPublisher>()); this.membershipOracle.SubscribeToSiloStatusEvents(Services.GetRequiredService<ClientObserverRegistrar>()); var reminderTable = Services.GetService<IReminderTable>(); if (reminderTable != null) { logger.Info($"Creating reminder grain service for type={reminderTable.GetType()}"); // Start the reminder service system target reminderService = new LocalReminderService(this, reminderTable, this.initTimeout, this.loggerFactory); ; RegisterSystemTarget((SystemTarget)reminderService); } RegisterSystemTarget(catalog); await scheduler.QueueAction(catalog.Start, catalog.SchedulingContext) .WithTimeout(initTimeout, $"Starting Catalog failed due to timeout {initTimeout}"); // SystemTarget for provider init calls this.fallbackScheduler = Services.GetRequiredService<FallbackSystemTarget>(); RegisterSystemTarget(fallbackScheduler); } private Task OnRuntimeInitializeStart(CancellationToken ct) { lock (lockable) { if (!this.SystemStatus.Equals(SystemStatus.Created)) throw new InvalidOperationException(String.Format("Calling Silo.Start() on a silo which is not in the Created state. 
This silo is in the {0} state.", this.SystemStatus)); this.SystemStatus = SystemStatus.Starting; } logger.Info(ErrorCode.SiloStarting, "Silo Start()"); var processExitHandlingOptions = this.Services.GetService<IOptions<ProcessExitHandlingOptions>>().Value; if(processExitHandlingOptions.FastKillOnProcessExit) AppDomain.CurrentDomain.ProcessExit += HandleProcessExit; //TODO: setup thead pool directly to lifecycle StartTaskWithPerfAnalysis("ConfigureThreadPoolAndServicePointSettings", this.ConfigureThreadPoolAndServicePointSettings, Stopwatch.StartNew()); return Task.CompletedTask; } private void StartTaskWithPerfAnalysis(string taskName, Action task, Stopwatch stopWatch) { stopWatch.Restart(); task.Invoke(); stopWatch.Stop(); this.logger.Info(ErrorCode.SiloStartPerfMeasure, $"{taskName} took {stopWatch.ElapsedMilliseconds} Milliseconds to finish"); } private async Task StartAsyncTaskWithPerfAnalysis(string taskName, Func<Task> task, Stopwatch stopWatch) { stopWatch.Restart(); await task.Invoke(); stopWatch.Stop(); this.logger.Info(ErrorCode.SiloStartPerfMeasure, $"{taskName} took {stopWatch.ElapsedMilliseconds} Milliseconds to finish"); } private async Task OnRuntimeServicesStart(CancellationToken ct) { //TODO: Setup all (or as many as possible) of the class started in this call to work directly with lifecyce var stopWatch = Stopwatch.StartNew(); // The order of these 4 is pretty much arbitrary. 
StartTaskWithPerfAnalysis("Start Message center",messageCenter.Start,stopWatch); StartTaskWithPerfAnalysis("Start Incoming message agents", IncomingMessageAgentsStart, stopWatch); void IncomingMessageAgentsStart() { incomingPingAgent.Start(); incomingSystemAgent.Start(); incomingAgent.Start(); } StartTaskWithPerfAnalysis("Start local grain directory", LocalGrainDirectory.Start,stopWatch); StartTaskWithPerfAnalysis("Init implicit stream subscribe table", InitImplicitStreamSubscribeTable, stopWatch); void InitImplicitStreamSubscribeTable() { // Initialize the implicit stream subscribers table. var implicitStreamSubscriberTable = Services.GetRequiredService<ImplicitStreamSubscriberTable>(); var grainTypeManager = Services.GetRequiredService<GrainTypeManager>(); implicitStreamSubscriberTable.InitImplicitStreamSubscribers(grainTypeManager.GrainClassTypeData.Select(t => t.Value.Type).ToArray()); } this.runtimeClient.CurrentStreamProviderRuntime = this.Services.GetRequiredService<SiloProviderRuntime>(); // This has to follow the above steps that start the runtime components await StartAsyncTaskWithPerfAnalysis("Create system targets and inject dependencies", () => { CreateSystemTargets(); return InjectDependencies(); }, stopWatch); // Validate the configuration. // TODO - refactor validation - jbragg //GlobalConfig.Application.ValidateConfiguration(logger); } private async Task OnRuntimeGrainServicesStart(CancellationToken ct) { var stopWatch = Stopwatch.StartNew(); // Load and init grain services before silo becomes active. await StartAsyncTaskWithPerfAnalysis("Init grain services", () => CreateGrainServices(), stopWatch); this.membershipOracleContext = (this.membershipOracle as SystemTarget)?.SchedulingContext ?? 
this.fallbackScheduler.SchedulingContext; await StartAsyncTaskWithPerfAnalysis("Starting local silo status oracle", StartMembershipOracle, stopWatch); async Task StartMembershipOracle() { await scheduler.QueueTask(() => this.membershipOracle.Start(), this.membershipOracleContext) .WithTimeout(initTimeout, $"Starting MembershipOracle failed due to timeout {initTimeout}"); logger.Debug("Local silo status oracle created successfully."); } var versionStore = Services.GetService<IVersionStore>(); await StartAsyncTaskWithPerfAnalysis("Init type manager", () => scheduler .QueueTask(() => this.typeManager.Initialize(versionStore), this.typeManager.SchedulingContext) .WithTimeout(this.initTimeout, $"TypeManager Initializing failed due to timeout {initTimeout}"), stopWatch); //if running in multi cluster scenario, start the MultiClusterNetwork Oracle if (this.multiClusterOracle != null) { await StartAsyncTaskWithPerfAnalysis("Start multicluster oracle", StartMultiClusterOracle, stopWatch); async Task StartMultiClusterOracle() { logger.Info("Starting multicluster oracle with my ServiceId={0} and ClusterId={1}.", this.clusterOptions.ServiceId, this.clusterOptions.ClusterId); this.multiClusterOracleContext = (multiClusterOracle as SystemTarget)?.SchedulingContext ?? 
this.fallbackScheduler.SchedulingContext; await scheduler.QueueTask(() => multiClusterOracle.Start(), multiClusterOracleContext) .WithTimeout(initTimeout, $"Starting MultiClusterOracle failed due to timeout {initTimeout}"); logger.Debug("multicluster oracle created successfully."); } } try { StatisticsOptions statisticsOptions = Services.GetRequiredService<IOptions<StatisticsOptions>>().Value; StartTaskWithPerfAnalysis("Start silo statistics", () => this.siloStatistics.Start(statisticsOptions), stopWatch); logger.Debug("Silo statistics manager started successfully."); // Finally, initialize the deployment load collector, for grains with load-based placement await StartAsyncTaskWithPerfAnalysis("Start deployment load collector", StartDeploymentLoadCollector, stopWatch); async Task StartDeploymentLoadCollector() { var deploymentLoadPublisher = Services.GetRequiredService<DeploymentLoadPublisher>(); await this.scheduler.QueueTask(deploymentLoadPublisher.Start, deploymentLoadPublisher.SchedulingContext) .WithTimeout(this.initTimeout, $"Starting DeploymentLoadPublisher failed due to timeout {initTimeout}"); logger.Debug("Silo deployment load publisher started successfully."); } // Start background timer tick to watch for platform execution stalls, such as when GC kicks in this.platformWatchdog = new Watchdog(statisticsOptions.LogWriteInterval, this.healthCheckParticipants, this.executorService, this.loggerFactory); this.platformWatchdog.Start(); if (this.logger.IsEnabled(LogLevel.Debug)) { logger.Debug("Silo platform watchdog started successfully."); } } catch (Exception exc) { this.SafeExecute(() => this.logger.Error(ErrorCode.Runtime_Error_100330, String.Format("Error starting silo {0}. 
Going to FastKill().", this.SiloAddress), exc)); throw; } if (logger.IsEnabled(LogLevel.Debug)) { logger.Debug("Silo.Start complete: System status = {0}", this.SystemStatus); } } private async Task OnBecomeActiveStart(CancellationToken ct) { var stopWatch = Stopwatch.StartNew(); StartTaskWithPerfAnalysis("Start gateway", StartGateway, stopWatch); void StartGateway() { // Now that we're active, we can start the gateway var mc = this.messageCenter as MessageCenter; mc?.StartGateway(this.Services.GetRequiredService<ClientObserverRegistrar>()); logger.Debug("Message gateway service started successfully."); } await StartAsyncTaskWithPerfAnalysis("Starting local silo status oracle", BecomeActive, stopWatch); async Task BecomeActive() { await scheduler.QueueTask(this.membershipOracle.BecomeActive, this.membershipOracleContext) .WithTimeout(initTimeout, $"MembershipOracle activating failed due to timeout {initTimeout}"); logger.Debug("Local silo status oracle became active successfully."); } this.SystemStatus = SystemStatus.Running; } private async Task OnActiveStart(CancellationToken ct) { var stopWatch = Stopwatch.StartNew(); if (this.reminderService != null) { await StartAsyncTaskWithPerfAnalysis("Start reminder service", StartReminderService, stopWatch); async Task StartReminderService() { // so, we have the view of the membership in the consistentRingProvider. We can start the reminder service this.reminderServiceContext = (this.reminderService as SystemTarget)?.SchedulingContext ?? 
                        this.fallbackScheduler.SchedulingContext;
                    await this.scheduler.QueueTask(this.reminderService.Start, this.reminderServiceContext)
                        .WithTimeout(this.initTimeout, $"Starting ReminderService failed due to timeout {initTimeout}");
                    this.logger.Debug("Reminder service started successfully.");
                }
            }

            // Grain services were registered earlier (OnRuntimeGrainServicesStart); start them now.
            foreach (var grainService in grainServices)
            {
                await StartGrainService(grainService);
            }
        }

        // Registers every IGrainService resolved from the container as a system target.
        private async Task CreateGrainServices()
        {
            var grainServices = this.Services.GetServices<IGrainService>();
            foreach (var grainService in grainServices)
            {
                await RegisterGrainService(grainService);
            }
        }

        // Registers a single grain service as a system target, records it in the local
        // grainServices list, and runs its Init on the silo scheduler (bounded by initTimeout).
        private async Task RegisterGrainService(IGrainService service)
        {
            var grainService = (GrainService)service;
            RegisterSystemTarget(grainService);
            grainServices.Add(grainService);
            await this.scheduler.QueueTask(() => grainService.Init(Services), grainService.SchedulingContext).WithTimeout(this.initTimeout, $"GrainService Initializing failed due to timeout {initTimeout}");
            logger.Info($"Grain Service {service.GetType().FullName} registered successfully.");
        }

        // Starts a previously registered grain service on the silo scheduler (bounded by initTimeout).
        private async Task StartGrainService(IGrainService service)
        {
            var grainService = (GrainService)service;
            await this.scheduler.QueueTask(grainService.Start, grainService.SchedulingContext).WithTimeout(this.initTimeout, $"Starting GrainService failed due to timeout {initTimeout}");
            logger.Info($"Grain Service {service.GetType().FullName} started successfully.");
        }

        // Applies PerformanceTuningOptions to the .NET ThreadPool minimums and to
        // ServicePointManager (Azure-storage-friendly connection settings).
        private void ConfigureThreadPoolAndServicePointSettings()
        {
            PerformanceTuningOptions performanceTuningOptions = Services.GetRequiredService<IOptions<PerformanceTuningOptions>>().Value;
            if (performanceTuningOptions.MinDotNetThreadPoolSize > 0)
            {
                int workerThreads;
                int completionPortThreads;
                ThreadPool.GetMinThreads(out workerThreads, out completionPortThreads);
                if (performanceTuningOptions.MinDotNetThreadPoolSize > workerThreads ||
                    performanceTuningOptions.MinDotNetThreadPoolSize > completionPortThreads)
                {
                    // if at least one of the new values is larger, set the new min values to be the larger of the prev. and new config value.
                    int newWorkerThreads = Math.Max(performanceTuningOptions.MinDotNetThreadPoolSize, workerThreads);
                    int newCompletionPortThreads = Math.Max(performanceTuningOptions.MinDotNetThreadPoolSize, completionPortThreads);
                    bool ok = ThreadPool.SetMinThreads(newWorkerThreads, newCompletionPortThreads);
                    if (ok)
                    {
                        logger.Info(ErrorCode.SiloConfiguredThreadPool,
                                    "Configured ThreadPool.SetMinThreads() to values: {0},{1}. Previous values are: {2},{3}.",
                                    newWorkerThreads, newCompletionPortThreads, workerThreads, completionPortThreads);
                    }
                    else
                    {
                        logger.Warn(ErrorCode.SiloFailedToConfigureThreadPool,
                                    "Failed to configure ThreadPool.SetMinThreads(). Tried to set values to: {0},{1}. Previous values are: {2},{3}.",
                                    newWorkerThreads, newCompletionPortThreads, workerThreads, completionPortThreads);
                    }
                }
            }

            // Set .NET ServicePointManager settings to optimize throughput performance when using Azure storage
            // http://blogs.msdn.com/b/windowsazurestorage/archive/2010/06/25/nagle-s-algorithm-is-not-friendly-towards-small-requests.aspx
            logger.Info(ErrorCode.SiloConfiguredServicePointManager,
                        "Configured .NET ServicePointManager to Expect100Continue={0}, DefaultConnectionLimit={1}, UseNagleAlgorithm={2} to improve Azure storage performance.",
                        performanceTuningOptions.Expect100Continue, performanceTuningOptions.DefaultConnectionLimit, performanceTuningOptions.UseNagleAlgorithm);
            ServicePointManager.Expect100Continue = performanceTuningOptions.Expect100Continue;
            ServicePointManager.DefaultConnectionLimit = performanceTuningOptions.DefaultConnectionLimit;
            ServicePointManager.UseNagleAlgorithm = performanceTuningOptions.UseNagleAlgorithm;
        }

        /// <summary>
        /// Gracefully stop the run time system only, but not the application.
        /// Applications requests would be abruptly terminated, while the internal system state gracefully stopped and saved as much as possible.
        /// Grains are not deactivated.
        /// </summary>
        public void Stop()
        {
            // A pre-cancelled token makes StopAsync take the non-graceful "Stop()" path.
            var cancellationSource = new CancellationTokenSource();
            cancellationSource.Cancel();
            StopAsync(cancellationSource.Token).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Gracefully stop the run time system and the application.
        /// All grains will be properly deactivated.
        /// All in-flight applications requests would be awaited and finished gracefully.
        /// </summary>
        public void Shutdown()
        {
            // An uncancelled token selects the graceful shutdown path in StopAsync.
            StopAsync(CancellationToken.None).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Gracefully stop the run time system only, but not the application.
        /// Applications requests would be abruptly terminated, while the internal system state gracefully stopped and saved as much as possible.
        /// </summary>
        public async Task StopAsync(CancellationToken cancellationToken)
        {
            // Cancelled token => fast "Stop()"; uncancelled => graceful "Shutdown()".
            bool gracefully = !cancellationToken.IsCancellationRequested;
            string operation = gracefully ? "Shutdown()" : "Stop()";
            bool stopAlreadyInProgress = false;

            // Transition the silo state machine under the lock; only one caller wins.
            lock (lockable)
            {
                if (this.SystemStatus.Equals(SystemStatus.Stopping) ||
                    this.SystemStatus.Equals(SystemStatus.ShuttingDown) ||
                    this.SystemStatus.Equals(SystemStatus.Terminated))
                {
                    stopAlreadyInProgress = true;
                    // Drop through to wait below
                }
                else if (!this.SystemStatus.Equals(SystemStatus.Running))
                {
                    throw new InvalidOperationException(String.Format("Calling Silo.{0} on a silo which is not in the Running state. This silo is in the {1} state.", operation, this.SystemStatus));
                }
                else
                {
                    if (gracefully)
                        this.SystemStatus = SystemStatus.ShuttingDown;
                    else
                        this.SystemStatus = SystemStatus.Stopping;
                }
            }

            if (stopAlreadyInProgress)
            {
                // Another caller is already stopping this silo: poll until Terminated,
                // then await the termination task so this call also observes completion.
                logger.Info(ErrorCode.SiloStopInProgress, "Silo termination is in progress - Will wait for it to finish");
                var pause = TimeSpan.FromSeconds(1);
                while (!this.SystemStatus.Equals(SystemStatus.Terminated))
                {
                    logger.Info(ErrorCode.WaitingForSiloStop, "Waiting {0} for termination to complete", pause);
                    Thread.Sleep(pause);
                }

                await this.siloTerminatedTask.Task;
                return;
            }

            try
            {
                // Run the lifecycle stop pipeline on the silo scheduler.
                await this.scheduler.QueueTask(() => this.siloLifecycle.OnStop(cancellationToken), this.lifecycleSchedulingSystemTarget.SchedulingContext);
            }
            finally
            {
                // Stop the scheduler last, whether or not the lifecycle stop succeeded.
                SafeExecute(scheduler.Stop);
                SafeExecute(scheduler.PrintStatistics);
            }
        }

        // Runtime-services stage teardown: stop accepting application work and speed up
        // directory handoff before deeper teardown happens in later stages.
        private Task OnRuntimeServicesStop(CancellationToken cancellationToken)
        {
            // Start rejecting all silo to silo application messages
            SafeExecute(messageCenter.BlockApplicationMessages);

            // Stop scheduling/executing application turns
            SafeExecute(scheduler.StopApplicationTurns);

            // Directory: Speed up directory handoff
            // will be started automatically when directory receives SiloStatusChangeNotification(Stopping)
            SafeExecute(() => LocalGrainDirectory.StopPreparationCompletion.WaitWithThrow(stopTimeout));

            return Task.CompletedTask;
        }

        private Task OnRuntimeInitializeStop(CancellationToken ct)
        {
            // 10, 11, 12: Write Dead in the table, Drain scheduler, Stop msg center, ...
logger.Info(ErrorCode.SiloStopped, "Silo is Stopped()"); SafeExecute(() => scheduler.QueueTask( this.membershipOracle.KillMyself, this.membershipOracleContext) .WaitWithThrow(stopTimeout)); // incoming messages SafeExecute(incomingSystemAgent.Stop); SafeExecute(incomingPingAgent.Stop); SafeExecute(incomingAgent.Stop); // timers if (platformWatchdog != null) SafeExecute(platformWatchdog.Stop); // Silo may be dying before platformWatchdog was set up SafeExecute(activationDirectory.PrintActivationDirectory); SafeExecute(messageCenter.Stop); SafeExecute(siloStatistics.Stop); SafeExecute(() => this.SystemStatus = SystemStatus.Terminated); SafeExecute(() => (this.Services as IDisposable)?.Dispose()); // Setting the event should be the last thing we do. // Do nothing after that! this.siloTerminatedTask.SetResult(0); return Task.CompletedTask; } private async Task OnBecomeActiveStop(CancellationToken ct) { bool gracefully = !ct.IsCancellationRequested; string operation = gracefully ? "Shutdown()" : "Stop()"; try { if (gracefully) { logger.Info(ErrorCode.SiloShuttingDown, "Silo starting to Shutdown()"); // Write "ShutDown" state in the table + broadcast gossip msgs to re-read the table to everyone await scheduler.QueueTask(this.membershipOracle.ShutDown, this.membershipOracleContext) .WithTimeout(stopTimeout, $"MembershipOracle Shutting down failed due to timeout {stopTimeout}"); // Deactivate all grains SafeExecute(() => catalog.DeactivateAllActivations().WaitWithThrow(stopTimeout)); } else { logger.Info(ErrorCode.SiloStopping, "Silo starting to Stop()"); // Write "Stopping" state in the table + broadcast gossip msgs to re-read the table to everyone await scheduler.QueueTask(this.membershipOracle.Stop, this.membershipOracleContext) .WithTimeout(stopTimeout, $"Stopping MembershipOracle faield due to timeout {stopTimeout}"); } } catch (Exception exc) { logger.Error(ErrorCode.SiloFailedToStopMembership, String.Format("Failed to {0} membership oracle. 
About to FastKill this silo.", operation), exc); return; // will go to finally } // Stop the gateway SafeExecute(messageCenter.StopAcceptingClientMessages); } private async Task OnActiveStop(CancellationToken ct) { if (reminderService != null) { // 2: Stop reminder service await scheduler.QueueTask(reminderService.Stop, this.reminderServiceContext) .WithTimeout(stopTimeout, $"Stopping ReminderService failed due to timeout {stopTimeout}"); } foreach (var grainService in grainServices) { await this.scheduler.QueueTask(grainService.Stop, grainService.SchedulingContext).WithTimeout(this.stopTimeout, $"Stopping GrainService failed due to timeout {initTimeout}"); if (this.logger.IsEnabled(LogLevel.Debug)) { logger.Debug(String.Format("{0} Grain Service with Id {1} stopped successfully.", grainService.GetType().FullName, grainService.GetPrimaryKeyLong(out string ignored))); } } } private void SafeExecute(Action action) { Utils.SafeExecute(action, logger, "Silo.Stop"); } private void HandleProcessExit(object sender, EventArgs e) { // NOTE: We need to minimize the amount of processing occurring on this code path -- we only have under approx 2-3 seconds before process exit will occur this.logger.Warn(ErrorCode.Runtime_Error_100220, "Process is exiting"); this.Stop(); } internal void RegisterSystemTarget(SystemTarget target) { var providerRuntime = this.Services.GetRequiredService<SiloProviderRuntime>(); providerRuntime.RegisterSystemTarget(target); } /// <summary> Return dump of diagnostic data from this silo. 
</summary> /// <param name="all"></param> /// <returns>Debug data for this silo.</returns> public string GetDebugDump(bool all = true) { var sb = new StringBuilder(); foreach (var systemTarget in activationDirectory.AllSystemTargets()) sb.AppendFormat("System target {0}:", ((ISystemTargetBase)systemTarget).GrainId.ToString()).AppendLine(); var enumerator = activationDirectory.GetEnumerator(); while(enumerator.MoveNext()) { Utils.SafeExecute(() => { var activationData = enumerator.Current.Value; var workItemGroup = scheduler.GetWorkItemGroup(activationData.SchedulingContext); if (workItemGroup == null) { sb.AppendFormat("Activation with no work item group!! Grain {0}, activation {1}.", activationData.Grain, activationData.ActivationId); sb.AppendLine(); return; } if (all || activationData.State.Equals(ActivationState.Valid)) { sb.AppendLine(workItemGroup.DumpStatus()); sb.AppendLine(activationData.DumpStatus()); } }); } logger.Info(ErrorCode.SiloDebugDump, sb.ToString()); return sb.ToString(); } /// <summary> Object.ToString override -- summary info for this silo. 
</summary> public override string ToString() { return localGrainDirectory.ToString(); } private void Participate(ISiloLifecycle lifecycle) { lifecycle.Subscribe<Silo>(ServiceLifecycleStage.RuntimeInitialize, (ct) => Task.Run(() => OnRuntimeInitializeStart(ct)), (ct) => Task.Run(() => OnRuntimeInitializeStop(ct))); lifecycle.Subscribe<Silo>(ServiceLifecycleStage.RuntimeServices, (ct) => Task.Run(() => OnRuntimeServicesStart(ct)), (ct) => Task.Run(() => OnRuntimeServicesStop(ct))); lifecycle.Subscribe<Silo>(ServiceLifecycleStage.RuntimeGrainServices, (ct) => Task.Run(() => OnRuntimeGrainServicesStart(ct))); lifecycle.Subscribe<Silo>(ServiceLifecycleStage.BecomeActive, (ct) => Task.Run(() => OnBecomeActiveStart(ct)), (ct) => Task.Run(() => OnBecomeActiveStop(ct))); lifecycle.Subscribe<Silo>(ServiceLifecycleStage.Active, (ct) => Task.Run(() => OnActiveStart(ct)), (ct) => Task.Run(() => OnActiveStop(ct))); } } // A dummy system target for fallback scheduler internal class FallbackSystemTarget : SystemTarget { public FallbackSystemTarget(ILocalSiloDetails localSiloDetails, ILoggerFactory loggerFactory) : base(Constants.FallbackSystemTargetId, localSiloDetails.SiloAddress, loggerFactory) { } } // A dummy system target for fallback scheduler internal class LifecycleSchedulingSystemTarget : SystemTarget { public LifecycleSchedulingSystemTarget(ILocalSiloDetails localSiloDetails, ILoggerFactory loggerFactory) : base(Constants.LifecycleSchedulingSystemTargetId, localSiloDetails.SiloAddress, loggerFactory) { } } }
//------------------------------------------------------------------------------ // <copyright file="Processor.cs" company="Microsoft"> // Copyright (c) Microsoft Corporation. All rights reserved. // </copyright> // <owner current="true" primary="true">[....]</owner> //------------------------------------------------------------------------------ namespace System.Xml.Xsl.XsltOld { using Res = System.Xml.Utils.Res; using System.Globalization; using System.Diagnostics; using System.IO; using System.Xml.XPath; using MS.Internal.Xml.XPath; using System.Text; using System.Collections; using System.Collections.Generic; using System.Xml.Xsl.XsltOld.Debugger; using System.Reflection; using System.Security; internal sealed class Processor : IXsltProcessor { // // Static constants // const int StackIncrement = 10; // // Execution result // internal enum ExecResult { Continue, // Continues next iteration immediately Interrupt, // Returns to caller, was processed enough Done // Execution finished } internal enum OutputResult { Continue, Interrupt, Overflow, Error, Ignore } private ExecResult execResult; // // Compiled stylesheet // private Stylesheet stylesheet; // Root of import tree of template managers private RootAction rootAction; private Key[] keyList; private List<TheQuery> queryStore; public PermissionSet permissions; // used by XsltCompiledContext in document and extension functions // // Document Being transformed // private XPathNavigator document; // // Execution action stack // private HWStack actionStack; private HWStack debuggerStack; // // Register for returning value from calling nested action // private StringBuilder sharedStringBuilder; // // Output related member variables // int ignoreLevel; StateMachine xsm; RecordBuilder builder; XsltOutput output; XmlNameTable nameTable = new NameTable(); XmlResolver resolver; #pragma warning disable 618 XsltArgumentList args; #pragma warning restore 618 Hashtable scriptExtensions; ArrayList numberList; // // Template 
lookup action // TemplateLookupAction templateLookup = new TemplateLookupAction(); private IXsltDebugger debugger; Query[] queryList; private ArrayList sortArray; private Hashtable documentCache; // NOTE: ValueOf() can call Matches() through XsltCompileContext.PreserveWhitespace(), // that's why we use two different contexts here, valueOfContext and matchesContext private XsltCompileContext valueOfContext; private XsltCompileContext matchesContext; internal XPathNavigator Current { get { ActionFrame frame = (ActionFrame) this.actionStack.Peek(); return frame != null ? frame.Node : null; } } internal ExecResult ExecutionResult { get { return this.execResult; } set { Debug.Assert(this.execResult == ExecResult.Continue); this.execResult = value; } } internal Stylesheet Stylesheet { get { return this.stylesheet; } } internal XmlResolver Resolver { get { Debug.Assert(this.resolver != null, "Constructor should create it if null passed"); return this.resolver; } } internal ArrayList SortArray { get { Debug.Assert(this.sortArray != null, "InitSortArray() wasn't called"); return this.sortArray; } } internal Key[] KeyList { get { return this.keyList; } } internal XPathNavigator GetNavigator(Uri ruri) { XPathNavigator result = null; if (documentCache != null) { result = documentCache[ruri] as XPathNavigator; if (result != null) { return result.Clone(); } } else { documentCache = new Hashtable(); } Object input = resolver.GetEntity(ruri, null, null); if (input is Stream) { XmlTextReaderImpl tr = new XmlTextReaderImpl(ruri.ToString(), (Stream) input); { tr.XmlResolver = this.resolver; } // reader is closed by Compiler.LoadDocument() result = ((IXPathNavigable)Compiler.LoadDocument(tr)).CreateNavigator(); } else if (input is XPathNavigator){ result = (XPathNavigator) input; } else { throw XsltException.Create(Res.Xslt_CantResolve, ruri.ToString()); } documentCache[ruri] = result.Clone(); return result; } internal void AddSort(Sort sortinfo) { Debug.Assert(this.sortArray != null, 
                "InitSortArray() wasn't called");
            this.sortArray.Add(sortinfo);
        }

        // Lazily creates the sort-key list, or clears it for reuse between xsl:sort runs.
        internal void InitSortArray() {
            if (this.sortArray == null) {
                this.sortArray = new ArrayList();
            }
            else {
                this.sortArray.Clear();
            }
        }

        // Fetches a global stylesheet parameter from the XsltArgumentList and normalizes
        // it to one of the XPath types: node-set/navigator, boolean, number, or string.
        internal object GetGlobalParameter(XmlQualifiedName qname) {
            object parameter = args.GetParam(qname.Name, qname.Namespace);
            if (parameter == null) {
                return null;
            }
            // Already one of the four supported XPath types: keep as-is.
            if (
                parameter is XPathNodeIterator || parameter is XPathNavigator ||
                parameter is Boolean || parameter is Double || parameter is String
            ) {
                // doing nothing
            }
            else if (
                parameter is Int16 || parameter is UInt16 ||
                parameter is Int32 || parameter is UInt32 ||
                parameter is Int64 || parameter is UInt64 ||
                parameter is Single || parameter is Decimal
            ) {
                // Numeric CLR types collapse to the XPath number type (double).
                parameter = XmlConvert.ToXPathDouble(parameter);
            }
            else {
                // Anything else becomes its string representation.
                parameter = parameter.ToString();
            }
            return parameter;
        }

        internal object GetExtensionObject(string nsUri) {
            return args.GetExtensionObject(nsUri);
        }

        // Compiled msxsl:script extension objects, keyed by namespace URI.
        internal object GetScriptObject(string nsUri) {
            return scriptExtensions[nsUri];
        }

        internal RootAction RootAction {
            get { return this.rootAction; }
        }

        internal XPathNavigator Document {
            get { return this.document; }
        }

#if DEBUG
        // Guards against re-entrant use of the shared StringBuilder (DEBUG builds only).
        private bool stringBuilderLocked = false;
#endif

        // Returns the single reusable StringBuilder, cleared; callers must pair with
        // ReleaseSharedStringBuilder() before the next GetSharedStringBuilder() call.
        internal StringBuilder GetSharedStringBuilder() {
#if DEBUG
            Debug.Assert(! stringBuilderLocked);
#endif
            if (sharedStringBuilder == null) {
                sharedStringBuilder = new StringBuilder();
            }
            else {
                sharedStringBuilder.Length = 0;
            }
#if DEBUG
            stringBuilderLocked = true;
#endif
            return sharedStringBuilder;
        }

        internal void ReleaseSharedStringBuilder() {
            // don't clean stringBuilderLocked here. ToString() will happen after this call
#if DEBUG
            stringBuilderLocked = false;
#endif
        }

        // Lazily created scratch list used by xsl:number formatting.
        internal ArrayList NumberList {
            get {
                if (this.numberList == null) {
                    this.numberList = new ArrayList();
                }
                return this.numberList;
            }
        }

        internal IXsltDebugger Debugger {
            get { return this.debugger; }
        }

        internal HWStack ActionStack {
            get { return this.actionStack; }
        }

        internal RecordBuilder Builder {
            get { return this.builder; }
        }

        internal XsltOutput Output {
            get { return this.output; }
        }

        //
        // Construction
        //
        public Processor(
            XPathNavigator doc, XsltArgumentList args, XmlResolver resolver,
            Stylesheet stylesheet, List<TheQuery> queryStore, RootAction rootAction,
            IXsltDebugger debugger
        ) {
            this.stylesheet = stylesheet;
            this.queryStore = queryStore;
            this.rootAction = rootAction;
            // Clone each compiled query tree so this Processor owns its own instances.
            this.queryList = new Query[queryStore.Count]; {
                for (int i = 0; i < queryStore.Count; i ++) {
                    queryList[i] = Query.Clone(queryStore[i].CompiledQuery.QueryTree);
                }
            }
            this.xsm = new StateMachine();
            this.document = doc;
            this.builder = null;
            this.actionStack = new HWStack(StackIncrement);
            this.output = this.rootAction.Output;
            this.permissions = this.rootAction.permissions;
            this.resolver = resolver ?? XmlNullResolver.Singleton;
            this.args = args ??
new XsltArgumentList(); this.debugger = debugger; if (this.debugger != null) { this.debuggerStack = new HWStack(StackIncrement, /*limit:*/1000); templateLookup = new TemplateLookupActionDbg(); } // Clone the compile-time KeyList if (this.rootAction.KeyList != null) { this.keyList = new Key[this.rootAction.KeyList.Count]; for (int i = 0; i < this.keyList.Length; i ++) { this.keyList[i] = this.rootAction.KeyList[i].Clone(); } } this.scriptExtensions = new Hashtable(this.stylesheet.ScriptObjectTypes.Count); { foreach(DictionaryEntry entry in this.stylesheet.ScriptObjectTypes) { string namespaceUri = (string)entry.Key; if (GetExtensionObject(namespaceUri) != null) { throw XsltException.Create(Res.Xslt_ScriptDub, namespaceUri); } scriptExtensions.Add(namespaceUri, Activator.CreateInstance((Type)entry.Value, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.CreateInstance, null, null, null)); } } this.PushActionFrame(this.rootAction, /*nodeSet:*/null); } public ReaderOutput StartReader() { ReaderOutput output = new ReaderOutput(this); this.builder = new RecordBuilder(output, this.nameTable); return output; } public void Execute(Stream stream) { RecordOutput recOutput = null; switch (this.output.Method) { case XsltOutput.OutputMethod.Text: recOutput = new TextOnlyOutput(this, stream); break; case XsltOutput.OutputMethod.Xml: case XsltOutput.OutputMethod.Html: case XsltOutput.OutputMethod.Other: case XsltOutput.OutputMethod.Unknown: recOutput = new TextOutput(this, stream); break; } this.builder = new RecordBuilder(recOutput, this.nameTable); Execute(); } public void Execute(TextWriter writer) { RecordOutput recOutput = null; switch (this.output.Method) { case XsltOutput.OutputMethod.Text: recOutput = new TextOnlyOutput(this, writer); break; case XsltOutput.OutputMethod.Xml: case XsltOutput.OutputMethod.Html: case XsltOutput.OutputMethod.Other: case XsltOutput.OutputMethod.Unknown: recOutput = new TextOutput(this, writer); break; } 
this.builder = new RecordBuilder(recOutput, this.nameTable); Execute(); } public void Execute(XmlWriter writer) { this.builder = new RecordBuilder(new WriterOutput(this, writer), this.nameTable); Execute(); } // // Execution part of processor // internal void Execute() { Debug.Assert(this.actionStack != null); while (this.execResult == ExecResult.Continue) { ActionFrame frame = (ActionFrame) this.actionStack.Peek(); if (frame == null) { Debug.Assert(this.builder != null); this.builder.TheEnd(); ExecutionResult = ExecResult.Done; break; } // Execute the action which was on the top of the stack if (frame.Execute(this)) { this.actionStack.Pop(); } } if (this.execResult == ExecResult.Interrupt) { this.execResult = ExecResult.Continue; } } // // Action frame support // internal ActionFrame PushNewFrame() { ActionFrame prent = (ActionFrame) this.actionStack.Peek(); ActionFrame frame = (ActionFrame) this.actionStack.Push(); if (frame == null) { frame = new ActionFrame(); this.actionStack.AddToTop(frame); } Debug.Assert(frame != null); if (prent != null) { frame.Inherit(prent); } return frame; } internal void PushActionFrame(Action action, XPathNodeIterator nodeSet) { ActionFrame frame = PushNewFrame(); frame.Init(action, nodeSet); } internal void PushActionFrame(ActionFrame container) { this.PushActionFrame(container, container.NodeSet); } internal void PushActionFrame(ActionFrame container, XPathNodeIterator nodeSet) { ActionFrame frame = PushNewFrame(); frame.Init(container, nodeSet); } internal void PushTemplateLookup(XPathNodeIterator nodeSet, XmlQualifiedName mode, Stylesheet importsOf) { Debug.Assert(this.templateLookup != null); this.templateLookup.Initialize(mode, importsOf); PushActionFrame(this.templateLookup, nodeSet); } internal string GetQueryExpression(int key) { Debug.Assert(key != Compiler.InvalidQueryKey); return this.queryStore[key].CompiledQuery.Expression; } internal Query GetCompiledQuery(int key) { Debug.Assert(key != Compiler.InvalidQueryKey); 
            // Per-call clone keeps the prototype query tree in queryList reusable.
            TheQuery theQuery = this.queryStore[key];
            theQuery.CompiledQuery.CheckErrors();
            Query expr = Query.Clone(this.queryList[key]);
            expr.SetXsltContext(new XsltCompileContext(theQuery._ScopeManager, this));
            return expr;
        }

        internal Query GetValueQuery(int key) {
            return GetValueQuery(key, null);
        }

        // Unlike GetCompiledQuery(), this returns the cached Query instance itself
        // (no clone), re-binding its XSLT context for the current evaluation.
        internal Query GetValueQuery(int key, XsltCompileContext context) {
            Debug.Assert(key != Compiler.InvalidQueryKey);
            TheQuery theQuery = this.queryStore[key];
            theQuery.CompiledQuery.CheckErrors();
            Query expr = this.queryList[key];

            if (context == null) {
                context = new XsltCompileContext(theQuery._ScopeManager, this);
            }
            else {
                context.Reinitialize(theQuery._ScopeManager, this);
            }

            expr.SetXsltContext(context);
            return expr;
        }

        // Lazily created context used only by ValueOf(); kept separate from the Matches()
        // context -- see the note on the valueOfContext/matchesContext fields.
        private XsltCompileContext GetValueOfContext() {
            if (this.valueOfContext == null) {
                this.valueOfContext = new XsltCompileContext();
            }
            return this.valueOfContext;
        }

        [Conditional("DEBUG")]
        private void RecycleValueOfContext() {
            if (this.valueOfContext != null) {
                this.valueOfContext.Recycle();
            }
        }

        private XsltCompileContext GetMatchesContext() {
            if (this.matchesContext == null) {
                this.matchesContext = new XsltCompileContext();
            }
            return this.matchesContext;
        }

        [Conditional("DEBUG")]
        private void RecycleMatchesContext() {
            if (this.matchesContext != null) {
                this.matchesContext.Recycle();
            }
        }

        // xsl:value-of semantics: for a node-set result, take the string value of the
        // first node (empty string if the set is empty); otherwise convert per XPath rules.
        internal String ValueOf(ActionFrame context, int key) {
            string result;
            Query query = this.GetValueQuery(key, GetValueOfContext());
            object value = query.Evaluate(context.NodeSet);
            if (value is XPathNodeIterator) {
                XPathNavigator n = query.Advance();
                result = n != null ? ValueOf(n) : string.Empty;
            }
            else {
                result = XmlConvert.ToXPathString(value);
            }
            RecycleValueOfContext();
            return result;
        }

        // When the stylesheet declares whitespace stripping, element string values are
        // assembled manually so stripped whitespace-only text nodes are excluded.
        internal String ValueOf(XPathNavigator n) {
            if (this.stylesheet.Whitespace && n.NodeType == XPathNodeType.Element) {
                StringBuilder builder = this.GetSharedStringBuilder();
                ElementValueWithoutWS(n, builder);
                this.ReleaseSharedStringBuilder();
                return builder.ToString();
            }
            return n.Value;
        }

        // Recursively concatenates descendant text of an element, honoring xsl:preserve-space.
        private void ElementValueWithoutWS(XPathNavigator nav, StringBuilder builder) {
            Debug.Assert(nav.NodeType == XPathNodeType.Element);
            bool preserve = this.Stylesheet.PreserveWhiteSpace(this, nav);
            if (nav.MoveToFirstChild()) {
                do {
                    switch (nav.NodeType) {
                    case XPathNodeType.Text :
                    case XPathNodeType.SignificantWhitespace :
                        builder.Append(nav.Value);
                        break;
                    case XPathNodeType.Whitespace :
                        // Whitespace-only text is kept only under xsl:preserve-space.
                        if (preserve) {
                            builder.Append(nav.Value);
                        }
                        break;
                    case XPathNodeType.Element :
                        ElementValueWithoutWS(nav, builder);
                        break;
                    }
                } while (nav.MoveToNext());
                nav.MoveToParent();
            }
        }

        internal XPathNodeIterator StartQuery(XPathNodeIterator context, int key) {
            Query query = GetCompiledQuery(key);
            object result = query.Evaluate(context);
            if (result is XPathNodeIterator) {
                // ToDo: We create XPathSelectionIterator to count positions, but it's better create special query in this case at compile time.
return new XPathSelectionIterator(context.Current, query);
            }
            throw XsltException.Create(Res.XPath_NodeSetExpected);
        }

        // Evaluates the compiled value-query registered under 'key' against the
        // frame's current node set and returns the raw XPath result.
        internal object Evaluate(ActionFrame context, int key) {
            return GetValueQuery(key).Evaluate(context.NodeSet);
        }

        // Runs the compiled query for 'key'. A node-set result is wrapped in an
        // XPathArrayIterator (a cached, restartable iterator); scalars pass through.
        internal object RunQuery(ActionFrame context, int key) {
            Query query = GetCompiledQuery(key);
            object value = query.Evaluate(context.NodeSet);
            XPathNodeIterator it = value as XPathNodeIterator;
            if (it != null) {
                return new XPathArrayIterator(it);
            }
            return value;
        }

        // Evaluates the query for 'key' and converts the result to an XPath string;
        // a null result (or null conversion) becomes string.Empty, never null.
        internal string EvaluateString(ActionFrame context, int key) {
            object objValue = Evaluate(context, key);
            string value = null;
            if (objValue != null)
                value = XmlConvert.ToXPathString(objValue);
            if (value == null)
                value = string.Empty;
            return value;
        }

        // Evaluates the query for 'key' as a boolean. A navigator result is converted
        // via its string value; a null result is false.
        internal bool EvaluateBoolean(ActionFrame context, int key) {
            object objValue = Evaluate(context, key);
            if (objValue != null) {
                XPathNavigator nav = objValue as XPathNavigator;
                return nav != null ?
                    Convert.ToBoolean(nav.Value, CultureInfo.InvariantCulture) :
                    Convert.ToBoolean(objValue, CultureInfo.InvariantCulture);
            }
            else {
                return false;
            }
        }

        // Returns true when 'context' matches the pattern compiled under 'key'.
        internal bool Matches(XPathNavigator context, int key) {
            // We don't use XPathNavigator.Matches() to avoid cloning of Query on each call
            Query query = this.GetValueQuery(key, GetMatchesContext());
            try {
                bool result = query.MatchNode(context) != null;
                RecycleMatchesContext();
                return result;
            }
            catch(XPathException) {
                // An XPath error while matching means the pattern itself is invalid.
                throw XsltException.Create(Res.Xslt_InvalidPattern, this.GetQueryExpression(key));
            }
        }

        //
        // Outputting part of processor
        //

        internal XmlNameTable NameTable {
            get { return this.nameTable; }
        }

        // True while the execution result is Continue (no interrupt/done pending).
        internal bool CanContinue {
            get { return this.execResult == ExecResult.Continue; }
        }

        internal bool ExecutionDone {
            get { return this.execResult == ExecResult.Done; }
        }

        internal void ResetOutput() {
            Debug.Assert(this.builder != null);
            this.builder.Reset();
        }

        // Convenience overload: no HTML props, attribute search enabled.
        internal bool BeginEvent(XPathNodeType nodeType, string prefix, string name, string nspace, bool empty) {
            return BeginEvent(nodeType, prefix, name, nspace, empty, null, true);
        }

        // Feeds a begin-node event into the output state machine and record builder.
        // Returns true if the event was consumed, false on output-buffer overflow
        // (caller must re-issue the event after the interrupt is serviced).
        internal bool BeginEvent(XPathNodeType nodeType, string prefix, string name, string nspace, bool empty, Object htmlProps, bool search) {
            Debug.Assert(this.xsm != null);

            int stateOutlook = this.xsm.BeginOutlook(nodeType);

            if (this.ignoreLevel > 0 || stateOutlook == StateMachine.Error) {
                // Already inside an ignored subtree, or the event is illegal here:
                // swallow this node and everything nested under it.
                this.ignoreLevel++;
                return true;                        // We consumed the event, so pretend it was output.
            }

            switch (this.builder.BeginEvent(stateOutlook, nodeType, prefix, name, nspace, empty, htmlProps, search)) {
            case OutputResult.Continue:
                this.xsm.Begin(nodeType);
                Debug.Assert(StateMachine.StateOnly(stateOutlook) == this.xsm.State);
                Debug.Assert(ExecutionResult == ExecResult.Continue);
                return true;
            case OutputResult.Interrupt:
                // Event accepted, but the consumer wants control back before the next one.
                this.xsm.Begin(nodeType);
                Debug.Assert(StateMachine.StateOnly(stateOutlook) == this.xsm.State);
                ExecutionResult = ExecResult.Interrupt;
                return true;
            case OutputResult.Overflow:
                // Not accepted: output buffer is full; event must be replayed.
                ExecutionResult = ExecResult.Interrupt;
                return false;
            case OutputResult.Error:
                // Start ignoring this subtree.
                this.ignoreLevel++;
                return true;
            case OutputResult.Ignore:
                return true;
            default:
                Debug.Fail("Unexpected result of RecordBuilder.BeginEvent()");
                return true;
            }
        }

        internal bool TextEvent(string text) {
            return this.TextEvent(text, false);
        }

        // Feeds a text event into the state machine / builder. Same return contract
        // as BeginEvent: false only on Overflow (event must be replayed).
        internal bool TextEvent(string text, bool disableOutputEscaping) {
            Debug.Assert(this.xsm != null);

            if (this.ignoreLevel > 0) {
                return true;
            }

            int stateOutlook = this.xsm.BeginOutlook(XPathNodeType.Text);

            switch (this.builder.TextEvent(stateOutlook, text, disableOutputEscaping)) {
            case OutputResult.Continue:
                this.xsm.Begin(XPathNodeType.Text);
                Debug.Assert(StateMachine.StateOnly(stateOutlook) == this.xsm.State);
                Debug.Assert(ExecutionResult == ExecResult.Continue);
                return true;
            case OutputResult.Interrupt:
                this.xsm.Begin(XPathNodeType.Text);
                Debug.Assert(StateMachine.StateOnly(stateOutlook) == this.xsm.State);
                ExecutionResult = ExecResult.Interrupt;
                return true;
            case OutputResult.Overflow:
                ExecutionResult = ExecResult.Interrupt;
                return false;
            case OutputResult.Error:
            case OutputResult.Ignore:
                return true;
            default:
                Debug.Fail("Unexpected result of RecordBuilder.TextEvent()");
                return true;
            }
        }

        // Feeds an end-node event. Unwinds one level of an ignored subtree if active.
        internal bool EndEvent(XPathNodeType nodeType) {
            Debug.Assert(this.xsm != null);

            if (this.ignoreLevel > 0) {
                this.ignoreLevel--;
                return true;
            }

            int stateOutlook = this.xsm.EndOutlook(nodeType);

            switch (this.builder.EndEvent(stateOutlook, nodeType)) {
            case OutputResult.Continue:
                this.xsm.End(nodeType);
                Debug.Assert(StateMachine.StateOnly(stateOutlook) == this.xsm.State);
                return true;
            case OutputResult.Interrupt:
                this.xsm.End(nodeType);
                Debug.Assert(StateMachine.StateOnly(stateOutlook) == this.xsm.State, "StateMachine.StateOnly(stateOutlook) == this.xsm.State");
                ExecutionResult = ExecResult.Interrupt;
                return true;
            case OutputResult.Overflow:
                ExecutionResult = ExecResult.Interrupt;
                return false;
            case OutputResult.Error:
            case OutputResult.Ignore:
            default:
                // NOTE(review): message says TextEvent but this is EndEvent — copy/paste
                // slip in the assertion text; candidate fix.
                Debug.Fail("Unexpected result of RecordBuilder.TextEvent()");
                return true;
            }
        }

        // Emits the begin event for a node being deep-copied. Text is deliberately
        // not handled here — it is emitted by CopyTextEvent/CopyContents.
        internal bool CopyBeginEvent(XPathNavigator node, bool emptyflag) {
            switch (node.NodeType) {
            case XPathNodeType.Element:
            case XPathNodeType.Attribute:
            case XPathNodeType.ProcessingInstruction:
            case XPathNodeType.Comment:
                return BeginEvent(node.NodeType, node.Prefix, node.LocalName, node.NamespaceURI, emptyflag);
            case XPathNodeType.Namespace:
                // value instead of namespace here!
                return BeginEvent(XPathNodeType.Namespace, null, node.LocalName, node.Value, false);
            case XPathNodeType.Text:
                // Text will be copied in CopyContents();
                break;
            case XPathNodeType.Root:
            case XPathNodeType.Whitespace:
            case XPathNodeType.SignificantWhitespace:
            case XPathNodeType.All:
                break;
            default:
                Debug.Fail("Invalid XPathNodeType in CopyBeginEvent");
                break;
            }
            return true;
        }

        // Emits the text content of a node being copied. Element/Namespace content
        // is produced by their own begin/end events, so they are skipped here.
        internal bool CopyTextEvent(XPathNavigator node) {
            switch (node.NodeType) {
            case XPathNodeType.Element:
            case XPathNodeType.Namespace:
                break;
            case XPathNodeType.Attribute:
            case XPathNodeType.ProcessingInstruction:
            case XPathNodeType.Comment:
            case XPathNodeType.Text:
            case XPathNodeType.Whitespace:
            case XPathNodeType.SignificantWhitespace:
                string text = node.Value;
                return TextEvent(text);
            case XPathNodeType.Root:
            case XPathNodeType.All:
                break;
            default:
                Debug.Fail("Invalid XPathNodeType in CopyTextEvent");
                break;
            }
            return true;
        }

        // Emits the end event for a node being copied; mirrors CopyBeginEvent.
        internal bool CopyEndEvent(XPathNavigator node) {
            switch (node.NodeType) {
            case XPathNodeType.Element:
            case XPathNodeType.Attribute:
            case XPathNodeType.ProcessingInstruction:
            case XPathNodeType.Comment:
            case XPathNodeType.Namespace:
                return EndEvent(node.NodeType);
            case XPathNodeType.Text:
                // Text was copied in CopyContents();
                break;
            case XPathNodeType.Root:
            case XPathNodeType.Whitespace:
            case XPathNodeType.SignificantWhitespace:
            case XPathNodeType.All:
                break;
            default:
                Debug.Fail("Invalid XPathNodeType in CopyEndEvent");
                break;
            }
            return true;
        }

        // True for the root node itself, or for an element positioned at the root
        // (checked by cloning the navigator and comparing positions after MoveToRoot).
        internal static bool IsRoot(XPathNavigator navigator) {
            Debug.Assert(navigator != null);
            if (navigator.NodeType == XPathNodeType.Root) {
                return true;
            }
            else if (navigator.NodeType == XPathNodeType.Element) {
                XPathNavigator clone = navigator.Clone();
                clone.MoveToRoot();
                return clone.IsSamePosition(navigator);
            }
            else {
                return false;
            }
        }

        //
        // Builder stack
        //

        // Pushes a new RecordBuilder for 'output', saving the current builder and
        // state-machine state so PopOutput can restore them.
        internal void PushOutput(RecordOutput output) {
            Debug.Assert(output != null);
            this.builder.OutputState = this.xsm.State;
            RecordBuilder lastBuilder = this.builder;
            this.builder = new RecordBuilder(output, this.nameTable);
            this.builder.Next = lastBuilder;

            this.xsm.Reset();
        }

        // Pops the top builder, finalizes it (TheEnd) and restores the previous
        // builder's saved state-machine state. Returns the finished output.
        internal RecordOutput PopOutput() {
            Debug.Assert(this.builder != null);

            RecordBuilder topBuilder = this.builder;
            this.builder = topBuilder.Next;
            this.xsm.State = this.builder.OutputState;

            topBuilder.TheEnd();

            return topBuilder.Output;
        }

        // Switches the output method (e.g. xml -> html) if it differs from the
        // current one; returns true when a derived output was created.
        internal bool SetDefaultOutput(XsltOutput.OutputMethod method) {
            if(Output.Method != method) {
                this.output = this.output.CreateDerivedOutput(method);
                return true;
            }
            return false;
        }

        // Returns the value of a variable. Globals live on the root frame and are
        // computed lazily here (detecting circular references via a sentinel mark);
        // locals are read from the top frame.
        internal object GetVariableValue(VariableAction variable) {
            int variablekey = variable.VarKey;
            if (variable.IsGlobal) {
                ActionFrame rootFrame = (ActionFrame) this.actionStack[0];
                object result = rootFrame.GetVariable(variablekey);
                if (result == VariableAction.BeingComputedMark) {
                    throw XsltException.Create(Res.Xslt_CircularReference, variable.NameStr);
                }
                if (result != null) {
                    return result;
                }
                // Variable wasn't evaluated yet
                int saveStackSize = this.actionStack.Length;
                ActionFrame varFrame = PushNewFrame();
                varFrame.Inherit(rootFrame);
                varFrame.Init(variable, rootFrame.NodeSet);

                // Execute frames until the stack returns to its original depth,
                // i.e. until the variable's value has been fully computed.
                do {
                    bool endOfFrame = ((ActionFrame) this.actionStack.Peek()).Execute(this);
                    if (endOfFrame) {
                        this.actionStack.Pop();
                    }
                } while (saveStackSize < this.actionStack.Length);
                Debug.Assert(saveStackSize == this.actionStack.Length);

                result = rootFrame.GetVariable(variablekey);
                Debug.Assert(result != null, "Variable was just calculated and result can't be null");
                return result;
            }
            else {
                return ((ActionFrame) this.actionStack.Peek()).GetVariable(variablekey);
            }
        }

        // Stores a with-param value on the parent frame (the frame that will
        // become the callee's caller).
        internal void SetParameter(XmlQualifiedName name, object value) {
            Debug.Assert(1 < actionStack.Length);
            ActionFrame parentFrame = (ActionFrame) this.actionStack[actionStack.Length - 2];
            parentFrame.SetParameter(name, value);
        }

        // Clears collected with-param values on the current (top) frame.
        internal void ResetParams() {
            ActionFrame frame = (ActionFrame) this.actionStack[actionStack.Length - 1];
            frame.ResetParams();
        }

        // Reads a parameter passed by the grandparent frame (the calling template).
        internal object GetParameter(XmlQualifiedName name) {
            Debug.Assert(2 <
actionStack.Length);
            ActionFrame parentFrame = (ActionFrame) this.actionStack[actionStack.Length - 3];
            return parentFrame.GetParameter(name);
        }

        // ---------------------- Debugger stack -----------------------

        // One debugger-visible stack entry: the action frame being executed and
        // the xsl:apply-templates mode active at that point.
        internal class DebuggerFrame {
            internal ActionFrame       actionFrame;
            internal XmlQualifiedName  currentMode;
        }

        // Pushes a debugger frame, reusing a pooled entry when the stack has one.
        internal void PushDebuggerStack() {
            Debug.Assert(this.Debugger != null, "We don't generate calls this function if ! debugger");
            DebuggerFrame dbgFrame = (DebuggerFrame) this.debuggerStack.Push();
            if (dbgFrame == null) {
                // Stack had no recycled entry at this depth; allocate one.
                dbgFrame = new DebuggerFrame();
                this.debuggerStack.AddToTop(dbgFrame);
            }
            dbgFrame.actionFrame = (ActionFrame) this.actionStack.Peek(); // In a case of next builtIn action.
        }

        internal void PopDebuggerStack() {
            Debug.Assert(this.Debugger != null, "We don't generate calls this function if ! debugger");
            this.debuggerStack.Pop();
        }

        // Refreshes the top debugger frame with the currently executing action
        // frame, then notifies the attached debugger.
        internal void OnInstructionExecute() {
            Debug.Assert(this.Debugger != null, "We don't generate calls this function if ! debugger");
            DebuggerFrame dbgFrame = (DebuggerFrame) this.debuggerStack.Peek();
            Debug.Assert(dbgFrame != null, "PushDebuggerStack() wasn't ever called");
            dbgFrame.actionFrame = (ActionFrame) this.actionStack.Peek();
            this.Debugger.OnInstructionExecute((IXsltProcessor) this);
        }

        // Returns the mode of the caller's debugger frame (one below the top).
        // NOTE(review): name is a historical misspelling of "GetPreviousMode";
        // kept because callers outside this chunk may reference it.
        internal XmlQualifiedName GetPrevioseMode() {
            Debug.Assert(this.Debugger != null, "We don't generate calls this function if ! debugger");
            Debug.Assert(2 <= this.debuggerStack.Length);
            return ((DebuggerFrame) this.debuggerStack[this.debuggerStack.Length - 2]).currentMode;
        }

        // Records the current xsl:apply-templates mode on the top debugger frame.
        internal void SetCurrentMode(XmlQualifiedName mode) {
            Debug.Assert(this.Debugger != null, "We don't generate calls this function if ! debugger");
            ((DebuggerFrame) this.debuggerStack[this.debuggerStack.Length - 1]).currentMode = mode;
        }

        // ----------------------- IXsltProcessor : --------------------

        int IXsltProcessor.StackDepth {
            get {return this.debuggerStack.Length;}
        }

        IStackFrame IXsltProcessor.GetStackFrame(int depth) {
            return ((DebuggerFrame) this.debuggerStack[depth]).actionFrame;
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.ErrorReporting;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Notification;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.SolutionCrawler
{
    internal partial class SolutionCrawlerRegistrationService
    {
        private partial class WorkCoordinator
        {
            /// <summary>
            /// Fans incoming <see cref="WorkItem"/>s out to three priority queues
            /// (high = active file, normal = documents, low = projects), each backed
            /// by its own async processor.
            /// </summary>
            private partial class IncrementalAnalyzerProcessor
            {
                private static readonly Func<int, object, bool, string> s_enqueueLogger = (t, i, s) => string.Format("[{0}] {1} : {2}", t, i.ToString(), s);

                private readonly Registration _registration;
                private readonly IAsynchronousOperationListener _listener;
                private readonly IDocumentTrackingService _documentTracker;
                private readonly IProjectCacheService _cacheService;

                private readonly HighPriorityProcessor _highPriorityProcessor;
                private readonly NormalPriorityProcessor _normalPriorityProcessor;
                private readonly LowPriorityProcessor _lowPriorityProcessor;

                private readonly Lazy<IDiagnosticAnalyzerService> _lazyDiagnosticAnalyzerService;

                private LogAggregator _logAggregator;

                public IncrementalAnalyzerProcessor(
                    IAsynchronousOperationListener listener,
                    IEnumerable<Lazy<IIncrementalAnalyzerProvider, IncrementalAnalyzerProviderMetadata>> analyzerProviders,
                    Registration registration,
                    int highBackOffTimeSpanInMs, int normalBackOffTimeSpanInMs, int lowBackOffTimeSpanInMs, CancellationToken shutdownToken)
                {
                    _logAggregator = new LogAggregator();

                    _listener = listener;
                    _registration = registration;
                    _cacheService = registration.GetService<IProjectCacheService>();

                    _lazyDiagnosticAnalyzerService = new Lazy<IDiagnosticAnalyzerService>(() => GetDiagnosticAnalyzerService(analyzerProviders));

                    // create active file analyzers right away
                    var activeFileAnalyzers = GetActiveFileIncrementalAnalyzers(_registration, analyzerProviders);

                    // create non active file analyzers lazily.
                    var lazyAllAnalyzers = new Lazy<ImmutableArray<IIncrementalAnalyzer>>(() => GetIncrementalAnalyzers(_registration, analyzerProviders));

                    // event and worker queues
                    _documentTracker = _registration.GetService<IDocumentTrackingService>();

                    var globalNotificationService = _registration.GetService<IGlobalOperationNotificationService>();

                    _highPriorityProcessor = new HighPriorityProcessor(listener, this, activeFileAnalyzers, highBackOffTimeSpanInMs, shutdownToken);
                    _normalPriorityProcessor = new NormalPriorityProcessor(listener, this, lazyAllAnalyzers, globalNotificationService, normalBackOffTimeSpanInMs, shutdownToken);
                    _lowPriorityProcessor = new LowPriorityProcessor(listener, this, lazyAllAnalyzers, globalNotificationService, lowBackOffTimeSpanInMs, shutdownToken);
                }

                private IDiagnosticAnalyzerService GetDiagnosticAnalyzerService(IEnumerable<Lazy<IIncrementalAnalyzerProvider, IncrementalAnalyzerProviderMetadata>> analyzerProviders)
                {
                    // alternatively, we could just MEF import IDiagnosticAnalyzerService directly
                    // this can be null in test env.
                    return (IDiagnosticAnalyzerService)analyzerProviders.Where(p => p.Value is IDiagnosticAnalyzerService).SingleOrDefault()?.Value;
                }

                private static ImmutableArray<IIncrementalAnalyzer> GetActiveFileIncrementalAnalyzers(
                    Registration registration, IEnumerable<Lazy<IIncrementalAnalyzerProvider, IncrementalAnalyzerProviderMetadata>> providers)
                {
                    // only providers flagged as high-priority-for-active-file participate.
                    var orderedAnalyzers = GetOrderedAnalyzers(registration, providers.Where(p => p.Metadata.HighPriorityForActiveFile));

                    SolutionCrawlerLogger.LogActiveFileAnalyzers(registration.CorrelationId, registration.Workspace, orderedAnalyzers);
                    return orderedAnalyzers;
                }

                private static ImmutableArray<IIncrementalAnalyzer> GetIncrementalAnalyzers(
                    Registration registration, IEnumerable<Lazy<IIncrementalAnalyzerProvider, IncrementalAnalyzerProviderMetadata>> providers)
                {
                    var orderedAnalyzers = GetOrderedAnalyzers(registration, providers);

                    SolutionCrawlerLogger.LogAnalyzers(registration.CorrelationId, registration.Workspace, orderedAnalyzers);
                    return orderedAnalyzers;
                }

                private static ImmutableArray<IIncrementalAnalyzer> GetOrderedAnalyzers(
                    Registration registration, IEnumerable<Lazy<IIncrementalAnalyzerProvider, IncrementalAnalyzerProviderMetadata>> providers)
                {
                    // Sort list so BaseDiagnosticIncrementalAnalyzers (if any) come first. OrderBy orders 'false' keys before 'true'.
                    return providers.Select(p => p.Value.CreateIncrementalAnalyzer(registration.Workspace))
                                    .OrderBy(a => !(a is BaseDiagnosticIncrementalAnalyzer))
                                    .ToImmutableArray();
                }

                /// <summary>Queues a document work item into all three priority processors.</summary>
                public void Enqueue(WorkItem item)
                {
                    Contract.ThrowIfNull(item.DocumentId);

                    _highPriorityProcessor.Enqueue(item);
                    _normalPriorityProcessor.Enqueue(item);
                    _lowPriorityProcessor.Enqueue(item);
                }

                public void Shutdown()
                {
                    _highPriorityProcessor.Shutdown();
                    _normalPriorityProcessor.Shutdown();
                    _lowPriorityProcessor.Shutdown();
                }

                // TODO: delete this once prototyping is done
                public void ChangeDiagnosticsEngine(bool useV2Engine)
                {
                    var diagnosticAnalyzer = Analyzers.FirstOrDefault(a => a is BaseDiagnosticIncrementalAnalyzer) as DiagnosticAnalyzerService.IncrementalAnalyzerDelegatee;
                    if (diagnosticAnalyzer == null)
                    {
                        return;
                    }

                    diagnosticAnalyzer.TurnOff(useV2Engine);
                }

                public ImmutableArray<IIncrementalAnalyzer> Analyzers => _normalPriorityProcessor.Analyzers;

                private Solution CurrentSolution => _registration.CurrentSolution;
                private ProjectDependencyGraph DependencyGraph => CurrentSolution.GetProjectDependencyGraph();
                private IDiagnosticAnalyzerService DiagnosticAnalyzerService => _lazyDiagnosticAnalyzerService.Value;

                /// <summary>Completes when all three priority processors have drained.</summary>
                public Task AsyncProcessorTask
                {
                    get
                    {
                        return Task.WhenAll(
                            _highPriorityProcessor.AsyncProcessorTask,
                            _normalPriorityProcessor.AsyncProcessorTask,
                            _lowPriorityProcessor.AsyncProcessorTask);
                    }
                }

                private IDisposable EnableCaching(ProjectId projectId)
                {
                    // cache service can be null in some hosts; fall back to a no-op scope.
                    return _cacheService?.EnableCaching(projectId) ?? NullDisposable.Instance;
                }

                private IEnumerable<DocumentId> GetOpenDocumentIds()
                {
                    return _registration.Workspace.GetOpenDocumentIds();
                }

                private void ResetLogAggregator()
                {
                    _logAggregator = new LogAggregator();
                }

                private static async Task ProcessDocumentAnalyzersAsync(
                    Document document, ImmutableArray<IIncrementalAnalyzer> analyzers, WorkItem workItem, CancellationToken cancellationToken)
                {
                    // process all analyzers for each categories in this order - syntax, body, document
                    if (workItem.MustRefresh || workItem.InvocationReasons.Contains(PredefinedInvocationReasons.SyntaxChanged))
                    {
                        await RunAnalyzersAsync(analyzers, document, (a, d, c) => a.AnalyzeSyntaxAsync(d, c), cancellationToken).ConfigureAwait(false);
                    }

                    if (workItem.MustRefresh || workItem.InvocationReasons.Contains(PredefinedInvocationReasons.SemanticChanged))
                    {
                        await RunAnalyzersAsync(analyzers, document, (a, d, c) => a.AnalyzeDocumentAsync(d, null, c), cancellationToken).ConfigureAwait(false);
                    }
                    else
                    {
                        // if we don't need to re-analyze whole body, see whether we need to at least re-analyze one method.
                        await RunBodyAnalyzersAsync(analyzers, workItem, document, cancellationToken).ConfigureAwait(false);
                    }
                }

                private static async Task RunAnalyzersAsync<T>(ImmutableArray<IIncrementalAnalyzer> analyzers, T value, Func<IIncrementalAnalyzer, T, CancellationToken, Task> runnerAsync, CancellationToken cancellationToken)
                {
                    foreach (var analyzer in analyzers)
                    {
                        if (cancellationToken.IsCancellationRequested)
                        {
                            return;
                        }

                        var local = analyzer;

                        // GetOrDefaultAsync swallows cancellation and reports (without
                        // rethrowing user code) crashes so one bad analyzer can't stop the rest.
                        await GetOrDefaultAsync(value, async (v, c) =>
                        {
                            await runnerAsync(local, v, c).ConfigureAwait(false);
                            return default(object);
                        }, cancellationToken).ConfigureAwait(false);
                    }
                }

                private static async Task RunBodyAnalyzersAsync(ImmutableArray<IIncrementalAnalyzer> analyzers, WorkItem workItem, Document document, CancellationToken cancellationToken)
                {
                    try
                    {
                        var root = await GetOrDefaultAsync(document, (d, c) => d.GetSyntaxRootAsync(c), cancellationToken).ConfigureAwait(false);
                        var syntaxFactsService = document.Project.LanguageServices.GetService<ISyntaxFactsService>();
                        if (root == null || syntaxFactsService == null)
                        {
                            // as a fallback mechanism, if we can't run one method body due to some missing service, run whole document analyzer.
                            await RunAnalyzersAsync(analyzers, document, (a, d, c) => a.AnalyzeDocumentAsync(d, null, c), cancellationToken).ConfigureAwait(false);
                            return;
                        }

                        // check whether we know what body has changed. currently, this is an optimization toward typing case. if there are more than one body changes
                        // it will be considered as semantic change and whole document analyzer will take care of that case.
                        var activeMember = GetMemberNode(syntaxFactsService, root, workItem.ActiveMember);
                        if (activeMember == null)
                        {
                            // no active member means, change is out side of a method body, but it didn't affect semantics (such as change in comment)
                            // in that case, we update whole document (just this document) so that we can have updated locations.
                            await RunAnalyzersAsync(analyzers, document, (a, d, c) => a.AnalyzeDocumentAsync(d, null, c), cancellationToken).ConfigureAwait(false);
                            return;
                        }

                        // re-run just the body
                        await RunAnalyzersAsync(analyzers, document, (a, d, c) => a.AnalyzeDocumentAsync(d, activeMember, c), cancellationToken).ConfigureAwait(false);
                    }
                    catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
                    {
                        throw ExceptionUtilities.Unreachable;
                    }
                }

                private static async Task<TResult> GetOrDefaultAsync<TData, TResult>(TData value, Func<TData, CancellationToken, Task<TResult>> funcAsync, CancellationToken cancellationToken)
                {
                    try
                    {
                        return await funcAsync(value, cancellationToken).ConfigureAwait(false);
                    }
                    catch (OperationCanceledException)
                    {
                        return default(TResult);
                    }
                    catch (AggregateException e) when (CrashUnlessCanceled(e))
                    {
                        return default(TResult);
                    }
                    catch (Exception e) when (FatalError.Report(e))
                    {
                        // TODO: manage bad workers like what code actions does now
                        throw ExceptionUtilities.Unreachable;
                    }
                }

                private static SyntaxNode GetMemberNode(ISyntaxFactsService service, SyntaxNode root, SyntaxPath memberPath)
                {
                    if (root == null || memberPath == null)
                    {
                        return null;
                    }

                    SyntaxNode memberNode;
                    if (!memberPath.TryResolve(root, out memberNode))
                    {
                        return null;
                    }

                    // only method-level members qualify for body-only re-analysis.
                    return service.IsMethodLevelMember(memberNode) ? memberNode : null;
                }

                /// <summary>
                /// Returns the project of the tracked active document, or null when
                /// there is no tracker / no active document.
                /// </summary>
                internal ProjectId GetActiveProject()
                {
                    ProjectId activeProjectId = null;
                    if (_documentTracker != null)
                    {
                        var activeDocument = _documentTracker.GetActiveDocument();
                        if (activeDocument != null)
                        {
                            activeProjectId = activeDocument.ProjectId;
                        }
                    }

                    // BUGFIX: previously returned null unconditionally, discarding the id
                    // computed above and making the tracker lookup dead code.
                    return activeProjectId;
                }

                private static bool CrashUnlessCanceled(AggregateException aggregate)
                {
                    var flattened = aggregate.Flatten();
                    if (flattened.InnerExceptions.All(e => e is OperationCanceledException))
                    {
                        return true;
                    }

                    FatalError.Report(flattened);
                    return false;
                }

                internal void WaitUntilCompletion_ForTestingPurposesOnly(ImmutableArray<IIncrementalAnalyzer> analyzers, List<WorkItem> items)
                {
                    _normalPriorityProcessor.WaitUntilCompletion_ForTestingPurposesOnly(analyzers, items);

                    // low-priority queue operates on project granularity, so strip the
                    // document id off each item before draining it.
                    var projectItems = items.Select(i => i.With(null, i.ProjectId, EmptyAsyncToken.Instance));

                    // BUGFIX: previously passed the original document-level 'items',
                    // leaving 'projectItems' computed but unused.
                    _lowPriorityProcessor.WaitUntilCompletion_ForTestingPurposesOnly(analyzers, projectItems.ToList());
                }

                internal void WaitUntilCompletion_ForTestingPurposesOnly()
                {
                    _normalPriorityProcessor.WaitUntilCompletion_ForTestingPurposesOnly();
                    _lowPriorityProcessor.WaitUntilCompletion_ForTestingPurposesOnly();
                }

                /// <summary>No-op disposable used when project caching is unavailable.</summary>
                private class NullDisposable : IDisposable
                {
                    public static readonly IDisposable Instance = new NullDisposable();

                    public void Dispose() { }
                }
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Security.Claims;
using System.Security.Principal;
using Microsoft.AspNet.Authorization;
using Microsoft.AspNet.Identity;
using Microsoft.AspNet.Mvc;
using yo;
using yo.Models;
using yo.Services;

namespace yo.Controllers
{
    /// <summary>
    /// Account-management endpoints (password, phone number, two-factor,
    /// external logins) for the signed-in user.
    /// </summary>
    [Authorize]
    public class ManageController : Controller
    {
        private readonly UserManager<ApplicationUser> _userManager;
        private readonly SignInManager<ApplicationUser> _signInManager;
        private readonly IEmailSender _emailSender;
        private readonly ISmsSender _smsSender;

        public ManageController(
            UserManager<ApplicationUser> userManager,
            SignInManager<ApplicationUser> signInManager,
            IEmailSender emailSender,
            ISmsSender smsSender)
        {
            _userManager = userManager;
            _signInManager = signInManager;
            _emailSender = emailSender;
            _smsSender = smsSender;
        }

        //
        // GET: /Account/Index
        [HttpGet]
        public async Task<IActionResult> Index(ManageMessageId? message = null)
        {
            ViewData["StatusMessage"] =
                message == ManageMessageId.ChangePasswordSuccess ? "Your password has been changed."
                : message == ManageMessageId.SetPasswordSuccess ? "Your password has been set."
                : message == ManageMessageId.SetTwoFactorSuccess ? "Your two-factor authentication provider has been set."
                : message == ManageMessageId.Error ? "An error has occurred."
                : message == ManageMessageId.AddPhoneSuccess ? "Your phone number was added."
                : message == ManageMessageId.RemovePhoneSuccess ? "Your phone number was removed."
                : "";

            var user = await GetCurrentUserAsync();
            // BUGFIX: guard against a missing user (e.g. deleted account with a live
            // cookie) — previously this threw inside the UserManager calls below.
            // Mirrors the null handling already used by ManageLogins.
            if (user == null)
            {
                return View("Error");
            }
            var model = new IndexViewModel
            {
                HasPassword = await _userManager.HasPasswordAsync(user),
                PhoneNumber = await _userManager.GetPhoneNumberAsync(user),
                TwoFactor = await _userManager.GetTwoFactorEnabledAsync(user),
                Logins = await _userManager.GetLoginsAsync(user),
                BrowserRemembered = await _signInManager.IsTwoFactorClientRememberedAsync(user)
            };
            return View(model);
        }

        //
        // GET: /Account/RemoveLogin
        [HttpGet]
        public async Task<IActionResult> RemoveLogin()
        {
            var user = await GetCurrentUserAsync();
            // BUGFIX: same null-user guard as Index.
            if (user == null)
            {
                return View("Error");
            }
            var linkedAccounts = await _userManager.GetLoginsAsync(user);
            // Removal is only offered when another sign-in path would remain.
            ViewData["ShowRemoveButton"] = await _userManager.HasPasswordAsync(user) || linkedAccounts.Count > 1;
            return View(linkedAccounts);
        }

        //
        // POST: /Manage/RemoveLogin
        [HttpPost]
        [ValidateAntiForgeryToken]
        public async Task<IActionResult> RemoveLogin(string loginProvider, string providerKey)
        {
            ManageMessageId? message = ManageMessageId.Error;
            var user = await GetCurrentUserAsync();
            if (user != null)
            {
                var result = await _userManager.RemoveLoginAsync(user, loginProvider, providerKey);
                if (result.Succeeded)
                {
                    // Re-sign-in so the auth cookie reflects the updated login list.
                    await _signInManager.SignInAsync(user, isPersistent: false);
                    message = ManageMessageId.RemoveLoginSuccess;
                }
            }
            return RedirectToAction(nameof(ManageLogins), new { Message = message });
        }

        //
        // GET: /Account/AddPhoneNumber
        public IActionResult AddPhoneNumber()
        {
            return View();
        }

        //
        // POST: /Account/AddPhoneNumber
        [HttpPost]
        [ValidateAntiForgeryToken]
        public async Task<IActionResult> AddPhoneNumber(AddPhoneNumberViewModel model)
        {
            if (!ModelState.IsValid)
            {
                return View(model);
            }
            // Generate the token and send it
            var user = await GetCurrentUserAsync();
            var code = await _userManager.GenerateChangePhoneNumberTokenAsync(user, model.PhoneNumber);
            await _smsSender.SendSmsAsync(model.PhoneNumber, "Your security code is: " + code);
            return RedirectToAction(nameof(VerifyPhoneNumber), new { PhoneNumber = model.PhoneNumber });
        }

        //
        // POST: /Manage/EnableTwoFactorAuthentication
[HttpPost]
        [ValidateAntiForgeryToken]
        public async Task<IActionResult> EnableTwoFactorAuthentication()
        {
            var user = await GetCurrentUserAsync();
            if (user != null)
            {
                await _userManager.SetTwoFactorEnabledAsync(user, true);
                await _signInManager.SignInAsync(user, isPersistent: false);
            }
            return RedirectToAction(nameof(Index), "Manage");
        }

        //
        // POST: /Manage/DisableTwoFactorAuthentication
        [HttpPost]
        [ValidateAntiForgeryToken]
        public async Task<IActionResult> DisableTwoFactorAuthentication()
        {
            var user = await GetCurrentUserAsync();
            if (user != null)
            {
                await _userManager.SetTwoFactorEnabledAsync(user, false);
                await _signInManager.SignInAsync(user, isPersistent: false);
            }
            return RedirectToAction(nameof(Index), "Manage");
        }

        //
        // GET: /Account/VerifyPhoneNumber
        [HttpGet]
        public async Task<IActionResult> VerifyPhoneNumber(string phoneNumber)
        {
            // BUGFIX: check the argument before doing any work — previously a
            // verification token was generated (possibly throwing on a null user)
            // even when phoneNumber was null and the Error view was about to be shown.
            if (phoneNumber == null)
            {
                return View("Error");
            }
            // Generate the token for the code the user received via SMS in AddPhoneNumber.
            var code = await _userManager.GenerateChangePhoneNumberTokenAsync(await GetCurrentUserAsync(), phoneNumber);
            return View(new VerifyPhoneNumberViewModel { PhoneNumber = phoneNumber });
        }

        //
        // POST: /Account/VerifyPhoneNumber
        [HttpPost]
        [ValidateAntiForgeryToken]
        public async Task<IActionResult> VerifyPhoneNumber(VerifyPhoneNumberViewModel model)
        {
            if (!ModelState.IsValid)
            {
                return View(model);
            }
            var user = await GetCurrentUserAsync();
            if (user != null)
            {
                var result = await _userManager.ChangePhoneNumberAsync(user, model.PhoneNumber, model.Code);
                if (result.Succeeded)
                {
                    await _signInManager.SignInAsync(user, isPersistent: false);
                    return RedirectToAction(nameof(Index), new { Message = ManageMessageId.AddPhoneSuccess });
                }
            }
            // If we got this far, something failed, redisplay the form
            ModelState.AddModelError(string.Empty, "Failed to verify phone number");
            return View(model);
        }

        //
        // GET: /Account/RemovePhoneNumber
        // NOTE(review): this mutates state on a GET; consider [HttpPost] +
        // anti-forgery in a follow-up (kept as-is so existing links keep working).
        [HttpGet]
        public async Task<IActionResult> RemovePhoneNumber()
        {
            var user = await GetCurrentUserAsync();
            if (user != null)
            {
                var result = await _userManager.SetPhoneNumberAsync(user, null);
                if (result.Succeeded)
                {
                    await _signInManager.SignInAsync(user, isPersistent: false);
                    return RedirectToAction(nameof(Index), new { Message = ManageMessageId.RemovePhoneSuccess });
                }
            }
            return RedirectToAction(nameof(Index), new { Message = ManageMessageId.Error });
        }

        //
        // GET: /Manage/ChangePassword
        [HttpGet]
        public IActionResult ChangePassword()
        {
            return View();
        }

        //
        // POST: /Account/Manage
        [HttpPost]
        [ValidateAntiForgeryToken]
        public async Task<IActionResult> ChangePassword(ChangePasswordViewModel model)
        {
            if (!ModelState.IsValid)
            {
                return View(model);
            }
            var user = await GetCurrentUserAsync();
            if (user != null)
            {
                var result = await _userManager.ChangePasswordAsync(user, model.OldPassword, model.NewPassword);
                if (result.Succeeded)
                {
                    await _signInManager.SignInAsync(user, isPersistent: false);
                    return RedirectToAction(nameof(Index), new { Message = ManageMessageId.ChangePasswordSuccess });
                }
                AddErrors(result);
                return View(model);
            }
            return RedirectToAction(nameof(Index), new { Message = ManageMessageId.Error });
        }

        //
        // GET: /Manage/SetPassword
        [HttpGet]
        public IActionResult SetPassword()
        {
            return View();
        }

        //
        // POST: /Manage/SetPassword
        [HttpPost]
        [ValidateAntiForgeryToken]
        public async Task<IActionResult> SetPassword(SetPasswordViewModel model)
        {
            if (!ModelState.IsValid)
            {
                return View(model);
            }

            var user = await GetCurrentUserAsync();
            if (user != null)
            {
                var result = await _userManager.AddPasswordAsync(user, model.NewPassword);
                if (result.Succeeded)
                {
                    await _signInManager.SignInAsync(user, isPersistent: false);
                    return RedirectToAction(nameof(Index), new { Message = ManageMessageId.SetPasswordSuccess });
                }
                AddErrors(result);
                return View(model);
            }
            return RedirectToAction(nameof(Index), new { Message = ManageMessageId.Error });
        }

        //GET: /Account/Manage
        [HttpGet]
        public async Task<IActionResult> ManageLogins(ManageMessageId? message = null)
        {
            ViewData["StatusMessage"] =
                message == ManageMessageId.RemoveLoginSuccess ? "The external login was removed."
                : message == ManageMessageId.AddLoginSuccess ? "The external login was added."
                : message == ManageMessageId.Error ? "An error has occurred."
                : "";
            var user = await GetCurrentUserAsync();
            if (user == null)
            {
                return View("Error");
            }
            var userLogins = await _userManager.GetLoginsAsync(user);
            var otherLogins = _signInManager.GetExternalAuthenticationSchemes().Where(auth => userLogins.All(ul => auth.AuthenticationScheme != ul.LoginProvider)).ToList();
            ViewData["ShowRemoveButton"] = user.PasswordHash != null || userLogins.Count > 1;
            return View(new ManageLoginsViewModel
            {
                CurrentLogins = userLogins,
                OtherLogins = otherLogins
            });
        }

        //
        // POST: /Manage/LinkLogin
        [HttpPost]
        [ValidateAntiForgeryToken]
        public IActionResult LinkLogin(string provider)
        {
            // Request a redirect to the external login provider to link a login for the current user
            var redirectUrl = Url.Action("LinkLoginCallback", "Manage");
            var properties = _signInManager.ConfigureExternalAuthenticationProperties(provider, redirectUrl, User.GetUserId());
            return new ChallengeResult(provider, properties);
        }

        //
        // GET: /Manage/LinkLoginCallback
        [HttpGet]
        public async Task<ActionResult> LinkLoginCallback()
        {
            var user = await GetCurrentUserAsync();
            if (user == null)
            {
                return View("Error");
            }
            var info = await _signInManager.GetExternalLoginInfoAsync(User.GetUserId());
            if (info == null)
            {
                return RedirectToAction(nameof(ManageLogins), new { Message = ManageMessageId.Error });
            }
            var result = await _userManager.AddLoginAsync(user, info);
            var message = result.Succeeded ? ManageMessageId.AddLoginSuccess : ManageMessageId.Error;
            return RedirectToAction(nameof(ManageLogins), new { Message = message });
        }

        #region Helpers

        private void AddErrors(IdentityResult result)
        {
            foreach (var error in result.Errors)
            {
                ModelState.AddModelError(string.Empty, error.Description);
            }
        }

        // Currently unused; kept for callers outside this chunk / future views.
        private async Task<bool> HasPhoneNumber()
        {
            var user = await _userManager.FindByIdAsync(User.GetUserId());
            if (user != null)
            {
                return user.PhoneNumber != null;
            }
            return false;
        }

        public enum ManageMessageId
        {
            AddPhoneSuccess,
            AddLoginSuccess,
            ChangePasswordSuccess,
            SetTwoFactorSuccess,
            SetPasswordSuccess,
            RemoveLoginSuccess,
            RemovePhoneSuccess,
            Error
        }

        private async Task<ApplicationUser> GetCurrentUserAsync()
        {
            return await _userManager.FindByIdAsync(Context.User.GetUserId());
        }

        private IActionResult RedirectToLocal(string returnUrl)
        {
            if (Url.IsLocalUrl(returnUrl))
            {
                return Redirect(returnUrl);
            }
            else
            {
                // BUGFIX: was RedirectToAction(..., nameof(HomeController)), which
                // produces the controller name "HomeController"; MVC routing expects
                // the name without the "Controller" suffix, i.e. "Home".
                return RedirectToAction(nameof(HomeController.Index), "Home");
            }
        }

        #endregion
    }
}
using System;
using System.Globalization;
using System.Net;
using Microsoft.Extensions.DependencyInjection;
using Orleans;
using Orleans.Runtime;
using Orleans.Runtime.Configuration;
using Orleans.Serialization;
using Orleans.TestingHost.Utils;
using TestExtensions;
using Xunit;
using Xunit.Abstractions;

namespace UnitTests.General
{
    /// <summary>
    /// Tests for Orleans identifier types (GrainId, UniqueKey, ActivationId,
    /// SiloAddress, GrainReference): construction, serialization round-trips,
    /// string round-trips, interning behavior, and hash correctness.
    /// </summary>
    [Collection(TestEnvironmentFixture.DefaultCollection)]
    public class Identifiertests
    {
        private readonly ITestOutputHelper output;
        private readonly TestEnvironmentFixture environment;
        private static readonly Random random = new Random();

        // Minimal class hierarchy used to exercise the Interner with derived types.
        class A { }
        class B : A { }

        public Identifiertests(ITestOutputHelper output, TestEnvironmentFixture fixture)
        {
            this.output = output;
            this.environment = fixture;
        }

        // System-target grain/activation IDs must keep their "system" flag across copies.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void ID_IsSystem()
        {
            GrainId testGrain = Constants.DirectoryServiceId;
            output.WriteLine("Testing GrainID " + testGrain);
            Assert.True(testGrain.IsSystemTarget); // System grain ID is not flagged as a system ID

            GrainId sGrain = (GrainId)this.environment.SerializationManager.DeepCopy(testGrain);
            output.WriteLine("Testing GrainID " + sGrain);
            Assert.True(sGrain.IsSystemTarget); // String round-trip grain ID is not flagged as a system ID
            Assert.Equal(testGrain, sGrain); // Should be equivalent GrainId object
            Assert.Same(testGrain, sGrain); // Should be same / intern'ed GrainId object

            ActivationId testActivation = ActivationId.GetSystemActivation(testGrain, SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 2456), 0));
            output.WriteLine("Testing ActivationID " + testActivation);
            Assert.True(testActivation.IsSystem); // System activation ID is not flagged as a system ID
        }

        // A KeyExtGrain must reject a null key extension.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void UniqueKeyKeyExtGrainCategoryDisallowsNullKeyExtension()
        {
            Assert.Throws<ArgumentNullException>(() => UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: null));
        }

        // A KeyExtGrain must reject an empty key extension.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void UniqueKeyKeyExtGrainCategoryDisallowsEmptyKeyExtension()
        {
            Assert.Throws<ArgumentException>(() => UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: ""));
        }

        // A KeyExtGrain must reject a whitespace-only key extension.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void UniqueKeyKeyExtGrainCategoryDisallowsWhiteSpaceKeyExtension()
        {
            Assert.Throws<ArgumentException>(() => UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: " \t\n\r"));
        }

        // Binary write/read round-trips of UniqueKey must be lossless, including
        // keys with short and very long (>400 char) key extensions.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void UniqueKeySerializationShouldReproduceAnIdenticalObject()
        {
            {
                var expected = UniqueKey.NewKey(Guid.NewGuid());
                BinaryTokenStreamWriter writer = new BinaryTokenStreamWriter();
                writer.Write(expected);
                BinaryTokenStreamReader reader = new BinaryTokenStreamReader(writer.ToBytes());
                var actual = reader.ReadUniqueKey();
                Assert.Equal(expected, actual); // UniqueKey.Serialize() and UniqueKey.Deserialize() failed to reproduce an identical object (case #1).
            }

            {
                var kx = random.Next().ToString(CultureInfo.InvariantCulture);
                var expected = UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: kx);
                BinaryTokenStreamWriter writer = new BinaryTokenStreamWriter();
                writer.Write(expected);
                BinaryTokenStreamReader reader = new BinaryTokenStreamReader(writer.ToBytes());
                var actual = reader.ReadUniqueKey();
                Assert.Equal(expected, actual); // UniqueKey.Serialize() and UniqueKey.Deserialize() failed to reproduce an identical object (case #2).
            }

            {
                // Oversized extension to exercise length-prefixed encoding.
                var kx = random.Next().ToString(CultureInfo.InvariantCulture) + new String('*', 400);
                var expected = UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: kx);
                BinaryTokenStreamWriter writer = new BinaryTokenStreamWriter();
                writer.Write(expected);
                BinaryTokenStreamReader reader = new BinaryTokenStreamReader(writer.ToBytes());
                var actual = reader.ReadUniqueKey();
                Assert.Equal(expected, actual); // UniqueKey.Serialize() and UniqueKey.Deserialize() failed to reproduce an identical object (case #3).
            }
        }

        // ToHexString/Parse round-trips must be lossless for Guid and long keys,
        // with and without key extensions.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void ParsingUniqueKeyStringificationShouldReproduceAnIdenticalObject()
        {
            UniqueKey expected1 = UniqueKey.NewKey(Guid.NewGuid());
            string str1 = expected1.ToHexString();
            UniqueKey actual1 = UniqueKey.Parse(str1);
            Assert.Equal(expected1, actual1); // UniqueKey.ToString() and UniqueKey.Parse() failed to reproduce an identical object (case 1).

            string kx3 = "case 3";
            UniqueKey expected3 = UniqueKey.NewKey(Guid.NewGuid(), category: UniqueKey.Category.KeyExtGrain, keyExt: kx3);
            string str3 = expected3.ToHexString();
            UniqueKey actual3 = UniqueKey.Parse(str3);
            Assert.Equal(expected3, actual3); // UniqueKey.ToString() and UniqueKey.Parse() failed to reproduce an identical object (case 3).

            long pk = random.Next();
            UniqueKey expected4 = UniqueKey.NewKey(pk);
            string str4 = expected4.ToHexString();
            UniqueKey actual4 = UniqueKey.Parse(str4);
            Assert.Equal(expected4, actual4); // UniqueKey.ToString() and UniqueKey.Parse() failed to reproduce an identical object (case 4).

            pk = random.Next();
            string kx5 = "case 5";
            UniqueKey expected5 = UniqueKey.NewKey(pk, category: UniqueKey.Category.KeyExtGrain, keyExt: kx5);
            string str5 = expected5.ToHexString();
            UniqueKey actual5 = UniqueKey.Parse(str5);
            Assert.Equal(expected5, actual5); // UniqueKey.ToString() and UniqueKey.Parse() failed to reproduce an identical object (case 5).
        }

        // Encoding a Guid primary key into a GrainId and decoding it back must be lossless.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void GrainIdShouldEncodeAndDecodePrimaryKeyGuidCorrectly()
        {
            const int repeat = 100;
            for (int i = 0; i < repeat; ++i)
            {
                Guid expected = Guid.NewGuid();
                GrainId grainId = GrainId.GetGrainIdForTesting(expected);
                Guid actual = grainId.Key.PrimaryKeyToGuid();
                Assert.Equal(expected, actual); // Failed to encode and decode grain id
            }
        }

        // ToParsableString/FromParsableString round-trips for every key-shape combination:
        // Guid and Int64 keys, each with an extended key, a null extended key, or none.
        [Fact, TestCategory("SlowBVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void GrainId_ToFromPrintableString()
        {
            Guid guid = Guid.NewGuid();
            GrainId grainId = GrainId.GetGrainIdForTesting(guid);
            GrainId roundTripped = RoundTripGrainIdToParsable(grainId);
            Assert.Equal(grainId, roundTripped); // GrainId.ToPrintableString -- Guid key

            string extKey = "Guid-ExtKey-1";
            guid = Guid.NewGuid();
            grainId = GrainId.GetGrainId(0, guid, extKey);
            roundTripped = RoundTripGrainIdToParsable(grainId);
            Assert.Equal(grainId, roundTripped); // GrainId.ToPrintableString -- Guid key + Extended Key

            grainId = GrainId.GetGrainId(0, guid, null);
            roundTripped = RoundTripGrainIdToParsable(grainId);
            Assert.Equal(grainId, roundTripped); // GrainId.ToPrintableString -- Guid key + null Extended Key

            long key = random.Next();
            guid = UniqueKey.NewKey(key).PrimaryKeyToGuid();
            grainId = GrainId.GetGrainIdForTesting(guid);
            roundTripped = RoundTripGrainIdToParsable(grainId);
            Assert.Equal(grainId, roundTripped); // GrainId.ToPrintableString -- Int64 key

            extKey = "Long-ExtKey-2";
            key = random.Next();
            guid = UniqueKey.NewKey(key).PrimaryKeyToGuid();
            grainId = GrainId.GetGrainId(0, guid, extKey);
            roundTripped = RoundTripGrainIdToParsable(grainId);
            Assert.Equal(grainId, roundTripped); // GrainId.ToPrintableString -- Int64 key + Extended Key

            guid = UniqueKey.NewKey(key).PrimaryKeyToGuid();
            grainId = GrainId.GetGrainId(0, guid, null);
            roundTripped = RoundTripGrainIdToParsable(grainId);
            Assert.Equal(grainId, roundTripped); // GrainId.ToPrintableString -- Int64 key + null Extended Key
        }

        // Helper: GrainId -> parsable string -> GrainId.
        private GrainId RoundTripGrainIdToParsable(GrainId input)
        {
            string str = input.ToParsableString();
            GrainId output = GrainId.FromParsableString(str);
            return output;
        }

        // The type-code field must store a full 32 bits without truncation or sign mangling.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void UniqueTypeCodeDataShouldStore32BitsOfInformation()
        {
            const int expected = unchecked((int)0xfabccbaf);
            var uk = UniqueKey.NewKey(0, UniqueKey.Category.None, expected);
            var actual = uk.BaseTypeCode;
            Assert.Equal(expected, actual);
        }

        // Guid primary keys and their extensions must survive storage in a UniqueKey
        // even with every type-code bit set.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void UniqueKeysShouldPreserveTheirPrimaryKeyValueIfItIsGuid()
        {
            const int all32Bits = unchecked((int)0xffffffff);

            var expectedKey1 = Guid.NewGuid();
            const string expectedKeyExt1 = "1";
            var uk1 = UniqueKey.NewKey(expectedKey1, UniqueKey.Category.KeyExtGrain, all32Bits, expectedKeyExt1);
            string actualKeyExt1;
            var actualKey1 = uk1.PrimaryKeyToGuid(out actualKeyExt1);
            Assert.Equal(expectedKey1, actualKey1); //"UniqueKey objects should preserve the value of their primary key (Guid case #1).");
            Assert.Equal(expectedKeyExt1, actualKeyExt1); //"UniqueKey objects should preserve the value of their key extension (Guid case #1).");

            var expectedKey2 = Guid.NewGuid();
            const string expectedKeyExt2 = "2";
            var uk2 = UniqueKey.NewKey(expectedKey2, UniqueKey.Category.KeyExtGrain, all32Bits, expectedKeyExt2);
            string actualKeyExt2;
            var actualKey2 = uk2.PrimaryKeyToGuid(out actualKeyExt2);
            Assert.Equal(expectedKey2, actualKey2); // "UniqueKey objects should preserve the value of their primary key (Guid case #2).");
            Assert.Equal(expectedKeyExt2, actualKeyExt2); // "UniqueKey objects should preserve the value of their key extension (Guid case #2).");
        }

        // Long primary keys (built from two random 32-bit halves) must also survive storage.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void UniqueKeysShouldPreserveTheirPrimaryKeyValueIfItIsLong()
        {
            const int all32Bits = unchecked((int)0xffffffff);

            var n1 = random.Next();
            var n2 = random.Next();
            const string expectedKeyExt = "1";
            // Pack two random 32-bit values into one 64-bit key (high word from n1, low from n2).
            var expectedKey = unchecked((long)((((ulong)((uint)n1)) << 32) | ((uint)n2)));
            var uk = UniqueKey.NewKey(expectedKey, UniqueKey.Category.KeyExtGrain, all32Bits, expectedKeyExt);
            string actualKeyExt;
            var actualKey = uk.PrimaryKeyToLong(out actualKeyExt);
            Assert.Equal(expectedKey, actualKey); // "UniqueKey objects should preserve the value of their primary key (long case).");
            Assert.Equal(expectedKeyExt, actualKeyExt); // "UniqueKey objects should preserve the value of their key extension (long case).");
        }

        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void ID_HashCorrectness()
        {
            // This tests that our optimized Jenkins hash computes the same value as the reference implementation
            int testCount = 1000;
            for (int i = 0; i < testCount; i++)
            {
                byte[] byteData = new byte[24];
                random.NextBytes(byteData);
                ulong u1 = BitConverter.ToUInt64(byteData, 0);
                ulong u2 = BitConverter.ToUInt64(byteData, 8);
                ulong u3 = BitConverter.ToUInt64(byteData, 16);
                var referenceHash = JenkinsHash.ComputeHash(byteData);
                var optimizedHash = JenkinsHash.ComputeHash(u1, u2, u3);
                Assert.Equal(referenceHash, optimizedHash); // "Optimized hash value doesn't match the reference value for inputs {0}, {1}, {2}", u1, u2, u3
            }
        }

        // Parsing the same (zero) Guid in two string formats must yield the same
        // intern'ed GrainId instance, and serialization must preserve identity.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void ID_Interning_GrainID()
        {
            Guid guid = new Guid();
            GrainId gid1 = GrainId.FromParsableString(guid.ToString("B"));
            GrainId gid2 = GrainId.FromParsableString(guid.ToString("N"));
            Assert.Equal(gid1, gid2); // Should be equal GrainId's
            Assert.Same(gid1, gid2); // Should be same / intern'ed GrainId object

            // Round-trip through Serializer
            GrainId gid3 = (GrainId)this.environment.SerializationManager.RoundTripSerializationForTesting(gid1);
            Assert.Equal(gid1, gid3); // Should be equal GrainId's
            Assert.Equal(gid2, gid3); // Should be equal GrainId's
            Assert.Same(gid1, gid3); // Should be same / intern'ed GrainId object
            Assert.Same(gid2, gid3); // Should be same / intern'ed GrainId object
        }

        // The interner must return the cached instance on repeated lookups of the same key.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void ID_Interning_string_equals()
        {
            Interner<string, string> interner = new Interner<string, string>();
            const string str = "1";
            string r1 = interner.FindOrCreate("1", _ => str);
            string r2 = interner.FindOrCreate("1", _ => null); // Should always be found
            Assert.Equal(r1, r2); // 1: Objects should be equal
            Assert.Same(r1, r2); // 2: Objects should be same / intern'ed

            // Round-trip through Serializer
            string r3 = (string)this.environment.SerializationManager.RoundTripSerializationForTesting(r1);
            Assert.Equal(r1, r3); // 3: Should be equal
            Assert.Equal(r2, r3); // 4: Should be equal
        }

        // FindOrCreate must never replace a cached instance, regardless of whether the
        // new candidate is of the same, a more derived, or a less derived class.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void ID_Intern_FindOrCreate_derived_class()
        {
            Interner<int, A> interner = new Interner<int, A>();
            var obj1 = new A();
            var obj2 = new B();
            var obj3 = new B();
            var r1 = interner.FindOrCreate(1, _ => obj1);
            Assert.Equal(obj1, r1); // Objects should be equal
            Assert.Same(obj1, r1); // Objects should be same / intern'ed
            var r2 = interner.FindOrCreate(2, _ => obj2);
            Assert.Equal(obj2, r2); // Objects should be equal
            Assert.Same(obj2, r2); // Objects should be same / intern'ed

            // FindOrCreate should not replace instances of same class
            var r3 = interner.FindOrCreate(2, _ => obj3);
            Assert.Same(obj2, r3); // FindOrCreate should return previous object
            Assert.NotSame(obj3, r3); // FindOrCreate should not replace previous object of same class

            // FindOrCreate should not replace cached instances with instances of most derived class
            var r4 = interner.FindOrCreate(1, _ => obj2);
            Assert.Same(obj1, r4); // FindOrCreate return previously cached object
            Assert.NotSame(obj2, r4); // FindOrCreate should not replace previously cached object

            // FindOrCreate should not replace cached instances with instances of less derived class
            var r5 = interner.FindOrCreate(2, _ => obj1);
            Assert.NotSame(obj1, r5); // FindOrCreate should not replace previously cached object
            Assert.Same(obj2, r5); // FindOrCreate return previously cached object
        }

        // Two SiloAddresses built from identical endpoint + generation must be intern'ed.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void Interning_SiloAddress()
        {
            //string addrStr1 = "1.2.3.4@11111@1";
            SiloAddress a1 = SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 1111), 12345);
            SiloAddress a2 = SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 1111), 12345);
            Assert.Equal(a1, a2); // Should be equal SiloAddress's
            Assert.Same(a1, a2); // Should be same / intern'ed SiloAddress object

            // Round-trip through Serializer
            SiloAddress a3 = (SiloAddress)this.environment.SerializationManager.RoundTripSerializationForTesting(a1);
            Assert.Equal(a1, a3); // Should be equal SiloAddress's
            Assert.Equal(a2, a3); // Should be equal SiloAddress's
            Assert.Same(a1, a3); // Should be same / intern'ed SiloAddress object
            Assert.Same(a2, a3); // Should be same / intern'ed SiloAddress object
        }

        // Different endpoints must not be conflated by the intern cache.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void Interning_SiloAddress2()
        {
            SiloAddress a1 = SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 1111), 12345);
            SiloAddress a2 = SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 2222), 12345);
            Assert.NotEqual(a1, a2); // Should not be equal SiloAddress's
            Assert.NotSame(a1, a2); // Should not be same / intern'ed SiloAddress object
        }

        // A serialization round-trip must hand back the intern'ed SiloAddress instance.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void Interning_SiloAddress_Serialization()
        {
            SiloAddress a1 = SiloAddress.New(new IPEndPoint(IPAddress.Loopback, 1111), 12345);

            // Round-trip through Serializer
            SiloAddress a3 = (SiloAddress)this.environment.SerializationManager.RoundTripSerializationForTesting(a1);
            Assert.Equal(a1, a3); // Should be equal SiloAddress's
            Assert.Same(a1, a3); // Should be same / intern'ed SiloAddress object
        }

        // The Guid used to build a GrainId must be recoverable through the public
        // string representations, but not through the internal Key.ToString form.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void GrainID_AsGuid()
        {
            string guidString = "0699605f-884d-4343-9977-f40a39ab7b2b";
            Guid grainIdGuid = Guid.Parse(guidString);
            GrainId grainId = GrainId.GetGrainIdForTesting(grainIdGuid);
            //string grainIdToKeyString = grainId.ToKeyString();
            string grainIdToFullString = grainId.ToFullString();
            string grainIdToGuidString = GrainIdToGuidString(grainId);
            string grainIdKeyString = grainId.Key.ToString();

            output.WriteLine("Guid={0}", grainIdGuid);
            output.WriteLine("GrainId={0}", grainId);
            //output.WriteLine("GrainId.ToKeyString={0}", grainIdToKeyString);
            output.WriteLine("GrainId.Key.ToString={0}", grainIdKeyString);
            output.WriteLine("GrainIdToGuidString={0}", grainIdToGuidString);
            output.WriteLine("GrainId.ToFullString={0}", grainIdToFullString);

            // Equal: Public APIs
            //Assert.Equal(guidString, grainIdToKeyString); // GrainId.ToKeyString
            Assert.Equal(guidString, grainIdToGuidString); // GrainIdToGuidString
            // Equal: Internal APIs
            Assert.Equal(grainIdGuid, grainId.GetPrimaryKey()); // GetPrimaryKey Guid
            // NOT-Equal: Internal APIs
            Assert.NotEqual(guidString, grainIdKeyString); // GrainId.Key.ToString
        }

        // SiloAddress parsable-string round-trips must be lossless in both directions,
        // including the documented "ip:port@generation" format.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers")]
        public void SiloAddress_ToFrom_ParsableString()
        {
            SiloAddress address1 = SiloAddress.NewLocalAddress(12345);

            string addressStr1 = address1.ToParsableString();
            SiloAddress addressObj1 = SiloAddress.FromParsableString(addressStr1);

            output.WriteLine("Convert -- From: {0} Got result string: '{1}' object: {2}", address1, addressStr1, addressObj1);

            Assert.Equal(address1, addressObj1); // SiloAddress equal after To-From-ParsableString

            //const string addressStr2 = "127.0.0.1-11111-144611139";
            const string addressStr2 = "127.0.0.1:11111@144611139";
            SiloAddress addressObj2 = SiloAddress.FromParsableString(addressStr2);
            string addressStr2Out = addressObj2.ToParsableString();

            output.WriteLine("Convert -- From: {0} Got result string: '{1}' object: {2}", addressStr2, addressStr2Out, addressObj2);

            Assert.Equal(addressStr2, addressStr2Out); // SiloAddress equal after From-To-ParsableString
        }

        // Extracts the "PrimaryKey: <guid>" portion from GrainId.ToFullString();
        // relies on the full-string format placing a space after the colon.
        internal string GrainIdToGuidString(GrainId grainId)
        {
            const string pkIdentifierStr = "PrimaryKey:";
            string grainIdFullString = grainId.ToFullString();
            int pkStartIdx = grainIdFullString.IndexOf(pkIdentifierStr, StringComparison.Ordinal) + pkIdentifierStr.Length + 1;
            string pkGuidString = grainIdFullString.Substring(pkStartIdx, Guid.Empty.ToString().Length);
            return pkGuidString;
        }

        // Every GrainReference flavor (regular, generic, system target, observer,
        // geo observer) must survive key-string, Orleans, and .NET serialization.
        [Fact, TestCategory("BVT"), TestCategory("Functional"), TestCategory("Identifiers"), TestCategory("GrainReference")]
        public void GrainReference_Test1()
        {
            Guid guid = Guid.NewGuid();
            GrainId regularGrainId = GrainId.GetGrainIdForTesting(guid);
            GrainReference grainRef = this.environment.InternalGrainFactory.GetGrain(regularGrainId);
            TestGrainReference(grainRef);

            grainRef = GrainReference.FromGrainId(regularGrainId, null, "generic");
            TestGrainReference(grainRef);

            GrainId systemTragetGrainId = GrainId.NewSystemTargetGrainIdByTypeCode(2);
            grainRef = GrainReference.FromGrainId(systemTragetGrainId, null, null, SiloAddress.NewLocalAddress(1));
            this.environment.GrainFactory.BindGrainReference(grainRef);
            TestGrainReference(grainRef);

            GrainId observerGrainId = GrainId.NewClientId();
            grainRef = GrainReference.NewObserverGrainReference(observerGrainId, GuidId.GetNewGuidId(), this.environment.RuntimeClient.GrainReferenceRuntime);
            this.environment.GrainFactory.BindGrainReference(grainRef);
            TestGrainReference(grainRef);

            GrainId geoObserverGrainId = GrainId.NewClientId("clusterid");
            grainRef = GrainReference.NewObserverGrainReference(geoObserverGrainId, GuidId.GetNewGuidId(), this.environment.RuntimeClient.GrainReferenceRuntime);
            this.environment.GrainFactory.BindGrainReference(grainRef);
            TestGrainReference(grainRef);
        }

        // Asserts that grainRef round-trips losslessly through each serialization path.
        private void TestGrainReference(GrainReference grainRef)
        {
            GrainReference roundTripped = RoundTripGrainReferenceToKey(grainRef);
            Assert.Equal(grainRef, roundTripped); // GrainReference.ToKeyString

            roundTripped = this.environment.SerializationManager.RoundTripSerializationForTesting(grainRef);
            Assert.Equal(grainRef, roundTripped); // GrainReference.OrleansSerializer

            roundTripped = TestingUtils.RoundTripDotNetSerializer(grainRef, this.environment.GrainFactory, this.environment.SerializationManager);
            Assert.Equal(grainRef, roundTripped); // GrainReference.DotNetSerializer
        }

        // Helper: GrainReference -> key string -> GrainReference via the registered converter.
        private GrainReference RoundTripGrainReferenceToKey(GrainReference input)
        {
            string str = input.ToKeyString();
            GrainReference output = this.environment.Services.GetRequiredService<IGrainReferenceConverter>().GetGrainFromKeyString(str);
            return output;
        }
    }
}
/* MIT License Copyright (c) 2017 Saied Zarrinmehr Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Windows; using System.Windows.Media; using System.Windows.Shapes; using System.Windows.Controls; using System.Windows.Input; using SpatialAnalysis.Visualization; using SpatialAnalysis.CellularEnvironment; using SpatialAnalysis.Geometry; using System.ComponentModel; using SpatialAnalysis.Miscellaneous; namespace SpatialAnalysis.IsovistUtility.IsovistVisualization { /// <summary> /// Class ProxemicsVisualHost. 
/// Visualizes the proxemics: concentric isovist rings (intimate/personal/social/public
    /// distances by default) drawn around a user-picked vantage point.
    /// </summary>
    /// <seealso cref="System.Windows.FrameworkElement" />
    public class ProxemicsVisualHost : FrameworkElement
    {
        // Fill opacity shared by all ring brushes (0..1).
        private double _opacity { get; set; }
        // Owning document/window; supplies the floor scene, menus, and cellular floor.
        private OSMDocument _host { get; set; }
        // One brush per ring; cloned from the stock brushes so Opacity can be set.
        private Brush[] brushes { get; set; }
        // Ring radii, innermost first (defaults: 1.5, 4, 12, 25).
        private double[] radiuses { get; set; }
        // Brush and on-screen size for the vantage-point marker.
        private Brush centerBrush { get; set; }
        private double centerSize { get; set; }
        private MenuItem visualization_Menu { get; set; }
        private MenuItem hide_Show_Menu { get; set; }
        private MenuItem setProximity_Menu { get; set; }
        private MenuItem setOpacity_Menu { get; set; }
        private MenuItem centerSize_Menu { get; set; }
        private MenuItem clear_Menu { get; set; }
        private MenuItem getProxemics_Menu { get; set; }
        // Create a collection of child visual objects.
        private VisualCollection _children;

        /// <summary>
        /// Initializes a new instance of the <see cref="ProxemicsVisualHost"/> class:
        /// sets default radii, brushes, and opacity, then builds and wires the menu.
        /// </summary>
        public ProxemicsVisualHost()
        {
            _children = new VisualCollection(this);
            this.radiuses = new double[] { 1.5, 4, 12, 25 };
            // Clone so the shared frozen stock brushes are not mutated when setting Opacity.
            this.brushes = new Brush[] { Brushes.IndianRed.Clone(), Brushes.Orange.Clone(), Brushes.DarkSeaGreen.Clone(), Brushes.LightBlue.Clone(), Brushes.AliceBlue.Clone() };
            this._opacity = .7;
            for (int i = 0; i < this.brushes.Length; i++)
            {
                this.brushes[i].Opacity = this._opacity;
            }
            this.centerSize = 3;
            this.centerBrush = Brushes.DarkRed;
            this.visualization_Menu = new MenuItem() { Header = "Proxemics" };
            this.hide_Show_Menu = new MenuItem() { Header = "Hide" };
            this.setOpacity_Menu = new MenuItem() { Header = "Set Opacity" };
            this.setProximity_Menu = new MenuItem() { Header = "Set Proximity Boundaries" };
            this.centerSize_Menu = new MenuItem() { Header = "Center Size" };
            this.clear_Menu = new MenuItem() { Header = "Clear Proxemics" };
            this.getProxemics_Menu = new MenuItem() { Header = "Get Proxemics" };
            this.visualization_Menu.Items.Add(this.getProxemics_Menu);
            this.visualization_Menu.Items.Add(this.setProximity_Menu);
            this.visualization_Menu.Items.Add(this.setOpacity_Menu);
            this.visualization_Menu.Items.Add(this.hide_Show_Menu);
            this.visualization_Menu.Items.Add(this.centerSize_Menu);
            this.visualization_Menu.Items.Add(this.clear_Menu);
            this.getProxemics_Menu.Click += new RoutedEventHandler(getProxemics_Click);
            this.hide_Show_Menu.Click += new RoutedEventHandler(hide_Show_Menu_Click);
            this.centerSize_Menu.Click += new RoutedEventHandler(centerSize_Menu_Click);
            this.clear_Menu.Click += new RoutedEventHandler(clear_Menu_Click);
            this.setProximity_Menu.Click += new RoutedEventHandler(setProximity_Menu_Click);
            this.setOpacity_Menu.Click += new RoutedEventHandler(setOpacity_Menu_Click);
        }

        private void setOpacity_Menu_Click(object sender, RoutedEventArgs e)
        {
            this.setOpacity();
        }

        // Prompts for a new opacity (0..1) and re-applies it to fresh brush clones.
        private void setOpacity()
        {
            GetNumberSlider gn = new GetNumberSlider(0.0d, this._opacity, 1.0d, "Set Proxemics Opacity", "Set a number for the transparency of the proxamic colors");
            gn.Owner = this._host;
            gn.ShowDialog();
            this._opacity = gn.GetNumber;
            for (int i = 0; i < this.brushes.Length; i++)
            {
                // Re-clone: an already-rendered brush may be frozen, so mutate a copy.
                this.brushes[i] = this.brushes[i].Clone();
                this.brushes[i].Opacity = this._opacity;
            }
            gn = null;
        }

        #region polygonal isovist
        // Enters "pick a vantage point" mode: the next left-click on the floor
        // scene triggers mouseLeftButtonDown_GetProxemics.
        private void getProxemics_Click(object sender, RoutedEventArgs e)
        {
            this._host.Menues.IsEnabled = false;
            this._host.UIMessage.Text = "Click on your desired vantage point on screen";
            this._host.UIMessage.Visibility = System.Windows.Visibility.Visible;
            this._host.Cursor = Cursors.Pen;
            this._host.FloorScene.MouseLeftButtonDown += mouseLeftButtonDown_GetProxemics;
            this._host.MouseBtn.MouseDown += releaseProxemicsMode;
        }

        // Leaves pick mode and restores the UI.
        private void releaseProxemicsMode(object sender, MouseButtonEventArgs e)
        {
            this._host.Menues.IsEnabled = true;
            this._host.Cursor = Cursors.Arrow;
            this._host.UIMessage.Visibility = System.Windows.Visibility.Hidden;
            this._host.FloorScene.MouseLeftButtonDown -= mouseLeftButtonDown_GetProxemics;
            // NOTE(review): getProxemics_Click subscribes this handler to
            // _host.MouseBtn.MouseDown, but it is unsubscribed here from
            // _host.CommandReset.MouseDown — one of the two looks wrong, which
            // would leave the MouseBtn handler attached forever. Confirm which
            // control was intended.
            this._host.CommandReset.MouseDown -= releaseProxemicsMode;
        }

        // Validates the picked point against the active barrier type, then computes
        // one isovist polygon per radius and draws the resulting proxemics rings.
        private void mouseLeftButtonDown_GetProxemics(object sender, MouseButtonEventArgs e)
        {
            // Convert the mouse position from screen space back into floor coordinates.
            var point = this._host.InverseRenderTransform.Transform(Mouse.GetPosition(this._host.FloorScene));
            UV p = new UV(point.X, point.Y);
            Cell cell = this._host.cellularFloor.FindCell(p);
            if (cell == null)
            {
                MessageBox.Show("Pick a point on the walkable field and try again!\n");
                return;
            }
            // The picked cell must be valid for the currently selected barrier type.
            switch (this._host.IsovistBarrierType)
            {
                case BarrierType.Visual:
                    if (cell.VisualOverlapState != OverlapState.Outside)
                    {
                        MessageBox.Show("Pick a point outside visual barriers.\nTry again!");
                        return;
                    }
                    break;
                case BarrierType.Physical:
                    if (cell.PhysicalOverlapState != OverlapState.Outside)
                    {
                        MessageBox.Show("Pick a point outside physical barriers.\nTry again!");
                        return;
                    }
                    break;
                case BarrierType.Field:
                    if (cell.FieldOverlapState != OverlapState.Inside)
                    {
                        MessageBox.Show("Pick a point inside the walkable field.\nTry again!");
                        return;
                    }
                    break;
                case BarrierType.BarrierBuffer:
                    if (cell.BarrierBufferOverlapState != OverlapState.Outside)
                    {
                        MessageBox.Show("Pick a point outside barrier buffers.\nTry again!");
                        return;
                    }
                    break;
                default:
                    break;
            }
            try
            {
                // One isovist polygon per ring radius, innermost first.
                BarrierPolygon[] barriers = new BarrierPolygon[this.radiuses.Length];
                for (int i = 0; i < this.radiuses.Length; i++)
                {
                    HashSet<UVLine> blocks = this._host.cellularFloor.PolygonalIsovistVisualObstacles(p, this.radiuses[i], this._host.IsovistBarrierType);
                    barriers[i] = this._host.BIM_To_OSM.IsovistPolygon(p, this.radiuses[i], blocks);
                }
                Proxemics proxemics = new Proxemics(barriers, p);
                this.draw(proxemics);
            }
            catch (Exception error0)
            {
                MessageBox.Show(error0.Report());
            }
        }
        #endregion

        // Removes all drawn proxemics visuals without tearing the host down.
        private void clear_Menu_Click(object sender, RoutedEventArgs e)
        {
            this._children.Clear();
        }

        /// <summary>
        /// Clears this instance: detaches event handlers and releases all references.
        /// Intended for final teardown only — the host is unusable afterwards.
        /// </summary>
        public void Clear()
        {
            this._host = null;
            this.visualization_Menu.Items.Clear();
            this._children.Clear();
            // NOTE(review): _children is set to null here; any later call to
            // VisualChildrenCount/GetVisualChild (e.g. if WPF re-measures this
            // element) would throw. Confirm the element is removed from the
            // visual tree before Clear() is called.
            this._children = null;
            this.getProxemics_Menu.Click -= getProxemics_Click;
            this.hide_Show_Menu.Click -= hide_Show_Menu_Click;
            this.centerSize_Menu.Click -= centerSize_Menu_Click;
            this.clear_Menu.Click -= clear_Menu_Click;
            this.setProximity_Menu.Click -= setProximity_Menu_Click;
            this.setOpacity_Menu.Click -= setOpacity_Menu_Click;
            this.brushes = null;
            this.radiuses = null;
            this.centerBrush = null;
            this.visualization_Menu = null;
            this.hide_Show_Menu = null;
            this.setProximity_Menu = null;
            this.setOpacity_Menu = null;
            this.centerSize_Menu = null;
            this.clear_Menu = null;
            this.getProxemics_Menu = null;
        }

        // Prompts for a new size for the center (vantage-point) marker.
        private void centerSize_Menu_Click(object sender, RoutedEventArgs e)
        {
            GetNumber gn = new GetNumber("Enter New Center Size", "New square size will be applied to the edges of Isovists", this.centerSize);
            gn.Owner = this._host;
            gn.ShowDialog();
            this.centerSize = gn.NumberValue;
            gn = null;
        }

        // Toggles visibility of the drawn proxemics and enables/disables the
        // menu items that only make sense while visible.
        private void hide_Show()
        {
            if (this.Visibility == System.Windows.Visibility.Visible)
            {
                this.Visibility = System.Windows.Visibility.Collapsed;
                this.hide_Show_Menu.Header = "Show";
                this.clear_Menu.IsEnabled = false;
                this.centerSize_Menu.IsEnabled = false;
            }
            else
            {
                this.Visibility = System.Windows.Visibility.Visible;
                this.hide_Show_Menu.Header = "Hide";
                this.clear_Menu.IsEnabled = true;
                this.centerSize_Menu.IsEnabled = true;
            }
        }

        private void hide_Show_Menu_Click(object sender, RoutedEventArgs e)
        {
            this.hide_Show();
        }

        // Opens the boundary-setting dialog and adopts the radii it returns.
        private void setProximity_Menu_Click(object sender, RoutedEventArgs e)
        {
            BoundarySetting bs = new BoundarySetting(this.radiuses);
            bs.Owner = this._host;
            bs.ShowDialog();
            this.radiuses = bs.Radiuses;
            bs = null;
        }

        // Provide a required override for the VisualChildrenCount property.
        protected override int VisualChildrenCount
        {
            get { return _children.Count; }
        }

        // Provide a required override for the GetVisualChild method.
        protected override Visual GetVisualChild(int index)
        {
            if (index < 0 || index >= _children.Count)
            {
                throw new ArgumentOutOfRangeException();
            }
            return _children[index];
        }

        // Length of the render transform's first row vector — the current zoom
        // factor, used to keep the center marker a constant on-screen size.
        private double getScaleFactor()
        {
            double scale = this.RenderTransform.Value.M11 * this.RenderTransform.Value.M11 + this.RenderTransform.Value.M12 * this.RenderTransform.Value.M12;
            return Math.Sqrt(scale);
        }

        // Renders the proxemics: a filled innermost polygon, then one even-odd
        // ring (outer boundary minus previous boundary) per remaining radius,
        // and finally the center marker.
        private void draw(Proxemics proxemics)
        {
            double scale = this.getScaleFactor();
            DrawingVisual drawingVisual = new DrawingVisual();
            using (DrawingContext drawingContext = drawingVisual.RenderOpen())
            {
                // Innermost region: a single filled, closed polygon.
                StreamGeometry sg0 = new StreamGeometry();
                using (StreamGeometryContext sgc = sg0.Open())
                {
                    sgc.BeginFigure(this.toPoint(proxemics.ProxemicsPolygons[0].BoundaryPoints[0]), true, true);
                    for (int i = 1; i < proxemics.ProxemicsPolygons[0].BoundaryPoints.Length; i++)
                    {
                        sgc.LineTo(this.toPoint(proxemics.ProxemicsPolygons[0].BoundaryPoints[i]), true, true);
                    }
                }
                sg0.Freeze();
                drawingContext.DrawGeometry(this.brushes[0], null, sg0);
                // Each subsequent ring: two figures (previous + current boundary)
                // combined with the even-odd fill rule so only the annulus fills.
                for (int n = 1; n < this.radiuses.Length; n++)
                {
                    StreamGeometry sg = new StreamGeometry();
                    using (StreamGeometryContext sgc = sg.Open())
                    {
                        sgc.BeginFigure(this.toPoint(proxemics.ProxemicsPolygons[n - 1].BoundaryPoints[0]), true, true);
                        for (int i = 1; i < proxemics.ProxemicsPolygons[n - 1].BoundaryPoints.Length; i++)
                        {
                            sgc.LineTo(this.toPoint(proxemics.ProxemicsPolygons[n - 1].BoundaryPoints[i]), true, true);
                        }
                        sgc.BeginFigure(this.toPoint(proxemics.ProxemicsPolygons[n].BoundaryPoints[0]), true, true);
                        for (int i = 1; i < proxemics.ProxemicsPolygons[n].BoundaryPoints.Length; i++)
                        {
                            sgc.LineTo(this.toPoint(proxemics.ProxemicsPolygons[n].BoundaryPoints[i]), true, true);
                        }
                    }
                    sg.FillRule = FillRule.EvenOdd;
                    sg.Freeze();
                    drawingContext.DrawGeometry(this.brushes[n], null, sg);
                }
                // Center marker: a short horizontal stroke whose thickness/length is
                // divided by the zoom factor so it stays constant in screen pixels.
                Point center = this.toPoint(proxemics.Center);
                var p1 = new Point(center.X - this.centerSize / (2 * scale), center.Y);
                var p2 = new Point(center.X + this.centerSize / (2 * scale), center.Y);
                drawingContext.DrawLine(new Pen(this.centerBrush, this.centerSize / scale), p1, p2);
            }
            drawingVisual.Drawing.Freeze();
            this._children.Add(drawingVisual);
        }

        // Converts a model-space UV coordinate to a WPF Point.
        private Point toPoint(UV uv)
        {
            return new Point(uv.U, uv.V);
        }

        // Attaches this host to the document: adopts its render transform and
        // inserts the "Proxemics" menu into the Isovist menu.
        public void SetHost(OSMDocument host)
        {
            this._host = host;
            this.RenderTransform = this._host.RenderTransformation;
            this._host.IsovistMenu.Items.Insert(3, this.visualization_Menu);
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Diagnostics;
using System.Runtime;
using System.Runtime.CompilerServices;

namespace System
{
    /// <summary>
    /// A read-only view over contiguous memory: the immutable counterpart of
    /// <see cref="Span{T}"/>. Can wrap a managed array, a slice of one, or a
    /// native pointer + length pair.
    /// </summary>
    [DebuggerTypeProxy(typeof(ReadOnlySpanDebuggerView<>))]
    [DebuggerDisplay("Length = {Length}")]
    public partial struct ReadOnlySpan<T> : IEquatable<T[]>
    {
        /// <summary>A managed array/string; or null for native ptrs.</summary>
        internal readonly object Object;

        /// <summary>A byte-offset into the array/string; or a native ptr.</summary>
        internal readonly UIntPtr Offset;

        /// <summary>Fetches the number of elements this Span contains.</summary>
        public readonly int Length;

        /// <summary>
        /// Creates a new span over the entirety of the target array.
        /// </summary>
        /// <param name="array">The target array.</param>
        /// <exception cref="System.ArgumentException">
        /// Thrown if the 'array' parameter is null.
        /// </exception>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public ReadOnlySpan(T[] array)
        {
            Contract.Requires(array != null);
            Object = array;
            // Offset of element 0 relative to the array object header.
            Offset = new UIntPtr((uint)SpanHelpers<T>.OffsetToArrayData);
            Length = array.Length;
        }

        /// <summary>
        /// Creates a new span over the portion of the target array beginning
        /// at 'start' index.
        /// </summary>
        /// <param name="array">The target array.</param>
        /// <param name="start">The index at which to begin the span.</param>
        /// <exception cref="System.ArgumentException">
        /// Thrown if the 'array' parameter is null.
        /// </exception>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified start index is not in range (&lt;0 or &gt;= length).
        /// </exception>
        // TODO: Should we have this overload? It is really confusing when you also have Span(T* array, int length)
        // While with Slice it makes sense it might not in here.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal ReadOnlySpan(T[] array, int start)
        {
            Contract.Requires(array != null);
            Contract.RequiresInInclusiveRange(start, (uint)array.Length);
            if (start < array.Length)
            {
                Object = array;
                Offset = UnsafeUtilities.GetElementAddress<T>((UIntPtr)SpanHelpers<T>.OffsetToArrayData, (UIntPtr)start);
                Length = array.Length - start;
            }
            else
            {
                // start == array.Length: represent the empty span without pinning the array.
                Object = null;
                Offset = UIntPtr.Zero;
                Length = 0;
            }
        }

        /// <summary>
        /// Creates a new span over the portion of the target array beginning
        /// at 'start' index and consisting of 'length' elements.
        /// </summary>
        /// <param name="array">The target array.</param>
        /// <param name="start">The index at which to begin the span.</param>
        /// <param name="length">The number of items in the span.</param>
        /// <exception cref="System.ArgumentException">
        /// Thrown if the 'array' parameter is null.
        /// </exception>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified start or length is not in range.
        /// </exception>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public ReadOnlySpan(T[] array, int start, int length)
        {
            Contract.Requires(array != null);
            Contract.RequiresInInclusiveRange(start, length, (uint)array.Length);
            if (start < array.Length)
            {
                Object = array;
                Offset = UnsafeUtilities.GetElementAddress<T>((UIntPtr)SpanHelpers<T>.OffsetToArrayData, (UIntPtr)start);
                Length = length;
            }
            else
            {
                // start == array.Length (length must be 0): canonical empty span.
                Object = null;
                Offset = UIntPtr.Zero;
                Length = 0;
            }
        }

        /// <summary>
        /// Creates a new span over the target unmanaged buffer. Clearly this
        /// is quite dangerous, because we are creating arbitrarily typed T's
        /// out of a void*-typed block of memory. And the length is not checked.
        /// But if this creation is correct, then all subsequent uses are correct.
        /// </summary>
        /// <param name="ptr">An unmanaged pointer to memory.</param>
        /// <param name="length">The number of T elements the memory contains.</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public unsafe ReadOnlySpan(void* ptr, int length)
        {
            Contract.Requires(length >= 0);
            Contract.Requires(length == 0 || ptr != null);
            Object = null;
            Offset = new UIntPtr(ptr);
            Length = length;
        }

        /// <summary>
        /// An internal helper for creating spans. Not for public use.
        /// </summary>
        internal ReadOnlySpan(object obj, UIntPtr offset, int length)
        {
            Object = obj;
            Offset = offset;
            Length = length;
        }

        public static implicit operator ReadOnlySpan<T>(T[] array)
        {
            return new ReadOnlySpan<T>(array);
        }

        public static implicit operator ReadOnlySpan<T>(Span<T> slice)
        {
            return new ReadOnlySpan<T>(slice.Object, slice.Offset, slice.Length);
        }

        public static implicit operator ReadOnlySpan<T>(ArraySegment<T> arraySegment)
        {
            return new ReadOnlySpan<T>(arraySegment.Array, arraySegment.Offset, arraySegment.Count);
        }

        public static ReadOnlySpan<T> Empty => default(ReadOnlySpan<T>);

        public bool IsEmpty => Length == 0;

        /// <summary>
        /// Fetches the element at the specified index.
        /// </summary>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified index is not in range (&lt;0 or &gt;= length).
        /// </exception>
        public T this[int index]
        {
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            get
            {
                Contract.RequiresInRange(index, (uint)Length);
                return UnsafeUtilities.Get<T>(Object, Offset, (UIntPtr)index);
            }
            // Private setter: mutation is reserved for internal machinery; the
            // span is read-only from the caller's perspective.
            [MethodImpl(MethodImplOptions.AggressiveInlining)]
            private set
            {
                Contract.RequiresInRange(index, (uint)Length);
                UnsafeUtilities.Set(Object, Offset, (UIntPtr)index, value);
            }
        }

        /// <summary>
        /// Copies the contents of this span into a new array. This heap
        /// allocates, so should generally be avoided, however is sometimes
        /// necessary to bridge the gap with APIs written in terms of arrays.
        /// </summary>
        public T[] ToArray()
        {
            var dest = new T[Length];
            CopyTo(dest.Slice());
            return dest;
        }

        /// <summary>
        /// Copies the contents of this span into another. The destination
        /// must be at least as big as the source, and may be bigger.
        /// </summary>
        /// <param name="destination">The span to copy items into.</param>
        public void CopyTo(Span<T> destination)
        {
            // There are some benefits of making local copies. See https://github.com/dotnet/coreclr/issues/5556
            var dest = destination;
            var src = this;

            Contract.Requires(src.Length <= dest.Length);

            if (default(T) != null && MemoryUtils.IsPrimitiveValueType<T>())
            {
                // Primitive value types can be bulk-copied as raw bytes.
                // review: (#848) - overflow and alignment
                UnsafeUtilities.CopyBlock(src.Object, src.Offset, dest.Object, dest.Offset,
                    src.Length * Unsafe.SizeOf<T>());
            }
            else
            {
                // Reference types / non-primitive structs must be copied one
                // element at a time so GC write barriers fire correctly.
                for (int i = 0; i < src.Length; i++)
                {
                    // We don't check bounds here as we are surely within them
                    T value = UnsafeUtilities.Get<T>(src.Object, src.Offset, (UIntPtr)i);
                    UnsafeUtilities.Set(dest.Object, dest.Offset, (UIntPtr)i, value);
                }
            }
        }

        /// <summary>
        /// Copies the contents of this span into an array. The destination
        /// must be at least as big as the source, and may be bigger.
        /// </summary>
        /// <param name="destination">The array to copy items into.</param>
        public void CopyTo(T[] destination)
        {
            var src = new Span<T>(Object, Offset, Length);
            src.CopyTo(destination);
        }

        /// <summary>
        /// Forms a slice out of the given span, beginning at 'start'.
        /// </summary>
        /// <param name="start">The index at which to begin this slice.</param>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified start index is not in range (&lt;0 or &gt; length).
        /// </exception>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public ReadOnlySpan<T> Slice(int start)
        {
            Contract.RequiresInInclusiveRange(start, (uint)Length);
            return new ReadOnlySpan<T>(
                Object, UnsafeUtilities.GetElementAddress<T>(Offset, (UIntPtr)start), Length - start);
        }

        /// <summary>
        /// Forms a slice out of the given span, beginning at 'start'.
        /// </summary>
        /// <param name="start">The index at which to begin this slice.</param>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified start index is not in range (&gt; length).
        /// </exception>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public ReadOnlySpan<T> Slice(uint start)
        {
            Contract.RequiresInInclusiveRange(start, (uint)Length);
            return new ReadOnlySpan<T>(Object, UnsafeUtilities.GetElementAddress<T>(Offset, (UIntPtr)start),
                Length - (int)start);
        }

        /// <summary>
        /// Forms a slice out of the given span, beginning at 'start', of
        /// 'length' elements.
        /// </summary>
        /// <param name="start">The index at which to begin this slice.</param>
        /// <param name="length">The number of elements in the slice.</param>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified start or length is not in range.
        /// </exception>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public ReadOnlySpan<T> Slice(int start, int length)
        {
            Contract.RequiresInInclusiveRange(start, length, (uint)Length);
            return new ReadOnlySpan<T>(
                Object, UnsafeUtilities.GetElementAddress<T>(Offset, (UIntPtr)start), length);
        }

        /// <summary>
        /// Forms a slice out of the given span, beginning at 'start', of
        /// 'length' elements.
        /// </summary>
        /// <param name="start">The index at which to begin this slice.</param>
        /// <param name="length">The number of elements in the slice.</param>
        /// <exception cref="System.ArgumentOutOfRangeException">
        /// Thrown when the specified start or length is not in range.
        /// </exception>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public ReadOnlySpan<T> Slice(uint start, uint length)
        {
            Contract.RequiresInInclusiveRange(start, length, (uint)Length);
            return new ReadOnlySpan<T>(
                Object, UnsafeUtilities.GetElementAddress<T>(Offset, (UIntPtr)start), (int)length);
        }

        /// <summary>
        /// Checks to see if two spans point at the same memory. Note that
        /// this does *not* check to see if the *contents* are equal.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public bool ReferenceEquals(ReadOnlySpan<T> other)
        {
            return Object == other.Object && Offset == other.Offset && Length == other.Length;
        }

        public override int GetHashCode()
        {
            // Identity-based hash combining offset, length and (if present) the
            // wrapped object; consistent with ReferenceEquals semantics above.
            unchecked
            {
                var hashCode = Offset.GetHashCode();
                hashCode = hashCode * 31 + Length;
                if (Object != null)
                {
                    hashCode = hashCode * 31 + Object.GetHashCode();
                }
                return hashCode;
            }
        }

        // Boxing a span is always a bug; fail loudly instead of silently
        // comparing by value.
        public override bool Equals(object obj)
        {
            ThrowHelper.ThrowInvalidOperationException_ForBoxingSpans();
            return false;
        }

        /// <summary>
        /// Checks to see if two spans point at the same memory. Note that
        /// this does *not* check to see if the *contents* are equal.
        /// </summary>
        public bool Equals(ReadOnlySpan<T> other) => ReferenceEquals(other);

        public bool Equals(Span<T> other) => other.StructuralEquals(Object, Offset, Length);

        // NOTE(review): a null array here flows into the ReadOnlySpan(T[]) ctor,
        // whose contract rejects null — confirm intended behavior for null input.
        public bool Equals(T[] other) => Equals(new ReadOnlySpan<T>(other));

        public static bool operator ==(ReadOnlySpan<T> left, ReadOnlySpan<T> right) => left.Equals(right);

        public static bool operator !=(ReadOnlySpan<T> left, ReadOnlySpan<T> right) => !left.Equals(right);
    }
}
using EngineLayer;
using MassSpectrometry;
using MzLibUtil;
using NUnit.Framework;
using Proteomics;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using TaskLayer;
using UsefulProteomicsDatabases;

namespace Test
{
    /// <summary>
    /// End-to-end tests for the MetaMorpheus task pipeline: each test builds
    /// proteins + mods in memory, writes XML/mzML fixtures to disk, runs one or
    /// more tasks via EverythingRunnerEngine or RunTask, and asserts on outputs.
    /// </summary>
    [TestFixture]
    public static class MyTaskTest
    {
        #region Public Fields

        // Set by TestPrunedDatabase so other code can detect that a pruned-database run occurred.
        public static bool hasPrunedRun;

        #endregion Public Fields

        #region Public Methods

        // Runs a full Calibration -> Gptmd -> Search(classic parsimony) -> Search(modern)
        // chain over a single generated mzML/XML pair; smoke test for the whole pipeline.
        [Test]
        public static void TestEverythingRunner()
        {
            #region Setup tasks

            foreach (var modFile in Directory.GetFiles(@"Mods"))
                GlobalEngineLevelSettings.AddMods(PtmListLoader.ReadModsFromFile(modFile));

            CalibrationTask task1 = new CalibrationTask
            {
                CommonParameters = new CommonParameters
                {
                    ConserveMemory = false,
                    DigestionParams = new DigestionParams
                    {
                        MaxMissedCleavages = 0,
                        MinPeptideLength = null,
                        InitiatorMethionineBehavior = InitiatorMethionineBehavior.Retain
                    },
                },
                CalibrationParameters = new CalibrationParameters
                {
                    WriteIntermediateFiles = true
                }
            };
            GptmdTask task2 = new GptmdTask
            {
                CommonParameters = new CommonParameters
                {
                    ConserveMemory = false
                },
            };

            SearchTask task3 = new SearchTask
            {
                CommonParameters = new CommonParameters
                {
                    ConserveMemory = false
                },
                SearchParameters = new SearchParameters
                {
                    DoParsimony = true,
                    SearchType = SearchType.Modern
                }
            };

            SearchTask task4 = new SearchTask
            {
                CommonParameters = new CommonParameters
                {
                    ConserveMemory = false
                },
                SearchParameters = new SearchParameters
                {
                    SearchType = SearchType.Modern,
                }
            };
            List<Tuple<string, MetaMorpheusTask>> taskList = new List<Tuple<string, MetaMorpheusTask>> {
                new Tuple<string, MetaMorpheusTask>("task1", task1),
                new Tuple<string, MetaMorpheusTask>("task2", task2),
                new Tuple<string, MetaMorpheusTask>("task3", task3),
                new Tuple<string, MetaMorpheusTask>("task4", task4),};

            #endregion Setup tasks

            // Resolve the variable/fixed mod lists declared on task1 against the global mod registry.
            List<ModificationWithMass> variableModifications = GlobalEngineLevelSettings.AllModsKnown.OfType<ModificationWithMass>().Where(b => task1.CommonParameters.ListOfModsVariable.Contains(new Tuple<string, string>(b.modificationType, b.id))).ToList();
            List<ModificationWithMass> fixedModifications = GlobalEngineLevelSettings.AllModsKnown.OfType<ModificationWithMass>().Where(b => task1.CommonParameters.ListOfModsFixed.Contains(new Tuple<string, string>(b.modificationType, b.id))).ToList();

            Console.WriteLine("Size of variable Modificaitaons: " + variableModifications.Capacity);
            Console.WriteLine("Size of fixed Modificaitaons: " + fixedModifications.Capacity);

            // Generate data for files
            Protein ParentProtein = new Protein("MPEPTIDEKANTHE", "accession1");

            var digestedList = ParentProtein.Digest(task1.CommonParameters.DigestionParams, fixedModifications).ToList();

            Assert.AreEqual(2, digestedList.Count);

            PeptideWithPossibleModifications modPep1 = digestedList[0];

            var setList1 = modPep1.GetPeptidesWithSetModifications(task1.CommonParameters.DigestionParams, variableModifications).ToList();

            Assert.AreEqual(2, setList1.Count);

            PeptideWithSetModifications pepWithSetMods1 = setList1[0];

            PeptideWithPossibleModifications modPep2 = digestedList[1];

            var setList2 = modPep2.GetPeptidesWithSetModifications(task1.CommonParameters.DigestionParams, variableModifications).ToList();

            Assert.AreEqual(1, setList2.Count);

            PeptideWithSetModifications pepWithSetMods2 = setList2[0];

            // Second protein carries an unknown +21.98 mod at residue 3 and is used
            // only to produce a decoy-ish spectrum; it is not expected in results.
            var dictHere = new Dictionary<int, List<Modification>>();
            ModificationMotif.TryGetMotif("E", out ModificationMotif motif);
            dictHere.Add(3, new List<Modification> { new ModificationWithMass("21", null, motif, TerminusLocalization.Any, 21.981943) });

            Protein ParentProteinToNotInclude = new Protein("MPEPTIDEK", "accession2", new List<Tuple<string, string>>(), dictHere);
            digestedList = ParentProteinToNotInclude.Digest(task1.CommonParameters.DigestionParams, fixedModifications).ToList();
            var modPep3 = digestedList[0];
            Assert.AreEqual(1, digestedList.Count);
            var setList3 = modPep3.GetPeptidesWithSetModifications(task1.CommonParameters.DigestionParams, variableModifications).ToList();
            Assert.AreEqual(4, setList3.Count);

            IMsDataFile<IMsDataScan<IMzSpectrum<IMzPeak>>> myMsDataFile = new TestDataFile(new List<PeptideWithSetModifications> { pepWithSetMods1, pepWithSetMods2, setList3[1] });

            Protein proteinWithChain = new Protein("MAACNNNCAA", "accession3", new List<Tuple<string, string>>(), new Dictionary<int, List<Modification>>(), new List<ProteolysisProduct> { new ProteolysisProduct(4, 8, "chain") }, "name2", "fullname2");

            #region Write the files

            string mzmlName = @"ok.mzML";
            IO.MzML.MzmlMethods.CreateAndWriteMyMzmlWithCalibratedSpectra(myMsDataFile, mzmlName, false);
            string xmlName = "okk.xml";
            ProteinDbWriter.WriteXmlDatabase(new Dictionary<string, HashSet<Tuple<int, Modification>>>(), new List<Protein> { ParentProtein, proteinWithChain }, xmlName);

            #endregion Write the files

            // RUN!
            var engine = new EverythingRunnerEngine(taskList, new List<string> { mzmlName }, new List<DbForTask> { new DbForTask(xmlName, false) }, null);
            engine.Run();
        }

        // Same pipeline as TestEverythingRunner but with two input mzML files and
        // duplicated chain-bearing proteins, to exercise multi-file handling.
        [Test]
        public static void TestMultipleFilesRunner()
        {
            #region Setup tasks

            foreach (var modFile in Directory.GetFiles(@"Mods"))
                GlobalEngineLevelSettings.AddMods(PtmListLoader.ReadModsFromFile(modFile));

            CalibrationTask task1 = new CalibrationTask
            {
                CommonParameters = new CommonParameters
                {
                    DigestionParams = new DigestionParams
                    {
                        MaxMissedCleavages = 0,
                        MinPeptideLength = null,
                        InitiatorMethionineBehavior = InitiatorMethionineBehavior.Retain
                    },
                    ListOfModsVariable = new List<Tuple<string, string>> { new Tuple<string, string>("Common Variable", "Oxidation of M") },
                    ListOfModsFixed = new List<Tuple<string, string>> { new Tuple<string, string>("Common Fixed", "Carbamidomethyl of C") },
                    ListOfModsLocalize = GlobalEngineLevelSettings.AllModsKnown.Select(b => new Tuple<string, string>(b.modificationType, b.id)).ToList(),
                    ProductMassTolerance = new AbsoluteTolerance(0.01)
                },
            };
            GptmdTask task2 = new GptmdTask
            {
                CommonParameters = new CommonParameters
                {
                    DigestionParams = new DigestionParams
                    {
                        Protease = GlobalEngineLevelSettings.ProteaseDictionary["trypsin"],
                    },
                    ProductMassTolerance = new AbsoluteTolerance(0.01)
                },
            };

            SearchTask task3 = new SearchTask
            {
                CommonParameters = new CommonParameters
                {
                    ConserveMemory = false
                },
                SearchParameters = new SearchParameters
                {
                    DoParsimony = true,
                    SearchType = SearchType.Modern,
                }
            };
            SearchTask task4 = new SearchTask
            {
                CommonParameters = new CommonParameters
                {
                    ConserveMemory = false
                },
                SearchParameters = new SearchParameters
                {
                    SearchType = SearchType.Modern,
                }
            };
            List<Tuple<string, MetaMorpheusTask>> taskList = new List<Tuple<string, MetaMorpheusTask>> {
                new Tuple<string, MetaMorpheusTask>("task1", task1),
                new Tuple<string, MetaMorpheusTask>("task2", task2),
                new Tuple<string, MetaMorpheusTask>("task3", task3),
                new Tuple<string, MetaMorpheusTask>("task4", task4),};

            #endregion Setup tasks

            List<ModificationWithMass> variableModifications = GlobalEngineLevelSettings.AllModsKnown.OfType<ModificationWithMass>().Where(b => task1.CommonParameters.ListOfModsVariable.Contains(new Tuple<string, string>(b.modificationType, b.id))).ToList();
            List<ModificationWithMass> fixedModifications = GlobalEngineLevelSettings.AllModsKnown.OfType<ModificationWithMass>().Where(b => task1.CommonParameters.ListOfModsFixed.Contains(new Tuple<string, string>(b.modificationType, b.id))).ToList();

            // Generate data for files
            Protein ParentProtein = new Protein("MPEPTIDEKANTHE", "accession1");

            var digestedList = ParentProtein.Digest(task1.CommonParameters.DigestionParams, fixedModifications).ToList();

            Assert.AreEqual(2, digestedList.Count);

            PeptideWithPossibleModifications modPep1 = digestedList[0];

            var setList1 = modPep1.GetPeptidesWithSetModifications(task1.CommonParameters.DigestionParams, variableModifications).ToList();

            Assert.AreEqual(2, setList1.Count);

            PeptideWithSetModifications pepWithSetMods1 = setList1[0];

            PeptideWithPossibleModifications modPep2 = digestedList[1];

            var setList2 = modPep2.GetPeptidesWithSetModifications(task1.CommonParameters.DigestionParams, variableModifications).ToList();

            Assert.AreEqual(1, setList2.Count);

            PeptideWithSetModifications pepWithSetMods2 = setList2[0];

            var dictHere = new Dictionary<int, List<Modification>>();
            ModificationMotif.TryGetMotif("E", out ModificationMotif motif);
            dictHere.Add(3, new List<Modification> { new ModificationWithMass("21", null, motif, TerminusLocalization.Any, 21.981943) });

            Protein ParentProteinToNotInclude = new Protein("MPEPTIDEK", "accession2", new List<Tuple<string, string>>(), dictHere);
            digestedList = ParentProteinToNotInclude.Digest(task1.CommonParameters.DigestionParams, fixedModifications).ToList();
            var modPep3 = digestedList[0];
            Assert.AreEqual(1, digestedList.Count);
            var setList3 = modPep3.GetPeptidesWithSetModifications(task1.CommonParameters.DigestionParams, variableModifications).ToList();
            Assert.AreEqual(4, setList3.Count);

            // Two identical data files so the runner must aggregate across inputs.
            IMsDataFile<IMsDataScan<IMzSpectrum<IMzPeak>>> myMsDataFile1 = new TestDataFile(new List<PeptideWithSetModifications> { pepWithSetMods1, pepWithSetMods2, setList3[1] });

            string mzmlName1 = @"ok1.mzML";
            IO.MzML.MzmlMethods.CreateAndWriteMyMzmlWithCalibratedSpectra(myMsDataFile1, mzmlName1, false);

            IMsDataFile<IMsDataScan<IMzSpectrum<IMzPeak>>> myMsDataFile2 = new TestDataFile(new List<PeptideWithSetModifications> { pepWithSetMods1, pepWithSetMods2, setList3[1] });

            string mzmlName2 = @"ok2.mzML";
            IO.MzML.MzmlMethods.CreateAndWriteMyMzmlWithCalibratedSpectra(myMsDataFile2, mzmlName2, false);

            // Two proteins with the same accession/sequence — tests duplicate handling in the database.
            Protein proteinWithChain1 = new Protein("MAACNNNCAA", "accession3", new List<Tuple<string, string>>(), new Dictionary<int, List<Modification>>(), new List<ProteolysisProduct> { new ProteolysisProduct(4, 8, "chain") }, "name2", "fullname2", false, false, new List<DatabaseReference>(), new List<SequenceVariation>(), null);
            Protein proteinWithChain2 = new Protein("MAACNNNCAA", "accession3", new List<Tuple<string, string>>(), new Dictionary<int, List<Modification>>(), new List<ProteolysisProduct> { new ProteolysisProduct(4, 8, "chain") }, "name2", "fullname2", false, false, new List<DatabaseReference>(), new List<SequenceVariation>(), null);

            string xmlName = "okk.xml";
            ProteinDbWriter.WriteXmlDatabase(new Dictionary<string, HashSet<Tuple<int, Modification>>>(), new List<Protein> { ParentProtein, proteinWithChain1, proteinWithChain2 }, xmlName);

            // RUN!
            var engine = new EverythingRunnerEngine(taskList, new List<string> { mzmlName1, mzmlName2 }, new List<DbForTask> { new DbForTask(xmlName, false) }, null);
            engine.Run();
        }

        // Regression test: an open-mass search over a tiny protein with extra noise
        // peaks must still report exactly one target PSM under 1% FDR.
        [Test]
        public static void MakeSureFdrDoesntSkip()
        {
            MetaMorpheusTask task = new SearchTask
            {
                CommonParameters = new CommonParameters
                {
                    DigestionParams = new DigestionParams
                    {
                        MinPeptideLength = null,
                    },
                    ScoreCutoff = 1,
                    DeconvolutionIntensityRatio = 999,
                    DeconvolutionMassTolerance = new PpmTolerance(50),
                },
                SearchParameters = new SearchParameters
                {
                    SearchDecoy = false,
                    MassDiffAcceptor = new OpenSearchMode(),
                }
            };

            string xmlName = "MakeSureFdrDoesntSkip.xml";

            #region Generate protein and write to file

            {
                Protein theProtein = new Protein("MG", "accession1");
                ProteinDbWriter.WriteXmlDatabase(new Dictionary<string, HashSet<Tuple<int, Modification>>>(), new List<Protein> { theProtein }, xmlName);
            }

            #endregion Generate protein and write to file

            string mzmlName = @"MakeSureFdrDoesntSkip.mzML";

            #region Generate and write the mzml

            {
                var theProteins = ProteinDbLoader.LoadProteinXML(xmlName, true, true, new List<Modification>(), false, new List<string>(), out Dictionary<string, Modification> ok);

                List<ModificationWithMass> fixedModifications = new List<ModificationWithMass>();

                var targetDigested = theProteins[0].Digest(task.CommonParameters.DigestionParams, fixedModifications).ToList();

                var okjhjf = targetDigested[0].GetPeptidesWithSetModifications(task.CommonParameters.DigestionParams, GlobalEngineLevelSettings.AllModsKnown.OfType<ModificationWithMass>().ToList()).ToList();
                PeptideWithSetModifications targetGood = okjhjf.First();

                TestDataFile myMsDataFile = new TestDataFile(new List<PeptideWithSetModifications> { targetGood }, true);

                // Inject four extra intensity values and three extra m/z peaks near
                // 104.35 into scan 1 to simulate noise, then re-sort by m/z.
                var ii = myMsDataFile.GetOneBasedScan(1).MassSpectrum.YArray.ToList();
                ii.Add(1);
                ii.Add(1);
                ii.Add(1);
                ii.Add(1);
                var intensities = ii.ToArray();
                var mm = myMsDataFile.GetOneBasedScan(1).MassSpectrum.XArray.ToList();
                var hah = 104.35352;
                mm.Add(hah);
                mm.Add(hah + 1);
                mm.Add(hah + 2);
                var mz = mm.ToArray();

                Array.Sort(mz, intensities);

                myMsDataFile.ReplaceFirstScanArrays(mz, intensities);

                IO.MzML.MzmlMethods.CreateAndWriteMyMzmlWithCalibratedSpectra(myMsDataFile, mzmlName, false);
            }

            #endregion Generate and write the mzml

            // RUN!
            var theStringResult = task.RunTask(TestContext.CurrentContext.TestDirectory, new List<DbForTask> { new DbForTask(xmlName, false) }, new List<string> { mzmlName }, "taskId1").ToString();
            Assert.IsTrue(theStringResult.Contains("All target PSMS within 1% FDR: 1"));
        }

        // A GPTMD candidate mod that matches an exact mass delta in the data must
        // be added to the database exactly once.
        [Test]
        public static void MakeSureGptmdTaskMatchesExactMatches()
        {
            MetaMorpheusTask task1;

            #region Setup tasks

            {
                ModificationMotif.TryGetMotif("T", out ModificationMotif motif);
                GlobalEngineLevelSettings.AddMods(new List<ModificationWithMass> { new ModificationWithMass("ok", "okType", motif, TerminusLocalization.Any, 229) });
                task1 = new GptmdTask
                {
                    CommonParameters = new CommonParameters
                    {
                        ConserveMemory = false,
                        DigestionParams = new DigestionParams
                        {
                            InitiatorMethionineBehavior = InitiatorMethionineBehavior.Retain,
                        },
                        ListOfModsVariable = new List<Tuple<string, string>>(),
                        ListOfModsFixed = new List<Tuple<string, string>>(),
                        ScoreCutoff = 1
                    },
                    GptmdParameters = new GptmdParameters
                    {
                        ListOfModsGptmd = new List<Tuple<string, string>> { new Tuple<string, string>("okType", "ok") },
                        PrecursorMassTolerance = new AbsoluteTolerance(1)
                    }
                };
            }

            #endregion Setup tasks

            string xmlName = "sweetness.xml";

            #region Generate protein and write to file

            {
                Protein theProtein = new Protein("MPEPTIDEKANTHE", "accession1");
                ProteinDbWriter.WriteXmlDatabase(new Dictionary<string, HashSet<Tuple<int, Modification>>>(), new List<Protein> { theProtein }, xmlName);
            }

            #endregion Generate protein and write to file

            string mzmlName = @"ok.mzML";

            #region Generate and write the mzml

            {
                var theProteins = ProteinDbLoader.LoadProteinXML(xmlName, true, true, new List<Modification>(), false, new List<string>(), out Dictionary<string, Modification> ok);

                List<ModificationWithMass> fixedModifications = new List<ModificationWithMass>();

                var targetDigested = theProteins[0].Digest(task1.CommonParameters.DigestionParams, fixedModifications).ToList();

                ModificationMotif.TryGetMotif("T", out ModificationMotif motif);

                var okjhjf = targetDigested[0].GetPeptidesWithSetModifications(task1.CommonParameters.DigestionParams, GlobalEngineLevelSettings.AllModsKnown.OfType<ModificationWithMass>().ToList()).ToList();
                PeptideWithSetModifications targetGood = okjhjf.First();

                var okjhj = targetDigested[1].GetPeptidesWithSetModifications(task1.CommonParameters.DigestionParams, GlobalEngineLevelSettings.AllModsKnown.OfType<ModificationWithMass>().ToList()).ToList();
                // Last() selects the peptide carrying the "unknown" mod so GPTMD has something to discover.
                PeptideWithSetModifications targetWithUnknownMod = okjhj.Last();

                IMsDataFile<IMsDataScan<IMzSpectrum<IMzPeak>>> myMsDataFile = new TestDataFile(new List<PeptideWithSetModifications> { targetGood, targetWithUnknownMod }, true);

                IO.MzML.MzmlMethods.CreateAndWriteMyMzmlWithCalibratedSpectra(myMsDataFile, mzmlName, false);
            }

            #endregion Generate and write the mzml

            // RUN!
            var theStringResult = task1.RunTask(TestContext.CurrentContext.TestDirectory, new List<DbForTask> { new DbForTask(xmlName, false) }, new List<string> { mzmlName }, "taskId1").ToString();
            Assert.IsTrue(theStringResult.Contains("Modifications added: 1"));
        }

        //test if prunedDatabase matches expected output
        [Test]
        public static void TestPrunedDatabase()
        {
            hasPrunedRun = true;

            #region setup

            //Create Search Task
            SearchTask task1 = new SearchTask
            {
                SearchParameters = new SearchParameters
                {
                    WritePrunedDatabase = true
                }
            };

            //add task 1 to task list
            List<Tuple<string, MetaMorpheusTask>> taskList = new List<Tuple<string, MetaMorpheusTask>> {
                new Tuple<string, MetaMorpheusTask>("task1", task1)};

            ModificationMotif.TryGetMotif("P", out ModificationMotif motif);

            var connorMod = new ModificationWithMass("ConnorMod", "ConnorModType", motif, TerminusLocalization.Any, 10);

            GlobalEngineLevelSettings.AddMods(new List<ModificationWithLocation>
            {
                connorMod
            });

            #endregion setup

            #region Protein and Mod Creation

            //create modification lists
            List<ModificationWithMass> variableModifications = GlobalEngineLevelSettings.AllModsKnown.OfType<ModificationWithMass>().Where(b => task1.CommonParameters.ListOfModsVariable.Contains(new Tuple<string, string>(b.modificationType, b.id))).ToList();

            //add modification to Protein object
            var dictHere = new Dictionary<int, List<Modification>>();
            ModificationWithMass modToAdd = connorMod;
            ModificationWithMass modToAdd2 = connorMod;
            dictHere.Add(1, new List<Modification> { modToAdd });
            dictHere.Add(3, new List<Modification> { modToAdd2 });

            //protein Creation (One with mod and one without)
            Protein TestProtein = new Protein("PEPTID", "accession1");
            Protein TestProteinWithMod = new Protein("PEPTID", "accession1", new List<Tuple<string, string>>(), dictHere);

            #endregion Protein and Mod Creation

            #region XML File

            Console.WriteLine("hi");
            //First Write XML Database
            string xmlName = "okkk.xml";

            //Add Mod to list and write XML input database
            Dictionary<string, HashSet<Tuple<int, Modification>>> modList = new Dictionary<string, HashSet<Tuple<int, Modification>>>();
            var Hash = new HashSet<Tuple<int, Modification>>
            {
                new Tuple<int, Modification>(3, modToAdd)
            };
            modList.Add("test", Hash);
            ProteinDbWriter.WriteXmlDatabase(modList, new List<Protein> { TestProteinWithMod }, xmlName);

            #endregion XML File

            #region MZML File

            //now write MZML file
            var protein = ProteinDbLoader.LoadProteinXML(xmlName, true, true, new List<Modification>(), false, new List<string>(), out Dictionary<string, Modification> ok);
            var digestedList = protein[0].Digest(task1.CommonParameters.DigestionParams, new List<ModificationWithMass> { }).ToList();
            Assert.AreEqual(1, digestedList.Count);
            PeptideWithPossibleModifications modPep1 = digestedList[0];
            var setList1 = modPep1.GetPeptidesWithSetModifications(task1.CommonParameters.DigestionParams, variableModifications).ToList();
            Assert.AreEqual(4, setList1.Count);

            //Set Peptide with 1 mod at position 3
            PeptideWithSetModifications pepWithSetMods1 = setList1[1];

            //Finally Write MZML file
            Assert.AreEqual("PEP[ConnorModType:ConnorMod]TID", pepWithSetMods1.Sequence);
            IMsDataFile<IMsDataScan<IMzSpectrum<IMzPeak>>> myMsDataFile = new TestDataFile(new List<PeptideWithSetModifications> { pepWithSetMods1 });
            string mzmlName = @"hello.mzML";
            IO.MzML.MzmlMethods.CreateAndWriteMyMzmlWithCalibratedSpectra(myMsDataFile, mzmlName, false);

            #endregion MZML File

            //run!
            var engine = new EverythingRunnerEngine(taskList, new List<string> { mzmlName }, new List<DbForTask> { new DbForTask(xmlName, false) }, null);
            engine.Run();

            string outputFolderInThisTest = MySetUpClass.outputFolder;
            string final = Path.Combine(MySetUpClass.outputFolder, "task1", "okkkpruned.xml");
            //string[] files = Directory.GetFiles(fileAtPath);
            //string file = fileAtPath;
            var proteins = ProteinDbLoader.LoadProteinXML(final, true, true, new List<Modification>(), false, new List<string>(), out ok);
            //check length
            Assert.AreEqual(proteins[0].OneBasedPossibleLocalizedModifications.Count, 1);
            //check location (key)
            Assert.AreEqual(proteins[0].OneBasedPossibleLocalizedModifications.ContainsKey(3), true);
            List<Modification> listOfMods = new List<Modification>();
            listOfMods = proteins[0].OneBasedPossibleLocalizedModifications[3];
            //check Type, count, ID
            Assert.AreEqual(listOfMods[0].modificationType, "ConnorModType");
            Assert.AreEqual(listOfMods[0].id, "ConnorMod");
            Assert.AreEqual(listOfMods.Count, 1);
        }

        // Repeated spectra of the same modified peptide must collapse to 4 unique
        // peptides in the FDR report (duplicates counted once).
        [Test]
        public static void TestUniquePeptideCount()
        {
            #region setup

            SearchTask testUnique = new SearchTask
            {
                CommonParameters = new CommonParameters
                {
                    ListOfModsLocalize = new List<Tuple<string, string>> { new Tuple<string, string>("ConnorModType", "ConnorMod") },
                },
                SearchParameters = new SearchParameters
                {
                    WritePrunedDatabase = true
                }
            };

            List<Tuple<string, MetaMorpheusTask>> taskList = new List<Tuple<string, MetaMorpheusTask>> {
                new Tuple<string, MetaMorpheusTask>("TestUnique", testUnique)};

            ModificationMotif.TryGetMotif("P", out ModificationMotif motif);

            var testUniqeMod = new ModificationWithMass("testUniqeMod", "mt", motif, TerminusLocalization.Any, 10);

            GlobalEngineLevelSettings.AddMods(new List<ModificationWithLocation>
            {
                testUniqeMod
            });

            #endregion setup

            #region mod setup and protein creation

            //create modification lists
            List<ModificationWithMass> variableModifications = GlobalEngineLevelSettings.AllModsKnown.OfType<ModificationWithMass>().Where(b => testUnique.CommonParameters.ListOfModsVariable.Contains(new Tuple<string, string>(b.modificationType, b.id))).ToList();

            //add modification to Protein object
            var modDictionary = new Dictionary<int, List<Modification>>();
            ModificationWithMass modToAdd = testUniqeMod;
            modDictionary.Add(1, new List<Modification> { modToAdd });
            modDictionary.Add(3, new List<Modification> { modToAdd });

            //protein Creation (One with mod and one without)
            Protein TestProtein = new Protein("PEPTID", "accession1", new List<Tuple<string, string>>(), modDictionary);

            #endregion mod setup and protein creation

            #region XML setup

            //First Write XML Database
            string xmlName = "singleProteinWithTwoMods.xml";

            //Add Mod to list and write XML input database
            Dictionary<string, HashSet<Tuple<int, Modification>>> modList = new Dictionary<string, HashSet<Tuple<int, Modification>>>();
            var Hash = new HashSet<Tuple<int, Modification>>
            {
                new Tuple<int, Modification>(3, modToAdd)
            };
            modList.Add("test", Hash);
            ProteinDbWriter.WriteXmlDatabase(modList, new List<Protein> { TestProtein }, xmlName);

            #endregion XML setup

            #region MZML setup

            //now write MZML file
            var protein = ProteinDbLoader.LoadProteinXML(xmlName, true, true, new List<Modification>(), false, new List<string>(), out Dictionary<string, Modification> ok);
            var digestedList = protein[0].Digest(testUnique.CommonParameters.DigestionParams, new List<ModificationWithMass> { }).ToList();
            Assert.AreEqual(1, digestedList.Count);
            PeptideWithPossibleModifications modPep1 = digestedList[0];
            var setList1 = modPep1.GetPeptidesWithSetModifications(testUnique.CommonParameters.DigestionParams, variableModifications).ToList();
            Assert.AreEqual(4, setList1.Count);

            //Finally Write MZML file
            // Six spectra but only four distinct peptides: [0] and [1] appear twice.
            IMsDataFile<IMsDataScan<IMzSpectrum<IMzPeak>>> myMsDataFile = new TestDataFile(new List<PeptideWithSetModifications> { setList1[0], setList1[1], setList1[2], setList1[3], setList1[0], setList1[1] });
            string mzmlName = @"singleProteinWithRepeatedMods.mzML";
            IO.MzML.MzmlMethods.CreateAndWriteMyMzmlWithCalibratedSpectra(myMsDataFile, mzmlName, false);

            #endregion MZML setup

            #region run

            string outputFolderInThisTest = MySetUpClass.outputFolder;
            var engine = new EverythingRunnerEngine(taskList, new List<string> { mzmlName }, new List<DbForTask> { new DbForTask(xmlName, false) }, null);
            engine.Run();

            // Scan results.txt for the expected unique-peptide count line.
            string line;
            bool foundD = false;
            using (StreamReader file = new StreamReader(Path.Combine(MySetUpClass.outputFolder, "TestUnique", "results.txt")))
            {
                while ((line = file.ReadLine()) != null)
                {
                    if (line.Contains("Unique peptides within 1% FDR: 4"))
                    {
                        foundD = true;
                    }
                }
            }
            Assert.IsTrue(foundD);

            #endregion run
        }

        #endregion Public Methods
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

namespace System.CodeDom
{
    using System.Diagnostics;
    using System;
    using Microsoft.Win32;
    using System.Collections;
    using System.Runtime.InteropServices;
    using System.Globalization;
    using System.Reflection;

    // Flags that control how a CodeTypeReference is resolved/emitted by a generator.
    [
        ComVisible(true),
        FlagsAttribute
    ]
    internal enum CodeTypeReferenceOptions
    {
        GlobalReference = 0x00000001,
        GenericTypeParameter = 0x00000002
    }

    /// <devdoc>
    ///    <para>
    ///       Represents a reference to a type. A reference is either a (possibly
    ///       generic) named type — stored in <c>_baseType</c> plus optional
    ///       <c>_typeArguments</c> — or an array, stored as <c>_arrayRank</c> &gt; 0
    ///       with <c>_arrayElementType</c> set and <c>_baseType</c> null.
    ///    </para>
    /// </devdoc>
    [
        ComVisible(true)
    ]
    internal class CodeTypeReference : CodeObject
    {
        private string _baseType;                            // namespace-qualified type name; null when this reference is an array
        private bool _isInterface;                           // only meaningful when the Type-based ctor was used
        private int _arrayRank;                              // 0 => not an array
        private CodeTypeReference _arrayElementType;         // element type when _arrayRank > 0
        private CodeTypeReferenceCollection _typeArguments;  // created lazily by the TypeArguments property
        private CodeTypeReferenceOptions _referenceOptions;

        // When true, BaseType appends a "`<arity>" suffix for any type arguments
        // added after construction (e.g. caller passed "System.Nullable" and later
        // added arguments). Cleared once the name itself carries arity ('`') or
        // bracketed generic arguments were parsed out of the string.
        private bool _needsFixup = false;

        /// <devdoc>Creates an empty type reference.</devdoc>
        public CodeTypeReference()
        {
            _baseType = string.Empty;
            _arrayRank = 0;
            _arrayElementType = null;
        }

        /// <devdoc>Creates a reference mirroring the given runtime <see cref="Type"/>.</devdoc>
        public CodeTypeReference(Type type)
        {
            if (type == null)
                throw new ArgumentNullException("type");

            if (type.IsArray)
            {
                // Arrays are represented as rank + element type; _baseType stays null.
                _arrayRank = type.GetArrayRank();
                _arrayElementType = new CodeTypeReference(type.GetElementType());
                _baseType = null;
            }
            else
            {
                InitializeFromType(type);
                _arrayRank = 0;
                _arrayElementType = null;
            }

            _isInterface = type.GetTypeInfo().IsInterface;
        }

        public CodeTypeReference(Type type, CodeTypeReferenceOptions codeTypeReferenceOption) : this(type)
        {
            _referenceOptions = codeTypeReferenceOption;
        }

        public CodeTypeReference(String typeName, CodeTypeReferenceOptions codeTypeReferenceOption)
        {
            Initialize(typeName, codeTypeReferenceOption);
        }

        /// <devdoc>
        ///    <para>
        ///       Creates a reference from a type-name string. The reflection format
        ///       for generic type names (backtick arity plus bracketed, possibly
        ///       assembly-qualified arguments) is supported.
        ///    </para>
        /// </devdoc>
        public CodeTypeReference(string typeName)
        {
            Initialize(typeName);
        }

        // Fills _baseType (and generic arguments / fixup flag) from a runtime Type.
        private void InitializeFromType(Type type)
        {
            _baseType = type.Name;
            if (!type.IsGenericParameter)
            {
                // Build "Outer+Nested" names for nested types, then prepend the namespace.
                Type currentType = type;
                while (currentType.IsNested)
                {
                    currentType = currentType.DeclaringType;
                    _baseType = currentType.Name + "+" + _baseType;
                }

                if (!String.IsNullOrEmpty(type.Namespace))
                    _baseType = type.Namespace + "." + _baseType;
            }

            // pick up the type arguments from an instantiated generic type but not an open one
            if (type.GetTypeInfo().IsGenericType && !type.GetTypeInfo().ContainsGenericParameters)
            {
                Type[] genericArgs = type.GetGenericArguments();
                for (int i = 0; i < genericArgs.Length; i++)
                {
                    TypeArguments.Add(new CodeTypeReference(genericArgs[i]));
                }
            }
            else if (!type.GetTypeInfo().IsGenericTypeDefinition)
            {
                // if the user handed us a non-generic type, but later
                // appends generic type arguments, we'll pretend
                // it's a generic type for their sake - this is good for
                // them if they pass in System.Nullable class when they
                // meant the System.Nullable<T> value type.
                _needsFixup = true;
            }
        }

        private void Initialize(string typeName)
        {
            Initialize(typeName, _referenceOptions);
        }

        // Parses a (possibly assembly-qualified, generic, and/or array) type-name
        // string into this reference. The scan runs right-to-left over the string:
        // first peeling array tails ("[]", "[,]", ...), then bracketed generic
        // arguments, leaving the plain base name at the front.
        private void Initialize(string typeName, CodeTypeReferenceOptions options)
        {
            Options = options;
            if (typeName == null || typeName.Length == 0)
            {
                // Null/empty names fall back to System.Void.
                typeName = typeof(void).FullName;
                _baseType = typeName;
                _arrayRank = 0;
                _arrayElementType = null;
                return;
            }

            typeName = RipOffAssemblyInformationFromTypeName(typeName);

            int end = typeName.Length - 1;
            int current = end;
            _needsFixup = true; // default to true, and if we find arity or generic type args, we'll clear the flag.

            // Scan the entire string for valid array tails and store ranks for array tails
            // we found in a queue. Scanning right-to-left means the queue holds the
            // outermost array's rank first.
            Queue q = new Queue();
            while (current >= 0)
            {
                int rank = 1;
                if (typeName[current--] == ']')
                {
                    while (current >= 0 && typeName[current] == ',')
                    {
                        rank++;
                        current--;
                    }

                    if (current >= 0 && typeName[current] == '[')
                    {
                        // found a valid array tail
                        q.Enqueue(rank);
                        current--;
                        end = current;
                        continue;
                    }
                }

                break;
            }

            // Try find generic type arguments
            current = end;
            ArrayList typeArgumentList = new ArrayList();
            Stack subTypeNames = new Stack();
            if (current > 0 && typeName[current--] == ']')
            {
                _needsFixup = false;
                int unmatchedRightBrackets = 1;
                int subTypeNameEndIndex = end;

                // Try find the matching '[', if we can't find it, we will not try to parse the string
                while (current >= 0)
                {
                    if (typeName[current] == '[')
                    {
                        // break if we found matched brackets
                        if (--unmatchedRightBrackets == 0) break;
                    }
                    else if (typeName[current] == ']')
                    {
                        ++unmatchedRightBrackets;
                    }
                    else if (typeName[current] == ',' && unmatchedRightBrackets == 1)
                    {
                        //
                        // Type name can contain nested generic types. Following is an example:
                        // System.Collections.Generic.Dictionary`2[[System.String, mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089],
                        //          [System.Collections.Generic.List`1[[System.Int32, mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]],
                        //           mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]]
                        //
                        // Splitting the whole string by ',' won't work; only commas at
                        // bracket depth 1 separate the first-level type arguments.
                        //
                        if (current + 1 < subTypeNameEndIndex)
                        {
                            subTypeNames.Push(typeName.Substring(current + 1, subTypeNameEndIndex - current - 1));
                        }

                        subTypeNameEndIndex = current;
                    }

                    --current;
                }

                if (current > 0 && (end - current - 1) > 0)
                {
                    // push the last generic type argument name if there is any
                    if (current + 1 < subTypeNameEndIndex)
                    {
                        subTypeNames.Push(typeName.Substring(current + 1, subTypeNameEndIndex - current - 1));
                    }

                    // we found matched brackets and the brackets contains some characters.
                    // Arguments were pushed rightmost-first during the scan, so popping
                    // the stack yields them in declared (left-to-right) order.
                    while (subTypeNames.Count > 0)
                    {
                        String name = RipOffAssemblyInformationFromTypeName((string)subTypeNames.Pop());
                        typeArgumentList.Add(new CodeTypeReference(name));
                    }

                    end = current - 1;
                }
            }

            if (end < 0)
            {
                // this can happen if we have some string like "[...]"
                _baseType = typeName;
                return;
            }

            if (q.Count > 0)
            {
                // Wrap the base reference in one CodeTypeReference per array tail,
                // innermost rank first; the last (outermost) rank stays on this instance.
                CodeTypeReference type = new CodeTypeReference(typeName.Substring(0, end + 1), Options);

                for (int i = 0; i < typeArgumentList.Count; i++)
                {
                    type.TypeArguments.Add((CodeTypeReference)typeArgumentList[i]);
                }

                while (q.Count > 1)
                {
                    type = new CodeTypeReference(type, (int)q.Dequeue());
                }

                // we don't need to create a new CodeTypeReference for the last one.
                Debug.Assert(q.Count == 1, "We should have one and only one in the rank queue.");
                _baseType = null;
                _arrayRank = (int)q.Dequeue();
                _arrayElementType = type;
            }
            else if (typeArgumentList.Count > 0)
            {
                for (int i = 0; i < typeArgumentList.Count; i++)
                {
                    TypeArguments.Add((CodeTypeReference)typeArgumentList[i]);
                }

                _baseType = typeName.Substring(0, end + 1);
            }
            else
            {
                _baseType = typeName;
            }

            // Now see if we have some arity. baseType could be null if this is an array type.
            if (_baseType != null && _baseType.IndexOf('`') != -1)
                _needsFixup = false;
        }

        public CodeTypeReference(string typeName, params CodeTypeReference[] typeArguments) : this(typeName)
        {
            if (typeArguments != null && typeArguments.Length > 0)
            {
                TypeArguments.AddRange(typeArguments);
            }
        }

        public CodeTypeReference(CodeTypeParameter typeParameter) :
            this((typeParameter == null) ? (string)null : typeParameter.Name)
        {
            _referenceOptions = CodeTypeReferenceOptions.GenericTypeParameter;
        }

        /// <devdoc>Creates an array reference of the given rank over the named base type.</devdoc>
        public CodeTypeReference(string baseType, int rank)
        {
            _baseType = null;
            _arrayRank = rank;
            _arrayElementType = new CodeTypeReference(baseType);
        }

        /// <devdoc>Creates an array reference of the given rank over an existing reference.</devdoc>
        public CodeTypeReference(CodeTypeReference arrayType, int rank)
        {
            _baseType = null;
            _arrayRank = rank;
            _arrayElementType = arrayType;
        }

        /// <devdoc>The element type when this reference is an array; null otherwise.</devdoc>
        public CodeTypeReference ArrayElementType
        {
            get { return _arrayElementType; }
            set { _arrayElementType = value; }
        }

        /// <devdoc>The array rank; 0 when this reference is not an array.</devdoc>
        public int ArrayRank
        {
            get { return _arrayRank; }
            set { _arrayRank = value; }
        }

        // Number of nested array wrappers below this reference.
        internal int NestedArrayDepth
        {
            get
            {
                if (_arrayElementType == null)
                    return 0;

                return 1 + _arrayElementType.NestedArrayDepth;
            }
        }

        /// <devdoc>
        ///    <para>
        ///       The base type name. For arrays this delegates to the element type.
        ///       When the reference was built from a plain name and type arguments
        ///       were added afterwards (_needsFixup), the "`arity" suffix is appended
        ///       on the fly. Setting the name re-parses it completely.
        ///    </para>
        /// </devdoc>
        public string BaseType
        {
            get
            {
                if (_arrayRank > 0 && _arrayElementType != null)
                {
                    return _arrayElementType.BaseType;
                }

                if (String.IsNullOrEmpty(_baseType))
                    return string.Empty;

                string returnType = _baseType;
                if (_needsFixup && TypeArguments.Count > 0)
                    returnType = returnType + '`' + TypeArguments.Count.ToString(CultureInfo.InvariantCulture);

                return returnType;
            }
            set
            {
                _baseType = value;
                Initialize(_baseType);
            }
        }

        [System.Runtime.InteropServices.ComVisible(false)]
        public CodeTypeReferenceOptions Options
        {
            get { return _referenceOptions; }
            set { _referenceOptions = value; }
        }

        // Generic type arguments; for arrays this delegates to the element type.
        // The backing collection is allocated on first access.
        [System.Runtime.InteropServices.ComVisible(false)]
        public CodeTypeReferenceCollection TypeArguments
        {
            get
            {
                if (_arrayRank > 0 && _arrayElementType != null)
                {
                    return _arrayElementType.TypeArguments;
                }

                if (_typeArguments == null)
                {
                    _typeArguments = new CodeTypeReferenceCollection();
                }

                return _typeArguments;
            }
        }

        internal bool IsInterface
        {
            get
            {
                // Note that this only works correctly if the Type ctor was used. Otherwise, it's always false.
                return _isInterface;
            }
        }

        //
        // The string for a generic type argument might contain assembly information and a square-bracket pair,
        // and there might be leading/trailing whitespace around the type name. This function strips the
        // brackets and the assembly qualification. Example input:
        // " [System.Collections.Generic.List[[System.String, mscorlib, Version=2.0.0.0, Culture=neutral,
        //   PublicKeyToken=b77a5c561934e089]], mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]"
        //
        private string RipOffAssemblyInformationFromTypeName(string typeName)
        {
            int start = 0;
            int end = typeName.Length - 1;
            string result = typeName;

            // skip white space at the beginning and the end
            while (start < typeName.Length && Char.IsWhiteSpace(typeName[start])) start++;
            while (end >= 0 && Char.IsWhiteSpace(typeName[end])) end--;

            if (start < end)
            {
                // Strip one enclosing bracket pair, if present.
                if (typeName[start] == '[' && typeName[end] == ']')
                {
                    start++;
                    end--;
                }

                // if we still have a ] at the end, there's no assembly info.
                if (typeName[end] != ']')
                {
                    // An assembly qualification adds four trailing comma-separated parts
                    // (assembly name, Version, Culture, PublicKeyToken) — see the example
                    // above — so cut at the fourth comma counted from the right.
                    int commaCount = 0;
                    for (int index = end; index >= start; index--)
                    {
                        if (typeName[index] == ',')
                        {
                            commaCount++;
                            if (commaCount == 4)
                            {
                                result = typeName.Substring(start, index - start);
                                break;
                            }
                        }
                    }
                }
            }

            return result;
        }
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using osu.Framework.Allocation;
using osu.Framework.Extensions.Color4Extensions;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Effects;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Graphics.Sprites;
using osu.Game.Beatmaps;
using osu.Game.Beatmaps.Legacy;
using osu.Game.Graphics;
using osu.Game.Graphics.Sprites;
using osu.Game.Screens.Menu;
using osuTK;
using osuTK.Graphics;

namespace osu.Game.Tournament.Components
{
    /// <summary>
    /// A rounded bar showing the currently selected beatmap's statistics
    /// (length, BPM, OD, HP, star rating) alongside a <see cref="TournamentBeatmapPanel"/>.
    /// Displayed values are annotated with "*" when affected by the active mods.
    /// </summary>
    public class SongBar : CompositeDrawable
    {
        private BeatmapInfo beatmap;

        /// <summary>
        /// The beatmap whose details are shown. Setting a new value rebuilds the
        /// panel contents; setting the same value is a no-op.
        /// </summary>
        public BeatmapInfo Beatmap
        {
            get => beatmap;
            set
            {
                if (beatmap == value)
                    return;

                beatmap = value;
                update();
            }
        }

        private LegacyMods mods;

        /// <summary>
        /// The active mods; HardRock and DoubleTime alter the displayed statistics.
        /// </summary>
        public LegacyMods Mods
        {
            get => mods;
            set
            {
                mods = value;
                update();
            }
        }

        private Container panelContents;
        private Container innerPanel;
        private Container outerPanel;
        private TournamentBeatmapPanel panel;

        // Beatmap panel takes 60% relative width while expanded, full width otherwise.
        private float panelWidth => expanded ? 0.6f : 1;

        private const float main_width = 0.97f;
        private const float inner_panel_width = 0.7f;

        private bool expanded;

        /// <summary>
        /// Expands or collapses the bar with an animated (800ms OutQuint) resize of
        /// the inner/outer panels.
        /// </summary>
        public bool Expanded
        {
            get => expanded;
            set
            {
                expanded = value;
                // panel may not have been created yet (no beatmap set) — hence ?.
                panel?.ResizeWidthTo(panelWidth, 800, Easing.OutQuint);

                if (expanded)
                {
                    innerPanel.ResizeWidthTo(inner_panel_width, 800, Easing.OutQuint);
                    outerPanel.ResizeWidthTo(main_width, 800, Easing.OutQuint);
                }
                else
                {
                    innerPanel.ResizeWidthTo(1, 800, Easing.OutQuint);
                    outerPanel.ResizeWidthTo(0.25f, 800, Easing.OutQuint);
                }
            }
        }

        // Builds the static drawable hierarchy; dynamic content lives in panelContents.
        [BackgroundDependencyLoader]
        private void load()
        {
            RelativeSizeAxes = Axes.Both;

            InternalChildren = new Drawable[]
            {
                outerPanel = new Container
                {
                    Masking = true,
                    EdgeEffect = new EdgeEffectParameters
                    {
                        Colour = Color4.Black.Opacity(0.2f),
                        Type = EdgeEffectType.Shadow,
                        Radius = 5,
                    },
                    RelativeSizeAxes = Axes.X,
                    Anchor = Anchor.BottomRight,
                    Origin = Anchor.BottomRight,
                    RelativePositionAxes = Axes.X,
                    // keep the bar horizontally centred given its reduced width
                    X = -(1 - main_width) / 2,
                    Y = -10,
                    Width = main_width,
                    Height = TournamentBeatmapPanel.HEIGHT,
                    CornerRadius = TournamentBeatmapPanel.HEIGHT / 2,
                    Children = new Drawable[]
                    {
                        new Box
                        {
                            RelativeSizeAxes = Axes.Both,
                            Colour = OsuColour.Gray(0.93f),
                        },
                        new OsuLogo
                        {
                            Triangles = false,
                            Colour = OsuColour.Gray(0.33f),
                            Scale = new Vector2(0.08f),
                            Margin = new MarginPadding(50),
                            Anchor = Anchor.CentreRight,
                            Origin = Anchor.CentreRight,
                        },
                        innerPanel = new Container
                        {
                            Masking = true,
                            CornerRadius = TournamentBeatmapPanel.HEIGHT / 2,
                            Anchor = Anchor.Centre,
                            Origin = Anchor.Centre,
                            RelativeSizeAxes = Axes.Both,
                            Width = inner_panel_width,
                            Children = new Drawable[]
                            {
                                new Box
                                {
                                    RelativeSizeAxes = Axes.Both,
                                    Colour = OsuColour.Gray(0.86f),
                                },
                                panelContents = new Container
                                {
                                    RelativeSizeAxes = Axes.Both,
                                }
                            }
                        }
                    }
                }
            };

            Expanded = true;
        }

        // Rebuilds the stat pieces and beatmap panel from the current beatmap/mods.
        private void update()
        {
            if (beatmap == null)
            {
                panelContents.Clear();
                return;
            }

            var bpm = beatmap.BeatmapSet.OnlineInfo.BPM;
            var length = beatmap.Length;
            string hardRockExtra = "";
            string srExtra = "";

            //var ar = beatmap.BaseDifficulty.ApproachRate;
            if ((mods & LegacyMods.HardRock) > 0)
            {
                hardRockExtra = "*";
                srExtra = "*";
            }

            if ((mods & LegacyMods.DoubleTime) > 0)
            {
                // 1.5x playback: BPM rises and effective length shrinks by the same factor.
                //ar *= 1.5f;
                bpm *= 1.5f;
                length /= 1.5f;
                srExtra = "*";
            }

            panelContents.Children = new Drawable[]
            {
                new DiffPiece(("Length", TimeSpan.FromMilliseconds(length).ToString(@"mm\:ss")))
                {
                    Anchor = Anchor.CentreLeft,
                    Origin = Anchor.BottomLeft,
                },
                new DiffPiece(("BPM", $"{bpm:0.#}"))
                {
                    Anchor = Anchor.CentreLeft,
                    Origin = Anchor.TopLeft
                },
                new DiffPiece(
                    //("CS", $"{beatmap.BaseDifficulty.CircleSize:0.#}{hardRockExtra}"),
                    //("AR", $"{ar:0.#}{srExtra}"),
                    ("OD", $"{beatmap.BaseDifficulty.OverallDifficulty:0.#}{hardRockExtra}"),
                    ("HP", $"{beatmap.BaseDifficulty.DrainRate:0.#}{hardRockExtra}")
                )
                {
                    Anchor = Anchor.CentreRight,
                    Origin = Anchor.BottomRight
                },
                new DiffPiece(("Star Rating", $"{beatmap.StarDifficulty:0.#}{srExtra}"))
                {
                    Anchor = Anchor.CentreRight,
                    Origin = Anchor.TopRight
                },
                panel = new TournamentBeatmapPanel(beatmap)
                {
                    Anchor = Anchor.Centre,
                    Origin = Anchor.Centre,
                    RelativeSizeAxes = Axes.Both,
                    Size = new Vector2(panelWidth, 1)
                }
            };
        }

        /// <summary>
        /// Renders one or more "heading value" text pairs, joined by " / "
        /// separators when several tuples are supplied.
        /// </summary>
        public class DiffPiece : TextFlowContainer
        {
            public DiffPiece(params (string heading, string content)[] tuples)
            {
                Margin = new MarginPadding { Horizontal = 15, Vertical = 1 };
                AutoSizeAxes = Axes.Both;

                // shared styling applied to every sprite text in this piece
                void cp(SpriteText s, Color4 colour)
                {
                    s.Colour = colour;
                    s.Font = OsuFont.GetFont(weight: FontWeight.Bold, size: 15);
                }

                for (var i = 0; i < tuples.Length; i++)
                {
                    var tuple = tuples[i];

                    if (i > 0)
                    {
                        AddText(" / ", s =>
                        {
                            cp(s, OsuColour.Gray(0.33f));
                            s.Spacing = new Vector2(-2, 0);
                        });
                    }

                    AddText(new OsuSpriteText { Text = tuple.heading }, s => cp(s, OsuColour.Gray(0.33f)));
                    AddText(" ", s => cp(s, OsuColour.Gray(0.33f)));
                    AddText(new OsuSpriteText { Text = tuple.content }, s => cp(s, OsuColour.Gray(0.5f)));
                }
            }
        }
    }
}
namespace Microsoft.Protocols.TestSuites.MS_WOPI
{
    using System;
    using System.Net;
    using System.Net.Sockets;
    using System.Text;
    using System.Threading;
    using System.Xml;

    /// <summary>
    /// This class is used to help the implementation of the discovery operation. It hosts a
    /// minimal TCP listener on port 80 that answers "GET /hosting/discovery" requests with a
    /// caller-supplied discovery XML.
    /// </summary>
    public class DiscoveryRequestListener : HelperBase, IDisposable
    {
        /// <summary>
        /// A bool value indicating whether the listen thread has been started by one instance of this type.
        /// </summary>
        private static bool hasStartListenThread = false;

        /// <summary>
        /// A bool value indicating whether the listen thread has responded to a discovery request successfully.
        /// </summary>
        private static bool hasResponseDiscoveryRequestSucceed = false;

        /// <summary>
        /// A thread handle indicating the instance of the listen thread.
        /// </summary>
        private static Thread listenThreadHandle = null;

        /// <summary>
        /// An object instance used for lock blocks shared across threads. It serializes access to the
        /// listen thread state (start/stop/status flags).
        /// </summary>
        private static object threadLockStaticObjectForVisitThread = new object();

        /// <summary>
        /// An object instance used for lock blocks shared across threads. It serializes log appends
        /// performed from different threads.
        /// </summary>
        private static object threadLockObjectForAppendLog = new object();

        /// <summary>
        /// A Type instance representing the current helper's type information (used for log attribution).
        /// </summary>
        private static Type currentHelperType;

        /// <summary>
        /// Initializes a new instance of the <see cref="DiscoveryRequestListener"/> class.
        /// </summary>
        /// <param name="hostDiscoveryMachineName">A parameter represents the machine name which will listen the discovery request. The value must be the name of the current machine.</param>
        /// <param name="responseXmlForDiscovery">A parameter represents the discovery response which will response to WOPI server.</param>
        public DiscoveryRequestListener(string hostDiscoveryMachineName, string responseXmlForDiscovery)
        {
            if (string.IsNullOrEmpty(hostDiscoveryMachineName))
            {
                throw new ArgumentNullException("hostDiscoveryMachineName");
            }

            if (string.IsNullOrEmpty(responseXmlForDiscovery))
            {
                throw new ArgumentNullException("responseXmlForDiscovery");
            }

            if (null == currentHelperType)
            {
                currentHelperType = this.GetType();
            }

            this.HostNameOfDiscoveryService = hostDiscoveryMachineName;
            this.ResponseDiscovery = responseXmlForDiscovery;

            // The listener is shared (static); only the first instance creates it.
            if (null == ListenInstance)
            {
                IPAddress iPAddress = IPAddress.Any;
                IPEndPoint endPoint = new IPEndPoint(iPAddress, 80);
                ListenInstance = new TcpListener(endPoint);
            }

            this.IsRequiredStop = false;
            this.IsDisposed = false;
        }

        /// <summary>
        /// Finalizes an instance of the <see cref="DiscoveryRequestListener"/> class. This method will be invoked by the .NET GC collector automatically.
        /// </summary>
        ~DiscoveryRequestListener()
        {
            lock (threadLockStaticObjectForVisitThread)
            {
                this.Dispose(false);
            }
        }

        #region properties

        /// <summary>
        /// Gets a value indicating whether the DiscoveryRequestListener has responded to a discovery request successfully.
        /// </summary>
        public static bool HasResponseSucceed
        {
            get
            {
                lock (threadLockStaticObjectForVisitThread)
                {
                    return hasResponseDiscoveryRequestSucceed;
                }
            }
        }

        /// <summary>
        /// Gets or sets the TcpListener instance shared by all instances of this type.
        /// </summary>
        protected static TcpListener ListenInstance { get; set; }

        /// <summary>
        /// Gets or sets the host name which will listen and response for the discovery request.
        /// </summary>
        protected string HostNameOfDiscoveryService { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether the DiscoveryRequestListener type has released related resource.
        /// </summary>
        protected bool IsDisposed { get; set; }

        /// <summary>
        /// Gets or sets the response information for the discovery request.
        /// </summary>
        protected string ResponseDiscovery { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether the listening thread is required to stop.
        /// </summary>
        protected bool IsRequiredStop { get; set; }

        #endregion

        /// <summary>
        /// A method is used to implement the IDisposable interface, it allows the user to dispose the current instance if user need to release allocated resources.
        /// </summary>
        public void Dispose()
        {
            lock (threadLockStaticObjectForVisitThread)
            {
                this.Dispose(true);
                GC.SuppressFinalize(this);
            }
        }

        /// <summary>
        /// A method is used to start the listen thread to listen the discovery request.
        /// </summary>
        /// <returns>A return value represents the thread instance handle, which is processing the listen logic. This thread instance can be used to control the thread's status and clean up.</returns>
        public Thread StartListen()
        {
            // Verify whether the listen thread has been started from a DiscoveryRequestListener type instance.
            lock (threadLockStaticObjectForVisitThread)
            {
                lock (threadLockObjectForAppendLog)
                {
                    DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, @"Try to start listener thread from current thread.");
                }

                // StopListen releases the shared listener; recreate it when needed.
                if (null == ListenInstance)
                {
                    IPAddress iPAddress = IPAddress.Any;
                    IPEndPoint endPoint = new IPEndPoint(iPAddress, 80);
                    ListenInstance = new TcpListener(endPoint);
                }

                if (hasStartListenThread)
                {
                    lock (threadLockObjectForAppendLog)
                    {
                        DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, string.Format(@"The listen thread [{0}] exists.", listenThreadHandle.ManagedThreadId));
                    }

                    return listenThreadHandle;
                }

                listenThreadHandle = new Thread(this.ListenToRequest);
                listenThreadHandle.Name = "Listen Discovery request thread";
                listenThreadHandle.Start();

                lock (threadLockObjectForAppendLog)
                {
                    DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, string.Format("Start the listening thread. The listening thread managed Id[{0}]", listenThreadHandle.ManagedThreadId));
                }

                // Set the status to indicate there has started a listen thread.
                hasStartListenThread = true;
                return listenThreadHandle;
            }
        }

        /// <summary>
        /// A method is used to stop listen process. This method will abort the thread which is listening discovery request and release all resource are used by the thread.
        /// </summary>
        public void StopListen()
        {
            lock (threadLockStaticObjectForVisitThread)
            {
                // If the listen thread has not been started, skip the stop operation.
                if (!hasStartListenThread)
                {
                    return;
                }

                this.IsRequiredStop = true;

                lock (threadLockObjectForAppendLog)
                {
                    DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, string.Format("Stop the listening thread.The listening thread managed Id[{0}]", listenThreadHandle.ManagedThreadId));
                }

                if (listenThreadHandle != null && listenThreadHandle.ThreadState != ThreadState.Unstarted && ListenInstance != null)
                {
                    lock (threadLockObjectForAppendLog)
                    {
                        // Close the listener and release its resources. This might cause the listen
                        // thread to throw (out of its blocking Accept), after which it is expected
                        // to end and join the main thread.
                        ListenInstance.Stop();
                        hasStartListenThread = false;
                        DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, string.Format("Release the Httplistener resource. The listening thread managed Id[{0}]", listenThreadHandle.ManagedThreadId));
                    }

                    // Wait the thread join to the main caller thread.
                    TimeSpan listenThreadJoinTimeOut = new TimeSpan(0, 0, 1);
                    bool isthreadEnd = listenThreadHandle.Join(listenThreadJoinTimeOut);

                    // If the thread could not end as expected, abort this thread.
                    if (!isthreadEnd)
                    {
                        if ((listenThreadHandle.ThreadState & (ThreadState.Stopped | ThreadState.Unstarted)) == 0)
                        {
                            listenThreadHandle.Abort();
                            lock (threadLockObjectForAppendLog)
                            {
                                DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, string.Format("Abort the listening thread. The listening thread managed Id[{0}]", listenThreadHandle.ManagedThreadId));
                            }
                        }
                    }

                    // Set the static status to tell other instances the listen thread has been stopped.
                    listenThreadHandle = null;
                }
            }
        }

        #region protected method

        /// <summary>
        /// A method is used to perform custom dispose logic when the GC try to collect this instance.
        /// </summary>
        /// <param name="disposing">A parameter represents the disposing way, the 'true' means it is called from user code by calling IDisposable.Dispose, otherwise it means the GC is trying to process this instance.</param>
        protected virtual void Dispose(bool disposing)
        {
            lock (threadLockStaticObjectForVisitThread)
            {
                if (!this.IsDisposed)
                {
                    this.StopListen();
                    if (disposing)
                    {
                        ListenInstance = null;
                    }

                    this.IsDisposed = true;
                }
            }
        }

        /// <summary>
        /// A method is used to listen for the discovery request. It will be executed by the thread started in the StartListen method.
        /// </summary>
        protected void ListenToRequest()
        {
            ListenInstance.Start();

            // If the listener is listening, just keep on executing the code below.
            while (hasStartListenThread)
            {
                try
                {
                    TcpClient client = ListenInstance.AcceptTcpClient();
                    if (client.Connected == true)
                    {
                        Console.WriteLine("Created connection");
                    }

                    // If the calling thread requires stopping the listening mission, just exit the loop. The "IsRequiredStop" property is managed by the "StopListen" method.
                    if (this.IsRequiredStop)
                    {
                        break;
                    }

                    lock (threadLockStaticObjectForVisitThread)
                    {
                        // Double check the "IsRequiredStop" status.
                        if (this.IsRequiredStop)
                        {
                            break;
                        }
                    }

                    lock (threadLockObjectForAppendLog)
                    {
                        string logMsg = string.Format("Listening............ The listen thread: managed id[{0}].", Thread.CurrentThread.ManagedThreadId);
                        DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, logMsg);
                    }

                    NetworkStream netstream = client.GetStream();

                    try
                    {
                        byte[] buffer = new byte[2048];
                        int receivelength = netstream.Read(buffer, 0, 2048);
                        string requeststring = Encoding.UTF8.GetString(buffer, 0, receivelength);

                        // Only the WOPI discovery endpoint is served; any other request ends the loop.
                        if (!requeststring.StartsWith(@"GET /hosting/discovery", StringComparison.OrdinalIgnoreCase))
                        {
                            break;
                        }
                    }
                    catch (Exception ex)
                    {
                        lock (threadLockObjectForAppendLog)
                        {
                            DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, string.Format("The listen thread catches an [{0}] exception:[{1}].", ex.GetType().Name, ex.Message));
                        }

                        lock (threadLockStaticObjectForVisitThread)
                        {
                            if (this.IsRequiredStop)
                            {
                                lock (threadLockObjectForAppendLog)
                                {
                                    DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, "Requires stopping the Httplistener.");
                                }

                                return;
                            }
                            else
                            {
                                this.RestartListener();
                            }
                        }
                    }

                    bool writeResponseSucceed = false;
                    try
                    {
                        string statusLine = "HTTP/1.1 200 OK\r\n";
                        byte[] responseStatusLineBytes = Encoding.UTF8.GetBytes(statusLine);

                        // Encode the body first so Content-Length reports the UTF-8 BYTE count.
                        // The original code used this.ResponseDiscovery.Length (the character
                        // count), which under-reports the length for any non-ASCII response and
                        // causes the client to truncate the body.
                        byte[] responseBodyBytes = Encoding.UTF8.GetBytes(this.ResponseDiscovery);
                        string responseHeader = string.Format(
                            "Content-Type: text/xml; charset=UTF-8\r\nContent-Length: {0}\r\n",
                            responseBodyBytes.Length);
                        byte[] responseHeaderBytes = Encoding.UTF8.GetBytes(responseHeader);

                        netstream.Write(responseStatusLineBytes, 0, responseStatusLineBytes.Length);
                        netstream.Write(responseHeaderBytes, 0, responseHeaderBytes.Length);

                        // Blank line (CRLF) terminating the HTTP header section.
                        netstream.Write(new byte[] { 13, 10 }, 0, 2);
                        netstream.Write(responseBodyBytes, 0, responseBodyBytes.Length);
                        client.Close();

                        // Only report success after the full response has actually been written.
                        // The original code set this flag before the writes, so a failed write
                        // still counted as a successful response.
                        writeResponseSucceed = true;
                    }
                    catch (Exception ex)
                    {
                        lock (threadLockObjectForAppendLog)
                        {
                            DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, string.Format("The listen thread catches an [{0}] exception:[{1}] on responding.", ex.GetType().Name, ex.Message));
                        }

                        lock (threadLockStaticObjectForVisitThread)
                        {
                            if (this.IsRequiredStop)
                            {
                                lock (threadLockObjectForAppendLog)
                                {
                                    DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, string.Format("Catch an exception:[{0}]. Current requires stopping the Httplistener. Thread managed Id[{1}].", ex.Message, Thread.CurrentThread.ManagedThreadId));
                                }

                                return;
                            }
                            else
                            {
                                this.RestartListener();
                            }
                        }
                    }

                    if (writeResponseSucceed)
                    {
                        lock (threadLockStaticObjectForVisitThread)
                        {
                            // Setting the status.
                            if (!hasResponseDiscoveryRequestSucceed)
                            {
                                hasResponseDiscoveryRequestSucceed = true;
                            }
                        }

                        lock (threadLockObjectForAppendLog)
                        {
                            DiscoveryProcessHelper.AppendLogs(
                                currentHelperType,
                                DateTime.Now,
                                string.Format(
                                "Response the discovery requestsucceed! The listen thread managedId[{0}]",
                                Thread.CurrentThread.ManagedThreadId));
                        }
                    }
                }
                catch (SocketException ee)
                {
                    // Typically raised when StopListen closes the listener while
                    // AcceptTcpClient is blocking; the loop condition then ends the thread.
                    DiscoveryProcessHelper.AppendLogs(
                        currentHelperType,
                        DateTime.Now,
                        string.Format("SocketException: {0}", ee.Message));
                }
            }
        }

        /// <summary>
        /// A method is used to restart the TCP listener. It will dispose the original listener and then re-generate a new instance to listen for requests.
        /// </summary>
        protected void RestartListener()
        {
            lock (threadLockObjectForAppendLog)
            {
                DiscoveryProcessHelper.AppendLogs(currentHelperType, DateTime.Now, "Try to restart the Httplistener.");
            }

            // Release the original listener resource.
            ListenInstance.Stop();
            ListenInstance = null;

            // Restart a new TcpListener instance.
            IPAddress iPAddress = IPAddress.Any;
            IPEndPoint endPoint = new IPEndPoint(iPAddress, 80);
            ListenInstance = new TcpListener(endPoint);
        }

        #endregion
    }
}
#region Apache License // // Licensed to the Apache Software Foundation (ASF) under one or more // contributor license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright ownership. // The ASF licenses this file to you under the Apache License, Version 2.0 // (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #endregion // Compatibility: // http://msdn.microsoft.com/en-us/library/system.console.foregroundcolor.aspx // Disable for unsupported targets #if !NETCF #if !SSCLI #if !CLI_1_0 #if !MONO_1_0 #if !NET_1_0 #if !NET_1_1 // The original ColoredConsoleAppender was written before the .NET framework // (and Mono) had built-in support for console colors so it was written using // Win32 API calls. The AnsiColorTerminalAppender, while it works, isn't // understood by the Windows command prompt. // This is a replacement for both that uses the new (.NET 2) Console colors // and works on both platforms. // On Mono/Linux (at least), setting the background color to 'Black' is // not the same as the default background color, as it is after // Console.Reset(). The difference becomes apparent while running in a // terminal application that supports background transparency; the // default color is treated as transparent while 'Black' isn't. // For this reason, we always reset the colors and only set those // explicitly specified in the configuration (Console.BackgroundColor // isn't set if ommited). 
using System;
using Ctrip.Layout;
using Ctrip.Util;
using System.Globalization;

namespace Ctrip.Appender
{
    /// <summary>
    /// Appends colorized logging events to the console using the color
    /// support built into .NET 2.0 and later.
    /// </summary>
    /// <remarks>
    /// <para>
    /// Events are written to the console's standard output stream by default,
    /// formatted with the appender's configured layout. Set
    /// <see cref="Target"/> to <c>"Console.Error"</c> to write to the error
    /// stream instead.
    /// </para>
    /// <para>
    /// Colors are configured with level-to-color mappings, for example:
    /// </para>
    /// <code lang="XML" escaped="true">
    /// <mapping>
    ///     <level value="ERROR" />
    ///     <foreColor value="DarkRed" />
    ///     <backColor value="White" />
    /// </mapping>
    /// <mapping>
    ///     <level value="WARN" />
    ///     <foreColor value="Yellow" />
    /// </mapping>
    /// </code>
    /// <para>
    /// The Level is a standard Ctrip logging level, while ForeColor and
    /// BackColor are members of the <see cref="System.ConsoleColor"/>
    /// enumeration. Based on the ColoredConsoleAppender.
    /// </para>
    /// </remarks>
    /// <author>Rick Hobbs</author>
    /// <author>Nicko Cadell</author>
    /// <author>Pavlos Touboulidis</author>
    public class ManagedColoredConsoleAppender : AppenderSkeleton
    {
        /// <summary>
        /// Initializes a new instance of the
        /// <see cref="ManagedColoredConsoleAppender"/> class, set up to write
        /// to the standard output stream.
        /// </summary>
        public ManagedColoredConsoleAppender()
        {
        }

        #region Public Instance Properties

        /// <summary>
        /// The console output stream to write to. This is either
        /// <c>"Console.Out"</c> or <c>"Console.Error"</c>.
        /// </summary>
        /// <value>
        /// <c>"Console.Out"</c> or <c>"Console.Error"</c>.
        /// </value>
        virtual public string Target
        {
            get { return m_writeToErrorStream ? ConsoleError : ConsoleOut; }
            // Anything other than "Console.Error" (compared case-insensitively
            // after trimming surrounding whitespace) selects standard output.
            set { m_writeToErrorStream = string.Compare(ConsoleError, value.Trim(), true, CultureInfo.InvariantCulture) == 0; }
        }

        /// <summary>
        /// Adds a level-to-color mapping to this appender; invoked by the
        /// configurator for each <c>mapping</c> element. Each
        /// <see cref="LevelColors"/> entry defines the foreground and
        /// background colors for one level.
        /// </summary>
        /// <param name="mapping">The mapping to add.</param>
        public void AddMapping(LevelColors mapping)
        {
            m_levelMapping.Add(mapping);
        }

        #endregion // Public Instance Properties

        #region Override implementation of AppenderSkeleton

        /// <summary>
        /// Writes the rendered event to the console, applying the colors
        /// mapped to the event's level (if any) and restoring the terminal's
        /// default colors afterwards.
        /// </summary>
        /// <param name="loggingEvent">The event to log.</param>
        /// <remarks>
        /// <para>
        /// The format of the output depends on the appender's layout.
        /// </para>
        /// </remarks>
        override protected void Append(Ctrip.Core.LoggingEvent loggingEvent)
        {
            System.IO.TextWriter output = m_writeToErrorStream ? Console.Error : Console.Out;

            // Start from the terminal's default colors; only colors that were
            // explicitly configured for this level are applied below.
            Console.ResetColor();

            LevelColors colors = m_levelMapping.Lookup(loggingEvent.Level) as LevelColors;
            if (colors != null)
            {
                if (colors.HasBackColor)
                {
                    Console.BackgroundColor = colors.BackColor;
                }
                if (colors.HasForeColor)
                {
                    Console.ForegroundColor = colors.ForeColor;
                }
            }

            // Render the event, emit it, then restore the default colors.
            output.Write(RenderLoggingEvent(loggingEvent));
            Console.ResetColor();
        }

        /// <summary>
        /// This appender requires a <see cref="Layout"/> to be set.
        /// </summary>
        /// <value><c>true</c></value>
        override protected bool RequiresLayout
        {
            get { return true; }
        }

        /// <summary>
        /// Initializes the level-to-color mappings set on this appender.
        /// </summary>
        public override void ActivateOptions()
        {
            base.ActivateOptions();
            m_levelMapping.ActivateOptions();
        }

        #endregion // Override implementation of AppenderSkeleton

        #region Public Static Fields

        /// <summary>
        /// The <see cref="ManagedColoredConsoleAppender.Target"/> value that
        /// selects the console's standard output stream.
        /// </summary>
        public const string ConsoleOut = "Console.Out";

        /// <summary>
        /// The <see cref="ManagedColoredConsoleAppender.Target"/> value that
        /// selects the console's standard error output stream.
        /// </summary>
        public const string ConsoleError = "Console.Error";

        #endregion // Public Static Fields

        #region Private Instances Fields

        /// <summary>
        /// Flag to write output to the error stream rather than the standard
        /// output stream.
        /// </summary>
        private bool m_writeToErrorStream = false;

        /// <summary>
        /// Mapping from level object to color value, populated by
        /// <see cref="AddMapping"/>.
        /// </summary>
        private LevelMapping m_levelMapping = new LevelMapping();

        #endregion // Private Instances Fields

        #region LevelColors LevelMapping Entry

        /// <summary>
        /// Maps the level that a logging call is made at to the console
        /// colors it should be displayed with.
        /// </summary>
        public class LevelColors : LevelMappingEntry
        {
            /// <summary>
            /// The mapped foreground color for the specified level.
            /// Setting this marks the color as explicitly configured,
            /// so it is no longer the default.
            /// </summary>
            public ConsoleColor ForeColor
            {
                get { return m_foreColor; }
                set
                {
                    m_foreColor = value;
                    m_hasForeColor = true;
                }
            }

            private ConsoleColor m_foreColor;
            private bool m_hasForeColor;

            internal bool HasForeColor
            {
                get { return m_hasForeColor; }
            }

            /// <summary>
            /// The mapped background color for the specified level.
            /// Setting this marks the color as explicitly configured,
            /// so it is no longer the default.
            /// </summary>
            public ConsoleColor BackColor
            {
                get { return m_backColor; }
                set
                {
                    m_backColor = value;
                    m_hasBackColor = true;
                }
            }

            private ConsoleColor m_backColor;
            private bool m_hasBackColor;

            internal bool HasBackColor
            {
                get { return m_hasBackColor; }
            }
        }

        #endregion // LevelColors LevelMapping Entry
    }
}

#endif
#endif
#endif // !MONO_1_0
#endif // !CLI_1_0
#endif // !SSCLI
#endif // !NETCF
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using unirest_net.request;

namespace pinch
{
    /// <summary>
    /// Static helper methods shared by the generated API client: JSON
    /// (de)serialization, URL building/cleaning, and flattening of objects
    /// into form fields.
    /// </summary>
    public static class APIHelper
    {
        // DateTime format used for parsing and serializing dates.
        // NOTE(review): "SSS" and bare "Z" are Java format specifiers, not
        // .NET ones (.NET uses "fff" for milliseconds and "K" for the zone);
        // with this pattern .NET copies "SSS"/"Z" into the output literally.
        // Left unchanged because it is a public field that defines the wire
        // format — confirm against the server before altering.
        public static string DateTimeFormat = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";

        /// <summary>
        /// JSON serialization of a given object.
        /// </summary>
        /// <param name="obj">The object to serialize into JSON.</param>
        /// <returns>The serialized JSON string, or null when <paramref name="obj"/> is null.</returns>
        public static string JsonSerialize(object obj)
        {
            if (null == obj)
                return null;

            return JsonConvert.SerializeObject(obj, Formatting.None,
                new IsoDateTimeConverter() { DateTimeFormat = DateTimeFormat });
        }

        /// <summary>
        /// JSON deserialization of the given JSON string.
        /// </summary>
        /// <param name="json">The JSON string to deserialize.</param>
        /// <typeparam name="T">The type of the object to deserialize into.</typeparam>
        /// <returns>The deserialized object, or default(T) for a null/blank input.</returns>
        public static T JsonDeserialize<T>(string json)
        {
            if (string.IsNullOrWhiteSpace(json))
                return default(T);

            return JsonConvert.DeserializeObject<T>(json,
                new IsoDateTimeConverter() { DateTimeFormat = DateTimeFormat });
        }

        /// <summary>
        /// Replaces template parameters (of the form <c>{key}</c>) in the
        /// given URL builder with their escaped string values.
        /// </summary>
        /// <param name="queryBuilder">The query URL to replace the template parameters in.</param>
        /// <param name="parameters">The parameters to replace in the URL; null is a no-op.</param>
        /// <exception cref="ArgumentNullException">When <paramref name="queryBuilder"/> is null.</exception>
        public static void AppendUrlWithTemplateParameters
            (StringBuilder queryBuilder, IEnumerable<KeyValuePair<string, object>> parameters)
        {
            if (null == queryBuilder)
                throw new ArgumentNullException("queryBuilder");

            if (null == parameters)
                return;

            foreach (KeyValuePair<string, object> pair in parameters)
            {
                string replaceValue;

                // Load the element value as a string; null becomes empty so
                // the template marker is still removed from the URL.
                if (null == pair.Value)
                    replaceValue = "";
                else if (pair.Value is ICollection)
                    replaceValue = flattenCollection(pair.Value as ICollection, "{0}{1}", '/', false);
                else if (pair.Value is DateTime)
                    replaceValue = ((DateTime)pair.Value).ToString(DateTimeFormat);
                else
                    replaceValue = pair.Value.ToString();

                replaceValue = Uri.EscapeUriString(replaceValue);

                // Find the template parameter and replace it with its value.
                queryBuilder.Replace(string.Format("{{{0}}}", pair.Key), replaceValue);
            }
        }

        /// <summary>
        /// Appends the given set of parameters to the given query string,
        /// using '?' for the first parameter and '&amp;' thereafter.
        /// Null-valued parameters are skipped.
        /// </summary>
        /// <param name="queryBuilder">The query URL to append the parameters to.</param>
        /// <param name="parameters">The parameters to append; null is a no-op.</param>
        /// <exception cref="ArgumentNullException">When <paramref name="queryBuilder"/> is null.</exception>
        public static void AppendUrlWithQueryParameters
            (StringBuilder queryBuilder, IEnumerable<KeyValuePair<string, object>> parameters)
        {
            if (null == queryBuilder)
                throw new ArgumentNullException("queryBuilder");

            if (null == parameters)
                return;

            // Does the query string already have parameters? (A '?' at
            // position 0 would mean a degenerate URL with no scheme/host.)
            bool hasParams = (indexOf(queryBuilder, "?") > 0);

            foreach (KeyValuePair<string, object> pair in parameters)
            {
                // Ignore null values.
                if (pair.Value == null)
                    continue;

                queryBuilder.Append((hasParams) ? '&' : '?');
                hasParams = true;

                string paramKeyValPair;

                // NOTE(review): Uri.EscapeUriString does not escape reserved
                // characters such as '&' or '=' inside values;
                // Uri.EscapeDataString is usually the correct choice for
                // query values — confirm before changing the wire format.
                if (pair.Value is ICollection)
                    paramKeyValPair = flattenCollection(pair.Value as ICollection,
                        string.Format("{0}[]={{0}}{{1}}", pair.Key), '&', true);
                else if (pair.Value is DateTime)
                    paramKeyValPair = string.Format("{0}={1}",
                        Uri.EscapeUriString(pair.Key),
                        ((DateTime)pair.Value).ToString(DateTimeFormat));
                else
                    paramKeyValPair = string.Format("{0}={1}",
                        Uri.EscapeUriString(pair.Key),
                        Uri.EscapeUriString(pair.Value.ToString()));

                queryBuilder.Append(paramKeyValPair);
            }
        }

        /// <summary>
        /// StringBuilder helper implementing IndexOf with an ordinal
        /// character-by-character comparison.
        /// </summary>
        /// <param name="stringBuilder">The string builder to search.</param>
        /// <param name="strCheck">The string to locate.</param>
        /// <returns>
        /// The first index of <paramref name="strCheck"/>, -1 when not found,
        /// or 0 when <paramref name="strCheck"/> is null (legacy behavior,
        /// preserved for callers).
        /// </returns>
        /// <exception cref="ArgumentNullException">When <paramref name="stringBuilder"/> is null.</exception>
        private static int indexOf(StringBuilder stringBuilder, string strCheck)
        {
            if (stringBuilder == null)
                throw new ArgumentNullException("stringBuilder");

            if (strCheck == null)
                return 0;

            for (int inputCounter = 0; inputCounter < stringBuilder.Length; inputCounter++)
            {
                int matchCounter;

                // Attempt to locate a potential match starting here.
                for (matchCounter = 0;
                    (matchCounter < strCheck.Length)
                    && (inputCounter + matchCounter < stringBuilder.Length)
                    && (stringBuilder[inputCounter + matchCounter] == strCheck[matchCounter]);
                    matchCounter++) ;

                // The whole pattern matched at this position.
                if (matchCounter == strCheck.Length)
                    return inputCounter;
            }

            return -1;
        }

        /// <summary>
        /// Validates the given query URL is absolute and collapses any
        /// redundant forward slashes after the scheme/host.
        /// </summary>
        /// <param name="queryBuilder">The query URL to process.</param>
        /// <returns>The cleaned URL as a string.</returns>
        /// <exception cref="ArgumentException">When the URL is not an absolute http(s) URL.</exception>
        public static string CleanUrl(StringBuilder queryBuilder)
        {
            string url = queryBuilder.ToString();

            // Ensure the URL is absolute (scheme + host).
            Match protocol = Regex.Match(url, "^https?://[^/]+");
            if (!protocol.Success)
                throw new ArgumentException("Invalid Url format.");

            // Remove redundant forward slashes in the path/query portion only;
            // the "//" after the scheme is inside the protocol match.
            string query = url.Substring(protocol.Length);
            query = Regex.Replace(query, "//+", "/");

            return string.Concat(protocol.Value, query);
        }

        /// <summary>
        /// Flattens a collection of objects into a single string, formatting
        /// each element with <paramref name="fmt"/> and joining with
        /// <paramref name="separator"/>.
        /// </summary>
        /// <param name="array">Elements to flatten; nulls become empty strings to keep index order.</param>
        /// <param name="fmt">Composite format string applied per element ({0} = value, {1} = separator).</param>
        /// <param name="separator">Separator character appended after each element.</param>
        /// <param name="urlEncode">Whether to URI-escape each element value.</param>
        /// <returns>Representative string made up of the array elements.</returns>
        private static string flattenCollection(ICollection array, string fmt, char separator, bool urlEncode)
        {
            StringBuilder builder = new StringBuilder();

            foreach (object element in array)
            {
                string elemValue;

                if (null == element)
                    elemValue = string.Empty;
                else if (element is DateTime)
                    elemValue = ((DateTime)element).ToString(DateTimeFormat);
                else
                    elemValue = element.ToString();

                if (urlEncode)
                    elemValue = Uri.EscapeUriString(elemValue);

                builder.AppendFormat(fmt, elemValue, separator);
            }

            // Remove the trailing separator, if appended.
            // BUGFIX: was "Length > 1", which left a lone separator behind
            // when the builder held exactly one character.
            if ((builder.Length > 0) && (builder[builder.Length - 1] == separator))
                builder.Length -= 1;

            return builder.ToString();
        }

        /// <summary>
        /// Recursively flattens the given value into form-field key/value
        /// pairs rooted at <paramref name="name"/> (lists become name[0],
        /// name[1], ...; nested objects become name[prop]).
        /// </summary>
        /// <param name="name">Root name for the variable.</param>
        /// <param name="value">Form field value; null contributes nothing.</param>
        /// <param name="keys">Accumulator of flattened, form-friendly values; created when null.</param>
        /// <returns>The accumulator containing the flattened values.</returns>
        public static Dictionary<string, object> PrepareFormFieldsFromObject(
            string name, object value, Dictionary<string, object> keys = null)
        {
            keys = keys ?? new Dictionary<string, object>();

            if (value == null)
            {
                return keys;
            }

            if (value is Stream)
            {
                // File/stream payloads are passed through untouched.
                keys[name] = value;
                return keys;
            }

            if (value is IList)
            {
                // Lists are indexed as name[0], name[1], ...; null elements
                // are skipped without consuming an index (legacy behavior).
                int index = 0;
                foreach (var subValue in (IEnumerable)value)
                {
                    if (subValue == null)
                        continue;

                    PrepareFormFieldsFromObject(name + '[' + index + ']', subValue, keys);
                    index++;
                }
            }
            else if (value is Enum)
            {
#if WINDOWS_UWP
                Assembly thisAssembly = typeof(APIHelper).GetTypeInfo().Assembly;
#else
                Assembly thisAssembly = Assembly.GetExecutingAssembly();
#endif
                string enumTypeName = value.GetType().FullName;
                Type enumHelperType = thisAssembly.GetType(string.Format("{0}Helper", enumTypeName));
                object enumValue = (int)value;

                if (enumHelperType != null)
                {
                    // This enum has an associated helper; use it to load the value.
                    MethodInfo enumHelperMethod = enumHelperType.GetMethod("ToValue", new[] { value.GetType() });
                    if (enumHelperMethod != null)
                        enumValue = enumHelperMethod.Invoke(null, new object[] { value });
                }

                keys[name] = enumValue;
            }
            else if (value is IDictionary)
            {
                var obj = (IDictionary)value;
                foreach (var key in obj.Keys)
                {
                    // BUGFIX: index with the original key object. The old code
                    // looked up obj[key.ToString()], which misses (or throws
                    // for) entries whose keys are not strings.
                    var subName = key.ToString();
                    var subValue = obj[key];
                    string fullSubName = string.IsNullOrWhiteSpace(name)
                        ? subName
                        : name + '[' + subName + ']';
                    PrepareFormFieldsFromObject(fullSubName, subValue, keys);
                }
            }
            else if (value.GetType().Namespace == null
                || !value.GetType().Namespace.StartsWith("System"))
            {
                // Custom object: iterate through its public properties.
                // (The null-namespace guard keeps anonymous/global-namespace
                // types on this path instead of throwing.)
                var attributeType = typeof(JsonPropertyAttribute);
                foreach (PropertyInfo pInfo in value.GetType().GetProperties())
                {
                    // Prefer the JsonProperty name when the property has one.
                    var jsonProperty = (JsonPropertyAttribute)
                        pInfo.GetCustomAttributes(attributeType, true).FirstOrDefault();
                    var subName = (jsonProperty != null) ? jsonProperty.PropertyName : pInfo.Name;
                    string fullSubName = string.IsNullOrWhiteSpace(name)
                        ? subName
                        : name + '[' + subName + ']';
                    var subValue = pInfo.GetValue(value, null);
                    PrepareFormFieldsFromObject(fullSubName, subValue, keys);
                }
            }
            else if (value is DateTime)
            {
                keys[name] = ((DateTime)value).ToString(DateTimeFormat);
            }
            else
            {
                // Simple System-namespace value (string, number, bool, ...).
                keys[name] = value;
            }

            return keys;
        }

        /// <summary>
        /// Adds/updates entries of <paramref name="dictionary"/> with those
        /// of <paramref name="dictionary2"/> (existing keys are overwritten).
        /// </summary>
        /// <param name="dictionary">The dictionary to update.</param>
        /// <param name="dictionary2">The entries to merge in.</param>
        public static void Add(this Dictionary<string, object> dictionary,
            Dictionary<string, object> dictionary2)
        {
            foreach (var kvp in dictionary2)
            {
                dictionary[kvp.Key] = kvp.Value;
            }
        }
    }
}
// Code generated by Microsoft (R) AutoRest Code Generator 1.2.1.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace ApplicationGateway
{
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Models;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// VirtualNetworksOperations operations.
    /// </summary>
    public partial interface IVirtualNetworksOperations
    {
        /// <summary>
        /// Deletes the specified virtual network.
        /// </summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='virtualNetworkName'>The name of the virtual network.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets the specified virtual network by resource group.
        /// </summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='virtualNetworkName'>The name of the virtual network.</param>
        /// <param name='expand'>Expands referenced resources.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<VirtualNetwork>> GetWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string expand = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Creates or updates a virtual network in the specified resource
        /// group.
        /// </summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='virtualNetworkName'>The name of the virtual network.</param>
        /// <param name='parameters'>
        /// Parameters supplied to the create or update virtual network
        /// operation
        /// </param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<VirtualNetwork>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, VirtualNetwork parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets all virtual networks in a subscription.
        /// </summary>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<VirtualNetwork>>> ListAllWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets all virtual networks in a resource group.
        /// </summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<VirtualNetwork>>> ListWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Checks whether a private IP address is available for use.
        /// </summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='virtualNetworkName'>The name of the virtual network.</param>
        /// <param name='ipAddress'>The private IP address to be verified.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPAddressAvailabilityResult>> CheckIPAddressAvailabilityWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, string ipAddress = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Deletes the specified virtual network. This is the long-running
        /// begin operation.
        /// </summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='virtualNetworkName'>The name of the virtual network.</param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Creates or updates a virtual network in the specified resource
        /// group. This is the long-running begin operation.
        /// </summary>
        /// <param name='resourceGroupName'>The name of the resource group.</param>
        /// <param name='virtualNetworkName'>The name of the virtual network.</param>
        /// <param name='parameters'>
        /// Parameters supplied to the create or update virtual network
        /// operation
        /// </param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<VirtualNetwork>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkName, VirtualNetwork parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets the next page of all virtual networks in a subscription.
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<VirtualNetwork>>> ListAllNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets the next page of all virtual networks in a resource group.
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>The headers that will be added to request.</param>
        /// <param name='cancellationToken'>The cancellation token.</param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<VirtualNetwork>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Specialized;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Net.WebSockets;
using System.Runtime.InteropServices;
using System.Security;
using System.Security.Authentication.ExtendedProtection;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Security.Principal;
using System.Text;

namespace System.Net
{
    /// <summary>
    /// Windows (HTTP.SYS) side of <see cref="HttpListenerRequest"/>. Wraps the native request
    /// blob handed over by HTTP.SYS (<c>RequestContextBase</c>) and lazily materializes managed
    /// views of it: headers, verb, endpoints, input stream, and the client certificate.
    /// NOTE(review): this is a partial class — <c>_rawUrl</c>, <c>_version</c>, <c>_requestUri</c>,
    /// <c>RequestScheme</c> and <c>ClientCertificate</c> are declared in the other part of the
    /// partial type; confirm their semantics there.
    /// </summary>
    public sealed unsafe partial class HttpListenerRequest
    {
        private ulong _requestId;                // HTTP.SYS request id, copied out of the blob in the ctor
        internal ulong _connectionId;            // HTTP.SYS connection id; used for cert retrieval and TLS channel binding
        private SslStatus _sslStatus;            // computed once in the ctor from the blob's pSslInfo
        private string _cookedUrlHost;           // host portion of the HTTP.SYS "cooked" (canonicalized) URL
        private string _cookedUrlPath;           // absolute-path portion of the cooked URL
        private string _cookedUrlQuery;          // query-string portion of the cooked URL
        private long _contentLength;             // meaningful only after ContentLength64 sets _boundaryType
        private Stream _requestStream;           // lazily created by InputStream
        private string _httpMethod;              // lazily parsed from the blob by HttpMethod
        private WebHeaderCollection _webHeaders; // lazily parsed from the blob by Headers
        private IPEndPoint _localEndPoint;       // lazily parsed from the blob by LocalEndPoint
        private IPEndPoint _remoteEndPoint;      // lazily parsed from the blob by RemoteEndPoint
        private BoundaryType _boundaryType;      // None until ContentLength64 classifies the body framing
        private int _clientCertificateError;     // native CertFlags captured during certificate retrieval
        private RequestContextBase _memoryBlob;  // owned native request memory; null after Close()/DetachBlob()
        private HttpListenerContext _httpContext;
        private bool _isDisposed = false;
        // Initial buffer size for HttpReceiveClientCertificate; same value IIS uses. ("Bobl" is sic.)
        internal const uint CertBoblSize = 1500;
        private string _serviceName;
        private object _lock = new object();

        // TLS state of the connection as reported by HTTP.SYS when the request arrived.
        private enum SslStatus : byte
        {
            Insecure,
            NoClientCert,
            ClientCert
        }

        /// <summary>
        /// Builds a request around the native memory blob. Eagerly copies out the ids, SSL
        /// status, raw URL, cooked URL pieces and HTTP version so later property accesses do
        /// not need to touch (and refcount) the blob.
        /// </summary>
        internal HttpListenerRequest(HttpListenerContext httpContext, RequestContextBase memoryBlob)
        {
            if (NetEventSource.IsEnabled)
            {
                // NOTE(review): "$" before "{httpContext}" renders a literal '$' in the log
                // message; looks like a typo, but it is runtime output so left untouched here.
                NetEventSource.Info(this, $"httpContext:${httpContext} memoryBlob {((IntPtr)memoryBlob.RequestBlob)}");
                NetEventSource.Associate(this, httpContext);
            }
            _httpContext = httpContext;
            _memoryBlob = memoryBlob;
            _boundaryType = BoundaryType.None;

            // Set up some of these now to avoid refcounting on memory blob later.
            _requestId = memoryBlob.RequestBlob->RequestId;
            _connectionId = memoryBlob.RequestBlob->ConnectionId;
            // pSslInfo == null        -> plain HTTP
            // SslClientCertNegotiated -> 0 means TLS without a client cert, non-zero means cert present
            _sslStatus = memoryBlob.RequestBlob->pSslInfo == null ? SslStatus.Insecure :
                memoryBlob.RequestBlob->pSslInfo->SslClientCertNegotiated == 0 ? SslStatus.NoClientCert :
                SslStatus.ClientCert;
            // Raw URL arrives as an ANSI string from HTTP.SYS.
            if (memoryBlob.RequestBlob->pRawUrl != null && memoryBlob.RequestBlob->RawUrlLength > 0)
            {
                _rawUrl = Marshal.PtrToStringAnsi((IntPtr)memoryBlob.RequestBlob->pRawUrl, memoryBlob.RequestBlob->RawUrlLength);
            }
            // Cooked URL pieces are UTF-16; native lengths are in bytes, hence the "/ 2".
            Interop.HttpApi.HTTP_COOKED_URL cookedUrl = memoryBlob.RequestBlob->CookedUrl;
            if (cookedUrl.pHost != null && cookedUrl.HostLength > 0)
            {
                _cookedUrlHost = Marshal.PtrToStringUni((IntPtr)cookedUrl.pHost, cookedUrl.HostLength / 2);
            }
            if (cookedUrl.pAbsPath != null && cookedUrl.AbsPathLength > 0)
            {
                _cookedUrlPath = Marshal.PtrToStringUni((IntPtr)cookedUrl.pAbsPath, cookedUrl.AbsPathLength / 2);
            }
            if (cookedUrl.pQueryString != null && cookedUrl.QueryStringLength > 0)
            {
                _cookedUrlQuery = Marshal.PtrToStringUni((IntPtr)cookedUrl.pQueryString, cookedUrl.QueryStringLength / 2);
            }
            _version = new Version(memoryBlob.RequestBlob->Version.MajorVersion, memoryBlob.RequestBlob->Version.MinorVersion);

            if (NetEventSource.IsEnabled)
            {
                NetEventSource.Info(this, $"RequestId:{RequestId} ConnectionId:{_connectionId} RawConnectionId:{memoryBlob.RequestBlob->RawConnectionId} UrlContext:{memoryBlob.RequestBlob->UrlContext} RawUrl:{_rawUrl} Version:{_version} Secure:{_sslStatus}");
                NetEventSource.Info(this, $"httpContext:${httpContext} RequestUri:{RequestUri} Content-Length:{ContentLength64} HTTP Method:{HttpMethod}");
            }
            // Log headers
            if (NetEventSource.IsEnabled)
            {
                StringBuilder sb = new StringBuilder("HttpListenerRequest Headers:\n");
                for (int i = 0; i < Headers.Count; i++)
                {
                    sb.Append("\t");
                    sb.Append(Headers.GetKey(i));
                    sb.Append(" : ");
                    sb.Append(Headers.Get(i));
                    sb.Append("\n");
                }
                NetEventSource.Info(this, sb.ToString());
            }
        }

        internal HttpListenerContext HttpListenerContext => _httpContext;

        // Note: RequestBuffer may get moved in memory. If you dereference a pointer from inside
        // the RequestBuffer, you must use 'OriginalBlobAddress' below to adjust the location of
        // the pointer to match the current location of RequestBuffer.
        internal byte[] RequestBuffer
        {
            get
            {
                CheckDisposed();
                return _memoryBlob.RequestBuffer;
            }
        }

        // Address the blob originally lived at; pair with RequestBuffer to rebase embedded pointers.
        internal IntPtr OriginalBlobAddress
        {
            get
            {
                CheckDisposed();
                return _memoryBlob.OriginalBlobAddress;
            }
        }

        // Use this to save the blob from dispose if this object was never used (never given to a
        // user) and is about to be disposed. Only detaches if the caller still owns the same blob.
        internal void DetachBlob(RequestContextBase memoryBlob)
        {
            if (memoryBlob != null && (object)memoryBlob == (object)_memoryBlob)
            {
                _memoryBlob = null;
            }
        }

        // Finalizes ownership of the memory blob. DetachBlob can't be called after this.
        internal void ReleasePins()
        {
            _memoryBlob.ReleasePins();
        }

        internal ulong RequestId => _requestId;

        /// <summary>
        /// A Guid whose upper 8 bytes carry the HTTP.SYS request id (lower 8 bytes are zero),
        /// giving callers a stable trace identifier for this request.
        /// </summary>
        public Guid RequestTraceIdentifier
        {
            get
            {
                Guid guid = new Guid();
                // Write RequestId into the second ulong (upper half) of the Guid's storage.
                *(1 + (ulong*)&guid) = RequestId;
                return guid;
            }
        }

        /// <summary>
        /// Declared Content-Length of the entity body; -1 when Transfer-Encoding is chunked.
        /// First access classifies the body framing into _boundaryType (Chunked /
        /// ContentLength / Invalid); subsequent accesses return the cached value.
        /// </summary>
        public long ContentLength64
        {
            get
            {
                if (_boundaryType == BoundaryType.None)
                {
                    string transferEncodingHeader = Headers[HttpKnownHeaderNames.TransferEncoding];
                    if (transferEncodingHeader != null && transferEncodingHeader.Equals("chunked", StringComparison.OrdinalIgnoreCase))
                    {
                        _boundaryType = BoundaryType.Chunked;
                        _contentLength = -1;
                    }
                    else
                    {
                        _contentLength = 0;
                        _boundaryType = BoundaryType.ContentLength;
                        string length = Headers[HttpKnownHeaderNames.ContentLength];
                        if (length != null)
                        {
                            // NumberStyles.None: digits only, no sign/whitespace — anything else
                            // marks the framing Invalid rather than throwing.
                            bool success = long.TryParse(length, NumberStyles.None, CultureInfo.InvariantCulture.NumberFormat, out _contentLength);
                            if (!success)
                            {
                                _contentLength = 0;
                                _boundaryType = BoundaryType.Invalid;
                            }
                        }
                    }
                }
                if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"_contentLength:{_contentLength} _boundaryType:{_boundaryType}");
                return _contentLength;
            }
        }

        // Lazily parsed from the native buffer; cached for the lifetime of the request.
        public NameValueCollection Headers
        {
            get
            {
                if (_webHeaders == null)
                {
                    _webHeaders = Interop.HttpApi.GetHeaders(RequestBuffer, OriginalBlobAddress);
                }
                if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"webHeaders:{_webHeaders}");
                return _webHeaders;
            }
        }

        // Lazily parsed request verb (GET/POST/...); cached after first access.
        public string HttpMethod
        {
            get
            {
                if (_httpMethod == null)
                {
                    _httpMethod = Interop.HttpApi.GetVerb(RequestBuffer, OriginalBlobAddress);
                }
                if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"_httpMethod:{_httpMethod}");
                return _httpMethod;
            }
        }

        // Body stream. Stream.Null when the request carries no entity body, so callers can
        // always read without null checks.
        public Stream InputStream
        {
            get
            {
                if (NetEventSource.IsEnabled) NetEventSource.Enter(this);
                if (_requestStream == null)
                {
                    _requestStream = HasEntityBody ? new HttpRequestStream(HttpListenerContext) : Stream.Null;
                }
                if (NetEventSource.IsEnabled) NetEventSource.Exit(this);
                return _requestStream;
            }
        }

        // True only when the context produced an authenticated identity.
        public bool IsAuthenticated
        {
            get
            {
                IPrincipal user = HttpListenerContext.User;
                return user != null && user.Identity != null && user.Identity.IsAuthenticated;
            }
        }

        public bool IsSecureConnection => _sslStatus != SslStatus.Insecure;

        public string ServiceName
        {
            get => _serviceName;
            internal set => _serviceName = value;
        }

        // Returns the native CertFlags value recorded by the last client-certificate fetch.
        private int GetClientCertificateErrorCore()
        {
            if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"ClientCertificateError:{_clientCertificateError}");
            return _clientCertificateError;
        }

        internal void SetClientCertificateError(int clientCertificateError)
        {
            _clientCertificateError = clientCertificateError;
        }

        /// <summary>
        /// Completes an async client-certificate retrieval started by the Begin counterpart.
        /// </summary>
        /// <exception cref="ArgumentNullException">asyncResult is null.</exception>
        /// <exception cref="ArgumentException">asyncResult was not produced by this request.</exception>
        /// <exception cref="InvalidOperationException">End was already called for this result.</exception>
        public X509Certificate2 EndGetClientCertificate(IAsyncResult asyncResult)
        {
            if (NetEventSource.IsEnabled) NetEventSource.Enter(this);
            X509Certificate2 clientCertificate = null;
            try
            {
                if (asyncResult == null)
                {
                    throw new ArgumentNullException(nameof(asyncResult));
                }
                ListenerClientCertAsyncResult clientCertAsyncResult = asyncResult as ListenerClientCertAsyncResult;
                if (clientCertAsyncResult == null || clientCertAsyncResult.AsyncObject != this)
                {
                    throw new ArgumentException(SR.net_io_invalidasyncresult, nameof(asyncResult));
                }
                if (clientCertAsyncResult.EndCalled)
                {
                    throw new InvalidOperationException(SR.Format(SR.net_io_invalidendcall, nameof(EndGetClientCertificate)));
                }
                clientCertAsyncResult.EndCalled = true;
                // Blocks until the overlapped operation finishes (or returns immediately if done).
                clientCertificate = clientCertAsyncResult.InternalWaitForCompletion() as X509Certificate2;
                if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"_clientCertificate:{ClientCertificate}");
            }
            finally
            {
                if (NetEventSource.IsEnabled) NetEventSource.Exit(this);
            }
            return clientCertificate;
        }

        public TransportContext TransportContext => new HttpListenerRequestContext(this);

        public bool HasEntityBody
        {
            get
            {
                // Accessing ContentLength64 lazily initializes _boundaryType as a side effect.
                return (ContentLength64 > 0 && _boundaryType == BoundaryType.ContentLength) ||
                    _boundaryType == BoundaryType.Chunked || _boundaryType == BoundaryType.Multipart;
            }
        }

        // Lazily parsed from the native buffer; cached after first access.
        public IPEndPoint RemoteEndPoint
        {
            get
            {
                if (_remoteEndPoint == null)
                {
                    _remoteEndPoint = Interop.HttpApi.GetRemoteEndPoint(RequestBuffer, OriginalBlobAddress);
                }
                if (NetEventSource.IsEnabled) NetEventSource.Info(this, "_remoteEndPoint" + _remoteEndPoint);
                return _remoteEndPoint;
            }
        }

        // Lazily parsed from the native buffer; cached after first access.
        public IPEndPoint LocalEndPoint
        {
            get
            {
                if (_localEndPoint == null)
                {
                    _localEndPoint = Interop.HttpApi.GetLocalEndPoint(RequestBuffer, OriginalBlobAddress);
                }
                if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"_localEndPoint={_localEndPoint}");
                return _localEndPoint;
            }
        }

        // Should only be called from HttpListenerContext. Releases the native blob and marks
        // this request disposed; later blob-backed property access throws via CheckDisposed().
        internal void Close()
        {
            if (NetEventSource.IsEnabled) NetEventSource.Enter(this);
            RequestContextBase memoryBlob = _memoryBlob;
            if (memoryBlob != null)
            {
                memoryBlob.Close();
                _memoryBlob = null;
            }
            _isDisposed = true;
            if (NetEventSource.IsEnabled) NetEventSource.Exit(this);
        }

        /// <summary>
        /// Starts an overlapped HttpReceiveClientCertificate call against HTTP.SYS.
        ///
        /// Background: when HTTP.SYS is configured to *require* client certificates (flag
        /// value 2), the cert is demanded during the initial TLS handshake. When the cert is
        /// merely *optional*, HTTP.SYS does not ask for it up front, but an app may still want
        /// to demand one later (e.g. only for a protected page), which makes HTTP.SYS
        /// renegotiate (SEC_I_RENEGOTIATE) to request it. Prior to QFE 4796 the native API was
        /// only called when the flag was 2, so a later demand was impossible; the fix is to
        /// call HttpReceiveClientCertificate whenever the channel is not insecure, regardless
        /// of the initial flags.
        ///
        /// NOTE: HttpReceiveClientCertificate can return ERROR_NOT_FOUND, meaning the client
        /// did not provide a cert. If that matters, the server must respond 403 itself —
        /// HTTP.SYS will not do it automatically.
        /// </summary>
        private ListenerClientCertAsyncResult BeginGetClientCertificateCore(AsyncCallback requestCallback, object state)
        {
            ListenerClientCertAsyncResult asyncResult = null;
            if (_sslStatus != SslStatus.Insecure)
            {
                // At this point we know the negotiate-client-certificate bit is set; the cert
                // itself may or may not be present — try to retrieve it. CertBoblSize is the
                // same initial buffer size IIS uses.
                uint size = CertBoblSize;
                asyncResult = new ListenerClientCertAsyncResult(HttpListenerContext.RequestQueueBoundHandle, this, state, requestCallback, size);
                try
                {
                    while (true)
                    {
                        if (NetEventSource.IsEnabled) NetEventSource.Info(this, "Calling Interop.HttpApi.HttpReceiveClientCertificate size:" + size);
                        uint bytesReceived = 0;
                        uint statusCode =
                            Interop.HttpApi.HttpReceiveClientCertificate(
                                HttpListenerContext.RequestQueueHandle,
                                _connectionId,
                                (uint)Interop.HttpApi.HTTP_FLAGS.NONE,
                                asyncResult.RequestBlob,
                                size,
                                &bytesReceived,
                                asyncResult.NativeOverlapped);
                        if (NetEventSource.IsEnabled) NetEventSource.Info(this, "Call to Interop.HttpApi.HttpReceiveClientCertificate returned:" + statusCode + " bytesReceived:" + bytesReceived);
                        if (statusCode == Interop.HttpApi.ERROR_MORE_DATA)
                        {
                            // Buffer too small: grow to what the encoded cert needs and retry.
                            Interop.HttpApi.HTTP_SSL_CLIENT_CERT_INFO* pClientCertInfo = asyncResult.RequestBlob;
                            size = bytesReceived + pClientCertInfo->CertEncodedSize;
                            asyncResult.Reset(size);
                            continue;
                        }
                        if (statusCode != Interop.HttpApi.ERROR_SUCCESS &&
                            statusCode != Interop.HttpApi.ERROR_IO_PENDING)
                        {
                            // Some other hard failure; possible values include
                            // ERROR_INVALID_HANDLE, ERROR_INSUFFICIENT_BUFFER,
                            // ERROR_OPERATION_ABORTED, and ERROR_BAD_DATA (seen when
                            // ERROR_MORE_DATA repeats or a smaller buffer is reported).
                            throw new HttpListenerException((int)statusCode);
                        }
                        if (statusCode == Interop.HttpApi.ERROR_SUCCESS && HttpListener.SkipIOCPCallbackOnSuccess)
                        {
                            // Synchronous completion with IOCP callback suppressed: run it inline.
                            asyncResult.IOCompleted(statusCode, bytesReceived);
                        }
                        break;
                    }
                }
                catch
                {
                    // Don't leak the overlapped/native resources on failure.
                    asyncResult?.InternalCleanup();
                    throw;
                }
            }
            else
            {
                // Plain HTTP: there can be no client certificate; complete immediately.
                asyncResult = new ListenerClientCertAsyncResult(HttpListenerContext.RequestQueueBoundHandle, this, state, requestCallback, 0);
                asyncResult.InvokeCallback();
            }
            return asyncResult;
        }

        /// <summary>
        /// Synchronous client-certificate retrieval. See BeginGetClientCertificateCore for why
        /// this is attempted whenever the channel is not insecure (optional-cert renegotiation,
        /// QFE 4796). ERROR_NOT_FOUND means the client supplied no certificate; responding 403
        /// is the server's job, HTTP.SYS will not do it automatically.
        /// On success, populates ClientCertificate (declared in the other partial) and records
        /// the native CertFlags in _clientCertificateError.
        /// </summary>
        private void GetClientCertificateCore()
        {
            if (NetEventSource.IsEnabled) NetEventSource.Info(this);
            if (_sslStatus != SslStatus.Insecure)
            {
                // At this point we know the negotiate-client-certificate bit is set; the cert
                // itself may or may not be there — try to retrieve it. CertBoblSize is the same
                // initial buffer size IIS uses.
                uint size = CertBoblSize;
                while (true)
                {
                    byte[] clientCertInfoBlob = new byte[checked((int)size)];
                    fixed (byte* pClientCertInfoBlob = &clientCertInfoBlob[0])
                    {
                        Interop.HttpApi.HTTP_SSL_CLIENT_CERT_INFO* pClientCertInfo = (Interop.HttpApi.HTTP_SSL_CLIENT_CERT_INFO*)pClientCertInfoBlob;
                        if (NetEventSource.IsEnabled) NetEventSource.Info(this, "Calling Interop.HttpApi.HttpReceiveClientCertificate size:" + size);
                        uint bytesReceived = 0;
                        uint statusCode =
                            Interop.HttpApi.HttpReceiveClientCertificate(
                                HttpListenerContext.RequestQueueHandle,
                                _connectionId,
                                (uint)Interop.HttpApi.HTTP_FLAGS.NONE,
                                pClientCertInfo,
                                size,
                                &bytesReceived,
                                null);
                        if (NetEventSource.IsEnabled) NetEventSource.Info(this, "Call to Interop.HttpApi.HttpReceiveClientCertificate returned:" + statusCode + " bytesReceived:" + bytesReceived);
                        if (statusCode == Interop.HttpApi.ERROR_MORE_DATA)
                        {
                            // Buffer too small: grow to what the encoded cert needs and retry.
                            size = bytesReceived + pClientCertInfo->CertEncodedSize;
                            continue;
                        }
                        else if (statusCode == Interop.HttpApi.ERROR_SUCCESS)
                        {
                            if (pClientCertInfo != null)
                            {
                                if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"pClientCertInfo:{(IntPtr)pClientCertInfo} pClientCertInfo->CertFlags: {pClientCertInfo->CertFlags} pClientCertInfo->CertEncodedSize: {pClientCertInfo->CertEncodedSize} pClientCertInfo->pCertEncoded: {(IntPtr)pClientCertInfo->pCertEncoded} pClientCertInfo->Token: {(IntPtr)pClientCertInfo->Token} pClientCertInfo->CertDeniedByMapper: {pClientCertInfo->CertDeniedByMapper}");
                                if (pClientCertInfo->pCertEncoded != null)
                                {
                                    try
                                    {
                                        // Copy the encoded cert out of the fixed buffer before it unpins.
                                        byte[] certEncoded = new byte[pClientCertInfo->CertEncodedSize];
                                        Marshal.Copy((IntPtr)pClientCertInfo->pCertEncoded, certEncoded, 0, certEncoded.Length);
                                        ClientCertificate = new X509Certificate2(certEncoded);
                                    }
                                    catch (CryptographicException exception)
                                    {
                                        // Malformed cert bytes: log and continue without a cert.
                                        if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"CryptographicException={exception}");
                                    }
                                    catch (SecurityException exception)
                                    {
                                        if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"SecurityException={exception}");
                                    }
                                }
                                _clientCertificateError = (int)pClientCertInfo->CertFlags;
                            }
                        }
                        else
                        {
                            // ERROR_NOT_FOUND (client sent no cert) is the only other expected outcome.
                            Debug.Assert(statusCode == Interop.HttpApi.ERROR_NOT_FOUND,
                                $"Call to Interop.HttpApi.HttpReceiveClientCertificate() failed with statusCode {statusCode}.");
                        }
                    }
                    break;
                }
            }
        }

        // Built lazily from the raw URL plus the cooked URL pieces captured in the ctor.
        private Uri RequestUri
        {
            get
            {
                if (_requestUri == null)
                {
                    _requestUri = HttpListenerRequestUriBuilder.GetRequestUri(
                        _rawUrl, RequestScheme, _cookedUrlHost, _cookedUrlPath, _cookedUrlQuery);
                }
                if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"_requestUri:{_requestUri}");
                return _requestUri;
            }
        }

        // TLS channel binding token for extended protection, keyed by the connection id.
        internal ChannelBinding GetChannelBinding()
        {
            return HttpListenerContext.Listener.GetChannelBindingFromTls(_connectionId);
        }

        // Guards blob-backed members after Close() has released the native memory.
        internal void CheckDisposed()
        {
            if (_isDisposed)
            {
                throw new ObjectDisposedException(this.GetType().FullName);
            }
        }

        private bool SupportsWebSockets => WebSocketProtocolComponent.IsSupported;
    }
}
using System; using System.IO; using System.ComponentModel; using System.Collections.Generic; using System.Runtime.Serialization; using Newtonsoft.Json; using Newtonsoft.Json.Linq; using ChargeBee.Internal; using ChargeBee.Api; using ChargeBee.Models.Enums; using ChargeBee.Filters.Enums; namespace ChargeBee.Models { public class Addon : Resource { public Addon() { } public Addon(Stream stream) { using (StreamReader reader = new StreamReader(stream)) { JObj = JToken.Parse(reader.ReadToEnd()); apiVersionCheck (JObj); } } public Addon(TextReader reader) { JObj = JToken.Parse(reader.ReadToEnd()); apiVersionCheck (JObj); } public Addon(String jsonString) { JObj = JToken.Parse(jsonString); apiVersionCheck (JObj); } #region Methods public static CreateRequest Create() { string url = ApiUtil.BuildUrl("addons"); return new CreateRequest(url, HttpMethod.POST); } public static UpdateRequest Update(string id) { string url = ApiUtil.BuildUrl("addons", CheckNull(id)); return new UpdateRequest(url, HttpMethod.POST); } public static AddonListRequest List() { string url = ApiUtil.BuildUrl("addons"); return new AddonListRequest(url); } public static EntityRequest<Type> Retrieve(string id) { string url = ApiUtil.BuildUrl("addons", CheckNull(id)); return new EntityRequest<Type>(url, HttpMethod.GET); } public static EntityRequest<Type> Delete(string id) { string url = ApiUtil.BuildUrl("addons", CheckNull(id), "delete"); return new EntityRequest<Type>(url, HttpMethod.POST); } public static CopyRequest Copy() { string url = ApiUtil.BuildUrl("addons", "copy"); return new CopyRequest(url, HttpMethod.POST); } public static EntityRequest<Type> Unarchive(string id) { string url = ApiUtil.BuildUrl("addons", CheckNull(id), "unarchive"); return new EntityRequest<Type>(url, HttpMethod.POST); } #endregion #region Properties public string Id { get { return GetValue<string>("id", true); } } public string Name { get { return GetValue<string>("name", true); } } public string InvoiceName { get { return 
GetValue<string>("invoice_name", false); } } public string Description { get { return GetValue<string>("description", false); } } public PricingModelEnum PricingModel { get { return GetEnum<PricingModelEnum>("pricing_model", true); } } [Obsolete] public TypeEnum AddonType { get { return GetEnum<TypeEnum>("type", true); } } public ChargeTypeEnum ChargeType { get { return GetEnum<ChargeTypeEnum>("charge_type", true); } } public int? Price { get { return GetValue<int?>("price", false); } } public string CurrencyCode { get { return GetValue<string>("currency_code", true); } } public int? Period { get { return GetValue<int?>("period", false); } } public PeriodUnitEnum PeriodUnit { get { return GetEnum<PeriodUnitEnum>("period_unit", true); } } public string Unit { get { return GetValue<string>("unit", false); } } public StatusEnum Status { get { return GetEnum<StatusEnum>("status", true); } } public DateTime? ArchivedAt { get { return GetDateTime("archived_at", false); } } public bool EnabledInPortal { get { return GetValue<bool>("enabled_in_portal", true); } } public string TaxCode { get { return GetValue<string>("tax_code", false); } } public string HsnCode { get { return GetValue<string>("hsn_code", false); } } public string TaxjarProductCode { get { return GetValue<string>("taxjar_product_code", false); } } public AvalaraSaleTypeEnum? AvalaraSaleType { get { return GetEnum<AvalaraSaleTypeEnum>("avalara_sale_type", false); } } public int? AvalaraTransactionType { get { return GetValue<int?>("avalara_transaction_type", false); } } public int? 
AvalaraServiceType { get { return GetValue<int?>("avalara_service_type", false); } } public string Sku { get { return GetValue<string>("sku", false); } } public string AccountingCode { get { return GetValue<string>("accounting_code", false); } } public string AccountingCategory1 { get { return GetValue<string>("accounting_category1", false); } } public string AccountingCategory2 { get { return GetValue<string>("accounting_category2", false); } } public string AccountingCategory3 { get { return GetValue<string>("accounting_category3", false); } } public string AccountingCategory4 { get { return GetValue<string>("accounting_category4", false); } } public bool? IsShippable { get { return GetValue<bool?>("is_shippable", false); } } public int? ShippingFrequencyPeriod { get { return GetValue<int?>("shipping_frequency_period", false); } } public ShippingFrequencyPeriodUnitEnum? ShippingFrequencyPeriodUnit { get { return GetEnum<ShippingFrequencyPeriodUnitEnum>("shipping_frequency_period_unit", false); } } public long? ResourceVersion { get { return GetValue<long?>("resource_version", false); } } public DateTime? UpdatedAt { get { return GetDateTime("updated_at", false); } } public string PriceInDecimal { get { return GetValue<string>("price_in_decimal", false); } } public bool? IncludedInMrr { get { return GetValue<bool?>("included_in_mrr", false); } } public string InvoiceNotes { get { return GetValue<string>("invoice_notes", false); } } public bool? Taxable { get { return GetValue<bool?>("taxable", false); } } public string TaxProfileId { get { return GetValue<string>("tax_profile_id", false); } } public JToken MetaData { get { return GetJToken("meta_data", false); } } public List<AddonTier> Tiers { get { return GetResourceList<AddonTier>("tiers"); } } public bool? ShowDescriptionInInvoices { get { return GetValue<bool?>("show_description_in_invoices", false); } } public bool? 
ShowDescriptionInQuotes { get { return GetValue<bool?>("show_description_in_quotes", false); } } #endregion #region Requests public class CreateRequest : EntityRequest<CreateRequest> { public CreateRequest(string url, HttpMethod method) : base(url, method) { } public CreateRequest Id(string id) { m_params.Add("id", id); return this; } public CreateRequest Name(string name) { m_params.Add("name", name); return this; } public CreateRequest InvoiceName(string invoiceName) { m_params.AddOpt("invoice_name", invoiceName); return this; } public CreateRequest Description(string description) { m_params.AddOpt("description", description); return this; } public CreateRequest ChargeType(Addon.ChargeTypeEnum chargeType) { m_params.Add("charge_type", chargeType); return this; } public CreateRequest Price(int price) { m_params.AddOpt("price", price); return this; } public CreateRequest CurrencyCode(string currencyCode) { m_params.AddOpt("currency_code", currencyCode); return this; } public CreateRequest Period(int period) { m_params.AddOpt("period", period); return this; } public CreateRequest PeriodUnit(Addon.PeriodUnitEnum periodUnit) { m_params.AddOpt("period_unit", periodUnit); return this; } public CreateRequest PricingModel(ChargeBee.Models.Enums.PricingModelEnum pricingModel) { m_params.AddOpt("pricing_model", pricingModel); return this; } [Obsolete] public CreateRequest Type(TypeEnum type) { m_params.AddOpt("type", type); return this; } public CreateRequest Unit(string unit) { m_params.AddOpt("unit", unit); return this; } public CreateRequest EnabledInPortal(bool enabledInPortal) { m_params.AddOpt("enabled_in_portal", enabledInPortal); return this; } public CreateRequest Taxable(bool taxable) { m_params.AddOpt("taxable", taxable); return this; } public CreateRequest TaxProfileId(string taxProfileId) { m_params.AddOpt("tax_profile_id", taxProfileId); return this; } public CreateRequest AvalaraSaleType(ChargeBee.Models.Enums.AvalaraSaleTypeEnum avalaraSaleType) { 
m_params.AddOpt("avalara_sale_type", avalaraSaleType); return this; } public CreateRequest AvalaraTransactionType(int avalaraTransactionType) { m_params.AddOpt("avalara_transaction_type", avalaraTransactionType); return this; } public CreateRequest AvalaraServiceType(int avalaraServiceType) { m_params.AddOpt("avalara_service_type", avalaraServiceType); return this; } public CreateRequest TaxCode(string taxCode) { m_params.AddOpt("tax_code", taxCode); return this; } public CreateRequest HsnCode(string hsnCode) { m_params.AddOpt("hsn_code", hsnCode); return this; } public CreateRequest TaxjarProductCode(string taxjarProductCode) { m_params.AddOpt("taxjar_product_code", taxjarProductCode); return this; } public CreateRequest InvoiceNotes(string invoiceNotes) { m_params.AddOpt("invoice_notes", invoiceNotes); return this; } public CreateRequest MetaData(JToken metaData) { m_params.AddOpt("meta_data", metaData); return this; } public CreateRequest Sku(string sku) { m_params.AddOpt("sku", sku); return this; } public CreateRequest AccountingCode(string accountingCode) { m_params.AddOpt("accounting_code", accountingCode); return this; } public CreateRequest AccountingCategory1(string accountingCategory1) { m_params.AddOpt("accounting_category1", accountingCategory1); return this; } public CreateRequest AccountingCategory2(string accountingCategory2) { m_params.AddOpt("accounting_category2", accountingCategory2); return this; } public CreateRequest AccountingCategory3(string accountingCategory3) { m_params.AddOpt("accounting_category3", accountingCategory3); return this; } public CreateRequest AccountingCategory4(string accountingCategory4) { m_params.AddOpt("accounting_category4", accountingCategory4); return this; } public CreateRequest IsShippable(bool isShippable) { m_params.AddOpt("is_shippable", isShippable); return this; } public CreateRequest ShippingFrequencyPeriod(int shippingFrequencyPeriod) { m_params.AddOpt("shipping_frequency_period", shippingFrequencyPeriod); 
return this; } public CreateRequest ShippingFrequencyPeriodUnit(Addon.ShippingFrequencyPeriodUnitEnum shippingFrequencyPeriodUnit) { m_params.AddOpt("shipping_frequency_period_unit", shippingFrequencyPeriodUnit); return this; } public CreateRequest IncludedInMrr(bool includedInMrr) { m_params.AddOpt("included_in_mrr", includedInMrr); return this; } public CreateRequest ShowDescriptionInInvoices(bool showDescriptionInInvoices) { m_params.AddOpt("show_description_in_invoices", showDescriptionInInvoices); return this; } public CreateRequest ShowDescriptionInQuotes(bool showDescriptionInQuotes) { m_params.AddOpt("show_description_in_quotes", showDescriptionInQuotes); return this; } public CreateRequest PriceInDecimal(string priceInDecimal) { m_params.AddOpt("price_in_decimal", priceInDecimal); return this; } public CreateRequest Status(Addon.StatusEnum status) { m_params.AddOpt("status", status); return this; } public CreateRequest TierStartingUnit(int index, int tierStartingUnit) { m_params.AddOpt("tiers[starting_unit][" + index + "]", tierStartingUnit); return this; } public CreateRequest TierEndingUnit(int index, int tierEndingUnit) { m_params.AddOpt("tiers[ending_unit][" + index + "]", tierEndingUnit); return this; } public CreateRequest TierPrice(int index, int tierPrice) { m_params.AddOpt("tiers[price][" + index + "]", tierPrice); return this; } public CreateRequest TierStartingUnitInDecimal(int index, string tierStartingUnitInDecimal) { m_params.AddOpt("tiers[starting_unit_in_decimal][" + index + "]", tierStartingUnitInDecimal); return this; } public CreateRequest TierEndingUnitInDecimal(int index, string tierEndingUnitInDecimal) { m_params.AddOpt("tiers[ending_unit_in_decimal][" + index + "]", tierEndingUnitInDecimal); return this; } public CreateRequest TierPriceInDecimal(int index, string tierPriceInDecimal) { m_params.AddOpt("tiers[price_in_decimal][" + index + "]", tierPriceInDecimal); return this; } } public class UpdateRequest : 
EntityRequest<UpdateRequest> { public UpdateRequest(string url, HttpMethod method) : base(url, method) { } public UpdateRequest Name(string name) { m_params.AddOpt("name", name); return this; } public UpdateRequest InvoiceName(string invoiceName) { m_params.AddOpt("invoice_name", invoiceName); return this; } public UpdateRequest Description(string description) { m_params.AddOpt("description", description); return this; } public UpdateRequest ChargeType(Addon.ChargeTypeEnum chargeType) { m_params.AddOpt("charge_type", chargeType); return this; } public UpdateRequest Price(int price) { m_params.AddOpt("price", price); return this; } public UpdateRequest CurrencyCode(string currencyCode) { m_params.AddOpt("currency_code", currencyCode); return this; } public UpdateRequest Period(int period) { m_params.AddOpt("period", period); return this; } public UpdateRequest PeriodUnit(Addon.PeriodUnitEnum periodUnit) { m_params.AddOpt("period_unit", periodUnit); return this; } public UpdateRequest PricingModel(ChargeBee.Models.Enums.PricingModelEnum pricingModel) { m_params.AddOpt("pricing_model", pricingModel); return this; } [Obsolete] public UpdateRequest Type(TypeEnum type) { m_params.AddOpt("type", type); return this; } public UpdateRequest Unit(string unit) { m_params.AddOpt("unit", unit); return this; } public UpdateRequest EnabledInPortal(bool enabledInPortal) { m_params.AddOpt("enabled_in_portal", enabledInPortal); return this; } public UpdateRequest Taxable(bool taxable) { m_params.AddOpt("taxable", taxable); return this; } public UpdateRequest TaxProfileId(string taxProfileId) { m_params.AddOpt("tax_profile_id", taxProfileId); return this; } public UpdateRequest AvalaraSaleType(ChargeBee.Models.Enums.AvalaraSaleTypeEnum avalaraSaleType) { m_params.AddOpt("avalara_sale_type", avalaraSaleType); return this; } public UpdateRequest AvalaraTransactionType(int avalaraTransactionType) { m_params.AddOpt("avalara_transaction_type", avalaraTransactionType); return this; } public 
// --- UpdateRequest (continued) ---------------------------------------------
// Fluent setters for the addon-update operation. Each setter records one
// optional request parameter via m_params.AddOpt(name, value) and returns
// this request so calls can be chained. (The `public` modifier for the
// first setter below is on the preceding source line.)
UpdateRequest AvalaraServiceType(int avalaraServiceType)
{
    m_params.AddOpt("avalara_service_type", avalaraServiceType);
    return this;
}
public UpdateRequest TaxCode(string taxCode)
{
    m_params.AddOpt("tax_code", taxCode);
    return this;
}
public UpdateRequest HsnCode(string hsnCode)
{
    m_params.AddOpt("hsn_code", hsnCode);
    return this;
}
public UpdateRequest TaxjarProductCode(string taxjarProductCode)
{
    m_params.AddOpt("taxjar_product_code", taxjarProductCode);
    return this;
}
public UpdateRequest InvoiceNotes(string invoiceNotes)
{
    m_params.AddOpt("invoice_notes", invoiceNotes);
    return this;
}
// Arbitrary JSON metadata attached to the addon.
public UpdateRequest MetaData(JToken metaData)
{
    m_params.AddOpt("meta_data", metaData);
    return this;
}
public UpdateRequest Sku(string sku)
{
    m_params.AddOpt("sku", sku);
    return this;
}
public UpdateRequest AccountingCode(string accountingCode)
{
    m_params.AddOpt("accounting_code", accountingCode);
    return this;
}
public UpdateRequest AccountingCategory1(string accountingCategory1)
{
    m_params.AddOpt("accounting_category1", accountingCategory1);
    return this;
}
public UpdateRequest AccountingCategory2(string accountingCategory2)
{
    m_params.AddOpt("accounting_category2", accountingCategory2);
    return this;
}
public UpdateRequest AccountingCategory3(string accountingCategory3)
{
    m_params.AddOpt("accounting_category3", accountingCategory3);
    return this;
}
public UpdateRequest AccountingCategory4(string accountingCategory4)
{
    m_params.AddOpt("accounting_category4", accountingCategory4);
    return this;
}
public UpdateRequest IsShippable(bool isShippable)
{
    m_params.AddOpt("is_shippable", isShippable);
    return this;
}
public UpdateRequest ShippingFrequencyPeriod(int shippingFrequencyPeriod)
{
    m_params.AddOpt("shipping_frequency_period", shippingFrequencyPeriod);
    return this;
}
public UpdateRequest ShippingFrequencyPeriodUnit(Addon.ShippingFrequencyPeriodUnitEnum shippingFrequencyPeriodUnit)
{
    m_params.AddOpt("shipping_frequency_period_unit", shippingFrequencyPeriodUnit);
    return this;
}
public UpdateRequest IncludedInMrr(bool includedInMrr)
{
    m_params.AddOpt("included_in_mrr", includedInMrr);
    return this;
}
public UpdateRequest ShowDescriptionInInvoices(bool showDescriptionInInvoices)
{
    m_params.AddOpt("show_description_in_invoices", showDescriptionInInvoices);
    return this;
}
public UpdateRequest ShowDescriptionInQuotes(bool showDescriptionInQuotes)
{
    m_params.AddOpt("show_description_in_quotes", showDescriptionInQuotes);
    return this;
}
public UpdateRequest PriceInDecimal(string priceInDecimal)
{
    m_params.AddOpt("price_in_decimal", priceInDecimal);
    return this;
}
// Tier setters: `index` selects the tier row; the value is stored under an
// indexed key of the form tiers[<field>][<index>].
public UpdateRequest TierStartingUnit(int index, int tierStartingUnit)
{
    m_params.AddOpt("tiers[starting_unit][" + index + "]", tierStartingUnit);
    return this;
}
public UpdateRequest TierEndingUnit(int index, int tierEndingUnit)
{
    m_params.AddOpt("tiers[ending_unit][" + index + "]", tierEndingUnit);
    return this;
}
public UpdateRequest TierPrice(int index, int tierPrice)
{
    m_params.AddOpt("tiers[price][" + index + "]", tierPrice);
    return this;
}
public UpdateRequest TierStartingUnitInDecimal(int index, string tierStartingUnitInDecimal)
{
    m_params.AddOpt("tiers[starting_unit_in_decimal][" + index + "]", tierStartingUnitInDecimal);
    return this;
}
public UpdateRequest TierEndingUnitInDecimal(int index, string tierEndingUnitInDecimal)
{
    m_params.AddOpt("tiers[ending_unit_in_decimal][" + index + "]", tierEndingUnitInDecimal);
    return this;
}
public UpdateRequest TierPriceInDecimal(int index, string tierPriceInDecimal)
{
    m_params.AddOpt("tiers[price_in_decimal][" + index + "]", tierPriceInDecimal);
    return this;
}
}

// Request builder for listing addons. Each method returns a typed filter
// bound to a query parameter; IncludeDeleted toggles inclusion of deleted
// records in the result set.
public class AddonListRequest : ListRequestBase<AddonListRequest>
{
    public AddonListRequest(string url)
        : base(url)
    {
    }

    public StringFilter<AddonListRequest> Id()
    {
        return new StringFilter<AddonListRequest>("id", this).SupportsMultiOperators(true);
    }
    public StringFilter<AddonListRequest> Name()
    {
        return new StringFilter<AddonListRequest>("name", this).SupportsMultiOperators(true);
    }
    public EnumFilter<ChargeBee.Models.Enums.PricingModelEnum, AddonListRequest> PricingModel()
    {
        return new EnumFilter<ChargeBee.Models.Enums.PricingModelEnum, AddonListRequest>("pricing_model", this);
    }
    // Deprecated: the addon "type" attribute has been superseded (see TypeEnum).
    [Obsolete]
    public EnumFilter<TypeEnum, AddonListRequest> Type()
    {
        return new EnumFilter<TypeEnum, AddonListRequest>("type", this);
    }
    public EnumFilter<Addon.ChargeTypeEnum, AddonListRequest> ChargeType()
    {
        return new EnumFilter<Addon.ChargeTypeEnum, AddonListRequest>("charge_type", this);
    }
    public NumberFilter<int, AddonListRequest> Price()
    {
        return new NumberFilter<int, AddonListRequest>("price", this);
    }
    public NumberFilter<int, AddonListRequest> Period()
    {
        return new NumberFilter<int, AddonListRequest>("period", this);
    }
    public EnumFilter<Addon.PeriodUnitEnum, AddonListRequest> PeriodUnit()
    {
        return new EnumFilter<Addon.PeriodUnitEnum, AddonListRequest>("period_unit", this);
    }
    public EnumFilter<Addon.StatusEnum, AddonListRequest> Status()
    {
        return new EnumFilter<Addon.StatusEnum, AddonListRequest>("status", this);
    }
    public TimestampFilter<AddonListRequest> UpdatedAt()
    {
        return new TimestampFilter<AddonListRequest>("updated_at", this);
    }
    public StringFilter<AddonListRequest> CurrencyCode()
    {
        return new StringFilter<AddonListRequest>("currency_code", this).SupportsMultiOperators(true);
    }
    public AddonListRequest IncludeDeleted(bool includeDeleted)
    {
        m_params.AddOpt("include_deleted", includeDeleted);
        return this;
    }
}

// Request builder for copying an addon from another site. Note that
// FromSite and IdAtFromSite use Add (required parameters) while Id and
// ForSiteMerging use AddOpt (optional).
public class CopyRequest : EntityRequest<CopyRequest>
{
    public CopyRequest(string url, HttpMethod method)
        : base(url, method)
    {
    }

    public CopyRequest FromSite(string fromSite)
    {
        m_params.Add("from_site", fromSite);
        return this;
    }
    public CopyRequest IdAtFromSite(string idAtFromSite)
    {
        m_params.Add("id_at_from_site", idAtFromSite);
        return this;
    }
    public CopyRequest Id(string id)
    {
        m_params.AddOpt("id", id);
        return this;
    }
    public CopyRequest ForSiteMerging(bool forSiteMerging)
    {
        m_params.AddOpt("for_site_merging", forSiteMerging);
        return this;
    }
}
#endregion
// Deprecated pricing-type classification for an addon.
[Obsolete]
public enum TypeEnum
{
    UnKnown, /*Indicates unexpected value for this enum. You can get this when there is a dotnet-client version incompatibility. We suggest you to upgrade to the latest version */
    [EnumMember(Value = "on_off")]
    OnOff,
    [EnumMember(Value = "quantity")]
    Quantity,
    [EnumMember(Value = "tiered")]
    Tiered,
    [EnumMember(Value = "volume")]
    Volume,
    [EnumMember(Value = "stairstep")]
    Stairstep,
}

// Whether the addon is charged on every billing cycle or only once.
public enum ChargeTypeEnum
{
    UnKnown, /*Indicates unexpected value for this enum. You can get this when there is a dotnet-client version incompatibility. We suggest you to upgrade to the latest version */
    [EnumMember(Value = "recurring")]
    Recurring,
    [EnumMember(Value = "non_recurring")]
    NonRecurring,
}

// Unit for the addon's billing period.
public enum PeriodUnitEnum
{
    UnKnown, /*Indicates unexpected value for this enum. You can get this when there is a dotnet-client version incompatibility. We suggest you to upgrade to the latest version */
    [EnumMember(Value = "day")]
    Day,
    [EnumMember(Value = "week")]
    Week,
    [EnumMember(Value = "month")]
    Month,
    [EnumMember(Value = "year")]
    Year,
    [EnumMember(Value = "not_applicable")]
    NotApplicable,
}

// Lifecycle state of the addon record.
public enum StatusEnum
{
    UnKnown, /*Indicates unexpected value for this enum. You can get this when there is a dotnet-client version incompatibility. We suggest you to upgrade to the latest version */
    [EnumMember(Value = "active")]
    Active,
    [EnumMember(Value = "archived")]
    Archived,
    [EnumMember(Value = "deleted")]
    Deleted,
}

// Unit for the addon's shipping frequency period.
public enum ShippingFrequencyPeriodUnitEnum
{
    UnKnown, /*Indicates unexpected value for this enum. You can get this when there is a dotnet-client version incompatibility. We suggest you to upgrade to the latest version */
    [EnumMember(Value = "year")]
    Year,
    [EnumMember(Value = "month")]
    Month,
    [EnumMember(Value = "week")]
    Week,
    [EnumMember(Value = "day")]
    Day,
}

#region Subclasses
// One row of a tiered/volume/stairstep price schedule. Values are read
// lazily from the underlying JSON via Resource.GetValue; the second
// argument marks whether the attribute is required (true) or optional.
public class AddonTier : Resource
{
    public int StartingUnit
    {
        get { return GetValue<int>("starting_unit", true); }
    }
    // Null for the last (open-ended) tier.
    public int? EndingUnit
    {
        get { return GetValue<int?>("ending_unit", false); }
    }
    public int Price
    {
        get { return GetValue<int>("price", true); }
    }
    public string StartingUnitInDecimal
    {
        get { return GetValue<string>("starting_unit_in_decimal", false); }
    }
    public string EndingUnitInDecimal
    {
        get { return GetValue<string>("ending_unit_in_decimal", false); }
    }
    public string PriceInDecimal
    {
        get { return GetValue<string>("price_in_decimal", false); }
    }
}
#endregion
// Closes the Addon class (opened earlier in this file) and the namespace.
}
}
// Copyright (c) Microsoft. All Rights Reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.Runtime.Serialization;
using Microsoft.CodeAnalysis.Sarif.Readers;

namespace Microsoft.CodeAnalysis.Sarif
{
    /// <summary>
    /// A result produced by an analysis tool.
    /// </summary>
    [DataContract]
    [GeneratedCode("Microsoft.Json.Schema.ToDotNet", "0.42.0.0")]
    public partial class Result : PropertyBagHolder, ISarifNode
    {
        public static IEqualityComparer<Result> ValueComparer => ResultEqualityComparer.Instance;

        public bool ValueEquals(Result other) => ValueComparer.Equals(this, other);
        public int ValueGetHashCode() => ValueComparer.GetHashCode(this);

        /// <summary>
        /// Gets a value indicating the type of object implementing <see cref="ISarifNode" />.
        /// </summary>
        public SarifNodeKind SarifNodeKind
        {
            get
            {
                return SarifNodeKind.Result;
            }
        }

        /// <summary>
        /// The stable, unique identifier of the rule (if any) to which this result is relevant. If 'ruleKey' is not specified, this member can be used to retrieve rule metadata from the rules dictionary, if it exists.
        /// </summary>
        [DataMember(Name = "ruleId", IsRequired = false, EmitDefaultValue = false)]
        public string RuleId { get; set; }

        /// <summary>
        /// A key used to retrieve the rule metadata from the rules dictionary that is relevant to the result.
        /// </summary>
        [DataMember(Name = "ruleKey", IsRequired = false, EmitDefaultValue = false)]
        public string RuleKey { get; set; }

        /// <summary>
        /// A value specifying the severity level of the result. If this property is not present, its implied value is 'warning'.
        /// </summary>
        [DataMember(Name = "level", IsRequired = false, EmitDefaultValue = false)]
        public ResultLevel Level { get; set; }

        /// <summary>
        /// A string that describes the result. The first sentence of the message only will be displayed when visible space is limited.
        /// </summary>
        [DataMember(Name = "message", IsRequired = false, EmitDefaultValue = false)]
        public string Message { get; set; }

        /// <summary>
        /// A 'formattedRuleMessage' object that can be used to construct a formatted message that describes the result. If the 'formattedMessage' property is present on a result, the 'fullMessage' property shall not be present. If the 'fullMessage' property is present on a result, the 'formattedMessage' property shall not be present.
        /// </summary>
        [DataMember(Name = "formattedRuleMessage", IsRequired = false, EmitDefaultValue = false)]
        public FormattedRuleMessage FormattedRuleMessage { get; set; }

        /// <summary>
        /// One or more locations where the result occurred. Specify only one location unless the problem indicated by the result can only be corrected by making a change at every specified location.
        /// </summary>
        [DataMember(Name = "locations", IsRequired = false, EmitDefaultValue = false)]
        public IList<Location> Locations { get; set; }

        /// <summary>
        /// A source code or other file fragment that illustrates the result.
        /// </summary>
        [DataMember(Name = "snippet", IsRequired = false, EmitDefaultValue = false)]
        public string Snippet { get; set; }

        /// <summary>
        /// A unique identifier for the result.
        /// </summary>
        [DataMember(Name = "id", IsRequired = false, EmitDefaultValue = false)]
        public string Id { get; set; }

        /// <summary>
        /// A string that contributes to the unique identity of the result.
        /// </summary>
        [DataMember(Name = "toolFingerprintContribution", IsRequired = false, EmitDefaultValue = false)]
        public string ToolFingerprintContribution { get; set; }

        /// <summary>
        /// An array of 'stack' objects relevant to the result.
        /// </summary>
        [DataMember(Name = "stacks", IsRequired = false, EmitDefaultValue = false)]
        public IList<Stack> Stacks { get; set; }

        /// <summary>
        /// An array of 'codeFlow' objects relevant to the result.
        /// </summary>
        [DataMember(Name = "codeFlows", IsRequired = false, EmitDefaultValue = false)]
        public IList<CodeFlow> CodeFlows { get; set; }

        /// <summary>
        /// A grouped set of locations and messages, if available, that represent code areas that are related to this result.
        /// </summary>
        [DataMember(Name = "relatedLocations", IsRequired = false, EmitDefaultValue = false)]
        public IList<AnnotatedCodeLocation> RelatedLocations { get; set; }

        /// <summary>
        /// A value indicating the suppression state(s) of the result, if any.
        /// </summary>
        [DataMember(Name = "suppressionStates", IsRequired = false, EmitDefaultValue = false)]
        public SuppressionStates SuppressionStates { get; set; }

        /// <summary>
        /// The state of a result relative to a baseline of a previous run.
        /// </summary>
        [DataMember(Name = "baselineState", IsRequired = false, EmitDefaultValue = false)]
        public BaselineState BaselineState { get; set; }

        /// <summary>
        /// An array of 'fix' objects, each of which represents a proposed fix to the problem indicated by the result.
        /// </summary>
        [DataMember(Name = "fixes", IsRequired = false, EmitDefaultValue = false)]
        public IList<Fix> Fixes { get; set; }

        /// <summary>
        /// Key/value pairs that provide additional information about the result.
        /// </summary>
        [DataMember(Name = "properties", IsRequired = false, EmitDefaultValue = false)]
        internal override IDictionary<string, SerializedPropertyInfo> Properties { get; set; }

        /// <summary>
        /// Initializes a new instance of the <see cref="Result" /> class.
        /// </summary>
        public Result()
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="Result" /> class from the supplied values.
        /// </summary>
        /// <param name="ruleId">
        /// An initialization value for the <see cref="P: RuleId" /> property.
        /// </param>
        /// <param name="ruleKey">
        /// An initialization value for the <see cref="P: RuleKey" /> property.
        /// </param>
        /// <param name="level">
        /// An initialization value for the <see cref="P: Level" /> property.
        /// </param>
        /// <param name="message">
        /// An initialization value for the <see cref="P: Message" /> property.
        /// </param>
        /// <param name="formattedRuleMessage">
        /// An initialization value for the <see cref="P: FormattedRuleMessage" /> property.
        /// </param>
        /// <param name="locations">
        /// An initialization value for the <see cref="P: Locations" /> property.
        /// </param>
        /// <param name="snippet">
        /// An initialization value for the <see cref="P: Snippet" /> property.
        /// </param>
        /// <param name="id">
        /// An initialization value for the <see cref="P: Id" /> property.
        /// </param>
        /// <param name="toolFingerprintContribution">
        /// An initialization value for the <see cref="P: ToolFingerprintContribution" /> property.
        /// </param>
        /// <param name="stacks">
        /// An initialization value for the <see cref="P: Stacks" /> property.
        /// </param>
        /// <param name="codeFlows">
        /// An initialization value for the <see cref="P: CodeFlows" /> property.
        /// </param>
        /// <param name="relatedLocations">
        /// An initialization value for the <see cref="P: RelatedLocations" /> property.
        /// </param>
        /// <param name="suppressionStates">
        /// An initialization value for the <see cref="P: SuppressionStates" /> property.
        /// </param>
        /// <param name="baselineState">
        /// An initialization value for the <see cref="P: BaselineState" /> property.
        /// </param>
        /// <param name="fixes">
        /// An initialization value for the <see cref="P: Fixes" /> property.
        /// </param>
        /// <param name="properties">
        /// An initialization value for the <see cref="P: Properties" /> property.
        /// </param>
        public Result(string ruleId, string ruleKey, ResultLevel level, string message, FormattedRuleMessage formattedRuleMessage, IEnumerable<Location> locations, string snippet, string id, string toolFingerprintContribution, IEnumerable<Stack> stacks, IEnumerable<CodeFlow> codeFlows, IEnumerable<AnnotatedCodeLocation> relatedLocations, SuppressionStates suppressionStates, BaselineState baselineState, IEnumerable<Fix> fixes, IDictionary<string, SerializedPropertyInfo> properties)
        {
            Init(ruleId, ruleKey, level, message, formattedRuleMessage, locations, snippet, id, toolFingerprintContribution, stacks, codeFlows, relatedLocations, suppressionStates, baselineState, fixes, properties);
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="Result" /> class from the specified instance.
        /// </summary>
        /// <param name="other">
        /// The instance from which the new instance is to be initialized.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown if <paramref name="other" /> is null.
        /// </exception>
        public Result(Result other)
        {
            if (other == null)
            {
                throw new ArgumentNullException(nameof(other));
            }

            Init(other.RuleId, other.RuleKey, other.Level, other.Message, other.FormattedRuleMessage, other.Locations, other.Snippet, other.Id, other.ToolFingerprintContribution, other.Stacks, other.CodeFlows, other.RelatedLocations, other.SuppressionStates, other.BaselineState, other.Fixes, other.Properties);
        }

        ISarifNode ISarifNode.DeepClone()
        {
            return DeepCloneCore();
        }

        /// <summary>
        /// Creates a deep copy of this instance.
        /// </summary>
        public Result DeepClone()
        {
            return (Result)DeepCloneCore();
        }

        private ISarifNode DeepCloneCore()
        {
            return new Result(this);
        }

        // Deep-copies every reference-typed value (lists are rebuilt
        // element-by-element, preserving null entries) so the new instance
        // shares no mutable state with its source.
        private void Init(string ruleId, string ruleKey, ResultLevel level, string message, FormattedRuleMessage formattedRuleMessage, IEnumerable<Location> locations, string snippet, string id, string toolFingerprintContribution, IEnumerable<Stack> stacks, IEnumerable<CodeFlow> codeFlows, IEnumerable<AnnotatedCodeLocation> relatedLocations, SuppressionStates suppressionStates, BaselineState baselineState, IEnumerable<Fix> fixes, IDictionary<string, SerializedPropertyInfo> properties)
        {
            RuleId = ruleId;
            RuleKey = ruleKey;
            Level = level;
            Message = message;
            if (formattedRuleMessage != null)
            {
                FormattedRuleMessage = new FormattedRuleMessage(formattedRuleMessage);
            }

            if (locations != null)
            {
                var destination_0 = new List<Location>();
                foreach (var value_0 in locations)
                {
                    if (value_0 == null)
                    {
                        destination_0.Add(null);
                    }
                    else
                    {
                        destination_0.Add(new Location(value_0));
                    }
                }

                Locations = destination_0;
            }

            Snippet = snippet;
            Id = id;
            ToolFingerprintContribution = toolFingerprintContribution;
            if (stacks != null)
            {
                var destination_1 = new List<Stack>();
                foreach (var value_1 in stacks)
                {
                    if (value_1 == null)
                    {
                        destination_1.Add(null);
                    }
                    else
                    {
                        destination_1.Add(new Stack(value_1));
                    }
                }

                Stacks = destination_1;
            }

            if (codeFlows != null)
            {
                var destination_2 = new List<CodeFlow>();
                foreach (var value_2 in codeFlows)
                {
                    if (value_2 == null)
                    {
                        destination_2.Add(null);
                    }
                    else
                    {
                        destination_2.Add(new CodeFlow(value_2));
                    }
                }

                CodeFlows = destination_2;
            }

            if (relatedLocations != null)
            {
                var destination_3 = new List<AnnotatedCodeLocation>();
                foreach (var value_3 in relatedLocations)
                {
                    if (value_3 == null)
                    {
                        destination_3.Add(null);
                    }
                    else
                    {
                        destination_3.Add(new AnnotatedCodeLocation(value_3));
                    }
                }

                RelatedLocations = destination_3;
            }

            SuppressionStates = suppressionStates;
            BaselineState = baselineState;
            if (fixes != null)
            {
                var destination_4 = new List<Fix>();
                foreach (var value_4 in fixes)
                {
                    if (value_4 == null)
                    {
                        destination_4.Add(null);
                    }
                    else
                    {
                        destination_4.Add(new Fix(value_4));
                    }
                }

                Fixes = destination_4;
            }

            if (properties != null)
            {
                Properties = new Dictionary<string, SerializedPropertyInfo>(properties);
            }
        }
    }
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

using System;
using System.Collections.Generic;
using System.Linq;
using QuantConnect.Data;
using QuantConnect.Brokerages;
using QuantConnect.Indicators;
using QuantConnect.Orders;
using QuantConnect.Interfaces;

namespace QuantConnect.Algorithm.CSharp
{
    /// <summary>
    /// The demonstration algorithm shows some of the most common order methods when working with Crypto assets.
    /// </summary>
    /// <meta name="tag" content="using data" />
    /// <meta name="tag" content="using quantconnect" />
    /// <meta name="tag" content="trading and orders" />
    public class BasicTemplateCryptoAlgorithm : QCAlgorithm, IRegressionAlgorithmDefinition
    {
        // Fast/slow EMAs on LTCUSD used for the crossover logic in OnData.
        private ExponentialMovingAverage _fast;
        private ExponentialMovingAverage _slow;

        /// <summary>
        /// Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must be initialized.
        /// </summary>
        public override void Initialize()
        {
            SetStartDate(2018, 4, 4); // Set Start Date
            SetEndDate(2018, 4, 4); // Set End Date

            // Although typically real brokerages such as GDAX only support a single account currency,
            // here we add both USD and EUR to demonstrate how to handle non-USD account currencies.

            // Set Strategy Cash (USD)
            SetCash(10000);

            // Set Strategy Cash (EUR)
            // EUR/USD conversion rate will be updated dynamically
            SetCash("EUR", 10000);

            // Add some coins as initial holdings
            // When connected to a real brokerage, the amount specified in SetCash
            // will be replaced with the amount in your actual account.
            SetCash("BTC", 1m);
            SetCash("ETH", 5m);

            SetBrokerageModel(BrokerageName.GDAX, AccountType.Cash);

            // You can uncomment the following line when live trading with GDAX,
            // to ensure limit orders will only be posted to the order book and never executed as a taker (incurring fees).
            // Please note this statement has no effect in backtesting or paper trading.
            // DefaultOrderProperties = new GDAXOrderProperties { PostOnly = true };

            // Find more symbols here: http://quantconnect.com/data
            AddCrypto("BTCUSD");
            AddCrypto("ETHUSD");
            AddCrypto("BTCEUR");
            var symbol = AddCrypto("LTCUSD").Symbol;

            // create two moving averages
            _fast = EMA(symbol, 30, Resolution.Minute);
            _slow = EMA(symbol, 60, Resolution.Minute);
        }

        /// <summary>
        /// OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.
        /// </summary>
        /// <param name="data">Slice object keyed by symbol containing the stock data</param>
        public override void OnData(Slice data)
        {
            // Sanity check: every non-USD currency in the cashbook must have a
            // non-zero conversion rate before any order logic runs.
            if (Portfolio.CashBook["EUR"].ConversionRate == 0
                || Portfolio.CashBook["BTC"].ConversionRate == 0
                || Portfolio.CashBook["ETH"].ConversionRate == 0
                || Portfolio.CashBook["LTC"].ConversionRate == 0)
            {
                Log($"EUR conversion rate: {Portfolio.CashBook["EUR"].ConversionRate}");
                Log($"BTC conversion rate: {Portfolio.CashBook["BTC"].ConversionRate}");
                Log($"LTC conversion rate: {Portfolio.CashBook["LTC"].ConversionRate}");
                Log($"ETH conversion rate: {Portfolio.CashBook["ETH"].ConversionRate}");

                throw new Exception("Conversion rate is 0");
            }
            // The branches below fire at specific times of the backtest day to
            // demonstrate the different order methods.
            if (Time.Hour == 1 && Time.Minute == 0)
            {
                // Sell all ETH holdings with a limit order at 1% above the current price
                var limitPrice = Math.Round(Securities["ETHUSD"].Price * 1.01m, 2);
                var quantity = Portfolio.CashBook["ETH"].Amount;
                LimitOrder("ETHUSD", -quantity, limitPrice);
            }
            else if (Time.Hour == 2 && Time.Minute == 0)
            {
                // Submit a buy limit order for BTC at 5% below the current price
                var usdTotal = Portfolio.CashBook["USD"].Amount;
                var limitPrice = Math.Round(Securities["BTCUSD"].Price * 0.95m, 2);
                // use only half of our total USD
                var quantity = usdTotal * 0.5m / limitPrice;
                LimitOrder("BTCUSD", quantity, limitPrice);
            }
            else if (Time.Hour == 2 && Time.Minute == 1)
            {
                // Get current USD available, subtracting amount reserved for buy open orders
                var usdTotal = Portfolio.CashBook["USD"].Amount;
                var usdReserved = Transactions.GetOpenOrders(x => x.Direction == OrderDirection.Buy && x.Type == OrderType.Limit)
                    .Where(x => x.Symbol == "BTCUSD" || x.Symbol == "ETHUSD")
                    .Sum(x => x.Quantity * ((LimitOrder) x).LimitPrice);
                var usdAvailable = usdTotal - usdReserved;

                // Submit a marketable buy limit order for ETH at 1% above the current price
                var limitPrice = Math.Round(Securities["ETHUSD"].Price * 1.01m, 2);

                // use all of our available USD
                var quantity = usdAvailable / limitPrice;

                // this order will be rejected for insufficient funds
                LimitOrder("ETHUSD", quantity, limitPrice);

                // use only half of our available USD
                quantity = usdAvailable * 0.5m / limitPrice;
                LimitOrder("ETHUSD", quantity, limitPrice);
            }
            else if (Time.Hour == 11 && Time.Minute == 0)
            {
                // Liquidate our BTC holdings (including the initial holding)
                SetHoldings("BTCUSD", 0m);
            }
            else if (Time.Hour == 12 && Time.Minute == 0)
            {
                // Submit a market buy order for 1 BTC using EUR
                Buy("BTCEUR", 1m);

                // Submit a sell limit order at 10% above market price
                var limitPrice = Math.Round(Securities["BTCEUR"].Price * 1.1m, 2);
                LimitOrder("BTCEUR", -1, limitPrice);
            }
            else if (Time.Hour == 13 && Time.Minute == 0)
            {
                // Cancel the limit order if not filled
                Transactions.CancelOpenOrders("BTCEUR");
            }
            else if (Time.Hour > 13)
            {
                // To include any initial holdings, we read the LTC amount from the cashbook
                // instead of using Portfolio["LTCUSD"].Quantity
                if (_fast > _slow)
                {
                    if (Portfolio.CashBook["LTC"].Amount == 0)
                    {
                        Buy("LTCUSD", 10);
                    }
                }
                else
                {
                    if (Portfolio.CashBook["LTC"].Amount > 0)
                    {
                        // The following two statements currently behave differently if we have initial holdings:
                        // https://github.com/QuantConnect/Lean/issues/1860
                        Liquidate("LTCUSD");
                        // SetHoldings("LTCUSD", 0);
                    }
                }
            }
        }

        // Echo every order event (submitted/filled/canceled) to the debug log.
        public override void OnOrderEvent(OrderEvent orderEvent)
        {
            Debug(Time + " " + orderEvent);
        }

        // Log the final portfolio value and cashbook when the backtest ends.
        public override void OnEndOfAlgorithm()
        {
            Log($"{Time} - TotalPortfolioValue: {Portfolio.TotalPortfolioValue}");
            Log($"{Time} - CashBook: {Portfolio.CashBook}");
        }

        /// <summary>
        /// This is used by the regression test system to indicate if the open source Lean repository has the required data to run this algorithm.
        /// </summary>
        public bool CanRunLocally { get; } = true;

        /// <summary>
        /// This is used by the regression test system to indicate which languages this algorithm is written in.
        /// </summary>
        public Language[] Languages { get; } = { Language.CSharp, Language.Python };

        /// <summary>
        /// This is used by the regression test system to indicate what the expected statistics are from running the algorithm
        /// </summary>
        public Dictionary<string, string> ExpectedStatistics => new Dictionary<string, string>
        {
            {"Total Trades", "10"},
            {"Average Win", "0%"},
            {"Average Loss", "0%"},
            {"Compounding Annual Return", "0%"},
            {"Drawdown", "0%"},
            {"Expectancy", "0"},
            {"Net Profit", "0%"},
            {"Sharpe Ratio", "0"},
            {"Probabilistic Sharpe Ratio", "0%"},
            {"Loss Rate", "0%"},
            {"Win Rate", "0%"},
            {"Profit-Loss Ratio", "0"},
            {"Alpha", "0"},
            {"Beta", "0"},
            {"Annual Standard Deviation", "0"},
            {"Annual Variance", "0"},
            {"Information Ratio", "0"},
            {"Tracking Error", "0"},
            {"Treynor Ratio", "0"},
            {"Total Fees", "$85.34"},
            {"Estimated Strategy Capacity", "$0"},
            {"Lowest Capacity Asset", "BTCEUR XJ"},
            {"Fitness Score", "0.5"},
            {"Kelly Criterion Estimate", "0"},
            {"Kelly Criterion Probability Value", "0"},
            {"Sortino Ratio", "79228162514264337593543950335"},
            {"Return Over Maximum Drawdown", "-43.943"},
            {"Portfolio Turnover", "1.028"},
            {"Total Insights Generated", "0"},
            {"Total Insights Closed", "0"},
            {"Total Insights Analysis Completed", "0"},
            {"Long Insight Count", "0"},
            {"Short Insight Count", "0"},
            {"Long/Short Ratio", "100%"},
            {"Estimated Monthly Alpha Value", "$0"},
            {"Total Accumulated Estimated Alpha Value", "$0"},
            {"Mean Population Estimated Insight Value", "$0"},
            {"Mean Population Direction", "0%"},
            {"Mean Population Magnitude", "0%"},
            {"Rolling Averaged Population Direction", "0%"},
            {"Rolling Averaged Population Magnitude", "0%"},
            {"OrderListHash", "1bf1a6d9dd921982b72a6178f9e50e68"}
        };
    }
}
//#define Trace // ParallelBZip2OutputStream.cs // ------------------------------------------------------------------ // // Copyright (c) 2011 Dino Chiesa. // All rights reserved. // // This code module is part of DotNetZip, a zipfile class library. // // ------------------------------------------------------------------ // // This code is licensed under the Microsoft Public License. // See the file License.txt for the license details. // More info on: http://dotnetzip.codeplex.com // // ------------------------------------------------------------------ // // Last Saved: <2011-August-02 16:44:24> // // ------------------------------------------------------------------ // // This module defines the ParallelBZip2OutputStream class, which is a // BZip2 compressing stream. This code was derived in part from Apache // commons source code. The license below applies to the original Apache // code. // // ------------------------------------------------------------------ // flymake: csc.exe /t:module BZip2InputStream.cs BZip2Compressor.cs Rand.cs BCRC32.cs @@FILE@@ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. 
*/ // Design Notes: // // This class follows the classic Decorator pattern: it is a Stream that // wraps itself around a Stream, and in doing so provides bzip2 // compression as callers Write into it. It is exactly the same in // outward function as the BZip2OutputStream, except that this class can // perform compression using multiple independent threads. Because of // that, and because of the CPU-intensive nature of BZip2 compression, // this class can perform significantly better (in terms of wall-click // time) than the single-threaded variant, at the expense of memory and // CPU utilization. // // BZip2 is a straightforward data format: there are 4 magic bytes at // the top of the file, followed by 1 or more compressed blocks. There // is a small "magic byte" trailer after all compressed blocks. // // In concept parallelizing BZip2 is simple: do the CPU-intensive // compression for each block in a separate thread, then emit the // compressed output, in order, to the output stream. Each block can be // compressed independently, so a block is the natural candidate for the // parcel of work that can be passed to an independent worker thread. // // The design approach used here is simple: within the Write() method of // the stream, fill a block. When the block is full, pass it to a // background worker thread for compression. When the compressor thread // completes its work, the main thread (the application thread that // calls Write()) can send the compressed data to the output stream, // being careful to respect the order of the compressed blocks. // // The challenge of ordering the compressed data is a solved and // well-understood problem - it is the same approach here as DotNetZip // uses in the ParallelDeflateOutputStream. It is a map/reduce approach // in design intent. // // One new twist for BZip2 is that the compressor output is not // byte-aligned. 
In other words the final output of a compressed block // will in general be a number of bits that is not a multiple of // 8. Therefore, combining the ordered results of the N compressor // threads requires additional byte-shredding by the parent // stream. Hence this stream uses a BitWriter to adapt bit-oriented // BZip2 output to the byte-oriented .NET Stream. // // The approach used here creates N instances of the BZip2Compressor // type, where N is governed by the number of cores (cpus) and limited // by the MaxWorkers property exposed by this class. Each // BZip2Compressor instance gets its own MemoryStream, to which it // writes its data, via a BitWriter. // // along with the bit accumulator described above. The MemoryStream // would gather the byte-aligned compressed output of the compressor. // When reducing the output of the various workers, this class must // again do the byte-shredding thing. The data from the compressors is // therefore shredded twice: once when being placed into the // MemoryStream, and again when emitted into the final output stream // that this class decorates. This is an unfortunate and seemingly // unavoidable inefficiency. Two rounds of byte-shredding will use more // CPU than we'd like, but I haven't imagined a way to avoid it. // // The BZip2Compressor is designed to write directly into the parent // stream's accumulator (BitWriter) when possible, and write into a // distinct BitWriter when necessary. The former can be used in a // single-thread scenario, while the latter is required in a // multi-thread scenario. // // ---- // // Regarding the Apache code base: Most of the code in this particular // class is related to stream operations and thread synchronization, and // is my own code. It largely does not rely on any code obtained from // Apache commons. 
If you compare this code with the Apache commons
// BZip2OutputStream, you will see very little code that is common,
// except for the nearly-boilerplate structure that is common to all
// subtypes of System.IO.Stream.  There may be some small remnants of
// code in this module derived from the Apache stuff, which is why I
// left the license in here.  Most of the Apache commons compressor magic
// has been ported into the BZip2Compressor class.
//

using System;
using System.IO;
using System.Collections.Generic;
using System.Threading;

namespace Ionic.BZip2
{
    /// <summary>
    ///   Pairs one <c>BZip2Compressor</c> with the buffers it writes into.
    ///   Each background worker owns one WorkItem while compressing a block.
    /// </summary>
    internal class WorkItem
    {
        public int index;                                  // slot of this item in the pool
        public BZip2Compressor Compressor { get; private set; }
        public MemoryStream ms;                            // receives the compressed bits for one block
        public int ordinal;                                // sequence number of the block being compressed
        public BitWriter bw;                               // bit-oriented writer over ms

        public WorkItem(int ix, int blockSize)
        {
            // compressed data gets written to a MemoryStream
            this.ms = new MemoryStream();
            this.bw = new BitWriter(ms);
            this.Compressor = new BZip2Compressor(bw, blockSize);
            this.index = ix;
        }
    }

    /// <summary>
    ///   A write-only decorator stream that compresses data as it is
    ///   written using the BZip2 algorithm. This stream compresses by
    ///   block using multiple threads.
    /// </summary>
    /// <para>
    ///   This class performs BZIP2 compression through writing. For
    ///   more information on the BZIP2 algorithm, see
    ///   <see href="http://en.wikipedia.org/wiki/BZIP2"/>.
    /// </para>
    ///
    /// <para>
    ///   This class is similar to <see cref="Ionic.BZip2.BZip2OutputStream"/>,
    ///   except that this implementation uses an approach that employs multiple
    ///   worker threads to perform the compression.  On a multi-cpu or multi-core
    ///   computer, the performance of this class can be significantly higher than
    ///   the single-threaded BZip2OutputStream, particularly for larger streams.
    ///   How large?  Anything over 10mb is a good candidate for parallel
    ///   compression.
    /// </para>
    ///
    /// <para>
    ///   The tradeoff is that this class uses more memory and more CPU than the
    ///   vanilla <c>BZip2OutputStream</c>. Also, for small files, the
    ///   <c>ParallelBZip2OutputStream</c> can be much slower than the vanilla
    ///   <c>BZip2OutputStream</c>, because of the overhead associated to using the
    ///   thread pool.
    /// </para>
    ///
    /// <seealso cref="Ionic.BZip2.BZip2OutputStream" />
    public class ParallelBZip2OutputStream : System.IO.Stream
    {
        private static readonly int BufferPairsPerCore = 4;

        private int _maxWorkers;
        private bool firstWriteDone;            // pool is allocated lazily on first Write()
        private int lastFilled;                 // ordinal of the most recently filled block
        private int lastWritten;                // ordinal of the most recently emitted block
        private int latestCompressed;           // highest ordinal compressed so far
        private int currentlyFilling;           // pool index being filled, or -1
        private volatile Exception pendingException;  // set by BG threads, rethrown on the caller's thread
        private bool handlingException;
        private bool emitting;                  // re-entrancy guard for EmitPendingBuffers
        private System.Collections.Generic.Queue<int> toWrite;  // compressed, awaiting ordered emit
        private System.Collections.Generic.Queue<int> toFill;   // idle, available to fill
        private System.Collections.Generic.List<WorkItem> pool;
        private object latestLock = new object();
        private object eLock = new object();     // for exceptions
        private object outputLock = new object();// for multi-thread output
        private AutoResetEvent newlyCompressedBlob;

        long totalBytesWrittenIn;
        long totalBytesWrittenOut;
        bool leaveOpen;
        uint combinedCRC;
        Stream output;
        BitWriter bw;
        int blockSize100k;  // 0...9

        private TraceBits desiredTrace = TraceBits.Crc | TraceBits.Write;

        /// <summary>
        ///   Constructs a new <c>ParallelBZip2OutputStream</c>, that sends its
        ///   compressed output to the given output stream.
        /// </summary>
        ///
        /// <param name='output'>
        ///   The destination stream, to which compressed output will be sent.
        /// </param>
        ///
        /// <example>
        ///
        ///   This example reads a file, then compresses it with bzip2 file,
        ///   and writes the compressed data into a newly created file.
        ///
        ///   <code>
        ///   var fname = "logfile.log";
        ///   using (var fs = File.OpenRead(fname))
        ///   {
        ///       var outFname = fname + ".bz2";
        ///       using (var output = File.Create(outFname))
        ///       {
        ///           using (var compressor = new Ionic.BZip2.ParallelBZip2OutputStream(output))
        ///           {
        ///               byte[] buffer = new byte[2048];
        ///               int n;
        ///               while ((n = fs.Read(buffer, 0, buffer.Length)) &gt; 0)
        ///               {
        ///                   compressor.Write(buffer, 0, n);
        ///               }
        ///           }
        ///       }
        ///   }
        ///   </code>
        /// </example>
        public ParallelBZip2OutputStream(Stream output)
            : this(output, BZip2.MaxBlockSize, false)
        {
        }

        /// <summary>
        ///   Constructs a new <c>ParallelBZip2OutputStream</c> with specified blocksize.
        /// </summary>
        /// <param name = "output">the destination stream.</param>
        /// <param name = "blockSize">
        ///   The blockSize in units of 100000 bytes.
        ///   The valid range is 1..9.
        /// </param>
        public ParallelBZip2OutputStream(Stream output, int blockSize)
            : this(output, blockSize, false)
        {
        }

        /// <summary>
        ///   Constructs a new <c>ParallelBZip2OutputStream</c>.
        /// </summary>
        /// <param name = "output">the destination stream.</param>
        /// <param name = "leaveOpen">
        ///   whether to leave the captive stream open upon closing this stream.
        /// </param>
        public ParallelBZip2OutputStream(Stream output, bool leaveOpen)
            : this(output, BZip2.MaxBlockSize, leaveOpen)
        {
        }

        /// <summary>
        ///   Constructs a new <c>ParallelBZip2OutputStream</c> with specified blocksize,
        ///   and explicitly specifies whether to leave the wrapped stream open.
        /// </summary>
        ///
        /// <param name = "output">the destination stream.</param>
        /// <param name = "blockSize">
        ///   The blockSize in units of 100000 bytes.
        ///   The valid range is 1..9.
        /// </param>
        /// <param name = "leaveOpen">
        ///   whether to leave the captive stream open upon closing this stream.
        /// </param>
        /// <exception cref="ArgumentException">
        ///   if <paramref name="blockSize"/> is out of range, or the stream is not writable.
        /// </exception>
        public ParallelBZip2OutputStream(Stream output, int blockSize, bool leaveOpen)
        {
            if (blockSize < BZip2.MinBlockSize || blockSize > BZip2.MaxBlockSize)
            {
                var msg = String.Format("blockSize={0} is out of range; must be between {1} and {2}",
                                        blockSize,
                                        BZip2.MinBlockSize, BZip2.MaxBlockSize);
                throw new ArgumentException(msg, "blockSize");
            }

            this.output = output;
            if (!this.output.CanWrite)
                throw new ArgumentException("The stream is not writable.", "output");

            this.bw = new BitWriter(this.output);
            this.blockSize100k = blockSize;
            this.leaveOpen = leaveOpen;
            this.combinedCRC = 0;
            this.MaxWorkers = 16; // default
            EmitHeader();
        }

        // Allocates the pool of WorkItems and the queues that track them.
        // Deferred until the first Write() so that MaxWorkers can still be
        // changed after construction.
        private void InitializePoolOfWorkItems()
        {
            this.toWrite = new Queue<int>();
            this.toFill = new Queue<int>();
            this.pool = new System.Collections.Generic.List<WorkItem>();
            int nWorkers = BufferPairsPerCore * Environment.ProcessorCount;
            nWorkers = Math.Min(nWorkers, this.MaxWorkers);
            for(int i=0; i < nWorkers; i++)
            {
                this.pool.Add(new WorkItem(i, this.blockSize100k));
                this.toFill.Enqueue(i);
            }

            this.newlyCompressedBlob = new AutoResetEvent(false);
            this.currentlyFilling = -1;
            this.lastFilled = -1;
            this.lastWritten = -1;
            this.latestCompressed = -1;
        }

        /// <summary>
        ///   The maximum number of concurrent compression worker threads to use.
        /// </summary>
        ///
        /// <remarks>
        /// <para>
        ///   This property sets an upper limit on the number of concurrent worker
        ///   threads to employ for compression. The implementation of this stream
        ///   employs multiple threads from the .NET thread pool, via
        ///   ThreadPool.QueueUserWorkItem(), to compress the incoming data by
        ///   block.  As each block of data is compressed, this stream re-orders the
        ///   compressed blocks and writes them to the output stream.
        /// </para>
        ///
        /// <para>
        ///   A higher number of workers enables a higher degree of
        ///   parallelism, which tends to increase the speed of compression on
        ///   multi-cpu computers.  On the other hand, a higher number of buffer
        ///   pairs also implies a larger memory consumption, more active worker
        ///   threads, and a higher cpu utilization for any compression. This
        ///   property enables the application to limit its memory consumption and
        ///   CPU utilization behavior depending on requirements.
        /// </para>
        ///
        /// <para>
        ///   By default, DotNetZip allocates 4 workers per CPU core, subject to the
        ///   upper limit specified in this property. For example, suppose the
        ///   application sets this property to 16. Then, on a machine with 2
        ///   cores, DotNetZip will use 8 workers; that number does not exceed the
        ///   upper limit specified by this property, so the actual number of
        ///   workers used will be 4 * 2 = 8.  On a machine with 4 cores, DotNetZip
        ///   will use 16 workers; again, the limit does not apply. On a machine
        ///   with 8 cores, DotNetZip will use 16 workers, because of the limit.
        /// </para>
        ///
        /// <para>
        ///   For each compression "worker thread" that occurs in parallel, there is
        ///   up to 2mb of memory allocated, for buffering and processing. The
        ///   actual number depends on the <see cref="BlockSize"/> property.
        /// </para>
        ///
        /// <para>
        ///   CPU utilization will also go up with additional workers, because a
        ///   larger number of buffer pairs allows a larger number of background
        ///   threads to compress in parallel. If you find that parallel
        ///   compression is consuming too much memory or CPU, you can adjust this
        ///   value downward.
        /// </para>
        ///
        /// <para>
        ///   The default value is 16. Different values may deliver better or
        ///   worse results, depending on your priorities and the dynamic
        ///   performance characteristics of your storage and compute resources.
        /// </para>
        ///
        /// <para>
        ///   The application can set this value at any time, but it is effective
        ///   only before the first call to Write(), which is when the buffers are
        ///   allocated.
        /// </para>
        /// </remarks>
        public int MaxWorkers
        {
            get
            {
                return _maxWorkers;
            }
            set
            {
                // FIX: the ArgumentException(message, paramName) arguments were
                // previously swapped, producing a nonsensical exception text.
                if (value < 4)
                    throw new ArgumentException("MaxWorkers must be 4 or greater.",
                                                "value");
                _maxWorkers = value;
            }
        }

        /// <summary>
        ///   Close the stream.
        /// </summary>
        /// <remarks>
        /// <para>
        ///   This may or may not close the underlying stream.  Check the
        ///   constructors that accept a bool value.
        /// </para>
        /// </remarks>
        public override void Close()
        {
            // surface any exception raised on a background compressor thread
            if (this.pendingException != null)
            {
                this.handlingException = true;
                var pe = this.pendingException;
                this.pendingException = null;
                throw pe;
            }

            if (this.handlingException)
                return;

            if (output == null)
                return;   // already closed

            Stream o = this.output;
            try
            {
                FlushOutput(true);
            }
            finally
            {
                this.output = null;
                this.bw = null;
            }

            if (!leaveOpen)
                o.Close();
        }

        // Compress and emit whatever is buffered; when lastInput is true,
        // also write the bzip2 trailer.
        private void FlushOutput(bool lastInput)
        {
            if (this.emitting) return;

            // compress and write whatever is ready
            if (this.currentlyFilling >= 0)
            {
                WorkItem workitem = this.pool[this.currentlyFilling];
                CompressOne(workitem);
                this.currentlyFilling = -1; // get a new buffer next Write()
            }

            if (lastInput)
            {
                EmitPendingBuffers(true, false);
                EmitTrailer();
            }
            else
            {
                EmitPendingBuffers(false, false);
            }
        }

        /// <summary>
        ///   Flush the stream.
        /// </summary>
        public override void Flush()
        {
            if (this.output != null)
            {
                FlushOutput(false);
                this.bw.Flush();
                this.output.Flush();
            }
        }

        // Writes the 4-byte bzip2 stream header: 'B' 'Z' 'h' <blocksize digit>.
        private void EmitHeader()
        {
            var magic = new byte[] {
                (byte) 'B',
                (byte) 'Z',
                (byte) 'h',
                (byte) ('0' + this.blockSize100k)
            };

            // not necessary to shred the initial magic bytes
            this.output.Write(magic, 0, magic.Length);
        }

        private void EmitTrailer()
        {
            // A magic 48-bit number, 0x177245385090, to indicate the end
            // of the last block. (sqrt(pi), if you want to know)

            TraceOutput(TraceBits.Write, "total written out: {0} (0x{0:X})",
                        this.bw.TotalBytesWrittenOut);

            // must shred
            this.bw.WriteByte(0x17);
            this.bw.WriteByte(0x72);
            this.bw.WriteByte(0x45);
            this.bw.WriteByte(0x38);
            this.bw.WriteByte(0x50);
            this.bw.WriteByte(0x90);

            this.bw.WriteInt(this.combinedCRC);

            this.bw.FinishAndPad();

            TraceOutput(TraceBits.Write, "final total   : {0} (0x{0:X})",
                        this.bw.TotalBytesWrittenOut);
        }

        /// <summary>
        ///   The blocksize parameter specified at construction time.
        /// </summary>
        public int BlockSize
        {
            get { return this.blockSize100k; }
        }

        /// <summary>
        ///   Write data to the stream.
        /// </summary>
        /// <remarks>
        ///
        /// <para>
        ///   Use the <c>ParallelBZip2OutputStream</c> to compress data while
        ///   writing: create a <c>ParallelBZip2OutputStream</c> with a writable
        ///   output stream.  Then call <c>Write()</c> on that
        ///   <c>ParallelBZip2OutputStream</c>, providing uncompressed data as
        ///   input.  The data sent to the output stream will be the compressed
        ///   form of the input data.
        /// </para>
        ///
        /// <para>
        ///   A <c>ParallelBZip2OutputStream</c> can be used only for
        ///   <c>Write()</c> not for <c>Read()</c>.
        /// </para>
        ///
        /// </remarks>
        ///
        /// <param name="buffer">The buffer holding data to write to the stream.</param>
        /// <param name="offset">the offset within that data array to find the first byte to write.</param>
        /// <param name="count">the number of bytes to write.</param>
        public override void Write(byte[] buffer, int offset, int count)
        {
            bool mustWait = false;

            // This method does this:
            //   0. handles any pending exceptions
            //   1. write any buffers that are ready to be written
            //   2. fills a compressor buffer; when full, flip state to 'Filled',
            //   3. if more data to be written, goto step 1

            if (this.output == null)
                throw new IOException("the stream is not open");

            // dispense any exceptions that occurred on the BG threads
            if (this.pendingException != null)
            {
                this.handlingException = true;
                var pe = this.pendingException;
                this.pendingException = null;
                throw pe;
            }

            // NOTE(review): ArgumentOutOfRangeException would be the idiomatic
            // type here, but IndexOutOfRangeException is preserved because
            // existing callers may catch it.
            if (offset < 0)
                throw new IndexOutOfRangeException(String.Format("offset ({0}) must be > 0", offset));
            if (count < 0)
                throw new IndexOutOfRangeException(String.Format("count ({0}) must be > 0", count));
            if (offset + count > buffer.Length)
                throw new IndexOutOfRangeException(String.Format("offset({0}) count({1}) bLength({2})",
                                                                 offset, count, buffer.Length));

            if (count == 0) return;  // nothing to do

            if (!this.firstWriteDone)
            {
                // Want to do this on first Write, first session, and not in the
                // constructor.  Must allow the MaxWorkers to change after
                // construction, but before first Write().
                InitializePoolOfWorkItems();
                this.firstWriteDone = true;
            }

            int bytesWritten = 0;
            int bytesRemaining = count;

            do
            {
                // may need to make buffers available
                EmitPendingBuffers(false, mustWait);
                mustWait = false;

                // get a compressor to fill
                int ix = -1;
                if (this.currentlyFilling >= 0)
                {
                    ix = this.currentlyFilling;
                }
                else
                {
                    if (this.toFill.Count == 0)
                    {
                        // No compressors available to fill, so... need to emit
                        // compressed buffers.
                        mustWait = true;
                        continue;
                    }

                    ix = this.toFill.Dequeue();
                    ++this.lastFilled;
                }

                WorkItem workitem = this.pool[ix];
                workitem.ordinal = this.lastFilled;

                int n = workitem.Compressor.Fill(buffer, offset, bytesRemaining);
                if (n != bytesRemaining)
                {
                    // compressor is full: hand it to a background thread
                    if (!ThreadPool.QueueUserWorkItem( CompressOne, workitem ))
                        throw new Exception("Cannot enqueue workitem");

                    this.currentlyFilling = -1; // will get a new buffer next time
                    offset += n;
                }
                else
                    this.currentlyFilling = ix;

                bytesRemaining -= n;
                bytesWritten += n;
            }
            while (bytesRemaining > 0);

            totalBytesWrittenIn += bytesWritten;
            return;
        }

        // Dequeues compressed blocks (in ordinal order) and shreds their bits
        // into the shared output BitWriter. doAll=true drains everything;
        // mustWait=true blocks until at least one block is available.
        private void EmitPendingBuffers(bool doAll, bool mustWait)
        {
            // When combining parallel compression with a ZipSegmentedStream, it's
            // possible for the ZSS to throw from within this method.  In that
            // case, Close/Dispose will be called on this stream, if this stream
            // is employed within a using or try/finally pair as required.  But
            // this stream is unaware of the pending exception, so the Close()
            // method invokes this method AGAIN. This can lead to a deadlock.
            // Therefore, failfast if re-entering.
            if (emitting) return;
            emitting = true;

            if (doAll || mustWait)
                this.newlyCompressedBlob.WaitOne();

            do
            {
                int firstSkip = -1;
                int millisecondsToWait = doAll ? 200 : (mustWait ? -1 : 0);
                int nextToWrite = -1;

                do
                {
                    if (Monitor.TryEnter(this.toWrite, millisecondsToWait))
                    {
                        nextToWrite = -1;
                        try
                        {
                            if (this.toWrite.Count > 0)
                                nextToWrite = this.toWrite.Dequeue();
                        }
                        finally
                        {
                            Monitor.Exit(this.toWrite);
                        }

                        if (nextToWrite >= 0)
                        {
                            WorkItem workitem = this.pool[nextToWrite];
                            if (workitem.ordinal != this.lastWritten + 1)
                            {
                                // out of order. requeue and try again.
                                lock(this.toWrite)
                                {
                                    this.toWrite.Enqueue(nextToWrite);
                                }

                                if (firstSkip == nextToWrite)
                                {
                                    // We went around the list once.
                                    // None of the items in the list is the one we want.
                                    // Now wait for a compressor to signal again.
                                    this.newlyCompressedBlob.WaitOne();
                                    firstSkip = -1;
                                }
                                else if (firstSkip == -1)
                                    firstSkip = nextToWrite;

                                continue;
                            }

                            firstSkip = -1;

                            TraceOutput(TraceBits.Write,
                                        "Writing block {0}", workitem.ordinal);

                            // write the data to the output
                            var bw2 = workitem.bw;
                            bw2.Flush(); // not bw2.FinishAndPad()!
                            var ms = workitem.ms;
                            ms.Seek(0,SeekOrigin.Begin);

                            // cannot dump bytes!!
                            // ms.WriteTo(this.output);
                            //
                            // must do byte shredding:
                            int n;
                            int y = -1;
                            long totOut = 0;
                            var buffer = new byte[1024];
                            while ((n = ms.Read(buffer,0,buffer.Length)) > 0)
                            {
#if Trace
                                if (y == -1) // diagnostics only
                                {
                                    var sb1 = new System.Text.StringBuilder();
                                    sb1.Append("first 16 whole bytes in block: ");
                                    for (int z=0; z < 16; z++)
                                        sb1.Append(String.Format(" {0:X2}", buffer[z]));
                                    TraceOutput(TraceBits.Write, sb1.ToString());
                                }
#endif
                                y = n;
                                for (int k=0; k < n; k++)
                                {
                                    this.bw.WriteByte(buffer[k]);
                                }
                                totOut += n;
                            }
#if Trace
                            TraceOutput(TraceBits.Write,"out block length (bytes): {0} (0x{0:X})", totOut);
                            var sb = new System.Text.StringBuilder();
                            sb.Append("final 16 whole bytes in block: ");
                            for (int z=0; z < 16; z++)
                                sb.Append(String.Format(" {0:X2}", buffer[y-1-12+z]));
                            TraceOutput(TraceBits.Write, sb.ToString());
#endif

                            // and now any remaining bits
                            TraceOutput(TraceBits.Write,
                                        " remaining bits: {0} 0x{1:X}",
                                        bw2.NumRemainingBits,
                                        bw2.RemainingBits);
                            if (bw2.NumRemainingBits > 0)
                            {
                                this.bw.WriteBits(bw2.NumRemainingBits, bw2.RemainingBits);
                            }

                            TraceOutput(TraceBits.Crc," combined CRC (before): {0:X8}",
                                        this.combinedCRC);

                            // bzip2 combined CRC: rotate left 1, then XOR the block CRC
                            this.combinedCRC = (this.combinedCRC << 1) | (this.combinedCRC >> 31);
                            this.combinedCRC ^= (uint) workitem.Compressor.Crc32;

                            TraceOutput(TraceBits.Crc,
                                        " block    CRC         : {0:X8}",
                                        workitem.Compressor.Crc32);
                            TraceOutput(TraceBits.Crc,
                                        " combined CRC (after) : {0:X8}",
                                        this.combinedCRC);
                            TraceOutput(TraceBits.Write,
                                        "total written out: {0} (0x{0:X})",
                                        this.bw.TotalBytesWrittenOut);
                            TraceOutput(TraceBits.Write | TraceBits.Crc, "");

                            this.totalBytesWrittenOut += totOut;

                            bw2.Reset();
                            this.lastWritten = workitem.ordinal;
                            workitem.ordinal = -1;
                            this.toFill.Enqueue(workitem.index);

                            // don't wait next time through
                            if (millisecondsToWait == -1) millisecondsToWait = 0;
                        }
                    }
                    else
                        nextToWrite = -1;

                } while (nextToWrite >= 0);

            } while (doAll && (this.lastWritten != this.latestCompressed));

            if (doAll)
            {
                TraceOutput(TraceBits.Crc,
                            " combined CRC (final) : {0:X8}", this.combinedCRC);
            }

            emitting = false;
        }

        // Runs on a thread-pool thread (or inline from FlushOutput): compresses
        // one work item and queues it for ordered emission.
        private void CompressOne(Object wi)
        {
            // compress one buffer
            WorkItem workitem = (WorkItem) wi;
            try
            {
                // compress and write to the compressor's MemoryStream
                workitem.Compressor.CompressAndWrite();

                lock(this.latestLock)
                {
                    if (workitem.ordinal > this.latestCompressed)
                        this.latestCompressed = workitem.ordinal;
                }
                lock (this.toWrite)
                {
                    this.toWrite.Enqueue(workitem.index);
                }
                this.newlyCompressedBlob.Set();
            }
            catch (System.Exception exc1)
            {
                lock(this.eLock)
                {
                    // expose the exception to the main thread.
                    // FIX: the original tested (pendingException != null), which
                    // meant the FIRST exception was never stored and was lost;
                    // keep the first exception and drop later ones.
                    if (this.pendingException == null)
                        this.pendingException = exc1;
                }
            }
        }

        /// <summary>
        ///   Indicates whether the stream can be read.
        /// </summary>
        /// <remarks>
        ///   The return value is always false.
        /// </remarks>
        public override bool CanRead
        {
            get { return false; }
        }

        /// <summary>
        ///   Indicates whether the stream supports Seek operations.
        /// </summary>
        /// <remarks>
        ///   Always returns false.
        /// </remarks>
        public override bool CanSeek
        {
            get { return false; }
        }

        /// <summary>
        ///   Indicates whether the stream can be written.
        /// </summary>
        /// <remarks>
        ///   The return value depends on whether the captive stream supports writing.
        /// </remarks>
        public override bool CanWrite
        {
            get
            {
                if (this.output == null)
                    throw new ObjectDisposedException("BZip2Stream");
                return this.output.CanWrite;
            }
        }

        /// <summary>
        ///   Reading this property always throws a <see cref="NotImplementedException"/>.
        /// </summary>
        public override long Length
        {
            get { throw new NotImplementedException(); }
        }

        /// <summary>
        ///   The position of the stream pointer.
        /// </summary>
        ///
        /// <remarks>
        ///   Setting this property always throws a <see
        ///   cref="NotImplementedException"/>. Reading will return the
        ///   total number of uncompressed bytes written through.
        /// </remarks>
        public override long Position
        {
            get { return this.totalBytesWrittenIn; }
            set { throw new NotImplementedException(); }
        }

        /// <summary>
        ///   The total number of bytes written out by the stream.
        /// </summary>
        /// <remarks>
        ///   This value is meaningful only after a call to Close().
        /// </remarks>
        public Int64 BytesWrittenOut { get { return totalBytesWrittenOut; } }

        /// <summary>
        ///   Calling this method always throws a <see cref="NotImplementedException"/>.
        /// </summary>
        /// <param name="offset">this is irrelevant, since it will always throw!</param>
        /// <param name="origin">this is irrelevant, since it will always throw!</param>
        /// <returns>irrelevant!</returns>
        public override long Seek(long offset, System.IO.SeekOrigin origin)
        {
            throw new NotImplementedException();
        }

        /// <summary>
        ///   Calling this method always throws a <see cref="NotImplementedException"/>.
        /// </summary>
        /// <param name="value">this is irrelevant, since it will always throw!</param>
        public override void SetLength(long value)
        {
            throw new NotImplementedException();
        }

        /// <summary>
        ///   Calling this method always throws a <see cref="NotImplementedException"/>.
        /// </summary>
        /// <param name='buffer'>this parameter is never used</param>
        /// <param name='offset'>this parameter is never used</param>
        /// <param name='count'>this parameter is never used</param>
        /// <returns>never returns anything; always throws</returns>
        public override int Read(byte[] buffer, int offset, int count)
        {
            throw new NotImplementedException();
        }

        // used only when Trace is defined
        [Flags]
        enum TraceBits : uint
        {
            None = 0,
            Crc = 1,
            Write = 2,
            All = 0xffffffff,
        }

        [System.Diagnostics.ConditionalAttribute("Trace")]
        private void TraceOutput(TraceBits bits, string format, params object[] varParams)
        {
            if ((bits & this.desiredTrace) != 0)
            {
                // serialize console output across worker threads
                lock(outputLock)
                {
                    int tid = Thread.CurrentThread.GetHashCode();
#if FEATURE_FULL_CONSOLE
                    Console.ForegroundColor = (ConsoleColor) (tid % 8 + 10);
#endif
                    Console.Write("{0:000} PBOS ", tid);
                    Console.WriteLine(format, varParams);
#if FEATURE_FULL_CONSOLE
                    Console.ResetColor();
#endif
                }
            }
        }
    }
}
/* * This file is part of UniERM ReportDesigner, based on reportFU by Josh Wilson, * the work of Kim Sheffield and the fyiReporting project. * * Prior Copyrights: * _________________________________________________________ * |Copyright (C) 2010 devFU Pty Ltd, Josh Wilson and Others| * | (http://reportfu.org) | * ========================================================= * _________________________________________________________ * |Copyright (C) 2004-2008 fyiReporting Software, LLC | * |For additional information, email info@fyireporting.com | * |or visit the website www.fyiReporting.com. | * ========================================================= * * License: * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/

using System;
using System.Collections.Generic;
using System.Text;
using System.Drawing;
using System.ComponentModel;            // need this for the properties metadata
using System.Drawing.Design;
using System.Xml;
using System.Globalization;
using System.Windows.Forms;
using System.Windows.Forms.Design;

namespace Reporting.RdlDesign
{
    /// <summary>
    /// PropertyAppearance - exposes the Style sub-element of a report item
    /// (font, color, alignment, format) as an expandable property-grid entry.
    /// Values are read/written through the owning PropertyReportItem using a
    /// path array whose last slot is filled in per style attribute.
    /// </summary>
    [TypeConverter(typeof(PropertyAppearanceConverter)),
        Editor(typeof(PropertyAppearanceUIEditor), typeof(System.Drawing.Design.UITypeEditor))]
    internal class PropertyAppearance : IReportItem
    {
        PropertyReportItem pri;     // the report item whose Style we edit
        string[] _subitems;         // XML path; last slot is the style attribute name
        string[] _names;            // optional extra path prefix (may be null)

        internal PropertyAppearance(PropertyReportItem ri)
        {
            pri = ri;
            _names = null;
            _subitems = new string[] { "Style", "" };
        }

        internal PropertyAppearance(PropertyReportItem ri, params string[] names)
        {
            pri = ri;
            _names = names;

            // now build the array used to get/set values:
            // [names..., "Style", <attribute placeholder>]
            if (names != null)
            {
                _subitems = new string[names.Length + 2];
                int i = 0;
                foreach (string s in names)
                    _subitems[i++] = s;
                _subitems[i++] = "Style";
            }
            else
                _subitems = new string[] { "Style", "" };
        }

        internal string[] Names
        {
            get { return _names; }
        }

        [RefreshProperties(RefreshProperties.Repaint),
            DescriptionAttribute("FontFamily is the name of the font family. Not all renderers support all fonts.")]
        public PropertyExpr FontFamily
        {
            get { return new PropertyExpr(GetStyleValue("FontFamily", "Arial")); }
            set { SetStyleValue("FontFamily", value.Expression); }
        }

        [RefreshProperties(RefreshProperties.Repaint),
            DescriptionAttribute("Font size controls the text size.")]
        public PropertyExpr FontSize
        {
            get { return new PropertyExpr(GetStyleValue("FontSize", "10pt")); }
            set
            {
                // literal sizes are validated; expressions pass through unchecked
                if (!pri.IsExpression(value.Expression))
                    DesignerUtility.ValidateSize(value.Expression, true, false);
                SetStyleValue("FontSize", value.Expression);
            }
        }

        [TypeConverter(typeof(FontStyleConverter)),
            DescriptionAttribute("FontStyle determines if font is italicized.")]
        public string FontStyle
        {
            get { return GetStyleValue("FontStyle", "Normal"); }
            set { SetStyleValue("FontStyle", value); }
        }

        [TypeConverter(typeof(FontWeightConverter)),
            DescriptionAttribute("FontWeight controls the boldness of the font.")]
        public string FontWeight
        {
            get { return GetStyleValue("FontWeight", "Normal"); }
            set { SetStyleValue("FontWeight", value); }
        }

        [TypeConverter(typeof(ColorConverter)),
            DescriptionAttribute("Text color")]
        public string Color
        {
            get { return GetStyleValue("Color", "black"); }
            set { SetStyleValue("Color", value); }
        }

        [TypeConverter(typeof(TextDecorationConverter)),
            DescriptionAttribute("TextDecoration controls underline, overline, and linethrough. Not all renderers support all options.")]
        public string TextDecoration
        {
            get { return GetStyleValue("TextDecoration", "None"); }
            set { SetStyleValue("TextDecoration", value); }
        }

        [TypeConverter(typeof(TextAlignConverter)),
            DescriptionAttribute("Horizontal alignment")]
        public string TextAlign
        {
            get { return GetStyleValue("TextAlign", "General"); }
            set { SetStyleValue("TextAlign", value); }
        }

        [TypeConverter(typeof(VerticalAlignConverter)),
            DescriptionAttribute("Vertical alignment")]
        public string VerticalAlign
        {
            get { return GetStyleValue("VerticalAlign", "Top"); }
            set { SetStyleValue("VerticalAlign", value); }
        }

        [TypeConverter(typeof(DirectionConverter)),
            DescriptionAttribute("Text is either written left-to-right (LTR) or right-to-left (RTL).")]
        public string Direction
        {
            get { return GetStyleValue("Direction", "LTR"); }
            set { SetStyleValue("Direction", value); }
        }

        [TypeConverter(typeof(WritingModeConverter)),
            DescriptionAttribute("Text is either written horizontally (lr-tb) or vertically (tb-rl).")]
        public string WritingMode
        {
            get { return GetStyleValue("WritingMode", "lr-tb"); }
            set { SetStyleValue("WritingMode", value); }
        }

        [TypeConverter(typeof(FormatConverter)),
            DescriptionAttribute("Depending on type the value can be formatted.")]
        public string Format
        {
            get { return GetStyleValue("Format", ""); }
            set { SetStyleValue("Format", value); }
        }

        /// <summary>
        /// Summary shown in the collapsed property-grid row: "family, size, color".
        /// </summary>
        public override string ToString()
        {
            string f = GetStyleValue("FontFamily", "Arial");
            string s = GetStyleValue("FontSize", "10pt");
            // FIX: default was "Black" here but "black" in the Color getter;
            // use the same default so the summary matches the Color row.
            string c = GetStyleValue("Color", "black");
            return string.Format("{0}, {1}, {2}", f, s, c);
        }

        // Reads one style attribute; 'def' is returned when the value is absent.
        private string GetStyleValue(string l1, string def)
        {
            _subitems[_subitems.Length - 1] = l1;
            return pri.GetWithList(def, _subitems);
        }

        // Writes one style attribute through the owning report item.
        private void SetStyleValue(string l1, string val)
        {
            _subitems[_subitems.Length - 1] = l1;
            pri.SetWithList(val, _subitems);
        }

        #region IReportItem Members
        public PropertyReportItem GetPRI()
        {
            return pri;
        }
        #endregion
    }

    /// <summary>
    /// Makes PropertyAppearance expandable in the grid and renders its
    /// string summary via ToString().
    /// </summary>
    internal class PropertyAppearanceConverter : ExpandableObjectConverter
    {
        public override bool GetStandardValuesExclusive(ITypeDescriptorContext context)
        {
            return false;
        }

        // NOTE(review): this advertises conversion TO PropertyAppearance while
        // ConvertTo below only handles string; preserved as-is to avoid
        // changing designer behavior — confirm the intent before "fixing".
        public override bool CanConvertTo(ITypeDescriptorContext context, System.Type destinationType)
        {
            if (destinationType == typeof(PropertyAppearance))
                return true;

            return base.CanConvertTo(context, destinationType);
        }

        public override object ConvertTo(ITypeDescriptorContext context,
            CultureInfo culture, object value, Type destinationType)
        {
            if (destinationType == typeof(string) && value is PropertyAppearance)
            {
                PropertyAppearance pf = value as PropertyAppearance;
                return pf.ToString();
            }
            return base.ConvertTo(context, culture, value, destinationType);
        }
    }

    /// <summary>
    /// Modal UI editor that opens the font dialog (SingleCtlDialog) for the
    /// appearance value and returns a fresh PropertyAppearance on OK.
    /// </summary>
    internal class PropertyAppearanceUIEditor : UITypeEditor
    {
        internal PropertyAppearanceUIEditor()
        {
        }

        public override UITypeEditorEditStyle GetEditStyle(ITypeDescriptorContext context)
        {
            return UITypeEditorEditStyle.Modal;
        }

        public override object EditValue(ITypeDescriptorContext context,
                                         IServiceProvider provider,
                                         object value)
        {
            if ((context == null) || (provider == null))
                return base.EditValue(context, provider, value);

            // Access the Property Browser's UI display service
            IWindowsFormsEditorService editorService =
                (IWindowsFormsEditorService)provider.GetService(typeof(IWindowsFormsEditorService));
            if (editorService == null)
                return base.EditValue(context, provider, value);

            // Create an instance of the UI editor form
            IReportItem iri = context.Instance as IReportItem;
            if (iri == null)
                return base.EditValue(context, provider, value);
            PropertyReportItem pre = iri.GetPRI();

            PropertyAppearance pf = value as PropertyAppearance;
            if (pf == null)
                return base.EditValue(context, provider, value);

            using (SingleCtlDialog scd = new SingleCtlDialog(pre.DesignCtl, pre.Draw, pre.Nodes,
                SingleCtlTypeEnum.FontCtl, pf.Names))
            {
                // Display the UI editor dialog
                if (editorService.ShowDialog(scd) == DialogResult.OK)
                {
                    // Return the new property value from the UI editor form
                    return new PropertyAppearance(pre, pf.Names);
                }
            }
            return base.EditValue(context, provider, value);
        }
    }

    #region FontStyle
    // Drop-down of valid FontStyle values; free-form entry still allowed.
    internal class FontStyleConverter : StringConverter
    {
        static readonly string[] StyleList = new string[] {"Normal","Italic"};

        public override bool GetStandardValuesSupported(ITypeDescriptorContext context)
        {
            return true;
        }

        public override bool GetStandardValuesExclusive(ITypeDescriptorContext context)
        {
            return false;   // allow user to also edit the color directly
        }

        public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
        {
            return new StandardValuesCollection(StyleList);
        }
    }
    #endregion

    #region FontWeight
    // Drop-down of valid FontWeight values; free-form entry still allowed.
    internal class FontWeightConverter : StringConverter
    {
        static readonly string[] WeightList = new string[] {
            "Lighter", "Normal","Bold", "Bolder",
            "100", "200", "300", "400", "500", "600", "700", "800", "900"};

        public override bool GetStandardValuesSupported(ITypeDescriptorContext context)
        {
            return true;
        }

        public override bool GetStandardValuesExclusive(ITypeDescriptorContext context)
        {
            return false;   // allow user to also edit the color directly
        }

        public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
        {
            return new StandardValuesCollection(WeightList);
        }
    }
    #endregion

    #region TextDecoration
    // Drop-down of valid TextDecoration values; free-form entry still allowed.
    internal class TextDecorationConverter : StringConverter
    {
        static readonly string[] TDList = new string[] {
            "Underline", "Overline", "LineThrough", "None"};

        public override bool GetStandardValuesSupported(ITypeDescriptorContext context)
        {
            return true;
        }

        public override bool GetStandardValuesExclusive(ITypeDescriptorContext context)
        {
            return false;   // allow user to also edit directly
        }

        public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
        {
            return new StandardValuesCollection(TDList);
        }
    }
    #endregion

    #region TextAlign
    // Drop-down of valid TextAlign values; free-form entry still allowed.
    internal class TextAlignConverter : StringConverter
    {
        static readonly string[] TAList = new string[] {
            "Left", "Center", "Right", "General" };

        public override bool GetStandardValuesSupported(ITypeDescriptorContext context)
        {
            return true;
        }

        public override bool GetStandardValuesExclusive(ITypeDescriptorContext context)
        {
            return false;   // allow user to also edit directly
        }

        public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
        {
            return new StandardValuesCollection(TAList);
        }
    }
    #endregion

    #region VerticalAlign
    // Drop-down of valid VerticalAlign values; free-form entry still allowed.
    internal class VerticalAlignConverter : StringConverter
    {
        static readonly string[] VAList = new string[] {
            "Top", "Middle", "Bottom" };

        public override bool GetStandardValuesSupported(ITypeDescriptorContext context)
        {
            return true;
        }

        public override bool GetStandardValuesExclusive(ITypeDescriptorContext context)
        {
            return false;   // allow user to also edit directly
        }

        public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
        {
            return new StandardValuesCollection(VAList);
        }
    }
    #endregion

    #region Direction
    // Drop-down of valid Direction values; free-form entry still allowed.
    internal class DirectionConverter : StringConverter
    {
        static readonly string[] DirList = new string[] { "LTR", "RTL" };

        public override bool GetStandardValuesSupported(ITypeDescriptorContext context)
        {
            return true;
        }

        public override bool GetStandardValuesExclusive(ITypeDescriptorContext context)
        {
            return false;   // allow user to also edit directly
        }

        public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
        {
            return new StandardValuesCollection(DirList);
        }
    }
    #endregion

    #region WritingMode
    // Drop-down of valid WritingMode values; free-form entry still allowed.
    internal class WritingModeConverter : StringConverter
    {
        static readonly string[] WMList = new string[] { "lr-tb", "tb-rl" };

        public override bool GetStandardValuesSupported(ITypeDescriptorContext context)
        {
            return true;
        }

        public override bool GetStandardValuesExclusive(ITypeDescriptorContext context)
        {
            return false;   // allow user to also edit directly
        }

        public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
        {
            return new StandardValuesCollection(WMList);
        }
    }
    #endregion

    #region Format
    // Drop-down of common format strings (from StaticLists); free-form entry allowed.
    internal class FormatConverter : StringConverter
    {
        public override bool GetStandardValuesSupported(ITypeDescriptorContext context)
        {
            return true;
        }

        public override bool GetStandardValuesExclusive(ITypeDescriptorContext context)
        {
            return false;   // allow user to also edit directly
        }

        public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
        {
            return new StandardValuesCollection(StaticLists.FormatList);
        }
    }
    #endregion
}
// // Copyright (c) 2004-2011 Jaroslaw Kowalski <jaak@jkowalski.net> // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. 
//

namespace NLog.Targets
{
    using System;
    using System.Collections.Generic;
    using System.ComponentModel;
    using System.Globalization;
#if WCF_SUPPORTED
    using System.ServiceModel;
    using System.ServiceModel.Channels;
#endif
    using System.Threading;
#if SILVERLIGHT
    using System.Windows;
    using System.Windows.Threading;
#endif
    using NLog.Common;
    using NLog.Config;
    using NLog.Internal;
    using NLog.Layouts;
    using NLog.LogReceiverService;

    /// <summary>
    /// Sends log messages to a NLog Receiver Service (using WCF or Web Services).
    /// </summary>
    /// <seealso href="http://nlog-project.org/wiki/LogReceiverService_target">Documentation on NLog Wiki</seealso>
    [Target("LogReceiverService")]
    public class LogReceiverWebServiceTarget : Target
    {
        // Events that arrive while a service call is in flight are parked here and
        // flushed when the call completes (see SendBufferedEvents).
        private LogEventInfoBuffer buffer = new LogEventInfoBuffer(10000, false, 10000);

        // True while a ProcessLogMessages call is outstanding. Only read/written
        // under the target's SyncRoot lock (Write/FlushAsync are invoked with it held).
        private bool inCall;

        /// <summary>
        /// Initializes a new instance of the <see cref="LogReceiverWebServiceTarget"/> class.
        /// </summary>
        public LogReceiverWebServiceTarget()
        {
            this.Parameters = new List<MethodCallParameter>();
        }

        /// <summary>
        /// Gets or sets the endpoint address.
        /// </summary>
        /// <value>The endpoint address.</value>
        /// <docgen category='Connection Options' order='10' />
        [RequiredParameter]
        public virtual string EndpointAddress { get; set; }

#if WCF_SUPPORTED
        /// <summary>
        /// Gets or sets the name of the endpoint configuration in WCF configuration file.
        /// </summary>
        /// <value>The name of the endpoint configuration.</value>
        /// <docgen category='Connection Options' order='10' />
        public string EndpointConfigurationName { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether to use binary message encoding.
        /// </summary>
        /// <docgen category='Payload Options' order='10' />
        public bool UseBinaryEncoding { get; set; }
#endif

        /// <summary>
        /// Gets or sets the client ID.
        /// </summary>
        /// <value>The client ID.</value>
        /// <docgen category='Payload Options' order='10' />
        public Layout ClientId { get; set; }

        /// <summary>
        /// Gets the list of parameters.
        /// </summary>
        /// <value>The parameters.</value>
        /// <docgen category='Payload Options' order='10' />
        [ArrayParameter(typeof(MethodCallParameter), "parameter")]
        public IList<MethodCallParameter> Parameters { get; private set; }

        /// <summary>
        /// Gets or sets a value indicating whether to include per-event properties in the payload sent to the server.
        /// </summary>
        /// <docgen category='Payload Options' order='10' />
        public bool IncludeEventProperties { get; set; }

        /// <summary>
        /// Called when log events are being sent (test hook).
        /// </summary>
        /// <param name="events">The events.</param>
        /// <param name="asyncContinuations">The async continuations.</param>
        /// <returns>True if events should be sent, false to stop processing them.</returns>
        protected internal virtual bool OnSend(NLogEvents events, IEnumerable<AsyncLogEventInfo> asyncContinuations)
        {
            return true;
        }

        /// <summary>
        /// Writes logging event to the log target. Must be overridden in inheriting
        /// classes.
        /// </summary>
        /// <param name="logEvent">Logging event to be written out.</param>
        protected override void Write(AsyncLogEventInfo logEvent)
        {
            this.Write(new[] { logEvent });
        }

        /// <summary>
        /// Writes an array of logging events to the log target. By default it iterates on all
        /// events and passes them to "Append" method. Inheriting classes can use this method to
        /// optimize batch writes.
        /// </summary>
        /// <param name="logEvents">Logging events to be written out.</param>
        protected override void Write(AsyncLogEventInfo[] logEvents)
        {
            // if web service call is being processed, buffer new events and return
            // lock is being held here
            if (this.inCall)
            {
                foreach (var ev in logEvents)
                {
                    this.buffer.Append(ev);
                }

                return;
            }

            var networkLogEvents = this.TranslateLogEvents(logEvents);
            this.Send(networkLogEvents, logEvents);
        }

        /// <summary>
        /// Flush any pending log messages asynchronously (in case of asynchronous targets).
        /// </summary>
        /// <param name="asyncContinuation">The asynchronous continuation.</param>
        protected override void FlushAsync(AsyncContinuation asyncContinuation)
        {
            try
            {
                this.SendBufferedEvents();
                asyncContinuation(null);
            }
            catch (Exception exception)
            {
                if (exception.MustBeRethrown())
                {
                    throw;
                }

                asyncContinuation(exception);
            }
        }

        /// <summary>
        /// Returns the ordinal of <paramref name="value"/> in the per-payload string
        /// table, adding the value to both the lookup table and
        /// <paramref name="context"/>.Strings on first use.
        /// </summary>
        private static int AddValueAndGetStringOrdinal(NLogEvents context, Dictionary<string, int> stringTable, string value)
        {
            int stringIndex;

            if (!stringTable.TryGetValue(value, out stringIndex))
            {
                stringIndex = context.Strings.Count;
                stringTable.Add(value, stringIndex);
                context.Strings.Add(value);
            }

            return stringIndex;
        }

        /// <summary>
        /// Converts an array of buffered log events into the wire representation
        /// (<see cref="NLogEvents"/>), building the shared layout-name list and
        /// string table along the way.
        /// </summary>
        private NLogEvents TranslateLogEvents(AsyncLogEventInfo[] logEvents)
        {
            if (logEvents.Length == 0 && !LogManager.ThrowExceptions)
            {
                InternalLogger.Error("LogEvents array is empty, sending empty event...");
                return new NLogEvents();
            }

            // NOTE(review): when logEvents is empty and ThrowExceptions is set, the
            // logEvents[0] accesses below throw IndexOutOfRangeException — presumably
            // the intended "throw" behavior; confirm before changing.
            string clientID = string.Empty;
            if (this.ClientId != null)
            {
                clientID = this.ClientId.Render(logEvents[0].LogEvent);
            }

            var networkLogEvents = new NLogEvents
            {
                ClientName = clientID,
                LayoutNames = new StringCollection(),
                Strings = new StringCollection(),
                BaseTimeUtc = logEvents[0].LogEvent.TimeStamp.ToUniversalTime().Ticks
            };

            var stringTable = new Dictionary<string, int>();

            for (int i = 0; i < this.Parameters.Count; ++i)
            {
                networkLogEvents.LayoutNames.Add(this.Parameters[i].Name);
            }

            if (this.IncludeEventProperties)
            {
                for (int i = 0; i < logEvents.Length; ++i)
                {
                    var ev = logEvents[i].LogEvent;

                    // add all event-level property names in 'LayoutNames' collection.
                    foreach (var prop in ev.Properties)
                    {
                        string propName = prop.Key as string;
                        if (propName != null)
                        {
                            if (!networkLogEvents.LayoutNames.Contains(propName))
                            {
                                networkLogEvents.LayoutNames.Add(propName);
                            }
                        }
                    }
                }
            }

            networkLogEvents.Events = new NLogEvent[logEvents.Length];
            for (int i = 0; i < logEvents.Length; ++i)
            {
                networkLogEvents.Events[i] = this.TranslateEvent(logEvents[i].LogEvent, networkLogEvents, stringTable);
            }

            return networkLogEvents;
        }

        /// <summary>
        /// Sends the translated payload to the service and wires the completion
        /// callback that reports results to the callers and drains the buffer.
        /// Sets <see cref="inCall"/> so concurrent writes get buffered instead.
        /// </summary>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope", Justification = "Client is disposed asynchronously.")]
        private void Send(NLogEvents events, IEnumerable<AsyncLogEventInfo> asyncContinuations)
        {
            if (!this.OnSend(events, asyncContinuations))
            {
                return;
            }

#if WCF_SUPPORTED
            var client = CreateWcfLogReceiverClient();
            client.ProcessLogMessagesCompleted += (sender, e) =>
                {
                    // report error to the callers
                    foreach (var ev in asyncContinuations)
                    {
                        ev.Continuation(e.Error);
                    }

                    // send any buffered events
                    this.SendBufferedEvents();
                };

            this.inCall = true;
#if SILVERLIGHT
            // Silverlight: WCF async calls must be started on the UI dispatcher thread.
            if (!Deployment.Current.Dispatcher.CheckAccess())
            {
                Deployment.Current.Dispatcher.BeginInvoke(() => client.ProcessLogMessagesAsync(events));
            }
            else
            {
                client.ProcessLogMessagesAsync(events);
            }
#else
            client.ProcessLogMessagesAsync(events);
#endif
#else
            var client = new SoapLogReceiverClient(this.EndpointAddress);
            this.inCall = true;
            client.BeginProcessLogMessages(
                events,
                result =>
                    {
                        Exception exception = null;

                        try
                        {
                            client.EndProcessLogMessages(result);
                        }
                        catch (Exception ex)
                        {
                            if (ex.MustBeRethrown())
                            {
                                throw;
                            }

                            exception = ex;
                        }

                        // report error to the callers
                        foreach (var ev in asyncContinuations)
                        {
                            ev.Continuation(exception);
                        }

                        // send any buffered events
                        this.SendBufferedEvents();
                    },
                null);
#endif
        }

#if WCF_SUPPORTED
        /// <summary>
        /// Creating a new instance of WcfLogReceiverClient
        ///
        /// Inheritors can override this method and provide their own
        /// service configuration - binding and endpoint address
        /// </summary>
        /// <returns>A configured <see cref="WcfLogReceiverClient"/>.</returns>
        protected virtual WcfLogReceiverClient CreateWcfLogReceiverClient()
        {
            WcfLogReceiverClient client;

            if (string.IsNullOrEmpty(this.EndpointConfigurationName))
            {
                // endpoint not specified - use BasicHttpBinding
                Binding binding;

                if (this.UseBinaryEncoding)
                {
                    binding = new CustomBinding(new BinaryMessageEncodingBindingElement(), new HttpTransportBindingElement());
                }
                else
                {
                    binding = new BasicHttpBinding();
                }

                client = new WcfLogReceiverClient(binding, new EndpointAddress(this.EndpointAddress));
            }
            else
            {
                client = new WcfLogReceiverClient(this.EndpointConfigurationName, new EndpointAddress(this.EndpointAddress));
            }

            client.ProcessLogMessagesCompleted += ClientOnProcessLogMessagesCompleted;

            return client;
        }

        // Closes the channel once the call completed; only valid from the Opened state.
        private void ClientOnProcessLogMessagesCompleted(object sender, AsyncCompletedEventArgs asyncCompletedEventArgs)
        {
            var client = sender as WcfLogReceiverClient;
            if (client != null && client.State == CommunicationState.Opened)
            {
                ((ICommunicationObject)client).Close();
            }
        }
#endif

        /// <summary>
        /// Drains events buffered during an in-flight call and sends them as the
        /// next payload; clears <see cref="inCall"/> when the buffer is empty.
        /// </summary>
        private void SendBufferedEvents()
        {
            lock (this.SyncRoot)
            {
                // clear inCall flag
                AsyncLogEventInfo[] bufferedEvents = this.buffer.GetEventsAndClear();
                if (bufferedEvents.Length > 0)
                {
                    var networkLogEvents = this.TranslateLogEvents(bufferedEvents);
                    this.Send(networkLogEvents, bufferedEvents);
                }
                else
                {
                    // nothing in the buffer, clear in-call flag
                    this.inCall = false;
                }
            }
        }

        /// <summary>
        /// Converts a single <see cref="LogEventInfo"/> into its compact wire form,
        /// interning all string values through the shared string table.
        /// </summary>
        private NLogEvent TranslateEvent(LogEventInfo eventInfo, NLogEvents context, Dictionary<string, int> stringTable)
        {
            var nlogEvent = new NLogEvent();
            nlogEvent.Id = eventInfo.SequenceID;
            nlogEvent.MessageOrdinal = AddValueAndGetStringOrdinal(context, stringTable, eventInfo.FormattedMessage);
            nlogEvent.LevelOrdinal = eventInfo.Level.Ordinal;
            nlogEvent.LoggerOrdinal = AddValueAndGetStringOrdinal(context, stringTable, eventInfo.LoggerName);
            nlogEvent.TimeDelta = eventInfo.TimeStamp.ToUniversalTime().Ticks - context.BaseTimeUtc;

            for (int i = 0; i < this.Parameters.Count; ++i)
            {
                var param = this.Parameters[i];
                var value = param.Layout.Render(eventInfo);
                int stringIndex = AddValueAndGetStringOrdinal(context, stringTable, value);

                nlogEvent.ValueIndexes.Add(stringIndex);
            }

            // layout names beyond Parameters.Count are per-event property names.
            for (int i = this.Parameters.Count; i < context.LayoutNames.Count; ++i)
            {
                string value;
                object propertyValue;

                if (eventInfo.Properties.TryGetValue(context.LayoutNames[i], out propertyValue))
                {
                    value = Convert.ToString(propertyValue, CultureInfo.InvariantCulture);
                }
                else
                {
                    value = string.Empty;
                }

                int stringIndex = AddValueAndGetStringOrdinal(context, stringTable, value);
                nlogEvent.ValueIndexes.Add(stringIndex);
            }

            // BUGFIX: Exception may be null for events logged without one; the previous
            // code called eventInfo.Exception.ToString() unconditionally, throwing
            // NullReferenceException for every non-exception event. Send an empty
            // string in that case instead.
            string exceptionText = eventInfo.Exception != null ? eventInfo.Exception.ToString() : string.Empty;
            nlogEvent.ValueIndexes.Add(AddValueAndGetStringOrdinal(context, stringTable, exceptionText));

            return nlogEvent;
        }
    }
}
namespace Nancy.Validation.FluentValidation.Tests
{
    using FakeItEasy;
    using Nancy.Tests;
    using Xunit;
    using global::FluentValidation.Internal;
    using global::FluentValidation.Validators;

    /// <summary>
    /// Verifies that <see cref="DefaultFluentAdapterFactory"/> maps each known
    /// FluentValidation property validator type to its dedicated Nancy adapter,
    /// and falls back to the generic <see cref="FluentAdapter"/> for unknown ones.
    /// </summary>
    public class DefaultFluentAdapterFactoryFixture
    {
        // Shared fixtures built per test (xUnit creates a new fixture instance for
        // each [Fact]). readonly: assigned only in the constructor. The rule's
        // contents are irrelevant to adapter selection, hence all-null arguments.
        private readonly PropertyRule rule;
        private readonly DefaultFluentAdapterFactory factory;

        public DefaultFluentAdapterFactoryFixture()
        {
            this.rule = new PropertyRule(null, null, null, null, null, null);
            this.factory = new DefaultFluentAdapterFactory();
        }

        [Fact]
        public void Should_create_custom_adapter_for_unknown_validator()
        {
            // Given
            var validator = A.Fake<IPropertyValidator>();

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<FluentAdapter>();
        }

        [Fact]
        public void Should_create_emailadapter_for_emailvalidator()
        {
            // Given
            var validator = new EmailValidator();

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<EmailAdapter>();
        }

        [Fact]
        public void Should_create_equaladapter_for_equalvalidator()
        {
            // Given
            var validator = new EqualValidator(10);

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<EqualAdapter>();
        }

        [Fact]
        public void Should_create_exactlengthadapter_for_exactlengthvalidator()
        {
            // Given
            var validator = new ExactLengthValidator(10);

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            // NOTE(review): 'ExactLengthAdapater' is the adapter type's actual
            // (misspelled) name in the project; do not "fix" it here.
            result.ShouldBeOfType<ExactLengthAdapater>();
        }

        [Fact]
        public void Should_create_exclusivebetweenadapter_for_exclusivebetweenvalidator()
        {
            // Given
            var validator = new ExclusiveBetweenValidator(1, 10);

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<ExclusiveBetweenAdapter>();
        }

        [Fact]
        public void Should_create_greaterthanadapter_for_greaterthanvalidator()
        {
            // Given
            var validator = new GreaterThanValidator(1);

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<GreaterThanAdapter>();
        }

        [Fact]
        public void Should_create_greaterthanorequaladapter_for_greaterthanorequalvalidator()
        {
            // Given
            var validator = new GreaterThanOrEqualValidator(1);

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<GreaterThanOrEqualAdapter>();
        }

        [Fact]
        public void Should_create_inclusivebetweenadapter_for_inclusivebetweenvalidator()
        {
            // Given
            var validator = new InclusiveBetweenValidator(1, 10);

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<InclusiveBetweenAdapter>();
        }

        [Fact]
        public void Should_create_lengthadapter_for_lengthvalidator()
        {
            // Given
            var validator = new LengthValidator(1, 10);

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<LengthAdapter>();
        }

        [Fact]
        public void Should_create_lessthanadapter_for_lessthanvalidator()
        {
            // Given
            var validator = new LessThanValidator(1);

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<LessThanAdapter>();
        }

        [Fact]
        public void Should_create_lessthanorequaladapter_for_lessthanorequalvalidator()
        {
            // Given
            var validator = new LessThanOrEqualValidator(1);

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<LessThanOrEqualAdapter>();
        }

        [Fact]
        public void Should_create_notemptyadapter_for_notemptyvalidator()
        {
            // Given
            var validator = new NotEmptyValidator(1);

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<NotEmptyAdapter>();
        }

        [Fact]
        public void Should_create_notequaladapter_for_notequalvalidator()
        {
            // Given
            var validator = new NotEqualValidator(1);

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<NotEqualAdapter>();
        }

        [Fact]
        public void Should_create_notnulladapter_for_notnullvalidator()
        {
            // Given
            var validator = new NotNullValidator();

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<NotNullAdapter>();
        }

        [Fact]
        public void Should_create_regularexpressionadapter_for_regularexpressionvalidator()
        {
            // Given
            var validator = new RegularExpressionValidator("[A-Z]*");

            // When
            var result = factory.Create(this.rule, validator);

            // Then
            result.ShouldBeOfType<RegularExpressionAdapter>();
        }
    }
}
//--------------------------------------------------------------------------- // // Copyright (C) Microsoft Corporation. All rights reserved. // //--------------------------------------------------------------------------- using System; using System.Collections.ObjectModel; using System.Collections.Specialized; using System.ComponentModel; using System.Diagnostics; using System.Windows; using System.Windows.Automation; using System.Windows.Automation.Peers; using System.Windows.Controls.Primitives; using System.Windows.Data; using System.Windows.Media; using System.Windows.Threading; using MS.Internal; namespace System.Windows.Controls { /// <summary> /// A control for displaying a row of the DataGrid. /// A row represents a data item in the DataGrid. /// A row displays a cell for each column of the DataGrid. /// /// The data item for the row is added n times to the row's Items collection, /// where n is the number of columns in the DataGrid. /// </summary> public class DataGridRow : Control { #region Constants private const byte DATAGRIDROW_stateMouseOverCode = 0; private const byte DATAGRIDROW_stateMouseOverEditingCode = 1; private const byte DATAGRIDROW_stateMouseOverEditingFocusedCode = 2; private const byte DATAGRIDROW_stateMouseOverSelectedCode = 3; private const byte DATAGRIDROW_stateMouseOverSelectedFocusedCode = 4; private const byte DATAGRIDROW_stateNormalCode = 5; private const byte DATAGRIDROW_stateNormalEditingCode = 6; private const byte DATAGRIDROW_stateNormalEditingFocusedCode = 7; private const byte DATAGRIDROW_stateSelectedCode = 8; private const byte DATAGRIDROW_stateSelectedFocusedCode = 9; private const byte DATAGRIDROW_stateNullCode = 255; // Static arrays to handle state transitions: private static byte[] _idealStateMapping = new byte[] { DATAGRIDROW_stateNormalCode, DATAGRIDROW_stateNormalCode, DATAGRIDROW_stateMouseOverCode, DATAGRIDROW_stateMouseOverCode, DATAGRIDROW_stateNullCode, DATAGRIDROW_stateNullCode, DATAGRIDROW_stateNullCode, 
DATAGRIDROW_stateNullCode, DATAGRIDROW_stateSelectedCode, DATAGRIDROW_stateSelectedFocusedCode, DATAGRIDROW_stateMouseOverSelectedCode, DATAGRIDROW_stateMouseOverSelectedFocusedCode, DATAGRIDROW_stateNormalEditingCode, DATAGRIDROW_stateNormalEditingFocusedCode, DATAGRIDROW_stateMouseOverEditingCode, DATAGRIDROW_stateMouseOverEditingFocusedCode }; private static byte[] _fallbackStateMapping = new byte[] { DATAGRIDROW_stateNormalCode, //DATAGRIDROW_stateMouseOverCode's fallback DATAGRIDROW_stateMouseOverEditingFocusedCode, //DATAGRIDROW_stateMouseOverEditingCode's fallback DATAGRIDROW_stateNormalEditingFocusedCode, //DATAGRIDROW_stateMouseOverEditingFocusedCode's fallback DATAGRIDROW_stateMouseOverSelectedFocusedCode, //DATAGRIDROW_stateMouseOverSelectedCode's fallback DATAGRIDROW_stateSelectedFocusedCode, //DATAGRIDROW_stateMouseOverSelectedFocusedCode's fallback DATAGRIDROW_stateNullCode, //DATAGRIDROW_stateNormalCode's fallback DATAGRIDROW_stateNormalEditingFocusedCode, //DATAGRIDROW_stateNormalEditingCode's fallback DATAGRIDROW_stateSelectedFocusedCode, //DATAGRIDROW_stateNormalEditingFocusedCode's fallback DATAGRIDROW_stateSelectedFocusedCode, //DATAGRIDROW_stateSelectedCode's fallback DATAGRIDROW_stateNormalCode //DATAGRIDROW_stateSelectedFocusedCode's fallback }; private static string[] _stateNames = new string[] { VisualStates.DATAGRIDROW_stateMouseOver, VisualStates.DATAGRIDROW_stateMouseOverEditing, VisualStates.DATAGRIDROW_stateMouseOverEditingFocused, VisualStates.DATAGRIDROW_stateMouseOverSelected, VisualStates.DATAGRIDROW_stateMouseOverSelectedFocused, VisualStates.DATAGRIDROW_stateNormal, VisualStates.DATAGRIDROW_stateNormalEditing, VisualStates.DATAGRIDROW_stateNormalEditingFocused, VisualStates.DATAGRIDROW_stateSelected, VisualStates.DATAGRIDROW_stateSelectedFocused }; #endregion Constants #region Constructors /// <summary> /// Instantiates global information. 
/// </summary> static DataGridRow() { VisibilityProperty.OverrideMetadata(typeof(DataGridRow), new FrameworkPropertyMetadata(null, OnCoerceVisibility)); DefaultStyleKeyProperty.OverrideMetadata(typeof(DataGridRow), new FrameworkPropertyMetadata(typeof(DataGridRow))); ItemsPanelProperty.OverrideMetadata(typeof(DataGridRow), new FrameworkPropertyMetadata(new ItemsPanelTemplate(new FrameworkElementFactory(typeof(DataGridCellsPanel))))); FocusableProperty.OverrideMetadata(typeof(DataGridRow), new FrameworkPropertyMetadata(false)); BackgroundProperty.OverrideMetadata(typeof(DataGridRow), new FrameworkPropertyMetadata(null, OnNotifyRowPropertyChanged, OnCoerceBackground)); BindingGroupProperty.OverrideMetadata(typeof(DataGridRow), new FrameworkPropertyMetadata(OnNotifyRowPropertyChanged)); // Set SnapsToDevicePixels to true so that this element can draw grid lines. The metadata options are so that the property value doesn't inherit down the tree from here. SnapsToDevicePixelsProperty.OverrideMetadata(typeof(DataGridRow), new FrameworkPropertyMetadata(true, FrameworkPropertyMetadataOptions.AffectsArrange)); IsMouseOverPropertyKey.OverrideMetadata(typeof(DataGridRow), new UIPropertyMetadata(new PropertyChangedCallback(OnNotifyRowAndRowHeaderPropertyChanged))); VirtualizingPanel.ShouldCacheContainerSizeProperty.OverrideMetadata(typeof(DataGridRow), new FrameworkPropertyMetadata(null, new CoerceValueCallback(OnCoerceShouldCacheContainerSize))); AutomationProperties.IsOffscreenBehaviorProperty.OverrideMetadata(typeof(DataGridRow), new FrameworkPropertyMetadata(IsOffscreenBehavior.FromClip)); } /// <summary> /// Instantiates a new instance of this class. /// </summary> public DataGridRow() { _tracker = new ContainerTracking<DataGridRow>(this); } #endregion #region Data Item /// <summary> /// The item that the row represents. This item is an entry in the list of items from the DataGrid. /// From this item, cells are generated for each column in the DataGrid. 
/// </summary> public object Item { get { return GetValue(ItemProperty); } set { SetValue(ItemProperty, value); } } /// <summary> /// The DependencyProperty for the Item property. /// </summary> public static readonly DependencyProperty ItemProperty = DependencyProperty.Register("Item", typeof(object), typeof(DataGridRow), new FrameworkPropertyMetadata(null, new PropertyChangedCallback(OnNotifyRowPropertyChanged))); /// <summary> /// Called when the value of the Item property changes. /// </summary> /// <param name="oldItem">The old value of Item.</param> /// <param name="newItem">The new value of Item.</param> protected virtual void OnItemChanged(object oldItem, object newItem) { DataGridCellsPresenter cellsPresenter = CellsPresenter; if (cellsPresenter != null) { cellsPresenter.Item = newItem; } } #endregion #region Template /// <summary> /// A template that will generate the panel that arranges the cells in this row. /// </summary> /// <remarks> /// The template for the row should contain an ItemsControl that template binds to this property. /// </remarks> public ItemsPanelTemplate ItemsPanel { get { return (ItemsPanelTemplate)GetValue(ItemsPanelProperty); } set { SetValue(ItemsPanelProperty, value); } } /// <summary> /// The DependencyProperty that represents the ItemsPanel property. /// </summary> public static readonly DependencyProperty ItemsPanelProperty = ItemsControl.ItemsPanelProperty.AddOwner(typeof(DataGridRow)); /// <summary> /// Clears the CellsPresenter and DetailsPresenter references on Template change. 
/// </summary> protected override void OnTemplateChanged(ControlTemplate oldTemplate, ControlTemplate newTemplate) { base.OnTemplateChanged(oldTemplate, newTemplate); CellsPresenter = null; DetailsPresenter = null; } #endregion #region Visual States private bool IsDataGridKeyboardFocusWithin { get { var dataGrid = DataGridOwner; if (dataGrid != null) { return dataGrid.IsKeyboardFocusWithin; } return false; } } /// <summary> /// Updates the background brush of the row, using a storyboard if available. /// </summary> internal override void ChangeVisualState(bool useTransitions) { byte idealStateMappingIndex = 0; if (IsSelected || IsEditing) // this is slightly different than SL because they assume if it's editing it will be selected. { idealStateMappingIndex += 8; } if (IsEditing) { idealStateMappingIndex += 4; } if (IsMouseOver) { idealStateMappingIndex += 2; } if (IsDataGridKeyboardFocusWithin) { idealStateMappingIndex += 1; } byte stateCode = _idealStateMapping[idealStateMappingIndex]; Debug.Assert(stateCode != DATAGRIDROW_stateNullCode); string storyboardName; while (stateCode != DATAGRIDROW_stateNullCode) { if (stateCode == DATAGRIDROW_stateNormalCode) { if (AlternationIndex % 2 == 1) { storyboardName = VisualStates.DATAGRIDROW_stateAlternate; } else { storyboardName = VisualStates.DATAGRIDROW_stateNormal; } } else { storyboardName = _stateNames[stateCode]; } if (VisualStateManager.GoToState(this, storyboardName, useTransitions)) { break; } else { // The state wasn't implemented so fall back to the next one stateCode = _fallbackStateMapping[stateCode]; } } base.ChangeVisualState(useTransitions); } #endregion #region Row Header /// <summary> /// The object representing the Row Header. /// </summary> public object Header { get { return GetValue(HeaderProperty); } set { SetValue(HeaderProperty, value); } } /// <summary> /// The DependencyProperty for the Header property. 
/// </summary> public static readonly DependencyProperty HeaderProperty = DependencyProperty.Register("Header", typeof(object), typeof(DataGridRow), new FrameworkPropertyMetadata(null, new PropertyChangedCallback(OnNotifyRowAndRowHeaderPropertyChanged))); /// <summary> /// Called when the value of the Header property changes. /// </summary> /// <param name="oldHeader">The old value of Header</param> /// <param name="newHeader">The new value of Header</param> protected virtual void OnHeaderChanged(object oldHeader, object newHeader) { } /// <summary> /// The object representing the Row Header style. /// </summary> public Style HeaderStyle { get { return (Style)GetValue(HeaderStyleProperty); } set { SetValue(HeaderStyleProperty, value); } } /// <summary> /// The DependencyProperty for the HeaderStyle property. /// </summary> public static readonly DependencyProperty HeaderStyleProperty = DependencyProperty.Register("HeaderStyle", typeof(Style), typeof(DataGridRow), new FrameworkPropertyMetadata(null, OnNotifyRowAndRowHeaderPropertyChanged, OnCoerceHeaderStyle)); /// <summary> /// The object representing the Row Header template. /// </summary> public DataTemplate HeaderTemplate { get { return (DataTemplate)GetValue(HeaderTemplateProperty); } set { SetValue(HeaderTemplateProperty, value); } } /// <summary> /// The DependencyProperty for the HeaderTemplate property. /// </summary> public static readonly DependencyProperty HeaderTemplateProperty = DependencyProperty.Register("HeaderTemplate", typeof(DataTemplate), typeof(DataGridRow), new FrameworkPropertyMetadata(null, OnNotifyRowAndRowHeaderPropertyChanged, OnCoerceHeaderTemplate)); /// <summary> /// The object representing the Row Header template selector. 
/// </summary> public DataTemplateSelector HeaderTemplateSelector { get { return (DataTemplateSelector)GetValue(HeaderTemplateSelectorProperty); } set { SetValue(HeaderTemplateSelectorProperty, value); } } /// <summary> /// The DependencyProperty for the HeaderTemplateSelector property. /// </summary> public static readonly DependencyProperty HeaderTemplateSelectorProperty = DependencyProperty.Register("HeaderTemplateSelector", typeof(DataTemplateSelector), typeof(DataGridRow), new FrameworkPropertyMetadata(null, OnNotifyRowAndRowHeaderPropertyChanged, OnCoerceHeaderTemplateSelector)); /// <summary> /// Template used to visually indicate an error in row Validation. /// </summary> public ControlTemplate ValidationErrorTemplate { get { return (ControlTemplate)GetValue(ValidationErrorTemplateProperty); } set { SetValue(ValidationErrorTemplateProperty, value); } } /// <summary> /// DependencyProperty for the ValidationErrorTemplate property. /// </summary> public static readonly DependencyProperty ValidationErrorTemplateProperty = DependencyProperty.Register("ValidationErrorTemplate", typeof(ControlTemplate), typeof(DataGridRow), new FrameworkPropertyMetadata(null, OnNotifyRowPropertyChanged, OnCoerceValidationErrorTemplate)); #endregion #region Row Details /// <summary> /// The object representing the Row Details template. /// </summary> public DataTemplate DetailsTemplate { get { return (DataTemplate)GetValue(DetailsTemplateProperty); } set { SetValue(DetailsTemplateProperty, value); } } /// <summary> /// The DependencyProperty for the DetailsTemplate property. /// </summary> public static readonly DependencyProperty DetailsTemplateProperty = DependencyProperty.Register("DetailsTemplate", typeof(DataTemplate), typeof(DataGridRow), new FrameworkPropertyMetadata(null, OnNotifyDetailsTemplatePropertyChanged, OnCoerceDetailsTemplate)); /// <summary> /// The object representing the Row Details template selector. 
/// </summary> public DataTemplateSelector DetailsTemplateSelector { get { return (DataTemplateSelector)GetValue(DetailsTemplateSelectorProperty); } set { SetValue(DetailsTemplateSelectorProperty, value); } } /// <summary> /// The DependencyProperty for the DetailsTemplateSelector property. /// </summary> public static readonly DependencyProperty DetailsTemplateSelectorProperty = DependencyProperty.Register("DetailsTemplateSelector", typeof(DataTemplateSelector), typeof(DataGridRow), new FrameworkPropertyMetadata(null, OnNotifyDetailsTemplatePropertyChanged, OnCoerceDetailsTemplateSelector)); /// <summary> /// The Visibility of the Details presenter /// </summary> public Visibility DetailsVisibility { get { return (Visibility)GetValue(DetailsVisibilityProperty); } set { SetValue(DetailsVisibilityProperty, value); } } /// <summary> /// The DependencyProperty for the DetailsVisibility property. /// </summary> public static readonly DependencyProperty DetailsVisibilityProperty = DependencyProperty.Register("DetailsVisibility", typeof(Visibility), typeof(DataGridRow), new FrameworkPropertyMetadata(Visibility.Collapsed, OnNotifyDetailsVisibilityChanged, OnCoerceDetailsVisibility)); internal bool DetailsLoaded { get { return _detailsLoaded; } set { _detailsLoaded = value; } } #endregion #region Row Generation /// <summary> /// We can't override the metadata for a read only property, so we'll get the property change notification for AlternationIndexProperty this way instead. /// </summary> protected override void OnPropertyChanged(DependencyPropertyChangedEventArgs e) { base.OnPropertyChanged(e); if (e.Property == AlternationIndexProperty) { NotifyPropertyChanged(this, e, DataGridNotificationTarget.Rows); } } /// <summary> /// Prepares a row container for active use. /// </summary> /// <remarks> /// Instantiates or updates a MultipleCopiesCollection ItemsSource in /// order that cells be generated. 
/// </remarks> /// <param name="item">The data item that the row represents.</param> /// <param name="owningDataGrid">The DataGrid owner.</param> internal void PrepareRow(object item, DataGrid owningDataGrid) { bool fireOwnerChanged = (_owner != owningDataGrid); Debug.Assert(_owner == null || _owner == owningDataGrid, "_owner should be null before PrepareRow is called or the same as the owningDataGrid."); bool forcePrepareCells = false; _owner = owningDataGrid; if (this != item) { if (Item != item) { Item = item; } else { forcePrepareCells = true; } } if (IsEditing) { // If IsEditing was left on and this container was recycled, reset it here. IsEditing = false; } // Since we just changed _owner we need to invalidate all child properties that rely on a value supplied by the DataGrid. // A common scenario is when a recycled Row was detached from the visual tree and has just been reattached (we always clear out the // owner when recycling a container). if (fireOwnerChanged) { SyncProperties(forcePrepareCells); } CoerceValue(VirtualizingPanel.ShouldCacheContainerSizeProperty); // Re-run validation, but wait until Binding has occured. Dispatcher.BeginInvoke(new DispatcherOperationCallback(DelayedValidateWithoutUpdate), DispatcherPriority.DataBind, BindingGroup); } /// <summary> /// Clears the row of references. 
/// </summary>
internal void ClearRow(DataGrid owningDataGrid)
{
    Debug.Assert(_owner == owningDataGrid, "_owner should be the same as the DataGrid that is clearing the row.");

    // Stash any locally-set sizing/visibility values on the item itself so they
    // survive container recycling and can be restored in SyncProperties.
    var cellsPresenter = CellsPresenter;
    if (cellsPresenter != null)
    {
        PersistAttachedItemValue(cellsPresenter, DataGridCellsPresenter.HeightProperty);
    }

    PersistAttachedItemValue(this, DetailsVisibilityProperty);
    _owner = null;
}

// Moves a locally-set DP value off the container and onto the item (via the
// owner's ItemAttachedStorage), then clears it from the container.
private void PersistAttachedItemValue(DependencyObject objectWithProperty, DependencyProperty property)
{
    ValueSource valueSource = DependencyPropertyHelper.GetValueSource(objectWithProperty, property);
    if (valueSource.BaseValueSource == BaseValueSource.Local)
    {
        // attach the local value to the item so it can be restored later.
        _owner.ItemAttachedStorage.SetValue(Item, property, objectWithProperty.GetValue(property));
        objectWithProperty.ClearValue(property);
    }
}

// Restores a value previously persisted by PersistAttachedItemValue, if any.
private void RestoreAttachedItemValue(DependencyObject objectWithProperty, DependencyProperty property)
{
    object value;
    if (_owner.ItemAttachedStorage.TryGetValue(Item, property, out value))
    {
        objectWithProperty.SetValue(property, value);
    }
}

/// <summary>
/// Used by the DataGrid owner to send notifications to the row container.
/// </summary>
internal ContainerTracking<DataGridRow> Tracker
{
    get { return _tracker; }
}

#endregion

#region Row Resizing

// Capture the height at the start of a resize so a canceled resize can restore it.
internal void OnRowResizeStarted()
{
    var cellsPresenter = CellsPresenter;
    if (cellsPresenter != null)
    {
        _cellsPresenterResizeHeight = cellsPresenter.Height;
    }
}

internal void OnRowResize(double changeAmount)
{
    var cellsPresenter = CellsPresenter;
    if (cellsPresenter != null)
    {
        double newHeight = cellsPresenter.ActualHeight + changeAmount;

        // clamp the CellsPresenter size to the RowHeader size or MinHeight because the header won't shrink any smaller.
        double minHeight = Math.Max(RowHeader.DesiredSize.Height, MinHeight);
        if (DoubleUtil.LessThan(newHeight, minHeight))
        {
            newHeight = minHeight;
        }

        // clamp the CellsPresenter size to the MaxHeight of Row, because row wouldn't grow any larger
        double maxHeight = MaxHeight;
        if (DoubleUtil.GreaterThan(newHeight, maxHeight))
        {
            newHeight = maxHeight;
        }

        cellsPresenter.Height = newHeight;
    }
}

internal void OnRowResizeCompleted(bool canceled)
{
    var cellsPresenter = CellsPresenter;
    if (cellsPresenter != null && canceled)
    {
        // Roll back to the height captured in OnRowResizeStarted.
        cellsPresenter.Height = _cellsPresenterResizeHeight;
    }
}

internal void OnRowResizeReset()
{
    var cellsPresenter = CellsPresenter;
    if (cellsPresenter != null)
    {
        cellsPresenter.ClearValue(DataGridCellsPresenter.HeightProperty);
        if (_owner != null)
        {
            // Also drop any persisted per-item height so it doesn't get restored later.
            _owner.ItemAttachedStorage.ClearValue(Item, DataGridCellsPresenter.HeightProperty);
        }
    }
}

#endregion

#region Columns Notification

/// <summary>
/// Notification from the DataGrid that the columns collection has changed.
/// </summary>
/// <param name="columns">The columns collection.</param>
/// <param name="e">The event arguments from the collection's change event.</param>
protected internal virtual void OnColumnsChanged(ObservableCollection<DataGridColumn> columns, NotifyCollectionChangedEventArgs e)
{
    DataGridCellsPresenter cellsPresenter = CellsPresenter;
    if (cellsPresenter != null)
    {
        cellsPresenter.OnColumnsChanged(columns, e);
    }
}

#endregion

#region Property Coercion

// Each OnCoerce* callback below resolves the effective value between the row's
// local property and the corresponding DataGrid-level property via
// DataGridHelper.GetCoercedTransferPropertyValue.

private static object OnCoerceHeaderStyle(DependencyObject d, object baseValue)
{
    var row = (DataGridRow)d;
    return DataGridHelper.GetCoercedTransferPropertyValue(
        row,
        baseValue,
        HeaderStyleProperty,
        row.DataGridOwner,
        DataGrid.RowHeaderStyleProperty);
}

private static object OnCoerceHeaderTemplate(DependencyObject d, object baseValue)
{
    var row = (DataGridRow)d;
    return DataGridHelper.GetCoercedTransferPropertyValue(
        row,
        baseValue,
        HeaderTemplateProperty,
        row.DataGridOwner,
        DataGrid.RowHeaderTemplateProperty);
}

private static object OnCoerceHeaderTemplateSelector(DependencyObject d, object baseValue)
{
    var row = (DataGridRow)d;
    return DataGridHelper.GetCoercedTransferPropertyValue(
        row,
        baseValue,
        HeaderTemplateSelectorProperty,
        row.DataGridOwner,
        DataGrid.RowHeaderTemplateSelectorProperty);
}

private static object OnCoerceBackground(DependencyObject d, object baseValue)
{
    var row = (DataGridRow)d;
    object coercedValue = baseValue;

    // AlternationIndex 0 uses RowBackground, 1 uses AlternatingRowBackground;
    // other indices keep the base value untouched.
    switch (row.AlternationIndex)
    {
        case 0:
            coercedValue = DataGridHelper.GetCoercedTransferPropertyValue(
                row,
                baseValue,
                BackgroundProperty,
                row.DataGridOwner,
                DataGrid.RowBackgroundProperty);
            break;

        case 1:
            coercedValue = DataGridHelper.GetCoercedTransferPropertyValue(
                row,
                baseValue,
                BackgroundProperty,
                row.DataGridOwner,
                DataGrid.AlternatingRowBackgroundProperty);
            break;
    }

    return coercedValue;
}

private static object OnCoerceValidationErrorTemplate(DependencyObject d, object baseValue)
{
    var row = (DataGridRow)d;
    return DataGridHelper.GetCoercedTransferPropertyValue(
        row,
        baseValue,
        ValidationErrorTemplateProperty,
        row.DataGridOwner,
        DataGrid.RowValidationErrorTemplateProperty);
}

private static object OnCoerceDetailsTemplate(DependencyObject d, object baseValue)
{
    var row = (DataGridRow)d;
    return DataGridHelper.GetCoercedTransferPropertyValue(
        row,
        baseValue,
        DetailsTemplateProperty,
        row.DataGridOwner,
        DataGrid.RowDetailsTemplateProperty);
}

private static object OnCoerceDetailsTemplateSelector(DependencyObject d, object baseValue)
{
    var row = (DataGridRow)d;
    return DataGridHelper.GetCoercedTransferPropertyValue(
        row,
        baseValue,
        DetailsTemplateSelectorProperty,
        row.DataGridOwner,
        DataGrid.RowDetailsTemplateSelectorProperty);
}

private static object OnCoerceDetailsVisibility(DependencyObject d, object baseValue)
{
    var row = (DataGridRow)d;

    // The transferred value may be a DataGridRowDetailsVisibilityMode (from the
    // grid) rather than a Visibility; translate the mode into a concrete
    // Visibility based on template presence, selection, and the placeholder item.
    object visibility = DataGridHelper.GetCoercedTransferPropertyValue(
        row,
        baseValue,
        DetailsVisibilityProperty,
        row.DataGridOwner,
        DataGrid.RowDetailsVisibilityModeProperty);

    if (visibility is DataGridRowDetailsVisibilityMode)
    {
        var visibilityMode = (DataGridRowDetailsVisibilityMode)visibility;
        var hasDetailsTemplate = row.DetailsTemplate != null || row.DetailsTemplateSelector != null;
        var isRealItem = row.Item != CollectionView.NewItemPlaceholder;

        switch (visibilityMode)
        {
            case DataGridRowDetailsVisibilityMode.Collapsed:
                visibility = Visibility.Collapsed;
                break;
            case DataGridRowDetailsVisibilityMode.Visible:
                visibility = hasDetailsTemplate && isRealItem ? Visibility.Visible : Visibility.Collapsed;
                break;
            case DataGridRowDetailsVisibilityMode.VisibleWhenSelected:
                visibility = row.IsSelected && hasDetailsTemplate && isRealItem ? Visibility.Visible : Visibility.Collapsed;
                break;
            default:
                visibility = Visibility.Collapsed;
                break;
        }
    }

    return visibility;
}

/// <summary>
/// Coerces Visibility so that the NewItemPlaceholder doesn't show up while you're entering a new Item
/// </summary>
private static object OnCoerceVisibility(DependencyObject d, object baseValue)
{
    var row = (DataGridRow)d;
    var owningDataGrid = row.DataGridOwner;
    if (row.Item == CollectionView.NewItemPlaceholder && owningDataGrid != null)
    {
        return owningDataGrid.PlaceholderVisibility;
    }
    else
    {
        return baseValue;
    }
}

/// <summary>
/// Coerces ShouldCacheContainerSize so that the NewItemPlaceholder doesn't cache its size.
/// </summary>
private static object OnCoerceShouldCacheContainerSize(DependencyObject d, object baseValue)
{
    var row = (DataGridRow)d;
    // The placeholder row's size is not representative of real rows, so never cache it.
    if (row.Item == CollectionView.NewItemPlaceholder)
    {
        return false;
    }
    else
    {
        return baseValue;
    }
}

#endregion

#region Notification Propagation

// Forwards a DP change on the row to the row itself.
private static void OnNotifyRowPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    (d as DataGridRow).NotifyPropertyChanged(d, e, DataGridNotificationTarget.Rows);
}

// Forwards a DP change on the row to the row and its header.
private static void OnNotifyRowAndRowHeaderPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    (d as DataGridRow).NotifyPropertyChanged(d, e, DataGridNotificationTarget.Rows | DataGridNotificationTarget.RowHeaders);
}

private static void OnNotifyDetailsTemplatePropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    DataGridRow row = (DataGridRow)d;
    row.NotifyPropertyChanged(row, e, DataGridNotificationTarget.Rows | DataGridNotificationTarget.DetailsPresenter);

    // It only makes sense to fire UnloadingRowDetails if the row details are already loaded. The same is true for LoadingRowDetails,
    // since making row details visible will take care of firing LoadingRowDetails.
    if (row.DetailsLoaded && d.GetValue(e.Property) == e.NewValue)
    {
        if (row.DataGridOwner != null)
        {
            row.DataGridOwner.OnUnloadingRowDetailsWrapper(row);
        }

        if (e.NewValue != null)
        {
            // Invoke LoadingRowDetails, but only after the details template is expanded (so DetailsElement will be available).
            Dispatcher.CurrentDispatcher.BeginInvoke(new DispatcherOperationCallback(DataGrid.DelayedOnLoadingRowDetails), DispatcherPriority.Loaded, row);
        }
    }
}

private static void OnNotifyDetailsVisibilityChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    var row = (DataGridRow)d;

    // Notify the DataGrid at Loaded priority so the template has time to expand.
    Dispatcher.CurrentDispatcher.BeginInvoke(new DispatcherOperationCallback(DelayedRowDetailsVisibilityChanged), DispatcherPriority.Loaded, row);
    row.NotifyPropertyChanged(d, e, DataGridNotificationTarget.Rows | DataGridNotificationTarget.DetailsPresenter);
}

/// <summary>
/// Notifies the DataGrid that the visibility is changed. This is intended to be invoked at lower than Layout priority to give the template time to expand.
/// </summary>
private static object DelayedRowDetailsVisibilityChanged(object arg)
{
    var row = (DataGridRow)arg;
    var dataGrid = row.DataGridOwner;

    // DetailsElement may legitimately be null if the presenter has not hooked up yet.
    var detailsElement = row.DetailsPresenter != null ? row.DetailsPresenter.DetailsElement : null;
    if (dataGrid != null)
    {
        var detailsEventArgs = new DataGridRowDetailsEventArgs(row, detailsElement);
        dataGrid.OnRowDetailsVisibilityChanged(detailsEventArgs);
    }

    return null;
}

/// <summary>
/// Set by the CellsPresenter when it is created. Used by the Row to send down property change notifications.
/// </summary>
internal DataGridCellsPresenter CellsPresenter
{
    get { return _cellsPresenter; }
    set { _cellsPresenter = value; }
}

/// <summary>
/// Set by the DetailsPresenter when it is created. Used by the Row to send down property change notifications.
/// </summary>
internal DataGridDetailsPresenter DetailsPresenter
{
    get { return _detailsPresenter; }
    set { _detailsPresenter = value; }
}

/// <summary>
/// Set by the RowHeader when it is created. Used by the Row to send down property change notifications.
/// </summary>
internal DataGridRowHeader RowHeader
{
    get { return _rowHeader; }
    set { _rowHeader = value; }
}

/// <summary>
/// General notification for DependencyProperty changes from the grid or from columns.
/// </summary>
internal void NotifyPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e, DataGridNotificationTarget target)
{
    NotifyPropertyChanged(d, string.Empty, e, target);
}

/// <summary>
/// General notification for DependencyProperty changes from the grid or from columns.
/// </summary>
internal void NotifyPropertyChanged(DependencyObject d, string propertyName, DependencyPropertyChangedEventArgs e, DataGridNotificationTarget target)
{
    // Row-targeted changes: re-run the property transfer (row value vs. grid
    // value) for whichever row property the changed DP feeds into.
    if (DataGridHelper.ShouldNotifyRows(target))
    {
        if (e.Property == DataGrid.RowBackgroundProperty || e.Property == DataGrid.AlternatingRowBackgroundProperty ||
            e.Property == BackgroundProperty || e.Property == AlternationIndexProperty)
        {
            DataGridHelper.TransferProperty(this, BackgroundProperty);
        }
        else if (e.Property == DataGrid.RowHeaderStyleProperty || e.Property == HeaderStyleProperty)
        {
            DataGridHelper.TransferProperty(this, HeaderStyleProperty);
        }
        else if (e.Property == DataGrid.RowHeaderTemplateProperty || e.Property == HeaderTemplateProperty)
        {
            DataGridHelper.TransferProperty(this, HeaderTemplateProperty);
        }
        else if (e.Property == DataGrid.RowHeaderTemplateSelectorProperty || e.Property == HeaderTemplateSelectorProperty)
        {
            DataGridHelper.TransferProperty(this, HeaderTemplateSelectorProperty);
        }
        else if (e.Property == DataGrid.RowValidationErrorTemplateProperty || e.Property == ValidationErrorTemplateProperty)
        {
            DataGridHelper.TransferProperty(this, ValidationErrorTemplateProperty);
        }
        else if (e.Property == DataGrid.RowDetailsTemplateProperty || e.Property == DetailsTemplateProperty)
        {
            // A template change can also change whether details should be visible.
            DataGridHelper.TransferProperty(this, DetailsTemplateProperty);
            DataGridHelper.TransferProperty(this, DetailsVisibilityProperty);
        }
        else if (e.Property == DataGrid.RowDetailsTemplateSelectorProperty || e.Property == DetailsTemplateSelectorProperty)
        {
            DataGridHelper.TransferProperty(this, DetailsTemplateSelectorProperty);
            DataGridHelper.TransferProperty(this, DetailsVisibilityProperty);
        }
        else if (e.Property == DataGrid.RowDetailsVisibilityModeProperty || e.Property == DetailsVisibilityProperty || e.Property == IsSelectedProperty)
        {
            DataGridHelper.TransferProperty(this, DetailsVisibilityProperty);
        }
        else if (e.Property == ItemProperty)
        {
            OnItemChanged(e.OldValue, e.NewValue);
        }
        else if (e.Property == HeaderProperty)
        {
            OnHeaderChanged(e.OldValue, e.NewValue);
        }
        else if (e.Property == BindingGroupProperty)
        {
            // Re-run validation, but wait until Binding has occurred.
            Dispatcher.BeginInvoke(new DispatcherOperationCallback(DelayedValidateWithoutUpdate), DispatcherPriority.DataBind, e.NewValue);
        }
        else if (e.Property == DataGridRow.IsEditingProperty ||
                 e.Property == DataGridRow.IsMouseOverProperty ||
                 e.Property == DataGrid.IsKeyboardFocusWithinProperty)
        {
            UpdateVisualState();
        }
    }

    if (DataGridHelper.ShouldNotifyDetailsPresenter(target))
    {
        if (DetailsPresenter != null)
        {
            DetailsPresenter.NotifyPropertyChanged(d, e);
        }
    }

    if (DataGridHelper.ShouldNotifyCellsPresenter(target) ||
        DataGridHelper.ShouldNotifyCells(target) ||
        DataGridHelper.ShouldRefreshCellContent(target))
    {
        DataGridCellsPresenter cellsPresenter = CellsPresenter;
        if (cellsPresenter != null)
        {
            cellsPresenter.NotifyPropertyChanged(d, propertyName, e, target);
        }
    }

    if (DataGridHelper.ShouldNotifyRowHeaders(target) && RowHeader != null)
    {
        RowHeader.NotifyPropertyChanged(d, e);
    }
}

private object DelayedValidateWithoutUpdate(object arg)
{
    // Only validate if we have an Item.
    var bindingGroup = (BindingGroup)arg;
    if (bindingGroup != null && bindingGroup.Items.Count > 0)
    {
        bindingGroup.ValidateWithoutUpdate();
    }

    return null;
}

/// <summary>
/// Fired when the Row is attached to the DataGrid.  The scenario here is if the user is scrolling and
/// the Row is a recycled container that was just added back to the visual tree.  Properties that rely on a value from
/// the Grid should be reevaluated because they may be stale.
/// </summary>
/// <remarks>
/// Properties can obviously be stale if the DataGrid's value changes while the row is disconnected.  They can also
/// be stale for unobvious reasons.
///
/// For example, the Style property is invalidated when we detect a new Visual parent.  This happens for
/// elements in the row (such as the RowHeader) before Prepare is called on the Row.  The coercion callback
/// will thus be unable to find the DataGrid and will return the wrong value.
///
/// There is a potential for perf work here.  If we know a DP isn't invalidated when the visual tree is reconnected
/// and we know that the Grid hasn't modified that property then its value is likely fine.  We could also cache whether
/// or not the Grid's property is the one that's winning.  If not, no need to redo the coercion.  This notification
/// is pretty fast already and thus not worth the work for now.
/// </remarks>
private void SyncProperties(bool forcePrepareCells)
{
    // Coerce all properties on Row that depend on values from the DataGrid
    // Style is ok since it's equivalent to ItemContainerStyle and has already been invalidated.
    DataGridHelper.TransferProperty(this, BackgroundProperty);
    DataGridHelper.TransferProperty(this, HeaderStyleProperty);
    DataGridHelper.TransferProperty(this, HeaderTemplateProperty);
    DataGridHelper.TransferProperty(this, HeaderTemplateSelectorProperty);
    DataGridHelper.TransferProperty(this, ValidationErrorTemplateProperty);
    DataGridHelper.TransferProperty(this, DetailsTemplateProperty);
    DataGridHelper.TransferProperty(this, DetailsTemplateSelectorProperty);
    DataGridHelper.TransferProperty(this, DetailsVisibilityProperty);

    CoerceValue(VisibilityProperty); // Handle NewItemPlaceholder case

    RestoreAttachedItemValue(this, DetailsVisibilityProperty);

    var cellsPresenter = CellsPresenter;
    if (cellsPresenter != null)
    {
        cellsPresenter.SyncProperties(forcePrepareCells);
        RestoreAttachedItemValue(cellsPresenter, DataGridCellsPresenter.HeightProperty);
    }

    if (DetailsPresenter != null)
    {
        DetailsPresenter.SyncProperties();
    }

    if (RowHeader != null)
    {
        RowHeader.SyncProperties();
    }
}

#endregion

#region Alternation

/// <summary>
/// AlternationIndex is set on containers generated for an ItemsControl, when
/// the ItemsControl's AlternationCount property is positive.  The AlternationIndex
/// lies in the range [0, AlternationCount), and adjacent containers always get
/// assigned different values.
/// </summary>
/// <remarks>
/// Exposes ItemsControl.AlternationIndexProperty attached property as a direct property.
/// </remarks>
public int AlternationIndex
{
    get { return (int)GetValue(AlternationIndexProperty); }
}

/// <summary>
/// DependencyProperty for AlternationIndex.
/// </summary>
/// <remarks>
/// Same as ItemsControl.AlternationIndexProperty.
/// </remarks>
public static readonly DependencyProperty AlternationIndexProperty =
    ItemsControl.AlternationIndexProperty.AddOwner(typeof(DataGridRow));

#endregion

#region Selection

/// <summary>
/// Indicates whether this DataGridRow is selected.
/// </summary>
/// <remarks>
/// When IsSelected is set to true, an InvalidOperationException may be
/// thrown if the value of the SelectionUnit property on the parent DataGrid
/// prevents selection of rows.
/// </remarks>
[Bindable(true), Category("Appearance")]
public bool IsSelected
{
    get { return (bool)GetValue(IsSelectedProperty); }
    set { SetValue(IsSelectedProperty, value); }
}

/// <summary>
/// The DependencyProperty for the IsSelected property.
/// </summary>
public static readonly DependencyProperty IsSelectedProperty = Selector.IsSelectedProperty.AddOwner(
    typeof(DataGridRow),
    new FrameworkPropertyMetadata(false, FrameworkPropertyMetadataOptions.BindsTwoWayByDefault | FrameworkPropertyMetadataOptions.Journal, new PropertyChangedCallback(OnIsSelectedChanged)));

private static void OnIsSelectedChanged(object sender, DependencyPropertyChangedEventArgs e)
{
    DataGridRow row = (DataGridRow)sender;
    bool isSelected = (bool)e.NewValue;

    if (isSelected && !row.IsSelectable)
    {
        throw new InvalidOperationException(SR.Get(SRID.DataGridRow_CannotSelectRowWhenCells));
    }

    DataGrid grid = row.DataGridOwner;
    if (grid != null && row.DataContext != null)
    {
        // Let UIAutomation clients know the selection state of this item changed.
        DataGridAutomationPeer gridPeer = UIElementAutomationPeer.FromElement(grid) as DataGridAutomationPeer;
        if (gridPeer != null)
        {
            DataGridItemAutomationPeer rowItemPeer = gridPeer.FindOrCreateItemAutomationPeer(row.DataContext) as DataGridItemAutomationPeer;
            if (rowItemPeer != null)
            {
                rowItemPeer.RaisePropertyChangedEvent(
                    System.Windows.Automation.SelectionItemPatternIdentifiers.IsSelectedProperty,
                    (bool)e.OldValue,
                    isSelected);
            }
        }
    }

    // Update the header's IsRowSelected property
    row.NotifyPropertyChanged(row, e, DataGridNotificationTarget.Rows | DataGridNotificationTarget.RowHeaders);

    // This will raise the appropriate selection event, which will
    // bubble to the DataGrid. The base class Selector code will listen
    // for these events and will update SelectedItems as necessary.
    row.RaiseSelectionChangedEvent(isSelected);

    row.UpdateVisualState();

    // NOTE: a second, identical NotifyPropertyChanged(Rows | RowHeaders) call was
    // removed here — the header was already notified above and the duplicate call
    // only repeated the same property transfer.
}

private void RaiseSelectionChangedEvent(bool isSelected)
{
    if (isSelected)
    {
        OnSelected(new RoutedEventArgs(SelectedEvent, this));
    }
    else
    {
        OnUnselected(new RoutedEventArgs(UnselectedEvent, this));
    }
}

/// <summary>
/// Raised when the item's IsSelected property becomes true.
/// </summary>
public static readonly RoutedEvent SelectedEvent = Selector.SelectedEvent.AddOwner(typeof(DataGridRow));

/// <summary>
/// Raised when the item's IsSelected property becomes true.
/// </summary>
public event RoutedEventHandler Selected
{
    add { AddHandler(SelectedEvent, value); }
    remove { RemoveHandler(SelectedEvent, value); }
}

/// <summary>
/// Called when IsSelected becomes true. Raises the Selected event.
/// </summary>
/// <param name="e">Empty event arguments.</param>
protected virtual void OnSelected(RoutedEventArgs e)
{
    RaiseEvent(e);
}

/// <summary>
/// Raised when the item's IsSelected property becomes false.
/// </summary>
public static readonly RoutedEvent UnselectedEvent = Selector.UnselectedEvent.AddOwner(typeof(DataGridRow));

/// <summary>
/// Raised when the item's IsSelected property becomes false.
/// </summary>
public event RoutedEventHandler Unselected
{
    add { AddHandler(UnselectedEvent, value); }
    remove { RemoveHandler(UnselectedEvent, value); }
}

/// <summary>
/// Called when IsSelected becomes false. Raises the Unselected event.
/// </summary>
/// <param name="e">Empty event arguments.</param>
protected virtual void OnUnselected(RoutedEventArgs e)
{
    RaiseEvent(e);
}

/// <summary>
/// Determines if a row can be selected, based on the DataGrid's SelectionUnit property.
/// </summary>
private bool IsSelectable
{
    get
    {
        DataGrid dataGrid = DataGridOwner;
        if (dataGrid != null)
        {
            DataGridSelectionUnit unit = dataGrid.SelectionUnit;
            return (unit == DataGridSelectionUnit.FullRow) ||
                   (unit == DataGridSelectionUnit.CellOrRowHeader);
        }

        return true;
    }
}

#endregion

#region Editing

/// <summary>
/// Whether the row is in editing mode.
/// </summary>
public bool IsEditing
{
    get { return (bool)GetValue(IsEditingProperty); }
    internal set { SetValue(IsEditingPropertyKey, value); }
}

// Read-only key: only the row (and DataGrid internals via the internal setter)
// may change editing state; changes notify both the row and its header.
private static readonly DependencyPropertyKey IsEditingPropertyKey =
    DependencyProperty.RegisterReadOnly("IsEditing", typeof(bool), typeof(DataGridRow), new FrameworkPropertyMetadata(false, OnNotifyRowAndRowHeaderPropertyChanged));

/// <summary>
/// The DependencyProperty for IsEditing.
/// </summary>
public static readonly DependencyProperty IsEditingProperty = IsEditingPropertyKey.DependencyProperty;

#endregion

#region Automation

protected override System.Windows.Automation.Peers.AutomationPeer OnCreateAutomationPeer()
{
    return new System.Windows.Automation.Peers.DataGridRowAutomationPeer(this);
}

#endregion

#region Column Virtualization

/// <summary>
/// Method which tries to scroll a cell for given index into the scroll view
/// </summary>
/// <param name="index">Index of the cell (in the cells presenter's items) to scroll into view.</param>
internal void ScrollCellIntoView(int index)
{
    DataGridCellsPresenter cellsPresenter = CellsPresenter;
    if (cellsPresenter != null)
    {
        cellsPresenter.ScrollCellIntoView(index);
    }
}

#endregion

#region Layout

/// <summary>
/// Arrange
/// </summary>
protected override Size ArrangeOverride(Size arrangeBounds)
{
    // Let the owning grid recompute the cells panel's horizontal offset after
    // this row is arranged (it batches the work via a queued invalidation).
    DataGrid dataGrid = DataGridOwner;
    if (dataGrid != null)
    {
        dataGrid.QueueInvalidateCellsPanelHorizontalOffset();
    }

    return base.ArrangeOverride(arrangeBounds);
}

#endregion

#region New Item

/// <summary>
/// Indicates whether the row belongs to new item (both placeholder
/// as well as adding item) or not.
/// </summary>
public bool IsNewItem
{
    get { return (bool)GetValue(IsNewItemProperty); }
    internal set { SetValue(IsNewItemPropertyKey, value); }
}

/// <summary>
/// Using a DependencyProperty as the backing store for IsNewItem. This enables animation, styling, binding, etc...
/// </summary>
internal static readonly DependencyPropertyKey IsNewItemPropertyKey =
    DependencyProperty.RegisterReadOnly("IsNewItem", typeof(bool), typeof(DataGridRow), new FrameworkPropertyMetadata(false));

/// <summary>
/// DependencyProperty for IsNewItem property.
/// </summary>
public static readonly DependencyProperty IsNewItemProperty = IsNewItemPropertyKey.DependencyProperty;

#endregion

#region Helpers

/// <summary>
/// Returns the index of this row within the DataGrid's list of item containers.
/// </summary>
/// <remarks>
/// This method performs a linear search.
/// </remarks>
/// <returns>The index, if found, -1 otherwise.</returns>
public int GetIndex()
{
    DataGrid dataGridOwner = DataGridOwner;
    if (dataGridOwner != null)
    {
        return dataGridOwner.ItemContainerGenerator.IndexFromContainer(this);
    }

    return -1;
}

/// <summary>
/// Searches up the visual parent chain from the given element until
/// a DataGridRow element is found.
/// </summary>
/// <param name="element">The descendent of a DataGridRow.</param>
/// <returns>
/// The first ancestor DataGridRow of the element parameter.
/// Returns null if none is found.
/// </returns>
public static DataGridRow GetRowContainingElement(FrameworkElement element)
{
    return DataGridHelper.FindVisualParent<DataGridRow>(element);
}

internal DataGrid DataGridOwner
{
    get { return _owner; }
}

/// <summary>
/// Returns true if the DetailsPresenter is supposed to draw gridlines for the row. Only true
/// if the DetailsPresenter hooked itself up properly to the Row.
/// </summary>
internal bool DetailsPresenterDrawsGridLines
{
    get { return _detailsPresenter != null && _detailsPresenter.Visibility == Visibility.Visible; }
}

/// <summary>
/// Accesses the CellsPresenter and attempts to get the cell at the given index.
/// This is not necessarily the display order.
/// </summary>
internal DataGridCell TryGetCell(int index)
{
    DataGridCellsPresenter cellsPresenter = CellsPresenter;
    if (cellsPresenter != null)
    {
        return cellsPresenter.ItemContainerGenerator.ContainerFromIndex(index) as DataGridCell;
    }

    return null;
}

#endregion

#region Data

// Tracks whether row details have been displayed.
//      true - row details template has been loaded and has rendered at least once
//      false - row details template is either unset, or has never been asked to render
internal bool _detailsLoaded;

private DataGrid _owner;
private DataGridCellsPresenter _cellsPresenter;
private DataGridDetailsPresenter _detailsPresenter;
private DataGridRowHeader _rowHeader;
private ContainerTracking<DataGridRow> _tracker;
// Height of the cells presenter captured when a row resize starts, restored on cancel.
private double _cellsPresenterResizeHeight;

#endregion
}
}
// // ViewContainer.cs // // Author: // Aaron Bockover <abockover@novell.com> // // Copyright (C) 2007 Novell, Inc. // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using System;
using System.Collections.Generic;

using Gtk;
using Mono.Unix;

using Banshee.Widgets;
using Banshee.Gui.Widgets;
using Banshee.Sources.Gui;

using Banshee.Collection;
using Banshee.Gui;
using Banshee.ServiceStack;

namespace Nereid
{
    // Hosts the main source view area: header widgets, search entry, the active
    // source's contents, the context pane, and an optional footer.
    public class ViewContainer : VBox
    {
        private SearchEntry search_entry;
        private Alignment source_actions_align;
        private EventBox source_actions_box;
        private Banshee.ContextPane.ContextPane context_pane;
        private VBox footer;

        // The currently displayed source contents (see the Content property).
        private ISourceContents content;

        public ViewContainer ()
        {
            BuildHeader ();

            Spacing = 6;
            SearchSensitive = false;
        }

        private void BuildHeader ()
        {
            source_actions_align = new Gtk.Alignment (0f, .5f, 1f, 0f) {
                RightPadding = 0,
                LeftPadding = 0,
                NoShowAll = true
            };

            footer = new VBox ();

            source_actions_box = new EventBox () { Visible = true };

            BuildSearchEntry ();

            // Optionally add a help item to the search entry's menu, wired to the
            // global "WikiSearchHelpAction" if the action service provides it.
            InterfaceActionService uia = ServiceManager.Get<InterfaceActionService> ();
            if (uia != null) {
                Gtk.Action action = uia.GlobalActions["WikiSearchHelpAction"];
                if (action != null) {
                    MenuItem item = new SeparatorMenuItem ();
                    item.Show ();
                    search_entry.Menu.Append (item);

                    item = new ImageMenuItem (Stock.Help, null);
                    item.Activated += delegate { action.Activate (); };
                    item.Show ();
                    search_entry.Menu.Append (item);
                }
            }

            source_actions_box.ShowAll ();
            source_actions_align.Add (source_actions_box);
            source_actions_align.Hide ();
            search_entry.Show ();

            // When the context pane expands, it takes over the main packing slot
            // from the content widget (and gives it back when it shrinks).
            context_pane = new Banshee.ContextPane.ContextPane ();
            context_pane.ExpandHandler = b => {
                SetChildPacking (content.Widget, !b, true, 0, PackType.Start);
                SetChildPacking (context_pane, b, b, 0, PackType.End);
            };

            // Top to bottom, their order is reverse of this:
            PackEnd (footer, false, false, 0);
            PackEnd (context_pane, false, false, 0);
            PackEnd (source_actions_align, false, false, 0);
            PackEnd (new ConnectedMessageBar (), false, true, 0);
        }

        // Describes one selectable search-field filter in the search entry menu.
        private struct SearchFilter
        {
            public int Id;
            public string Field;
            public string Title;
        }

        private Dictionary<int, SearchFilter> search_filters = new Dictionary<int, SearchFilter> ();

        private void AddSearchFilter (TrackFilterType id, string field, string title)
        {
            SearchFilter filter = new SearchFilter ();
            filter.Id = (int)id;
            filter.Field = field;
            filter.Title = title;
            search_filters.Add (filter.Id, filter);
        }

        private void BuildSearchEntry ()
        {
            AddSearchFilter (TrackFilterType.None, String.Empty, Catalog.GetString ("Artist, Album, or Title"));
            AddSearchFilter (TrackFilterType.SongName, "title", Catalog.GetString ("Track Title"));
            AddSearchFilter (TrackFilterType.ArtistName, "artist", Catalog.GetString ("Artist Name"));
            AddSearchFilter (TrackFilterType.AlbumArtist, "albumartist", Catalog.GetString ("Album Artist"));
            AddSearchFilter (TrackFilterType.AlbumTitle, "album", Catalog.GetString ("Album Title"));
            AddSearchFilter (TrackFilterType.Composer, "composer", Catalog.GetString ("Composer"));
            AddSearchFilter (TrackFilterType.Genre, "genre", Catalog.GetString ("Genre"));
            AddSearchFilter (TrackFilterType.Year, "year", Catalog.GetString ("Year"));
            AddSearchFilter (TrackFilterType.Comment, "comment", Catalog.GetString ("Comment"));

            search_entry = new SearchEntry ();
            search_entry.SetSizeRequest (260, -1);

            foreach (SearchFilter filter in search_filters.Values) {
                search_entry.AddFilterOption (filter.Id, filter.Title);
                // Separate the catch-all option from the field-specific ones.
                if (filter.Id == (int)TrackFilterType.None) {
                    search_entry.AddFilterSeparator ();
                }
            }

            search_entry.FilterChanged += OnSearchEntryFilterChanged;
            search_entry.ActivateFilter ((int)TrackFilterType.None);

            OnSearchEntryFilterChanged (search_entry, EventArgs.Empty);
        }

        private void OnSearchEntryFilterChanged (object o, EventArgs args)
        {
            /*search_entry.EmptyMessage = String.Format (Catalog.GetString ("Filter on {0}"),
                search_entry.GetLabelForFilterID (search_entry.ActiveFilterID));*/

            // Pre-fill the entry with the chosen field prefix (e.g. "artist:");
            // empty field (the None filter) clears the query.
            string query = search_filters.ContainsKey (search_entry.ActiveFilterID)
                ? search_filters[search_entry.ActiveFilterID].Field
                : String.Empty;

            search_entry.Query = String.IsNullOrEmpty (query) ? String.Empty : query + ":";

            // Put the caret after the prefix so the user can type immediately.
            var editable = search_entry as IEditable;
            if (editable != null) {
                editable.Position = search_entry.Query.Length;
            }
        }

        public void SetHeaderWidget (Widget widget)
        {
            if (widget != null) {
                source_actions_box.Add (widget);
                widget.Show ();
                source_actions_align.Show ();
            }
        }

        public void ClearHeaderWidget ()
        {
            source_actions_align.Hide ();
            if (source_actions_box.Child != null) {
                source_actions_box.Remove (source_actions_box.Child);
            }
        }

        public void SetFooter (Widget contents)
        {
            if (contents != null) {
                footer.PackStart (contents, false, false, 0);
                contents.Show ();
                footer.Show ();
            }
        }

        public void ClearFooter ()
        {
            footer.Hide ();
            foreach (Widget child in footer.Children) {
                footer.Remove (child);
            }
        }

        public Alignment Header {
            get { return source_actions_align; }
        }

        public SearchEntry SearchEntry {
            get { return search_entry; }
        }

        [Obsolete]
        public void SetTitleWidget (Widget widget)
        {
            if (widget != null) {
                Hyena.Log.Warning ("Nereid.SourceContents.TitleWidget is no longer used (from {0})",
                    ServiceManager.SourceManager.ActiveSource.Name);
            }
        }

        public ISourceContents Content {
            get { return content; }
            set {
                if (content == value) {
                    return;
                }

                // Hide the old content widget
                if (content != null && content.Widget != null) {
                    content.Widget.Hide ();
                }

                // Add and show the new one
                if (value != null && value.Widget != null) {
                    PackStart (value.Widget, !context_pane.Large, true, 0);
                    value.Widget.Show ();
                }

                // Remove the old one
                if (content != null && content.Widget != null) {
                    Remove (content.Widget);
                }

                content = value;
            }
        }

        [Obsolete]
        public string Title {
            set {}
        }

        public bool SearchSensitive {
            get { return search_entry.Sensitive; }
            set {
                if (search_entry.Visible != value) {
                    search_entry.Sensitive = value;
                    search_entry.Visible = value;
                }
            }
        }
    }
}
//===============================================================================
// This file is based on the Microsoft Data Access Application Block for .NET
// For more information please go to
// http://msdn.microsoft.com/library/en-us/dnbda/html/daab-rm.asp
//===============================================================================

using System;
using System.Configuration;
using System.Data;
using System.Data.SqlClient;
using System.Collections;

namespace XHD.DBUtility
{
    /// <summary>
    /// The SqlHelper class is intended to encapsulate high performance,
    /// scalable best practices for common uses of SqlClient.
    /// </summary>
    public abstract class SqlHelper
    {
        //Database connection strings
        public static readonly string ConnectionStringLocalTransaction = ConfigurationManager.AppSettings["SQLConnString1"];
        public static readonly string ConnectionStringInventoryDistributedTransaction = ConfigurationManager.AppSettings["SQLConnString2"];
        public static readonly string ConnectionStringOrderDistributedTransaction = ConfigurationManager.AppSettings["SQLConnString3"];
        public static readonly string ConnectionStringProfile = ConfigurationManager.AppSettings["SQLProfileConnString"];

        // Hashtable to store cached parameters
        private static Hashtable parmCache = Hashtable.Synchronized(new Hashtable());

        /// <summary>
        /// Execute a SqlCommand (that returns no resultset) against the database specified in the connection string
        /// using the provided parameters.
        /// </summary>
        /// <remarks>
        /// e.g.:
        ///  int result = ExecuteNonQuery(connString, CommandType.StoredProcedure, "PublishOrders", new SqlParameter("@prodid", 24));
        /// </remarks>
        /// <param name="connectionString">a valid connection string for a SqlConnection</param>
        /// <param name="cmdType">the CommandType (stored procedure, text, etc.)</param>
        /// <param name="cmdText">the stored procedure name or T-SQL command</param>
        /// <param name="commandParameters">an array of SqlParameters used to execute the command</param>
        /// <returns>an int representing the number of rows affected by the command</returns>
        public static int ExecuteNonQuery(string connectionString, CommandType cmdType, string cmdText, params SqlParameter[] commandParameters)
        {
            SqlCommand cmd = new SqlCommand();

            // The connection is disposed here after use; PrepareCommand presumably
            // opens it before execution — TODO confirm (PrepareCommand not visible in this file).
            using (SqlConnection conn = new SqlConnection(connectionString))
            {
                PrepareCommand(cmd, conn, null, cmdType, cmdText, commandParameters);
                int val = cmd.ExecuteNonQuery();
                // Detach parameters so cached SqlParameter instances can be reused.
                cmd.Parameters.Clear();
                return val;
            }
        }

        /// <summary>
        /// Execute a SqlCommand (that returns no resultset) against an existing database connection
        /// using the provided parameters.
/// </summary> /// <remarks> /// e.g.: /// int result = ExecuteNonQuery(connString, CommandType.StoredProcedure, "PublishOrders", new SqlParameter("@prodid", 24)); /// </remarks> /// <param name="conn">an existing database connection</param> /// <param name="commandType">the CommandType (stored procedure, text, etc.)</param> /// <param name="commandText">the stored procedure name or T-SQL command</param> /// <param name="commandParameters">an array of SqlParamters used to execute the command</param> /// <returns>an int representing the number of rows affected by the command</returns> public static int ExecuteNonQuery(SqlConnection connection, CommandType cmdType, string cmdText, params SqlParameter[] commandParameters) { SqlCommand cmd = new SqlCommand(); PrepareCommand(cmd, connection, null, cmdType, cmdText, commandParameters); int val = cmd.ExecuteNonQuery(); cmd.Parameters.Clear(); return val; } /// <summary> /// Execute a SqlCommand (that returns no resultset) using an existing SQL Transaction /// using the provided parameters. 
/// </summary> /// <remarks> /// e.g.: /// int result = ExecuteNonQuery(connString, CommandType.StoredProcedure, "PublishOrders", new SqlParameter("@prodid", 24)); /// </remarks> /// <param name="trans">an existing sql transaction</param> /// <param name="commandType">the CommandType (stored procedure, text, etc.)</param> /// <param name="commandText">the stored procedure name or T-SQL command</param> /// <param name="commandParameters">an array of SqlParamters used to execute the command</param> /// <returns>an int representing the number of rows affected by the command</returns> public static int ExecuteNonQuery(SqlTransaction trans, CommandType cmdType, string cmdText, params SqlParameter[] commandParameters) { SqlCommand cmd = new SqlCommand(); PrepareCommand(cmd, trans.Connection, trans, cmdType, cmdText, commandParameters); int val = cmd.ExecuteNonQuery(); cmd.Parameters.Clear(); return val; } /// <summary> /// Execute a SqlCommand that returns a resultset against the database specified in the connection string /// using the provided parameters. 
/// </summary> /// <remarks> /// e.g.: /// SqlDataReader r = ExecuteReader(connString, CommandType.StoredProcedure, "PublishOrders", new SqlParameter("@prodid", 24)); /// </remarks> /// <param name="connectionString">a valid connection string for a SqlConnection</param> /// <param name="commandType">the CommandType (stored procedure, text, etc.)</param> /// <param name="commandText">the stored procedure name or T-SQL command</param> /// <param name="commandParameters">an array of SqlParamters used to execute the command</param> /// <returns>A SqlDataReader containing the results</returns> public static SqlDataReader ExecuteReader(string connectionString, CommandType cmdType, string cmdText, params SqlParameter[] commandParameters) { SqlCommand cmd = new SqlCommand(); SqlConnection conn = new SqlConnection(connectionString); // we use a try/catch here because if the method throws an exception we want to // close the connection throw code, because no datareader will exist, hence the // commandBehaviour.CloseConnection will not work try { PrepareCommand(cmd, conn, null, cmdType, cmdText, commandParameters); SqlDataReader rdr = cmd.ExecuteReader(CommandBehavior.CloseConnection); cmd.Parameters.Clear(); return rdr; } catch { conn.Close(); throw; } } /// <summary> /// Execute a SqlCommand that returns the first column of the first record against the database specified in the connection string /// using the provided parameters. 
/// </summary> /// <remarks> /// e.g.: /// Object obj = ExecuteScalar(connString, CommandType.StoredProcedure, "PublishOrders", new SqlParameter("@prodid", 24)); /// </remarks> /// <param name="connectionString">a valid connection string for a SqlConnection</param> /// <param name="commandType">the CommandType (stored procedure, text, etc.)</param> /// <param name="commandText">the stored procedure name or T-SQL command</param> /// <param name="commandParameters">an array of SqlParamters used to execute the command</param> /// <returns>An object that should be converted to the expected type using Convert.To{Type}</returns> public static object ExecuteScalar(string connectionString, CommandType cmdType, string cmdText, params SqlParameter[] commandParameters) { SqlCommand cmd = new SqlCommand(); using (SqlConnection connection = new SqlConnection(connectionString)) { PrepareCommand(cmd, connection, null, cmdType, cmdText, commandParameters); object val = cmd.ExecuteScalar(); cmd.Parameters.Clear(); return val; } } /// <summary> /// Execute a SqlCommand that returns the first column of the first record against an existing database connection /// using the provided parameters. 
/// </summary> /// <remarks> /// e.g.: /// Object obj = ExecuteScalar(connString, CommandType.StoredProcedure, "PublishOrders", new SqlParameter("@prodid", 24)); /// </remarks> /// <param name="conn">an existing database connection</param> /// <param name="commandType">the CommandType (stored procedure, text, etc.)</param> /// <param name="commandText">the stored procedure name or T-SQL command</param> /// <param name="commandParameters">an array of SqlParamters used to execute the command</param> /// <returns>An object that should be converted to the expected type using Convert.To{Type}</returns> public static object ExecuteScalar(SqlConnection connection, CommandType cmdType, string cmdText, params SqlParameter[] commandParameters) { SqlCommand cmd = new SqlCommand(); PrepareCommand(cmd, connection, null, cmdType, cmdText, commandParameters); object val = cmd.ExecuteScalar(); cmd.Parameters.Clear(); return val; } /// <summary> /// add parameter array to the cache /// </summary> /// <param name="cacheKey">Key to the parameter cache</param> /// <param name="cmdParms">an array of SqlParamters to be cached</param> public static void CacheParameters(string cacheKey, params SqlParameter[] commandParameters) { parmCache[cacheKey] = commandParameters; } /// <summary> /// Retrieve cached parameters /// </summary> /// <param name="cacheKey">key used to lookup parameters</param> /// <returns>Cached SqlParamters array</returns> public static SqlParameter[] GetCachedParameters(string cacheKey) { SqlParameter[] cachedParms = (SqlParameter[])parmCache[cacheKey]; if (cachedParms == null) return null; SqlParameter[] clonedParms = new SqlParameter[cachedParms.Length]; for (int i = 0, j = cachedParms.Length; i < j; i++) clonedParms[i] = (SqlParameter)((ICloneable)cachedParms[i]).Clone(); return clonedParms; } /// <summary> /// Prepare a command for execution /// </summary> /// <param name="cmd">SqlCommand object</param> /// <param name="conn">SqlConnection object</param> /// <param 
name="trans">SqlTransaction object</param> /// <param name="cmdType">Cmd type e.g. stored procedure or text</param> /// <param name="cmdText">Command text, e.g. Select * from Products</param> /// <param name="cmdParms">SqlParameters to use in the command</param> private static void PrepareCommand(SqlCommand cmd, SqlConnection conn, SqlTransaction trans, CommandType cmdType, string cmdText, SqlParameter[] cmdParms) { if (conn.State != ConnectionState.Open) conn.Open(); cmd.Connection = conn; cmd.CommandText = cmdText; if (trans != null) cmd.Transaction = trans; cmd.CommandType = cmdType; if (cmdParms != null) { foreach (SqlParameter parm in cmdParms) cmd.Parameters.Add(parm); } } } }
using System;
using NUnit.Framework;
using RefactoringEssentials.CSharp.CodeRefactorings;

namespace RefactoringEssentials.Tests.CSharp.CodeRefactorings
{
    /// <summary>
    /// Tests for the refactoring that splits a local variable declaration-with-initializer
    /// ("int x = 5;") into a separate declaration ("int x;") and assignment ("x = 5;").
    /// The '$' marker inside the input strings denotes the caret position at which the
    /// refactoring is invoked (convention of the test base class).
    /// </summary>
    [TestFixture]
    public class SplitLocalVariableDeclarationAndAssignmentTests : CSharpCodeRefactoringTestBase
    {
        // Simple case: a typed local with an arithmetic initializer is split into two statements.
        [Test]
        public void TestSimpleExpression()
        {
            string result = RunContextAction(
                new SplitLocalVariableDeclarationAndAssignmentCodeRefactoringProvider(),
                "class TestClass" + Environment.NewLine +
                "{" + Environment.NewLine +
                " void Test ()" + Environment.NewLine +
                " {" + Environment.NewLine +
                " int $myInt = 5 + 3 * (2 - 10);" + Environment.NewLine +
                " }" + Environment.NewLine +
                "}"
            );
            Assert.AreEqual(
                "class TestClass" + Environment.NewLine +
                "{" + Environment.NewLine +
                " void Test ()" + Environment.NewLine +
                " {" + Environment.NewLine +
                " int myInt;" + Environment.NewLine +
                " myInt = 5 + 3 * (2 - 10);" + Environment.NewLine +
                " }" + Environment.NewLine +
                "}", result);
        }

        // A leading comment on the declaration must stay attached to the declaration line.
        [Test]
        public void TestSimpleExpressionWithComment()
        {
            string result = RunContextAction(
                new SplitLocalVariableDeclarationAndAssignmentCodeRefactoringProvider(),
                "class TestClass" + Environment.NewLine +
                "{" + Environment.NewLine +
                " void Test ()" + Environment.NewLine +
                " {" + Environment.NewLine +
                " // Some comment" + Environment.NewLine +
                " int $myInt = 5 + 3 * (2 - 10);" + Environment.NewLine +
                " }" + Environment.NewLine +
                "}"
            );
            Assert.AreEqual(
                "class TestClass" + Environment.NewLine +
                "{" + Environment.NewLine +
                " void Test ()" + Environment.NewLine +
                " {" + Environment.NewLine +
                " // Some comment" + Environment.NewLine +
                " int myInt;" + Environment.NewLine +
                " myInt = 5 + 3 * (2 - 10);" + Environment.NewLine +
                " }" + Environment.NewLine +
                "}", result);
        }

        // 'var' cannot survive the split (no initializer on the declaration any more),
        // so the refactoring must substitute the inferred type (TestClass for 'this').
        [Test]
        public void TestVarType()
        {
            string result = RunContextAction(
                new SplitLocalVariableDeclarationAndAssignmentCodeRefactoringProvider(),
                "class TestClass" + Environment.NewLine +
                "{" + Environment.NewLine +
                " void Test ()" + Environment.NewLine +
                " {" + Environment.NewLine +
                " var $aVar = this;" + Environment.NewLine +
                " }" + Environment.NewLine +
                "}"
            );
            Assert.AreEqual(
                "class TestClass" + Environment.NewLine +
                "{" + Environment.NewLine +
                " void Test ()" + Environment.NewLine +
                " {" + Environment.NewLine +
                " TestClass aVar;" + Environment.NewLine +
                " aVar = this;" + Environment.NewLine +
                " }" + Environment.NewLine +
                "}", result);
        }

        // A declaration in a for-initializer moves out in front of the loop;
        // the assignment stays inside the for header.
        [Test]
        public void TestForStatement()
        {
            string result = RunContextAction(
                new SplitLocalVariableDeclarationAndAssignmentCodeRefactoringProvider(),
                "class TestClass" + Environment.NewLine +
                "{" + Environment.NewLine +
                " void Test ()" + Environment.NewLine +
                " {" + Environment.NewLine +
                " for (int $i = 1; i < 10; i++) {}" + Environment.NewLine +
                " }" + Environment.NewLine +
                "}"
            );
            string expected = @"class TestClass { void Test () { int i; for (i = 1; i < 10; i++) {} } }";
            // Line endings are normalized before comparing, so only content matters here.
            Assert.AreEqual(HomogenizeEol(expected), HomogenizeEol(result));
        }

        // The action must also be offered when the caret sits on the '=' token.
        [Test]
        public void TestPopupAtAssign()
        {
            Test<SplitLocalVariableDeclarationAndAssignmentCodeRefactoringProvider>(@"class Test { public static void Main (string[] args) { var foo $= 5; } }", @"class Test { public static void Main (string[] args) { int foo; foo = 5; } }");
        }

        // ... and when the caret sits at the start of the initializer expression.
        [Test]
        public void TestPopupAtBeginningOfExpression()
        {
            Test<SplitLocalVariableDeclarationAndAssignmentCodeRefactoringProvider>(@"class Test { public static void Main (string[] args) { var foo = $5; } }", @"class Test { public static void Main (string[] args) { int foo; foo = 5; } }");
        }

        // Only the targeted declarator in a multi-declarator statement is split;
        // the other declarators (a, b, c) remain in the declaration.
        [Test]
        public void TestMultipleInitializers()
        {
            Test<SplitLocalVariableDeclarationAndAssignmentCodeRefactoringProvider>(@"class Test { public static void Main (string[] args) { int a, b, $foo = 5 + 12, c; Console.WriteLine(foo); } }", @"class Test { public static void Main (string[] args) { int a, b, foo, c; foo = 5 + 12; Console.WriteLine(foo); } }");
        }

        // Comment-preservation variant exercised through the Test<> helper.
        [Test]
        public void TestVarDeclarationWithComment()
        {
            Test<SplitLocalVariableDeclarationAndAssignmentCodeRefactoringProvider>(@"class Test { public void T() { // Some comment int $i = 5; } }", @"class Test { public void T() { // Some comment int i; i = 5; } }");
        }

        // Comment-preservation for the for-initializer case.
        [Test]
        public void TestForStatementWithComment()
        {
            Test<SplitLocalVariableDeclarationAndAssignmentCodeRefactoringProvider>(@"class Test { public void T() { // Some comment for (int $i = 1; i < 10; i++) {} } }", @"class Test { public void T() { // Some comment int i; for (i = 1; i < 10; i++) {} } }");
        }

        // Negative case: the caret inside the initializer expression (not on the
        // declarator or '=') must not offer the refactoring.
        [Test]
        public void TestHideInExpression()
        {
            TestWrongContext<SplitLocalVariableDeclarationAndAssignmentCodeRefactoringProvider>(@"class Test { public static void Main (string[] args) { var foo = 5 $+ 5; } }");
        }

        // Negative case: 'const' locals cannot be split (a const requires an initializer).
        [Test]
        public void TestLocalConstants()
        {
            TestWrongContext<SplitLocalVariableDeclarationAndAssignmentCodeRefactoringProvider>(@"class Test { public static void Main (string[] args) { const int foo $= 5; } }");
        }
    }
}
// Copyright (c) Brock Allen & Dominick Baier. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.

using IdentityModel;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Security.Claims;
using System.Security.Principal;

namespace IdentityServer4.Extensions
{
    /// <summary>
    /// Extension methods for <see cref="System.Security.Principal.IPrincipal"/> and <see cref="System.Security.Principal.IIdentity"/> .
    /// </summary>
    public static class PrincipalExtensions
    {
        /// <summary>
        /// Gets the authentication time (UTC), derived from the auth_time claim.
        /// </summary>
        /// <param name="principal">The principal.</param>
        /// <returns>The UTC authentication time.</returns>
        [DebuggerStepThrough]
        public static DateTime GetAuthenticationTime(this IPrincipal principal)
        {
            return DateTimeOffset.FromUnixTimeSeconds(principal.GetAuthenticationTimeEpoch()).UtcDateTime;
        }

        /// <summary>
        /// Gets the authentication epoch time.
        /// </summary>
        /// <param name="principal">The principal.</param>
        /// <returns>The auth_time claim value as Unix epoch seconds.</returns>
        [DebuggerStepThrough]
        public static long GetAuthenticationTimeEpoch(this IPrincipal principal)
        {
            return principal.Identity.GetAuthenticationTimeEpoch();
        }

        /// <summary>
        /// Gets the authentication epoch time.
        /// </summary>
        /// <param name="identity">The identity.</param>
        /// <returns>The auth_time claim value as Unix epoch seconds.</returns>
        /// <exception cref="System.InvalidOperationException">auth_time is missing.</exception>
        [DebuggerStepThrough]
        public static long GetAuthenticationTimeEpoch(this IIdentity identity)
        {
            var id = GetClaimsIdentity(identity);
            var claim = id.FindFirst(JwtClaimTypes.AuthenticationTime);

            if (claim == null) throw new InvalidOperationException("auth_time is missing.");

            return long.Parse(claim.Value);
        }

        /// <summary>
        /// Gets the subject identifier.
        /// </summary>
        /// <param name="principal">The principal.</param>
        /// <returns>The sub claim value.</returns>
        [DebuggerStepThrough]
        public static string GetSubjectId(this IPrincipal principal)
        {
            return principal.Identity.GetSubjectId();
        }

        /// <summary>
        /// Gets the subject identifier.
        /// </summary>
        /// <param name="identity">The identity.</param>
        /// <returns>The sub claim value.</returns>
        /// <exception cref="System.InvalidOperationException">sub claim is missing</exception>
        [DebuggerStepThrough]
        public static string GetSubjectId(this IIdentity identity)
        {
            var id = GetClaimsIdentity(identity);
            var claim = id.FindFirst(JwtClaimTypes.Subject);

            if (claim == null) throw new InvalidOperationException("sub claim is missing");

            return claim.Value;
        }

        /// <summary>
        /// Gets the name.
        /// </summary>
        /// <param name="principal">The principal.</param>
        /// <returns>The name claim value.</returns>
        [DebuggerStepThrough]
        [Obsolete("This method will be removed in a future version. Use GetDisplayName instead.")]
        public static string GetName(this IPrincipal principal)
        {
            return principal.Identity.GetName();
        }

        /// <summary>
        /// Gets a display name: the identity's Name if present, otherwise the sub claim,
        /// otherwise the empty string (never throws for a missing claim).
        /// </summary>
        /// <param name="principal">The principal.</param>
        /// <returns>A best-effort display name; may be empty.</returns>
        [DebuggerStepThrough]
        public static string GetDisplayName(this ClaimsPrincipal principal)
        {
            var name = principal.Identity.Name;
            // IsPresent is a project string extension; presumably "not null or whitespace" — TODO confirm.
            if (name.IsPresent()) return name;

            var sub = principal.FindFirst(JwtClaimTypes.Subject);
            if (sub != null) return sub.Value;

            return string.Empty;
        }

        /// <summary>
        /// Gets the name.
        /// </summary>
        /// <param name="identity">The identity.</param>
        /// <returns>The name claim value.</returns>
        /// <exception cref="System.InvalidOperationException">name claim is missing</exception>
        [DebuggerStepThrough]
        [Obsolete("This method will be removed in a future version. Use GetDisplayName instead.")]
        public static string GetName(this IIdentity identity)
        {
            var id = GetClaimsIdentity(identity);
            var claim = id.FindFirst(JwtClaimTypes.Name);

            if (claim == null) throw new InvalidOperationException("name claim is missing");

            return claim.Value;
        }

        /// <summary>
        /// Gets the authentication method.
        /// </summary>
        /// <param name="principal">The principal.</param>
        /// <returns>The amr claim value.</returns>
        [DebuggerStepThrough]
        public static string GetAuthenticationMethod(this IPrincipal principal)
        {
            return principal.Identity.GetAuthenticationMethod();
        }

        /// <summary>
        /// Gets the authentication method claims.
        /// </summary>
        /// <param name="principal">The principal.</param>
        /// <returns>All amr claims (possibly empty).</returns>
        [DebuggerStepThrough]
        public static IEnumerable<Claim> GetAuthenticationMethods(this IPrincipal principal)
        {
            return principal.Identity.GetAuthenticationMethods();
        }

        /// <summary>
        /// Gets the authentication method.
        /// </summary>
        /// <param name="identity">The identity.</param>
        /// <returns>The amr claim value.</returns>
        /// <exception cref="System.InvalidOperationException">amr claim is missing</exception>
        [DebuggerStepThrough]
        public static string GetAuthenticationMethod(this IIdentity identity)
        {
            var id = GetClaimsIdentity(identity);
            var claim = id.FindFirst(JwtClaimTypes.AuthenticationMethod);

            if (claim == null) throw new InvalidOperationException("amr claim is missing");

            return claim.Value;
        }

        /// <summary>
        /// Gets the authentication method claims.
        /// </summary>
        /// <param name="identity">The identity.</param>
        /// <returns>All amr claims (possibly empty).</returns>
        [DebuggerStepThrough]
        public static IEnumerable<Claim> GetAuthenticationMethods(this IIdentity identity)
        {
            var id = GetClaimsIdentity(identity);
            return id.FindAll(JwtClaimTypes.AuthenticationMethod);
        }

        /// <summary>
        /// Gets the identity provider.
        /// </summary>
        /// <param name="principal">The principal.</param>
        /// <returns>The idp claim value.</returns>
        [DebuggerStepThrough]
        public static string GetIdentityProvider(this IPrincipal principal)
        {
            return principal.Identity.GetIdentityProvider();
        }

        /// <summary>
        /// Gets the identity provider.
        /// </summary>
        /// <param name="identity">The identity.</param>
        /// <returns>The idp claim value.</returns>
        /// <exception cref="System.InvalidOperationException">idp claim is missing</exception>
        [DebuggerStepThrough]
        public static string GetIdentityProvider(this IIdentity identity)
        {
            var id = GetClaimsIdentity(identity);
            var claim = id.FindFirst(JwtClaimTypes.IdentityProvider);

            if (claim == null) throw new InvalidOperationException("idp claim is missing");

            return claim.Value;
        }

        /// <summary>
        /// Determines whether this instance is authenticated.
        /// </summary>
        /// <param name="principal">The principal.</param>
        /// <returns>
        ///   <c>true</c> if the specified principal is authenticated; otherwise, <c>false</c>.
        /// </returns>
        [DebuggerStepThrough]
        public static bool IsAuthenticated(this IPrincipal principal)
        {
            return principal != null && principal.Identity != null && principal.Identity.IsAuthenticated;
        }

        /// <summary>
        /// Requires the identity to be a <see cref="ClaimsIdentity"/>. The previous code used an
        /// unchecked "as" cast per method and would fail with a NullReferenceException for
        /// non-claims identities; this centralizes the cast and throws a clear error instead.
        /// </summary>
        /// <param name="identity">The identity.</param>
        /// <returns>The identity as a ClaimsIdentity.</returns>
        /// <exception cref="System.InvalidOperationException">identity is not a ClaimsIdentity</exception>
        private static ClaimsIdentity GetClaimsIdentity(IIdentity identity)
        {
            var id = identity as ClaimsIdentity;
            if (id == null) throw new InvalidOperationException("identity is not a ClaimsIdentity");
            return id;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Buffers; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.Contracts; using System.Security; using System.Text; using System.Threading; using System.Threading.Tasks; namespace System.IO { // Class for creating FileStream objects, and some basic file management // routines such as Delete, etc. public static class File { private static Encoding s_UTF8NoBOM; internal const int DefaultBufferSize = 4096; public static StreamReader OpenText(String path) { if (path == null) throw new ArgumentNullException(nameof(path)); Contract.EndContractBlock(); return new StreamReader(path); } public static StreamWriter CreateText(String path) { if (path == null) throw new ArgumentNullException(nameof(path)); Contract.EndContractBlock(); return new StreamWriter(path, append: false); } public static StreamWriter AppendText(String path) { if (path == null) throw new ArgumentNullException(nameof(path)); Contract.EndContractBlock(); return new StreamWriter(path, append: true); } // Copies an existing file to a new file. An exception is raised if the // destination file already exists. Use the // Copy(String, String, boolean) method to allow // overwriting an existing file. // // The caller must have certain FileIOPermissions. The caller must have // Read permission to sourceFileName and Create // and Write permissions to destFileName. 
// public static void Copy(String sourceFileName, String destFileName) { if (sourceFileName == null) throw new ArgumentNullException(nameof(sourceFileName), SR.ArgumentNull_FileName); if (destFileName == null) throw new ArgumentNullException(nameof(destFileName), SR.ArgumentNull_FileName); if (sourceFileName.Length == 0) throw new ArgumentException(SR.Argument_EmptyFileName, nameof(sourceFileName)); if (destFileName.Length == 0) throw new ArgumentException(SR.Argument_EmptyFileName, nameof(destFileName)); Contract.EndContractBlock(); InternalCopy(sourceFileName, destFileName, false); } // Copies an existing file to a new file. If overwrite is // false, then an IOException is thrown if the destination file // already exists. If overwrite is true, the file is // overwritten. // // The caller must have certain FileIOPermissions. The caller must have // Read permission to sourceFileName // and Write permissions to destFileName. // public static void Copy(String sourceFileName, String destFileName, bool overwrite) { if (sourceFileName == null) throw new ArgumentNullException(nameof(sourceFileName), SR.ArgumentNull_FileName); if (destFileName == null) throw new ArgumentNullException(nameof(destFileName), SR.ArgumentNull_FileName); if (sourceFileName.Length == 0) throw new ArgumentException(SR.Argument_EmptyFileName, nameof(sourceFileName)); if (destFileName.Length == 0) throw new ArgumentException(SR.Argument_EmptyFileName, nameof(destFileName)); Contract.EndContractBlock(); InternalCopy(sourceFileName, destFileName, overwrite); } /// <devdoc> /// Note: This returns the fully qualified name of the destination file. 
/// </devdoc> [System.Security.SecuritySafeCritical] internal static String InternalCopy(String sourceFileName, String destFileName, bool overwrite) { Debug.Assert(sourceFileName != null); Debug.Assert(destFileName != null); Debug.Assert(sourceFileName.Length > 0); Debug.Assert(destFileName.Length > 0); String fullSourceFileName = Path.GetFullPath(sourceFileName); String fullDestFileName = Path.GetFullPath(destFileName); FileSystem.Current.CopyFile(fullSourceFileName, fullDestFileName, overwrite); return fullDestFileName; } // Creates a file in a particular path. If the file exists, it is replaced. // The file is opened with ReadWrite access and cannot be opened by another // application until it has been closed. An IOException is thrown if the // directory specified doesn't exist. // // Your application must have Create, Read, and Write permissions to // the file. // public static FileStream Create(string path) { return Create(path, DefaultBufferSize); } // Creates a file in a particular path. If the file exists, it is replaced. // The file is opened with ReadWrite access and cannot be opened by another // application until it has been closed. An IOException is thrown if the // directory specified doesn't exist. // // Your application must have Create, Read, and Write permissions to // the file. // public static FileStream Create(String path, int bufferSize) { return new FileStream(path, FileMode.Create, FileAccess.ReadWrite, FileShare.None, bufferSize); } public static FileStream Create(String path, int bufferSize, FileOptions options) { return new FileStream(path, FileMode.Create, FileAccess.ReadWrite, FileShare.None, bufferSize, options); } // Deletes a file. The file specified by the designated path is deleted. // If the file does not exist, Delete succeeds without throwing // an exception. // // On NT, Delete will fail for a file that is open for normal I/O // or a file that is memory mapped. 
// // Your application must have Delete permission to the target file. // [System.Security.SecuritySafeCritical] public static void Delete(String path) { if (path == null) throw new ArgumentNullException(nameof(path)); Contract.EndContractBlock(); String fullPath = Path.GetFullPath(path); FileSystem.Current.DeleteFile(fullPath); } // Tests if a file exists. The result is true if the file // given by the specified path exists; otherwise, the result is // false. Note that if path describes a directory, // Exists will return true. // // Your application must have Read permission for the target directory. // [System.Security.SecuritySafeCritical] public static bool Exists(String path) { try { if (path == null) return false; if (path.Length == 0) return false; path = Path.GetFullPath(path); // After normalizing, check whether path ends in directory separator. // Otherwise, FillAttributeInfo removes it and we may return a false positive. // GetFullPath should never return null Debug.Assert(path != null, "File.Exists: GetFullPath returned null"); if (path.Length > 0 && PathInternal.IsDirectorySeparator(path[path.Length - 1])) { return false; } return InternalExists(path); } catch (ArgumentException) { } catch (NotSupportedException) { } // Security can throw this on ":" catch (SecurityException) { } catch (IOException) { } catch (UnauthorizedAccessException) { } return false; } [System.Security.SecurityCritical] // auto-generated internal static bool InternalExists(String path) { return FileSystem.Current.FileExists(path); } public static FileStream Open(String path, FileMode mode) { return Open(path, mode, (mode == FileMode.Append ? 
FileAccess.Write : FileAccess.ReadWrite), FileShare.None); } public static FileStream Open(String path, FileMode mode, FileAccess access) { return Open(path, mode, access, FileShare.None); } public static FileStream Open(String path, FileMode mode, FileAccess access, FileShare share) { return new FileStream(path, mode, access, share); } internal static DateTimeOffset GetUtcDateTimeOffset(DateTime dateTime) { // File and Directory UTC APIs treat a DateTimeKind.Unspecified as UTC whereas // ToUniversalTime treats this as local. if (dateTime.Kind == DateTimeKind.Unspecified) { return DateTime.SpecifyKind(dateTime, DateTimeKind.Utc); } return dateTime.ToUniversalTime(); } public static void SetCreationTime(String path, DateTime creationTime) { String fullPath = Path.GetFullPath(path); FileSystem.Current.SetCreationTime(fullPath, creationTime, asDirectory: false); } public static void SetCreationTimeUtc(String path, DateTime creationTimeUtc) { String fullPath = Path.GetFullPath(path); FileSystem.Current.SetCreationTime(fullPath, GetUtcDateTimeOffset(creationTimeUtc), asDirectory: false); } [System.Security.SecuritySafeCritical] public static DateTime GetCreationTime(String path) { String fullPath = Path.GetFullPath(path); return FileSystem.Current.GetCreationTime(fullPath).LocalDateTime; } [System.Security.SecuritySafeCritical] // auto-generated public static DateTime GetCreationTimeUtc(String path) { String fullPath = Path.GetFullPath(path); return FileSystem.Current.GetCreationTime(fullPath).UtcDateTime; } public static void SetLastAccessTime(String path, DateTime lastAccessTime) { String fullPath = Path.GetFullPath(path); FileSystem.Current.SetLastAccessTime(fullPath, lastAccessTime, asDirectory: false); } public static void SetLastAccessTimeUtc(String path, DateTime lastAccessTimeUtc) { String fullPath = Path.GetFullPath(path); FileSystem.Current.SetLastAccessTime(fullPath, GetUtcDateTimeOffset(lastAccessTimeUtc), asDirectory: false); } 
[System.Security.SecuritySafeCritical] public static DateTime GetLastAccessTime(String path) { String fullPath = Path.GetFullPath(path); return FileSystem.Current.GetLastAccessTime(fullPath).LocalDateTime; } [System.Security.SecuritySafeCritical] // auto-generated public static DateTime GetLastAccessTimeUtc(String path) { String fullPath = Path.GetFullPath(path); return FileSystem.Current.GetLastAccessTime(fullPath).UtcDateTime; } public static void SetLastWriteTime(String path, DateTime lastWriteTime) { String fullPath = Path.GetFullPath(path); FileSystem.Current.SetLastWriteTime(fullPath, lastWriteTime, asDirectory: false); } public static void SetLastWriteTimeUtc(String path, DateTime lastWriteTimeUtc) { String fullPath = Path.GetFullPath(path); FileSystem.Current.SetLastWriteTime(fullPath, GetUtcDateTimeOffset(lastWriteTimeUtc), asDirectory: false); } [System.Security.SecuritySafeCritical] public static DateTime GetLastWriteTime(String path) { String fullPath = Path.GetFullPath(path); return FileSystem.Current.GetLastWriteTime(fullPath).LocalDateTime; } [System.Security.SecuritySafeCritical] // auto-generated public static DateTime GetLastWriteTimeUtc(String path) { String fullPath = Path.GetFullPath(path); return FileSystem.Current.GetLastWriteTime(fullPath).UtcDateTime; } [System.Security.SecuritySafeCritical] public static FileAttributes GetAttributes(String path) { String fullPath = Path.GetFullPath(path); return FileSystem.Current.GetAttributes(fullPath); } [System.Security.SecurityCritical] public static void SetAttributes(String path, FileAttributes fileAttributes) { String fullPath = Path.GetFullPath(path); FileSystem.Current.SetAttributes(fullPath, fileAttributes); } [System.Security.SecuritySafeCritical] public static FileStream OpenRead(String path) { return new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); } public static FileStream OpenWrite(String path) { return new FileStream(path, FileMode.OpenOrCreate, FileAccess.Write, 
FileShare.None); } [System.Security.SecuritySafeCritical] // auto-generated public static String ReadAllText(String path) { if (path == null) throw new ArgumentNullException(nameof(path)); if (path.Length == 0) throw new ArgumentException(SR.Argument_EmptyPath, nameof(path)); Contract.EndContractBlock(); return InternalReadAllText(path, Encoding.UTF8); } [System.Security.SecuritySafeCritical] // auto-generated public static String ReadAllText(String path, Encoding encoding) { if (path == null) throw new ArgumentNullException(nameof(path)); if (encoding == null) throw new ArgumentNullException(nameof(encoding)); if (path.Length == 0) throw new ArgumentException(SR.Argument_EmptyPath, nameof(path)); Contract.EndContractBlock(); return InternalReadAllText(path, encoding); } [System.Security.SecurityCritical] private static String InternalReadAllText(String path, Encoding encoding) { Debug.Assert(path != null); Debug.Assert(encoding != null); Debug.Assert(path.Length > 0); using (StreamReader sr = new StreamReader(path, encoding, detectEncodingFromByteOrderMarks: true)) return sr.ReadToEnd(); } [System.Security.SecuritySafeCritical] // auto-generated public static void WriteAllText(String path, String contents) { if (path == null) throw new ArgumentNullException(nameof(path)); if (path.Length == 0) throw new ArgumentException(SR.Argument_EmptyPath, nameof(path)); Contract.EndContractBlock(); using (StreamWriter sw = new StreamWriter(path)) { sw.Write(contents); } } [System.Security.SecuritySafeCritical] // auto-generated public static void WriteAllText(String path, String contents, Encoding encoding) { if (path == null) throw new ArgumentNullException(nameof(path)); if (encoding == null) throw new ArgumentNullException(nameof(encoding)); if (path.Length == 0) throw new ArgumentException(SR.Argument_EmptyPath, nameof(path)); Contract.EndContractBlock(); using (StreamWriter sw = new StreamWriter(path, false, encoding)) { sw.Write(contents); } } 
/// <summary>Opens a binary file, reads the entire contents into a byte array, then closes the file.</summary>
[System.Security.SecuritySafeCritical] // auto-generated
public static byte[] ReadAllBytes(String path)
{
    return InternalReadAllBytes(path);
}

[System.Security.SecurityCritical]
private static byte[] InternalReadAllBytes(String path)
{
    // bufferSize == 1 used to avoid unnecessary buffer in FileStream
    using (FileStream fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read, bufferSize: 1))
    {
        // Files larger than 2GB cannot be represented by a single byte[].
        long fileLength = fs.Length;
        if (fileLength > Int32.MaxValue)
            throw new IOException(SR.IO_FileTooLong2GB);

        // Loop because Read may return fewer bytes than requested; n == 0 means the
        // file shrank underneath us (premature EOF).
        int index = 0;
        int count = (int)fileLength;
        byte[] bytes = new byte[count];
        while (count > 0)
        {
            int n = fs.Read(bytes, index, count);
            if (n == 0)
                throw Error.GetEndOfFile();
            index += n;
            count -= n;
        }
        return bytes;
    }
}

/// <summary>Creates/overwrites the file and writes the given bytes to it.</summary>
[System.Security.SecuritySafeCritical] // auto-generated
public static void WriteAllBytes(String path, byte[] bytes)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path), SR.ArgumentNull_Path);
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    if (bytes == null)
        throw new ArgumentNullException(nameof(bytes));
    Contract.EndContractBlock();

    InternalWriteAllBytes(path, bytes);
}

[System.Security.SecurityCritical]
private static void InternalWriteAllBytes(String path, byte[] bytes)
{
    Debug.Assert(path != null);
    Debug.Assert(path.Length != 0);
    Debug.Assert(bytes != null);

    using (FileStream fs = new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.Read))
    {
        fs.Write(bytes, 0, bytes.Length);
    }
}

/// <summary>Reads all lines of the file using UTF-8.</summary>
public static String[] ReadAllLines(String path)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    Contract.EndContractBlock();

    return InternalReadAllLines(path, Encoding.UTF8);
}

/// <summary>Reads all lines of the file using the supplied encoding.</summary>
public static String[] ReadAllLines(String path, Encoding encoding)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    Contract.EndContractBlock();

    return InternalReadAllLines(path, encoding);
}

// Eagerly materializes every line (contrast with ReadLines, which is lazy).
private static String[] InternalReadAllLines(String path, Encoding encoding)
{
    Debug.Assert(path != null);
    Debug.Assert(encoding != null);
    Debug.Assert(path.Length != 0);

    String line;
    List<String> lines = new List<String>();

    using (StreamReader sr = new StreamReader(path, encoding))
        while ((line = sr.ReadLine()) != null)
            lines.Add(line);

    return lines.ToArray();
}

/// <summary>Lazily enumerates the lines of the file using UTF-8. The file is read as the
/// enumerable is iterated, not when this method is called.</summary>
public static IEnumerable<String> ReadLines(String path)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    Contract.EndContractBlock();

    return ReadLinesIterator.CreateIterator(path, Encoding.UTF8);
}

/// <summary>Lazily enumerates the lines of the file using the supplied encoding.</summary>
public static IEnumerable<String> ReadLines(String path, Encoding encoding)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    Contract.EndContractBlock();

    return ReadLinesIterator.CreateIterator(path, encoding);
}

/// <summary>Creates/overwrites the file and writes one line per array element.</summary>
public static void WriteAllLines(String path, String[] contents)
{
    WriteAllLines(path, (IEnumerable<String>)contents);
}

/// <summary>Creates/overwrites the file and writes one line per sequence element.</summary>
public static void WriteAllLines(String path, IEnumerable<String> contents)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (contents == null)
        throw new ArgumentNullException(nameof(contents));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    Contract.EndContractBlock();

    InternalWriteAllLines(new StreamWriter(path), contents);
}

/// <summary>Creates/overwrites the file and writes one line per array element using the supplied encoding.</summary>
public static void WriteAllLines(String path, String[] contents, Encoding encoding)
{
    WriteAllLines(path, (IEnumerable<String>)contents, encoding);
}

/// <summary>Creates/overwrites the file and writes one line per sequence element using the supplied encoding.</summary>
public static void WriteAllLines(String path, IEnumerable<String> contents, Encoding encoding)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (contents == null)
        throw new ArgumentNullException(nameof(contents));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    Contract.EndContractBlock();

    InternalWriteAllLines(new StreamWriter(path, false, encoding), contents);
}

// Takes ownership of (and disposes) the writer; shared by the WriteAllLines and
// AppendAllLines overloads.
private static void InternalWriteAllLines(TextWriter writer, IEnumerable<String> contents)
{
    Debug.Assert(writer != null);
    Debug.Assert(contents != null);

    using (writer)
    {
        foreach (String line in contents)
        {
            writer.WriteLine(line);
        }
    }
}

/// <summary>Appends <paramref name="contents"/> to the file (created if absent) using StreamWriter's default encoding.</summary>
public static void AppendAllText(String path, String contents)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    Contract.EndContractBlock();

    using (StreamWriter sw = new StreamWriter(path, append: true))
    {
        sw.Write(contents);
    }
}

/// <summary>Appends <paramref name="contents"/> to the file (created if absent) using the supplied encoding.</summary>
public static void AppendAllText(String path, String contents, Encoding encoding)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    Contract.EndContractBlock();

    using (StreamWriter sw = new StreamWriter(path, true, encoding))
    {
        sw.Write(contents);
    }
}

/// <summary>Appends one line per sequence element to the file (created if absent).</summary>
public static void AppendAllLines(String path, IEnumerable<String> contents)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (contents == null)
        throw new ArgumentNullException(nameof(contents));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    Contract.EndContractBlock();

    InternalWriteAllLines(new StreamWriter(path, append: true), contents);
}

/// <summary>Appends one line per sequence element to the file (created if absent) using the supplied encoding.</summary>
public static void AppendAllLines(String path, IEnumerable<String> contents, Encoding encoding)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (contents == null)
        throw new ArgumentNullException(nameof(contents));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    Contract.EndContractBlock();

    InternalWriteAllLines(new StreamWriter(path, true, encoding), contents);
}

/// <summary>Replaces <paramref name="destinationFileName"/> with <paramref name="sourceFileName"/>,
/// optionally backing up the destination. Metadata errors are not ignored.</summary>
public static void Replace(String sourceFileName, String destinationFileName, String destinationBackupFileName)
{
    Replace(sourceFileName, destinationFileName, destinationBackupFileName, ignoreMetadataErrors: false);
}

/// <summary>Replaces <paramref name="destinationFileName"/> with <paramref name="sourceFileName"/>.
/// When <paramref name="destinationBackupFileName"/> is null no backup is made.</summary>
public static void Replace(String sourceFileName, String destinationFileName, String destinationBackupFileName, bool ignoreMetadataErrors)
{
    if (sourceFileName == null)
        throw new ArgumentNullException(nameof(sourceFileName));
    if (destinationFileName == null)
        throw new ArgumentNullException(nameof(destinationFileName));

    FileSystem.Current.ReplaceFile(
        Path.GetFullPath(sourceFileName),
        Path.GetFullPath(destinationFileName),
        destinationBackupFileName != null ? Path.GetFullPath(destinationBackupFileName) : null,
        ignoreMetadataErrors);
}

// Moves a specified file to a new location and potentially a new file name.
// This method does work across volumes.
//
// The caller must have certain FileIOPermissions. The caller must
// have Read and Write permission to
// sourceFileName and Write
// permissions to destFileName.
//
/// <summary>Moves a file to a new location and potentially a new name; works across volumes.</summary>
/// <exception cref="FileNotFoundException">The source file does not exist.</exception>
[System.Security.SecuritySafeCritical]
public static void Move(String sourceFileName, String destFileName)
{
    if (sourceFileName == null)
        throw new ArgumentNullException(nameof(sourceFileName), SR.ArgumentNull_FileName);
    if (destFileName == null)
        throw new ArgumentNullException(nameof(destFileName), SR.ArgumentNull_FileName);
    if (sourceFileName.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyFileName, nameof(sourceFileName));
    if (destFileName.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyFileName, nameof(destFileName));
    Contract.EndContractBlock();

    String fullSourceFileName = Path.GetFullPath(sourceFileName);
    String fullDestFileName = Path.GetFullPath(destFileName);

    // Existence is checked up front so a missing source surfaces as FileNotFoundException
    // rather than whatever the underlying move would report.
    if (!InternalExists(fullSourceFileName))
    {
        throw new FileNotFoundException(SR.Format(SR.IO_FileNotFound_FileName, fullSourceFileName), fullSourceFileName);
    }

    FileSystem.Current.MoveFile(fullSourceFileName, fullDestFileName);
}

/// <summary>Not supported on this platform; always throws <see cref="PlatformNotSupportedException"/>.</summary>
public static void Encrypt(String path)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));

    // TODO: Not supported on Unix or in WinRt, and the EncryptFile API isn't currently
    // available in OneCore. For now, we just throw PNSE everywhere. When the API is
    // available, we can put this into the FileSystem abstraction and implement it
    // properly for Win32.
    throw new PlatformNotSupportedException(SR.PlatformNotSupported_FileEncryption);
}

/// <summary>Not supported on this platform; always throws <see cref="PlatformNotSupportedException"/>.</summary>
public static void Decrypt(String path)
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));

    // TODO: Not supported on Unix or in WinRt, and the EncryptFile API isn't currently
    // available in OneCore. For now, we just throw PNSE everywhere. When the API is
    // available, we can put this into the FileSystem abstraction and implement it
    // properly for Win32.
    throw new PlatformNotSupportedException(SR.PlatformNotSupported_FileEncryption);
}

// UTF-8 without BOM and with error detection. Same as the default encoding for StreamWriter.
// Lazily initialized; benign race -- worst case two identical instances are created.
private static Encoding UTF8NoBOM => s_UTF8NoBOM ?? (s_UTF8NoBOM = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: true));

// If we use the path-taking constructors we will not have FileOptions.Asynchronous set and
// we will have asynchronous file access faked by the thread pool. We want the real thing.
private static StreamReader AsyncStreamReader(string path, Encoding encoding)
{
    FileStream stream = new FileStream(
        path, FileMode.Open, FileAccess.Read, FileShare.Read, DefaultBufferSize,
        FileOptions.Asynchronous | FileOptions.SequentialScan);

    return new StreamReader(stream, encoding, detectEncodingFromByteOrderMarks: true);
}

private static StreamWriter AsyncStreamWriter(string path, Encoding encoding, bool append)
{
    FileStream stream = new FileStream(
        path, append ? FileMode.Append : FileMode.Create, FileAccess.Write, FileShare.Read, DefaultBufferSize,
        FileOptions.Asynchronous | FileOptions.SequentialScan);

    return new StreamWriter(stream, encoding);
}

/// <summary>Asynchronously reads the entire file as text using UTF-8.</summary>
public static Task<string> ReadAllTextAsync(string path, CancellationToken cancellationToken = default(CancellationToken))
    => ReadAllTextAsync(path, Encoding.UTF8, cancellationToken);

/// <summary>Asynchronously reads the entire file as text using the supplied encoding.
/// Argument validation throws synchronously; cancellation is reported via the returned task.</summary>
public static Task<string> ReadAllTextAsync(string path, Encoding encoding, CancellationToken cancellationToken = default(CancellationToken))
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));

    return cancellationToken.IsCancellationRequested
        ? Task.FromCanceled<string>(cancellationToken)
        : InternalReadAllTextAsync(path, encoding, cancellationToken);
}

private static async Task<string> InternalReadAllTextAsync(string path, Encoding encoding, CancellationToken cancellationToken)
{
    Debug.Assert(!string.IsNullOrEmpty(path));
    Debug.Assert(encoding != null);

    char[] buffer = null;
    StringBuilder sb = null;
    StreamReader sr = AsyncStreamReader(path, encoding);
    try
    {
        cancellationToken.ThrowIfCancellationRequested();
        sb = StringBuilderCache.Acquire();
        // Rent enough chars to decode a full DefaultBufferSize worth of bytes in one read.
        buffer = ArrayPool<char>.Shared.Rent(sr.CurrentEncoding.GetMaxCharCount(DefaultBufferSize));
        for (;;)
        {
            int read = await sr.ReadAsync(buffer, 0, buffer.Length).ConfigureAwait(false);
            if (read == 0)
            {
                return sb.ToString();
            }

            sb.Append(buffer, 0, read);
            cancellationToken.ThrowIfCancellationRequested();
        }
    }
    finally
    {
        sr.Dispose();
        if (buffer != null)
        {
            ArrayPool<char>.Shared.Return(buffer);
        }
        if (sb != null)
        {
            StringBuilderCache.Release(sb);
        }
    }
}

/// <summary>Asynchronously creates/overwrites the file with <paramref name="contents"/> using UTF-8 (no BOM).</summary>
public static Task WriteAllTextAsync(string path, string contents, CancellationToken cancellationToken = default(CancellationToken))
    => WriteAllTextAsync(path, contents, UTF8NoBOM, cancellationToken);

/// <summary>Asynchronously creates/overwrites the file with <paramref name="contents"/> using the supplied encoding.</summary>
public static Task WriteAllTextAsync(string path, string contents, Encoding encoding, CancellationToken cancellationToken = default(CancellationToken))
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));

    if (cancellationToken.IsCancellationRequested)
    {
        return Task.FromCanceled(cancellationToken);
    }

    if (string.IsNullOrEmpty(contents))
    {
        // Nothing to write, but still create/truncate the file (and surface any open failure).
        new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.Read).Dispose();
        return Task.CompletedTask;
    }

    return InternalWriteAllTextAsync(AsyncStreamWriter(path, encoding, append: false), contents, cancellationToken);
}

/// <summary>Asynchronously reads the entire file into a byte array.</summary>
public static Task<byte[]> ReadAllBytesAsync(string path, CancellationToken cancellationToken = default(CancellationToken))
{
    if (cancellationToken.IsCancellationRequested)
    {
        return Task.FromCanceled<byte[]>(cancellationToken);
    }

    FileStream fs = new FileStream(
        path, FileMode.Open, FileAccess.Read, FileShare.Read, DefaultBufferSize,
        FileOptions.Asynchronous | FileOptions.SequentialScan);

    // Ownership of fs transfers to InternalReadAllBytesAsync on the success path;
    // on every early return we must dispose it ourselves.
    bool returningInternalTask = false;
    try
    {
        long fileLength = fs.Length;
        if (cancellationToken.IsCancellationRequested)
        {
            return Task.FromCanceled<byte[]>(cancellationToken);
        }
        if (fileLength > int.MaxValue)
        {
            return Task.FromException<byte[]>(new IOException(SR.IO_FileTooLong2GB));
        }
        if (fileLength == 0)
        {
            return Task.FromResult(Array.Empty<byte>());
        }

        returningInternalTask = true;
        return InternalReadAllBytesAsync(fs, (int)fileLength, cancellationToken);
    }
    finally
    {
        if (!returningInternalTask)
        {
            fs.Dispose();
        }
    }
}

private static async Task<byte[]> InternalReadAllBytesAsync(FileStream fs, int count, CancellationToken cancellationToken)
{
    using (fs)
    {
        int index = 0;
        byte[] bytes = new byte[count];
        do
        {
            // ReadAsync may return fewer bytes than requested; n == 0 before we have
            // `count` bytes means the file shrank underneath us.
            int n = await fs.ReadAsync(bytes, index, count - index, cancellationToken).ConfigureAwait(false);
            if (n == 0)
            {
                throw Error.GetEndOfFile();
            }

            index += n;
        } while (index < count);

        return bytes;
    }
}

/// <summary>Asynchronously creates/overwrites the file with the given bytes.</summary>
public static Task WriteAllBytesAsync(string path, byte[] bytes, CancellationToken cancellationToken = default(CancellationToken))
{
    if (path == null)
        throw new ArgumentNullException(nameof(path), SR.ArgumentNull_Path);
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));
    if (bytes == null)
        throw new ArgumentNullException(nameof(bytes));

    return cancellationToken.IsCancellationRequested
        ? Task.FromCanceled(cancellationToken)
        : InternalWriteAllBytesAsync(path, bytes, cancellationToken);
}

private static async Task InternalWriteAllBytesAsync(String path, byte[] bytes, CancellationToken cancellationToken)
{
    Debug.Assert(!string.IsNullOrEmpty(path));
    Debug.Assert(bytes != null);

    using (FileStream fs = new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.Read, DefaultBufferSize, FileOptions.Asynchronous | FileOptions.SequentialScan))
    {
        await fs.WriteAsync(bytes, 0, bytes.Length, cancellationToken).ConfigureAwait(false);
        await fs.FlushAsync(cancellationToken).ConfigureAwait(false);
    }
}

/// <summary>Asynchronously reads all lines of the file using UTF-8.</summary>
public static Task<string[]> ReadAllLinesAsync(string path, CancellationToken cancellationToken = default(CancellationToken))
    => ReadAllLinesAsync(path, Encoding.UTF8, cancellationToken);

/// <summary>Asynchronously reads all lines of the file using the supplied encoding.</summary>
public static Task<string[]> ReadAllLinesAsync(string path, Encoding encoding, CancellationToken cancellationToken = default(CancellationToken))
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));

    return cancellationToken.IsCancellationRequested
        ? Task.FromCanceled<string[]>(cancellationToken)
        : InternalReadAllLinesAsync(path, encoding, cancellationToken);
}

private static async Task<string[]> InternalReadAllLinesAsync(string path, Encoding encoding, CancellationToken cancellationToken)
{
    Debug.Assert(!string.IsNullOrEmpty(path));
    Debug.Assert(encoding != null);

    using (StreamReader sr = AsyncStreamReader(path, encoding))
    {
        cancellationToken.ThrowIfCancellationRequested();
        string line;
        List<string> lines = new List<string>();
        while ((line = await sr.ReadLineAsync().ConfigureAwait(false)) != null)
        {
            lines.Add(line);
            // Cancellation is checked per line since ReadLineAsync itself takes no token.
            cancellationToken.ThrowIfCancellationRequested();
        }

        return lines.ToArray();
    }
}

/// <summary>Asynchronously creates/overwrites the file with one line per sequence element using UTF-8 (no BOM).</summary>
public static Task WriteAllLinesAsync(string path, IEnumerable<string> contents, CancellationToken cancellationToken = default(CancellationToken))
    => WriteAllLinesAsync(path, contents, UTF8NoBOM, cancellationToken);

/// <summary>Asynchronously creates/overwrites the file with one line per sequence element using the supplied encoding.</summary>
public static Task WriteAllLinesAsync(string path, IEnumerable<string> contents, Encoding encoding, CancellationToken cancellationToken = default(CancellationToken))
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (contents == null)
        throw new ArgumentNullException(nameof(contents));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));

    return cancellationToken.IsCancellationRequested
        ? Task.FromCanceled(cancellationToken)
        : InternalWriteAllLinesAsync(AsyncStreamWriter(path, encoding, append: false), contents, cancellationToken);
}

// Takes ownership of (and disposes) the writer; shared by WriteAllLinesAsync and AppendAllLinesAsync.
private static async Task InternalWriteAllLinesAsync(TextWriter writer, IEnumerable<string> contents, CancellationToken cancellationToken)
{
    Debug.Assert(writer != null);
    Debug.Assert(contents != null);

    using (writer)
    {
        foreach (string line in contents)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Note that this working depends on the fix to #14563, and cannot be ported without
            // either also porting that fix, or explicitly checking for line being null.
            await writer.WriteLineAsync(line).ConfigureAwait(false);
        }

        cancellationToken.ThrowIfCancellationRequested();
        await writer.FlushAsync().ConfigureAwait(false);
    }
}

// Writes `contents` through `sw` in DefaultBufferSize-sized chunks via a pooled char buffer.
// Takes ownership of (and disposes) the writer.
private static async Task InternalWriteAllTextAsync(StreamWriter sw, string contents, CancellationToken cancellationToken)
{
    char[] buffer = null;
    try
    {
        buffer = ArrayPool<char>.Shared.Rent(DefaultBufferSize);
        int count = contents.Length;
        int index = 0;
        while (index < count)
        {
            // FIX: the chunk size must be bounded by the *remaining* characters
            // (count - index), not the total length. The previous code used
            // Math.Min(DefaultBufferSize, count), which made the final CopyTo read past
            // the end of `contents` (ArgumentOutOfRangeException) whenever
            // contents.Length > DefaultBufferSize and not an exact multiple of it.
            int batchSize = Math.Min(DefaultBufferSize, count - index);
            contents.CopyTo(index, buffer, 0, batchSize);
            cancellationToken.ThrowIfCancellationRequested();
            await sw.WriteAsync(buffer, 0, batchSize).ConfigureAwait(false);
            index += batchSize;
        }

        cancellationToken.ThrowIfCancellationRequested();
        await sw.FlushAsync().ConfigureAwait(false);
    }
    finally
    {
        sw.Dispose();
        if (buffer != null)
        {
            ArrayPool<char>.Shared.Return(buffer);
        }
    }
}

/// <summary>Asynchronously appends <paramref name="contents"/> to the file (created if absent) using UTF-8 (no BOM).</summary>
public static Task AppendAllTextAsync(string path, string contents, CancellationToken cancellationToken = default(CancellationToken))
    => AppendAllTextAsync(path, contents, UTF8NoBOM, cancellationToken);

/// <summary>Asynchronously appends <paramref name="contents"/> to the file (created if absent) using the supplied encoding.</summary>
public static Task AppendAllTextAsync(string path, string contents, Encoding encoding, CancellationToken cancellationToken = default(CancellationToken))
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));

    if (cancellationToken.IsCancellationRequested)
    {
        return Task.FromCanceled(cancellationToken);
    }

    if (string.IsNullOrEmpty(contents))
    {
        // Just to throw exception if there is a problem opening the file.
        new FileStream(path, FileMode.Append, FileAccess.Write, FileShare.Read).Dispose();
        return Task.CompletedTask;
    }

    return InternalWriteAllTextAsync(AsyncStreamWriter(path, encoding, append: true), contents, cancellationToken);
}

/// <summary>Asynchronously appends one line per sequence element to the file (created if absent) using UTF-8 (no BOM).</summary>
public static Task AppendAllLinesAsync(string path, IEnumerable<string> contents, CancellationToken cancellationToken = default(CancellationToken))
    => AppendAllLinesAsync(path, contents, UTF8NoBOM, cancellationToken);

/// <summary>Asynchronously appends one line per sequence element to the file (created if absent) using the supplied encoding.</summary>
public static Task AppendAllLinesAsync(string path, IEnumerable<string> contents, Encoding encoding, CancellationToken cancellationToken = default(CancellationToken))
{
    if (path == null)
        throw new ArgumentNullException(nameof(path));
    if (contents == null)
        throw new ArgumentNullException(nameof(contents));
    if (encoding == null)
        throw new ArgumentNullException(nameof(encoding));
    if (path.Length == 0)
        throw new ArgumentException(SR.Argument_EmptyPath, nameof(path));

    return cancellationToken.IsCancellationRequested
        ? Task.FromCanceled(cancellationToken)
        : InternalWriteAllLinesAsync(AsyncStreamWriter(path, encoding, append: true), contents, cancellationToken);
}
}
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.IO; using System.Text; using System.Collections; using System.Collections.Generic; using System.Diagnostics; using System.Runtime; using System.Runtime.InteropServices; using System.Runtime.CompilerServices; using Internal.Runtime.Augments; using Internal.DeveloperExperience; namespace System { // Eagerly preallocate instance of out of memory exception to avoid infinite recursion once we run out of memory [EagerOrderedStaticConstructor(EagerStaticConstructorOrder.SystemPreallocatedOutOfMemoryException)] internal class PreallocatedOutOfMemoryException { public static readonly OutOfMemoryException Instance = new OutOfMemoryException(message: null); // Cannot call the nullary constructor as that triggers non-trivial resource manager logic. } internal class RuntimeExceptionHelpers { //------------------------------------------------------------------------------------------------------------ // @TODO: this function is related to throwing exceptions out of Rtm. If we did not have to throw // out of Rtm, then we would note have to have the code below to get a classlib exception object given // an exception id, or the special functions to back up the MDIL THROW_* instructions, or the allocation // failure helper. If we could move to a world where we never throw out of Rtm, perhaps by moving parts // of Rtm that do need to throw out to Bartok- or Binder-generated functions, then we could remove all of this. //------------------------------------------------------------------------------------------------------------ // This is the classlib-provided "get exception" function that will be invoked whenever the runtime // needs to throw an exception back to a method in a non-runtime module. 
The classlib is expected // to convert every code in the ExceptionIDs enum to an exception object. [RuntimeExport("GetRuntimeException")] public static Exception GetRuntimeException(ExceptionIDs id) { // This method is called by the runtime's EH dispatch code and is not allowed to leak exceptions // back into the dispatcher. try { // @TODO: this function should return pre-allocated exception objects, either frozen in the image // or preallocated during DllMain(). In particular, this function will be called when out of memory, // and failure to create an exception will result in infinite recursion and therefore a stack overflow. switch (id) { case ExceptionIDs.OutOfMemory: return PreallocatedOutOfMemoryException.Instance; case ExceptionIDs.Arithmetic: return new ArithmeticException(); case ExceptionIDs.ArrayTypeMismatch: return new ArrayTypeMismatchException(); case ExceptionIDs.DivideByZero: return new DivideByZeroException(); case ExceptionIDs.IndexOutOfRange: return new IndexOutOfRangeException(); case ExceptionIDs.InvalidCast: return new InvalidCastException(); case ExceptionIDs.Overflow: return new OverflowException(); case ExceptionIDs.NullReference: return new NullReferenceException(); case ExceptionIDs.AccessViolation: FailFast("Access Violation: Attempted to read or write protected memory. This is often an indication that other memory is corrupt. The application will be terminated since this platform does not support throwing an AccessViolationException."); return null; case ExceptionIDs.DataMisaligned: return new DataMisalignedException(); default: FailFast("The runtime requires an exception for a case that this class library does not understand."); return null; } } catch { return null; // returning null will cause the runtime to FailFast via the class library. 
} } public enum RhFailFastReason { Unknown = 0, InternalError = 1, // "Runtime internal error" UnhandledException_ExceptionDispatchNotAllowed = 2, // "Unhandled exception: no handler found before escaping a finally clause or other fail-fast scope." UnhandledException_CallerDidNotHandle = 3, // "Unhandled exception: no handler found in calling method." ClassLibDidNotTranslateExceptionID = 4, // "Unable to translate failure into a classlib-specific exception object." IllegalNativeCallableEntry = 5, // "Invalid Program: attempted to call a NativeCallable method from runtime-typesafe code." PN_UnhandledException = 6, // ProjectN: "Unhandled exception: a managed exception was not handled before reaching unmanaged code" PN_UnhandledExceptionFromPInvoke = 7, // ProjectN: "Unhandled exception: an unmanaged exception was thrown out of a managed-to-native transition." Max } private static string GetStringForFailFastReason(RhFailFastReason reason) { switch (reason) { case RhFailFastReason.InternalError: return "Runtime internal error"; case RhFailFastReason.UnhandledException_ExceptionDispatchNotAllowed: return "Unhandled exception: no handler found before escaping a finally clause or other fail-fast scope."; case RhFailFastReason.UnhandledException_CallerDidNotHandle: return "Unhandled exception: no handler found in calling method."; case RhFailFastReason.ClassLibDidNotTranslateExceptionID: return "Unable to translate failure into a classlib-specific exception object."; case RhFailFastReason.IllegalNativeCallableEntry: return "Invalid Program: attempted to call a NativeCallable method from runtime-typesafe code."; case RhFailFastReason.PN_UnhandledException: return "Unhandled exception: a managed exception was not handled before reaching unmanaged code"; case RhFailFastReason.PN_UnhandledExceptionFromPInvoke: return "Unhandled exception: an unmanaged exception was thrown out of a managed-to-native transition."; default: return "Unknown reason."; } } public static void 
FailFast(String message) { FailFast(message, null, RhFailFastReason.Unknown, IntPtr.Zero, IntPtr.Zero); } public static unsafe void FailFast(string message, Exception exception) { FailFast(message, exception, RhFailFastReason.Unknown, IntPtr.Zero, IntPtr.Zero); } // Used to report exceptions that *logically* go unhandled in the Fx code. For example, an // exception that escapes from a ThreadPool workitem, or from a void-returning async method. public static void ReportUnhandledException(Exception exception) { // ReportUnhandledError will also call this in APPX scenarios, // but WinRT can failfast before we get another chance // (in APPX scenarios, this one will get overwritten by the one with the CCW pointer) GenerateExceptionInformationForDump(exception, IntPtr.Zero); // If possible report the exception to GEH, if not fail fast. WinRTInteropCallbacks callbacks = WinRTInterop.UnsafeCallbacks; if (callbacks == null || !callbacks.ReportUnhandledError(exception)) FailFast(GetStringForFailFastReason(RhFailFastReason.PN_UnhandledException), exception); } // This is the classlib-provided fail-fast function that will be invoked whenever the runtime // needs to cause the process to exit. It is the classlib's opprotunity to customize the // termination behavior in whatever way necessary. [RuntimeExport("FailFast")] public static void RuntimeFailFast(RhFailFastReason reason, Exception exception, IntPtr pExAddress, IntPtr pExContext) { // This method is called by the runtime's EH dispatch code and is not allowed to leak exceptions // back into the dispatcher. try { if (!SafeToPerformRichExceptionSupport) return; if ((reason == RhFailFastReason.PN_UnhandledException) && (exception != null) && !(exception is OutOfMemoryException)) { Debug.WriteLine("Unhandled Exception: " + exception.ToString()); } FailFast(String.Format("Runtime-generated FailFast: ({0}): {1}{2}", reason.ToString(), // Explicit call to ToString() to avoid MissingMetadataException inside String.Format(). 
GetStringForFailFastReason(reason), exception != null ? " [exception object available]" : ""), exception, reason, pExAddress, pExContext); } catch { // Returning from this callback will cause the runtime to FailFast without involving the class // library. } } internal static void FailFast(string message, Exception exception, RhFailFastReason reason, IntPtr pExAddress, IntPtr pExContext) { // If this a recursive call to FailFast, avoid all unnecessary and complex actitivy the second time around to avoid the recursion // that got us here the first time (Some judgement is required as to what activity is "unnecessary and complex".) bool minimalFailFast = s_inFailFast || (exception is OutOfMemoryException); s_inFailFast = true; if (!minimalFailFast) { String output = (exception != null) ? "Unhandled Exception: " + exception.ToString() : message; DeveloperExperience.Default.WriteLine(output); GenerateExceptionInformationForDump(exception, IntPtr.Zero); } uint errorCode = 0x80004005; // E_FAIL // To help enable testing to bucket the failures we choose one of the following as errorCode: // * RVA of EETypePtr if it is an unhandled managed exception // * HRESULT, if available // * RhFailFastReason, if it is one of the known reasons if (exception != null) { if (reason == RhFailFastReason.PN_UnhandledException) errorCode = (uint)(exception.EETypePtr.RawValue.ToInt64() - RuntimeImports.RhGetModuleFromEEType(exception.EETypePtr.RawValue).ToInt64()); else if (exception.HResult != 0) errorCode = (uint)exception.HResult; } else if (reason != RhFailFastReason.Unknown) { errorCode = (uint)reason + 0x1000; // Add something to avoid common low level exit codes } Interop.mincore.RaiseFailFastException(errorCode, pExAddress, pExContext); } // This boolean is used to stop runaway FailFast recursions. 
        // Though this is technically a concurrently set field, it only gets set during
        // fatal process shutdowns and its only purpose is a reasonable-case effort to make a bad situation a little less bad.
        // Trying to use locks or other concurrent access apis would actually defeat the purpose of making FailFast as robust as possible.
        private static bool s_inFailFast;

#pragma warning disable 414 // field is assigned, but never used -- This is because C# doesn't realize that we
                            // copy the field into a buffer.
        /// <summary>
        /// This is the header that describes our 'error report' buffer to the minidump auxiliary provider.
        /// Its format is known to that system-wide DLL, so do not change it. The remainder of the buffer is
        /// opaque to the minidump auxiliary provider, so it'll have its own format that is more easily
        /// changed.
        /// </summary>
        [StructLayout(LayoutKind.Sequential)]
        private struct ERROR_REPORT_BUFFER_HEADER
        {
            private int _headerSignature;
            private int _bufferByteCount;

            // Stamps the fixed signature and total buffer length; called once per report buffer.
            public void WriteHeader(int cbBuffer)
            {
                _headerSignature = 0x31304244; // 'DB01'
                _bufferByteCount = cbBuffer;
            }
        }

        /// <summary>
        /// This header describes the contents of the serialized error report to DAC, which can deserialize it
        /// from a dump file or live debugging session. This format is easier to change than the
        /// ERROR_REPORT_BUFFER_HEADER, but it is still well-known to DAC, so any changes must update the
        /// version number and also have corresponding changes made to DAC.
        /// </summary>
        [StructLayout(LayoutKind.Sequential)]
        private struct SERIALIZED_ERROR_REPORT_HEADER
        {
            private int _errorReportSignature;          // This is the version of the 'container format'.
            private int _exceptionSerializationVersion; // This is the version of the Exception format. It is
                                                        // separate from the 'container format' version since the
                                                        // implementation of the Exception serialization is owned by
                                                        // the Exception class.
            private int _exceptionCount;                // We just contain a logical array of exceptions.
            private int _loadedModuleCount;             // Number of loaded modules. present when signature >= ER02.
            // {ExceptionCount} serialized Exceptions follow.
            // {LoadedModuleCount} module handles follow. present when signature >= ER02.

            // Stamps the 'ER02' signature, the Exception class's serialization version, and the two counts.
            public void WriteHeader(int nExceptions, int nLoadedModules)
            {
                _errorReportSignature = 0x32305245; // 'ER02'
                _exceptionSerializationVersion = Exception.CurrentSerializationSignature;
                _exceptionCount = nExceptions;
                _loadedModuleCount = nLoadedModules;
            }
        }

        /// <summary>
        /// Holds metadata about an exception in flight. Class because ConditionalWeakTable only accepts reference types
        /// </summary>
        private class ExceptionData
        {
            public ExceptionData()
            {
                // Set this to a non-zero value so that logic mapping entries to threads
                // doesn't think an uninitialized ExceptionData is on thread 0
                ExceptionMetadata.ThreadId = 0xFFFFFFFF;
            }

            public struct ExceptionMetadataStruct
            {
                public UInt32 ExceptionId { get; set; }      // Id assigned to the exception. May not be contiguous or start at 0.
                public UInt32 InnerExceptionId { get; set; } // ID of the inner exception or 0xFFFFFFFF for 'no inner exception'
                public UInt32 ThreadId { get; set; }         // Managed thread ID the exception was thrown on
                public Int32 NestingLevel { get; set; }      // If multiple exceptions are currently active on a thread, this gives the ordering for them.
                                                             // The highest number is the most recent exception. -1 means the exception is not currently in flight
                                                             // (but it may still be an InnerException).
                public IntPtr ExceptionCCWPtr { get; set; }  // If the exception was thrown in an interop scenario, this contains the CCW pointer, otherwise, IntPtr.Zero
            }

            public ExceptionMetadataStruct ExceptionMetadata;

            /// <summary>
            /// Data created by Exception.SerializeForDump()
            /// </summary>
            public byte[] SerializedExceptionData { get; set; }

            /// <summary>
            /// Serializes the exception metadata and SerializedExceptionData.
            /// Layout of the returned buffer: one ExceptionMetadataStruct (blittable, Sequential) followed
            /// immediately by the raw SerializedExceptionData bytes.
            /// </summary>
            public unsafe byte[] Serialize()
            {
                checked
                {
                    byte[] serializedData = new byte[sizeof(ExceptionMetadataStruct) + SerializedExceptionData.Length];
                    fixed (byte* pSerializedData = serializedData)
                    {
                        // Write the metadata struct directly at the front of the pinned buffer.
                        ExceptionMetadataStruct* pMetadata = (ExceptionMetadataStruct*)pSerializedData;
                        pMetadata->ExceptionId = ExceptionMetadata.ExceptionId;
                        pMetadata->InnerExceptionId = ExceptionMetadata.InnerExceptionId;
                        pMetadata->ThreadId = ExceptionMetadata.ThreadId;
                        pMetadata->NestingLevel = ExceptionMetadata.NestingLevel;
                        pMetadata->ExceptionCCWPtr = ExceptionMetadata.ExceptionCCWPtr;

                        // Append the serialized exception payload right after the metadata.
                        Array.CopyToNative(SerializedExceptionData, 0, (IntPtr)(pSerializedData + sizeof(ExceptionMetadataStruct)), SerializedExceptionData.Length);
                    }
                    return serializedData;
                }
            }
        }

        /// <summary>
        /// Table of exceptions that were on stacks triggering GenerateExceptionInformationForDump
        /// </summary>
        private readonly static ConditionalWeakTable<Exception, ExceptionData> s_exceptionDataTable = new ConditionalWeakTable<Exception, ExceptionData>();

        /// <summary>
        /// Counter for exception ID assignment
        /// </summary>
        private static int s_currentExceptionId = 0;

        /// <summary>
        /// This method will call the runtime to gather the Exception objects from every exception dispatch in
        /// progress on the current thread. It will then serialize them into a new buffer and pass that
        /// buffer back to the runtime, which will publish it to a place where a global "minidump auxiliary
        /// provider" will be able to save the buffer's contents into triage dumps.
        ///
        /// Thread safety information: The guarantee of this method is that the buffer it produces will have
        /// complete and correct information for all live exceptions on the current thread (as long as the same exception object
        /// is not thrown simultaneously on multiple threads). It will do a best-effort attempt to serialize information about exceptions
        /// already recorded on other threads, but that data can be lost or corrupted. The restrictions are:
        /// 1. Only exceptions active or recorded on the current thread have their table data modified.
        /// 2. After updating data in the table, we serialize a snapshot of the table (provided by ConditionalWeakTable.Values),
        ///    regardless of what other threads might do to the table before or after. However, because of #1, this thread's
        ///    exception data should stay stable
        /// 3. There is a dependency on the fact that ConditionalWeakTable's members are all threadsafe and that .Values returns a snapshot
        /// </summary>
        public static void GenerateExceptionInformationForDump(Exception currentException, IntPtr exceptionCCWPtr)
        {
            LowLevelList<byte[]> serializedExceptions = new LowLevelList<byte[]>();

            // If currentException is null, there's a state corrupting exception in flight and we can't serialize it
            if (currentException != null)
            {
                SerializeExceptionsForDump(currentException, exceptionCCWPtr, serializedExceptions);
            }

            // Always publish a report, even if it contains no exceptions, so the module list is captured.
            GenerateErrorReportForDump(serializedExceptions);
        }

        /// <summary>
        /// Collects every exception in flight on the current thread (plus their inner exceptions), refreshes
        /// their metadata in <see cref="s_exceptionDataTable"/> (ids, thread id, nesting levels, inner-exception
        /// links, CCW pointer for <paramref name="currentException"/>), and appends each one's serialized form
        /// to <paramref name="serializedExceptions"/>, capped at roughly MaxBufferSize bytes (the current
        /// exception is always included).
        /// </summary>
        private static void SerializeExceptionsForDump(Exception currentException, IntPtr exceptionCCWPtr, LowLevelList<byte[]> serializedExceptions)
        {
            // Sentinel meaning "no inner exception"; also skipped when assigning real ids below.
            const UInt32 NoInnerExceptionValue = 0xFFFFFFFF;

            // Approximate upper size limit for the serialized exceptions (but we'll always serialize currentException)
            // If we hit the limit, because we serialize in arbitrary order, there may be missing InnerExceptions or nested exceptions.
            const int MaxBufferSize = 20000;

            // First call sizes the array; second call fills it.
            int nExceptions;
            RuntimeImports.RhGetExceptionsForCurrentThread(null, out nExceptions);
            Exception[] curThreadExceptions = new Exception[nExceptions];
            RuntimeImports.RhGetExceptionsForCurrentThread(curThreadExceptions, out nExceptions);
            LowLevelList<Exception> exceptions = new LowLevelList<Exception>(curThreadExceptions);
            LowLevelList<Exception> nonThrownInnerExceptions = new LowLevelList<Exception>();

            uint currentThreadId = Interop.mincore.GetCurrentThreadId();

            // Reset nesting levels for exceptions on this thread that might not be currently in flight
            foreach (ExceptionData exceptionData in s_exceptionDataTable.GetValues())
            {
                if (exceptionData.ExceptionMetadata.ThreadId == currentThreadId)
                {
                    exceptionData.ExceptionMetadata.NestingLevel = -1;
                }
            }

            // Find all inner exceptions, even if they're not currently being handled
            // (the list grows while iterating, so the whole InnerException chain is walked).
            for (int i = 0; i < exceptions.Count; i++)
            {
                if (exceptions[i].InnerException != null && !exceptions.Contains(exceptions[i].InnerException))
                {
                    exceptions.Add(exceptions[i].InnerException);
                    nonThrownInnerExceptions.Add(exceptions[i].InnerException);
                }
            }

            int currentNestingLevel = curThreadExceptions.Length - 1;

            // Make sure we serialize currentException
            if (!exceptions.Contains(currentException))
            {
                // When this happens, currentException is probably passed to this function through System.Environment.FailFast(), we
                // would want to treat as if this exception is last thrown in the current thread.
                exceptions.Insert(0, currentException);
                currentNestingLevel++;
            }

            // Populate exception data for all exceptions interesting to this thread.
            // Whether or not there was previously data for that object, it might have changed.
            for (int i = 0; i < exceptions.Count; i++)
            {
                ExceptionData exceptionData = s_exceptionDataTable.GetOrCreateValue(exceptions[i]);

                exceptionData.ExceptionMetadata.ExceptionId = (UInt32)System.Threading.Interlocked.Increment(ref s_currentExceptionId);
                // Never hand out the sentinel value as a real id; take the next one instead.
                if (exceptionData.ExceptionMetadata.ExceptionId == NoInnerExceptionValue)
                {
                    exceptionData.ExceptionMetadata.ExceptionId = (UInt32)System.Threading.Interlocked.Increment(ref s_currentExceptionId);
                }

                exceptionData.ExceptionMetadata.ThreadId = currentThreadId;

                // Only include nesting information for exceptions that were thrown on this thread
                if (!nonThrownInnerExceptions.Contains(exceptions[i]))
                {
                    exceptionData.ExceptionMetadata.NestingLevel = currentNestingLevel;
                    currentNestingLevel--;
                }
                else
                {
                    exceptionData.ExceptionMetadata.NestingLevel = -1;
                }

                // Only match the CCW pointer up to the current exception
                if (Object.ReferenceEquals(exceptions[i], currentException))
                {
                    exceptionData.ExceptionMetadata.ExceptionCCWPtr = exceptionCCWPtr;
                }

                byte[] serializedEx = exceptions[i].SerializeForDump();
                exceptionData.SerializedExceptionData = serializedEx;
            }

            // Populate inner exception ids now that we have all of them in the table
            for (int i = 0; i < exceptions.Count; i++)
            {
                ExceptionData exceptionData;
                if (!s_exceptionDataTable.TryGetValue(exceptions[i], out exceptionData))
                {
                    // This shouldn't happen, but we can't meaningfully throw here
                    continue;
                }

                if (exceptions[i].InnerException != null)
                {
                    ExceptionData innerExceptionData;
                    if (s_exceptionDataTable.TryGetValue(exceptions[i].InnerException, out innerExceptionData))
                    {
                        exceptionData.ExceptionMetadata.InnerExceptionId = innerExceptionData.ExceptionMetadata.ExceptionId;
                    }
                }
                else
                {
                    exceptionData.ExceptionMetadata.InnerExceptionId = NoInnerExceptionValue;
                }
            }

            int totalSerializedExceptionSize = 0;
            // Make sure we include the current exception, regardless of buffer size
            ExceptionData currentExceptionData = null;
            if (s_exceptionDataTable.TryGetValue(currentException, out currentExceptionData))
            {
                byte[] serializedExceptionData = currentExceptionData.Serialize();
                serializedExceptions.Add(serializedExceptionData);
                totalSerializedExceptionSize = serializedExceptionData.Length;
            }

            checked
            {
                foreach (ExceptionData exceptionData in s_exceptionDataTable.GetValues())
                {
                    // Already serialized currentException
                    if (currentExceptionData != null && exceptionData.ExceptionMetadata.ExceptionId == currentExceptionData.ExceptionMetadata.ExceptionId)
                    {
                        continue;
                    }

                    byte[] serializedExceptionData = exceptionData.Serialize();
                    // Stop (don't just skip) once the size cap would be exceeded.
                    if (totalSerializedExceptionSize + serializedExceptionData.Length >= MaxBufferSize)
                    {
                        break;
                    }

                    serializedExceptions.Add(serializedExceptionData);
                    totalSerializedExceptionSize += serializedExceptionData.Length;
                }
            }
        }

        /// <summary>
        /// Packs the serialized exceptions and the loaded-module handle list into a single flat buffer
        /// (ERROR_REPORT_BUFFER_HEADER, then SERIALIZED_ERROR_REPORT_HEADER, then each exception blob,
        /// then the module handles) and publishes it to the runtime via UpdateErrorReportBuffer.
        /// </summary>
        private unsafe static void GenerateErrorReportForDump(LowLevelList<byte[]> serializedExceptions)
        {
            checked
            {
                // Passing null returns the module count without filling anything in.
                int loadedModuleCount = RuntimeAugments.GetLoadedModules(null);
                int cbModuleHandles = sizeof(System.IntPtr) * loadedModuleCount;
                int cbFinalBuffer = sizeof(ERROR_REPORT_BUFFER_HEADER) + sizeof(SERIALIZED_ERROR_REPORT_HEADER) + cbModuleHandles;
                for (int i = 0; i < serializedExceptions.Count; i++)
                {
                    cbFinalBuffer += serializedExceptions[i].Length;
                }

                byte[] finalBuffer = new byte[cbFinalBuffer];
                fixed (byte* pBuffer = finalBuffer)
                {
                    byte* pCursor = pBuffer;
                    int cbRemaining = cbFinalBuffer;

                    ERROR_REPORT_BUFFER_HEADER* pDacHeader = (ERROR_REPORT_BUFFER_HEADER*)pCursor;
                    pDacHeader->WriteHeader(cbFinalBuffer);
                    pCursor += sizeof(ERROR_REPORT_BUFFER_HEADER);
                    cbRemaining -= sizeof(ERROR_REPORT_BUFFER_HEADER);

                    SERIALIZED_ERROR_REPORT_HEADER* pPayloadHeader = (SERIALIZED_ERROR_REPORT_HEADER*)pCursor;
                    pPayloadHeader->WriteHeader(serializedExceptions.Count, loadedModuleCount);
                    pCursor += sizeof(SERIALIZED_ERROR_REPORT_HEADER);
                    cbRemaining -= sizeof(SERIALIZED_ERROR_REPORT_HEADER);

                    // copy the serialized exceptions to report buffer
                    for (int i = 0; i < serializedExceptions.Count; i++)
                    {
                        int cbChunk = serializedExceptions[i].Length;
                        Array.CopyToNative(serializedExceptions[i], 0, (IntPtr)pCursor, cbChunk);
                        cbRemaining -= cbChunk;
                        pCursor += cbChunk;
                    }

                    // copy the module-handle array to report buffer
                    System.IntPtr[] loadedModuleHandles = new System.IntPtr[loadedModuleCount];
                    RuntimeAugments.GetLoadedModules(loadedModuleHandles);
                    Array.CopyToNative(loadedModuleHandles, 0, (IntPtr)pCursor, loadedModuleHandles.Length);
                    cbRemaining -= cbModuleHandles;
                    pCursor += cbModuleHandles;

                    // The cursor must have consumed exactly the bytes we sized above.
                    Debug.Assert(cbRemaining == 0);
                }
                UpdateErrorReportBuffer(finalBuffer);
            }
        }

        // This returns "true" once enough of the framework has been initialized to safely perform operations
        // such as filling in the stack frame and generating diagnostic support.
        public static bool SafeToPerformRichExceptionSupport
        {
            get
            {
                // Reflection needs to work as the exception code calls GetType() and GetType().ToString()
                if (RuntimeAugments.CallbacksIfAvailable == null)
                    return false;
                return true;
            }
        }

        // Pinning handle for the currently published report buffer; kept alive so the runtime's raw
        // pointer stays valid. Guarded by s_ExceptionInfoBufferLock.
        private static GCHandle s_ExceptionInfoBufferPinningHandle;
        // Lock that serializes buffer publication (swap of the runtime pointer and the pinning handle).
        private static object s_ExceptionInfoBufferLock = new object();

        /// <summary>
        /// Publishes <paramref name="finalBuffer"/> to the runtime (RhSetErrorInfoBuffer), replacing any
        /// previously published buffer, and re-targets/creates the pinning GC handle so the new buffer's
        /// address remains stable for the unmanaged consumer.
        /// </summary>
        private unsafe static void UpdateErrorReportBuffer(byte[] finalBuffer)
        {
            fixed (byte* pBuffer = finalBuffer)
            {
                lock (s_ExceptionInfoBufferLock)
                {
                    byte* pPrevBuffer = (byte*)RuntimeImports.RhSetErrorInfoBuffer(pBuffer);
                    // The handle and the runtime's previous pointer must agree: both set or both absent.
                    Debug.Assert(s_ExceptionInfoBufferPinningHandle.IsAllocated == (pPrevBuffer != null));
                    if (pPrevBuffer != null)
                    {
                        byte[] currentExceptionInfoBuffer = (byte[])s_ExceptionInfoBufferPinningHandle.Target;
                        Debug.Assert(currentExceptionInfoBuffer != null);
                        fixed (byte* pPrev = currentExceptionInfoBuffer)
                            Debug.Assert(pPrev == pPrevBuffer);
                    }
                    if (!s_ExceptionInfoBufferPinningHandle.IsAllocated)
                    {
                        // We allocate a pinning GC handle because we are logically giving the runtime 'unmanaged memory'.
                        s_ExceptionInfoBufferPinningHandle = GCHandle.Alloc(finalBuffer, GCHandleType.Pinned);
                    }
                    else
                    {
                        s_ExceptionInfoBufferPinningHandle.Target = finalBuffer;
                    }
                }
            }
        }
    }
}
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

namespace Google.Cloud.AIPlatform.V1.Snippets
{
    using Google.Api.Gax;
    using Google.Api.Gax.ResourceNames;
    using Google.LongRunning;
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Threading.Tasks;

    /// <summary>Generated snippets.</summary>
    /// <remarks>
    /// NOTE(review): this file is tool-generated documentation sample code for
    /// <c>MigrationServiceClient</c>. Do not hand-edit; regenerate instead, as manual changes
    /// will be lost the next time the generator runs.
    /// </remarks>
    public sealed class AllGeneratedMigrationServiceClientSnippets
    {
        /// <summary>Snippet for SearchMigratableResources</summary>
        public void SearchMigratableResourcesRequestObject()
        {
            // Snippet: SearchMigratableResources(SearchMigratableResourcesRequest, CallSettings)
            // Create client
            MigrationServiceClient migrationServiceClient = MigrationServiceClient.Create();
            // Initialize request argument(s)
            SearchMigratableResourcesRequest request = new SearchMigratableResourcesRequest
            {
                ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
                Filter = "",
            };
            // Make the request
            PagedEnumerable<SearchMigratableResourcesResponse, MigratableResource> response = migrationServiceClient.SearchMigratableResources(request);

            // Iterate over all response items, lazily performing RPCs as required
            foreach (MigratableResource item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }

            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (SearchMigratableResourcesResponse page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (MigratableResource item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<MigratableResource> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (MigratableResource item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for SearchMigratableResourcesAsync</summary>
        public async Task SearchMigratableResourcesRequestObjectAsync()
        {
            // Snippet: SearchMigratableResourcesAsync(SearchMigratableResourcesRequest, CallSettings)
            // Create client
            MigrationServiceClient migrationServiceClient = await MigrationServiceClient.CreateAsync();
            // Initialize request argument(s)
            SearchMigratableResourcesRequest request = new SearchMigratableResourcesRequest
            {
                ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
                Filter = "",
            };
            // Make the request
            PagedAsyncEnumerable<SearchMigratableResourcesResponse, MigratableResource> response = migrationServiceClient.SearchMigratableResourcesAsync(request);

            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((MigratableResource item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });

            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((SearchMigratableResourcesResponse page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (MigratableResource item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<MigratableResource> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (MigratableResource item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for SearchMigratableResources</summary>
        public void SearchMigratableResources()
        {
            // Snippet: SearchMigratableResources(string, string, int?, CallSettings)
            // Create client
            MigrationServiceClient migrationServiceClient = MigrationServiceClient.Create();
            // Initialize request argument(s)
            string parent = "projects/[PROJECT]/locations/[LOCATION]";
            // Make the request
            PagedEnumerable<SearchMigratableResourcesResponse, MigratableResource> response = migrationServiceClient.SearchMigratableResources(parent);

            // Iterate over all response items, lazily performing RPCs as required
            foreach (MigratableResource item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }

            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (SearchMigratableResourcesResponse page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (MigratableResource item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<MigratableResource> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (MigratableResource item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for SearchMigratableResourcesAsync</summary>
        public async Task SearchMigratableResourcesAsync()
        {
            // Snippet: SearchMigratableResourcesAsync(string, string, int?, CallSettings)
            // Create client
            MigrationServiceClient migrationServiceClient = await MigrationServiceClient.CreateAsync();
            // Initialize request argument(s)
            string parent = "projects/[PROJECT]/locations/[LOCATION]";
            // Make the request
            PagedAsyncEnumerable<SearchMigratableResourcesResponse, MigratableResource> response = migrationServiceClient.SearchMigratableResourcesAsync(parent);

            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((MigratableResource item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });

            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((SearchMigratableResourcesResponse page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (MigratableResource item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<MigratableResource> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (MigratableResource item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for SearchMigratableResources</summary>
        public void SearchMigratableResourcesResourceNames()
        {
            // Snippet: SearchMigratableResources(LocationName, string, int?, CallSettings)
            // Create client
            MigrationServiceClient migrationServiceClient = MigrationServiceClient.Create();
            // Initialize request argument(s)
            LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]");
            // Make the request
            PagedEnumerable<SearchMigratableResourcesResponse, MigratableResource> response = migrationServiceClient.SearchMigratableResources(parent);

            // Iterate over all response items, lazily performing RPCs as required
            foreach (MigratableResource item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }

            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (SearchMigratableResourcesResponse page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (MigratableResource item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<MigratableResource> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (MigratableResource item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for SearchMigratableResourcesAsync</summary>
        public async Task SearchMigratableResourcesResourceNamesAsync()
        {
            // Snippet: SearchMigratableResourcesAsync(LocationName, string, int?, CallSettings)
            // Create client
            MigrationServiceClient migrationServiceClient = await MigrationServiceClient.CreateAsync();
            // Initialize request argument(s)
            LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]");
            // Make the request
            PagedAsyncEnumerable<SearchMigratableResourcesResponse, MigratableResource> response = migrationServiceClient.SearchMigratableResourcesAsync(parent);

            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((MigratableResource item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });

            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((SearchMigratableResourcesResponse page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (MigratableResource item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<MigratableResource> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (MigratableResource item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for BatchMigrateResources</summary>
        public void BatchMigrateResourcesRequestObject()
        {
            // Snippet: BatchMigrateResources(BatchMigrateResourcesRequest, CallSettings)
            // Create client
            MigrationServiceClient migrationServiceClient = MigrationServiceClient.Create();
            // Initialize request argument(s)
            BatchMigrateResourcesRequest request = new BatchMigrateResourcesRequest
            {
                ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
                MigrateResourceRequests =
                {
                    new MigrateResourceRequest(),
                },
            };
            // Make the request
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> response = migrationServiceClient.BatchMigrateResources(request);

            // Poll until the returned long-running operation is complete
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            BatchMigrateResourcesResponse result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> retrievedResponse = migrationServiceClient.PollOnceBatchMigrateResources(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                BatchMigrateResourcesResponse retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for BatchMigrateResourcesAsync</summary>
        public async Task BatchMigrateResourcesRequestObjectAsync()
        {
            // Snippet: BatchMigrateResourcesAsync(BatchMigrateResourcesRequest, CallSettings)
            // Additional: BatchMigrateResourcesAsync(BatchMigrateResourcesRequest, CancellationToken)
            // Create client
            MigrationServiceClient migrationServiceClient = await MigrationServiceClient.CreateAsync();
            // Initialize request argument(s)
            BatchMigrateResourcesRequest request = new BatchMigrateResourcesRequest
            {
                ParentAsLocationName = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"),
                MigrateResourceRequests =
                {
                    new MigrateResourceRequest(),
                },
            };
            // Make the request
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> response = await migrationServiceClient.BatchMigrateResourcesAsync(request);

            // Poll until the returned long-running operation is complete
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            BatchMigrateResourcesResponse result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> retrievedResponse = await migrationServiceClient.PollOnceBatchMigrateResourcesAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                BatchMigrateResourcesResponse retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for BatchMigrateResources</summary>
        public void BatchMigrateResources()
        {
            // Snippet: BatchMigrateResources(string, IEnumerable<MigrateResourceRequest>, CallSettings)
            // Create client
            MigrationServiceClient migrationServiceClient = MigrationServiceClient.Create();
            // Initialize request argument(s)
            string parent = "projects/[PROJECT]/locations/[LOCATION]";
            IEnumerable<MigrateResourceRequest> migrateResourceRequests = new MigrateResourceRequest[]
            {
                new MigrateResourceRequest(),
            };
            // Make the request
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> response = migrationServiceClient.BatchMigrateResources(parent, migrateResourceRequests);

            // Poll until the returned long-running operation is complete
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            BatchMigrateResourcesResponse result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> retrievedResponse = migrationServiceClient.PollOnceBatchMigrateResources(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                BatchMigrateResourcesResponse retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for BatchMigrateResourcesAsync</summary>
        public async Task BatchMigrateResourcesAsync()
        {
            // Snippet: BatchMigrateResourcesAsync(string, IEnumerable<MigrateResourceRequest>, CallSettings)
            // Additional: BatchMigrateResourcesAsync(string, IEnumerable<MigrateResourceRequest>, CancellationToken)
            // Create client
            MigrationServiceClient migrationServiceClient = await MigrationServiceClient.CreateAsync();
            // Initialize request argument(s)
            string parent = "projects/[PROJECT]/locations/[LOCATION]";
            IEnumerable<MigrateResourceRequest> migrateResourceRequests = new MigrateResourceRequest[]
            {
                new MigrateResourceRequest(),
            };
            // Make the request
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> response = await migrationServiceClient.BatchMigrateResourcesAsync(parent, migrateResourceRequests);

            // Poll until the returned long-running operation is complete
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            BatchMigrateResourcesResponse result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> retrievedResponse = await migrationServiceClient.PollOnceBatchMigrateResourcesAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                BatchMigrateResourcesResponse retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for BatchMigrateResources</summary>
        public void BatchMigrateResourcesResourceNames()
        {
            // Snippet: BatchMigrateResources(LocationName, IEnumerable<MigrateResourceRequest>, CallSettings)
            // Create client
            MigrationServiceClient migrationServiceClient = MigrationServiceClient.Create();
            // Initialize request argument(s)
            LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]");
            IEnumerable<MigrateResourceRequest> migrateResourceRequests = new MigrateResourceRequest[]
            {
                new MigrateResourceRequest(),
            };
            // Make the request
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> response = migrationServiceClient.BatchMigrateResources(parent, migrateResourceRequests);

            // Poll until the returned long-running operation is complete
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            BatchMigrateResourcesResponse result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> retrievedResponse = migrationServiceClient.PollOnceBatchMigrateResources(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                BatchMigrateResourcesResponse retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for BatchMigrateResourcesAsync</summary>
        public async Task BatchMigrateResourcesResourceNamesAsync()
        {
            // Snippet: BatchMigrateResourcesAsync(LocationName, IEnumerable<MigrateResourceRequest>, CallSettings)
            // Additional: BatchMigrateResourcesAsync(LocationName, IEnumerable<MigrateResourceRequest>, CancellationToken)
            // Create client
            MigrationServiceClient migrationServiceClient = await MigrationServiceClient.CreateAsync();
            // Initialize request argument(s)
            LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]");
            IEnumerable<MigrateResourceRequest> migrateResourceRequests = new MigrateResourceRequest[]
            {
                new MigrateResourceRequest(),
            };
            // Make the request
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> response = await migrationServiceClient.BatchMigrateResourcesAsync(parent, migrateResourceRequests);

            // Poll until the returned long-running operation is complete
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            BatchMigrateResourcesResponse result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            Operation<BatchMigrateResourcesResponse, BatchMigrateResourcesOperationMetadata> retrievedResponse = await migrationServiceClient.PollOnceBatchMigrateResourcesAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                BatchMigrateResourcesResponse retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }
    }
}
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Orleans.Messaging;
using Orleans.Serialization;
using Microsoft.Extensions.Options;
using Orleans.Configuration;
using Orleans.Hosting;

namespace Orleans.Runtime.Messaging
{
    /// <summary>
    /// Central hub for silo messaging: owns the inbound/outbound message queues,
    /// the incoming-message acceptor (silo-to-silo socket listener), and — when this
    /// silo exposes a gateway address — the client-facing <see cref="Gateway"/>.
    /// Also supports delivery to a hosted client and a "fast stop" mode that blocks
    /// application messages during shutdown.
    /// </summary>
    internal class MessageCenter : ISiloMessageCenter, IDisposable
    {
        // Client-facing gateway; null when this silo has no gateway address or after StopAcceptingClientMessages.
        private Gateway Gateway { get; set; }

        // Accepts silo-to-silo connections; created in Initialize, disposed/nulled in Dispose.
        private IncomingMessageAcceptor ima;
        private readonly ILogger log;

        // Optional redirect for SendMessage during rerouting; settable once via RerouteHandler.
        private Action<Message> rerouteHandler;
        internal Func<Message, bool> ShouldDrop;

        // Optional in-process client; set via SetHostedClient.
        private IHostedClient hostedClient;

        // These counters are registered with the statistics system and read through it,
        // not through these fields directly — hence the "not accessed" suppression.
        // ReSharper disable NotAccessedField.Local
        private IntValueStatistic sendQueueLengthCounter;
        private IntValueStatistic receiveQueueLengthCounter;
        // ReSharper restore NotAccessedField.Local

        internal IOutboundMessageQueue OutboundQueue { get; set; }
        internal IInboundMessageQueue InboundQueue { get; set; }
        internal SocketManager SocketManager;
        private readonly SerializationManager serializationManager;
        private readonly MessageFactory messageFactory;
        private readonly ILoggerFactory loggerFactory;
        private readonly ExecutorService executorService;

        // Indexed by (int)Message.Categories; a non-null entry short-circuits InboundQueue in TrySendLocal.
        private readonly Action<Message>[] localMessageHandlers;
        private SiloMessagingOptions messagingOptions;

        /// <summary>True once "fast stop" has engaged; see <see cref="BlockApplicationMessages"/>.</summary>
        internal bool IsBlockingApplicationMessages { get; private set; }

        public void SetHostedClient(IHostedClient client) => this.hostedClient = client;

        /// <summary>True if this silo can deliver messages to clients (gateway or hosted client).</summary>
        public bool IsProxying => this.Gateway != null || this.hostedClient?.ClientId != null;

        /// <summary>
        /// Attempts to hand a client-targeted message to the gateway first, then the hosted client.
        /// Returns false for non-client targets or when neither proxy accepts the message.
        /// </summary>
        public bool TryDeliverToProxy(Message msg)
        {
            if (!msg.TargetGrain.IsClient) return false;
            if (this.Gateway != null && this.Gateway.TryDeliverToProxy(msg)) return true;
            return this.hostedClient?.TryDispatchToClient(msg) ?? false;
        }

        // This is determined by the IMA but needed by the OMS, and so is kept here in the message center itself.
        public SiloAddress MyAddress { get; private set; }

        public MessageCenter(
            ILocalSiloDetails siloDetails,
            IOptions<EndpointOptions> endpointOptions,
            IOptions<SiloMessagingOptions> messagingOptions,
            IOptions<NetworkingOptions> networkingOptions,
            SerializationManager serializationManager,
            MessageFactory messageFactory,
            Factory<MessageCenter, Gateway> gatewayFactory,
            ExecutorService executorService,
            ILoggerFactory loggerFactory,
            IOptions<StatisticsOptions> statisticsOptions)
        {
            this.messagingOptions = messagingOptions.Value;
            this.loggerFactory = loggerFactory;
            this.log = loggerFactory.CreateLogger<MessageCenter>();
            this.serializationManager = serializationManager;
            this.messageFactory = messageFactory;
            this.executorService = executorService;
            this.MyAddress = siloDetails.SiloAddress;
            this.Initialize(endpointOptions, messagingOptions, networkingOptions, statisticsOptions);
            // Only silos configured with a gateway address serve clients.
            if (siloDetails.GatewayAddress != null)
            {
                Gateway = gatewayFactory(this);
            }
            localMessageHandlers = new Action<Message>[Enum.GetValues(typeof(Message.Categories)).Length];
        }

        // Wires up sockets, the acceptor, both queues, and the queue-length statistics.
        private void Initialize(IOptions<EndpointOptions> endpointOptions, IOptions<SiloMessagingOptions> messagingOptions, IOptions<NetworkingOptions> networkingOptions, IOptions<StatisticsOptions> statisticsOptions)
        {
            if (log.IsEnabled(LogLevel.Trace)) log.Trace("Starting initialization.");

            SocketManager = new SocketManager(networkingOptions, this.loggerFactory);
            var listeningEndpoint = endpointOptions.Value.GetListeningSiloEndpoint();
            ima = new IncomingMessageAcceptor(this, listeningEndpoint, SocketDirection.SiloToSilo, this.messageFactory, this.serializationManager, this.executorService, this.loggerFactory);
            InboundQueue = new InboundMessageQueue(this.loggerFactory, statisticsOptions);
            OutboundQueue = new OutboundMessageQueue(this, messagingOptions, this.serializationManager, this.executorService, this.loggerFactory);

            sendQueueLengthCounter = IntValueStatistic.FindOrCreate(StatisticNames.MESSAGE_CENTER_SEND_QUEUE_LENGTH, () => SendQueueLength);
            receiveQueueLengthCounter = IntValueStatistic.FindOrCreate(StatisticNames.MESSAGE_CENTER_RECEIVE_QUEUE_LENGTH, () => ReceiveQueueLength);

            if (log.IsEnabled(LogLevel.Trace)) log.Trace("Completed initialization.");
        }

        /// <summary>Starts message processing: un-blocks application messages, then the acceptor and outbound queue.</summary>
        public void Start()
        {
            IsBlockingApplicationMessages = false;
            ima.Start();
            OutboundQueue.Start();
        }

        /// <summary>Starts the client gateway, if this silo has one.</summary>
        public void StartGateway(ClientObserverRegistrar clientRegistrar)
        {
            if (Gateway != null)
                Gateway.Start(clientRegistrar);
        }

        // Intentionally a no-op; part of the ISiloMessageCenter lifecycle contract.
        public void PrepareToStop()
        {
        }

        // Polls the outbound queue until all queued application messages have drained,
        // or until ShutdownRerouteTimeout elapses — whichever comes first.
        private void WaitToRerouteAllQueuedMessages()
        {
            DateTime maxWaitTime = DateTime.UtcNow + this.messagingOptions.ShutdownRerouteTimeout;
            while (DateTime.UtcNow < maxWaitTime)
            {
                var applicationMessageQueueLength = this.OutboundQueue.GetApplicationMessageCount();
                if (applicationMessageQueueLength == 0)
                    break;
                Thread.Sleep(100);
            }
        }

        /// <summary>
        /// Stops messaging in order: block app messages, stop the acceptor, stop client
        /// traffic, drain/stop the outbound queue, then close sockets. Each step's failure
        /// is logged but does not prevent the following steps from running.
        /// </summary>
        public void Stop()
        {
            IsBlockingApplicationMessages = true;
            try
            {
                ima.Stop();
            }
            catch (Exception exc)
            {
                log.Error(ErrorCode.Runtime_Error_100108, "Stop failed.", exc);
            }

            StopAcceptingClientMessages();

            try
            {
                WaitToRerouteAllQueuedMessages();
                OutboundQueue.Stop();
            }
            catch (Exception exc)
            {
                log.Error(ErrorCode.Runtime_Error_100110, "Stop failed.", exc);
            }

            try
            {
                SocketManager.Stop();
            }
            catch (Exception exc)
            {
                log.Error(ErrorCode.Runtime_Error_100111, "Stop failed.", exc);
            }
        }

        /// <summary>Stops and discards the gateway so no further client messages are accepted. Idempotent.</summary>
        public void StopAcceptingClientMessages()
        {
            if (log.IsEnabled(LogLevel.Debug)) log.Debug("StopClientMessages");
            if (Gateway == null) return;

            try
            {
                Gateway.Stop();
            }
            catch (Exception exc)
            {
                log.Error(ErrorCode.Runtime_Error_100109, "Stop failed.", exc);
            }
            Gateway = null;
        }

        /// <summary>Write-once hook consulted by <see cref="RerouteMessage"/>; a second set throws.</summary>
        public Action<Message> RerouteHandler
        {
            set
            {
                if (rerouteHandler != null)
                    throw new InvalidOperationException("MessageCenter RerouteHandler already set");
                rerouteHandler = value;
            }
        }

        /// <summary>Routes via the reroute handler when one is installed, otherwise sends normally.</summary>
        public void RerouteMessage(Message message)
        {
            if (rerouteHandler != null)
                rerouteHandler(message);
            else
                SendMessage(message);
        }

        // Passthrough to the acceptor's message-sniffing hook (diagnostics/testing).
        public Action<Message> SniffIncomingMessage
        {
            set
            {
                ima.SniffIncomingMessage = value;
            }
        }

        public Func<SiloAddress, bool> SiloDeadOracle { get; set; }

        /// <summary>
        /// Enqueues a message for sending, stamping this silo as the sender when unset.
        /// While application messages are blocked, non-rejection application messages are
        /// silently dropped — except those targeting the membership table grain.
        /// </summary>
        public void SendMessage(Message msg)
        {
            // Note that if we identify or add other grains that are required for proper stopping, we will need to treat them as we do the membership table grain here.
            if (IsBlockingApplicationMessages && (msg.Category == Message.Categories.Application) && (msg.Result != Message.ResponseTypes.Rejection)
                && !Constants.SystemMembershipTableId.Equals(msg.TargetGrain))
            {
                // Drop the message on the floor if it's an application message that isn't a rejection
            }
            else
            {
                if (msg.SendingSilo == null)
                    msg.SendingSilo = MyAddress;
                OutboundQueue.SendMessage(msg);
            }
        }

        /// <summary>
        /// If the message targets this silo, delivers it locally (registered category
        /// handler first, otherwise the inbound queue) and returns true; otherwise false.
        /// </summary>
        public bool TrySendLocal(Message message)
        {
            if (!message.TargetSilo.Equals(MyAddress))
            {
                return false;
            }

            if (log.IsEnabled(LogLevel.Trace)) log.Trace("Message has been looped back to this silo: {0}", message);
            MessagingStatisticsGroup.LocalMessagesSent.Increment();
            var localHandler = localMessageHandlers[(int) message.Category];
            if (localHandler != null)
            {
                localHandler(message);
            }
            else
            {
                InboundQueue.PostMessage(message);
            }

            return true;
        }

        /// <summary>Creates a rejection response for <paramref name="msg"/> and posts it to the local inbound queue.</summary>
        internal void SendRejection(Message msg, Message.RejectionTypes rejectionType, string reason)
        {
            MessagingStatisticsGroup.OnRejectedMessage(msg);
            if (string.IsNullOrEmpty(reason)) reason = string.Format("Rejection from silo {0} - Unknown reason.", MyAddress);
            Message error = this.messageFactory.CreateRejectionResponse(msg, rejectionType, reason);
            // rejection msgs are always originated in the local silo, they are never remote.
            InboundQueue.PostMessage(error);
        }

        /// <summary>Blocks until a message of the given category is available (or cancellation).</summary>
        public Message WaitMessage(Message.Categories type, CancellationToken ct)
        {
            return InboundQueue.WaitMessage(type, ct);
        }

        /// <summary>Registers a handler that intercepts locally-delivered messages of the given category.</summary>
        public void RegisterLocalMessageHandler(Message.Categories category, Action<Message> handler)
        {
            localMessageHandlers[(int) category] = handler;
        }

        public void Dispose()
        {
            if (ima != null)
            {
                ima.Dispose();
                ima = null;
            }

            InboundQueue?.Dispose();
            OutboundQueue?.Dispose();

            GC.SuppressFinalize(this);
        }

        public int SendQueueLength { get { return OutboundQueue.GetCount(); } }

        public int ReceiveQueueLength { get { return InboundQueue.Count; } }

        /// <summary>
        /// Indicates that application messages should be blocked from being sent or received.
        /// This method is used by the "fast stop" process.
        /// <para>
        /// Specifically, all outbound application messages are dropped, except for rejections and messages to the membership table grain.
        /// Inbound application requests are rejected, and other inbound application messages are dropped.
        /// </para>
        /// </summary>
        public void BlockApplicationMessages()
        {
            if(log.IsEnabled(LogLevel.Debug)) log.Debug("BlockApplicationMessages");
            IsBlockingApplicationMessages = true;
        }
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------

namespace System.Runtime.Serialization.Json
{
    using System;
    using System.Collections.Generic;
    using System.Runtime;
    using System.Security;
    using System.ServiceModel;
    using System.Text;
    using System.Xml;

    /// <summary>
    /// Deserialization context for <see cref="DataContractJsonSerializer"/>: adapts the
    /// complex (surrogate/extension-data-aware) read context to JSON conventions, e.g.
    /// JSON "type" attributes, server-type ("__type") hints, and JSON member-name lookup.
    /// </summary>
#if USE_REFEMIT
    public class XmlObjectSerializerReadContextComplexJson : XmlObjectSerializerReadContextComplex
#else
    class XmlObjectSerializerReadContextComplexJson : XmlObjectSerializerReadContextComplex
#endif
    {
        // Value of the JSON "type" attribute on the element currently being read as extension data.
        string extensionDataValueType;
        DateTimeFormat dateTimeFormat;
        bool useSimpleDictionaryFormat;

        public XmlObjectSerializerReadContextComplexJson(DataContractJsonSerializer serializer, DataContract rootTypeDataContract)
            : base(serializer, serializer.MaxItemsInObjectGraph, new StreamingContext(StreamingContextStates.All), serializer.IgnoreExtensionDataObject)
        {
            this.rootTypeDataContract = rootTypeDataContract;
            this.serializerKnownTypeList = serializer.knownTypeList;
            this.dataContractSurrogate = serializer.DataContractSurrogate;
            this.dateTimeFormat = serializer.DateTimeFormat;
            this.useSimpleDictionaryFormat = serializer.UseSimpleDictionaryFormat;
        }

        internal IList<Type> SerializerKnownTypeList
        {
            get { return this.serializerKnownTypeList; }
        }

        public bool UseSimpleDictionaryFormat
        {
            get { return this.useSimpleDictionaryFormat; }
        }

        // Captures the JSON "type" attribute before an extension-data value is read.
        protected override void StartReadExtensionDataValue(XmlReaderDelegator xmlReader)
        {
            extensionDataValueType = xmlReader.GetAttribute(JsonGlobals.typeString);
        }

        /// <summary>
        /// Reads a primitive JSON extension-data value based on the previously captured
        /// "type" attribute (string when absent, boolean, or number); any other type
        /// attribute value is a serialization error.
        /// </summary>
        protected override IDataNode ReadPrimitiveExtensionDataValue(XmlReaderDelegator xmlReader, string dataContractName, string dataContractNamespace)
        {
            IDataNode dataNode;

            switch (extensionDataValueType)
            {
                case null:
                case JsonGlobals.stringString:
                    dataNode = new DataNode<string>(xmlReader.ReadContentAsString());
                    break;
                case JsonGlobals.booleanString:
                    dataNode = new DataNode<bool>(xmlReader.ReadContentAsBoolean());
                    break;
                case JsonGlobals.numberString:
                    dataNode = ReadNumericalPrimitiveExtensionDataValue(xmlReader);
                    break;
                default:
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                        XmlObjectSerializer.CreateSerializationException(SR.GetString(SR.JsonUnexpectedAttributeValue, extensionDataValueType)));
            }

            xmlReader.ReadEndElement();
            return dataNode;
        }

        // Parses a JSON number into the narrowest numeric CLR type and wraps it in a matching DataNode<T>.
        IDataNode ReadNumericalPrimitiveExtensionDataValue(XmlReaderDelegator xmlReader)
        {
            TypeCode type;
            object numericalValue = JsonObjectDataContract.ParseJsonNumber(xmlReader.ReadContentAsString(), out type);
            switch (type)
            {
                case TypeCode.Byte:
                    return new DataNode<byte>((byte)numericalValue);
                case TypeCode.SByte:
                    return new DataNode<sbyte>((sbyte)numericalValue);
                case TypeCode.Int16:
                    return new DataNode<short>((short)numericalValue);
                case TypeCode.Int32:
                    return new DataNode<int>((int)numericalValue);
                case TypeCode.Int64:
                    return new DataNode<long>((long)numericalValue);
                case TypeCode.UInt16:
                    return new DataNode<ushort>((ushort)numericalValue);
                case TypeCode.UInt32:
                    return new DataNode<uint>((uint)numericalValue);
                case TypeCode.UInt64:
                    return new DataNode<ulong>((ulong)numericalValue);
                case TypeCode.Single:
                    return new DataNode<float>((float)numericalValue);
                case TypeCode.Double:
                    return new DataNode<double>((double)numericalValue);
                case TypeCode.Decimal:
                    return new DataNode<decimal>((decimal)numericalValue);
                default:
                    throw Fx.AssertAndThrow("JsonObjectDataContract.ParseJsonNumber shouldn't return a TypeCode that we're not expecting");
            }
        }

        internal static XmlObjectSerializerReadContextComplexJson CreateContext(DataContractJsonSerializer serializer, DataContract rootTypeDataContract)
        {
            return new XmlObjectSerializerReadContextComplexJson(serializer, rootTypeDataContract);
        }

        // JSON arrays do not carry a size hint; -1 tells callers the size is unknown.
#if USE_REFEMIT
        public override int GetArraySize()
#else
        internal override int GetArraySize()
#endif
        {
            return -1;
        }

        protected override object ReadDataContractValue(DataContract dataContract, XmlReaderDelegator reader)
        {
            return DataContractJsonSerializer.ReadJsonValue(dataContract, reader, this);
        }

        /// <summary>
        /// Maps JSON attributes onto the xsi:nil / xsi:type attribute model: a "type"
        /// attribute of "null" marks nil, and a server-type hint ("__type") is parsed
        /// into type name and namespace, expanding the '#' namespace shorthand and
        /// unescaping a leading '\#' or '\\'.
        /// </summary>
#if USE_REFEMIT
        public override void ReadAttributes(XmlReaderDelegator xmlReader)
#else
        internal override void ReadAttributes(XmlReaderDelegator xmlReader)
#endif
        {
            if (attributes == null)
                attributes = new Attributes();
            attributes.Reset();
            if (xmlReader.MoveToAttribute(JsonGlobals.typeString) && xmlReader.Value == JsonGlobals.nullString)
            {
                attributes.XsiNil = true;
            }
            else if (xmlReader.MoveToAttribute(JsonGlobals.serverTypeString))
            {
                XmlQualifiedName qualifiedTypeName = JsonReaderDelegator.ParseQualifiedName(xmlReader.Value);
                attributes.XsiTypeName = qualifiedTypeName.Name;

                string serverTypeNamespace = qualifiedTypeName.Namespace;
                if (!string.IsNullOrEmpty(serverTypeNamespace))
                {
                    switch (serverTypeNamespace[0])
                    {
                        case '#':
                            // '#' abbreviates the data contract XSD base namespace.
                            serverTypeNamespace = string.Concat(Globals.DataContractXsdBaseNamespace, serverTypeNamespace.Substring(1));
                            break;
                        case '\\':
                            // '\#' and '\\' are escapes for a literal leading '#' or '\'.
                            if (serverTypeNamespace.Length >= 2)
                            {
                                switch (serverTypeNamespace[1])
                                {
                                    case '#':
                                    case '\\':
                                        serverTypeNamespace = serverTypeNamespace.Substring(1);
                                        break;
                                    default:
                                        break;
                                }
                            }
                            break;
                        default:
                            break;
                    }
                }
                attributes.XsiTypeNamespace = serverTypeNamespace;
            }
            xmlReader.MoveToElement();
        }

        /// <summary>
        /// Finds the index of the member the reader is positioned on, scanning circularly
        /// starting just after <paramref name="memberIndex"/> (members usually arrive in
        /// declaration order). Falls back to matching the JSON local name, then records
        /// the member as not-found (extension data) and returns <c>memberNames.Length</c>.
        /// </summary>
        public int GetJsonMemberIndex(XmlReaderDelegator xmlReader, XmlDictionaryString[] memberNames, int memberIndex, ExtensionDataObject extensionData)
        {
            int length = memberNames.Length;
            if (length != 0)
            {
                for (int i = 0, index = (memberIndex + 1) % length; i < length; i++, index = (index + 1) % length)
                {
                    if (xmlReader.IsStartElement(memberNames[index], XmlDictionaryString.Empty))
                    {
                        return index;
                    }
                }
                string name;
                if (TryGetJsonLocalName(xmlReader, out name))
                {
                    for (int i = 0, index = (memberIndex + 1) % length; i < length; i++, index = (index + 1) % length)
                    {
                        if (memberNames[index].Value == name)
                        {
                            return index;
                        }
                    }
                }
            }
            HandleMemberNotFound(xmlReader, extensionData, memberIndex);
            return length;
        }

        // For members encoded as <item item="name">, extracts the real member name from the "item" attribute.
        internal static bool TryGetJsonLocalName(XmlReaderDelegator xmlReader, out string name)
        {
            if (xmlReader.IsStartElement(JsonGlobals.itemDictionaryString, JsonGlobals.itemDictionaryString))
            {
                if (xmlReader.MoveToAttribute(JsonGlobals.itemString))
                {
                    name = xmlReader.Value;
                    return true;
                }
            }
            name = null;
            return false;
        }

        /// <summary>Returns the JSON member name at the reader's position, preferring the "item" attribute form.</summary>
        public static string GetJsonMemberName(XmlReaderDelegator xmlReader)
        {
            string name;
            if (!TryGetJsonLocalName(xmlReader, out name))
            {
                name = xmlReader.LocalName;
            }
            return name;
        }

        /// <summary>
        /// Throws a SerializationException listing every member that was expected and
        /// required but absent from the input.
        /// </summary>
        public static void ThrowMissingRequiredMembers(object obj, XmlDictionaryString[] memberNames, byte[] expectedElements, byte[] requiredElements)
        {
            StringBuilder stringBuilder = new StringBuilder();
            int missingMembersCount = 0;
            for (int i = 0; i < memberNames.Length; i++)
            {
                if (IsBitSet(expectedElements, i) && IsBitSet(requiredElements, i))
                {
                    if (stringBuilder.Length != 0)
                        stringBuilder.Append(", ");
                    stringBuilder.Append(memberNames[i]);
                    missingMembersCount++;
                }
            }

            if (missingMembersCount == 1)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SerializationException(SR.GetString(
                 SR.JsonOneRequiredMemberNotFound, DataContract.GetClrTypeFullName(obj.GetType()), stringBuilder.ToString())));
            }
            else
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SerializationException(SR.GetString(
                    SR.JsonRequiredMembersNotFound, DataContract.GetClrTypeFullName(obj.GetType()), stringBuilder.ToString())));
            }
        }

        public static void ThrowDuplicateMemberException(object obj, XmlDictionaryString[] memberNames, int memberIndex)
        {
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SerializationException(
                SR.GetString(SR.JsonDuplicateMemberInInput, DataContract.GetClrTypeFullName(obj.GetType()), memberNames[memberIndex])));
        }

        [Fx.Tag.SecurityNote(Critical = "Accesses SecurityCritical helper class 'BitFlagsGenerator'.",
            Safe = "This method is safe to call.")]
        [SecuritySafeCritical]
        static bool IsBitSet(byte[] bytes, int bitIndex)
        {
            return BitFlagsGenerator.IsBitSet(bytes, bitIndex);
        }

        // JSON distinguishes collection vs. class extension data by the "type" attribute value.
        protected override bool IsReadingCollectionExtensionData(XmlReaderDelegator xmlReader)
        {
            return xmlReader.GetAttribute(JsonGlobals.typeString) == JsonGlobals.arrayString;
        }

        protected override bool IsReadingClassExtensionData(XmlReaderDelegator xmlReader)
        {
            return xmlReader.GetAttribute(JsonGlobals.typeString) == JsonGlobals.objectString;
        }

        protected override XmlReaderDelegator CreateReaderDelegatorForReader(XmlReader xmlReader)
        {
            return new JsonReaderDelegator(xmlReader, this.dateTimeFormat);
        }

        // The three GetDataContract* overrides add a JSON-specific check that the
        // resolved contract is not a reference-preserving (IsReference) type,
        // which the JSON serializer does not support.
        internal override DataContract GetDataContract(RuntimeTypeHandle typeHandle, Type type)
        {
            DataContract dataContract = base.GetDataContract(typeHandle, type);
            DataContractJsonSerializer.CheckIfTypeIsReference(dataContract);
            return dataContract;
        }

        internal override DataContract GetDataContractSkipValidation(int typeId, RuntimeTypeHandle typeHandle, Type type)
        {
            DataContract dataContract = base.GetDataContractSkipValidation(typeId, typeHandle, type);
            DataContractJsonSerializer.CheckIfTypeIsReference(dataContract);
            return dataContract;
        }

        internal override DataContract GetDataContract(int id, RuntimeTypeHandle typeHandle)
        {
            DataContract dataContract = base.GetDataContract(id, typeHandle);
            DataContractJsonSerializer.CheckIfTypeIsReference(dataContract);
            return dataContract;
        }

        protected override DataContract ResolveDataContractFromRootDataContract(XmlQualifiedName typeQName)
        {
            return XmlObjectSerializerWriteContextComplexJson.ResolveJsonDataContractFromRootDataContract(this, typeQName, rootTypeDataContract);
        }
    }
}
/* Generated SBE (Simple Binary Encoding) message codec */ using System; using System.Text; using System.Collections.Generic; using Adaptive.Agrona; namespace Adaptive.Cluster.Codecs { public class NewLeadershipTermDecoder { public const ushort BLOCK_LENGTH = 88; public const ushort TEMPLATE_ID = 53; public const ushort SCHEMA_ID = 111; public const ushort SCHEMA_VERSION = 7; private NewLeadershipTermDecoder _parentMessage; private IDirectBuffer _buffer; protected int _offset; protected int _limit; protected int _actingBlockLength; protected int _actingVersion; public NewLeadershipTermDecoder() { _parentMessage = this; } public ushort SbeBlockLength() { return BLOCK_LENGTH; } public ushort SbeTemplateId() { return TEMPLATE_ID; } public ushort SbeSchemaId() { return SCHEMA_ID; } public ushort SbeSchemaVersion() { return SCHEMA_VERSION; } public string SbeSemanticType() { return ""; } public IDirectBuffer Buffer() { return _buffer; } public int Offset() { return _offset; } public NewLeadershipTermDecoder Wrap( IDirectBuffer buffer, int offset, int actingBlockLength, int actingVersion) { this._buffer = buffer; this._offset = offset; this._actingBlockLength = actingBlockLength; this._actingVersion = actingVersion; Limit(offset + actingBlockLength); return this; } public int EncodedLength() { return _limit - _offset; } public int Limit() { return _limit; } public void Limit(int limit) { this._limit = limit; } public static int LogLeadershipTermIdId() { return 1; } public static int LogLeadershipTermIdSinceVersion() { return 0; } public static int LogLeadershipTermIdEncodingOffset() { return 0; } public static int LogLeadershipTermIdEncodingLength() { return 8; } public static string LogLeadershipTermIdMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } 
public static long LogLeadershipTermIdNullValue() { return -9223372036854775808L; } public static long LogLeadershipTermIdMinValue() { return -9223372036854775807L; } public static long LogLeadershipTermIdMaxValue() { return 9223372036854775807L; } public long LogLeadershipTermId() { return _buffer.GetLong(_offset + 0, ByteOrder.LittleEndian); } public static int NextLeadershipTermIdId() { return 2; } public static int NextLeadershipTermIdSinceVersion() { return 0; } public static int NextLeadershipTermIdEncodingOffset() { return 8; } public static int NextLeadershipTermIdEncodingLength() { return 8; } public static string NextLeadershipTermIdMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static long NextLeadershipTermIdNullValue() { return -9223372036854775808L; } public static long NextLeadershipTermIdMinValue() { return -9223372036854775807L; } public static long NextLeadershipTermIdMaxValue() { return 9223372036854775807L; } public long NextLeadershipTermId() { return _buffer.GetLong(_offset + 8, ByteOrder.LittleEndian); } public static int NextTermBaseLogPositionId() { return 3; } public static int NextTermBaseLogPositionSinceVersion() { return 0; } public static int NextTermBaseLogPositionEncodingOffset() { return 16; } public static int NextTermBaseLogPositionEncodingLength() { return 8; } public static string NextTermBaseLogPositionMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static long NextTermBaseLogPositionNullValue() { return -9223372036854775808L; } public static long 
NextTermBaseLogPositionMinValue() { return -9223372036854775807L; } public static long NextTermBaseLogPositionMaxValue() { return 9223372036854775807L; } public long NextTermBaseLogPosition() { return _buffer.GetLong(_offset + 16, ByteOrder.LittleEndian); } public static int NextLogPositionId() { return 4; } public static int NextLogPositionSinceVersion() { return 0; } public static int NextLogPositionEncodingOffset() { return 24; } public static int NextLogPositionEncodingLength() { return 8; } public static string NextLogPositionMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static long NextLogPositionNullValue() { return -9223372036854775808L; } public static long NextLogPositionMinValue() { return -9223372036854775807L; } public static long NextLogPositionMaxValue() { return 9223372036854775807L; } public long NextLogPosition() { return _buffer.GetLong(_offset + 24, ByteOrder.LittleEndian); } public static int LeadershipTermIdId() { return 5; } public static int LeadershipTermIdSinceVersion() { return 0; } public static int LeadershipTermIdEncodingOffset() { return 32; } public static int LeadershipTermIdEncodingLength() { return 8; } public static string LeadershipTermIdMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static long LeadershipTermIdNullValue() { return -9223372036854775808L; } public static long LeadershipTermIdMinValue() { return -9223372036854775807L; } public static long LeadershipTermIdMaxValue() { return 9223372036854775807L; } public long LeadershipTermId() { return _buffer.GetLong(_offset + 
32, ByteOrder.LittleEndian); } public static int TermBaseLogPositionId() { return 6; } public static int TermBaseLogPositionSinceVersion() { return 0; } public static int TermBaseLogPositionEncodingOffset() { return 40; } public static int TermBaseLogPositionEncodingLength() { return 8; } public static string TermBaseLogPositionMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static long TermBaseLogPositionNullValue() { return -9223372036854775808L; } public static long TermBaseLogPositionMinValue() { return -9223372036854775807L; } public static long TermBaseLogPositionMaxValue() { return 9223372036854775807L; } public long TermBaseLogPosition() { return _buffer.GetLong(_offset + 40, ByteOrder.LittleEndian); } public static int LogPositionId() { return 7; } public static int LogPositionSinceVersion() { return 0; } public static int LogPositionEncodingOffset() { return 48; } public static int LogPositionEncodingLength() { return 8; } public static string LogPositionMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static long LogPositionNullValue() { return -9223372036854775808L; } public static long LogPositionMinValue() { return -9223372036854775807L; } public static long LogPositionMaxValue() { return 9223372036854775807L; } public long LogPosition() { return _buffer.GetLong(_offset + 48, ByteOrder.LittleEndian); } public static int LeaderRecordingIdId() { return 8; } public static int LeaderRecordingIdSinceVersion() { return 0; } public static int LeaderRecordingIdEncodingOffset() { return 56; } public static int 
LeaderRecordingIdEncodingLength() { return 8; } public static string LeaderRecordingIdMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static long LeaderRecordingIdNullValue() { return -9223372036854775808L; } public static long LeaderRecordingIdMinValue() { return -9223372036854775807L; } public static long LeaderRecordingIdMaxValue() { return 9223372036854775807L; } public long LeaderRecordingId() { return _buffer.GetLong(_offset + 56, ByteOrder.LittleEndian); } public static int TimestampId() { return 9; } public static int TimestampSinceVersion() { return 0; } public static int TimestampEncodingOffset() { return 64; } public static int TimestampEncodingLength() { return 8; } public static string TimestampMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static long TimestampNullValue() { return -9223372036854775808L; } public static long TimestampMinValue() { return -9223372036854775807L; } public static long TimestampMaxValue() { return 9223372036854775807L; } public long Timestamp() { return _buffer.GetLong(_offset + 64, ByteOrder.LittleEndian); } public static int LeaderMemberIdId() { return 10; } public static int LeaderMemberIdSinceVersion() { return 0; } public static int LeaderMemberIdEncodingOffset() { return 72; } public static int LeaderMemberIdEncodingLength() { return 4; } public static string LeaderMemberIdMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: 
return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static int LeaderMemberIdNullValue() { return -2147483648; } public static int LeaderMemberIdMinValue() { return -2147483647; } public static int LeaderMemberIdMaxValue() { return 2147483647; } public int LeaderMemberId() { return _buffer.GetInt(_offset + 72, ByteOrder.LittleEndian); } public static int LogSessionIdId() { return 11; } public static int LogSessionIdSinceVersion() { return 0; } public static int LogSessionIdEncodingOffset() { return 76; } public static int LogSessionIdEncodingLength() { return 4; } public static string LogSessionIdMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "required"; } return ""; } public static int LogSessionIdNullValue() { return -2147483648; } public static int LogSessionIdMinValue() { return -2147483647; } public static int LogSessionIdMaxValue() { return 2147483647; } public int LogSessionId() { return _buffer.GetInt(_offset + 76, ByteOrder.LittleEndian); } public static int AppVersionId() { return 12; } public static int AppVersionSinceVersion() { return 0; } public static int AppVersionEncodingOffset() { return 80; } public static int AppVersionEncodingLength() { return 4; } public static string AppVersionMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.EPOCH: return "unix"; case MetaAttribute.TIME_UNIT: return "nanosecond"; case MetaAttribute.SEMANTIC_TYPE: return ""; case MetaAttribute.PRESENCE: return "optional"; } return ""; } public static int AppVersionNullValue() { return 0; } public static int AppVersionMinValue() { return 1; } public static int AppVersionMaxValue() { return 16777215; } public int AppVersion() { return _buffer.GetInt(_offset + 80, ByteOrder.LittleEndian); } public static int 
IsStartupId() { return 13; } // SBE field id of isStartup (method modifiers/return type precede this chunk)

/// <summary>Schema version in which the isStartup field was introduced (0 = always present).</summary>
public static int IsStartupSinceVersion() { return 0; }

/// <summary>Byte offset of the isStartup field within the fixed-length block.</summary>
public static int IsStartupEncodingOffset() { return 84; }

/// <summary>Encoded length in bytes of the isStartup field.</summary>
public static int IsStartupEncodingLength() { return 4; }

/// <summary>Returns the schema meta-attribute text for the isStartup field.</summary>
public static string IsStartupMetaAttribute(MetaAttribute metaAttribute)
{
    switch (metaAttribute)
    {
        case MetaAttribute.EPOCH: return "unix";
        case MetaAttribute.TIME_UNIT: return "nanosecond";
        case MetaAttribute.SEMANTIC_TYPE: return "";
        case MetaAttribute.PRESENCE: return "required";
    }

    return "";
}

/// <summary>Decodes the isStartup flag: a little-endian int32 at block offset 84, cast to BooleanType.</summary>
public BooleanType IsStartup()
{
    return (BooleanType)_buffer.GetInt(_offset + 84, ByteOrder.LittleEndian);
}

/// <summary>Renders the whole decoded message as a human-readable string.</summary>
public override string ToString()
{
    return AppendTo(new StringBuilder(100)).ToString();
}

/// <summary>
/// Appends a human-readable rendering of every field to <paramref name="builder"/>.
/// The codec limit is temporarily narrowed to the acting block length and restored before returning.
/// Mismatched acting vs. compiled schema version / block length are shown as "acting/compiled".
/// </summary>
public StringBuilder AppendTo(StringBuilder builder)
{
    int originalLimit = Limit();
    Limit(_offset + _actingBlockLength);
    builder.Append("[NewLeadershipTerm](sbeTemplateId=");
    builder.Append(TEMPLATE_ID);
    builder.Append("|sbeSchemaId=");
    builder.Append(SCHEMA_ID);
    builder.Append("|sbeSchemaVersion=");
    if (_parentMessage._actingVersion != SCHEMA_VERSION)
    {
        builder.Append(_parentMessage._actingVersion);
        builder.Append('/');
    }
    builder.Append(SCHEMA_VERSION);
    builder.Append("|sbeBlockLength=");
    if (_actingBlockLength != BLOCK_LENGTH)
    {
        builder.Append(_actingBlockLength);
        builder.Append('/');
    }
    builder.Append(BLOCK_LENGTH);
    builder.Append("):");
    // logLeadershipTermId: field id 1, int64 at offset 0
    builder.Append("LogLeadershipTermId=");
    builder.Append(LogLeadershipTermId());
    builder.Append('|');
    // nextLeadershipTermId: field id 2, int64 at offset 8
    builder.Append("NextLeadershipTermId=");
    builder.Append(NextLeadershipTermId());
    builder.Append('|');
    // nextTermBaseLogPosition: field id 3, int64 at offset 16
    builder.Append("NextTermBaseLogPosition=");
    builder.Append(NextTermBaseLogPosition());
    builder.Append('|');
    // nextLogPosition: field id 4, int64 at offset 24
    builder.Append("NextLogPosition=");
    builder.Append(NextLogPosition());
    builder.Append('|');
    // leadershipTermId: field id 5, int64 at offset 32
    builder.Append("LeadershipTermId=");
    builder.Append(LeadershipTermId());
    builder.Append('|');
    // termBaseLogPosition: field id 6, int64 at offset 40
    builder.Append("TermBaseLogPosition=");
    builder.Append(TermBaseLogPosition());
    builder.Append('|');
    // logPosition: field id 7, int64 at offset 48
    builder.Append("LogPosition=");
    builder.Append(LogPosition());
    builder.Append('|');
    // leaderRecordingId: field id 8, int64 at offset 56
    builder.Append("LeaderRecordingId=");
    builder.Append(LeaderRecordingId());
    builder.Append('|');
    // timestamp: field id 9, time_t (int64) at offset 64 — epoch time since 1 Jan 1970 UTC
    builder.Append("Timestamp=");
    builder.Append(Timestamp());
    builder.Append('|');
    // leaderMemberId: field id 10, int32 at offset 72
    builder.Append("LeaderMemberId=");
    builder.Append(LeaderMemberId());
    builder.Append('|');
    // logSessionId: field id 11, int32 at offset 76
    builder.Append("LogSessionId=");
    builder.Append(LogSessionId());
    builder.Append('|');
    // appVersion: field id 12, optional version_t (int32) at offset 80, null value 0
    builder.Append("AppVersion=");
    builder.Append(AppVersion());
    builder.Append('|');
    // isStartup: field id 13, BooleanType enum (int32) at offset 84 — last field, no trailing separator
    builder.Append("IsStartup=");
    builder.Append(IsStartup());
    Limit(originalLimit);
    return builder;
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Diagnostics;
using System.Text;

namespace System.Xml.Schema
{
    /// <summary>
    /// This structure holds components of an Xsd Duration. It is used internally to support Xsd durations without loss
    /// of fidelity. XsdDuration structures are immutable once they've been created.
    /// </summary>
    internal struct XsdDuration
    {
        // Each component is stored non-negative; the sign of the whole duration lives in
        // the high bit of _nanoseconds (see NegativeBit).
        private int _years;
        private int _months;
        private int _days;
        private int _hours;
        private int _minutes;
        private int _seconds;
        private uint _nanoseconds;          // High bit is used to indicate whether duration is negative

        private const uint NegativeBit = 0x80000000;

        // Flags recording which lexical components appeared during parsing; used by TryParse
        // to validate dayTimeDuration / yearMonthDuration restrictions.
        private enum Parts
        {
            HasNone = 0,
            HasYears = 1,
            HasMonths = 2,
            HasDays = 4,
            HasHours = 8,
            HasMinutes = 16,
            HasSeconds = 32,
        }

        public enum DurationType
        {
            Duration,
            YearMonthDuration,
            DayTimeDuration,
        };

        /// <summary>
        /// Construct an XsdDuration from component parts. All components must be non-negative;
        /// the overall sign is supplied separately via <paramref name="isNegative"/>.
        /// </summary>
        public XsdDuration(bool isNegative, int years, int months, int days, int hours, int minutes, int seconds, int nanoseconds)
        {
            if (years < 0) throw new ArgumentOutOfRangeException("years");
            if (months < 0) throw new ArgumentOutOfRangeException("months");
            if (days < 0) throw new ArgumentOutOfRangeException("days");
            if (hours < 0) throw new ArgumentOutOfRangeException("hours");
            if (minutes < 0) throw new ArgumentOutOfRangeException("minutes");
            if (seconds < 0) throw new ArgumentOutOfRangeException("seconds");
            if (nanoseconds < 0 || nanoseconds > 999999999) throw new ArgumentOutOfRangeException("nanoseconds");

            _years = years;
            _months = months;
            _days = days;
            _hours = hours;
            _minutes = minutes;
            _seconds = seconds;
            _nanoseconds = (uint)nanoseconds;

            if (isNegative)
                _nanoseconds |= NegativeBit;
        }

        /// <summary>
        /// Construct an XsdDuration from a TimeSpan value.
        /// </summary>
        public XsdDuration(TimeSpan timeSpan)
            : this(timeSpan, DurationType.Duration)
        {
        }

        /// <summary>
        /// Construct an XsdDuration from a TimeSpan value that represents an xsd:duration, an xdt:dayTimeDuration, or
        /// an xdt:yearMonthDuration.
        /// </summary>
        public XsdDuration(TimeSpan timeSpan, DurationType durationType)
        {
            long ticks = timeSpan.Ticks;
            ulong ticksPos;
            bool isNegative;

            if (ticks < 0)
            {
                // Note that (ulong) -Int64.MinValue = Int64.MaxValue + 1, which is what we want for that special case
                isNegative = true;
                ticksPos = (ulong)-ticks;
            }
            else
            {
                isNegative = false;
                ticksPos = (ulong)ticks;
            }

            if (durationType == DurationType.YearMonthDuration)
            {
                // Estimate: 365 days per year, 30 days per month.
                int years = (int)(ticksPos / ((ulong)TimeSpan.TicksPerDay * 365));
                int months = (int)((ticksPos % ((ulong)TimeSpan.TicksPerDay * 365)) / ((ulong)TimeSpan.TicksPerDay * 30));

                if (months == 12)
                {
                    // If remaining days >= 360 and < 365, then round off to year
                    years++;
                    months = 0;
                }

                this = new XsdDuration(isNegative, years, months, 0, 0, 0, 0, 0);
            }
            else
            {
                Debug.Assert(durationType == DurationType.Duration || durationType == DurationType.DayTimeDuration);

                // Tick count is expressed in 100 nanosecond intervals
                _nanoseconds = (uint)(ticksPos % 10000000) * 100;
                if (isNegative)
                    _nanoseconds |= NegativeBit;

                _years = 0;
                _months = 0;
                _days = (int)(ticksPos / (ulong)TimeSpan.TicksPerDay);
                _hours = (int)((ticksPos / (ulong)TimeSpan.TicksPerHour) % 24);
                _minutes = (int)((ticksPos / (ulong)TimeSpan.TicksPerMinute) % 60);
                _seconds = (int)((ticksPos / (ulong)TimeSpan.TicksPerSecond) % 60);
            }
        }

        /// <summary>
        /// Constructs an XsdDuration from a string in the xsd:duration format. Components are stored without loss
        /// of fidelity (except in the case of overflow).
        /// </summary>
        public XsdDuration(string s)
            : this(s, DurationType.Duration)
        {
        }

        /// <summary>
        /// Constructs an XsdDuration from a string in the xsd:duration format. Components are stored without loss
        /// of fidelity (except in the case of overflow).
        /// </summary>
        public XsdDuration(string s, DurationType durationType)
        {
            XsdDuration result;

            // Parse into a temporary, then copy its components; throws on bad format or overflow.
            Exception exception = TryParse(s, durationType, out result);
            if (exception != null)
            {
                throw exception;
            }
            _years = result.Years;
            _months = result.Months;
            _days = result.Days;
            _hours = result.Hours;
            _minutes = result.Minutes;
            _seconds = result.Seconds;
            _nanoseconds = (uint)result.Nanoseconds;
            if (result.IsNegative)
            {
                _nanoseconds |= NegativeBit;
            }
            return;
        }

        /// <summary>
        /// Return true if this duration is negative.
        /// </summary>
        public bool IsNegative
        {
            get { return (_nanoseconds & NegativeBit) != 0; }
        }

        /// <summary>
        /// Return number of years in this duration (stored in 31 bits).
        /// </summary>
        public int Years
        {
            get { return _years; }
        }

        /// <summary>
        /// Return number of months in this duration (stored in 31 bits).
        /// </summary>
        public int Months
        {
            get { return _months; }
        }

        /// <summary>
        /// Return number of days in this duration (stored in 31 bits).
        /// </summary>
        public int Days
        {
            get { return _days; }
        }

        /// <summary>
        /// Return number of hours in this duration (stored in 31 bits).
        /// </summary>
        public int Hours
        {
            get { return _hours; }
        }

        /// <summary>
        /// Return number of minutes in this duration (stored in 31 bits).
        /// </summary>
        public int Minutes
        {
            get { return _minutes; }
        }

        /// <summary>
        /// Return number of seconds in this duration (stored in 31 bits).
        /// </summary>
        public int Seconds
        {
            get { return _seconds; }
        }

        /// <summary>
        /// Return number of nanoseconds in this duration (the sign bit is masked off).
        /// </summary>
        public int Nanoseconds
        {
            get { return (int)(_nanoseconds & ~NegativeBit); }
        }

        /// <summary>
        /// Internal helper method that converts an Xsd duration to a TimeSpan value. This code uses the estimate
        /// that there are 365 days in the year and 30 days in a month.
        /// </summary>
        public TimeSpan ToTimeSpan()
        {
            return ToTimeSpan(DurationType.Duration);
        }

        /// <summary>
        /// Internal helper method that converts an Xsd duration to a TimeSpan value. This code uses the estimate
        /// that there are 365 days in the year and 30 days in a month.
        /// </summary>
        public TimeSpan ToTimeSpan(DurationType durationType)
        {
            TimeSpan result;

            Exception exception = TryToTimeSpan(durationType, out result);
            if (exception != null)
            {
                throw exception;
            }
            return result;
        }

        /// <summary>
        /// Converts this duration to a TimeSpan without throwing; on overflow the returned exception
        /// is non-null and <paramref name="result"/> is TimeSpan.MinValue.
        /// </summary>
        internal Exception TryToTimeSpan(DurationType durationType, out TimeSpan result)
        {
            Exception exception = null;
            ulong ticks = 0;

            // Throw error if result cannot fit into a long
            try
            {
                checked
                {
                    // Discard year and month parts if constructing TimeSpan for DayTimeDuration
                    if (durationType != DurationType.DayTimeDuration)
                    {
                        // Accumulate in days first: 365 days/year, 30 days/month (estimate).
                        ticks += ((ulong)_years + (ulong)_months / 12) * 365;
                        ticks += ((ulong)_months % 12) * 30;
                    }

                    // Discard day and time parts if constructing TimeSpan for YearMonthDuration
                    if (durationType != DurationType.YearMonthDuration)
                    {
                        // Fold days/hours/minutes/seconds down to seconds, then scale to ticks.
                        ticks += (ulong)_days;
                        ticks *= 24;
                        ticks += (ulong)_hours;
                        ticks *= 60;
                        ticks += (ulong)_minutes;
                        ticks *= 60;
                        ticks += (ulong)_seconds;

                        // Tick count interval is in 100 nanosecond intervals (7 digits)
                        ticks *= (ulong)TimeSpan.TicksPerSecond;
                        ticks += (ulong)Nanoseconds / 100;
                    }
                    else
                    {
                        // Multiply YearMonth duration by number of ticks per day
                        ticks *= (ulong)TimeSpan.TicksPerDay;
                    }

                    if (IsNegative)
                    {
                        // Handle special case of Int64.MaxValue + 1 before negation, since it would otherwise overflow
                        if (ticks == (ulong)Int64.MaxValue + 1)
                        {
                            result = new TimeSpan(Int64.MinValue);
                        }
                        else
                        {
                            result = new TimeSpan(-((long)ticks));
                        }
                    }
                    else
                    {
                        result = new TimeSpan((long)ticks);
                    }
                    return null;
                }
            }
            catch (OverflowException)
            {
                result = TimeSpan.MinValue;
                exception = new OverflowException(SR.Format(SR.XmlConvert_Overflow, durationType, "TimeSpan"));
            }
            return exception;
        }

        /// <summary>
        /// Return the string representation of this Xsd duration.
        /// </summary>
        public override string ToString()
        {
            return ToString(DurationType.Duration);
        }

        /// <summary>
        /// Return the string representation according to xsd:duration rules, xdt:dayTimeDuration rules, or
        /// xdt:yearMonthDuration rules.
        /// </summary>
        internal string ToString(DurationType durationType)
        {
            StringBuilder sb = new StringBuilder(20);
            int nanoseconds, digit, zeroIdx, len;

            if (IsNegative)
                sb.Append('-');

            sb.Append('P');

            if (durationType != DurationType.DayTimeDuration)
            {
                // Year and month components are omitted when zero.
                if (_years != 0)
                {
                    sb.Append(XmlConvert.ToString(_years));
                    sb.Append('Y');
                }

                if (_months != 0)
                {
                    sb.Append(XmlConvert.ToString(_months));
                    sb.Append('M');
                }
            }

            if (durationType != DurationType.YearMonthDuration)
            {
                if (_days != 0)
                {
                    sb.Append(XmlConvert.ToString(_days));
                    sb.Append('D');
                }

                // 'T' separator is emitted only if at least one time component is non-zero.
                if (_hours != 0 || _minutes != 0 || _seconds != 0 || Nanoseconds != 0)
                {
                    sb.Append('T');
                    if (_hours != 0)
                    {
                        sb.Append(XmlConvert.ToString(_hours));
                        sb.Append('H');
                    }

                    if (_minutes != 0)
                    {
                        sb.Append(XmlConvert.ToString(_minutes));
                        sb.Append('M');
                    }

                    nanoseconds = Nanoseconds;
                    if (_seconds != 0 || nanoseconds != 0)
                    {
                        sb.Append(XmlConvert.ToString(_seconds));
                        if (nanoseconds != 0)
                        {
                            sb.Append('.');

                            // Write the 9 fractional digits right-to-left, tracking the last
                            // non-zero digit so trailing zeros can be trimmed afterwards.
                            len = sb.Length;
                            sb.Length += 9;
                            zeroIdx = sb.Length - 1;

                            for (int idx = zeroIdx; idx >= len; idx--)
                            {
                                digit = nanoseconds % 10;
                                sb[idx] = (char)(digit + '0');

                                if (zeroIdx == idx && digit == 0)
                                    zeroIdx--;

                                nanoseconds /= 10;
                            }

                            sb.Length = zeroIdx + 1;
                        }
                        sb.Append('S');
                    }
                }

                // Zero is represented as "PT0S"
                if (sb[sb.Length - 1] == 'P')
                    sb.Append("T0S");
            }
            else
            {
                // Zero is represented as "T0M"
                if (sb[sb.Length - 1] == 'P')
                    sb.Append("0M");
            }

            return sb.ToString();
        }

        /// <summary>
        /// Parses the lexical form of an xsd:duration into <paramref name="result"/>. Returns null on
        /// success; otherwise a FormatException (bad lexical form) or OverflowException (component
        /// too large) that the caller may throw. Never throws itself.
        /// </summary>
        internal static Exception TryParse(string s, DurationType durationType, out XsdDuration result)
        {
            string errorCode;
            int length;
            int value, pos, numDigits;
            Parts parts = Parts.HasNone;

            result = new XsdDuration();

            s = s.Trim();
            length = s.Length;

            pos = 0;
            numDigits = 0;

            if (pos >= length) goto InvalidFormat;

            // Optional leading '-' sets the sign bit; 'P' designator is mandatory.
            if (s[pos] == '-')
            {
                pos++;
                result._nanoseconds = NegativeBit;
            }
            else
            {
                result._nanoseconds = 0;
            }

            if (pos >= length) goto InvalidFormat;

            if (s[pos++] != 'P') goto InvalidFormat;

            // Each designator check below consumes the digits parsed just before it;
            // numDigits == 0 at a designator means the designator had no number.
            errorCode = TryParseDigits(s, ref pos, false, out value, out numDigits);
            if (errorCode != null) goto Error;

            if (pos >= length) goto InvalidFormat;

            if (s[pos] == 'Y')
            {
                if (numDigits == 0) goto InvalidFormat;

                parts |= Parts.HasYears;
                result._years = value;
                if (++pos == length) goto Done;

                errorCode = TryParseDigits(s, ref pos, false, out value, out numDigits);
                if (errorCode != null) goto Error;

                if (pos >= length) goto InvalidFormat;
            }

            if (s[pos] == 'M')
            {
                if (numDigits == 0) goto InvalidFormat;

                parts |= Parts.HasMonths;
                result._months = value;
                if (++pos == length) goto Done;

                errorCode = TryParseDigits(s, ref pos, false, out value, out numDigits);
                if (errorCode != null) goto Error;

                if (pos >= length) goto InvalidFormat;
            }

            if (s[pos] == 'D')
            {
                if (numDigits == 0) goto InvalidFormat;

                parts |= Parts.HasDays;
                result._days = value;
                if (++pos == length) goto Done;

                errorCode = TryParseDigits(s, ref pos, false, out value, out numDigits);
                if (errorCode != null) goto Error;

                if (pos >= length) goto InvalidFormat;
            }

            if (s[pos] == 'T')
            {
                // 'T' must not be preceded by unconsumed digits.
                if (numDigits != 0) goto InvalidFormat;

                pos++;
                errorCode = TryParseDigits(s, ref pos, false, out value, out numDigits);
                if (errorCode != null) goto Error;

                if (pos >= length) goto InvalidFormat;

                if (s[pos] == 'H')
                {
                    if (numDigits == 0) goto InvalidFormat;

                    parts |= Parts.HasHours;
                    result._hours = value;
                    if (++pos == length) goto Done;

                    errorCode = TryParseDigits(s, ref pos, false, out value, out numDigits);
                    if (errorCode != null) goto Error;

                    if (pos >= length) goto InvalidFormat;
                }

                if (s[pos] == 'M')
                {
                    if (numDigits == 0) goto InvalidFormat;

                    parts |= Parts.HasMinutes;
                    result._minutes = value;
                    if (++pos == length) goto Done;

                    errorCode = TryParseDigits(s, ref pos, false, out value, out numDigits);
                    if (errorCode != null) goto Error;

                    if (pos >= length) goto InvalidFormat;
                }

                if (s[pos] == '.')
                {
                    // Fractional seconds: digits before '.' are whole seconds, digits after are
                    // normalized to exactly 9 digits (nanoseconds), excess digits eaten silently.
                    pos++;

                    parts |= Parts.HasSeconds;
                    result._seconds = value;

                    errorCode = TryParseDigits(s, ref pos, true, out value, out numDigits);
                    if (errorCode != null) goto Error;

                    if (numDigits == 0)
                    {
                        //If there are no digits after the decimal point, assume 0
                        value = 0;
                    }

                    // Normalize to nanosecond intervals
                    for (; numDigits > 9; numDigits--)
                        value /= 10;

                    for (; numDigits < 9; numDigits++)
                        value *= 10;

                    result._nanoseconds |= (uint)value;

                    if (pos >= length) goto InvalidFormat;

                    if (s[pos] != 'S') goto InvalidFormat;

                    if (++pos == length) goto Done;
                }
                else if (s[pos] == 'S')
                {
                    if (numDigits == 0) goto InvalidFormat;

                    parts |= Parts.HasSeconds;
                    result._seconds = value;
                    if (++pos == length) goto Done;
                }
            }

            // Duration cannot end with digits
            if (numDigits != 0) goto InvalidFormat;

            // No further characters are allowed
            if (pos != length) goto InvalidFormat;

        Done:
            // At least one part must be defined
            if (parts == Parts.HasNone) goto InvalidFormat;

            // Enforce the subtype restrictions: dayTimeDuration forbids Y/M,
            // yearMonthDuration allows only Y/M.
            if (durationType == DurationType.DayTimeDuration)
            {
                if ((parts & (Parts.HasYears | Parts.HasMonths)) != 0) goto InvalidFormat;
            }
            else if (durationType == DurationType.YearMonthDuration)
            {
                if ((parts & ~(XsdDuration.Parts.HasYears | XsdDuration.Parts.HasMonths)) != 0) goto InvalidFormat;
            }

            return null;

        InvalidFormat:
            return new FormatException(SR.Format(SR.XmlConvert_BadFormat, s, durationType));

        Error:
            return new OverflowException(SR.Format(SR.XmlConvert_Overflow, s, durationType));
        }

        /// Helper method that constructs an integer from leading digits starting at s[offset]. "offset" is
        /// updated to contain an offset just beyond the last digit. The number of digits consumed is returned in
        /// numDigits. The integer is returned (0 if no digits). If the digits cannot fit into an Int32:
        /// 1. If eatDigits is true, then additional digits will be silently discarded (don't count towards numDigits)
        /// 2. If eatDigits is false, an overflow error code is returned
        private static string TryParseDigits(string s, ref int offset, bool eatDigits, out int result, out int numDigits)
        {
            int offsetStart = offset;
            int offsetEnd = s.Length;
            int digit;

            result = 0;
            numDigits = 0;

            while (offset < offsetEnd && s[offset] >= '0' && s[offset] <= '9')
            {
                digit = s[offset] - '0';

                // Overflow check performed before the multiply-add so result never wraps.
                if (result > (Int32.MaxValue - digit) / 10)
                {
                    if (!eatDigits)
                    {
                        return SR.XmlConvert_Overflow;
                    }

                    // Skip past any remaining digits
                    numDigits = offset - offsetStart;

                    while (offset < offsetEnd && s[offset] >= '0' && s[offset] <= '9')
                    {
                        offset++;
                    }

                    return null;
                }

                result = result * 10 + digit;
                offset++;
            }

            numDigits = offset - offsetStart;
            return null;
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.IO;
using Microsoft.Build.Framework;
using Microsoft.Build.Shared;
using Microsoft.Build.Tasks;
using Microsoft.Build.Utilities;
using Xunit;

namespace Microsoft.Build.UnitTests
{
    /// <summary>
    /// Unit tests for the Touch task. All file-system operations are replaced by the mock
    /// delegates below, so no real files are read, created, or modified.
    /// </summary>
    sealed public class Touch_Tests
    {
        // Delegate instances wired to the static mock implementations below; passed to Touch.ExecuteImpl.
        internal static Microsoft.Build.Shared.FileExists fileExists = new Microsoft.Build.Shared.FileExists(FileExists);
        internal static Microsoft.Build.Shared.FileCreate fileCreate = new Microsoft.Build.Shared.FileCreate(FileCreate);
        internal static Microsoft.Build.Tasks.GetAttributes fileGetAttributes = new Microsoft.Build.Tasks.GetAttributes(GetAttributes);
        internal static Microsoft.Build.Tasks.SetAttributes fileSetAttributes = new Microsoft.Build.Tasks.SetAttributes(SetAttributes);
        internal static Microsoft.Build.Tasks.SetLastAccessTime setLastAccessTime = new Microsoft.Build.Tasks.SetLastAccessTime(SetLastAccessTime);
        internal static Microsoft.Build.Tasks.SetLastWriteTime setLastWriteTime = new Microsoft.Build.Tasks.SetLastWriteTime(SetLastWriteTime);

        // Well-known mock paths, platform-specific so path syntax matches the host OS.
        internal static string myexisting_txt = NativeMethodsShared.IsWindows
                                                    ? @"c:\touch\myexisting.txt"
                                                    : @"/touch/myexisting.txt";

        internal static string mynonexisting_txt = NativeMethodsShared.IsWindows
                                                       ? @"c:\touch\mynonexisting.txt"
                                                       : @"/touch/mynonexisting.txt";

        internal static string nonexisting_txt = NativeMethodsShared.IsWindows
                                                     ? @"c:\touch-nonexistent\file.txt"
                                                     : @"/touch-nonexistent/file.txt";

        internal static string myreadonly_txt = NativeMethodsShared.IsWindows
                                                    ? @"c:\touch\myreadonly.txt"
                                                    : @"/touch/myreadonly.txt";

        /// <summary>
        /// Runs the Touch task against the mock file-system delegates and returns its success flag.
        /// </summary>
        private bool Execute(Touch t)
        {
            return t.ExecuteImpl
            (
                fileExists,
                fileCreate,
                fileGetAttributes,
                fileSetAttributes,
                setLastAccessTime,
                setLastWriteTime
            );
        }

        /// <summary>
        /// Mock file exists.
        /// </summary>
        /// <param name="path"></param>
        /// <returns></returns>
        private static bool FileExists(string path)
        {
            if (path == myexisting_txt)
            {
                return true;
            }

            if (path == mynonexisting_txt)
            {
                return false;
            }

            if (path == nonexisting_txt)
            {
                return false;
            }

            if (path == myreadonly_txt)
            {
                return true;
            }

            Assert.True(false, "Unexpected file exists: " + path);

            return true;
        }

        /// <summary>
        /// Mock file create.
        /// </summary>
        /// <param name="path"></param>
        private static FileStream FileCreate(string path)
        {
            if (path == mynonexisting_txt)
            {
                // Creation "succeeds"; the task never uses the stream, so null is fine here.
                return null;
            }

            if (path == nonexisting_txt)
            {
                // Parent directory does not exist.
                throw new DirectoryNotFoundException();
            }

            Assert.True(false, "Unexpected file create: " + path);
            return null;
        }

        /// <summary>
        /// Mock get attributes.
        /// </summary>
        /// <param name="path"></param>
        private static FileAttributes GetAttributes(string path)
        {
            FileAttributes a = new FileAttributes();

            if (path == myexisting_txt)
            {
                return a;
            }

            if (path == mynonexisting_txt)
            {
                // Has attributes because Touch created it.
                return a;
            }

            if (path == myreadonly_txt)
            {
                a = System.IO.FileAttributes.ReadOnly;
                return a;
            }

            Assert.True(false, "Unexpected file attributes: " + path);
            return a;
        }

        /// <summary>
        /// Mock set attributes.
        /// </summary>
        /// <param name="path"></param>
        private static void SetAttributes(string path, FileAttributes attributes)
        {
            if (path == myreadonly_txt)
            {
                return;
            }

            Assert.True(false, "Unexpected set file attributes: " + path);
        }

        /// <summary>
        /// Mock SetLastAccessTime.
        /// </summary>
        /// <param name="path"></param>
        private static void SetLastAccessTime(string path, DateTime timestamp)
        {
            if (path == myexisting_txt)
            {
                return;
            }

            if (path == mynonexisting_txt)
            {
                return;
            }

            if (path == myreadonly_txt)
            {
                // Read-only so throw an exception
                throw new IOException();
            }

            Assert.True(false, "Unexpected set last access time: " + path);
        }

        /// <summary>
        /// Mock SetLastWriteTime.
        /// </summary>
        /// <param name="path"></param>
        private static void SetLastWriteTime(string path, DateTime timestamp)
        {
            if (path == myexisting_txt)
            {
                return;
            }

            if (path == mynonexisting_txt)
            {
                return;
            }

            if (path == myreadonly_txt)
            {
                return;
            }

            Assert.True(false, "Unexpected set last write time: " + path);
        }

        /// <summary>
        /// Touching an existing file succeeds and logs the "Touching" message.
        /// </summary>
        [Fact]
        public void TouchExisting()
        {
            Touch t = new Touch();
            MockEngine engine = new MockEngine();
            t.BuildEngine = engine;

            t.Files = new ITaskItem[]
            {
                new TaskItem(myexisting_txt)
            };

            bool success = Execute(t);

            Assert.True(success);
            Assert.Single(t.TouchedFiles);
            Assert.Contains(
                String.Format(AssemblyResources.GetString("Touch.Touching"), myexisting_txt),
                engine.Log
            );
        }

        /// <summary>
        /// Touching a missing file without AlwaysCreate fails with "file does not exist".
        /// </summary>
        [Fact]
        public void TouchNonExisting()
        {
            Touch t = new Touch();
            MockEngine engine = new MockEngine();
            t.BuildEngine = engine;

            t.Files = new ITaskItem[]
            {
                new TaskItem(mynonexisting_txt)
            };

            bool success = Execute(t);

            // Not success because the file doesn't exist
            Assert.False(success);

            Assert.Contains(
                String.Format(AssemblyResources.GetString("Touch.FileDoesNotExist"), mynonexisting_txt),
                engine.Log
            );
        }

        /// <summary>
        /// AlwaysCreate causes a missing file to be created, so the task succeeds.
        /// </summary>
        [Fact]
        public void TouchNonExistingAlwaysCreate()
        {
            Touch t = new Touch();
            MockEngine engine = new MockEngine();
            t.BuildEngine = engine;
            t.AlwaysCreate = true;

            t.Files = new ITaskItem[]
            {
                new TaskItem(mynonexisting_txt)
            };

            bool success = Execute(t);

            // Success because the file was created.
            Assert.True(success);

            Assert.Contains(
                String.Format(AssemblyResources.GetString("Touch.CreatingFile"), mynonexisting_txt, "AlwaysCreate"),
                engine.Log
            );
        }

        /// <summary>
        /// A malformed Time value fails with error MSB3376 even when AlwaysCreate is set.
        /// </summary>
        [Fact]
        public void TouchNonExistingAlwaysCreateAndBadlyFormedTimestamp()
        {
            Touch t = new Touch();
            MockEngine engine = new MockEngine();
            t.BuildEngine = engine;
            t.AlwaysCreate = true;
            t.ForceTouch = false;
            t.Time = "Badly formed time String.";

            t.Files = new ITaskItem[]
            {
                new TaskItem(mynonexisting_txt)
            };

            bool success = Execute(t);

            // Failed because of badly formed time string.
            Assert.False(success);
            Assert.Contains("MSB3376", engine.Log);
        }

        /// <summary>
        /// Touching a read-only file without ForceTouch fails with error MSB3374.
        /// </summary>
        [Fact]
        public void TouchReadonly()
        {
            Touch t = new Touch();
            MockEngine engine = new MockEngine();
            t.BuildEngine = engine;
            t.AlwaysCreate = true;

            t.Files = new ITaskItem[]
            {
                new TaskItem(myreadonly_txt)
            };

            bool success = Execute(t);

            // Failed because file is readonly.
            Assert.False(success);
            Assert.Contains("MSB3374", engine.Log);
            Assert.Contains(myreadonly_txt, engine.Log);
        }

        /// <summary>
        /// ForceTouch clears the read-only attribute, so touching a read-only file proceeds.
        /// </summary>
        [Fact]
        public void TouchReadonlyForce()
        {
            Touch t = new Touch();
            MockEngine engine = new MockEngine();
            t.BuildEngine = engine;
            t.ForceTouch = true;
            t.AlwaysCreate = true;

            t.Files = new ITaskItem[]
            {
                new TaskItem(myreadonly_txt)
            };

            Execute(t);
        }

        /// <summary>
        /// Creating a file in a missing directory fails with error MSB3371.
        /// </summary>
        [Fact]
        public void TouchNonExistingDirectoryDoesntExist()
        {
            Touch t = new Touch();
            MockEngine engine = new MockEngine();
            t.BuildEngine = engine;
            t.AlwaysCreate = true;

            t.Files = new ITaskItem[]
            {
                new TaskItem(nonexisting_txt)
            };

            bool success = Execute(t);

            // Failed because the target directory didn't exist.
            Assert.False(success);
            Assert.Contains("MSB3371", engine.Log);
            Assert.Contains(nonexisting_txt, engine.Log);
        }
    }
}
// <copyright file="DiscreteUniformTests.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// http://numerics.mathdotnet.com
// http://github.com/mathnet/mathnet-numerics
//
// Copyright (c) 2009-2016 Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>

using System;
using System.Linq;
using MathNet.Numerics.Distributions;
using NUnit.Framework;

namespace MathNet.Numerics.UnitTests.DistributionTests.Discrete
{
    /// <summary>
    /// Discrete uniform tests.
    /// </summary>
    [TestFixture, Category("Distributions")]
    public class DiscreteUniformTests
    {
        /// <summary>
        /// Can create discrete uniform.
        /// </summary>
        /// <param name="l">Lower bound.</param>
        /// <param name="u">Upper bound.</param>
        [TestCase(-10, 10)]
        [TestCase(0, 4)]
        [TestCase(10, 20)]
        [TestCase(20, 20)]
        public void CanCreateDiscreteUniform(int l, int u)
        {
            var du = new DiscreteUniform(l, u);
            Assert.AreEqual(l, du.LowerBound);
            Assert.AreEqual(u, du.UpperBound);
        }

        /// <summary>
        /// Discrete Uniform create fails when lower bound exceeds upper bound.
        /// </summary>
        /// <param name="l">Lower bound.</param>
        /// <param name="u">Upper bound.</param>
        [TestCase(-1, -2)]
        [TestCase(6, 5)]
        public void DiscreteUniformCreateFailsWithBadParameters(int l, int u)
        {
            Assert.That(() => new DiscreteUniform(l, u), Throws.ArgumentException);
        }

        /// <summary>
        /// Validate ToString.
        /// </summary>
        [Test]
        public void ValidateToString()
        {
            var b = new DiscreteUniform(0, 10);
            Assert.AreEqual("DiscreteUniform(Lower = 0, Upper = 10)", b.ToString());
        }

        /// <summary>
        /// Validate entropy (expected values are ln(u - l + 1)).
        /// </summary>
        /// <param name="l">Lower bound.</param>
        /// <param name="u">Upper bound.</param>
        /// <param name="e">Expected value.</param>
        [TestCase(-10, 10, 3.0445224377234229965005979803657054342845752874046093)]
        [TestCase(0, 4, 1.6094379124341003746007593332261876395256013542685181)]
        [TestCase(10, 20, 2.3978952727983705440619435779651292998217068539374197)]
        [TestCase(20, 20, 0.0)]
        public void ValidateEntropy(int l, int u, double e)
        {
            var du = new DiscreteUniform(l, u);
            AssertHelpers.AlmostEqualRelative(e, du.Entropy, 14);
        }

        /// <summary>
        /// Validate skewness (always zero for a symmetric distribution).
        /// </summary>
        /// <param name="l">Lower bound.</param>
        /// <param name="u">Upper bound.</param>
        [TestCase(-10, 10)]
        [TestCase(0, 4)]
        [TestCase(10, 20)]
        [TestCase(20, 20)]
        public void ValidateSkewness(int l, int u)
        {
            var du = new DiscreteUniform(l, u);
            Assert.AreEqual(0.0, du.Skewness);
        }

        /// <summary>
        /// Validate mode.
        /// </summary>
        /// <param name="l">Lower bound.</param>
        /// <param name="u">Upper bound.</param>
        /// <param name="m">Expected value.</param>
        [TestCase(-10, 10, 0)]
        [TestCase(0, 4, 2)]
        [TestCase(10, 20, 15)]
        [TestCase(20, 20, 20)]
        public void ValidateMode(int l, int u, int m)
        {
            var du = new DiscreteUniform(l, u);
            Assert.AreEqual(m, du.Mode);
        }

        /// <summary>
        /// Validate median.
        /// </summary>
        /// <param name="l">Lower bound.</param>
        /// <param name="u">Upper bound.</param>
        /// <param name="m">Expected value.</param>
        [TestCase(-10, 10, 0)]
        [TestCase(0, 4, 2)]
        [TestCase(10, 20, 15)]
        [TestCase(20, 20, 20)]
        public void ValidateMedian(int l, int u, int m)
        {
            var du = new DiscreteUniform(l, u);
            Assert.AreEqual(m, du.Median);
        }

        /// <summary>
        /// Validate mean.
        /// </summary>
        /// <param name="l">Lower bound.</param>
        /// <param name="u">Upper bound.</param>
        /// <param name="m">Expected value.</param>
        [TestCase(-10, 10, 0)]
        [TestCase(0, 4, 2)]
        [TestCase(10, 20, 15)]
        [TestCase(20, 20, 20)]
        public void ValidateMean(int l, int u, int m)
        {
            var du = new DiscreteUniform(l, u);
            Assert.AreEqual(m, du.Mean);
        }

        /// <summary>
        /// Validate minimum.
        /// </summary>
        [Test]
        public void ValidateMinimum()
        {
            var b = new DiscreteUniform(-10, 10);
            Assert.AreEqual(-10, b.Minimum);
        }

        /// <summary>
        /// Validate maximum.
        /// </summary>
        [Test]
        public void ValidateMaximum()
        {
            var b = new DiscreteUniform(-10, 10);
            Assert.AreEqual(10, b.Maximum);
        }

        /// <summary>
        /// Validate probability.
        /// </summary>
        /// <param name="l">Lower bound.</param>
        /// <param name="u">Upper bound.</param>
        /// <param name="x">Input X value.</param>
        /// <param name="p">Expected value.</param>
        [TestCase(-10, 10, -5, 1 / 21.0)]
        [TestCase(-10, 10, 1, 1 / 21.0)]
        [TestCase(-10, 10, 10, 1 / 21.0)]
        [TestCase(-10, -10, 0, 0.0)]
        [TestCase(-10, -10, -10, 1.0)]
        public void ValidateProbability(int l, int u, int x, double p)
        {
            var b = new DiscreteUniform(l, u);
            Assert.AreEqual(p, b.Probability(x));
        }

        /// <summary>
        /// Validate probability log.
        /// </summary>
        /// <param name="l">Lower bound.</param>
        /// <param name="u">Upper bound.</param>
        /// <param name="x">Input X value.</param>
        /// <param name="dln">Expected value.</param>
        [TestCase(-10, 10, -5, -3.0445224377234229965005979803657054342845752874046093)]
        [TestCase(-10, 10, 1, -3.0445224377234229965005979803657054342845752874046093)]
        [TestCase(-10, 10, 10, -3.0445224377234229965005979803657054342845752874046093)]
        [TestCase(-10, -10, 0, Double.NegativeInfinity)]
        [TestCase(-10, -10, -10, 0.0)]
        public void ValidateProbabilityLn(int l, int u, int x, double dln)
        {
            var b = new DiscreteUniform(l, u);
            Assert.AreEqual(dln, b.ProbabilityLn(x));
        }

        /// <summary>
        /// Can sample static.
        /// </summary>
        [Test]
        public void CanSampleStatic()
        {
            DiscreteUniform.Sample(new System.Random(0), 0, 10);
        }

        /// <summary>
        /// Can sample sequence static.
        /// </summary>
        [Test]
        public void CanSampleSequenceStatic()
        {
            var ied = DiscreteUniform.Samples(new System.Random(0), 0, 10);

            // Force partial enumeration of the lazy sequence.
            GC.KeepAlive(ied.Take(5).ToArray());
        }

        /// <summary>
        /// Fail sample static with bad parameters.
        /// </summary>
        [Test]
        public void FailSampleStatic()
        {
            Assert.That(() => DiscreteUniform.Sample(new System.Random(0), 20, 10), Throws.ArgumentException);
        }

        /// <summary>
        /// Fail sample sequence static with bad parameters.
/// </summary> [Test] public void FailSampleSequenceStatic() { Assert.That(() => DiscreteUniform.Samples(new System.Random(0), 20, 10).First(), Throws.ArgumentException); } /// <summary> /// Can sample. /// </summary> [Test] public void CanSample() { var n = new DiscreteUniform(0, 10); n.Sample(); } /// <summary> /// Can sample sequence. /// </summary> [Test] public void CanSampleSequence() { var n = new DiscreteUniform(0, 10); var ied = n.Samples(); GC.KeepAlive(ied.Take(5).ToArray()); } /// <summary> /// Validate cumulative distribution. /// </summary> /// <param name="l">Lower bound.</param> /// <param name="u">Upper bound.</param> /// <param name="x">Input X value.</param> /// <param name="cdf">Expected value.</param> [TestCase(-10, 10, -5, 6.0 / 21.0)] [TestCase(-10, 10, 1, 12.0 / 21.0)] [TestCase(-10, 10, 10, 1.0)] [TestCase(-10, -10, 0, 1.0)] [TestCase(-10, -10, -10, 1.0)] [TestCase(-10, -10, -11, 0.0)] public void ValidateCumulativeDistribution(int l, int u, double x, double cdf) { var b = new DiscreteUniform(l, u); Assert.AreEqual(cdf, b.CumulativeDistribution(x)); } } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Bot.Builder.Luis.Models
{
    using System;
    using System.Linq;
    using System.Collections.Generic;
    using Newtonsoft.Json;
    using Microsoft.Rest;
    using Microsoft.Rest.Serialization;
    using Newtonsoft.Json.Linq;

    /// <summary>
    /// Luis entity recommendation. Look at https://www.luis.ai/Help for more
    /// information.
    /// </summary>
    public partial class EntityRecommendation
    {
        /// <summary>
        /// Initializes a new instance of the EntityRecommendation class.
        /// </summary>
        public EntityRecommendation() { }

        /// <summary>
        /// Initializes a new instance of the EntityRecommendation class.
        /// </summary>
        public EntityRecommendation(string type, string role = default(string), string entity = default(string), int? startIndex = default(int?), int? endIndex = default(int?), double? score = default(double?), IDictionary<string, object> resolution = default(IDictionary<string, object>))
        {
            Role = role;
            Entity = entity;
            Type = type;
            StartIndex = startIndex;
            EndIndex = endIndex;
            Score = score;
            Resolution = resolution;
        }

        /// <summary>
        /// Role of the entity.
        /// </summary>
        [JsonProperty(PropertyName = "role")]
        public string Role { get; set; }

        /// <summary>
        /// Entity extracted by LUIS.
        /// </summary>
        [JsonProperty(PropertyName = "entity")]
        public string Entity { get; set; }

        /// <summary>
        /// Type of the entity.
        /// </summary>
        [JsonProperty(PropertyName = "type")]
        public string Type { get; set; }

        /// <summary>
        /// Start index of the entity in the LUIS query string.
        /// </summary>
        [JsonProperty(PropertyName = "startIndex")]
        public int? StartIndex { get; set; }

        /// <summary>
        /// End index of the entity in the LUIS query string.
        /// </summary>
        [JsonProperty(PropertyName = "endIndex")]
        public int? EndIndex { get; set; }

        /// <summary>
        /// Score assigned by LUIS to detected entity.
        /// </summary>
        [JsonProperty(PropertyName = "score")]
        public double? Score { get; set; }

        /// <summary>
        /// A machine interpretable resolution of the entity. For example the
        /// string "one thousand" would have the resolution "1000". The
        /// exact form of the resolution is defined by the entity type and is
        /// documented here: https://www.luis.ai/Help#PreBuiltEntities.
        /// </summary>
        [JsonProperty(PropertyName = "resolution", ItemConverterType = typeof(ResolutionConverter))]
        public IDictionary<string, object> Resolution { get; set; }

        /// <summary>
        /// Validate the object. Throws ValidationException if validation fails.
        /// </summary>
        public virtual void Validate()
        {
            if (Type == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "Type");
            }
        }

        /// <summary>
        /// JsonConverter that materializes the "resolution" payload as nested
        /// Dictionary/List/primitive object graphs instead of JObject/JArray.
        /// </summary>
        internal class ResolutionConverter : JsonConverter
        {
            private const string UnexpectedEndError = "Unexpected end when reading IDictionary<string, object>";

            public override bool CanConvert(Type objectType)
            {
                return (objectType == typeof(IDictionary<string, object>));
            }

            public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
            {
                return ReadValue(reader);
            }

            // Dispatches on the current token: objects and arrays recurse,
            // scalars are returned as the reader's raw value.
            private static object ReadValue(JsonReader reader)
            {
                // Skip over any comment tokens before the actual value.
                while (reader.TokenType == JsonToken.Comment)
                {
                    if (!reader.Read())
                    {
                        throw new JsonSerializationException("Unexpected token when converting IDictionary<string, object>");
                    }
                }

                switch (reader.TokenType)
                {
                    case JsonToken.StartObject:
                        return ReadObject(reader);
                    case JsonToken.StartArray:
                        return ReadArray(reader);
                    case JsonToken.Integer:
                    case JsonToken.Float:
                    case JsonToken.String:
                    case JsonToken.Boolean:
                    case JsonToken.Undefined:
                    case JsonToken.Null:
                    case JsonToken.Date:
                    case JsonToken.Bytes:
                        return reader.Value;
                    default:
                        throw new JsonSerializationException(string.Format("Unexpected token when converting IDictionary<string, object>: {0}", reader.TokenType));
                }
            }

            // Reads tokens until EndArray, converting each element via ReadValue.
            private static object ReadArray(JsonReader reader)
            {
                IList<object> list = new List<object>();

                while (reader.Read())
                {
                    // NOTE: the default label before EndArray is legal C#; any
                    // token other than Comment/EndArray is treated as a value.
                    switch (reader.TokenType)
                    {
                        case JsonToken.Comment:
                            break;
                        default:
                            var value = ReadValue(reader);
                            list.Add(value);
                            break;
                        case JsonToken.EndArray:
                            return list;
                    }
                }

                // Stream ended before the array was closed.
                throw new JsonSerializationException(UnexpectedEndError);
            }

            // Reads property-name/value pairs until EndObject.
            private static object ReadObject(JsonReader reader)
            {
                var dictionary = new Dictionary<string, object>();

                while (reader.Read())
                {
                    switch (reader.TokenType)
                    {
                        case JsonToken.PropertyName:
                            var propertyName = reader.Value.ToString();

                            // Advance to the property's value token.
                            if (!reader.Read())
                            {
                                throw new JsonSerializationException(UnexpectedEndError);
                            }

                            var value = ReadValue(reader);

                            // Last-writer-wins on duplicate keys (indexer, not Add).
                            dictionary[propertyName] = value;
                            break;
                        case JsonToken.Comment:
                            break;
                        case JsonToken.EndObject:
                            return dictionary;
                    }
                }

                // Stream ended before the object was closed.
                throw new JsonSerializationException(UnexpectedEndError);
            }

            // Writing uses the default serialization for the value.
            public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
            {
                serializer.Serialize(writer, value);
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Collections;
using System.Globalization;

namespace System.Collections.Specialized
{
    /// <devdoc>
    /// <para>
    /// This data structure implements IDictionary first using a linked list
    /// (ListDictionary) and then switching over to use Hashtable when large. This is recommended
    /// for cases where the number of elements in a dictionary is unknown and might be small.
    ///
    /// It also has a single boolean parameter to allow case-sensitivity that is not affected by
    /// ambient culture and has been optimized for looking up case-insensitive symbols
    /// </para>
    /// </devdoc>
    public class HybridDictionary : IDictionary
    {
        // These numbers have been carefully tested to be optimal. Please don't change them
        // without doing thorough performance testing.
        private const int CutoverPoint = 9;
        private const int InitialHashtableSize = 13;
        private const int FixedSizeCutoverPoint = 6;

        // Instance variables. This keeps the HybridDictionary very light-weight when empty
        private ListDictionary _list;
        private Hashtable _hashtable;
        private readonly bool _caseInsensitive;

        /// <summary>Creates an empty, case-sensitive dictionary.</summary>
        public HybridDictionary()
        {
        }

        /// <summary>Creates a case-sensitive dictionary with the given initial capacity.</summary>
        public HybridDictionary(int initialSize) : this(initialSize, false)
        {
        }

        /// <summary>Creates an empty dictionary with the given case sensitivity.</summary>
        public HybridDictionary(bool caseInsensitive)
        {
            _caseInsensitive = caseInsensitive;
        }

        /// <summary>
        /// Creates a dictionary with the given capacity and case sensitivity. If the requested
        /// size is already at or past the fixed-size cutover, the Hashtable is created eagerly.
        /// </summary>
        public HybridDictionary(int initialSize, bool caseInsensitive)
        {
            _caseInsensitive = caseInsensitive;
            if (initialSize >= FixedSizeCutoverPoint)
            {
                if (caseInsensitive)
                {
                    _hashtable = new Hashtable(initialSize, StringComparer.OrdinalIgnoreCase);
                }
                else
                {
                    _hashtable = new Hashtable(initialSize);
                }
            }
        }

        /// <summary>Gets or sets the value associated with the given key; get returns null when absent.</summary>
        public object this[object key]
        {
            get
            {
                // Hashtable supports multiple read, one writer thread safety.
                // Although we never made the same guarantee for HybridDictionary,
                // it is still nice to do the same thing here since we have recommended
                // HybridDictionary as replacement for Hashtable.
                ListDictionary cachedList = _list;
                if (_hashtable != null)
                {
                    return _hashtable[key];
                }
                else if (cachedList != null)
                {
                    return cachedList[key];
                }
                else
                {
                    // cachedList can be null in two cases:
                    // (1) The dictionary is empty, we will return null in this case
                    // (2) There is a writer which is doing ChangeOver. However in that case
                    //     we should see the change to hashtable as well.
                    //     So it should work just fine.
                    if (key == null)
                    {
                        throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
                    }
                    return null;
                }
            }
            set
            {
                if (_hashtable != null)
                {
                    _hashtable[key] = value;
                }
                else if (_list != null)
                {
                    // The set may add a new entry, so cut over one element early.
                    if (_list.Count >= CutoverPoint - 1)
                    {
                        ChangeOver();
                        _hashtable[key] = value;
                    }
                    else
                    {
                        _list[key] = value;
                    }
                }
                else
                {
                    _list = new ListDictionary(_caseInsensitive ? StringComparer.OrdinalIgnoreCase : null);
                    _list[key] = value;
                }
            }
        }

        // Lazily-created list used while the dictionary is small.
        private ListDictionary List
        {
            get
            {
                if (_list == null)
                {
                    _list = new ListDictionary(_caseInsensitive ? StringComparer.OrdinalIgnoreCase : null);
                }
                return _list;
            }
        }

        // Copies all entries from the list into a new Hashtable, then publishes it.
        private void ChangeOver()
        {
            IDictionaryEnumerator en = _list.GetEnumerator();
            Hashtable newTable;
            if (_caseInsensitive)
            {
                newTable = new Hashtable(InitialHashtableSize, StringComparer.OrdinalIgnoreCase);
            }
            else
            {
                newTable = new Hashtable(InitialHashtableSize);
            }
            while (en.MoveNext())
            {
                newTable.Add(en.Key, en.Value);
            }

            // Keep the order of writing to hashtable and list.
            // We assume we will see the change in hashtable if list is set to null in
            // this method in another reader thread.
            _hashtable = newTable;
            _list = null;
        }

        /// <summary>Gets the number of key/value pairs in the dictionary.</summary>
        public int Count
        {
            get
            {
                ListDictionary cachedList = _list;
                if (_hashtable != null)
                {
                    return _hashtable.Count;
                }
                else if (cachedList != null)
                {
                    return cachedList.Count;
                }
                else
                {
                    return 0;
                }
            }
        }

        /// <summary>Gets a collection of the keys in the dictionary.</summary>
        public ICollection Keys
        {
            get
            {
                if (_hashtable != null)
                {
                    return _hashtable.Keys;
                }
                else
                {
                    return List.Keys;
                }
            }
        }

        public bool IsReadOnly
        {
            get { return false; }
        }

        public bool IsFixedSize
        {
            get { return false; }
        }

        public bool IsSynchronized
        {
            get { return false; }
        }

        public object SyncRoot
        {
            get { return this; }
        }

        /// <summary>Gets a collection of the values in the dictionary.</summary>
        public ICollection Values
        {
            get
            {
                if (_hashtable != null)
                {
                    return _hashtable.Values;
                }
                else
                {
                    return List.Values;
                }
            }
        }

        /// <summary>Adds an entry; throws if the key already exists.</summary>
        public void Add(object key, object value)
        {
            if (_hashtable != null)
            {
                _hashtable.Add(key, value);
            }
            else
            {
                if (_list == null)
                {
                    _list = new ListDictionary(_caseInsensitive ? StringComparer.OrdinalIgnoreCase : null);
                    _list.Add(key, value);
                }
                else
                {
                    // Adding always grows the count by one, so check against the
                    // post-add size when deciding whether to cut over.
                    if (_list.Count + 1 >= CutoverPoint)
                    {
                        ChangeOver();
                        _hashtable.Add(key, value);
                    }
                    else
                    {
                        _list.Add(key, value);
                    }
                }
            }
        }

        /// <summary>Removes all entries and resets back to the light-weight empty state.</summary>
        public void Clear()
        {
            if (_hashtable != null)
            {
                Hashtable cachedHashtable = _hashtable;
                _hashtable = null;
                cachedHashtable.Clear();
            }

            if (_list != null)
            {
                ListDictionary cachedList = _list;
                _list = null;
                cachedList.Clear();
            }
        }

        /// <summary>Returns true if the dictionary contains the given key.</summary>
        public bool Contains(object key)
        {
            ListDictionary cachedList = _list;
            if (_hashtable != null)
            {
                return _hashtable.Contains(key);
            }
            else if (cachedList != null)
            {
                return cachedList.Contains(key);
            }
            else
            {
                if (key == null)
                {
                    throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
                }
                return false;
            }
        }

        /// <summary>Copies the entries to the given array starting at the given index.</summary>
        public void CopyTo(Array array, int index)
        {
            if (_hashtable != null)
            {
                _hashtable.CopyTo(array, index);
            }
            else
            {
                List.CopyTo(array, index);
            }
        }

        /// <summary>Returns an enumerator over the dictionary entries.</summary>
        public IDictionaryEnumerator GetEnumerator()
        {
            if (_hashtable != null)
            {
                return _hashtable.GetEnumerator();
            }

            // The List property lazily creates the list when needed.
            return List.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            if (_hashtable != null)
            {
                return _hashtable.GetEnumerator();
            }

            return List.GetEnumerator();
        }

        /// <summary>Removes the entry with the given key, if present.</summary>
        public void Remove(object key)
        {
            if (_hashtable != null)
            {
                _hashtable.Remove(key);
            }
            else if (_list != null)
            {
                _list.Remove(key);
            }
            else
            {
                if (key == null)
                {
                    throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
                }
            }
        }
    }
}
/*
 * XmlNamedNodeMap.cs - Implementation of the
 *        "System.Xml.XmlNamedNodeMap" class.
 *
 * Copyright (C) 2002 Southern Storm Software, Pty Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

namespace System.Xml
{

using System;
using System.Collections;

#if ECMA_COMPAT
internal
#else
public
#endif
class XmlNamedNodeMap : IEnumerable
{
    // Internal state: the backing list of nodes and the node that owns the map.
    internal ArrayList map;
    private XmlNode parent;

    // Constructor.
    internal XmlNamedNodeMap(XmlNode parent)
    {
        this.map = new ArrayList();
        this.parent = parent;
    }

    // Number of nodes currently held in the map.
    public virtual int Count
    {
        get { return map.Count; }
    }

    // Enumerate over this map.
    public virtual IEnumerator GetEnumerator()
    {
        return map.GetEnumerator();
    }

    // Look up a node by its qualified name; null when not found.
    // A null name is treated the same as the empty string.
    public virtual XmlNode GetNamedItem(String name)
    {
        if (name == null)
        {
            name = String.Empty;
        }
        foreach (XmlNode candidate in map)
        {
            if (candidate.Name == name)
            {
                return candidate;
            }
        }
        return null;
    }

    // Look up a node by local name and namespace URI; null when not found.
    // Null arguments are treated the same as the empty string.
    public virtual XmlNode GetNamedItem(String localName, String namespaceURI)
    {
        if (localName == null)
        {
            localName = String.Empty;
        }
        if (namespaceURI == null)
        {
            namespaceURI = String.Empty;
        }
        foreach (XmlNode candidate in map)
        {
            if (candidate.LocalName == localName &&
                candidate.NamespaceURI == namespaceURI)
            {
                return candidate;
            }
        }
        return null;
    }

    // Retrieve the node at a given index, or null when out of range.
    public virtual XmlNode Item(int index)
    {
        if (index < 0 || index >= map.Count)
        {
            return null;
        }
        return (XmlNode)(map[index]);
    }

    // Remove and return the first node with the given qualified name;
    // returns null when no such node exists.
    public virtual XmlNode RemoveNamedItem(String name)
    {
        if (name == null)
        {
            name = String.Empty;
        }
        for (int index = 0; index < map.Count; ++index)
        {
            XmlNode candidate = (XmlNode)(map[index]);
            if (candidate.Name == name)
            {
                map.RemoveAt(index);
                return candidate;
            }
        }
        return null;
    }

    // Remove and return the first node matching local name + namespace URI;
    // returns null when no such node exists.
    public virtual XmlNode RemoveNamedItem(String localName, String namespaceURI)
    {
        if (localName == null)
        {
            localName = String.Empty;
        }
        if (namespaceURI == null)
        {
            namespaceURI = String.Empty;
        }
        for (int index = 0; index < map.Count; ++index)
        {
            XmlNode candidate = (XmlNode)(map[index]);
            if (candidate.LocalName == localName &&
                candidate.NamespaceURI == namespaceURI)
            {
                map.RemoveAt(index);
                return candidate;
            }
        }
        return null;
    }

    // Set or append an item into this map. Replaces an existing node with
    // the same qualified name (returning the replaced node); when "append"
    // is requested and the match is not already last, the new node is moved
    // to the end instead of replaced in place. Returns null when nothing
    // was replaced.
    internal XmlNode SetOrAppend(XmlNode node, bool append)
    {
        if (node == null)
        {
            return null;
        }
        if (node.OwnerDocument != parent.OwnerDocument)
        {
            throw new ArgumentException
                (S._("Xml_NotSameDocument"), "node");
        }
        if (parent.IsReadOnly)
        {
            throw new ArgumentException(S._("Xml_ReadOnly"));
        }

        int total = map.Count;
        String name = node.Name;
        for (int index = 0; index < total; ++index)
        {
            XmlNode existing = (XmlNode)(map[index]);
            if (existing.Name != name)
            {
                continue;
            }
            if (append && index < (total - 1))
            {
                // Move the replacement to the end of the map.
                map.RemoveAt(index);
                map.Add(node);
            }
            else
            {
                map[index] = node;
            }
            return existing;
        }

        // No node with that name yet; simply append it.
        map.Add(node);
        return null;
    }

    // Set an item into this map.
    public virtual XmlNode SetNamedItem(XmlNode node)
    {
        return SetOrAppend(node, false);
    }

}; // class XmlNamedNodeMap

}; // namespace System.Xml
using System;
using System.Diagnostics;
using System.Globalization;
using System.Windows.Input;
using Xamarin.Forms.Internals;
using Xamarin.Forms.Platform;

namespace Xamarin.Forms
{
    /// <summary>
    /// A button View that reacts to touch events, with optional image, text,
    /// border styling and an ICommand/Clicked event pair.
    /// </summary>
    [RenderWith(typeof(_ButtonRenderer))]
    public class Button : View, IFontElement, IButtonController, IElementConfiguration<Button>
    {
        public static readonly BindableProperty CommandProperty = BindableProperty.Create("Command", typeof(ICommand), typeof(Button), null,
            propertyChanged: (bo, o, n) => ((Button)bo).OnCommandChanged());

        public static readonly BindableProperty CommandParameterProperty = BindableProperty.Create("CommandParameter", typeof(object), typeof(Button), null,
            // Re-evaluate CanExecute whenever the parameter changes.
            propertyChanged: (bindable, oldvalue, newvalue) => ((Button)bindable).CommandCanExecuteChanged(bindable, EventArgs.Empty));

        public static readonly BindableProperty ContentLayoutProperty =
            BindableProperty.Create("ContentLayout", typeof(ButtonContentLayout), typeof(Button), new ButtonContentLayout(ButtonContentLayout.ImagePosition.Left, DefaultSpacing));

        public static readonly BindableProperty TextProperty = BindableProperty.Create("Text", typeof(string), typeof(Button), null,
            propertyChanged: (bindable, oldVal, newVal) => ((Button)bindable).InvalidateMeasureInternal(InvalidationTrigger.MeasureChanged));

        public static readonly BindableProperty TextColorProperty = BindableProperty.Create("TextColor", typeof(Color), typeof(Button), Color.Default);

        // Font (the struct) and FontFamily/FontSize/FontAttributes are kept in
        // sync through the two propertyChanged handlers below.
        public static readonly BindableProperty FontProperty = BindableProperty.Create("Font", typeof(Font), typeof(Button), default(Font), propertyChanged: FontStructPropertyChanged);

        public static readonly BindableProperty FontFamilyProperty = BindableProperty.Create("FontFamily", typeof(string), typeof(Button), default(string), propertyChanged: SpecificFontPropertyChanged);

        public static readonly BindableProperty FontSizeProperty = BindableProperty.Create("FontSize", typeof(double), typeof(Button), -1.0, propertyChanged: SpecificFontPropertyChanged,
            defaultValueCreator: bindable => Device.GetNamedSize(NamedSize.Default, (Button)bindable));

        public static readonly BindableProperty FontAttributesProperty = BindableProperty.Create("FontAttributes", typeof(FontAttributes), typeof(Button), FontAttributes.None,
            propertyChanged: SpecificFontPropertyChanged);

        public static readonly BindableProperty BorderWidthProperty = BindableProperty.Create("BorderWidth", typeof(double), typeof(Button), -1d);

        public static readonly BindableProperty BorderColorProperty = BindableProperty.Create("BorderColor", typeof(Color), typeof(Button), Color.Default);

        public static readonly BindableProperty BorderRadiusProperty = BindableProperty.Create("BorderRadius", typeof(int), typeof(Button), 5);

        public static readonly BindableProperty ImageProperty = BindableProperty.Create("Image", typeof(FileImageSource), typeof(Button), default(FileImageSource),
            propertyChanging: (bindable, oldvalue, newvalue) => ((Button)bindable).OnSourcePropertyChanging((ImageSource)oldvalue, (ImageSource)newvalue),
            propertyChanged: (bindable, oldvalue, newvalue) => ((Button)bindable).OnSourcePropertyChanged((ImageSource)oldvalue, (ImageSource)newvalue));

        readonly Lazy<PlatformConfigurationRegistry<Button>> _platformConfigurationRegistry;

        // Reentrancy guard: set while the Font <-> FontFamily/FontSize/FontAttributes
        // synchronization handlers are running, so they don't trigger each other.
        bool _cancelEvents;

        const double DefaultSpacing = 10;

        public Color BorderColor
        {
            get { return (Color)GetValue(BorderColorProperty); }
            set { SetValue(BorderColorProperty, value); }
        }

        public int BorderRadius
        {
            get { return (int)GetValue(BorderRadiusProperty); }
            set { SetValue(BorderRadiusProperty, value); }
        }

        public double BorderWidth
        {
            get { return (double)GetValue(BorderWidthProperty); }
            set { SetValue(BorderWidthProperty, value); }
        }

        public ButtonContentLayout ContentLayout
        {
            get { return (ButtonContentLayout)GetValue(ContentLayoutProperty); }
            set { SetValue(ContentLayoutProperty, value); }
        }

        public ICommand Command
        {
            get { return (ICommand)GetValue(CommandProperty); }
            set { SetValue(CommandProperty, value); }
        }

        public object CommandParameter
        {
            get { return GetValue(CommandParameterProperty); }
            set { SetValue(CommandParameterProperty, value); }
        }

        public Font Font
        {
            get { return (Font)GetValue(FontProperty); }
            set { SetValue(FontProperty, value); }
        }

        public FileImageSource Image
        {
            get { return (FileImageSource)GetValue(ImageProperty); }
            set { SetValue(ImageProperty, value); }
        }

        public string Text
        {
            get { return (string)GetValue(TextProperty); }
            set { SetValue(TextProperty, value); }
        }

        public Color TextColor
        {
            get { return (Color)GetValue(TextColorProperty); }
            set { SetValue(TextColorProperty, value); }
        }

        // Uses SetValueCore so the value can be written even while the
        // property is otherwise coerced/controlled by the command state.
        bool IsEnabledCore
        {
            set { SetValueCore(IsEnabledProperty, value); }
        }

        // Invoked by platform renderers on tap: executes the command (if any)
        // and then raises the Clicked event.
        void IButtonController.SendClicked()
        {
            ICommand cmd = Command;
            if (cmd != null)
                cmd.Execute(CommandParameter);

            EventHandler handler = Clicked;
            if (handler != null)
                handler(this, EventArgs.Empty);
        }

        public FontAttributes FontAttributes
        {
            get { return (FontAttributes)GetValue(FontAttributesProperty); }
            set { SetValue(FontAttributesProperty, value); }
        }

        public string FontFamily
        {
            get { return (string)GetValue(FontFamilyProperty); }
            set { SetValue(FontFamilyProperty, value); }
        }

        [TypeConverter(typeof(FontSizeConverter))]
        public double FontSize
        {
            get { return (double)GetValue(FontSizeProperty); }
            set { SetValue(FontSizeProperty, value); }
        }

        public event EventHandler Clicked;

        public Button()
        {
            _platformConfigurationRegistry = new Lazy<PlatformConfigurationRegistry<Button>>(() => new PlatformConfigurationRegistry<Button>(this));
        }

        /// <summary>Returns the platform-specific configuration for this Button.</summary>
        public IPlatformElementConfiguration<T, Button> On<T>() where T : IConfigPlatform
        {
            return _platformConfigurationRegistry.Value.On<T>();
        }

        protected override void OnBindingContextChanged()
        {
            // Propagate the binding context to the image source so bound
            // file names resolve against the same context.
            FileImageSource image = Image;
            if (image != null)
                SetInheritedBindingContext(image, BindingContext);

            base.OnBindingContextChanged();
        }

        protected override void OnPropertyChanging(string propertyName = null)
        {
            if (propertyName == CommandProperty.PropertyName)
            {
                // Detach from the outgoing command before it is replaced.
                ICommand cmd = Command;
                if (cmd != null)
                    cmd.CanExecuteChanged -= CommandCanExecuteChanged;
            }

            base.OnPropertyChanging(propertyName);
        }

        // Mirrors the command's CanExecute into IsEnabled.
        void CommandCanExecuteChanged(object sender, EventArgs eventArgs)
        {
            ICommand cmd = Command;
            if (cmd != null)
                IsEnabledCore = cmd.CanExecute(CommandParameter);
        }

        // Font (struct) changed: push its components into FontFamily/FontSize/
        // FontAttributes, with _cancelEvents preventing the reverse handler
        // from firing while we do so.
        static void FontStructPropertyChanged(BindableObject bindable, object oldValue, object newValue)
        {
            var button = (Button)bindable;

            if (button._cancelEvents)
                return;

            button.InvalidateMeasureInternal(InvalidationTrigger.MeasureChanged);

            button._cancelEvents = true;

            if (button.Font == Font.Default)
            {
                button.FontFamily = null;
                button.FontSize = Device.GetNamedSize(NamedSize.Default, button);
                button.FontAttributes = FontAttributes.None;
            }
            else
            {
                button.FontFamily = button.Font.FontFamily;
                if (button.Font.UseNamedSize)
                {
                    button.FontSize = Device.GetNamedSize(button.Font.NamedSize, button.GetType(), true);
                }
                else
                {
                    button.FontSize = button.Font.FontSize;
                }
                button.FontAttributes = button.Font.FontAttributes;
            }

            button._cancelEvents = false;
        }

        // Command changed: subscribe and immediately sync enabled state, or
        // re-enable the button when the command was cleared.
        void OnCommandChanged()
        {
            if (Command != null)
            {
                Command.CanExecuteChanged += CommandCanExecuteChanged;
                CommandCanExecuteChanged(this, EventArgs.Empty);
            }
            else
                IsEnabledCore = true;
        }

        void OnSourceChanged(object sender, EventArgs eventArgs)
        {
            OnPropertyChanged(ImageProperty.PropertyName);
            InvalidateMeasureInternal(InvalidationTrigger.MeasureChanged);
        }

        void OnSourcePropertyChanged(ImageSource oldvalue, ImageSource newvalue)
        {
            if (newvalue != null)
            {
                newvalue.SourceChanged += OnSourceChanged;
                SetInheritedBindingContext(newvalue, BindingContext);
            }
            InvalidateMeasureInternal(InvalidationTrigger.MeasureChanged);
        }

        void OnSourcePropertyChanging(ImageSource oldvalue, ImageSource newvalue)
        {
            // Unhook from the outgoing source to avoid leaking the handler.
            if (oldvalue != null)
                oldvalue.SourceChanged -= OnSourceChanged;
        }

        // FontFamily/FontSize/FontAttributes changed: rebuild the Font struct,
        // with _cancelEvents preventing FontStructPropertyChanged from firing back.
        static void SpecificFontPropertyChanged(BindableObject bindable, object oldValue, object newValue)
        {
            var button = (Button)bindable;

            if (button._cancelEvents)
                return;

            button.InvalidateMeasureInternal(InvalidationTrigger.MeasureChanged);

            button._cancelEvents = true;

            if (button.FontFamily != null)
            {
                button.Font = Font.OfSize(button.FontFamily, button.FontSize).WithAttributes(button.FontAttributes);
            }
            else
            {
                button.Font = Font.SystemFontOfSize(button.FontSize, button.FontAttributes);
            }

            button._cancelEvents = false;
        }

        /// <summary>
        /// Describes where the image appears relative to the text, and the
        /// spacing between them.
        /// </summary>
        [DebuggerDisplay("Image Position = {Position}, Spacing = {Spacing}")]
        [TypeConverter(typeof(ButtonContentTypeConverter))]
        public sealed class ButtonContentLayout
        {
            public enum ImagePosition
            {
                Left,
                Top,
                Right,
                Bottom
            }

            public ButtonContentLayout(ImagePosition position, double spacing)
            {
                Position = position;
                Spacing = spacing;
            }

            public ImagePosition Position { get; }

            public double Spacing { get; }

            public override string ToString()
            {
                return $"Image Position = {Position}, Spacing = {Spacing}";
            }
        }

        /// <summary>
        /// Converts strings like "Top", "10", "Right, 20" or "5, Bottom"
        /// into a ButtonContentLayout. Position and spacing may appear in
        /// either order; either part may be omitted.
        /// </summary>
        public sealed class ButtonContentTypeConverter : TypeConverter
        {
            public override object ConvertFromInvariantString(string value)
            {
                if (value == null)
                {
                    throw new InvalidOperationException($"Cannot convert null into {typeof(ButtonContentLayout)}");
                }

                string[] parts = value.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries);

                if (parts.Length != 1 && parts.Length != 2)
                {
                    throw new InvalidOperationException($"Cannot convert \"{value}\" into {typeof(ButtonContentLayout)}");
                }

                double spacing = DefaultSpacing;
                var position = ButtonContentLayout.ImagePosition.Left;

                // A part starting with a digit is the spacing; the other (if
                // present) is the position name. -1 marks "not supplied".
                var spacingFirst = char.IsDigit(parts[0][0]);

                int positionIndex = spacingFirst ? (parts.Length == 2 ? 1 : -1) : 0;
                int spacingIndex = spacingFirst ? 0 : (parts.Length == 2 ? 1 : -1);

                if (spacingIndex > -1)
                {
                    spacing = double.Parse(parts[spacingIndex]);
                }

                if (positionIndex > -1)
                {
                    position = (ButtonContentLayout.ImagePosition)Enum.Parse(typeof(ButtonContentLayout.ImagePosition), parts[positionIndex], true);
                }

                return new ButtonContentLayout(position, spacing);
            }
        }
    }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

namespace TestCases.OpenXml4Net.OPC.Compliance
{
    using System;
    using NPOI.OpenXml4Net.OPC;
    using NPOI.OpenXml4Net.Exceptions;
    using NUnit.Framework;
    using System.IO;

    /**
     * Test core properties Open Packaging Convention compliance.
     *
     * M4.1: The format designer shall specify and the format producer shall create
     * at most one core properties relationship for a package. A format consumer
     * shall consider more than one core properties relationship for a package to be
     * an error. If present, the relationship shall target the Core Properties part.
     * (POI relaxes this on reading, as Office sometimes breaks this)
     *
     * M4.2: The format designer shall not specify and the format producer shall not
     * create Core Properties that use the Markup Compatibility namespace as defined
     * in Annex F, "Standard Namespaces and Content Types". A format consumer shall
     * consider the use of the Markup Compatibility namespace to be an error.
     *
     * M4.3: Producers shall not create a document element that contains refinements
     * to the Dublin Core elements, except for the two specified in the schema:
     * <dcterms:created> and <dcterms:modified> Consumers shall consider a document
     * element that violates this constraint to be an error.
     *
     * M4.4: Producers shall not create a document element that contains the
     * xml:lang attribute. Consumers shall consider a document element that violates
     * this constraint to be an error.
     *
     * M4.5: Producers shall not create a document element that contains the
     * xsi:type attribute, except for a <dcterms:created> or <dcterms:modified>
     * element where the xsi:type attribute shall be present and shall hold the
     * value dcterms:W3CDTF, where dcterms is the namespace prefix of the Dublin
     * Core namespace. Consumers shall consider a document element that violates
     * this constraint to be an error.
     *
     * @author Julien Chable
     */
    [TestFixture]
    public class TestOPCComplianceCoreProperties
    {
        /// <summary>
        /// A package with exactly one core properties part must open without error.
        /// </summary>
        [Test]
        public void TestCorePropertiesPart()
        {
            OPCPackage pkg;
            string path = OpenXml4NetTestDataSamples.GetSampleFileName("OPCCompliance_CoreProperties_OnlyOneCorePropertiesPart.docx");
            pkg = OPCPackage.Open(path);
            pkg.Revert();
        }

        /// <summary>
        /// Opens the compliance sample "OPCCompliance_CoreProperties_&lt;suffix&gt;" and
        /// returns the message of the InvalidFormatException it is expected to raise.
        /// Throws AssertionException when no compliance exception was raised at all.
        /// </summary>
        /// <param name="sampleNameSuffix">Suffix of the compliance sample file name.</param>
        /// <returns>The message of the expected <see cref="InvalidFormatException"/>.</returns>
        private static String ExtractInvalidFormatMessage(String sampleNameSuffix)
        {
            Stream is1 = OpenXml4NetTestDataSamples.OpenComplianceSampleStream("OPCCompliance_CoreProperties_" + sampleNameSuffix);
            OPCPackage pkg;
            try
            {
                pkg = OPCPackage.Open(is1);
            }
            catch (InvalidFormatException e)
            {
                // The compliance check fired as expected - hand the message back for assertion.
                return e.Message;
            }
            pkg.Revert();
            throw new AssertionException("expected OPC compliance exception was not thrown");
        }

        /**
         * Test M4.1 rule.
         */
        [Test]
        public void TestOnlyOneCorePropertiesPart()
        {
            // We have relaxed this check, so we can read the file anyway.
            // ExtractInvalidFormatMessage throws AssertionException when NO
            // InvalidFormatException was raised, i.e. when the check is relaxed.
            // NOTE: the fail-check must live OUTSIDE the try block - an
            // Assert.Fail inside it would throw AssertionException and be
            // swallowed by the catch, silently passing the test.
            bool relaxed = false;
            try
            {
                ExtractInvalidFormatMessage("OnlyOneCorePropertiesPartFAIL.docx");
            }
            catch (AssertionException)
            {
                relaxed = true;
            }
            Assert.IsTrue(relaxed, "M4.1 should be being relaxed");

            // We will use the first core properties, and ignore the others
            Stream is1 = OpenXml4NetTestDataSamples.OpenSampleStream("MultipleCoreProperties.docx");
            OPCPackage pkg = OPCPackage.Open(is1);

            // We can see 2 by type
            Assert.AreEqual(2, pkg.GetPartsByContentType(ContentTypes.CORE_PROPERTIES_PART).Count);

            // But only the first one by relationship
            Assert.AreEqual(1, pkg.GetPartsByRelationshipType(PackageRelationshipTypes.CORE_PROPERTIES).Count);

            // It should be core.xml not the older core1.xml
            Assert.AreEqual(
                "/docProps/core.xml",
                pkg.GetPartsByRelationshipType(PackageRelationshipTypes.CORE_PROPERTIES)[0].PartName.ToString()
            );
        }

        /// <summary>
        /// Builds a relative-or-absolute Uri from the given text.
        /// </summary>
        private static Uri CreateURI(String text)
        {
            return new Uri(text, UriKind.RelativeOrAbsolute);
        }

        /**
         * Test M4.1 rule.
         */
        [Test]
        public void TestOnlyOneCorePropertiesPart_AddRelationship()
        {
            Stream is1 = OpenXml4NetTestDataSamples.OpenComplianceSampleStream("OPCCompliance_CoreProperties_OnlyOneCorePropertiesPart.docx");
            OPCPackage pkg;
            pkg = OPCPackage.Open(is1);

            Uri partUri = CreateURI("/docProps/core2.xml");
            try
            {
                pkg.AddRelationship(PackagingUriHelper.CreatePartName(partUri), TargetMode.Internal,
                        PackageRelationshipTypes.CORE_PROPERTIES);
                // no longer fail on compliance error
                //fail("expected OPC compliance exception was not thrown");
            }
            catch (InvalidFormatException)
            {
                throw;
            }
            catch (InvalidOperationException e)
            {
                // expected during successful test
                Assert.AreEqual("OPC Compliance error [M4.1]: can't add another core properties part ! Use the built-in package method instead.", e.Message);
            }
            pkg.Revert();
        }

        /**
         * Test M4.1 rule.
         */
        [Test]
        public void TestOnlyOneCorePropertiesPart_AddPart()
        {
            String sampleFileName = "OPCCompliance_CoreProperties_OnlyOneCorePropertiesPart.docx";
            OPCPackage pkg = OPCPackage.Open(POIDataSamples.GetOpenXml4NetInstance().GetFile(sampleFileName));

            Uri partUri = CreateURI("/docProps/core2.xml");
            try
            {
                pkg.CreatePart(PackagingUriHelper.CreatePartName(partUri), ContentTypes.CORE_PROPERTIES_PART);
                // no longer fail on compliance error
                //fail("expected OPC compliance exception was not thrown");
            }
            catch (InvalidFormatException)
            {
                throw;
            }
            catch (InvalidOperationException e)
            {
                // expected during successful test
                Assert.AreEqual("OPC Compliance error [M4.1]: you try to add more than one core properties relationship in the package !", e.Message);
            }
            pkg.Revert();
        }

        /**
         * Test M4.2 rule.
         */
        [Test]
        public void TestDoNotUseCompatibilityMarkup()
        {
            String msg = ExtractInvalidFormatMessage("DoNotUseCompatibilityMarkupFAIL.docx");
            Assert.AreEqual("OPC Compliance error [M4.2]: A format consumer shall consider the use of the Markup Compatibility namespace to be an error.", msg);
        }

        /**
         * Test M4.3 rule.
         */
        [Test]
        public void TestDCTermsNamespaceLimitedUse()
        {
            String msg = ExtractInvalidFormatMessage("DCTermsNamespaceLimitedUseFAIL.docx");
            Assert.AreEqual("OPC Compliance error [M4.3]: Producers shall not create a document element that contains refinements to the Dublin Core elements, except for the two specified in the schema: <dcterms:created> and <dcterms:modified> Consumers shall consider a document element that violates this constraint to be an error.", msg);
        }

        /**
         * Test M4.4 rule.
         */
        [Test]
        public void TestUnauthorizedXMLLangAttribute()
        {
            String msg = ExtractInvalidFormatMessage("UnauthorizedXMLLangAttributeFAIL.docx");
            Assert.AreEqual("OPC Compliance error [M4.4]: Producers shall not create a document element that contains the xml:lang attribute. Consumers shall consider a document element that violates this constraint to be an error.", msg);
        }

        /**
         * Test M4.5 rule.
         */
        [Test]
        public void TestLimitedXSITypeAttribute_NotPresent()
        {
            String msg = ExtractInvalidFormatMessage("LimitedXSITypeAttribute_NotPresentFAIL.docx");
            Assert.AreEqual("The element 'created' must have the 'xsi:type' attribute present !", msg);
        }

        /**
         * Test M4.5 rule.
         */
        [Test]
        public void TestLimitedXSITypeAttribute_PresentWithUnauthorizedValue()
        {
            String msg = ExtractInvalidFormatMessage("LimitedXSITypeAttribute_PresentWithUnauthorizedValueFAIL.docx");
            Assert.AreEqual("The element 'modified' must have the 'xsi:type' attribute with the value 'dcterms:W3CDTF' !", msg);
        }
    }
}
//------------------------------------------------------------------------------
// <copyright file="ConfigurationSectionGroupCollection.cs" company="Microsoft">
//     Copyright (c) Microsoft Corporation.  All rights reserved.
// </copyright>
//------------------------------------------------------------------------------

namespace System.Configuration {
    using System.Collections;
    using System.Collections.Specialized;
    using System.Runtime.Serialization;
    using System.Security.Permissions;

    //
    // A read-mostly, name-keyed collection of the ConfigurationSectionGroup objects
    // that are immediate children of a given section group. Backed by the
    // MgmtConfigurationRecord it was created from; once detached, all mutating and
    // lookup operations throw InvalidOperationException.
    //
    [Serializable()]
    public sealed class ConfigurationSectionGroupCollection : NameObjectCollectionBase {
        // The configuration record this collection reads from and writes to;
        // null after DetachFromConfigurationRecord() has been called.
        private MgmtConfigurationRecord    _configRecord;

        // The section group whose child groups this collection represents.
        private ConfigurationSectionGroup  _configSectionGroup;

        //
        // Create the collection of all section groups in the section group.
        //
        internal ConfigurationSectionGroupCollection(MgmtConfigurationRecord configRecord, ConfigurationSectionGroup configSectionGroup) :
                base(StringComparer.Ordinal) {

            _configRecord = configRecord;
            _configSectionGroup = configSectionGroup;

            // Seed the name collection with every group factory that is a direct
            // child of our section group (both key and value are the group name).
            foreach (DictionaryEntry de in _configRecord.SectionGroupFactories) {
                FactoryId factoryId = (FactoryId) de.Value;
                if (factoryId.Group == _configSectionGroup.SectionGroupName) {
                    BaseAdd(factoryId.Name, factoryId.Name);
                }
            }
        }

        //
        // Serialize the base collection state; the config record itself is not serialized.
        //
        [SecurityPermissionAttribute(SecurityAction.Demand, SerializationFormatter=true)]
        public override void GetObjectData(SerializationInfo info, StreamingContext context) {
            base.GetObjectData(info, context);
        }

        //
        // Remove the collection from configuration system, and remove all entries
        // in the base collection so that enumeration will return an empty collection.
        //
        internal void DetachFromConfigurationRecord() {
            _configRecord = null;
            BaseClear();
        }

        // Throw if this collection has been detached from its configuration record.
        private void VerifyIsAttachedToConfigRecord() {
            if (_configRecord == null) {
                throw new InvalidOperationException(SR.GetString(SR.Config_cannot_edit_configurationsectiongroup_when_not_attached));
            }
        }

        //
        // Public Properties
        //

        // Indexer via name
        public ConfigurationSectionGroup this[string name] {
            get {
                return Get(name);
            }
        }

        // Indexer via integer index.
        public ConfigurationSectionGroup this[int index] {
            get {
                return Get(index);
            }
        }

        //
        // Public methods
        //

        //
        // Add a new section group to the collection. This will result in a new declaration and definition.
        //
        // It is an error if the section already exists.
        //
        public void Add(string name, ConfigurationSectionGroup sectionGroup) {
            VerifyIsAttachedToConfigRecord();

            _configRecord.AddConfigurationSectionGroup(_configSectionGroup.SectionGroupName, name, sectionGroup);
            BaseAdd(name, name);
        }

        //
        // Remove all section groups from the collection.
        //
        public void Clear() {
            VerifyIsAttachedToConfigRecord();

            //
            // If this is the root section group, do not require the location section to be written
            // to the file.
            //
            if (_configSectionGroup.IsRoot) {
                _configRecord.RemoveLocationWriteRequirement();
            }

            // Snapshot the keys first: Remove() mutates the base collection,
            // so we must not enumerate it directly while removing.
            string[] allKeys = BaseGetAllKeys();
            foreach (string key in allKeys) {
                Remove(key);
            }
        }

        //
        // Return the number of section groups in the collection.
        //
        public override int Count {
            get {
                return base.Count;
            }
        }

        //
        // Copy all section groups to an array, starting at the given index.
        // Throws ArgumentNullException for a null array and
        // ArgumentOutOfRangeException when the array is too small.
        //
        public void CopyTo(ConfigurationSectionGroup[] array, int index) {
            if (array == null) {
                throw new ArgumentNullException("array");
            }

            int c = Count;
            if (array.Length < c + index) {
                throw new ArgumentOutOfRangeException("index");
            }

            for (int i = 0, j = index; i < c; i++, j++) {
                array[j] = Get(i);
            }
        }

        //
        // Get the section group at a given index.
        //
        public ConfigurationSectionGroup Get(int index) {
            return Get(GetKey(index));
        }

        //
        // Get the section group with a given name.
        //
        public ConfigurationSectionGroup Get(string name) {
            VerifyIsAttachedToConfigRecord();

            // validate name
            if (String.IsNullOrEmpty(name))
                throw ExceptionUtil.ParameterNullOrEmpty("name");

            // prevent GetConfig from returning config not in this collection
            if (name.IndexOf('/') >= 0)
                return null;

            // get the section group
            string configKey = BaseConfigurationRecord.CombineConfigKey(_configSectionGroup.SectionGroupName, name);
            return _configRecord.GetSectionGroup(configKey);
        }

        // Get an enumerator over the section groups (lazily resolves each by index).
        public override IEnumerator GetEnumerator() {
            int c = Count;
            for (int i = 0; i < c; i++) {
                yield return this[i];
            }
        }

        // Get the string key at a given index.
        public string GetKey(int index) {
            return (string) BaseGetKey(index);
        }

        // Return the string keys of the collection.
        public override KeysCollection Keys {
            get {
                return base.Keys;
            }
        }

        //
        // Remove the declaration and definition of a section in this config file, including any
        // location sections in the file. This will also remove any descendant sections and
        // section groups.
        //
        // Note that if the section group is declared in a parent, we still remove the declaration and
        // definition, and the instance of ConfigurationSectionGroup will be detached from the collection.
        // However, the collection will still have a ConfigurationSectionGroup of that name in the collection,
        // only it will have the value of the immediate parent.
        //
        public void Remove(string name) {
            VerifyIsAttachedToConfigRecord();

            _configRecord.RemoveConfigurationSectionGroup(_configSectionGroup.SectionGroupName, name);

            //
            // Remove the section group from the collection if it is no longer in the
            // list of all SectionGroupFactories.
            //
            // FIX: this previously consulted SectionFactories (the table of section
            // factories), but this is a collection of section GROUPS - the ctor is
            // populated from SectionGroupFactories. Checking the wrong table made the
            // guard nearly always false, so the name was removed from the collection
            // even when the group was still declared in a parent, contradicting the
            // documented behavior above.
            //
            string configKey = BaseConfigurationRecord.CombineConfigKey(_configSectionGroup.SectionGroupName, name);
            if (!_configRecord.SectionGroupFactories.Contains(configKey)) {
                BaseRemove(name);
            }
        }

        //
        // Remove the section group at that index.
        //
        public void RemoveAt(int index) {
            VerifyIsAttachedToConfigRecord();

            Remove(GetKey(index));
        }
    }
}
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Xml;
using Umbraco.Core.Collections;

namespace Umbraco.Core
{
    /// <summary>
    /// Provides object extension methods, primarily type-conversion helpers
    /// (<c>TryConvertTo</c>) backed by per-type caches, plus debug/serialization utilities.
    /// </summary>
    public static class ObjectExtensions
    {
        // Cache the various type lookups. All caches are ConcurrentDictionary so the
        // extension methods are safe to call from multiple threads.

        // Maps a generic type to its Nullable<T> underlying type (or null when not nullable).
        private static readonly ConcurrentDictionary<Type, Type> NullableGenericCache = new ConcurrentDictionary<Type, Type>();
        // Caches a TypeConverter for (source, target) that can convert TO the target, or null.
        private static readonly ConcurrentDictionary<CompositeTypeTypeKey, TypeConverter> InputTypeConverterCache = new ConcurrentDictionary<CompositeTypeTypeKey, TypeConverter>();
        // Caches a TypeConverter for (source, target) that can convert FROM the source, or null.
        private static readonly ConcurrentDictionary<CompositeTypeTypeKey, TypeConverter> DestinationTypeConverterCache = new ConcurrentDictionary<CompositeTypeTypeKey, TypeConverter>();
        // Caches whether input is IConvertible AND source is assignable to target.
        private static readonly ConcurrentDictionary<CompositeTypeTypeKey, bool> AssignableTypeCache = new ConcurrentDictionary<CompositeTypeTypeKey, bool>();
        // Caches whether CustomBooleanTypeConverter can convert from a given type.
        private static readonly ConcurrentDictionary<Type, bool> BoolConvertCache = new ConcurrentDictionary<Type, bool>();

        // Both '.' and ',' are treated as decimal separators and normalized to the
        // current culture's separator before numeric parsing.
        private static readonly char[] NumberDecimalSeparatorsToNormalize = { '.', ',' };

        private static readonly CustomBooleanTypeConverter CustomBooleanTypeConverter = new CustomBooleanTypeConverter();

        //private static readonly ConcurrentDictionary<Type, Func<object>> ObjectFactoryCache = new ConcurrentDictionary<Type, Func<object>>();

        /// <summary>
        /// Wraps a single value as a one-element sequence.
        /// </summary>
        /// <param name="input">The value to wrap.</param>
        /// <typeparam name="T">The element type.</typeparam>
        /// <returns>An <see cref="IEnumerable{T}"/> yielding exactly <paramref name="input"/>.</returns>
        public static IEnumerable<T> AsEnumerableOfOne<T>(this T input)
        {
            return Enumerable.Repeat(input, 1);
        }

        /// <summary>
        /// Disposes the object if (and only if) it implements <see cref="IDisposable"/>.
        /// </summary>
        /// <param name="input">The object to dispose; non-disposable objects are ignored.</param>
        public static void DisposeIfDisposable(this object input)
        {
            var disposable = input as IDisposable;
            if (disposable != null) disposable.Dispose();
        }

        /// <summary>
        /// Provides a shortcut way of safely casting an input when you cannot guarantee the <typeparamref name="T"/> is
        /// an instance type (i.e., when the C# AS keyword is not applicable).
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="input">The input.</param>
        /// <returns><paramref name="input"/> as <typeparamref name="T"/>, or default(T) when not an instance.</returns>
        internal static T SafeCast<T>(this object input)
        {
            if (ReferenceEquals(null, input) || ReferenceEquals(default(T), input)) return default(T);
            if (input is T) return (T)input;
            return default(T);
        }

        /// <summary>
        /// Attempts to convert the input object to the output type.
        /// </summary>
        /// <remarks>This code is an optimized version of the original Umbraco method.
        /// Delegates to the non-generic overload, then falls back to a direct cast.</remarks>
        /// <typeparam name="T">The type to convert to</typeparam>
        /// <param name="input">The input.</param>
        /// <returns>The <see cref="Attempt{T}"/></returns>
        public static Attempt<T> TryConvertTo<T>(this object input)
        {
            var result = TryConvertTo(input, typeof(T));
            if (result.Success)
                return Attempt<T>.Succeed((T)result.Result);

            // just try to cast
            try
            {
                return Attempt<T>.Succeed((T)input);
            }
            catch (Exception e)
            {
                return Attempt<T>.Fail(e);
            }
        }

        /// <summary>
        /// Attempts to convert the input object to the output type.
        /// </summary>
        /// <remarks>This code is an optimized version of the original Umbraco method.
        /// NOTE(review): the steps below are strictly order-dependent - in particular the
        /// nullable branch may overwrite <c>input</c> with the converted inner value while
        /// <c>inputType</c> keeps the original type, and later steps rely on that.</remarks>
        /// <param name="input">The input.</param>
        /// <param name="target">The type to convert to</param>
        /// <returns>The <see cref="Attempt{Object}"/></returns>
        public static Attempt<object> TryConvertTo(this object input, Type target)
        {
            if (target == null)
            {
                return Attempt<object>.Fail();
            }

            try
            {
                if (input == null)
                {
                    // Nullable is ok
                    if (target.IsGenericType && GetCachedGenericNullableType(target) != null)
                    {
                        return Attempt<object>.Succeed(null);
                    }

                    // Reference types are ok
                    return Attempt<object>.SucceedIf(target.IsValueType == false, null);
                }

                var inputType = input.GetType();

                // Easy
                if (target == typeof(object) || inputType == target)
                {
                    return Attempt.Succeed(input);
                }

                // Check for string so that overloaders of ToString() can take advantage of the conversion.
                if (target == typeof(string))
                {
                    return Attempt<object>.Succeed(input.ToString());
                }

                // If we've got a nullable of something, we try to convert directly to that thing.
                // We cache the destination type and underlying nullable types
                // Any other generic types need to fall through
                if (target.IsGenericType)
                {
                    var underlying = GetCachedGenericNullableType(target);
                    if (underlying != null)
                    {
                        // Special case for empty strings for bools/dates which should return null if an empty string.
                        if (input is string inputString)
                        {
                            //TODO: Why the check against only bool/date when a string is null/empty? In what scenario can we convert to another type when the string is null or empty other than just being null?
                            if (string.IsNullOrEmpty(inputString) && (underlying == typeof(DateTime) || underlying == typeof(bool)))
                            {
                                return Attempt<object>.Succeed(null);
                            }
                        }

                        // Recursively call into this method with the inner (not-nullable) type and handle the outcome
                        var inner = input.TryConvertTo(underlying);

                        // And if sucessful, fall on through to rewrap in a nullable; if failed, pass on the exception
                        if (inner.Success)
                        {
                            input = inner.Result; // Now fall on through...
                        }
                        else
                        {
                            return Attempt<object>.Fail(inner.Exception);
                        }
                    }
                }
                else
                {
                    // target is not a generic type
                    if (input is string inputString)
                    {
                        // Try convert from string, returns an Attempt if the string could be
                        // processed (either succeeded or failed), else null if we need to try
                        // other methods
                        var result = TryConvertToFromString(inputString, target);
                        if (result.HasValue)
                        {
                            return result.Value;
                        }
                    }

                    // TODO: Do a check for destination type being IEnumerable<T> and source type implementing IEnumerable<T> with
                    // the same 'T', then we'd have to find the extension method for the type AsEnumerable() and execute it.
                    if (GetCachedCanAssign(input, inputType, target))
                    {
                        return Attempt.Succeed(Convert.ChangeType(input, target));
                    }
                }

                if (target == typeof(bool))
                {
                    if (GetCachedCanConvertToBoolean(inputType))
                    {
                        return Attempt.Succeed(CustomBooleanTypeConverter.ConvertFrom(input));
                    }
                }

                var inputConverter = GetCachedSourceTypeConverter(inputType, target);
                if (inputConverter != null)
                {
                    return Attempt.Succeed(inputConverter.ConvertTo(input, target));
                }

                var outputConverter = GetCachedTargetTypeConverter(inputType, target);
                if (outputConverter != null)
                {
                    return Attempt.Succeed(outputConverter.ConvertFrom(input));
                }

                if (target.IsGenericType && GetCachedGenericNullableType(target) != null)
                {
                    // cannot Convert.ChangeType as that does not work with nullable
                    // input has already been converted to the underlying type - just
                    // return input, there's an implicit conversion from T to T? anyways
                    return Attempt.Succeed(input);
                }

                // Re-check convertables since we altered the input through recursion
                if (input is IConvertible convertible2)
                {
                    return Attempt.Succeed(Convert.ChangeType(convertible2, target));
                }
            }
            catch (Exception e)
            {
                return Attempt<object>.Fail(e);
            }

            return Attempt<object>.Fail();
        }

        /// <summary>
        /// Attempts to convert the input string to the output type.
        /// </summary>
        /// <remarks>This code is an optimized version of the original Umbraco method.
        /// Returns null (not a failed Attempt) when this method cannot decide, so the
        /// caller can fall through to TypeConverter-based conversion.</remarks>
        /// <param name="input">The input.</param>
        /// <param name="target">The type to convert to</param>
        /// <returns>The <see cref="Nullable{Attempt}"/></returns>
        private static Attempt<object>? TryConvertToFromString(this string input, Type target)
        {
            // Easy
            if (target == typeof(string))
            {
                return Attempt<object>.Succeed(input);
            }

            // Null, empty, whitespaces
            if (string.IsNullOrWhiteSpace(input))
            {
                if (target == typeof(bool))
                {
                    // null/empty = bool false
                    return Attempt<object>.Succeed(false);
                }

                if (target == typeof(DateTime))
                {
                    // null/empty = min DateTime value
                    return Attempt<object>.Succeed(DateTime.MinValue);
                }

                // Cannot decide here,
                // Any of the types below will fail parsing and will return a failed attempt
                // but anything else will not be processed and will return null
                // so even though the string is null/empty we have to proceed.
            }

            // Look for type conversions in the expected order of frequency of use.
            //
            // By using a mixture of ordered if statements and switches we can optimize both for
            // fast conditional checking for most frequently used types and the branching
            // that does not depend on previous values available to switch statements.
            if (target.IsPrimitive)
            {
                if (target == typeof(int))
                {
                    if (int.TryParse(input, out var value))
                    {
                        return Attempt<object>.Succeed(value);
                    }

                    // Because decimal 100.01m will happily convert to integer 100, it
                    // makes sense that string "100.01" *also* converts to integer 100.
                    var input2 = NormalizeNumberDecimalSeparator(input);
                    return Attempt<object>.SucceedIf(decimal.TryParse(input2, out var value2), Convert.ToInt32(value2));
                }

                if (target == typeof(long))
                {
                    if (long.TryParse(input, out var value))
                    {
                        return Attempt<object>.Succeed(value);
                    }

                    // Same as int
                    var input2 = NormalizeNumberDecimalSeparator(input);
                    return Attempt<object>.SucceedIf(decimal.TryParse(input2, out var value2), Convert.ToInt64(value2));
                }

                // TODO: Should we do the decimal trick for short, byte, unsigned?

                if (target == typeof(bool))
                {
                    if (bool.TryParse(input, out var value))
                    {
                        return Attempt<object>.Succeed(value);
                    }

                    // Don't declare failure so the CustomBooleanTypeConverter can try
                    return null;
                }

                // Calling this method directly is faster than any attempt to cache it.
                switch (Type.GetTypeCode(target))
                {
                    case TypeCode.Int16:
                        return Attempt<object>.SucceedIf(short.TryParse(input, out var value), value);

                    case TypeCode.Double:
                        var input2 = NormalizeNumberDecimalSeparator(input);
                        return Attempt<object>.SucceedIf(double.TryParse(input2, out var valueD), valueD);

                    case TypeCode.Single:
                        var input3 = NormalizeNumberDecimalSeparator(input);
                        return Attempt<object>.SucceedIf(float.TryParse(input3, out var valueF), valueF);

                    case TypeCode.Char:
                        return Attempt<object>.SucceedIf(char.TryParse(input, out var valueC), valueC);

                    case TypeCode.Byte:
                        return Attempt<object>.SucceedIf(byte.TryParse(input, out var valueB), valueB);

                    case TypeCode.SByte:
                        return Attempt<object>.SucceedIf(sbyte.TryParse(input, out var valueSb), valueSb);

                    case TypeCode.UInt32:
                        return Attempt<object>.SucceedIf(uint.TryParse(input, out var valueU), valueU);

                    case TypeCode.UInt16:
                        return Attempt<object>.SucceedIf(ushort.TryParse(input, out var valueUs), valueUs);

                    case TypeCode.UInt64:
                        return Attempt<object>.SucceedIf(ulong.TryParse(input, out var valueUl), valueUl);
                }
            }
            else if (target == typeof(Guid))
            {
                return Attempt<object>.SucceedIf(Guid.TryParse(input, out var value), value);
            }
            else if (target == typeof(DateTime))
            {
                if (DateTime.TryParse(input, out var value))
                {
                    // Local times are normalized to UTC; unspecified/UTC pass through unchanged.
                    switch (value.Kind)
                    {
                        case DateTimeKind.Unspecified:
                        case DateTimeKind.Utc:
                            return Attempt<object>.Succeed(value);

                        case DateTimeKind.Local:
                            return Attempt<object>.Succeed(value.ToUniversalTime());

                        default:
                            throw new ArgumentOutOfRangeException();
                    }
                }

                return Attempt<object>.Fail();
            }
            else if (target == typeof(DateTimeOffset))
            {
                return Attempt<object>.SucceedIf(DateTimeOffset.TryParse(input, out var value), value);
            }
            else if (target == typeof(TimeSpan))
            {
                return Attempt<object>.SucceedIf(TimeSpan.TryParse(input, out var value), value);
            }
            else if (target == typeof(decimal))
            {
                var input2 = NormalizeNumberDecimalSeparator(input);
                return Attempt<object>.SucceedIf(decimal.TryParse(input2, out var value), value);
            }
            // NOTE(review): input cannot be null here (callers pattern-match a string
            // first), so the null check is redundant but harmless.
            else if (input != null && target == typeof(Version))
            {
                return Attempt<object>.SucceedIf(Version.TryParse(input, out var value), value);
            }

            // E_NOTIMPL IPAddress, BigInteger
            return null; // We can't decide...
        }

        /// <summary>
        /// Throws <see cref="ObjectDisposedException"/> for <paramref name="objectname"/>
        /// when <paramref name="isDisposed"/> is true.
        /// </summary>
        internal static void CheckThrowObjectDisposed(this IDisposable disposable, bool isDisposed, string objectname)
        {
            //TODO: Localise this exception
            if (isDisposed) throw new ObjectDisposedException(objectname);
        }

        //public enum PropertyNamesCaseType
        //{
        //    CamelCase,
        //    CaseInsensitive
        //}

        ///// <summary>
        ///// Convert an object to a JSON string with camelCase formatting
        ///// </summary>
        ///// <param name="obj"></param>
        ///// <returns></returns>
        //public static string ToJsonString(this object obj)
        //{
        //    return obj.ToJsonString(PropertyNamesCaseType.CamelCase);
        //}

        ///// <summary>
        ///// Convert an object to a JSON string with the specified formatting
        ///// </summary>
        ///// <param name="obj">The obj.</param>
        ///// <param name="propertyNamesCaseType">Type of the property names case.</param>
        ///// <returns></returns>
        //public static string ToJsonString(this object obj, PropertyNamesCaseType propertyNamesCaseType)
        //{
        //    var type = obj.GetType();
        //    var dateTimeStyle = "yyyy-MM-dd HH:mm:ss";

        //    if (type.IsPrimitive || typeof(string).IsAssignableFrom(type))
        //    {
        //        return obj.ToString();
        //    }

        //    if (typeof(DateTime).IsAssignableFrom(type) || typeof(DateTimeOffset).IsAssignableFrom(type))
        //    {
        //        return Convert.ToDateTime(obj).ToString(dateTimeStyle);
        //    }

        //    var serializer = new JsonSerializer();

        //    switch (propertyNamesCaseType)
        //    {
        //        case PropertyNamesCaseType.CamelCase:
        //            serializer.ContractResolver = new CamelCasePropertyNamesContractResolver();
        //            break;
        //    }

        //    var dateTimeConverter = new IsoDateTimeConverter
        //        {
        //            DateTimeStyles = System.Globalization.DateTimeStyles.None,
        //            DateTimeFormat = dateTimeStyle
        //        };

        //    if (typeof(IDictionary).IsAssignableFrom(type))
        //    {
        //        return JObject.FromObject(obj, serializer).ToString(Formatting.None, dateTimeConverter);
        //    }

        //    if (type.IsArray || (typeof(IEnumerable).IsAssignableFrom(type)))
        //    {
        //        return JArray.FromObject(obj, serializer).ToString(Formatting.None, dateTimeConverter);
        //    }

        //    return JObject.FromObject(obj, serializer).ToString(Formatting.None, dateTimeConverter);
        //}

        /// <summary>
        /// Converts an object into a dictionary
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <typeparam name="TProperty"></typeparam>
        /// <typeparam name="TVal"> </typeparam>
        /// <param name="o"></param>
        /// <param name="ignoreProperties">Property-access expressions naming properties to exclude.</param>
        /// <returns></returns>
        public static IDictionary<string, TVal> ToDictionary<T, TProperty, TVal>(this T o, params Expression<Func<T, TProperty>>[] ignoreProperties)
        {
            return o.ToDictionary<TVal>(ignoreProperties.Select(e => o.GetPropertyInfo(e)).Select(propInfo => propInfo.Name).ToArray());
        }

        /// <summary>
        /// Turns object into dictionary
        /// </summary>
        /// <param name="o"></param>
        /// <param name="ignoreProperties">Properties to ignore</param>
        /// <returns>A name/value dictionary of the object's non-null property values;
        /// empty when <paramref name="o"/> is null.</returns>
        public static IDictionary<string, TVal> ToDictionary<TVal>(this object o, params string[] ignoreProperties)
        {
            if (o != null)
            {
                var props = TypeDescriptor.GetProperties(o);
                var d = new Dictionary<string, TVal>();
                foreach (var prop in props.Cast<PropertyDescriptor>().Where(x => ignoreProperties.Contains(x.Name) == false))
                {
                    var val = prop.GetValue(o);
                    if (val != null)
                    {
                        d.Add(prop.Name, (TVal)val);
                    }
                }
                return d;
            }
            return new Dictionary<string, TVal>();
        }

        /// <summary>
        /// Renders an object as a compact debug string, recursing up to
        /// <paramref name="levels"/> levels deep and capping sequences at 10 items.
        /// Returns null when nothing printable was produced.
        /// </summary>
        internal static string ToDebugString(this object obj, int levels = 0)
        {
            if (obj == null) return "{null}";
            try
            {
                if (obj is string)
                {
                    return "\"{0}\"".InvariantFormat(obj);
                }
                if (obj is int || obj is Int16 || obj is Int64 || obj is float || obj is double || obj is bool || obj is int? || obj is Int16? || obj is Int64? || obj is float? || obj is double? || obj is bool?)
                {
                    return "{0}".InvariantFormat(obj);
                }
                if (obj is Enum)
                {
                    return "[{0}]".InvariantFormat(obj);
                }
                if (obj is IEnumerable)
                {
                    var enumerable = (obj as IEnumerable);
                    var items = (from object enumItem in enumerable
                                 let value = GetEnumPropertyDebugString(enumItem, levels)
                                 where value != null
                                 select value).Take(10).ToList();
                    return items.Any() ? "{{ {0} }}".InvariantFormat(String.Join(", ", items)) : null;
                }

                // Special-case KeyValuePair-shaped objects (exactly Key + Value properties).
                var props = obj.GetType().GetProperties();
                if ((props.Length == 2) && props[0].Name == "Key" && props[1].Name == "Value" && levels > -2)
                {
                    try
                    {
                        var key = props[0].GetValue(obj, null) as string;
                        var value = props[1].GetValue(obj, null).ToDebugString(levels - 1);
                        return "{0}={1}".InvariantFormat(key, value);
                    }
                    catch (Exception)
                    {
                        return "[KeyValuePropertyException]";
                    }
                }
                if (levels > -1)
                {
                    var items = (from propertyInfo in props
                                 let value = GetPropertyDebugString(propertyInfo, obj, levels)
                                 where value != null
                                 select "{0}={1}".InvariantFormat(propertyInfo.Name, value)).ToArray();
                    return items.Any() ? "[{0}]:{{ {1} }}".InvariantFormat(obj.GetType().Name, String.Join(", ", items)) : null;
                }
            }
            catch (Exception ex)
            {
                return "[Exception:{0}]".InvariantFormat(ex.Message);
            }
            return null;
        }

        /// <summary>
        /// Attempts to serialize the value to an XmlString using ToXmlString
        /// </summary>
        /// <param name="value"></param>
        /// <param name="type"></param>
        /// <returns>A succeeded Attempt with the XML string, or a failed Attempt
        /// carrying the NotSupportedException for unsupported types.</returns>
        internal static Attempt<string> TryConvertToXmlString(this object value, Type type)
        {
            try
            {
                var output = value.ToXmlString(type);
                return Attempt.Succeed(output);
            }
            catch (NotSupportedException ex)
            {
                return Attempt<string>.Fail(ex);
            }
        }

        /// <summary>
        /// Returns an XmlSerialized safe string representation for the value
        /// </summary>
        /// <param name="value"></param>
        /// <param name="type">The Type can only be a primitive type or Guid and byte[] otherwise an exception is thrown</param>
        /// <returns></returns>
        internal static string ToXmlString(this object value, Type type)
        {
            if (value == null) return string.Empty;
            if (type == typeof(string)) return (value.ToString().IsNullOrWhiteSpace() ? "" : value.ToString());
            if (type == typeof(bool)) return XmlConvert.ToString((bool)value);
            if (type == typeof(byte)) return XmlConvert.ToString((byte)value);
            if (type == typeof(char)) return XmlConvert.ToString((char)value);
            if (type == typeof(DateTime)) return XmlConvert.ToString((DateTime)value, XmlDateTimeSerializationMode.Unspecified);
            if (type == typeof(DateTimeOffset)) return XmlConvert.ToString((DateTimeOffset)value);
            if (type == typeof(decimal)) return XmlConvert.ToString((decimal)value);
            if (type == typeof(double)) return XmlConvert.ToString((double)value);
            if (type == typeof(float)) return XmlConvert.ToString((float)value);
            if (type == typeof(Guid)) return XmlConvert.ToString((Guid)value);
            if (type == typeof(int)) return XmlConvert.ToString((int)value);
            if (type == typeof(long)) return XmlConvert.ToString((long)value);
            if (type == typeof(sbyte)) return XmlConvert.ToString((sbyte)value);
            if (type == typeof(short)) return XmlConvert.ToString((short)value);
            if (type == typeof(TimeSpan)) return XmlConvert.ToString((TimeSpan)value);
            // NOTE(review): duplicate bool check below is unreachable - the bool case
            // above already returns. Left in place (behavior-identical).
            if (type == typeof(bool)) return XmlConvert.ToString((bool)value);
            if (type == typeof(uint)) return XmlConvert.ToString((uint)value);
            if (type == typeof(ulong)) return XmlConvert.ToString((ulong)value);
            if (type == typeof(ushort)) return XmlConvert.ToString((ushort)value);

            throw new NotSupportedException("Cannot convert type " + type.FullName + " to a string using ToXmlString as it is not supported by XmlConvert");
        }

        /// <summary>
        /// Returns an XmlSerialized safe string representation for the value and type
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="value"></param>
        /// <returns></returns>
        internal static string ToXmlString<T>(this object value)
        {
            return value.ToXmlString(typeof(T));
        }

        // Debug-string helper for sequence items; swallows item-level failures so one
        // bad item cannot break the whole debug rendering.
        private static string GetEnumPropertyDebugString(object enumItem, int levels)
        {
            try
            {
                return enumItem.ToDebugString(levels - 1);
            }
            catch (Exception)
            {
                return "[GetEnumPartException]";
            }
        }

        // Debug-string helper for property values; swallows getter failures so one
        // throwing property cannot break the whole debug rendering.
        private static string GetPropertyDebugString(PropertyInfo propertyInfo, object obj, int levels)
        {
            try
            {
                return propertyInfo.GetValue(obj, null).ToDebugString(levels - 1);
            }
            catch (Exception)
            {
                return "[GetPropertyValueException]";
            }
        }

        /// <summary>
        /// Returns the value as a Guid, or Guid.Empty when it is not a Guid.
        /// </summary>
        internal static Guid AsGuid(this object value)
        {
            return value is Guid ? (Guid)value : Guid.Empty;
        }

        // Rewrites both '.' and ',' to the current culture's decimal separator so
        // "100.01" and "100,01" parse identically regardless of culture.
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static string NormalizeNumberDecimalSeparator(string s)
        {
            var normalized = System.Threading.Thread.CurrentThread.CurrentCulture.NumberFormat.NumberDecimalSeparator[0];
            return s.ReplaceMany(NumberDecimalSeparatorsToNormalize, normalized);
        }

        // gets a converter for source, that can convert to target, or null if none exists
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static TypeConverter GetCachedSourceTypeConverter(Type source, Type target)
        {
            var key = new CompositeTypeTypeKey(source, target);

            if (InputTypeConverterCache.TryGetValue(key, out TypeConverter typeConverter))
            {
                return typeConverter;
            }

            TypeConverter converter = TypeDescriptor.GetConverter(source);
            if (converter.CanConvertTo(target))
            {
                return InputTypeConverterCache[key] = converter;
            }

            // Negative results are cached too (as null).
            return InputTypeConverterCache[key] = null;
        }

        // gets a converter for target, that can convert from source, or null if none exists
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static TypeConverter GetCachedTargetTypeConverter(Type source, Type target)
        {
            var key = new CompositeTypeTypeKey(source, target);

            if (DestinationTypeConverterCache.TryGetValue(key, out TypeConverter typeConverter))
            {
                return typeConverter;
            }

            TypeConverter converter = TypeDescriptor.GetConverter(target);
            if (converter.CanConvertFrom(source))
            {
                return DestinationTypeConverterCache[key] = converter;
            }

            // Negative results are cached too (as null).
            return DestinationTypeConverterCache[key] = null;
        }

        // gets the underlying type of a nullable type, or null if the type is not nullable
        // NOTE(review): GetGenericTypeDefinition throws for non-generic types, so
        // callers must check type.IsGenericType first (all call sites in TryConvertTo do).
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static Type GetCachedGenericNullableType(Type type)
        {
            if (NullableGenericCache.TryGetValue(type, out Type underlyingType))
            {
                return underlyingType;
            }

            if (type.GetGenericTypeDefinition() == typeof(Nullable<>))
            {
                Type underlying = Nullable.GetUnderlyingType(type);
                return NullableGenericCache[type] = underlying;
            }

            return NullableGenericCache[type] = null;
        }

        // gets an IConvertible from source to target type, or null if none exists
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static bool GetCachedCanAssign(object input, Type source, Type target)
        {
            var key = new CompositeTypeTypeKey(source, target);
            if (AssignableTypeCache.TryGetValue(key, out bool canConvert))
            {
                return canConvert;
            }

            // "object is" is faster than "Type.IsAssignableFrom".
            // We can use it to very quickly determine whether true/false
            if (input is IConvertible && target.IsAssignableFrom(source))
            {
                return AssignableTypeCache[key] = true;
            }

            return AssignableTypeCache[key] = false;
        }

        // determines whether a type can be converted to boolean
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static bool GetCachedCanConvertToBoolean(Type type)
        {
            if (BoolConvertCache.TryGetValue(type, out bool result))
            {
                return result;
            }

            if (CustomBooleanTypeConverter.CanConvertFrom(type))
            {
                return BoolConvertCache[type] = true;
            }

            return BoolConvertCache[type] = false;
        }
    }
}
using System;
using System.Data;
using Csla;
using Csla.Data;
using SelfLoadSoftDelete.DataAccess;
using SelfLoadSoftDelete.DataAccess.ERCLevel;

namespace SelfLoadSoftDelete.Business.ERCLevel
{
    /// <summary>
    /// H05_SubContinent_ReChild (editable child object).<br/>
    /// This is a generated base class of <see cref="H05_SubContinent_ReChild"/> business object.
    /// </summary>
    /// <remarks>
    /// This class is an item of <see cref="H04_SubContinent"/> collection.
    /// </remarks>
    [Serializable]
    public partial class H05_SubContinent_ReChild : BusinessBase<H05_SubContinent_ReChild>
    {

        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="SubContinent_Child_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> SubContinent_Child_NameProperty = RegisterProperty<string>(p => p.SubContinent_Child_Name, "Countries Child Name");
        /// <summary>
        /// Gets or sets the Countries Child Name.
        /// </summary>
        /// <value>The Countries Child Name.</value>
        public string SubContinent_Child_Name
        {
            get { return GetProperty(SubContinent_Child_NameProperty); }
            set { SetProperty(SubContinent_Child_NameProperty, value); }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="H05_SubContinent_ReChild"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="H05_SubContinent_ReChild"/> object.</returns>
        internal static H05_SubContinent_ReChild NewH05_SubContinent_ReChild()
        {
            return DataPortal.CreateChild<H05_SubContinent_ReChild>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="H05_SubContinent_ReChild"/> object, based on given parameters.
        /// </summary>
        /// <param name="subContinent_ID2">The SubContinent_ID2 parameter of the H05_SubContinent_ReChild to fetch.</param>
        /// <returns>A reference to the fetched <see cref="H05_SubContinent_ReChild"/> object.</returns>
        internal static H05_SubContinent_ReChild GetH05_SubContinent_ReChild(int subContinent_ID2)
        {
            return DataPortal.FetchChild<H05_SubContinent_ReChild>(subContinent_ID2);
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="H05_SubContinent_ReChild"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public H05_SubContinent_ReChild()
        {
            // Use factory methods and do not use direct creation.

            // show the framework that this is a child object
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="H05_SubContinent_ReChild"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            // No defaults to load here; the hook lets a partial class customize creation.
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="H05_SubContinent_ReChild"/> object from the database, based on given criteria.
        /// </summary>
        /// <param name="subContinent_ID2">The Sub Continent ID2.</param>
        protected void Child_Fetch(int subContinent_ID2)
        {
            var args = new DataPortalHookArgs(subContinent_ID2);
            OnFetchPre(args);
            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var dal = dalManager.GetProvider<IH05_SubContinent_ReChildDal>();
                var data = dal.Fetch(subContinent_ID2);
                Fetch(data);
            }
            OnFetchPost(args);
            // check all object rules and property rules
            BusinessRules.CheckRules();
        }

        /// <summary>
        /// Materializes the object from the raw data reader; expects at most one row.
        /// </summary>
        private void Fetch(IDataReader data)
        {
            using (var dr = new SafeDataReader(data))
            {
                if (dr.Read())
                {
                    Fetch(dr);
                }
            }
        }

        /// <summary>
        /// Loads a <see cref="H05_SubContinent_ReChild"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        private void Fetch(SafeDataReader dr)
        {
            // Value properties (LoadProperty bypasses authorization and business rules)
            LoadProperty(SubContinent_Child_NameProperty, dr.GetString("SubContinent_Child_Name"));
            var args = new DataPortalHookArgs(dr);
            OnFetchRead(args);
        }

        /// <summary>
        /// Inserts a new <see cref="H05_SubContinent_ReChild"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(H04_SubContinent parent)
        {
            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnInsertPre(args);
                var dal = dalManager.GetProvider<IH05_SubContinent_ReChildDal>();
                // BypassPropertyChecks avoids re-running rules while reading property values for the DAL
                using (BypassPropertyChecks)
                {
                    dal.Insert(
                        parent.SubContinent_ID,
                        SubContinent_Child_Name
                        );
                }
                OnInsertPost(args);
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="H05_SubContinent_ReChild"/> object.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update(H04_SubContinent parent)
        {
            // Nothing changed, nothing to persist.
            if (!IsDirty)
                return;

            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnUpdatePre(args);
                var dal = dalManager.GetProvider<IH05_SubContinent_ReChildDal>();
                using (BypassPropertyChecks)
                {
                    dal.Update(
                        parent.SubContinent_ID,
                        SubContinent_Child_Name
                        );
                }
                OnUpdatePost(args);
            }
        }

        /// <summary>
        /// Self deletes the <see cref="H05_SubContinent_ReChild"/> object from database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf(H04_SubContinent parent)
        {
            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnDeletePre(args);
                var dal = dalManager.GetProvider<IH05_SubContinent_ReChildDal>();
                using (BypassPropertyChecks)
                {
                    // Deletion is keyed by the parent id (one child row per parent).
                    dal.Delete(parent.SubContinent_ID);
                }
                OnDeletePost(args);
            }
        }

        #endregion

        #region DataPortal Hooks

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion

    }
}
/*
 * Copyright (c) Kuno Contributors
 *
 * This file is subject to the terms and conditions defined in
 * the LICENSE file, which is part of this source code package.
 */

using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
using Kuno.Configuration;
using Kuno.Services.Registry;
using Kuno.Text;
using Kuno.Utilities.NewId;
using Kuno.Validation;

namespace Kuno.Services.OpenApi
{
    /// <summary>
    /// This is the root document object for the API specification. It combines what previously was the Resource Listing and API Declaration (version 1.2 and earlier) together into one document.
    /// </summary>
    /// <seealso href="http://swagger.io/specification/#swaggerObject"/>
    public class OpenApiDocument
    {
        /// <summary>
        /// Gets or sets the security scheme definitions that can be used across the specification.
        /// </summary>
        /// <value>
        /// The security scheme definitions that can be used across the specification.
        /// </value>
        public IDictionary<string, SecurityScheme> SecurityDefinitions { get; set; } = new SortedDictionary<string, SecurityScheme>
        {
            { "api_key", new SecurityScheme { Type = "apiKey", Name = "api_key", In = "header" } }
        };

        /// <summary>
        /// Gets or sets the base path on which the API is served, which is relative to the host. If it is not included, the API is served directly under the host. The value MUST start with a leading slash (/). The basePath does not support path templating.
        /// </summary>
        /// <value>
        /// The base path on which the API is served.
        /// </value>
        public string BasePath { get; set; }

        /// <summary>
        /// Gets or sets the object to hold data types produced and consumed by operations.
        /// </summary>
        /// <value>
        /// The object to hold data types produced and consumed by operations.
        /// </value>
        public SchemaCollection Definitions { get; set; } = new SchemaCollection();

        /// <summary>
        /// Gets or sets the additional external documentation.
        /// </summary>
        /// <value>
        /// The additional external documentation.
        /// </value>
        public ExternalDocs ExternalDocs { get; set; }

        /// <summary>
        /// Gets or sets the host (name or ip) serving the API. This MUST be the host only and does not include the scheme nor sub-paths. It MAY include a port. If the host is not included, the host serving the documentation is to be used (including the port). The host does not support path templating.
        /// </summary>
        /// <value>
        /// The host (name or ip) serving the API.
        /// </value>
        public string Host { get; set; }

        /// <summary>
        /// Gets or sets the metadata about the API. The metadata can be used by the clients if needed.
        /// </summary>
        /// <value>
        /// The metadata about the API. The metadata can be used by the clients if needed.
        /// </value>
        public ApplicationInformation Info { get; set; }

        /// <summary>
        /// Gets or sets the available paths and operations for the API.
        /// </summary>
        /// <value>
        /// The available paths and operations for the API.
        /// </value>
        public IDictionary<string, PathItem> Paths { get; set; } = new SortedDictionary<string, PathItem>(new PathComparer());

        /// <summary>
        /// Gets or sets the transfer protocol of the API. Values MUST be from the list: "http", "https", "ws", "wss". If the schemes is not included, the default scheme to be used is the one used to access the Swagger definition itself.
        /// </summary>
        /// <value>
        /// The transfer protocol of the API. Values MUST be from the list: "http", "https", "ws", "wss". If the schemes is not included, the default scheme to be used is the one used to access the Swagger definition itself.
        /// </value>
        public string[] Schemes { get; set; } = { "http", "https" };

        /// <summary>
        /// Gets or sets the Swagger Specification version being used. It can be used by the Swagger UI and other clients to interpret the API listing. The value MUST be "2.0".
        /// </summary>
        /// <value>
        /// The Swagger Specification version being used. It can be used by the Swagger UI and other clients to interpret the API listing. The value MUST be "2.0".
        /// </value>
        public string Swagger { get; set; } = "2.0";

        /// <summary>
        /// Gets or sets the list of tags used by the specification with additional metadata. The order of the tags can be used to reflect on their order by the parsing tools. Not all tags that are used by the Operation Object must be declared. The tags that are not declared may be organized randomly or based on the tools' logic. Each tag name in the list MUST be unique.
        /// </summary>
        /// <value>
        /// The list of tags used by the specification with additional metadata.
        /// </value>
        public List<Tag> Tags { get; set; } = new List<Tag>
        {
            new Tag
            {
                Name = "Kuno",
                Description = "System defined endpoints."
            }
        };

        /// <summary>
        /// Loads the document using the specified service inventory.
        /// </summary>
        /// <param name="services">The service inventory.</param>
        /// <param name="includeAll">Indicates whether all endpoints should be retreived or just public.</param>
        /// <param name="versions">Indicates whether versioned paths should be returned.</param>
        public void Load(ServiceRegistry services, bool includeAll = false, bool versions = false)
        {
            this.Info = services.ApplicationInformation;

            var endPoints = services.EndPoints.Where(e => includeAll || e.Public).ToList();
            foreach (var group in endPoints.GroupBy(e => e.Path))
            {
                foreach (var endPoint in group)
                {
                    // Request/response types are optional; register only the ones that exist.
                    if (endPoint.Function.RequestType != null)
                    {
                        this.Definitions.GetOrAdd(endPoint.Function.RequestType);
                    }
                    if (endPoint.Function.ResponseType != null)
                    {
                        this.Definitions.GetOrAdd(endPoint.Function.ResponseType);
                    }
                    if (endPoint.Path != null)
                    {
                        if (versions)
                        {
                            this.Paths.Add("/" + endPoint.VersionedPath, new PathItem
                            {
                                Post = this.GetPostOperation(endPoint),
                                Get = this.GetGetOperation(endPoint)
                            });
                        }
                        else if (endPoint.Version == group.Max(e => e.Version))
                        {
                            // Without versioned paths, only the latest version of each path is published.
                            this.Paths.Add("/" + endPoint.Path, new PathItem
                            {
                                Post = this.GetPostOperation(endPoint),
                                Get = this.GetGetOperation(endPoint)
                            });
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Builds the GET operation for the endpoint, mapping request properties to query
        /// parameters. Returns <c>null</c> when the endpoint is not a GET endpoint.
        /// </summary>
        private Operation GetGetOperation(EndPoint endPoint)
        {
            if (endPoint.HttpMethod == "GET")
            {
                var parameters = new List<IParameter>();
                // FIX: RequestType may be null (Load tolerates request-less endpoints);
                // previously this dereferenced it unconditionally and threw.
                if (endPoint.Function.RequestType != null)
                {
                    foreach (var property in endPoint.Function.RequestType.GetProperties())
                    {
                        var schema = this.Definitions.CreatePrimitiveSchema(property.PropertyType);
                        // Any validation attribute marks the query parameter as required.
                        var required = property.GetCustomAttributes<ValidationAttribute>(true).Any();
                        parameters.Add(new NonBodyParameter
                        {
                            Name = property.Name,
                            Required = required,
                            In = "query",
                            Description = property.GetComments()?.Value,
                            Type = schema.Type,
                            Format = schema.Format
                        });
                    }
                }
                var operation = new Operation
                {
                    Tags = this.GetTags(endPoint).ToList(),
                    Summary = endPoint.Name,
                    Description = endPoint.Function.Summary,
                    Consumes = new List<string> { "application/json" },
                    Produces = new List<string> { "application/json" },
                    OperationId = this.GetName("GET_", endPoint),
                    Parameters = parameters,
                    Responses = this.GetResponses(endPoint)
                };
                if (operation.Responses.ContainsKey("401"))
                {
                    operation.IncludeSecurity("api_key");
                }
                if (endPoint.IsVersioned)
                {
                    operation.Deprecated = true;
                }
                return operation;
            }
            return null;
        }

        // Operation ids issued so far; used to keep operationIds unique within the document.
        private readonly List<string> _usedNames = new List<string>();

        /// <summary>
        /// Builds a unique, alphanumeric operation id for the endpoint, suffixing a counter
        /// on collisions. (Note: prefix already ends in "_", so ids contain "__".)
        /// </summary>
        private string GetName(string prefix, EndPoint endPoint)
        {
            var regex = new Regex("[^a-zA-Z0-9]");
            var content = $"{prefix}_{regex.Replace(endPoint.Name, "")}_v{endPoint.Version}";
            if (_usedNames.Contains(content))
            {
                int i = 0;
                while (_usedNames.Contains(content + "_" + ++i))
                {
                }
                content = content + "_" + i;
            }
            _usedNames.Add(content);
            return content;
        }

        /// <summary>
        /// Builds the POST operation for the endpoint. Returns <c>null</c> when the endpoint
        /// is not a POST endpoint.
        /// </summary>
        private Operation GetPostOperation(EndPoint endPoint)
        {
            if (endPoint.HttpMethod == "POST")
            {
                var operation = new Operation
                {
                    Tags = this.GetTags(endPoint).ToList(),
                    Summary = endPoint.Name,
                    Description = endPoint.Function.Summary,
                    Consumes = new List<string> { "application/json" },
                    Produces = new List<string> { "application/json" },
                    OperationId = this.GetName("POST_", endPoint),
                    Parameters = this.GetPostParameters(endPoint).ToList(),
                    Responses = this.GetResponses(endPoint)
                };
                if (operation.Responses.ContainsKey("401"))
                {
                    operation.IncludeSecurity("api_key");
                }
                if (endPoint.IsVersioned)
                {
                    operation.Deprecated = true;
                }
                return operation;
            }
            return null;
        }

        /// <summary>
        /// Yields the body parameter for a POST endpoint, when it declares a request type.
        /// </summary>
        private IEnumerable<IParameter> GetPostParameters(EndPoint endPoint)
        {
            if (endPoint.Function.RequestType != null && endPoint.Function.RequestType != typeof(object))
            {
                yield return new BodyParameter
                {
                    Schema = this.Definitions.GetReferenceSchema(endPoint.Function.RequestType, endPoint.Function.RequestType.GetComments()?.Summary)
                };
            }
        }

        /// <summary>
        /// Builds the response map (200/204 plus 400/401/403/409 derived from validation
        /// attributes, endpoint rules and the security flag).
        /// </summary>
        private Dictionary<string, Response> GetResponses(EndPoint endPoint)
        {
            var responses = new Dictionary<string, Response>();
            if (endPoint.Function.ResponseType == null)
            {
                responses.Add("204", new Response
                {
                    Description = "No content is returned from this endpoint."
                });
            }
            else
            {
                var responseType = endPoint.Function.ResponseType;
                responses.Add("200", new Response
                {
                    Description = responseType.GetComments()?.Summary ?? "",
                    Schema = this.Definitions.GetReferenceSchema(responseType, endPoint.Function.ResponseType.GetComments()?.Summary)
                });
            }

            var builder = new StringBuilder();
            // FIX: guard against endpoints without a request type (see Load); the property
            // scan below used to throw a NullReferenceException for them.
            if (endPoint.Function.RequestType != null)
            {
                foreach (var property in endPoint.Function.RequestType.GetProperties())
                {
                    foreach (var attribute in property.GetCustomAttributes<ValidationAttribute>(true))
                    {
                        builder.AppendLine("1. " + attribute.GetValidationError(property).Message + "\r\n");
                    }
                }
            }
            foreach (var source in endPoint.Function.Rules.Where(e => e.RuleType == ValidationType.Input))
            {
                builder.AppendLine(source.Name.ToTitle() + ". ");
            }
            if (builder.Length > 0)
            {
                responses.Add("400", new Response
                {
                    Schema = this.Definitions.GetReferenceSchema(typeof(ValidationError[]), null),
                    Description = builder.ToString()
                });
            }
            builder.Clear();

            if (endPoint.Secure)
            {
                responses.Add("401", new Response
                {
                    Schema = this.Definitions.GetReferenceSchema(typeof(ValidationError[]), null),
                    Description = "This endpoint requires authorization."
                });
            }

            foreach (var source in endPoint.Function.Rules.Where(e => e.RuleType == ValidationType.Business))
            {
                builder.AppendLine("1. " + source.Name.ToTitle() + ".\r\n");
            }
            if (builder.Length > 0)
            {
                responses.Add("409", new Response
                {
                    Schema = this.Definitions.GetReferenceSchema(typeof(ValidationError[]), null),
                    Description = builder.ToString()
                });
            }
            builder.Clear();

            foreach (var source in endPoint.Function.Rules.Where(e => e.RuleType == ValidationType.Security))
            {
                builder.AppendLine("1. " + source.Name.ToTitle() + ".\r\n");
            }
            if (builder.Length > 0)
            {
                // Security rules imply both 401 (if not already added) and 403.
                if (!responses.ContainsKey("401"))
                {
                    responses.Add("401", new Response
                    {
                        Schema = this.Definitions.GetReferenceSchema(typeof(ValidationError[]), null),
                        Description = "This endpoint requires authorization."
                    });
                }
                responses.Add("403", new Response
                {
                    Schema = this.Definitions.GetReferenceSchema(typeof(ValidationError[]), null),
                    Description = builder.ToString()
                });
            }
            return responses;
        }

        /// <summary>
        /// Yields the tags for the endpoint: "System" for underscore-prefixed paths,
        /// explicit endpoint tags when present, otherwise a tag derived from the path segment.
        /// </summary>
        private IEnumerable<string> GetTags(EndPoint endPoint)
        {
            if (endPoint.Path == null)
            {
                yield break;
            }
            if (endPoint.Path.StartsWith("_") || endPoint.IsVersioned && endPoint.Path.Split('/').ElementAt(1)?.StartsWith("_") == true)
            {
                yield return "System";
                yield break;
            }
            if (endPoint.Tags != null)
            {
                foreach (var tag in endPoint.Tags)
                {
                    yield return tag;
                }
                yield break;
            }
            var segments = endPoint.Path.Split('/');
            if (segments.Length >= 3)
            {
                // Versioned paths carry the version in segment 1, so the group name shifts.
                if (endPoint.IsVersioned)
                {
                    yield return segments[2].Replace("-", " ").ToTitle();
                }
                else
                {
                    yield return segments[1].Replace("-", " ").ToTitle();
                }
            }
        }

        /// <summary>
        /// Orders path keys using <see cref="EndPointPath"/> comparison semantics.
        /// </summary>
        private class PathComparer : IComparer<string>
        {
            public int Compare(string x, string y)
            {
                var left = new EndPointPath(x);
                var right = new EndPointPath(y);
                return left.CompareTo(right);
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using gView.Framework.FDB;
using gView.Framework.Data;
using gView.Framework.system;
using gView.Framework.UI.Dialogs;
using gView.Framework.system.UI;
using gView.Framework.Offline;
using gView.DataSources.Fdb.UI;
using gView.DataSources.Fdb.MSAccess;
using gView.Framework.Geometry;

namespace copyFeatureClass
{
    /// <summary>
    /// Command line tool that copies a feature class from a source dataset to a
    /// destination dataset, optionally registering the copy as a replication
    /// check-out session. Returns 0 on success, 1 on any error.
    /// </summary>
    class Program
    {
        static int Main(string[] args)
        {
            string source_connstr = "", source_fc = "";
            string dest_connstr = "", dest_fc = "";
            string[] sourceFields = null, destFields = null;
            Guid source_guid = Guid.Empty, dest_guid = Guid.Empty;
            bool checkout = false;
            string checkoutDescription = String.Empty;
            string child_rights = "iud";
            string parent_rights = "iud";
            string conflict_handling = "normal";
            ISpatialIndexDef treeDef = null;
            geometryType? sourceGeometryType = null;

            // Every option consumes a following value via args[++i], hence the
            // "< args.Length - 1" bound (a trailing option without a value is ignored).
            for (int i = 0; i < args.Length - 1; i++)
            {
                if (args[i] == "-source_connstr") source_connstr = args[++i];
                else if (args[i] == "-source_guid") source_guid = new Guid(args[++i]);
                else if (args[i] == "-source_fc") source_fc = args[++i];
                else if (args[i] == "-dest_connstr") dest_connstr = args[++i];
                else if (args[i] == "-dest_guid") dest_guid = new Guid(args[++i]);
                else if (args[i] == "-dest_fc") dest_fc = args[++i];
                else if (args[i] == "-sourcefields") sourceFields = args[++i].Split(';');
                else if (args[i] == "-destfields") destFields = args[++i].Split(';');
                else if (args[i] == "-checkout")
                {
                    checkout = true;
                    checkoutDescription = args[++i];
                }
                else if (args[i] == "-source_geometrytype")
                {
                    // Explicit geometry type for sources that cannot report one (e.g. SQL Geometry).
                    geometryType geomType;
                    if (Enum.TryParse<geometryType>(args[++i], out geomType))
                    {
                        sourceGeometryType = geomType;
                        Console.WriteLine("Source geometry type: " + sourceGeometryType);
                    }
                }
                else if (args[i] == "-pr") parent_rights = args[++i];
                else if (args[i] == "-cr") child_rights = args[++i];
                else if (args[i] == "-ch") conflict_handling = args[++i];
                //else if (args[i] == "-si")
                //{
                //    treeDef = BinaryTreeDef.FromString(args[++i]);
                //    if (treeDef == null)
                //    {
                //        Console.WriteLine("Invalid Spatial Index Def. " + args[i]);
                //    }
                //}
            }

            // All six core arguments are mandatory; print usage and bail otherwise.
            if (source_connstr == "" || source_fc == "" || source_guid == Guid.Empty ||
                dest_connstr == "" || dest_fc == "" || dest_guid == Guid.Empty)
            {
                Console.WriteLine("USAGE:");
                Console.WriteLine("gView.Cmd.CopyFeatureclass -source_connstr <Source Dataset Connection String>");
                Console.WriteLine(" -source_guid <GUID of Dataset Extension>");
                Console.WriteLine(" -source_fc <Featureclass name>");
                Console.WriteLine(" -dest_connstr <Destination Dataset Connection String>");
                Console.WriteLine(" -dest_guid <GUID of Dataset Extension>");
                Console.WriteLine(" -dest_fc <Featureclass name>");
                Console.WriteLine(" when check out featureclass:");
                Console.WriteLine(" -checkout <Description> ... Write checkout information");
                Console.WriteLine(" -pr ... parent rights. <iud|iu|ud|...> (i..INSERT, u..UPDATE, d..DELETE)");
                Console.WriteLine(" -cr ... child rights. <iud|iu|ud|...> (i..INSERT, u..UPDATE, d..DELETE)");
                Console.WriteLine(" -ch <none|normal|parent_wins|child_wins|newer_wins> ... conflict handling");
                Console.WriteLine(" optional:");
                Console.WriteLine(" -source_geometrytype <Point,Polyline,Polygon> ... if source geometrytype is not explizit specified (SQL Geometry)");
                return 1;
            }

            IFeatureDataset sourceDS, destDS;
            IFeatureClass sourceFC;

            // Instantiate and open the source dataset via the plugin registry.
            PlugInManager compMan = new PlugInManager();
            object comp = compMan.CreateInstance(source_guid);
            if (!(comp is IFeatureDataset))
            {
                Console.WriteLine("Component with GUID '" + source_guid.ToString() + "' is not a feature dataset...");
                return 1;
            }
            sourceDS = (IFeatureDataset)comp;
            sourceDS.ConnectionString = source_connstr;
            sourceDS.Open();

            sourceFC = GetFeatureclass(sourceDS, source_fc);
            if (sourceFC == null)
            {
                Console.WriteLine("Can't find featureclass '" + source_fc + "' in source dataset...");
                sourceDS.Dispose();
                return 1;
            }
            // NOTE(review): "Souorce" is a typo in the user-facing warning (left as-is here;
            // fixing it would change program output).
            if (String.IsNullOrWhiteSpace(sourceFC.IDFieldName))
            {
                Console.WriteLine("WARNING: Souorce FeatureClass has no IDField -> Bad performance!!");
            }
            Console.WriteLine("Source FeatureClass: " + sourceFC.Name);
            Console.WriteLine("-----------------------------------------------------");
            Console.WriteLine("Shape Field: " + sourceFC.ShapeFieldName);
            if (String.IsNullOrWhiteSpace(sourceFC.IDFieldName))
            {
                Console.WriteLine("WARNING: Souorce FeatureClass has no IDField -> Bad performance!!");
            }
            else
            {
                Console.WriteLine("Id Field : " + sourceFC.IDFieldName);
            }
            Console.WriteLine();
            Console.WriteLine("Import: " + source_fc);
            Console.WriteLine("-----------------------------------------------------");

            // Build the source-to-destination field mapping: either an explicit pairwise
            // mapping from -sourcefields/-destfields, or all non-ID/non-shape fields.
            FieldTranslation fieldTranslation = new FieldTranslation();
            if (sourceFields != null && destFields != null)
            {
                if (sourceFields.Length != destFields.Length)
                {
                    Console.WriteLine("Error in field definition...");
                    sourceDS.Dispose();
                    return 1;
                }
                for (int i = 0; i < sourceFields.Length; i++)
                {
                    IField field = sourceFC.FindField(sourceFields[i]);
                    if (field == null)
                    {
                        Console.WriteLine("Error: Can't find field '" + sourceFields[i] + "'...");
                        sourceDS.Dispose();
                        return 1;
                    }
                    fieldTranslation.Add(field, destFields[i]);
                }
            }
            else
            {
                foreach (IField field in sourceFC.Fields.ToEnumerable())
                {
                    // ID and shape fields are managed by the destination database itself.
                    if (field.type == FieldType.ID || field.type == FieldType.Shape) continue;
                    fieldTranslation.Add(field, FieldTranslation.CheckName(field.name));
                }
            }

            // Open the destination: file-based databases are opened through the
            // IFileFeatureDatabase indexer, everything else as a plain feature dataset.
            comp = compMan.CreateInstance(dest_guid);
            if (comp is IFileFeatureDatabase)
            {
                IFileFeatureDatabase fileDB = (IFileFeatureDatabase)comp;
                if (!fileDB.Open(dest_connstr))
                {
                    Console.WriteLine("Error opening destination database:" + fileDB.lastErrorMsg);
                    return 1;
                }
                destDS = fileDB[dest_connstr];
            }
            else if (comp is IFeatureDataset)
            {
                destDS = (IFeatureDataset)comp;
                destDS.ConnectionString = dest_connstr;
                if (!destDS.Open())
                {
                    Console.WriteLine("Error opening destination dataset:" + destDS.lastErrorMsg);
                    return 1;
                }
            }
            else
            {
                Console.WriteLine("Component with GUID '" + dest_guid.ToString() + "' is not a feature dataset...");
                return 1;
            }

            // Check-out preconditions: both sides must support replication, the source
            // must carry a replication ID field, and the destination feature class must
            // not already be checked out in another session.
            string replIDField = String.Empty;
            if (checkout)
            {
                if (!(destDS.Database is IFeatureDatabaseReplication) ||
                    !(sourceDS.Database is IFeatureDatabaseReplication))
                {
                    Console.WriteLine("Can't checkout FROM/TO databasetype...");
                    return 1;
                }
                replIDField = Replication.FeatureClassReplicationIDFieldname(sourceFC);
                if (String.IsNullOrEmpty(replIDField))
                {
                    Console.WriteLine("Can't checkout from source featureclass. No replication ID!");
                    return 1;
                }
                IDatasetElement element = destDS[dest_fc];
                if (element != null)
                {
                    List<Guid> checkout_guids = Replication.FeatureClassSessions(element.Class as IFeatureClass);
                    if (checkout_guids != null && checkout_guids.Count != 0)
                    {
                        string errMsg = "Can't check out to this featureclass\n";
                        errMsg += "Check in the following Sessions first:\n";
                        foreach (Guid g in checkout_guids)
                            errMsg += " CHECKOUT_GUID: " + g.ToString();
                        Console.WriteLine("ERROR:\n" + errMsg);
                        return 1;
                    }
                }
            }

            if (destDS.Database is IFeatureDatabase)
            {
                if (destDS.Database is AccessFDB)
                {
                    // FDB destinations use the dedicated FDBImport, which also honors the
                    // suggested per-transaction insert count and the spatial index definition.
                    //Console.WriteLine();
                    //Console.WriteLine("Import: " + source_fc);
                    //Console.WriteLine("-----------------------------------------------------");
                    FDBImport import = new FDBImport(((IFeatureUpdater)destDS.Database).SuggestedInsertFeatureCountPerTransaction);
                    import.ReportAction += new FDBImport.ReportActionEvent(import_ReportAction);
                    import.ReportProgress += new FDBImport.ReportProgressEvent(import_ReportProgress);
                    if (checkout)
                    {
                        if (sourceDS.Database is AccessFDB)
                        {
                            // Reuse the source spatial index for the check-out copy, unless the
                            // destination uses a different geometry type in its index definition.
                            treeDef = ((AccessFDB)sourceDS.Database).FcSpatialIndexDef(source_fc);
                            if (destDS.Database is AccessFDB)
                            {
                                ISpatialIndexDef dsTreeDef = ((AccessFDB)destDS.Database).SpatialIndexDef(destDS.DatasetName);
                                if (treeDef.GeometryType != dsTreeDef.GeometryType)
                                    treeDef = dsTreeDef;
                            }
                        }
                    }
                    // NOTE(review): unlike the FeatureImport branch below, an import failure
                    // here does not return 1 — confirm whether that is intentional.
                    if (!import.ImportToNewFeatureclass((IFeatureDatabase)destDS.Database, destDS.DatasetName, dest_fc, sourceFC, fieldTranslation, true, null, treeDef, sourceGeometryType: sourceGeometryType))
                    {
                        Console.WriteLine("ERROR: " + import.lastErrorMsg);
                    }
                }
                else
                {
                    Console.WriteLine();
                    Console.WriteLine("Create: " + source_fc);
                    Console.WriteLine("-----------------------------------------------------");
                    FeatureImport import = new FeatureImport();
                    import.ReportAction += new FeatureImport.ReportActionEvent(import_ReportAction2);
                    import.ReportProgress += new FeatureImport.ReportProgressEvent(import_ReportProgress2);
                    if (!import.ImportToNewFeatureclass(destDS, dest_fc, sourceFC, fieldTranslation, true, sourceGeometryType: sourceGeometryType))
                    {
                        Console.WriteLine("ERROR: " + import.lastErrorMsg);
                        return 1;
                    }
                }
                if (checkout)
                {
                    // Register the copied feature class as a replication check-out:
                    // replication ID field, session record, and check-out locks.
                    IDatasetElement element = destDS[dest_fc];
                    if (element == null)
                    {
                        Console.WriteLine("ERROR: Can't write checkout information...");
                        return 1;
                    }
                    IFeatureClass destFC = element.Class as IFeatureClass;
                    string errMsg;
                    if (!Replication.InsertReplicationIDFieldname(destFC, replIDField, out errMsg))
                    {
                        Console.WriteLine("ERROR: " + errMsg);
                        return 1;
                    }

                    // Translate the -cr/-pr/-ch switches into replication rights/handling flags.
                    Replication.VersionRights cr = Replication.VersionRights.NONE;
                    Replication.VersionRights pr = Replication.VersionRights.NONE;
                    Replication.ConflictHandling ch = Replication.ConflictHandling.NORMAL;

                    if (child_rights.ToLower().Contains("i")) cr |= Replication.VersionRights.INSERT;
                    if (child_rights.ToLower().Contains("u")) cr |= Replication.VersionRights.UPDATE;
                    if (child_rights.ToLower().Contains("d")) cr |= Replication.VersionRights.DELETE;
                    if (parent_rights.ToLower().Contains("i")) pr |= Replication.VersionRights.INSERT;
                    if (parent_rights.ToLower().Contains("u")) pr |= Replication.VersionRights.UPDATE;
                    if (parent_rights.ToLower().Contains("d")) pr |= Replication.VersionRights.DELETE;

                    switch (conflict_handling.ToLower())
                    {
                        case "none": ch = Replication.ConflictHandling.NONE; break;
                        case "normal": ch = Replication.ConflictHandling.NORMAL; break;
                        case "parent_wins": ch = Replication.ConflictHandling.PARENT_WINS; break;
                        case "child_wins": ch = Replication.ConflictHandling.CHILD_WINS; break;
                        case "newer_wins": ch = Replication.ConflictHandling.NEWER_WINS; break;
                    }

                    if (!Replication.InsertNewCheckoutSession(sourceFC, pr, destFC, cr, ch, SystemInformation.Replace(checkoutDescription), out errMsg))
                    {
                        Console.WriteLine("ERROR: " + errMsg);
                        return 1;
                    }
                    if (!Replication.InsertCheckoutLocks(sourceFC, destFC, out errMsg))
                    {
                        Console.WriteLine("ERROR: " + errMsg);
                        return 1;
                    }
                }
            }
            else
            {
                Console.WriteLine("Destination dataset has no feature database...");
                Console.WriteLine("Can't create featureclasses for this kind of dataset...");
                return 1;
            }

            sourceDS.Dispose();
            destDS.Dispose();

            return 0;
        }

        // Tracks whether the last console write was a progress fragment, so the next
        // action message starts on its own line.
        static bool newLine = false;

        static void import_ReportProgress(FDBImport sender, int progress)
        {
            Console.Write("..." + progress);
            newLine = true;
        }

        static void import_ReportAction(FDBImport sender, string action)
        {
            if (newLine)
            {
                Console.WriteLine();
                newLine = false;
            }
            Console.WriteLine(action);
        }

        static void import_ReportProgress2(FeatureImport sender, int progress)
        {
            Console.Write("..." + progress);
            newLine = true;
        }

        static void import_ReportAction2(FeatureImport sender, string action)
        {
            if (newLine)
            {
                Console.WriteLine();
                newLine = false;
            }
            Console.WriteLine(action);
        }

        /// <summary>
        /// Resolves a feature class by name: first via the dataset indexer, then by a
        /// linear scan over all dataset elements as a fallback. Returns null if not found.
        /// </summary>
        private static IFeatureClass GetFeatureclass(IFeatureDataset ds, string name)
        {
            IDatasetElement element = ds[name];
            if (element != null && element.Class is IFeatureClass)
                return element.Class as IFeatureClass;

            foreach (IDatasetElement element2 in ds.Elements)
            {
                if (element2.Class is IFeatureClass)
                {
                    if (element2.Class.Name == name)
                        return element2.Class as IFeatureClass;
                }
            }
            return null;
        }
    }

    /// <summary>
    /// Expands the [MACHINENAME] and [USER] placeholders in check-out descriptions.
    /// </summary>
    class SystemInformation
    {
        public static string Replace(string str)
        {
            return str.Replace("[MACHINENAME]", MachineName).Replace("[USER]", UserName);
        }
        public static string MachineName
        {
            get { return System.Environment.MachineName; }
        }
        public static string UserName
        {
            get { return System.Environment.UserName; }
        }
    }
}
#region License /* * CookieCollection.cs * * This code is derived from System.Net.CookieCollection.cs of Mono * (http://www.mono-project.com). * * The MIT License * * Copyright (c) 2004,2009 Novell, Inc. (http://www.novell.com) * Copyright (c) 2012-2014 sta.blockhead * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ #endregion #region Authors /* * Authors: * - Lawrence Pit <loz@cable.a2000.nl> * - Gonzalo Paniagua Javier <gonzalo@ximian.com> * - Sebastien Pouliot <sebastien@ximian.com> */ #endregion using System; using System.Collections; using System.Collections.Generic; using System.Globalization; using System.Text; namespace WebSocketSharp.Net { /// <summary> /// Provides a collection container for instances of the <see cref="Cookie"/> class. 
/// </summary>
[Serializable]
public class CookieCollection : ICollection, IEnumerable
{
  #region Private Fields

  private List<Cookie> _cookies;
  private object       _syncRoot;

  #endregion

  #region Public Constructors

  /// <summary>
  /// Initializes a new instance of the <see cref="CookieCollection"/> class.
  /// </summary>
  public CookieCollection ()
  {
    _cookies = new List<Cookie> ();
  }

  #endregion

  #region Internal Properties

  // Direct access to the backing list for other types in this assembly.
  internal IList<Cookie> List {
    get {
      return _cookies;
    }
  }

  // A sorted copy of the cookies: by version, then name, then by path length
  // (deepest path first). The backing list itself is left untouched.
  internal IEnumerable<Cookie> Sorted {
    get {
      var copy = new List<Cookie> (_cookies);
      if (copy.Count > 1)
        copy.Sort (compareForSortedOrder);

      return copy;
    }
  }

  #endregion

  #region Public Properties

  /// <summary>
  /// Gets the number of cookies in the collection.
  /// </summary>
  /// <value>
  /// An <see cref="int"/> that represents the number of cookies in the collection.
  /// </value>
  public int Count {
    get {
      return _cookies.Count;
    }
  }

  /// <summary>
  /// Gets a value indicating whether the collection is read-only.
  /// </summary>
  /// <value>
  /// <c>true</c> if the collection is read-only; otherwise, <c>false</c>.
  /// The default value is <c>true</c>.
  /// </value>
  public bool IsReadOnly {
    // LAMESPEC: So how is one supposed to create a writable CookieCollection instance?
    // We simply ignore this property, as this collection is always writable.
    get {
      return true;
    }
  }

  /// <summary>
  /// Gets a value indicating whether the access to the collection is thread safe.
  /// </summary>
  /// <value>
  /// <c>true</c> if the access to the collection is thread safe; otherwise, <c>false</c>.
  /// The default value is <c>false</c>.
  /// </value>
  public bool IsSynchronized {
    get {
      return false;
    }
  }

  /// <summary>
  /// Gets the <see cref="Cookie"/> at the specified <paramref name="index"/> from
  /// the collection.
  /// </summary>
  /// <value>
  /// A <see cref="Cookie"/> at the specified <paramref name="index"/> in the collection.
  /// </value>
  /// <param name="index">
  /// An <see cref="int"/> that represents the zero-based index of the <see cref="Cookie"/>
  /// to find.
  /// </param>
  /// <exception cref="ArgumentOutOfRangeException">
  /// <paramref name="index"/> is out of allowable range of indexes for the collection.
  /// </exception>
  public Cookie this[int index] {
    get {
      if (index < 0 || index >= _cookies.Count)
        throw new ArgumentOutOfRangeException ("index");

      return _cookies[index];
    }
  }

  /// <summary>
  /// Gets the <see cref="Cookie"/> with the specified <paramref name="name"/> from
  /// the collection.
  /// </summary>
  /// <value>
  /// A <see cref="Cookie"/> with the specified <paramref name="name"/> in the collection,
  /// or <see langword="null"/> if not found. The name comparison is case-insensitive.
  /// </value>
  /// <param name="name">
  /// A <see cref="string"/> that represents the name of the <see cref="Cookie"/> to find.
  /// </param>
  /// <exception cref="ArgumentNullException">
  /// <paramref name="name"/> is <see langword="null"/>.
  /// </exception>
  public Cookie this[string name] {
    get {
      if (name == null)
        throw new ArgumentNullException ("name");

      // Search in sorted order so that, among duplicates, the cookie with the
      // most specific path wins.
      foreach (var cookie in Sorted) {
        if (cookie.Name.Equals (name, StringComparison.InvariantCultureIgnoreCase))
          return cookie;
      }

      return null;
    }
  }

  /// <summary>
  /// Gets an object used to synchronize access to the collection.
  /// </summary>
  /// <value>
  /// An <see cref="Object"/> used to synchronize access to the collection.
  /// </value>
  public Object SyncRoot {
    get {
      return _syncRoot ?? (_syncRoot = ((ICollection) _cookies).SyncRoot);
    }
  }

  #endregion

  #region Private Methods

  // Ordering used when emitting cookies in a request header:
  // shorter name/value pairs first.
  private static int compareForHeaderOrder (Cookie x, Cookie y)
  {
    return (x.Name.Length + x.Value.Length) - (y.Name.Length + y.Value.Length);
  }

  // Ordering used by Sorted: version, then name, then longest path first.
  private static int compareForSortedOrder (Cookie x, Cookie y)
  {
    var diff = x.Version - y.Version;
    if (diff != 0)
      return diff;

    diff = x.Name.CompareTo (y.Name);
    return diff != 0 ? diff : y.Path.Length - x.Path.Length;
  }

  // Splits a "name=value" pair (the value part is optional) into a new
  // Cookie, trimming spaces next to the '=' separator.
  private static Cookie createCookie (string pair)
  {
    string name;
    var val = String.Empty;

    var pos = pair.IndexOf ('=');
    if (pos == -1) {
      name = pair;
    }
    else if (pos == pair.Length - 1) {
      name = pair.Substring (0, pos).TrimEnd (' ');
    }
    else {
      name = pair.Substring (0, pos).TrimEnd (' ');
      val = pair.Substring (pos + 1).TrimStart (' ');
    }

    return new Cookie (name, val);
  }

  // Parses a Cookie request header (RFC 2965 style attributes prefixed
  // with '$') into a collection of cookies.
  private static CookieCollection parseRequest (string value)
  {
    var cookies = new CookieCollection ();

    Cookie cookie = null;
    var ver = 0;
    var pairs = splitCookieHeaderValue (value);
    for (var i = 0; i < pairs.Length; i++) {
      var pair = pairs[i].Trim ();
      if (pair.Length == 0)
        continue;

      if (pair.StartsWith ("$version", StringComparison.InvariantCultureIgnoreCase)) {
        ver = Int32.Parse (pair.GetValue ('=', true));
      }
      else if (pair.StartsWith ("$path", StringComparison.InvariantCultureIgnoreCase)) {
        if (cookie != null)
          cookie.Path = pair.GetValue ('=');
      }
      else if (pair.StartsWith ("$domain", StringComparison.InvariantCultureIgnoreCase)) {
        if (cookie != null)
          cookie.Domain = pair.GetValue ('=');
      }
      else if (pair.StartsWith ("$port", StringComparison.InvariantCultureIgnoreCase)) {
        var port = pair.Equals ("$port", StringComparison.InvariantCultureIgnoreCase)
                   ? "\"\""
                   : pair.GetValue ('=');

        if (cookie != null)
          cookie.Port = port;
      }
      else {
        // A pair without a '$' prefix starts a new cookie; flush the
        // previous one first.
        if (cookie != null)
          cookies.Add (cookie);

        cookie = createCookie (pair);
        if (ver != 0)
          cookie.Version = ver;
      }
    }

    if (cookie != null)
      cookies.Add (cookie);

    return cookies;
  }

  // Parses a Set-Cookie response header into a collection of cookies.
  private static CookieCollection parseResponse (string value)
  {
    var cookies = new CookieCollection ();

    Cookie cookie = null;
    var pairs = splitCookieHeaderValue (value);
    for (var i = 0; i < pairs.Length; i++) {
      var pair = pairs[i].Trim ();
      if (pair.Length == 0)
        continue;

      if (pair.StartsWith ("version", StringComparison.InvariantCultureIgnoreCase)) {
        if (cookie != null)
          cookie.Version = Int32.Parse (pair.GetValue ('=', true));
      }
      else if (pair.StartsWith ("expires", StringComparison.InvariantCultureIgnoreCase)) {
        // The Expires date contains a comma, so the header splitter cut it
        // in two; glue the next piece back on before parsing.
        var buff = new StringBuilder (pair.GetValue ('='), 32);
        if (i < pairs.Length - 1)
          buff.AppendFormat (", {0}", pairs[++i].Trim ());

        DateTime expires;
        if (!DateTime.TryParseExact (
              buff.ToString (),
              new[] { "ddd, dd'-'MMM'-'yyyy HH':'mm':'ss 'GMT'", "r" },
              CultureInfo.CreateSpecificCulture ("en-US"),
              DateTimeStyles.AdjustToUniversal | DateTimeStyles.AssumeUniversal,
              out expires))
          expires = DateTime.Now;

        // Max-Age (already applied below) takes precedence over Expires.
        if (cookie != null && cookie.Expires == DateTime.MinValue)
          cookie.Expires = expires.ToLocalTime ();
      }
      else if (pair.StartsWith ("max-age", StringComparison.InvariantCultureIgnoreCase)) {
        var max = Int32.Parse (pair.GetValue ('=', true));
        var expires = DateTime.Now.AddSeconds ((double) max);
        if (cookie != null)
          cookie.Expires = expires;
      }
      else if (pair.StartsWith ("path", StringComparison.InvariantCultureIgnoreCase)) {
        if (cookie != null)
          cookie.Path = pair.GetValue ('=');
      }
      else if (pair.StartsWith ("domain", StringComparison.InvariantCultureIgnoreCase)) {
        if (cookie != null)
          cookie.Domain = pair.GetValue ('=');
      }
      else if (pair.StartsWith ("port", StringComparison.InvariantCultureIgnoreCase)) {
        var port = pair.Equals ("port", StringComparison.InvariantCultureIgnoreCase)
                   ? "\"\""
                   : pair.GetValue ('=');

        if (cookie != null)
          cookie.Port = port;
      }
      else if (pair.StartsWith ("comment", StringComparison.InvariantCultureIgnoreCase)) {
        if (cookie != null)
          cookie.Comment = pair.GetValue ('=').UrlDecode ();
      }
      else if (pair.StartsWith ("commenturl", StringComparison.InvariantCultureIgnoreCase)) {
        if (cookie != null)
          cookie.CommentUri = pair.GetValue ('=', true).ToUri ();
      }
      else if (pair.StartsWith ("discard", StringComparison.InvariantCultureIgnoreCase)) {
        if (cookie != null)
          cookie.Discard = true;
      }
      else if (pair.StartsWith ("secure", StringComparison.InvariantCultureIgnoreCase)) {
        if (cookie != null)
          cookie.Secure = true;
      }
      else if (pair.StartsWith ("httponly", StringComparison.InvariantCultureIgnoreCase)) {
        if (cookie != null)
          cookie.HttpOnly = true;
      }
      else {
        // An unrecognized pair starts a new cookie; flush the previous one.
        if (cookie != null)
          cookies.Add (cookie);

        cookie = createCookie (pair);
      }
    }

    if (cookie != null)
      cookies.Add (cookie);

    return cookies;
  }

  // Returns the index of the stored cookie that matches name, path, domain,
  // and version (searching newest-first), or -1 if no such cookie exists.
  private int searchCookie (Cookie cookie)
  {
    var name = cookie.Name;
    var path = cookie.Path;
    var domain = cookie.Domain;
    var ver = cookie.Version;

    for (var i = _cookies.Count - 1; i >= 0; i--) {
      var c = _cookies[i];
      if (c.Name.Equals (name, StringComparison.InvariantCultureIgnoreCase) &&
          c.Path.Equals (path, StringComparison.InvariantCulture) &&
          c.Domain.Equals (domain, StringComparison.InvariantCultureIgnoreCase) &&
          c.Version == ver)
        return i;
    }

    return -1;
  }

  private static string[] splitCookieHeaderValue (string value)
  {
    return new List<string> (value.SplitHeaderValue (',', ';')).ToArray ();
  }

  #endregion

  #region Internal Methods

  internal static CookieCollection Parse (string value, bool response)
  {
    return response
           ? parseResponse (value)
           : parseRequest (value);
  }

  internal void SetOrRemove (Cookie cookie)
  {
    var pos = searchCookie (cookie);
    if (cookie.Expired) {
      // An expired cookie removes any stored match and is never kept.
      if (pos != -1)
        _cookies.RemoveAt (pos);

      return;
    }

    if (pos != -1)
      _cookies[pos] = cookie;
    else
      _cookies.Add (cookie);
  }

  internal void SetOrRemove (CookieCollection cookies)
  {
    foreach (Cookie cookie in cookies)
      SetOrRemove (cookie);
  }

  internal void Sort ()
  {
    if (_cookies.Count > 1)
      _cookies.Sort (compareForHeaderOrder);
  }

  #endregion

  #region Public Methods

  /// <summary>
  /// Adds the specified <paramref name="cookie"/> to the collection,
  /// replacing any stored cookie with the same name, path, domain, and version.
  /// </summary>
  /// <param name="cookie">
  /// A <see cref="Cookie"/> to add.
  /// </param>
  /// <exception cref="ArgumentNullException">
  /// <paramref name="cookie"/> is <see langword="null"/>.
  /// </exception>
  public void Add (Cookie cookie)
  {
    if (cookie == null)
      throw new ArgumentNullException ("cookie");

    var pos = searchCookie (cookie);
    if (pos != -1)
      _cookies[pos] = cookie;
    else
      _cookies.Add (cookie);
  }

  /// <summary>
  /// Adds the specified <paramref name="cookies"/> to the collection.
  /// </summary>
  /// <param name="cookies">
  /// A <see cref="CookieCollection"/> that contains the cookies to add.
  /// </param>
  /// <exception cref="ArgumentNullException">
  /// <paramref name="cookies"/> is <see langword="null"/>.
  /// </exception>
  public void Add (CookieCollection cookies)
  {
    if (cookies == null)
      throw new ArgumentNullException ("cookies");

    foreach (Cookie cookie in cookies)
      Add (cookie);
  }

  /// <summary>
  /// Copies the elements of the collection to the specified <see cref="Array"/>, starting at
  /// the specified <paramref name="index"/> in the <paramref name="array"/>.
  /// </summary>
  /// <param name="array">
  /// An <see cref="Array"/> that represents the destination of the elements copied from
  /// the collection.
  /// </param>
  /// <param name="index">
  /// An <see cref="int"/> that represents the zero-based index in <paramref name="array"/>
  /// at which copying begins.
  /// </param>
  /// <exception cref="ArgumentNullException">
  /// <paramref name="array"/> is <see langword="null"/>.
  /// </exception>
  /// <exception cref="ArgumentOutOfRangeException">
  /// <paramref name="index"/> is less than zero.
  /// </exception>
  /// <exception cref="ArgumentException">
  /// <para>
  /// <paramref name="array"/> is multidimensional.
  /// </para>
  /// <para>
  /// -or-
  /// </para>
  /// <para>
  /// The number of elements in the collection is greater than the available space from
  /// <paramref name="index"/> to the end of the destination <paramref name="array"/>.
  /// </para>
  /// </exception>
  /// <exception cref="InvalidCastException">
  /// The elements in the collection cannot be cast automatically to the type of the destination
  /// <paramref name="array"/>.
  /// </exception>
  public void CopyTo (Array array, int index)
  {
    if (array == null)
      throw new ArgumentNullException ("array");

    if (index < 0)
      throw new ArgumentOutOfRangeException ("index", "Less than zero.");

    if (array.Rank > 1)
      throw new ArgumentException ("Multidimensional.", "array");

    if (array.Length - index < _cookies.Count)
      throw new ArgumentException (
        "The number of elements in this collection is greater than the available space of the destination array.");

    if (!array.GetType ().GetElementType ().IsAssignableFrom (typeof (Cookie)))
      throw new InvalidCastException (
        "The elements in this collection cannot be cast automatically to the type of the destination array.");

    ((IList) _cookies).CopyTo (array, index);
  }

  /// <summary>
  /// Copies the elements of the collection to the specified array of <see cref="Cookie"/>,
  /// starting at the specified <paramref name="index"/> in the <paramref name="array"/>.
  /// </summary>
  /// <param name="array">
  /// An array of <see cref="Cookie"/> that represents the destination of the elements
  /// copied from the collection.
  /// </param>
  /// <param name="index">
  /// An <see cref="int"/> that represents the zero-based index in <paramref name="array"/>
  /// at which copying begins.
  /// </param>
  /// <exception cref="ArgumentNullException">
  /// <paramref name="array"/> is <see langword="null"/>.
  /// </exception>
  /// <exception cref="ArgumentOutOfRangeException">
  /// <paramref name="index"/> is less than zero.
  /// </exception>
  /// <exception cref="ArgumentException">
  /// The number of elements in the collection is greater than the available space from
  /// <paramref name="index"/> to the end of the destination <paramref name="array"/>.
  /// </exception>
  public void CopyTo (Cookie[] array, int index)
  {
    if (array == null)
      throw new ArgumentNullException ("array");

    if (index < 0)
      throw new ArgumentOutOfRangeException ("index", "Less than zero.");

    if (array.Length - index < _cookies.Count)
      throw new ArgumentException (
        "The number of elements in this collection is greater than the available space of the destination array.");

    _cookies.CopyTo (array, index);
  }

  /// <summary>
  /// Gets the enumerator used to iterate through the collection.
  /// </summary>
  /// <returns>
  /// An <see cref="IEnumerator"/> instance used to iterate through the collection.
  /// </returns>
  public IEnumerator GetEnumerator ()
  {
    return _cookies.GetEnumerator ();
  }

  #endregion
}
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System.Collections.Immutable;
using System.Globalization;
using System.Threading;
using Microsoft.CodeAnalysis.DocumentationComments;

namespace Microsoft.CodeAnalysis.MetadataAsSource
{
    internal partial class AbstractMetadataAsSourceService
    {
        /// <summary>
        /// Base class for symbol wrappers: every <see cref="ISymbol"/> member is
        /// forwarded verbatim to an underlying symbol, so derived wrappers only
        /// need to override the members they customize.
        /// </summary>
        private abstract class AbstractWrappedSymbol : ISymbol
        {
            // The symbol all members delegate to.
            private readonly ISymbol _symbol;

            protected readonly bool CanImplementImplicitly;
            protected readonly IDocumentationCommentFormattingService DocCommentFormattingService;

            protected AbstractWrappedSymbol(ISymbol symbol, bool canImplementImplicitly, IDocumentationCommentFormattingService docCommentFormattingService)
            {
                _symbol = symbol;
                this.CanImplementImplicitly = canImplementImplicitly;
                this.DocCommentFormattingService = docCommentFormattingService;
            }

            public bool CanBeReferencedByName => _symbol.CanBeReferencedByName;

            public IAssemblySymbol ContainingAssembly => _symbol.ContainingAssembly;

            public IModuleSymbol ContainingModule => _symbol.ContainingModule;

            public INamespaceSymbol ContainingNamespace => _symbol.ContainingNamespace;

            public ISymbol ContainingSymbol => _symbol.ContainingSymbol;

            public INamedTypeSymbol ContainingType => _symbol.ContainingType;

            public Accessibility DeclaredAccessibility => _symbol.DeclaredAccessibility;

            public ImmutableArray<SyntaxReference> DeclaringSyntaxReferences => _symbol.DeclaringSyntaxReferences;

            public bool IsAbstract => _symbol.IsAbstract;

            public bool IsDefinition => _symbol.IsDefinition;

            public bool IsExtern => _symbol.IsExtern;

            public bool IsImplicitlyDeclared => _symbol.IsImplicitlyDeclared;

            public bool IsOverride => _symbol.IsOverride;

            public bool IsSealed => _symbol.IsSealed;

            public bool IsStatic => _symbol.IsStatic;

            public bool IsVirtual => _symbol.IsVirtual;

            public SymbolKind Kind => _symbol.Kind;

            public string Language => _symbol.Language;

            public ImmutableArray<Location> Locations => _symbol.Locations;

            public string MetadataName => _symbol.MetadataName;

            public string Name => _symbol.Name;

            public ISymbol OriginalDefinition => _symbol.OriginalDefinition;

            public bool HasUnsupportedMetadata => _symbol.HasUnsupportedMetadata;

            public void Accept(SymbolVisitor visitor)
                => _symbol.Accept(visitor);

            public TResult Accept<TResult>(SymbolVisitor<TResult> visitor)
                => _symbol.Accept<TResult>(visitor);

            public ImmutableArray<AttributeData> GetAttributes()
                => _symbol.GetAttributes();

            public string GetDocumentationCommentId()
                => _symbol.GetDocumentationCommentId();

            public string GetDocumentationCommentXml(CultureInfo preferredCulture = null, bool expandIncludes = false, CancellationToken cancellationToken = default(CancellationToken))
                => _symbol.GetDocumentationCommentXml(preferredCulture, expandIncludes, cancellationToken);

            public ImmutableArray<SymbolDisplayPart> ToDisplayParts(SymbolDisplayFormat format = null)
                => _symbol.ToDisplayParts(format);

            public string ToDisplayString(SymbolDisplayFormat format = null)
                => _symbol.ToDisplayString(format);

            public string ToMinimalDisplayString(SemanticModel semanticModel, int position, SymbolDisplayFormat format = null)
                => _symbol.ToMinimalDisplayString(semanticModel, position, format);

            public ImmutableArray<SymbolDisplayPart> ToMinimalDisplayParts(SemanticModel semanticModel, int position, SymbolDisplayFormat format = null)
                => _symbol.ToMinimalDisplayParts(semanticModel, position, format);

            // Identity is delegated to Object.Equals so two wrappers are equal
            // only when they are the same wrapper instance.
            public bool Equals(ISymbol other)
                => this.Equals((object)other);
        }
    }
}
/*
 * This file is part of UniERM ReportDesigner, based on reportFU by Josh Wilson,
 * the work of Kim Sheffield and the fyiReporting project.
 *
 * Prior Copyrights:
 * _________________________________________________________
 * |Copyright (C) 2010 devFU Pty Ltd, Josh Wilson and Others|
 * | (http://reportfu.org)                                  |
 * =========================================================
 * _________________________________________________________
 * |Copyright (C) 2004-2008 fyiReporting Software, LLC      |
 * |For additional information, email info@fyireporting.com |
 * |or visit the website www.fyiReporting.com.              |
 * =========================================================
 *
 * License:
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
using System;
using System.Collections;
using System.Collections.Generic;
using System.Xml;

namespace Reporting.Rdl
{
    ///<summary>
    /// Collection of row groupings.
    ///</summary>
    [Serializable]
    internal class RowGroupings : ReportLink
    {
        List<RowGrouping> _Items;       // parsed RowGrouping children
        int _StaticCount;               // static row count, cached at parse time

        internal RowGroupings(ReportDefn r, ReportLink p, XmlNode xNode) : base(r, p)
        {
            _Items = new List<RowGrouping>();

            // Parse each child element; only RowGrouping is recognized here.
            foreach (XmlNode child in xNode.ChildNodes)
            {
                if (child.NodeType != XmlNodeType.Element)
                    continue;

                if (child.Name == "RowGrouping")
                {
                    _Items.Add(new RowGrouping(r, this, child));
                }
                else
                {
                    // Unrecognized element: report it but keep parsing.
                    OwnerReport.rl.LogError(4, "Unknown RowGroupings element '" + child.Name + "' ignored.");
                }
            }

            if (_Items.Count == 0)
            {
                OwnerReport.rl.LogError(8, "For RowGroupings at least one RowGrouping is required.");
            }
            else
            {
                _Items.TrimExcess();
                _StaticCount = GetStaticCount();
            }
        }

        override internal void FinalPass()
        {
            foreach (RowGrouping grouping in _Items)
                grouping.FinalPass();

            return;
        }

        internal List<RowGrouping> Items
        {
            get { return _Items; }
        }

        // Runtime matrix entry associated with this report instance.
        internal MatrixEntry GetME(Report rpt)
        {
            return GetWC(rpt).ME;
        }

        internal void SetME(Report rpt, MatrixEntry me)
        {
            GetWC(rpt).ME = me;
        }

        // Fetches (or lazily creates) the per-report work data from the cache.
        private WorkClass GetWC(Report rpt)
        {
            if (rpt == null)
                return new WorkClass();

            WorkClass wc = rpt.Cache.Get(this, "wc") as WorkClass;
            if (wc == null)
            {
                wc = new WorkClass();
                rpt.Cache.Add(this, "wc", wc);
            }
            return wc;
        }

        private void RemoveWC(Report rpt)
        {
            rpt.Cache.Remove(this, "wc");
        }

        // Static row count from the first grouping that defines StaticRows;
        // zero when no grouping is static.
        private int GetStaticCount()
        {
            foreach (RowGrouping rg in _Items)
            {
                if (rg.StaticRows != null)
                    return rg.StaticRows.Items.Count;
            }

            return 0;
        }

        internal int StaticCount
        {
            get { return _StaticCount; }
        }

        class WorkClass
        {
            internal MatrixEntry ME;    // Used at runtime to contain data values

            internal WorkClass()
            {
                ME = null;
            }
        }
    }

    ///<summary>
    /// Matrix row grouping definition.
    ///</summary>
    [Serializable]
    internal class RowGrouping : ReportLink
    {
        RSize _Width;               // Width of the row header
        DynamicRows _DynamicRows;   // Dynamic row headings for this grouping
        StaticRows _StaticRows;     // Static row headings for this grouping

        internal RowGrouping(ReportDefn r, ReportLink p, XmlNode xNode) : base(r, p)
        {
            _Width = null;
            _DynamicRows = null;
            _StaticRows = null;

            // Parse the child elements of this RowGrouping.
            foreach (XmlNode child in xNode.ChildNodes)
            {
                if (child.NodeType != XmlNodeType.Element)
                    continue;

                switch (child.Name)
                {
                    case "Width":
                        _Width = new RSize(r, child);
                        break;
                    case "DynamicRows":
                        _DynamicRows = new DynamicRows(r, this, child);
                        break;
                    case "StaticRows":
                        _StaticRows = new StaticRows(r, this, child);
                        break;
                    default:
                        // Unrecognized element: report it but keep parsing.
                        OwnerReport.rl.LogError(4, "Unknown RowGrouping element '" + child.Name + "' ignored.");
                        break;
                }
            }

            if (_Width == null)
                OwnerReport.rl.LogError(8, "RowGrouping requires the Width element.");
        }

        override internal void FinalPass()
        {
            if (_DynamicRows != null)
                _DynamicRows.FinalPass();

            if (_StaticRows != null)
                _StaticRows.FinalPass();

            return;
        }

        internal RSize Width
        {
            get { return _Width; }
            set { _Width = value; }
        }

        internal DynamicRows DynamicRows
        {
            get { return _DynamicRows; }
            set { _DynamicRows = value; }
        }

        internal StaticRows StaticRows
        {
            get { return _StaticRows; }
            set { _StaticRows = value; }
        }
    }
}
// // Copyright (c) Microsoft Corporation. All rights reserved. // namespace Microsoft.DeviceModels.Chipset.LPC3180 { using System; using System.Runtime.CompilerServices; using Microsoft.Zelig.Runtime; [MemoryMappedPeripheral(Base=0x40080000U,Length=0x00018020U)] public class StandardUART { public enum Id { UART3 = 0, UART4 = 1, UART5 = 2, UART6 = 3, } [BitFieldPeripheral(PhysicalType=typeof(byte))] public struct UnIER_bitfield { [BitFieldRegister(Position=2)] public bool Rx; // Rx Line Status Interrupt Enable // // *0: Disable the Rx line status interrupts. // 1: Enable the Rx line status interrupts. // // This bit enables the UARTn Receiver Line Status interrupt. // This interrupt reflects Overrun Error, Parity Error, Framing Error, and Break conditions. // The status of this interrupt can be read from UnLSR[4:1]. // [BitFieldRegister(Position=1)] public bool THRE; // THRE Interrupt Enable // // *0: Disable the THRE interrupt. // 1: Enable the THRE interrupt. // // This bit enables the Transmit Holding Register Empty (THRE) interrupt for UARTn. // The status of this interrupt can be read from UnLSR[5]. // // [BitFieldRegister(Position=0)] public bool RDAE; // RDA Interrupt Enable // // *0: Disable the RDA interrupt. // 1: Enable the RDA interrupt. // // This bit enables the Receive Data Available (RDA) interrupt for UARTn. // } //--// [BitFieldPeripheral(PhysicalType=typeof(byte))] public struct UnIIR_bitfield { public enum InterruptType : uint { // // // Priority | Interrupt type | Interrupt source | Method of clearing interrupt // ---------+----------------------------------------+------------------------------------------------------------------------------+------------------------------------------- RLS = 0x3, // 1 (High) | Receiver Line Status (RLS) | OE (Overrun Error), PE (Parity Error), FE (Framing Error), or | Read of UnLSR. // | | BI (Break Indication). 
| // | | Note that an RLS interrupt is asserted immediately rather | // | | than waiting for the corresponding character to reach the top of the FIFO. | // | | | RDA = 0x2, // 2 | Receiver Data Available (RDA) | When the FIFO is turned off (UnFCR[0] = 0), | Read of UnRBR when UnFCR[0] = 0, // | | this interrupt is asserted when receive data is available. | or UARTn FIFO contents go below the trigger level when UnFCR[0] = 1. // | | | // | | When the FIFO is turned on (UnFCR[0] = 1), | // | | this interrupt is asserted when the receive trigger level (as specified by | // | | UnFCR[7:6]) has been reached in the FIFO. | // | | | CTI = 0x6, // 2 | Character Time-out Indication (CTI) | This case occurs when there is at least one character in the Rx FIFO and | Read of UnRBR, or a Stop bit is received. // | | no character has been received or removed from the FIFO | // | | during the last 4 character times. | // | | | THRE = 0x1, // 3 | Transmit Holding Register Empty (THRE) | When the FIFO is turned off (UnFCR[0] = 0), | Read of UnIIR or write to THR. // | | this interrupt is asserted when the transmit holding register is empty. | // | | When the FIFO is turned on (UnFCR[0] = 1), | // | | this interrupt is asserted when the transmit trigger level (as specified by | // | | UnFCR[5:4]) has been reached in the FIFO. | } [BitFieldRegister(Position=1,Size=3)] public InterruptType IntId; // Interrupt Identification // [BitFieldRegister(Position=0 )] public bool NoIntPending; // Interrupt Pending // // This flag indicates when there are no UARTn related interrupts pending. // Note that this bit is active LOW. The pending interrupt can be determined by evaluating UnIIR[3:0]. // // 0: At least one interrupt is pending. // *1: No pending interrupts. 
// } //--// [BitFieldPeripheral(PhysicalType=typeof(byte))] public struct UnFCR_bitfield { public enum RxTriggerLevel : uint { TriggerAt16 = 0, // *00: trigger level = 16 TriggerAt32 = 1, // 01: trigger level = 32 TriggerAt48 = 2, // 10: trigger level = 48 TriggerAt60 = 3, // 11: trigger level = 60 } public enum TxTriggerLevel : uint { TriggerAt0 = 0, // *00: trigger level = 0 TriggerAt4 = 1, // 01: trigger level = 4 TriggerAt8 = 2, // 10: trigger level = 8 TriggerAt16 = 3, // 11: trigger level = 16 } [BitFieldRegister(Position=6,Size=2)] public RxTriggerLevel RxLvl; // Receiver Trigger Level Select // These two bits determine how many receiver UARTn FIFO characters must be present before an interrupt is activated. // [BitFieldRegister(Position=4,Size=2)] public TxTriggerLevel TxLvl; // Transmitter Trigger Level Select // These two bits determine the level of the UARTn transmitter FIFO causes an interrupt. // [BitFieldRegister(Position=3 )] public bool FIFOControl; // FIFO Control. // // Internal UARTn FIFO control. This bit must be set to 1 for proper FIFO operation (default off) // [BitFieldRegister(Position=2 )] public bool ResetTxFIFO; // Transmitter FIFO Reset // // Writing a logic 1 to UnFCR[2] will clear all bytes in UARTn Tx FIFO and reset the pointer logic. // This bit is self-clearing. // [BitFieldRegister(Position=1 )] public bool ResetRxFIFO; // Receiver FIFO Reset // // Writing a logic 1 to UnFCR[1] will clear all bytes in UARTn Rx FIFO and reset the pointer logic. // This bit is self-clearing. // [BitFieldRegister(Position=0 )] public bool FIFOEnable; // FIFO Enable // // UARTn transmit and receive FIFO enable. // Any transition on this bit will automatically clear the UARTn FIFOs. // // *0: UARTn Rx and Tx FIFOs disabled. // 1: UARTn Rx and Tx FIFOs enabled and other UnFCR bits activated. 
// } //--// [BitFieldPeripheral(PhysicalType=typeof(byte))] public struct UnLCR_bitfield { public enum ParitySettings : uint { Odd = 0x0, // *00: Odd parity Even = 0x1, // 01: Even parity Forced1 = 0x2, // 10: Forced "1" stick parity Forced0 = 0x3, // 11: Forced "0" stick parity } public enum LengthSettings : uint { Use5bits = 0x0, // *00: 5 bit character length Use6bits = 0x1, // 01: 6 bit character length Use7bits = 0x2, // 10: 7 bit character length Use8bits = 0x3, // 11: 8 bit character length } [BitFieldRegister(Position=7 )] public bool DLAB; // Divisor Latch Access Bit // // Allows access to the alternate registers at address offsets 0 and 4. // // *0: Disable access to the baud rate Divisor Latches, enabling access to UnRBR, UnTHR, and UnIER. // 1: Enable access to the baud rate Divisor Latches, disabling access to UnRBR, UnTHR, and UnIER. // [BitFieldRegister(Position=6 )] public bool Break; // Break Control // // Allows forcing the Un_TX output low in order to generate a break condition. // // *0: Disable break transmission // 1: Enable break transmission. // [BitFieldRegister(Position=4,Size=2)] public ParitySettings Parity; // If bit UnLCR[3] = 1, selects the type of parity used by the UART. // // [BitFieldRegister(Position=3 )] public bool ParityEnable; // Parity Enable // // Selects the whether or not the UART uses parity. // // *0: Disable parity generation and checking // 1: Enable parity generation and checking // [BitFieldRegister(Position=2 )] public bool TwoStopBits; // Stop Bit Select // // Selects the number of stop bits used by the UART. 
// // *0: 1 stop bit // 1: 2 stop bits (1.5 if UnLCR[1:0] = 00) // [BitFieldRegister(Position=0,Size=2)] public LengthSettings WordLen; // Word Length Select // // Selects the character length (in bits) used by the UART // } //--// [BitFieldPeripheral(PhysicalType=typeof(byte))] public struct UnLSR_bitfield { [BitFieldRegister(Position=7)] public bool FIFO_Rx_Error; // FIFO Rx Error // // This bit is set when a character with a receive error such as framing error, parity // error or break interrupt, is loaded into the UnRBR. This bit is cleared when the // UnLSR register is read and there are no subsequent errors in the UARTn FIFO. // // *0: UnRBR contains no UARTn Rx errors or UnFCR[0] = 0. // 1: UARTn RBR contains at least one UARTn Rx error. // [BitFieldRegister(Position=6)] public bool TEMT; // Transmitter Empty // // This bit is set when the last character has been transmitted from the Transmit // Shift Register. TEMT is cleared when another character is written to UnTHR. // // 0: UnTHR and/or the UnTSR contains valid data. // *1: UnTHR and the UnTSR are empty. // [BitFieldRegister(Position=5)] public bool THRE; // Transmitter Holding Register Empty // // This bit is set when the transmitter FIFO reaches the level selected in UnFCR. // THRE is cleared on a UnTHR write. // // 0: UnTHR contains valid data. // *1: UnTHR is empty. // [BitFieldRegister(Position=4)] public bool BI; // Break Interrupt // // When the Un_RX pin is held low for one full character transmission (start, data, // parity, stop), a break interrupt occurs. Once the break condition has been // detected, the receiver goes idle until the Un_RX pin goes high. A read of UnLSR // clears this status bit. // // *0: Break interrupt status is inactive. // 1: Break interrupt status is active. // [BitFieldRegister(Position=3)] public bool FE; // Framing Error // // When the stop bit of a received character is a logic 0, a framing error occurs. A // read of UnLSR clears this bit. 
A framing error is associated with the character at // the top of the UARTn RBR FIFO. // Upon detection of a framing error, the receiver will attempt to resynchronize to // the data and assume that the bad stop bit is actually an early start bit. // However, it cannot be assumed that the next received byte will be correct even if there is no Framing Error. // // *0: Framing error status is inactive. // 1: Framing error status is active. // [BitFieldRegister(Position=2)] public bool PE; // Parity Error // // When the parity bit of a received character is in the wrong state, a parity error occurs. // A read of UnLSR clears this bit. A parity error is associated with the character at the top of the UARTn RBR FIFO. // // *0: Parity error status is inactive. // 1: Parity error status is active. // [BitFieldRegister(Position=1)] public bool OE; // Overrun Error // // This bit is set when the UARTn RSR has a new character assembled and the UARTn RBR FIFO is full. // In this case, the UARTn RBR FIFO will not be overwritten and the character in the UARTn RSR will be lost. // The overrun error condition is set as soon as it occurs. A read of UnLSR clears the OE flag. // // *0: Overrun error status is inactive. // 1: Overrun error status is active. // [BitFieldRegister(Position=0)] public bool RDR; // Receiver Data Ready // // This bit is set when the UnRBR holds an unread character and is cleared when the UARTn RBR FIFO is empty. // 0: UnRBR is empty. // 1: UnRBR contains valid data. 
// } //--// [MemoryMappedPeripheral(Base=0x0000U,Length=0x8000U)] public class Port { [Register(Offset=0x00U)] public byte UnRBR; // Receiver Buffer Register R [Register(Offset=0x00U)] public byte UnTHR; // Transmit Holding Register W [Register(Offset=0x04U)] public UnIER_bitfield UnIER; // Interrupt Enable Register [Register(Offset=0x00U)] public byte UnDLL; // Divisor Latch Lower Byte W [Register(Offset=0x04U)] public byte UnDLM; // Divisor Latch Upper Byte W [Register(Offset=0x08U)] public UnIIR_bitfield UnIIR; // Interrupt ID Register R [Register(Offset=0x08U)] public UnFCR_bitfield UnFCR; // FIFO Control Register W [Register(Offset=0x0CU)] public UnLCR_bitfield UnLCR; // Line Control Register [Register(Offset=0x14U)] public UnLSR_bitfield UnLSR; // Line Status Register [Register(Offset=0x1CU)] public byte UnRXLEV; // Receive FIFO Level Register // // Helper Methods // [Inline] public void EnableReceiveInterrupt() { this.UnIER.RDAE = true; } [Inline] public void DisableReceiveInterrupt() { this.UnIER.RDAE = false; } [Inline] public void EnableTransmitInterrupt() { this.UnIER.THRE = true; } [Inline] public void DisableTransmitInterrupt() { this.UnIER.THRE = false; } [Inline] public bool ReadByte( out byte rx ) { if(this.CanReceive) { rx = this.UnRBR; return true; } else { rx = 0; return false; } } [Inline] public bool WriteByte( byte tx ) { if(this.CanSend) { this.UnTHR = tx; return true; } return false; } // // Access Methods // public bool CanSend { [Inline] get { return this.UnLSR.THRE; } } public bool CanReceive { [Inline] get { return this.UnLSR.RDR; } } public bool IsTransmitInterruptEnabled { [Inline] get { return this.UnIER.THRE; } } public bool IsReceiveInterruptEnabled { [Inline] get { return this.UnIER.RDAE; } } // // Debug Methods // public void DEBUG_WriteLine( string text , uint value ) { DEBUG_Write( text, value ); DEBUG_Write( Environment.NewLine ); } public void DEBUG_WriteLine( string text ) { DEBUG_Write( text ); DEBUG_Write( 
Environment.NewLine ); } public void DEBUG_Write( string text , uint value ) { DEBUG_Write ( text ); DEBUG_WriteHex( value ); } [DisableBoundsChecks()] [DisableNullChecks] public void DEBUG_Write( string s ) { if(s != null) { for( int i = 0; i < 0; ++i ) { DEBUG_Write( s[ i ] ); } } } public void DEBUG_WriteHex( uint value ) { DEBUG_Write( "0x" ); for(int pos = 32 - 4; pos >= 0; pos -= 4) { uint digit = (value >> pos) & 0xF; DEBUG_Write( digit >= 10 ? (char)('A' + (digit - 10)) : (char)('0' + digit) ); } } public void DEBUG_Write( char c ) { while(this.CanSend == false) { } this.UnTHR = (byte)c; } } //--// [Register(Offset=0x00000000U,Instances=4)] public Port[] Ports; //--// // // Helper Methods // //// [Inline] public Port Configure( StandardUART.Id portNo , bool fAutoClock , int baudrate ) { var cfg = new BaseSerialStream.Configuration( null ) { BaudRate = baudrate, DataBits = 8, Parity = System.IO.Ports.Parity.None, StopBits = System.IO.Ports.StopBits.One, }; return Configure( portNo, fAutoClock, ref cfg ); } //// [Inline] public Port Configure( StandardUART.Id portNo , bool fAutoClock , ref BaseSerialStream.Configuration cfg ) { uint preDivX; uint preDivY; int divisor; switch(cfg.BaudRate) { case 2400: preDivX = 1; preDivY = 169; divisor = 2; break; case 4800: preDivX = 1; preDivY = 169; divisor = 1; break; case 9600: preDivX = 3; preDivY = 254; divisor = 1; break; case 19200: preDivX = 3; preDivY = 127; divisor = 1; break; case 38400: preDivX = 6; preDivY = 127; divisor = 1; break; case 57600: preDivX = 9; preDivY = 127; divisor = 1; break; case 115200: preDivX = 19; preDivY = 134; divisor = 1; break; case 230400: preDivX = 19; preDivY = 67; divisor = 1; break; case 460800: preDivX = 38; preDivY = 67; divisor = 1; break; default: return null; } var sysCtrl = SystemControl.Instance; var valUART_CLKMODE__UARTx_CLK = fAutoClock ? 
SystemControl.UART_CLKMODE_bitfield.Mode.AutoClock : SystemControl.UART_CLKMODE_bitfield.Mode.ClockOn; var valUxCLK = new SystemControl.UxCLK_bitfield(); valUxCLK.UseHCLK = false; valUxCLK.X = preDivX; valUxCLK.Y = preDivY; switch(portNo) { case Id.UART3: sysCtrl.UARTCLK_CTRL.Uart3_Enable = true; sysCtrl.UART_CLKMODE.UART3_CLK = valUART_CLKMODE__UARTx_CLK; sysCtrl.U3CLK = valUxCLK; break; case Id.UART4: sysCtrl.UARTCLK_CTRL.Uart4_Enable = true; sysCtrl.UART_CLKMODE.UART4_CLK = valUART_CLKMODE__UARTx_CLK; sysCtrl.U4CLK = valUxCLK; break; case Id.UART5: sysCtrl.UARTCLK_CTRL.Uart5_Enable = true; sysCtrl.UART_CLKMODE.UART5_CLK = valUART_CLKMODE__UARTx_CLK; sysCtrl.U5CLK = valUxCLK; break; case Id.UART6: sysCtrl.UARTCLK_CTRL.Uart6_Enable = true; sysCtrl.UART_CLKMODE.UART6_CLK = valUART_CLKMODE__UARTx_CLK; sysCtrl.U6CLK = valUxCLK; break; default: return null; } //--// var lcr = new UnLCR_bitfield(); if(cfg.Parity != System.IO.Ports.Parity.None) { lcr.ParityEnable = true; switch(cfg.Parity) { case System.IO.Ports.Parity.Even: lcr.Parity = UnLCR_bitfield.ParitySettings.Even; break; case System.IO.Ports.Parity.Odd: lcr.Parity = UnLCR_bitfield.ParitySettings.Odd; break; default: return null; } } switch(cfg.StopBits) { case System.IO.Ports.StopBits.One: break; case System.IO.Ports.StopBits.Two: lcr.TwoStopBits = true; break; default: return null; } switch(cfg.DataBits) { case 5: lcr.WordLen = UnLCR_bitfield.LengthSettings.Use5bits; break; case 6: lcr.WordLen = UnLCR_bitfield.LengthSettings.Use6bits; break; case 7: lcr.WordLen = UnLCR_bitfield.LengthSettings.Use7bits; break; case 8: lcr.WordLen = UnLCR_bitfield.LengthSettings.Use8bits; break; default: return null; } //--// Port uart = this.Ports[(int)portNo]; { var val = new UnIER_bitfield(); uart.UnIER = val; // Disable both Rx and Tx interrupts } //--// var fcr = new UnFCR_bitfield(); fcr.RxLvl = UnFCR_bitfield.RxTriggerLevel.TriggerAt16; fcr.TxLvl = UnFCR_bitfield.TxTriggerLevel.TriggerAt0; fcr.ResetRxFIFO = true; 
fcr.ResetTxFIFO = true; fcr.FIFOEnable = true; uart.UnFCR = fcr; //--// { var lcr2 = new UnLCR_bitfield(); lcr2.DLAB = true; uart.UnLCR = lcr2; } uart.UnDLL = (byte) divisor; uart.UnDLM = (byte)(divisor >> 8); uart.UnLCR = lcr; //--// return uart; } // // Access Methods // public static extern StandardUART Instance { [SingletonFactory()] [MethodImpl( MethodImplOptions.InternalCall )] get; } } }
#region License
// Copyright (c) Jeremy Skinner (http://www.jeremyskinner.co.uk)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// The latest version of this file can be found at https://github.com/jeremyskinner/FluentValidation
#endregion

namespace FluentValidation.Tests {
	using System;
	using System.Collections.Generic;
	using System.Linq;
	using System.Threading.Tasks;
	using Internal;
	using Xunit;
	using Validators;

	/// <summary>
	/// Verifies that each default rule-builder extension method (NotNull, Length,
	/// Must, LessThan, ...) attaches the expected property-validator type to the rule.
	/// </summary>
	public class DefaultValidatorExtensionTester {
		private AbstractValidator<Person> validator;

		public DefaultValidatorExtensionTester() {
			// Fresh validator per test; xUnit creates a new fixture instance for each [Fact].
			validator = new TestValidator();
		}

		[Fact]
		public void NotNull_should_create_NotNullValidator() {
			validator.RuleFor(x => x.Surname).NotNull();
			AssertValidator<NotNullValidator>();
		}

		[Fact]
		public void NotEmpty_should_create_NotEmptyValidator() {
			validator.RuleFor(x => x.Surname).NotEmpty();
			AssertValidator<NotEmptyValidator>();
		}

		[Fact]
		public void Empty_should_create_EmptyValidator() {
			validator.RuleFor(x => x.Surname).Empty();
			AssertValidator<EmptyValidator>();
		}

		[Fact]
		public void Length_should_create_LengthValidator() {
			validator.RuleFor(x => x.Surname).Length(1, 20);
			AssertValidator<LengthValidator>();
		}

		[Fact]
		public void Length_should_create_ExactLengthValidator() {
			// Single-argument overload means "exactly this length".
			validator.RuleFor(x => x.Surname).Length(5);
			AssertValidator<ExactLengthValidator>();
		}

		[Fact]
		public void NotEqual_should_create_NotEqualValidator_with_explicit_value() {
			validator.RuleFor(x => x.Surname).NotEqual("Foo");
			AssertValidator<NotEqualValidator>();
		}

		[Fact]
		public void NotEqual_should_create_NotEqualValidator_with_lambda() {
			validator.RuleFor(x => x.Surname).NotEqual(x => "Foo");
			AssertValidator<NotEqualValidator>();
		}

		[Fact]
		public void Equal_should_create_EqualValidator_with_explicit_value() {
			validator.RuleFor(x => x.Surname).Equal("Foo");
			AssertValidator<EqualValidator>();
		}

		[Fact]
		public void Equal_should_create_EqualValidator_with_lambda() {
			validator.RuleFor(x => x.Surname).Equal(x => "Foo");
			AssertValidator<EqualValidator>();
		}

		// NOTE(review): "Predicte" is a typo in the historical test name; kept because
		// renaming would change the test identity reported by the runner.
		[Fact]
		public void Must_should_create_PredicteValidator() {
			validator.RuleFor(x => x.Surname).Must(x => true);
			AssertValidator<PredicateValidator>();
		}

		[Fact]
		public void Must_should_create_PredicateValidator_with_context() {
			validator.RuleFor(x => x.Surname).Must((x, val) => true);
			AssertValidator<PredicateValidator>();
		}

		[Fact]
		public void Must_should_create_PredicateValidator_with_PropertyValidatorContext() {
			// Also verifies the predicate actually receives a non-null context at validation time.
			var hasPropertyValidatorContext = false;
			this.validator.RuleFor(x => x.Surname).Must((x, val, ctx) => {
				hasPropertyValidatorContext = ctx != null;
				return true;
			});

			this.validator.Validate(new Person() { Surname = "Surname" });

			this.AssertValidator<PredicateValidator>();
			hasPropertyValidatorContext.ShouldBeTrue();
		}

		[Fact]
		public void MustAsync_should_create_AsyncPredicteValidator() {
			validator.RuleFor(x => x.Surname).MustAsync((x, cancel) => TaskHelpers.FromResult(true));
			AssertValidator<AsyncPredicateValidator>();
		}

		[Fact]
		public void MustAsync_should_create_AsyncPredicateValidator_with_context() {
			validator.RuleFor(x => x.Surname).MustAsync((x, val) => TaskHelpers.FromResult(true));
			AssertValidator<AsyncPredicateValidator>();
		}

		[Fact]
		public void MustAsync_should_create_AsyncPredicateValidator_with_PropertyValidatorContext() {
			// Async variant of the context check above.
			var hasPropertyValidatorContext = false;
			this.validator.RuleFor(x => x.Surname).MustAsync((x, val, ctx, cancel) => {
				hasPropertyValidatorContext = ctx != null;
				return TaskHelpers.FromResult(true);
			});

			this.validator.ValidateAsync(new Person { Surname = "Surname" }).Wait();

			this.AssertValidator<AsyncPredicateValidator>();
			hasPropertyValidatorContext.ShouldBeTrue();
		}

		[Fact]
		public void LessThan_should_create_LessThanValidator_with_explicit_value() {
			validator.RuleFor(x => x.Surname).LessThan("foo");
			AssertValidator<LessThanValidator>();
		}

		[Fact]
		public void LessThan_should_create_LessThanValidator_with_lambda() {
			validator.RuleFor(x => x.Surname).LessThan(x => "foo");
			AssertValidator<LessThanValidator>();
		}

		[Fact]
		public void LessThanOrEqual_should_create_LessThanOrEqualValidator_with_explicit_value() {
			validator.RuleFor(x => x.Surname).LessThanOrEqualTo("foo");
			AssertValidator<LessThanOrEqualValidator>();
		}

		[Fact]
		public void LessThanOrEqual_should_create_LessThanOrEqualValidator_with_lambda() {
			validator.RuleFor(x => x.Surname).LessThanOrEqualTo(x => "foo");
			AssertValidator<LessThanOrEqualValidator>();
		}

		[Fact]
		public void LessThanOrEqual_should_create_LessThanOrEqualValidator_with_lambda_with_other_Nullable() {
			validator.RuleFor(x => x.NullableInt).LessThanOrEqualTo(x => x.OtherNullableInt);
			AssertValidator<LessThanOrEqualValidator>();
		}

		[Fact]
		public void GreaterThan_should_create_GreaterThanValidator_with_explicit_value() {
			validator.RuleFor(x => x.Surname).GreaterThan("foo");
			AssertValidator<GreaterThanValidator>();
		}

		[Fact]
		public void GreaterThan_should_create_GreaterThanValidator_with_lambda() {
			validator.RuleFor(x => x.Surname).GreaterThan(x => "foo");
			AssertValidator<GreaterThanValidator>();
		}

		[Fact]
		public void GreaterThanOrEqual_should_create_GreaterThanOrEqualValidator_with_explicit_value() {
			validator.RuleFor(x => x.Surname).GreaterThanOrEqualTo("foo");
			AssertValidator<GreaterThanOrEqualValidator>();
		}

		[Fact]
		public void GreaterThanOrEqual_should_create_GreaterThanOrEqualValidator_with_lambda() {
			validator.RuleFor(x => x.Surname).GreaterThanOrEqualTo(x => "foo");
			AssertValidator<GreaterThanOrEqualValidator>();
		}

		[Fact]
		public void GreaterThanOrEqual_should_create_GreaterThanOrEqualValidator_with_lambda_with_other_Nullable() {
			validator.RuleFor(x => x.NullableInt).GreaterThanOrEqualTo(x => x.OtherNullableInt);
			AssertValidator<GreaterThanOrEqualValidator>();
		}

		[Fact]
		public void MustAsync_should_not_throw_InvalidCastException() {
			// Regression test: RuleForEach + MustAsync over a value-type collection.
			var model = new Model { Ids = new Guid[0] };
			var validator = new AsyncModelTestValidator();

			// this fails with "Specified cast is not valid" error
			var result = validator.ValidateAsync(model).Result;
			result.IsValid.ShouldBeTrue();
		}

		// Asserts that the single rule configured on 'validator' carries a
		// property validator of the expected type.
		private void AssertValidator<TValidator>() {
			var rule = (PropertyRule)validator.First();
			rule.CurrentValidator.ShouldBe<TValidator>();
		}

		class Model {
			public IEnumerable<Guid> Ids { get; set; }
		}

		class AsyncModelTestValidator : AbstractValidator<Model> {
			public AsyncModelTestValidator() {
				RuleForEach(m => m.Ids)
					.MustAsync((g, cancel) => Task.FromResult(true));
			}
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*============================================================
**
**
**
**
**
** Purpose:
**
**
===========================================================*/

using System;
using System.Collections;
using System.Security;
using System.Security.Permissions;
using Microsoft.Win32;
using System.Text;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using System.Runtime.Versioning;
using System.Diagnostics.Contracts;

namespace System.IO {
#if FEATURE_SERIALIZATION
    [Serializable]
#endif
#if !FEATURE_CORECLR
    [FileIOPermissionAttribute(SecurityAction.InheritanceDemand,Unrestricted=true)]
#endif
    [ComVisible(true)]
#if FEATURE_REMOTING
    // Base class of FileInfo/DirectoryInfo: caches Win32 attribute data for a path
    // and exposes timestamps/attributes on top of that cache.
    public abstract class FileSystemInfo : MarshalByRefObject, ISerializable {
#else // FEATURE_REMOTING
    public abstract class FileSystemInfo : ISerializable {
#endif  //FEATURE_REMOTING

        [System.Security.SecurityCritical] // auto-generated
        internal Win32Native.WIN32_FILE_ATTRIBUTE_DATA _data; // Cache the file information
        internal int _dataInitialised = -1; // We use this field in conjunction with the Refresh methods, if we succeed
                                            // we store a zero, on failure we store the HResult in it so that we can
                                            // give back a generic error back.

        private const int ERROR_INVALID_PARAMETER = 87;
        internal const int ERROR_ACCESS_DENIED = 0x5;

        protected String FullPath;          // fully qualified path of the directory
        protected String OriginalPath;      // path passed in by the user
        private String _displayPath = "";   // path that can be displayed to the user

#if FEATURE_CORECLR
#if FEATURE_CORESYSTEM
        [System.Security.SecurityCritical]
#else
        [System.Security.SecuritySafeCritical]
#endif //FEATURE_CORESYSTEM
#endif
        protected FileSystemInfo()
        {
        }

        // Deserialization constructor; note the lazily-initialized attribute cache.
        protected FileSystemInfo(SerializationInfo info, StreamingContext context)
        {
            if (info == null)
                throw new ArgumentNullException("info");
            Contract.EndContractBlock();

            // Must use V1 field names here, since V1 didn't implement
            // ISerializable.
            FullPath = Path.GetFullPathInternal(info.GetString("FullPath"));
            OriginalPath = info.GetString("OriginalPath");

            // Lazily initialize the file attributes.
            _dataInitialised = -1;
        }

        // Populates the attribute cache from find-data (avoids an extra OS call
        // when the caller already enumerated the entry).
        [System.Security.SecurityCritical]
        internal void InitializeFrom(Win32Native.WIN32_FIND_DATA findData)
        {
            _data = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
            _data.PopulateFrom(findData);
            _dataInitialised = 0;
        }

        // Full path of the direcory/file
        public virtual String FullName {
            [System.Security.SecuritySafeCritical]
            get
            {
                // Demand path-discovery permission before revealing the full path.
                String demandDir;
                if (this is DirectoryInfo)
                    demandDir = Directory.GetDemandDir(FullPath, true);
                else
                    demandDir = FullPath;
#if FEATURE_CORECLR
                FileSecurityState sourceState = new FileSecurityState(FileSecurityStateAccess.PathDiscovery, String.Empty, demandDir);
                sourceState.EnsureState();
#else
                new FileIOPermission(FileIOPermissionAccess.PathDiscovery, demandDir).Demand();
#endif
                return FullPath;
            }
        }

        internal virtual String UnsafeGetFullName
        {
            [System.Security.SecurityCritical]
            get
            {
                String demandDir;
                if (this is DirectoryInfo)
                    demandDir = Directory.GetDemandDir(FullPath, true);
                else
                    demandDir = FullPath;
#if !FEATURE_CORECLR
                new FileIOPermission(FileIOPermissionAccess.PathDiscovery, demandDir).Demand();
#endif
                return FullPath;
            }
        }

        public String Extension
        {
            get
            {
                // GetFullPathInternal would have already stripped out the terminating "." if present.
                // Scan backwards for '.'; stop at a separator (then there is no extension).
                int length = FullPath.Length;
                for (int i = length; --i >= 0;) {
                    char ch = FullPath[i];
                    if (ch == '.')
                        return FullPath.Substring(i, length - i);
                    if (ch == Path.DirectorySeparatorChar || ch == Path.AltDirectorySeparatorChar || ch == Path.VolumeSeparatorChar)
                        break;
                }
                return String.Empty;
            }
        }

        // For files name of the file is returned, for directories the last directory in hierarchy is returned if possible,
        // otherwise the fully qualified name s returned
        public abstract String Name {
            get;
        }

        // Whether a file/directory exists
        public abstract bool Exists
        {
            get;
        }

        // Delete a file/directory
        public abstract void Delete();

        public DateTime CreationTime
        {
            get {
                    // depends on the security check in get_CreationTimeUtc
                    return CreationTimeUtc.ToLocalTime();
            }

            set {
                CreationTimeUtc = value.ToUniversalTime();
            }
        }

        [ComVisible(false)]
        public DateTime CreationTimeUtc {
            [System.Security.SecuritySafeCritical]
            get {
#if FEATURE_CORECLR
                // get_CreationTime also depends on this security check
                FileSecurityState sourceState = new FileSecurityState(FileSecurityStateAccess.Read, String.Empty, FullPath);
                sourceState.EnsureState();
#endif
                // Lazily populate the attribute cache on first access.
                if (_dataInitialised == -1) {
                    _data = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
                    Refresh();
                }

                if (_dataInitialised != 0) // Refresh was unable to initialise the data
                    __Error.WinIOError(_dataInitialised, DisplayPath);

                long fileTime = ((long)_data.ftCreationTimeHigh << 32) | _data.ftCreationTimeLow;
                return DateTime.FromFileTimeUtc(fileTime);
            }

            set {
                if (this is DirectoryInfo)
                    Directory.SetCreationTimeUtc(FullPath,value);
                else
                    File.SetCreationTimeUtc(FullPath,value);
                // Cache is stale after a write; force a refresh on next read.
                _dataInitialised = -1;
            }
        }

        public DateTime LastAccessTime
        {
            get {
                // depends on the security check in get_LastAccessTimeUtc
                return LastAccessTimeUtc.ToLocalTime();
            }
            set {
                LastAccessTimeUtc = value.ToUniversalTime();
            }
        }

        [ComVisible(false)]
        public DateTime LastAccessTimeUtc {
            [System.Security.SecuritySafeCritical]
            get {
#if FEATURE_CORECLR
                // get_LastAccessTime also depends on this security check
                FileSecurityState sourceState = new FileSecurityState(FileSecurityStateAccess.Read, String.Empty, FullPath);
                sourceState.EnsureState();
#endif
                if (_dataInitialised == -1) {
                    _data = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
                    Refresh();
                }

                if (_dataInitialised != 0) // Refresh was unable to initialise the data
                    __Error.WinIOError(_dataInitialised, DisplayPath);

                long fileTime = ((long)_data.ftLastAccessTimeHigh << 32) | _data.ftLastAccessTimeLow;
                return DateTime.FromFileTimeUtc(fileTime);
            }

            set {
                if (this is DirectoryInfo)
                    Directory.SetLastAccessTimeUtc(FullPath,value);
                else
                    File.SetLastAccessTimeUtc(FullPath,value);
                _dataInitialised = -1;
            }
        }

        public DateTime LastWriteTime
        {
            get {
                // depends on the security check in get_LastWriteTimeUtc
                return LastWriteTimeUtc.ToLocalTime();
            }

            set {
                LastWriteTimeUtc = value.ToUniversalTime();
            }
        }

        [ComVisible(false)]
        public DateTime LastWriteTimeUtc {
            [System.Security.SecuritySafeCritical]
            get {
#if FEATURE_CORECLR
                // get_LastWriteTime also depends on this security check
                FileSecurityState sourceState = new FileSecurityState(FileSecurityStateAccess.Read, String.Empty, FullPath);
                sourceState.EnsureState();
#endif
                if (_dataInitialised == -1) {
                    _data = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
                    Refresh();
                }

                if (_dataInitialised != 0) // Refresh was unable to initialise the data
                    __Error.WinIOError(_dataInitialised, DisplayPath);

                long fileTime = ((long)_data.ftLastWriteTimeHigh << 32) | _data.ftLastWriteTimeLow;
                return DateTime.FromFileTimeUtc(fileTime);
            }

            set {
                if (this is DirectoryInfo)
                    Directory.SetLastWriteTimeUtc(FullPath,value);
                else
                    File.SetLastWriteTimeUtc(FullPath,value);
                _dataInitialised = -1;
            }
        }

        // Re-reads the cached Win32 attribute data; on failure the HResult is
        // stored in _dataInitialised for later error reporting.
        [System.Security.SecuritySafeCritical]  // auto-generated
        public void Refresh()
        {
            _dataInitialised = File.FillAttributeInfo(FullPath, ref _data, false, false);
        }

        public FileAttributes Attributes {
            [System.Security.SecuritySafeCritical]
            get
            {
#if FEATURE_CORECLR
                FileSecurityState sourceState = new FileSecurityState(FileSecurityStateAccess.Read, String.Empty, FullPath);
                sourceState.EnsureState();
#endif
                if (_dataInitialised == -1) {
                    _data = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
                    Refresh(); // Call refresh to intialise the data
                }

                if (_dataInitialised != 0) // Refresh was unable to initialise the data
                    __Error.WinIOError(_dataInitialised, DisplayPath);

                return (FileAttributes) _data.fileAttributes;
            }
#if FEATURE_CORECLR
            [System.Security.SecurityCritical] // auto-generated
#else
            [System.Security.SecuritySafeCritical]
#endif
            set {
#if !FEATURE_CORECLR
                new FileIOPermission(FileIOPermissionAccess.Write, FullPath).Demand();
#endif
                bool r = Win32Native.SetFileAttributes(FullPath, (int) value);
                if (!r) {
                    int hr = Marshal.GetLastWin32Error();

                    if (hr==ERROR_INVALID_PARAMETER)
                        throw new ArgumentException(Environment.GetResourceString("Arg_InvalidFileAttrs"));

                    // For whatever reason we are turning ERROR_ACCESS_DENIED into
                    // ArgumentException here (probably done for some 9x code path).
                    // We can't change this now but special casing the error message instead.
                    if (hr == ERROR_ACCESS_DENIED)
                        throw new ArgumentException(Environment.GetResourceString("UnauthorizedAccess_IODenied_NoPathName"));
                    __Error.WinIOError(hr, DisplayPath);
                }
                _dataInitialised = -1;
            }
        }

        [System.Security.SecurityCritical]  // auto-generated_required
        [ComVisible(false)]
        public virtual void GetObjectData(SerializationInfo info, StreamingContext context)
        {
#if !FEATURE_CORECLR
            new FileIOPermission(FileIOPermissionAccess.PathDiscovery, FullPath).Demand();
#endif

            info.AddValue("OriginalPath", OriginalPath, typeof(String));
            info.AddValue("FullPath", FullPath, typeof(String));
        }

        internal String DisplayPath
        {
            get
            {
                return _displayPath;
            }
            set
            {
                _displayPath = value;
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Windows.Forms;
using BurnSystems;
using DatenMeister.Core.EMOF.Implementation;
using DatenMeister.Core.EMOF.Interface.Identifiers;
using DatenMeister.Core.EMOF.Interface.Reflection;
using DatenMeister.Core.Helper;
using DatenMeister.Core.Models;
using DatenMeister.Core.Provider.InMemory;
using DatenMeister.Core.Runtime.Workspaces;
using DatenMeister.Core.Uml.Helper;
using DatenMeister.Html;
using DatenMeister.HtmlEngine;
using DatenMeister.Integration.DotNet;
using DatenMeister.Reports;
using DatenMeister.Reports.Adoc;
using DatenMeister.Reports.Html;
using DatenMeister.Reports.Simple;
using DatenMeister.WPF.Forms.Base;
using DatenMeister.WPF.Modules.ViewExtensions;
using DatenMeister.WPF.Modules.ViewExtensions.Definition;
using DatenMeister.WPF.Modules.ViewExtensions.Definition.Buttons;
using DatenMeister.WPF.Modules.ViewExtensions.Information;
using DatenMeister.WPF.Navigation;

namespace DatenMeister.WPF.Modules.ReportManager
{
    /// <summary>
    /// Contributes report-related menu buttons (HTML, AsciiDoc, simple reports)
    /// to detail windows and the item explorer.
    /// </summary>
    public class DefaultReportManagerViewExtensions : IViewExtensionFactory
    {
        /// <summary>
        /// Aggregates all report view extensions applicable to the given context.
        /// </summary>
        public IEnumerable<ViewExtension> GetViewExtensions(ViewExtensionInfo viewExtensionInfo)
        {
            foreach (var viewExtension in OfferReportForDetailForm(viewExtensionInfo))
                yield return viewExtension;

            foreach (var viewExtension in OfferSimpleReports(viewExtensionInfo))
                yield return viewExtension;

            foreach (var viewExtension in OfferHtmlReport(viewExtensionInfo))
                yield return viewExtension;

            foreach (var viewExtension in OfferAdocReport(viewExtensionInfo))
                yield return viewExtension;

            foreach (var viewExtension in OfferSimpleReportsInExplorer(viewExtensionInfo))
                yield return viewExtension;
        }

        // Offers an "As Html" button on every item's detail window.
        private IEnumerable<ViewExtension> OfferReportForDetailForm(ViewExtensionInfo viewExtensionInfo)
        {
            // Offers the creation of a report in every item
            // Check if the current query is about the detail form
            var detailFormControl = viewExtensionInfo.GetDetailFormControlOfDetailWindow();
            if (detailFormControl != null)
            {
                var effectiveForm =
                    detailFormControl.EffectiveForm ?? throw new InvalidOperationException("effectiveForm == null");
                yield return new ItemMenuButtonDefinition(
                    "As Html",
                    x => CreateReportForDetailElement(effectiveForm, x),
                    null,
                    "Item"
                );
            }
        }

        // Offers "Create Report" when the detail window shows a SimpleReportConfiguration.
        private static IEnumerable<ViewExtension> OfferSimpleReports(ViewExtensionInfo viewExtensionInfo)
        {
            // Handles the simple report
            var simpleReportInfo = viewExtensionInfo.IsItemInDetailWindowOfType(
                _DatenMeister.TheOne.Reports.__SimpleReportConfiguration);
            if (simpleReportInfo != null)
            {
                yield return new RowItemButtonDefinition(
                    "Create Report",
                    async (x, y) =>
                    {
                        var workspaceLogic = GiveMe.Scope.WorkspaceLogic;
                        // Snapshot the dialog content into a temporary element before the
                        // user navigates away from the dialog.
                        var tempObject = InMemoryObject.CreateEmpty();
                        simpleReportInfo.Value.Item1.StoreDialogContentIntoElement(tempObject);

                        var result = await NavigatorForDialogs.Locate(viewExtensionInfo.NavigationHost,
                            new NavigatorForDialogs.NavigatorForDialogConfiguration
                            {
                                DefaultWorkspace = workspaceLogic.GetDataWorkspace(),
                                Title = "Create Report",
                                OkButtonText = "Create Report",
                                Description = "You can now select an object to which the simple report will be created. If you select a root element, then the report will be created upon all elements of the extent. "
                            });

                        if (result != null)
                        {
                            var simpleReport = new SimpleReportCreator(workspaceLogic, tempObject);
                            string tmpPath;
                            using (var streamWriter = GetRandomWriter(out tmpPath))
                            {
                                simpleReport.CreateReport(streamWriter);
                            }

                            // Open the generated report with the OS default handler.
                            DotNetHelper.CreateProcess(tmpPath);
                        }
                    });
            }
        }

        // Offers "Create Report" when the detail window shows an HtmlReportInstance.
        private static IEnumerable<ViewExtension> OfferHtmlReport(ViewExtensionInfo viewExtensionInfo)
        {
            // Creates a html report
            var reportInstance = viewExtensionInfo.IsItemInDetailWindowOfType(
                _DatenMeister.TheOne.Reports.__HtmlReportInstance);
            if (reportInstance != null)
            {
                yield return new RowItemButtonDefinition(
                    "Create Report",
                    (x, y) =>
                    {
                        using var streamWriter = GetRandomWriter(out string tmpPath);
                        var reportGenerator = new HtmlReportCreator(streamWriter);
                        var reportlogic = new ReportLogic(
                            GiveMe.Scope.WorkspaceLogic,
                            GiveMe.Scope.ScopeStorage,
                            reportGenerator);
                        CreateReportWithDefinition(reportlogic, y);

                        DotNetHelper.CreateProcess(tmpPath);
                    });
            }
        }

        // Offers "Create Report" when the detail window shows an AdocReportInstance.
        // NOTE(review): unlike the HTML variant, this one does not open the generated
        // file afterwards — presumably intentional; confirm with product behavior.
        private static IEnumerable<ViewExtension> OfferAdocReport(ViewExtensionInfo viewExtensionInfo)
        {
            // Creates a html report
            var reportInstance = viewExtensionInfo.IsItemInDetailWindowOfType(
                _DatenMeister.TheOne.Reports.__AdocReportInstance);
            if (reportInstance != null)
            {
                yield return new RowItemButtonDefinition(
                    "Create Report",
                    (x, definition) =>
                    {
                        string tmpPath;
                        using (var streamWriter = GetRandomWriter(out tmpPath, ".adoc"))
                        {
                            var reportGenerator = new AdocReportCreator(streamWriter);
                            var reportLogic = new ReportLogic(
                                GiveMe.Scope.WorkspaceLogic,
                                GiveMe.Scope.ScopeStorage,
                                reportGenerator);
                            CreateReportWithDefinition(reportLogic, definition);
                        }
                    });
            }
        }

        /// <summary>
        /// Creates a report with the given definition
        /// </summary>
        /// <param name="reportLogic">Report generator to be used</param>
        /// <param name="definition">Definition to be used for the report</param>
        private static void CreateReportWithDefinition(ReportLogic reportLogic, IObject definition)
        {
            var sources = reportLogic.EvaluateSources(definition);
            foreach (var source in sources)
            {
                reportLogic.AddSource(source.Name, source.Collection);
            }

            var reportDefinition =
                definition.getOrDefault<IElement>(_DatenMeister._Reports._HtmlReportInstance.reportDefinition);
            if (reportDefinition == null)
            {
                MessageBox.Show($"The report is not found: {NamedElementMethods.GetName(definition)}");
                return;
            }

            reportLogic.GenerateReportByDefinition(reportDefinition);
        }

        // Offers default and configurable HTML report buttons in the item explorer's
        // "Export" menu. Priority controls ordering within the menu.
        private IEnumerable<ViewExtension> OfferSimpleReportsInExplorer(ViewExtensionInfo viewExtensionInfo)
        {
            var itemExplorerControl = viewExtensionInfo.GetItemExplorerControl();
            if (itemExplorerControl != null)
            {
                yield return new ItemMenuButtonDefinition(
                    "Report as Html (Default)",
                    x =>
                    {
                        if (x is IExtent asExtent)
                        {
                            CreateReportForExplorerView(asExtent);
                        }
                        else
                        {
                            CreateReportForExplorerView(x);
                        }
                    },
                    null,
                    "Export") {Priority = 2};

                yield return new ItemMenuButtonDefinition(
                    "Report as Html",
                    async x =>
                    {
                        var workspaceLogic = GiveMe.Scope.WorkspaceLogic;

                        var simpleConfigurationType =
                            workspaceLogic.GetTypesWorkspace()
                                .ResolveById(
                                    "DatenMeister.Models.Reports.Simple.SimpleReportConfiguration")
                            ?? throw new InvalidOperationException("SimpleReportConfiguration not found");

                        var form = workspaceLogic.GetInternalFormsExtent().element("#Form.Report.SimpleConfiguration");
                        var simpleConfiguration = InMemoryObject.TemporaryFactory.create(simpleConfigurationType);

                        // Let the user edit the configuration before generating.
                        var result = await NavigatorForItems.NavigateToElementDetailView(
                            viewExtensionInfo.NavigationHost,
                            new NavigateToItemConfig(simpleConfiguration)
                            {
                                Title = "Configure simple report",
                                Form = new FormDefinition(form)
                            });

                        if (result?.Result == NavigationResult.Saved)
                        {
                            if (x is IExtent asExtent)
                            {
                                CreateReportForExplorerView(
                                    asExtent, simpleConfiguration);
                            }
                            else
                            {
                                CreateReportForExplorerView(
                                    x, simpleConfiguration);
                            }
                        }
                    },
                    null,
                    "Export") {Priority = 1};
            }
        }

        /// <summary>
        /// Creates the report for the currently selected element.
        /// </summary>
        /// <param name="rootElement">Defines the item that is selected</param>
        /// <param name="simpleReportConfiguration">Describes the configuration to be used, otherwise a default
        /// configuration will be created</param>
        private void CreateReportForExplorerView(IObject rootElement, IElement? simpleReportConfiguration = null)
        {
            // Default configuration: show the root element plus all descendents with full names.
            simpleReportConfiguration ??= new MofFactory(rootElement)
                .create(_DatenMeister.TheOne.Reports.__SimpleReportConfiguration)
                .SetProperty(_DatenMeister._Reports._SimpleReportConfiguration.showDescendents, true)
                .SetProperty(_DatenMeister._Reports._SimpleReportConfiguration.showRootElement, true)
                .SetProperty(_DatenMeister._Reports._SimpleReportConfiguration.showFullName, true)
                .SetProperty(
                    _DatenMeister._Reports._SimpleReportConfiguration.rootElement,
                    rootElement.GetUri() ??
                    throw new InvalidOperationException("Uri of element could not be retrieved"))
                .SetProperty(
                    _DatenMeister._Reports._SimpleReportConfiguration.workspaceId,
                    rootElement.GetUriExtentOf()?.GetWorkspace()?.id ?? WorkspaceNames.WorkspaceData);

            string tmpPath;
            using (var streamWriter = GetRandomWriter(out tmpPath))
            {
                var reportCreator = new SimpleReportCreator(GiveMe.Scope.WorkspaceLogic, simpleReportConfiguration);
                reportCreator.CreateReport(streamWriter);
            }

            DotNetHelper.CreateProcess(tmpPath);
        }

        // Creates a UTF-8 StreamWriter over a fresh random file in the temp folder
        // and returns its path via the out parameter.
        private static StreamWriter GetRandomWriter(out string tmpPath, string extension = ".html")
        {
            var id = StringManipulation.RandomString(10);
            tmpPath = Path.Combine(Path.GetTempPath(), id + extension);
            return new StreamWriter(tmpPath, false, Encoding.UTF8);
        }

        /// <summary>
        /// Creates the report for a certain detail html element
        /// </summary>
        /// <param name="effectiveForm">Defines the effective form</param>
        /// <param name="selectedItem">The item being selected</param>
        private void CreateReportForDetailElement(IObject effectiveForm, IObject selectedItem)
        {
            var id = StringManipulation.RandomString(10);
            var tmpPath = Path.Combine(Path.GetTempPath(), id + ".html");

            using (var report = new HtmlReport(tmpPath))
            {
                report.StartReport("Detail: " + selectedItem);
                report.Add(new HtmlHeadline("Detail Information", 1));

                var itemFormatter = new ItemFormatter(report);
                itemFormatter.FormatItem(selectedItem, effectiveForm);

                report.EndReport();
            }

            DotNetHelper.CreateProcess(tmpPath);
        }
    }
}
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! using gagvc = Google.Ads.GoogleAds.V9.Common; using gagvr = Google.Ads.GoogleAds.V9.Resources; using gaxgrpc = Google.Api.Gax.Grpc; using gr = Google.Rpc; using grpccore = Grpc.Core; using moq = Moq; using st = System.Threading; using stt = System.Threading.Tasks; using NUnit.Framework; using Google.Ads.GoogleAds.V9.Services; namespace Google.Ads.GoogleAds.Tests.V9.Services { /// <summary>Generated unit tests.</summary> public sealed class GeneratedKeywordPlanServiceClientTest { [Category("Autogenerated")][Test] public void GetKeywordPlanRequestObject() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GetKeywordPlanRequest request = new GetKeywordPlanRequest { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; gagvr::KeywordPlan expectedResponse = new gagvr::KeywordPlan { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), ForecastPeriod = new gagvr::KeywordPlanForecastPeriod(), Id = -6774108720365892680L, KeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; mockGrpcClient.Setup(x => x.GetKeywordPlan(request, 
moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); gagvr::KeywordPlan response = client.GetKeywordPlan(request); Assert.AreEqual(expectedResponse, response); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public async stt::Task GetKeywordPlanRequestObjectAsync() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GetKeywordPlanRequest request = new GetKeywordPlanRequest { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; gagvr::KeywordPlan expectedResponse = new gagvr::KeywordPlan { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), ForecastPeriod = new gagvr::KeywordPlanForecastPeriod(), Id = -6774108720365892680L, KeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; mockGrpcClient.Setup(x => x.GetKeywordPlanAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::KeywordPlan>(stt::Task.FromResult(expectedResponse), null, null, null, null)); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); gagvr::KeywordPlan responseCallSettings = await client.GetKeywordPlanAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); Assert.AreEqual(expectedResponse, responseCallSettings); gagvr::KeywordPlan responseCancellationToken = await client.GetKeywordPlanAsync(request, st::CancellationToken.None); Assert.AreEqual(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public void GetKeywordPlan() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new 
moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GetKeywordPlanRequest request = new GetKeywordPlanRequest { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; gagvr::KeywordPlan expectedResponse = new gagvr::KeywordPlan { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), ForecastPeriod = new gagvr::KeywordPlanForecastPeriod(), Id = -6774108720365892680L, KeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; mockGrpcClient.Setup(x => x.GetKeywordPlan(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); gagvr::KeywordPlan response = client.GetKeywordPlan(request.ResourceName); Assert.AreEqual(expectedResponse, response); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public async stt::Task GetKeywordPlanAsync() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GetKeywordPlanRequest request = new GetKeywordPlanRequest { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; gagvr::KeywordPlan expectedResponse = new gagvr::KeywordPlan { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), ForecastPeriod = new gagvr::KeywordPlanForecastPeriod(), Id = -6774108720365892680L, KeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; mockGrpcClient.Setup(x => x.GetKeywordPlanAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::KeywordPlan>(stt::Task.FromResult(expectedResponse), null, 
null, null, null)); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); gagvr::KeywordPlan responseCallSettings = await client.GetKeywordPlanAsync(request.ResourceName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); Assert.AreEqual(expectedResponse, responseCallSettings); gagvr::KeywordPlan responseCancellationToken = await client.GetKeywordPlanAsync(request.ResourceName, st::CancellationToken.None); Assert.AreEqual(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public void GetKeywordPlanResourceNames() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GetKeywordPlanRequest request = new GetKeywordPlanRequest { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; gagvr::KeywordPlan expectedResponse = new gagvr::KeywordPlan { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), ForecastPeriod = new gagvr::KeywordPlanForecastPeriod(), Id = -6774108720365892680L, KeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; mockGrpcClient.Setup(x => x.GetKeywordPlan(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); gagvr::KeywordPlan response = client.GetKeywordPlan(request.ResourceNameAsKeywordPlanName); Assert.AreEqual(expectedResponse, response); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public async stt::Task GetKeywordPlanResourceNamesAsync() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); 
GetKeywordPlanRequest request = new GetKeywordPlanRequest { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; gagvr::KeywordPlan expectedResponse = new gagvr::KeywordPlan { ResourceNameAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), ForecastPeriod = new gagvr::KeywordPlanForecastPeriod(), Id = -6774108720365892680L, KeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; mockGrpcClient.Setup(x => x.GetKeywordPlanAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::KeywordPlan>(stt::Task.FromResult(expectedResponse), null, null, null, null)); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); gagvr::KeywordPlan responseCallSettings = await client.GetKeywordPlanAsync(request.ResourceNameAsKeywordPlanName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); Assert.AreEqual(expectedResponse, responseCallSettings); gagvr::KeywordPlan responseCancellationToken = await client.GetKeywordPlanAsync(request.ResourceNameAsKeywordPlanName, st::CancellationToken.None); Assert.AreEqual(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public void MutateKeywordPlansRequestObject() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); MutateKeywordPlansRequest request = new MutateKeywordPlansRequest { CustomerId = "customer_id3b3724cb", Operations = { new KeywordPlanOperation(), }, PartialFailure = false, ValidateOnly = true, }; MutateKeywordPlansResponse expectedResponse = new MutateKeywordPlansResponse { Results = { new MutateKeywordPlansResult(), }, PartialFailureError = new gr::Status(), }; mockGrpcClient.Setup(x => 
x.MutateKeywordPlans(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); MutateKeywordPlansResponse response = client.MutateKeywordPlans(request); Assert.AreEqual(expectedResponse, response); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public async stt::Task MutateKeywordPlansRequestObjectAsync() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); MutateKeywordPlansRequest request = new MutateKeywordPlansRequest { CustomerId = "customer_id3b3724cb", Operations = { new KeywordPlanOperation(), }, PartialFailure = false, ValidateOnly = true, }; MutateKeywordPlansResponse expectedResponse = new MutateKeywordPlansResponse { Results = { new MutateKeywordPlansResult(), }, PartialFailureError = new gr::Status(), }; mockGrpcClient.Setup(x => x.MutateKeywordPlansAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MutateKeywordPlansResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null)); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); MutateKeywordPlansResponse responseCallSettings = await client.MutateKeywordPlansAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); Assert.AreEqual(expectedResponse, responseCallSettings); MutateKeywordPlansResponse responseCancellationToken = await client.MutateKeywordPlansAsync(request, st::CancellationToken.None); Assert.AreEqual(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public void MutateKeywordPlans() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); MutateKeywordPlansRequest 
request = new MutateKeywordPlansRequest { CustomerId = "customer_id3b3724cb", Operations = { new KeywordPlanOperation(), }, }; MutateKeywordPlansResponse expectedResponse = new MutateKeywordPlansResponse { Results = { new MutateKeywordPlansResult(), }, PartialFailureError = new gr::Status(), }; mockGrpcClient.Setup(x => x.MutateKeywordPlans(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); MutateKeywordPlansResponse response = client.MutateKeywordPlans(request.CustomerId, request.Operations); Assert.AreEqual(expectedResponse, response); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public async stt::Task MutateKeywordPlansAsync() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); MutateKeywordPlansRequest request = new MutateKeywordPlansRequest { CustomerId = "customer_id3b3724cb", Operations = { new KeywordPlanOperation(), }, }; MutateKeywordPlansResponse expectedResponse = new MutateKeywordPlansResponse { Results = { new MutateKeywordPlansResult(), }, PartialFailureError = new gr::Status(), }; mockGrpcClient.Setup(x => x.MutateKeywordPlansAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MutateKeywordPlansResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null)); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); MutateKeywordPlansResponse responseCallSettings = await client.MutateKeywordPlansAsync(request.CustomerId, request.Operations, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); Assert.AreEqual(expectedResponse, responseCallSettings); MutateKeywordPlansResponse responseCancellationToken = await client.MutateKeywordPlansAsync(request.CustomerId, request.Operations, 
st::CancellationToken.None); Assert.AreEqual(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public void GenerateForecastCurveRequestObject() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastCurveRequest request = new GenerateForecastCurveRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastCurveResponse expectedResponse = new GenerateForecastCurveResponse { CampaignForecastCurves = { new KeywordPlanCampaignForecastCurve(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastCurve(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastCurveResponse response = client.GenerateForecastCurve(request); Assert.AreEqual(expectedResponse, response); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public async stt::Task GenerateForecastCurveRequestObjectAsync() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastCurveRequest request = new GenerateForecastCurveRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastCurveResponse expectedResponse = new GenerateForecastCurveResponse { CampaignForecastCurves = { new KeywordPlanCampaignForecastCurve(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastCurveAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateForecastCurveResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null)); KeywordPlanServiceClient client = new 
KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastCurveResponse responseCallSettings = await client.GenerateForecastCurveAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); Assert.AreEqual(expectedResponse, responseCallSettings); GenerateForecastCurveResponse responseCancellationToken = await client.GenerateForecastCurveAsync(request, st::CancellationToken.None); Assert.AreEqual(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public void GenerateForecastCurve() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastCurveRequest request = new GenerateForecastCurveRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastCurveResponse expectedResponse = new GenerateForecastCurveResponse { CampaignForecastCurves = { new KeywordPlanCampaignForecastCurve(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastCurve(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastCurveResponse response = client.GenerateForecastCurve(request.KeywordPlan); Assert.AreEqual(expectedResponse, response); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public async stt::Task GenerateForecastCurveAsync() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastCurveRequest request = new GenerateForecastCurveRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastCurveResponse expectedResponse = new 
GenerateForecastCurveResponse { CampaignForecastCurves = { new KeywordPlanCampaignForecastCurve(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastCurveAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateForecastCurveResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null)); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastCurveResponse responseCallSettings = await client.GenerateForecastCurveAsync(request.KeywordPlan, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); Assert.AreEqual(expectedResponse, responseCallSettings); GenerateForecastCurveResponse responseCancellationToken = await client.GenerateForecastCurveAsync(request.KeywordPlan, st::CancellationToken.None); Assert.AreEqual(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public void GenerateForecastCurveResourceNames() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastCurveRequest request = new GenerateForecastCurveRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastCurveResponse expectedResponse = new GenerateForecastCurveResponse { CampaignForecastCurves = { new KeywordPlanCampaignForecastCurve(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastCurve(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastCurveResponse response = client.GenerateForecastCurve(request.KeywordPlanAsKeywordPlanName); Assert.AreEqual(expectedResponse, response); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public async stt::Task 
GenerateForecastCurveResourceNamesAsync() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastCurveRequest request = new GenerateForecastCurveRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastCurveResponse expectedResponse = new GenerateForecastCurveResponse { CampaignForecastCurves = { new KeywordPlanCampaignForecastCurve(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastCurveAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateForecastCurveResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null)); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastCurveResponse responseCallSettings = await client.GenerateForecastCurveAsync(request.KeywordPlanAsKeywordPlanName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); Assert.AreEqual(expectedResponse, responseCallSettings); GenerateForecastCurveResponse responseCancellationToken = await client.GenerateForecastCurveAsync(request.KeywordPlanAsKeywordPlanName, st::CancellationToken.None); Assert.AreEqual(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public void GenerateForecastTimeSeriesRequestObject() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastTimeSeriesRequest request = new GenerateForecastTimeSeriesRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastTimeSeriesResponse expectedResponse = new GenerateForecastTimeSeriesResponse { WeeklyTimeSeriesForecasts = { new 
KeywordPlanWeeklyTimeSeriesForecast(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastTimeSeries(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastTimeSeriesResponse response = client.GenerateForecastTimeSeries(request); Assert.AreEqual(expectedResponse, response); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public async stt::Task GenerateForecastTimeSeriesRequestObjectAsync() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastTimeSeriesRequest request = new GenerateForecastTimeSeriesRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastTimeSeriesResponse expectedResponse = new GenerateForecastTimeSeriesResponse { WeeklyTimeSeriesForecasts = { new KeywordPlanWeeklyTimeSeriesForecast(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastTimeSeriesAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateForecastTimeSeriesResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null)); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastTimeSeriesResponse responseCallSettings = await client.GenerateForecastTimeSeriesAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); Assert.AreEqual(expectedResponse, responseCallSettings); GenerateForecastTimeSeriesResponse responseCancellationToken = await client.GenerateForecastTimeSeriesAsync(request, st::CancellationToken.None); Assert.AreEqual(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public void GenerateForecastTimeSeries() { 
moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastTimeSeriesRequest request = new GenerateForecastTimeSeriesRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastTimeSeriesResponse expectedResponse = new GenerateForecastTimeSeriesResponse { WeeklyTimeSeriesForecasts = { new KeywordPlanWeeklyTimeSeriesForecast(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastTimeSeries(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastTimeSeriesResponse response = client.GenerateForecastTimeSeries(request.KeywordPlan); Assert.AreEqual(expectedResponse, response); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public async stt::Task GenerateForecastTimeSeriesAsync() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastTimeSeriesRequest request = new GenerateForecastTimeSeriesRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastTimeSeriesResponse expectedResponse = new GenerateForecastTimeSeriesResponse { WeeklyTimeSeriesForecasts = { new KeywordPlanWeeklyTimeSeriesForecast(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastTimeSeriesAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateForecastTimeSeriesResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null)); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastTimeSeriesResponse responseCallSettings = await 
client.GenerateForecastTimeSeriesAsync(request.KeywordPlan, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); Assert.AreEqual(expectedResponse, responseCallSettings); GenerateForecastTimeSeriesResponse responseCancellationToken = await client.GenerateForecastTimeSeriesAsync(request.KeywordPlan, st::CancellationToken.None); Assert.AreEqual(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public void GenerateForecastTimeSeriesResourceNames() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastTimeSeriesRequest request = new GenerateForecastTimeSeriesRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastTimeSeriesResponse expectedResponse = new GenerateForecastTimeSeriesResponse { WeeklyTimeSeriesForecasts = { new KeywordPlanWeeklyTimeSeriesForecast(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastTimeSeries(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastTimeSeriesResponse response = client.GenerateForecastTimeSeries(request.KeywordPlanAsKeywordPlanName); Assert.AreEqual(expectedResponse, response); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public async stt::Task GenerateForecastTimeSeriesResourceNamesAsync() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastTimeSeriesRequest request = new GenerateForecastTimeSeriesRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastTimeSeriesResponse 
expectedResponse = new GenerateForecastTimeSeriesResponse { WeeklyTimeSeriesForecasts = { new KeywordPlanWeeklyTimeSeriesForecast(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastTimeSeriesAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateForecastTimeSeriesResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null)); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastTimeSeriesResponse responseCallSettings = await client.GenerateForecastTimeSeriesAsync(request.KeywordPlanAsKeywordPlanName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); Assert.AreEqual(expectedResponse, responseCallSettings); GenerateForecastTimeSeriesResponse responseCancellationToken = await client.GenerateForecastTimeSeriesAsync(request.KeywordPlanAsKeywordPlanName, st::CancellationToken.None); Assert.AreEqual(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [Category("Autogenerated")][Test] public void GenerateForecastMetricsRequestObject() { moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict); GenerateForecastMetricsRequest request = new GenerateForecastMetricsRequest { KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"), }; GenerateForecastMetricsResponse expectedResponse = new GenerateForecastMetricsResponse { CampaignForecasts = { new KeywordPlanCampaignForecast(), }, AdGroupForecasts = { new KeywordPlanAdGroupForecast(), }, KeywordForecasts = { new KeywordPlanKeywordForecast(), }, }; mockGrpcClient.Setup(x => x.GenerateForecastMetrics(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null); GenerateForecastMetricsResponse 
response = client.GenerateForecastMetrics(request);
            Assert.AreEqual(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Async request-object overload: both CallSettings and CancellationToken paths must hit the mocked RPC.
        [Category("Autogenerated")][Test]
        public async stt::Task GenerateForecastMetricsRequestObjectAsync()
        {
            moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict);
            GenerateForecastMetricsRequest request = new GenerateForecastMetricsRequest
            {
                KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"),
            };
            GenerateForecastMetricsResponse expectedResponse = new GenerateForecastMetricsResponse
            {
                CampaignForecasts = { new KeywordPlanCampaignForecast(), },
                AdGroupForecasts = { new KeywordPlanAdGroupForecast(), },
                KeywordForecasts = { new KeywordPlanKeywordForecast(), },
            };
            mockGrpcClient.Setup(x => x.GenerateForecastMetricsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateForecastMetricsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null);
            GenerateForecastMetricsResponse responseCallSettings = await client.GenerateForecastMetricsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            Assert.AreEqual(expectedResponse, responseCallSettings);
            GenerateForecastMetricsResponse responseCancellationToken = await client.GenerateForecastMetricsAsync(request, st::CancellationToken.None);
            Assert.AreEqual(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Flattened string overload (sync): the keyword plan resource name is passed as a plain string.
        [Category("Autogenerated")][Test]
        public void GenerateForecastMetrics()
        {
            moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict);
            GenerateForecastMetricsRequest request = new GenerateForecastMetricsRequest
            {
                KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"),
            };
            GenerateForecastMetricsResponse expectedResponse = new GenerateForecastMetricsResponse
            {
                CampaignForecasts = { new KeywordPlanCampaignForecast(), },
                AdGroupForecasts = { new KeywordPlanAdGroupForecast(), },
                KeywordForecasts = { new KeywordPlanKeywordForecast(), },
            };
            mockGrpcClient.Setup(x => x.GenerateForecastMetrics(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null);
            GenerateForecastMetricsResponse response = client.GenerateForecastMetrics(request.KeywordPlan);
            Assert.AreEqual(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Flattened string overload (async).
        [Category("Autogenerated")][Test]
        public async stt::Task GenerateForecastMetricsAsync()
        {
            moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict);
            GenerateForecastMetricsRequest request = new GenerateForecastMetricsRequest
            {
                KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"),
            };
            GenerateForecastMetricsResponse expectedResponse = new GenerateForecastMetricsResponse
            {
                CampaignForecasts = { new KeywordPlanCampaignForecast(), },
                AdGroupForecasts = { new KeywordPlanAdGroupForecast(), },
                KeywordForecasts = { new KeywordPlanKeywordForecast(), },
            };
            mockGrpcClient.Setup(x => x.GenerateForecastMetricsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateForecastMetricsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null);
            GenerateForecastMetricsResponse responseCallSettings = await client.GenerateForecastMetricsAsync(request.KeywordPlan, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            Assert.AreEqual(expectedResponse, responseCallSettings);
            GenerateForecastMetricsResponse responseCancellationToken = await client.GenerateForecastMetricsAsync(request.KeywordPlan, st::CancellationToken.None);
            Assert.AreEqual(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Flattened resource-name overload (sync): passes the typed KeywordPlanName.
        [Category("Autogenerated")][Test]
        public void GenerateForecastMetricsResourceNames()
        {
            moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict);
            GenerateForecastMetricsRequest request = new GenerateForecastMetricsRequest
            {
                KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"),
            };
            GenerateForecastMetricsResponse expectedResponse = new GenerateForecastMetricsResponse
            {
                CampaignForecasts = { new KeywordPlanCampaignForecast(), },
                AdGroupForecasts = { new KeywordPlanAdGroupForecast(), },
                KeywordForecasts = { new KeywordPlanKeywordForecast(), },
            };
            mockGrpcClient.Setup(x => x.GenerateForecastMetrics(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null);
            GenerateForecastMetricsResponse response = client.GenerateForecastMetrics(request.KeywordPlanAsKeywordPlanName);
            Assert.AreEqual(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Flattened resource-name overload (async).
        [Category("Autogenerated")][Test]
        public async stt::Task GenerateForecastMetricsResourceNamesAsync()
        {
            moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict);
            GenerateForecastMetricsRequest request = new GenerateForecastMetricsRequest
            {
                KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"),
            };
            GenerateForecastMetricsResponse expectedResponse = new GenerateForecastMetricsResponse
            {
                CampaignForecasts = { new KeywordPlanCampaignForecast(), },
                AdGroupForecasts = { new KeywordPlanAdGroupForecast(), },
                KeywordForecasts = { new KeywordPlanKeywordForecast(), },
            };
            mockGrpcClient.Setup(x => x.GenerateForecastMetricsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateForecastMetricsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null);
            GenerateForecastMetricsResponse responseCallSettings = await client.GenerateForecastMetricsAsync(request.KeywordPlanAsKeywordPlanName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            Assert.AreEqual(expectedResponse, responseCallSettings);
            GenerateForecastMetricsResponse responseCancellationToken = await client.GenerateForecastMetricsAsync(request.KeywordPlanAsKeywordPlanName, st::CancellationToken.None);
            Assert.AreEqual(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Request-object overload (sync) for GenerateHistoricalMetrics, exercising all request fields.
        [Category("Autogenerated")][Test]
        public void GenerateHistoricalMetricsRequestObject()
        {
            moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict);
            GenerateHistoricalMetricsRequest request = new GenerateHistoricalMetricsRequest
            {
                KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"),
                AggregateMetrics = new gagvc::KeywordPlanAggregateMetrics(),
                HistoricalMetricsOptions = new gagvc::HistoricalMetricsOptions(),
            };
            GenerateHistoricalMetricsResponse expectedResponse = new GenerateHistoricalMetricsResponse
            {
                Metrics = { new KeywordPlanKeywordHistoricalMetrics(), },
                AggregateMetricResults = new gagvc::KeywordPlanAggregateMetricResults(),
            };
            mockGrpcClient.Setup(x => x.GenerateHistoricalMetrics(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null);
            GenerateHistoricalMetricsResponse response = client.GenerateHistoricalMetrics(request);
            Assert.AreEqual(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Request-object overload (async) for GenerateHistoricalMetrics.
        [Category("Autogenerated")][Test]
        public async stt::Task GenerateHistoricalMetricsRequestObjectAsync()
        {
            moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict);
            GenerateHistoricalMetricsRequest request = new GenerateHistoricalMetricsRequest
            {
                KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"),
                AggregateMetrics = new gagvc::KeywordPlanAggregateMetrics(),
                HistoricalMetricsOptions = new gagvc::HistoricalMetricsOptions(),
            };
            GenerateHistoricalMetricsResponse expectedResponse = new GenerateHistoricalMetricsResponse
            {
                Metrics = { new KeywordPlanKeywordHistoricalMetrics(), },
                AggregateMetricResults = new gagvc::KeywordPlanAggregateMetricResults(),
            };
            mockGrpcClient.Setup(x => x.GenerateHistoricalMetricsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateHistoricalMetricsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null);
            GenerateHistoricalMetricsResponse responseCallSettings = await client.GenerateHistoricalMetricsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            Assert.AreEqual(expectedResponse, responseCallSettings);
            GenerateHistoricalMetricsResponse responseCancellationToken = await client.GenerateHistoricalMetricsAsync(request, st::CancellationToken.None);
            Assert.AreEqual(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Flattened string overload (sync).
        [Category("Autogenerated")][Test]
        public void GenerateHistoricalMetrics()
        {
            moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict);
            GenerateHistoricalMetricsRequest request = new GenerateHistoricalMetricsRequest
            {
                KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"),
            };
            GenerateHistoricalMetricsResponse expectedResponse = new GenerateHistoricalMetricsResponse
            {
                Metrics = { new KeywordPlanKeywordHistoricalMetrics(), },
                AggregateMetricResults = new gagvc::KeywordPlanAggregateMetricResults(),
            };
            mockGrpcClient.Setup(x => x.GenerateHistoricalMetrics(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null);
            GenerateHistoricalMetricsResponse response = client.GenerateHistoricalMetrics(request.KeywordPlan);
            Assert.AreEqual(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Flattened string overload (async).
        [Category("Autogenerated")][Test]
        public async stt::Task GenerateHistoricalMetricsAsync()
        {
            moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict);
            GenerateHistoricalMetricsRequest request = new GenerateHistoricalMetricsRequest
            {
                KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"),
            };
            GenerateHistoricalMetricsResponse expectedResponse = new GenerateHistoricalMetricsResponse
            {
                Metrics = { new KeywordPlanKeywordHistoricalMetrics(), },
                AggregateMetricResults = new gagvc::KeywordPlanAggregateMetricResults(),
            };
            mockGrpcClient.Setup(x => x.GenerateHistoricalMetricsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateHistoricalMetricsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null);
            GenerateHistoricalMetricsResponse responseCallSettings = await client.GenerateHistoricalMetricsAsync(request.KeywordPlan, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            Assert.AreEqual(expectedResponse, responseCallSettings);
            GenerateHistoricalMetricsResponse responseCancellationToken = await client.GenerateHistoricalMetricsAsync(request.KeywordPlan, st::CancellationToken.None);
            Assert.AreEqual(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Flattened resource-name overload (sync).
        [Category("Autogenerated")][Test]
        public void GenerateHistoricalMetricsResourceNames()
        {
            moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict);
            GenerateHistoricalMetricsRequest request = new GenerateHistoricalMetricsRequest
            {
                KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"),
            };
            GenerateHistoricalMetricsResponse expectedResponse = new GenerateHistoricalMetricsResponse
            {
                Metrics = { new KeywordPlanKeywordHistoricalMetrics(), },
                AggregateMetricResults = new gagvc::KeywordPlanAggregateMetricResults(),
            };
            mockGrpcClient.Setup(x => x.GenerateHistoricalMetrics(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null);
            GenerateHistoricalMetricsResponse response = client.GenerateHistoricalMetrics(request.KeywordPlanAsKeywordPlanName);
            Assert.AreEqual(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Flattened resource-name overload (async).
        [Category("Autogenerated")][Test]
        public async stt::Task GenerateHistoricalMetricsResourceNamesAsync()
        {
            moq::Mock<KeywordPlanService.KeywordPlanServiceClient> mockGrpcClient = new moq::Mock<KeywordPlanService.KeywordPlanServiceClient>(moq::MockBehavior.Strict);
            GenerateHistoricalMetricsRequest request = new GenerateHistoricalMetricsRequest
            {
                KeywordPlanAsKeywordPlanName = gagvr::KeywordPlanName.FromCustomerKeywordPlan("[CUSTOMER_ID]", "[KEYWORD_PLAN_ID]"),
            };
            GenerateHistoricalMetricsResponse expectedResponse = new GenerateHistoricalMetricsResponse
            {
                Metrics = { new KeywordPlanKeywordHistoricalMetrics(), },
                AggregateMetricResults = new gagvc::KeywordPlanAggregateMetricResults(),
            };
            mockGrpcClient.Setup(x => x.GenerateHistoricalMetricsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<GenerateHistoricalMetricsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            KeywordPlanServiceClient client = new KeywordPlanServiceClientImpl(mockGrpcClient.Object, null);
            GenerateHistoricalMetricsResponse responseCallSettings = await client.GenerateHistoricalMetricsAsync(request.KeywordPlanAsKeywordPlanName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            Assert.AreEqual(expectedResponse, responseCallSettings);
            GenerateHistoricalMetricsResponse responseCancellationToken = await client.GenerateHistoricalMetricsAsync(request.KeywordPlanAsKeywordPlanName, st::CancellationToken.None);
            Assert.AreEqual(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using Microsoft.VisualStudio.Text;
using Roslyn.Test.Utilities;
using Xunit;

namespace Microsoft.VisualStudio.InteractiveWindow.UnitTests
{
    /// <summary>
    /// Tests for history navigation (up/down/search) in the interactive window.
    /// Each test drives <see cref="IInteractiveWindowOperations"/> and asserts the
    /// resulting content of the current language buffer.
    /// </summary>
    public class InteractiveWindowHistoryTests : IDisposable
    {
        #region Helpers

        private readonly InteractiveWindowTestHost _testHost;
        private readonly IInteractiveWindow _window;
        private readonly IInteractiveWindowOperations _operations;

        public InteractiveWindowHistoryTests()
        {
            _testHost = new InteractiveWindowTestHost();
            _window = _testHost.Window;
            _operations = _window.Operations;
        }

        void IDisposable.Dispose()
        {
            _testHost.Dispose();
        }

        /// <summary>
        /// Sets the active code to the specified text w/o executing it.
        /// </summary>
        private void SetActiveCode(string text)
        {
            using (var edit = _window.CurrentLanguageBuffer.CreateEdit(EditOptions.None, reiteratedVersionNumber: null, editTag: null))
            {
                edit.Replace(new Span(0, _window.CurrentLanguageBuffer.CurrentSnapshot.Length), text);
                edit.Apply();
            }
        }

        /// <summary>Inserts and executes each input in order.</summary>
        private void InsertAndExecuteInputs(params string[] inputs)
        {
            foreach (var input in inputs)
            {
                InsertAndExecuteInput(input);
            }
        }

        /// <summary>Inserts the input, verifies it landed in the buffer, then executes it.</summary>
        private void InsertAndExecuteInput(string input)
        {
            _window.InsertCode(input);
            AssertCurrentSubmission(input);
            ExecuteInput();
        }

        private void ExecuteInput()
        {
            ((InteractiveWindow)_window).ExecuteInputAsync().PumpingWait();
        }

        private void AssertCurrentSubmission(string expected)
        {
            Assert.Equal(expected, _window.CurrentLanguageBuffer.CurrentSnapshot.GetText());
        }

        #endregion Helpers

        [Fact]
        public void CheckHistoryPrevious()
        {
            const string inputString = "1 ";
            InsertAndExecuteInput(inputString);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString);
        }

        [Fact]
        public void CheckHistoryPreviousNotCircular()
        {
            //submit, submit, up, up, up
            const string inputString1 = "1 ";
            const string inputString2 = "2 ";
            InsertAndExecuteInput(inputString1);
            InsertAndExecuteInput(inputString2);

            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString2);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString1);
            //this up should not be circular
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString1);
        }

        [Fact]
        public void CheckHistoryPreviousAfterSubmittingEntryFromHistory()
        {
            //submit, submit, submit, up, up, submit, up, up, up
            const string inputString1 = "1 ";
            const string inputString2 = "2 ";
            const string inputString3 = "3 ";

            InsertAndExecuteInput(inputString1);
            InsertAndExecuteInput(inputString2);
            InsertAndExecuteInput(inputString3);

            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString3);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString2);
            ExecuteInput();

            //history navigation should start from the last history pointer
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString2);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString1);
            //has reached the top, no change
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString1);
        }

        [Fact]
        public void CheckHistoryPreviousAfterSubmittingNewEntryWhileNavigatingHistory()
        {
            //submit, submit, up, up, submit new, up, up, up
            const string inputString1 = "1 ";
            const string inputString2 = "2 ";
            const string inputString3 = "3 ";

            InsertAndExecuteInput(inputString1);
            InsertAndExecuteInput(inputString2);

            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString2);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString1);

            SetActiveCode(inputString3);
            AssertCurrentSubmission(inputString3);
            ExecuteInput();

            //History pointer should be reset. Previous should now bring up last entry
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString3);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString2);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString1);
            //has reached the top, no change
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString1);
        }

        // FIX: this test was missing its [Fact] attribute, so it was never discovered or run.
        [Fact]
        public void CheckHistoryNextNotCircular()
        {
            //submit, submit, down, up, down, down
            const string inputString1 = "1 ";
            const string inputString2 = "2 ";
            const string empty = "";

            InsertAndExecuteInput(inputString1);
            InsertAndExecuteInput(inputString2);

            //Next should do nothing as history pointer is uninitialized and there is
            //no next entry. Buffer should be empty
            _operations.HistoryNext();
            AssertCurrentSubmission(empty);

            //Go back one entry
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString2);

            //Go fwd one entry - should do nothing as history pointer is at last entry
            //buffer should have same value as before
            _operations.HistoryNext();
            AssertCurrentSubmission(inputString2);

            //Next should again do nothing as it is the last item, buffer should have the same value
            _operations.HistoryNext();
            AssertCurrentSubmission(inputString2);
        }

        [Fact]
        public void CheckHistoryNextAfterSubmittingEntryFromHistory()
        {
            //submit, submit, submit, up, up, submit, down, down, down
            const string inputString1 = "1 ";
            const string inputString2 = "2 ";
            const string inputString3 = "3 ";

            InsertAndExecuteInput(inputString1);
            InsertAndExecuteInput(inputString2);
            InsertAndExecuteInput(inputString3);

            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString3);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString2);

            //submit inputString2 again. Should be added at the end of history
            ExecuteInput();

            //history navigation should start from the last history pointer
            _operations.HistoryNext();
            AssertCurrentSubmission(inputString3);
            //This next should take us to the InputString2 which was resubmitted
            _operations.HistoryNext();
            AssertCurrentSubmission(inputString2);
            //has reached the top, no change
            _operations.HistoryNext();
            AssertCurrentSubmission(inputString2);
        }

        [Fact]
        public void CheckHistoryNextAfterSubmittingNewEntryWhileNavigatingHistory()
        {
            //submit, submit, up, up, submit new, down, up
            const string inputString1 = "1 ";
            const string inputString2 = "2 ";
            const string inputString3 = "3 ";
            const string empty = "";

            InsertAndExecuteInput(inputString1);
            InsertAndExecuteInput(inputString2);

            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString2);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString1);

            SetActiveCode(inputString3);
            AssertCurrentSubmission(inputString3);
            ExecuteInput();

            //History pointer should be reset. next should do nothing
            _operations.HistoryNext();
            AssertCurrentSubmission(empty);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(inputString3);
        }

        [Fact]
        public void CheckUncommittedInputAfterNavigatingHistory()
        {
            //submit, submit, up, up, submit new, down, up
            const string inputString1 = "1 ";
            const string inputString2 = "2 ";
            const string uncommittedInput = "uncommittedInput";

            InsertAndExecuteInput(inputString1);
            InsertAndExecuteInput(inputString2);
            //Add uncommitted input
            SetActiveCode(uncommittedInput);
            //Navigate history. This should save uncommitted input
            _operations.HistoryPrevious();
            //Navigate to next item at the end of history.
            //This should bring back uncommitted input
            _operations.HistoryNext();
            AssertCurrentSubmission(uncommittedInput);
        }

        [Fact]
        public void CheckHistoryPreviousAfterReset()
        {
            const string resetCommand1 = "#reset";
            const string resetCommand2 = "#reset ";
            InsertAndExecuteInput(resetCommand1);
            InsertAndExecuteInput(resetCommand2);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(resetCommand2);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(resetCommand1);
            _operations.HistoryPrevious();
            AssertCurrentSubmission(resetCommand1);
        }

        [Fact]
        public void TestHistoryPrevious()
        {
            InsertAndExecuteInputs("1", "2", "3");

            _operations.HistoryPrevious();
            AssertCurrentSubmission("3");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("2");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
        }

        [Fact]
        public void TestHistoryNext()
        {
            InsertAndExecuteInputs("1", "2", "3");
            SetActiveCode("4");

            _operations.HistoryNext();
            AssertCurrentSubmission("4");
            _operations.HistoryNext();
            AssertCurrentSubmission("4");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("3");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("2");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistoryNext();
            AssertCurrentSubmission("2");
            _operations.HistoryNext();
            AssertCurrentSubmission("3");
            _operations.HistoryNext();
            AssertCurrentSubmission("4");
            _operations.HistoryNext();
            AssertCurrentSubmission("4");
        }

        [Fact]
        public void TestHistoryPreviousWithPattern_NoMatch()
        {
            InsertAndExecuteInputs("123", "12", "1");

            _operations.HistoryPrevious("4");
            AssertCurrentSubmission("");
            _operations.HistoryPrevious("4");
            AssertCurrentSubmission("");
        }

        [Fact]
        public void TestHistoryPreviousWithPattern_PatternMaintained()
        {
            InsertAndExecuteInputs("123", "12", "1");

            _operations.HistoryPrevious("12");
            AssertCurrentSubmission("12"); // Skip over non-matching entry.
            _operations.HistoryPrevious("12");
            AssertCurrentSubmission("123");
            _operations.HistoryPrevious("12");
            AssertCurrentSubmission("123");
        }

        [Fact]
        public void TestHistoryPreviousWithPattern_PatternDropped()
        {
            InsertAndExecuteInputs("1", "2", "3");

            _operations.HistoryPrevious("2");
            AssertCurrentSubmission("2"); // Skip over non-matching entry.
            _operations.HistoryPrevious(null);
            AssertCurrentSubmission("1"); // Pattern isn't passed, so return to normal iteration.
            _operations.HistoryPrevious(null);
            AssertCurrentSubmission("1");
        }

        [Fact]
        public void TestHistoryPreviousWithPattern_PatternChanged()
        {
            InsertAndExecuteInputs("10", "20", "15", "25");

            _operations.HistoryPrevious("1");
            AssertCurrentSubmission("15"); // Skip over non-matching entry.
            _operations.HistoryPrevious("2");
            AssertCurrentSubmission("20"); // Skip over non-matching entry.
            _operations.HistoryPrevious("2");
            AssertCurrentSubmission("20");
        }

        [Fact]
        public void TestHistoryNextWithPattern_NoMatch()
        {
            InsertAndExecuteInputs("start", "1", "12", "123");
            SetActiveCode("end");

            _operations.HistoryPrevious();
            AssertCurrentSubmission("123");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("12");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("start");
            _operations.HistoryNext("4");
            AssertCurrentSubmission("end");
            _operations.HistoryNext("4");
            AssertCurrentSubmission("end");
        }

        [Fact]
        public void TestHistoryNextWithPattern_PatternMaintained()
        {
            InsertAndExecuteInputs("start", "1", "12", "123");
            SetActiveCode("end");

            _operations.HistoryPrevious();
            AssertCurrentSubmission("123");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("12");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("start");
            _operations.HistoryNext("12");
            AssertCurrentSubmission("12"); // Skip over non-matching entry.
            _operations.HistoryNext("12");
            AssertCurrentSubmission("123");
            _operations.HistoryNext("12");
            AssertCurrentSubmission("end");
        }

        [Fact]
        public void TestHistoryNextWithPattern_PatternDropped()
        {
            InsertAndExecuteInputs("start", "3", "2", "1");
            SetActiveCode("end");

            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("2");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("3");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("start");
            _operations.HistoryNext("2");
            AssertCurrentSubmission("2"); // Skip over non-matching entry.
            _operations.HistoryNext(null);
            AssertCurrentSubmission("1"); // Pattern isn't passed, so return to normal iteration.
            _operations.HistoryNext(null);
            AssertCurrentSubmission("end");
        }

        [Fact]
        public void TestHistoryNextWithPattern_PatternChanged()
        {
            InsertAndExecuteInputs("start", "25", "15", "20", "10");
            SetActiveCode("end");

            _operations.HistoryPrevious();
            AssertCurrentSubmission("10");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("20");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("15");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("25");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("start");
            _operations.HistoryNext("1");
            AssertCurrentSubmission("15"); // Skip over non-matching entry.
            _operations.HistoryNext("2");
            AssertCurrentSubmission("20"); // Skip over non-matching entry.
            _operations.HistoryNext("2");
            AssertCurrentSubmission("end");
        }

        [Fact]
        public void TestHistorySearchPrevious()
        {
            InsertAndExecuteInputs("123", "12", "1");

            // Default search string is empty.
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("1"); // Pattern is captured before this step.
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("12");
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("123");
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("123");
        }

        [Fact]
        public void TestHistorySearchPreviousWithPattern()
        {
            InsertAndExecuteInputs("123", "12", "1");
            SetActiveCode("12");

            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("12"); // Pattern is captured before this step.
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("123");
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("123");
        }

        [Fact]
        public void TestHistorySearchNextWithPattern()
        {
            InsertAndExecuteInputs("12", "123", "12", "1");
            SetActiveCode("end");

            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("12");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("123");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("12");
            _operations.HistorySearchNext();
            AssertCurrentSubmission("123"); // Pattern is captured before this step.
            _operations.HistorySearchNext();
            AssertCurrentSubmission("12");
            _operations.HistorySearchNext();
            AssertCurrentSubmission("end");
        }

        [Fact]
        public void TestHistoryPreviousAndSearchPrevious()
        {
            InsertAndExecuteInputs("200", "100", "30", "20", "10", "2", "1");

            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("10"); // Pattern is captured before this step.
            _operations.HistoryPrevious();
            AssertCurrentSubmission("20"); // NB: Doesn't match pattern.
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("100"); // NB: Reuses existing pattern.
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("100");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("200");
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("200"); // No-op results in non-matching history entry after SearchPrevious.
        }

        [Fact]
        public void TestHistoryPreviousAndSearchPrevious_ExplicitPattern()
        {
            InsertAndExecuteInputs("200", "100", "30", "20", "10", "2", "1");

            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("10"); // Pattern is captured before this step.
            _operations.HistoryPrevious("2");
            AssertCurrentSubmission("20"); // NB: Doesn't match pattern.
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("100"); // NB: Reuses existing pattern.
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("100");
            _operations.HistoryPrevious("2");
            AssertCurrentSubmission("200");
            _operations.HistorySearchPrevious();
            AssertCurrentSubmission("200"); // No-op results in non-matching history entry after SearchPrevious.
        }

        [Fact]
        public void TestHistoryNextAndSearchNext()
        {
            InsertAndExecuteInputs("1", "2", "10", "20", "30", "100", "200");
            SetActiveCode("4");

            _operations.HistoryPrevious();
            AssertCurrentSubmission("200");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("100");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("30");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("20");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("10");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("2");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistorySearchNext();
            AssertCurrentSubmission("10"); // Pattern is captured before this step.
            _operations.HistoryNext();
            AssertCurrentSubmission("20"); // NB: Doesn't match pattern.
            _operations.HistorySearchNext();
            AssertCurrentSubmission("100"); // NB: Reuses existing pattern.
            _operations.HistorySearchNext();
            AssertCurrentSubmission("4"); // Restoring input results in non-matching history entry after SearchNext.
            _operations.HistoryNext();
            AssertCurrentSubmission("4");
        }

        [Fact]
        public void TestHistoryNextAndSearchNext_ExplicitPattern()
        {
            InsertAndExecuteInputs("1", "2", "10", "20", "30", "100", "200");
            SetActiveCode("4");

            _operations.HistoryPrevious();
            AssertCurrentSubmission("200");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("100");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("30");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("20");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("10");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("2");
            _operations.HistoryPrevious();
            AssertCurrentSubmission("1");
            _operations.HistorySearchNext();
            AssertCurrentSubmission("10"); // Pattern is captured before this step.
            _operations.HistoryNext("2");
            AssertCurrentSubmission("20"); // NB: Doesn't match pattern.
            _operations.HistorySearchNext();
            AssertCurrentSubmission("100"); // NB: Reuses existing pattern.
            _operations.HistorySearchNext();
            AssertCurrentSubmission("4"); // Restoring input results in non-matching history entry after SearchNext.
            _operations.HistoryNext("2");
            AssertCurrentSubmission("4");
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;

namespace TinTorch.Dns.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// </summary>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
            SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
            {
                DefaultSampleObjectFactory,
            };
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
        /// stopping when the factory successfully returns a non-<see langref="null"/> object.
        /// </summary>
        /// <remarks>
        /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
        /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
        /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
        [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures", Justification = "This is an appropriate nesting of generic types")]
        public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }

            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
            // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
            // If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType. if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) || ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) || ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) || ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample)) { return sample; } return null; } /// <summary> /// Gets the sample object that will be serialized by the formatters. /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other /// factories in <see cref="SampleObjectFactories"/>. /// </summary> /// <param name="type">The type.</param> /// <returns>The sample object.</returns> [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")] public virtual object GetSampleObject(Type type) { object sampleObject; if (!SampleObjects.TryGetValue(type, out sampleObject)) { // No specific object available, try our factories. foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories) { if (factory == null) { continue; } try { sampleObject = factory(this, type); if (sampleObject != null) { break; } } catch { // Ignore any problems encountered in the factory; go on to the next one (if any). } } } return sampleObject; } /// <summary> /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action. 
/// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <returns>The type.</returns> public virtual Type ResolveHttpRequestMessageType(ApiDescription api) { string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName; string actionName = api.ActionDescriptor.ActionName; IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name); Collection<MediaTypeFormatter> formatters; return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters); } /// <summary> /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used. /// </summary> /// <param name="api">The <see cref="ApiDescription"/>.</param> /// <param name="controllerName">Name of the controller.</param> /// <param name="actionName">Name of the action.</param> /// <param name="parameterNames">The parameter names.</param> /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param> /// <param name="formatters">The formatters.</param> [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")] public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters) { if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection)) { throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection)); } if (api == null) { throw new ArgumentNullException("api"); } Type type; if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) || ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type)) { 
// Re-compute the supported formatters based on type Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>(); foreach (var formatter in api.ActionDescriptor.Configuration.Formatters) { if (IsFormatSupported(sampleDirection, formatter, type)) { newFormatters.Add(formatter); } } formatters = newFormatters; } else { switch (sampleDirection) { case SampleDirection.Request: ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody); type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType; formatters = api.SupportedRequestBodyFormatters; break; case SampleDirection.Response: default: type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType; formatters = api.SupportedResponseFormatters; break; } } return type; } /// <summary> /// Writes the sample object using formatter. /// </summary> /// <param name="formatter">The formatter.</param> /// <param name="value">The value.</param> /// <param name="type">The type.</param> /// <param name="mediaType">Type of the media.</param> /// <returns></returns> [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")] public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType) { if (formatter == null) { throw new ArgumentNullException("formatter"); } if (mediaType == null) { throw new ArgumentNullException("mediaType"); } object sample = String.Empty; MemoryStream ms = null; HttpContent content = null; try { if (formatter.CanWriteType(type)) { ms = new MemoryStream(); content = new ObjectContent(type, value, formatter, mediaType); formatter.WriteToStreamAsync(type, value, ms, content, null).Wait(); ms.Position = 0; StreamReader reader = new StreamReader(ms); string serializedSampleString = 
reader.ReadToEnd(); if (mediaType.MediaType.ToUpperInvariant().Contains("XML")) { serializedSampleString = TryFormatXml(serializedSampleString); } else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON")) { serializedSampleString = TryFormatJson(serializedSampleString); } sample = new TextSample(serializedSampleString); } else { sample = new InvalidSample(String.Format( CultureInfo.CurrentCulture, "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.", mediaType, formatter.GetType().Name, type.Name)); } } catch (Exception e) { sample = new InvalidSample(String.Format( CultureInfo.CurrentCulture, "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}", formatter.GetType().Name, mediaType.MediaType, UnwrapException(e).Message)); } finally { if (ms != null) { ms.Dispose(); } if (content != null) { content.Dispose(); } } return sample; } internal static Exception UnwrapException(Exception exception) { AggregateException aggregateException = exception as AggregateException; if (aggregateException != null) { return aggregateException.Flatten().InnerException; } return exception; } // Default factory for sample objects private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type) { // Try to create a default sample object ObjectGenerator objectGenerator = new ObjectGenerator(); return objectGenerator.GenerateObject(type); } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")] private static string TryFormatJson(string str) { try { object parsedJson = JsonConvert.DeserializeObject(str); return JsonConvert.SerializeObject(parsedJson, Formatting.Indented); } catch { // can't parse JSON, return the original string return str; } } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", 
Justification = "Handling the failure by returning the original string.")] private static string TryFormatXml(string str) { try { XDocument xml = XDocument.Parse(str); return xml.ToString(); } catch { // can't parse XML, return the original string return str; } } private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type) { switch (sampleDirection) { case SampleDirection.Request: return formatter.CanReadType(type); case SampleDirection.Response: return formatter.CanWriteType(type); } return false; } private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection) { HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase); foreach (var sample in ActionSamples) { HelpPageSampleKey sampleKey = sample.Key; if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) && String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) && (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) && sampleDirection == sampleKey.SampleDirection) { yield return sample; } } } private static object WrapSampleIfString(object sample) { string stringSample = sample as string; if (stringSample != null) { return new TextSample(stringSample); } return sample; } } }
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Collections.Generic;

using Microsoft.Research.DataStructures;

namespace Microsoft.Research.CodeAnalysis
{
    /// <summary>
    /// Convenience base class for MSIL visitors: every instruction handler simply forwards to
    /// <see cref="Default"/>, so derived visitors only override the opcodes they care about.
    /// The sole exception is <see cref="Break"/>, which is not implemented and throws.
    /// </summary>
    public abstract class MSILVisitor<Label, Local, Parameter, Method, Field, Type, Source, Dest, Data, Result> : IVisitMSIL<Label, Local, Parameter, Method, Field, Type, Source, Dest, Data, Result>
    {
        /// <summary>
        /// Catch-all handler invoked by every instruction visit method that is not overridden.
        /// </summary>
        protected abstract Result Default(Label pc, Data data);

        #region IVisitMSIL<Label,Local,Parameter,Method,Field,Type,Source,Dest,Data,Result> Members

        public virtual Result Arglist(Label pc, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Binary(Label pc, BinaryOperator op, Dest dest, Source s1, Source s2, Data data) { return Default(pc, data); }
        public virtual Result BranchCond(Label pc, Label target, BranchOperator bop, Source value1, Source value2, Data data) { return Default(pc, data); }
        public virtual Result BranchFalse(Label pc, Label target, Source cond, Data data) { return Default(pc, data); }
        public virtual Result BranchTrue(Label pc, Label target, Source cond, Data data) { return Default(pc, data); }
        public virtual Result Branch(Label pc, Label target, bool leave, Data data) { return Default(pc, data); }

        public virtual Result Break(Label pc, Data data)
        {
            // Deliberately NOT forwarded to Default: this handler was never implemented.
            // Throw the idiomatic NotImplementedException instead of a bare System.Exception;
            // its default message is the same "The method or operation is not implemented."
            // text the previous bare Exception carried, so observable behavior is preserved
            // for callers catching Exception or inspecting the message.
            throw new NotImplementedException();
        }

        public virtual Result Call<TypeList, ArgList>(Label pc, Method method, bool tail, bool virt, TypeList extraVarargs, Dest dest, ArgList args, Data data)
            where TypeList : IIndexable<Type>
            where ArgList : IIndexable<Source>
        { return Default(pc, data); }

        public virtual Result Calli<TypeList, ArgList>(Label pc, Type returnType, TypeList argTypes, bool tail, bool isInstance, Dest dest, Source fp, ArgList args, Data data)
            where TypeList : IIndexable<Type>
            where ArgList : IIndexable<Source>
        { return Default(pc, data); }

        public virtual Result Ckfinite(Label pc, Dest dest, Source source, Data data) { return Default(pc, data); }
        public virtual Result Cpblk(Label pc, bool @volatile, Source destaddr, Source srcaddr, Source len, Data data) { return Default(pc, data); }
        public virtual Result Endfilter(Label pc, Source decision, Data data) { return Default(pc, data); }
        public virtual Result Endfinally(Label pc, Data data) { return Default(pc, data); }
        public virtual Result Initblk(Label pc, bool @volatile, Source destaddr, Source value, Source len, Data data) { return Default(pc, data); }
        public virtual Result Jmp(Label pc, Method method, Data data) { return Default(pc, data); }
        public virtual Result Ldarg(Label pc, Parameter argument, bool isOld, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldarga(Label pc, Parameter argument, bool isOld, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldconst(Label pc, object constant, Type type, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldnull(Label pc, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldftn(Label pc, Method method, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldind(Label pc, Type type, bool @volatile, Dest dest, Source ptr, Data data) { return Default(pc, data); }
        public virtual Result Ldloc(Label pc, Local local, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldloca(Label pc, Local local, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldstack(Label pc, int offset, Dest dest, Source source, bool isOld, Data data) { return Default(pc, data); }
        public virtual Result Ldstacka(Label pc, int offset, Dest dest, Source source, Type type, bool isOld, Data data) { return Default(pc, data); }
        public virtual Result Localloc(Label pc, Dest dest, Source size, Data data) { return Default(pc, data); }
        public virtual Result Pop(Label pc, Source source, Data data) { return Default(pc, data); }
        public virtual Result Return(Label pc, Source source, Data data) { return Default(pc, data); }
        public virtual Result Starg(Label pc, Parameter argument, Source source, Data data) { return Default(pc, data); }
        public virtual Result Stind(Label pc, Type type, bool @volatile, Source ptr, Source value, Data data) { return Default(pc, data); }
        public virtual Result Stloc(Label pc, Local local, Source source, Data data) { return Default(pc, data); }
        public virtual Result Switch(Label pc, Type type, IEnumerable<Pair<object, Label>> cases, Source value, Data data) { return Default(pc, data); }
        public virtual Result Unary(Label pc, UnaryOperator op, bool overflow, bool unsigned, Dest dest, Source source, Data data) { return Default(pc, data); }
        public virtual Result Box(Label pc, Type type, Dest dest, Source source, Data data) { return Default(pc, data); }

        public virtual Result ConstrainedCallvirt<TypeList, ArgList>(Label pc, Method method, bool tail, Type constraint, TypeList extraVarargs, Dest dest, ArgList args, Data data)
            where TypeList : IIndexable<Type>
            where ArgList : IIndexable<Source>
        { return Default(pc, data); }

        public virtual Result Castclass(Label pc, Type type, Dest dest, Source obj, Data data) { return Default(pc, data); }
        public virtual Result Cpobj(Label pc, Type type, Source destptr, Source srcptr, Data data) { return Default(pc, data); }
        public virtual Result Initobj(Label pc, Type type, Source ptr, Data data) { return Default(pc, data); }
        public virtual Result Isinst(Label pc, Type type, Dest dest, Source obj, Data data) { return Default(pc, data); }
        public virtual Result Ldelem(Label pc, Type type, Dest dest, Source array, Source index, Data data) { return Default(pc, data); }
        public virtual Result Ldelema(Label pc, Type type, bool @readonly, Dest dest, Source array, Source index, Data data) { return Default(pc, data); }
        public virtual Result Ldfld(Label pc, Field field, bool @volatile, Dest dest, Source obj, Data data) { return Default(pc, data); }
        public virtual Result Ldflda(Label pc, Field field, Dest dest, Source obj, Data data) { return Default(pc, data); }
        public virtual Result Ldlen(Label pc, Dest dest, Source array, Data data) { return Default(pc, data); }
        public virtual Result Ldsfld(Label pc, Field field, bool @volatile, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldsflda(Label pc, Field field, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldtypetoken(Label pc, Type type, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldfieldtoken(Label pc, Field type, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldmethodtoken(Label pc, Method type, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Ldvirtftn(Label pc, Method method, Dest dest, Source obj, Data data) { return Default(pc, data); }
        public virtual Result Mkrefany(Label pc, Type type, Dest dest, Source obj, Data data) { return Default(pc, data); }

        public virtual Result Newarray<ArgList>(Label pc, Type type, Dest dest, ArgList lengths, Data data)
            where ArgList : IIndexable<Source>
        { return Default(pc, data); }

        public virtual Result Newobj<ArgList>(Label pc, Method ctor, Dest dest, ArgList args, Data data)
            where ArgList : IIndexable<Source>
        { return Default(pc, data); }

        public virtual Result Refanytype(Label pc, Dest dest, Source source, Data data) { return Default(pc, data); }
        public virtual Result Refanyval(Label pc, Type type, Dest dest, Source source, Data data) { return Default(pc, data); }
        public virtual Result Rethrow(Label pc, Data data) { return Default(pc, data); }
        public virtual Result Sizeof(Label pc, Type type, Dest dest, Data data) { return Default(pc, data); }
        public virtual Result Stelem(Label pc, Type type, Source array, Source index, Source value, Data data) { return Default(pc, data); }
        public virtual Result Stfld(Label pc, Field field, bool @volatile, Source obj, Source value, Data data) { return Default(pc, data); }
        public virtual Result Stsfld(Label pc, Field field, bool @volatile, Source value, Data data) { return Default(pc, data); }
        public virtual Result Throw(Label pc, Source exn, Data data) { return Default(pc, data); }
        public virtual Result Unbox(Label pc, Type type, Dest dest, Source obj, Data data) { return Default(pc, data); }
        public virtual Result Unboxany(Label pc, Type type, Dest dest, Source obj, Data data) { return Default(pc, data); }

        #endregion

        #region IVisitSynthIL<Source,Data,Result> Members

        public virtual Result Assume(Label pc, string tag, Source condition, object provenance, Data data) { return Default(pc, data); }
        public virtual Result Nop(Label pc, Data data) { return Default(pc, data); }
        public virtual Result Assert(Label pc, string tag, Source condition, object provenance, Data data) { return Default(pc, data); }
        public virtual Result Entry(Label pc, Method method, Data data) { return Default(pc, data); }
        public virtual Result BeginOld(Label pc, Label matchingEnd, Data data) { return Default(pc, data); }
        public virtual Result EndOld(Label pc, Label matchingBegin, Type type, Dest dest, Source source, Data data) { return Default(pc, data); }
        public virtual Result Ldresult(Label pc, Type type, Dest dest, Source source, Data data) { return Default(pc, data); }

        #endregion
    }
}
// Python Tools for Visual Studio // Copyright(c) Microsoft Corporation // All rights reserved. // // Licensed under the Apache License, Version 2.0 (the License); you may not use // this file except in compliance with the License. You may obtain a copy of the // License at http://www.apache.org/licenses/LICENSE-2.0 // // THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS // OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY // IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, // MERCHANTABLITY OR NON-INFRINGEMENT. // // See the Apache Version 2.0 License for specific language governing // permissions and limitations under the License. using System; using System.Runtime.InteropServices; using System.Text; using System.Threading; using System.Threading.Tasks; using Microsoft.VisualStudio; using Microsoft.VisualStudio.Debugger.Interop; using Microsoft.VisualStudioTools; namespace Microsoft.PythonTools.Debugger.DebugEngine { // An implementation of IDebugProperty2 // This interface represents a stack frame property, a program document property, or some other property. // The property is usually the result of an expression evaluation. // // The sample engine only supports locals and parameters for functions that have symbols loaded. class AD7Property : IDebugProperty2, IDebugProperty3 { private PythonEvaluationResult _evalResult; private readonly AD7StackFrame _frame; private readonly bool _writable; public AD7Property(AD7StackFrame frame, PythonEvaluationResult obj, bool writable = false) { _evalResult = obj; _frame = frame; _writable = writable; } // Construct a DEBUG_PROPERTY_INFO representing this local or parameter. 
public DEBUG_PROPERTY_INFO ConstructDebugPropertyInfo(uint radix, enum_DEBUGPROP_INFO_FLAGS dwFields) { DEBUG_PROPERTY_INFO propertyInfo = new DEBUG_PROPERTY_INFO(); if ((dwFields & enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_FULLNAME) != 0) { propertyInfo.bstrFullName = _evalResult.Expression; propertyInfo.dwFields |= enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_FULLNAME; } if ((dwFields & enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_NAME) != 0) { if (String.IsNullOrEmpty(_evalResult.ChildName)) { propertyInfo.bstrName = _evalResult.Expression; } else { propertyInfo.bstrName = _evalResult.ChildName; } propertyInfo.dwFields |= enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_NAME; } if ((dwFields & enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_TYPE) != 0) { if (_evalResult.ExceptionText != null) { propertyInfo.bstrType = "<error>"; } else { propertyInfo.bstrType = _evalResult.TypeName; } propertyInfo.dwFields |= enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_TYPE; } if ((dwFields & enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_VALUE) != 0) { if (_evalResult.ExceptionText != null) { propertyInfo.bstrValue = "error: " + _evalResult.ExceptionText; } else if (radix != 16) { propertyInfo.bstrValue = _evalResult.StringRepr; } else { propertyInfo.bstrValue = _evalResult.HexRepr ?? 
_evalResult.StringRepr; } propertyInfo.dwFields |= enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_VALUE; } if ((dwFields & enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_ATTRIB) != 0) { if (!_writable) { propertyInfo.dwAttrib |= enum_DBG_ATTRIB_FLAGS.DBG_ATTRIB_VALUE_READONLY; } if (_evalResult.ExceptionText != null) { propertyInfo.dwAttrib |= enum_DBG_ATTRIB_FLAGS.DBG_ATTRIB_VALUE_ERROR; } if (_evalResult.IsExpandable) { propertyInfo.dwAttrib |= enum_DBG_ATTRIB_FLAGS.DBG_ATTRIB_OBJ_IS_EXPANDABLE; } if (_evalResult.Flags.HasFlag(PythonEvaluationResultFlags.MethodCall)) { propertyInfo.dwAttrib |= enum_DBG_ATTRIB_FLAGS.DBG_ATTRIB_METHOD; } if (_evalResult.Flags.HasFlag(PythonEvaluationResultFlags.SideEffects)) { propertyInfo.dwAttrib |= enum_DBG_ATTRIB_FLAGS.DBG_ATTRIB_VALUE_SIDE_EFFECT; } if (_evalResult.Flags.HasFlag(PythonEvaluationResultFlags.HasRawRepr)) { propertyInfo.dwAttrib |= enum_DBG_ATTRIB_FLAGS.DBG_ATTRIB_VALUE_RAW_STRING; } } // Always Provide the property so that we can access locals from the automation object. propertyInfo.pProperty = (IDebugProperty2)this; propertyInfo.dwFields |= enum_DEBUGPROP_INFO_FLAGS.DEBUGPROP_INFO_PROP; return propertyInfo; } #region IDebugProperty2 Members // Enumerates the children of a property. This provides support for dereferencing pointers, displaying members of an array, or fields of a class or struct. // The sample debugger only supports pointer dereferencing as children. This means there is only ever one child. 
public int EnumChildren(enum_DEBUGPROP_INFO_FLAGS dwFields, uint dwRadix, ref System.Guid guidFilter, enum_DBG_ATTRIB_FLAGS dwAttribFilter, string pszNameFilter, uint dwTimeout, out IEnumDebugPropertyInfo2 ppEnum) { ppEnum = null; var children = _evalResult.GetChildren((int)dwTimeout); if (children != null) { DEBUG_PROPERTY_INFO[] properties = new DEBUG_PROPERTY_INFO[children.Length]; for (int i = 0; i < children.Length; i++) { properties[i] = new AD7Property(_frame, children[i], true).ConstructDebugPropertyInfo(dwRadix, dwFields); } ppEnum = new AD7PropertyEnum(properties); return VSConstants.S_OK; } return VSConstants.S_FALSE; } // Returns the property that describes the most-derived property of a property // This is called to support object oriented languages. It allows the debug engine to return an IDebugProperty2 for the most-derived // object in a hierarchy. This engine does not support this. public int GetDerivedMostProperty(out IDebugProperty2 ppDerivedMost) { throw new Exception("The method or operation is not implemented."); } // This method exists for the purpose of retrieving information that does not lend itself to being retrieved by calling the IDebugProperty2::GetPropertyInfo // method. This includes information about custom viewers, managed type slots and other information. // The sample engine does not support this. public int GetExtendedInfo(ref System.Guid guidExtendedInfo, out object pExtendedInfo) { throw new Exception("The method or operation is not implemented."); } // Returns the memory bytes for a property value. public int GetMemoryBytes(out IDebugMemoryBytes2 ppMemoryBytes) { throw new Exception("The method or operation is not implemented."); } // Returns the memory context for a property value. public int GetMemoryContext(out IDebugMemoryContext2 ppMemory) { throw new Exception("The method or operation is not implemented."); } // Returns the parent of a property. // The sample engine does not support obtaining the parent of properties. 
public int GetParent(out IDebugProperty2 ppParent) { throw new Exception("The method or operation is not implemented."); } // Fills in a DEBUG_PROPERTY_INFO structure that describes a property. public int GetPropertyInfo(enum_DEBUGPROP_INFO_FLAGS dwFields, uint dwRadix, uint dwTimeout, IDebugReference2[] rgpArgs, uint dwArgCount, DEBUG_PROPERTY_INFO[] pPropertyInfo) { pPropertyInfo[0] = new DEBUG_PROPERTY_INFO(); rgpArgs = null; pPropertyInfo[0] = ConstructDebugPropertyInfo(dwRadix, dwFields); return VSConstants.S_OK; } // Return an IDebugReference2 for this property. An IDebugReference2 can be thought of as a type and an address. public int GetReference(out IDebugReference2 ppReference) { throw new Exception("The method or operation is not implemented."); } // Returns the size, in bytes, of the property value. public int GetSize(out uint pdwSize) { throw new Exception("The method or operation is not implemented."); } // The debugger will call this when the user tries to edit the property's values // We only accept setting values as strings public int SetValueAsReference(IDebugReference2[] rgpArgs, uint dwArgCount, IDebugReference2 pValue, uint dwTimeout) { throw new Exception("The method or operation is not implemented."); } // The debugger will call this when the user tries to edit the property's values in one of the debugger windows. 
// Assigns a new value to this property by executing "<expression> = <value>" in the
// debuggee, blocking the debugger thread until completion or timeout.
public int SetValueAsString(string pszValue, uint dwRadix, uint dwTimeout) {
    try {
        // Cancellation fires after dwTimeout milliseconds.
        var timeoutToken = new CancellationTokenSource((int)dwTimeout).Token;
        // NOTE(review): blocks synchronously on async work via GetAwaiter().GetResult();
        // acceptable here only because the AD7 contract is synchronous.
        // The assignment text is built by plain string concatenation of the expression
        // and the user-supplied value.
        _evalResult.Frame.ExecuteTextAsync(_evalResult.Expression + " = " + pszValue)
            .ContinueWith(t => t.Result, timeoutToken, TaskContinuationOptions.OnlyOnRanToCompletion, TaskScheduler.Current)
            .GetAwaiter().GetResult();
        return VSConstants.S_OK;
    } catch (OperationCanceledException) {
        // Covers the timeout (TaskCanceledException derives from OperationCanceledException).
        return VSConstants.E_FAIL;
    }
}

#endregion

#region IDebugProperty3 Members

public int CreateObjectID() {
    return VSConstants.E_NOTIMPL;
}

public int DestroyObjectID() {
    return VSConstants.E_NOTIMPL;
}

// No custom visualizers are provided.
public int GetCustomViewerCount(out uint pcelt) {
    pcelt = 0;
    return VSConstants.E_NOTIMPL;
}

public int GetCustomViewerList(uint celtSkip, uint celtRequested, DEBUG_CUSTOM_VIEWER[] rgViewers, out uint pceltFetched) {
    pceltFetched = 0;
    return VSConstants.E_NOTIMPL;
}

// Returns the length, in characters, of the raw string representation of this value.
// If evaluation raised an exception, the exception text's length is reported instead.
public int GetStringCharLength(out uint pLen) {
    // NOTE(review): synchronous wait on async evaluation; no timeout is applied here.
    var result = _evalResult.Frame.ExecuteTextAsync(_evalResult.Expression, PythonEvaluationResultReprKind.RawLen).GetAwaiter().GetResult();
    pLen = (uint)(result.ExceptionText != null ? result.ExceptionText.Length : result.Length);
    return VSConstants.S_OK;
}

// Copies up to buflen UTF-16 code units of the raw string value into rgString.
// Assumes buflen <= rgString.Length — TODO confirm the debugger always honors that.
public unsafe int GetStringChars(uint buflen, ushort[] rgString, out uint pceltFetched) {
    if (rgString.Length == 0) {
        pceltFetched = 0;
        return VSConstants.S_OK;
    }
    var result = _evalResult.Frame.ExecuteTextAsync(_evalResult.Expression, PythonEvaluationResultReprKind.Raw).GetAwaiter().GetResult();
    // Prefer the exception text when evaluation failed.
    var value = result.ExceptionText ?? result.StringRepr;
    pceltFetched = Math.Min(buflen, (uint)value.Length);
    // Copy the characters directly into the caller's ushort buffer; UTF-16 code units
    // are 2 bytes each, hence buflen * 2 for the destination byte count.
    fixed (char* src = value) {
        fixed (ushort* dst = rgString) {
            Encoding.Unicode.GetBytes(src, checked((int)pceltFetched), (byte*)dst, checked((int)buflen * 2));
        }
    }
    return VSConstants.S_OK;
}

public int SetValueAsStringWithError(string pszValue, uint dwRadix, uint dwTimeout, out string errorString) {
    errorString = null;
    return VSConstants.E_NOTIMPL;
}

#endregion
}
}
/* Copyright (c) 2014, Lars Brubaker All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. The views and conclusions contained in the software and documentation are those of the authors and should not be interpreted as representing official policies, either expressed or implied, of the FreeBSD Project. 
*/

using System;
using MatterHackers.Agg.Font;
using MatterHackers.VectorMath;

namespace MatterHackers.Agg.UI
{
    /// <summary>
    /// A scrollable text-entry widget. All text state and editing behavior is delegated to
    /// a child <see cref="InternalTextEditWidget"/>; this class adds scrolling, focus
    /// handling and software-keyboard notifications around it.
    /// </summary>
    public class TextEditWidget : ScrollableWidget
    {
        /// <summary>The child widget that owns the text content and performs the actual editing.</summary>
        public InternalTextEditWidget InternalTextEditWidget { get; protected set; }

        /// <summary>Raised when Enter is pressed in the edit field.</summary>
        public event KeyEventHandler EnterPressed;

        /// <summary>Raised when an edit has been completed.</summary>
        public event EventHandler EditComplete;

        // Backing field for BorderWidth; mirrored into the internal widget on set.
        private int borderWidth = 0;

        // Static hooks that let the host application show/hide an on-screen keyboard
        // when any TextEditWidget gains or loses focus.
        public static event EventHandler ShowSoftwareKeyboard;

        public static event EventHandler HideSoftwareKeyboard;

        public static event EventHandler KeyboardCollapsed;

        // The properties below simply forward to the internal edit widget.

        public Color TextColor
        {
            get { return InternalTextEditWidget.TextColor; }
            set { InternalTextEditWidget.TextColor = value; }
        }

        public override int TabIndex
        {
            get { return InternalTextEditWidget.TabIndex; }
            set { InternalTextEditWidget.TabIndex = value; }
        }

        public Color CursorColor
        {
            get { return InternalTextEditWidget.CursorColor; }
            set { InternalTextEditWidget.CursorColor = value; }
        }

        public new bool DoubleBuffer
        {
            get { return InternalTextEditWidget.DoubleBuffer; }
            set { InternalTextEditWidget.DoubleBuffer = value; }
        }

        /// <summary>Discards the undo/redo history of the internal edit widget.</summary>
        public void ClearUndoHistory()
        {
            InternalTextEditWidget.ClearUndoHistory();
        }

        public Color HighlightColor
        {
            get { return InternalTextEditWidget.HighlightColor; }
            set { InternalTextEditWidget.HighlightColor = value; }
        }

        public override Color BorderColor
        {
            get { return InternalTextEditWidget.BorderColor; }
            set { InternalTextEditWidget.BorderColor = value; }
        }

        public int BorderWidth
        {
            get { return borderWidth; }
            set
            {
                this.borderWidth = value;
                InternalTextEditWidget.BorderWidth = this.borderWidth;
            }
        }

        public bool Selecting
        {
            get { return InternalTextEditWidget.Selecting; }
            set { InternalTextEditWidget.Selecting = value; }
        }

        /// <summary>
        /// Whether the widget accepts multiple lines of text. Setting this also
        /// reconfigures scrolling: multiline editors auto-scroll with an on-demand
        /// vertical scroll bar, single-line editors never show one.
        /// </summary>
        public bool Multiline
        {
            get { return InternalTextEditWidget.Multiline; }
            set
            {
                InternalTextEditWidget.Multiline = value;
                if (Multiline == true)
                {
                    AutoScroll = true;
                    VerticalScrollBar.Show = ScrollBar.ShowState.WhenRequired;
                }
                else
                {
                    AutoScroll = false;
                    VerticalScrollBar.Show = ScrollBar.ShowState.Never;
                }
            }
        }

        public int SelectionIndexToStartBefore
        {
            get { return InternalTextEditWidget.SelectionIndexToStartBefore; }
            set { InternalTextEditWidget.SelectionIndexToStartBefore = value; }
        }

        public int CharIndexToInsertBefore
        {
            get { return InternalTextEditWidget.CharIndexToInsertBefore; }
            set { InternalTextEditWidget.CharIndexToInsertBefore = value; }
        }

        public override string Text
        {
            get { return InternalTextEditWidget.Text; }
            set { InternalTextEditWidget.Text = value; }
        }

        /// <summary>The currently selected portion of the text (read-only).</summary>
        public string Selection
        {
            get { return InternalTextEditWidget.Selection; }
        }

        // Parameterless constructor for internal/derived use; caller must create and
        // hook up the internal widget itself.
        internal TextEditWidget()
        {
        }

        /// <summary>
        /// Creates a text edit widget. A pixelWidth/pixelHeight of 0 means "size to the
        /// internal edit widget's natural size".
        /// </summary>
        public TextEditWidget(string text = "", double x = 0, double y = 0, double pointSize = 12, double pixelWidth = 0, double pixelHeight = 0, bool multiLine = false, int tabIndex = 0, TypeFace typeFace = null)
        {
            InternalTextEditWidget = new InternalTextEditWidget(text, pointSize, multiLine, tabIndex, typeFace: typeFace);
            HookUpToInternalWidget(pixelWidth, pixelHeight);

            OriginRelativeParent = new Vector2(x, y);

            BackgroundColor = Color.White;

            Multiline = multiLine;
        }

        // The internal edit widget is the only permitted child; reject all others.
        public override GuiWidget AddChild(GuiWidget child, int indexInChildrenList = -1)
        {
            throw new Exception("You cannot add children to a TextEdit widget.");
        }

        /// <summary>
        /// Wires this container to its internal edit widget: forwards events, sets the
        /// cursor, sizes the widget and adds the internal widget as the single child.
        /// </summary>
        protected void HookUpToInternalWidget(double pixelWidth, double pixelHeight)
        {
            Cursor = Cursors.IBeam;

            InternalTextEditWidget.EditComplete += new EventHandler(InternalTextEditWidget_EditComplete);
            InternalTextEditWidget.EnterPressed += new KeyEventHandler(InternalTextEditWidget_EnterPressed);
            // Zero means "use the internal widget's natural size".
            if (pixelWidth == 0)
            {
                pixelWidth = InternalTextEditWidget.Width;
            }
            if (pixelHeight == 0)
            {
                pixelHeight = InternalTextEditWidget.Height;
            }
            this.LocalBounds = new RectangleDouble(0, 0, pixelWidth, pixelHeight);
            InternalTextEditWidget.InsertBarPositionChanged += new EventHandler(InternalTextEditWidget_InsertBarPositionChanged);
            InternalTextEditWidget.FocusChanged += new EventHandler(InternalTextEditWidget_FocusChanged);
            InternalTextEditWidget.TextChanged += new EventHandler(InternalTextEditWidget_TextChanged);
            base.AddChild(InternalTextEditWidget);
        }

        private void InternalTextEditWidget_TextChanged(object sender, EventArgs e)
        {
            OnTextChanged(e);
        }

        /// <summary>
        /// Make this widget the focus of keyboard input.
        /// </summary>
        public override void Focus()
        {
#if DEBUG
            if (Parent == null)
            {
                throw new Exception("Don't call Focus() until you have a Parent.\nCalling focus without a parent will not result in the focus chain pointing to the widget, so it will not work.");
            }
#endif
            InternalTextEditWidget.Focus();
        }

        // Show the software keyboard when focus enters, hide it when focus leaves.
        private void InternalTextEditWidget_FocusChanged(object sender, EventArgs e)
        {
            if (ContainsFocus)
            {
                if (ShowSoftwareKeyboard != null)
                {
                    // Defer to the UI idle loop so the keyboard request runs outside
                    // of the focus-change callback.
                    UiThread.RunOnIdle(() =>
                    {
                        ShowSoftwareKeyboard(this, null);
                    });
                }
            }
            else
            {
                OnHideSoftwareKeyboard();
            }

            OnFocusChanged(e);
        }

        public override void OnClosed(EventArgs e)
        {
            // Don't leave a software keyboard up for a widget that no longer exists.
            if (Focused)
            {
                OnHideSoftwareKeyboard();
            }

            base.OnClosed(e);
        }

        private void OnHideSoftwareKeyboard()
        {
            UiThread.RunOnIdle(() =>
            {
                HideSoftwareKeyboard?.Invoke(this, null);
                KeyboardCollapsed?.Invoke(this, null);
            });
        }

        /// <summary>Lets the host report that the software keyboard was dismissed externally.</summary>
        public static void OnKeyboardCollapsed()
        {
            KeyboardCollapsed?.Invoke(null, null);
        }

        private void InternalTextEditWidget_EnterPressed(object sender, KeyEventArgs keyEvent)
        {
            EnterPressed?.Invoke(this, keyEvent);
        }

        public override void OnMouseDown(MouseEventArgs mouseEvent)
        {
            ScrollBar scrollBar = this.VerticalScrollBar;
            double scrollBarX = mouseEvent.X;
            double scrollBarY = mouseEvent.Y;
            scrollBar.ParentToChildTransform.inverse_transform(ref scrollBarX, ref scrollBarY);
            bool clickIsOnScrollBar = scrollBar.Visible && scrollBar.PositionWithinLocalBounds(scrollBarX, scrollBarY);
            if (!clickIsOnScrollBar)
            {
                // Clamp the click position into the internal edit widget's bounds so a
                // click anywhere in the scroll area lands on the text and gives it focus.
                double scrollingAreaX = mouseEvent.X;
                double scrollingAreaY = mouseEvent.Y;
                ScrollArea.ParentToChildTransform.inverse_transform(ref scrollingAreaX, ref scrollingAreaY);

                if (scrollingAreaX > InternalTextEditWidget.LocalBounds.Right)
                {
                    scrollingAreaX = InternalTextEditWidget.LocalBounds.Right - 1;
                }
                else if (scrollingAreaX < InternalTextEditWidget.LocalBounds.Left)
                {
                    scrollingAreaX = InternalTextEditWidget.LocalBounds.Left;
                }

                if (scrollingAreaY > InternalTextEditWidget.LocalBounds.Top)
                {
                    scrollingAreaY = InternalTextEditWidget.LocalBounds.Top - 1;
                }
                else if (scrollingAreaY < InternalTextEditWidget.LocalBounds.Bottom)
                {
                    scrollingAreaY = InternalTextEditWidget.LocalBounds.Bottom;
                }

                ScrollArea.ParentToChildTransform.transform(ref scrollingAreaX, ref scrollingAreaY);
                mouseEvent.X = scrollingAreaX;
                mouseEvent.Y = scrollingAreaY;
            }

            base.OnMouseDown(mouseEvent);

            // Focus should land on the internal child, not on this container itself;
            // this container holding focus directly means the forwarding above failed.
            if (Focused)
            {
                throw new Exception("We should have moved the mouse so that it gave selection to the internal text edit widget.");
            }
        }

        private void InternalTextEditWidget_EditComplete(object sender, EventArgs e)
        {
            EditComplete?.Invoke(this, null);
        }

        public TypeFacePrinter Printer
        {
            get { return InternalTextEditWidget.Printer; }
        }

        // Scroll just enough to keep the insertion bar (caret) visible.
        private void InternalTextEditWidget_InsertBarPositionChanged(object sender, EventArgs e)
        {
            double fontHeight = Printer.TypeFaceStyle.EmSizeInPixels;
            Vector2 barPosition = InternalTextEditWidget.InsertBarPosition;
            // move the minimum amount required to keep the bar in view
            Vector2 currentOffsetInView = barPosition + TopLeftOffset;
            Vector2 requiredOffet = Vector2.Zero;

            // Horizontal: keep the caret within [0, Width - 2].
            if (currentOffsetInView.X > Width - 2)
            {
                requiredOffet.X = currentOffsetInView.X - Width + 2;
            }
            else if (currentOffsetInView.X < 0)
            {
                requiredOffet.X = currentOffsetInView.X;
            }

            // Vertical: keep the caret's line within the visible height.
            if (currentOffsetInView.Y <= -(Height - fontHeight))
            {
                requiredOffet.Y = -(currentOffsetInView.Y + Height) + fontHeight;
            }
            else if (currentOffsetInView.Y > 0)
            {
                requiredOffet.Y = -currentOffsetInView.Y;
            }

            TopLeftOffset = new VectorMath.Vector2(TopLeftOffset.X - requiredOffet.X, TopLeftOffset.Y + requiredOffet.Y);
        }

        public bool SelectAllOnFocus
        {
            get { return InternalTextEditWidget.SelectAllOnFocus; }
            set { InternalTextEditWidget.SelectAllOnFocus = value; }
        }

        public bool ReadOnly
        {
            get => InternalTextEditWidget.ReadOnly;
            set => InternalTextEditWidget.ReadOnly = value;
        }
    }
}
// Copyright (c) 2012-2013 Rotorz Limited. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. using UnityEngine; using UnityEditor; using System.Collections.Generic; using InControl.ReorderableList.Internal; namespace InControl.ReorderableList { /// <summary> /// Utility class for drawing reorderable lists. /// </summary> public static class ReorderableListGUI { /// <summary> /// Default list item height is 18 pixels. /// </summary> public const float DefaultItemHeight = 18; /// <summary> /// Gets or sets zero-based index of last item which was changed. A value of -1 /// indicates that no item was changed by list. /// </summary> /// <remarks> /// <para>This property should not be set when items are added or removed.</para> /// </remarks> public static int indexOfChangedItem { get; internal set; } /// <summary> /// Gets zero-based index of list item which is currently being drawn; /// or a value of -1 if no item is currently being drawn. /// </summary> public static int currentItemIndex { get { return ReorderableListControl.currentItemIndex; } } #region Basic Item Drawers /// <summary> /// Default list item drawer implementation. /// </summary> /// <remarks> /// <para>Always presents the label "Item drawer not implemented.".</para> /// </remarks> /// <param name="position">Position to draw list item control(s).</param> /// <param name="item">Value of list item.</param> /// <returns> /// Unmodified value of list item. /// </returns> /// <typeparam name="T">Type of list item.</typeparam> public static T DefaultItemDrawer<T>( Rect position, T item ) { GUI.Label( position, "Item drawer not implemented." ); return item; } /// <summary> /// Draws text field allowing list items to be edited. 
/// </summary> /// <remarks> /// <para>Null values are automatically changed to empty strings since null /// values cannot be edited using a text field.</para> /// <para>Value of <c>GUI.changed</c> is set to <c>true</c> if value of item /// is modified.</para> /// </remarks> /// <param name="position">Position to draw list item control(s).</param> /// <param name="item">Value of list item.</param> /// <returns> /// Modified value of list item. /// </returns> public static string TextFieldItemDrawer( Rect position, string item ) { if (item == null) { item = ""; GUI.changed = true; } return EditorGUI.TextField( position, item ); } #endregion /// <summary> /// Gets the default list control implementation. /// </summary> private static ReorderableListControl defaultListControl { get; set; } static ReorderableListGUI() { InitStyles(); defaultListControl = new ReorderableListControl(); // Duplicate default styles to prevent user scripts from interferring with // the default list control instance. defaultListControl.containerStyle = new GUIStyle( defaultContainerStyle ); defaultListControl.addButtonStyle = new GUIStyle( defaultAddButtonStyle ); defaultListControl.removeButtonStyle = new GUIStyle( defaultRemoveButtonStyle ); indexOfChangedItem = -1; } #region Custom Styles /// <summary> /// Gets default style for title header. /// </summary> public static GUIStyle defaultTitleStyle { get; private set; } /// <summary> /// Gets default style for background of list control. /// </summary> public static GUIStyle defaultContainerStyle { get; private set; } /// <summary> /// Gets default style for add item button. /// </summary> public static GUIStyle defaultAddButtonStyle { get; private set; } /// <summary> /// Gets default style for remove item button. 
/// </summary> public static GUIStyle defaultRemoveButtonStyle { get; private set; } private static void InitStyles() { defaultTitleStyle = new GUIStyle(); defaultTitleStyle.border = new RectOffset( 2, 2, 2, 1 ); defaultTitleStyle.margin = new RectOffset( 5, 5, 5, 0 ); defaultTitleStyle.padding = new RectOffset( 5, 5, 0, 0 ); defaultTitleStyle.alignment = TextAnchor.MiddleLeft; defaultTitleStyle.normal.background = ReorderableListResources.texTitleBackground; defaultTitleStyle.normal.textColor = EditorGUIUtility.isProSkin ? new Color( 0.8f, 0.8f, 0.8f ) : new Color( 0.2f, 0.2f, 0.2f ); defaultContainerStyle = new GUIStyle(); defaultContainerStyle.border = new RectOffset( 2, 2, 1, 2 ); defaultContainerStyle.margin = new RectOffset( 5, 5, 5, 5 ); defaultContainerStyle.padding = new RectOffset( 1, 1, 2, 2 ); defaultContainerStyle.normal.background = ReorderableListResources.texContainerBackground; defaultAddButtonStyle = new GUIStyle(); defaultAddButtonStyle.fixedWidth = 30; defaultAddButtonStyle.fixedHeight = 16; defaultAddButtonStyle.normal.background = ReorderableListResources.texAddButton; defaultAddButtonStyle.active.background = ReorderableListResources.texAddButtonActive; defaultRemoveButtonStyle = new GUIStyle(); defaultRemoveButtonStyle.fixedWidth = 27; defaultRemoveButtonStyle.active.background = ReorderableListResources.CreatePixelTexture( "Dark Pixel (List GUI)", new Color32( 18, 18, 18, 255 ) ); defaultRemoveButtonStyle.imagePosition = ImagePosition.ImageOnly; defaultRemoveButtonStyle.alignment = TextAnchor.MiddleCenter; } #endregion private static GUIContent s_Temp = new GUIContent(); #region Title Control /// <summary> /// Draw title control for list field. 
/// </summary> /// <remarks> /// <para>When needed, should be shown immediately before list field.</para> /// </remarks> /// <example> /// <code language="csharp"><![CDATA[ /// ReorderableListGUI.Title(titleContent); /// ReorderableListGUI.ListField(list, DynamicListGU.TextFieldItemDrawer); /// ]]></code> /// <code language="unityscript"><![CDATA[ /// ReorderableListGUI.Title(titleContent); /// ReorderableListGUI.ListField(list, DynamicListGU.TextFieldItemDrawer); /// ]]></code> /// </example> /// <param name="title">Content for title control.</param> public static void Title( GUIContent title ) { Rect position = GUILayoutUtility.GetRect( title, defaultTitleStyle ); position.height += 6; Title( position, title ); } /// <summary> /// Draw title control for list field. /// </summary> /// <remarks> /// <para>When needed, should be shown immediately before list field.</para> /// </remarks> /// <example> /// <code language="csharp"><![CDATA[ /// ReorderableListGUI.Title("Your Title"); /// ReorderableListGUI.ListField(list, DynamicListGU.TextFieldItemDrawer); /// ]]></code> /// <code language="unityscript"><![CDATA[ /// ReorderableListGUI.Title('Your Title'); /// ReorderableListGUI.ListField(list, DynamicListGU.TextFieldItemDrawer); /// ]]></code> /// </example> /// <param name="title">Text for title control.</param> public static void Title( string title ) { s_Temp.text = title; Title( s_Temp ); } /// <summary> /// Draw title control for list field with absolute positioning. /// </summary> /// <param name="position">Position of control.</param> /// <param name="title">Content for title control.</param> public static void Title( Rect position, GUIContent title ) { if (Event.current.type == EventType.Repaint) defaultTitleStyle.Draw( position, title, false, false, false, false ); } /// <summary> /// Draw title control for list field with absolute positioning. 
/// </summary> /// <param name="position">Position of control.</param> /// <param name="text">Text for title control.</param> public static void Title( Rect position, string text ) { s_Temp.text = text; Title( position, s_Temp ); } #endregion #region List<T> Control /// <summary> /// Draw list field control. /// </summary> /// <param name="list">The list which can be reordered.</param> /// <param name="drawItem">Callback to draw list item.</param> /// <param name="drawEmpty">Callback to draw custom content for empty list (optional).</param> /// <param name="itemHeight">Height of a single list item.</param> /// <param name="flags">Optional flags to pass into list field.</param> /// <typeparam name="T">Type of list item.</typeparam> private static void DoListField<T>( IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, ReorderableListControl.DrawEmpty drawEmpty, float itemHeight, ReorderableListFlags flags ) { var adaptor = new GenericListAdaptor<T>( list, drawItem, itemHeight ); ReorderableListControl.DrawControlFromState( adaptor, drawEmpty, flags ); } /// <summary> /// Draw list field control with absolute positioning. 
/// </summary> /// <param name="position">Position of control.</param> /// <param name="list">The list which can be reordered.</param> /// <param name="drawItem">Callback to draw list item.</param> /// <param name="drawEmpty">Callback to draw custom content for empty list (optional).</param> /// <param name="itemHeight">Height of a single list item.</param> /// <param name="flags">Optional flags to pass into list field.</param> /// <typeparam name="T">Type of list item.</typeparam> private static void DoListFieldAbsolute<T>( Rect position, IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, ReorderableListControl.DrawEmptyAbsolute drawEmpty, float itemHeight, ReorderableListFlags flags ) { var adaptor = new GenericListAdaptor<T>( list, drawItem, itemHeight ); ReorderableListControl.DrawControlFromState( position, adaptor, drawEmpty, flags ); } /// <inheritdoc cref="DoListField{T}(IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmpty, float, ReorderableListFlags)"/> public static void ListField<T>( IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, ReorderableListControl.DrawEmpty drawEmpty, float itemHeight, ReorderableListFlags flags ) { DoListField<T>( list, drawItem, drawEmpty, itemHeight, flags ); } /// <inheritdoc cref="DoListFieldAbsolute{T}(Rect, IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmptyAbsolute, float, ReorderableListFlags)"/> public static void ListFieldAbsolute<T>( Rect position, IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, ReorderableListControl.DrawEmptyAbsolute drawEmpty, float itemHeight, ReorderableListFlags flags ) { DoListFieldAbsolute<T>( position, list, drawItem, drawEmpty, itemHeight, flags ); } /// <inheritdoc cref="DoListField{T}(IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmpty, float, ReorderableListFlags)"/> public static void ListField<T>( IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, 
ReorderableListControl.DrawEmpty drawEmpty, float itemHeight ) { DoListField<T>( list, drawItem, drawEmpty, itemHeight, 0 ); } /// <inheritdoc cref="DoListFieldAbsolute{T}(Rect, IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmptyAbsolute, float, ReorderableListFlags)"/> public static void ListFieldAbsolute<T>( Rect position, IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, ReorderableListControl.DrawEmptyAbsolute drawEmpty, float itemHeight ) { DoListFieldAbsolute<T>( position, list, drawItem, drawEmpty, itemHeight, 0 ); } /// <inheritdoc cref="DoListField{T}(IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmpty, float, ReorderableListFlags)"/> public static void ListField<T>( IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, ReorderableListControl.DrawEmpty drawEmpty, ReorderableListFlags flags ) { DoListField<T>( list, drawItem, drawEmpty, DefaultItemHeight, flags ); } /// <inheritdoc cref="DoListFieldAbsolute{T}(Rect, IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmptyAbsolute, float, ReorderableListFlags)"/> public static void ListFieldAbsolute<T>( Rect position, IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, ReorderableListControl.DrawEmptyAbsolute drawEmpty, ReorderableListFlags flags ) { DoListFieldAbsolute<T>( position, list, drawItem, drawEmpty, DefaultItemHeight, flags ); } /// <inheritdoc cref="DoListField{T}(IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmpty, float, ReorderableListFlags)"/> public static void ListField<T>( IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, ReorderableListControl.DrawEmpty drawEmpty ) { DoListField<T>( list, drawItem, drawEmpty, DefaultItemHeight, 0 ); } /// <inheritdoc cref="DoListFieldAbsolute{T}(Rect, IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmptyAbsolute, float, ReorderableListFlags)"/> public static void 
ListFieldAbsolute<T>( Rect position, IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, ReorderableListControl.DrawEmptyAbsolute drawEmpty ) { DoListFieldAbsolute<T>( position, list, drawItem, drawEmpty, DefaultItemHeight, 0 ); } /// <inheritdoc cref="DoListField{T}(IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmpty, float, ReorderableListFlags)"/> public static void ListField<T>( IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, float itemHeight, ReorderableListFlags flags ) { DoListField<T>( list, drawItem, null, itemHeight, flags ); } /// <inheritdoc cref="DoListFieldAbsolute{T}(Rect, IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmptyAbsolute, float, ReorderableListFlags)"/> public static void ListFieldAbsolute<T>( Rect position, IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, float itemHeight, ReorderableListFlags flags ) { DoListFieldAbsolute<T>( position, list, drawItem, null, itemHeight, flags ); } /// <inheritdoc cref="DoListField{T}(IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmpty, float, ReorderableListFlags)"/> public static void ListField<T>( IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, float itemHeight ) { DoListField<T>( list, drawItem, null, itemHeight, 0 ); } /// <inheritdoc cref="DoListFieldAbsolute{T}(Rect, IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmptyAbsolute, float, ReorderableListFlags)"/> public static void ListFieldAbsolute<T>( Rect position, IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, float itemHeight ) { DoListFieldAbsolute<T>( position, list, drawItem, null, itemHeight, 0 ); } /// <inheritdoc cref="DoListField{T}(IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmpty, float, ReorderableListFlags)"/> public static void ListField<T>( IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, ReorderableListFlags flags 
) { DoListField<T>( list, drawItem, null, DefaultItemHeight, flags ); } /// <inheritdoc cref="DoListFieldAbsolute{T}(Rect, IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmptyAbsolute, float, ReorderableListFlags)"/> public static void ListFieldAbsolute<T>( Rect position, IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem, ReorderableListFlags flags ) { DoListFieldAbsolute<T>( position, list, drawItem, null, DefaultItemHeight, flags ); } /// <inheritdoc cref="DoListField{T}(IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmpty, float, ReorderableListFlags)"/> public static void ListField<T>( IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem ) { DoListField<T>( list, drawItem, null, DefaultItemHeight, 0 ); } /// <inheritdoc cref="DoListFieldAbsolute{T}(Rect, IList{T}, ReorderableListControl.ItemDrawer{T}, ReorderableListControl.DrawEmptyAbsolute, float, ReorderableListFlags)"/> public static void ListFieldAbsolute<T>( Rect position, IList<T> list, ReorderableListControl.ItemDrawer<T> drawItem ) { DoListFieldAbsolute<T>( position, list, drawItem, null, DefaultItemHeight, 0 ); } /// <summary> /// Calculate height of list field for absolute positioning. /// </summary> /// <param name="itemCount">Count of items in list.</param> /// <param name="itemHeight">Fixed height of list item.</param> /// <param name="flags">Optional flags to pass into list field.</param> /// <returns> /// Required list height in pixels. /// </returns> public static float CalculateListFieldHeight( int itemCount, float itemHeight, ReorderableListFlags flags ) { // We need to push/pop flags so that nested controls are properly calculated. 
var restoreFlags = defaultListControl.flags; try { defaultListControl.flags = flags; return defaultListControl.CalculateListHeight( itemCount, itemHeight ); } finally { defaultListControl.flags = restoreFlags; } } /// <inheritdoc cref="CalculateListFieldHeight(int, float, ReorderableListFlags)"/> public static float CalculateListFieldHeight( int itemCount, ReorderableListFlags flags ) { return CalculateListFieldHeight( itemCount, DefaultItemHeight, flags ); } /// <inheritdoc cref="CalculateListFieldHeight(int, float, ReorderableListFlags)"/> public static float CalculateListFieldHeight( int itemCount, float itemHeight ) { return CalculateListFieldHeight( itemCount, itemHeight, 0 ); } /// <inheritdoc cref="CalculateListFieldHeight(int, float, ReorderableListFlags)"/> public static float CalculateListFieldHeight( int itemCount ) { return CalculateListFieldHeight( itemCount, DefaultItemHeight, 0 ); } #endregion #region SerializedProperty Control /// <summary> /// Draw list field control for serializable property array. /// </summary> /// <param name="arrayProperty">Serializable property.</param> /// <param name="fixedItemHeight">Use fixed height for items rather than <see cref="UnityEditor.EditorGUI.GetPropertyHeight(SerializedProperty)"/>.</param> /// <param name="drawEmpty">Callback to draw custom content for empty list (optional).</param> /// <param name="flags">Optional flags to pass into list field.</param> private static void DoListField( SerializedProperty arrayProperty, float fixedItemHeight, ReorderableListControl.DrawEmpty drawEmpty, ReorderableListFlags flags ) { var adaptor = new SerializedPropertyAdaptor( arrayProperty, fixedItemHeight ); ReorderableListControl.DrawControlFromState( adaptor, drawEmpty, flags ); } /// <summary> /// Draw list field control for serializable property array. 
/// </summary>
/// <param name="position">Position of control.</param>
/// <param name="arrayProperty">Serializable property.</param>
/// <param name="fixedItemHeight">Use fixed height for items rather than <see cref="UnityEditor.EditorGUI.GetPropertyHeight(SerializedProperty)"/>.</param>
/// <param name="drawEmpty">Callback to draw custom content for empty list (optional).</param>
/// <param name="flags">Optional flags to pass into list field.</param>
private static void DoListFieldAbsolute(Rect position, SerializedProperty arrayProperty, float fixedItemHeight, ReorderableListControl.DrawEmptyAbsolute drawEmpty, ReorderableListFlags flags) {
    // Wrap the serialized property in an adaptor so the shared control implementation can drive it.
    var propertyAdaptor = new SerializedPropertyAdaptor(arrayProperty, fixedItemHeight);
    ReorderableListControl.DrawControlFromState(position, propertyAdaptor, drawEmpty, flags);
}

/// <inheritdoc cref="DoListField(SerializedProperty, float, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(SerializedProperty arrayProperty, ReorderableListControl.DrawEmpty drawEmpty, ReorderableListFlags flags) {
    DoListField(arrayProperty, 0, drawEmpty, flags);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, SerializedProperty, float, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, SerializedProperty arrayProperty, ReorderableListControl.DrawEmptyAbsolute drawEmpty, ReorderableListFlags flags) {
    DoListFieldAbsolute(position, arrayProperty, 0, drawEmpty, flags);
}

/// <inheritdoc cref="DoListField(SerializedProperty, float, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(SerializedProperty arrayProperty, ReorderableListControl.DrawEmpty drawEmpty) {
    DoListField(arrayProperty, 0, drawEmpty, 0);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, SerializedProperty, float, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, SerializedProperty arrayProperty, ReorderableListControl.DrawEmptyAbsolute drawEmpty) {
    DoListFieldAbsolute(position, arrayProperty, 0, drawEmpty, 0);
}

/// <inheritdoc cref="DoListField(SerializedProperty, float, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(SerializedProperty arrayProperty, ReorderableListFlags flags) {
    DoListField(arrayProperty, 0, null, flags);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, SerializedProperty, float, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, SerializedProperty arrayProperty, ReorderableListFlags flags) {
    DoListFieldAbsolute(position, arrayProperty, 0, null, flags);
}

/// <inheritdoc cref="DoListField(SerializedProperty, float, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(SerializedProperty arrayProperty) {
    DoListField(arrayProperty, 0, null, 0);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, SerializedProperty, float, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, SerializedProperty arrayProperty) {
    DoListFieldAbsolute(position, arrayProperty, 0, null, 0);
}

/// <summary>
/// Calculate height of list field for absolute positioning.
/// </summary>
/// <param name="arrayProperty">Serializable property.</param>
/// <param name="flags">Optional flags to pass into list field.</param>
/// <returns>
/// Required list height in pixels.
/// </returns>
public static float CalculateListFieldHeight(SerializedProperty arrayProperty, ReorderableListFlags flags) {
    // Push/pop the flags on the shared control so that nested list controls are measured correctly.
    var previousFlags = defaultListControl.flags;
    defaultListControl.flags = flags;
    try {
        return defaultListControl.CalculateListHeight(new SerializedPropertyAdaptor(arrayProperty));
    }
    finally {
        defaultListControl.flags = previousFlags;
    }
}

/// <inheritdoc cref="CalculateListFieldHeight(SerializedProperty, ReorderableListFlags)"/>
public static float CalculateListFieldHeight(SerializedProperty arrayProperty) {
    return CalculateListFieldHeight(arrayProperty, 0);
}

#endregion

#region SerializedProperty Control (Fixed Item Height)

/// <inheritdoc cref="DoListField(SerializedProperty, float, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(SerializedProperty arrayProperty, float fixedItemHeight, ReorderableListControl.DrawEmpty drawEmpty, ReorderableListFlags flags) {
    DoListField(arrayProperty, fixedItemHeight, drawEmpty, flags);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, SerializedProperty, float, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, SerializedProperty arrayProperty, float fixedItemHeight, ReorderableListControl.DrawEmptyAbsolute drawEmpty, ReorderableListFlags flags) {
    DoListFieldAbsolute(position, arrayProperty, fixedItemHeight, drawEmpty, flags);
}

/// <inheritdoc cref="DoListField(SerializedProperty, float, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(SerializedProperty arrayProperty, float fixedItemHeight, ReorderableListControl.DrawEmpty drawEmpty) {
    DoListField(arrayProperty, fixedItemHeight, drawEmpty, 0);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, SerializedProperty, float, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, SerializedProperty arrayProperty, float fixedItemHeight, ReorderableListControl.DrawEmptyAbsolute drawEmpty) {
    DoListFieldAbsolute(position, arrayProperty, fixedItemHeight, drawEmpty, 0);
}

/// <inheritdoc cref="DoListField(SerializedProperty, float, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(SerializedProperty arrayProperty, float fixedItemHeight, ReorderableListFlags flags) {
    DoListField(arrayProperty, fixedItemHeight, null, flags);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, SerializedProperty, float, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, SerializedProperty arrayProperty, float fixedItemHeight, ReorderableListFlags flags) {
    DoListFieldAbsolute(position, arrayProperty, fixedItemHeight, null, flags);
}

/// <inheritdoc cref="DoListField(SerializedProperty, float, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(SerializedProperty arrayProperty, float fixedItemHeight) {
    DoListField(arrayProperty, fixedItemHeight, null, 0);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, SerializedProperty, float, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, SerializedProperty arrayProperty, float fixedItemHeight) {
    DoListFieldAbsolute(position, arrayProperty, fixedItemHeight, null, 0);
}

#endregion

#region Adaptor Control

/// <summary>
/// Draw list field control for adapted collection.
/// </summary>
/// <param name="adaptor">Reorderable list adaptor.</param>
/// <param name="drawEmpty">Callback to draw custom content for empty list (optional).</param>
/// <param name="flags">Optional flags to pass into list field.</param>
private static void DoListField(IReorderableListAdaptor adaptor, ReorderableListControl.DrawEmpty drawEmpty, ReorderableListFlags flags = 0) {
    ReorderableListControl.DrawControlFromState(adaptor, drawEmpty, flags);
}

/// <summary>
/// Draw list field control for adapted collection.
/// </summary>
/// <param name="position">Position of control.</param>
/// <param name="adaptor">Reorderable list adaptor.</param>
/// <param name="drawEmpty">Callback to draw custom content for empty list (optional).</param>
/// <param name="flags">Optional flags to pass into list field.</param>
private static void DoListFieldAbsolute(Rect position, IReorderableListAdaptor adaptor, ReorderableListControl.DrawEmptyAbsolute drawEmpty, ReorderableListFlags flags = 0) {
    ReorderableListControl.DrawControlFromState(position, adaptor, drawEmpty, flags);
}

/// <inheritdoc cref="DoListField(IReorderableListAdaptor, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(IReorderableListAdaptor adaptor, ReorderableListControl.DrawEmpty drawEmpty, ReorderableListFlags flags) {
    DoListField(adaptor, drawEmpty, flags);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, IReorderableListAdaptor, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, IReorderableListAdaptor adaptor, ReorderableListControl.DrawEmptyAbsolute drawEmpty, ReorderableListFlags flags) {
    DoListFieldAbsolute(position, adaptor, drawEmpty, flags);
}

/// <inheritdoc cref="DoListField(IReorderableListAdaptor, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(IReorderableListAdaptor adaptor, ReorderableListControl.DrawEmpty drawEmpty) {
    DoListField(adaptor, drawEmpty, 0);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, IReorderableListAdaptor, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, IReorderableListAdaptor adaptor, ReorderableListControl.DrawEmptyAbsolute drawEmpty) {
    DoListFieldAbsolute(position, adaptor, drawEmpty, 0);
}

/// <inheritdoc cref="DoListField(IReorderableListAdaptor, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(IReorderableListAdaptor adaptor, ReorderableListFlags flags) {
    DoListField(adaptor, null, flags);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, IReorderableListAdaptor, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, IReorderableListAdaptor adaptor, ReorderableListFlags flags) {
    DoListFieldAbsolute(position, adaptor, null, flags);
}

/// <inheritdoc cref="DoListField(IReorderableListAdaptor, ReorderableListControl.DrawEmpty, ReorderableListFlags)"/>
public static void ListField(IReorderableListAdaptor adaptor) {
    DoListField(adaptor, null, 0);
}

/// <inheritdoc cref="DoListFieldAbsolute(Rect, IReorderableListAdaptor, ReorderableListControl.DrawEmptyAbsolute, ReorderableListFlags)"/>
public static void ListFieldAbsolute(Rect position, IReorderableListAdaptor adaptor) {
    DoListFieldAbsolute(position, adaptor, null, 0);
}

/// <summary>
/// Calculate height of list field for adapted collection.
/// </summary>
/// <param name="adaptor">Reorderable list adaptor.</param>
/// <param name="flags">Optional flags to pass into list field.</param>
/// <returns>
/// Required list height in pixels.
/// </returns>
public static float CalculateListFieldHeight(IReorderableListAdaptor adaptor, ReorderableListFlags flags) {
    // Push/pop the flags on the shared control so that nested list controls are measured correctly.
    var previousFlags = defaultListControl.flags;
    defaultListControl.flags = flags;
    try {
        return defaultListControl.CalculateListHeight(adaptor);
    }
    finally {
        defaultListControl.flags = previousFlags;
    }
}

/// <inheritdoc cref="CalculateListFieldHeight(IReorderableListAdaptor, ReorderableListFlags)"/>
public static float CalculateListFieldHeight(IReorderableListAdaptor adaptor) {
    return CalculateListFieldHeight(adaptor, 0);
}

#endregion

}

}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Threading;
using System.Diagnostics;
using System.Collections;
using System.Collections.Generic;

namespace System.Collections.Concurrent
{
    // Abstract base for a thread-safe dictionary mapping a set of keys (K) to values (V).
    //
    // This flavor of ConcurrentUnifier holds values using weak references. It does not store the keys directly. Instead, values are
    // required to contain the key and expose it via IKeyedItem<K>. This flavor should be used in situations where the keys themselves
    // could store direct or indirect references to the value (thus, preventing the values from being GC'd if the table were to
    // store the keys directly.)
    //
    // Value immortality is guaranteed. Once entered into the dictionary, the value never expires
    // in an observable way as long as values don't have finalizers.
    //
    // To create an actual dictionary, subclass this type and override the protected Factory method
    // to instantiate values (V) for the "Add" case.
    //
    // The key must be of a type that implements IEquatable<K>. The unifier calls IEquatable<K>.Equals()
    // and Object.GetHashCode() on the keys.
    //
    // The value must be a reference type that implements IKeyedItem<K>. The unifier invokes the
    // IKeyedItem<K>.PrepareKey() method (outside the lock) on any value returned by the factory. This gives the value
    // a chance to do any lazy evaluation of the keys while it's safe to do so.
    //
    // Deadlock risks:
    //    - Keys may be tested for equality and asked to compute their hashcode while the unifier
    //      holds its lock. Thus these operations must be written carefully to avoid deadlocks and
    //      reentrancy in to the table.
    //
    //    - Values may get their IKeyedItem<K>.Key property called while the unifier holds its lock.
    //      Values that need to do lazy evaluation to compute their keys should do that in the PrepareKey()
    //      method which the unifier promises to call outside the lock prior to entering the value into the table.
    //
    //    - The Factory method will never be called inside the unifier lock. If two threads race to
    //      enter a value for the same key, the Factory() may get invoked twice for the same key - one
    //      of them will "win" the race and its result entered into the dictionary - other gets thrown away.
    //
    // Notes:
    //    - This class is used to look up types when GetType() or typeof() is invoked.
    //      That means that this class itself cannot do or call anything that does these
    //      things.
    //
    //    - For this reason, it chooses not to mimic the official ConcurrentDictionary class
    //      (I don't even want to risk using delegates.) Even the LowLevel versions of these
    //      general utility classes may not be low-level enough for this class's purpose.
    //
    // Thread safety guarantees:
    //
    //    ConcurrentUnifier is fully thread-safe and requires no
    //    additional locking to be done by callers.
    //
    // Performance characteristics:
    //
    //    ConcurrentUnifier will not block a reader, even while
    //    the table is being written. Only one writer is allowed at a time;
    //    ConcurrentUnifier handles the synchronization that ensures this.
    //
    //    Safety for concurrent readers is ensured as follows:
    //
    //    Each hash bucket is maintained as a stack. Inserts are done under
    //    a lock in one of two ways:
    //
    //    - The entry is filled out completely, then "published" by a
    //      single write to the top of the bucket. This ensures that a reader
    //      will see a valid snapshot of the bucket, once it has read the head.
    //
    //    - An expired WeakReference inside an existing entry is replaced atomically
    //      by a new WeakReference. A reader will either see the old expired WeakReference
    //      (if so, he'll wait for the current lock to be released then do the locked retry)
    //      or the new WeakReference (which is fine for him to see.))
    //
    //    On resize, we allocate an entirely new table, rather than resizing
    //    in place. We fill in the new table completely, under the lock,
    //    then "publish" it with a single write. Any reader that races with
    //    this will either see the old table or the new one; each will contain
    //    the same data.
    //
    internal abstract class ConcurrentUnifierWKeyed<K, V>
        where K : IEquatable<K>
        where V : class, IKeyedItem<K>
    {
        protected ConcurrentUnifierWKeyed()
        {
            _lock = new Lock();
            _container = new Container(this);
        }

        //
        // Retrieve the *unique* value for a given key. If the key was previously not entered into the dictionary,
        // this method invokes the overridable Factory() method to create the new value. The Factory() method is
        // invoked outside of any locks. If two threads race to enter a value for the same key, the Factory()
        // may get invoked twice for the same key - one of them will "win" the race and its result entered into the
        // dictionary - other gets thrown away.
        //
        public V GetOrAdd(K key)
        {
            Debug.Assert(key != null);
            Debug.Assert(!_lock.IsAcquired, "GetOrAdd called while lock already acquired. A possible cause of this is an Equals or GetHashCode method that causes reentrancy in the table.");

            int hashCode = key.GetHashCode();
            V value;
            // Fast path: lock-free read against the current container snapshot.
            bool found = _container.TryGetValue(key, hashCode, out value);

#if DEBUG
            {
                V checkedValue;
                bool checkedFound;
                // In debug builds, always exercise a locked TryGet (this is a good way to detect deadlock/reentrancy through Equals/GetHashCode()).
                using (LockHolder.Hold(_lock))
                {
                    _container.VerifyUnifierConsistency();
                    int h = key.GetHashCode();
                    checkedFound = _container.TryGetValue(key, h, out checkedValue);
                }

                if (found)
                {
                    // Since this DEBUG code is holding a strong reference to "value", state of a key must never go from found to not found,
                    // and only one value may exist per key.
                    Debug.Assert(checkedFound);
                    Debug.Assert(Object.ReferenceEquals(checkedValue, value));
                    GC.KeepAlive(value);
                }
            }
#endif //DEBUG

            if (found)
                return value;

            // Not found: create a candidate value outside the lock. A losing racer's value is simply discarded.
            value = this.Factory(key);

            // This doesn't catch every object that has a finalizer, but the old saying about half a loaf...
            Debug.Assert(!(value is IDisposable),
                "Values placed in this table should not have finalizers. ConcurrentUnifiers guarantee observational immortality only " +
                "in the absence of finalizers. Or to speak more plainly, we can use WeakReferences to guarantee observational immortality " +
                "without paying the cost of storage immortality.");

            if (value == null)
            {
                // There's no point in caching null's in the dictionary as a WeakReference of null will always show up as expired
                // and force a re-add every time. Just return the null value without storing it. This does mean that repeated look ups
                // for this case will be very slow - this generally corresponds to scenarios like looking for a type member that doesn't
                // exist so hopefully, it's better to have awful throughput for such cases rather than polluting the dictionary with
                // "null entries" that have to be special-cased for everyone.
                return null;
            }

            // While still outside the lock, invoke the value's PrepareKey method to give the chance to do any lazy evaluation
            // it needs to produce the key quickly and in a deadlock-free manner once we're inside the lock.
            value.PrepareKey();

            using (LockHolder.Hold(_lock))
            {
                V heyIWasHereFirst;
                // Locked re-check: another thread may have added a value for this key while we were in Factory().
                if (_container.TryGetValue(key, hashCode, out heyIWasHereFirst))
                    return heyIWasHereFirst;
                if (!_container.HasCapacity)
                    _container.Resize(); // This overwrites the _container field.
                _container.Add(key, hashCode, value);
                return value;
            }
        }

        // Subclasses create the value for a key here. Never called while the unifier lock is held.
        protected abstract V Factory(K key);

        // volatile: readers take a lock-free snapshot of the container; Resize() publishes a replacement with a single write.
        private volatile Container _container;
        private readonly Lock _lock;

        private sealed class Container
        {
            public Container(ConcurrentUnifierWKeyed<K, V> owner)
            {
                // Note: This could be done by calling Resize()'s logic but we cannot safely do that as this code path is reached
                // during class construction time and Resize() pulls in enough stuff that we get cyclic cctor warnings from the build.
                _buckets = new int[_initialCapacity];
                for (int i = 0; i < _initialCapacity; i++)
                    _buckets[i] = -1;
                _entries = new Entry[_initialCapacity];
                _nextFreeEntry = 0;
                _owner = owner;
            }

            private Container(ConcurrentUnifierWKeyed<K, V> owner, int[] buckets, Entry[] entries, int nextFreeEntry)
            {
                _buckets = buckets;
                _entries = entries;
                _nextFreeEntry = nextFreeEntry;
                _owner = owner;
            }

            public bool TryGetValue(K key, int hashCode, out V value)
            {
                // Lock acquisition NOT required.
                int bucket = ComputeBucket(hashCode, _buckets.Length);
                // Volatile read of the bucket head pairs with the Volatile.Write in Add() that publishes new entries.
                int i = Volatile.Read(ref _buckets[bucket]);
                while (i != -1)
                {
                    V actualValue;
                    // Compare hash codes first (cheap); only resolve the weak reference and the key on a hash match.
                    if (hashCode == _entries[i]._hashCode && _entries[i]._weakValue.TryGetTarget(out actualValue))
                    {
                        K actualKey = actualValue.Key;
                        if (key.Equals(actualKey))
                        {
                            value = actualValue;
                            return true;
                        }
                    }
                    i = _entries[i]._next;
                }
                value = default(V);
                return false;
            }

            public void Add(K key, int hashCode, V value)
            {
                Debug.Assert(_owner._lock.IsAcquired);

                int bucket = ComputeBucket(hashCode, _buckets.Length);
                int newEntryIdx = _nextFreeEntry;
                // Fill the entry out completely before publishing it, so lock-free readers only ever see a complete entry.
                _entries[newEntryIdx]._weakValue = new WeakReference<V>(value, trackResurrection: false);
                _entries[newEntryIdx]._hashCode = hashCode;
                _entries[newEntryIdx]._next = _buckets[bucket];

                _nextFreeEntry++;

                // The line that atomically adds the new key/value pair. If the thread is killed before this line executes but after
                // we've incremented _nextFreeEntry, this entry is harmlessly leaked until the next resize.
                Volatile.Write(ref _buckets[bucket], newEntryIdx);

                VerifyUnifierConsistency();
            }

            public bool HasCapacity
            {
                get
                {
                    Debug.Assert(_owner._lock.IsAcquired);
                    return _nextFreeEntry != _entries.Length;
                }
            }

            public void Resize()
            {
                Debug.Assert(_owner._lock.IsAcquired);

                // Before we actually grow the size of the table, figure out how much we can recover just by dropping entries with
                // expired weak references.
                int estimatedNumLiveEntries = 0;
                for (int bucket = 0; bucket < _buckets.Length; bucket++)
                {
                    for (int entry = _buckets[bucket]; entry != -1; entry = _entries[entry]._next)
                    {
                        // Check if the weakreference has expired.
                        V value;
                        if (_entries[entry]._weakValue.TryGetTarget(out value))
                            estimatedNumLiveEntries++;
                    }
                }
                double estimatedLivePercentage = ((double)estimatedNumLiveEntries) / ((double)(_entries.Length));
                int newSize;
                if (estimatedLivePercentage < _growThreshold && (_entries.Length - estimatedNumLiveEntries) > _initialCapacity)
                {
                    // Enough dead entries to compact into a same-sized table instead of growing.
                    newSize = _buckets.Length;
                }
                else
                {
                    newSize = HashHelpers.GetPrime(_buckets.Length * 2);
#if DEBUG
                    // Debug-only: grow slowly to stress the resize path more often.
                    newSize = _buckets.Length + 3;
#endif
                    if (newSize <= _nextFreeEntry)
                        throw new OutOfMemoryException();
                }
                Entry[] newEntries = new Entry[newSize];
                int[] newBuckets = new int[newSize];
                for (int i = 0; i < newSize; i++)
                    newBuckets[i] = -1;

                // Note that we walk the bucket chains rather than iterating over _entries. This is because we allow for the possibility
                // of abandoned entries (with undefined contents) if a thread is killed between allocating an entry and linking it onto the
                // bucket chain.
                int newNextFreeEntry = 0;
                for (int bucket = 0; bucket < _buckets.Length; bucket++)
                {
                    for (int entry = _buckets[bucket]; entry != -1; entry = _entries[entry]._next)
                    {
                        // Check if the weakreference has expired. If so, this is where we drop the entry altogether.
                        V value;
                        if (_entries[entry]._weakValue.TryGetTarget(out value))
                        {
                            newEntries[newNextFreeEntry]._weakValue = _entries[entry]._weakValue;
                            newEntries[newNextFreeEntry]._hashCode = _entries[entry]._hashCode;
                            int newBucket = ComputeBucket(newEntries[newNextFreeEntry]._hashCode, newSize);
                            newEntries[newNextFreeEntry]._next = newBuckets[newBucket];
                            newBuckets[newBucket] = newNextFreeEntry;
                            newNextFreeEntry++;
                        }
                    }
                }

                // The assertion is "<=" rather than "==" because we allow an entry to "leak" until the next resize if
                // a thread died between the time between we allocated the entry and the time we link it into the bucket stack.
                // In addition, we don't bother copying entries where the weak reference has expired.
                Debug.Assert(newNextFreeEntry <= _nextFreeEntry);

                // The line that atomically installs the resize. If this thread is killed before this point,
                // the table remains full and the next guy attempting an add will have to redo the resize.
                _owner._container = new Container(_owner, newBuckets, newEntries, newNextFreeEntry);

                _owner._container.VerifyUnifierConsistency();
            }

            private static int ComputeBucket(int hashCode, int numBuckets)
            {
                // Mask off the sign bit so negative hash codes still map to a valid bucket index.
                int bucket = (hashCode & 0x7fffffff) % numBuckets;
                return bucket;
            }

            [Conditional("DEBUG")]
            public void VerifyUnifierConsistency()
            {
#if DEBUG
                // There's a point at which this check becomes gluttonous, even by checked build standards...
                if (_nextFreeEntry >= 5000 && (0 != (_nextFreeEntry % 100)))
                    return;

                Debug.Assert(_owner._lock.IsAcquired);
                Debug.Assert(_nextFreeEntry >= 0 && _nextFreeEntry <= _entries.Length);
                int numEntriesEncountered = 0;
                for (int bucket = 0; bucket < _buckets.Length; bucket++)
                {
                    int walk1 = _buckets[bucket];
                    int walk2 = _buckets[bucket];  // walk2 advances two elements at a time - if walk1 ever meets walk2, we've detected a cycle.
                    while (walk1 != -1)
                    {
                        numEntriesEncountered++;
                        Debug.Assert(walk1 >= 0 && walk1 < _nextFreeEntry);
                        Debug.Assert(walk2 >= -1 && walk2 < _nextFreeEntry);
                        Debug.Assert(_entries[walk1]._weakValue != null);
                        V value;
                        if (_entries[walk1]._weakValue.TryGetTarget(out value))
                        {
                            K key = value.Key;
                            Debug.Assert(key != null);
                            int hashCode = key.GetHashCode();
                            Debug.Assert(hashCode == _entries[walk1]._hashCode);
                        }
                        int storedBucket = ComputeBucket(_entries[walk1]._hashCode, _buckets.Length);
                        Debug.Assert(storedBucket == bucket);
                        walk1 = _entries[walk1]._next;
                        if (walk2 != -1)
                            walk2 = _entries[walk2]._next;
                        if (walk2 != -1)
                            walk2 = _entries[walk2]._next;
                        if (walk1 == walk2 && walk2 != -1)
                            Debug.Assert(false, "Bucket " + bucket + " has a cycle in its linked list.");
                    }
                }
                // The assertion is "<=" rather than "==" because we allow an entry to "leak" until the next resize if
                // a thread died between the time between we allocated the entry and the time we link it into the bucket stack.
                Debug.Assert(numEntriesEncountered <= _nextFreeEntry);
#endif //DEBUG
            }

            // Bucket heads: index into _entries of the top of each bucket's stack, or -1 for empty.
            private readonly int[] _buckets;
            private readonly Entry[] _entries;
            // Next unallocated slot in _entries; only mutated under the owner's lock.
            private int _nextFreeEntry;

            private readonly ConcurrentUnifierWKeyed<K, V> _owner;

            private const int _initialCapacity = 5;
            private const double _growThreshold = 0.75;
        }

        private struct Entry
        {
            public WeakReference<V> _weakValue;  // weakly-held value; the key is obtained from the value via IKeyedItem<K>.Key
            public int _hashCode;                // cached hash of the key, so expired entries can still be rebucketed on resize
            public int _next;                    // index of next entry in this bucket's chain, or -1 for end of chain
        }
    }
}